Mirror of https://github.com/karl0ss/GoGoDownloader.git (synced 2025-04-26 03:29:23 +01:00)

Commit 81b4c63892 (parent 1852b8fe2e): rework download logic
@@ -146,11 +146,19 @@ def gogodownloader(config):
         dl_links = []
         episode_links = gogo.get_links(source)
+        print(f"{OK}Scraping Links")
         for link in episode_links:
             dl_links.append(gogo.get_download_link(link))
 
-        gogo.file_downloader(dl_links)
+        result = gogo.file_downloader(dl_links)
+        if len(result.errors) > 0:
+            while len(result.errors) > 0:
+                print(f"{ERR}{len(result.errors)} links failed retrying.")
+                episode_links = gogo.get_links(source)
+                print(f"{OK}Re-Scraping Links")
+                for link in episode_links:
+                    dl_links.append(gogo.get_download_link(link))
+                result = gogo.file_downloader(dl_links, overwrite_downloads=0)
 
         use_again = input(f"{IN}Do you want to use the app again? (y|n) > ").lower()
         if use_again == "y":
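
The retry loop above keys off the .errors attribute of whatever gogo.file_downloader returns. A minimal sketch of that retry-on-errors pattern, assuming the Downloader used in backend.py is parfive's (so the return value is a parfive Results object whose error entries carry a .url field) and with a bounded attempt count added so a permanently dead link cannot loop forever:

    from parfive import Downloader

    def download_with_retry(urls, path=".", max_attempts=3):
        """Download urls, re-queueing any that error out, for up to max_attempts passes."""
        dl = Downloader(max_conn=4, overwrite=False)
        for url in urls:
            dl.enqueue_file(url, path=path)
        results = dl.download()  # Results: downloaded paths, failures collected in .errors

        attempts = 1
        while results.errors and attempts < max_attempts:
            retry = Downloader(max_conn=4, overwrite=True)  # overwrite any partial files
            for err in results.errors:
                retry.enqueue_file(err.url, path=path)
            results = retry.download()
            attempts += 1
        return results

Unlike the commit, which re-scrapes the whole episode list on each pass, the sketch only re-queues the URLs that failed; either way the loop exits once .errors is empty.
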
backend.py (47 changed lines)
@@ -1,3 +1,4 @@
+import re
 import requests
 import json
 import os
@@ -120,17 +121,42 @@ class gogoanime:
             url,
             cookies=dict(auth=gogoanime.get_gogoanime_auth_cookie(self)),
         )
+        quality_arr = ["1080", "720", "640", "480"]
         soup = BeautifulSoup(page.content, "html.parser")
-        for link in soup.find_all("a", href=True):
-            if self.episode_quality in link.text:
+        try:
+            for link in soup.find_all(
+                "a", href=True, string=re.compile(self.episode_quality)
+            ):
                 return link["href"]
+            else:
+                ep_num = url.rsplit("-", 1)[1]
+                print(
+                    f"{self.episode_quality} not found for ep{ep_num} checking for next best"
+                )
+                for q in quality_arr:
+                    for link in soup.find_all("a", href=True, string=re.compile(q)):
+                        print(f"{q} found.")
+                        return link["href"]
+        except:
+            print("No matching download found")
 
-    def file_downloader(self, file_list: dict):
+    def file_downloader(self, file_list: dict, overwrite_downloads: bool = None):
+        """[summary]
+
+        Args:
+            file_list (dict): [description]
+            overwrite_downloads (bool, optional): [description]. Defaults to None.
+
+        Returns:
+            [type]: [description]
+        """
+        if overwrite_downloads is None:
+            overwrite = self.config["OverwriteDownloads"]
+        else:
+            overwrite = overwrite_downloads
         dl = Downloader(
             max_conn=max_concurrent_downloads(self.config["MaxConcurrentDownloads"]),
-            overwrite=self.config["OverwriteDownloads"],
+            overwrite=overwrite,
             headers=dict(
                 [
                     (
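
The reworked get_download_link asks BeautifulSoup to filter anchors by their text (string=re.compile(...)) instead of looping over every <a> and checking link.text, and it walks down quality_arr when the preferred quality is missing. A standalone sketch of that lookup, with made-up HTML and a hypothetical helper name, showing how the string filter and the fallback interact:

    import re
    from bs4 import BeautifulSoup

    HTML = """
    <a href="https://example.com/dl-480">Download (480P - mp4)</a>
    <a href="https://example.com/dl-720">Download (720P - mp4)</a>
    """

    def pick_download_link(html, preferred, fallbacks=("1080", "720", "640", "480")):
        """Return the first href whose anchor text mentions the preferred quality,
        falling back through the quality list if it is missing."""
        soup = BeautifulSoup(html, "html.parser")

        # string=re.compile(...) makes find_all match against the tag's text content.
        for link in soup.find_all("a", href=True, string=re.compile(preferred)):
            return link["href"]

        print(f"{preferred} not found, checking for next best")
        for quality in fallbacks:
            for link in soup.find_all("a", href=True, string=re.compile(quality)):
                print(f"{quality} found.")
                return link["href"]
        return None

    print(pick_download_link(HTML, "1080"))  # no 1080 anchor, falls back to the 720 link

Returning None when nothing matches is why the caller in file_downloader now has to guard against None entries, as the next hunk shows.
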
@@ -147,10 +173,11 @@ class gogoanime:
         )
 
         for link in file_list:
-            dl.enqueue_file(
-                link,
-                path=f"./{self.title}",
-            )
+            if link is not None:
+                dl.enqueue_file(
+                    link,
+                    path=f"./{self.title}",
+                )
 
         files = dl.download()
         return files
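
file_downloader now skips None entries (what get_download_link yields when no quality matched) and lets callers override the configured overwrite setting, which is what the retry pass does via overwrite_downloads=0. A condensed, self-contained version of that flow, assuming parfive's Downloader and a stand-in config dict (the real code also wraps the connection count in a max_concurrent_downloads helper, omitted here):

    from parfive import Downloader

    config = {"MaxConcurrentDownloads": 4, "OverwriteDownloads": False}  # stand-in config

    def file_downloader(file_list, title="downloads", overwrite_downloads=None):
        # Use the configured default unless the caller overrides it (e.g. the retry pass).
        overwrite = (
            config["OverwriteDownloads"]
            if overwrite_downloads is None
            else bool(overwrite_downloads)
        )
        dl = Downloader(max_conn=config["MaxConcurrentDownloads"], overwrite=overwrite)
        for link in file_list:
            if link is not None:  # get_download_link can return None; skip those entries
                dl.enqueue_file(link, path=f"./{title}")
        return dl.download()  # parfive Results; failed downloads end up in .errors

    results = file_downloader(["https://example.com/ep-1.mp4", None])
    print(f"{len(results.errors)} downloads failed")
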