Mirror of https://github.com/karl0ss/GoGoDownloader.git, synced 2025-04-26 19:49:23 +01:00
Update backend.py
Reformatted and added a link check for each quality. Also added error output for when the program fails to download an episode; the incomplete file is now removed.
This commit is contained in:
parent 2f9ba1c78b
commit c6bad6cc13
src/backend.py: 166 changed lines
@@ -7,6 +7,52 @@ from bs4 import BeautifulSoup
 from dataclasses import dataclass
 from colorama import Fore
 from random import choice
+from requests.exceptions import Timeout
+import time
+
+OK = f"{Fore.RESET}[{Fore.GREEN}+{Fore.RESET}] "
+ERR = f"{Fore.RESET}[{Fore.RED}-{Fore.RESET}] "
+IN = f"{Fore.RESET}[{Fore.LIGHTBLUE_EX}>{Fore.RESET}] "
+
+
+def random_headers():
+    desktop_agents = [
+        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36 Edg/94.0.992.47",
+        'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
+        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.157 Safari/537.36',
+        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36',
+        'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36',
+        'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
+        'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36',
+        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36']
+    return {'User-Agent': choice(desktop_agents),
+            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
+            "Accept-Language": "en-US,en;q=0.5",
+            "Accept-Encoding": "gzip, deflate, br",
+            "Referer": "https://goload.one/",
+            "Connection": "keep-alive"}
+
+
+def get_download_links(episode_link):
+    with req.get(episode_link) as res:
+        soup = BeautifulSoup(res.content, "html.parser")
+        exist = soup.find("h1", {"class": "entry-title"})
+        workinglinkis = episode_link.split('-')
+        if exist is None:
+            # Episode link == 200
+            episode_link = soup.find("li", {"class": "dowloads"})
+            return [workinglinkis[-1], episode_link.a.get("href")]
+        else:
+            # Episode link == 404
+            episode_link = f"{episode_link}-"
+            with req.get(episode_link) as find:
+                soup = BeautifulSoup(find.content, "html.parser")
+                exist = soup.find("h1", {"class": "entry-title"})
+                if exist is None:
+                    episode_link = soup.find("li", {"class": "dowloads"})
+                    return [workinglinkis[-1], episode_link.a.get("href")]
+                else:
+                    return None
 
 
 @dataclass(init=True)
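Note on the relocated helper: get_download_links now lives at module level and returns a two-item list, [episode number, download-page URL], or None when neither URL variant resolves, so later status messages can name the failing episode. A minimal usage sketch, assuming backend.py is importable as backend (the module name and episode URL are assumptions, not part of the commit):

# Usage sketch only; module name and URL below are hypothetical.
from backend import get_download_links

result = get_download_links("https://gogoanime.example/some-show-episode-12")
if result is None:
    print("Episode page not found at either URL variant.")
else:
    # e.g. ["12", "https://goload.one/download?id=..."]
    episode_number, download_page = result
    print(f"Episode {episode_number}: {download_page}")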
@@ -35,28 +81,9 @@ class Download:
         ]
         return episode_links
 
-    def get_download_links(self, episode_link):
-        with req.get(episode_link) as res:
-            soup = BeautifulSoup(res.content, "html.parser")
-            exist = soup.find("h1", {"class": "entry-title"})
-            if exist is None:
-                # Episode link == 200
-                episode_link = soup.find("li", {"class": "dowloads"})
-                return episode_link.a.get("href")
-            else:
-                # Episode link == 404
-                episode_link = f"{episode_link}-"
-                with req.get(episode_link) as find:
-                    soup = BeautifulSoup(find.content, "html.parser")
-                    exist = soup.find("h1", {"class": "entry-title"})
-                    if exist is None:
-                        episode_link = soup.find("li", {"class": "dowloads"})
-                        return episode_link.a.get("href")
-                    else:
-                        return None
-
     def get_download_urls(self, download_link):
         episode_quality = self.episode_quality
+        workingepisode = download_link[0]
         if episode_quality == "FullHDP":
             episode_quality = "1080P - mp4"
         elif episode_quality == "HDP":
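The FullHDP/HDP/SDP chain above (it continues in the next hunk) translates the tool's quality names into the labels shown on the download page. For reference, the same translation written as a lookup table; this dict form is only an illustration of what the if/elif chain encodes, with the HDP mapping inferred from the fallback logic later in the diff:

# Illustrative only; the committed code uses an if/elif chain.
QUALITY_LABELS = {
    "FullHDP": "1080P - mp4",
    "HDP": "720P - mp4",
    "SDP": "360P - mp4",
}

def quality_label(episode_quality: str) -> str:
    # Unknown names fall back to 1080P, matching the corrected else branch.
    return QUALITY_LABELS.get(episode_quality, "1080P - mp4")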
@@ -64,58 +91,78 @@ class Download:
         elif episode_quality == "SDP":
             episode_quality = "360P - mp4"
         else:
-            episode_quality = "1080P"
-        with req.get(download_link) as res:
+            episode_quality = "1080P - mp4"
+        with req.get(download_link[1]) as res:
             soup = BeautifulSoup(res.content, "html.parser")
             link = soup.find("div", {"class": "dowload"}, text=re.compile(episode_quality))
             if link is None:
                 pass
             else:
-                with req.get(link.a.get("href")) as checkingurl:
-                    if checkingurl.status_code == 200:
-                        link = None
+                try:
+                    with req.get(link.a.get("href"), headers=random_headers(), stream=True,
+                                 timeout=3) as workingit:
+                        if workingit.status_code != 200:
+                            link = None
+                except Timeout:
+                    link = None
             if link is None:
-                episode_quality = "720P"
+                if episode_quality == "1080P - mp4":
+                    episode_quality = "FullHDP"
+                    time.sleep(1)
+                    CustomMessage('None', episode_quality, workingepisode).qual_not_found()
+                    episode_quality = "HDP"
+                    time.sleep(1)
+                    CustomMessage('None', episode_quality, workingepisode).use_default_qual()
+                episode_quality = "720P - mp4"
                 link = soup.find("div", {"class": "dowload"}, text=re.compile(episode_quality))
                 if link is None:
                     pass
                 else:
-                    with req.get(link.a.get("href")) as checkingurl:
-                        if checkingurl.status_code == 200:
-                            link = None
+                    try:
+                        with req.get(link.a.get("href"), headers=random_headers(), stream=True,
+                                     timeout=3) as workingit:
+                            if workingit.status_code != 200:
+                                link = None
+                    except Timeout:
+                        link = None
                 if link is None:
-                    episode_quality = "360P"
+                    if episode_quality == "720P - mp4":
+                        episode_quality = "HDP"
+                        time.sleep(1)
+                        CustomMessage('None', episode_quality, workingepisode).qual_not_found()
+                        episode_quality = "SDP"
+                        time.sleep(1)
+                        CustomMessage('None', episode_quality, workingepisode).use_default_qual()
+                    episode_quality = "360P - mp4"
                     link = soup.find("div", {"class": "dowload"}, text=re.compile(episode_quality))
-        CustomMessage('None', self.episode_quality).qual_not_found()
-        self.episode_quality = link.text.split()[1][1:]
-        CustomMessage('None', self.episode_quality).use_default_qual()
-        self.printed = True
-        return [
-            download_link.split("+")[-1],
-            link.a.get("href"),
-        ]
+                else:
+                    pass
+        return [download_link[1].split("+")[-1], link.a.get("href")]
 
-    def random_headers(self):
-        desktop_agents = ["Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36 Edg/94.0.992.47",
-                          'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
-                          'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.157 Safari/537.36',
-                          'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36',
-                          'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36',
-                          'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
-                          'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36',
-                          'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36']
-        return {'User-Agent': choice(desktop_agents), "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
-                "Accept-Language": "en-US,en;q=0.5",
-                "Accept-Encoding": "gzip, deflate, br",
-                "Referer": "https://goload.one/",
-                "Connection": "keep-alive"}
-
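The per-quality link check added above reduces to one pattern: a short streamed GET with a three-second timeout, where a Timeout or a non-200 status marks the link dead and triggers the fall-through to the next quality. The pattern distilled into a standalone helper (the function name is mine; the commit inlines this try/except at both call sites):

import requests as req
from requests.exceptions import Timeout

def link_is_alive(href: str, headers: dict, timeout: int = 3) -> bool:
    # stream=True fetches only the response headers here, so probing a
    # large video link stays cheap.
    try:
        with req.get(href, headers=headers, stream=True, timeout=timeout) as res:
            return res.status_code == 200
    except Timeout:
        return False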
     def download_episodes(self, url):
-        with req.get(url[1], headers=self.random_headers(), stream=True, timeout=10) as workingurl:
+        with req.get(url[1], headers=random_headers(), stream=True, timeout=10,
+                     allow_redirects=True) as workingurl:
             episode_name = "EP." + url[0] + ".mp4"
             file_loc = os.path.join(self.folder, episode_name)
             with open(file_loc, "w+b") as file:
-                shutil.copyfileobj(workingurl.raw, file)
+                shutil.copyfileobj(workingurl.raw, file, 8192)
+
+        size = os.stat(file_loc).st_size
+        count = 0
+        while int(size) < 5 and count < 5:
+            with req.get(url[1], headers=random_headers(), stream=True, timeout=10,
+                         allow_redirects=True) as workingurl:
+                episode_name = "EP." + url[0] + ".mp4"
+                file_loc = os.path.join(self.folder, episode_name)
+                with open(file_loc, "w+b") as file:
+                    shutil.copyfileobj(workingurl.raw, file, 8192)
+                count += 1
+                size = os.stat(file_loc).st_size
+        size = os.stat(file_loc).st_size
+        if int(size) < 5:
+            print("\n")
+            CustomMessage('Could not download episode ' + url[0]).print_error()
+            os.remove(file_loc)
 
 
 @dataclass(init=True)
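download_episodes now validates its own output: it streams the file to disk, re-downloads while the result is an under-5-byte stub (at most five retries), and finally prints an error and deletes the stub if nothing real ever arrived. A condensed sketch of that control flow (the helper name and boolean return are mine; the committed code inlines this in the method):

import os
import shutil
import requests as req

def fetch_with_retries(url: str, file_loc: str, headers: dict, retries: int = 5) -> bool:
    # Stream url to file_loc; retry while the file is a <5-byte stub.
    for _ in range(1 + retries):
        with req.get(url, headers=headers, stream=True, timeout=10,
                     allow_redirects=True) as res:
            with open(file_loc, "w+b") as file:
                shutil.copyfileobj(res.raw, file, 8192)
        if os.stat(file_loc).st_size >= 5:
            return True
    os.remove(file_loc)  # never received a real payload; drop the stub
    return False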
@@ -124,16 +171,15 @@ class CustomMessage(Exception):
 
     message: str = None
     episode_quality: str = None
+    workingepisode: str = None
 
     def print_error(self):
-        print(self.message)
+        print(ERR, self.message, end=' ')
 
     def qual_not_found(self):
         print(
-            f"{Fore.RESET}[{Fore.RED}-{Fore.RESET}] {Fore.LIGHTCYAN_EX}{self.episode_quality}{Fore.RESET} quality not found."
-        )
+            f"{ERR}Episode {self.workingepisode} {Fore.LIGHTCYAN_EX}{self.episode_quality}{Fore.RESET} quality not found.")
 
     def use_default_qual(self):
         print(
-            f"{Fore.RESET}[{Fore.GREEN}+{Fore.RESET}] Using {Fore.LIGHTCYAN_EX}{self.episode_quality}{Fore.RESET} as a default quality."
-        )
+            f"{OK}Trying {Fore.LIGHTCYAN_EX}{self.episode_quality}{Fore.RESET} quality for Episode {self.workingepisode}.")
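With the new workingepisode field, the status lines name the episode they refer to. Illustrative calls (values are examples; the import assumes backend.py is importable as backend), with the uncolored output each produces:

from backend import CustomMessage

CustomMessage('None', 'FullHDP', '4').qual_not_found()
# [-] Episode 4 FullHDP quality not found.
CustomMessage('None', 'HDP', '4').use_default_qual()
# [+] Trying HDP quality for Episode 4.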