mirror of https://github.com/karl0ss/GoGoDownloader.git
synced 2025-04-26 11:39:22 +01:00
create new CLI to process users' bookmarks
parent 64acd0abb2
commit 9ea3c6a6ce
@@ -1,72 +1,40 @@
 from backend import *
-import argparse
-import os
-import requests
-from bs4 import BeautifulSoup
 
 
-def get_archives(archive_file):
-
-    if os.path.isfile(archive_file):
-        with open(archive_file, "r") as f:
-            return f.read().split()
-
-    else:
-        return []
-
-
-def find_episodes(url):
-    resp = requests.get(url)
-    soup = BeautifulSoup(resp.text, "html.parser")
-    all_episodes = soup.find("ul", {"id": "episode_page"})
-    all_episodes = int(all_episodes.get_text().split("-")[-1].strip())
-    return all_episodes
-
-
 def main():
-    placeholder = "DUMMY"
     config = config_check()
-    parser = argparse.ArgumentParser()
-    parser.add_argument("url", metavar="url", type=str)
-    parser.add_argument("--archive", metavar="Archive File", type=str)
-    parser.add_argument(
-        "--quality",
-        metavar="download quality",
-        choices={"360", "480", "720", "1080"},
-        nargs="?",
-        const="1080",
-        default="1080",
-        type=str,
-    )
-    args = parser.parse_args()
-
-    name = args.url.split("/")[-1]
-    title = name.replace("-", " ").title().strip()
-    all_episodes = find_episodes(args.url)
-
-    archives = get_archives(args.archive)
     downloader = gogoanime(
-        config, name, args.quality, placeholder, all_episodes, 1, all_episodes, title
+        config,
+        1,
+        config["CLIQuality"],
+        "a",
+        1,
+        1,
+        1,
+        config["CLIDownloadLocation"],
     )
-    episode_links = [link for link in downloader.get_links() if link not in archives]
+    list = downloader.get_show_from_bookmark()
     dl_links = {}
-    for ep in episode_links:
-        ep_num = ep.split("-")[-1]
-        dl_links[downloader.get_download_link(ep)] = (title, ep_num)
-    results = downloader.file_downloader(dl_links)
-    failed = []
-    with open(args.archive, "a+") as f:
-        for ep in episode_links:
-            ep_num = ep.split("-")[-1]
-            if os.path.join(title, f"{title} Episode {ep_num}.mp4") in results:
-                f.write(f"{ep}\n")
-            else:
-                failed.append(ep)
-
-    with open("logs.txt", "w+") as f:
-        for failure in failed:
-            f.write(f"{failed} failed to download\n")
+    for ep in list:
+        print(
+            "{OK}Scraping DL for " + ep["showName"] + " Ep " + str(ep["latestEpisode"])
+        )
+        dl_links[downloader.get_download_link(ep["downloadURL"])] = (
+            ep["showName"],
+            ep["latestEpisode"],
+        )
+    result = downloader.file_downloader(dl_links)
+    if len(result.errors) > 0:
+        while len(result.errors) > 0:
+            print(f"{ERR}{len(result.errors)} links failed retrying.")
+            print(f"{OK}Re-Scraping Links")
+            dl_links.clear()
+            for ep in list:
+                dl_links[downloader.get_download_link(ep["downloadURL"])] = (
+                    ep["showName"],
+                    ep["latestEpisode"],
+                )
+            result = downloader.file_downloader(dl_links, overwrite_downloads=0)
 
 
 if __name__ == "__main__":
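For orientation, the pattern the new main() implements can be sketched in isolation: build a dl_links map from bookmark entries, download, and keep retrying while file_downloader() reports errors. In the sketch below, the method names (get_show_from_bookmark, get_download_link, file_downloader) and the bookmark-entry keys (showName, latestEpisode, downloadURL) are taken from the diff, but the DownloadResult class, its errors attribute, and the downloader argument's behaviour are assumptions standing in for the repo's gogoanime backend class, not a confirmed description of it.

from dataclasses import dataclass, field
from typing import Dict, List, Tuple


@dataclass
class DownloadResult:
    # Assumed shape: the new main() only ever checks len(result.errors).
    errors: List[str] = field(default_factory=list)


def build_dl_links(downloader, bookmarks: List[dict]) -> Dict[str, Tuple[str, int]]:
    # Map each bookmark's resolved download link to a (show, episode) tuple,
    # mirroring how the new main() fills dl_links before downloading.
    dl_links = {}
    for ep in bookmarks:
        dl_links[downloader.get_download_link(ep["downloadURL"])] = (
            ep["showName"],
            ep["latestEpisode"],
        )
    return dl_links


def download_with_retry(downloader, bookmarks: List[dict]) -> DownloadResult:
    # Download once, then loop while any links failed. Note that each retry
    # pass rebuilds links for every bookmark, not just the failed ones,
    # exactly as the new main() does.
    result = downloader.file_downloader(build_dl_links(downloader, bookmarks))
    while len(result.errors) > 0:
        print(f"{len(result.errors)} links failed, retrying.")
        dl_links = build_dl_links(downloader, bookmarks)
        result = downloader.file_downloader(dl_links, overwrite_downloads=0)
    return result

Because the retry loop re-scrapes the full bookmark list, already-completed files are only skipped by virtue of the overwrite_downloads=0 flag passed on retry passes.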