Add handler for Mediafire downloads
parent aa680dadc2
commit 447eb4120a

c3dbdl/c3dbdl.py (125 changes)
@@ -153,8 +153,6 @@ def fetchSongData(entries):
         for link_entry in download_links:
             link = link_entry.get("href")
             description = link_entry.get_text().strip()
-            if "c3universe.com" not in link:
-                continue
             messages.append(f"Found download link: {link} ({description})")
             dl_links.append(
                 {
@@ -252,6 +250,72 @@ def buildDatabase(pages, concurrency):
     return found_songs


+def downloadFile(download_url, download_path, download_filename):
+    attempts = 1
+    p = None
+    try:
+        with requests.get(download_url, stream=True) as r:
+            while attempts <= 3:
+                try:
+                    r.raise_for_status()
+                    break
+                except Exception:
+                    click.echo(
+                        f"Download attempt failed: HTTP {r.status_code}; retrying {attempts}/3"
+                    )
+                    sleep(attempts)
+                    attempts += 1
+            if r is None or r.status_code != 200:
+                if r:
+                    code = r.status_code
+                else:
+                    code = "-1"
+                raise HTTPError(download_url, code, "", None, None)
+
+            if not os.path.exists(download_path):
+                os.makedirs(download_path)
+
+            with open(download_filename, "wb") as f:
+                for chunk in r.iter_content(chunk_size=8192):
+                    f.write(chunk)
+            click.echo(f"Successfully downloaded to {download_filename}")
+    except Exception as e:
+        click.echo(f"Download attempt failed: {e}")
+        return None
+
+def parseC3Universe(dl_link):
+    try:
+        p = requests.get(dl_link)
+        if p.status_code != 200:
+            raise HTTPError(dl_link, p.status_code, "", None, None)
+
+        parsed_html = BeautifulSoup(p.text, "html.parser")
+        download_url = (
+            parsed_html.body.find("div", attrs={"class": "lock-head"})
+            .find("a")
+            .get("href")
+        )
+        return download_url
+    except Exception as e:
+        click.echo(f"Failed parsing or retrieving HTML link: {e}")
+        return None
+
+
+def parseMediafire(dl_link):
+    try:
+        p = requests.get(dl_link)
+        if p.status_code != 200:
+            raise HTTPError(dl_link, p.status_code, "", None, None)
+
+        parsed_html = BeautifulSoup(p.text, "html.parser")
+        download_url = parsed_html.find(
+            "a", attrs={"id": "downloadButton", "rel": "nofollow", "aria-label": "Download file"}
+        ).get("href")
+        return download_url
+    except Exception as e:
+        click.echo(f"Failed parsing or retrieving HTML link: {e}")
+        return None
+
 def downloadSong(destination, filename, entry, dlid, dldesc):
     click.echo(
         f"""> Downloading song "{entry['artist']} - {entry['title']}" by {entry['author']}..."""
@@ -278,21 +342,19 @@ def downloadSong(destination, filename, entry, dlid, dldesc):
         return

     for dl_link in dl_links:
-        try:
-            p = requests.get(dl_link["link"])
-            if p.status_code != 200:
-                raise HTTPError(dl_link["link"], p.status_code, "", None, None)
-
-            parsed_html = BeautifulSoup(p.text, "html.parser")
-            download_url = (
-                parsed_html.body.find("div", attrs={"class": "lock-head"})
-                .find("a")
-                .get("href")
-            )
-        except Exception as e:
-            click.echo(f"Failed parsing or retrieving HTML link: {e}")
+        if 'dl.c3universe.com' in dl_link['link']:
+            download_url = parseC3Universe(dl_link["link"])
+        elif 'www.mediafire.com' in dl_link["link"]:
+            download_url = parseMediafire(dl_link["link"])
+        else:
+            click.echo("Download URL is not valid for CLI download; skipping...")
+            click.echo(f"URL: {dl_link['link']}")
+            continue
+
+        if download_url is None:
+            continue

         print(entry)
         download_filename = filename.format(
             genre=entry["genre"],
             artist=entry["artist"],
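The branch above amounts to a host-based dispatch onto the two new parsers; a condensed sketch follows, where resolve_download_url is a hypothetical name and not part of the commit:

# Illustrative only: mirrors the if/elif above; unsupported hosts yield None so the caller can skip them.
from c3dbdl.c3dbdl import parseC3Universe, parseMediafire

def resolve_download_url(link):
    if "dl.c3universe.com" in link:
        return parseC3Universe(link)
    if "www.mediafire.com" in link:
        return parseMediafire(link)
    return None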
@@ -312,38 +374,7 @@ def downloadSong(destination, filename, entry, dlid, dldesc):
             click.echo(f"File exists at {download_filename}")
             continue

-        attempts = 1
-        p = None
-        try:
-            with requests.get(download_url, stream=True) as r:
-                while attempts <= 3:
-                    try:
-                        r.raise_for_status()
-                        break
-                    except Exception:
-                        click.echo(
-                            f"Download attempt failed: HTTP {r.status_code}; retrying {attempts}/3"
-                        )
-                        sleep(attempts)
-                        attempts += 1
-                if r is None or r.status_code != 200:
-                    if r:
-                        code = r.status_code
-                    else:
-                        code = "-1"
-                    raise HTTPError(download_url, code, "", None, None)
-
-                if not os.path.exists(download_path):
-                    os.makedirs(download_path)
-
-                with open(download_filename, "wb") as f:
-                    for chunk in r.iter_content(chunk_size=8192):
-                        f.write(chunk)
-                click.echo(f"Successfully downloaded to {download_filename}")
-        except Exception as e:
-            click.echo(f"Download attempt failed: {e}")
-            continue
-
+        downloadFile(download_url, download_path, download_filename)

 @click.command(name="build", short_help="Build the local database.")
 @click.option(
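The new downloadFile retries r.raise_for_status() against the same response object; for comparison, here is a sketch of a retry loop that re-issues the GET on each attempt. The helper name, timeout, and backoff are illustrative and not part of the commit:

# Illustrative only: retry the request itself, backing off one extra second per attempt.
import os
import time

import requests

def download_with_retry(url, dest_dir, dest_path, attempts=3):
    for attempt in range(1, attempts + 1):
        try:
            with requests.get(url, stream=True, timeout=60) as r:
                r.raise_for_status()
                os.makedirs(dest_dir, exist_ok=True)
                with open(dest_path, "wb") as f:
                    for chunk in r.iter_content(chunk_size=8192):
                        f.write(chunk)
                return dest_path
        except requests.RequestException as e:
            print(f"Download attempt {attempt}/{attempts} failed: {e}")
            time.sleep(attempt)
    return None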