fix ddl mediafire v2
aenulrofik authored Oct 9, 2024
1 parent f1932d1 commit c7267df
Showing 1 changed file with 23 additions and 1 deletion.
@@ -231,6 +231,13 @@ def mediafire(url, session=None):
         r"https?:\/\/download\d+\.mediafire\.com\/\S+\/\S+\/\S+", url
     ):
         return final_link[0]
+    def _repair_download(url, session):
+        try:
+            html = HTML(session.get(url).text)
+            if new_link := html.xpath('//a[@id="continue-btn"]/@href'):
+                return mediafire(f"https://mediafire.com/{new_link[0]}")
+        except Exception as e:
+            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
     if session is None:
         session = create_scraper()
         parsed_url = urlparse(url)
@@ -258,7 +265,8 @@ def mediafire(url, session=None):
             session.close()
             raise DirectDownloadLinkException("ERROR: Wrong password.")
     if not (final_link := html.xpath('//a[@aria-label="Download file"]/@href')):
-        session.close()
+        if repair_link := html.xpath("//a[@class='retry']/@href"):
+            return _repair_download(repair_link[0], session)
         raise DirectDownloadLinkException(
             "ERROR: No links found in this page Try Again"
         )
@@ -1092,6 +1100,16 @@ def __get_info(folderkey):
         details["title"] = folder_infos[0]["name"]

     def __scraper(url):
+        session = create_scraper()
+        parsed_url = urlparse(url)
+        url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
+        def __repair_download(url):
+            try:
+                html = HTML(session.get(url).text)
+                if new_link := html.xpath('//a[@id="continue-btn"]/@href'):
+                    return __scraper(f"https://mediafire.com/{new_link[0]}")
+            except:
+                return
         try:
             html = HTML(session.get(url).text)
         except:
@@ -1108,7 +1126,11 @@ def __scraper(url):
         if html.xpath("//div[@class='passwordPrompt']"):
             return
         if final_link := html.xpath('//a[@aria-label="Download file"]/@href'):
+            if final_link[0].startswith("//"):
+                return __scraper(f"https://{final_link[0][2:]}")
             return final_link[0]
+        if repair_link := html.xpath("//a[@class='retry']/@href"):
+            return __repair_download(repair_link[0])

     def __get_content(folderKey, folderPath="", content_type="folders"):
         try:
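
The first two hunks give the single-file mediafire() path a repair step: when the usual "Download file" anchor is missing, the page's retry link is followed and the continue-btn href it points to is fed back into mediafire(). Below is a minimal standalone sketch of that retry-page hop, assuming the same cloudscraper and lxml stack the module already uses; resolve_repair_link and its return convention are illustrative, not part of this commit.

# Sketch only: follow a MediaFire "retry" page to its continue link.
# Assumes cloudscraper + lxml as in the patched module; the helper name
# is hypothetical.
from cloudscraper import create_scraper
from lxml.etree import HTML

def resolve_repair_link(repair_url, session=None):
    session = session or create_scraper()
    html = HTML(session.get(repair_url).text)
    # The repair page exposes the next hop as <a id="continue-btn" href="...">,
    # which the patch feeds back into mediafire() for another resolution pass.
    if new_link := html.xpath('//a[@id="continue-btn"]/@href'):
        return f"https://mediafire.com/{new_link[0]}"
    return None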
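The folder hunks add the same repair hop to __scraper(), plus handling for protocol-relative download hrefs ("//download…"), but they fail soft instead of raising. A rough sketch of that behavior under the same cloudscraper/lxml assumptions; resolve_file_link is a hypothetical name.

# Sketch only: mirrors the folder-path behavior added above.
from cloudscraper import create_scraper
from lxml.etree import HTML

def resolve_file_link(page_url, session=None):
    session = session or create_scraper()
    try:
        html = HTML(session.get(page_url).text)
    except Exception:
        return None
    if final_link := html.xpath('//a[@aria-label="Download file"]/@href'):
        href = final_link[0]
        # Download anchors can be protocol-relative ("//download123.mediafire.com/...");
        # prepend a scheme so the link is directly fetchable.
        return f"https://{href[2:]}" if href.startswith("//") else href
    if repair_link := html.xpath("//a[@class='retry']/@href"):
        try:
            repaired = HTML(session.get(repair_link[0]).text)
            if new_link := repaired.xpath('//a[@id="continue-btn"]/@href'):
                return resolve_file_link(f"https://mediafire.com/{new_link[0]}", session)
        except Exception:
            return None
    return None

Returning None here mirrors the bare except: return in __repair_download above: a single broken file is skipped rather than aborting enumeration of the whole folder, while the single-file path raises DirectDownloadLinkException so the caller sees the error.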
