Merge pull request #802 from rix1337/dev
 v.20.1.6 - Improve NX package name handling
rix1337 authored Jul 27, 2024
2 parents 88666b1 + bae7d22 commit 75a2195
Showing 11 changed files with 235 additions and 297 deletions.
4 changes: 2 additions & 2 deletions feedcrawler/external_sites/feed_search/content_shows.py
@@ -9,7 +9,7 @@
 
 from feedcrawler.providers import shared_state
 from feedcrawler.providers.common_functions import check_is_ignored
-from feedcrawler.providers.myjd_connection import add_decrypt
+from feedcrawler.providers.myjd_connection import add_for_manual_decryption
 from feedcrawler.providers.notifications import notify
 from feedcrawler.providers.sqlite_database import ListDb
 from feedcrawler.providers.url_functions import get_url
@@ -84,7 +84,7 @@ def send_package(self, title, link, language_id, season, episode, site, source,
         else:
             if season and episode:
                 link = link.replace('&_=', '&season=' + str(season) + '&episode=' + str(episode) + '&_=')
-            download = add_decrypt(title, link, self.url)
+            download = add_for_manual_decryption(title, link, self.url)
         if download:
             self.db.store(title, 'added')
             log_entry = release_type + title + ' - [' + site + '] - ' + size + ' - ' + source
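
The rename from `add_decrypt` to `add_for_manual_decryption` is applied at every import and call site touched in this commit. Purely as an illustration of the alternative design (not something the commit does), the old name could have been kept as a deprecated alias next to the new helper. A minimal self-contained sketch, with the helper stubbed out and its signature inferred from the call sites in the diff:

```python
import warnings


def add_for_manual_decryption(title, url, password, replace=False):
    # Stub standing in for the real helper in feedcrawler.providers.myjd_connection,
    # which queues the link in JDownloader for manual (captcha) decryption.
    return True


def add_decrypt(*args, **kwargs):
    """Deprecated alias -- kept only in this sketch, not in the commit."""
    warnings.warn(
        "add_decrypt was renamed to add_for_manual_decryption",
        DeprecationWarning,
        stacklevel=2,
    )
    return add_for_manual_decryption(*args, **kwargs)
```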
4 changes: 2 additions & 2 deletions feedcrawler/external_sites/feed_search/shared.py
@@ -7,7 +7,7 @@
 
 from feedcrawler.providers.common_functions import check_hoster
 from feedcrawler.providers.common_functions import readable_size
-from feedcrawler.providers.myjd_connection import add_decrypt
+from feedcrawler.providers.myjd_connection import add_for_manual_decryption
 from feedcrawler.providers.url_functions import get_redirected_url
 
 
@@ -88,7 +88,7 @@ def standardize_size_value(size):
 def add_decrypt_instead_of_download(key, path, download_links, password, replace=False):
     unused_get_feed_parameter(path)
 
-    if add_decrypt(key.strip(), download_links[0], password, replace):
+    if add_for_manual_decryption(key.strip(), download_links[0], password, replace):
         return True
     else:
         return False
4 changes: 2 additions & 2 deletions feedcrawler/external_sites/web_search/content_shows.py
@@ -19,7 +19,7 @@
 from feedcrawler.providers.common_functions import keep_alphanumeric_with_special_characters
 from feedcrawler.providers.common_functions import simplified_search_term_in_title
 from feedcrawler.providers.config import CrawlerConfig
-from feedcrawler.providers.myjd_connection import add_decrypt
+from feedcrawler.providers.myjd_connection import add_for_manual_decryption
 from feedcrawler.providers.notifications import notify
 from feedcrawler.providers.sqlite_database import ListDb, FeedDb
 from feedcrawler.providers.url_functions import get_url, get_redirected_url
@@ -377,7 +377,7 @@ def download(payload):
             url = source
 
     if url:
-        if add_decrypt(title, url, password):
+        if add_for_manual_decryption(title, url, password):
             if incomplete:
                 db.store(title, 'incomplete')
                 log_entry = '[Suche/Serie/Unvollständig] - ' + title + ' - [' + site + '] - ' + size + ' - ' + source
2 changes: 1 addition & 1 deletion feedcrawler/external_tools/myjd_api.py
@@ -548,7 +548,7 @@ def is_collecting(self):
 
     def rename_package(self, package_id, new_name):
         params = package_id, new_name
-        resp = self.device.action(self.url + "/renamePackage", params)
+        resp = self.device.action("/linkgrabberv2/renamePackage", params=params)
         return resp
 
     def query_packages(self, params=[
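
The fix above does two things: it pins the rename to the linkgrabber endpoint (`/linkgrabberv2/renamePackage`) instead of deriving the path from `self.url`, and it hands the parameters over via the `params` keyword. A small self-contained sketch of the resulting call shape, using a fake device object (the recording class and example values below are assumptions for illustration, not feedcrawler API):

```python
class FakeDevice:
    """Records what action() is asked to do, standing in for a My JDownloader device."""

    def __init__(self):
        self.calls = []

    def action(self, path, params=()):
        self.calls.append((path, params))
        return True


device = FakeDevice()
package_id, new_name = 123456789, "Some.Show.S01E02.German.1080p.WEB.h264-GRP"

# Call shape after the fix: the endpoint is fixed, the parameters are passed by keyword.
device.action("/linkgrabberv2/renamePackage", params=(package_id, new_name))
print(device.calls[0][0])  # /linkgrabberv2/renamePackage
```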
12 changes: 8 additions & 4 deletions feedcrawler/jobs/package_watcher.py
@@ -13,7 +13,7 @@
 from feedcrawler.providers.common_functions import Unbuffered
 from feedcrawler.providers.common_functions import longest_substr
 from feedcrawler.providers.config import CrawlerConfig
-from feedcrawler.providers.myjd_connection import add_decrypt
+from feedcrawler.providers.myjd_connection import add_for_manual_decryption
 from feedcrawler.providers.myjd_connection import get_info
 from feedcrawler.providers.myjd_connection import hoster_check
 from feedcrawler.providers.myjd_connection import jdownloader_start
@@ -32,8 +32,8 @@ def match_package_name(title, package_name):
         return True
     else:
         try:
-            title = title.lower()
-            package_name = package_name.lower()
+            title = title.replace(" ", ".").lower()
+            package_name = package_name.replace(" ", ".").lower()
             pattern = re.compile(r'(\d{3,4}p)')
             match = pattern.search(package_name)
             if match:
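
The visible change to `match_package_name` is the normalization step: spaces in both the expected title and the decrypted package name are replaced with dots before lowercasing, so titles containing spaces compare cleanly against dot-separated release names. A quick illustration of that step in isolation (the example strings are made up; the surrounding matching logic, including the resolution check, is unchanged):

```python
import re


def normalize(name):
    # Same normalization the updated match_package_name applies before comparing:
    # spaces become dots, then everything is lowercased.
    return name.replace(" ", ".").lower()


title = "Some Show S01E02 1080p"
package_name = "Some.Show.S01E02.1080p.WEB.h264-GRP"

print(normalize(title))                                             # some.show.s01e02.1080p
print(normalize(title) in normalize(package_name))                  # True once both use dots
print(re.search(r'(\d{3,4}p)', normalize(package_name)).group(1))   # 1080p
```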
@@ -101,6 +101,8 @@ def watch_packages(shared_state_dict, shared_state_lock):
                 if packages_in_downloader_decrypted:
                     for package in packages_in_downloader_decrypted:
                         if match_package_name(title[0], package['name']):
+                            if not title[0] == package['name']:
+                                rename_package_in_linkgrabber(package['uuid'], title[0])
                             check = hoster_check([package], title[0], [0])
                             remove = check[0]
                             if remove:
@@ -109,6 +111,8 @@ def watch_packages(shared_state_dict, shared_state_lock):
                 if packages_in_linkgrabber_decrypted:
                     for package in packages_in_linkgrabber_decrypted:
                         if match_package_name(title[0], package['name']):
+                            if not title[0] == package['name']:
+                                rename_package_in_linkgrabber(package['uuid'], title[0])
                             hoster_check([package], title[0], [0])
                             episodes = FeedDb('episode_remover').retrieve(title[0])
                             if episodes:
@@ -299,7 +303,7 @@ def watch_packages(shared_state_dict, shared_state_lock):
                             db.delete(title[0])
                             db.store(title[0], 'retried')
                         else:
-                            add_decrypt(package['name'], package['url'], "")
+                            add_for_manual_decryption(package['name'], package['url'], "")
                             remove_from_linkgrabber(package['linkids'],
                                                     [package['uuid']])
                             notify_list.append({"text": "[CAPTCHA zu lösen] - " + title[0]})
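
The two loops over decrypted packages above now apply the same guard: if a package matched to a watched title carries a different name, it is renamed to that title via `rename_package_in_linkgrabber` before the hoster check runs. A condensed sketch of that guard with the helper stubbed out (the real helper is assumed to route through the fixed `/linkgrabberv2/renamePackage` call in `myjd_api.py`; `align_package_name` is a hypothetical name used only here):

```python
def rename_package_in_linkgrabber(package_uuid, new_name):
    # Stub for illustration; the real implementation is assumed to go through
    # the fixed rename_package call shown in myjd_api.py above.
    print(f"renaming package {package_uuid} to {new_name}")
    return True


def align_package_name(title, package):
    # Condensation of the new guard: rename only when the decrypted name
    # differs from the expected title.
    if not title == package['name']:
        rename_package_in_linkgrabber(package['uuid'], title)


align_package_name("Some.Show.S01E02.1080p.WEB.h264-GRP",
                   {"uuid": 987654321, "name": "some_show_s01e02_web"})
```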
[Diff for the remaining 6 changed files not loaded]
