diff --git a/CHANGELOG.md b/CHANGELOG.md index c41029bb95..429c88255e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,21 @@ +## 0.5.25 (08-03-2022) + +#### New Features +- New indexer: Added support for adding shows from Imdb ([3603](https://github.com/pymedusa/Medusa/pull/3603)) + +#### Improvements +- Enhanced test guessit tool ([10357](https://github.com/pymedusa/Medusa/pull/10357)) +- Discord notifier: added ability to override avatar ([10351](https://github.com/pymedusa/Medusa/pull/10351)) +- Purge recommended shows cache after x days ([10352](https://github.com/pymedusa/Medusa/pull/10352)) +- Added a "load more" button to recommended shows ([10380](https://github.com/pymedusa/Medusa/pull/10380)) +- Improve menu layout on mobile ([10386](https://github.com/pymedusa/Medusa/pull/10386)) + +#### Fixes +- Fix saving specific post-processing method ([10350](https://github.com/pymedusa/Medusa/pull/10350)) +- Fix pasing torrent size when using torznab provider that have torrent_size available in the attrs. 
([10365](https://github.com/pymedusa/Medusa/pull/10365)) +- Fix provider MoreThenTv ([10391](https://github.com/pymedusa/Medusa/pull/10391)) +- Fix Manage mass-update: Starting the refresh action ([10377](https://github.com/pymedusa/Medusa/pull/10377)) + ## 0.5.24 (15-02-2022) #### Improvements diff --git a/medusa/__main__.py b/medusa/__main__.py index 4072559835..e3ba7ad89e 100755 --- a/medusa/__main__.py +++ b/medusa/__main__.py @@ -800,6 +800,7 @@ def initialize(self, console_logging=True): check_setting_int(app.CFG, 'Discord', 'discord_notify_onsubtitledownload', 0)) app.DISCORD_WEBHOOK = check_setting_str(app.CFG, 'Discord', 'discord_webhook', '', censor_log='normal') app.DISCORD_TTS = check_setting_bool(app.CFG, 'Discord', 'discord_tts', 0) + app.DISCORD_OVERRIDE_AVATAR = check_setting_bool(app.CFG, 'Discord', 'override_avatar', 0) app.DISCORD_NAME = check_setting_str(app.CFG, 'Discord', 'discord_name', '', censor_log='normal') app.USE_PROWL = bool(check_setting_int(app.CFG, 'Prowl', 'use_prowl', 0)) @@ -1049,6 +1050,7 @@ def initialize(self, console_logging=True): 0, min(23, check_setting_int(app.CFG, 'Recommended', 'recommended_show_update_hour', app.DEFAULT_RECOMMENDED_SHOW_UPDATE_HOUR)) ) app.CACHE_RECOMMENDED_TRAKT_LISTS = check_setting_list(app.CFG, 'Recommended', 'trakt_lists', app.CACHE_RECOMMENDED_TRAKT_LISTS) + app.CACHE_RECOMMENDED_PURGE_AFTER_DAYS = check_setting_int(app.CFG, 'Recommended', 'purge_after_days', 180) # Initialize trakt config path. 
trakt.core.CONFIG_PATH = os.path.join(app.CACHE_DIR, '.pytrakt.json') @@ -1199,6 +1201,7 @@ def initialize(self, console_logging=True): # initialize the recommended shows database recommended_db_con = db.DBConnection('recommended.db') db.upgradeDatabase(recommended_db_con, recommended_db.InitialSchema) + db.sanityCheckDatabase(recommended_db_con, recommended_db.RecommendedSanityCheck) # Performs a vacuum on cache.db logger.debug(u'Performing a vacuum on the CACHE database') @@ -1748,6 +1751,7 @@ def save_config(): new_config['Recommended']['cache_anilist'] = app.CACHE_RECOMMENDED_ANILIST new_config['Recommended']['recommended_show_update_hour'] = int(app.RECOMMENDED_SHOW_UPDATE_HOUR) new_config['Recommended']['trakt_lists'] = app.CACHE_RECOMMENDED_TRAKT_LISTS + new_config['Recommended']['purge_after_days'] = int(app.CACHE_RECOMMENDED_PURGE_AFTER_DAYS) new_config['Blackhole'] = {} new_config['Blackhole']['nzb_dir'] = app.NZB_DIR @@ -1907,6 +1911,7 @@ def save_config(): new_config['Discord']['discord_notify_onsubtitledownload'] = int(app.DISCORD_NOTIFY_ONSUBTITLEDOWNLOAD) new_config['Discord']['discord_webhook'] = app.DISCORD_WEBHOOK new_config['Discord']['discord_tts'] = int(app.DISCORD_TTS) + new_config['Discord']['override_avatar'] = int(app.DISCORD_OVERRIDE_AVATAR) new_config['Discord']['discord_name'] = app.DISCORD_NAME new_config['Prowl'] = {} diff --git a/medusa/app.py b/medusa/app.py index 9d7fb6338e..133a7ff8cf 100644 --- a/medusa/app.py +++ b/medusa/app.py @@ -435,6 +435,7 @@ def __init__(self): self.DISCORD_NAME = 'pymedusa' self.DISCORD_AVATAR_URL = '{base_url}/images/ico/favicon-144.png'.format(base_url=self.BASE_PYMEDUSA_URL) self.DISCORD_TTS = False + self.DISCORD_OVERRIDE_AVATAR = False self.USE_PROWL = False self.PROWL_NOTIFY_ONSNATCH = False @@ -727,6 +728,7 @@ def __init__(self): 'trending', 'popular', 'anticipated', 'collected', 'watched', 'played', 'recommendations', 'newshow', 'newseason' ] + self.CACHE_RECOMMENDED_PURGE_AFTER_DAYS = 180 def 
_init_scheduler(self, app_prop=None, scheduler=None, enabled=None): from medusa.logger.adapters.style import BraceAdapter diff --git a/medusa/classes.py b/medusa/classes.py index 1044a155ec..d179749701 100644 --- a/medusa/classes.py +++ b/medusa/classes.py @@ -23,17 +23,21 @@ from dateutil import parser + from medusa import app, ws from medusa.common import ( MULTI_EP_RESULT, Quality, SEASON_RESULT, ) +from medusa.helper.common import sanitize_filename from medusa.logger.adapters.style import BraceAdapter from medusa.search import SearchType from six import itervalues +from trans import trans + log = BraceAdapter(logging.getLogger(__name__)) log.logger.addHandler(logging.NullHandler()) @@ -365,6 +369,22 @@ def select_series(self, all_series): search_results = [] series_names = [] + def searchterm_in_result(search_term, search_result): + norm_search_term = sanitize_filename(search_term.lower()) + norm_result = sanitize_filename(search_result.lower()) + + if norm_search_term in norm_result: + return True + + # translates national characters into similar sounding latin characters + # For ex. 
Физрук -> Fizruk + search_term_alpha = trans(self.config['searchterm']) + + if search_term_alpha != search_term and search_term_alpha in norm_result: + return True + + return False + # get all available shows if all_series: if 'searchterm' in self.config: @@ -382,8 +402,11 @@ def select_series(self, all_series): if search_term.isdigit(): series_names.append(search_term) + if search_term.startswith('tt'): + series_names.append(search_term) + for name in series_names: - if search_term.lower() in name.lower(): + if searchterm_in_result(search_term, name): if 'firstaired' not in cur_show: default_date = parser.parse('1900-01-01').date() cur_show['firstaired'] = default_date.strftime(dateFormat) diff --git a/medusa/clients/torrent/qbittorrent.py b/medusa/clients/torrent/qbittorrent.py index ea862f4447..42e0f67ccd 100644 --- a/medusa/clients/torrent/qbittorrent.py +++ b/medusa/clients/torrent/qbittorrent.py @@ -416,8 +416,9 @@ def get_status(self, info_hash): # Store destination client_status.destination = torrent['save_path'] - # Store resource - client_status.resource = basename(torrent['content_path']) + if torrent.get('content_path'): + # Store resource + client_status.resource = basename(torrent['content_path']) log.info('Qbittorrent torrent: [{name}] using state: [{state}]', { 'name': client_status.resource, 'state': torrent['state'] diff --git a/medusa/common.py b/medusa/common.py index 77461729f8..5ced1503a6 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -39,7 +39,7 @@ log.logger.addHandler(logging.NullHandler()) INSTANCE_ID = text_type(uuid.uuid1()) -VERSION = '0.5.24' +VERSION = '0.5.25' USER_AGENT = 'Medusa/{version} ({system}; {release}; {instance})'.format( version=VERSION, system=platform.system(), release=platform.release(), diff --git a/medusa/databases/cache_db.py b/medusa/databases/cache_db.py index 37660834ca..00972be837 100644 --- a/medusa/databases/cache_db.py +++ b/medusa/databases/cache_db.py @@ -16,10 +16,14 @@ # Add new migrations at the 
bottom of the list # and subclass the previous migration. class InitialSchema(db.SchemaUpgrade): + """Cache.db initial schema class.""" + def test(self): + """Test db version.""" return self.hasTable('db_version') def execute(self): + """Execute.""" queries = [ ('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);',), ('CREATE TABLE lastSearch (provider TEXT, time NUMERIC);',), @@ -229,3 +233,19 @@ def test(self): def execute(self): self.connection.action('DROP TABLE IF EXISTS scene_exceptions;') self.inc_major_version() + + +class AddSeasonUpdatesTable(RemoveSceneExceptionsTable): # pylint:disable=too-many-ancestors + def test(self): + return self.hasTable('season_updates') + + def execute(self): + self.connection.action( + """CREATE TABLE "season_updates" ( + `season_updates_id` INTEGER, + `indexer` INTEGER NOT NULL, + `series_id` INTEGER NOT NULL, + `season` INTEGER, + `time` INTEGER, + PRIMARY KEY(season_updates_id))""" + ) diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index 84342d1fb9..be3354fda9 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -36,6 +36,7 @@ def check(self): self.fix_show_nfo_lang() self.fix_subtitle_reference() self.clean_null_indexer_mappings() + self.clean_imdb_tt_ids() def clean_null_indexer_mappings(self): log.debug(u'Checking for null indexer mappings') @@ -219,6 +220,13 @@ def fix_subtitles_codes(self): def fix_show_nfo_lang(self): self.connection.action("UPDATE tv_shows SET lang = '' WHERE lang = 0 OR lang = '0';") + def clean_imdb_tt_ids(self): + # Get all records with 'tt' + log.debug(u'Cleaning indexer_mapping table, removing references to same indexer') + self.connection.action('DELETE from indexer_mapping WHERE indexer = mindexer') + log.debug(u'Cleaning indexer_mapping table from tt indexer ids') + self.connection.action("DELETE FROM indexer_mapping where indexer_id like '%tt%' or mindexer_id like '%tt%'") + # ====================== # = Main DB Migrations = diff --git 
a/medusa/databases/recommended_db.py b/medusa/databases/recommended_db.py index 1c17530424..d0d33b887f 100644 --- a/medusa/databases/recommended_db.py +++ b/medusa/databases/recommended_db.py @@ -12,6 +12,19 @@ log.logger.addHandler(logging.NullHandler()) +class RecommendedSanityCheck(db.DBSanityCheck): + """Sanity check class.""" + + def check(self): + """Check functions.""" + self.remove_imdb_tt() + + def remove_imdb_tt(self): + """Remove tt from imdb id's.""" + log.debug(u'Remove shows added with an incorrect imdb id.') + self.connection.action("DELETE FROM shows WHERE source = 10 AND series_id like '%tt%'") + + # Add new migrations at the bottom of the list # and subclass the previous migration. class InitialSchema(db.SchemaUpgrade): diff --git a/medusa/generic_update_queue.py b/medusa/generic_update_queue.py index 1c6b1e6283..dfec035de2 100644 --- a/medusa/generic_update_queue.py +++ b/medusa/generic_update_queue.py @@ -20,7 +20,7 @@ import logging from datetime import date, datetime, timedelta -from medusa import app, ws +from medusa import app, db, ws from medusa.helper.exceptions import CantUpdateRecommendedShowsException from medusa.logger.adapters.style import BraceAdapter from medusa.queues import generic_queue @@ -136,6 +136,23 @@ def __init__(self, update_action): self.started = False self.success = False + def _purge_after_days(self): + log.info('Purge shows that have been added more then {days} ago', {'days': app.CACHE_RECOMMENDED_PURGE_AFTER_DAYS}) + if not app.CACHE_RECOMMENDED_PURGE_AFTER_DAYS: + return + + sql = """ + DELETE FROM shows + WHERE added < datetime('now', '-{days} days') + """.format(days=app.CACHE_RECOMMENDED_PURGE_AFTER_DAYS) + params = [] + + if self.recommended_list != GenericQueueActions.UPDATE_RECOMMENDED_LIST_ALL: + sql += ' AND source = ?' 
+ params = [self.recommended_list] + + db.DBConnection('recommended.db').action(sql, params) + def _get_trakt_shows(self): """Get Trakt shows.""" if self.recommended_list not in ( @@ -213,6 +230,8 @@ def run(self): # Update recommended shows from trakt, imdb and anidb # recommended shows are dogpilled into cache/recommended.dbm + self._purge_after_days() + log.info(u'Started caching recommended shows') self._get_trakt_shows() diff --git a/medusa/helpers/trakt.py b/medusa/helpers/trakt.py index 9bd4b22491..64734192b3 100644 --- a/medusa/helpers/trakt.py +++ b/medusa/helpers/trakt.py @@ -3,6 +3,7 @@ import logging from medusa.helpers import get_title_without_year +from medusa.indexers.imdb.api import ImdbIdentifier from medusa.logger.adapters.style import BraceAdapter from requests.exceptions import RequestException @@ -70,8 +71,11 @@ def create_show_structure(show_obj): 'ids': {} } for valid_trakt_id in ['tvdb_id', 'trakt_id', 'tmdb_id', 'imdb_id']: - if show_obj.externals.get(valid_trakt_id): - show['ids'][valid_trakt_id[:-3]] = show_obj.externals.get(valid_trakt_id) + external = show_obj.externals.get(valid_trakt_id) + if external: + if valid_trakt_id == 'imdb_id': + external = ImdbIdentifier(external).imdb_id + show['ids'][valid_trakt_id[:-3]] = external return show diff --git a/medusa/indexers/api.py b/medusa/indexers/api.py index f7e081f60d..4205b3adb7 100644 --- a/medusa/indexers/api.py +++ b/medusa/indexers/api.py @@ -32,9 +32,12 @@ def indexer(self, *args, **kwargs): def config(self): if self.indexer_id: return indexerConfig[self.indexer_id] - # Sort and put the default language first - init_config['valid_languages'].sort(key=lambda i: '\0' if i == app.INDEXER_DEFAULT_LANGUAGE else i) - return init_config + _ = init_config + if app.INDEXER_DEFAULT_LANGUAGE in _: + del _[_['valid_languages'].index(app.INDEXER_DEFAULT_LANGUAGE)] + _['valid_languages'].sort() + _['valid_languages'].insert(0, app.INDEXER_DEFAULT_LANGUAGE) + return _ @property def name(self): 
diff --git a/medusa/indexers/base.py b/medusa/indexers/base.py index bd5a713359..91d5d22ff1 100644 --- a/medusa/indexers/base.py +++ b/medusa/indexers/base.py @@ -20,13 +20,14 @@ IndexerSeasonNotFound, IndexerSeasonUpdatesNotSupported, IndexerShowNotFound, + IndexerShowUpdatesNotSupported, ) from medusa.indexers.ui import BaseUI, ConsoleUI from medusa.logger.adapters.style import BraceAdapter from medusa.session.core import IndexerSession from medusa.statistics import weights -from six import integer_types, itervalues, string_types, text_type, viewitems +from six import integer_types, itervalues, string_types, viewitems log = BraceAdapter(logging.getLogger(__name__)) @@ -57,24 +58,18 @@ def __init__(self, """Pass these arguments on as args from the subclass.""" self.shows = ShowContainer() # Holds all Show classes self.corrections = {} # Holds show-name to show_id mapping - - self.config = {} - - self.config['debug_enabled'] = debug # show debugging messages - - self.config['custom_ui'] = custom_ui - - self.config['interactive'] = interactive # prompt for correct series? - - self.config['select_first'] = select_first - - self.config['search_all_languages'] = search_all_languages - - self.config['use_zip'] = use_zip - - self.config['dvdorder'] = dvdorder - - self.config['proxy'] = proxy + self.name = None + + self.config = { + 'debug_enabled': debug, + 'custom_ui': custom_ui, + 'interactive': interactive, + 'select_first': select_first, + 'search_all_languages': search_all_languages, + 'use_zip': use_zip, + 'dvdorder': dvdorder, + 'proxy': proxy + } if cache is True: self.config['cache_enabled'] = True @@ -93,6 +88,7 @@ def __init__(self, self.config['banners_enabled'] = banners self.config['image_type'] = image_type self.config['actors_enabled'] = actors + self.config['limit_seasons'] = [] if self.config['debug_enabled']: warnings.warn('The debug argument to tvdbv2_api.__init__ will be removed in the next version. 
' @@ -127,7 +123,46 @@ def __init__(self, else: self.config['language'] = language - def _get_temp_dir(self): # pylint: disable=no-self-use + def get_nested_value(self, value, config): + """ + Get a nested value from a dictionary using a dot separated string. + + For example the config 'plot.summaries[0].text' will return the value for dict['plot']['summaries'][0]. + :param value: Dictionary you want to get a value from. + :param config: Dot separated string. + :return: The value matching the config. + """ + # Remove a level + split_config = config.split('.') + check_key = split_config[0] + + if check_key.endswith(']'): + list_index = int(check_key.split('[')[-1].rstrip(']')) + check_key = check_key.split('[')[0] + check_value = value.get(check_key) + if check_value and list_index < len(check_value): + check_value = check_value[list_index] + else: + check_value = value.get(check_key) + next_keys = '.'.join(split_config[1:]) + + if check_value is None: + return None + + if isinstance(check_value, dict) and next_keys: + return self.get_nested_value(check_value, next_keys) + else: + try: + # Some object have a __dict__ attr. Let's try that. + # It shouldn't match basic types like strings, integers or floats. + parse_dict = check_value.__dict__ + except AttributeError: + return check_value + else: + return self.get_nested_value(parse_dict, next_keys) + + @staticmethod + def _get_temp_dir(): # pylint: disable=no-self-use """Return the [system temp dir]/tvdb_api-u501 (or tvdb_api-myuser).""" if hasattr(os, 'getuid'): uid = 'u{0}'.format(os.getuid()) # pylint: disable=no-member @@ -145,19 +180,21 @@ def _get_show_data(self, sid, language): return None def _get_series(self, series): - """Search for the series name. + """Search indexer for the series name. If a custom_ui UI is configured, it uses this to select the correct series. If not, and interactive == True, ConsoleUI is used, if not BaseUI is used to select the first result. 
:param series: the query for the series name - :return: A list of series mapped to a UI (for example: a BaseUI or custom_ui). + :return: A list of series mapped to a UI (for example: a BaseUi or custom_ui). """ all_series = self.search(series) if not all_series: log.debug('Series result returned zero') - raise IndexerShowNotFound('Show search returned zero results (cannot find show on Indexer)') + raise IndexerShowNotFound( + 'Show search for {series} returned zero results (cannot find show on Indexer)'.format(series=series) + ) if not isinstance(all_series, list): all_series = [all_series] @@ -184,7 +221,7 @@ def _set_show_data(self, sid, key, value): def __repr__(self): """Indexer representation, returning representation of all shows indexed.""" - return text_type(self.shows) + return str(self.shows) def _set_item(self, sid, seas, ep, attrib, value): # pylint: disable=too-many-arguments """Create a new episode, creating Show(), Season() and Episode()s as required. @@ -391,14 +428,14 @@ def _save_images(self, series_id, images): self._save_images_by_type(img_type, series_id, images_by_type) def __getitem__(self, key): - """Handle tvdbv2_instance['seriesname'] calls. The dict index should be the show id.""" + """Handle indexer['seriesname'] calls. The dict index should be the show id.""" if isinstance(key, (integer_types, int)): # Item is integer, treat as show id if key not in self.shows: self._get_show_data(key, self.config['language']) return self.shows[key] - key = text_type(key).lower() + key = str(key).lower() self.config['searchterm'] = key selected_series = self._get_series(key) if isinstance(selected_series, dict): @@ -409,19 +446,14 @@ def __getitem__(self, key): self._set_show_data(show['id'], k, v) return selected_series - def get_last_updated_series(self, from_time, weeks=1, filter_show_list=None): - """Retrieve a list with updated shows. 
+ def get_last_updated_series(self, *args, **kwargs): + """Retrieve a list with updated shows.""" + raise IndexerShowUpdatesNotSupported('Method get_last_updated_series not implemented by this indexer') - :param from_time: epoch timestamp, with the start date/time - :param weeks: number of weeks to get updates for. - :param filter_show_list: Optional list of show objects, to use for filtering the returned list. - """ + def get_last_updated_seasons(self, *args, **kwargs): + """Retrieve a list with updated show seasons.""" raise IndexerSeasonUpdatesNotSupported('Method get_last_updated_series not implemented by this indexer') - def get_episodes_for_season(self, show_id, *args, **kwargs): - self._get_episodes(show_id, *args, **kwargs) - return self.shows[show_id] - class ShowContainer(dict): """Simple dict that holds a series of Show instances.""" @@ -502,7 +534,7 @@ def __bool__(self): def aired_on(self, date): """Search and return a list of episodes with the airdates.""" - ret = self.search(text_type(date), 'firstaired') + ret = self.search(str(date), 'firstaired') if len(ret) == 0: raise IndexerEpisodeNotFound('Could not find any episodes that aired on {0}'.format(date)) return ret @@ -631,13 +663,13 @@ def search(self, term=None, key=None): if term is None: raise TypeError('must supply string to search for (contents)') - term = text_type(term).lower() + term = str(term).lower() for cur_key, cur_value in viewitems(self): - cur_key, cur_value = text_type(cur_key).lower(), text_type(cur_value).lower() + cur_key, cur_value = str(cur_key).lower(), str(cur_value).lower() if key is not None and cur_key != key: # Do not search this key continue - if cur_value.find(text_type(term).lower()) > -1: + if cur_value.find(str(term).lower()) > -1: return self diff --git a/medusa/indexers/config.py b/medusa/indexers/config.py index 2be2d6b05c..ee6443c888 100644 --- a/medusa/indexers/config.py +++ b/medusa/indexers/config.py @@ -6,6 +6,7 @@ from builtins import str from medusa.app 
import app +from medusa.indexers.imdb.api import Imdb from medusa.indexers.tmdb.api import Tmdb from medusa.indexers.tvdbv2.api import TVDBv2 from medusa.indexers.tvmaze.api import TVmaze @@ -31,13 +32,13 @@ INDEXER_TVRAGE = 2 # Must keep INDEXER_TVMAZE = 3 INDEXER_TMDB = 4 -EXTERNAL_IMDB = 10 +# FIXME: Change all references to EXTERNAL_IMDB to INDEXER_IMDB +INDEXER_IMDB = EXTERNAL_IMDB = 10 EXTERNAL_ANIDB = 11 EXTERNAL_TRAKT = 12 EXTERNAL_ANILIST = 13 EXTERNAL_MAPPINGS = { - EXTERNAL_IMDB: 'imdb_id', EXTERNAL_ANIDB: 'anidb_id', INDEXER_TVRAGE: 'tvrage_id', EXTERNAL_TRAKT: 'trakt_id', @@ -45,7 +46,7 @@ } # trakt indexer name vs Medusa indexer -TRAKT_INDEXERS = {'tvdb': INDEXER_TVDBV2, 'tmdb': INDEXER_TMDB, 'imdb': EXTERNAL_IMDB, 'trakt': EXTERNAL_TRAKT} +TRAKT_INDEXERS = {'tvdb': INDEXER_TVDBV2, 'tmdb': INDEXER_TMDB, 'imdb': INDEXER_IMDB, 'trakt': EXTERNAL_TRAKT} STATUS_MAP = { 'Continuing': [ @@ -127,6 +128,24 @@ 'show_url': 'https://www.themoviedb.org/tv/', 'mapped_to': 'tmdb_id', # The attribute to which other indexers can map there tmdb id to 'identifier': 'tmdb', # Also used as key for the custom scenename exceptions. (_get_custom_exceptions()) + }, + INDEXER_IMDB: { + 'enabled': True, + 'id': INDEXER_IMDB, + 'name': 'IMDb', + 'module': Imdb, + 'api_params': { + 'language': 'en', + 'use_zip': True, + 'session': IndexerSession(cache_control={'cache_etags': False}), + }, + 'xem_mapped_to': INDEXER_TVDBV2, + 'icon': 'imdb16.png', + 'scene_loc': '{base_url}/scene_exceptions/scene_exceptions_imdb.json'.format(base_url=app.BASE_PYMEDUSA_URL), + 'show_url': 'http://www.imdb.com/title/tt', + 'base_url': 'https://v2.sg.media-imdb.com', + 'mapped_to': 'imdb_id', # The attribute to which other indexers can map their imdb id to + 'identifier': 'imdb', # Also used as key for the custom scenename exceptions. 
(_get_custom_exceptions()) } } diff --git a/medusa/indexers/exceptions.py b/medusa/indexers/exceptions.py index 0620a3405f..391e4dbb61 100644 --- a/medusa/indexers/exceptions.py +++ b/medusa/indexers/exceptions.py @@ -29,6 +29,10 @@ def __init__(self, message, language): self.language = language +class IndexerShowIncomplete(IndexerException): + """Show found but incomplete in the indexer (incomplete show).""" + + class IndexerSeasonNotFound(IndexerException): """Season cannot be found in the indexer.""" @@ -41,6 +45,10 @@ class IndexerAttributeNotFound(IndexerException): """Raised if an episode does not have the requested attribute (such as a episode name).""" +class IndexerShowUpdatesNotSupported(IndexerException): + """Raised if an episode does not have the requested attribute (such as a episode name).""" + + class IndexerSeasonUpdatesNotSupported(IndexerException): """Raised if an episode does not have the requested attribute (such as a episode name).""" diff --git a/medusa/indexers/imdb/__init__.py b/medusa/indexers/imdb/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/medusa/indexers/imdb/api.py b/medusa/indexers/imdb/api.py new file mode 100644 index 0000000000..815d662c7b --- /dev/null +++ b/medusa/indexers/imdb/api.py @@ -0,0 +1,828 @@ +# coding=utf-8 +"""Imdb indexer api module.""" + +from __future__ import unicode_literals + +import locale +import logging +from collections import OrderedDict, namedtuple +from datetime import datetime +from itertools import chain +from time import time + +from imdbpie import imdbpie + +from medusa import app +from medusa.bs4_parser import BS4Parser +from medusa.indexers.base import (Actor, Actors, BaseIndexer) +from medusa.indexers.exceptions import ( + IndexerError, IndexerShowIncomplete, IndexerShowNotFound, IndexerUnavailable +) +from medusa.logger.adapters.style import BraceAdapter +from medusa.show.show import Show + +from requests.exceptions import RequestException + +from six import 
string_types, text_type + + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + + +class ImdbIdentifier(object): + """Imdb identifier class.""" + + def __init__(self, imdb_id): + """Initialize an identifier object. Can be used to get the full textual id e.a. 'tt3986523'. + + Or the series_id: 3986523 + """ + self._imdb_id = None + self._series_id = None + self.imdb_id = imdb_id + + def _clean(self, imdb_id): + if isinstance(imdb_id, string_types): + return imdb_id.strip('/').split('/')[-1] + + @property + def series_id(self): + """Return series id.""" + return self._series_id + + @series_id.setter + def series_id(self, value): + """Set series id.""" + self._series_id = value + + @property + def imdb_id(self): + """Return imdb id.""" + return self._imdb_id + + @imdb_id.setter + def imdb_id(self, value): + """Set imdb id.""" + if isinstance(value, string_types) and 'tt' in value: + self._imdb_id = self._clean(value) + self.series_id = int(self._imdb_id.split('tt')[-1]) + else: + self._imdb_id = 'tt{0}'.format(text_type(value).zfill(7)) + try: + self.series_id = int(value) + except (TypeError, ValueError): + self.series_id = None + + +class Imdb(BaseIndexer): + """Create easy-to-use interface to name of season/episode name. 
+ + >>> indexer_api = imdb() + >>> indexer_api['Scrubs'][1][24]['episodename'] + u'My Last Day' + """ + + def __init__(self, *args, **kwargs): # pylint: disable=too-many-locals,too-many-arguments + """Imdb constructor.""" + super(Imdb, self).__init__(*args, **kwargs) + + self.indexer = 10 + + # Initiate the imdbpie API + self.imdb_api = imdbpie.Imdb(session=self.config['session']) + + self.config['artwork_prefix'] = '{base_url}{image_size}{file_path}' + + # An api to indexer series/episode object mapping + self.series_map = [ + ('id', 'imdb_id'), + ('id', 'base.id'), + ('seriesname', 'title'), + ('seriesname', 'base.title'), + ('summary', 'plot.outline.text'), + ('firstaired', 'year'), + ('poster', 'base.image.url'), + ('show_url', 'base.id'), + ('firstaired', 'base.seriesStartYear'), + ('rating', 'ratings.rating'), + ('votes', 'ratings.ratingCount'), + ('nextepisode', 'base.nextEpisode'), + ('lastaired', 'base.seriesEndYear'), + # Could not find contentrating in api. + ] + + self.episode_map = [ + ('id', 'id'), + ('episodename', 'title'), + ('firstaired', 'year'), + ('absolute_number', 'absolute_number'), + ] + + def _map_results(self, imdb_response, key_mappings=None, list_separator='|'): + """ + Map results to a a key_mapping dict. + + :param imdb_response: imdb response obect, or a list of response objects. + :type imdb_response: list(object) + :param key_mappings: Dict of imdb attributes, that are mapped to normalized keys. + :type key_mappings: list + :param list_separator: A list separator used to transform lists to a character separator string. + :type list_separator: string. 
+ """ + parsed_response = [] + + if not isinstance(imdb_response, list): + imdb_response = [imdb_response] + + for item in imdb_response: + return_dict = {} + try: + title_type = item.get('type') or item.get('base', {}).get('titleType') + if title_type in ('feature', 'video game', 'TV short', 'TV movie', None): + continue + + return_dict['status'] = 'Ended' + + for key, config in self.series_map: + value = self.get_nested_value(item, config) + if not value: + continue + if key == 'id' and value: + value = ImdbIdentifier(value.rstrip('/')).series_id + if key == 'contentrating': + value = text_type(value) + if key == 'poster': + return_dict['poster_thumb'] = value.split('V1')[0] + 'V1_SY{0}_AL_.jpg'.format('640').split('/')[-1] + if key == 'nextepisode' and value: + return_dict['status'] = 'Continuing' + + return_dict[key] = value + + # Add static value for airs time. + return_dict['airs_time'] = '0:00AM' + + if return_dict.get('firstaired'): + return_dict['status'] = 'Ended' if return_dict.get('lastaired') else 'Continuing' + + except Exception as error: + log.warning('Exception trying to parse attribute: {0}, with exception: {1!r}', item, error) + + parsed_response.append(return_dict) + + return parsed_response if len(parsed_response) != 1 else parsed_response[0] + + def _show_search(self, series): + """ + Use the Imdb API to search for a show. + + :param series: The series name that's searched for as a string + :return: A list of Show objects.series_map + """ + try: + results = self.imdb_api.search_for_title(series) + except LookupError as error: + raise IndexerShowNotFound('Could not get any results searching for {series} using indexer Imdb. Cause: {cause!r}'.format( + series=series, cause=error + )) + except (AttributeError, RequestException) as error: + raise IndexerUnavailable('Could not get any results searching for {series} using indexer Imdb. 
Cause: {cause!r}'.format( + series=series, cause=error + )) + + if results: + return results + else: + return None + + def search(self, series): + """Search imdb.com for the series name. + + :param series: the query for the series name + :return: An ordered dict with the show searched for. In the format of OrderedDict{"series": [list of shows]} + """ + # series = series.encode('utf-8') + log.debug('Searching for show {0}', series) + mapped_results = [] + try: + if series.startswith('tt'): + show_by_id = self._get_show_by_id(series) + # The search by id result, is already mapped. We can just add it to the array with results. + mapped_results.append(show_by_id['series']) + return OrderedDict({'series': mapped_results})['series'] + results = self._show_search(series) + except IndexerShowNotFound: + results = None + + if not results: + return + + mapped_results = self._map_results(results, self.series_map, '|') + + return OrderedDict({'series': mapped_results})['series'] + + def _get_show_by_id(self, imdb_id): # pylint: disable=unused-argument + """Retrieve imdb show information by imdb id, or if no imdb id provided by passed external id. + + :param imdb_id: The shows imdb id + :return: An ordered dict with the show searched for. + """ + results = None + log.debug('Getting all show data for {0}', imdb_id) + try: + results = self.imdb_api.get_title(ImdbIdentifier(imdb_id).imdb_id) + except LookupError as error: + raise IndexerShowNotFound('Could not find show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format( + imdb_id=imdb_id, cause=error + )) + except (AttributeError, RequestException) as error: + raise IndexerUnavailable('Could not find show {imdb_id} using indexer Imdb. 
Cause: {cause!r}'.format( + imdb_id=imdb_id, cause=error + )) + + if not results: + return + + mapped_results = self._map_results(results, self.series_map) + + if not mapped_results: + return + + try: + # Get firstaired + releases = self.imdb_api.get_title_releases(ImdbIdentifier(imdb_id).imdb_id) + except LookupError as error: + raise IndexerShowNotFound('Could not find show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format( + imdb_id=imdb_id, cause=error + )) + except (AttributeError, RequestException) as error: + raise IndexerUnavailable('Could not get title releases for show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format( + imdb_id=imdb_id, cause=error + )) + + if releases.get('releases'): + first_released = sorted([r['date'] for r in releases['releases']])[0] + mapped_results['firstaired'] = first_released + + try: + companies = self.imdb_api.get_title_companies(ImdbIdentifier(imdb_id).imdb_id) + # If there was a release check if it was distributed. + if companies.get('distribution'): + origins = self.imdb_api.get_title_versions(ImdbIdentifier(imdb_id).imdb_id)['origins'][0] + released_in_regions = [ + dist for dist in companies['distribution'] if dist.get('regions') and origins in dist['regions'] + ] + # Used item.get('startYear') because a startYear is not always available. + first_release = sorted(released_in_regions, key=lambda x: x.get('startYear')) + + if first_release: + mapped_results['network'] = first_release[0]['company']['name'] + except (AttributeError, LookupError, RequestException): + log.info('No company data available for {0}, cant get a network', imdb_id) + + return OrderedDict({'series': mapped_results}) + + def _get_episodes(self, imdb_id, detailed=True, aired_season=None, *args, **kwargs): # pylint: disable=unused-argument + """Get all the episodes for a show by imdb id. + + :param imdb_id: Series imdb id. + :return: An ordered dict with the show searched for. 
In the format of OrderedDict{"episode": [list of episodes]} + """ + # Parse episode data + log.debug('Getting all episodes of {0}', imdb_id) + + if aired_season: + aired_season = [aired_season] if not isinstance(aired_season, list) else aired_season + + series_id = imdb_id + imdb_id = ImdbIdentifier(imdb_id).imdb_id + + try: + # results = self.imdb_api.get_title_episodes(imdb_id) + results = self.imdb_api.get_title_episodes(ImdbIdentifier(imdb_id).imdb_id) + except LookupError as error: + raise IndexerShowIncomplete( + 'Show episode search exception, ' + 'could not get any episodes. Exception: {e!r}'.format( + e=error + ) + ) + except (AttributeError, RequestException) as error: + raise IndexerUnavailable('Error connecting to Imdb api. Caused by: {0!r}'.format(error)) + + if not results or not results.get('seasons'): + return False + + absolute_number_counter = 1 + for season in results.get('seasons'): + if aired_season and season.get('season') not in aired_season: + continue + + for episode in season['episodes']: + season_no, episode_no = episode.get('season'), episode.get('episode') + + if season_no is None or episode_no is None: + log.debug('{0}: Found incomplete episode with season: {1!r} and episode: {2!r})', + imdb_id, season_no, episode_no) + continue # Skip to next episode + + if season_no > 0: + episode['absolute_number'] = absolute_number_counter + absolute_number_counter += 1 + + for k, config in self.episode_map: + v = self.get_nested_value(episode, config) + if v is not None: + if k == 'id': + v = ImdbIdentifier(v).series_id + if k == 'firstaired': + v = '{year}-01-01'.format(year=v) + + self._set_item(series_id, season_no, episode_no, k, v) + + if detailed and season.get('season'): + # Enrich episode for the current season. + self._get_episodes_detailed(imdb_id, season['season']) + + # Scrape the synopsys and the episode thumbnail. 
+ self._enrich_episodes(imdb_id, season['season']) + + # Try to calculate the airs day of week + self._calc_airs_day_of_week(imdb_id) + + def _calc_airs_day_of_week(self, imdb_id): + series_id = ImdbIdentifier(imdb_id).series_id + + if self[series_id]: + all_episodes = [] + + for season in self[series_id]: + all_episodes.extend([ + self[series_id][season][ep] + for ep in self[series_id][season] + if self[series_id][season][ep].get('firstaired') + ]) + + # Get the last (max 10 airdates) and try to calculate an airday + time. + last_airdates = sorted(all_episodes, key=lambda x: x['firstaired'], reverse=True)[:10] + weekdays = {} + for episode in last_airdates: + if episode['firstaired']: + day = self._parse_date_with_local(datetime.strptime(episode['firstaired'], '%Y-%m-%d'), '%A', 'C', method='strftime') + weekdays[day] = 1 if day not in weekdays else weekdays[day] + 1 + + airs_day_of_week = sorted(weekdays.keys(), key=lambda x: weekdays[x], reverse=True)[0] if weekdays else None + self._set_show_data(series_id, 'airs_dayofweek', airs_day_of_week) + + @staticmethod + def _parse_date_with_local(date, template, locale_format='C', method='strptime'): + lc = locale.setlocale(locale.LC_TIME) + locale.setlocale(locale.LC_ALL, locale_format) + try: + if method == 'strptime': + return datetime.strptime(date, template) + else: + return date.strftime(template) + except (AttributeError, ValueError): + raise + finally: + locale.setlocale(locale.LC_TIME, lc) + + def _get_episodes_detailed(self, imdb_id, season): + """Enrich the episodes with additional information for a specific season. + + :param imdb_id: imdb id including the `tt`. + :param season: season passed as integer. + """ + try: + results = self.imdb_api.get_title_episodes_detailed(imdb_id=ImdbIdentifier(imdb_id).imdb_id, season=season) + except (AttributeError, LookupError, RequestException) as error: + raise IndexerShowIncomplete( + 'Show episode search exception, ' + 'could not get any episodes. 
Exception: {e!r}'.format( + e=error + ) + ) + + if not results.get('episodes'): + return + + series_id = ImdbIdentifier(imdb_id).series_id + for episode in results.get('episodes'): + try: + if episode['releaseDate']['first']['date']: + first_aired = self._parse_date_with_local( + datetime.strptime( + episode['releaseDate']['first']['date'], '%Y-%m-%d' + ), '%Y-%m-%d', 'C', method='strftime' + ) + self._set_item(series_id, season, episode['episodeNumber'], 'firstaired', first_aired) + except ValueError: + pass + + self._set_item(series_id, season, episode['episodeNumber'], 'rating', episode['rating']) + self._set_item(series_id, season, episode['episodeNumber'], 'votes', episode['ratingCount']) + + def _enrich_episodes(self, imdb_id, season): + """Enrich the episodes with additional information for a specific season. + + For this we're making use of html scraping using beautiful soup. + :param imdb_id: imdb id including the `tt`. + :param season: season passed as integer. + """ + episodes_url = 'http://www.imdb.com/title/{imdb_id}/episodes?season={season}' + episodes = [] + + try: + response = self.config['session'].get(episodes_url.format( + imdb_id=ImdbIdentifier(imdb_id).imdb_id, season=season) + ) + if not response or not response.text: + log.warning('Problem requesting episode information for show {0}, and season {1}.', imdb_id, season) + return + + Episode = namedtuple('Episode', ['episode_number', 'season_number', 'synopsis', 'thumbnail']) + with BS4Parser(response.text, 'html5lib') as html: + for episode in html.find_all('div', class_='list_item'): + try: + episode_number = int(episode.find('meta')['content']) + except AttributeError: + pass + + try: + synopsis = episode.find('div', class_='item_description').get_text(strip=True) + if 'Know what this is about?' 
in synopsis: + synopsis = '' + except AttributeError: + synopsis = '' + + try: + episode_thumbnail = episode.find('img', class_='zero-z-index')['src'] + except (AttributeError, TypeError): + episode_thumbnail = None + + episodes.append(Episode(episode_number=episode_number, season_number=season, + synopsis=synopsis, thumbnail=episode_thumbnail)) + + except Exception as error: + log.exception('Error while trying to enrich imdb series {0}, {1}', ImdbIdentifier(imdb_id).imdb_id, error) + + for episode in episodes: + self._set_item(imdb_id, episode.season_number, episode.episode_number, 'overview', episode.synopsis) + self._set_item(imdb_id, episode.season_number, episode.episode_number, 'filename', episode.thumbnail) + + def _parse_images(self, imdb_id, language='en'): + """Parse Show and Season posters. + + Any key starting with an underscore has been processed (not the raw + data from the XML) + + This interface will be improved in future versions. + Available sources: amazon, custom, getty, paidcustomer, presskit, userupload. + Available types: behind_the_scenes, event, poster, product, production_art, publicity, still_frame + """ + log.debug('Getting show banners for {0}', imdb_id) + + try: + images = self.imdb_api.get_title_images(ImdbIdentifier(imdb_id).imdb_id) + except LookupError as error: + raise IndexerShowNotFound('Could not find show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format( + imdb_id=imdb_id, cause=error + )) + except (AttributeError, RequestException) as error: + raise IndexerUnavailable('Could not get images for show {imdb_id} using indexer Imdb. 
Cause: {cause!r}'.format( + imdb_id=imdb_id, cause=error + )) + + image_mapping = {'poster': 'poster', 'production_art': 'fanart'} # Removed 'still_frame': 'fanart', + thumb_height = 640 + + _images = {} + try: + for image in images.get('images', []): + image_type = image_mapping.get(image.get('type')) + if image_type not in ('poster', 'fanart'): + continue + image_type_thumb = image_type + '_thumb' + if image_type not in _images: + _images[image_type] = {} + _images[image_type + '_thumb'] = {} + + # Store the images for each resolution available + # Always provide a resolution or 'original'. + resolution = '{0}x{1}'.format(image['width'], image['height']) + thumb_width = int((float(image['width']) / image['height']) * thumb_height) + resolution_thumb = '{0}x{1}'.format(thumb_width, thumb_height) + + if resolution not in _images[image_type]: + _images[image_type][resolution] = {} + _images[image_type_thumb][resolution_thumb] = {} + + bid = image['id'].split('/')[-1] + + if image_type in ['season', 'seasonwide']: + if int(image.sub_key) not in _images[image_type][resolution]: + _images[image_type][resolution][int(image.sub_key)] = {} + if bid not in _images[image_type][resolution][int(image.sub_key)]: + _images[image_type][resolution][int(image.sub_key)][bid] = {} + base_path = _images[image_type_thumb][resolution][int(image.sub_key)][bid] + else: + if bid not in _images[image_type][resolution]: + _images[image_type][resolution][bid] = {} + _images[image_type_thumb][resolution_thumb][bid] = {} + base_path = _images[image_type][resolution][bid] + base_path_thumb = _images[image_type_thumb][resolution_thumb][bid] + + base_path['bannertype'] = image_type + base_path['bannertype2'] = resolution + base_path['_bannerpath'] = image.get('url') + base_path['bannerpath'] = image.get('url').split('/')[-1] + base_path['languages'] = image.get('languages') + base_path['source'] = image.get('source') + base_path['id'] = bid + + base_path_thumb['bannertype'] = image_type_thumb + 
base_path_thumb['bannertype2'] = resolution_thumb + base_path_thumb['_bannerpath'] = image['url'].split('V1')[0] + 'V1_SY{0}_AL_.jpg'.format(thumb_height) + base_path_thumb['bannerpath'] = image['url'].split('V1')[0] + 'V1_SY{0}_AL_.jpg'.format(thumb_height).split('/')[-1] + base_path_thumb['id'] = bid + + except Exception as error: + log.warning('Could not parse Poster for show id: {0}, with exception: {1!r}', imdb_id, error) + return + + def _get_poster_thumb(thumbs): + for bid in thumbs.values(): + for image in bid.values(): + return image.get('bannerpath') + + if _images.get('poster_thumb'): + self._set_show_data(imdb_id, 'poster_thumb', _get_poster_thumb(_images.get('poster_thumb'))) + + self._save_images(imdb_id, _images, language=language) + self._set_show_data(imdb_id, '_banners', _images) + + def _save_images(self, series_id, images, language='en'): + """ + Save the highest rated images for the show. + + :param series_id: The series ID + :param images: A nested mapping of image info + images[type][res][id] = image_info_mapping + type: image type such as `banner`, `poster`, etc + res: resolution such as `1024x768`, `original`, etc + id: the image id + """ + def by_aspect_ratio(image): + w, h = image['bannertype2'].split('x') + return int(w) / int(h) + + # Parse Posters and Banners (by aspect ratio) + if images.get('poster'): + # Flatten image_type[res][id].values() into list of values + merged_images = chain.from_iterable( + resolution.values() + for resolution in images['poster'].values() + ) + + # Sort by aspect ratio + sort_images = sorted( + merged_images, + key=by_aspect_ratio + ) + + # Filter out the posters with an aspect ratio between 0.6 and 0.8 + posters = [ + image for image in sort_images if by_aspect_ratio(image) > 0.6 and by_aspect_ratio(image) < 0.8 + and image.get('languages') + and image['languages'] == [language] + ] + banners = [image for image in sort_images if by_aspect_ratio(image) > 3] + + if len(posters): + highest_rated = posters[0] 
+ img_url = highest_rated['_bannerpath'] + log.debug( + u'Selecting poster with the lowest aspect ratio (resolution={resolution})\n' + 'aspect ratio of {aspect_ratio} ', { + 'resolution': highest_rated['bannertype2'], + 'aspect_ratio': by_aspect_ratio(highest_rated) + } + ) + self._set_show_data(series_id, 'poster', img_url) + + if len(banners): + highest_rated = banners[-1] + img_url = highest_rated['_bannerpath'] + log.debug( + u'Selecting poster with the lowest aspect ratio (resolution={resolution})\n' + 'aspect ratio of {aspect_ratio} ', { + 'resolution': highest_rated['bannertype2'], + 'aspect_ratio': by_aspect_ratio(highest_rated) + } + ) + self._set_show_data(series_id, 'banner', img_url) + + if images.get('fanart'): + # Flatten image_type[res][id].values() into list of values + merged_images = chain.from_iterable( + resolution.values() + for resolution in images['fanart'].values() + ) + + # Sort by resolution + sort_images = sorted( + merged_images, + key=by_aspect_ratio, + reverse=True, + ) + + if len(sort_images): + highest_rated = sort_images[0] + img_url = highest_rated['_bannerpath'] + log.debug( + u'Selecting poster with the lowest aspect ratio (resolution={resolution})\n' + 'aspect ratio of {aspect_ratio} ', { + 'resolution': highest_rated['bannertype2'], + 'aspect_ratio': by_aspect_ratio(highest_rated) + } + ) + self._set_show_data(series_id, 'fanart', img_url) + + def _parse_actors(self, imdb_id): + """Get and parse actors using the get_title_credits route. + + Actors are retrieved using t['show name]['_actors']. + + Any key starting with an underscore has been processed (not the raw + data from the indexer) + """ + log.debug('Getting actors for {0}', imdb_id) + + try: + actors = self.imdb_api.get_title_credits(ImdbIdentifier(imdb_id).imdb_id) + except LookupError as error: + raise IndexerShowNotFound('Could not find show {imdb_id} using indexer Imdb. 
Cause: {cause!r}'.format( + imdb_id=imdb_id, cause=error + )) + except (AttributeError, RequestException) as error: + raise IndexerUnavailable('Could not get actors for show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format( + imdb_id=imdb_id, cause=error + )) + + if not actors.get('credits') or not actors['credits'].get('cast'): + return + + cur_actors = Actors() + for order, cur_actor in enumerate(actors['credits']['cast'][:25]): + save_actor = Actor() + save_actor['id'] = cur_actor['id'].split('/')[-2] + save_actor['image'] = cur_actor.get('image', {}).get('url', None) + save_actor['name'] = cur_actor['name'] + save_actor['role'] = cur_actor['characters'][0] if cur_actor.get('characters') else '' + save_actor['sortorder'] = order + cur_actors.append(save_actor) + self._set_show_data(imdb_id, '_actors', cur_actors) + + def _get_show_data(self, imdb_id, language='en'): # pylint: disable=too-many-branches,too-many-statements,too-many-locals + """Get show data by imdb id. + + Take a series ID, gets the epInfo URL and parses the imdb json response into the shows dict in a format: + shows[series_id][season_number][episode_number] + """ + # Parse show information + log.debug('Getting all series data for {0}', imdb_id) + + # Parse show information + series_info = self._get_show_by_id(imdb_id) + + if not series_info: + log.debug('Series result returned zero') + raise IndexerError('Series result returned zero') + + # save all retrieved show information to Show object. + for k, v in series_info['series'].items(): + if v is not None: + self._set_show_data(imdb_id, k, v) + + # Get external ids. + # As the external id's are not part of the shows default response, we need to make an additional call for it. + # Im checking for the external value. to make sure only externals with a value get in. 
+ self._set_show_data(imdb_id, 'externals', {external_id: text_type(getattr(self.shows[imdb_id], external_id, None)) + for external_id in ['tvdb_id', 'imdb_id', 'tvrage_id'] + if getattr(self.shows[imdb_id], external_id, None)}) + + # get episode data + if self.config['episodes_enabled']: + self._get_episodes(imdb_id, aired_season=self.config['limit_seasons']) + + # Parse banners + if self.config['banners_enabled']: + self._parse_images(imdb_id, language=language) + + # Parse actors + if self.config['actors_enabled']: + self._parse_actors(imdb_id) + + return True + + @staticmethod + def _calc_update_interval(date_season_last, season_finished=True): + + minimum_interval = 2 * 24 * 3600 # 2 days + + # Season net yet finished, let's use the minimum update interval of 2 days. + if not season_finished: + return minimum_interval + + # season is finished, or show has ended. So let's calculate using the delta divided by 50. + interval = int((datetime.combine(date_season_last, datetime.min.time()) - datetime.utcfromtimestamp(0)).total_seconds() / 50) + + return max(minimum_interval, interval) + + # Public methods, usable separate from the default api's interface api['show_id'] + def get_last_updated_seasons(self, show_list=None, cache=None, *args, **kwargs): + """Return updated seasons for shows passed, using the from_time. + + :param show_list[int]: The list of shows, where seasons updates are retrieved for. + :param from_time[int]: epoch timestamp, with the start date/time + :param weeks: number of weeks to get updates for. + """ + show_season_updates = {} + + # we don't have a single api call tha we can run to check if an update is required. + # So we'll have to check what's there in the library, and decide based on the last episode's date, if a + # season update is needed. 
+ + for series_id in show_list: + series_obj = Show.find_by_id(app.showList, self.indexer, series_id) + all_episodes_local = series_obj.get_all_episodes() + + total_updates = [] + results = None + # A small api call to get the amount of known seasons + try: + results = self.imdb_api.get_title_episodes(ImdbIdentifier(series_id).imdb_id) + except LookupError as error: + raise IndexerShowIncomplete( + 'Show episode search exception, ' + 'could not get any episodes. Exception: {error!r}'.format( + error=error + ) + ) + except (AttributeError, RequestException) as error: + raise IndexerUnavailable('Error connecting to Imdb api. Caused by: {0!r}'.format(error)) + + if not results or not results.get('seasons'): + continue + + # Get all the seasons + + # Loop through seasons + for season in results['seasons']: + season_number = season.get('season') + + # Imdb api gives back a season without the 'season' key. This season has special episodes. + # Dont know what this is, but skipping it. + if not season_number: + continue + + # Check if the season is already known in our local db. + local_season_episodes = [ep for ep in all_episodes_local if ep.season == season_number] + remote_season_episodes = season['episodes'] + if not local_season_episodes or len(remote_season_episodes) != len(local_season_episodes): + total_updates.append(season_number) + log.debug('{series}: Season {season} seems to be a new season. Adding it.', + {'series': series_obj.name, 'season': season_number}) + continue + + # Per season, get latest episode airdate + sorted_episodes = sorted(local_season_episodes, key=lambda x: x.airdate) + # date_season_start = sorted_episodes[0].airdate + date_season_last = sorted_episodes[-1].airdate + + # Get date for last updated, from the cache object. 
+ + # Calculate update interval for the season + update_interval = self._calc_update_interval( + # date_season_start, + date_season_last, + season_finished=bool([s for s in results['seasons'] if s.get('season') == season_number + 1]) + ) + + last_update = cache.get_last_update_season(self.indexer, series_id, season_number) + if last_update < time() - update_interval: + # This season should be updated. + total_updates.append(season_number) + + # Update last_update for this season. + cache.set_last_update_season(self.indexer, series_id, season_number) + else: + log.debug( + '{series}: Season {season} seems to have been recently updated. Not scheduling a new refresh', + {'series': series_obj.name, 'season': season_number} + ) + + show_season_updates[series_id] = list(set(total_updates)) + + return show_season_updates diff --git a/medusa/indexers/imdb/exceptions.py b/medusa/indexers/imdb/exceptions.py new file mode 100644 index 0000000000..ce7494e1c5 --- /dev/null +++ b/medusa/indexers/imdb/exceptions.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# Author: p0psicles +# +# This file is part of Medusa. +# +# Medusa is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Medusa is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Medusa. If not, see . 
+ +"""Custom exceptions used or raised by imdb_api.""" + +__author__ = 'p0psicles' +__version__ = '1.0' + +__all__ = ['ImdbException', 'ImdbError', 'ImdbUserAbort', 'ImdbShowNotFound', 'ImdbShowIncomplete', + 'ImdbSeasonNotFound', 'ImdbEpisodeNotFound', 'ImdbAttributeNotFound'] + + +class ImdbException(Exception): + """Any exception generated by imdb_api.""" + + +class ImdbError(ImdbException): + """An error with the indexer (Cannot connect, for example).""" + + +class ImdbUserAbort(ImdbException): + """User aborted the interactive selection (via the q command, ^c etc).""" + + +class ImdbShowNotFound(ImdbException): + """Show cannot be found on the indexer (non-existant show).""" + + +class ImdbShowIncomplete(ImdbException): + """Show found but incomplete on the indexer (incomplete show).""" + + +class ImdbSeasonNotFound(ImdbException): + """Season cannot be found on indexer.""" + + +class ImdbEpisodeNotFound(ImdbException): + """Episode cannot be found on the indexer.""" + + +class ImdbAttributeNotFound(ImdbException): + """Raised if an episode does not have the requested attribute (such as a episode name).""" diff --git a/medusa/indexers/tmdb/api.py b/medusa/indexers/tmdb/api.py index 69f5aa4b45..3da02a0edc 100644 --- a/medusa/indexers/tmdb/api.py +++ b/medusa/indexers/tmdb/api.py @@ -17,6 +17,7 @@ IndexerError, IndexerException, IndexerShowNotFound, IndexerUnavailable ) +from medusa.indexers.imdb.api import ImdbIdentifier from medusa.logger.adapters.style import BraceAdapter from requests.exceptions import RequestException @@ -47,49 +48,52 @@ def __init__(self, *args, **kwargs): # pylint: disable=too-many-locals,too-many self.tmdb_configuration = self.tmdb.Configuration() try: self.response = self.tmdb_configuration.info() - except RequestException as e: + except (AttributeError, RequestException) as e: raise IndexerUnavailable('Indexer TMDB is unavailable at this time. 
Cause: {cause}'.format(cause=e)) self.config['artwork_prefix'] = '{base_url}{image_size}{file_path}' # An api to indexer series/episode object mapping - self.series_map = { - 'id': 'id', - 'name': 'seriesname', - 'original_name': 'aliases', - 'overview': 'overview', - 'air_date': 'firstaired', - 'first_air_date': 'firstaired', - 'backdrop_path': 'fanart', - 'url': 'show_url', - 'episode_number': 'episodenumber', - 'season_number': 'seasonnumber', - 'dvd_episode_number': 'dvd_episodenumber', - 'last_air_date': 'airs_dayofweek', - 'last_updated': 'lastupdated', - 'network_id': 'networkid', - 'vote_average': 'rating', - 'poster_path': 'poster', - 'genres': 'genre', - 'type': 'classification', - 'networks': 'network', - 'episode_run_time': 'runtime' - } - - self.episodes_map = { - 'id': 'id', - 'name': 'episodename', - 'overview': 'overview', - 'air_date': 'firstaired', - 'episode_run_time': 'runtime', - 'episode_number': 'episodenumber', - 'season_number': 'seasonnumber', - 'vote_average': 'rating', - 'still_path': 'filename' - } - - @staticmethod - def _map_results(tmdb_response, key_mappings=None, list_separator='|'): + self.series_map = [ + ('id', 'id'), + ('status', 'status'), + ('seriesname', 'name'), + ('aliasnames', 'original_name'), + ('overview', 'overview'), + ('firstaired', 'air_date'), + ('firstaired', 'first_air_date'), + ('show_url', 'url'), + ('episodenumber', 'episode_number'), + ('seasonnumber', 'season_number'), + ('dvd_episodenumber', 'dvd_episode_number'), + ('airs_dayofweek', 'last_air_date'), + ('lastupdated', 'last_updated'), + ('networkid', 'network_id'), + ('rating', 'vote_average'), + ('genre', 'genres'), + ('classification', 'type'), + ('network', 'networks[0].name'), + ('runtime', 'episode_run_time'), + ('seasons', 'seasons'), + ('poster_thumb', 'poster_path'), + ('fanart', 'backdrop_path'), + ('origin_country', 'origin_country'), + ('external_ids', 'external_ids'), + ] + + self.episodes_map = [ + ('id', 'id'), + ('episodename', 'name'), + 
('overview', 'overview'), + ('firstaired', 'air_date'), + ('runtime', 'episode_run_time'), + ('episodenumber', 'episode_number'), + ('seasonnumber', 'season_number'), + ('rating', 'vote_average'), + ('filename', 'still_path'), + ] + + def _map_results(self, tmdb_response, key_mappings=None, list_separator='|'): """Map results to a a key_mapping dict. :type tmdb_response: object @@ -107,7 +111,10 @@ def week_day(input_date): for item in tmdb_response: return_dict = {} try: - for key, value in viewitems(item): + + for key, config in key_mappings: + value = self.get_nested_value(item, config) + if value is None or value == []: continue @@ -117,23 +124,16 @@ def week_day(input_date): value = list_separator.join(text_type(v) for v in value) # Process genres - if key == 'genres': + if key == 'genre': value = list_separator.join(item['name'] for item in value) - if key == 'networks': - value = value[0].get('name') if value else '' - - if key == 'last_air_date': + if key == 'airs_dayofweek': value = week_day(value) - if key == 'episode_run_time': + if key == 'runtime': # Using the longest episode runtime if there are multiple. value = max(value) if isinstance(value, list) else '' - # Try to map the key - if key in key_mappings: - key = key_mappings[key] - # Set value to key return_dict[key] = value @@ -161,12 +161,12 @@ def _show_search(self, show, request_language='en'): results = [] while page <= last: search_result = self.tmdb.Search().tv(query=show, - language=request_language, + language='request_language', page=page) last = search_result.get('total_pages', 0) results += search_result.get('results') page += 1 - except RequestException as error: + except (AttributeError, RequestException) as error: raise IndexerUnavailable('Show search failed using indexer TMDB. 
Cause: {cause}'.format(cause=error)) if not results: @@ -214,7 +214,7 @@ def get_show_country_codes(self, tmdb_id): :param tmdb_id: The show's tmdb id :return: A list with the show's country codes """ - show_info = self._get_show_by_id(tmdb_id)['series'] + show_info = self._get_show_by_id(tmdb_id, extra_info=['origin_country'])['series'] if show_info and show_info.get('origin_country'): return show_info['origin_country'].split('|') @@ -238,7 +238,7 @@ def _get_show_by_id(self, tmdb_id, request_language='en', extra_info=None): language='{0}'.format(request_language), append_to_response=extra_info ) - except RequestException as error: + except (AttributeError, RequestException) as error: raise IndexerUnavailable('Show info retrieval failed using indexer TMDB. Cause: {cause!r}'.format( cause=error )) @@ -276,7 +276,7 @@ def _get_episodes(self, tmdb_id, specials=False, aired_season=None): # pylint: try: season_info = self.tmdb.TV_Seasons(tmdb_id, season).info(language=self.config['language']) results += season_info['episodes'] - except RequestException as error: + except (AttributeError, RequestException) as error: raise IndexerException( 'Could not get episodes for series {series} using indexer TMDB. Cause: {cause}'.format( series=tmdb_id, cause=error @@ -347,7 +347,15 @@ def _parse_images(self, tmdb_id): This interface will be improved in future versions. 
""" - key_mapping = {'file_path': 'bannerpath', 'vote_count': 'ratingcount', 'vote_average': 'rating', 'id': 'id'} + key_mapping = [ + ('bannerpath', 'file_path'), + ('ratingcount', 'vote_count'), + ('rating', 'vote_average'), + ('id', 'id'), + ('width', 'width'), + ('height', 'height'), + ('aspect_ratio', 'aspect_ratio'), + ] image_sizes = {'fanart': 'backdrop_sizes', 'poster': 'poster_sizes'} typecasts = {'rating': float, 'ratingcount': int} @@ -359,7 +367,7 @@ def _parse_images(self, tmdb_id): try: images = self.tmdb.TV(tmdb_id).images(params=params) - except RequestException as error: + except (AttributeError, RequestException) as error: raise IndexerUnavailable('Error trying to get images. Cause: {cause}'.format(cause=error)) bid = images['id'] @@ -448,7 +456,7 @@ def _parse_actors(self, tmdb_id): # TMDB also support passing language here as a param. try: credits = self.tmdb.TV(tmdb_id).credits(language=self.config['language']) # pylint: disable=W0622 - except RequestException as error: + except (AttributeError, RequestException) as error: raise IndexerException('Could not get actors. Cause: {cause}'.format(cause=error)) if not credits or not credits.get('cast'): @@ -525,11 +533,13 @@ def _get_show_data(self, tmdb_id, language='en'): # Get external ids. 
external_ids = series_info['series'].get('external_ids', {}) + if 'imdb_id' in external_ids: + external_ids['imdb_id'] = ImdbIdentifier(external_ids['imdb_id']).series_id self._set_show_data(tmdb_id, 'externals', external_ids) # get episode data if self.config['episodes_enabled']: - self._get_episodes(tmdb_id, specials=False, aired_season=None) + self._get_episodes(tmdb_id, specials=False, aired_season=self.config['limit_seasons']) # Parse banners if self.config['banners_enabled']: @@ -550,7 +560,6 @@ def _get_series_season_updates(self, tmdb_id, start_date=None, end_date=None): results = [] page = 1 total_pages = 1 - try: while page <= total_pages: # Requesting for the changes on a specific showid, will result in json with changes per season. @@ -561,7 +570,7 @@ def _get_series_season_updates(self, tmdb_id, start_date=None, end_date=None): results += [season['value']['season_number']] total_pages = updates.get('total_pages', 0) page += 1 - except RequestException as error: + except (AttributeError, RequestException) as error: raise IndexerException('Could not get latest series season updates for series {series}. Cause: {cause}'.format( series=tmdb_id, cause=error )) @@ -582,7 +591,7 @@ def _get_all_updates(self, start_date=None, end_date=None): results += [_.get('id') for _ in updates.get('results')] total_pages = updates.get('total_pages') page += 1 - except RequestException as error: + except (AttributeError, RequestException) as error: raise IndexerException('Could not get latest updates. Cause: {cause}'.format( cause=error )) @@ -621,7 +630,7 @@ def get_last_updated_series(self, from_time, weeks=1, filter_show_list=None): return list(total_updates) # Public methods, usable separate from the default api's interface api['show_id'] - def get_last_updated_seasons(self, show_list, from_time, weeks=1): + def get_last_updated_seasons(self, show_list, from_time=None, weeks=1, *args, **kwargs): """Retrieve a list with updated shows. 
:param show_list: The list of shows, where seasons updates are retrieved for. @@ -657,19 +666,26 @@ def get_id_by_external(self, **kwargs): :returns: A dict with externals, including the tvmaze id. """ try: - wanted_externals = ['tvdb_id', 'imdb_id', 'tvrage_id', 'imdb_id'] + wanted_externals = ['tvdb_id', 'imdb_id', 'tvrage_id'] for external_id in wanted_externals: if kwargs.get(external_id): - result = self.tmdb.Find(kwargs.get(external_id)).info(**{'external_source': external_id}) + external_id_value = kwargs.get(external_id) + if external_id == 'imdb_id': + external_id_value = ImdbIdentifier(external_id_value).imdb_id + result = self.tmdb.Find(external_id_value).info(**{'external_source': external_id}) if result.get('tv_results') and result['tv_results'][0]: # Get the external id's for the passed shows id. externals = self.tmdb.TV(result['tv_results'][0]['id']).external_ids() - externals = {tmdb_external_id: external_value - for tmdb_external_id, external_value - in viewitems(externals) - if external_value and tmdb_external_id in wanted_externals} + externals = { + tmdb_external_id: external_value + for tmdb_external_id, external_value + in viewitems(externals) + if external_value and tmdb_external_id in wanted_externals + } externals['tmdb_id'] = result['tv_results'][0]['id'] + if 'imdb_id' in externals: + externals['imdb_id'] = ImdbIdentifier(externals['imdb_id']).series_id return externals return {} - except RequestException as error: + except (AttributeError, RequestException) as error: raise IndexerException("Could not get external id's. Cause: {cause}".format(cause=error)) diff --git a/medusa/indexers/tmdb/exceptions.py b/medusa/indexers/tmdb/exceptions.py index b17df3d1cb..b30b130e95 100644 --- a/medusa/indexers/tmdb/exceptions.py +++ b/medusa/indexers/tmdb/exceptions.py @@ -16,61 +16,43 @@ # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . 
-"""Custom exceptions used or raised by tvdbv2_api.""" +"""Custom exceptions used or raised by tmdb api.""" from __future__ import unicode_literals __author__ = 'p0psicles' __version__ = '1.0' -__all__ = ['tvdbv2_error', 'tvdbv2_userabort', 'tvdbv2_shownotfound', 'tvdbv2_showincomplete', - 'tvdbv2_seasonnotfound', 'tvdbv2_episodenotfound', 'tvdbv2_attributenotfound'] +__all__ = ['TmdbException', 'TmdbError', 'TmdbUserAbort', 'TmdbShowNotFound', 'TmdbShowIncomplete', + 'TmdbSeasonNotFound', 'TmdbEpisodeNotFound', 'TmdbAttributeNotFound'] -class tvdbv2_exception(Exception): - """Any exception generated by tvdbv2_api - """ - pass +class TmdbException(Exception): + """Any exception generated by tvdbv2_api.""" -class tvdbv2_error(tvdbv2_exception): - """An error with thetvdb.com (Cannot connect, for example) - """ - pass +class TmdbError(TmdbException): + """An error with thetvdb.com (Cannot connect, for example).""" -class tvdbv2_userabort(tvdbv2_exception): - """User aborted the interactive selection (via - the q command, ^c etc) - """ - pass +class TmdbUserAbort(TmdbException): + """User aborted the interactive selection (via the q command, ^c etc).""" -class tvdbv2_shownotfound(tvdbv2_exception): - """Show cannot be found on thetvdb.com (non-existant show) - """ - pass +class TmdbShowNotFound(TmdbException): + """Show cannot be found on thetvdb.com (non-existant show).""" -class tvdbv2_showincomplete(tvdbv2_exception): - """Show found but incomplete on thetvdb.com (incomplete show) - """ - pass +class TmdbShowIncomplete(TmdbException): + """Show found but incomplete on thetvdb.com (incomplete show).""" -class tvdbv2_seasonnotfound(tvdbv2_exception): - """Season cannot be found on thetvdb.com - """ - pass +class TmdbSeasonNotFound(TmdbException): + """Season cannot be found on thetvdb.com.""" -class tvdbv2_episodenotfound(tvdbv2_exception): - """Episode cannot be found on thetvdb.com - """ - pass +class TmdbEpisodeNotFound(TmdbException): + """Episode cannot be found 
on thetvdb.com.""" -class tvdbv2_attributenotfound(tvdbv2_exception): - """Raised if an episode does not have the requested - attribute (such as a episode name) - """ - pass +class TmdbAttributeNotFound(TmdbException): + """Raised if an episode does not have the requested attribute (such as a episode name).""" diff --git a/medusa/indexers/tvdbv2/api.py b/medusa/indexers/tvdbv2/api.py index 043fc2e72c..80f5b1d152 100644 --- a/medusa/indexers/tvdbv2/api.py +++ b/medusa/indexers/tvdbv2/api.py @@ -11,16 +11,17 @@ from medusa.helper.metadata import needs_metadata from medusa.indexers.base import (Actor, Actors, BaseIndexer) from medusa.indexers.exceptions import ( - IndexerAuthFailed, IndexerError, IndexerShowNotFound, - IndexerShowNotFoundInLanguage, IndexerUnavailable + IndexerAuthFailed, IndexerError, + IndexerShowNotFound, IndexerShowNotFoundInLanguage, IndexerUnavailable ) +from medusa.indexers.imdb.api import ImdbIdentifier from medusa.indexers.tvdbv2.fallback import PlexFallback from medusa.logger.adapters.style import BraceAdapter from medusa.show.show import Show from requests.compat import urljoin -from six import string_types, text_type, viewitems +from six import string_types, viewitems from tvdbapiv2 import ApiClient, EpisodesApi, SearchApi, SeriesApi, UpdatesApi from tvdbapiv2.exceptions import ApiException @@ -128,8 +129,8 @@ def _map_results(self, tvdb_response, key_mapping=None, list_separator='|'): def _show_search(self, show, request_language='en'): """Use the pytvdbv2 API to search for a show. - @param show: The show name that's searched for as a string - @return: A list of Show objects. + :param show: The show name that's searched for as a string + :return: A list of Show objects. 
""" try: results = self.config['session'].search_api.search_series_get(name=show, accept_language=request_language) @@ -148,6 +149,7 @@ def _show_search(self, show, request_language='en'): return results + # Tvdb implementation @PlexFallback def search(self, series): """Search tvdbv2.com for the series name. @@ -340,7 +342,7 @@ def _parse_episodes(self, tvdb_id, episode_data): if self.config['dvdorder'] and not flag_dvd_numbering: log.warning( 'No DVD order available for episode (season: {0}, episode: {1}). Skipping this episode. ' - 'If you want to have this episode visible, please change it on the TheTVDB site, ' + 'If you want to have this episode visible, please change it on the TheTvdb site, ' 'or consider disabling DVD order for the show: {2}({3})', dvd_seas_no or seas_no, dvd_ep_no or ep_no, self.shows[tvdb_id]['seriesname'], tvdb_id @@ -378,12 +380,10 @@ def _parse_episodes(self, tvdb_id, episode_data): self._set_item(tvdb_id, seas_no, ep_no, k, v) @PlexFallback - def _parse_images(self, sid): - """Parse images XML. + def _parse_images(self, tvdb_id): + """Fetch and parse images from api. - From http://thetvdb.com/api/[APIKEY]/series/[SERIES ID]/banners.xml images are retrieved using t['show name]['_banners'], for example: - >>> indexer_api = Tvdb(images = True) >>> indexer_api['scrubs']['_banners'].keys() ['fanart', 'poster', 'series', 'season', 'seasonwide'] @@ -394,10 +394,8 @@ def _parse_images(self, sid): >>> t['scrubs']['_banners']['seasonwide'][4]['680x1000']['35308']['_bannerpath'] u'http://thetvdb.com/banners/posters/76156-4-2.jpg' >>> - Any key starting with an underscore has been processed (not the raw data from the XML) - This interface will be improved in future versions. 
""" key_mapping = {'file_name': 'bannerpath', 'language_id': 'language', 'key_type': 'bannertype', @@ -406,16 +404,16 @@ def _parse_images(self, sid): search_for_image_type = self.config['image_type'] - log.debug('Getting show banners for {0}', sid) + log.debug('Getting show banners for {0}', tvdb_id) _images = {} # Let's get the different types of images available for this series try: series_images_count = self.config['session'].series_api.series_id_images_get( - sid, accept_language=self.config['language'] + tvdb_id, accept_language=self.config['language'] ) except ApiException as error: - log.info('Could not get image count for show ID: {0} with reason: {1}', sid, error.reason) + log.info('Could not get image count for show ID: {0} with reason: {1}', tvdb_id, error.reason) return for image_type, image_count in viewitems(self._map_results(series_images_count)): @@ -429,12 +427,12 @@ def _parse_images(self, sid): try: images = self.config['session'].series_api.series_id_images_query_get( - sid, key_type=image_type, accept_language=self.config['language'] + tvdb_id, key_type=image_type, accept_language=self.config['language'] ) except ApiException as error: log.debug( - 'Could not parse {image} for show ID: {sid}, with exception: {reason}', - {'image': image_type, 'sid': sid, 'reason': error.reason} + 'Could not parse {image} for show ID: {tvdb_id}, with exception: {reason}', + {'image': image_type, 'tvdb_id': tvdb_id, 'reason': error.reason} ) continue @@ -476,16 +474,14 @@ def _parse_images(self, sid): base_path[k] = v - self._save_images(sid, _images) - self._set_show_data(sid, '_banners', _images) + self._save_images(tvdb_id, _images) + self._set_show_data(tvdb_id, '_banners', _images) @PlexFallback - def _parse_actors(self, sid): - """Parser actors XML. + def _parse_actors(self, tvdb_id): + """Fetch and parse actors. 
- From http://thetvdb.com/api/[APIKEY]/series/[SERIES ID]/actors.xml Actors are retrieved using t['show name]['_actors'], for example: - >>> indexer_api = Tvdb(actors = True) >>> actors = indexer_api['scrubs']['_actors'] >>> type(actors) @@ -500,16 +496,15 @@ def _parse_actors(self, sid): u'Zach Braff' >>> actors[0]['image'] u'http://thetvdb.com/banners/actors/43640.jpg' - Any key starting with an underscore has been processed (not the raw data from the XML) """ - log.debug('Getting actors for {0}', sid) + log.debug('Getting actors for {0}', tvdb_id) try: - actors = self.config['session'].series_api.series_id_actors_get(sid) + actors = self.config['session'].series_api.series_id_actors_get(tvdb_id) except ApiException as error: - log.info('Could not get actors for show ID: {0} with reason: {1}', sid, error.reason) + log.info('Could not get actors for show ID: {0} with reason: {1}', tvdb_id, error.reason) return if not actors or not actors.data: @@ -525,10 +520,10 @@ def _parse_actors(self, sid): new_actor['role'] = cur_actor.role new_actor['sortorder'] = 0 cur_actors.append(new_actor) - self._set_show_data(sid, '_actors', cur_actors) + self._set_show_data(tvdb_id, '_actors', cur_actors) - def _get_show_data(self, sid, language): - """Parse TheTVDB json response. + def _get_show_data(self, tvdb_id, language): + """Get the show data using tvdb id.. 
Takes a series ID, gets the epInfo URL and parses the TheTVDB json response into the shows dict in layout: @@ -547,10 +542,10 @@ def _get_show_data(self, sid, language): get_show_in_language = self.config['language'] # Parse show information - log.debug('Getting all series data for {0}', sid) + log.debug('Getting all series data for {0}', tvdb_id) # Parse show information - series_info = self._get_show_by_id(sid, request_language=get_show_in_language) + series_info = self._get_show_by_id(tvdb_id, request_language=get_show_in_language) if not series_info: log.debug('Series result returned zero') @@ -561,22 +556,22 @@ def _get_show_data(self, sid, language): if v is not None: if v and k in ['banner', 'fanart', 'poster']: v = self.config['artwork_prefix'].format(image=v) - self._set_show_data(sid, k, v) + self._set_show_data(tvdb_id, k, v) # Create the externals structure - self._set_show_data(sid, 'externals', {'imdb_id': text_type(getattr(self[sid], 'imdb_id', ''))}) + self._set_show_data(tvdb_id, 'externals', {'imdb_id': ImdbIdentifier(getattr(self[tvdb_id], 'imdb_id', None)).series_id}) # get episode data if self.config['episodes_enabled']: - self._get_episodes(sid, specials=False, aired_season=None) + self._get_episodes(tvdb_id, specials=False, aired_season=self.config['limit_seasons']) # Parse banners if self.config['banners_enabled']: - self._parse_images(sid) + self._parse_images(tvdb_id) # Parse actors if self.config['actors_enabled']: - self._parse_actors(sid) + self._parse_actors(tvdb_id) return True @@ -623,7 +618,7 @@ def get_last_updated_series(self, from_time, weeks=1, filter_show_list=None): return total_updates # Public methods, usable separate from the default api's interface api['show_id'] - def get_last_updated_seasons(self, show_list, from_time, weeks=1): + def get_last_updated_seasons(self, show_list, from_time=None, weeks=1, *args, **kwargs): """Return updated seasons for shows passed, using the from_time. 
:param show_list[int]: The list of shows, where seasons updates are retrieved for. diff --git a/medusa/indexers/tvdbv2/exceptions.py b/medusa/indexers/tvdbv2/exceptions.py index 29d6531738..a43ffe8a3e 100644 --- a/medusa/indexers/tvdbv2/exceptions.py +++ b/medusa/indexers/tvdbv2/exceptions.py @@ -22,37 +22,37 @@ __author__ = 'p0psicles' __version__ = '1.0' -__all__ = ['Tvdb2Error', 'Tvdb2UserAbort', 'Tvdb2ShowNotFound', 'Tvdb2ShowIncomplete', - 'Tvdb2SeasonNotFound', 'Tvdb2EpisodeNotFound', 'Tvdb2AttributeNotFound'] +__all__ = ['TvdbError', 'TvdbUserAbort', 'TvdbShowNotFound', 'TvdbShowIncomplete', + 'TvdbSeasonNotFound', 'TvdbEpisodeNotFound', 'TvdbAttributeNotFound'] -class Tvdb2Exception(Exception): - """Any exception generated by Tvdb2Api.""" +class TvdbException(Exception): + """Any exception generated by TvdbApi.""" -class Tvdb2Error(Tvdb2Exception): +class TvdbError(TvdbException): """An error with thetvdb.com (Cannot connect, for example).""" -class Tvdb2UserAbort(Tvdb2Exception): +class TvdbUserAbort(TvdbException): """User aborted the interactive selection (via the q command, ^c etc).""" -class Tvdb2ShowNotFound(Tvdb2Exception): +class TvdbShowNotFound(TvdbException): """Show cannot be found on thetvdb.com (non-existant show).""" -class Tvdb2ShowIncomplete(Tvdb2Exception): +class TvdbShowIncomplete(TvdbException): """Show found but incomplete on thetvdb.com (incomplete show).""" -class Tvdb2SeasonNotFound(Tvdb2Exception): +class TvdbSeasonNotFound(TvdbException): """Season cannot be found on thetvdb.com.""" -class Tvdb2EpisodeNotFound(Tvdb2Exception): +class TvdbEpisodeNotFound(TvdbException): """Episode cannot be found on thetvdb.com.""" -class Tvdb2AttributeNotFound(Tvdb2Exception): +class TvdbAttributeNotFound(TvdbException): """Raised if an episode does not have the requested attribute (such as a episode name).""" diff --git a/medusa/indexers/tvmaze/api.py b/medusa/indexers/tvmaze/api.py index 86e65437b8..7277cff5af 100644 --- 
a/medusa/indexers/tvmaze/api.py +++ b/medusa/indexers/tvmaze/api.py @@ -1,4 +1,5 @@ # coding=utf-8 +"""Tvmaze indexer api module.""" from __future__ import unicode_literals @@ -11,8 +12,9 @@ IndexerError, IndexerException, IndexerShowNotFound, - IndexerUnavailable + IndexerUnavailable, ) +from medusa.indexers.imdb.api import ImdbIdentifier from medusa.logger.adapters.style import BraceAdapter from pytvmaze import TVMaze @@ -33,6 +35,7 @@ class TVmaze(BaseIndexer): """ def __init__(self, *args, **kwargs): # pylint: disable=too-many-locals,too-many-arguments + """Tvmaze api constructor.""" super(TVmaze, self).__init__(*args, **kwargs) # List of language from http://thetvmaze.com/api/0629B785CE550C8D/languages.xml @@ -55,22 +58,32 @@ def __init__(self, *args, **kwargs): # pylint: disable=too-many-locals,too-many self.config['artwork_prefix'] = '{base_url}{image_size}{file_path}' # An api to indexer series/episode object mapping - self.series_map = { - 'id': 'id', - 'maze_id': 'id', - 'name': 'seriesname', - 'summary': 'overview', - 'premiered': 'firstaired', - 'image': 'fanart', - 'url': 'show_url', - 'genres': 'genre', - 'epnum': 'absolute_number', - 'title': 'episodename', - 'airdate': 'firstaired', - 'screencap': 'filename', - 'episode_number': 'episodenumber', - 'season_number': 'seasonnumber', - } + self.series_map = [ + ('id', 'id'), + ('id', 'maze_id'), + ('rating', 'rating.average'), + ('status', 'status'), + ('seriesname', 'name'), + ('overview', 'summary'), + ('firstaired', 'premiered'), + ('poster_thumb', 'image'), + ('show_url', 'url'), + ('genre', 'genres'), + ('absolute_number', 'epnum'), + ('episodename', 'title'), + ('firstaired', 'airdate'), + ('filename', 'screencap'), + ('episodenumber', 'episode_number'), + ('seasonnumber', 'season_number'), + ('airs_time', 'schedule.time'), + ('airs_dayofweek', 'schedule.days[0]'), + ('network', 'network.name'), + ('code', 'network.code'), + ('timezone', 'network.timezone'), + ('tvrage_id', 'externals.tvrage'), + 
('tvdb_id', 'externals.thetvdb'), + ('imdb_id', 'externals.imdb'), + ] def _map_results(self, tvmaze_response, key_mappings=None, list_separator='|'): """ @@ -96,45 +109,20 @@ def _map_results(self, tvmaze_response, key_mappings=None, list_separator='|'): for item in tvmaze_response: return_dict = {} try: - for key, value in viewitems(item.__dict__): - if value is None or value == []: + + for key, config in self.series_map: + value = self.get_nested_value(item.__dict__, config) + if not value: continue - # These keys have more complex dictionaries, let's map these manually - if key in ['schedule', 'network', 'image', 'externals', 'rating']: - if key == 'schedule': - return_dict['airs_time'] = value.get('time') or '0:00AM' - return_dict['airs_dayofweek'] = value.get('days')[0] if value.get('days') else None - if key == 'network': - return_dict['network'] = value.name - return_dict['code'] = value.code - return_dict['timezone'] = value.timezone - if key == 'image': - if value.get('medium'): - return_dict['image_medium'] = value.get('medium') - return_dict['image_original'] = value.get('original') - return_dict['poster'] = value.get('medium') - if key == 'externals': - return_dict['tvrage_id'] = value.get('tvrage') - return_dict['tvdb_id'] = value.get('thetvdb') - return_dict['imdb_id'] = value.get('imdb') - if key == 'rating': - return_dict['rating'] = value.get('average') \ - if isinstance(value, dict) else value - else: - # Do some value sanitizing - if isinstance(value, list): - if all(isinstance(x, (string_types, integer_types)) for x in value): - value = list_separator.join(text_type(v) for v in value) - - # Try to map the key - if key in key_mappings: - key = key_mappings[key] - - # Set value to key - return_dict[key] = text_type(value) if isinstance(value, (float, integer_types)) else value - - # For episodes + # Do some value sanitizing + if isinstance(value, list): + if all(isinstance(x, (string_types, integer_types)) for x in value): + value = 
list_separator.join(text_type(v) for v in value) + + return_dict[key] = value + + # For special episodes if hasattr(item, 'season_number') and getattr(item, 'episode_number') is None: return_dict['episodenumber'] = text_type(index_special_episodes) return_dict['seasonnumber'] = 0 @@ -233,7 +221,7 @@ def _get_show_by_id(self, tvmaze_id, request_language='en'): # pylint: disable= mapped_results = self._map_results(results, self.series_map) return OrderedDict({'series': mapped_results}) - def _get_episodes(self, tvmaze_id, specials=False, aired_season=None): # pylint: disable=unused-argument + def _get_episodes(self, tvmaze_id, specials=False, *args, **kwargs): # pylint: disable=unused-argument """ Get all the episodes for a show by tvmaze id. @@ -300,17 +288,14 @@ def _parse_images(self, tvmaze_id): """Parse Show and Season posters. images are retrieved using t['show name]['_banners'], for example: - >>> indexer_api = TVMaze(images = True) >>> indexer_api['scrubs']['_banners'].keys() ['fanart', 'poster', 'series', 'season'] >>> t['scrubs']['_banners']['poster']['680x1000']['35308']['_bannerpath'] u'http://thetvmaze.com/banners/posters/76156-2.jpg' >>> - Any key starting with an underscore has been processed (not the raw data from the XML) - This interface will be improved in future versions. """ log.debug('Getting show banners for {0}', tvmaze_id) @@ -397,11 +382,9 @@ def _parse_season_images(self, tvmaze_id): return _images def _parse_actors(self, tvmaze_id): - """Parsers actors XML, from - http://thetvmaze.com/api/[APIKEY]/series/[SERIES ID]/actors.xml + """Parsers actors XML, from http://thetvmaze.com/api/[APIKEY]/series/[SERIES ID]/actors.xml. 
Actors are retrieved using t['show name]['_actors'], for example: - >>> indexer_api = TVMaze(actors = True) >>> actors = indexer_api['scrubs']['_actors'] >>> type(actors) @@ -426,8 +409,8 @@ def _parse_actors(self, tvmaze_id): except CastNotFound: log.debug('Actors result returned zero') return - except (AttributeError, BaseError) as e: - log.warning('Getting actors failed. Cause: {0}', e) + except (AttributeError, BaseError) as error: + log.warning('Getting actors failed. Cause: {0}', error) return cur_actors = Actors() @@ -484,7 +467,7 @@ def _get_show_data(self, tvmaze_id, language='en'): # get episode data if self.config['episodes_enabled']: - self._get_episodes(tvmaze_id, specials=False, aired_season=None) + self._get_episodes(tvmaze_id, specials=False) # Parse banners if self.config['banners_enabled']: @@ -503,10 +486,10 @@ def _get_all_updates(self, start_date=None, end_date=None): updates = self.tvmaze_api.show_updates() except (ShowIndexError, UpdateNotFound): return results - except (AttributeError, BaseError) as e: + except (AttributeError, BaseError) as error: # Tvmaze api depends on .status_code in.., but does not catch request exceptions. # Therefor the AttributeError. - log.warning('Getting show updates failed. Cause: {0}', e) + log.warning('Getting show updates failed. Cause: {0}', error) return results if getattr(updates, 'updates', None): @@ -517,7 +500,7 @@ def _get_all_updates(self, start_date=None, end_date=None): return results # Public methods, usable separate from the default api's interface api['show_id'] - def get_last_updated_series(self, from_time, weeks=1, filter_show_list=None): + def get_last_updated_series(self, from_time, weeks=1, filter_show_list=None, *args, **kwargs): """Retrieve a list with updated shows. 
:param from_time: epoch timestamp, with the start date/time @@ -549,19 +532,24 @@ def get_id_by_external(self, **kwargs): for external_id in itervalues(mapping): if kwargs.get(external_id): try: - result = self.tvmaze_api.get_show(**{external_id: kwargs.get(external_id)}) + external_id_value = kwargs.get(external_id) + if external_id == 'imdb_id': + external_id_value = ImdbIdentifier(external_id_value).imdb_id + result = self.tvmaze_api.get_show(**{external_id: external_id_value}) if result: externals = {mapping[tvmaze_external_id]: external_value for tvmaze_external_id, external_value in viewitems(result.externals) if external_value and mapping.get(tvmaze_external_id)} externals['tvmaze_id'] = result.maze_id + if 'imdb_id' in externals: + externals['imdb_id'] = ImdbIdentifier(externals['imdb_id']).series_id return externals except ShowNotFound: log.debug('Could not get tvmaze externals using external key {0} and id {1}', external_id, kwargs.get(external_id)) continue - except (AttributeError, BaseError) as e: - log.warning('Could not get tvmaze externals. Cause: {0}', e) + except (AttributeError, BaseError) as error: + log.warning('Could not get tvmaze externals. Cause: {0}', error) continue return {} diff --git a/medusa/indexers/tvmaze/exceptions.py b/medusa/indexers/tvmaze/exceptions.py index b17df3d1cb..f0fa0c16bc 100644 --- a/medusa/indexers/tvmaze/exceptions.py +++ b/medusa/indexers/tvmaze/exceptions.py @@ -16,61 +16,43 @@ # You should have received a copy of the GNU General Public License # along with Medusa. If not, see . 
-"""Custom exceptions used or raised by tvdbv2_api.""" +"""Custom exceptions used or raised by tvmaze api.""" from __future__ import unicode_literals __author__ = 'p0psicles' __version__ = '1.0' -__all__ = ['tvdbv2_error', 'tvdbv2_userabort', 'tvdbv2_shownotfound', 'tvdbv2_showincomplete', - 'tvdbv2_seasonnotfound', 'tvdbv2_episodenotfound', 'tvdbv2_attributenotfound'] +__all__ = ['TvmazeException', 'TvmazeError', 'TvmazeUserAbort', 'TvmazeShowNotFound', 'TvmazeShowIncomplete', + 'TvmazeSeasonNotFound', 'TvmazeEpisodeNotFound', 'TvmazeAttributeNotFound'] -class tvdbv2_exception(Exception): - """Any exception generated by tvdbv2_api - """ - pass +class TvmazeException(Exception): + """Any exception generated by tvmaze_api.""" -class tvdbv2_error(tvdbv2_exception): - """An error with thetvdb.com (Cannot connect, for example) - """ - pass +class TvmazeError(TvmazeException): + """An error with thetvdb.com (Cannot connect, for example).""" -class tvdbv2_userabort(tvdbv2_exception): - """User aborted the interactive selection (via - the q command, ^c etc) - """ - pass +class TvmazeUserAbort(TvmazeException): + """User aborted the interactive selection (via the q command, ^c etc).""" -class tvdbv2_shownotfound(tvdbv2_exception): - """Show cannot be found on thetvdb.com (non-existant show) - """ - pass +class TvmazeShowNotFound(TvmazeException): + """Show cannot be found on thetvdb.com (non-existant show).""" -class tvdbv2_showincomplete(tvdbv2_exception): - """Show found but incomplete on thetvdb.com (incomplete show) - """ - pass +class TvmazeShowIncomplete(TvmazeException): + """Show found but incomplete on thetvdb.com (incomplete show).""" -class tvdbv2_seasonnotfound(tvdbv2_exception): - """Season cannot be found on thetvdb.com - """ - pass +class TvmazeSeasonNotFound(TvmazeException): + """Season cannot be found on thetvdb.com.""" -class tvdbv2_episodenotfound(tvdbv2_exception): - """Episode cannot be found on thetvdb.com - """ - pass +class 
TvmazeEpisodeNotFound(TvmazeException): + """Episode cannot be found on thetvdb.com.""" -class tvdbv2_attributenotfound(tvdbv2_exception): - """Raised if an episode does not have the requested - attribute (such as a episode name) - """ - pass +class TvmazeAttributeNotFound(TvmazeException): + """Raised if an episode does not have the requested attribute (such as a episode name).""" diff --git a/medusa/indexers/ui.py b/medusa/indexers/ui.py index dd32b41a57..494baf3771 100644 --- a/medusa/indexers/ui.py +++ b/medusa/indexers/ui.py @@ -78,7 +78,7 @@ def select_series(self, all_series): while True: # return breaks this loop try: print("Enter choice (first number, return for default, 'all', ? for help):") - ans = eval(input()) + ans = input() except KeyboardInterrupt: raise IndexerUserAbort('User aborted (^c keyboard interupt)') except EOFError: diff --git a/medusa/metadata/generic.py b/medusa/metadata/generic.py index f98b160cee..27fb1dd28a 100644 --- a/medusa/metadata/generic.py +++ b/medusa/metadata/generic.py @@ -17,6 +17,7 @@ from medusa.indexers.config import INDEXER_TMDB, INDEXER_TVDBV2, INDEXER_TVMAZE from medusa.indexers.exceptions import (IndexerEpisodeNotFound, IndexerException, IndexerSeasonNotFound, IndexerShowNotFound) +from medusa.indexers.imdb.api import ImdbIdentifier from medusa.indexers.utils import indexer_name_mapping from medusa.logger.adapters.style import BraceAdapter @@ -984,7 +985,7 @@ def retrieveShowMetadata(self, folder): if uniqueid is not None and uniqueid.get('type') and indexer_name_mapping.get(uniqueid.get('type')): indexer = indexer_name_mapping.get(uniqueid.get('type')) - indexer_id = int(uniqueid.text) + indexer_id = int(ImdbIdentifier(uniqueid.text).series_id) else: # For legacy nfo's if showXML.findtext('tvdbid'): diff --git a/medusa/metadata/kodi_12plus.py b/medusa/metadata/kodi_12plus.py index 5dd4174c06..b6d5cf112e 100644 --- a/medusa/metadata/kodi_12plus.py +++ b/medusa/metadata/kodi_12plus.py @@ -15,6 +15,7 @@ from 
medusa.indexers.api import indexerApi from medusa.indexers.config import INDEXER_TVDBV2 from medusa.indexers.exceptions import IndexerEpisodeNotFound, IndexerSeasonNotFound +from medusa.indexers.imdb.api import ImdbIdentifier from medusa.indexers.tvdbv2.api import API_BASE_TVDB from medusa.logger.adapters.style import BraceAdapter from medusa.metadata import generic @@ -122,11 +123,11 @@ def _show_data(self, series_obj): rating.set('max', '10') rating.set('default', 'true') value = etree.SubElement(rating, 'value') - value.text = text_type(str(my_show['rating'])) + value.text = str(my_show['rating']) votes = etree.SubElement(rating, 'votes') - votes.text = '' + votes.text = str(my_show.get('votes', '')) - if series_obj.imdb_id and series_obj.imdb_rating and series_obj.imdb_votes: + if series_obj.identifier.indexer.slug != 'imdb' and series_obj.imdb_id and series_obj.imdb_rating and series_obj.imdb_votes: rating_imdb = etree.SubElement(ratings, 'rating') rating_imdb.set('name', 'imdb') rating_imdb.set('max', '10') @@ -165,7 +166,10 @@ def _show_data(self, series_obj): uniqueid = etree.SubElement(tv_node, 'uniqueid') uniqueid.set('default', 'true') uniqueid.set('type', series_obj.identifier.indexer.slug) - uniqueid.text = str(series_obj.identifier.id) + show_id = series_obj.identifier.id + if (series_obj.identifier.indexer.slug == 'imdb'): + show_id = ImdbIdentifier(show_id).imdb_id + uniqueid.text = str(show_id) for indexer_slug in ('tvdb', 'tmdb', 'imdb', 'tvmaze', 'anidb'): if indexer_slug == series_obj.identifier.indexer.slug: @@ -227,7 +231,7 @@ def _show_data(self, series_obj): cur_actor_role = etree.SubElement(cur_actor, 'role') cur_actor_role.text = actor['role'].strip() - if 'image' in actor and actor['image'].strip(): + if 'image' in actor and actor['image'] and actor['image'].strip(): cur_actor_thumb = etree.SubElement(cur_actor, 'thumb') cur_actor_thumb.text = actor['image'].strip() @@ -369,11 +373,11 @@ def _ep_data(self, ep_obj): else: continue - if 
'role' in actor and actor['role'].strip(): + if 'role' in actor and actor['role'] and actor['role'].strip(): cur_actor_role = etree.SubElement(cur_actor, 'role') cur_actor_role.text = actor['role'].strip() - if 'image' in actor and actor['image'].strip(): + if 'image' in actor and actor['image'] and actor['image'].strip(): cur_actor_thumb = etree.SubElement(cur_actor, 'thumb') cur_actor_thumb.text = actor['image'].strip() diff --git a/medusa/name_parser/rules/rules.py b/medusa/name_parser/rules/rules.py index e2f0ab0f2f..ab38c7aa9f 100644 --- a/medusa/name_parser/rules/rules.py +++ b/medusa/name_parser/rules/rules.py @@ -390,17 +390,17 @@ def when(self, matches, context): if not absolute_episodes: return + to_remove = [] + to_append = [] + fileparts = matches.markers.named('path') for filepart in marker_sorted(fileparts, matches): old_title = matches.range(filepart.start, filepart.end, predicate=lambda match: match.name == 'title', index=0) + absolute_episode = matches.range(filepart.start, filepart.end, predicate=lambda match: match.name == 'absolute_episode', index=0) - if not filepart.value.startswith(str(absolute_episodes[0].value)): + if not absolute_episode or not filepart.value.startswith(str(absolute_episode.value)): continue - to_remove = [] - to_append = [] - - absolute_episode = absolute_episodes[0] new_title = copy.copy(absolute_episode) new_title.name = 'title' new_title.value = str(absolute_episode.value) @@ -409,7 +409,7 @@ def when(self, matches, context): to_remove.append(absolute_episode) to_remove.append(old_title) - return to_remove, to_append + return to_remove, to_append class CreateAliasWithCountryOrYear(Rule): diff --git a/medusa/notifiers/discord.py b/medusa/notifiers/discord.py index b18b94d443..d3f09f4d88 100644 --- a/medusa/notifiers/discord.py +++ b/medusa/notifiers/discord.py @@ -33,10 +33,11 @@ class Notifier(object): https://discordapp.com """ - def _send_discord_msg(self, title, msg, webhook=None, tts=False): + def 
_send_discord_msg(self, title, msg, webhook=None, tts=None, override_avatar=None): """Collect the parameters and send the message to the discord webhook.""" webhook = app.DISCORD_WEBHOOK if webhook is None else webhook tts = app.DISCORD_TTS if tts is None else tts + override_avatar = app.DISCORD_OVERRIDE_AVATAR if override_avatar is None else override_avatar log.debug('Discord in use with API webhook: {webhook}', {'webhook': webhook}) @@ -46,10 +47,12 @@ def _send_discord_msg(self, title, msg, webhook=None, tts=False): payload = { 'content': message, 'username': app.DISCORD_NAME, - 'avatar_url': app.DISCORD_AVATAR_URL, 'tts': tts } + if override_avatar: + payload['avatar_url'] = app.DISCORD_AVATAR_URL + success = False try: r = requests.post(webhook, json=payload, headers=headers) @@ -128,13 +131,16 @@ def notify_login(self, ipaddress=''): title = notifyStrings[NOTIFY_LOGIN] self._notify_discord(title, update_text.format(ipaddress)) - def test_notify(self, discord_webhook=None, discord_tts=None): + def test_notify(self, discord_webhook=None, discord_tts=None, override_avatar=None): """Create the test notification.""" - return self._notify_discord('test', 'This is a test notification from Medusa', webhook=discord_webhook, tts=discord_tts, force=True) + return self._notify_discord( + 'test', 'This is a test notification from Medusa', + webhook=discord_webhook, tts=discord_tts, override_avatar=override_avatar, force=True + ) - def _notify_discord(self, title='', message='', webhook=None, tts=False, force=False): + def _notify_discord(self, title='', message='', webhook=None, tts=None, override_avatar=None, force=False): """Validate if USE_DISCORD or Force is enabled and send.""" if not app.USE_DISCORD and not force: return False - return self._send_discord_msg(title, message, webhook, tts) + return self._send_discord_msg(title, message, webhook, tts, override_avatar) diff --git a/medusa/providers/torrent/html/morethantv.py b/medusa/providers/torrent/html/morethantv.py 
index 7bdefc3374..8b3d053538 100644 --- a/medusa/providers/torrent/html/morethantv.py +++ b/medusa/providers/torrent/html/morethantv.py @@ -1,5 +1,4 @@ # coding=utf-8 - """Provider code for MoreThanTV.""" from __future__ import unicode_literals @@ -142,13 +141,9 @@ def process_column_header(td): if row.find('img', alt='Nuked'): continue - title = cells[labels.index('Name')].find('table').get_text(strip=True) - download_url = urljoin(self.url, cells[labels.index('Name')].find('table').find('a')['href']) - if not all([title, download_url]): - continue - seeders = int(cells[labels.index('Seeders')].get_text(strip=True).replace(',', '')) leechers = int(cells[labels.index('Leechers')].get_text(strip=True).replace(',', '')) + title = cells[labels.index('Name')].find('a').get_text(strip=True) # Filter unseeded torrent if seeders < self.minseed: @@ -166,19 +161,26 @@ def process_column_header(td): pubdate_raw = cells[4].find('span')['title'] pubdate = self.parse_pubdate(pubdate_raw) - item = { - 'title': title, - 'link': download_url, - 'size': size, - 'seeders': seeders, - 'leechers': leechers, - 'pubdate': pubdate, - } - if mode != 'RSS': - log.debug('Found result: {0} with {1} seeders and {2} leechers', - title, seeders, leechers) - - items.append(item) + releases = cells[labels.index('Name')].find('table').find_all('tr') + for release in releases: + release_title = release.find('td').get_text(strip=True) + download_url = urljoin(self.url, release.find('a')['href']) + if not all([release_title, download_url]): + continue + + item = { + 'title': release_title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': pubdate, + } + if mode != 'RSS': + log.debug('Found result: {0} with {1} seeders and {2} leechers', + title, seeders, leechers) + + items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): log.exception('Failed parsing provider.') @@ -191,6 +193,10 @@ def login(self): # Get the login 
page, to retrieve the token response = self.session.get(self.urls['login']) + if not response: + log.warning('Unable to get login page') + return False + token = re.search(r'token".value="([^"]+)"', response.text) if not token: log.warning('Unable to get login token') diff --git a/medusa/providers/torrent/html/sdbits.py b/medusa/providers/torrent/html/sdbits.py index 177291f956..1f5f3eddbd 100644 --- a/medusa/providers/torrent/html/sdbits.py +++ b/medusa/providers/torrent/html/sdbits.py @@ -13,6 +13,7 @@ convert_size, try_int, ) +from medusa.indexers.imdb.api import ImdbIdentifier from medusa.indexers.utils import mappings from medusa.logger.adapters.style import BraceAdapter from medusa.providers.torrent.torrent_provider import TorrentProvider @@ -83,6 +84,7 @@ def search(self, search_strings, age=0, ep_obj=None, **kwargs): if mode != 'RSS': imdb_id = self.series.externals.get(mappings[10]) if imdb_id: + imdb_id = ImdbIdentifier(imdb_id).imdb_id search_params['imdb'] = imdb_id log.debug('Search string (IMDb ID): {imdb_id}', {'imdb_id': imdb_id}) diff --git a/medusa/providers/torrent/json/eztv.py b/medusa/providers/torrent/json/eztv.py index f6b60050d5..cc218e19df 100644 --- a/medusa/providers/torrent/json/eztv.py +++ b/medusa/providers/torrent/json/eztv.py @@ -8,6 +8,7 @@ from medusa import tv from medusa.helper.common import convert_size +from medusa.indexers.imdb.api import ImdbIdentifier from medusa.indexers.utils import mappings from medusa.logger.adapters.style import BraceAdapter from medusa.providers.torrent.torrent_provider import TorrentProvider @@ -63,7 +64,7 @@ def search(self, search_strings, age=0, ep_obj=None, **kwargs): if mode != 'RSS': imdb_id = self.series.externals.get(mappings[10]) if imdb_id: - imdb_id = imdb_id[2:] # strip two tt's of id as they are not used + imdb_id = ImdbIdentifier(imdb_id).imdb_id search_params['imdb_id'] = imdb_id log.debug('Search string (IMDb ID): {imdb_id}', {'imdb_id': imdb_id}) else: diff --git 
a/medusa/providers/torrent/torznab/torznab.py b/medusa/providers/torrent/torznab/torznab.py index 0aa2ba4838..f166843385 100644 --- a/medusa/providers/torrent/torznab/torznab.py +++ b/medusa/providers/torrent/torznab/torznab.py @@ -202,7 +202,12 @@ def parse(self, data, mode): title, seeders) continue - torrent_size = item.size.get_text(strip=True) + torrent_size = item.find('torznab:attr', attrs={'name': 'size'}) + if torrent_size: + torrent_size = torrent_size.get('value') + + if not torrent_size: + torrent_size = item.size.get_text(strip=True) size = convert_size(torrent_size, default=-1) pubdate_raw = item.pubdate.get_text(strip=True) diff --git a/medusa/queues/show_queue.py b/medusa/queues/show_queue.py index 8f3694ff85..3fb89dd4e1 100644 --- a/medusa/queues/show_queue.py +++ b/medusa/queues/show_queue.py @@ -1052,7 +1052,7 @@ def run(self): try: # Let's make sure we refresh the indexer_api object attached to the show object. self.show.create_indexer() - self.show.load_from_indexer() + self.show.load_from_indexer(limit_seasons=self.seasons) except IndexerError as error: log.warning( '{id}: Unable to contact {indexer}. 
 Aborting: {error_msg}', diff --git a/medusa/schedulers/show_updater.py b/medusa/schedulers/show_updater.py index abb2d44747..f721283013 100644 --- a/medusa/schedulers/show_updater.py +++ b/medusa/schedulers/show_updater.py @@ -1,4 +1,5 @@ # coding=utf-8 +"""Show updater module.""" from __future__ import unicode_literals @@ -10,7 +11,7 @@ from medusa import app, db, network_timezones, ui from medusa.helper.exceptions import CantRefreshShowException, CantUpdateShowException from medusa.indexers.api import indexerApi -from medusa.indexers.exceptions import IndexerException, IndexerUnavailable +from medusa.indexers.exceptions import IndexerException, IndexerSeasonUpdatesNotSupported, IndexerShowUpdatesNotSupported, IndexerUnavailable from medusa.scene_exceptions import refresh_exceptions_cache from medusa.session.core import MedusaSession @@ -20,14 +21,17 @@ class ShowUpdater(object): + """Show updater class.""" + def __init__(self): + """Show updater constructor.""" self.lock = threading.Lock() self.amActive = False self.session = MedusaSession() self.update_cache = UpdateCache() def run(self, force=False): - + """Start show updater.""" self.amActive = True refresh_shows = [] # A list of shows, that need to be refreshed season_updates = [] # A list of show seasons that have passed their next_update timestamp @@ -47,6 +51,8 @@ def run(self, force=False): # Loop through the list of shows, and per show evaluate if we can use the .get_last_updated_seasons() for show in app.showList: + show_updates_supported = True + if show.paused: logger.info('The show {show} is paused, not updating it.', show=show.name) continue @@ -77,6 +83,11 @@ def run(self, force=False): indexer_updated_shows[show.indexer] = indexer_api.get_last_updated_series( last_updates[indexer_name], update_max_weeks ) + except IndexerShowUpdatesNotSupported: + logger.info('Could not get a list with updated shows from indexer {indexer_name},' + ' as this is not supported. 
Attempting a regular update for show: {show}', + indexer_name=indexer_name, show=show.name) + show_updates_supported = False except IndexerUnavailable: logger.warning('Problem running show_updater, Indexer {indexer_name} seems to be having ' 'connectivity issues while trying to look for show updates on show: {show}', @@ -107,7 +118,7 @@ def run(self, force=False): # Update shows that were updated in the last X weeks # or were not updated within the last X weeks - if show.indexerid not in indexer_updated_shows.get(show.indexer, []): + if show.indexerid not in indexer_updated_shows.get(show.indexer, []) and show_updates_supported: if show.last_update_indexer > time.time() - 604800 * update_max_weeks: logger.debug('Skipping show update for {show}. Show was not in the ' 'indexers {indexer_name} list with updated shows and it ' @@ -115,19 +126,16 @@ def run(self, force=False): indexer_name=indexer_name, weeks=update_max_weeks) continue - # If indexer doesn't have season updates. - if not hasattr(indexer_api, 'get_last_updated_seasons'): - logger.debug('Adding the following show for full update to queue: {show}', show=show.name) - refresh_shows.append(show) - - # Else fall back to per season updates. - elif hasattr(indexer_api, 'get_last_updated_seasons'): + try: # Get updated seasons and add them to the season update list. 
 try: updated_seasons = indexer_api.get_last_updated_seasons( - [show.indexerid], show.last_update_indexer, update_max_weeks) + [show.indexerid], from_time=show.last_update_indexer, weeks=update_max_weeks, cache=self.update_cache + ) + except IndexerSeasonUpdatesNotSupported: + raise except IndexerUnavailable: - logger.warning('Problem running show_updater, Indexer {indexer_name} seems to be having ' + logger.warning('Problem running get_last_updated_seasons in show_updater, Indexer {indexer_name} seems to be having ' 'connectivity issues while trying to look for show updates for show: {show}', indexer_name=indexer_name, show=show.name) continue @@ -152,6 +160,9 @@ def run(self, force=False): logger.debug('Could not detect a season update, but an update is required. \n' 'Adding the following show for full update to queue: {show}', show=show.name) refresh_shows.append(show) + except IndexerSeasonUpdatesNotSupported: + logger.debug('Adding the following show for full update to queue: {show}', show=show.name) + refresh_shows.append(show) pi_list = [] # Full refreshes @@ -201,7 +212,10 @@ def run(self, force=False): class UpdateCache(db.DBConnection): + """Show updater update cache class.""" + def __init__(self): + """Show updater update cache constructor.""" super(UpdateCache, self).__init__('cache.db') def get_last_indexer_update(self, indexer): @@ -231,3 +245,33 @@ def set_last_indexer_update(self, indexer): return self.upsert('lastUpdate', {'time': int(time.time())}, {'provider': indexer}) + + def get_last_update_season(self, indexer, series_id, season): + """Get the last update timestamp from the season_updates table. + + :param indexer: Indexer id. 1 for TheTvdb. + :type indexer: int + :param series_id: Indexers series id. + :type series_id: int + :param season: Season + """ + last_season_update = self.select( + 'SELECT time ' + 'FROM season_updates ' + 'WHERE indexer = ? AND series_id = ? 
 AND season = ?', + [indexer, series_id, season] + ) + return last_season_update[0]['time'] if last_season_update else 0 + + def set_last_update_season(self, indexer, series_id, season): + """Set the last update timestamp from the season_updates table. + + :param indexer: Indexer id. 1 for TheTvdb. + :type indexer: int + :param series_id: Indexers series id. + :type series_id: int + :param season: Season + """ + return self.upsert('season_updates', + {'time': int(time.time())}, + {'indexer': indexer, 'series_id': series_id, 'season': season}) diff --git a/medusa/schedulers/trakt_checker.py b/medusa/schedulers/trakt_checker.py index 9b48cafc3e..fee5d0325b 100644 --- a/medusa/schedulers/trakt_checker.py +++ b/medusa/schedulers/trakt_checker.py @@ -15,6 +15,7 @@ from medusa.helpers.externals import show_in_library from medusa.helpers.trakt import create_episode_structure, create_show_structure, get_trakt_user from medusa.indexers.config import EXTERNAL_IMDB, EXTERNAL_TRAKT, indexerConfig +from medusa.indexers.imdb.api import ImdbIdentifier from medusa.indexers.utils import get_trakt_indexer from medusa.logger.adapters.style import BraceAdapter from medusa.search.queue import BacklogQueueItem @@ -488,7 +489,7 @@ def remove_from_library(self): continue try: - trakt_show = tv.TVShow(str(trakt_id or show.imdb_id)) + trakt_show = tv.TVShow(str(trakt_id or ImdbIdentifier(show.imdb_id).imdb_id)) progress = trakt_show.progress except (TraktException, RequestException) as error: log.info("Unable to check if show '{show}' is ended/completed. 
Error: {error!r}", { @@ -683,7 +684,7 @@ def match_trakt_by_id(trakt_show, medusa_show): if trakt_supported_indexer and getattr(trakt_show, trakt_supported_indexer) == medusa_show.indexerid: return True # Try to match by imdb_id - if getattr(trakt_show, 'imdb') == medusa_show.imdb_id: + if getattr(trakt_show, 'imdb') == ImdbIdentifier(medusa_show.imdb_id).imdb_id: return True return False diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py index 2865350b9e..2f436bde1e 100644 --- a/medusa/server/api/v2/config.py +++ b/medusa/server/api/v2/config.py @@ -192,6 +192,7 @@ class ConfigHandler(BaseRequestHandler): 'recommended.cache.imdb': BooleanField(app, 'CACHE_RECOMMENDED_IMDB'), 'recommended.cache.anidb': BooleanField(app, 'CACHE_RECOMMENDED_ANIDB'), 'recommended.cache.anilist': BooleanField(app, 'CACHE_RECOMMENDED_ANILIST'), + 'recommended.cache.purgeAfterDays': IntegerField(app, 'CACHE_RECOMMENDED_PURGE_AFTER_DAYS'), 'recommended.trakt.selectedLists': ListField(app, 'CACHE_RECOMMENDED_TRAKT_LISTS'), # Sections @@ -234,7 +235,6 @@ class ConfigHandler(BaseRequestHandler): 'clients.nzb.sabnzbd.password': StringField(app, 'SAB_PASSWORD'), 'clients.nzb.sabnzbd.username': StringField(app, 'SAB_USERNAME'), - 'postProcessing.showDownloadDir': StringField(app, 'TV_DOWNLOAD_DIR'), 'postProcessing.defaultClientPath': StringField(app, 'DEFAULT_CLIENT_PATH'), 'postProcessing.processAutomatically': BooleanField(app, 'PROCESS_AUTOMATICALLY'), @@ -432,6 +432,7 @@ class ConfigHandler(BaseRequestHandler): 'notifiers.discord.enabled': BooleanField(app, 'USE_DISCORD'), 'notifiers.discord.webhook': StringField(app, 'DISCORD_WEBHOOK'), 'notifiers.discord.tts': BooleanField(app, 'DISCORD_TTS'), + 'notifiers.discord.overrideAvatar': BooleanField(app, 'DISCORD_OVERRIDE_AVATAR'), 'notifiers.discord.notifyOnSnatch': BooleanField(app, 'DISCORD_NOTIFY_ONSNATCH'), 'notifiers.discord.notifyOnDownload': BooleanField(app, 'DISCORD_NOTIFY_ONDOWNLOAD'), 
'notifiers.discord.notifyOnSubtitleDownload': BooleanField(app, 'DISCORD_NOTIFY_ONSUBTITLEDOWNLOAD'), @@ -709,6 +710,7 @@ def data_main(): section_data['recommended']['cache']['imdb'] = bool(app.CACHE_RECOMMENDED_IMDB) section_data['recommended']['cache']['anidb'] = bool(app.CACHE_RECOMMENDED_ANIDB) section_data['recommended']['cache']['anilist'] = bool(app.CACHE_RECOMMENDED_ANILIST) + section_data['recommended']['cache']['purgeAfterDays'] = int(app.CACHE_RECOMMENDED_PURGE_AFTER_DAYS) section_data['recommended']['trakt']['selectedLists'] = app.CACHE_RECOMMENDED_TRAKT_LISTS section_data['recommended']['trakt']['availableLists'] = TraktPopular.CATEGORIES @@ -1054,6 +1056,7 @@ def data_notifiers(): section_data['discord']['notifyOnSubtitleDownload'] = bool(app.DISCORD_NOTIFY_ONSUBTITLEDOWNLOAD) section_data['discord']['webhook'] = app.DISCORD_WEBHOOK section_data['discord']['tts'] = bool(app.DISCORD_TTS) + section_data['discord']['overrideAvatar'] = bool(app.DISCORD_OVERRIDE_AVATAR) section_data['discord']['name'] = app.DISCORD_NAME section_data['twitter'] = {} diff --git a/medusa/server/api/v2/guessit.py b/medusa/server/api/v2/guessit.py index 5ac2b6ba72..1b5d4eb868 100644 --- a/medusa/server/api/v2/guessit.py +++ b/medusa/server/api/v2/guessit.py @@ -2,11 +2,19 @@ """Request handler for statistics.""" from __future__ import unicode_literals +import logging + import guessit +from medusa.logger.adapters.style import CustomBraceAdapter +from medusa.name_parser.parser import InvalidNameException, InvalidShowException, NameParser from medusa.server.api.v2.base import BaseRequestHandler +log = CustomBraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + + class GuessitHandler(BaseRequestHandler): """Guessit parser request handler.""" @@ -26,6 +34,26 @@ def get(self): if not release: return self._bad_request('Missing release name to guess') - guess = guessit.guessit(release) - - return self._ok(data=dict(guess)) + result = {'error': None} 
+ show = None + + try: + parse_result = NameParser().parse(release) + show = parse_result.series.to_json() + except InvalidNameException as error: + log.debug( + 'Not enough information to parse release name into a valid show. ' + 'improve naming for: {release}', + {'release': release}) + result['error'] = str(error) + except InvalidShowException as error: + log.debug( + 'Could not match the parsed title to a show in your library for: ' + 'Consider adding scene exceptions for {release}', + {'release': release}) + result['error'] = str(error) + + result['guess'] = guessit.guessit(release) + result['show'] = show + + return self._ok(data=dict(result)) diff --git a/medusa/server/api/v2/internal.py b/medusa/server/api/v2/internal.py index 732b4b59a6..6a08d88a2d 100644 --- a/medusa/server/api/v2/internal.py +++ b/medusa/server/api/v2/internal.py @@ -271,7 +271,7 @@ def resource_search_indexers_for_show_name(self): ( indexer_api.name, indexer, - indexer_api.config['show_url'], + indexer_api.config['show_url'].format(show_id), show_id, show['seriesname'], show['firstaired'] or 'N/A', diff --git a/medusa/server/api/v2/recommended.py b/medusa/server/api/v2/recommended.py index b7f128c2c5..87a115934e 100644 --- a/medusa/server/api/v2/recommended.py +++ b/medusa/server/api/v2/recommended.py @@ -39,27 +39,23 @@ class RecommendedHandler(BaseRequestHandler): def get(self, identifier, path_param=None): """Query available recommended show lists.""" - if identifier and not RecommendedHandler.IDENTIFIER_TO_LIST.get(identifier): + if identifier and not RecommendedHandler.IDENTIFIER_TO_LIST.get(identifier) and identifier != 'categories': return self._bad_request("Invalid recommended list identifier '{0}'".format(identifier)) - data = {'shows': [], 'trakt': {'removedFromMedusa': []}} - - shows = get_recommended_shows(source=RecommendedHandler.IDENTIFIER_TO_LIST.get(identifier)) - - if shows: - data['shows'] = [show.to_json() for show in shows] - - data['trakt']['removedFromMedusa'] = 
[] - if app.USE_TRAKT: + if identifier == 'trakt' and path_param == 'removed' and app.USE_TRAKT: + data = {'removedFromMedusa': [], 'blacklistEnabled': None} try: - data['trakt']['removedFromMedusa'] = TraktPopular().get_removed_from_medusa() + data['removedFromMedusa'] = TraktPopular().get_removed_from_medusa() except Exception: log.warning('Could not get the `removed from medusa` list') - data['trakt']['blacklistEnabled'] = app.TRAKT_BLACKLIST_NAME != '' + data['blacklistEnabled'] = app.TRAKT_BLACKLIST_NAME != '' + return self._ok(data) - data['categories'] = get_categories() + if identifier == 'categories': + return self._ok(get_categories()) - return self._ok(data) + shows = get_recommended_shows(source=RecommendedHandler.IDENTIFIER_TO_LIST.get(identifier)) + return self._paginate([show.to_json() for show in shows], sort='-rating') def post(self, identifier, path_param=None): """Force the start of a recommended show queue item.""" diff --git a/medusa/server/api/v2/series.py b/medusa/server/api/v2/series.py index 0af3322f50..908a8a9f19 100644 --- a/medusa/server/api/v2/series.py +++ b/medusa/server/api/v2/series.py @@ -85,7 +85,7 @@ def post(self, series_slug=None, path_param=None): if not data or 'id' not in data: return self._bad_request('Invalid series data') - ids = {k: v for k, v in viewitems(data['id']) if k != 'imdb'} + ids = {k: v for k, v in viewitems(data['id'])} if len(ids) != 1: return self._bad_request('Only 1 indexer identifier should be specified') diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index f884fc5335..e211a2d7a0 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -155,8 +155,10 @@ def testTelegram(telegram_id=None, telegram_apikey=None): return 'Error sending Telegram notification: {msg}'.format(msg=message) @staticmethod - def testDiscord(discord_webhook=None, discord_tts=False): - result, message = notifiers.discord_notifier.test_notify(discord_webhook, 
config.checkbox_to_value(discord_tts)) + def testDiscord(discord_webhook=None, discord_tts=None, discord_override_avatar=None): + result, message = notifiers.discord_notifier.test_notify( + discord_webhook, config.checkbox_to_value(discord_tts), config.checkbox_to_value(discord_override_avatar) + ) if result: return 'Discord notification succeeded. Check your Discord channels to make sure it worked' else: diff --git a/medusa/show/recommendations/imdb.py b/medusa/show/recommendations/imdb.py index 93c1a60c9d..2b01175671 100644 --- a/medusa/show/recommendations/imdb.py +++ b/medusa/show/recommendations/imdb.py @@ -11,6 +11,7 @@ from medusa.imdb import Imdb from medusa.indexers.api import indexerApi from medusa.indexers.config import EXTERNAL_IMDB, INDEXER_TMDB +from medusa.indexers.imdb.api import ImdbIdentifier from medusa.logger.adapters.style import BraceAdapter from medusa.show.recommendations.recommended import ( BasePopular, @@ -43,7 +44,7 @@ def __init__(self): @recommended_series_cache.cache_on_arguments(namespace='imdb', function_key_generator=create_key_from_series) def _create_recommended_show(self, series): """Create the RecommendedShow object from the returned showobj.""" - externals = {'imdb_id': series.get('imdb_tt')} + externals = {'imdb_id': ImdbIdentifier(series.get('imdb_tt')).series_id} # Get tmdb id using a call to tmdb api. 
t = indexerApi(INDEXER_TMDB).indexer(**indexerApi(INDEXER_TMDB).api_params.copy()) @@ -51,7 +52,7 @@ def _create_recommended_show(self, series): rec_show = RecommendedShow( self, - series.get('imdb_tt'), + ImdbIdentifier(series.get('imdb_tt')).series_id, series.get('name'), **{ 'rating': series.get('rating'), @@ -113,18 +114,18 @@ def fetch_popular_shows(self): series['genres'] = show_genres.get('genres', []) - if all([series['year'], series['name'], series['imdb_tt']]): - try: - recommended_show = self._create_recommended_show(series) - if recommended_show: - recommended_show.save_to_db() - result.append(recommended_show) - except RequestException: - log.warning( - u'Could not connect to indexers to check if you already have' - u' this show in your library: {show} ({year})', - {'show': series['name'], 'year': series['name']} - ) + if all([series['year'], series['name'], series['imdb_tt']]): + try: + recommended_show = self._create_recommended_show(series) + if recommended_show: + recommended_show.save_to_db() + result.append(recommended_show) + except RequestException: + log.warning( + u'Could not connect to indexers to check if you already have' + u' this show in your library: {show} ({year})', + {'show': series['name'], 'year': series['name']} + ) return result diff --git a/medusa/show/show.py b/medusa/show/show.py index 8bc999294b..92d3c70359 100644 --- a/medusa/show/show.py +++ b/medusa/show/show.py @@ -132,12 +132,6 @@ def find_by_id(series, indexer_id, series_id): except ValueError: indexer_id = indexer_name_to_id(indexer_id) - try: - if indexer_id != 10: # 10 = EXTERNAL_IMDB - series_id = int(series_id) - except ValueError: - log.warning('Invalid series id: {series_id}', {'series_id': series_id}) - if series_id is None or series is None or len(series) == 0: return None diff --git a/medusa/tv/series.py b/medusa/tv/series.py index 2892cd0085..6999a5b763 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -69,6 +69,7 @@ from medusa.indexers.api 
import indexerApi from medusa.indexers.config import ( EXTERNAL_MAPPINGS, + INDEXER_IMDB, INDEXER_TVRAGE, STATUS_MAP, indexerConfig @@ -76,6 +77,7 @@ from medusa.indexers.exceptions import ( IndexerAttributeNotFound, IndexerException, IndexerSeasonNotFound, IndexerShowAlreadyInLibrary ) +from medusa.indexers.imdb.api import ImdbIdentifier from medusa.indexers.tmdb.api import Tmdb from medusa.indexers.utils import ( indexer_id_to_slug, @@ -1582,7 +1584,7 @@ def _load_from_db(self): self.reset_dirty() return True - def load_from_indexer(self, tvapi=None): + def load_from_indexer(self, tvapi=None, limit_seasons=None): """Load show from indexer. :param tvapi: @@ -1598,6 +1600,9 @@ def load_from_indexer(self, tvapi=None): ) indexer_api = tvapi or self.indexer_api + if limit_seasons: + self.indexer_api.config['limit_seasons'] = limit_seasons + indexed_show = indexer_api[self.series_id] if getattr(indexed_show, 'firstaired', ''): @@ -1623,7 +1628,10 @@ def load_from_indexer(self, tvapi=None): # Enrich the externals, using reverse lookup. self.externals.update(get_externals(self)) - self.imdb_id = self.externals.get('imdb_id') or getattr(indexed_show, 'imdb_id', '') + if self.indexer_api.indexer == INDEXER_IMDB: + self.externals['imdb_id'] = ImdbIdentifier(getattr(indexed_show, 'id')).series_id + + self.imdb_id = ImdbIdentifier(self.externals.get('imdb_id')).imdb_id or getattr(indexed_show, 'imdb_id', '') if getattr(indexed_show, 'airs_dayofweek', '') and getattr(indexed_show, 'airs_time', ''): self.airs = '{airs_day_of_week} {airs_time}'.format(airs_day_of_week=indexed_show['airs_dayofweek'], @@ -1863,10 +1871,8 @@ def next_episode(self): ' indexer = ?' ' AND showid = ? ' ' AND airdate >= ? 
' - 'ORDER BY' - ' airdate ' - 'ASC LIMIT 1', - [self.indexer, self.series_id, today]) + 'ORDER BY airdate ', + [self.indexer, self.series_id, today - 1]) if sql_results is None or len(sql_results) == 0: log.debug(u'{id}: ({name}) Could not find a next episode', {'name': self.name, 'id': self.series_id}) @@ -1878,7 +1884,18 @@ def next_episode(self): 'ep': episode_num(sql_results[0]['season'], sql_results[0]['episode']), } ) - self._next_aired = sql_results[0]['airdate'] + next_aired_with_time = None + for result in sql_results: + if result['airdate'] > MILLIS_YEAR_1900: + next_aired_with_time = sbdatetime.convert_to_setting( + network_timezones.parse_date_time(result['airdate'], self.airs, self.network) + ) + + if next_aired_with_time < datetime.datetime.now().astimezone(): + continue + + self._next_aired = result['airdate'] + break return self._next_aired @@ -2343,7 +2360,7 @@ def to_json(self, detailed=False, episodes=False): data = {} data['id'] = {} data['id'][self.indexer_name] = self.series_id - data['id']['imdb'] = self.imdb_id + # data['id']['imdb'] = self.imdb_id data['id']['slug'] = self.identifier.slug data['id']['trakt'] = self.externals.get('trakt_id') data['externals'] = {k.split('_')[0]: v for k, v in self.externals.items()} diff --git a/setup.cfg b/setup.cfg index 58a323210c..958417eb2f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -72,6 +72,7 @@ flake8-ignore = medusa/indexers/tvmaze/__init__.py D104 medusa/indexers/tvmaze/api.py D100 D102 D103 D202 D205 D400 D401 medusa/indexers/tvmaze/exceptions.py D200 D204 D205 D400 N801 + medusa/indexers/imdb/__init__.py D104 medusa/init/logconfig.py E305 medusa/logger/__init__.py D401 medusa/media/__init__.py D104 diff --git a/tests/apiv2/test_config.py b/tests/apiv2/test_config.py index b334b06715..dfc7d82079 100644 --- a/tests/apiv2/test_config.py +++ b/tests/apiv2/test_config.py @@ -108,6 +108,7 @@ def config_main(monkeypatch, app_config): section_data['recommended']['cache']['imdb'] = 
bool(app.CACHE_RECOMMENDED_IMDB) section_data['recommended']['cache']['anidb'] = bool(app.CACHE_RECOMMENDED_ANIDB) section_data['recommended']['cache']['anilist'] = bool(app.CACHE_RECOMMENDED_ANILIST) + section_data['recommended']['cache']['purgeAfterDays'] = int(app.CACHE_RECOMMENDED_PURGE_AFTER_DAYS) section_data['recommended']['trakt']['selectedLists'] = app.CACHE_RECOMMENDED_TRAKT_LISTS section_data['recommended']['trakt']['availableLists'] = TraktPopular.CATEGORIES @@ -419,6 +420,7 @@ def config_postprocessing(): section_data['naming']['animeNamingType'] = int_default(app.NAMING_ANIME, 3) section_data['naming']['stripYear'] = bool(app.NAMING_STRIP_YEAR) section_data['showDownloadDir'] = app.TV_DOWNLOAD_DIR + section_data['defaultClientPath'] = app.DEFAULT_CLIENT_PATH section_data['processAutomatically'] = bool(app.PROCESS_AUTOMATICALLY) section_data['postponeIfSyncFiles'] = bool(app.POSTPONE_IF_SYNC_FILES) section_data['postponeIfNoSubs'] = bool(app.POSTPONE_IF_NO_SUBS) @@ -432,6 +434,9 @@ def config_postprocessing(): section_data['deleteRarContent'] = bool(app.DELRARCONTENTS) section_data['noDelete'] = bool(app.NO_DELETE) section_data['processMethod'] = app.PROCESS_METHOD + section_data['specificProcessMethod'] = bool(app.USE_SPECIFIC_PROCESS_METHOD) + section_data['processMethodTorrent'] = app.PROCESS_METHOD_TORRENT + section_data['processMethodNzb'] = app.PROCESS_METHOD_NZB section_data['reflinkAvailable'] = bool(pkgutil.find_loader('reflink')) section_data['autoPostprocessorFrequency'] = int(app.AUTOPOSTPROCESSOR_FREQUENCY) section_data['syncFiles'] = app.SYNC_FILES @@ -695,6 +700,7 @@ def config_notifiers(): section_data['discord']['notifyOnSubtitleDownload'] = bool(app.DISCORD_NOTIFY_ONSUBTITLEDOWNLOAD) section_data['discord']['webhook'] = app.DISCORD_WEBHOOK section_data['discord']['tts'] = bool(app.DISCORD_TTS) + section_data['discord']['overrideAvatar'] = bool(app.DISCORD_OVERRIDE_AVATAR) section_data['discord']['name'] = app.DISCORD_NAME 
section_data['twitter'] = {} @@ -910,7 +916,7 @@ def config_subtitles(): @pytest.mark.gen_test -async def test_config_get_postprocessing(http_client, create_url, auth_headers, config_subtitles): +async def test_config_get_subtitles(http_client, create_url, auth_headers, config_subtitles): # given expected = config_subtitles diff --git a/tests/test_guessit.yml b/tests/test_guessit.yml index 7ae731b75f..37a5c5e1f9 100644 --- a/tests/test_guessit.yml +++ b/tests/test_guessit.yml @@ -4558,3 +4558,19 @@ proper_count: 1 proper_tag: REPACK type: episode + + +? /1883 (2021)/Season 01/1883 (2021) (S01E03) (2021-12-26) (1080p-AVC BluRay EAC3-6ch) River.mkv +: title: "1883" + year: 2021 + season: 1 + episode: 3 + date: 2021-12-26 + screen_size: 1080p + video_codec: H.264 + container: mkv + source: 'Blu-ray' + audio_codec: 'Dolby Digital Plus' + audio_channels: '5.1' + video_profile: 'Advanced Video Codec High Definition' + type: episode diff --git a/themes-default/slim/src/components/add-recommended.vue b/themes-default/slim/src/components/add-recommended.vue deleted file mode 100644 index 18f8149cbc..0000000000 --- a/themes-default/slim/src/components/add-recommended.vue +++ /dev/null @@ -1,45 +0,0 @@ - - - diff --git a/themes-default/slim/src/components/app-header.vue b/themes-default/slim/src/components/app-header.vue index bbf3a4f007..cf05dc819c 100644 --- a/themes-default/slim/src/components/app-header.vue +++ b/themes-default/slim/src/components/app-header.vue @@ -12,7 +12,7 @@ Medusa @@ -46,7 +47,8 @@ export default { filter: { tvdb: true, tvmaze: true, - tmdb: true + tmdb: true, + imdb: true }, started: false }; @@ -76,7 +78,8 @@ export default { show => (show.indexer === 'tvdb' && filter.tvdb) || (show.indexer === 'tvmaze' && filter.tvmaze) || - (show.indexer === 'tmdb' && filter.tmdb) + (show.indexer === 'tmdb' && filter.tmdb) || + (show.indexer === 'imdb' && filter.imdb) ); } }, @@ -90,6 +93,14 @@ export default { const { filteredShows } = this; 
Vue.set(filteredShows.find(s => s === show), 'selected', { indexer, showId }); }, + /** + * Convert an Imdb Id to an id without the `tt` prefix. + * @param {String} value - Imdb id with tt prefix. + * @returns {Number} - Id without the tt prefix. + */ + imdbToId(value) { + return Number(String(value).replace(/^tt0*/g, '')); + }, /** * Start changing the shows indexer. */ @@ -99,7 +110,7 @@ export default { // Loop through the shows and start a ChangeIndexerQueueItem for each. // Store the queueItem identifier, to keep track. const oldSlug = show.id.slug; - const newSlug = `${show.selected.indexer}${show.selected.showId}`; + const newSlug = `${show.selected.indexer}${this.imdbToId(show.selected.showId)}`; if (oldSlug === newSlug) { this.$snotify.warning( 'Old shows indexer and new shows indexer are the same, skipping', @@ -130,17 +141,25 @@ export default { watch: { queueitems(queueitems) { const { allShows } = this; + let changingShows = false; for (const show of allShows) { if (!('changeStatus' in show)) { continue; } const foundItem = queueitems.find(item => item.identifier === show.changeStatus.identifier); + if (foundItem && foundItem.success === null) { + changingShows = true; + } + if (foundItem && foundItem.oldShow.id.slug === show.id.slug && foundItem.success !== null) { // Found a queueItem for this show. Let's search for a new show. And replace it. 
- allShows.find(s => s.id.slug === foundItem.oldShow.id.slug).id = foundItem.newShow.id; + const foundShow = allShows.find(s => s.id.slug === foundItem.oldShow.id.slug); + foundShow.id = foundItem.newShow.id; + foundShow.checked = false; } } + this.started = changingShows; } } }; diff --git a/themes-default/slim/src/components/config-anime.vue b/themes-default/slim/src/components/config-anime.vue index 66686e5639..a8e42777e3 100644 --- a/themes-default/slim/src/components/config-anime.vue +++ b/themes-default/slim/src/components/config-anime.vue @@ -62,6 +62,7 @@ v-model="animeShowlistDefaultAnime" :multiple="true" :options="layout.show.showListOrder" + class="max-input350" /> Customize the showslist when auto anime lists is enabled diff --git a/themes-default/slim/src/components/config-general.vue b/themes-default/slim/src/components/config-general.vue index ac9cc4a043..b522ef9e07 100644 --- a/themes-default/slim/src/components/config-general.vue +++ b/themes-default/slim/src/components/config-general.vue @@ -23,7 +23,7 @@ - when launching Medusa interface @@ -86,6 +86,11 @@ + + +

Number of days to keep shows in the cache (default: 180) (0 will not purge shows at all)

+
+ @@ -116,7 +121,7 @@ - @@ -185,7 +190,7 @@
- @@ -233,13 +238,13 @@ - - Note: seconds are only shown on the History page @@ -303,7 +308,7 @@ -

enable to be notified when a new login happens in webserver

+

enable to listen for connections on IPv6

@@ -345,7 +350,7 @@
- Normal (default). High is lower and Low is higher CPU use @@ -411,7 +416,7 @@ - @@ -449,7 +454,7 @@ - @@ -476,7 +481,7 @@
- @@ -548,6 +553,7 @@ v-model="general.git.resetBranches" :multiple="true" :options="gitRemoteBranches" + class="max-input350" /> Note: Empty selection means that any branch could be reset. diff --git a/themes-default/slim/src/components/config-notifications.vue b/themes-default/slim/src/components/config-notifications.vue index 0a4ecde5f8..59a63ce656 100644 --- a/themes-default/slim/src/components/config-notifications.vue +++ b/themes-default/slim/src/components/config-notifications.vue @@ -725,6 +725,7 @@ +
Click below to test your settings.
@@ -1603,7 +1604,8 @@ export default { $('#testDiscord-result').html(MEDUSA.config.layout.loading); $.get('home/testDiscord', { discord_webhook: notifiers.discord.webhook, // eslint-disable-line camelcase - discord_tts: notifiers.discord.tts // eslint-disable-line camelcase + discord_tts: notifiers.discord.tts, // eslint-disable-line camelcase + discord_override_avatar: notifiers.discord.overrideAvatar // eslint-disable-line camelcase }).done(data => { $('#testDiscord-result').html(data); $('#testDiscord').prop('disabled', false); diff --git a/themes-default/slim/src/components/config-post-processing.vue b/themes-default/slim/src/components/config-post-processing.vue index c303b64e31..c67363d46b 100644 --- a/themes-default/slim/src/components/config-post-processing.vue +++ b/themes-default/slim/src/components/config-post-processing.vue @@ -95,12 +95,12 @@

To use reference linking, the reflink package needs to be installed.

- + Enable this option if you want to use different processing methods (copy, move, etc..) for torrent and nzb downloads.

Note:This option is only used by the Automated Download Handling option

- + @@ -109,7 +109,7 @@

To use reference linking, the reflink package needs to be installed.

- + diff --git a/themes-default/slim/src/components/config-providers.vue b/themes-default/slim/src/components/config-providers.vue index ded025c6f7..559630c99c 100644 --- a/themes-default/slim/src/components/config-providers.vue +++ b/themes-default/slim/src/components/config-providers.vue @@ -51,7 +51,7 @@
-