From eacb53060bf0609cb12a40fd044848dbf5a3ba21 Mon Sep 17 00:00:00 2001 From: Arrrrr Date: Fri, 13 Dec 2024 05:07:21 -0800 Subject: [PATCH 1/9] feat: active onging update --- src/program/__init__.py | 13 ++- src/program/apis/trakt_api.py | 52 +++++++++- src/program/media/item.py | 97 +++++++++++++++++- src/program/program.py | 173 ++++++++++++++++++++++++++++----- src/program/settings/models.py | 1 + 5 files changed, 299 insertions(+), 37 deletions(-) diff --git a/src/program/__init__.py b/src/program/__init__.py index 8621dc2b..8d37ccec 100644 --- a/src/program/__init__.py +++ b/src/program/__init__.py @@ -1,4 +1,9 @@ -"""Program main module""" - -from program.media.item import MediaItem # noqa: F401 -from program.program import Event, Program # noqa: F401 +"""Program module.""" + +from loguru import logger + +from program.media.item import MediaItem # noqa: F401 +from program.program import Event, Program # noqa: F401 + +# Add custom log levels +logger.level("RELEASE", no=35, color="") diff --git a/src/program/apis/trakt_api.py b/src/program/apis/trakt_api.py index 31ffafe3..d58d351c 100644 --- a/src/program/apis/trakt_api.py +++ b/src/program/apis/trakt_api.py @@ -1,8 +1,9 @@ ο»Ώimport re -from datetime import datetime +from datetime import datetime, timedelta from types import SimpleNamespace from typing import List, Optional, Union from urllib.parse import urlencode +from zoneinfo import ZoneInfo # Import ZoneInfo from requests import RequestException, Session @@ -358,9 +359,50 @@ def _get_imdb_id_from_list(self, namespaces: List[SimpleNamespace], id_type: str return None def _get_formatted_date(self, data, item_type: str) -> Optional[datetime]: - """Get the formatted aired date from the data.""" + """Get the formatted aired date from the data. + Trakt API provides all dates in UTC/GMT format (ISO 8601). + """ if item_type in ["show", "season", "episode"] and (first_aired := getattr(data, "first_aired", None)): - return datetime.strptime(first_aired, "%Y-%m-%dT%H:%M:%S.%fZ") - if item_type == "movie" and (released := getattr(data, "released", None)): - return datetime.strptime(released, "%Y-%m-%d") + try: + # Parse the UTC time directly from Trakt's first_aired field + utc_time = datetime.fromisoformat(first_aired.replace('Z', '+00:00')) + + # Apply release delay if configured + if self.settings.release_delay_minutes > 0: + utc_time = utc_time + timedelta(minutes=self.settings.release_delay_minutes) + logger.debug(f" Adding {self.settings.release_delay_minutes} minute delay to release time") + + logger.debug(f"Time conversion for {getattr(data, 'title', 'Unknown')}:") + logger.debug(f" 1. Raw time from Trakt (UTC): {first_aired}") + logger.debug(f" 2. Parsed UTC time: {utc_time}") + + # Convert to local time for display + local_time = utc_time.astimezone() + logger.debug(f" 3. Your local time will be: {local_time}") + + # Check if we have timezone information from Trakt + tz = getattr(data, "airs", {}).get("timezone", None) + if tz: + logger.debug(f" 4. Show timezone from Trakt: {tz}") + try: + # Convert UTC time to show's timezone for reference + show_time = utc_time.astimezone(ZoneInfo(tz)) + logger.debug(f" 5. 
Time in show's timezone: {show_time}") + except Exception as e: + logger.error(f"Failed to convert to show timezone: {e}") + + return utc_time + except (ValueError, TypeError) as e: + logger.error(f"Failed to parse airtime: {e}") + return None + + elif item_type == "movie" and (released := getattr(data, "released", None)): + try: + # Movies just have dates, set to midnight UTC + utc_time = datetime.strptime(released, "%Y-%m-%d").replace(hour=0, minute=0, second=0, tzinfo=ZoneInfo("UTC")) + logger.debug(f"Parsed movie release date: {released} -> {utc_time} UTC") + return utc_time + except ValueError: + logger.error(f"Failed to parse release date: {released}") + return None return None \ No newline at end of file diff --git a/src/program/media/item.py b/src/program/media/item.py index 178a64f6..446acef4 100644 --- a/src/program/media/item.py +++ b/src/program/media/item.py @@ -3,6 +3,7 @@ from enum import Enum from pathlib import Path from typing import List, Optional, Self +from zoneinfo import ZoneInfo import sqlalchemy from loguru import logger @@ -174,9 +175,16 @@ def blacklist_stream(self, stream: Stream): @property def is_released(self) -> bool: """Check if an item has been released.""" - if self.aired_at and self.aired_at <= datetime.now(): - return True - return False + if self.aired_at is None: + return False + + # Get current time with timezone info + if self.aired_at.tzinfo is None: + # If aired_at is naive, assume UTC + self.aired_at = self.aired_at.replace(tzinfo=ZoneInfo("UTC")) + + now = datetime.now(tz=self.aired_at.tzinfo) + return self.aired_at <= now @property def state(self): @@ -391,6 +399,82 @@ def _reset(self): def log_string(self): return self.title or self.id + def get_top_title(self) -> str: + """Get the top title of the item.""" + if self.type == "season": + return self.parent.title + elif self.type == "episode": + return self.parent.parent.title + else: + return self.title + + def get_top_imdb_id(self) -> str: + """Get the imdb_id of the item at the top of the hierarchy.""" + if self.type == "season": + return self.parent.imdb_id + elif self.type == "episode": + return self.parent.parent.imdb_id + return self.imdb_id + + def get_aliases(self) -> dict: + """Get the aliases of the item.""" + if self.type == "season": + return self.parent.aliases + elif self.type == "episode": + return self.parent.parent.aliases + else: + return self.aliases + + def __hash__(self): + return hash(self.id) + + def reset(self): + """Reset item attributes.""" + if self.type == "show": + for season in self.seasons: + for episode in season.episodes: + episode._reset() + season._reset() + elif self.type == "season": + for episode in self.episodes: + episode._reset() + self._reset() + if self.title: + self.store_state(States.Indexed) + else: + self.store_state(States.Requested) + + def _reset(self): + """Reset item attributes for rescraping.""" + if self.symlink_path: + if Path(self.symlink_path).exists(): + Path(self.symlink_path).unlink() + self.set("symlink_path", None) + + try: + for subtitle in self.subtitles: + subtitle.remove() + except Exception as e: + logger.warning(f"Failed to remove subtitles for {self.log_string}: {str(e)}") + + self.set("file", None) + self.set("folder", None) + self.set("alternative_folder", None) + + reset_streams(self) + self.active_stream = {} + + self.set("active_stream", {}) + self.set("symlinked", False) + self.set("symlinked_at", None) + self.set("update_folder", None) + self.set("scraped_at", None) + + self.set("symlinked_times", 0) + 
self.set("scraped_times", 0) + + logger.debug(f"Item {self.log_string} has been reset") + @property def collection(self): return self.parent.collection if self.parent else self.id @@ -581,7 +665,12 @@ def _determine_state(self): @property def is_released(self) -> bool: - return any(episode.is_released for episode in self.episodes) + """Check if an item has been released.""" + if self.aired_at: + # Use current time for comparison to include time component + current_time = datetime.now() + return self.aired_at <= current_time + return False def __repr__(self): return f"Season:{self.number}:{self.state.name}" diff --git a/src/program/program.py b/src/program/program.py index 1f3fab7c..0dca109a 100644 --- a/src/program/program.py +++ b/src/program/program.py @@ -3,13 +3,15 @@ import threading import time from concurrent.futures import ThreadPoolExecutor, as_completed -from datetime import datetime +from datetime import datetime, timedelta from queue import Empty +from zoneinfo import ZoneInfo from apscheduler.schedulers.background import BackgroundScheduler +from kink import di from rich.live import Live -from program.apis import bootstrap_apis +from program.apis import bootstrap_apis, TraktAPI from program.managers.event_manager import EventManager from program.media.item import Episode, MediaItem, Movie, Season, Show from program.media.state import States @@ -60,6 +62,9 @@ def __init__(self): self.services = {} self.enable_trace = settings_manager.settings.tracemalloc self.em = EventManager() + self.scheduled_releases = {} # Track scheduled releases + self.scheduler = BackgroundScheduler() # Initialize the scheduler + self.scheduler.start() # Start the scheduler if self.enable_trace: tracemalloc.start() self.malloc_time = time.monotonic()-50 @@ -178,12 +183,10 @@ def start(self): logger.log("ITEM", f"Total Items: {total_items} (Symlinks: {total_symlinks})") self.executors = [] - self.scheduler = BackgroundScheduler() self._schedule_services() self._schedule_functions() super().start() - self.scheduler.start() logger.success("Riven is running!") self.initialized = True @@ -214,40 +217,162 @@ def _retry_library(self) -> None: def _update_ongoing(self) -> None: """Update state for ongoing and unreleased items.""" - with db.Session() as session: - item_ids = session.execute( - select(MediaItem.id) - .where(MediaItem.type.in_(["movie", "episode"])) - .where(MediaItem.last_state.in_([States.Ongoing, States.Unreleased])) - ).scalars().all() + # Use the user's local time as source of truth + current_time = datetime.fromisoformat("2024-12-13T04:55:45-08:00") + logger.debug(f"Current time: {current_time} ({current_time.tzinfo})") + + logger.log("PROGRAM", "Checking for today's releases...") + trakt_api = di[TraktAPI] + + # Clear old scheduled releases + new_scheduled_releases = {} + for k, v in self.scheduled_releases.items(): + if v > current_time: + new_scheduled_releases[k] = v + self.scheduled_releases = new_scheduled_releases - if not item_ids: - logger.debug("No ongoing or unreleased items to update.") - return + with db.Session() as session: + try: + # Get items that are either ongoing or unreleased + items = session.execute( + select(MediaItem, MediaItem.aired_at) + .where(MediaItem.type.in_(["movie", "episode"])) + .where(MediaItem.last_state.in_([States.Ongoing, States.Unreleased])) + ).unique().all() + + if not items: + logger.debug("No ongoing or unreleased items to update.") + return - logger.debug(f"Updating state for {len(item_ids)} ongoing and unreleased items.") + # Group items by 
release time today + todays_releases = [] # Already aired + upcoming_today = [] # Will air later today + + for item, aired_at in items: + if not aired_at: + continue + + # Fetch latest airtime from Trakt + try: + if item.imdb_id: + trakt_item = trakt_api.create_item_from_imdb_id(item.imdb_id, type=item.type) + if trakt_item and trakt_item.aired_at: + # Ensure Trakt time has UTC timezone + utc_time = trakt_item.aired_at + if utc_time.tzinfo is None: + utc_time = utc_time.replace(tzinfo=ZoneInfo("UTC")) + + # Store network info if available + if hasattr(trakt_item, "network"): + item.network = trakt_item.network + + # Store the UTC time in the database + item.aired_at = utc_time + session.merge(item) + aired_at = utc_time + logger.debug(f"Updated airtime for {item.log_string} to UTC {utc_time}") + except Exception as e: + logger.error(f"Failed to fetch airtime from Trakt for {item.log_string}: {e}") + + # Ensure aired_at has UTC timezone + if aired_at.tzinfo is None: + aired_at = aired_at.replace(tzinfo=ZoneInfo("UTC")) + + # Convert to local time for comparison + local_aired_at = aired_at.astimezone(current_time.tzinfo) + logger.debug(f"Checking {item.log_string}:") + logger.debug(f" UTC time: {aired_at}") + logger.debug(f" Local time: {local_aired_at} ({local_aired_at.tzinfo})") + + # Calculate delayed release time + delay_minutes = settings_manager.settings.content.trakt.release_delay_minutes + delayed_time = local_aired_at + timedelta(minutes=delay_minutes) + + # Format times for display (e.g., "8:00 PM") + air_time_str = local_aired_at.strftime("%-I:%M %p").lstrip('0') + release_time_str = delayed_time.strftime("%-I:%M %p").lstrip('0') + + # Compare in local time + if (local_aired_at.year == current_time.year and + local_aired_at.month == current_time.month and + local_aired_at.day == current_time.day): + logger.debug(f"Found today's item: {item.log_string}") + if delayed_time <= current_time: + todays_releases.append(item) + logger.debug(f"Added to today's releases: {item.log_string} (aired at {air_time_str}, released at {release_time_str})") + else: + upcoming_today.append((item, delayed_time)) + logger.log("PROGRAM", f"- {item.log_string} will air at {air_time_str} and release at {release_time_str} (after {delay_minutes}min delay)") + + # Commit any airtime updates + session.commit() + + # Log today's schedule + if todays_releases or upcoming_today: + logger.log("PROGRAM", f"Found {len(todays_releases) + len(upcoming_today)} items scheduled for release today:") + for item in todays_releases: + local_time = item.aired_at.astimezone(current_time.tzinfo) + time_str = local_time.strftime("%-I:%M %p").lstrip('0') + logger.log("PROGRAM", f"- {item.log_string} was scheduled to release at {time_str}") + for item, air_time in upcoming_today: + time_str = air_time.strftime("%-I:%M %p").lstrip('0') + logger.log("PROGRAM", f"- {item.log_string} will release at {time_str}") + # Schedule it + self.scheduled_releases[item.id] = air_time + self.scheduler.add_job( + self._update_item_state, + 'date', + run_date=air_time, + args=[item.id], + id=f"release_{item.id}", + replace_existing=True + ) + else: + logger.log("PROGRAM", "No items scheduled for release today. 
Next check in 24 hours.") - counter = 0 - for item_id in item_ids: - try: - item = session.execute(select(MediaItem).filter_by(id=item_id)).unique().scalar_one_or_none() - if item: + # Update items that have already aired + counter = 0 + for item in todays_releases: + try: previous_state, new_state = item.store_state() if previous_state != new_state: - self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item_id)) + self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item.id)) logger.debug(f"Updated state for {item.log_string} ({item.id}) from {previous_state.name} to {new_state.name}") counter += 1 session.merge(item) session.commit() - except Exception as e: - logger.error(f"Failed to update state for item with ID {item_id}: {e}") + except Exception as e: + logger.error(f"Failed to update state for item with ID {item.id}: {e}") + + if counter > 0: + logger.debug(f"Updated state for {counter} items.") + + except Exception as e: + logger.error(f"Error in _update_ongoing: {str(e)}") - logger.debug(f"Found {counter} items with updated state.") + def _update_item_state(self, item_id: str) -> None: + """Update the state of a single item.""" + with db.Session() as session: + try: + item = session.execute(select(MediaItem).filter_by(id=item_id)).unique().scalar_one_or_none() + if item: + previous_state, new_state = item.store_state() + if previous_state != new_state: + self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item_id)) + logger.log("RELEASE", f" {item.log_string} has been released!") + logger.debug(f"Changed state for {item.log_string} ({item.id}) from {previous_state.name} to {new_state.name}") + session.merge(item) + session.commit() + except Exception as e: + logger.error(f"Failed to update scheduled state for item with ID {item_id}: {e}") + finally: + # Remove from scheduled releases after processing + self.scheduled_releases.pop(item_id, None) def _schedule_functions(self) -> None: """Schedule each service based on its update interval.""" scheduled_functions = { - self._update_ongoing: {"interval": 60 * 60 * 24}, + self._update_ongoing: {"interval": 60 * 60 * 24}, # Daily check self._retry_library: {"interval": 60 * 60 * 24}, log_cleaner: {"interval": 60 * 60}, vacuum_and_analyze_index_maintenance: {"interval": 60 * 60 * 24}, diff --git a/src/program/settings/models.py b/src/program/settings/models.py index ed18d06a..6b0800c7 100644 --- a/src/program/settings/models.py +++ b/src/program/settings/models.py @@ -173,6 +173,7 @@ class TraktModel(Updatable): most_watched_period: str = "weekly" most_watched_count: int = 10 update_interval: int = 86400 + release_delay_minutes: int = Field(default=60, description="Number of minutes to delay the state change after release") oauth: TraktOauthModel = TraktOauthModel() From 01f2b38f9224d53ce873de4f26b3a246336e02ea Mon Sep 17 00:00:00 2001 From: Arrrrr Date: Sat, 14 Dec 2024 21:01:27 -0800 Subject: [PATCH 2/9] feat: active ongoing update --- src/program/apis/__init__.py | 11 +- src/program/apis/trakt_api.py | 3 +- src/program/apis/tvmaze_api.py | 184 +++++++++++++++++++++++++++++ src/program/program.py | 208 ++++++++++++++++++++------------- 4 files changed, 320 insertions(+), 86 deletions(-) create mode 100644 src/program/apis/tvmaze_api.py diff --git a/src/program/apis/__init__.py b/src/program/apis/__init__.py index 5fd463b6..ef0f8386 100644 --- a/src/program/apis/__init__.py +++ b/src/program/apis/__init__.py @@ -7,7 +7,7 @@ from .overseerr_api import OverseerrAPI, OverseerrAPIError from .plex_api import PlexAPI, 
PlexAPIError from .trakt_api import TraktAPI, TraktAPIError - +from program.apis.tvmaze_api import TVMazeAPI def bootstrap_apis(): __setup_trakt() @@ -15,10 +15,11 @@ def bootstrap_apis(): __setup_mdblist() __setup_overseerr() __setup_listrr() + __setup_tvmaze() def __setup_trakt(): - traktApi = TraktAPI(settings_manager.settings.content.trakt) - di[TraktAPI] = traktApi + """Setup Trakt API.""" + di[TraktAPI] = TraktAPI(settings_manager.settings.content.trakt) def __setup_plex(): if not settings_manager.settings.updaters.plex.enabled: @@ -43,3 +44,7 @@ def __setup_listrr(): return listrrApi = ListrrAPI(settings_manager.settings.content.listrr.api_key) di[ListrrAPI] = listrrApi + +def __setup_tvmaze(): + """Setup TVMaze API.""" + di[TVMazeAPI] = TVMazeAPI() diff --git a/src/program/apis/trakt_api.py b/src/program/apis/trakt_api.py index d58d351c..fbffdbaa 100644 --- a/src/program/apis/trakt_api.py +++ b/src/program/apis/trakt_api.py @@ -381,7 +381,8 @@ def _get_formatted_date(self, data, item_type: str) -> Optional[datetime]: logger.debug(f" 3. Your local time will be: {local_time}") # Check if we have timezone information from Trakt - tz = getattr(data, "airs", {}).get("timezone", None) + airs = getattr(data, "airs", None) + tz = getattr(airs, "timezone", None) if airs else None if tz: logger.debug(f" 4. Show timezone from Trakt: {tz}") try: diff --git a/src/program/apis/tvmaze_api.py b/src/program/apis/tvmaze_api.py new file mode 100644 index 00000000..fc7594ad --- /dev/null +++ b/src/program/apis/tvmaze_api.py @@ -0,0 +1,184 @@ +"""TVMaze API client for fetching show information.""" +from datetime import datetime +from typing import Optional +from zoneinfo import ZoneInfo + +from loguru import logger +from program.utils.request import ( + BaseRequestHandler, + HttpMethod, + ResponseType, + create_service_session, + get_cache_params, + get_rate_limit_params, +) +from requests.exceptions import HTTPError + +class TVMazeAPI: + """Handles TVMaze API communication.""" + + BASE_URL = "https://api.tvmaze.com" + + def __init__(self): + rate_limit_params = get_rate_limit_params(max_calls=20, period=10) # TVMaze allows 20 requests per 10 seconds + tvmaze_cache = get_cache_params("tvmaze", 86400) # Cache for 24 hours + session = create_service_session(rate_limit_params=rate_limit_params, use_cache=True, cache_params=tvmaze_cache) + self.request_handler = BaseRequestHandler(session, response_type=ResponseType.SIMPLE_NAMESPACE) + + def get_show_by_imdb(self, imdb_id: str, show_name: Optional[str] = None, season_number: Optional[int] = None, episode_number: Optional[int] = None) -> Optional[datetime]: + """Get show information from TVMaze using IMDB ID. 
+ + Args: + imdb_id: IMDB ID of the show or episode (with or without 'tt' prefix) + show_name: Optional show name to use for search if IMDB lookup fails + season_number: Optional season number to find specific episode + episode_number: Optional episode number to find specific episode + + Returns: + Next episode airtime in local time if available, None otherwise + """ + try: + # Add 'tt' prefix if not present + if not imdb_id.startswith('tt'): + imdb_id = f'tt{imdb_id}' + + show = None + + # Try singlesearch by show name first if provided, since episode IDs won't work with lookup + if show_name: + logger.debug(f"Trying singlesearch by name: {show_name}") + try: + response = self.request_handler._request(HttpMethod.GET, f"{self.BASE_URL}/singlesearch/shows", params={'q': show_name}) + show = response.data if response.is_ok else None + except HTTPError as e: + if e.response.status_code == 404: + show = None + else: + raise + + # If show name search fails or wasn't provided, try direct lookup + # This will only work for show-level IMDB IDs, not episode IDs + if not show: + try: + response = self.request_handler._request(HttpMethod.GET, f"{self.BASE_URL}/lookup/shows", params={'imdb': imdb_id}) + show = response.data if response.is_ok else None + except HTTPError as e: + if e.response.status_code == 404: + show = None + else: + raise + + # If that fails too, try regular search + if not show and show_name: + logger.debug(f"Singlesearch failed for {show_name}, trying regular search") + try: + response = self.request_handler._request(HttpMethod.GET, f"{self.BASE_URL}/search/shows", params={'q': show_name}) + if response.is_ok and response.data: + # Take the first result with highest score + show = response.data[0].show if response.data else None + except HTTPError as e: + if e.response.status_code == 404: + show = None + else: + raise + + if not show: + logger.debug(f"Could not find show for {imdb_id} / {show_name}") + return None + + # Get next episode + try: + response = self.request_handler._request(HttpMethod.GET, f"{self.BASE_URL}/shows/{show.id}/episodes") + episodes = response.data if response.is_ok else None + except HTTPError as e: + if e.response.status_code == 404: + episodes = None + else: + raise + + if not episodes: + return None + + # Find the next episode that hasn't aired yet + current_time = datetime.fromisoformat("2024-12-14T20:04:26-08:00") + next_episode = None + target_episode_time = None + + for episode in episodes: + try: + if not episode.airstamp: + continue + + # First try to get air time using network timezone + air_time = None + if (hasattr(show, 'network') and show.network and + hasattr(show.network, 'country') and show.network.country and + hasattr(show.network.country, 'timezone') and show.network.country.timezone and + episode.airdate and episode.airtime): + + # Combine airdate and airtime in network timezone + network_tz = ZoneInfo(show.network.country.timezone) + air_datetime = f"{episode.airdate}T{episode.airtime}" + try: + # Parse the time in network timezone + air_time = datetime.fromisoformat(air_datetime).replace(tzinfo=network_tz) + # Only log network time for the target episode + if (season_number is not None and episode_number is not None and + hasattr(episode, 'number') and hasattr(episode, 'season') and + episode.season == season_number and episode.number == episode_number): + logger.debug(f"Network airs show at {air_time} ({show.network.country.timezone})") + except Exception as e: + logger.error(f"Failed to parse network air time: {e}") + air_time = 
None + + # Fallback to airstamp if needed + if not air_time and episode.airstamp: + try: + air_time = datetime.fromisoformat(episode.airstamp.replace('Z', '+00:00')) + if (season_number is not None and episode_number is not None and + hasattr(episode, 'number') and hasattr(episode, 'season') and + episode.season == season_number and episode.number == episode_number): + logger.debug(f"Using UTC airstamp: {air_time}") + except Exception as e: + logger.error(f"Failed to parse airstamp: {e}") + continue + + if not air_time: + continue + + # Convert to local time + air_time = air_time.astimezone(current_time.tzinfo) + + # Check if this is the specific episode we want + if season_number is not None and episode_number is not None: + if hasattr(episode, 'number') and hasattr(episode, 'season'): + if episode.season == season_number and episode.number == episode_number: + # Found our target episode + if hasattr(episode, 'name'): + logger.debug(f"Found S{season_number}E{episode_number} '{episode.name}' airing at {air_time}") + else: + logger.debug(f"Found S{season_number}E{episode_number} airing at {air_time}") + target_episode_time = air_time + break # No need to continue looking + + # If we're looking for next episode and this one is in the future + elif air_time > current_time: + # If we haven't found any future episode yet, or this one airs sooner + if not next_episode or air_time < next_episode: + next_episode = air_time + + except Exception as e: + logger.error(f"Failed to process episode {getattr(episode, 'number', '?')}: {e}") + continue + + # Return target episode time if we found one, otherwise return next episode + if target_episode_time is not None: + return target_episode_time + + if next_episode: + logger.debug(f"Next episode airs at {next_episode}") + return next_episode + + except Exception as e: + logger.error(f"Error fetching TVMaze data for {imdb_id}: {e}") + return None diff --git a/src/program/program.py b/src/program/program.py index 0dca109a..d0b5ec8f 100644 --- a/src/program/program.py +++ b/src/program/program.py @@ -11,7 +11,7 @@ from kink import di from rich.live import Live -from program.apis import bootstrap_apis, TraktAPI +from program.apis import bootstrap_apis, TraktAPI, TVMazeAPI from program.managers.event_manager import EventManager from program.media.item import Episode, MediaItem, Movie, Season, Show from program.media.state import States @@ -218,8 +218,8 @@ def _retry_library(self) -> None: def _update_ongoing(self) -> None: """Update state for ongoing and unreleased items.""" # Use the user's local time as source of truth - current_time = datetime.fromisoformat("2024-12-13T04:55:45-08:00") - logger.debug(f"Current time: {current_time} ({current_time.tzinfo})") + current_time = datetime.fromisoformat("2024-12-14T02:59:41-08:00") + logger.debug(f"Current time: {current_time}") logger.log("PROGRAM", "Checking for today's releases...") trakt_api = di[TraktAPI] @@ -240,99 +240,118 @@ def _update_ongoing(self) -> None: .where(MediaItem.last_state.in_([States.Ongoing, States.Unreleased])) ).unique().all() - if not items: - logger.debug("No ongoing or unreleased items to update.") - return - - # Group items by release time today - todays_releases = [] # Already aired - upcoming_today = [] # Will air later today - for item, aired_at in items: if not aired_at: continue - # Fetch latest airtime from Trakt + # Fetch latest airtime from both Trakt and TVMaze try: if item.imdb_id: - trakt_item = trakt_api.create_item_from_imdb_id(item.imdb_id, type=item.type) - if trakt_item 
and trakt_item.aired_at: - # Ensure Trakt time has UTC timezone - utc_time = trakt_item.aired_at - if utc_time.tzinfo is None: - utc_time = utc_time.replace(tzinfo=ZoneInfo("UTC")) - - # Store network info if available - if hasattr(trakt_item, "network"): - item.network = trakt_item.network - - # Store the UTC time in the database - item.aired_at = utc_time - session.merge(item) - aired_at = utc_time - logger.debug(f"Updated airtime for {item.log_string} to UTC {utc_time}") + trakt_time = None + tvmaze_time = None + air_time = None # Initialize air_time here + + # Get Trakt time + try: + trakt_item = trakt_api.create_item_from_imdb_id(item.imdb_id, type=item.type) + if trakt_item and trakt_item.aired_at: + # Convert Trakt time to local time + trakt_time = trakt_item.aired_at + if trakt_time.tzinfo is None: + trakt_time = trakt_time.replace(tzinfo=ZoneInfo("UTC")) + trakt_time = trakt_time.astimezone(current_time.tzinfo) + logger.debug(f"Trakt airtime for {item.log_string}: {trakt_time}") + + # Store network info if available + if hasattr(trakt_item, "network"): + item.network = trakt_item.network + except Exception as e: + logger.error(f"Failed to fetch airtime from Trakt for {item.log_string}: {e}") + + # Get TVMaze time (already in local time) + try: + # Skip TVMaze lookup for movies + if item.type == "movie": + continue + + tvmaze_api = di[TVMazeAPI] + # Get show title - for episodes, use the parent show's title + show_name = item.get_top_title() + if not show_name: + continue + + # Skip if it's just an episode number + if show_name.lower().startswith("episode "): + continue + + # For episodes, pass the season and episode numbers + season_number = None + episode_number = None + if item.type == "episode": + if isinstance(item, Episode): + season_number = item.parent.number if item.parent else None + episode_number = item.number + + tvmaze_time = tvmaze_api.get_show_by_imdb( + item.imdb_id, + show_name=show_name, + season_number=season_number, + episode_number=episode_number + ) + if tvmaze_time: + logger.debug(f"TVMaze airtime for {item.log_string}: {tvmaze_time}") + except Exception as e: + logger.error(f"Failed to fetch airtime from TVMaze for {item.log_string}: {e}") + + # Use the earliest available time + if trakt_time and tvmaze_time: + air_time = min(trakt_time, tvmaze_time) + logger.debug(f"Using earliest time between Trakt ({trakt_time}) and TVMaze ({tvmaze_time})") + else: + air_time = trakt_time or tvmaze_time + if not air_time: + logger.debug(f"No airtime available from either Trakt or TVMaze for {item.log_string}") + continue # Skip this item + + if not air_time: # Add explicit check + logger.debug(f"No valid air time found for {item.log_string}") + continue + + # Store the local time in the database + item.aired_at = air_time + session.merge(item) + logger.debug(f"Updated airtime for {item.log_string} to {air_time}") + + # Calculate delayed release time + delay_minutes = settings_manager.settings.content.trakt.release_delay_minutes + delayed_time = air_time + timedelta(minutes=delay_minutes) + + # Format times for display (e.g., "8:00 PM") + air_time_str = air_time.strftime("%-I:%M %p").lstrip('0') + release_time_str = delayed_time.strftime("%-I:%M %p").lstrip('0') + + # Compare in local time + if (air_time.year == current_time.year and + air_time.month == current_time.month and + air_time.day == current_time.day): + logger.debug(f"Found today's item: {item.log_string}") + if delayed_time <= current_time: + logger.log("PROGRAM", f"- {item.log_string} was scheduled to release 
at {air_time_str}") + else: + logger.log("PROGRAM", f"- {item.log_string} will release at {release_time_str} (after {delay_minutes}min delay)") except Exception as e: - logger.error(f"Failed to fetch airtime from Trakt for {item.log_string}: {e}") - - # Ensure aired_at has UTC timezone - if aired_at.tzinfo is None: - aired_at = aired_at.replace(tzinfo=ZoneInfo("UTC")) - - # Convert to local time for comparison - local_aired_at = aired_at.astimezone(current_time.tzinfo) - logger.debug(f"Checking {item.log_string}:") - logger.debug(f" UTC time: {aired_at}") - logger.debug(f" Local time: {local_aired_at} ({local_aired_at.tzinfo})") - - # Calculate delayed release time - delay_minutes = settings_manager.settings.content.trakt.release_delay_minutes - delayed_time = local_aired_at + timedelta(minutes=delay_minutes) - - # Format times for display (e.g., "8:00 PM") - air_time_str = local_aired_at.strftime("%-I:%M %p").lstrip('0') - release_time_str = delayed_time.strftime("%-I:%M %p").lstrip('0') - - # Compare in local time - if (local_aired_at.year == current_time.year and - local_aired_at.month == current_time.month and - local_aired_at.day == current_time.day): - logger.debug(f"Found today's item: {item.log_string}") - if delayed_time <= current_time: - todays_releases.append(item) - logger.debug(f"Added to today's releases: {item.log_string} (aired at {air_time_str}, released at {release_time_str})") - else: - upcoming_today.append((item, delayed_time)) - logger.log("PROGRAM", f"- {item.log_string} will air at {air_time_str} and release at {release_time_str} (after {delay_minutes}min delay)") + logger.error(f"Failed to fetch airtime for {item.log_string}: {e}") + continue # Commit any airtime updates session.commit() # Log today's schedule - if todays_releases or upcoming_today: - logger.log("PROGRAM", f"Found {len(todays_releases) + len(upcoming_today)} items scheduled for release today:") - for item in todays_releases: - local_time = item.aired_at.astimezone(current_time.tzinfo) - time_str = local_time.strftime("%-I:%M %p").lstrip('0') - logger.log("PROGRAM", f"- {item.log_string} was scheduled to release at {time_str}") - for item, air_time in upcoming_today: - time_str = air_time.strftime("%-I:%M %p").lstrip('0') - logger.log("PROGRAM", f"- {item.log_string} will release at {time_str}") - # Schedule it - self.scheduled_releases[item.id] = air_time - self.scheduler.add_job( - self._update_item_state, - 'date', - run_date=air_time, - args=[item.id], - id=f"release_{item.id}", - replace_existing=True - ) - else: - logger.log("PROGRAM", "No items scheduled for release today. Next check in 24 hours.") + logger.log("PROGRAM", "No items scheduled for release today. 
Next check in 24 hours.") # Update items that have already aired counter = 0 - for item in todays_releases: + for item, aired_at in items: try: previous_state, new_state = item.store_state() if previous_state != new_state: @@ -371,8 +390,33 @@ def _update_item_state(self, item_id: str) -> None: def _schedule_functions(self) -> None: """Schedule each service based on its update interval.""" + # Schedule the ongoing state update function + self.scheduler.add_job( + self._update_ongoing, + 'interval', + hours=24, + id="update_ongoing", + replace_existing=True + ) + + # Schedule midnight update + current_time = datetime.now() + midnight = (current_time + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0) + logger.debug(f"Scheduling next midnight update at: {midnight}") + + self.scheduler.add_job( + self._update_ongoing, + 'cron', + hour=0, + minute=0, + id="midnight_update", + replace_existing=True + ) + + # Run update_ongoing immediately on startup + self._update_ongoing() + scheduled_functions = { - self._update_ongoing: {"interval": 60 * 60 * 24}, # Daily check self._retry_library: {"interval": 60 * 60 * 24}, log_cleaner: {"interval": 60 * 60}, vacuum_and_analyze_index_maintenance: {"interval": 60 * 60 * 24}, From 53f9898127d4f540ebb6c960e31ce7b03332371c Mon Sep 17 00:00:00 2001 From: Arrrrr Date: Mon, 16 Dec 2024 00:39:26 -0800 Subject: [PATCH 3/9] feat: Active ongoing episode releasing on the time of airing --- src/program/apis/tvmaze_api.py | 2 +- src/program/program.py | 320 ++++++++++++++++----------------- 2 files changed, 161 insertions(+), 161 deletions(-) diff --git a/src/program/apis/tvmaze_api.py b/src/program/apis/tvmaze_api.py index fc7594ad..f040d33c 100644 --- a/src/program/apis/tvmaze_api.py +++ b/src/program/apis/tvmaze_api.py @@ -100,7 +100,7 @@ def get_show_by_imdb(self, imdb_id: str, show_name: Optional[str] = None, season return None # Find the next episode that hasn't aired yet - current_time = datetime.fromisoformat("2024-12-14T20:04:26-08:00") + current_time = datetime.now() next_episode = None target_episode_time = None diff --git a/src/program/program.py b/src/program/program.py index d0b5ec8f..a350fbd5 100644 --- a/src/program/program.py +++ b/src/program/program.py @@ -217,157 +217,164 @@ def _retry_library(self) -> None: def _update_ongoing(self) -> None: """Update state for ongoing and unreleased items.""" - # Use the user's local time as source of truth - current_time = datetime.fromisoformat("2024-12-14T02:59:41-08:00") - logger.debug(f"Current time: {current_time}") - - logger.log("PROGRAM", "Checking for today's releases...") - trakt_api = di[TraktAPI] - - # Clear old scheduled releases - new_scheduled_releases = {} - for k, v in self.scheduled_releases.items(): - if v > current_time: - new_scheduled_releases[k] = v - self.scheduled_releases = new_scheduled_releases + try: + # Get current time with timezone info + current_time = datetime.now().astimezone() + logger.debug(f"Current time: {current_time.strftime('%I:%M %p').lstrip('0')}") + + logger.log("PROGRAM", "Checking for today's releases...") + trakt_api = di[TraktAPI] + + # Clear old scheduled releases + new_scheduled_releases = {} + for k, v in self.scheduled_releases.items(): + if v > current_time: + new_scheduled_releases[k] = v + self.scheduled_releases = new_scheduled_releases + + items_found_today = False # Track if we found any items for today + todays_releases = [] # Track items and their release times - with db.Session() as session: - try: - # Get items that are either 
ongoing or unreleased - items = session.execute( - select(MediaItem, MediaItem.aired_at) - .where(MediaItem.type.in_(["movie", "episode"])) - .where(MediaItem.last_state.in_([States.Ongoing, States.Unreleased])) - ).unique().all() - - for item, aired_at in items: - if not aired_at: - continue - - # Fetch latest airtime from both Trakt and TVMaze - try: - if item.imdb_id: - trakt_time = None - tvmaze_time = None - air_time = None # Initialize air_time here - - # Get Trakt time - try: - trakt_item = trakt_api.create_item_from_imdb_id(item.imdb_id, type=item.type) - if trakt_item and trakt_item.aired_at: - # Convert Trakt time to local time - trakt_time = trakt_item.aired_at - if trakt_time.tzinfo is None: - trakt_time = trakt_time.replace(tzinfo=ZoneInfo("UTC")) - trakt_time = trakt_time.astimezone(current_time.tzinfo) - logger.debug(f"Trakt airtime for {item.log_string}: {trakt_time}") - - # Store network info if available - if hasattr(trakt_item, "network"): - item.network = trakt_item.network - except Exception as e: - logger.error(f"Failed to fetch airtime from Trakt for {item.log_string}: {e}") - - # Get TVMaze time (already in local time) - try: - # Skip TVMaze lookup for movies - if item.type == "movie": + with db.Session() as session: + try: + # Get items that are either ongoing or unreleased + items = session.execute( + select(MediaItem, MediaItem.aired_at) + .where(MediaItem.type.in_(["movie", "episode"])) + .where(MediaItem.last_state.in_([States.Ongoing, States.Unreleased])) + ).unique().all() + + for item, aired_at in items: + if not aired_at: + continue + + try: + if item.imdb_id: + trakt_time = None + tvmaze_time = None + air_time = None # Initialize air_time here + + # Get Trakt time + try: + trakt_item = trakt_api.create_item_from_imdb_id(item.imdb_id, type=item.type) + if trakt_item and trakt_item.aired_at: + # Convert Trakt time to local time + trakt_time = trakt_item.aired_at + if trakt_time.tzinfo is None: + trakt_time = trakt_time.replace(tzinfo=ZoneInfo("UTC")) + trakt_time = trakt_time.astimezone(current_time.tzinfo) + logger.debug(f"Trakt airtime for {item.log_string}: {trakt_time}") + + # Store network info if available + if hasattr(trakt_item, "network"): + item.network = trakt_item.network + except Exception as e: + logger.error(f"Failed to fetch airtime from Trakt for {item.log_string}: {e}") + + # Get TVMaze time (already in local time) + try: + # Skip TVMaze lookup for movies + if item.type == "movie": + continue + + tvmaze_api = di[TVMazeAPI] + # Get show title - for episodes, use the parent show's title + show_name = item.get_top_title() + if not show_name: + continue + + # Skip if it's just an episode number + if show_name.lower().startswith("episode "): + continue + + # For episodes, pass the season and episode numbers + season_number = None + episode_number = None + if item.type == "episode": + if isinstance(item, Episode): + season_number = item.parent.number if item.parent else None + episode_number = item.number + + tvmaze_time = tvmaze_api.get_show_by_imdb( + item.imdb_id, + show_name=show_name, + season_number=season_number, + episode_number=episode_number + ) + if tvmaze_time: + logger.debug(f"TVMaze airtime for {item.log_string}: {tvmaze_time}") + except Exception as e: + logger.error(f"Failed to fetch airtime from TVMaze for {item.log_string}: {e}") + + # Use the earliest available time + if trakt_time and tvmaze_time: + air_time = min(trakt_time, tvmaze_time) + logger.debug(f"Using earliest time between Trakt ({trakt_time}) and TVMaze 
({tvmaze_time})") + else: + air_time = trakt_time or tvmaze_time + if not air_time: + logger.debug(f"No airtime available from either Trakt or TVMaze for {item.log_string}") + continue # Skip this item + + if not air_time: # Add explicit check + logger.debug(f"No valid air time found for {item.log_string}") continue - - tvmaze_api = di[TVMazeAPI] - # Get show title - for episodes, use the parent show's title - show_name = item.get_top_title() - if not show_name: + + # Store the local time in the database + item.aired_at = air_time + session.merge(item) + logger.debug(f"Updated airtime for {item.log_string} to {air_time}") + + # Calculate delayed release time + delay_minutes = settings_manager.settings.content.trakt.release_delay_minutes + delayed_time = air_time + timedelta(minutes=delay_minutes) + + # Format times for display (e.g., "8:00 PM") + air_time_str = air_time.strftime("%I:%M %p").lstrip('0') + release_time_str = delayed_time.strftime("%I:%M %p").lstrip('0') + + # If it aired in the past (including delay), release it immediately + if delayed_time <= current_time: + logger.log("PROGRAM", f"- {item.log_string} was scheduled to release at {air_time_str}") + # Trigger immediate state update + previous_state, new_state = item.store_state() + if previous_state != new_state: + self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item.id)) + logger.debug(f"Updated state for {item.log_string} ({item.id}) from {previous_state.name} to {new_state.name}") continue + + # For future items, only schedule if they air today + if (air_time.year == current_time.year and + air_time.month == current_time.month and + air_time.day == current_time.day): + items_found_today = True + todays_releases.append((item.log_string, release_time_str)) + logger.debug(f"Found today's item: {item.log_string}") + logger.log("PROGRAM", f"- {item.log_string} will release at {release_time_str} (included {delay_minutes}min delay)") + # Add to scheduled releases if not already there + if item.id not in self.scheduled_releases: + self.scheduled_releases[item.id] = delayed_time + except Exception as e: + logger.error(f"Failed to fetch airtime for {item.log_string}: {e}") + continue + + # Commit any airtime updates + session.commit() - # Skip if it's just an episode number - if show_name.lower().startswith("episode "): - continue + # Log summary of today's releases + if todays_releases: + logger.log("PROGRAM", "\nToday's releases:") + for item_name, release_time in sorted(todays_releases, key=lambda x: x[1]): + logger.log("PROGRAM", f" β€’ {item_name} at {release_time}") + else: + logger.log("PROGRAM", "\nNo releases scheduled for today") - # For episodes, pass the season and episode numbers - season_number = None - episode_number = None - if item.type == "episode": - if isinstance(item, Episode): - season_number = item.parent.number if item.parent else None - episode_number = item.number - - tvmaze_time = tvmaze_api.get_show_by_imdb( - item.imdb_id, - show_name=show_name, - season_number=season_number, - episode_number=episode_number - ) - if tvmaze_time: - logger.debug(f"TVMaze airtime for {item.log_string}: {tvmaze_time}") - except Exception as e: - logger.error(f"Failed to fetch airtime from TVMaze for {item.log_string}: {e}") - - # Use the earliest available time - if trakt_time and tvmaze_time: - air_time = min(trakt_time, tvmaze_time) - logger.debug(f"Using earliest time between Trakt ({trakt_time}) and TVMaze ({tvmaze_time})") - else: - air_time = trakt_time or tvmaze_time - if not air_time: - logger.debug(f"No 
airtime available from either Trakt or TVMaze for {item.log_string}") - continue # Skip this item - - if not air_time: # Add explicit check - logger.debug(f"No valid air time found for {item.log_string}") - continue - - # Store the local time in the database - item.aired_at = air_time - session.merge(item) - logger.debug(f"Updated airtime for {item.log_string} to {air_time}") - - # Calculate delayed release time - delay_minutes = settings_manager.settings.content.trakt.release_delay_minutes - delayed_time = air_time + timedelta(minutes=delay_minutes) - - # Format times for display (e.g., "8:00 PM") - air_time_str = air_time.strftime("%-I:%M %p").lstrip('0') - release_time_str = delayed_time.strftime("%-I:%M %p").lstrip('0') - - # Compare in local time - if (air_time.year == current_time.year and - air_time.month == current_time.month and - air_time.day == current_time.day): - logger.debug(f"Found today's item: {item.log_string}") - if delayed_time <= current_time: - logger.log("PROGRAM", f"- {item.log_string} was scheduled to release at {air_time_str}") - else: - logger.log("PROGRAM", f"- {item.log_string} will release at {release_time_str} (after {delay_minutes}min delay)") - except Exception as e: - logger.error(f"Failed to fetch airtime for {item.log_string}: {e}") - continue - - # Commit any airtime updates - session.commit() - - # Log today's schedule - logger.log("PROGRAM", "No items scheduled for release today. Next check in 24 hours.") - - # Update items that have already aired - counter = 0 - for item, aired_at in items: - try: - previous_state, new_state = item.store_state() - if previous_state != new_state: - self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item.id)) - logger.debug(f"Updated state for {item.log_string} ({item.id}) from {previous_state.name} to {new_state.name}") - counter += 1 - session.merge(item) - session.commit() - except Exception as e: - logger.error(f"Failed to update state for item with ID {item.id}: {e}") - - if counter > 0: - logger.debug(f"Updated state for {counter} items.") - - except Exception as e: - logger.error(f"Error in _update_ongoing: {str(e)}") + except Exception as e: + session.rollback() + logger.error(f"Database error in _update_ongoing: {e}") + raise + except Exception as e: + logger.error(f"Error in _update_ongoing: {e}") def _update_item_state(self, item_id: str) -> None: """Update the state of a single item.""" @@ -383,6 +390,7 @@ def _update_item_state(self, item_id: str) -> None: session.merge(item) session.commit() except Exception as e: + session.rollback() logger.error(f"Failed to update scheduled state for item with ID {item_id}: {e}") finally: # Remove from scheduled releases after processing @@ -390,27 +398,19 @@ def _update_item_state(self, item_id: str) -> None: def _schedule_functions(self) -> None: """Schedule each service based on its update interval.""" - # Schedule the ongoing state update function - self.scheduler.add_job( - self._update_ongoing, - 'interval', - hours=24, - id="update_ongoing", - replace_existing=True - ) - - # Schedule midnight update - current_time = datetime.now() + # Schedule the ongoing state update function to run at midnight + current_time = datetime.now().astimezone() midnight = (current_time + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0) - logger.debug(f"Scheduling next midnight update at: {midnight}") + logger.debug(f"Scheduling next midnight update at: {midnight.strftime('%Y-%m-%d %H:%M:%S %z')}") self.scheduler.add_job( self._update_ongoing, 'cron', 
hour=0, minute=0, - id="midnight_update", - replace_existing=True + id="update_ongoing", + replace_existing=True, + misfire_grace_time=3600 # Allow up to 1 hour delay if system is busy ) # Run update_ongoing immediately on startup From 6d690cc49f3b1916746daf1212f672b6ed1859e3 Mon Sep 17 00:00:00 2001 From: Arrrrr Date: Mon, 16 Dec 2024 02:21:03 -0800 Subject: [PATCH 4/9] feat: Active ongoing episode releasing on the time of airing --- src/program/program.py | 43 ++++++++++++++++++++++++++++--------- src/routers/secure/items.py | 23 ++++++++++++++++---- 2 files changed, 52 insertions(+), 14 deletions(-) diff --git a/src/program/program.py b/src/program/program.py index a350fbd5..325469af 100644 --- a/src/program/program.py +++ b/src/program/program.py @@ -380,21 +380,44 @@ def _update_item_state(self, item_id: str) -> None: """Update the state of a single item.""" with db.Session() as session: try: - item = session.execute(select(MediaItem).filter_by(id=item_id)).unique().scalar_one_or_none() - if item: - previous_state, new_state = item.store_state() - if previous_state != new_state: - self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item_id)) - logger.log("RELEASE", f" {item.log_string} has been released!") - logger.debug(f"Changed state for {item.log_string} ({item.id}) from {previous_state.name} to {new_state.name}") - session.merge(item) - session.commit() + item = session.execute( + select(MediaItem).where(MediaItem.id == item_id) + ).scalar_one() + + if not item: + logger.error(f"Item {item_id} not found") + return + + # Check if this item should be scheduled for release today + current_time = datetime.now().astimezone() + if item.aired_at: + delay_minutes = settings_manager.settings.content.trakt.release_delay_minutes + delayed_time = item.aired_at + timedelta(minutes=delay_minutes) + + # If it's a future release for today, schedule it + if (delayed_time > current_time and + item.aired_at.year == current_time.year and + item.aired_at.month == current_time.month and + item.aired_at.day == current_time.day): + release_time_str = delayed_time.strftime("%I:%M %p").lstrip('0') + logger.log("PROGRAM", f"Scheduling {item.log_string} for release at {release_time_str}") + self.scheduled_releases[item.id] = delayed_time + # If it should have been released already, release it now + elif delayed_time <= current_time: + previous_state, new_state = item.store_state() + if previous_state != new_state: + self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item.id)) + logger.debug(f"Updated state for {item.log_string} ({item.id}) from {previous_state.name} to {new_state.name}") + + session.merge(item) + session.commit() except Exception as e: session.rollback() logger.error(f"Failed to update scheduled state for item with ID {item_id}: {e}") finally: # Remove from scheduled releases after processing - self.scheduled_releases.pop(item_id, None) + if item_id in self.scheduled_releases and self.scheduled_releases[item_id] <= current_time: + del self.scheduled_releases[item_id] def _schedule_functions(self) -> None: """Schedule each service based on its update interval.""" diff --git a/src/routers/secure/items.py b/src/routers/secure/items.py index b5a49808..038dc8b7 100644 --- a/src/routers/secure/items.py +++ b/src/routers/secure/items.py @@ -213,10 +213,11 @@ async def add_items(request: Request, imdb_ids: str = None) -> MessageResponse: return {"message": f"Added {len(valid_ids)} item(s) to the queue"} + @router.get( "/{id}", - summary="Retrieve Media Item", - description="Fetch a 
single media item by ID", + summary="Retrieve Media Item By ID", + description="Fetch a media item by its ID", operation_id="get_item", ) async def get_item(_: Request, id: str, use_tmdb_id: Optional[bool] = False) -> dict: @@ -227,10 +228,24 @@ async def get_item(_: Request, id: str, use_tmdb_id: Optional[bool] = False) -> query = query.where(MediaItem.tmdb_id == id) else: query = query.where(MediaItem.id == id) - item = session.execute(query).unique().scalar_one() + + # Get all matching items and use the first one + items = session.execute(query).unique().scalars().all() + if not items: + raise HTTPException(status_code=404, detail="Item not found") + + # Use the first item if there are multiple + item = items[0] + if len(items) > 1: + # Log details about each duplicate + logger.warning(f"Multiple items found for ID {id}:") + for i, dupe in enumerate(items): + logger.warning(f" {i+1}. {dupe.type} - {dupe.log_string} (ID: {dupe.id})") + logger.warning(f"Using first item: {item.type} - {item.log_string}") + + return item.to_extended_dict(with_streams=False) except NoResultFound: raise HTTPException(status_code=404, detail="Item not found") - return item.to_extended_dict(with_streams=False) @router.get( From e9c0103cf207ffc4dc070d87e925c2b90b351335 Mon Sep 17 00:00:00 2001 From: Arrrrr Date: Tue, 17 Dec 2024 00:48:15 -0800 Subject: [PATCH 5/9] Update program.py --- src/program/program.py | 34 +++++++++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/src/program/program.py b/src/program/program.py index 325469af..8da6fb0a 100644 --- a/src/program/program.py +++ b/src/program/program.py @@ -402,12 +402,22 @@ def _update_item_state(self, item_id: str) -> None: release_time_str = delayed_time.strftime("%I:%M %p").lstrip('0') logger.log("PROGRAM", f"Scheduling {item.log_string} for release at {release_time_str}") self.scheduled_releases[item.id] = delayed_time + + # Schedule a one-time job at the release time + self.scheduler.add_job( + self._process_release, + 'date', + run_date=delayed_time, + args=[item.id, item.log_string], + id=f"release_{item.id}", + replace_existing=True + ) # If it should have been released already, release it now elif delayed_time <= current_time: previous_state, new_state = item.store_state() if previous_state != new_state: self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item.id)) - logger.debug(f"Updated state for {item.log_string} ({item.id}) from {previous_state.name} to {new_state.name}") + logger.log("RELEASE", f"{item.log_string} has been released!") session.merge(item) session.commit() @@ -419,6 +429,28 @@ def _update_item_state(self, item_id: str) -> None: if item_id in self.scheduled_releases and self.scheduled_releases[item_id] <= current_time: del self.scheduled_releases[item_id] + def _process_release(self, item_id: str, log_string: str) -> None: + """Process a scheduled release at its designated time.""" + try: + with db.Session() as session: + item = session.execute( + select(MediaItem).where(MediaItem.id == item_id) + ).scalar_one() + + if item: + previous_state, new_state = item.store_state() + if previous_state != new_state: + self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item_id)) + release_time = datetime.now().astimezone().strftime("%I:%M %p").lstrip('0') + logger.log("RELEASE", f"🎬 Released at {release_time}: {log_string}") + session.merge(item) + session.commit() + + # Clean up the scheduled release + self.scheduled_releases.pop(item_id, None) + except Exception as e: + 
logger.error(f"Failed to process scheduled release for {log_string}: {e}") + def _schedule_functions(self) -> None: """Schedule each service based on its update interval.""" # Schedule the ongoing state update function to run at midnight From 31ed14383a1ad52f717b1a84644812381a7121e0 Mon Sep 17 00:00:00 2001 From: Arrrrr Date: Wed, 18 Dec 2024 03:01:21 -0800 Subject: [PATCH 6/9] Update program.py --- src/program/program.py | 357 +++++++++++++++++++++-------------------- 1 file changed, 183 insertions(+), 174 deletions(-) diff --git a/src/program/program.py b/src/program/program.py index 8da6fb0a..d3257000 100644 --- a/src/program/program.py +++ b/src/program/program.py @@ -2,6 +2,7 @@ import os import threading import time +import logging from concurrent.futures import ThreadPoolExecutor, as_completed from datetime import datetime, timedelta from queue import Empty @@ -63,8 +64,17 @@ def __init__(self): self.enable_trace = settings_manager.settings.tracemalloc self.em = EventManager() self.scheduled_releases = {} # Track scheduled releases - self.scheduler = BackgroundScheduler() # Initialize the scheduler + + # Configure scheduler with timezone awareness + self.scheduler = BackgroundScheduler( + timezone=datetime.now().astimezone().tzinfo, # Use system timezone + ) + + # Disable noisy debug logs from APScheduler + logging.getLogger('apscheduler').setLevel(logging.WARNING) + self.scheduler.start() # Start the scheduler + if self.enable_trace: tracemalloc.start() self.malloc_time = time.monotonic()-50 @@ -218,26 +228,19 @@ def _retry_library(self) -> None: def _update_ongoing(self) -> None: """Update state for ongoing and unreleased items.""" try: - # Get current time with timezone info current_time = datetime.now().astimezone() - logger.debug(f"Current time: {current_time.strftime('%I:%M %p').lstrip('0')}") - - logger.log("PROGRAM", "Checking for today's releases...") + logger.log("PROGRAM", "Checking for upcoming releases...") trakt_api = di[TraktAPI] # Clear old scheduled releases - new_scheduled_releases = {} - for k, v in self.scheduled_releases.items(): - if v > current_time: - new_scheduled_releases[k] = v - self.scheduled_releases = new_scheduled_releases + self.scheduled_releases = {k: v for k, v in self.scheduled_releases.items() if v > current_time} - items_found_today = False # Track if we found any items for today - todays_releases = [] # Track items and their release times + # Track items by show to optimize API calls + checked_shows = set() + upcoming_releases = [] with db.Session() as session: try: - # Get items that are either ongoing or unreleased items = session.execute( select(MediaItem, MediaItem.aired_at) .where(MediaItem.type.in_(["movie", "episode"])) @@ -245,134 +248,126 @@ def _update_ongoing(self) -> None: ).unique().all() for item, aired_at in items: - if not aired_at: - continue - try: - if item.imdb_id: - trakt_time = None - tvmaze_time = None - air_time = None # Initialize air_time here - - # Get Trakt time - try: - trakt_item = trakt_api.create_item_from_imdb_id(item.imdb_id, type=item.type) - if trakt_item and trakt_item.aired_at: - # Convert Trakt time to local time - trakt_time = trakt_item.aired_at - if trakt_time.tzinfo is None: - trakt_time = trakt_time.replace(tzinfo=ZoneInfo("UTC")) - trakt_time = trakt_time.astimezone(current_time.tzinfo) - logger.debug(f"Trakt airtime for {item.log_string}: {trakt_time}") - - # Store network info if available - if hasattr(trakt_item, "network"): - item.network = trakt_item.network - except Exception as e: - 
logger.error(f"Failed to fetch airtime from Trakt for {item.log_string}: {e}") - - # Get TVMaze time (already in local time) + # Skip if no IMDB ID + if not item.imdb_id: + continue + + # For episodes, check if we already found a future episode for this show + if item.type == "episode": + show_id = item.parent.parent.id if item.parent and item.parent.parent else None + if show_id in checked_shows: + continue + + trakt_time = None + tvmaze_time = None + + # Get Trakt time + try: + trakt_item = trakt_api.create_item_from_imdb_id(item.imdb_id, type=item.type) + if trakt_item and trakt_item.aired_at: + trakt_time = trakt_item.aired_at + if trakt_time.tzinfo is None: + trakt_time = trakt_time.replace(tzinfo=ZoneInfo("UTC")) + trakt_time = trakt_time.astimezone(current_time.tzinfo) + + if hasattr(trakt_item, "network"): + item.network = trakt_item.network + except Exception as e: + logger.error(f"Failed to fetch Trakt time for {item.log_string}: {e}") + + # Get TVMaze time for episodes only + if item.type == "episode": try: - # Skip TVMaze lookup for movies - if item.type == "movie": - continue - tvmaze_api = di[TVMazeAPI] - # Get show title - for episodes, use the parent show's title show_name = item.get_top_title() - if not show_name: - continue - - # Skip if it's just an episode number - if show_name.lower().startswith("episode "): - continue - - # For episodes, pass the season and episode numbers - season_number = None - episode_number = None - if item.type == "episode": - if isinstance(item, Episode): - season_number = item.parent.number if item.parent else None - episode_number = item.number - - tvmaze_time = tvmaze_api.get_show_by_imdb( - item.imdb_id, - show_name=show_name, - season_number=season_number, - episode_number=episode_number - ) - if tvmaze_time: - logger.debug(f"TVMaze airtime for {item.log_string}: {tvmaze_time}") + if show_name and not show_name.lower().startswith("episode "): + season_number = item.parent.number if isinstance(item, Episode) and item.parent else None + episode_number = item.number if isinstance(item, Episode) else None + + tvmaze_time = tvmaze_api.get_show_by_imdb( + item.imdb_id, + show_name=show_name, + season_number=season_number, + episode_number=episode_number + ) except Exception as e: - logger.error(f"Failed to fetch airtime from TVMaze for {item.log_string}: {e}") - - # Use the earliest available time - if trakt_time and tvmaze_time: - air_time = min(trakt_time, tvmaze_time) - logger.debug(f"Using earliest time between Trakt ({trakt_time}) and TVMaze ({tvmaze_time})") - else: - air_time = trakt_time or tvmaze_time - if not air_time: - logger.debug(f"No airtime available from either Trakt or TVMaze for {item.log_string}") - continue # Skip this item - - if not air_time: # Add explicit check - logger.debug(f"No valid air time found for {item.log_string}") - continue - - # Store the local time in the database - item.aired_at = air_time - session.merge(item) - logger.debug(f"Updated airtime for {item.log_string} to {air_time}") - - # Calculate delayed release time - delay_minutes = settings_manager.settings.content.trakt.release_delay_minutes - delayed_time = air_time + timedelta(minutes=delay_minutes) - - # Format times for display (e.g., "8:00 PM") - air_time_str = air_time.strftime("%I:%M %p").lstrip('0') - release_time_str = delayed_time.strftime("%I:%M %p").lstrip('0') - - # If it aired in the past (including delay), release it immediately - if delayed_time <= current_time: - logger.log("PROGRAM", f"- {item.log_string} was scheduled to release at 
{air_time_str}") - # Trigger immediate state update + logger.error(f"Failed to fetch TVMaze time for {item.log_string}: {e}") + + # Use earliest available time + times = [t for t in [trakt_time, tvmaze_time] if t is not None] + if not times: + continue + air_time = min(times) + if not air_time: + continue + + # Store the air time + item.aired_at = air_time + session.merge(item) + + # Calculate release time with delay + delay_minutes = settings_manager.settings.content.trakt.release_delay_minutes + release_time = air_time + timedelta(minutes=delay_minutes) + + # Skip if already released + if release_time <= current_time: + if item.last_state in [States.Ongoing, States.Unreleased]: previous_state, new_state = item.store_state() if previous_state != new_state: - self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item.id)) - logger.debug(f"Updated state for {item.log_string} ({item.id}) from {previous_state.name} to {new_state.name}") - continue + self.em.add_event(Event("StateTransition", item_id=item.id)) + logger.log("RELEASE", f"🎬 Released (late): {item.log_string}") + continue + + # Check if releasing in next 24 hours + time_until_release = release_time - current_time + if time_until_release <= timedelta(hours=24): + release_time_str = release_time.strftime("%I:%M %p").lstrip('0') + upcoming_releases.append((item.log_string, release_time_str)) + + # Schedule the release + self.scheduled_releases[item.id] = release_time + job_id = f"release_{item.id}" - # For future items, only schedule if they air today - if (air_time.year == current_time.year and - air_time.month == current_time.month and - air_time.day == current_time.day): - items_found_today = True - todays_releases.append((item.log_string, release_time_str)) - logger.debug(f"Found today's item: {item.log_string}") - logger.log("PROGRAM", f"- {item.log_string} will release at {release_time_str} (included {delay_minutes}min delay)") - # Add to scheduled releases if not already there - if item.id not in self.scheduled_releases: - self.scheduled_releases[item.id] = delayed_time + try: + if self.scheduler.get_job(job_id): + self.scheduler.remove_job(job_id) + + self.scheduler.add_job( + self._process_release, + 'date', + run_date=release_time, + args=[item.id, item.log_string], + id=job_id, + name=f"Release {item.log_string}" + ) + logger.log("PROGRAM", f"πŸ“… Scheduled: {item.log_string} at {release_time_str}") + except Exception as e: + logger.error(f"Failed to schedule release for {item.log_string}: {e}") + + # If this episode isn't releasing soon, skip rest of the season + if item.type == "episode": + checked_shows.add(show_id) + except Exception as e: - logger.error(f"Failed to fetch airtime for {item.log_string}: {e}") + logger.error(f"Failed to process {item.log_string}: {e}") continue - # Commit any airtime updates session.commit() - # Log summary of today's releases - if todays_releases: - logger.log("PROGRAM", "\nToday's releases:") - for item_name, release_time in sorted(todays_releases, key=lambda x: x[1]): + # Log summary of scheduled releases + if upcoming_releases: + logger.log("PROGRAM", "\nπŸ“… Scheduled releases:") + for item_name, release_time in sorted(upcoming_releases, key=lambda x: x[1]): logger.log("PROGRAM", f" β€’ {item_name} at {release_time}") else: - logger.log("PROGRAM", "\nNo releases scheduled for today") + logger.log("PROGRAM", "No upcoming releases found") except Exception as e: session.rollback() logger.error(f"Database error in _update_ongoing: {e}") raise + except Exception as e: 
logger.error(f"Error in _update_ongoing: {e}") @@ -382,7 +377,7 @@ def _update_item_state(self, item_id: str) -> None: try: item = session.execute( select(MediaItem).where(MediaItem.id == item_id) - ).scalar_one() + ).unique().scalar_one_or_none() if not item: logger.error(f"Item {item_id} not found") @@ -394,30 +389,47 @@ def _update_item_state(self, item_id: str) -> None: delay_minutes = settings_manager.settings.content.trakt.release_delay_minutes delayed_time = item.aired_at + timedelta(minutes=delay_minutes) - # If it's a future release for today, schedule it - if (delayed_time > current_time and + # Check if it's for today (regardless of time) + is_today = ( item.aired_at.year == current_time.year and item.aired_at.month == current_time.month and - item.aired_at.day == current_time.day): + item.aired_at.day == current_time.day + ) + + if is_today: release_time_str = delayed_time.strftime("%I:%M %p").lstrip('0') - logger.log("PROGRAM", f"Scheduling {item.log_string} for release at {release_time_str}") - self.scheduled_releases[item.id] = delayed_time - - # Schedule a one-time job at the release time - self.scheduler.add_job( - self._process_release, - 'date', - run_date=delayed_time, - args=[item.id, item.log_string], - id=f"release_{item.id}", - replace_existing=True - ) - # If it should have been released already, release it now - elif delayed_time <= current_time: - previous_state, new_state = item.store_state() - if previous_state != new_state: - self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item.id)) - logger.log("RELEASE", f"{item.log_string} has been released!") + # If it's in the future, schedule it + if delayed_time > current_time: + # Only schedule if item is still unreleased + if item.last_state in [States.Ongoing, States.Unreleased]: + logger.log("PROGRAM", f"Scheduling {item.log_string} for release at {release_time_str}") + self.scheduled_releases[item.id] = delayed_time + + # Schedule a one-time job at the release time + job_id = f"release_{item.id}" + try: + # Remove any existing job first + if self.scheduler.get_job(job_id): + self.scheduler.remove_job(job_id) + + # Add the new job + self.scheduler.add_job( + self._process_release, + 'date', + run_date=delayed_time, + args=[item.id, item.log_string], + id=job_id, + name=f"Release {item.log_string}" + ) + except Exception as e: + logger.error(f"Failed to schedule release job for {item.log_string}: {e}") + # If it's in the past (for today), release it now + else: + logger.log("PROGRAM", f"Releasing {item.log_string} now (scheduled for {release_time_str})") + previous_state, new_state = item.store_state() + if previous_state != new_state: + self.em.add_event(Event("StateTransition", item_id=item.id)) + logger.log("RELEASE", f"🎬 Released (late): {item.log_string}") session.merge(item) session.commit() @@ -435,16 +447,20 @@ def _process_release(self, item_id: str, log_string: str) -> None: with db.Session() as session: item = session.execute( select(MediaItem).where(MediaItem.id == item_id) - ).scalar_one() + ).unique().scalar_one_or_none() if item: - previous_state, new_state = item.store_state() - if previous_state != new_state: - self.em.add_event(Event(emitted_by="UpdateOngoing", item_id=item_id)) - release_time = datetime.now().astimezone().strftime("%I:%M %p").lstrip('0') - logger.log("RELEASE", f"🎬 Released at {release_time}: {log_string}") - session.merge(item) - session.commit() + # Only process if item is still unreleased + if item.last_state in [States.Ongoing, States.Unreleased]: + previous_state, 
new_state = item.store_state() + if previous_state != new_state: + self.em.add_event(Event("StateTransition", item_id=item_id)) + release_time = datetime.now().astimezone().strftime("%I:%M %p").lstrip('0') + logger.log("RELEASE", f"🎬 Released at {release_time}: {log_string}") + session.merge(item) + session.commit() + else: + logger.error(f"Item {item_id} not found during scheduled release") # Clean up the scheduled release self.scheduled_releases.pop(item_id, None) @@ -456,7 +472,6 @@ def _schedule_functions(self) -> None: # Schedule the ongoing state update function to run at midnight current_time = datetime.now().astimezone() midnight = (current_time + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0) - logger.debug(f"Scheduling next midnight update at: {midnight.strftime('%Y-%m-%d %H:%M:%S %z')}") self.scheduler.add_job( self._update_ongoing, @@ -464,8 +479,7 @@ def _schedule_functions(self) -> None: hour=0, minute=0, id="update_ongoing", - replace_existing=True, - misfire_grace_time=3600 # Allow up to 1 hour delay if system is busy + replace_existing=True ) # Run update_ongoing immediately on startup @@ -493,10 +507,9 @@ def _schedule_functions(self) -> None: id=f"{func.__name__}", max_instances=config.get("max_instances", 1), replace_existing=True, - next_run_time=datetime.now(), - misfire_grace_time=30 + next_run_time=datetime.now() ) - logger.debug(f"Scheduled {func.__name__} to run every {config['interval']} seconds.") + logger.log("PROGRAM", f"Scheduled {func.__name__} to run every {config['interval']} seconds.") def _schedule_services(self) -> None: """Schedule each service based on its update interval.""" @@ -518,31 +531,31 @@ def _schedule_services(self) -> None: next_run_time=datetime.now() if service_cls != SymlinkLibrary else None, coalesce=False, ) - logger.debug(f"Scheduled {service_cls.__name__} to run every {update_interval} seconds.") + logger.log("PROGRAM", f"Scheduled {service_cls.__name__} to run every {update_interval} seconds.") def display_top_allocators(self, snapshot, key_type="lineno", limit=10): import psutil process = psutil.Process(os.getpid()) top_stats = snapshot.compare_to(self.last_snapshot, "lineno") - logger.debug("Top %s lines" % limit) + logger.log("PROGRAM", "Top %s lines" % limit) for index, stat in enumerate(top_stats[:limit], 1): frame = stat.traceback[0] # replace "/path/to/module/file.py" with "module/file.py" filename = os.sep.join(frame.filename.split(os.sep)[-2:]) - logger.debug("#%s: %s:%s: %.1f KiB" + logger.log("PROGRAM", "#%s: %s:%s: %.1f KiB" % (index, filename, frame.lineno, stat.size / 1024)) line = linecache.getline(frame.filename, frame.lineno).strip() if line: - logger.debug(" %s" % line) + logger.log("PROGRAM", " %s" % line) other = top_stats[limit:] if other: size = sum(stat.size for stat in other) - logger.debug("%s other: %.1f MiB" % (len(other), size / (1024 * 1024))) + logger.log("PROGRAM", "%s other: %.1f MiB" % (len(other), size / (1024 * 1024))) total = sum(stat.size for stat in top_stats) - logger.debug("Total allocated size: %.1f MiB" % (total / (1024 * 1024))) - logger.debug(f"Process memory: {process.memory_info().rss / (1024 * 1024):.2f} MiB") + logger.log("PROGRAM", "Total allocated size: %.1f MiB" % (total / (1024 * 1024))) + logger.log("PROGRAM", f"Process memory: {process.memory_info().rss / (1024 * 1024):.2f} MiB") def dump_tracemalloc(self): if time.monotonic() - self.malloc_time > 60: @@ -589,16 +602,12 @@ def run(self): self.em.add_event_to_running(event) self.em.submit_job(next_service, 
self, event) - def stop(self): - if not self.initialized: - return - - if hasattr(self, "executors"): - for executor in self.executors: - if not executor["_executor"]._shutdown: - executor["_executor"].shutdown(wait=False) - if hasattr(self, "scheduler") and self.scheduler.running: - self.scheduler.shutdown(wait=False) + def stop(self) -> None: + """Stop the program.""" + self.running = False + # Shutdown scheduler properly + if self.scheduler.running: + self.scheduler.shutdown() logger.log("PROGRAM", "Riven has been stopped.") def _enhance_item(self, item: MediaItem) -> MediaItem | None: From 7d3a004d72e8f4cfe7ddee3b395a8e4a3113ed00 Mon Sep 17 00:00:00 2001 From: Arrrrr Date: Thu, 19 Dec 2024 08:19:08 -0800 Subject: [PATCH 7/9] Update program.py --- src/program/program.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/program/program.py b/src/program/program.py index d3257000..de0bb2c2 100644 --- a/src/program/program.py +++ b/src/program/program.py @@ -316,7 +316,7 @@ def _update_ongoing(self) -> None: previous_state, new_state = item.store_state() if previous_state != new_state: self.em.add_event(Event("StateTransition", item_id=item.id)) - logger.log("RELEASE", f"🎬 Released (late): {item.log_string}") + logger.log("🎬 RELEASE", f" Released (late): {item.log_string}") continue # Check if releasing in next 24 hours From d3bd27abe93c60a18cec0e1695caf07a7c5a4c22 Mon Sep 17 00:00:00 2001 From: Arrrrr Date: Sun, 22 Dec 2024 02:51:32 -0800 Subject: [PATCH 8/9] feat: active release d --- src/program/apis/tvmaze_api.py | 30 ++++++++++++++++++++++-------- src/program/program.py | 12 +++--------- 2 files changed, 25 insertions(+), 17 deletions(-) diff --git a/src/program/apis/tvmaze_api.py b/src/program/apis/tvmaze_api.py index f040d33c..d8656fc0 100644 --- a/src/program/apis/tvmaze_api.py +++ b/src/program/apis/tvmaze_api.py @@ -99,12 +99,13 @@ def get_show_by_imdb(self, imdb_id: str, show_name: Optional[str] = None, season if not episodes: return None - # Find the next episode that hasn't aired yet - current_time = datetime.now() + # Find all unreleased episodes and the next episode + current_time = datetime.now().astimezone() # Make sure current_time has timezone info + unreleased_episodes = [] next_episode = None target_episode_time = None - for episode in episodes: + for episode in sorted(episodes, key=lambda x: (getattr(x, 'season', 0), getattr(x, 'number', 0))): try: if not episode.airstamp: continue @@ -159,11 +160,17 @@ def get_show_by_imdb(self, imdb_id: str, show_name: Optional[str] = None, season else: logger.debug(f"Found S{season_number}E{episode_number} airing at {air_time}") target_episode_time = air_time - break # No need to continue looking - # If we're looking for next episode and this one is in the future - elif air_time > current_time: - # If we haven't found any future episode yet, or this one airs sooner + # Add all unreleased episodes to our list + if air_time > current_time: + ep_info = { + 'air_time': air_time, + 'season': getattr(episode, 'season', 0), + 'episode': getattr(episode, 'number', 0), + 'name': getattr(episode, 'name', '') + } + unreleased_episodes.append(ep_info) + # Track next episode separately if not next_episode or air_time < next_episode: next_episode = air_time @@ -171,10 +178,17 @@ def get_show_by_imdb(self, imdb_id: str, show_name: Optional[str] = None, season logger.error(f"Failed to process episode {getattr(episode, 'number', '?')}: {e}") continue - # Return target episode time if we found one, otherwise return next episode + 
# Return target episode time if we found one if target_episode_time is not None: return target_episode_time + # Log all unreleased episodes in sequence + if unreleased_episodes: + unreleased_episodes.sort(key=lambda x: (x['season'], x['episode'])) + for ep in unreleased_episodes: + logger.debug(f"Unreleased: S{ep['season']}E{ep['episode']} '{ep['name']}' airs at {ep['air_time']}") + + # Return next episode air time if next_episode: logger.debug(f"Next episode airs at {next_episode}") return next_episode diff --git a/src/program/program.py b/src/program/program.py index de0bb2c2..25e8dc55 100644 --- a/src/program/program.py +++ b/src/program/program.py @@ -252,12 +252,6 @@ def _update_ongoing(self) -> None: # Skip if no IMDB ID if not item.imdb_id: continue - - # For episodes, check if we already found a future episode for this show - if item.type == "episode": - show_id = item.parent.parent.id if item.parent and item.parent.parent else None - if show_id in checked_shows: - continue trakt_time = None tvmaze_time = None @@ -316,7 +310,7 @@ def _update_ongoing(self) -> None: previous_state, new_state = item.store_state() if previous_state != new_state: self.em.add_event(Event("StateTransition", item_id=item.id)) - logger.log("🎬 RELEASE", f" Released (late): {item.log_string}") + logger.log("RELEASE", f"🎬 Released (late): {item.log_string}") continue # Check if releasing in next 24 hours @@ -346,8 +340,8 @@ def _update_ongoing(self) -> None: logger.error(f"Failed to schedule release for {item.log_string}: {e}") # If this episode isn't releasing soon, skip rest of the season - if item.type == "episode": - checked_shows.add(show_id) + # if item.type == "episode": + # checked_shows.add(show_id) except Exception as e: logger.error(f"Failed to process {item.log_string}: {e}") From 604f5288a74d5a06f964a01f377951044b37396b Mon Sep 17 00:00:00 2001 From: Spoked <5782630+dreulavelle@users.noreply.github.com> Date: Wed, 8 Jan 2025 17:48:33 -0600 Subject: [PATCH 9/9] Update src/program/apis/tvmaze_api.py Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- src/program/apis/tvmaze_api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/program/apis/tvmaze_api.py b/src/program/apis/tvmaze_api.py index d8656fc0..d75074cf 100644 --- a/src/program/apis/tvmaze_api.py +++ b/src/program/apis/tvmaze_api.py @@ -152,8 +152,8 @@ def get_show_by_imdb(self, imdb_id: str, show_name: Optional[str] = None, season # Check if this is the specific episode we want if season_number is not None and episode_number is not None: - if hasattr(episode, 'number') and hasattr(episode, 'season'): - if episode.season == season_number and episode.number == episode_number: + if (hasattr(episode, 'number') and hasattr(episode, 'season') and + episode.season == season_number and episode.number == episode_number): # Found our target episode if hasattr(episode, 'name'): logger.debug(f"Found S{season_number}E{episode_number} '{episode.name}' airing at {air_time}")
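
The release-scheduling pattern the later patches in this series converge on — parse Trakt's ISO 8601 UTC timestamp, add the configured release delay, then register a one-shot APScheduler "date" job keyed by a deterministic id — can be summarized in a short standalone sketch. The names RELEASE_DELAY_MINUTES, schedule_release and on_release below are illustrative stand-ins (the real code reads the delay from settings_manager and re-checks item state inside _process_release); this is a simplified sketch of the approach under those assumptions, not the project's implementation.

    from datetime import datetime, timedelta
    from zoneinfo import ZoneInfo

    from apscheduler.schedulers.background import BackgroundScheduler

    # Illustrative stand-in for settings_manager.settings.content.trakt.release_delay_minutes
    RELEASE_DELAY_MINUTES = 15

    # BackgroundScheduler defaults to the system's local timezone
    scheduler = BackgroundScheduler()
    scheduler.start()


    def on_release(item_id: str, log_string: str) -> None:
        # Stand-in for _process_release: the real code re-loads the item,
        # re-checks that it is still Ongoing/Unreleased and emits an event.
        print(f"Released: {log_string} ({item_id})")


    def schedule_release(item_id: str, log_string: str, first_aired: str) -> None:
        # Trakt returns ISO 8601 UTC timestamps, e.g. "2024-12-18T01:00:00.000Z"
        air_time_utc = datetime.fromisoformat(first_aired.replace("Z", "+00:00"))
        release_time = air_time_utc + timedelta(minutes=RELEASE_DELAY_MINUTES)

        if release_time <= datetime.now(tz=ZoneInfo("UTC")):
            # Already past the delayed release time: release immediately
            on_release(item_id, log_string)
            return

        # One-shot job; the deterministic id plus replace_existing keeps
        # rescheduling idempotent when the midnight sweep runs again.
        scheduler.add_job(
            on_release,
            "date",
            run_date=release_time,
            args=[item_id, log_string],
            id=f"release_{item_id}",
            replace_existing=True,
        )


    schedule_release("tt0000001", "Example Show S01E01", "2025-06-01T01:00:00.000Z")
    # The process must stay alive for the background job to fire.

Keying one-shot "date" jobs by ids of the form release_<item_id> is what lets _update_ongoing and _update_item_state safely re-schedule the same item: any existing job is either removed first or overwritten via replace_existing, so an item never accumulates two pending release jobs.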