fix: Improve failure handling and logging
janw committed Oct 20, 2024
1 parent 406f119 commit b165f67
Showing 9 changed files with 92 additions and 41 deletions.
4 changes: 1 addition & 3 deletions podcast_archiver/base.py
@@ -3,9 +3,7 @@
import xml.etree.ElementTree as etree
from typing import TYPE_CHECKING

from rich import print as rprint

from podcast_archiver.logging import logger
from podcast_archiver.logging import logger, rprint
from podcast_archiver.processor import FeedProcessor

if TYPE_CHECKING:
2 changes: 1 addition & 1 deletion podcast_archiver/cli.py
@@ -283,8 +283,8 @@ def generate_default_config(ctx: click.Context, param: click.Parameter, value: b
)
@click.pass_context
def main(ctx: click.RichContext, /, **kwargs: Any) -> int:
get_console().quiet = kwargs["quiet"]
configure_logging(kwargs["verbose"])
get_console().quiet = kwargs["quiet"] or kwargs["verbose"] > 1
try:
settings = Settings.load_from_dict(kwargs)

4 changes: 2 additions & 2 deletions podcast_archiver/download.py
@@ -59,7 +59,8 @@ def __call__(self) -> EpisodeResult:
try:
return self.run()
except Exception as exc:
logger.error("Download failed", exc_info=exc)
logger.error(f"Download failed: {exc}")
logger.debug("Exception while downloading", exc_info=exc)
return EpisodeResult(self.episode, DownloadResult.FAILED)

def run(self) -> EpisodeResult:
@@ -73,7 +74,6 @@ def run(self) -> EpisodeResult:
self.episode.enclosure.href,
stream=True,
allow_redirects=True,
timeout=constants.REQUESTS_TIMEOUT,
)
response.raise_for_status()
total_size = int(response.headers.get("content-length", "0"))
14 changes: 11 additions & 3 deletions podcast_archiver/logging.py
@@ -2,18 +2,25 @@

import logging
import logging.config
from typing import Any

from rich import get_console
from rich import print as _print
from rich.logging import RichHandler

logger = logging.getLogger("podcast_archiver")


def rprint(*objects: Any, sep: str = " ", end: str = "\n", **kwargs: Any) -> None:
if logger.level == logging.NOTSET or logger.level >= logging.WARNING:
_print(*objects, sep=sep, end=end, **kwargs)
return
logger.info(objects[0].strip(), *objects[1:])


def configure_logging(verbosity: int) -> None:
if verbosity > 2:
if verbosity > 1:
level = logging.DEBUG
elif verbosity == 2:
level = logging.INFO
elif verbosity == 1:
level = logging.WARNING
else:
@@ -35,4 +42,5 @@ def configure_logging(verbosity: int) -> None:
)
],
)
logger.setLevel(level)
logger.debug("Running in debug mode.")
36 changes: 11 additions & 25 deletions podcast_archiver/processor.py
@@ -5,16 +5,12 @@
from threading import Event
from typing import TYPE_CHECKING

from pydantic import ValidationError
from requests import HTTPError
from rich import print as rprint

from podcast_archiver.download import DownloadJob
from podcast_archiver.enums import DownloadResult, QueueCompletionType
from podcast_archiver.logging import logger
from podcast_archiver.logging import logger, rprint
from podcast_archiver.models import Episode, Feed, FeedInfo
from podcast_archiver.types import EpisodeResult, EpisodeResultsList
from podcast_archiver.utils import FilenameFormatter
from podcast_archiver.utils import FilenameFormatter, handle_feed_request

if TYPE_CHECKING:
from pathlib import Path
@@ -48,25 +44,15 @@ def __init__(self, settings: Settings) -> None:

def process(self, url: str) -> ProcessingResult:
result = ProcessingResult()
try:
feed = Feed.from_url(url)
except HTTPError as exc:
if exc.response is not None:
rprint(f"[error]Received status code {exc.response.status_code} from {url}[/]")
logger.debug("Failed to request feed url %s", url, exc_info=exc)
return result
except ValidationError as exc:
logger.debug("Invalid feed", exc_info=exc)
rprint(f"[error]Received invalid feed from {url}[/]")
return result

result.feed = feed
rprint(f"\n[bold bright_magenta]Downloading archive for: {feed.info.title}[/]\n")

episode_results, completion_msg = self._process_episodes(feed=feed)
self._handle_results(episode_results, result=result)

rprint(f"\n[bar.finished]✔ {completion_msg}[/]")
with handle_feed_request(url):
result.feed = Feed.from_url(url)

if result.feed:
rprint(f"\n[bold bright_magenta]Downloading archive for: {result.feed.info.title}[/]\n")
episode_results, completion_msg = self._process_episodes(feed=result.feed)
self._handle_results(episode_results, result=result)

rprint(f"\n[bar.finished]✔ {completion_msg}[/]")
return result

def _preflight_check(self, episode: Episode, target: Path) -> DownloadResult | None:
31 changes: 29 additions & 2 deletions podcast_archiver/session.py
@@ -1,6 +1,33 @@
from requests import Session
from typing import Any

from podcast_archiver.constants import USER_AGENT
from requests import PreparedRequest, Session
from requests.adapters import HTTPAdapter
from requests.models import Response as Response
from urllib3.util import Retry

from podcast_archiver.constants import REQUESTS_TIMEOUT, USER_AGENT


class DefaultTimeoutHTTPAdapter(HTTPAdapter):
def send(
self,
request: PreparedRequest,
timeout: None | float | tuple[float, float] | tuple[float, None] = None,
**kwargs: Any,
) -> Response:
return super().send(request, timeout=timeout or REQUESTS_TIMEOUT, **kwargs)


_retries = Retry(
total=3,
connect=1,
backoff_factor=0.5,
status_forcelist=[500, 501, 502, 503, 504],
)

_adapter = DefaultTimeoutHTTPAdapter(max_retries=_retries)

session = Session()
session.mount("http://", _adapter)
session.mount("https://", _adapter)
session.headers.update({"user-agent": USER_AGENT})
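
Because the shared session now mounts DefaultTimeoutHTTPAdapter, callers no longer need to pass a timeout on every request, which is why the explicit timeout argument was dropped from download.py above. A rough usage sketch (the URL is a placeholder and the exact call sites differ):

from podcast_archiver.session import session

# No explicit timeout: the adapter falls back to constants.REQUESTS_TIMEOUT,
# and transient 5xx responses are retried with backoff before the error surfaces.
response = session.get("https://example.org/feed.xml", stream=True, allow_redirects=True)
response.raise_for_status()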
27 changes: 25 additions & 2 deletions podcast_archiver/utils.py
@@ -4,11 +4,13 @@
import re
from contextlib import contextmanager
from string import Formatter
from typing import IO, TYPE_CHECKING, Any, Iterable, Iterator, TypedDict
from typing import IO, TYPE_CHECKING, Any, Generator, Iterable, Iterator, TypedDict

from pydantic import ValidationError
from requests import HTTPError
from slugify import slugify as _slugify

from podcast_archiver.logging import logger
from podcast_archiver.logging import logger, rprint

if TYPE_CHECKING:
from pathlib import Path
@@ -119,3 +121,24 @@ def atomic_write(target: Path, mode: str = "w") -> Iterator[IO[Any]]:
os.rename(tempfile, target)
finally:
tempfile.unlink(missing_ok=True)


@contextmanager
def handle_feed_request(url: str) -> Generator[None, Any, None]:
try:
yield
except HTTPError as exc:
logger.debug("Failed to request feed url %s", url, exc_info=exc)

if (response := getattr(exc, "response", None)) is None:
rprint(f"[error]Failed to retrieve feed {url}: {exc}[/]")
return

rprint(f"[error]Received status code {response.status_code} from {url}[/]")

except ValidationError as exc:
logger.debug("Feed validation failed for %s", url, exc_info=exc)
rprint(f"[error]Received invalid feed from {url}[/]")

except Exception as exc:
logger.debug("Unexpected error for url %s", url, exc_info=exc)
rprint(f"[error]Failed to retrieve feed {url}: {exc}[/]")

13 changes: 11 additions & 2 deletions tests/test_download.py
@@ -77,13 +77,22 @@ def test_download_failed(
responses.add(responses.GET, MEDIA_URL, b"BLOB")

job = download.DownloadJob(episode=episode, target=Path("file.mp3"))
with failure_mode(side_effect=side_effect), caplog.at_level(logging.ERROR):
with failure_mode(side_effect=side_effect), caplog.at_level(logging.DEBUG):
result = job()

assert result == (episode, DownloadResult.FAILED)
failure_rec = None
for record in caplog.records:
if record.message == "Download failed":
if record.message.startswith("Download failed: "):
failure_rec = record
break

assert failure_rec
assert not failure_rec.exc_info

failure_rec = None
for record in caplog.records:
if record.message == "Exception while downloading":
failure_rec = record
break

2 changes: 1 addition & 1 deletion tests/test_filenames.py
@@ -5,7 +5,7 @@
from podcast_archiver.utils import FilenameFormatter

FEED_INFO = FeedInfo(
title="That\Show",
title="That\\Show",
subtitle="The one that never comes/came to be",
author="TheJanwShow",
language="de-DE",
