Commit

find_media_errors.py: fix compute and time limit logic to finish using cached results
double16 committed Aug 12, 2024
1 parent 283a947 commit d7f02dc
Showing 1 changed file with 11 additions and 6 deletions.
17 changes: 11 additions & 6 deletions dvrprocess/find_media_errors.py
@@ -124,25 +124,30 @@ def media_errors_generator(media_paths: list[str], media_roots: list[str],
                           time_limit=config.get_global_config_time_seconds('background_limits', 'time_limit'),
                           check_compute=True) -> Iterable[MediaErrorFileInfo]:
    time_start = time.time()
    only_cached = False

    for media_path in media_paths:
        for root, dirs, files in os.walk(media_path, topdown=True):
            for file in common.filter_for_mkv(files):
                duration = time.time() - time_start
                if 0 < time_limit < duration:
                    logger.debug(
                        f"Exiting normally after processing {common.s_to_ts(int(duration))}, limit of {common.s_to_ts(time_limit)} reached")
                    return

                filepath = os.path.join(root, file)
                cached_error_count = config.get_file_config_option(filepath, 'error', 'count')
                if cached_error_count:
                    error_count = int(cached_error_count)
                elif only_cached:
                    continue
                else:
                    duration = time.time() - time_start
                    if 0 < time_limit < duration:
                        logger.debug(
                            f"Time limit expired after processing {common.s_to_ts(int(duration))}, limit of {common.s_to_ts(time_limit)} reached, only using cached data")
                        only_cached = True
                        continue
                    if check_compute and common.should_stop_processing():
                        # when compute limit is reached, use cached data
                        logger.debug("not enough compute available, only using cached data")
                        only_cached = True
                        continue

                    error_count = len(tools.ffmpeg.check_output(
                        ['-y', '-v', 'error', '-i', filepath, '-c:v', 'vnull', '-c:a', 'anull', '-f', 'null',
                         '/dev/null'],
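Note on the change: instead of returning as soon as the time or compute limit is hit, the generator now flips an only_cached flag, so it keeps walking the media tree and still reports files that already have a cached error count; only new ffmpeg checks are skipped. The sketch below is a minimal standalone illustration of that pattern, not the dvrprocess code itself; the helper names (check_files, expensive_check, the cache dict) are hypothetical stand-ins for the real ffmpeg decode and config-backed cache.

import time
from typing import Iterable, Optional


def check_files(files: list[str], time_limit: float = 0,
                cache: Optional[dict] = None) -> Iterable[tuple[str, int]]:
    cache = cache if cache is not None else {}
    time_start = time.time()
    only_cached = False  # once a limit is reached, skip new work but keep using the cache

    for filepath in files:
        cached = cache.get(filepath)
        if cached is not None:
            error_count = cached
        elif only_cached:
            # no cached value and no budget left to compute a new one
            continue
        else:
            duration = time.time() - time_start
            if 0 < time_limit < duration:
                # time limit reached: switch to cached-only mode instead of returning
                only_cached = True
                continue
            error_count = expensive_check(filepath)
            cache[filepath] = error_count
        yield filepath, error_count


def expensive_check(filepath: str) -> int:
    # stand-in for the costly operation (a full ffmpeg decode in find_media_errors.py)
    return 0

The chained comparison 0 < time_limit < duration also acts as an enable switch: a limit of zero (or negative) never trips, so the walk runs unrestricted.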
