diff --git a/bases/ecoindex/cli/app.py b/bases/ecoindex/cli/app.py
index 4ce67c7..0059e90 100644
--- a/bases/ecoindex/cli/app.py
+++ b/bases/ecoindex/cli/app.py
@@ -1,4 +1,3 @@
-from asyncio import run
 from datetime import datetime
 from multiprocessing import cpu_count
 from os.path import dirname
@@ -225,15 +224,13 @@ def analyze(
         count_errors = 0
         task = progress.add_task("Processing", total=len(urls) * len(window_sizes))
 
-        analysis_results = run(
-            bulk_analysis(
-                max_workers=max_workers,
-                urls=urls,
-                window_sizes=window_sizes,
-                wait_after_scroll=wait_after_scroll,
-                wait_before_scroll=wait_before_scroll,
-                logger=logger,
-            )
+        analysis_results = bulk_analysis(
+            max_workers=max_workers,
+            urls=urls,
+            window_sizes=window_sizes,
+            wait_after_scroll=wait_after_scroll,
+            wait_before_scroll=wait_before_scroll,
+            logger=logger,
         )
 
         for result, success in analysis_results:
diff --git a/components/ecoindex/scraper/helper.py b/components/ecoindex/scraper/helper.py
index f139149..f375ece 100644
--- a/components/ecoindex/scraper/helper.py
+++ b/components/ecoindex/scraper/helper.py
@@ -1,6 +1,6 @@
 from asyncio import run
 from concurrent.futures import ThreadPoolExecutor, as_completed
-from typing import AsyncGenerator
+from typing import Generator
 
 from ecoindex.models.compute import Result, WindowSize
 from ecoindex.scraper.scrap import EcoindexScraper
@@ -40,14 +40,14 @@ def run_page_analysis(
     )
 
 
-async def bulk_analysis(
+def bulk_analysis(
     max_workers,
     urls,
     window_sizes,
     wait_after_scroll: int = 0,
     wait_before_scroll: int = 0,
     logger=None,
-) -> AsyncGenerator[tuple[Result, bool], None]:
+) -> Generator[tuple[Result, bool], None, None]:
     with ThreadPoolExecutor(max_workers=max_workers) as executor:
         future_to_analysis = {}
 
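
Note on the last hunk: the patch cuts off right after `future_to_analysis = {}`, so the rest of the new synchronous `bulk_analysis` body is not shown. Below is a minimal sketch of how it plausibly continues inside components/ecoindex/scraper/helper.py (where `run_page_analysis` is defined, per the hunk header above): one worker job per (url, window size) pair, with `(Result, bool)` tuples yielded in completion order. The argument list passed to `executor.submit()` is an assumption, not confirmed by the patch.

# Sketch only: assumed continuation of the truncated hunk in helper.py.
# run_page_analysis is defined earlier in the same module; the argument
# order passed to it here is a guess, since the patch cuts off first.
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Generator

from ecoindex.models.compute import Result


def bulk_analysis(
    max_workers,
    urls,
    window_sizes,
    wait_after_scroll: int = 0,
    wait_before_scroll: int = 0,
    logger=None,
) -> Generator[tuple[Result, bool], None, None]:
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        # Submit one analysis job per (url, window size) combination.
        future_to_analysis = {
            executor.submit(
                run_page_analysis,
                url,
                window_size,
                wait_after_scroll,
                wait_before_scroll,
                logger,
            ): (url, window_size)
            for url in urls
            for window_size in window_sizes
        }

        # Yield each (Result, success) tuple as soon as its worker finishes,
        # which is what lets the CLI advance its progress bar incrementally.
        for future in as_completed(future_to_analysis):
            yield future.result()

The behavioral point of the patch is the same either way: callers now get a plain generator, so the CLI iterates `bulk_analysis(...)` directly instead of wrapping it in `asyncio.run()`. The `from asyncio import run` import that survives at the top of helper.py suggests each worker thread still drives the async scraper via `run()` inside `run_page_analysis`.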