From 29a5208ce688e8cfae65fed0fe3c4ed32821402c Mon Sep 17 00:00:00 2001
From: Xavier Fernandez
Date: Thu, 30 Jan 2025 15:56:46 +0100
Subject: [PATCH 1/5] cities: migrate sync_cities to logging

---
 .../cities/management/commands/sync_cities.py | 10 ++--
 tests/cities/tests.py                         | 49 +++++++++++++------
 2 files changed, 40 insertions(+), 19 deletions(-)

diff --git a/itou/cities/management/commands/sync_cities.py b/itou/cities/management/commands/sync_cities.py
index 5c1879f989..6dd3b4bb82 100755
--- a/itou/cities/management/commands/sync_cities.py
+++ b/itou/cities/management/commands/sync_cities.py
@@ -82,14 +82,14 @@ def handle(self, *, wet_run, **options):
             elif item.kind == DiffItemKind.EDITION:
                 db_city = item.db_obj
                 if db_city.edition_mode != EditionModeChoices.AUTO:
-                    self.stdout.write(f"! skipping manually edited city={db_city} from update")
+                    self.logger.warning("skipping manually edited city=%s from update", db_city)
                     continue
                 city = api_city_to_db_city(item.raw)
                 city.pk = db_city.pk
                 cities_updated_by_api.append(city)
             elif item.kind == DiffItemKind.DELETION:
                 cities_removed_by_api.add(item.key)
-            self.stdout.write(item.label)
+            self.logger.info(item.label)

         if wet_run:
             with transaction.atomic():
@@ -97,10 +97,10 @@ def handle(self, *, wet_run, **options):
                 # a City that is linked to one of our JobDescriptions would suddenly disappear. Handle that
                 # case as it happens, by "deactivating" the city by instance: the cron would crash.
                 n_objs, _ = City.objects.filter(code_insee__in=cities_removed_by_api).delete()
-                self.stdout.write(f"> successfully deleted count={n_objs} cities insee_codes={cities_removed_by_api}")
+                self.logger.info("successfully deleted count=%d cities insee_codes=%s", n_objs, cities_removed_by_api)

                 objs = City.objects.bulk_create(cities_added_by_api)
-                self.stdout.write(f"> successfully created count={len(objs)} new cities")
+                self.logger.info("successfully created count=%d new cities", len(objs))

                 n_objs = City.objects.bulk_update(
                     cities_updated_by_api,
@@ -114,4 +114,4 @@ def handle(self, *, wet_run, **options):
                     ],
                     batch_size=1000,
                 )
-                self.stdout.write(f"> successfully updated count={n_objs} cities")
+                self.logger.info("successfully updated count=%d cities", n_objs)
diff --git a/tests/cities/tests.py b/tests/cities/tests.py
index 9e2a32492d..886ccb63d4 100644
--- a/tests/cities/tests.py
+++ b/tests/cities/tests.py
@@ -16,7 +16,7 @@ def test_create_test_cities():
     assert City.objects.filter(department="93").count() == 10


-def test_sync_cities(settings, capsys, respx_mock):
+def test_sync_cities(settings, caplog, respx_mock):
     settings.API_GEO_BASE_URL = "https://geo.foo"
     respx_mock.get(
         "https://geo.foo/communes?fields=nom,code,codesPostaux,codeDepartement,codeRegion,centre&format=json"
@@ -82,9 +82,16 @@ def test_sync_cities(settings, capsys, respx_mock):
     City.objects.update(edition_mode=EditionModeChoices.AUTO)

     management.call_command("sync_cities", wet_run=True)
-    stdout, stderr = capsys.readouterr()
-    assert stderr == ""
-    assert stdout.splitlines() == [
+    assert caplog.messages[:-1] == [
+        (
+            "HTTP Request: GET https://geo.foo/communes"
+            '?fields=nom%2Ccode%2CcodesPostaux%2CcodeDepartement%2CcodeRegion%2Ccentre&format=json "HTTP/1.1 200 OK"'
+        ),
+        (
+            "HTTP Request: GET https://geo.foo/communes?fields="
+            "nom%2Ccode%2CcodesPostaux%2CcodeDepartement%2CcodeRegion%2Ccentre&format=json"
+            '&type=arrondissement-municipal "HTTP/1.1 200 OK"'
+        ),
         "count=1 label=City had the same key in collection and queryset",
         "\tCHANGED name=Nouveau Nom de Ville changed to value=L'Abergement-Clémenciat",
         "\tCHANGED post_codes=['01234'] changed to value=['01234', '01400']",
@@ -101,10 +108,13 @@ def test_sync_cities(settings, capsys, respx_mock):
         '"codesPostaux": ["13002"], "nom": "Marseille 2e"}',
         "count=1 label=City removed by collection",
         "\tREMOVED Guérande (44)",
-        "> successfully deleted count=1 cities insee_codes={'44350'}",
-        "> successfully created count=3 new cities",
-        "> successfully updated count=1 cities",
+        "successfully deleted count=1 cities insee_codes={'44350'}",
+        "successfully created count=3 new cities",
+        "successfully updated count=1 cities",
     ]
+    assert caplog.messages[-1].startswith(
+        "Management command itou.cities.management.commands.sync_cities succeeded in"
+    )

     # Introduce a "bogus" item in the regular (non arrondissement) cities:
     # - "new" city in respect to its non-registered INSEE code yet
@@ -136,26 +146,37 @@ def test_sync_cities(settings, capsys, respx_mock):

     # Introduce a change in one of the arrondissements: the change is now permanent
     # and any new automatic sync update to it will be skipped.
+    caplog.clear()
     marseille_1er = City.objects.get(slug="marseille-1er-13")
     marseille_1er.name = "Marssssssssseillle bébé"
     marseille_1er.save()

     management.call_command("sync_cities", wet_run=True)
-    stdout, stderr = capsys.readouterr()
-    assert stderr == ""
-    assert stdout.splitlines() == [
+    assert caplog.messages[:-1] == [
+        (
+            "HTTP Request: GET https://geo.foo/communes"
+            '?fields=nom%2Ccode%2CcodesPostaux%2CcodeDepartement%2CcodeRegion%2Ccentre&format=json "HTTP/1.1 200 OK"'
+        ),
+        (
+            "HTTP Request: GET https://geo.foo/communes?fields="
+            "nom%2Ccode%2CcodesPostaux%2CcodeDepartement%2CcodeRegion%2Ccentre&format=json"
+            '&type=arrondissement-municipal "HTTP/1.1 200 OK"'
+        ),
         "count=3 label=City had the same key in collection and queryset",
-        "! skipping manually edited city=Marssssssssseillle bébé (13) from update",
+        "skipping manually edited city=Marssssssssseillle bébé (13) from update",
         "count=1 label=City added by collection",
         '\tADDED {"centre": {"coordinates": [4.9306, 46.1517], "type": "Point"}, '
         '"code": "01003", "codeDepartement": "01", "codeRegion": "84", '
         '"codesPostaux": ["01400"], "nom": "L\'Abergement-de-Varey"}',
         "count=1 label=City removed by collection",
         "\tREMOVED L'Abergement-de-Varey (01)",
-        "> successfully deleted count=1 cities insee_codes={'01002'}",
-        "> successfully created count=1 new cities",
-        "> successfully updated count=0 cities",  # no update to post codes
+        "successfully deleted count=1 cities insee_codes={'01002'}",
+        "successfully created count=1 new cities",
+        "successfully updated count=0 cities",  # no update to post codes
     ]
+    assert caplog.messages[-1].startswith(
+        "Management command itou.cities.management.commands.sync_cities succeeded in"
+    )

     assertQuerySetEqual(
         City.objects.all().order_by("code_insee"),

From 4936bd2d756376527635bb7e39698576485b0f3d Mon Sep 17 00:00:00 2001
From: Xavier Fernandez
Date: Thu, 30 Jan 2025 16:02:10 +0100
Subject: [PATCH 2/5] jobs: migrate sync_romes_and_appellations to logging

---
 .../commands/sync_romes_and_appellations.py        |  8 ++++----
 .../management/test_sync_romes_and_appellations.py | 12 ++++++++----
 2 files changed, 12 insertions(+), 8 deletions(-)

diff --git a/itou/jobs/management/commands/sync_romes_and_appellations.py b/itou/jobs/management/commands/sync_romes_and_appellations.py
index 255eb42582..98ebe7d5ef 100644
--- a/itou/jobs/management/commands/sync_romes_and_appellations.py
+++ b/itou/jobs/management/commands/sync_romes_and_appellations.py
@@ -38,7 +38,7 @@ def handle(self, *, wet_run, **options):
         romes_data = pe_client.referentiel(pe_api_enums.REFERENTIEL_ROME)

         for item in yield_sync_diff(romes_data, "code", Rome.objects.all(), "code", [("libelle", "name")]):
-            self.stdout.write(item.label)
+            self.logger.info(item.label)

         if wet_run:
             romes = [pe_data_to_rome(item, now) for item in romes_data]
@@ -49,13 +49,13 @@ def handle(self, *, wet_run, **options):
                 update_fields=("name", "updated_at"),
                 unique_fields=("code",),
             )
-            self.stdout.write(f"len={len(romes)} ROME entries have been created or updated.")
+            self.logger.info("len=%d ROME entries have been created or updated.", len(romes))

         appellations_data = pe_client.appellations()
         for item in yield_sync_diff(
             appellations_data, "code", Appellation.objects.all(), "code", [("libelle", "name")]
         ):
-            self.stdout.write(item.label)
+            self.logger.info(item.label)

         if wet_run:
             appellations = [pe_data_to_appellation(item, now) for item in appellations_data]
@@ -66,4 +66,4 @@ def handle(self, *, wet_run, **options):
                 update_fields=("name", "updated_at", "rome_id"),
                 unique_fields=("code",),
             )
-            self.stdout.write(f"len={len(appellations)} Appellation entries have been created or updated.")
+            self.logger.info("len=%d Appellation entries have been created or updated.", len(appellations))
diff --git a/tests/jobs/management/test_sync_romes_and_appellations.py b/tests/jobs/management/test_sync_romes_and_appellations.py
index 195cdc4c72..ad0111a4b0 100644
--- a/tests/jobs/management/test_sync_romes_and_appellations.py
+++ b/tests/jobs/management/test_sync_romes_and_appellations.py
@@ -12,7 +12,7 @@
         "SECRET": "pe-secret",
     }
 )
-def test_sync_rome_appellation(capsys, respx_mock):
+def test_sync_rome_appellation(caplog, respx_mock):
     respx_mock.post("https://auth.fr/connexion/oauth2/access_token?realm=%2Fpartenaire").respond(
         200, json={"token_type": "foo", "access_token": "batman", "expires_in": 3600}
     )
@@ -43,9 +43,9 @@ def test_sync_rome_appellation(capsys, respx_mock):
     appellation0.save()
     appellation1.save()
     management.call_command("sync_romes_and_appellations", wet_run=True)
-    stdout, stderr = capsys.readouterr()
-    assert stderr == ""
-    assert stdout.splitlines() == [
+    assert caplog.messages[:-1] == [
+        'HTTP Request: POST https://auth.fr/connexion/oauth2/access_token?realm=%2Fpartenaire "HTTP/1.1 200 OK"',
+        'HTTP Request: GET https://pe.fake/offresdemploi/v2/referentiel/metiers "HTTP/1.1 200 OK"',
         "count=1 label=Rome had the same key in collection and queryset",
         "\tCHANGED name=Patisserie changed to value=Pâtisserie avec accent",
         "count=1 label=Rome added by collection",
@@ -54,6 +54,7 @@ def test_sync_rome_appellation(capsys, respx_mock):
         "\tREMOVED Métiers du corps (B001)",
         "\tREMOVED Arts de la table (F002)",  # not really removed though by our command, see docstring
         "len=2 ROME entries have been created or updated.",
+        'HTTP Request: GET https://pe.fake/rome/v1/appellation?champs=code,libelle,metier(code) "HTTP/1.1 200 OK"',
         "count=2 label=Appellation had the same key in collection and queryset",
         "\tCHANGED name=Entraîneur sportif changed to value=Entraîneur sportif avéré",
         "\tCHANGED name=Chef cuistot d'élite changed to value=Chef cuistor d'élite",
@@ -62,6 +63,9 @@ def test_sync_rome_appellation(capsys, respx_mock):
         "count=0 label=Appellation removed by collection",
         "len=3 Appellation entries have been created or updated.",
     ]
+    assert caplog.messages[-1].startswith(
+        "Management command itou.jobs.management.commands.sync_romes_and_appellations succeeded in"
+    )
     rome_3 = Rome(code="MET01", name="Edition")
     assert list(Rome.objects.all().order_by("code")) == [
         Rome(code="B001", name="Métiers du corps"),

From 632d172f4419433b6a9e9d90defd80c2e86ed109 Mon Sep 17 00:00:00 2001
From: Xavier Fernandez
Date: Thu, 30 Jan 2025 16:28:34 +0100
Subject: [PATCH 3/5] users: migrate sync_group_and_perms to logging

---
 .../commands/sync_group_and_perms.py     |  6 +++---
 .../test_sync_group_and_perms.ambr       | 23 +++++++++----------
 tests/users/test_sync_group_and_perms.py | 11 ++++++-----
 3 files changed, 20 insertions(+), 20 deletions(-)

diff --git a/itou/users/management/commands/sync_group_and_perms.py b/itou/users/management/commands/sync_group_and_perms.py
index 12317484fd..d9f23b7403 100644
--- a/itou/users/management/commands/sync_group_and_perms.py
+++ b/itou/users/management/commands/sync_group_and_perms.py
@@ -151,7 +151,7 @@ def handle(self, **options):
             group.permissions.clear()
             group.permissions.add(*perms)
             if created:
-                self.stdout.write(f"group name={group} created")
+                self.logger.info(f"group name={group} created")
             else:
-                self.stdout.write(f"permissions of group={group} updated")
-        self.stdout.write("All done!")
+                self.logger.info(f"permissions of group={group} updated")
+        self.logger.info("All done!")
diff --git a/tests/users/__snapshots__/test_sync_group_and_perms.ambr b/tests/users/__snapshots__/test_sync_group_and_perms.ambr
index 1c6601cf58..a3d3db8efa 100644
--- a/tests/users/__snapshots__/test_sync_group_and_perms.ambr
+++ b/tests/users/__snapshots__/test_sync_group_and_perms.ambr
@@ -169,6 +169,17 @@
     'view_user',
   ])
 # ---
+# name: test_command[logs]
+  list([
+    'group name=gps-admin created',
+    'group name=gps-admin-readonly created',
+    'group name=itou-admin created',
+    'group name=itou-admin-readonly created',
+    'group name=pilotage-admin created',
+    'group name=pilotage-admin-readonly created',
+    'All done!',
+  ])
+# ---
 # name: test_command[pilotage-admin-readonly]
   list([
     'view_statsdashboardvisit',
@@ -212,15 +223,3 @@
     'view_user',
   ])
 # ---
-# name: test_command[stdout]
-  '''
-  group name=gps-admin created
-  group name=gps-admin-readonly created
-  group name=itou-admin created
-  group name=itou-admin-readonly created
-  group name=pilotage-admin created
-  group name=pilotage-admin-readonly created
-  All done!
-
-  '''
-# ---
diff --git a/tests/users/test_sync_group_and_perms.py b/tests/users/test_sync_group_and_perms.py
index c79b300a2d..a20c787ffb 100644
--- a/tests/users/test_sync_group_and_perms.py
+++ b/tests/users/test_sync_group_and_perms.py
@@ -1,4 +1,3 @@
-import io
 import itertools

 from django.contrib.auth.models import Group
@@ -7,10 +6,12 @@
 from itou.users.management.commands import sync_group_and_perms


-def test_command(snapshot):
-    stdout = io.StringIO()
-    call_command("sync_group_and_perms", stdout=stdout)
-    assert stdout.getvalue() == snapshot(name="stdout")
+def test_command(snapshot, caplog):
+    call_command("sync_group_and_perms")
+    assert caplog.messages[:-1] == snapshot(name="logs")
+    assert caplog.messages[-1].startswith(
+        "Management command itou.users.management.commands.sync_group_and_perms succeeded in"
+    )

     assert Group.objects.all().count() == len(sync_group_and_perms.get_permissions_dict())
     for group in Group.objects.all():

From 0ef54e76a32035678bf7fde4d79ac1f878ed0bd9 Mon Sep 17 00:00:00 2001
From: Xavier Fernandez
Date: Thu, 30 Jan 2025 16:18:24 +0100
Subject: [PATCH 4/5] metabase: migrate upload_data_to_pilotage to logging

---
 .../commands/upload_data_to_pilotage.py        | 10 +++++-----
 .../test_upload_data_to_pilotage.ambr          | 15 +++++++--------
 .../management/test_upload_data_to_pilotage.py | 13 ++++++-------
 3 files changed, 18 insertions(+), 20 deletions(-)

diff --git a/itou/metabase/management/commands/upload_data_to_pilotage.py b/itou/metabase/management/commands/upload_data_to_pilotage.py
index f4e1ed8328..bfadbb3b45 100644
--- a/itou/metabase/management/commands/upload_data_to_pilotage.py
+++ b/itou/metabase/management/commands/upload_data_to_pilotage.py
@@ -41,7 +41,7 @@ def log_progress(chunk_size):
             bytes_transferred += chunk_size
             progress = int((bytes_transferred / file_size) * 100)
             if progress > previous_progress and progress % 5 == 0:
-                self.stdout.write(
+                self.logger.info(
                     f"> {file.name}: {filesizeformat(bytes_transferred)}/{filesizeformat(file_size)} transferred ({progress}%)."  # noqa: E501
                 )
                 previous_progress = progress
@@ -65,14 +65,14 @@ def handle(self, *, directory: pathlib.Path, wet_run, **options):
             datastore_files.update(
                 metadata["Key"].replace(self.DATASTORE_DIRECTORY, "") for metadata in response["Contents"]
             )
-        self.stdout.write(f"Files in datastore's {self.DATASTORE_DIRECTORY!r}: {sorted(datastore_files)}")
+        self.logger.info(f"Files in datastore's {self.DATASTORE_DIRECTORY!r}: {sorted(datastore_files)}")

         local_files = set(file.name for file in directory.glob(f"{self.FILENAME_PREFIX}*.tar.gz"))
-        self.stdout.write(f"Files in local's {directory.name!r}: {sorted(local_files)}")
+        self.logger.info(f"Files in local's {directory.name!r}: {sorted(local_files)}")

         files_to_upload = local_files - datastore_files
-        self.stdout.write(f"Files to upload: {sorted(files_to_upload)}")
+        self.logger.info(f"Files to upload: {sorted(files_to_upload)}")

         for filename in files_to_upload:
-            self.stdout.write(f"Uploading {filename!r}...")
+            self.logger.info(f"Uploading {filename!r}...")
             self._upload_file(directory / filename, wet_run=wet_run)
diff --git a/tests/metabase/management/__snapshots__/test_upload_data_to_pilotage.ambr b/tests/metabase/management/__snapshots__/test_upload_data_to_pilotage.ambr
index de4bc0bc1a..4fea2c3059 100644
--- a/tests/metabase/management/__snapshots__/test_upload_data_to_pilotage.ambr
+++ b/tests/metabase/management/__snapshots__/test_upload_data_to_pilotage.ambr
@@ -1,11 +1,10 @@
 # serializer version: 1
 # name: test_command_call
-  '''
-  Files in datastore's 'flux-iae/': []
-  Files in local's 'test_command_call0': ['fluxIAE_ITOU_.tar.gz']
-  Files to upload: ['fluxIAE_ITOU_.tar.gz']
-  Uploading 'fluxIAE_ITOU_.tar.gz'...
-  > fluxIAE_ITOU_.tar.gz: 11 octets/11 octets transferred (100%).
-
-  '''
+  list([
+    "Files in datastore's 'flux-iae/': []",
+    "Files in local's 'test_command_call0': ['fluxIAE_ITOU_.tar.gz']",
+    "Files to upload: ['fluxIAE_ITOU_.tar.gz']",
+    "Uploading 'fluxIAE_ITOU_.tar.gz'...",
+    '> fluxIAE_ITOU_.tar.gz: 11\xa0octets/11\xa0octets transferred (100%).',
+  ])
 # ---
diff --git a/tests/metabase/management/test_upload_data_to_pilotage.py b/tests/metabase/management/test_upload_data_to_pilotage.py
index 4b781808d6..83ae0c629d 100644
--- a/tests/metabase/management/test_upload_data_to_pilotage.py
+++ b/tests/metabase/management/test_upload_data_to_pilotage.py
@@ -1,5 +1,3 @@
-import io
-
 import pytest
 from django.core.management import call_command
 from django.utils import timezone
@@ -8,12 +6,13 @@


 @pytest.mark.usefixtures("temporary_bucket")
-def test_command_call(tmp_path, snapshot):
+def test_command_call(tmp_path, snapshot, caplog):
     tmp_path.joinpath("fluxIAE_ITOU_.tar.gz").write_bytes(b"Hello World")
-    stdout = io.StringIO()
-
-    call_command("upload_data_to_pilotage", tmp_path.as_posix(), "--wet-run", stdout=stdout)
-    assert stdout.getvalue() == snapshot
+    call_command("upload_data_to_pilotage", tmp_path.as_posix(), "--wet-run")
+    assert caplog.messages[:-1] == snapshot
+    assert caplog.messages[-1].startswith(
+        "Management command itou.metabase.management.commands.upload_data_to_pilotage succeeded in"
+    )


 @pytest.mark.usefixtures("temporary_bucket")

From c537517a675bb231ce1ca63c4a2f7c48f0afe8cb Mon Sep 17 00:00:00 2001
From: Xavier Fernandez
Date: Thu, 30 Jan 2025 16:38:37 +0100
Subject: [PATCH 5/5] metabase: migrate populate_metabase_emplois to logging

---
 .../commands/populate_metabase_emplois.py |  8 ++++----
 itou/utils/python.py                      |  6 +++++-
 .../test_populate_metabase_emplois.py     | 19 +++++++++++--------
 3 files changed, 20 insertions(+), 13 deletions(-)

diff --git a/itou/metabase/management/commands/populate_metabase_emplois.py b/itou/metabase/management/commands/populate_metabase_emplois.py
index 88ae6c266a..33779c102b 100644
--- a/itou/metabase/management/commands/populate_metabase_emplois.py
+++ b/itou/metabase/management/commands/populate_metabase_emplois.py
@@ -466,7 +466,7 @@ def populate_insee_codes(self):

     def populate_departments(self):
         table_name = "departements"
-        self.stdout.write(f"Preparing content for {table_name} table...")
+        self.logger.info("Preparing content for %s table...", table_name)

         rows = []
         for dpt_code, dpt_name in DEPARTMENTS.items():
@@ -493,7 +493,7 @@ def populate_enums(self):
             RefusalReason: "c1_ref_motif_de_refus",
         }
         for enum, table_name in enum_to_table.items():
-            self.stdout.write(f"Preparing content for {table_name} table...")
+            self.logger.info("Preparing content for %s table...", table_name)
             rows = [OrderedDict(code=str(item), label=item.label) for item in enum]
             df = get_df_from_rows(rows)
             store_df(df=df, table_name=table_name)
@@ -532,9 +532,9 @@ def report_data_inconsistencies(self):
         fatal errors after having completed its job.
         """
         fatal_errors = 0
-        self.stdout.write("Checking data for inconsistencies.")
+        self.logger.info("Checking data for inconsistencies.")
         if Approval.objects.exclude(user__kind=UserKind.JOB_SEEKER).exists():
-            self.stdout.write("FATAL ERROR: At least one user has an approval but is not a job seeker")
+            self.logger.error("FATAL ERROR: At least one user has an approval but is not a job seeker")
             fatal_errors += 1

         if fatal_errors >= 1:
diff --git a/itou/utils/python.py b/itou/utils/python.py
index 428ecc7356..a68985f993 100644
--- a/itou/utils/python.py
+++ b/itou/utils/python.py
@@ -1,7 +1,11 @@
+import logging
 from functools import wraps
 from time import perf_counter


+logger = logging.getLogger(__name__)
+
+
 class Sentinel:
     """Class to be used for sentinel object, but the object will be falsy"""

@@ -23,7 +27,7 @@ def timeit(f):
     def wrap(*args, **kw):
         ts = perf_counter()
         result = f(*args, **kw)
-        print(f"timeit: method={f.__name__} completed in seconds={perf_counter() - ts:.2f}")
+        logger.info("timeit: method=%s completed in seconds=%.2f", f.__name__, perf_counter() - ts)
         return result

     return wrap
diff --git a/tests/metabase/management/test_populate_metabase_emplois.py b/tests/metabase/management/test_populate_metabase_emplois.py
index 37da8ee2e3..85e021ba7b 100644
--- a/tests/metabase/management/test_populate_metabase_emplois.py
+++ b/tests/metabase/management/test_populate_metabase_emplois.py
@@ -1024,28 +1024,31 @@ def test_populate_enums():
     assert rows[0] == ("approval_expiration_too_close", "La date de fin du PASS IAE / agrément est trop proche")


-def test_data_inconsistencies(capsys):
+def test_data_inconsistencies(caplog):
     approval = ApprovalFactory()

     with assertNumQueries(1):  # Select the job seekers
         management.call_command("populate_metabase_emplois", mode="data_inconsistencies")

-    stdout, _ = capsys.readouterr()
-    out_lines = stdout.splitlines()
-    assert out_lines[0] == "Checking data for inconsistencies."
-    assert "timeit: method=report_data_inconsistencies completed in seconds=" in out_lines[1]
-    assert "timeit: method=handle completed in seconds=" in out_lines[2]
+    logs = caplog.messages
+    assert logs[0] == "Checking data for inconsistencies."
+    assert "timeit: method=report_data_inconsistencies completed in seconds=" in logs[1]
+    assert "timeit: method=handle completed in seconds=" in logs[2]

     approval.user.kind = UserKind.EMPLOYER
     approval.user.save()
+    caplog.clear()
     with pytest.raises(RuntimeError):
         management.call_command("populate_metabase_emplois", mode="data_inconsistencies")

-    stdout, _ = capsys.readouterr()
-    assert stdout.splitlines() == [
+    assert caplog.messages[:-1] == [
         "Checking data for inconsistencies.",
         "FATAL ERROR: At least one user has an approval but is not a job seeker",
+        "Error when executing itou.metabase.management.commands.populate_metabase_emplois",
     ]
+    assert caplog.messages[-1].startswith(
+        "Management command itou.metabase.management.commands.populate_metabase_emplois failed in"
+    )


 @freeze_time("2023-02-02")