From 07db9d6b53e0ca630e64957397a097081b7d8764 Mon Sep 17 00:00:00 2001
From: Jacob Pierce
Date: Thu, 27 Feb 2025 15:38:27 -0800
Subject: [PATCH] update storages module; allow defaults to do their magic for
 auth; fixes

---
 .../core/logger/management/commands/exportlogs.py |  9 +++------
 kolibri/core/logger/tasks.py                      | 10 +++-------
 kolibri/core/utils/csv.py                         | 13 +++++++++++++
 kolibri/deployment/default/settings/base.py       |  4 ----
 kolibri/plugins/facility/views.py                 |  4 +++-
 requirements/storages.txt                         |  3 ++-
 6 files changed, 24 insertions(+), 19 deletions(-)

diff --git a/kolibri/core/logger/management/commands/exportlogs.py b/kolibri/core/logger/management/commands/exportlogs.py
index a149b9f260b..f45e3ba51da 100644
--- a/kolibri/core/logger/management/commands/exportlogs.py
+++ b/kolibri/core/logger/management/commands/exportlogs.py
@@ -1,6 +1,5 @@
 import logging
 import ntpath
-import os
 
 from dateutil import parser
 from django.conf import settings
@@ -128,8 +127,6 @@ def handle_async(self, *args, **options):
         else:
             filename = options["output_file"]
 
-        filepath = os.path.join(os.getcwd(), filename)
-
         queryset = log_info["queryset"]
 
         total_rows = queryset.count()
@@ -139,7 +136,7 @@
             for row in csv_file_generator(
                 facility,
                 log_type,
-                filepath,
+                filename,
                 start_date=start_date,
                 end_date=end_date,
                 overwrite=options["overwrite"],
@@ -150,14 +147,14 @@
 
         if job:
             job.extra_metadata["overall_error"] = self.overall_error
-            self.job.extra_metadata["filename"] = ntpath.basename(filepath)
+            self.job.extra_metadata["filename"] = ntpath.basename(filename)
             job.save_meta()
         else:
             if self.overall_error:
                 raise CommandError(self.overall_error)
             else:
                 logger.info(
-                    "Created csv file {} with {} lines".format(filepath, total_rows)
+                    "Created csv file {} with {} lines".format(filename, total_rows)
                 )
 
         translation.deactivate()
diff --git a/kolibri/core/logger/tasks.py b/kolibri/core/logger/tasks.py
index b98018d0fe0..14186266f01 100644
--- a/kolibri/core/logger/tasks.py
+++ b/kolibri/core/logger/tasks.py
@@ -1,5 +1,3 @@
-import os
-
 from django.core.files.storage import default_storage
 from django.core.management import call_command
 from rest_framework import serializers
@@ -19,10 +17,8 @@
 
 def get_filepath(log_type, facility_id, start_date, end_date):
     facility = Facility.objects.get(id=facility_id)
-    filepath = default_storage.path(
-        CSV_EXPORT_FILENAMES[log_type].format(
-            facility.name, facility.id[:4], start_date[:10], end_date[:10]
-        ),
+    filepath = CSV_EXPORT_FILENAMES[log_type].format(
+        facility.name, facility.id[:4], start_date[:10], end_date[:10]
     )
     return filepath
 
@@ -42,7 +38,7 @@ def get_valid_logs_csv_filenames():
             log_request.selected_start_date.strftime("%Y-%m-%d"),
             log_request.selected_end_date.strftime("%Y-%m-%d"),
         )
-        valid_filenames_set.add(os.path.basename(full_path))
+        valid_filenames_set.add(full_path)
     return valid_filenames_set
 
 
diff --git a/kolibri/core/utils/csv.py b/kolibri/core/utils/csv.py
index dde98d8938d..fb9d2e00048 100644
--- a/kolibri/core/utils/csv.py
+++ b/kolibri/core/utils/csv.py
@@ -1,10 +1,13 @@
 import io
+import logging
 import re
 from contextlib import contextmanager
 from numbers import Number
 
 from django.core.files.storage import default_storage
 
+logger = logging.getLogger(__name__)
+
 
 @contextmanager
 def open_csv_for_writing(filename):
@@ -17,6 +20,11 @@ def open_csv_for_writing(filename):
             yield encoded_fh
             encoded_fh.flush()
             default_storage.save(filename, f)
+            logger.info("CSV file {} updated".format(filename))
+            try:
+                logger.info("File path: {}".format(default_storage.path(filename)))
+            except NotImplementedError:
+                logger.info("File url: {}".format(default_storage.url(filename)))
     else:
         # If the file does not exist, we need to create it and return it wrapped in a TextIOWrapper
         with io.BytesIO() as f:
@@ -30,6 +38,11 @@ def open_csv_for_writing(filename):
             yield encoded_fh
             encoded_fh.flush()
             default_storage.save(filename, f)
+            logger.info("CSV file {} saved".format(filename))
+            try:
+                logger.info("File path: {}".format(default_storage.path(filename)))
+            except NotImplementedError:
+                logger.info("File url: {}".format(default_storage.url(filename)))
 
 
 @contextmanager
diff --git a/kolibri/deployment/default/settings/base.py b/kolibri/deployment/default/settings/base.py
index bd0ddddfc0b..9940e95219a 100644
--- a/kolibri/deployment/default/settings/base.py
+++ b/kolibri/deployment/default/settings/base.py
@@ -195,10 +195,6 @@
 
 if not os.environ.get("DEFAULT_FILE_STORAGE"):
     if conf.OPTIONS["FileStorage"]["STORAGE_BACKEND"] == "gcs":
-        # Initialize GS_CREDENTIALS as a proper google.auth.credentials.Credentials object
-        import google.auth
-
-        GS_CREDENTIALS, _ = google.auth.default()
         # Options per https://django-storages.readthedocs.io/en/latest/backends/gcloud.html
         DEFAULT_FILE_STORAGE = "storages.backends.gcloud.GoogleCloudStorage"
         GS_BUCKET_NAME = conf.OPTIONS["FileStorage"]["GS_BUCKET_NAME"]
diff --git a/kolibri/plugins/facility/views.py b/kolibri/plugins/facility/views.py
index 02c280aabf2..85ce50e80db 100644
--- a/kolibri/plugins/facility/views.py
+++ b/kolibri/plugins/facility/views.py
@@ -27,7 +27,6 @@
 from kolibri.core.logger.models import ContentSessionLog
 from kolibri.core.logger.models import GenerateCSVLogRequest
 
-
 CSV_EXPORT_FILENAMES = {}
 CSV_EXPORT_FILENAMES.update(LOGGER_CSV_EXPORT_FILENAMES)
 CSV_EXPORT_FILENAMES.update(USER_CSV_EXPORT_FILENAMES)
@@ -103,6 +102,9 @@ def exported_csv_info(request, facility_id):
         if log_request is not None:
             start = log_request.selected_start_date.isoformat()
             end = log_request.selected_end_date.isoformat()
+            filename = CSV_EXPORT_FILENAMES[log_type].format(
+                facility.name, facility.id[:4], start[:10], end[:10]
+            )
         else:
             start = ""
             end = ""
diff --git a/requirements/storages.txt b/requirements/storages.txt
index a20ce2fe0c2..d71ce725ef0 100644
--- a/requirements/storages.txt
+++ b/requirements/storages.txt
@@ -1,2 +1,3 @@
 # Additional reqs for running kolibri with GCS file storage backend
-django-storages[google]==1.14.2
+django-storages[google]==1.14.5
+google-auth==2.38.0