diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 42364593e..a7c2dac20 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -21,12 +21,12 @@ jobs:
           - 5432:5432
       redis:
-        image: redis:7.4.1
+        image: redis:7.4.2
        ports:
           - 6379:6379
       elastic:
-        image: docker.elastic.co/elasticsearch/elasticsearch:8.16.0
+        image: docker.elastic.co/elasticsearch/elasticsearch:8.17.0
        env:
           network.host: "0.0.0.0"
           http.cors.enabled: "true"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6f182bd9f..8031feeae 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -51,7 +51,7 @@ repos:
           - --exclude-files
           - "_test.js$"
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: "v0.9.1"
+    rev: "v0.9.2"
     hooks:
       - id: ruff-format
       - id: ruff
@@ -103,7 +103,7 @@ repos:
       - id: shellcheck
         args: ["--severity=warning"]
   - repo: https://github.com/rhysd/actionlint
-    rev: v1.7.6
+    rev: v1.7.7
     hooks:
       - id: actionlint
         name: actionlint
diff --git a/.secrets.baseline b/.secrets.baseline
index f2860aaad..5dbd9d01d 100644
--- a/.secrets.baseline
+++ b/.secrets.baseline
@@ -205,7 +205,7 @@
         "filename": "sheets/dev-setup.md",
         "hashed_secret": "1348b145fa1a555461c1b790a2f66614781091e9",
         "is_verified": false,
-        "line_number": 179
+        "line_number": 178
       }
     ],
     "static/js/constants.js": [
@@ -261,5 +261,5 @@
       }
     ]
   },
-  "generated_at": "2024-09-26T09:26:28Z"
+  "generated_at": "2025-01-13T12:49:44Z"
 }
diff --git a/RELEASE.rst b/RELEASE.rst
index 9bb41418d..3edbd7c27 100644
--- a/RELEASE.rst
+++ b/RELEASE.rst
@@ -1,6 +1,20 @@
 Release Notes
 =============
 
+Version 0.172.0
+---------------
+
+- fix: filter enrollments instead of get in defer_enrollment (#3215)
+- fix: external sync when there is no platform with passed name (#3385)
+- fix(deps): update dependency boto3 to v1.36.2 (#3383)
+- [pre-commit.ci] pre-commit autoupdate (#3382)
+- chore(deps): update docker.elastic.co/elasticsearch/elasticsearch docker tag to v8.17.0 (#3381)
+- refactor: remove COUPON_SHEETS (#3370)
+- chore(deps): update node.js to v20.18.1 (#3379)
+- chore(deps): update redis docker tag to v7.4.2 (#3380)
+- chore(deps): update nginx docker tag to v1.27.3 (#3378)
+- refactor: migrate digital credentials flag to posthog (#3367)
+
 Version 0.171.0 (Released January 30, 2025)
 ---------------
 
diff --git a/courses/api.py b/courses/api.py
index c542f0dbd..3d52d91e3 100644
--- a/courses/api.py
+++ b/courses/api.py
@@ -305,9 +305,17 @@ def defer_enrollment(
         (CourseRunEnrollment, CourseRunEnrollment): The deactivated enrollment paired with the
             new enrollment that was the target of the deferral
     """
-    from_enrollment = CourseRunEnrollment.all_objects.get(
-        user=user, run__courseware_id=from_courseware_id
+    from_enrollment = (
+        CourseRunEnrollment.all_objects.filter(
+            user=user, run__courseware_id=from_courseware_id
+        )
+        .order_by("-created_on")
+        .first()
     )
+    if not from_enrollment:
+        raise ValidationError(
+            f"User is not enrolled in course run '{from_courseware_id}'"  # noqa: EM102
+        )
     if not force and not from_enrollment.active:
         raise ValidationError(
             f"Cannot defer from inactive enrollment (id: {from_enrollment.id}, run: {from_enrollment.run.courseware_id}, user: {user.email}). "  # noqa: EM102
diff --git a/courses/management/commands/sync_external_course_runs.py b/courses/management/commands/sync_external_course_runs.py
index a549bb8a9..507f1cca4 100644
--- a/courses/management/commands/sync_external_course_runs.py
+++ b/courses/management/commands/sync_external_course_runs.py
@@ -35,12 +35,19 @@ def handle(self, *args, **options):  # noqa: ARG002
         """Handle command execution"""
         vendor_name = options["vendor_name"]
         keymap = EXTERNAL_COURSE_VENDOR_KEYMAPS.get(vendor_name.lower())
-        if not keymap:
+        platform = Platform.objects.filter(name__iexact=vendor_name).first()
+
+        if not platform:
             self.stdout.write(self.style.ERROR(f"Unknown vendor name {vendor_name}."))
             return
 
-        platform = Platform.objects.filter(name__iexact=vendor_name).first()
-        if platform and not platform.enable_sync and not options.get("force"):
+        if not keymap:
+            self.stdout.write(
+                self.style.ERROR(f"Mapping does not exist for {vendor_name}.")
+            )
+            return
+
+        if not platform.enable_sync and not options.get("force"):
             self.stdout.write(
                 self.style.ERROR(
                     f"Course sync is off for {vendor_name}. Please enable it before syncing."
diff --git a/docker-compose.yml b/docker-compose.yml
index d8c341abc..b0a21550d 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -28,12 +28,12 @@ services:
       - "5432"
   redis:
-    image: redis:7.4.1
+    image: redis:7.4.2
     ports:
       - "6379"
   nginx:
-    image: nginx:1.27.2
+    image: nginx:1.27.3
     ports:
       - "8053:8053"
     links:
@@ -68,7 +68,7 @@ services:
     extra_hosts: *default-extra-hosts
 
   watch:
-    image: node:20.18.0
+    image: node:20.18.1
     working_dir: /src
     command: >
       /bin/bash -c './webpack_dev_server.sh --install'
diff --git a/mitxpro/features.py b/mitxpro/features.py
index 07e278480..28a77a6d5 100644
--- a/mitxpro/features.py
+++ b/mitxpro/features.py
@@ -1,4 +1,5 @@
 """MIT xPRO features"""
 
+DIGITAL_CREDENTIALS = "digital_credentials"
 ENABLE_ENTERPRISE = "enable_enterprise"
 ENROLLMENT_WELCOME_EMAIL = "enrollment_welcome_email"
diff --git a/mitxpro/settings.py b/mitxpro/settings.py
index 1087bc2c1..7d91b00c6 100644
--- a/mitxpro/settings.py
+++ b/mitxpro/settings.py
@@ -26,7 +26,7 @@
 from mitxpro.celery_utils import OffsettingSchedule
 from mitxpro.sentry import init_sentry
 
-VERSION = "0.171.0"
+VERSION = "0.172.0"
 
 env.reset()
 
@@ -905,39 +905,40 @@
             month_of_year="*",
         ),
     },
-}
-if FEATURES.get("COUPON_SHEETS"):
-    CELERY_BEAT_SCHEDULE["renew_all_file_watches"] = {
+    "renew_all_file_watches": {
         "task": "sheets.tasks.renew_all_file_watches",
         "schedule": (
             DRIVE_WEBHOOK_EXPIRATION_MINUTES - DRIVE_WEBHOOK_RENEWAL_PERIOD_MINUTES
         )
         * 60,
-    }
-    alt_sheets_processing = FEATURES.get("COUPON_SHEETS_ALT_PROCESSING")
-    if alt_sheets_processing:
-        CELERY_BEAT_SCHEDULE.update(
-            {
-                "handle-coupon-request-sheet": {
-                    "task": "sheets.tasks.handle_unprocessed_coupon_requests",
-                    "schedule": SHEETS_MONITORING_FREQUENCY,
-                }
-            }
-        )
+    },
+}
+
+alt_sheets_processing = FEATURES.get("COUPON_SHEETS_ALT_PROCESSING")
+if alt_sheets_processing:
     CELERY_BEAT_SCHEDULE.update(
         {
-            "update-assignment-delivery-dates": {
-                "task": "sheets.tasks.update_incomplete_assignment_delivery_statuses",
-                "schedule": OffsettingSchedule(
-                    run_every=timedelta(seconds=SHEETS_MONITORING_FREQUENCY),
-                    offset=timedelta(
-                        seconds=0 if not alt_sheets_processing else SHEETS_TASK_OFFSET
-                    ),
-                ),
+            "handle-coupon-request-sheet": {
+                "task": "sheets.tasks.handle_unprocessed_coupon_requests",
+                "schedule": SHEETS_MONITORING_FREQUENCY,
             }
         }
     )
+CELERY_BEAT_SCHEDULE.update(
+    {
+        "update-assignment-delivery-dates": {
+            "task": "sheets.tasks.update_incomplete_assignment_delivery_statuses",
+            "schedule": OffsettingSchedule(
+                run_every=timedelta(seconds=SHEETS_MONITORING_FREQUENCY),
+                offset=timedelta(
+                    seconds=0 if not alt_sheets_processing else SHEETS_TASK_OFFSET
+                ),
+            ),
+        }
+    }
+)
+
 
 # Hijack
 HIJACK_INSERT_BEFORE = ""
diff --git a/mitxpro/utils.py b/mitxpro/utils.py
index 30fc4522c..15c1669c4 100644
--- a/mitxpro/utils.py
+++ b/mitxpro/utils.py
@@ -601,7 +601,7 @@ def get_js_settings(request: HttpRequest):
             "help_widget_enabled": settings.ZENDESK_CONFIG.get("HELP_WIDGET_ENABLED"),
             "help_widget_key": settings.ZENDESK_CONFIG.get("HELP_WIDGET_KEY"),
         },
-        "digital_credentials": settings.FEATURES.get("DIGITAL_CREDENTIALS", False),
+        "digital_credentials": is_enabled(features.DIGITAL_CREDENTIALS, default=False),
         "digital_credentials_supported_runs": settings.DIGITAL_CREDENTIALS_SUPPORTED_RUNS,
         "is_tax_applicable": is_tax_applicable(request),
         "enable_enterprise": is_enabled(features.ENABLE_ENTERPRISE, default=False),
diff --git a/mitxpro/utils_test.py b/mitxpro/utils_test.py
index 0c702d2b6..dfecc0709 100644
--- a/mitxpro/utils_test.py
+++ b/mitxpro/utils_test.py
@@ -11,6 +11,7 @@
 from ecommerce.api import is_tax_applicable
 from ecommerce.models import Order
+from mitxpro import features
 from mitxpro.test_utils import MockResponse
 from mitxpro.utils import (
     all_equal,
@@ -463,6 +464,8 @@ def posthog_is_enabled_side_effect(*args, **kwargs):
         """
         Side effect to return True/False for specific features while mocking posthog is_enabled.
         """
+        if args[0] == features.DIGITAL_CREDENTIALS:  # noqa: SIM103
+            return True
         return False
 
     settings.GA_TRACKING_ID = "fake"
@@ -476,7 +479,6 @@ def posthog_is_enabled_side_effect(*args, **kwargs):
         "HELP_WIDGET_ENABLED": False,
         "HELP_WIDGET_KEY": "fake_key",
     }
-    settings.FEATURES["DIGITAL_CREDENTIALS"] = True
     settings.DIGITAL_CREDENTIALS_SUPPORTED_RUNS = "test_run1,test_run2"
     mocker.patch(
         "mitol.olposthog.features.is_enabled",
@@ -498,7 +500,7 @@ def posthog_is_enabled_side_effect(*args, **kwargs):
         "support_email": settings.EMAIL_SUPPORT,
         "site_name": settings.SITE_NAME,
         "zendesk_config": {"help_widget_enabled": False, "help_widget_key": "fake_key"},
-        "digital_credentials": settings.FEATURES.get("DIGITAL_CREDENTIALS", False),
+        "digital_credentials": True,
         "digital_credentials_supported_runs": settings.DIGITAL_CREDENTIALS_SUPPORTED_RUNS,
         "is_tax_applicable": is_tax_applicable(request),
         "enable_enterprise": False,
diff --git a/package.json b/package.json
index b4f777161..f0d06cd8e 100644
--- a/package.json
+++ b/package.json
@@ -123,7 +123,7 @@
     "yup": "^1.0.0"
   },
   "engines": {
-    "node": "20.18.0"
+    "node": "20.18.1"
   },
   "scripts": {
     "postinstall": "./webpack_if_prod.sh",
diff --git a/poetry.lock b/poetry.lock
index 4996144dc..43d643e4d 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -193,32 +193,32 @@ files = [
 
 [[package]]
 name = "boto3"
-version = "1.35.97"
+version = "1.36.2"
 description = "The AWS SDK for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "boto3-1.35.97-py3-none-any.whl", hash = "sha256:8e49416216a6e3a62c2a0c44fba4dd2852c85472e7b702516605b1363867d220"},
-    {file = "boto3-1.35.97.tar.gz", hash = "sha256:7d398f66a11e67777c189d1f58c0a75d9d60f98d0ee51b8817e828930bf19e4e"},
+    {file = "boto3-1.36.2-py3-none-any.whl", hash = "sha256:76cfc9a705be46e8d22607efacc8d688c064f923d785a01c00b28e9a96425d1a"},
+    {file = "boto3-1.36.2.tar.gz", hash = "sha256:fde1c29996b77274a60b7bc9f741525afa6267bb1716eb644a764fb7c124a0d2"},
 ]
 
 [package.dependencies]
-botocore = ">=1.35.97,<1.36.0"
+botocore = ">=1.36.2,<1.37.0"
 jmespath = ">=0.7.1,<2.0.0"
-s3transfer = ">=0.10.0,<0.11.0"
+s3transfer = ">=0.11.0,<0.12.0"
 
 [package.extras]
 crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
 
 [[package]]
 name = "botocore"
-version = "1.35.97"
+version = "1.36.2"
 description = "Low-level, data-driven core of boto 3."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "botocore-1.35.97-py3-none-any.whl", hash = "sha256:fed4f156b1a9b8ece53738f702ba5851b8c6216b4952de326547f349cc494f14"},
-    {file = "botocore-1.35.97.tar.gz", hash = "sha256:88f2fab29192ffe2f2115d5bafbbd823ff4b6eb2774296e03ec8b5b0fe074f61"},
+    {file = "botocore-1.36.2-py3-none-any.whl", hash = "sha256:bc3b7e3b573a48af2bd7116b80fe24f9a335b0b67314dcb2697a327d009abf29"},
+    {file = "botocore-1.36.2.tar.gz", hash = "sha256:a1fe6610983f0214b0c7655fe6990b6a731746baf305b182976fc7b568fc3cb0"},
 ]
 
 [package.dependencies]
@@ -227,7 +227,7 @@ python-dateutil = ">=2.1,<3.0.0"
 urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}
 
 [package.extras]
-crt = ["awscrt (==0.22.0)"]
+crt = ["awscrt (==0.23.4)"]
 
 [[package]]
 name = "bpython"
@@ -3570,20 +3570,20 @@ files = [
 
 [[package]]
 name = "s3transfer"
-version = "0.10.4"
+version = "0.11.1"
 description = "An Amazon S3 Transfer Manager"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e"},
-    {file = "s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7"},
+    {file = "s3transfer-0.11.1-py3-none-any.whl", hash = "sha256:8fa0aa48177be1f3425176dfe1ab85dcd3d962df603c3dbfc585e6bf857ef0ff"},
+    {file = "s3transfer-0.11.1.tar.gz", hash = "sha256:3f25c900a367c8b7f7d8f9c34edc87e300bde424f779dc9f0a8ae4f9df9264f6"},
 ]
 
 [package.dependencies]
-botocore = ">=1.33.2,<2.0a.0"
+botocore = ">=1.36.0,<2.0a.0"
 
 [package.extras]
-crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
+crt = ["botocore[crt] (>=1.36.0,<2.0a.0)"]
 
 [[package]]
 name = "safety"
@@ -4214,4 +4214,4 @@ xmlsec = ["xmlsec (>=0.6.1)"]
 [metadata]
 lock-version = "2.0"
 python-versions = "3.12.6"
-content-hash = "619815f0276a2d610c8cc77e1aceca8bbdea701bc04e6523f20b96c854bdc7bb"
+content-hash = "58f2f150fed8a4fc264963eb8c49f6c1a8f5ed3e1992d789fade224e1c395358"
diff --git a/pyproject.toml b/pyproject.toml
index f36eaf6a4..6d8348577 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,7 +13,7 @@ python = "3.12.6"
 Pillow = "10.4.0"
 PyNaCl = "1.5.0"
 beautifulsoup4 = "4.8.2"
-boto3 = "1.35.97"
+boto3 = "1.36.2"
 celery = "5.4.0"
 celery-redbeat = "2.2.0"
 dj-database-url = "0.5.0"
diff --git a/sheets/conftest.py b/sheets/conftest.py
index 6ea01c306..72973b7d9 100644
--- a/sheets/conftest.py
+++ b/sheets/conftest.py
@@ -6,7 +6,6 @@
 @pytest.fixture(autouse=True)
 def sheets_settings(settings):
     """Default settings for sheets tests"""
-    settings.FEATURES["COUPON_SHEETS"] = True
     settings.SHEETS_REQ_EMAIL_COL = 7
     settings.SHEETS_REQ_PROCESSED_COL = 8
     settings.SHEETS_REQ_ERROR_COL = 9
diff --git a/sheets/dev-setup.md b/sheets/dev-setup.md
index 98b038d83..a3fadd18d 100644
--- a/sheets/dev-setup.md
+++ b/sheets/dev-setup.md
@@ -85,7 +85,6 @@ First, gather a bunch of ID-type values from Drive:
 _If it's not obvious, remove the angle brackets (`<>`) for the actual values._
 
 ```dotenv
-FEATURE_COUPON_SHEETS=True
 SHEETS_ADMIN_EMAILS=
 DRIVE_CLIENT_ID=
 DRIVE_CLIENT_SECRET=
diff --git a/sheets/views.py b/sheets/views.py
index eae561bd2..f6807cda4 100644
--- a/sheets/views.py
+++ b/sheets/views.py
@@ -6,7 +6,7 @@
 from django.conf import settings
 from django.contrib.admin.views.decorators import staff_member_required
 from django.db import transaction
-from django.http import Http404, HttpResponse
+from django.http import HttpResponse
 from django.shortcuts import redirect, render
 from django.urls import reverse
 from django.views.decorators.csrf import csrf_exempt
@@ -32,8 +32,6 @@
 @staff_member_required(login_url="login")
 def sheets_admin_view(request):
     """Admin view that renders a page that allows a user to begin Google OAuth auth"""
-    if not settings.FEATURES.get("COUPON_SHEETS"):
-        raise Http404
     existing_api_auth = GoogleApiAuth.objects.first()
     successful_action = request.GET.get("success")
     return render(
@@ -49,8 +47,6 @@ def sheets_admin_view(request):
 @staff_member_required(login_url="login")
 def request_google_auth(request):
     """Admin view to begin Google OAuth auth"""
-    if not settings.FEATURES.get("COUPON_SHEETS"):
-        raise Http404
     flow = Flow.from_client_config(
         generate_google_client_config(), scopes=REQUIRED_GOOGLE_API_SCOPES
     )
@@ -66,8 +62,6 @@ def request_google_auth(request):
 @csrf_exempt
 def complete_google_auth(request):
     """Admin view that handles the redirect from Google after completing Google auth"""
-    if not settings.FEATURES.get("COUPON_SHEETS"):
-        raise Http404
     state = request.session.get("state")
     if not state:
         raise GoogleAuthError(
@@ -98,8 +92,6 @@ def handle_watched_sheet_update(request):
     View that handles requests sent from Google's push notification service when changes are
     made to the a sheet with a file watch applied.
     """
-    if not settings.FEATURES.get("COUPON_SHEETS"):
-        raise Http404
     channel_id = request.META.get("HTTP_X_GOOG_CHANNEL_ID")
     if not channel_id:
         log.error(