From cbe4d745c676dfbfe16d507df2856258e87bcf5d Mon Sep 17 00:00:00 2001
From: Adrian
Date: Thu, 29 Feb 2024 15:51:47 -0800
Subject: [PATCH 01/36] feat: initial setup + most codecov_auth migrations moved

---
 setup.py                                      |   3 +
 shared/django_apps/codecov/__init__.py        |   0
 shared/django_apps/codecov/models.py          |  13 +
 shared/django_apps/codecov_auth/__init__.py   |   0
 .../codecov_auth/migrations/0001_initial.py   | 170 ++
 .../migrations/0002_auto_20210817_1346.py     |  44 +
 .../migrations/0003_auto_20210924_1003.py     |  72 ++
 .../migrations/0004_auto_20210930_1429.py     |  29 +
 .../migrations/0005_auto_20211029_1709.py     |  13 +
 .../migrations/0006_auto_20211123_1535.py     |  44 +
 .../migrations/0007_auto_20211129_1228.py     |  14 +
 .../migrations/0008_auto_20220119_1811.py     |  14 +
 .../migrations/0009_auto_20220511_1313.py     |  78 ++
 .../migrations/0010_owner_is_superuser.py     |  18 +
 .../migrations/0011_new_enterprise_plans.py   |  20 +
 .../migrations/0012_auto_20220531_1452.py     |  18 +
 .../0013_alter_owner_organizations.py         |  24 +
 .../0014_alter_repositorytoken_token_type.py  |  21 +
 .../migrations/0015_organizationleveltoken.py |  44 +
 .../migrations/0016_alter_owner_admins.py     |  24 +
 ...alter_organizationleveltoken_token_type.py |  20 +
 .../codecov_auth/migrations/0018_usertoken.py |  47 ++
 .../0019_alter_repositorytoken_token_type.py  |  25 +
 .../0020_ownerprofile_default_org.py          |  32 +
 .../migrations/0021_owner_max_upload_limit.py |  18 +
 .../0022_alter_owner_max_upload_limit.py      |  18 +
 .../migrations/0023_auto_20230214_1129.py     |  20 +
 .../0024_alter_owner_max_upload_limit.py      |  18 +
 .../migrations/0025_owner_stripe_coupon_id.py |  26 +
 .../0026_alter_owner_plan_user_count.py       |  18 +
 .../migrations/0027_auto_20230307_1751.py     |  15 +
 ...r_sentry_user_data_owner_sentry_user_id.py |  23 +
 ...9_ownerprofile_terms_agreement_and_more.py |  25 +
 ...r_trial_end_date_owner_trial_start_date.py |  25 +
 .../migrations/0031_user_owner_user.py        |  69 ++
 .../migrations/0032_owner_trial_status.py     |  28 +
 .../migrations/0033_sentryuser.py             |  42 +
 .../0034_alter_owner_trial_status.py          |  40 +
 .../0035_owner_pretrial_users_count.py        |  27 +
 .../0036_add_user_terms_agreement.py          |  37 +
 .../migrations/0037_owner_uses_invoice.py     |  18 +
 .../0038_alter_owner_uses_invoice.py          |  24 +
 .../0039_alter_owner_uses_invoice.py          |  18 +
 .../codecov_auth/migrations/0040_oktauser.py  |  41 +
 .../migrations/0041_auto_20230918_1825.py     |  25 +
 .../migrations/0042_owner_trial_fired_by.py   |  18 +
 .../0043_sync_user_terms_agreement.py         |  34 +
 ...er_agreements_and_alter_user_agreements.py |  25 +
 ...045_remove_ownerprofile_terms_agreement.py |  34 +
 .../0046_dedupe_owner_admin_values.py         |  27 +
 .../migrations/0047_auto_20231009_1257.py     |  21 +
 .../migrations/0048_githubappinstallation.py  |  61 ++
 .../0049_ownerprofile_customer_intent.py      |  25 +
 ...050_remove_ownerprofile_customer_intent.py |  22 +
 .../migrations/0051_user_customer_intent.py   |  25 +
 ...2_githubappinstallation_app_id_and_more.py |  34 +
 .../codecov_auth/migrations/__init__.py       |   0
 shared/django_apps/codecov_auth/models.py     | 668 +++++++++++++++
 shared/django_apps/core/__init__.py           |   0
 shared/django_apps/core/models.py             | 484 +++++++++++
 shared/django_apps/dummy_settings.py          |   5 +-
 shared/plan/constants.py                      | 360 ++++++++
 shared/plan/service.py                        | 247 ++++++
 shared/plan/test_plan.py                      | 788 ++++++++++++++++++
 64 files changed, 4239 insertions(+), 1 deletion(-)
 create mode 100644 shared/django_apps/codecov/__init__.py
 create mode 100644 shared/django_apps/codecov/models.py
 create mode 100644 shared/django_apps/codecov_auth/__init__.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0001_initial.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0002_auto_20210817_1346.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0003_auto_20210924_1003.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0004_auto_20210930_1429.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0005_auto_20211029_1709.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0006_auto_20211123_1535.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0007_auto_20211129_1228.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0008_auto_20220119_1811.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0009_auto_20220511_1313.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0010_owner_is_superuser.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0011_new_enterprise_plans.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0012_auto_20220531_1452.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0013_alter_owner_organizations.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0014_alter_repositorytoken_token_type.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0015_organizationleveltoken.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0016_alter_owner_admins.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0017_alter_organizationleveltoken_token_type.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0018_usertoken.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0019_alter_repositorytoken_token_type.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0020_ownerprofile_default_org.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0021_owner_max_upload_limit.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0022_alter_owner_max_upload_limit.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0023_auto_20230214_1129.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0024_alter_owner_max_upload_limit.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0025_owner_stripe_coupon_id.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0026_alter_owner_plan_user_count.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0027_auto_20230307_1751.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0028_owner_sentry_user_data_owner_sentry_user_id.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0029_ownerprofile_terms_agreement_and_more.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0030_owner_trial_end_date_owner_trial_start_date.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0031_user_owner_user.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0032_owner_trial_status.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0033_sentryuser.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0034_alter_owner_trial_status.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0035_owner_pretrial_users_count.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0036_add_user_terms_agreement.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0037_owner_uses_invoice.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0038_alter_owner_uses_invoice.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0039_alter_owner_uses_invoice.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0040_oktauser.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0041_auto_20230918_1825.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0042_owner_trial_fired_by.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0043_sync_user_terms_agreement.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0044_remove_owner_agreements_and_alter_user_agreements.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0045_remove_ownerprofile_terms_agreement.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0046_dedupe_owner_admin_values.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0047_auto_20231009_1257.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0048_githubappinstallation.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0049_ownerprofile_customer_intent.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0050_remove_ownerprofile_customer_intent.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0051_user_customer_intent.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/0052_githubappinstallation_app_id_and_more.py
 create mode 100644 shared/django_apps/codecov_auth/migrations/__init__.py
 create mode 100644 shared/django_apps/codecov_auth/models.py
 create mode 100644 shared/django_apps/core/__init__.py
 create mode 100644 shared/django_apps/core/models.py
 create mode 100644 shared/plan/constants.py
 create mode 100644 shared/plan/service.py
 create mode 100644 shared/plan/test_plan.py

diff --git a/setup.py b/setup.py
index 2c5225bbf..8dca13910 100644
--- a/setup.py
+++ b/setup.py
@@ -50,5 +50,8 @@
         "codecov-ribs",
         "cachetools",
         "django-better-admin-arrayfield",
+        # API Deps
+        "django-prometheus",
+        "django-model-utils"
     ],
 )
diff --git a/shared/django_apps/codecov/__init__.py b/shared/django_apps/codecov/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/shared/django_apps/codecov/models.py b/shared/django_apps/codecov/models.py
new file mode 100644
index 000000000..ff7e9b937
--- /dev/null
+++ b/shared/django_apps/codecov/models.py
@@ -0,0 +1,13 @@
+import uuid
+
+from django.db import models
+
+
+class BaseCodecovModel(models.Model):
+    id = models.BigAutoField(primary_key=True)
+    external_id = models.UUIDField(default=uuid.uuid4, editable=False)
+    created_at = models.DateTimeField(auto_now_add=True)
+    updated_at = models.DateTimeField(auto_now=True)
+
+    class Meta:
+        abstract = True
diff --git a/shared/django_apps/codecov_auth/__init__.py b/shared/django_apps/codecov_auth/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/shared/django_apps/codecov_auth/migrations/0001_initial.py b/shared/django_apps/codecov_auth/migrations/0001_initial.py
new file mode 100644
index 000000000..8a69c5fa3
--- /dev/null
+++ b/shared/django_apps/codecov_auth/migrations/0001_initial.py
@@ -0,0 +1,170 @@
+# Generated by Django 3.1.6 on 2021-04-08 19:21
+
+import datetime
+import uuid
+
+import django.contrib.postgres.fields
+import django.contrib.postgres.fields.citext
+import django.db.models.deletion
+from django.conf import settings
+from django.contrib.postgres.operations import CITextExtension
+from django.db import migrations, models
+
+from shared.django_apps.core.models import DateTimeWithoutTZField
+
+
+class Migration(migrations.Migration):
+
+    initial = True
+
+
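# BaseCodecovModel (added above in shared/django_apps/codecov/models.py) is an
# abstract base that concrete models inherit to pick up the shared id,
# external_id, created_at and updated_at columns. A minimal sketch of such a
# subclass -- the ExampleToken model and its "example_tokens" table are
# hypothetical and not part of this patch:
#
#     from django.db import models
#
#     from shared.django_apps.codecov.models import BaseCodecovModel
#
#
#     class ExampleToken(BaseCodecovModel):
#         # Only model-specific fields are declared here; the primary key and
#         # timestamp columns come from the abstract base.
#         name = models.CharField(max_length=100)
#
#         class Meta:
#             db_table = "example_tokens"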
dependencies = [] + + operations = [ + CITextExtension(), + migrations.CreateModel( + name="User", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("email", django.contrib.postgres.fields.citext.CITextField(null=True)), + ("name", models.TextField(null=True)), + ("is_staff", models.BooleanField(default=False, null=True)), + ("is_superuser", models.BooleanField(default=False, null=True)), + ( + "external_id", + models.UUIDField(default=uuid.uuid4, editable=False, unique=True), + ), + ], + options={ + "db_table": "users", + }, + ), + migrations.CreateModel( + name="Owner", + fields=[ + ("ownerid", models.AutoField(primary_key=True, serialize=False)), + ( + "service", + models.TextField( + choices=[ + ("github", "Github"), + ("gitlab", "Gitlab"), + ("bitbucket", "Bitbucket"), + ("github_enterprise", "Github Enterprise"), + ("gitlab_enterprise", "Gitlab Enterprise"), + ("bitbucket_server", "Bitbucket Server"), + ] + ), + ), + ( + "username", + django.contrib.postgres.fields.citext.CITextField( + null=True, unique=True + ), + ), + ("email", models.TextField(null=True)), + ("name", models.TextField(null=True)), + ("oauth_token", models.TextField(null=True)), + ("stripe_customer_id", models.TextField(null=True)), + ("stripe_subscription_id", models.TextField(null=True)), + ("createstamp", models.DateTimeField(null=True)), + ("service_id", models.TextField()), + ("parent_service_id", models.TextField(null=True)), + ("root_parent_service_id", models.TextField(null=True)), + ("private_access", models.BooleanField(null=True)), + ("staff", models.BooleanField(default=False, null=True)), + ("cache", models.JSONField(null=True)), + ("plan", models.TextField(default="users-free", null=True)), + ("plan_provider", models.TextField(null=True)), + ("plan_user_count", models.SmallIntegerField(default=5, null=True)), + ("plan_auto_activate", models.BooleanField(default=True, null=True)), + ( + "plan_activated_users", + django.contrib.postgres.fields.ArrayField( + base_field=models.IntegerField(null=True), null=True, size=None + ), + ), + ("did_trial", models.BooleanField(null=True)), + ("free", models.SmallIntegerField(default=0)), + ("invoice_details", models.TextField(null=True)), + ("delinquent", models.BooleanField(null=True)), + ("yaml", models.JSONField(null=True)), + ( + "updatestamp", + DateTimeWithoutTZField(default=datetime.datetime.now), + ), + ( + "organizations", + django.contrib.postgres.fields.ArrayField( + base_field=models.IntegerField(null=True), null=True, size=None + ), + ), + ( + "admins", + django.contrib.postgres.fields.ArrayField( + base_field=models.IntegerField(null=True), null=True, size=None + ), + ), + ("integration_id", models.IntegerField(null=True)), + ( + "permission", + django.contrib.postgres.fields.ArrayField( + base_field=models.IntegerField(null=True), null=True, size=None + ), + ), + ("student", models.BooleanField(default=False)), + ("student_created_at", DateTimeWithoutTZField(null=True)), + ("student_updated_at", DateTimeWithoutTZField(null=True)), + ( + "bot", + models.ForeignKey( + db_column="bot", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="codecov_auth.owner", + ), + ), + ], + options={"db_table": "owners", "ordering": ["ownerid"]}, + ), + migrations.CreateModel( + name="Session", + fields=[ + ("sessionid", models.AutoField(primary_key=True, serialize=False)), + ( + "token", + 
models.UUIDField(default=uuid.uuid4, editable=False, unique=True), + ), + ("name", models.TextField(null=True)), + ("useragent", models.TextField(null=True)), + ("ip", models.TextField(null=True)), + ("lastseen", models.DateTimeField(null=True)), + ( + "type", + models.TextField(choices=[("api", "Api"), ("login", "Login")]), + ), + ( + "owner", + models.ForeignKey( + db_column="ownerid", + on_delete=django.db.models.deletion.CASCADE, + to="codecov_auth.owner", + ), + ), + ], + options={"db_table": "sessions", "ordering": ["-lastseen"]}, + ), + migrations.AddConstraint( + model_name="owner", + constraint=models.UniqueConstraint( + fields=("service", "username"), name="owner_service_username" + ), + ), + migrations.AddConstraint( + model_name="owner", + constraint=models.UniqueConstraint( + fields=("service", "service_id"), name="owner_service_ids" + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0002_auto_20210817_1346.py b/shared/django_apps/codecov_auth/migrations/0002_auto_20210817_1346.py new file mode 100644 index 000000000..e8b1a1cc4 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0002_auto_20210817_1346.py @@ -0,0 +1,44 @@ +# Generated by Django 3.1.6 on 2021-08-17 13:46 + +import uuid + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0004_pull_user_provided_base_sha"), + ("codecov_auth", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="owner", + name="plan_provider", + field=models.TextField(choices=[("github", "Github")], null=True), + ), + migrations.CreateModel( + name="RepositoryToken", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("token_type", models.CharField(max_length=50)), + ("valid_until", models.DateTimeField(null=True)), + ("key", models.CharField(max_length=40, unique=True)), + ( + "repository", + models.ForeignKey( + db_column="repoid", + on_delete=django.db.models.deletion.CASCADE, + related_name="tokens", + to="core.repository", + ), + ), + ], + options={"abstract": False}, + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0003_auto_20210924_1003.py b/shared/django_apps/codecov_auth/migrations/0003_auto_20210924_1003.py new file mode 100644 index 000000000..77cae2347 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0003_auto_20210924_1003.py @@ -0,0 +1,72 @@ +# Generated by Django 3.1.13 on 2021-09-24 10:03 + +import uuid + +import django.contrib.postgres.fields +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("codecov_auth", "0002_auto_20210817_1346")] + + operations = [ + migrations.AddField( + model_name="owner", name="business_email", field=models.TextField(null=True) + ), + migrations.AddField( + model_name="owner", + name="onboarding_completed", + field=models.BooleanField(default=False), + ), + migrations.CreateModel( + name="OwnerProfile", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "type_projects", 
+ django.contrib.postgres.fields.ArrayField( + base_field=models.TextField( + choices=[ + ("PERSONAL", "Personal"), + ("YOUR_ORG", "Your Org"), + ("OPEN_SOURCE", "Open Source"), + ("EDUCATIONAL", "Educational"), + ] + ), + default=list, + size=None, + ), + ), + ( + "goals", + django.contrib.postgres.fields.ArrayField( + base_field=models.TextField( + choices=[ + ("STARTING_WITH_TESTS", "Starting With Tests"), + ("IMPROVE_COVERAGE", "Improve Coverage"), + ("MAINTAIN_COVERAGE", "Maintain Coverage"), + ("OTHER", "Other"), + ] + ), + default=list, + size=None, + ), + ), + ("other_goal", models.TextField(null=True)), + ( + "owner", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + to="codecov_auth.owner", + ), + ), + ], + options={"abstract": False}, + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0004_auto_20210930_1429.py b/shared/django_apps/codecov_auth/migrations/0004_auto_20210930_1429.py new file mode 100644 index 000000000..b0fa19ed3 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0004_auto_20210930_1429.py @@ -0,0 +1,29 @@ +# Generated by Django 3.1.13 on 2021-09-30 14:29 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("codecov_auth", "0003_auto_20210924_1003")] + + operations = [ + migrations.AlterField( + model_name="ownerprofile", + name="goals", + field=django.contrib.postgres.fields.ArrayField( + base_field=models.TextField( + choices=[ + ("STARTING_WITH_TESTS", "Starting With Tests"), + ("IMPROVE_COVERAGE", "Improve Coverage"), + ("MAINTAIN_COVERAGE", "Maintain Coverage"), + ("TEAM_REQUIREMENTS", "Team Requirements"), + ("OTHER", "Other"), + ] + ), + default=list, + size=None, + ), + ) + ] diff --git a/shared/django_apps/codecov_auth/migrations/0005_auto_20211029_1709.py b/shared/django_apps/codecov_auth/migrations/0005_auto_20211029_1709.py new file mode 100644 index 000000000..3e496e9f9 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0005_auto_20211029_1709.py @@ -0,0 +1,13 @@ +# Generated by Django 3.1.13 on 2021-10-29 17:09 + +from django.db import migrations + + +class Migration(migrations.Migration): + atomic = False + + dependencies = [("codecov_auth", "0004_auto_20210930_1429")] + + operations = [ + migrations.RunSQL("ALTER TYPE plans ADD VALUE IF NOT EXISTS 'users-basic';") + ] diff --git a/shared/django_apps/codecov_auth/migrations/0006_auto_20211123_1535.py b/shared/django_apps/codecov_auth/migrations/0006_auto_20211123_1535.py new file mode 100644 index 000000000..28184806f --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0006_auto_20211123_1535.py @@ -0,0 +1,44 @@ +# Generated by Django 3.1.13 on 2021-11-23 15:35 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + +from shared.django_apps.codecov_auth.models import _generate_key + + +class Migration(migrations.Migration): + + dependencies = [("codecov_auth", "0005_auto_20211029_1709")] + + operations = [ + migrations.AlterField( + model_name="owner", + name="plan", + field=models.TextField(default="users-basic", null=True), + ), + migrations.AlterField( + model_name="ownerprofile", + name="owner", + field=models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="profile", + to="codecov_auth.owner", + ), + ), + migrations.AlterField( + model_name="repositorytoken", + name="key", + field=models.CharField( + default=_generate_key, + editable=False, + 
max_length=40, + unique=True, + ), + ), + migrations.AlterField( + model_name="repositorytoken", + name="valid_until", + field=models.DateTimeField(blank=True, null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0007_auto_20211129_1228.py b/shared/django_apps/codecov_auth/migrations/0007_auto_20211129_1228.py new file mode 100644 index 000000000..051d0a9fe --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0007_auto_20211129_1228.py @@ -0,0 +1,14 @@ +# Generated by Django 3.1.13 on 2021-11-29 12:28 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [("codecov_auth", "0006_auto_20211123_1535")] + + operations = [ + migrations.RunSQL( + "ALTER TABLE owners ALTER COLUMN plan SET DEFAULT 'users-basic';" + ) + ] diff --git a/shared/django_apps/codecov_auth/migrations/0008_auto_20220119_1811.py b/shared/django_apps/codecov_auth/migrations/0008_auto_20220119_1811.py new file mode 100644 index 000000000..c72cbbba4 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0008_auto_20220119_1811.py @@ -0,0 +1,14 @@ +# Generated by Django 3.1.13 on 2022-01-19 18:11 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [("codecov_auth", "0007_auto_20211129_1228")] + + operations = [ + migrations.RunSQL( + "ALTER TABLE owners ALTER COLUMN onboarding_completed SET DEFAULT FALSE;" + ) + ] diff --git a/shared/django_apps/codecov_auth/migrations/0009_auto_20220511_1313.py b/shared/django_apps/codecov_auth/migrations/0009_auto_20220511_1313.py new file mode 100644 index 000000000..000316a5a --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0009_auto_20220511_1313.py @@ -0,0 +1,78 @@ +# Generated by Django 3.1.13 on 2022-05-11 13:13 + +import django.contrib.postgres.fields +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0008_auto_20220119_1811"), + ] + + operations = [ + migrations.RunSQL( + """-- + -- Alter field bot on Owner + -- + COMMIT; + """, + state_operations=[ + migrations.AlterField( + model_name="owner", + name="bot", + field=models.ForeignKey( + blank=True, + db_column="bot", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="codecov_auth.owner", + ), + ), + ], + ), + migrations.AlterField( + model_name="owner", + name="integration_id", + field=models.IntegerField(blank=True, null=True), + ), + migrations.AlterField( + model_name="owner", + name="plan", + field=models.TextField(blank=True, default="users-basic", null=True), + ), + migrations.AlterField( + model_name="owner", + name="plan_activated_users", + field=django.contrib.postgres.fields.ArrayField( + base_field=models.IntegerField(null=True), + blank=True, + null=True, + size=None, + ), + ), + migrations.AlterField( + model_name="owner", + name="plan_provider", + field=models.TextField( + blank=True, choices=[("github", "Github")], null=True + ), + ), + migrations.AlterField( + model_name="owner", + name="plan_user_count", + field=models.SmallIntegerField(blank=True, default=5, null=True), + ), + migrations.AlterField( + model_name="owner", + name="stripe_customer_id", + field=models.TextField(blank=True, null=True), + ), + migrations.AlterField( + model_name="owner", + name="stripe_subscription_id", + field=models.TextField(blank=True, null=True), + ), + ] diff --git 
a/shared/django_apps/codecov_auth/migrations/0010_owner_is_superuser.py b/shared/django_apps/codecov_auth/migrations/0010_owner_is_superuser.py new file mode 100644 index 000000000..4bb5c9d70 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0010_owner_is_superuser.py @@ -0,0 +1,18 @@ +# Generated by Django 3.1.13 on 2022-05-24 16:35 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0009_auto_20220511_1313"), + ] + + operations = [ + migrations.AddField( + model_name="owner", + name="is_superuser", + field=models.BooleanField(null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0011_new_enterprise_plans.py b/shared/django_apps/codecov_auth/migrations/0011_new_enterprise_plans.py new file mode 100644 index 000000000..af6836d47 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0011_new_enterprise_plans.py @@ -0,0 +1,20 @@ +# Generated by Django 3.2.12 on 2022-05-09 14:00 + +from django.db import migrations + + +class Migration(migrations.Migration): + atomic = False + + dependencies = [ + ("codecov_auth", "0010_owner_is_superuser"), + ] + + operations = [ + migrations.RunSQL( + "ALTER TYPE plans ADD VALUE IF NOT EXISTS 'users-enterprisey';" + ), + migrations.RunSQL( + "ALTER TYPE plans ADD VALUE IF NOT EXISTS 'users-enterprisem';" + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0012_auto_20220531_1452.py b/shared/django_apps/codecov_auth/migrations/0012_auto_20220531_1452.py new file mode 100644 index 000000000..56d9312fd --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0012_auto_20220531_1452.py @@ -0,0 +1,18 @@ +# Generated by Django 3.1.13 on 2022-05-31 14:52 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0011_new_enterprise_plans"), + ] + + operations = [ + migrations.AlterField( + model_name="owner", + name="is_superuser", + field=models.BooleanField(default=False, null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0013_alter_owner_organizations.py b/shared/django_apps/codecov_auth/migrations/0013_alter_owner_organizations.py new file mode 100644 index 000000000..2c8124cd7 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0013_alter_owner_organizations.py @@ -0,0 +1,24 @@ +# Generated by Django 3.2.12 on 2022-06-22 12:05 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0012_auto_20220531_1452"), + ] + + operations = [ + migrations.AlterField( + model_name="owner", + name="organizations", + field=django.contrib.postgres.fields.ArrayField( + base_field=models.IntegerField(null=True), + blank=True, + null=True, + size=None, + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0014_alter_repositorytoken_token_type.py b/shared/django_apps/codecov_auth/migrations/0014_alter_repositorytoken_token_type.py new file mode 100644 index 000000000..28d9cd07e --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0014_alter_repositorytoken_token_type.py @@ -0,0 +1,21 @@ +# Generated by Django 3.2.12 on 2022-08-16 17:37 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0013_alter_owner_organizations"), + ] + + operations = [ + migrations.AlterField( + model_name="repositorytoken", + name="token_type", + 
field=models.CharField( + choices=[("upload", "Upload"), ("profiling", "Profiling")], + max_length=50, + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0015_organizationleveltoken.py b/shared/django_apps/codecov_auth/migrations/0015_organizationleveltoken.py new file mode 100644 index 000000000..a61b2ecad --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0015_organizationleveltoken.py @@ -0,0 +1,44 @@ +# Generated by Django 3.2.12 on 2022-08-17 18:35 + +import uuid + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0014_alter_repositorytoken_token_type"), + ] + + operations = [ + migrations.CreateModel( + name="OrganizationLevelToken", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("token", models.UUIDField(default=uuid.uuid4, unique=True)), + ("valid_until", models.DateTimeField(blank=True, null=True)), + ( + "token_type", + models.CharField(choices=[("upload", "Upload")], max_length=50), + ), + ( + "owner", + models.ForeignKey( + db_column="ownerid", + on_delete=django.db.models.deletion.CASCADE, + related_name="organization_tokens", + to="codecov_auth.owner", + ), + ), + ], + options={ + "abstract": False, + }, + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0016_alter_owner_admins.py b/shared/django_apps/codecov_auth/migrations/0016_alter_owner_admins.py new file mode 100644 index 000000000..6c2866d84 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0016_alter_owner_admins.py @@ -0,0 +1,24 @@ +# Generated by Django 3.2.12 on 2022-08-22 09:43 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0015_organizationleveltoken"), + ] + + operations = [ + migrations.AlterField( + model_name="owner", + name="admins", + field=django.contrib.postgres.fields.ArrayField( + base_field=models.IntegerField(null=True), + blank=True, + null=True, + size=None, + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0017_alter_organizationleveltoken_token_type.py b/shared/django_apps/codecov_auth/migrations/0017_alter_organizationleveltoken_token_type.py new file mode 100644 index 000000000..98cc8e4c3 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0017_alter_organizationleveltoken_token_type.py @@ -0,0 +1,20 @@ +# Generated by Django 3.2.12 on 2022-08-19 14:30 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0016_alter_owner_admins"), + ] + + operations = [ + migrations.AlterField( + model_name="organizationleveltoken", + name="token_type", + field=models.CharField( + choices=[("upload", "Upload")], default="upload", max_length=50 + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0018_usertoken.py b/shared/django_apps/codecov_auth/migrations/0018_usertoken.py new file mode 100644 index 000000000..eb6a704e8 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0018_usertoken.py @@ -0,0 +1,47 @@ +# Generated by Django 3.2.12 on 2022-09-07 17:38 + +import uuid + +import django.db.models.deletion +from django.conf import settings +from 
django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0017_alter_organizationleveltoken_token_type"), + ] + + operations = [ + migrations.CreateModel( + name="UserToken", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("name", models.CharField(max_length=100)), + ("token", models.UUIDField(default=uuid.uuid4, unique=True)), + ("valid_until", models.DateTimeField(blank=True, null=True)), + ( + "token_type", + models.CharField( + choices=[("api", "Api")], default="api", max_length=50 + ), + ), + ( + "owner", + models.ForeignKey( + db_column="ownerid", + on_delete=django.db.models.deletion.CASCADE, + related_name="user_tokens", + to="codecov_auth.owner", + ), + ), + ], + options={ + "abstract": False, + }, + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0019_alter_repositorytoken_token_type.py b/shared/django_apps/codecov_auth/migrations/0019_alter_repositorytoken_token_type.py new file mode 100644 index 000000000..2f16796b0 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0019_alter_repositorytoken_token_type.py @@ -0,0 +1,25 @@ +# Generated by Django 3.2.12 on 2022-12-06 04:19 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0018_usertoken"), + ] + + operations = [ + migrations.AlterField( + model_name="repositorytoken", + name="token_type", + field=models.CharField( + choices=[ + ("upload", "Upload"), + ("profiling", "Profiling"), + ("static_analysis", "Static Analysis"), + ], + max_length=50, + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0020_ownerprofile_default_org.py b/shared/django_apps/codecov_auth/migrations/0020_ownerprofile_default_org.py new file mode 100644 index 000000000..d517a42f0 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0020_ownerprofile_default_org.py @@ -0,0 +1,32 @@ +# Generated by Django 3.2.12 on 2023-01-19 19:06 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + # BEGIN; + # -- + # -- Add field default_org to ownerprofile + # -- + # ALTER TABLE "codecov_auth_ownerprofile" ADD COLUMN "default_org_id" integer NULL CONSTRAINT "codecov_auth_ownerpr_default_org_id_da545ea8_fk_owners_ow" REFERENCES "owners"("ownerid") DEFERRABLE INITIALLY DEFERRED; SET CONSTRAINTS "codecov_auth_ownerpr_default_org_id_da545ea8_fk_owners_ow" IMMEDIATE; + # CREATE INDEX "codecov_auth_ownerprofile_default_org_id_da545ea8" ON "codecov_auth_ownerprofile" ("default_org_id"); + # COMMIT; + + dependencies = [ + ("codecov_auth", "0019_alter_repositorytoken_token_type"), + ] + + operations = [ + migrations.AddField( + model_name="ownerprofile", + name="default_org", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="profiles_with_default", + to="codecov_auth.owner", + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0021_owner_max_upload_limit.py b/shared/django_apps/codecov_auth/migrations/0021_owner_max_upload_limit.py new file mode 100644 index 000000000..1380b6580 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0021_owner_max_upload_limit.py @@ -0,0 +1,18 @@ +# Generated by 
Django 3.2.12 on 2023-02-13 19:56 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0020_ownerprofile_default_org"), + ] + + operations = [ + migrations.AddField( + model_name="owner", + name="max_upload_limit", + field=models.IntegerField(null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0022_alter_owner_max_upload_limit.py b/shared/django_apps/codecov_auth/migrations/0022_alter_owner_max_upload_limit.py new file mode 100644 index 000000000..0e13408b6 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0022_alter_owner_max_upload_limit.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.12 on 2023-02-13 20:12 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0021_owner_max_upload_limit"), + ] + + operations = [ + migrations.AlterField( + model_name="owner", + name="max_upload_limit", + field=models.IntegerField(default=150, null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0023_auto_20230214_1129.py b/shared/django_apps/codecov_auth/migrations/0023_auto_20230214_1129.py new file mode 100644 index 000000000..61f1c1aa3 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0023_auto_20230214_1129.py @@ -0,0 +1,20 @@ +# Generated by Django 3.2.12 on 2023-02-14 11:29 + +from django.db import migrations +from shared.django_apps.migration_utils import RiskyRunSQL + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0022_alter_owner_max_upload_limit"), + ] + + operations = [ + migrations.RunSQL( + "ALTER TABLE owners ALTER COLUMN max_upload_limit SET DEFAULT 150;" + ), + RiskyRunSQL( + "UPDATE owners SET max_upload_limit=150 WHERE max_upload_limit is null;" + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0024_alter_owner_max_upload_limit.py b/shared/django_apps/codecov_auth/migrations/0024_alter_owner_max_upload_limit.py new file mode 100644 index 000000000..d34381002 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0024_alter_owner_max_upload_limit.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.12 on 2023-02-23 11:44 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0023_auto_20230214_1129"), + ] + + operations = [ + migrations.AlterField( + model_name="owner", + name="max_upload_limit", + field=models.IntegerField(blank=True, default=150, null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0025_owner_stripe_coupon_id.py b/shared/django_apps/codecov_auth/migrations/0025_owner_stripe_coupon_id.py new file mode 100644 index 000000000..c60a2606f --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0025_owner_stripe_coupon_id.py @@ -0,0 +1,26 @@ +# Generated by Django 3.2.12 on 2023-02-22 19:50 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Add field stripe_coupon_id to owner + -- + ALTER TABLE "owners" ADD COLUMN "stripe_coupon_id" text NULL; + COMMIT; + """ + + dependencies = [ + ("codecov_auth", "0024_alter_owner_max_upload_limit"), + ] + + operations = [ + migrations.AddField( + model_name="owner", + name="stripe_coupon_id", + field=models.TextField(blank=True, null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0026_alter_owner_plan_user_count.py 
b/shared/django_apps/codecov_auth/migrations/0026_alter_owner_plan_user_count.py new file mode 100644 index 000000000..906cb58e3 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0026_alter_owner_plan_user_count.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.7 on 2023-03-09 20:55 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0025_owner_stripe_coupon_id"), + ] + + operations = [ + migrations.AlterField( + model_name="owner", + name="plan_user_count", + field=models.SmallIntegerField(blank=True, default=1, null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0027_auto_20230307_1751.py b/shared/django_apps/codecov_auth/migrations/0027_auto_20230307_1751.py new file mode 100644 index 000000000..87342b76a --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0027_auto_20230307_1751.py @@ -0,0 +1,15 @@ +# Generated by Django 4.1.7 on 2023-03-07 17:51 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0026_alter_owner_plan_user_count"), + ] + + operations = [ + migrations.RunSQL("ALTER TYPE plans ADD VALUE IF NOT EXISTS 'users-sentrym';"), + migrations.RunSQL("ALTER TYPE plans ADD VALUE IF NOT EXISTS 'users-sentryy';"), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0028_owner_sentry_user_data_owner_sentry_user_id.py b/shared/django_apps/codecov_auth/migrations/0028_owner_sentry_user_data_owner_sentry_user_id.py new file mode 100644 index 000000000..5f1ad6386 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0028_owner_sentry_user_data_owner_sentry_user_id.py @@ -0,0 +1,23 @@ +# Generated by Django 4.1.7 on 2023-03-07 22:04 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0027_auto_20230307_1751"), + ] + + operations = [ + migrations.AddField( + model_name="owner", + name="sentry_user_data", + field=models.JSONField(null=True), + ), + migrations.AddField( + model_name="owner", + name="sentry_user_id", + field=models.TextField(blank=True, null=True, unique=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0029_ownerprofile_terms_agreement_and_more.py b/shared/django_apps/codecov_auth/migrations/0029_ownerprofile_terms_agreement_and_more.py new file mode 100644 index 000000000..17bd684a2 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0029_ownerprofile_terms_agreement_and_more.py @@ -0,0 +1,25 @@ +# Generated by Django 4.1.7 on 2023-03-17 22:01 + +from django.db import migrations, models + +from shared.django_apps.core.models import DateTimeWithoutTZField + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0028_owner_sentry_user_data_owner_sentry_user_id"), + ] + + operations = [ + migrations.AddField( + model_name="ownerprofile", + name="terms_agreement", + field=models.BooleanField(null=True), + ), + migrations.AddField( + model_name="ownerprofile", + name="terms_agreement_at", + field=DateTimeWithoutTZField(null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0030_owner_trial_end_date_owner_trial_start_date.py b/shared/django_apps/codecov_auth/migrations/0030_owner_trial_end_date_owner_trial_start_date.py new file mode 100644 index 000000000..0bd42e89d --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0030_owner_trial_end_date_owner_trial_start_date.py @@ -0,0 +1,25 @@ +# Generated by Django 
4.1.7 on 2023-06-20 17:14 + +from django.db import migrations + +from shared.django_apps.core.models import DateTimeWithoutTZField + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0029_ownerprofile_terms_agreement_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="owner", + name="trial_end_date", + field=DateTimeWithoutTZField(null=True), + ), + migrations.AddField( + model_name="owner", + name="trial_start_date", + field=DateTimeWithoutTZField(null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0031_user_owner_user.py b/shared/django_apps/codecov_auth/migrations/0031_user_owner_user.py new file mode 100644 index 000000000..c7b5f2929 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0031_user_owner_user.py @@ -0,0 +1,69 @@ +# Generated by Django 4.1.7 on 2023-05-22 17:53 + +import uuid + +import django.contrib.postgres.fields.citext +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0030_owner_trial_end_date_owner_trial_start_date"), + ] + + operations = [ + # NOTE: this migration had to be moved to the `0001_initial` migration + # since there are internal Django migrations that need to refer to this + # model via AUTH_USER_MODEL. + # It is not actually applied there since our `legacy_migrations` override + # the `migrate` command and mark 0001_initial migrations as fake. The + # raw SQL to create this table is in `legacy_migrations` and needs to be applied + # manually before running this migration. We have a raw SQL migration below to + # create the table if it does not already exist. + # + # migrations.CreateModel( + # name='User', + # fields=[ + # ('id', models.BigAutoField(primary_key=True, serialize=False)), + # ('created_at', models.DateTimeField(auto_now_add=True)), + # ('updated_at', models.DateTimeField(auto_now=True)), + # ('email', django.contrib.postgres.fields.citext.CITextField(null=True)), + # ('name', models.TextField(null=True)), + # ('is_staff', models.BooleanField(default=False, null=True)), + # ('is_superuser', models.BooleanField(default=False, null=True)), + # ('external_id', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)), + # ], + # options={ + # 'db_table': 'users', + # }, + # ), + migrations.RunSQL( + """ + CREATE TABLE IF NOT EXISTS "users" ( + "id" bigint NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + "external_id" uuid NOT NULL UNIQUE, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "email" citext NULL, + "name" text NULL, + "is_staff" boolean NULL, + "is_superuser" boolean NULL + ); + """, + reverse_sql="DROP TABLE users", + ), + migrations.AddField( + model_name="owner", + name="user", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="owners", + to=settings.AUTH_USER_MODEL, + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0032_owner_trial_status.py b/shared/django_apps/codecov_auth/migrations/0032_owner_trial_status.py new file mode 100644 index 000000000..178093091 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0032_owner_trial_status.py @@ -0,0 +1,28 @@ +# Generated by Django 4.1.7 on 2023-07-20 00:28 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", 
"0031_user_owner_user"), + ] + + operations = [ + migrations.RunSQL("ALTER TYPE plans ADD VALUE IF NOT EXISTS 'users-trial';"), + migrations.AddField( + model_name="owner", + name="trial_status", + field=models.CharField( + choices=[ + ("not_started", "Not Started"), + ("ongoing", "Ongoing"), + ("expired", "Expired"), + ("cannot_trial", "Cannot Trial"), + ], + max_length=50, + null=True, + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0033_sentryuser.py b/shared/django_apps/codecov_auth/migrations/0033_sentryuser.py new file mode 100644 index 000000000..b755baf2b --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0033_sentryuser.py @@ -0,0 +1,42 @@ +# Generated by Django 4.2.2 on 2023-07-06 16:05 + +import uuid + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0032_owner_trial_status"), + ] + + operations = [ + migrations.CreateModel( + name="SentryUser", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("access_token", models.TextField(null=True)), + ("refresh_token", models.TextField(null=True)), + ("sentry_id", models.TextField(unique=True)), + ("email", models.TextField(null=True)), + ("name", models.TextField(null=True)), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="sentry_user", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "abstract": False, + }, + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0034_alter_owner_trial_status.py b/shared/django_apps/codecov_auth/migrations/0034_alter_owner_trial_status.py new file mode 100644 index 000000000..28d842f7a --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0034_alter_owner_trial_status.py @@ -0,0 +1,40 @@ +# Generated by Django 4.1.7 on 2023-07-27 00:38 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyRunSQL + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Alter field trial_status on owner + -- + -- (no-op) + COMMIT; + """ + + dependencies = [ + ("codecov_auth", "0033_sentryuser"), + ] + + operations = [ + migrations.AlterField( + model_name="owner", + name="trial_status", + field=models.CharField( + choices=[ + ("not_started", "Not Started"), + ("ongoing", "Ongoing"), + ("expired", "Expired"), + ("cannot_trial", "Cannot Trial"), + ], + default="not_started", + max_length=50, + null=True, + ), + ), + RiskyRunSQL( + "alter table owners alter column trial_status set default 'not_started';" + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0035_owner_pretrial_users_count.py b/shared/django_apps/codecov_auth/migrations/0035_owner_pretrial_users_count.py new file mode 100644 index 000000000..692e91390 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0035_owner_pretrial_users_count.py @@ -0,0 +1,27 @@ +# Generated by Django 4.1.7 on 2023-07-27 23:40 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddField + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Add field pretrial_users_count to owner + -- + ALTER TABLE "owners" ADD COLUMN "pretrial_users_count" smallint NULL; + COMMIT; + """ + + 
dependencies = [ + ("codecov_auth", "0034_alter_owner_trial_status"), + ] + + operations = [ + RiskyAddField( + model_name="owner", + name="pretrial_users_count", + field=models.SmallIntegerField(blank=True, null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0036_add_user_terms_agreement.py b/shared/django_apps/codecov_auth/migrations/0036_add_user_terms_agreement.py new file mode 100644 index 000000000..7bb058dc1 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0036_add_user_terms_agreement.py @@ -0,0 +1,37 @@ +# Generated by Django 4.2.2 on 2023-08-30 13:27 + +from django.db import migrations, models + +from shared.django_apps.core.models import DateTimeWithoutTZField + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Add field terms_agreement to user + -- + ALTER TABLE "users" ADD COLUMN "terms_agreement" boolean NULL; + -- + -- Add field terms_agreement_at to user + -- + ALTER TABLE "users" ADD COLUMN "terms_agreement_at" timestamp NULL; + COMMIT; + """ + + dependencies = [ + ("codecov_auth", "0035_owner_pretrial_users_count"), + ] + + operations = [ + migrations.AddField( + model_name="user", + name="terms_agreement", + field=models.BooleanField(null=True), + ), + migrations.AddField( + model_name="user", + name="terms_agreement_at", + field=DateTimeWithoutTZField(null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0037_owner_uses_invoice.py b/shared/django_apps/codecov_auth/migrations/0037_owner_uses_invoice.py new file mode 100644 index 000000000..068906645 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0037_owner_uses_invoice.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.2 on 2023-08-17 20:59 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddField + + +class Migration(migrations.Migration): + dependencies = [ + ("codecov_auth", "0036_add_user_terms_agreement"), + ] + + operations = [ + RiskyAddField( + model_name="owner", + name="uses_invoice", + field=models.BooleanField(null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0038_alter_owner_uses_invoice.py b/shared/django_apps/codecov_auth/migrations/0038_alter_owner_uses_invoice.py new file mode 100644 index 000000000..d9f32e107 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0038_alter_owner_uses_invoice.py @@ -0,0 +1,24 @@ +# Generated by Django 4.2.2 on 2023-08-28 18:27 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAlterField, RiskyRunSQL + + +class Migration(migrations.Migration): + dependencies = [ + ("codecov_auth", "0037_owner_uses_invoice"), + ] + + operations = [ + RiskyAlterField( + model_name="owner", + name="uses_invoice", + field=models.BooleanField(default=False, null=True), + ), + RiskyRunSQL( + """ + UPDATE "owners" SET "uses_invoice" = false WHERE "uses_invoice" IS NULL; + ALTER TABLE "owners" ALTER COLUMN "uses_invoice" SET DEFAULT false; + """ + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0039_alter_owner_uses_invoice.py b/shared/django_apps/codecov_auth/migrations/0039_alter_owner_uses_invoice.py new file mode 100644 index 000000000..484f577a7 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0039_alter_owner_uses_invoice.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.2 on 2023-08-28 17:42 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAlterField + + +class Migration(migrations.Migration): + 
dependencies = [ + ("codecov_auth", "0038_alter_owner_uses_invoice"), + ] + + operations = [ + RiskyAlterField( + model_name="owner", + name="uses_invoice", + field=models.BooleanField(default=False, null=False), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0040_oktauser.py b/shared/django_apps/codecov_auth/migrations/0040_oktauser.py new file mode 100644 index 000000000..a3120a3be --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0040_oktauser.py @@ -0,0 +1,41 @@ +# Generated by Django 4.2.2 on 2023-07-25 18:08 + +import uuid + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0039_alter_owner_uses_invoice"), + ] + + operations = [ + migrations.CreateModel( + name="OktaUser", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("access_token", models.TextField(null=True)), + ("okta_id", models.TextField(unique=True)), + ("email", models.TextField(null=True)), + ("name", models.TextField(null=True)), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="okta_user", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "abstract": False, + }, + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0041_auto_20230918_1825.py b/shared/django_apps/codecov_auth/migrations/0041_auto_20230918_1825.py new file mode 100644 index 000000000..cfd30b7ab --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0041_auto_20230918_1825.py @@ -0,0 +1,25 @@ +# Generated by Django 4.2.2 on 2023-09-18 18:25 + +from django.db import migrations + + +class Migration(migrations.Migration): + # BEGIN; + # -- + # -- Raw SQL operation + # -- + # ALTER TYPE plans ADD VALUE IF NOT EXISTS 'users-litem'; + # -- + # -- Raw SQL operation + # -- + # ALTER TYPE plans ADD VALUE IF NOT EXISTS 'users-litey'; + # COMMIT; + + dependencies = [ + ("codecov_auth", "0040_oktauser"), + ] + + operations = [ + migrations.RunSQL("ALTER TYPE plans ADD VALUE IF NOT EXISTS 'users-litem';"), + migrations.RunSQL("ALTER TYPE plans ADD VALUE IF NOT EXISTS 'users-litey';"), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0042_owner_trial_fired_by.py b/shared/django_apps/codecov_auth/migrations/0042_owner_trial_fired_by.py new file mode 100644 index 000000000..19e103371 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0042_owner_trial_fired_by.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.3 on 2023-09-19 09:28 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0041_auto_20230918_1825"), + ] + + operations = [ + migrations.AddField( + model_name="owner", + name="trial_fired_by", + field=models.IntegerField(null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0043_sync_user_terms_agreement.py b/shared/django_apps/codecov_auth/migrations/0043_sync_user_terms_agreement.py new file mode 100644 index 000000000..ec46c5239 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0043_sync_user_terms_agreement.py @@ -0,0 +1,34 @@ +# Generated by Django 4.2.2 on 2023-09-18 14:51 + + +from django.db import migrations +from shared.django_apps.migration_utils import RiskyRunSQL 
+ + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0042_owner_trial_fired_by"), + ] + + operations = [ + RiskyRunSQL( + sql=""" + UPDATE users + SET + terms_agreement = subquery.terms_agreement, + terms_agreement_at = subquery.terms_agreement_at + FROM ( + SELECT + owners.user_id, + codecov_auth_ownerprofile.terms_agreement, + codecov_auth_ownerprofile.terms_agreement_at + FROM owners + INNER JOIN codecov_auth_ownerprofile + ON codecov_auth_ownerprofile.owner_id = owners.ownerid + ) subquery + WHERE subquery.user_id = users.id; + """, + reverse_sql=migrations.RunSQL.noop, + ) + ] diff --git a/shared/django_apps/codecov_auth/migrations/0044_remove_owner_agreements_and_alter_user_agreements.py b/shared/django_apps/codecov_auth/migrations/0044_remove_owner_agreements_and_alter_user_agreements.py new file mode 100644 index 000000000..dfd68ff00 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0044_remove_owner_agreements_and_alter_user_agreements.py @@ -0,0 +1,25 @@ +# Generated by Django 4.2.3 on 2023-09-19 20:52 + +from django.db import migrations, models + +from shared.django_apps.core.models import DateTimeWithoutTZField + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0043_sync_user_terms_agreement"), + ] + + operations = [ + migrations.AlterField( + model_name="user", + name="terms_agreement", + field=models.BooleanField(blank=True, default=False, null=True), + ), + migrations.AlterField( + model_name="user", + name="terms_agreement_at", + field=DateTimeWithoutTZField(blank=True, null=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0045_remove_ownerprofile_terms_agreement.py b/shared/django_apps/codecov_auth/migrations/0045_remove_ownerprofile_terms_agreement.py new file mode 100644 index 000000000..37b8b231d --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0045_remove_ownerprofile_terms_agreement.py @@ -0,0 +1,34 @@ +# Generated by Django 4.2.3 on 2023-09-21 14:24 + +from django.db import migrations +from shared.django_apps.migration_utils import RiskyRemoveField + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Remove field terms_agreement from ownerprofile + -- + ALTER TABLE "codecov_auth_ownerprofile" DROP COLUMN "terms_agreement" CASCADE; + -- + -- Remove field terms_agreement_at from ownerprofile + -- + ALTER TABLE "codecov_auth_ownerprofile" DROP COLUMN "terms_agreement_at" CASCADE; + COMMIT; + """ + + dependencies = [ + ("codecov_auth", "0044_remove_owner_agreements_and_alter_user_agreements"), + ] + + operations = [ + RiskyRemoveField( + model_name="ownerprofile", + name="terms_agreement", + ), + RiskyRemoveField( + model_name="ownerprofile", + name="terms_agreement_at", + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0046_dedupe_owner_admin_values.py b/shared/django_apps/codecov_auth/migrations/0046_dedupe_owner_admin_values.py new file mode 100644 index 000000000..a67214baf --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0046_dedupe_owner_admin_values.py @@ -0,0 +1,27 @@ +# Generated by Django 4.2.3 on 2023-09-19 19:48 + + +from django.db import migrations +from shared.django_apps.migration_utils import RiskyRunSQL + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0045_remove_ownerprofile_terms_agreement"), + ] + + operations = [ + RiskyRunSQL( + sql=""" + UPDATE owners + SET admins = ARRAY ( + SELECT v + FROM unnest(admins) WITH ORDINALITY t(v,ord) + GROUP BY 1 + ORDER BY 
min(ord) + ); + """, + reverse_sql=migrations.RunSQL.noop, + ) + ] diff --git a/shared/django_apps/codecov_auth/migrations/0047_auto_20231009_1257.py b/shared/django_apps/codecov_auth/migrations/0047_auto_20231009_1257.py new file mode 100644 index 000000000..6c9058488 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0047_auto_20231009_1257.py @@ -0,0 +1,21 @@ +# Generated by Django 4.2.3 on 2023-10-09 12:57 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0046_dedupe_owner_admin_values"), + ] + + operations = [ + migrations.RunSQL( + "ALTER TYPE plans ADD VALUE IF NOT EXISTS 'users-teamm';", + reverse_sql=migrations.RunSQL.noop, + ), + migrations.RunSQL( + "ALTER TYPE plans ADD VALUE IF NOT EXISTS 'users-teamy';", + reverse_sql=migrations.RunSQL.noop, + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0048_githubappinstallation.py b/shared/django_apps/codecov_auth/migrations/0048_githubappinstallation.py new file mode 100644 index 000000000..13ee79d5e --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0048_githubappinstallation.py @@ -0,0 +1,61 @@ +# Generated by Django 4.2.7 on 2024-01-17 13:37 + +import uuid + +import django.contrib.postgres.fields +import django.db.models.deletion +import django_prometheus.models +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0047_auto_20231009_1257"), + ] + + # BEGIN; + # -- + # -- Create model GithubAppInstallation + # -- + # CREATE TABLE "codecov_auth_githubappinstallation" ("id" bigint NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, "external_id" uuid NOT NULL, "created_at" timestamp with time zone NOT NULL, "updated_at" timestamp with time zone NOT NULL, "installation_id" integer NOT NULL, "name" text NOT NULL, "repository_service_ids" text[] NULL, "owner_id" integer NOT NULL); + # ALTER TABLE "codecov_auth_githubappinstallation" ADD CONSTRAINT "codecov_auth_githuba_owner_id_82ba29b1_fk_owners_ow" FOREIGN KEY ("owner_id") REFERENCES "owners" ("ownerid") DEFERRABLE INITIALLY DEFERRED; + # CREATE INDEX "codecov_auth_githubappinstallation_owner_id_82ba29b1" ON "codecov_auth_githubappinstallation" ("owner_id"); + # COMMIT; + + operations = [ + migrations.CreateModel( + name="GithubAppInstallation", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("installation_id", models.IntegerField()), + ("name", models.TextField(default="codecov_app_installation")), + ( + "repository_service_ids", + django.contrib.postgres.fields.ArrayField( + base_field=models.TextField(), null=True, size=None + ), + ), + ( + "owner", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="github_app_installations", + to="codecov_auth.owner", + ), + ), + ], + options={ + "abstract": False, + }, + bases=( + django_prometheus.models.ExportModelOperationsMixin( + "codecov_auth.github_app_installation" + ), + models.Model, + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0049_ownerprofile_customer_intent.py b/shared/django_apps/codecov_auth/migrations/0049_ownerprofile_customer_intent.py new file mode 100644 index 000000000..6069105f4 --- /dev/null +++ 
b/shared/django_apps/codecov_auth/migrations/0049_ownerprofile_customer_intent.py @@ -0,0 +1,25 @@ +# Generated by Django 4.2.7 on 2024-02-09 19:24 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("codecov_auth", "0048_githubappinstallation"), + ] + + # BEGIN; + # -- + # -- Add field customer_intent to ownerprofile + # -- + # ALTER TABLE "codecov_auth_ownerprofile" ADD COLUMN "customer_intent" text NULL; + # COMMIT; + operations = [ + migrations.AddField( + model_name="ownerprofile", + name="customer_intent", + field=models.TextField( + choices=[("BUSINESS", "Business"), ("PERSONAL", "Personal")], null=True + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0050_remove_ownerprofile_customer_intent.py b/shared/django_apps/codecov_auth/migrations/0050_remove_ownerprofile_customer_intent.py new file mode 100644 index 000000000..cbd2e5b80 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0050_remove_ownerprofile_customer_intent.py @@ -0,0 +1,22 @@ +# Generated by Django 4.2.7 on 2024-02-13 21:48 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("codecov_auth", "0049_ownerprofile_customer_intent"), + ] + + # BEGIN; + # -- + # -- Remove field customer_intent from ownerprofile + # -- + # ALTER TABLE "codecov_auth_ownerprofile" DROP COLUMN "customer_intent" CASCADE; + # COMMIT; + operations = [ + migrations.RemoveField( + model_name="ownerprofile", + name="customer_intent", + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0051_user_customer_intent.py b/shared/django_apps/codecov_auth/migrations/0051_user_customer_intent.py new file mode 100644 index 000000000..9ee1b4288 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0051_user_customer_intent.py @@ -0,0 +1,25 @@ +# Generated by Django 4.2.7 on 2024-02-14 14:52 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("codecov_auth", "0050_remove_ownerprofile_customer_intent"), + ] + + # BEGIN; + # -- + # -- Add field customer_intent to user + # -- + # ALTER TABLE "users" ADD COLUMN "customer_intent" text NULL; + # COMMIT; + operations = [ + migrations.AddField( + model_name="user", + name="customer_intent", + field=models.TextField( + choices=[("BUSINESS", "Business"), ("PERSONAL", "Personal")], null=True + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/migrations/0052_githubappinstallation_app_id_and_more.py b/shared/django_apps/codecov_auth/migrations/0052_githubappinstallation_app_id_and_more.py new file mode 100644 index 000000000..a292605a7 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0052_githubappinstallation_app_id_and_more.py @@ -0,0 +1,34 @@ +# Generated by Django 4.2.7 on 2024-02-19 14:56 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + # BEGIN; + # -- + # -- Add field app_id to githubappinstallation + # -- + # ALTER TABLE "codecov_auth_githubappinstallation" ADD COLUMN "app_id" integer NULL; + # -- + # -- Add field pem_path to githubappinstallation + # -- + # ALTER TABLE "codecov_auth_githubappinstallation" ADD COLUMN "pem_path" text NULL; + # COMMIT; + + dependencies = [ + ("codecov_auth", "0051_user_customer_intent"), + ] + + operations = [ + migrations.AddField( + model_name="githubappinstallation", + name="app_id", + field=models.IntegerField(null=True), + ), + migrations.AddField( + model_name="githubappinstallation", + name="pem_path", 
+            field=models.TextField(null=True),
+        ),
+    ]
diff --git a/shared/django_apps/codecov_auth/migrations/__init__.py b/shared/django_apps/codecov_auth/migrations/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/shared/django_apps/codecov_auth/models.py b/shared/django_apps/codecov_auth/models.py
new file mode 100644
index 000000000..c8a7684e9
--- /dev/null
+++ b/shared/django_apps/codecov_auth/models.py
@@ -0,0 +1,668 @@
+import binascii
+import logging
+import os
+import uuid
+from datetime import datetime
+
+from django.contrib.postgres.fields import ArrayField, CITextField
+from django.db import models
+from django_prometheus.models import ExportModelOperationsMixin
+
+from shared.django_apps.codecov.models import BaseCodecovModel
+from shared.django_apps.core.models import DateTimeWithoutTZField, Repository
+from shared.plan.constants import PlanName
+
+
+# Large number to represent Infinity as float('inf') is not JSON serializable
+INFINITY = 99999999
+
+SERVICE_GITHUB = "github"
+SERVICE_GITHUB_ENTERPRISE = "github_enterprise"
+SERVICE_BITBUCKET = "bitbucket"
+SERVICE_BITBUCKET_SERVER = "bitbucket_server"
+SERVICE_GITLAB = "gitlab"
+SERVICE_CODECOV_ENTERPRISE = "enterprise"
+
+DEFAULT_AVATAR_SIZE = 55
+
+log = logging.getLogger(__name__)
+
+
+class DateTimeWithoutTZField(models.DateTimeField):
+    def db_type(self, connection):
+        return "timestamp"
+
+
+# TODO use this to refactor avatar_url
+class Service(models.TextChoices):
+    GITHUB = "github"
+    GITLAB = "gitlab"
+    BITBUCKET = "bitbucket"
+    GITHUB_ENTERPRISE = "github_enterprise"
+    GITLAB_ENTERPRISE = "gitlab_enterprise"
+    BITBUCKET_SERVER = "bitbucket_server"
+
+
+class PlanProviders(models.TextChoices):
+    GITHUB = "github"
+
+
+# Follow the shape of TrialStatus in plan folder
+class TrialStatus(models.TextChoices):
+    NOT_STARTED = "not_started"
+    ONGOING = "ongoing"
+    EXPIRED = "expired"
+    CANNOT_TRIAL = "cannot_trial"
+
+
+class User(ExportModelOperationsMixin("codecov_auth.user"), BaseCodecovModel):
+    class CustomerIntent(models.TextChoices):
+        BUSINESS = "BUSINESS"
+        PERSONAL = "PERSONAL"
+
+    email = CITextField(null=True)
+    name = models.TextField(null=True)
+    is_staff = models.BooleanField(null=True, default=False)
+    is_superuser = models.BooleanField(null=True, default=False)
+    external_id = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
+    terms_agreement = models.BooleanField(null=True, default=False, blank=True)
+    terms_agreement_at = DateTimeWithoutTZField(null=True, blank=True)
+    customer_intent = models.TextField(choices=CustomerIntent.choices, null=True)
+
+    REQUIRED_FIELDS = []
+    USERNAME_FIELD = "external_id"
+
+    class Meta:
+        db_table = "users"
+
+    # Missing Key/Methods
+    # @property
+    # def is_active(self):
+    #     # Required to implement django's user-model interface
+    #     return True
+
+    # @property
+    # def is_anonymous(self):
+    #     # Required to implement django's user-model interface
+    #     return False
+
+    # @property
+    # def is_authenticated(self):
+    #     # Required to implement django's user-model interface
+    #     return True
+
+    # def has_perm(self, perm, obj=None):
+    #     # Required to implement django's user-model interface
+    #     return self.is_staff
+
+    # def has_perms(self, *args, **kwargs):
+    #     # Required to implement django's user-model interface
+    #     return self.is_staff
+
+    # def has_module_perms(self, package_name):
+    #     # Required to implement django's user-model interface
+    #     return self.is_staff
+
+    # def get_username(self):
+    #     # Required to implement django's user-model interface
+ # return self.external_id + + +class Owner(ExportModelOperationsMixin("codecov_auth.owner"), models.Model): + class Meta: + db_table = "owners" + ordering = ["ownerid"] + constraints = [ + models.UniqueConstraint( + fields=["service", "username"], name="owner_service_username" + ), + models.UniqueConstraint( + fields=["service", "service_id"], name="owner_service_ids" + ), + ] + + REQUIRED_FIELDS = [] + USERNAME_FIELD = "username" + + ownerid = models.AutoField(primary_key=True) + service = models.TextField(choices=Service.choices) # Really an ENUM in db + username = CITextField( + unique=True, null=True + ) # No actual unique constraint on this in the DB + email = models.TextField(null=True) + business_email = models.TextField(null=True) + name = models.TextField(null=True) + oauth_token = models.TextField(null=True) + stripe_customer_id = models.TextField(null=True, blank=True) + stripe_subscription_id = models.TextField(null=True, blank=True) + stripe_coupon_id = models.TextField(null=True, blank=True) + + # createstamp seems to be used by legacy to track first login + # so we shouldn't touch this outside login + createstamp = models.DateTimeField(null=True) + + service_id = models.TextField(null=False) + parent_service_id = models.TextField(null=True) + root_parent_service_id = models.TextField(null=True) + private_access = models.BooleanField(null=True) + staff = models.BooleanField(null=True, default=False) + cache = models.JSONField(null=True) + # Really an ENUM in db + plan = models.TextField( + null=True, default=PlanName.BASIC_PLAN_NAME.value, blank=True + ) + plan_provider = models.TextField( + null=True, choices=PlanProviders.choices, blank=True + ) # postgres enum containing only "github" + plan_user_count = models.SmallIntegerField(null=True, default=1, blank=True) + plan_auto_activate = models.BooleanField(null=True, default=True) + plan_activated_users = ArrayField( + models.IntegerField(null=True), null=True, blank=True + ) + did_trial = models.BooleanField(null=True) + trial_start_date = DateTimeWithoutTZField(null=True) + trial_end_date = DateTimeWithoutTZField(null=True) + trial_status = models.CharField( + max_length=50, + choices=TrialStatus.choices, + null=True, + default=TrialStatus.NOT_STARTED.value, + ) + trial_fired_by = models.IntegerField(null=True) + pretrial_users_count = models.SmallIntegerField(null=True, blank=True) + free = models.SmallIntegerField(default=0) + invoice_details = models.TextField(null=True) + uses_invoice = models.BooleanField(default=False, null=False) + delinquent = models.BooleanField(null=True) + yaml = models.JSONField(null=True) + updatestamp = DateTimeWithoutTZField(default=datetime.now) + organizations = ArrayField(models.IntegerField(null=True), null=True, blank=True) + admins = ArrayField(models.IntegerField(null=True), null=True, blank=True) + + # DEPRECATED - replaced by GithubAppInstallation model + integration_id = models.IntegerField(null=True, blank=True) + + permission = ArrayField(models.IntegerField(null=True), null=True) + bot = models.ForeignKey( + "Owner", db_column="bot", null=True, on_delete=models.SET_NULL, blank=True + ) + student = models.BooleanField(default=False) + student_created_at = DateTimeWithoutTZField(null=True) + student_updated_at = DateTimeWithoutTZField(null=True) + onboarding_completed = models.BooleanField(default=False) + is_superuser = models.BooleanField(null=True, default=False) + max_upload_limit = models.IntegerField(null=True, default=150, blank=True) + + sentry_user_id = 
models.TextField(null=True, blank=True, unique=True) + sentry_user_data = models.JSONField(null=True) + + user = models.ForeignKey( + User, + null=True, + on_delete=models.SET_NULL, + blank=True, + related_name="owners", + ) + + # Missing Key/Methods + # objects = OwnerManager() + + # repository_set = RepositoryManager() + + # def __str__(self): + # return f"Owner<{self.service}/{self.username}>" + + # def save(self, *args, **kwargs): + # self.updatestamp = timezone.now() + # super().save(*args, **kwargs) + + # @property + # def has_yaml(self): + # return self.yaml is not None + + # @property + # def default_org(self): + # try: + # if self.profile: + # return self.profile.default_org + # except OwnerProfile.DoesNotExist: + # return None + + # @property + # def has_legacy_plan(self): + # return self.plan is None or not self.plan.startswith("users") + + # @property + # def repo_total_credits(self): + # # Returns the number of private repo credits remaining + # # Only meaningful for legacy plans + # V4_PLAN_PREFIX = "v4-" + # if not self.has_legacy_plan: + # return INFINITY + # if self.plan is None: + # return int(1 + self.free or 0) + # elif self.plan.startswith(V4_PLAN_PREFIX): + # return int(self.plan[3:-1]) + # else: + # return int(self.plan[:-1]) + + # @property + # def root_organization(self): + # """ + # Find the root organization of Gitlab, by using the root_parent_service_id + # if it exists, otherwise iterating through the parents and caches it in root_parent_service_id + # """ + # if self.root_parent_service_id: + # return Owner.objects.get( + # service_id=self.root_parent_service_id, service=self.service + # ) + + # root = None + # if self.service == "gitlab" and self.parent_service_id: + # root = self + # while root.parent_service_id is not None: + # root = Owner.objects.get( + # service_id=root.parent_service_id, service=root.service + # ) + # self.root_parent_service_id = root.service_id + # self.save() + # return root + + # @property + # def nb_active_private_repos(self): + # return self.repository_set.filter(active=True, private=True).count() + + # @property + # def has_private_repos(self): + # return self.repository_set.filter(private=True).exists() + + # @property + # def repo_credits(self): + # # Returns the number of private repo credits remaining + # # Only meaningful for legacy plans + # if not self.has_legacy_plan: + # return INFINITY + # return self.repo_total_credits - self.nb_active_private_repos + + # @property + # def orgs(self): + # if self.organizations: + # return Owner.objects.filter(ownerid__in=self.organizations) + # return Owner.objects.none() + + # @property + # def active_repos(self): + # return Repository.objects.filter(active=True, author=self.ownerid).order_by( + # "-updatestamp" + # ) + + # @property + # def activated_user_count(self): + # if not self.plan_activated_users: + # return 0 + # return Owner.objects.filter( + # ownerid__in=self.plan_activated_users, student=False + # ).count() + + # @property + # def activated_student_count(self): + # if not self.plan_activated_users: + # return 0 + # return Owner.objects.filter( + # ownerid__in=self.plan_activated_users, student=True + # ).count() + + # @property + # def student_count(self): + # return Owner.objects.users_of(self).filter(student=True).count() + + # @property + # def inactive_user_count(self): + # return ( + # Owner.objects.users_of(self).filter(student=False).count() + # - self.activated_user_count + # ) + + # def is_admin(self, owner): + # return self.ownerid == owner.ownerid or ( + # 
bool(self.admins) and owner.ownerid in self.admins + # ) + + # @property + # def is_authenticated(self): + # # NOTE: this is here to support `UserTokenAuthentication` which still returns + # # an `Owner` as the authenticatable record. Since there is code that calls + # # `request.user.is_authenticated` we need to support that here. + # return True + + # def clean(self): + # if self.staff: + # domain = self.email.split("@")[1] if self.email else "" + # if domain not in ["codecov.io", "sentry.io"]: + # raise ValidationError( + # "User not part of Codecov or Sentry cannot be a staff member" + # ) + # if not self.plan: + # self.plan = None + # if not self.stripe_customer_id: + # self.stripe_customer_id = None + # if not self.stripe_subscription_id: + # self.stripe_subscription_id = None + + # @property + # def avatar_url(self, size=DEFAULT_AVATAR_SIZE): + # if self.service == SERVICE_GITHUB and self.service_id: + # return "{}/u/{}?v=3&s={}".format( + # AVATAR_GITHUB_BASE_URL, self.service_id, size + # ) + + # elif self.service == SERVICE_GITHUB_ENTERPRISE and self.service_id: + # return "{}/avatars/u/{}?v=3&s={}".format( + # get_config("github_enterprise", "url"), self.service_id, size + # ) + + # # Bitbucket + # elif self.service == SERVICE_BITBUCKET and self.username: + # return "{}/account/{}/avatar/{}".format( + # BITBUCKET_BASE_URL, self.username, size + # ) + + # elif ( + # self.service == SERVICE_BITBUCKET_SERVER + # and self.service_id + # and self.username + # ): + # if "U" in self.service_id: + # return "{}/users/{}/avatar.png?s={}".format( + # get_config("bitbucket_server", "url"), self.username, size + # ) + # else: + # return "{}/projects/{}/avatar.png?s={}".format( + # get_config("bitbucket_server", "url"), self.username, size + # ) + + # # Gitlab + # elif self.service == SERVICE_GITLAB and self.email: + # return get_gitlab_url(self.email, size) + + # # Codecov config + # elif get_config("services", "gravatar") and self.email: + # return "{}/avatar/{}?s={}".format( + # GRAVATAR_BASE_URL, md5(self.email.lower().encode()).hexdigest(), size + # ) + + # elif get_config("services", "avatars.io") and self.email: + # return "{}/avatar/{}/{}".format( + # AVATARIO_BASE_URL, md5(self.email.lower().encode()).hexdigest(), size + # ) + + # elif self.ownerid: + # return "{}/users/{}.png?size={}".format( + # get_config("setup", "codecov_url"), self.ownerid, size + # ) + + # elif os.getenv("APP_ENV") == SERVICE_CODECOV_ENTERPRISE: + # return "{}/media/images/gafsi/avatar.svg".format( + # get_config("setup", "codecov_url") + # ) + + # else: + # return "{}/media/images/gafsi/avatar.svg".format( + # get_config("setup", "media", "assets") + # ) + + # @property + # def pretty_plan(self): + # if self.plan in USER_PLAN_REPRESENTATIONS: + # plan_details = asdict(USER_PLAN_REPRESENTATIONS[self.plan]) + + # # update with quantity they've purchased + # # allows api users to update the quantity + # # by modifying the "plan", sidestepping + # # some iffy data modeling + + # plan_details.update({"quantity": self.plan_user_count}) + # return plan_details + + # def can_activate_user(self, user): + # return ( + # user.student or self.activated_user_count < self.plan_user_count + self.free + # ) + + # def activate_user(self, user): + # log.info(f"Activating user {user.ownerid} in ownerid {self.ownerid}") + # if isinstance(self.plan_activated_users, list): + # if user.ownerid not in self.plan_activated_users: + # self.plan_activated_users.append(user.ownerid) + # else: + # self.plan_activated_users = 
[user.ownerid] + # self.save() + + # def deactivate_user(self, user): + # log.info(f"Deactivating user {user.ownerid} in ownerid {self.ownerid}") + # if isinstance(self.plan_activated_users, list): + # try: + # self.plan_activated_users.remove(user.ownerid) + # except ValueError: + # pass + # self.save() + + # def add_admin(self, user): + # log.info( + # f"Granting admin permissions to user {user.ownerid} within owner {self.ownerid}" + # ) + # if isinstance(self.admins, list): + # if user.ownerid not in self.admins: + # self.admins.append(user.ownerid) + # else: + # self.admins = [user.ownerid] + # self.save() + + # def remove_admin(self, user): + # log.info( + # f"Revoking admin permissions for user {user.ownerid} within owner {self.ownerid}" + # ) + # if isinstance(self.admins, list): + # try: + # self.admins.remove(user.ownerid) + # except ValueError: + # pass + # self.save() + + +GITHUB_APP_INSTALLATION_DEFAULT_NAME = "codecov_app_installation" + + +class GithubAppInstallation( + ExportModelOperationsMixin("codecov_auth.github_app_installation"), BaseCodecovModel +): + # replacement for owner.integration_id + # installation id GitHub sends us in the installation-related webhook events + installation_id = models.IntegerField(null=False, blank=False) + name = models.TextField(default=GITHUB_APP_INSTALLATION_DEFAULT_NAME) + # if null, all repos are covered by this installation + # otherwise, it's a list of repo.id values + repository_service_ids = ArrayField(models.TextField(null=False), null=True) + + # Needed to get a JWT for the app + # NULL for the default app, which is configured in the install YAML + app_id = models.IntegerField(null=True, blank=False) + # Same comments for app_id apply + pem_path = models.TextField(null=True, blank=False) + + owner = models.ForeignKey( + Owner, + null=False, + on_delete=models.CASCADE, + blank=False, + related_name="github_app_installations", + ) + + # Missing Key/Methods + # def repository_queryset(self) -> BaseManager[Repository]: + # """Returns a QuerySet of repositories covered by this installation""" + # if self.repository_service_ids is None: + # # All repos covered + # return Repository.objects.filter(author=self.owner) + # # Some repos covered + # return Repository.objects.filter( + # service_id__in=self.repository_service_ids, author=self.owner + # ) + + # def covers_all_repos(self) -> bool: + # return self.repository_service_ids is None + + # def is_repo_covered_by_integration(self, repo: Repository) -> bool: + # if self.covers_all_repos(): + # return repo.author.ownerid == self.owner.ownerid + # return repo.service_id in self.repository_service_ids + + +class SentryUser( + ExportModelOperationsMixin("codecov_auth.sentry_user"), BaseCodecovModel +): + user = models.ForeignKey( + User, + null=False, + on_delete=models.CASCADE, + related_name="sentry_user", + ) + access_token = models.TextField(null=True) + refresh_token = models.TextField(null=True) + sentry_id = models.TextField(null=False, unique=True) + email = models.TextField(null=True) + name = models.TextField(null=True) + + +class OktaUser(ExportModelOperationsMixin("codecov_auth.okta_user"), BaseCodecovModel): + user = models.ForeignKey( + User, + null=False, + on_delete=models.CASCADE, + related_name="okta_user", + ) + access_token = models.TextField(null=True) + okta_id = models.TextField(null=False, unique=True) + email = models.TextField(null=True) + name = models.TextField(null=True) + + +class TokenTypeChoices(models.TextChoices): + UPLOAD = "upload" + + +class 
OrganizationLevelToken( + ExportModelOperationsMixin("codecov_auth.organization_level_token"), + BaseCodecovModel, +): + owner = models.ForeignKey( + "Owner", + db_column="ownerid", + related_name="organization_tokens", + on_delete=models.CASCADE, + ) + token = models.UUIDField(unique=True, default=uuid.uuid4) + valid_until = models.DateTimeField(blank=True, null=True) + token_type = models.CharField( + max_length=50, choices=TokenTypeChoices.choices, default=TokenTypeChoices.UPLOAD + ) + + def save(self, *args, **kwargs): + super().save(*args, **kwargs) + + +class OwnerProfile( + ExportModelOperationsMixin("codecov_auth.owner_profile"), BaseCodecovModel +): + class ProjectType(models.TextChoices): + PERSONAL = "PERSONAL" + YOUR_ORG = "YOUR_ORG" + OPEN_SOURCE = "OPEN_SOURCE" + EDUCATIONAL = "EDUCATIONAL" + + class Goal(models.TextChoices): + STARTING_WITH_TESTS = "STARTING_WITH_TESTS" + IMPROVE_COVERAGE = "IMPROVE_COVERAGE" + MAINTAIN_COVERAGE = "MAINTAIN_COVERAGE" + TEAM_REQUIREMENTS = "TEAM_REQUIREMENTS" + OTHER = "OTHER" + + owner = models.OneToOneField( + Owner, on_delete=models.CASCADE, unique=True, related_name="profile" + ) + type_projects = ArrayField( + models.TextField(choices=ProjectType.choices), default=list + ) + goals = ArrayField(models.TextField(choices=Goal.choices), default=list) + other_goal = models.TextField(null=True) + default_org = models.ForeignKey( + Owner, on_delete=models.CASCADE, null=True, related_name="profiles_with_default" + ) + + +class Session(ExportModelOperationsMixin("codecov_auth.session"), models.Model): + class Meta: + db_table = "sessions" + ordering = ["-lastseen"] + + class SessionType(models.TextChoices): + API = "api" + LOGIN = "login" + + sessionid = models.AutoField(primary_key=True) + token = models.UUIDField(unique=True, default=uuid.uuid4, editable=False) + name = models.TextField(null=True) + useragent = models.TextField(null=True) + ip = models.TextField(null=True) + owner = models.ForeignKey(Owner, db_column="ownerid", on_delete=models.CASCADE) + lastseen = models.DateTimeField(null=True) + # Really an ENUM in db + type = models.TextField(choices=SessionType.choices) + + +def _generate_key(): + return binascii.hexlify(os.urandom(20)).decode() + + +class RepositoryToken( + ExportModelOperationsMixin("codecov_auth.repository_token"), BaseCodecovModel +): + class TokenType(models.TextChoices): + UPLOAD = "upload" + PROFILING = "profiling" + STATIC_ANALYSIS = "static_analysis" + + repository = models.ForeignKey( + "core.Repository", + db_column="repoid", + on_delete=models.CASCADE, + related_name="tokens", + ) + token_type = models.CharField(max_length=50, choices=TokenType.choices) + valid_until = models.DateTimeField(blank=True, null=True) + key = models.CharField( + max_length=40, unique=True, editable=False, default=_generate_key + ) + + @classmethod + def generate_key(cls): + return _generate_key() + + +class UserToken( + ExportModelOperationsMixin("codecov_auth.user_token"), BaseCodecovModel +): + class TokenType(models.TextChoices): + API = "api" + + name = models.CharField(max_length=100, null=False, blank=False) + owner = models.ForeignKey( + "Owner", + db_column="ownerid", + related_name="user_tokens", + on_delete=models.CASCADE, + ) + token = models.UUIDField(unique=True, default=uuid.uuid4) + valid_until = models.DateTimeField(blank=True, null=True) + token_type = models.CharField( + max_length=50, choices=TokenType.choices, default=TokenType.API + ) diff --git a/shared/django_apps/core/__init__.py 
b/shared/django_apps/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/core/models.py b/shared/django_apps/core/models.py new file mode 100644 index 000000000..642466b21 --- /dev/null +++ b/shared/django_apps/core/models.py @@ -0,0 +1,484 @@ +from django.db import models + +# Create your models here. +import random +import string +import uuid +from datetime import datetime + +from django.contrib.postgres.fields import ArrayField, CITextField +from django.contrib.postgres.indexes import GinIndex, OpClass +from django.db import models +from django.db.models.functions import Lower, Substr, Upper +from django.utils import timezone +from django_prometheus.models import ExportModelOperationsMixin + +from model_utils import FieldTracker + +from shared.django_apps.codecov.models import BaseCodecovModel + +class DateTimeWithoutTZField(models.DateTimeField): + def db_type(self, connection): + return "timestamp" + + +class Version(ExportModelOperationsMixin("core.version"), models.Model): + version = models.TextField(primary_key=True) + + class Meta: + db_table = "version" + + +class Constants(ExportModelOperationsMixin("core.constants"), models.Model): + key = models.CharField(primary_key=True) + value = models.CharField() + + class Meta: + db_table = "constants" + + +def _gen_image_token(): + return "".join( + random.choice(string.ascii_letters + string.digits) for _ in range(10) + ) + + +class Repository(ExportModelOperationsMixin("core.repository"), models.Model): + class Languages(models.TextChoices): + JAVASCRIPT = "javascript" + SHELL = "shell" + PYTHON = "python" + RUBY = "ruby" + PERL = "perl" + DART = "dart" + JAVA = "java" + C = "c" + CLOJURE = "clojure" + D = "d" + FORTRAN = "fortran" + GO = "go" + GROOVY = "groovy" + KOTLIN = "kotlin" + PHP = "php" + R = "r" + SCALA = "scala" + SWIFT = "swift" + OBJECTIVE_C = "objective-c" + XTEND = "xtend" + TYPESCRIPT = "typescript" + HASKELL = "haskell" + RUST = "rust" + LUA = "lua" + MATLAB = "matlab" + ASSEMBLY = "assembly" + SCHEME = "scheme" + POWERSHELL = "powershell" + APEX = "apex" + VERILOG = "verilog" + COMMON_LISP = "common lisp" + ERLANG = "erlang" + JULIA = "julia" + PROLOG = "prolog" + VUE = "vue" + CPP = "c++" + C_SHARP = "c#" + F_SHARP = "f#" + + repoid = models.AutoField(primary_key=True) + name = CITextField() + author = models.ForeignKey( + "codecov_auth.Owner", db_column="ownerid", on_delete=models.CASCADE + ) + service_id = models.TextField() + private = models.BooleanField() + updatestamp = models.DateTimeField(auto_now=True) + active = models.BooleanField(null=True, default=False) + language = models.TextField( + null=True, blank=True, choices=Languages.choices + ) # Really an ENUM in db + languages = ArrayField(models.CharField(), default=[], blank=True, null=True) + languages_last_updated = DateTimeWithoutTZField(null=True, blank=True) + fork = models.ForeignKey( + "core.Repository", + db_column="forkid", + on_delete=models.DO_NOTHING, + null=True, + blank=True, + ) + branch = models.TextField(default="master") + upload_token = models.UUIDField(unique=True, default=uuid.uuid4) + yaml = models.JSONField(null=True) + image_token = models.TextField(null=True, default=_gen_image_token) + + # DEPRECATED - replaced by GithubAppInstallation model + using_integration = models.BooleanField(null=True) + + hookid = models.TextField(null=True) + webhook_secret = models.TextField(null=True) + bot = models.ForeignKey( + "codecov_auth.Owner", + db_column="bot", + null=True, + 
on_delete=models.SET_NULL, + related_name="bot_repos", + blank=True, + ) + activated = models.BooleanField(null=True, default=False) + deleted = models.BooleanField(default=False) + bundle_analysis_enabled = models.BooleanField(default=False, null=True) + coverage_enabled = models.BooleanField(default=False, null=True) + + # tracks field changes being saved + tracker = FieldTracker() + + class Meta: + db_table = "repos" + ordering = ["-repoid"] + indexes = [ + models.Index( + fields=["service_id", "author"], + name="repos_service_id_author", + ), + ] + constraints = [ + models.UniqueConstraint(fields=["author", "name"], name="repos_slug"), + models.UniqueConstraint( + fields=["author", "service_id"], name="repos_service_ids" + ), + ] + verbose_name_plural = "Repositories" + + + def __str__(self): + return f"Repo<{self.author}/{self.name}>" + + @property + def service(self): + return self.author.service + + # Missing Key/Methods + # objects = RepositoryManager() + # def clean(self): + # if self.using_integration is None: + # raise ValidationError("using_integration cannot be null") + + +class Branch(ExportModelOperationsMixin("core.branch"), models.Model): + name = models.TextField(primary_key=True, db_column="branch") + repository = models.ForeignKey( + "core.Repository", + db_column="repoid", + on_delete=models.CASCADE, + related_name="branches", + ) + authors = ArrayField( + models.IntegerField(null=True, blank=True), + null=True, + blank=True, + db_column="authors", + ) + head = models.TextField() + base = models.TextField(null=True) + updatestamp = models.DateTimeField(auto_now=True) + + class Meta: + db_table = "branches" + constraints = [ + models.UniqueConstraint( + fields=["name", "repository"], name="branches_repoid_branch" + ) + ] + indexes = [ + models.Index( + fields=["repository", "-updatestamp"], + name="branches_repoid_updatestamp", + ), + ] + + +class Commit(ExportModelOperationsMixin("core.commit"), models.Model): + class CommitStates(models.TextChoices): + COMPLETE = "complete" + PENDING = "pending" + ERROR = "error" + SKIPPED = "skipped" + + id = models.BigAutoField(primary_key=True) + commitid = models.TextField() + timestamp = DateTimeWithoutTZField(default=timezone.now) + updatestamp = DateTimeWithoutTZField(default=timezone.now) + author = models.ForeignKey( + "codecov_auth.Owner", db_column="author", on_delete=models.SET_NULL, null=True + ) + repository = models.ForeignKey( + "core.Repository", + db_column="repoid", + on_delete=models.CASCADE, + related_name="commits", + ) + ci_passed = models.BooleanField(null=True) + totals = models.JSONField(null=True) + merged = models.BooleanField(null=True) + deleted = models.BooleanField(null=True) + notified = models.BooleanField(null=True) + branch = models.TextField(null=True) + pullid = models.IntegerField(null=True) + message = models.TextField(null=True) + parent_commit_id = models.TextField(null=True, db_column="parent") + state = models.TextField( + null=True, choices=CommitStates.choices + ) # Really an ENUM in db + + def save(self, *args, **kwargs): + self.updatestamp = timezone.now() + super().save(*args, **kwargs) + + # Missing Key/Methods + # @cached_property + # def parent_commit(self): + # return Commit.objects.filter( + # repository=self.repository, commitid=self.parent_commit_id + # ).first() + + # @cached_property + # def commitreport(self): + # reports = list(self.reports.all()) + # # This is almost always prefetched w/ `filter(code=None)` and + # # `filter(Q(report_type=None) | 
Q(report_type=CommitReport.ReportType.COVERAGE))` + # # (in which case `.all()` returns the already filtered results) + # # In the case that the reports were not prefetched we'll filter again in memory. + # reports = [ + # report + # for report in reports + # if report.code is None + # and (report.report_type is None or report.report_type == "coverage") + # ] + # return reports[0] if reports else None + + # @cached_property + # def full_report(self) -> Optional[Report]: + # # TODO: we should probably remove use of this method since it inverts the + # # dependency tree (services should be importing models and not the other + # # way around). The caching should be preserved somehow though. + # from services.report import build_report_from_commit + + # return build_report_from_commit(self) + + class Meta: + db_table = "commits" + constraints = [ + models.UniqueConstraint( + fields=["repository", "commitid"], name="commits_repoid_commitid" + ) + ] + indexes = [ + models.Index( + fields=["repository", "-timestamp"], + name="commits_repoid_timestamp_desc", + ), + models.Index( + fields=["repository", "branch", "state", "-timestamp"], + name="commits_repoid_branch_state_ts", + ), + models.Index( + fields=["repository", "pullid"], + name="commits_on_pull", + condition=~models.Q(deleted=True), + ), + models.Index( + fields=["repository", "pullid"], + name="all_commits_on_pull", + ), + models.Index( + "repository", + Substr(Lower("commitid"), 1, 7), + name="commits_repoid_commitid_short", + ), + GinIndex( + "repository", + OpClass(Upper("message"), name="gin_trgm_ops"), + name="commit_message_gin_trgm", + ), + ] + + # Missing Key/Methods + # def get_repository(self): + # return self.repository + + # def get_commitid(self): + # return self.commitid + + # @property + # def external_id(self): + # return self.commitid + + # def should_write_to_storage(self) -> bool: + # if self.repository is None or self.repository.author is None: + # return False + # is_codecov_repo = self.repository.author.username == "codecov" + # return should_write_data_to_storage_config_check( + # "commit_report", is_codecov_repo, self.repository.repoid + # ) + + # # Use custom JSON to properly serialize custom data classes on reports + # _report = models.JSONField(null=True, db_column="report", encoder=ReportJSONEncoder) + # _report_storage_path = models.URLField(null=True, db_column="report_storage_path") + # report = ArchiveField( + # should_write_to_storage_fn=should_write_to_storage, + # json_encoder=ReportJSONEncoder, + # default_value_class=dict, + # ) + + +class PullStates(models.TextChoices): + OPEN = "open" + MERGED = "merged" + CLOSED = "closed" + + +class Pull(ExportModelOperationsMixin("core.pull"), models.Model): + repository = models.ForeignKey( + "core.Repository", + db_column="repoid", + on_delete=models.CASCADE, + related_name="pull_requests", + ) + id = models.BigAutoField(primary_key=True) + pullid = models.IntegerField() + issueid = models.IntegerField(null=True) + state = models.TextField( + choices=PullStates.choices, default=PullStates.OPEN.value + ) # Really an ENUM in db + title = models.TextField(null=True) + base = models.TextField(null=True) + head = models.TextField(null=True) + user_provided_base_sha = models.TextField(null=True) + compared_to = models.TextField(null=True) + commentid = models.TextField(null=True) + bundle_analysis_commentid = models.TextField(null=True) + author = models.ForeignKey( + "codecov_auth.Owner", db_column="author", on_delete=models.SET_NULL, null=True + ) + updatestamp = 
DateTimeWithoutTZField(default=timezone.now) + diff = models.JSONField(null=True) + behind_by = models.IntegerField(null=True) + behind_by_commit = models.TextField(null=True) + + class Meta: + db_table = "pulls" + ordering = ["-pullid"] + constraints = [ + models.UniqueConstraint( + fields=["repository", "pullid"], name="pulls_repoid_pullid" + ) + ] + indexes = [ + models.Index( + fields=["repository"], + name="pulls_repoid_state_open", + condition=models.Q(state=PullStates.OPEN.value), + ), + models.Index( + fields=["author", "updatestamp"], + name="pulls_author_updatestamp", + ), + models.Index( + fields=["repository", "pullid", "updatestamp"], + name="pulls_repoid_pullid_ts", + ), + models.Index( + fields=["repository", "id"], + name="pulls_repoid_id", + ), + ] + + # Missing Key/Methods + # def get_repository(self): + # return self.repository + + # def get_commitid(self): + # return None + + # @property + # def external_id(self): + # return self.pullid + + # def should_write_to_storage(self) -> bool: + # if self.repository is None or self.repository.author is None: + # return False + # is_codecov_repo = self.repository.author.username == "codecov" + # return should_write_data_to_storage_config_check( + # master_switch_key="pull_flare", + # is_codecov_repo=is_codecov_repo, + # repoid=self.repository.repoid, + # ) + + # _flare = models.JSONField(db_column="flare", null=True) + # _flare_storage_path = models.URLField(db_column="flare_storage_path", null=True) + # flare = ArchiveField( + # should_write_to_storage_fn=should_write_to_storage, default_value_class=dict + # ) + + def save(self, *args, **kwargs): + self.updatestamp = timezone.now() + super().save(*args, **kwargs) + + +class CommitNotification( + ExportModelOperationsMixin("core.commit_notification"), models.Model +): + class NotificationTypes(models.TextChoices): + COMMENT = "comment" + GITTER = "gitter" + HIPCHAT = "hipchat" + IRC = "irc" + SLACK = "slack" + STATUS_CHANGES = "status_changes" + STATUS_PATCH = "status_patch" + STATUS_PROJECT = "status_project" + WEBHOOK = "webhook" + CODECOV_SLACK_APP = "codecov_slack_app" + + class DecorationTypes(models.TextChoices): + STANDARD = "standard" + UPGRADE = "upgrade" + UPLOAD_LIMIT = "upload_limit" + PASSING_EMPTY_UPLOAD = "passing_empty_upload" + FAILING_EMPTY_UPLOAD = "failing_empty_upload" + PROCESSING_UPLOAD = "processing_upload" + + class States(models.TextChoices): + PENDING = "pending" + SUCCESS = "success" + ERROR = "error" + + id = models.BigAutoField(primary_key=True) + commit = models.ForeignKey( + "core.Commit", on_delete=models.CASCADE, related_name="notifications" + ) + notification_type = models.TextField( + choices=NotificationTypes.choices + ) # Really an ENUM in db + decoration_type = models.TextField( + choices=DecorationTypes.choices, null=True + ) # Really an ENUM in db + state = models.TextField(choices=States.choices, null=True) # Really an ENUM in db + created_at = DateTimeWithoutTZField(default=datetime.now) + updated_at = DateTimeWithoutTZField(default=datetime.now) + + def save(self, *args, **kwargs): + self.updated_at = timezone.now() + super().save(*args, **kwargs) + + class Meta: + db_table = "commit_notifications" + + +class CommitError(ExportModelOperationsMixin("core.commit_error"), BaseCodecovModel): + commit = models.ForeignKey( + "Commit", + related_name="errors", + on_delete=models.CASCADE, + ) + error_code = models.CharField(max_length=100) + error_params = models.JSONField(default=dict) diff --git a/shared/django_apps/dummy_settings.py 
b/shared/django_apps/dummy_settings.py index 01c398378..cf240e2a3 100644 --- a/shared/django_apps/dummy_settings.py +++ b/shared/django_apps/dummy_settings.py @@ -2,7 +2,7 @@ # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent -from .db_settings import * +from shared.django_apps.db_settings import * ALLOWED_HOSTS = [] @@ -11,6 +11,9 @@ "shared.django_apps.pg_telemetry", "shared.django_apps.ts_telemetry", "shared.django_apps.rollouts", + # API models + "shared.django_apps.codecov_auth", + "shared.django_apps.core" ] MIDDLEWARE = [] diff --git a/shared/plan/constants.py b/shared/plan/constants.py new file mode 100644 index 000000000..c7c9e74ec --- /dev/null +++ b/shared/plan/constants.py @@ -0,0 +1,360 @@ +import enum +from dataclasses import dataclass +from typing import List, Optional + + +class MonthlyUploadLimits(enum.Enum): + CODECOV_BASIC_PLAN = 250 + CODECOV_TEAM_PLAN = 2500 + + +class TrialDaysAmount(enum.Enum): + CODECOV_SENTRY = 14 + + +class PlanMarketingName(enum.Enum): + CODECOV_PRO = "Pro" + SENTRY_PRO = "Sentry Pro" + ENTERPRISE_CLOUD = "Enterprise Cloud" + GITHUB_MARKETPLACE = "Github Marketplace" + FREE = "Developer" + BASIC = "Developer" + TRIAL = "Developer" + TEAM = "Team" + + +class PlanName(enum.Enum): + BASIC_PLAN_NAME = "users-basic" + TRIAL_PLAN_NAME = "users-trial" + CODECOV_PRO_MONTHLY = "users-pr-inappm" + CODECOV_PRO_YEARLY = "users-pr-inappy" + SENTRY_MONTHLY = "users-sentrym" + SENTRY_YEARLY = "users-sentryy" + TEAM_MONTHLY = "users-teamm" + TEAM_YEARLY = "users-teamy" + GHM_PLAN_NAME = "users" + FREE_PLAN_NAME = "users-free" + CODECOV_PRO_MONTHLY_LEGACY = "users-inappm" + CODECOV_PRO_YEARLY_LEGACY = "users-inappy" + ENTERPRISE_CLOUD_MONTHLY = "users-enterprisem" + ENTERPRISE_CLOUD_YEARLY = "users-enterprisey" + + +class PlanBillingRate(enum.Enum): + MONTHLY = "monthly" + YEARLY = "annually" + + +class PlanPrice(enum.Enum): + MONTHLY = 12 + YEARLY = 10 + CODECOV_FREE = 0 + CODECOV_BASIC = 0 + CODECOV_TRIAL = 0 + TEAM_MONTHLY = 5 + TEAM_YEARLY = 4 + GHM_PRICE = 12 + + +class TrialStatus(enum.Enum): + NOT_STARTED = "not_started" + ONGOING = "ongoing" + EXPIRED = "expired" + CANNOT_TRIAL = "cannot_trial" + + +class TierName(enum.Enum): + BASIC = "basic" + TEAM = "team" + PRO = "pro" + ENTERPRISE = "enterprise" + + +@dataclass(repr=False) +class PlanData: + """ + Dataclass that represents plan related information + """ + + marketing_name: PlanMarketingName + value: PlanName + billing_rate: Optional[PlanBillingRate] + base_unit_price: PlanPrice + benefits: List[str] + tier_name: TierName + monthly_uploads_limit: Optional[MonthlyUploadLimits] + trial_days: Optional[TrialDaysAmount] + + +NON_PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS = { + PlanName.CODECOV_PRO_MONTHLY_LEGACY.value: PlanData( + marketing_name=PlanMarketingName.CODECOV_PRO.value, + value=PlanName.CODECOV_PRO_MONTHLY_LEGACY.value, + billing_rate=PlanBillingRate.MONTHLY.value, + base_unit_price=PlanPrice.MONTHLY.value, + benefits=[ + "Configurable # of users", + "Unlimited public repositories", + "Unlimited private repositories", + "Priority Support", + ], + tier_name=TierName.PRO.value, + monthly_uploads_limit=None, + trial_days=None, + ), + PlanName.CODECOV_PRO_YEARLY_LEGACY.value: PlanData( + marketing_name=PlanMarketingName.CODECOV_PRO.value, + value=PlanName.CODECOV_PRO_YEARLY_LEGACY.value, + billing_rate=PlanBillingRate.YEARLY.value, + base_unit_price=PlanPrice.YEARLY.value, + benefits=[ + "Configurable # of users", + "Unlimited public 
repositories", + "Unlimited private repositories", + "Priority Support", + ], + tier_name=TierName.PRO.value, + monthly_uploads_limit=None, + trial_days=None, + ), +} + + +PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS = { + PlanName.CODECOV_PRO_MONTHLY.value: PlanData( + marketing_name=PlanMarketingName.CODECOV_PRO.value, + value=PlanName.CODECOV_PRO_MONTHLY.value, + billing_rate=PlanBillingRate.MONTHLY.value, + base_unit_price=PlanPrice.MONTHLY.value, + benefits=[ + "Configurable # of users", + "Unlimited public repositories", + "Unlimited private repositories", + "Priority Support", + ], + tier_name=TierName.PRO.value, + monthly_uploads_limit=None, + trial_days=None, + ), + PlanName.CODECOV_PRO_YEARLY.value: PlanData( + marketing_name=PlanMarketingName.CODECOV_PRO.value, + value=PlanName.CODECOV_PRO_YEARLY.value, + billing_rate=PlanBillingRate.YEARLY.value, + base_unit_price=PlanPrice.YEARLY.value, + benefits=[ + "Configurable # of users", + "Unlimited public repositories", + "Unlimited private repositories", + "Priority Support", + ], + tier_name=TierName.PRO.value, + monthly_uploads_limit=None, + trial_days=None, + ), +} + +SENTRY_PAID_USER_PLAN_REPRESENTATIONS = { + PlanName.SENTRY_MONTHLY.value: PlanData( + marketing_name=PlanMarketingName.SENTRY_PRO.value, + value=PlanName.SENTRY_MONTHLY.value, + billing_rate=PlanBillingRate.MONTHLY.value, + base_unit_price=PlanPrice.MONTHLY.value, + benefits=[ + "Includes 5 seats", + "$12 per additional seat", + "Unlimited public repositories", + "Unlimited private repositories", + "Priority Support", + ], + tier_name=TierName.PRO.value, + trial_days=TrialDaysAmount.CODECOV_SENTRY.value, + monthly_uploads_limit=None, + ), + PlanName.SENTRY_YEARLY.value: PlanData( + marketing_name=PlanMarketingName.SENTRY_PRO.value, + value=PlanName.SENTRY_YEARLY.value, + billing_rate=PlanBillingRate.YEARLY.value, + base_unit_price=PlanPrice.YEARLY.value, + benefits=[ + "Includes 5 seats", + "$10 per additional seat", + "Unlimited public repositories", + "Unlimited private repositories", + "Priority Support", + ], + tier_name=TierName.PRO.value, + trial_days=TrialDaysAmount.CODECOV_SENTRY.value, + monthly_uploads_limit=None, + ), +} + +# TODO: Update these values +ENTERPRISE_CLOUD_USER_PLAN_REPRESENTATIONS = { + PlanName.ENTERPRISE_CLOUD_MONTHLY.value: PlanData( + marketing_name=PlanMarketingName.ENTERPRISE_CLOUD.value, + value=PlanName.ENTERPRISE_CLOUD_MONTHLY.value, + billing_rate=PlanBillingRate.MONTHLY.value, + base_unit_price=PlanPrice.MONTHLY.value, + benefits=[ + "Configurable # of users", + "Unlimited public repositories", + "Unlimited private repositories", + "Priority Support", + ], + tier_name=TierName.ENTERPRISE.value, + trial_days=None, + monthly_uploads_limit=None, + ), + PlanName.ENTERPRISE_CLOUD_YEARLY.value: PlanData( + marketing_name=PlanMarketingName.ENTERPRISE_CLOUD.value, + value=PlanName.ENTERPRISE_CLOUD_YEARLY.value, + billing_rate=PlanBillingRate.YEARLY.value, + base_unit_price=PlanPrice.YEARLY.value, + benefits=[ + "Configurable # of users", + "Unlimited public repositories", + "Unlimited private repositories", + "Priority Support", + ], + tier_name=TierName.ENTERPRISE.value, + trial_days=None, + monthly_uploads_limit=None, + ), +} + +GHM_PLAN_REPRESENTATION = { + PlanName.GHM_PLAN_NAME.value: PlanData( + marketing_name=PlanMarketingName.GITHUB_MARKETPLACE.value, + value=PlanName.GHM_PLAN_NAME.value, + billing_rate=None, + base_unit_price=PlanPrice.GHM_PRICE.value, + benefits=[ + "Configurable # of users", + "Unlimited public repositories", + 
"Unlimited private repositories", + ], + tier_name=TierName.PRO.value, + trial_days=None, + monthly_uploads_limit=None, + ) +} + +BASIC_PLAN = PlanData( + marketing_name=PlanMarketingName.BASIC.value, + value=PlanName.BASIC_PLAN_NAME.value, + billing_rate=None, + base_unit_price=PlanPrice.CODECOV_BASIC.value, + benefits=[ + "Up to 1 user", + "Unlimited public repositories", + "Unlimited private repositories", + ], + tier_name=TierName.BASIC.value, + monthly_uploads_limit=MonthlyUploadLimits.CODECOV_BASIC_PLAN.value, + trial_days=None, +) + +FREE_PLAN = PlanData( + marketing_name=PlanMarketingName.FREE.value, + value=PlanName.FREE_PLAN_NAME.value, + billing_rate=None, + base_unit_price=PlanPrice.CODECOV_FREE.value, + benefits=[ + "Up to 1 user", + "Unlimited public repositories", + "Unlimited private repositories", + ], + tier_name=TierName.BASIC.value, + trial_days=None, + monthly_uploads_limit=None, +) + +FREE_PLAN_REPRESENTATIONS = { + PlanName.FREE_PLAN_NAME.value: FREE_PLAN, + PlanName.BASIC_PLAN_NAME.value: BASIC_PLAN, +} + +TEAM_PLAN_REPRESENTATIONS = { + PlanName.TEAM_MONTHLY.value: PlanData( + marketing_name=PlanMarketingName.TEAM.value, + value=PlanName.TEAM_MONTHLY.value, + billing_rate=PlanBillingRate.MONTHLY.value, + base_unit_price=PlanPrice.TEAM_MONTHLY.value, + benefits=[ + "Up to 10 users", + "Unlimited repositories", + "2500 private repo uploads", + "Patch coverage analysis", + ], + tier_name=TierName.TEAM.value, + trial_days=None, + monthly_uploads_limit=MonthlyUploadLimits.CODECOV_TEAM_PLAN.value, + ), + PlanName.TEAM_YEARLY.value: PlanData( + marketing_name=PlanMarketingName.TEAM.value, + value=PlanName.TEAM_YEARLY.value, + billing_rate=PlanBillingRate.YEARLY.value, + base_unit_price=PlanPrice.TEAM_YEARLY.value, + benefits=[ + "Up to 10 users", + "Unlimited repositories", + "2500 private repo uploads", + "Patch coverage analysis", + ], + tier_name=TierName.TEAM.value, + trial_days=None, + monthly_uploads_limit=MonthlyUploadLimits.CODECOV_TEAM_PLAN.value, + ), +} + +TRIAL_PLAN_REPRESENTATION = { + PlanName.TRIAL_PLAN_NAME.value: PlanData( + marketing_name=PlanMarketingName.TRIAL.value, + value=PlanName.TRIAL_PLAN_NAME.value, + billing_rate=None, + base_unit_price=PlanPrice.CODECOV_TRIAL.value, + benefits=[ + "Configurable # of users", + "Unlimited public repositories", + "Unlimited private repositories", + "Priority Support", + ], + tier_name=TierName.PRO.value, + trial_days=None, + monthly_uploads_limit=None, + ), +} + +PAID_PLANS = { + **PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS, + **SENTRY_PAID_USER_PLAN_REPRESENTATIONS, + **ENTERPRISE_CLOUD_USER_PLAN_REPRESENTATIONS, + **TEAM_PLAN_REPRESENTATIONS, +} + +TRIAL_PLANS = {**TRIAL_PLAN_REPRESENTATION} + +TEAM_PLANS = {**TEAM_PLAN_REPRESENTATIONS} + + +USER_PLAN_REPRESENTATIONS = { + **FREE_PLAN_REPRESENTATIONS, + **NON_PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS, + **GHM_PLAN_REPRESENTATION, + **PAID_PLANS, + **TRIAL_PLANS, + **TEAM_PLANS, +} + +PLANS_THAT_CAN_TRIAL = [ + PlanName.FREE_PLAN_NAME.value, + PlanName.BASIC_PLAN_NAME.value, + PlanName.CODECOV_PRO_MONTHLY.value, + PlanName.CODECOV_PRO_YEARLY.value, + PlanName.SENTRY_MONTHLY.value, + PlanName.SENTRY_YEARLY.value, + PlanName.TRIAL_PLAN_NAME.value, +] + +TRIAL_PLAN_SEATS = 1000 +TEAM_PLAN_MAX_USERS = 10 diff --git a/shared/plan/service.py b/shared/plan/service.py new file mode 100644 index 000000000..67fa21a30 --- /dev/null +++ b/shared/plan/service.py @@ -0,0 +1,247 @@ +import logging +from datetime import datetime, timedelta +from typing import List, Optional + 
+from codecov.commands.exceptions import ValidationError
+from shared.django_apps.codecov_auth.models import Owner
+from plan.constants import (
+    BASIC_PLAN,
+    FREE_PLAN,
+    FREE_PLAN_REPRESENTATIONS,
+    PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS,
+    SENTRY_PAID_USER_PLAN_REPRESENTATIONS,
+    TEAM_PLAN_MAX_USERS,
+    TEAM_PLAN_REPRESENTATIONS,
+    TRIAL_PLAN_SEATS,
+    USER_PLAN_REPRESENTATIONS,
+    PlanData,
+    PlanName,
+    TrialDaysAmount,
+    TrialStatus,
+)
+from services import sentry
+
+log = logging.getLogger(__name__)
+
+
+# TODO: Consider moving some of these methods to the billing directory as they overlap billing functionality
+class PlanService:
+    def __init__(self, current_org: Owner):
+        """
+        Initializes a plan service object with a plan. The plan will be a trial plan
+        if applicable.
+
+        Args:
+            current_org (Owner): the organization being operated on. This is not the user that is sending the request.
+
+        Returns:
+            No value
+        """
+        self.current_org = current_org
+        if self.current_org.plan not in USER_PLAN_REPRESENTATIONS:
+            raise ValueError("Unsupported plan")
+        else:
+            self.plan_data = USER_PLAN_REPRESENTATIONS[self.current_org.plan]
+
+    def update_plan(self, name: PlanName, user_count: int) -> None:
+        if name not in USER_PLAN_REPRESENTATIONS:
+            raise ValueError("Unsupported plan")
+        self.current_org.plan = name
+        self.current_org.plan_user_count = user_count
+        self.plan_data = USER_PLAN_REPRESENTATIONS[self.current_org.plan]
+        self.current_org.save()
+
+    def current_org(self) -> Owner:
+        return self.current_org
+
+    def set_default_plan_data(self) -> None:
+        log.info(f"Setting plan to users-basic for owner {self.current_org.ownerid}")
+        self.current_org.plan = PlanName.BASIC_PLAN_NAME.value
+        self.current_org.plan_activated_users = None
+        self.current_org.plan_user_count = 1
+        self.current_org.stripe_subscription_id = None
+        self.current_org.save()
+
+    @property
+    def plan_name(self) -> str:
+        return self.plan_data.value
+
+    @property
+    def plan_user_count(self) -> int:
+        return self.current_org.plan_user_count
+
+    @property
+    def plan_activated_users(self) -> Optional[List[int]]:
+        return self.current_org.plan_activated_users
+
+    @property
+    def pretrial_users_count(self) -> int:
+        return self.current_org.pretrial_users_count or 1
+
+    @property
+    def marketing_name(self) -> str:
+        return self.plan_data.marketing_name
+
+    @property
+    def billing_rate(self) -> Optional[str]:
+        return self.plan_data.billing_rate
+
+    @property
+    def base_unit_price(self) -> int:
+        return self.plan_data.base_unit_price
+
+    @property
+    def benefits(self) -> List[str]:
+        return self.plan_data.benefits
+
+    @property
+    def monthly_uploads_limit(self) -> Optional[int]:
+        """
+        Property that returns the monthly uploads limit for the current plan
+
+        Returns:
+            Optional number of monthly uploads (None when the plan has no explicit limit)
+        """
+        return self.plan_data.monthly_uploads_limit
+
+    @property
+    def tier_name(self) -> str:
+        return self.plan_data.tier_name
+
+    def available_plans(self, owner: Owner) -> List[PlanData]:
+        """
+        Returns the available plans for an owner and an organization
+
+        Args:
+            owner (Owner): the user that is sending the request.
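+
+        Example (an illustrative sketch; `plan_service` and `requesting_owner` are
+        hypothetical, and the org is assumed to be a non-Sentry org on the default
+        users-basic plan with at most 10 activated users, so Sentry plans are
+        omitted and Team plans are included):
+
+            plans = plan_service.available_plans(owner=requesting_owner)
+            [plan.value for plan in plans]
+            # ["users-basic", "users-pr-inappm", "users-pr-inappy",
+            #  "users-teamm", "users-teamy"]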
+
+        Returns:
+            List[PlanData]: the plans the organization can choose from.
+        """
+        available_plans = []
+        available_plans.append(BASIC_PLAN)
+
+        if self.plan_name == FREE_PLAN.value:
+            available_plans.append(FREE_PLAN)
+
+        available_plans += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values()
+
+        if owner and sentry.is_sentry_user(owner=owner):
+            available_plans += SENTRY_PAID_USER_PLAN_REPRESENTATIONS.values()
+
+        # Only offer team plans if the number of activated users is at most TEAM_PLAN_MAX_USERS
+        if (
+            self.plan_activated_users is None
+            or len(self.plan_activated_users) <= TEAM_PLAN_MAX_USERS
+        ):
+            available_plans += TEAM_PLAN_REPRESENTATIONS.values()
+
+        return available_plans
+
+    def _start_trial_helper(
+        self, current_owner: Owner, end_date: datetime = None
+    ) -> None:
+        start_date = datetime.utcnow()
+        self.current_org.trial_start_date = start_date
+        if end_date is None:
+            self.current_org.trial_end_date = start_date + timedelta(
+                days=TrialDaysAmount.CODECOV_SENTRY.value
+            )
+        else:
+            self.current_org.trial_end_date = end_date
+        self.current_org.trial_status = TrialStatus.ONGOING.value
+        self.current_org.plan = PlanName.TRIAL_PLAN_NAME.value
+        self.current_org.pretrial_users_count = self.current_org.plan_user_count
+        self.current_org.plan_user_count = TRIAL_PLAN_SEATS
+        self.current_org.plan_auto_activate = True
+        self.current_org.trial_fired_by = current_owner.ownerid
+        self.current_org.save()
+
+    # Trial Data
+    def start_trial(self, current_owner: Owner) -> None:
+        """
+        Method that starts a trial on an organization, but only if a trial has not
+        been started yet (trial_status is "not_started") and the organization is on
+        a free or basic plan.
+
+        Returns:
+            No value
+
+        Raises:
+            ValidationError: if a trial has already started or the organization is on a paid plan
+        """
+        if self.trial_status != TrialStatus.NOT_STARTED.value:
+            raise ValidationError("Cannot start an existing trial")
+        if self.plan_name not in FREE_PLAN_REPRESENTATIONS:
+            raise ValidationError("Cannot trial from a paid plan")
+
+        self._start_trial_helper(current_owner)
+
+    def start_trial_manually(self, current_owner: Owner, end_date: datetime) -> None:
+        """
+        Method that starts a trial immediately, ending at a predefined date, for an organization.
+        Used by administrators to manually start and extend trials.
+
+        Returns:
+            No value
+
+        Raises:
+            ValidationError: if the organization is on a paid plan
+        """
+        if self.plan_name not in FREE_PLAN_REPRESENTATIONS:
+            raise ValidationError("Cannot trial from a paid plan")
+
+        self._start_trial_helper(current_owner, end_date)
+
+    def cancel_trial(self) -> None:
+        if not self.is_org_trialing:
+            raise ValidationError("Cannot cancel a trial that is not ongoing")
+        now = datetime.utcnow()
+        self.current_org.trial_status = TrialStatus.EXPIRED.value
+        self.current_org.trial_end_date = now
+        self.set_default_plan_data()
+
+    def expire_trial_when_upgrading(self) -> None:
+        """
+        Method that expires the trial on an organization based on its current trial status.
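+
+        Example (an illustrative sketch; `org` is a hypothetical Owner with an
+        ongoing trial that is purchasing a paid plan):
+
+            plan_service = PlanService(current_org=org)
+            plan_service.expire_trial_when_upgrading()
+            # org.trial_status is now "expired", plan_activated_users is cleared,
+            # and plan_user_count is restored from pretrial_users_count (or 1).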
+ + + Returns: + No value + """ + if self.trial_status == TrialStatus.EXPIRED.value: + return + if self.trial_status != TrialStatus.CANNOT_TRIAL.value: + # Not adjusting the trial start/end dates here as some customers can + # directly purchase a plan without trialing first + self.current_org.trial_status = TrialStatus.EXPIRED.value + self.current_org.plan_activated_users = None + self.current_org.plan_user_count = ( + self.current_org.pretrial_users_count or 1 + ) + self.current_org.trial_end_date = datetime.utcnow() + + self.current_org.save() + + @property + def trial_status(self) -> TrialStatus: + return self.current_org.trial_status + + @property + def trial_start_date(self) -> Optional[datetime]: + return self.current_org.trial_start_date + + @property + def trial_end_date(self) -> Optional[datetime]: + return self.current_org.trial_end_date + + @property + def trial_total_days(self) -> Optional[int]: + return self.plan_data.trial_days + + @property + def is_org_trialing(self) -> bool: + return ( + self.trial_status == TrialStatus.ONGOING.value + and self.plan_name == PlanName.TRIAL_PLAN_NAME.value + ) + + @property + def has_trial_dates(self) -> bool: + return bool(self.trial_start_date and self.trial_end_date) diff --git a/shared/plan/test_plan.py b/shared/plan/test_plan.py new file mode 100644 index 000000000..4bef3d5d6 --- /dev/null +++ b/shared/plan/test_plan.py @@ -0,0 +1,788 @@ +from datetime import datetime, timedelta +from unittest.mock import patch + +from django.test import TestCase +from freezegun import freeze_time + +from codecov.commands.exceptions import ValidationError +from codecov_auth.tests.factories import OwnerFactory +from plan.constants import ( + BASIC_PLAN, + FREE_PLAN, + FREE_PLAN_REPRESENTATIONS, + PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS, + SENTRY_PAID_USER_PLAN_REPRESENTATIONS, + TEAM_PLAN_REPRESENTATIONS, + TRIAL_PLAN_REPRESENTATION, + TRIAL_PLAN_SEATS, + PlanName, + TrialDaysAmount, + TrialStatus, +) +from plan.service import PlanService + + +@freeze_time("2023-06-19") +class PlanServiceTests(TestCase): + def test_plan_service_trial_status_not_started(self): + current_org = OwnerFactory(plan=PlanName.BASIC_PLAN_NAME.value) + plan_service = PlanService(current_org=current_org) + + assert plan_service.trial_status == TrialStatus.NOT_STARTED.value + + def test_plan_service_trial_status_expired(self): + trial_start_date = datetime.utcnow() + trial_end_date_expired = trial_start_date - timedelta(days=1) + current_org = OwnerFactory( + plan=PlanName.BASIC_PLAN_NAME.value, + trial_start_date=trial_start_date, + trial_end_date=trial_end_date_expired, + trial_status=TrialStatus.EXPIRED.value, + ) + plan_service = PlanService(current_org=current_org) + + assert plan_service.trial_status == TrialStatus.EXPIRED.value + + def test_plan_service_trial_status_ongoing(self): + trial_start_date = datetime.utcnow() + trial_end_date_ongoing = trial_start_date + timedelta(days=5) + current_org = OwnerFactory( + plan=PlanName.TRIAL_PLAN_NAME.value, + trial_start_date=trial_start_date, + trial_end_date=trial_end_date_ongoing, + trial_status=TrialStatus.ONGOING.value, + ) + plan_service = PlanService(current_org=current_org) + + assert plan_service.trial_status == TrialStatus.ONGOING.value + assert plan_service.is_org_trialing == True + + def test_plan_service_expire_trial_when_upgrading_successful_if_trial_is_not_started( + self, + ): + current_org_with_ongoing_trial = OwnerFactory( + plan=PlanName.BASIC_PLAN_NAME.value, + trial_start_date=None, + trial_end_date=None, + 
trial_status=TrialStatus.NOT_STARTED.value, + ) + plan_service = PlanService(current_org=current_org_with_ongoing_trial) + plan_service.expire_trial_when_upgrading() + assert current_org_with_ongoing_trial.trial_status == TrialStatus.EXPIRED.value + assert current_org_with_ongoing_trial.plan_activated_users == None + assert current_org_with_ongoing_trial.plan_user_count == 1 + assert current_org_with_ongoing_trial.trial_end_date == datetime.utcnow() + + def test_plan_service_expire_trial_when_upgrading_successful_if_trial_is_ongoing( + self, + ): + trial_start_date = datetime.utcnow() + trial_end_date_ongoing = trial_start_date + timedelta(days=5) + current_org_with_ongoing_trial = OwnerFactory( + plan=PlanName.BASIC_PLAN_NAME.value, + trial_start_date=trial_start_date, + trial_end_date=trial_end_date_ongoing, + trial_status=TrialStatus.ONGOING.value, + ) + plan_service = PlanService(current_org=current_org_with_ongoing_trial) + plan_service.expire_trial_when_upgrading() + assert current_org_with_ongoing_trial.trial_status == TrialStatus.EXPIRED.value + assert current_org_with_ongoing_trial.plan_activated_users == None + assert current_org_with_ongoing_trial.plan_user_count == 1 + assert current_org_with_ongoing_trial.trial_end_date == datetime.utcnow() + + def test_plan_service_expire_trial_users_pretrial_users_count_if_existing( + self, + ): + trial_start_date = datetime.utcnow() + trial_end_date_ongoing = trial_start_date + timedelta(days=5) + pretrial_users_count = 5 + current_org_with_ongoing_trial = OwnerFactory( + plan=PlanName.BASIC_PLAN_NAME.value, + trial_start_date=trial_start_date, + trial_end_date=trial_end_date_ongoing, + trial_status=TrialStatus.ONGOING.value, + pretrial_users_count=pretrial_users_count, + ) + plan_service = PlanService(current_org=current_org_with_ongoing_trial) + plan_service.expire_trial_when_upgrading() + assert current_org_with_ongoing_trial.trial_status == TrialStatus.EXPIRED.value + assert current_org_with_ongoing_trial.plan_activated_users == None + assert current_org_with_ongoing_trial.plan_user_count == pretrial_users_count + assert current_org_with_ongoing_trial.trial_end_date == datetime.utcnow() + + def test_plan_service_start_trial_errors_if_status_is_ongoing(self): + trial_start_date = datetime.utcnow() + trial_end_date = trial_start_date + timedelta( + days=TrialDaysAmount.CODECOV_SENTRY.value + ) + current_org = OwnerFactory( + plan=PlanName.BASIC_PLAN_NAME.value, + trial_start_date=trial_start_date, + trial_end_date=trial_end_date, + trial_status=TrialStatus.ONGOING.value, + ) + plan_service = PlanService(current_org=current_org) + current_owner = OwnerFactory() + + with self.assertRaises(ValidationError) as e: + plan_service.start_trial(current_owner=current_owner) + + def test_plan_service_start_trial_errors_if_status_is_expired(self): + trial_start_date = datetime.utcnow() + trial_end_date = trial_start_date + timedelta(days=-1) + current_org = OwnerFactory( + plan=PlanName.BASIC_PLAN_NAME.value, + trial_start_date=trial_start_date, + trial_end_date=trial_end_date, + trial_status=TrialStatus.EXPIRED.value, + ) + plan_service = PlanService(current_org=current_org) + current_owner = OwnerFactory() + + with self.assertRaises(ValidationError) as e: + plan_service.start_trial(current_owner=current_owner) + + def test_plan_service_start_trial_errors_if_status_is_cannot_trial(self): + current_org = OwnerFactory( + plan=PlanName.BASIC_PLAN_NAME.value, + trial_start_date=None, + trial_end_date=None, + trial_status=TrialStatus.CANNOT_TRIAL.value, 
+ ) + plan_service = PlanService(current_org=current_org) + current_owner = OwnerFactory() + + with self.assertRaises(ValidationError) as e: + plan_service.start_trial(current_owner=current_owner) + + def test_plan_service_start_trial_errors_owners_plan_is_not_a_free_plan(self): + current_org = OwnerFactory( + plan=PlanName.CODECOV_PRO_MONTHLY.value, + trial_start_date=None, + trial_end_date=None, + trial_status=TrialStatus.CANNOT_TRIAL.value, + ) + plan_service = PlanService(current_org=current_org) + current_owner = OwnerFactory() + + with self.assertRaises(ValidationError) as e: + plan_service.start_trial(current_owner=current_owner) + + def test_plan_service_start_trial_succeeds_if_trial_has_not_started(self): + trial_start_date = None + trial_end_date = None + plan_user_count = 5 + current_org = OwnerFactory( + plan=PlanName.BASIC_PLAN_NAME.value, + trial_start_date=trial_start_date, + trial_end_date=trial_end_date, + trial_status=TrialStatus.NOT_STARTED.value, + plan_user_count=plan_user_count, + ) + plan_service = PlanService(current_org=current_org) + current_owner = OwnerFactory() + + plan_service.start_trial(current_owner=current_owner) + assert current_org.trial_start_date == datetime.utcnow() + assert current_org.trial_end_date == datetime.utcnow() + timedelta( + days=TrialDaysAmount.CODECOV_SENTRY.value + ) + assert current_org.trial_status == TrialStatus.ONGOING.value + assert current_org.plan == PlanName.TRIAL_PLAN_NAME.value + assert current_org.pretrial_users_count == plan_user_count + assert current_org.plan_user_count == TRIAL_PLAN_SEATS + assert current_org.plan_auto_activate == True + assert current_org.trial_fired_by == current_owner.ownerid + + def test_plan_service_start_trial_manually(self): + trial_start_date = None + trial_end_date = None + plan_user_count = 5 + current_org = OwnerFactory( + plan=PlanName.BASIC_PLAN_NAME.value, + trial_start_date=trial_start_date, + trial_end_date=trial_end_date, + trial_status=TrialStatus.NOT_STARTED.value, + plan_user_count=plan_user_count, + ) + plan_service = PlanService(current_org=current_org) + current_owner = OwnerFactory() + + plan_service.start_trial_manually( + current_owner=current_owner, end_date="2024-01-01 00:00:00" + ) + assert current_org.trial_start_date == datetime.utcnow() + assert current_org.trial_end_date == "2024-01-01 00:00:00" + assert current_org.trial_status == TrialStatus.ONGOING.value + assert current_org.plan == PlanName.TRIAL_PLAN_NAME.value + assert current_org.pretrial_users_count == plan_user_count + assert current_org.plan_user_count == TRIAL_PLAN_SEATS + assert current_org.plan_auto_activate == True + assert current_org.trial_fired_by == current_owner.ownerid + + def test_plan_service_start_trial_manually_already_on_paid_plan(self): + current_org = OwnerFactory( + plan=PlanName.CODECOV_PRO_MONTHLY.value, + trial_start_date=None, + trial_end_date=None, + trial_status=TrialStatus.NOT_STARTED.value, + ) + plan_service = PlanService(current_org=current_org) + current_owner = OwnerFactory() + + with self.assertRaises(ValidationError) as e: + plan_service.start_trial_manually( + current_owner=current_owner, end_date="2024-01-01 00:00:00" + ) + + def test_plan_service_returns_plan_data_for_non_trial_basic_plan(self): + trial_start_date = None + trial_end_date = None + current_org = OwnerFactory( + plan=PlanName.BASIC_PLAN_NAME.value, + trial_start_date=trial_start_date, + trial_end_date=trial_end_date, + ) + plan_service = PlanService(current_org=current_org) + + basic_plan = 
FREE_PLAN_REPRESENTATIONS[PlanName.BASIC_PLAN_NAME.value] + assert plan_service.current_org == current_org + assert plan_service.trial_status == TrialStatus.NOT_STARTED.value + assert plan_service.marketing_name == basic_plan.marketing_name + assert plan_service.plan_name == basic_plan.value + assert plan_service.tier_name == basic_plan.tier_name + assert plan_service.billing_rate == basic_plan.billing_rate + assert plan_service.base_unit_price == basic_plan.base_unit_price + assert plan_service.benefits == basic_plan.benefits + assert ( + plan_service.monthly_uploads_limit == basic_plan.monthly_uploads_limit + ) # should be 250 + assert ( + plan_service.monthly_uploads_limit == 250 + ) # should be 250 since not trialing + assert plan_service.trial_total_days == basic_plan.trial_days + + def test_plan_service_returns_plan_data_for_trialing_user_trial_plan(self): + trial_start_date = datetime.utcnow() + trial_end_date = datetime.utcnow() + timedelta( + days=TrialDaysAmount.CODECOV_SENTRY.value + ) + current_org = OwnerFactory( + plan=PlanName.TRIAL_PLAN_NAME.value, + trial_start_date=trial_start_date, + trial_end_date=trial_end_date, + trial_status=TrialStatus.ONGOING.value, + ) + plan_service = PlanService(current_org=current_org) + + trial_plan = TRIAL_PLAN_REPRESENTATION[PlanName.TRIAL_PLAN_NAME.value] + assert plan_service.trial_status == TrialStatus.ONGOING.value + assert plan_service.marketing_name == trial_plan.marketing_name + assert plan_service.plan_name == trial_plan.value + assert plan_service.tier_name == trial_plan.tier_name + assert plan_service.billing_rate == trial_plan.billing_rate + assert plan_service.base_unit_price == trial_plan.base_unit_price + assert plan_service.benefits == trial_plan.benefits + assert plan_service.monthly_uploads_limit == None # Not 250 since it's trialing + assert plan_service.trial_total_days == trial_plan.trial_days + + def test_plan_service_sets_default_plan_data_values_correctly(self): + current_org = OwnerFactory( + plan=PlanName.CODECOV_PRO_MONTHLY.value, + stripe_subscription_id="test-sub-123", + plan_user_count=20, + plan_activated_users=[44], + plan_auto_activate=False, + ) + current_org.save() + + plan_service = PlanService(current_org=current_org) + plan_service.set_default_plan_data() + + assert current_org.plan == PlanName.BASIC_PLAN_NAME.value + assert current_org.plan_user_count == 1 + assert current_org.plan_activated_users == None + assert current_org.stripe_subscription_id == None + + def test_plan_service_returns_if_owner_has_trial_dates(self): + current_org = OwnerFactory( + plan=PlanName.CODECOV_PRO_MONTHLY.value, + trial_start_date=datetime.utcnow(), + trial_end_date=datetime.utcnow() + timedelta(days=14), + ) + current_org.save() + + plan_service = PlanService(current_org=current_org) + + assert plan_service.has_trial_dates == True + + +class AvailablePlansBeforeTrial(TestCase): + """ + - users-basic, no trial -> users-pr-inappm/y, users-basic + - users-free, no trial -> users-pr-inappm/y, users-basic, users-free + - users-teamm/y, no trial -> users-pr-inappm/y, users-basic, users-teamm/y + - users-pr-inappm/y, no trial -> users-pr-inappm/y, users-basic + - sentry customer, users-basic, no trial -> users-pr-inappm/y, users-sentrym/y, users-basic + - sentry customer, users-teamm/y, no trial -> users-pr-inappm/y, users-sentrym/y, users-basic, users-teamm/y + - sentry customer, users-sentrym/y, no trial -> users-pr-inappm/y, users-sentrym/y, users-basic + """ + + def setUp(self): + self.current_org = OwnerFactory( + 
trial_start_date=None, + trial_end_date=None, + trial_status=TrialStatus.NOT_STARTED.value, + ) + self.owner = OwnerFactory() + + def test_available_plans_for_basic_plan_non_trial( + self, + ): + self.current_org.plan = PlanName.BASIC_PLAN_NAME.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + def test_available_plans_for_free_plan_non_trial( + self, + ): + self.current_org.plan = PlanName.FREE_PLAN_NAME.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result.append(FREE_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + def test_available_plans_for_team_plan_non_trial( + self, + ): + self.current_org.plan = PlanName.TEAM_MONTHLY.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + def test_available_plans_for_pro_plan_non_trial(self): + self.current_org.plan = PlanName.CODECOV_PRO_MONTHLY.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + @patch("services.sentry.is_sentry_user") + def test_available_plans_for_sentry_customer_basic_plan_non_trial( + self, is_sentry_user + ): + is_sentry_user.return_value = True + self.current_org.plan = PlanName.BASIC_PLAN_NAME.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += SENTRY_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + @patch("services.sentry.is_sentry_user") + def test_available_plans_for_sentry_customer_team_plan_non_trial( + self, is_sentry_user + ): + is_sentry_user.return_value = True + self.current_org.plan = PlanName.TEAM_MONTHLY.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += SENTRY_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + @patch("services.sentry.is_sentry_user") + def test_available_plans_for_sentry_plan_non_trial(self, is_sentry_user): + is_sentry_user.return_value = True + self.current_org.plan 
= PlanName.SENTRY_MONTHLY.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += SENTRY_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + +@freeze_time("2023-06-19") +class AvailablePlansExpiredTrialLessThanTenUsers(TestCase): + """ + - users-basic, has trialed, less than 10 users -> users-pr-inappm/y, users-basic, users-teamm/y + - users-teamm/y, has trialed, less than 10 users -> users-pr-inappm/y, users-basic, users-teamm/y + - users-pr-inappm/y, has trialed, less than 10 users -> users-pr-inappm/y, users-basic, users-teamm/y + - sentry customer, users-basic, has trialed, less than 10 users -> users-pr-inappm/y, users-sentrym/y, users-basic, users-teamm/y + - sentry customer, users-teamm/y, has trialed, less than 10 users -> users-pr-inappm/y, users-sentrym/y, users-basic, users-teamm/y + - sentry customer, users-sentrym/y, has trialed, less than 10 users -> users-pr-inappm/y, users-sentrym/y, users-basic, users-teamm/y + """ + + def setUp(self): + self.current_org = OwnerFactory( + trial_start_date=datetime.utcnow() + timedelta(days=-10), + trial_end_date=datetime.utcnow() + timedelta(days=-3), + trial_status=TrialStatus.EXPIRED.value, + plan_user_count=3, + ) + self.owner = OwnerFactory() + + def test_available_plans_for_basic_plan_expired_trial_less_than_10_users( + self, + ): + self.current_org.plan = PlanName.BASIC_PLAN_NAME.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + def test_available_plans_for_team_plan_expired_trial_less_than_10_users( + self, + ): + self.current_org.plan = PlanName.TEAM_MONTHLY.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + def test_available_plans_for_pro_plan_expired_trial_less_than_10_users(self): + self.current_org.plan = PlanName.CODECOV_PRO_MONTHLY.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + @patch("services.sentry.is_sentry_user") + def test_available_plans_for_sentry_customer_basic_plan_expired_trial_less_than_10_users( + self, is_sentry_user + ): + is_sentry_user.return_value = True + self.current_org.plan = PlanName.BASIC_PLAN_NAME.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += 
SENTRY_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + @patch("services.sentry.is_sentry_user") + def test_available_plans_for_sentry_customer_team_plan_expired_trial_less_than_10_users( + self, is_sentry_user + ): + is_sentry_user.return_value = True + self.current_org.plan = PlanName.TEAM_MONTHLY.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += SENTRY_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + @patch("services.sentry.is_sentry_user") + def test_available_plans_for_sentry_plan_expired_trial_less_than_10_users( + self, is_sentry_user + ): + is_sentry_user.return_value = True + self.current_org.plan = PlanName.SENTRY_MONTHLY.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += SENTRY_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + +@freeze_time("2023-06-19") +class AvailablePlansExpiredTrialMoreThanTenActivatedUsers(TestCase): + """ + - users-pr-inappm/y, has trialed, more than 10 activated users -> users-pr-inappm/y, users-basic + - sentry customer, users-basic, has trialed, more than 10 activated users -> users-pr-inappm/y, users-sentrym/y, users-basic + - sentry customer, users-sentrym/y, has trialed, more than 10 activated users -> users-pr-inappm/y, users-sentrym/y, users-basic + """ + + def setUp(self): + self.current_org = OwnerFactory( + trial_start_date=datetime.utcnow() + timedelta(days=-10), + trial_end_date=datetime.utcnow() + timedelta(days=-3), + trial_status=TrialStatus.EXPIRED.value, + plan_user_count=1, + plan_activated_users=[i for i in range(13)], + ) + self.owner = OwnerFactory() + + def test_available_plans_for_pro_plan_expired_trial_more_than_10_users(self): + self.current_org.plan = PlanName.CODECOV_PRO_MONTHLY.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + @patch("services.sentry.is_sentry_user") + def test_available_plans_for_sentry_customer_basic_plan_expired_trial_more_than_10_users( + self, is_sentry_user + ): + is_sentry_user.return_value = True + self.current_org.plan = PlanName.BASIC_PLAN_NAME.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += SENTRY_PAID_USER_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + @patch("services.sentry.is_sentry_user") + def test_available_plans_for_sentry_plan_expired_trial_more_than_10_users( + self, is_sentry_user + ): + is_sentry_user.return_value = 
True + self.current_org.plan = PlanName.SENTRY_MONTHLY.value + self.current_org.save() + + plan_service = PlanService(current_org=self.current_org) + + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += SENTRY_PAID_USER_PLAN_REPRESENTATIONS.values() + + assert plan_service.available_plans(owner=self.owner) == expected_result + + +@freeze_time("2023-06-19") +class AvailablePlansExpiredTrialMoreThanTenSeatsLessThanTenActivatedUsers(TestCase): + """ + Tests that what matters for Team plan is activated users not the total seat count + """ + + def setUp(self): + self.expected_result = [] + self.expected_result.append(BASIC_PLAN) + self.expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + self.expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + def test_currently_team_plan(self): + self.current_org = OwnerFactory( + plan_user_count=100, + plan_activated_users=[i for i in range(10)], + plan=PlanName.TEAM_MONTHLY.value, + ) + self.owner = OwnerFactory() + self.plan_service = PlanService(current_org=self.current_org) + + assert ( + self.plan_service.available_plans(owner=self.owner) == self.expected_result + ) + + def test_trial_expired(self): + self.current_org = OwnerFactory( + plan_user_count=100, + plan_activated_users=[i for i in range(10)], + trial_status=TrialStatus.EXPIRED.value, + trial_start_date=datetime.utcnow() + timedelta(days=-10), + trial_end_date=datetime.utcnow() + timedelta(days=-3), + ) + self.owner = OwnerFactory() + self.plan_service = PlanService(current_org=self.current_org) + + assert ( + self.plan_service.available_plans(owner=self.owner) == self.expected_result + ) + + def test_trial_ongoing(self): + self.current_org = OwnerFactory( + plan_user_count=100, + plan_activated_users=[i for i in range(10)], + trial_status=TrialStatus.ONGOING.value, + trial_start_date=datetime.utcnow() + timedelta(days=-10), + trial_end_date=datetime.utcnow() + timedelta(days=3), + ) + self.owner = OwnerFactory() + self.plan_service = PlanService(current_org=self.current_org) + + assert ( + self.plan_service.available_plans(owner=self.owner) == self.expected_result + ) + + def test_trial_not_started(self): + self.current_org = OwnerFactory( + plan_user_count=100, + plan_activated_users=[i for i in range(10)], + trial_status=TrialStatus.NOT_STARTED.value, + ) + self.owner = OwnerFactory() + self.plan_service = PlanService(current_org=self.current_org) + + self.expected_result = [] + self.expected_result.append(BASIC_PLAN) + self.expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + self.expected_result += TEAM_PLAN_REPRESENTATIONS.values() + assert ( + self.plan_service.available_plans(owner=self.owner) == self.expected_result + ) + + +@freeze_time("2023-06-19") +class AvailablePlansOngoingTrial(TestCase): + """ + Non Sentry User is trialing + when <=10 activated seats -> users-pr-inappm/y, users-basic, users-teamm/y + when > 10 activated seats -> users-pr-inappm/y, users-basic + Sentry User is trialing + when <=10 activated seats -> users-pr-inappm/y, users-sentrym/y, users-basic, users-teamm/y + when > 10 activated seats -> users-pr-inappm/y, users-sentrym/y, users-basic + """ + + def setUp(self): + self.current_org = OwnerFactory( + plan=PlanName.TRIAL_PLAN_NAME.value, + trial_start_date=datetime.utcnow(), + trial_end_date=datetime.utcnow() + timedelta(days=14), + trial_status=TrialStatus.ONGOING.value, + plan_user_count=1000, + plan_activated_users=None, 
+ ) + self.owner = OwnerFactory() + self.plan_service = PlanService(current_org=self.current_org) + + def test_non_sentry_user(self): + # [Basic, Pro Monthly, Pro Yearly, Team Monthly, Team Yearly] + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + # Can do Team plan when plan_activated_users is null + assert self.plan_service.available_plans(owner=self.owner) == expected_result + + self.current_org.plan_activated_users = [i for i in range(10)] + self.current_org.save() + + # Can do Team plan when at 10 activated users + assert self.plan_service.available_plans(owner=self.owner) == expected_result + + self.current_org.plan_activated_users = [i for i in range(11)] + self.current_org.save() + + # [Basic, Pro Monthly, Pro Yearly, Team Monthly, Team Yearly] + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + + # Can not do Team plan when at 11 activated users + assert self.plan_service.available_plans(owner=self.owner) == expected_result + + @patch("services.sentry.is_sentry_user") + def test_sentry_user(self, is_sentry_user): + is_sentry_user.return_value = True + + # [Basic, Pro Monthly, Pro Yearly, Sentry Monthly, Sentry Yearly, Team Monthly, Team Yearly] + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += SENTRY_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += TEAM_PLAN_REPRESENTATIONS.values() + + # Can do Team plan when plan_activated_users is null + assert self.plan_service.available_plans(owner=self.owner) == expected_result + + self.current_org.plan_activated_users = [i for i in range(10)] + self.current_org.save() + + # Can do Team plan when at 10 activated users + assert self.plan_service.available_plans(owner=self.owner) == expected_result + + self.current_org.plan_activated_users = [i for i in range(11)] + self.current_org.save() + + # [Basic, Pro Monthly, Pro Yearly, Sentry Monthly, Sentry Yearly] + expected_result = [] + expected_result.append(BASIC_PLAN) + expected_result += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() + expected_result += SENTRY_PAID_USER_PLAN_REPRESENTATIONS.values() + + # Can not do Team plan when at 11 activated users + assert self.plan_service.available_plans(owner=self.owner) == expected_result From ae2b1927857a7831f241a7c4d3289b787ffcfa34 Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 29 Feb 2024 16:12:57 -0800 Subject: [PATCH 02/36] feat: migrated core + codeocv_auth --- shared/django_apps/codecov_auth/models.py | 7 +- shared/django_apps/core/encoders.py | 10 + .../core/migrations/0001_initial.py | 335 ++++++++++++++++++ .../migrations/0002_auto_20210517_1223.py | 18 + .../migrations/0003_auto_20210520_0841.py | 44 +++ .../0004_pull_user_provided_base_sha.py | 16 + .../migrations/0005_auto_20210916_0313.py | 20 ++ .../core/migrations/0006_version_v4_6_2.py | 15 + .../core/migrations/0007_version_v4_6_3.py | 15 + .../core/migrations/0008_version_v4_6_4.py | 15 + .../core/migrations/0009_version_v4_6_5.py | 15 + .../core/migrations/0010_add_new_langs.py | 84 +++++ .../migrations/0011_add_decoration_type.py | 28 ++ .../migrations/0012_auto_20220511_1732.py | 37 ++ ...0013_repository_repos_service_id_author.py | 20 ++ .../0014_pull_pulls_author_updatestamp.py | 28 ++ .../core/migrations/0015_commiterror.py | 38 ++ 
.../core/migrations/0016_version_v4_6_6.py | 15 + ...0017_branch_branches_repoid_updatestamp.py | 29 ++ .../0018_commit_all_commits_on_pull.py | 28 ++ ...9_commit_commits_repoid_branch_state_ts.py | 29 ++ ..._commits_repoid_commitid_short_and_more.py | 60 ++++ ...21_pull_behind_by_pull_behind_by_commit.py | 23 ++ .../0022_pull_pulls_repoid_pullid_ts.py | 29 ++ ...lter_commitnotification_decoration_type.py | 46 +++ ...stamp_alter_commit_updatestamp_and_more.py | 49 +++ .../core/migrations/0025_v5_0_1.py | 17 + .../migrations/0026_auto_20230605_1134.py | 18 + ...t_rename_report_commit__report_and_more.py | 47 +++ .../0028_repository_webhook_secret.py | 27 ++ .../0029_constants_delete_version.py | 22 ++ .../migrations/0030_auto_20230727_1539.py | 21 ++ .../migrations/0031_auto_20230731_1627.py | 18 + .../migrations/0032_auto_20230731_1641.py | 38 ++ ...flare_rename_flare_pull__flare_and_more.py | 43 +++ .../0034_remove_repository_cache.py | 25 ++ .../migrations/0035_auto_20230907_2123.py | 17 + .../migrations/0036_auto_20231003_1342.py | 19 + ...lter_commitnotification_decoration_type.py | 31 ++ .../core/migrations/0038_increment_version.py | 19 + .../migrations/0039_pull_pulls_repoid_id.py | 26 ++ .../core/migrations/0040_increment_version.py | 19 + .../0041_pull_bundle_analysis_commentid.py | 27 ++ .../migrations/0042_repository_languages.py | 31 ++ ...0043_repository_bundle_analysis_enabled.py | 28 ++ ...sitory_bundle_analysis_enabled_and_more.py | 43 +++ .../0045_repository_languages_last_updated.py | 29 ++ .../0046_repository_coverage_enabled.py | 33 ++ .../core/migrations/0047_increment_version.py | 18 + .../django_apps/core/migrations/__init__.py | 0 shared/django_apps/core/models.py | 13 +- shared/django_apps/dummy_settings.py | 4 +- 52 files changed, 1674 insertions(+), 12 deletions(-) create mode 100644 shared/django_apps/core/encoders.py create mode 100644 shared/django_apps/core/migrations/0001_initial.py create mode 100644 shared/django_apps/core/migrations/0002_auto_20210517_1223.py create mode 100644 shared/django_apps/core/migrations/0003_auto_20210520_0841.py create mode 100644 shared/django_apps/core/migrations/0004_pull_user_provided_base_sha.py create mode 100644 shared/django_apps/core/migrations/0005_auto_20210916_0313.py create mode 100644 shared/django_apps/core/migrations/0006_version_v4_6_2.py create mode 100644 shared/django_apps/core/migrations/0007_version_v4_6_3.py create mode 100644 shared/django_apps/core/migrations/0008_version_v4_6_4.py create mode 100644 shared/django_apps/core/migrations/0009_version_v4_6_5.py create mode 100644 shared/django_apps/core/migrations/0010_add_new_langs.py create mode 100644 shared/django_apps/core/migrations/0011_add_decoration_type.py create mode 100644 shared/django_apps/core/migrations/0012_auto_20220511_1732.py create mode 100644 shared/django_apps/core/migrations/0013_repository_repos_service_id_author.py create mode 100644 shared/django_apps/core/migrations/0014_pull_pulls_author_updatestamp.py create mode 100644 shared/django_apps/core/migrations/0015_commiterror.py create mode 100644 shared/django_apps/core/migrations/0016_version_v4_6_6.py create mode 100644 shared/django_apps/core/migrations/0017_branch_branches_repoid_updatestamp.py create mode 100644 shared/django_apps/core/migrations/0018_commit_all_commits_on_pull.py create mode 100644 shared/django_apps/core/migrations/0019_commit_commits_repoid_branch_state_ts.py create mode 100644 
shared/django_apps/core/migrations/0020_commit_commits_repoid_commitid_short_and_more.py create mode 100644 shared/django_apps/core/migrations/0021_pull_behind_by_pull_behind_by_commit.py create mode 100644 shared/django_apps/core/migrations/0022_pull_pulls_repoid_pullid_ts.py create mode 100644 shared/django_apps/core/migrations/0023_alter_commitnotification_decoration_type.py create mode 100644 shared/django_apps/core/migrations/0024_alter_commit_timestamp_alter_commit_updatestamp_and_more.py create mode 100644 shared/django_apps/core/migrations/0025_v5_0_1.py create mode 100644 shared/django_apps/core/migrations/0026_auto_20230605_1134.py create mode 100644 shared/django_apps/core/migrations/0027_alter_commit_report_rename_report_commit__report_and_more.py create mode 100644 shared/django_apps/core/migrations/0028_repository_webhook_secret.py create mode 100644 shared/django_apps/core/migrations/0029_constants_delete_version.py create mode 100644 shared/django_apps/core/migrations/0030_auto_20230727_1539.py create mode 100644 shared/django_apps/core/migrations/0031_auto_20230731_1627.py create mode 100644 shared/django_apps/core/migrations/0032_auto_20230731_1641.py create mode 100644 shared/django_apps/core/migrations/0033_alter_pull_flare_rename_flare_pull__flare_and_more.py create mode 100644 shared/django_apps/core/migrations/0034_remove_repository_cache.py create mode 100644 shared/django_apps/core/migrations/0035_auto_20230907_2123.py create mode 100644 shared/django_apps/core/migrations/0036_auto_20231003_1342.py create mode 100644 shared/django_apps/core/migrations/0037_alter_commitnotification_decoration_type.py create mode 100644 shared/django_apps/core/migrations/0038_increment_version.py create mode 100644 shared/django_apps/core/migrations/0039_pull_pulls_repoid_id.py create mode 100644 shared/django_apps/core/migrations/0040_increment_version.py create mode 100644 shared/django_apps/core/migrations/0041_pull_bundle_analysis_commentid.py create mode 100644 shared/django_apps/core/migrations/0042_repository_languages.py create mode 100644 shared/django_apps/core/migrations/0043_repository_bundle_analysis_enabled.py create mode 100644 shared/django_apps/core/migrations/0044_alter_repository_bundle_analysis_enabled_and_more.py create mode 100644 shared/django_apps/core/migrations/0045_repository_languages_last_updated.py create mode 100644 shared/django_apps/core/migrations/0046_repository_coverage_enabled.py create mode 100644 shared/django_apps/core/migrations/0047_increment_version.py create mode 100644 shared/django_apps/core/migrations/__init__.py diff --git a/shared/django_apps/codecov_auth/models.py b/shared/django_apps/codecov_auth/models.py index c8a7684e9..feb86be62 100644 --- a/shared/django_apps/codecov_auth/models.py +++ b/shared/django_apps/codecov_auth/models.py @@ -9,7 +9,7 @@ from django_prometheus.models import ExportModelOperationsMixin from shared.django_apps.codecov.models import BaseCodecovModel -from shared.django_apps.core.models import DateTimeWithoutTZField, Repository +from shared.django_apps.core.models import DateTimeWithoutTZField from shared.plan.constants import PlanName @@ -27,11 +27,6 @@ log = logging.getLogger(__name__) -class DateTimeWithoutTZField(models.DateTimeField): - def db_type(self, connection): - return "timestamp" - - # TODO use this to refactor avatar_url class Service(models.TextChoices): GITHUB = "github" diff --git a/shared/django_apps/core/encoders.py b/shared/django_apps/core/encoders.py new file mode 100644 index 
000000000..2665a6341 --- /dev/null +++ b/shared/django_apps/core/encoders.py @@ -0,0 +1,10 @@ +from dataclasses import astuple, is_dataclass + +from django.core.serializers.json import DjangoJSONEncoder + + +class ReportJSONEncoder(DjangoJSONEncoder): + def default(self, obj): + if is_dataclass(obj): + return astuple(obj) + return super().default(self, obj) diff --git a/shared/django_apps/core/migrations/0001_initial.py b/shared/django_apps/core/migrations/0001_initial.py new file mode 100644 index 000000000..7fa63c505 --- /dev/null +++ b/shared/django_apps/core/migrations/0001_initial.py @@ -0,0 +1,335 @@ +# Generated by Django 3.1.6 on 2021-04-08 19:33 + +import datetime +import uuid + +import django.contrib.postgres.fields +import django.contrib.postgres.fields.citext +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + +from shared.django_apps.core.encoders import ReportJSONEncoder +from shared.django_apps.core.models import DateTimeWithoutTZField, _gen_image_token + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)] + + operations = [ + migrations.CreateModel( + name="Commit", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("commitid", models.TextField()), + ( + "timestamp", + DateTimeWithoutTZField(default=datetime.datetime.now), + ), + ( + "updatestamp", + DateTimeWithoutTZField(default=datetime.datetime.now), + ), + ("ci_passed", models.BooleanField(null=True)), + ("totals", models.JSONField(null=True)), + ( + "report", + models.JSONField( + encoder=ReportJSONEncoder, null=True + ), + ), + ("merged", models.BooleanField(null=True)), + ("deleted", models.BooleanField(null=True)), + ("notified", models.BooleanField(null=True)), + ("branch", models.TextField(null=True)), + ("pullid", models.IntegerField(null=True)), + ("message", models.TextField(null=True)), + ("parent_commit_id", models.TextField(db_column="parent", null=True)), + ( + "state", + models.TextField( + choices=[ + ("complete", "Complete"), + ("pending", "Pending"), + ("error", "Error"), + ("skipped", "Skipped"), + ], + null=True, + ), + ), + ( + "author", + models.ForeignKey( + db_column="author", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="codecov_auth.owner", + ), + ), + ], + options={"db_table": "commits"}, + ), + migrations.CreateModel( + name="Version", + fields=[("version", models.TextField(primary_key=True, serialize=False))], + options={"db_table": "version"}, + ), + migrations.CreateModel( + name="Repository", + fields=[ + ("repoid", models.AutoField(primary_key=True, serialize=False)), + ("name", django.contrib.postgres.fields.citext.CITextField()), + ("service_id", models.TextField()), + ("private", models.BooleanField()), + ("updatestamp", models.DateTimeField(auto_now=True)), + ("active", models.BooleanField(null=True)), + ("language", models.TextField(blank=True, null=True)), + ("branch", models.TextField(default="master")), + ("upload_token", models.UUIDField(default=uuid.uuid4, unique=True)), + ("yaml", models.JSONField(null=True)), + ("cache", models.JSONField(null=True)), + ( + "image_token", + models.TextField(default=_gen_image_token, null=True), + ), + ("using_integration", models.BooleanField(null=True)), + ("hookid", models.TextField(null=True)), + ("activated", models.BooleanField(default=False, null=True)), + ("deleted", models.BooleanField(default=False)), + ( + "author", + models.ForeignKey( + 
db_column="ownerid", + on_delete=django.db.models.deletion.CASCADE, + to="codecov_auth.owner", + ), + ), + ( + "bot", + models.ForeignKey( + db_column="bot", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="bot_repos", + to="codecov_auth.owner", + ), + ), + ( + "fork", + models.ForeignKey( + blank=True, + db_column="forkid", + null=True, + on_delete=django.db.models.deletion.DO_NOTHING, + to="core.repository", + ), + ), + ], + options={"db_table": "repos", "ordering": ["-repoid"]}, + ), + migrations.CreateModel( + name="Pull", + fields=[ + ("pullid", models.IntegerField(primary_key=True, serialize=False)), + ("issueid", models.IntegerField(null=True)), + ( + "state", + models.TextField( + choices=[ + ("open", "Open"), + ("merged", "Merged"), + ("closed", "Closed"), + ], + default="open", + ), + ), + ("title", models.TextField(null=True)), + ("base", models.TextField(null=True)), + ("head", models.TextField(null=True)), + ("compared_to", models.TextField(null=True)), + ("commentid", models.TextField(null=True)), + ( + "updatestamp", + DateTimeWithoutTZField(default=datetime.datetime.now), + ), + ("diff", models.JSONField(null=True)), + ("flare", models.JSONField(null=True)), + ( + "author", + models.ForeignKey( + db_column="author", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="codecov_auth.owner", + ), + ), + ( + "repository", + models.ForeignKey( + db_column="repoid", + on_delete=django.db.models.deletion.CASCADE, + related_name="pull_requests", + to="core.repository", + ), + ), + ], + options={"db_table": "pulls", "ordering": ["-pullid"]}, + ), + migrations.CreateModel( + name="CommitNotification", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ( + "notification_type", + models.TextField( + choices=[ + ("comment", "Comment"), + ("gitter", "Gitter"), + ("hipchat", "Hipchat"), + ("irc", "Irc"), + ("slack", "Slack"), + ("status_changes", "Status Changes"), + ("status_patch", "Status Patch"), + ("status_project", "Status Project"), + ("webhook", "Webhook"), + ] + ), + ), + ( + "decoration_type", + models.TextField( + choices=[("standard", "Standard"), ("upgrade", "Upgrade")], + null=True, + ), + ), + ( + "state", + models.TextField( + choices=[ + ("pending", "Pending"), + ("success", "Success"), + ("error", "Error"), + ], + null=True, + ), + ), + ( + "created_at", + DateTimeWithoutTZField(default=datetime.datetime.now), + ), + ( + "updated_at", + DateTimeWithoutTZField(default=datetime.datetime.now), + ), + ( + "commit", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="notifications", + to="core.commit", + ), + ), + ], + options={"db_table": "commit_notifications"}, + ), + migrations.AddField( + model_name="commit", + name="repository", + field=models.ForeignKey( + db_column="repoid", + on_delete=django.db.models.deletion.CASCADE, + related_name="commits", + to="core.repository", + ), + ), + migrations.CreateModel( + name="Branch", + fields=[ + ( + "name", + models.TextField( + db_column="branch", primary_key=True, serialize=False + ), + ), + ( + "authors", + django.contrib.postgres.fields.ArrayField( + base_field=models.IntegerField(blank=True, null=True), + blank=True, + db_column="authors", + null=True, + size=None, + ), + ), + ("head", models.TextField()), + ("base", models.TextField(null=True)), + ("updatestamp", models.DateTimeField(auto_now=True)), + ( + "repository", + models.ForeignKey( + db_column="repoid", + on_delete=django.db.models.deletion.CASCADE, + 
related_name="branches", + to="core.repository", + ), + ), + ], + options={"db_table": "branches"}, + ), + migrations.AddConstraint( + model_name="repository", + constraint=models.UniqueConstraint( + fields=("author", "name"), name="repos_slug" + ), + ), + migrations.AddConstraint( + model_name="repository", + constraint=models.UniqueConstraint( + fields=("author", "service_id"), name="repos_service_ids" + ), + ), + migrations.AddIndex( + model_name="pull", + index=models.Index( + condition=models.Q(state="open"), + fields=["repository"], + name="pulls_repoid_state_open", + ), + ), + migrations.AddConstraint( + model_name="pull", + constraint=models.UniqueConstraint( + fields=("repository", "pullid"), name="pulls_repoid_pullid" + ), + ), + migrations.AddIndex( + model_name="commit", + index=models.Index( + fields=["repository", "-timestamp"], + name="commits_repoid_timestamp_desc", + ), + ), + migrations.AddIndex( + model_name="commit", + index=models.Index( + condition=models.Q(_negated=True, deleted=True), + fields=["repository", "pullid"], + name="commits_on_pull", + ), + ), + migrations.AddConstraint( + model_name="commit", + constraint=models.UniqueConstraint( + fields=("repository", "commitid"), name="commits_repoid_commitid" + ), + ), + migrations.AddConstraint( + model_name="branch", + constraint=models.UniqueConstraint( + fields=("name", "repository"), name="branches_repoid_branch" + ), + ), + ] diff --git a/shared/django_apps/core/migrations/0002_auto_20210517_1223.py b/shared/django_apps/core/migrations/0002_auto_20210517_1223.py new file mode 100644 index 000000000..737981253 --- /dev/null +++ b/shared/django_apps/core/migrations/0002_auto_20210517_1223.py @@ -0,0 +1,18 @@ +# Generated by Django 3.1.6 on 2021-05-17 12:23 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyRunSQL + + +class Migration(migrations.Migration): + + dependencies = [("core", "0001_initial")] + + operations = [ + migrations.AlterField( + model_name="repository", + name="active", + field=models.BooleanField(default=False, null=True), + ), + RiskyRunSQL("UPDATE repos SET active=false WHERE active is null;"), + ] diff --git a/shared/django_apps/core/migrations/0003_auto_20210520_0841.py b/shared/django_apps/core/migrations/0003_auto_20210520_0841.py new file mode 100644 index 000000000..155ecffc4 --- /dev/null +++ b/shared/django_apps/core/migrations/0003_auto_20210520_0841.py @@ -0,0 +1,44 @@ +# Generated by Django 3.1.6 on 2021-05-20 08:41 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyRunSQL + + +class Migration(migrations.Migration): + + dependencies = [("core", "0002_auto_20210517_1223")] + + operations = [ + migrations.AlterField( + model_name="repository", + name="language", + field=models.TextField( + blank=True, + choices=[ + ("javascript", "Javascript"), + ("shell", "Shell"), + ("python", "Python"), + ("ruby", "Ruby"), + ("perl", "Perl"), + ("dart", "Dart"), + ("java", "Java"), + ("c", "C"), + ("clojure", "Clojure"), + ("d", "D"), + ("fortran", "Fortran"), + ("go", "Go"), + ("groovy", "Groovy"), + ("kotlin", "Kotlin"), + ("php", "Php"), + ("r", "R"), + ("scala", "Scala"), + ("swift", "Swift"), + ("objective-c", "Objective C"), + ("xtend", "Xtend"), + ], + null=True, + ), + ), + RiskyRunSQL("ALTER TABLE repos ALTER COLUMN active SET DEFAULT FALSE;"), + RiskyRunSQL("UPDATE repos SET active=false WHERE active is null;"), + ] diff --git 
a/shared/django_apps/core/migrations/0004_pull_user_provided_base_sha.py b/shared/django_apps/core/migrations/0004_pull_user_provided_base_sha.py new file mode 100644 index 000000000..ecde6b34c --- /dev/null +++ b/shared/django_apps/core/migrations/0004_pull_user_provided_base_sha.py @@ -0,0 +1,16 @@ +# Generated by Django 3.1.6 on 2021-06-23 19:34 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("core", "0003_auto_20210520_0841")] + + operations = [ + migrations.AddField( + model_name="pull", + name="user_provided_base_sha", + field=models.TextField(null=True), + ) + ] diff --git a/shared/django_apps/core/migrations/0005_auto_20210916_0313.py b/shared/django_apps/core/migrations/0005_auto_20210916_0313.py new file mode 100644 index 000000000..f9a57bc56 --- /dev/null +++ b/shared/django_apps/core/migrations/0005_auto_20210916_0313.py @@ -0,0 +1,20 @@ +# Generated by Django 3.1.13 on 2021-09-16 03:13 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("core", "0004_pull_user_provided_base_sha")] + + operations = [ + migrations.AddField( + model_name="pull", + name="id", + field=models.BigAutoField(primary_key=True, serialize=False), + preserve_default=False, + ), + migrations.AlterField( + model_name="pull", name="pullid", field=models.IntegerField() + ), + ] diff --git a/shared/django_apps/core/migrations/0006_version_v4_6_2.py b/shared/django_apps/core/migrations/0006_version_v4_6_2.py new file mode 100644 index 000000000..aa4884663 --- /dev/null +++ b/shared/django_apps/core/migrations/0006_version_v4_6_2.py @@ -0,0 +1,15 @@ +from django.db import migrations + + +def add_version(apps, schema): + version = apps.get_model("core", "Version") + version.objects.all().delete() + v = version(version="v4.6.2") + v.save() + + +class Migration(migrations.Migration): + + dependencies = [("core", "0005_auto_20210916_0313")] + + operations = [migrations.RunPython(add_version)] diff --git a/shared/django_apps/core/migrations/0007_version_v4_6_3.py b/shared/django_apps/core/migrations/0007_version_v4_6_3.py new file mode 100644 index 000000000..8d382c194 --- /dev/null +++ b/shared/django_apps/core/migrations/0007_version_v4_6_3.py @@ -0,0 +1,15 @@ +from django.db import migrations + + +def add_version(apps, schema): + version = apps.get_model("core", "Version") + version.objects.all().delete() + v = version(version="v4.6.3") + v.save() + + +class Migration(migrations.Migration): + + dependencies = [("core", "0006_version_v4_6_2")] + + operations = [migrations.RunPython(add_version)] diff --git a/shared/django_apps/core/migrations/0008_version_v4_6_4.py b/shared/django_apps/core/migrations/0008_version_v4_6_4.py new file mode 100644 index 000000000..dceed9a3e --- /dev/null +++ b/shared/django_apps/core/migrations/0008_version_v4_6_4.py @@ -0,0 +1,15 @@ +from django.db import migrations + + +def add_version(apps, schema): + version = apps.get_model("core", "Version") + version.objects.all().delete() + v = version(version="v4.6.4") + v.save() + + +class Migration(migrations.Migration): + + dependencies = [("core", "0007_version_v4_6_3")] + + operations = [migrations.RunPython(add_version)] diff --git a/shared/django_apps/core/migrations/0009_version_v4_6_5.py b/shared/django_apps/core/migrations/0009_version_v4_6_5.py new file mode 100644 index 000000000..40e87313c --- /dev/null +++ b/shared/django_apps/core/migrations/0009_version_v4_6_5.py @@ -0,0 +1,15 @@ +from django.db import 
migrations + + +def add_version(apps, schema): + version = apps.get_model("core", "Version") + version.objects.all().delete() + v = version(version="v4.6.5") + v.save() + + +class Migration(migrations.Migration): + + dependencies = [("core", "0008_version_v4_6_4")] + + operations = [migrations.RunPython(add_version)] diff --git a/shared/django_apps/core/migrations/0010_add_new_langs.py b/shared/django_apps/core/migrations/0010_add_new_langs.py new file mode 100644 index 000000000..cb8229a7f --- /dev/null +++ b/shared/django_apps/core/migrations/0010_add_new_langs.py @@ -0,0 +1,84 @@ +# Generated by Django 3.1.13 on 2022-04-06 17:23 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + atomic = False + + dependencies = [("core", "0009_version_v4_6_5")] + + operations = [ + migrations.AlterModelOptions( + name="repository", + options={"ordering": ["-repoid"], "verbose_name_plural": "Repositories"}, + ), + migrations.AlterField( + model_name="repository", + name="language", + field=models.TextField( + blank=True, + choices=[ + ("javascript", "Javascript"), + ("shell", "Shell"), + ("python", "Python"), + ("ruby", "Ruby"), + ("perl", "Perl"), + ("dart", "Dart"), + ("java", "Java"), + ("c", "C"), + ("clojure", "Clojure"), + ("d", "D"), + ("fortran", "Fortran"), + ("go", "Go"), + ("groovy", "Groovy"), + ("kotlin", "Kotlin"), + ("php", "Php"), + ("r", "R"), + ("scala", "Scala"), + ("swift", "Swift"), + ("objective-c", "Objective C"), + ("xtend", "Xtend"), + ("typescript", "Typescript"), + ("haskell", "Haskell"), + ("rust", "Rust"), + ("lua", "Lua"), + ("matlab", "Matlab"), + ("assembly", "Assembly"), + ("scheme", "Scheme"), + ("powershell", "Powershell"), + ("apex", "Apex"), + ("verilog", "Verilog"), + ("common lisp", "Common Lisp"), + ("erlang", "Erlang"), + ("julia", "Julia"), + ("prolog", "Prolog"), + ("vue", "Vue"), + ("c++", "Cpp"), + ("c#", "C Sharp"), + ("f#", "F Sharp"), + ], + null=True, + ), + ), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'typescript';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'haskell';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'rust';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'lua';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'matlab';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'assembly';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'scheme';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'powershell';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'apex';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'verilog';"), + migrations.RunSQL( + "ALTER TYPE languages ADD VALUE IF NOT exists 'common lisp';" + ), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'erlang';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'julia';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'prolog';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'vue';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'c++';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'c#';"), + migrations.RunSQL("ALTER TYPE languages ADD VALUE IF NOT exists 'f#';"), + ] diff --git a/shared/django_apps/core/migrations/0011_add_decoration_type.py b/shared/django_apps/core/migrations/0011_add_decoration_type.py new file mode 
100644 index 000000000..6d0baccaa --- /dev/null +++ b/shared/django_apps/core/migrations/0011_add_decoration_type.py @@ -0,0 +1,28 @@ +# Generated by Django 3.1.13 on 2022-04-27 10:53 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("core", "0010_add_new_langs"), + ] + + operations = [ + migrations.AlterField( + model_name="commitnotification", + name="decoration_type", + field=models.TextField( + choices=[ + ("standard", "Standard"), + ("upgrade", "Upgrade"), + ("upload_limit", "Upload Limit"), + ], + null=True, + ), + ), + migrations.RunSQL( + "ALTER TYPE decorations ADD VALUE IF NOT exists 'upload_limit';" + ), + ] diff --git a/shared/django_apps/core/migrations/0012_auto_20220511_1732.py b/shared/django_apps/core/migrations/0012_auto_20220511_1732.py new file mode 100644 index 000000000..b3941dcf6 --- /dev/null +++ b/shared/django_apps/core/migrations/0012_auto_20220511_1732.py @@ -0,0 +1,37 @@ +# Generated by Django 3.1.13 on 2022-05-11 17:32 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("core", "0011_add_decoration_type"), + ] + + operations = [ + migrations.RunSQL( + """-- + -- Alter field bot on Repository + -- + COMMIT; + """, + state_operations=[ + migrations.AlterField( + model_name="repository", + name="bot", + field=models.ForeignKey( + blank=True, + db_column="bot", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="bot_repos", + to="codecov_auth.owner", + ), + ), + ], + ), + ] diff --git a/shared/django_apps/core/migrations/0013_repository_repos_service_id_author.py b/shared/django_apps/core/migrations/0013_repository_repos_service_id_author.py new file mode 100644 index 000000000..054d14d8f --- /dev/null +++ b/shared/django_apps/core/migrations/0013_repository_repos_service_id_author.py @@ -0,0 +1,20 @@ +# Generated by Django 3.2.12 on 2022-05-03 02:57 + +from django.contrib.postgres.operations import AddIndexConcurrently +from django.db import migrations, models + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("core", "0012_auto_20220511_1732"), + ] + + operations = [ + AddIndexConcurrently( + model_name="repository", + index=models.Index( + fields=["service_id", "author"], name="repos_service_id_author" + ), + ), + ] diff --git a/shared/django_apps/core/migrations/0014_pull_pulls_author_updatestamp.py b/shared/django_apps/core/migrations/0014_pull_pulls_author_updatestamp.py new file mode 100644 index 000000000..dbc08263d --- /dev/null +++ b/shared/django_apps/core/migrations/0014_pull_pulls_author_updatestamp.py @@ -0,0 +1,28 @@ +# Generated by Django 3.2.12 on 2022-07-11 13:34 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddIndex + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Create index pulls_author_updatestamp on field(s) author, updatestamp of model pull + -- + CREATE INDEX "pulls_author_updatestamp" ON "pulls" ("author", "updatestamp"); + COMMIT; + """ + + dependencies = [ + ("core", "0013_repository_repos_service_id_author"), + ] + + operations = [ + RiskyAddIndex( + model_name="pull", + index=models.Index( + fields=["author", "updatestamp"], name="pulls_author_updatestamp" + ), + ), + ] diff --git a/shared/django_apps/core/migrations/0015_commiterror.py 
b/shared/django_apps/core/migrations/0015_commiterror.py new file mode 100644 index 000000000..673e97213 --- /dev/null +++ b/shared/django_apps/core/migrations/0015_commiterror.py @@ -0,0 +1,38 @@ +# Generated by Django 3.2.12 on 2022-08-09 15:14 + +import uuid + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0014_pull_pulls_author_updatestamp"), + ] + + operations = [ + migrations.CreateModel( + name="CommitError", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("error_code", models.CharField(max_length=100)), + ("error_params", models.JSONField(default=dict)), + ( + "commit", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="errors", + to="core.commit", + ), + ), + ], + options={ + "abstract": False, + }, + ), + ] diff --git a/shared/django_apps/core/migrations/0016_version_v4_6_6.py b/shared/django_apps/core/migrations/0016_version_v4_6_6.py new file mode 100644 index 000000000..0f9f421ea --- /dev/null +++ b/shared/django_apps/core/migrations/0016_version_v4_6_6.py @@ -0,0 +1,15 @@ +from django.db import migrations + + +def add_version(apps, schema): + version = apps.get_model("core", "Version") + version.objects.all().delete() + v = version(version="v4.6.6") + v.save() + + +class Migration(migrations.Migration): + + dependencies = [("core", "0015_commiterror")] + + operations = [migrations.RunPython(add_version)] diff --git a/shared/django_apps/core/migrations/0017_branch_branches_repoid_updatestamp.py b/shared/django_apps/core/migrations/0017_branch_branches_repoid_updatestamp.py new file mode 100644 index 000000000..b492250ea --- /dev/null +++ b/shared/django_apps/core/migrations/0017_branch_branches_repoid_updatestamp.py @@ -0,0 +1,29 @@ +# Generated by Django 3.2.12 on 2023-01-13 16:44 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddIndex + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Create index branches_repoid_updatestamp on field(s) repository, -updatestamp of model branch + -- + CREATE INDEX "branches_repoid_updatestamp" ON "branches" ("repoid", "updatestamp" DESC); + COMMIT; + """ + + dependencies = [ + ("core", "0016_version_v4_6_6"), + ] + + operations = [ + RiskyAddIndex( + model_name="branch", + index=models.Index( + fields=["repository", "-updatestamp"], + name="branches_repoid_updatestamp", + ), + ), + ] diff --git a/shared/django_apps/core/migrations/0018_commit_all_commits_on_pull.py b/shared/django_apps/core/migrations/0018_commit_all_commits_on_pull.py new file mode 100644 index 000000000..f78cb994c --- /dev/null +++ b/shared/django_apps/core/migrations/0018_commit_all_commits_on_pull.py @@ -0,0 +1,28 @@ +# Generated by Django 3.2.12 on 2023-01-26 17:52 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddIndex + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Create index all_commits_on_pull on field(s) repository, pullid of model commit + -- + CREATE INDEX "all_commits_on_pull" ON "commits" ("repoid", "pullid"); + COMMIT; + """ + + dependencies = [ + ("core", "0017_branch_branches_repoid_updatestamp"), + ] + + operations = [ + RiskyAddIndex( + model_name="commit", + 
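+            # The index is declared on model fields; at the database level it covers
+            # the "repoid" and "pullid" columns of "commits", as in the SQL above.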
index=models.Index( + fields=["repository", "pullid"], name="all_commits_on_pull" + ), + ), + ] diff --git a/shared/django_apps/core/migrations/0019_commit_commits_repoid_branch_state_ts.py b/shared/django_apps/core/migrations/0019_commit_commits_repoid_branch_state_ts.py new file mode 100644 index 000000000..1f5b1e2fb --- /dev/null +++ b/shared/django_apps/core/migrations/0019_commit_commits_repoid_branch_state_ts.py @@ -0,0 +1,29 @@ +# Generated by Django 3.2.12 on 2023-02-01 15:04 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddIndex + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Create index commits_repoid_branch_state_ts on field(s) repository, branch, state, -timestamp of model commit + -- + CREATE INDEX "commits_repoid_branch_state_ts" ON "commits" ("repoid", "branch", "state", "timestamp" DESC); + COMMIT; + """ + + dependencies = [ + ("core", "0018_commit_all_commits_on_pull"), + ] + + operations = [ + RiskyAddIndex( + model_name="commit", + index=models.Index( + fields=["repository", "branch", "state", "-timestamp"], + name="commits_repoid_branch_state_ts", + ), + ), + ] diff --git a/shared/django_apps/core/migrations/0020_commit_commits_repoid_commitid_short_and_more.py b/shared/django_apps/core/migrations/0020_commit_commits_repoid_commitid_short_and_more.py new file mode 100644 index 000000000..1d0d3fa6b --- /dev/null +++ b/shared/django_apps/core/migrations/0020_commit_commits_repoid_commitid_short_and_more.py @@ -0,0 +1,60 @@ +# Generated by Django 4.1.7 on 2023-03-10 18:24 + +import django.contrib.postgres.indexes +import django.db.models.functions.text +from django.contrib.postgres.operations import BtreeGinExtension, TrigramExtension +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddIndex + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Creates extension pg_trgm + -- + CREATE EXTENSION IF NOT EXISTS "pg_trgm"; + -- + -- Creates extension btree_gin + -- + CREATE EXTENSION IF NOT EXISTS "btree_gin"; + -- + -- Create index commits_repoid_commitid_short on F(repository), Substr(Lower(F(commitid)), Value(1), Value(7)) on model commit + -- + CREATE INDEX "commits_repoid_commitid_short" ON "commits" ("repoid", (SUBSTRING(LOWER("commitid"), 1, 7))); + -- + -- Create index commit_message_gin_trgm on F(repository), OpClass(Upper(F(message)), name=gin_trgm_ops) on model commit + -- + CREATE INDEX "commit_message_gin_trgm" ON "commits" USING gin ("repoid", (UPPER("message")) gin_trgm_ops); + COMMIT; + """ + + dependencies = [ + ("core", "0019_commit_commits_repoid_branch_state_ts"), + ] + + operations = [ + TrigramExtension(), + BtreeGinExtension(), + RiskyAddIndex( + model_name="commit", + index=models.Index( + models.F("repository"), + django.db.models.functions.text.Substr( + django.db.models.functions.text.Lower("commitid"), 1, 7 + ), + name="commits_repoid_commitid_short", + ), + ), + RiskyAddIndex( + model_name="commit", + index=django.contrib.postgres.indexes.GinIndex( + models.F("repository"), + django.contrib.postgres.indexes.OpClass( + django.db.models.functions.text.Upper("message"), + name="gin_trgm_ops", + ), + name="commit_message_gin_trgm", + ), + ), + ] diff --git a/shared/django_apps/core/migrations/0021_pull_behind_by_pull_behind_by_commit.py b/shared/django_apps/core/migrations/0021_pull_behind_by_pull_behind_by_commit.py new file mode 100644 index 000000000..a03a7ea3f --- /dev/null +++ 
b/shared/django_apps/core/migrations/0021_pull_behind_by_pull_behind_by_commit.py @@ -0,0 +1,23 @@ +# Generated by Django 4.1.7 on 2023-04-04 11:45 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0020_commit_commits_repoid_commitid_short_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="pull", + name="behind_by", + field=models.IntegerField(null=True), + ), + migrations.AddField( + model_name="pull", + name="behind_by_commit", + field=models.TextField(null=True), + ), + ] diff --git a/shared/django_apps/core/migrations/0022_pull_pulls_repoid_pullid_ts.py b/shared/django_apps/core/migrations/0022_pull_pulls_repoid_pullid_ts.py new file mode 100644 index 000000000..14632bd24 --- /dev/null +++ b/shared/django_apps/core/migrations/0022_pull_pulls_repoid_pullid_ts.py @@ -0,0 +1,29 @@ +# Generated by Django 4.1.7 on 2023-04-24 18:59 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddIndex + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Create index pulls_repoid_pullid_ts on field(s) repository, pullid, updatestamp of model pull + -- + CREATE INDEX "pulls_repoid_pullid_ts" ON "pulls" ("repoid", "pullid", "updatestamp"); + COMMIT; + """ + + dependencies = [ + ("core", "0021_pull_behind_by_pull_behind_by_commit"), + ] + + operations = [ + RiskyAddIndex( + model_name="pull", + index=models.Index( + fields=["repository", "pullid", "updatestamp"], + name="pulls_repoid_pullid_ts", + ), + ), + ] diff --git a/shared/django_apps/core/migrations/0023_alter_commitnotification_decoration_type.py b/shared/django_apps/core/migrations/0023_alter_commitnotification_decoration_type.py new file mode 100644 index 000000000..792915fdf --- /dev/null +++ b/shared/django_apps/core/migrations/0023_alter_commitnotification_decoration_type.py @@ -0,0 +1,46 @@ +# Generated by Django 4.1.7 on 2023-05-10 07:35 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + # -- + # -- Alter field decoration_type on commitnotification + # -- + # -- (no-op) + # -- + # -- Raw SQL operation + # -- + # ALTER TYPE decorations ADD VALUE IF NOT exists 'passing_empty_upload'; + # -- + # -- Raw SQL operation + # -- + # ALTER TYPE decorations ADD VALUE IF NOT exists 'failing_empty_upload'; + + atomic = False + dependencies = [ + ("core", "0022_pull_pulls_repoid_pullid_ts"), + ] + + operations = [ + migrations.AlterField( + model_name="commitnotification", + name="decoration_type", + field=models.TextField( + choices=[ + ("standard", "Standard"), + ("upgrade", "Upgrade"), + ("upload_limit", "Upload Limit"), + ("passing_empty_upload", "Passing Empty Upload"), + ("failing_empty_upload", "Failing Empty Upload"), + ], + null=True, + ), + ), + migrations.RunSQL( + "ALTER TYPE decorations ADD VALUE IF NOT exists 'passing_empty_upload';" + ), + migrations.RunSQL( + "ALTER TYPE decorations ADD VALUE IF NOT exists 'failing_empty_upload';" + ), + ] diff --git a/shared/django_apps/core/migrations/0024_alter_commit_timestamp_alter_commit_updatestamp_and_more.py b/shared/django_apps/core/migrations/0024_alter_commit_timestamp_alter_commit_updatestamp_and_more.py new file mode 100644 index 000000000..bc1e24d65 --- /dev/null +++ b/shared/django_apps/core/migrations/0024_alter_commit_timestamp_alter_commit_updatestamp_and_more.py @@ -0,0 +1,49 @@ +# Generated by Django 4.1.7 on 2023-05-29 15:24 + +import django.utils.timezone +from django.db import migrations, 
models + +from shared.django_apps.core.models import DateTimeWithoutTZField + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0023_alter_commitnotification_decoration_type"), + ] + + operations = [ + migrations.AlterField( + model_name="commit", + name="timestamp", + field=DateTimeWithoutTZField(default=django.utils.timezone.now), + ), + migrations.AlterField( + model_name="commit", + name="updatestamp", + field=DateTimeWithoutTZField(default=django.utils.timezone.now), + ), + migrations.AlterField( + model_name="commitnotification", + name="notification_type", + field=models.TextField( + choices=[ + ("comment", "Comment"), + ("gitter", "Gitter"), + ("hipchat", "Hipchat"), + ("irc", "Irc"), + ("slack", "Slack"), + ("status_changes", "Status Changes"), + ("status_patch", "Status Patch"), + ("status_project", "Status Project"), + ("webhook", "Webhook"), + ("codecov_slack_app", "Codecov Slack App"), + ] + ), + ), + migrations.AlterField( + model_name="pull", + name="updatestamp", + field=DateTimeWithoutTZField(default=django.utils.timezone.now), + ), + ] diff --git a/shared/django_apps/core/migrations/0025_v5_0_1.py b/shared/django_apps/core/migrations/0025_v5_0_1.py new file mode 100644 index 000000000..ecf97aa49 --- /dev/null +++ b/shared/django_apps/core/migrations/0025_v5_0_1.py @@ -0,0 +1,17 @@ +from django.db import migrations + + +def add_version(apps, schema): + version = apps.get_model("core", "Version") + version.objects.all().delete() + v = version(version="v5.0.1") + v.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0024_alter_commit_timestamp_alter_commit_updatestamp_and_more") + ] + + operations = [migrations.RunPython(add_version)] diff --git a/shared/django_apps/core/migrations/0026_auto_20230605_1134.py b/shared/django_apps/core/migrations/0026_auto_20230605_1134.py new file mode 100644 index 000000000..422401b68 --- /dev/null +++ b/shared/django_apps/core/migrations/0026_auto_20230605_1134.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.7 on 2023-06-05 11:34 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0025_v5_0_1"), + ] + + operations = [ + migrations.RunSQL( + """ + ALTER TYPE notifications ADD VALUE IF NOT exists 'codecov_slack_app'; + """ + ), + ] diff --git a/shared/django_apps/core/migrations/0027_alter_commit_report_rename_report_commit__report_and_more.py b/shared/django_apps/core/migrations/0027_alter_commit_report_rename_report_commit__report_and_more.py new file mode 100644 index 000000000..d2966c252 --- /dev/null +++ b/shared/django_apps/core/migrations/0027_alter_commit_report_rename_report_commit__report_and_more.py @@ -0,0 +1,47 @@ +# Generated by Django 4.2.2 on 2023-07-18 07:33 + +from django.db import migrations, models + +from shared.django_apps.core.encoders import ReportJSONEncoder + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0026_auto_20230605_1134"), + ] + + # BEGIN; + # -- + # -- Alter field report on commit + # -- + # -- (no-op) + # -- + # -- Rename field report on commit to _report + # -- + # -- (no-op) + # -- + # -- Add field _report_storage_path to commit + # -- + # ALTER TABLE "commits" ADD COLUMN "report_storage_path" varchar(200) NULL; + # COMMIT; + + operations = [ + migrations.AlterField( + model_name="commit", + name="report", + field=models.JSONField( + db_column="report", encoder=ReportJSONEncoder, null=True + ), + ), + migrations.RenameField( + model_name="commit", + 
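+            # State-only rename: db_column="report" (set by the AlterField above)
+            # keeps the database column name, so this step generates no SQL.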
old_name="report", + new_name="_report", + ), + migrations.AddField( + model_name="commit", + name="_report_storage_path", + field=models.URLField(db_column="report_storage_path", null=True), + ), + ] diff --git a/shared/django_apps/core/migrations/0028_repository_webhook_secret.py b/shared/django_apps/core/migrations/0028_repository_webhook_secret.py new file mode 100644 index 000000000..d41a8cb84 --- /dev/null +++ b/shared/django_apps/core/migrations/0028_repository_webhook_secret.py @@ -0,0 +1,27 @@ +# Generated by Django 4.2.2 on 2023-07-24 16:38 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddField + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Add field webhook_secret to repository + -- + ALTER TABLE "repos" ADD COLUMN "webhook_secret" text NULL; + COMMIT; + """ + + dependencies = [ + ("core", "0027_alter_commit_report_rename_report_commit__report_and_more"), + ] + + operations = [ + RiskyAddField( + model_name="repository", + name="webhook_secret", + field=models.TextField(null=True), + ), + ] diff --git a/shared/django_apps/core/migrations/0029_constants_delete_version.py b/shared/django_apps/core/migrations/0029_constants_delete_version.py new file mode 100644 index 000000000..34b2539cd --- /dev/null +++ b/shared/django_apps/core/migrations/0029_constants_delete_version.py @@ -0,0 +1,22 @@ +# Generated by Django 4.2.2 on 2023-07-27 15:39 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0028_repository_webhook_secret"), + ] + + operations = [ + migrations.CreateModel( + name="Constants", + fields=[ + ("key", models.CharField(primary_key=True, serialize=False)), + ("value", models.CharField()), + ], + options={ + "db_table": "constants", + }, + ), + ] diff --git a/shared/django_apps/core/migrations/0030_auto_20230727_1539.py b/shared/django_apps/core/migrations/0030_auto_20230727_1539.py new file mode 100644 index 000000000..9cc1af28c --- /dev/null +++ b/shared/django_apps/core/migrations/0030_auto_20230727_1539.py @@ -0,0 +1,21 @@ +# Generated by Django 4.2.2 on 2023-07-27 15:39 + +from uuid import uuid4 + +from django.db import migrations + + +def generate_constants(apps, schema_editor): + Constants = apps.get_model("core", "Constants") + version = Constants(key="version", value="23.7.27") + install_id = Constants(key="install_id", value=uuid4()) + version.save() + install_id.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0029_constants_delete_version"), + ] + + operations = [migrations.RunPython(generate_constants)] diff --git a/shared/django_apps/core/migrations/0031_auto_20230731_1627.py b/shared/django_apps/core/migrations/0031_auto_20230731_1627.py new file mode 100644 index 000000000..616ef65d2 --- /dev/null +++ b/shared/django_apps/core/migrations/0031_auto_20230731_1627.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.2 on 2023-07-31 16:27 + +from django.db import migrations + + +def update_version(apps, schema_editor): + version = apps.get_model("core", "Version") + version.objects.all().delete() + v = version(version="23.7.27") + v.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0030_auto_20230727_1539"), + ] + + operations = [migrations.RunPython(update_version)] diff --git a/shared/django_apps/core/migrations/0032_auto_20230731_1641.py b/shared/django_apps/core/migrations/0032_auto_20230731_1641.py new file mode 100644 index 000000000..171f5e741 --- /dev/null 
+++ b/shared/django_apps/core/migrations/0032_auto_20230731_1641.py @@ -0,0 +1,38 @@ +# Generated by Django 4.2.2 on 2023-07-31 16:41 + +from django.db import migrations +from shared.django_apps.migration_utils import RiskyRunSQL + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0031_auto_20230731_1627"), + ] + + operations = [ + RiskyRunSQL( + """ + create or replace function branches_update() returns trigger as $$ + declare _ownerid int; + begin + -- update repos cache if main branch + update repos + set updatestamp = now() + where repoid = new.repoid + and branch = new.branch + returning ownerid into _ownerid; + + if found then + -- default branch updated, so we can update the owners timestamp + -- to refresh the team list + update owners + set updatestamp=now() + where ownerid=_ownerid; + end if; + + return null; + end; + $$ language plpgsql; + """ + ) + ] diff --git a/shared/django_apps/core/migrations/0033_alter_pull_flare_rename_flare_pull__flare_and_more.py b/shared/django_apps/core/migrations/0033_alter_pull_flare_rename_flare_pull__flare_and_more.py new file mode 100644 index 000000000..eae59be21 --- /dev/null +++ b/shared/django_apps/core/migrations/0033_alter_pull_flare_rename_flare_pull__flare_and_more.py @@ -0,0 +1,43 @@ +# Generated by Django 4.2.2 on 2023-08-03 09:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0032_auto_20230731_1641"), + ] + + # BEGIN; + # -- + # -- Alter field flare on pull + # -- + # -- (no-op) + # -- + # -- Rename field flare on pull to _flare + # -- + # -- (no-op) + # -- + # -- Add field _flare_storage_path to pull + # -- + # ALTER TABLE "pulls" ADD COLUMN "flare_storage_path" varchar(200) NULL; + # COMMIT; + + operations = [ + migrations.AlterField( + model_name="pull", + name="flare", + field=models.JSONField(db_column="flare", null=True), + ), + migrations.RenameField( + model_name="pull", + old_name="flare", + new_name="_flare", + ), + migrations.AddField( + model_name="pull", + name="_flare_storage_path", + field=models.URLField(db_column="flare_storage_path", null=True), + ), + ] diff --git a/shared/django_apps/core/migrations/0034_remove_repository_cache.py b/shared/django_apps/core/migrations/0034_remove_repository_cache.py new file mode 100644 index 000000000..adea85487 --- /dev/null +++ b/shared/django_apps/core/migrations/0034_remove_repository_cache.py @@ -0,0 +1,25 @@ +# Generated by Django 4.2.2 on 2023-08-14 13:23 + +from django.db import migrations +from shared.django_apps.migration_utils import RiskyRemoveField + + +class Migration(migrations.Migration): + dependencies = [ + ( + "core", + "0033_alter_pull_flare_rename_flare_pull__flare_and_more", + ), + ] + + operations = [ + migrations.RunSQL( + sql=migrations.RunSQL.noop, + state_operations=[ + migrations.RemoveField( + model_name="repository", + name="cache", + ) + ], + ) + ] diff --git a/shared/django_apps/core/migrations/0035_auto_20230907_2123.py b/shared/django_apps/core/migrations/0035_auto_20230907_2123.py new file mode 100644 index 000000000..3b08bd7fd --- /dev/null +++ b/shared/django_apps/core/migrations/0035_auto_20230907_2123.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.2 on 2023-09-07 21:23 + +from django.db import migrations + + +class Migration(migrations.Migration): + def add_version(apps, schema): + Constants = apps.get_model("core", "Constants") + version = Constants.objects.get(key="version") + version.value = "23.9.5" + version.save() + + dependencies = [ + 
("core", "0034_remove_repository_cache"), + ] + + operations = [migrations.RunPython(add_version)] diff --git a/shared/django_apps/core/migrations/0036_auto_20231003_1342.py b/shared/django_apps/core/migrations/0036_auto_20231003_1342.py new file mode 100644 index 000000000..06f12cb29 --- /dev/null +++ b/shared/django_apps/core/migrations/0036_auto_20231003_1342.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.3 on 2023-10-03 13:42 + +from django.db import migrations + + +def update_version(apps, schema): + Constants = apps.get_model("core", "Constants") + version = Constants.objects.get(key="version") + version.value = "23.10.2" + version.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0035_auto_20230907_2123"), + ] + + operations = [migrations.RunPython(update_version)] diff --git a/shared/django_apps/core/migrations/0037_alter_commitnotification_decoration_type.py b/shared/django_apps/core/migrations/0037_alter_commitnotification_decoration_type.py new file mode 100644 index 000000000..4c20e56a4 --- /dev/null +++ b/shared/django_apps/core/migrations/0037_alter_commitnotification_decoration_type.py @@ -0,0 +1,31 @@ +# Generated by Django 4.2.3 on 2023-10-06 16:06 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0036_auto_20231003_1342"), + ] + + operations = [ + migrations.AlterField( + model_name="commitnotification", + name="decoration_type", + field=models.TextField( + choices=[ + ("standard", "Standard"), + ("upgrade", "Upgrade"), + ("upload_limit", "Upload Limit"), + ("passing_empty_upload", "Passing Empty Upload"), + ("failing_empty_upload", "Failing Empty Upload"), + ("processing_upload", "Processing Upload"), + ], + null=True, + ), + ), + migrations.RunSQL( + "ALTER TYPE decorations ADD VALUE IF NOT exists 'processing_upload';" + ), + ] diff --git a/shared/django_apps/core/migrations/0038_increment_version.py b/shared/django_apps/core/migrations/0038_increment_version.py new file mode 100644 index 000000000..929a09d58 --- /dev/null +++ b/shared/django_apps/core/migrations/0038_increment_version.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.3 on 2023-11-03 13:24 + +from django.db import migrations + + +def update_version(apps, schema): + Constants = apps.get_model("core", "Constants") + version = Constants.objects.get(key="version") + version.value = "23.11.2" + version.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0037_alter_commitnotification_decoration_type"), + ] + + operations = [migrations.RunPython(update_version)] diff --git a/shared/django_apps/core/migrations/0039_pull_pulls_repoid_id.py b/shared/django_apps/core/migrations/0039_pull_pulls_repoid_id.py new file mode 100644 index 000000000..07dd6247f --- /dev/null +++ b/shared/django_apps/core/migrations/0039_pull_pulls_repoid_id.py @@ -0,0 +1,26 @@ +# Generated by Django 4.2.3 on 2023-10-30 16:16 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddIndex + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Create index pulls_repoid_id on field(s) repository, id of model pull + -- + CREATE INDEX "pulls_repoid_id" ON "pulls" ("repoid", "id"); + COMMIT; + """ + + dependencies = [ + ("core", "0038_increment_version"), + ] + + operations = [ + RiskyAddIndex( + model_name="pull", + index=models.Index(fields=["repository", "id"], name="pulls_repoid_id"), + ), + ] diff --git 
a/shared/django_apps/core/migrations/0040_increment_version.py b/shared/django_apps/core/migrations/0040_increment_version.py new file mode 100644 index 000000000..44e00f613 --- /dev/null +++ b/shared/django_apps/core/migrations/0040_increment_version.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.7 on 2023-12-04 21:13 + +from django.db import migrations + + +def update_version(apps, schema): + Constants = apps.get_model("core", "Constants") + version = Constants.objects.get(key="version") + version.value = "23.12.4" + version.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0039_pull_pulls_repoid_id"), + ] + + operations = [migrations.RunPython(update_version)] diff --git a/shared/django_apps/core/migrations/0041_pull_bundle_analysis_commentid.py b/shared/django_apps/core/migrations/0041_pull_bundle_analysis_commentid.py new file mode 100644 index 000000000..f7c2edb3e --- /dev/null +++ b/shared/django_apps/core/migrations/0041_pull_bundle_analysis_commentid.py @@ -0,0 +1,27 @@ +# Generated by Django 4.2.7 on 2023-12-27 17:00 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddField + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Add field bundle_analysis_commentid to pull + -- + ALTER TABLE "pulls" ADD COLUMN "bundle_analysis_commentid" text NULL; + COMMIT; + """ + + dependencies = [ + ("core", "0040_increment_version"), + ] + + operations = [ + RiskyAddField( + model_name="pull", + name="bundle_analysis_commentid", + field=models.TextField(null=True), + ), + ] diff --git a/shared/django_apps/core/migrations/0042_repository_languages.py b/shared/django_apps/core/migrations/0042_repository_languages.py new file mode 100644 index 000000000..ee067ee5f --- /dev/null +++ b/shared/django_apps/core/migrations/0042_repository_languages.py @@ -0,0 +1,31 @@ +# Generated by Django 4.2.7 on 2024-01-09 18:54 + +import django.contrib.postgres.fields +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddField + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Add field languages to repository + -- + ALTER TABLE "repos" ADD COLUMN "languages" varchar[] DEFAULT '{}' NOT NULL; + ALTER TABLE "repos" ALTER COLUMN "languages" DROP DEFAULT; + COMMIT; + """ + + dependencies = [ + ("core", "0041_pull_bundle_analysis_commentid"), + ] + + operations = [ + RiskyAddField( + model_name="repository", + name="languages", + field=django.contrib.postgres.fields.ArrayField( + base_field=models.CharField(), blank=True, default=[], size=None + ), + ), + ] diff --git a/shared/django_apps/core/migrations/0043_repository_bundle_analysis_enabled.py b/shared/django_apps/core/migrations/0043_repository_bundle_analysis_enabled.py new file mode 100644 index 000000000..99dbb0178 --- /dev/null +++ b/shared/django_apps/core/migrations/0043_repository_bundle_analysis_enabled.py @@ -0,0 +1,28 @@ +# Generated by Django 4.2.7 on 2024-01-09 21:10 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddField + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Add field bundle_analysis_enabled to repository + -- + ALTER TABLE "repos" ADD COLUMN "bundle_analysis_enabled" boolean DEFAULT false NOT NULL; + ALTER TABLE "repos" ALTER COLUMN "bundle_analysis_enabled" DROP DEFAULT; + COMMIT; + """ + + dependencies = [ + ("core", "0042_repository_languages"), + ] + + operations = [ + RiskyAddField( + model_name="repository", + 
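+            # Added as NOT NULL via a temporary DEFAULT false that is dropped right away
+            # (see the SQL above); migration 0044 then relaxes the column to nullable.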
name="bundle_analysis_enabled", + field=models.BooleanField(default=False), + ), + ] diff --git a/shared/django_apps/core/migrations/0044_alter_repository_bundle_analysis_enabled_and_more.py b/shared/django_apps/core/migrations/0044_alter_repository_bundle_analysis_enabled_and_more.py new file mode 100644 index 000000000..c370cbc9a --- /dev/null +++ b/shared/django_apps/core/migrations/0044_alter_repository_bundle_analysis_enabled_and_more.py @@ -0,0 +1,43 @@ +# Generated by Django 4.2.7 on 2024-01-10 12:28 + +import django.contrib.postgres.fields +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAlterField + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Alter field bundle_analysis_enabled on repository + -- + ALTER TABLE "repos" ALTER COLUMN "bundle_analysis_enabled" DROP NOT NULL; + -- + -- Alter field languages on repository + -- + ALTER TABLE "repos" ALTER COLUMN "languages" DROP NOT NULL; + COMMIT; + """ + + dependencies = [ + ("core", "0043_repository_bundle_analysis_enabled"), + ] + + operations = [ + RiskyAlterField( + model_name="repository", + name="bundle_analysis_enabled", + field=models.BooleanField(default=False, null=True), + ), + RiskyAlterField( + model_name="repository", + name="languages", + field=django.contrib.postgres.fields.ArrayField( + base_field=models.CharField(), + blank=True, + default=[], + null=True, + size=None, + ), + ), + ] diff --git a/shared/django_apps/core/migrations/0045_repository_languages_last_updated.py b/shared/django_apps/core/migrations/0045_repository_languages_last_updated.py new file mode 100644 index 000000000..aa442726a --- /dev/null +++ b/shared/django_apps/core/migrations/0045_repository_languages_last_updated.py @@ -0,0 +1,29 @@ +# Generated by Django 4.2.7 on 2024-01-11 05:32 + +from django.db import migrations +from shared.django_apps.migration_utils import RiskyAddField + +from shared.django_apps.core.models import DateTimeWithoutTZField + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Add field languages_last_updated to repository + -- + ALTER TABLE "repos" ADD COLUMN "languages_last_updated" timestamp NULL; + COMMIT; + """ + + dependencies = [ + ("core", "0044_alter_repository_bundle_analysis_enabled_and_more"), + ] + + operations = [ + RiskyAddField( + model_name="repository", + name="languages_last_updated", + field=DateTimeWithoutTZField(blank=True, null=True), + ), + ] diff --git a/shared/django_apps/core/migrations/0046_repository_coverage_enabled.py b/shared/django_apps/core/migrations/0046_repository_coverage_enabled.py new file mode 100644 index 000000000..90fda0cc7 --- /dev/null +++ b/shared/django_apps/core/migrations/0046_repository_coverage_enabled.py @@ -0,0 +1,33 @@ +# Generated by Django 4.2.7 on 2024-01-15 20:36 + +from django.db import migrations, models +from shared.django_apps.migration_utils import RiskyAddField, RiskyRunSQL + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Add field coverage_enabled to repository + -- + ALTER TABLE "repos" ADD COLUMN "coverage_enabled" boolean DEFAULT false NULL; + ALTER TABLE "repos" ALTER COLUMN "coverage_enabled" DROP DEFAULT; + -- + -- Raw SQL operation + -- + UPDATE repos SET coverage_enabled=true WHERE active=true; + COMMIT; + """ + + dependencies = [ + ("core", "0045_repository_languages_last_updated"), + ] + + operations = [ + RiskyAddField( + model_name="repository", + name="coverage_enabled", + field=models.BooleanField(default=False, null=True), + ), + 
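+        # Backfill: rows with active=true get coverage_enabled=true as part of
+        # the same migration.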
RiskyRunSQL("UPDATE repos SET coverage_enabled=true WHERE active=true;"), + ] diff --git a/shared/django_apps/core/migrations/0047_increment_version.py b/shared/django_apps/core/migrations/0047_increment_version.py new file mode 100644 index 000000000..3740abc00 --- /dev/null +++ b/shared/django_apps/core/migrations/0047_increment_version.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.7 on 2024-01-31 18:04 + +from django.db import migrations + + +def update_version(apps, schema): + Constants = apps.get_model("core", "Constants") + version = Constants.objects.get(key="version") + version.value = "24.2.1" + version.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0046_repository_coverage_enabled"), + ] + + operations = [migrations.RunPython(update_version)] diff --git a/shared/django_apps/core/migrations/__init__.py b/shared/django_apps/core/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/core/models.py b/shared/django_apps/core/models.py index 642466b21..10cc7418f 100644 --- a/shared/django_apps/core/models.py +++ b/shared/django_apps/core/models.py @@ -17,6 +17,8 @@ from shared.django_apps.codecov.models import BaseCodecovModel +from shared.django_apps.core.encoders import ReportJSONEncoder + class DateTimeWithoutTZField(models.DateTimeField): def db_type(self, connection): return "timestamp" @@ -228,6 +230,9 @@ class CommitStates(models.TextChoices): state = models.TextField( null=True, choices=CommitStates.choices ) # Really an ENUM in db + # # Use custom JSON to properly serialize custom data classes on reports + _report = models.JSONField(null=True, db_column="report", encoder=ReportJSONEncoder) + _report_storage_path = models.URLField(null=True, db_column="report_storage_path") def save(self, *args, **kwargs): self.updatestamp = timezone.now() @@ -320,9 +325,7 @@ class Meta: # "commit_report", is_codecov_repo, self.repository.repoid # ) - # # Use custom JSON to properly serialize custom data classes on reports - # _report = models.JSONField(null=True, db_column="report", encoder=ReportJSONEncoder) - # _report_storage_path = models.URLField(null=True, db_column="report_storage_path") + # Missing Key/Method # report = ArchiveField( # should_write_to_storage_fn=should_write_to_storage, # json_encoder=ReportJSONEncoder, @@ -413,8 +416,8 @@ class Meta: # repoid=self.repository.repoid, # ) - # _flare = models.JSONField(db_column="flare", null=True) - # _flare_storage_path = models.URLField(db_column="flare_storage_path", null=True) + _flare = models.JSONField(db_column="flare", null=True) + _flare_storage_path = models.URLField(db_column="flare_storage_path", null=True) # flare = ArchiveField( # should_write_to_storage_fn=should_write_to_storage, default_value_class=dict # ) diff --git a/shared/django_apps/dummy_settings.py b/shared/django_apps/dummy_settings.py index cf240e2a3..7762c0b99 100644 --- a/shared/django_apps/dummy_settings.py +++ b/shared/django_apps/dummy_settings.py @@ -13,7 +13,7 @@ "shared.django_apps.rollouts", # API models "shared.django_apps.codecov_auth", - "shared.django_apps.core" + "shared.django_apps.core", ] MIDDLEWARE = [] @@ -23,6 +23,8 @@ TELEMETRY_VANILLA_DB = "default" TELEMETRY_TIMESCALE_DB = "timeseries" +AUTH_USER_MODEL = "codecov_auth.User" + TEST = True # Database From 637fd784ada2e70b21f0bbdf6dd8e277561f39eb Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 29 Feb 2024 16:22:48 -0800 Subject: [PATCH 03/36] feat: add reports app + migrations --- 
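Two patterns in these migrations are worth a short sketch.

First, the version-bump data migrations in the core app above (0038_increment_version, 0040_increment_version, 0047_increment_version) all share the same RunPython shape. A minimal sketch of that pattern, with an illustrative version value and a no-op reverse_code added so the migration can also be unapplied, might look like:

from django.db import migrations


def update_version(apps, schema_editor):
    # Data migrations should use the historical model registry, not a direct import.
    Constants = apps.get_model("core", "Constants")
    version = Constants.objects.get(key="version")
    version.value = "24.x.y"  # illustrative value only
    version.save()


class Migration(migrations.Migration):
    dependencies = [("core", "0047_increment_version")]

    operations = [
        # reverse_code keeps the data migration reversible as a no-op.
        migrations.RunPython(update_version, reverse_code=migrations.RunPython.noop),
    ]

Second, two of the reports migrations added below (0006_auto_20221212_1111 and 0007_auto_20230220_1245) use the usual Postgres two-step for adding a unique constraint to a live table: build a unique index CONCURRENTLY in a non-atomic migration so the build does not hold a long table lock, then promote it with ADD CONSTRAINT ... UNIQUE USING INDEX, which is quick once the index exists. A rough sketch of the same two steps, collapsed here into one non-atomic migration with illustrative table, index, and constraint names, might be:

from django.db import migrations


class Migration(migrations.Migration):
    # CREATE INDEX CONCURRENTLY cannot run inside a transaction block.
    atomic = False

    dependencies = [("reports", "0005_auto_20221114_1428")]

    operations = [
        migrations.RunSQL(
            'CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS "example_unique_idx" '
            'ON "example_table" ("col_a", "col_b");',
            reverse_sql='DROP INDEX CONCURRENTLY IF EXISTS "example_unique_idx";',
        ),
        migrations.RunSQL(
            # Promoting the finished index is a quick catalog-level change.
            'ALTER TABLE "example_table" '
            'ADD CONSTRAINT "example_unique" UNIQUE USING INDEX "example_unique_idx";',
            reverse_sql='ALTER TABLE "example_table" DROP CONSTRAINT "example_unique";',
        ),
    ]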
shared/django_apps/dummy_settings.py | 1 + shared/django_apps/reports/__init__.py | 0 .../reports/migrations/0001_initial.py | 209 +++++++++++++ .../migrations/0002_auto_20211006_2211.py | 18 ++ .../migrations/0003_auto_20211118_1150.py | 25 ++ .../migrations/0004_commitreport_code.py | 18 ++ .../migrations/0005_auto_20221114_1428.py | 43 +++ .../migrations/0006_auto_20221212_1111.py | 18 ++ .../migrations/0007_auto_20230220_1245.py | 17 + .../migrations/0008_auto_20230228_1059.py | 25 ++ .../migrations/0009_auto_20230223_1624.py | 39 +++ ...lter_reportdetails_files_array_and_more.py | 60 ++++ .../0011_commitreport_report_type.py | 34 ++ .../0012_alter_repositoryflag_flag_name.py | 17 + .../migrations/0013_test_testinstance.py | 94 ++++++ ...014_rename_env_test_flags_hash_and_more.py | 54 ++++ .../reports/migrations/__init__.py | 0 shared/django_apps/reports/models.py | 290 ++++++++++++++++++ 18 files changed, 962 insertions(+) create mode 100644 shared/django_apps/reports/__init__.py create mode 100644 shared/django_apps/reports/migrations/0001_initial.py create mode 100644 shared/django_apps/reports/migrations/0002_auto_20211006_2211.py create mode 100644 shared/django_apps/reports/migrations/0003_auto_20211118_1150.py create mode 100644 shared/django_apps/reports/migrations/0004_commitreport_code.py create mode 100644 shared/django_apps/reports/migrations/0005_auto_20221114_1428.py create mode 100644 shared/django_apps/reports/migrations/0006_auto_20221212_1111.py create mode 100644 shared/django_apps/reports/migrations/0007_auto_20230220_1245.py create mode 100644 shared/django_apps/reports/migrations/0008_auto_20230228_1059.py create mode 100644 shared/django_apps/reports/migrations/0009_auto_20230223_1624.py create mode 100644 shared/django_apps/reports/migrations/0010_alter_reportdetails_files_array_and_more.py create mode 100644 shared/django_apps/reports/migrations/0011_commitreport_report_type.py create mode 100644 shared/django_apps/reports/migrations/0012_alter_repositoryflag_flag_name.py create mode 100644 shared/django_apps/reports/migrations/0013_test_testinstance.py create mode 100644 shared/django_apps/reports/migrations/0014_rename_env_test_flags_hash_and_more.py create mode 100644 shared/django_apps/reports/migrations/__init__.py create mode 100644 shared/django_apps/reports/models.py diff --git a/shared/django_apps/dummy_settings.py b/shared/django_apps/dummy_settings.py index 7762c0b99..525fe18cc 100644 --- a/shared/django_apps/dummy_settings.py +++ b/shared/django_apps/dummy_settings.py @@ -14,6 +14,7 @@ # API models "shared.django_apps.codecov_auth", "shared.django_apps.core", + "shared.django_apps.reports", ] MIDDLEWARE = [] diff --git a/shared/django_apps/reports/__init__.py b/shared/django_apps/reports/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/reports/migrations/0001_initial.py b/shared/django_apps/reports/migrations/0001_initial.py new file mode 100644 index 000000000..a088f3a74 --- /dev/null +++ b/shared/django_apps/reports/migrations/0001_initial.py @@ -0,0 +1,209 @@ +# Generated by Django 3.1.6 on 2021-04-08 19:33 + +import uuid + +import django.contrib.postgres.fields +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [("core", "0001_initial")] + + operations = [ + migrations.CreateModel( + name="CommitReport", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", 
models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "commit", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="reports", + to="core.commit", + ), + ), + ], + options={"abstract": False}, + ), + migrations.CreateModel( + name="ReportSession", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("build_code", models.TextField(null=True)), + ("build_url", models.TextField(null=True)), + ("env", models.JSONField(null=True)), + ("job_code", models.TextField(null=True)), + ("name", models.CharField(max_length=100, null=True)), + ("provider", models.CharField(max_length=50, null=True)), + ("state", models.CharField(max_length=100)), + ("storage_path", models.TextField()), + ("order_number", models.IntegerField(null=True)), + ("upload_type", models.CharField(max_length=100, default="uploaded")), + ("upload_extras", models.JSONField(default=dict)), + ], + options={"db_table": "reports_upload"}, + ), + migrations.CreateModel( + name="SessionLevelTotals", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("branches", models.IntegerField()), + ("coverage", models.DecimalField(decimal_places=2, max_digits=7)), + ("hits", models.IntegerField()), + ("lines", models.IntegerField()), + ("methods", models.IntegerField()), + ("misses", models.IntegerField()), + ("partials", models.IntegerField()), + ("files", models.IntegerField()), + ( + "report_session", + models.OneToOneField( + db_column="upload_id", + on_delete=django.db.models.deletion.CASCADE, + to="reports.reportsession", + ), + ), + ], + options={"db_table": "reports_uploadleveltotals"}, + ), + migrations.CreateModel( + name="RepositoryFlag", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("flag_name", models.CharField(max_length=255)), + ( + "repository", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="flags", + to="core.repository", + ), + ), + ], + options={"abstract": False}, + ), + migrations.CreateModel( + name="ReportSessionFlagMembership", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ( + "flag", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="reports.repositoryflag", + ), + ), + ( + "report_session", + models.ForeignKey( + db_column="upload_id", + on_delete=django.db.models.deletion.CASCADE, + to="reports.reportsession", + ), + ), + ], + options={"db_table": "reports_uploadflagmembership"}, + ), + migrations.CreateModel( + name="ReportSessionError", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("error_code", 
models.CharField(max_length=100)), + ("error_params", models.JSONField(default=dict)), + ( + "report_session", + models.ForeignKey( + db_column="upload_id", + on_delete=django.db.models.deletion.CASCADE, + related_name="errors", + to="reports.reportsession", + ), + ), + ], + options={"db_table": "reports_uploaderror"}, + ), + migrations.AddField( + model_name="reportsession", + name="flags", + field=models.ManyToManyField( + through="reports.ReportSessionFlagMembership", + to="reports.RepositoryFlag", + ), + ), + migrations.AddField( + model_name="reportsession", + name="report", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="sessions", + to="reports.commitreport", + ), + ), + migrations.CreateModel( + name="ReportLevelTotals", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("branches", models.IntegerField()), + ("coverage", models.DecimalField(decimal_places=2, max_digits=7)), + ("hits", models.IntegerField()), + ("lines", models.IntegerField()), + ("methods", models.IntegerField()), + ("misses", models.IntegerField()), + ("partials", models.IntegerField()), + ("files", models.IntegerField()), + ( + "report", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + to="reports.commitreport", + ), + ), + ], + options={"abstract": False}, + ), + migrations.CreateModel( + name="ReportDetails", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "files_array", + django.contrib.postgres.fields.ArrayField( + base_field=models.JSONField(), size=None + ), + ), + ( + "report", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + to="reports.commitreport", + ), + ), + ], + options={"abstract": False}, + ), + ] diff --git a/shared/django_apps/reports/migrations/0002_auto_20211006_2211.py b/shared/django_apps/reports/migrations/0002_auto_20211006_2211.py new file mode 100644 index 000000000..e5b5ec6e9 --- /dev/null +++ b/shared/django_apps/reports/migrations/0002_auto_20211006_2211.py @@ -0,0 +1,18 @@ +# Generated by Django 3.1.13 on 2021-10-06 22:11 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [("reports", "0001_initial")] + + operations = [ + migrations.RenameModel(old_name="ReportSessionError", new_name="UploadError"), + migrations.RenameModel( + old_name="ReportSessionFlagMembership", new_name="UploadFlagMembership" + ), + migrations.RenameModel( + old_name="SessionLevelTotals", new_name="UploadLevelTotals" + ), + ] diff --git a/shared/django_apps/reports/migrations/0003_auto_20211118_1150.py b/shared/django_apps/reports/migrations/0003_auto_20211118_1150.py new file mode 100644 index 000000000..515e6abab --- /dev/null +++ b/shared/django_apps/reports/migrations/0003_auto_20211118_1150.py @@ -0,0 +1,25 @@ +# Generated by Django 3.1.13 on 2021-11-18 11:50 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("reports", "0002_auto_20211006_2211")] + + operations = [ + migrations.AddField( + model_name="reportsession", + name="state_id", + field=models.IntegerField( + choices=[(1, 
"uploaded"), (2, "processed"), (3, "error")], null=True + ), + ), + migrations.AddField( + model_name="reportsession", + name="upload_type_id", + field=models.IntegerField( + choices=[(1, "uploaded"), (2, "carryforwarded")], null=True + ), + ), + ] diff --git a/shared/django_apps/reports/migrations/0004_commitreport_code.py b/shared/django_apps/reports/migrations/0004_commitreport_code.py new file mode 100644 index 000000000..867fee568 --- /dev/null +++ b/shared/django_apps/reports/migrations/0004_commitreport_code.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.12 on 2022-09-22 15:08 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("reports", "0003_auto_20211118_1150"), + ] + + operations = [ + migrations.AddField( + model_name="commitreport", + name="code", + field=models.CharField(max_length=100, null=True), + ), + ] diff --git a/shared/django_apps/reports/migrations/0005_auto_20221114_1428.py b/shared/django_apps/reports/migrations/0005_auto_20221114_1428.py new file mode 100644 index 000000000..36d14409b --- /dev/null +++ b/shared/django_apps/reports/migrations/0005_auto_20221114_1428.py @@ -0,0 +1,43 @@ +# Generated by Django 3.2.12 on 2022-11-14 14:28 + +import uuid + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("reports", "0004_commitreport_code"), + ] + + operations = [ + migrations.CreateModel( + name="ReportResults", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "state", + models.TextField( + choices=[("created", "Created"), ("ready", "Ready")], null=True + ), + ), + ("completed_at", models.DateTimeField(null=True)), + ("result", models.JSONField(default=dict)), + ( + "report", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + to="reports.commitreport", + ), + ), + ], + options={ + "abstract": False, + }, + ), + ] diff --git a/shared/django_apps/reports/migrations/0006_auto_20221212_1111.py b/shared/django_apps/reports/migrations/0006_auto_20221212_1111.py new file mode 100644 index 000000000..d2297e991 --- /dev/null +++ b/shared/django_apps/reports/migrations/0006_auto_20221212_1111.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.12 on 2022-12-12 11:11 + +from django.db import migrations + + +class Migration(migrations.Migration): + + atomic = False + + dependencies = [ + ("reports", "0005_auto_20221114_1428"), + ] + + operations = [ + migrations.RunSQL( + 'CREATE UNIQUE INDEX CONCURRENTLY unique_commit_id_code_idx ON reports_commitreport ("commit_id", "code");' + ), + ] diff --git a/shared/django_apps/reports/migrations/0007_auto_20230220_1245.py b/shared/django_apps/reports/migrations/0007_auto_20230220_1245.py new file mode 100644 index 000000000..848e903ae --- /dev/null +++ b/shared/django_apps/reports/migrations/0007_auto_20230220_1245.py @@ -0,0 +1,17 @@ +# Generated by Django 3.2.12 on 2023-02-20 12:45 + +from django.db import migrations +from shared.django_apps.migration_utils import RiskyRunSQL + + +class Migration(migrations.Migration): + + dependencies = [ + ("reports", "0006_auto_20221212_1111"), + ] + + operations = [ + RiskyRunSQL( + 'ALTER TABLE "reports_commitreport" ADD CONSTRAINT "unique_commit_id_code" UNIQUE USING INDEX unique_commit_id_code_idx;' + ), + ] 
diff --git a/shared/django_apps/reports/migrations/0008_auto_20230228_1059.py b/shared/django_apps/reports/migrations/0008_auto_20230228_1059.py new file mode 100644 index 000000000..1cfb1665a --- /dev/null +++ b/shared/django_apps/reports/migrations/0008_auto_20230228_1059.py @@ -0,0 +1,25 @@ +# Generated by Django 3.2.12 on 2023-02-28 10:59 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("reports", "0007_auto_20230220_1245"), + ] + + operations = [ + migrations.AlterField( + model_name="reportresults", + name="state", + field=models.TextField( + choices=[ + ("pending", "Pending"), + ("completed", "Completed"), + ("error", "Error"), + ], + null=True, + ), + ), + ] diff --git a/shared/django_apps/reports/migrations/0009_auto_20230223_1624.py b/shared/django_apps/reports/migrations/0009_auto_20230223_1624.py new file mode 100644 index 000000000..185ae8aef --- /dev/null +++ b/shared/django_apps/reports/migrations/0009_auto_20230223_1624.py @@ -0,0 +1,39 @@ +# Generated by Django 3.2.12 on 2023-02-23 16:24 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("reports", "0008_auto_20230228_1059"), + ] + + operations = [ + migrations.AddField( + model_name="repositoryflag", + name="deleted", + field=models.BooleanField(null=True), + ), + migrations.AlterField( + model_name="reportsession", + name="state_id", + field=models.IntegerField( + choices=[ + (1, "UPLOADED"), + (2, "PROCESSED"), + (3, "ERROR"), + (4, "FULLY_OVERWRITTEN"), + (5, "PARTIALLY_OVERWRITTEN"), + ], + null=True, + ), + ), + migrations.AlterField( + model_name="reportsession", + name="upload_type_id", + field=models.IntegerField( + choices=[(1, "UPLOADED"), (2, "CARRIEDFORWARD")], null=True + ), + ), + ] diff --git a/shared/django_apps/reports/migrations/0010_alter_reportdetails_files_array_and_more.py b/shared/django_apps/reports/migrations/0010_alter_reportdetails_files_array_and_more.py new file mode 100644 index 000000000..187b3de92 --- /dev/null +++ b/shared/django_apps/reports/migrations/0010_alter_reportdetails_files_array_and_more.py @@ -0,0 +1,60 @@ +# Generated by Django 4.1.7 on 2023-05-29 14:15 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + # Generated SQL + # BEGIN; + # -- + # -- Alter field files_array on reportdetails + # -- + # -- (no-op) + # -- + # -- Rename field files_array on reportdetails to _files_array + # -- + # -- (no-op) + # -- + # -- Add field _files_array_storage_path to reportdetails + # -- + # ALTER TABLE "reports_reportdetails" ADD COLUMN "files_array_storage_path" varchar(200) NULL; + # -- + # -- Alter field _files_array on reportdetails + # -- + # ALTER TABLE "reports_reportdetails" ALTER COLUMN "files_array" DROP NOT NULL; + # COMMIT; + + dependencies = [ + ("reports", "0009_auto_20230223_1624"), + ] + + operations = [ + migrations.AlterField( + model_name="reportdetails", + name="files_array", + field=django.contrib.postgres.fields.ArrayField( + base_field=models.JSONField(), db_column="files_array", size=None + ), + ), + migrations.RenameField( + model_name="reportdetails", + old_name="files_array", + new_name="_files_array", + ), + migrations.AddField( + model_name="reportdetails", + name="_files_array_storage_path", + field=models.URLField(db_column="files_array_storage_path", null=True), + ), + migrations.AlterField( + model_name="reportdetails", + name="_files_array", + 
field=django.contrib.postgres.fields.ArrayField( + base_field=models.JSONField(), + db_column="files_array", + null=True, + size=None, + ), + ), + ] diff --git a/shared/django_apps/reports/migrations/0011_commitreport_report_type.py b/shared/django_apps/reports/migrations/0011_commitreport_report_type.py new file mode 100644 index 000000000..f35133697 --- /dev/null +++ b/shared/django_apps/reports/migrations/0011_commitreport_report_type.py @@ -0,0 +1,34 @@ +# Generated by Django 4.2.7 on 2023-12-06 13:28 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Add field report_type to commitreport + -- + ALTER TABLE "reports_commitreport" ADD COLUMN "report_type" varchar(100) NULL; + COMMIT; + """ + + dependencies = [ + ("reports", "0010_alter_reportdetails_files_array_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="commitreport", + name="report_type", + field=models.CharField( + choices=[ + ("coverage", "Coverage"), + ("test_results", "Test Results"), + ("bundle_analysis", "Bundle Analysis"), + ], + max_length=100, + null=True, + ), + ), + ] diff --git a/shared/django_apps/reports/migrations/0012_alter_repositoryflag_flag_name.py b/shared/django_apps/reports/migrations/0012_alter_repositoryflag_flag_name.py new file mode 100644 index 000000000..b5a00d44b --- /dev/null +++ b/shared/django_apps/reports/migrations/0012_alter_repositoryflag_flag_name.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.7 on 2023-12-12 00:26 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("reports", "0011_commitreport_report_type"), + ] + + operations = [ + migrations.AlterField( + model_name="repositoryflag", + name="flag_name", + field=models.CharField(max_length=1024), + ), + ] diff --git a/shared/django_apps/reports/migrations/0013_test_testinstance.py b/shared/django_apps/reports/migrations/0013_test_testinstance.py new file mode 100644 index 000000000..a04733b0a --- /dev/null +++ b/shared/django_apps/reports/migrations/0013_test_testinstance.py @@ -0,0 +1,94 @@ +# Generated by Django 4.2.7 on 2024-01-17 20:41 + +import uuid + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Create model Test + -- + CREATE TABLE "reports_test" ("id" text NOT NULL PRIMARY KEY, "external_id" uuid NOT NULL, "created_at" timestamp with time zone NOT NULL, "updated_at" timestamp with time zone NOT NULL, "name" text NOT NULL, "testsuite" text NOT NULL, "env" text NOT NULL, "repoid" integer NOT NULL); + -- + -- Create model TestInstance + -- + CREATE TABLE "reports_testinstance" ("id" bigint NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, "external_id" uuid NOT NULL, "created_at" timestamp with time zone NOT NULL, "updated_at" timestamp with time zone NOT NULL, "duration_seconds" double precision NOT NULL, "outcome" integer NOT NULL, "failure_message" text NULL, "test_id" text NOT NULL, "upload_id" bigint NOT NULL); + ALTER TABLE "reports_test" ADD CONSTRAINT "reports_test_repoid_445c33d7_fk_repos_repoid" FOREIGN KEY ("repoid") REFERENCES "repos" ("repoid") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_test_id_5c60c58c_like" ON "reports_test" ("id" text_pattern_ops); + CREATE INDEX "reports_test_repoid_445c33d7" ON "reports_test" ("repoid"); + ALTER TABLE "reports_testinstance" ADD CONSTRAINT "reports_testinstance_test_id_9c8dd6c1_fk_reports_test_id" FOREIGN KEY ("test_id") REFERENCES 
"reports_test" ("id") DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE "reports_testinstance" ADD CONSTRAINT "reports_testinstance_upload_id_7350520f_fk_reports_upload_id" FOREIGN KEY ("upload_id") REFERENCES "reports_upload" ("id") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_testinstance_test_id_9c8dd6c1" ON "reports_testinstance" ("test_id"); + CREATE INDEX "reports_testinstance_test_id_9c8dd6c1_like" ON "reports_testinstance" ("test_id" text_pattern_ops); + CREATE INDEX "reports_testinstance_upload_id_7350520f" ON "reports_testinstance" ("upload_id"); + COMMIT; + """ + + dependencies = [ + ("core", "0045_repository_languages_last_updated"), + ("reports", "0012_alter_repositoryflag_flag_name"), + ] + + operations = [ + migrations.CreateModel( + name="Test", + fields=[ + ("id", models.TextField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("name", models.TextField()), + ("testsuite", models.TextField()), + ("env", models.TextField()), + ( + "repository", + models.ForeignKey( + db_column="repoid", + on_delete=django.db.models.deletion.CASCADE, + related_name="tests", + to="core.repository", + ), + ), + ], + options={ + "db_table": "reports_test", + }, + ), + migrations.CreateModel( + name="TestInstance", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("duration_seconds", models.FloatField()), + ("outcome", models.IntegerField()), + ("failure_message", models.TextField(null=True)), + ( + "test", + models.ForeignKey( + db_column="test_id", + on_delete=django.db.models.deletion.CASCADE, + related_name="testinstances", + to="reports.test", + ), + ), + ( + "upload", + models.ForeignKey( + db_column="upload_id", + on_delete=django.db.models.deletion.CASCADE, + related_name="testinstances", + to="reports.reportsession", + ), + ), + ], + options={ + "db_table": "reports_testinstance", + }, + ), + ] diff --git a/shared/django_apps/reports/migrations/0014_rename_env_test_flags_hash_and_more.py b/shared/django_apps/reports/migrations/0014_rename_env_test_flags_hash_and_more.py new file mode 100644 index 000000000..d42a2ccdb --- /dev/null +++ b/shared/django_apps/reports/migrations/0014_rename_env_test_flags_hash_and_more.py @@ -0,0 +1,54 @@ +# Generated by Django 4.2.7 on 2024-01-24 22:47 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + """ + BEGIN; + -- + -- Rename field env on test to flags_hash + -- + ALTER TABLE "reports_test" RENAME COLUMN "env" TO "flags_hash"; + -- + -- Alter field outcome on testinstance + -- + ALTER TABLE "reports_testinstance" ALTER COLUMN "outcome" TYPE varchar(100) USING "outcome"::varchar(100); + -- + -- Create constraint reports_test_repoid_name_testsuite_flags_hash on model test + -- + ALTER TABLE "reports_test" ADD CONSTRAINT "reports_test_repoid_name_testsuite_flags_hash" UNIQUE ("repoid", "name", "testsuite", "flags_hash"); + COMMIT; + """ + + dependencies = [ + ("reports", "0013_test_testinstance"), + ] + + operations = [ + migrations.RenameField( + model_name="test", + old_name="env", + new_name="flags_hash", + ), + migrations.AlterField( + model_name="testinstance", + name="outcome", + field=models.CharField( + choices=[ + 
("failure", "Failure"), + ("skip", "Skip"), + ("error", "Error"), + ("pass", "Pass"), + ], + max_length=100, + ), + ), + migrations.AddConstraint( + model_name="test", + constraint=models.UniqueConstraint( + fields=("repository", "name", "testsuite", "flags_hash"), + name="reports_test_repoid_name_testsuite_flags_hash", + ), + ), + ] diff --git a/shared/django_apps/reports/migrations/__init__.py b/shared/django_apps/reports/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/reports/models.py b/shared/django_apps/reports/models.py new file mode 100644 index 000000000..bcbbddbb1 --- /dev/null +++ b/shared/django_apps/reports/models.py @@ -0,0 +1,290 @@ +import logging +import uuid + +from django.contrib.postgres.fields import ArrayField +from django.db import models +from django_prometheus.models import ExportModelOperationsMixin +from shared.reports.enums import UploadState, UploadType + +from codecov.models import BaseCodecovModel +# from reports.managers import CommitReportManager +# from upload.constants import ci +# from utils.config import should_write_data_to_storage_config_check +# from utils.model_utils import ArchiveField +# from utils.services import get_short_service_name + +log = logging.getLogger(__name__) + + +class ReportType(models.TextChoices): + COVERAGE = "coverage" + TEST_RESULTS = "test_results" + BUNDLE_ANALYSIS = "bundle_analysis" + + +class AbstractTotals( + ExportModelOperationsMixin("reports.abstract_totals"), BaseCodecovModel +): + branches = models.IntegerField() + coverage = models.DecimalField(max_digits=7, decimal_places=2) + hits = models.IntegerField() + lines = models.IntegerField() + methods = models.IntegerField() + misses = models.IntegerField() + partials = models.IntegerField() + files = models.IntegerField() + + class Meta: + abstract = True + + +class CommitReport( + ExportModelOperationsMixin("reports.commit_report"), BaseCodecovModel +): + class ReportType(models.TextChoices): + COVERAGE = "coverage" + TEST_RESULTS = "test_results" + BUNDLE_ANALYSIS = "bundle_analysis" + + commit = models.ForeignKey( + "core.Commit", related_name="reports", on_delete=models.CASCADE + ) + code = models.CharField(null=True, max_length=100) + report_type = models.CharField( + null=True, max_length=100, choices=ReportType.choices + ) + + # Missing Keys/Methods + # objects = CommitReportManager() + + +class ReportResults( + ExportModelOperationsMixin("reports.report_results"), BaseCodecovModel +): + class ReportResultsStates(models.TextChoices): + PENDING = "pending" + COMPLETED = "completed" + ERROR = "error" + + report = models.OneToOneField(CommitReport, on_delete=models.CASCADE) + state = models.TextField(null=True, choices=ReportResultsStates.choices) + completed_at = models.DateTimeField(null=True) + result = models.JSONField(default=dict) + + +class ReportDetails( + ExportModelOperationsMixin("reports.report_details"), BaseCodecovModel +): + report = models.OneToOneField(CommitReport, on_delete=models.CASCADE) + _files_array = ArrayField(models.JSONField(), db_column="files_array", null=True) + _files_array_storage_path = models.URLField( + db_column="files_array_storage_path", null=True + ) + + # Missing Keys/Methods + # def get_repository(self): + # return self.report.commit.repository + + # def get_commitid(self): + # return self.report.commit.commitid + + # def should_write_to_storage(self) -> bool: + # if ( + # self.report is None + # or self.report.commit is None + # or self.report.commit.repository is 
None + # or self.report.commit.repository.author is None + # ): + # return False + # is_codecov_repo = self.report.commit.repository.author.username == "codecov" + # return should_write_data_to_storage_config_check( + # master_switch_key="report_details_files_array", + # is_codecov_repo=is_codecov_repo, + # repoid=self.report.commit.repository.repoid, + # ) + + # files_array = ArchiveField( + # should_write_to_storage_fn=should_write_to_storage, + # default_value_class=list, + # ) + + +class ReportLevelTotals(AbstractTotals): + report = models.OneToOneField(CommitReport, on_delete=models.CASCADE) + + +class UploadError(ExportModelOperationsMixin("reports.upload_error"), BaseCodecovModel): + report_session = models.ForeignKey( + "ReportSession", + db_column="upload_id", + related_name="errors", + on_delete=models.CASCADE, + ) + error_code = models.CharField(max_length=100) + error_params = models.JSONField(default=dict) + + class Meta: + db_table = "reports_uploaderror" + + +class UploadFlagMembership( + ExportModelOperationsMixin("reports.upload_flag_membership"), models.Model +): + report_session = models.ForeignKey( + "ReportSession", db_column="upload_id", on_delete=models.CASCADE + ) + flag = models.ForeignKey("RepositoryFlag", on_delete=models.CASCADE) + id = models.BigAutoField(primary_key=True) + + class Meta: + db_table = "reports_uploadflagmembership" + + +class RepositoryFlag( + ExportModelOperationsMixin("reports.repository_flag"), BaseCodecovModel +): + repository = models.ForeignKey( + "core.Repository", related_name="flags", on_delete=models.CASCADE + ) + flag_name = models.CharField(max_length=1024) + deleted = models.BooleanField(null=True) + + +class ReportSession( + ExportModelOperationsMixin("reports.report_session"), BaseCodecovModel +): + # should be called Upload, but to do it we have to make the + # constraints be manually named, which take a bit + build_code = models.TextField(null=True) + build_url = models.TextField(null=True) + env = models.JSONField(null=True) + flags = models.ManyToManyField(RepositoryFlag, through=UploadFlagMembership) + job_code = models.TextField(null=True) + name = models.CharField(null=True, max_length=100) + provider = models.CharField(max_length=50, null=True) + report = models.ForeignKey( + "CommitReport", related_name="sessions", on_delete=models.CASCADE + ) + state = models.CharField(max_length=100) + storage_path = models.TextField() + order_number = models.IntegerField(null=True) + upload_type = models.CharField(max_length=100, default="uploaded") + upload_extras = models.JSONField(default=dict) + state_id = models.IntegerField(null=True, choices=UploadState.choices()) + upload_type_id = models.IntegerField(null=True, choices=UploadType.choices()) + + class Meta: + db_table = "reports_upload" + + # Missing Keys/Methods + # @property + # def download_url(self): + # repository = self.report.commit.repository + # return ( + # reverse( + # "upload-download", + # kwargs={ + # "service": get_short_service_name(repository.author.service), + # "owner_username": repository.author.username, + # "repo_name": repository.name, + # }, + # ) + # + f"?path={self.storage_path}" + # ) + + # @property + # def ci_url(self): + # if self.build_url: + # # build_url was saved in the database + # return self.build_url + + # # otherwise we need to construct it ourself (if possible) + # build_url = ci.get(self.provider, {}).get("build_url") + # if not build_url: + # return + # repository = self.report.commit.repository + # data = { + # "service_short": 
get_short_service_name(repository.author.service), + # "owner": repository.author, + # "upload": self, + # "repo": repository, + # "commit": self.report.commit, + # } + # return build_url.format(**data) + + # @property + # def flag_names(self): + # return [flag.flag_name for flag in self.flags.all()] + + +class UploadLevelTotals(AbstractTotals): + report_session = models.OneToOneField( + ReportSession, db_column="upload_id", on_delete=models.CASCADE + ) + + class Meta: + db_table = "reports_uploadleveltotals" + + +class Test(models.Model): + # the reason we aren't using the regular primary key + # in this case is because we want to be able to compute/predict + # the primary key of a Test object ourselves in the processor + # so we can easily do concurrent writes to the database + # this is a hash of the repoid, name, testsuite and flags_hash + id = models.TextField(primary_key=True) + + external_id = models.UUIDField(default=uuid.uuid4, editable=False) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + repository = models.ForeignKey( + "core.Repository", + db_column="repoid", + related_name="tests", + on_delete=models.CASCADE, + ) + name = models.TextField() + testsuite = models.TextField() + # this is a hash of the flags associated with this test + # users will use flags to distinguish the same test being run + # in a different environment + # for example: the same test being run on windows vs. mac + flags_hash = models.TextField() + + class Meta: + db_table = "reports_test" + constraints = [ + models.UniqueConstraint( + fields=["repository", "name", "testsuite", "flags_hash"], + name="reports_test_repoid_name_testsuite_flags_hash", + ), + ] + + +class TestInstance(BaseCodecovModel): + test = models.ForeignKey( + "Test", + db_column="test_id", + related_name="testinstances", + on_delete=models.CASCADE, + ) + + class Outcome(models.TextChoices): + FAILURE = "failure" + SKIP = "skip" + ERROR = "error" + PASS = "pass" + + duration_seconds = models.FloatField() + outcome = models.CharField(max_length=100, choices=Outcome.choices) + upload = models.ForeignKey( + "ReportSession", + db_column="upload_id", + related_name="testinstances", + on_delete=models.CASCADE, + ) + failure_message = models.TextField(null=True) + + class Meta: + db_table = "reports_testinstance" From 662437137b3bbe7830d4dd8606cc794070ccc3ad Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 29 Feb 2024 16:58:15 -0800 Subject: [PATCH 04/36] feat: add as many properties from original api models to shared --- shared/django_apps/codecov_auth/constants.py | 5 + shared/django_apps/codecov_auth/managers.py | 83 +++ shared/django_apps/codecov_auth/models.py | 625 ++++++++++--------- shared/django_apps/core/managers.py | 347 ++++++++++ shared/django_apps/core/models.py | 132 ++-- shared/django_apps/reports/managers.py | 17 + shared/django_apps/reports/models.py | 137 ++-- shared/django_apps/utils/config.py | 21 + shared/django_apps/utils/services.py | 21 + shared/upload/constants.py | 222 +++++++ 10 files changed, 1166 insertions(+), 444 deletions(-) create mode 100644 shared/django_apps/codecov_auth/constants.py create mode 100644 shared/django_apps/codecov_auth/managers.py create mode 100644 shared/django_apps/core/managers.py create mode 100644 shared/django_apps/reports/managers.py create mode 100644 shared/django_apps/utils/config.py create mode 100644 shared/django_apps/utils/services.py create mode 100644 shared/upload/constants.py diff --git 
a/shared/django_apps/codecov_auth/constants.py b/shared/django_apps/codecov_auth/constants.py new file mode 100644 index 000000000..a27a4f230 --- /dev/null +++ b/shared/django_apps/codecov_auth/constants.py @@ -0,0 +1,5 @@ +AVATAR_GITHUB_BASE_URL = "https://avatars0.githubusercontent.com" +BITBUCKET_BASE_URL = "https://bitbucket.org" +GITLAB_BASE_URL = "https://gitlab.com" +GRAVATAR_BASE_URL = "https://www.gravatar.com" +AVATARIO_BASE_URL = "https://avatars.io" diff --git a/shared/django_apps/codecov_auth/managers.py b/shared/django_apps/codecov_auth/managers.py new file mode 100644 index 000000000..0a65e868d --- /dev/null +++ b/shared/django_apps/codecov_auth/managers.py @@ -0,0 +1,83 @@ +from django.db.models import Exists, Func, Manager, OuterRef, Q, QuerySet, Subquery +from django.db.models.functions import Coalesce + +from shared.django_apps.core.models import Pull + + +class OwnerQuerySet(QuerySet): + def users_of(self, owner): + """ + Returns users of "owner", which is defined as Owner objects + whose "organizations" field contains the "owner"s ownerid + or is one of the "owner"s "plan_activated_users". + """ + filters = Q(organizations__contains=[owner.ownerid]) + if owner.plan_activated_users: + filters = filters | Q(ownerid__in=owner.plan_activated_users) + + return self.filter(filters) + + def annotate_activated_in(self, owner): + """ + Annotates queryset with "activated" field, which is True + if a given user is activated in organization "owner", false + otherwise. + """ + from codecov_auth.models import Owner + + return self.annotate( + activated=Coalesce( + Exists( + Owner.objects.filter( + ownerid=owner.ownerid, + plan_activated_users__contains=Func( + OuterRef("ownerid"), + function="ARRAY", + template="%(function)s[%(expressions)s]", + ), + ) + ), + False, + ) + ) + + def annotate_is_admin_in(self, owner): + """ + Annotates queryset with "is_admin" field, which is True + if a given user is an admin in organization "owner", and + false otherwise. 
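Illustrative usage sketch (not part of this change) of the queryset helpers defined above, assuming the Owner model wires them up via `objects = OwnerManager()` as done later in this patch; the organization below is hypothetical:

    from shared.django_apps.codecov_auth.models import Owner

    org = Owner.objects.get(service="github", username="example-org")  # hypothetical org

    # Owners that belong to `org`, i.e. have org.ownerid in their `organizations`
    # array or appear in org.plan_activated_users.
    members = Owner.objects.users_of(org)

    # The same members annotated with a boolean `activated` flag for this org.
    activated = members.annotate_activated_in(org).filter(activated=True)
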
+ """ + from codecov_auth.models import Owner + + return self.annotate( + is_admin=Coalesce( + Exists( + Owner.objects.filter( + ownerid=owner.ownerid, + admins__contains=Func( + OuterRef("ownerid"), + function="ARRAY", + template="%(function)s[%(expressions)s]", + ), + ) + ), + False, + ) + ) + + def annotate_last_pull_timestamp(self): + pulls = Pull.objects.filter(author=OuterRef("pk")).order_by("-updatestamp") + return self.annotate( + last_pull_timestamp=Subquery(pulls.values("updatestamp")[:1]), + ) + + +# We cannot use `QuerySet.as_manager()` since it relies on the `inspect` module and will +# not play nicely with Cython (which we use for self-hosted): +# https://cython.readthedocs.io/en/latest/src/userguide/limitations.html#inspect-support +class OwnerManager(Manager): + def get_queryset(self): + return OwnerQuerySet(self.model, using=self._db) + + def users_of(self, *args, **kwargs): + return self.get_queryset().users_of(*args, **kwargs) diff --git a/shared/django_apps/codecov_auth/models.py b/shared/django_apps/codecov_auth/models.py index feb86be62..578398d46 100644 --- a/shared/django_apps/codecov_auth/models.py +++ b/shared/django_apps/codecov_auth/models.py @@ -2,15 +2,24 @@ import logging import os import uuid +from dataclasses import asdict from datetime import datetime from django.contrib.postgres.fields import ArrayField, CITextField from django.db import models +from django.db.models.manager import BaseManager from django_prometheus.models import ExportModelOperationsMixin +from django.forms import ValidationError +from django.utils import timezone +from shared.config import get_config +from hashlib import md5 from shared.django_apps.codecov.models import BaseCodecovModel -from shared.django_apps.core.models import DateTimeWithoutTZField -from shared.plan.constants import PlanName +from shared.django_apps.codecov_auth.constants import AVATAR_GITHUB_BASE_URL, AVATARIO_BASE_URL, BITBUCKET_BASE_URL, GRAVATAR_BASE_URL +from shared.django_apps.codecov_auth.managers import OwnerManager +from shared.django_apps.core.managers import RepositoryManager +from shared.django_apps.core.models import DateTimeWithoutTZField, Repository +from shared.plan.constants import USER_PLAN_REPRESENTATIONS, PlanName # Large number to represent Infinity as float('int') is not JSON serializable @@ -69,37 +78,36 @@ class CustomerIntent(models.TextChoices): class Meta: db_table = "users" - # Missing Key/Methods - # @property - # def is_active(self): - # # Required to implement django's user-model interface - # return True + @property + def is_active(self): + # Required to implement django's user-model interface + return True - # @property - # def is_anonymous(self): - # # Required to implement django's user-model interface - # return False + @property + def is_anonymous(self): + # Required to implement django's user-model interface + return False - # @property - # def is_authenticated(self): - # # Required to implement django's user-model interface - # return True + @property + def is_authenticated(self): + # Required to implement django's user-model interface + return True - # def has_perm(self, perm, obj=None): - # # Required to implement django's user-model interface - # return self.is_staff + def has_perm(self, perm, obj=None): + # Required to implement django's user-model interface + return self.is_staff - # def has_perms(self, *args, **kwargs): - # # Required to implement django's user-model interface - # return self.is_staff + def has_perms(self, *args, **kwargs): + # Required to implement 
django's user-model interface + return self.is_staff - # def has_module_perms(self, package_name): - # # Required to implement django's user-model interface - # return self.is_staff + def has_module_perms(self, package_name): + # Required to implement django's user-model interface + return self.is_staff - # def get_username(self): - # # Required to implement django's user-model interface - # return self.external_id + def get_username(self): + # Required to implement django's user-model interface + return self.external_id class Owner(ExportModelOperationsMixin("codecov_auth.owner"), models.Model): @@ -198,270 +206,270 @@ class Meta: related_name="owners", ) - # Missing Key/Methods - # objects = OwnerManager() - - # repository_set = RepositoryManager() - - # def __str__(self): - # return f"Owner<{self.service}/{self.username}>" - - # def save(self, *args, **kwargs): - # self.updatestamp = timezone.now() - # super().save(*args, **kwargs) - - # @property - # def has_yaml(self): - # return self.yaml is not None - - # @property - # def default_org(self): - # try: - # if self.profile: - # return self.profile.default_org - # except OwnerProfile.DoesNotExist: - # return None - - # @property - # def has_legacy_plan(self): - # return self.plan is None or not self.plan.startswith("users") - - # @property - # def repo_total_credits(self): - # # Returns the number of private repo credits remaining - # # Only meaningful for legacy plans - # V4_PLAN_PREFIX = "v4-" - # if not self.has_legacy_plan: - # return INFINITY - # if self.plan is None: - # return int(1 + self.free or 0) - # elif self.plan.startswith(V4_PLAN_PREFIX): - # return int(self.plan[3:-1]) - # else: - # return int(self.plan[:-1]) - - # @property - # def root_organization(self): - # """ - # Find the root organization of Gitlab, by using the root_parent_service_id - # if it exists, otherwise iterating through the parents and caches it in root_parent_service_id - # """ - # if self.root_parent_service_id: - # return Owner.objects.get( - # service_id=self.root_parent_service_id, service=self.service - # ) - - # root = None - # if self.service == "gitlab" and self.parent_service_id: - # root = self - # while root.parent_service_id is not None: - # root = Owner.objects.get( - # service_id=root.parent_service_id, service=root.service - # ) - # self.root_parent_service_id = root.service_id - # self.save() - # return root - - # @property - # def nb_active_private_repos(self): - # return self.repository_set.filter(active=True, private=True).count() - - # @property - # def has_private_repos(self): - # return self.repository_set.filter(private=True).exists() - - # @property - # def repo_credits(self): - # # Returns the number of private repo credits remaining - # # Only meaningful for legacy plans - # if not self.has_legacy_plan: - # return INFINITY - # return self.repo_total_credits - self.nb_active_private_repos - - # @property - # def orgs(self): - # if self.organizations: - # return Owner.objects.filter(ownerid__in=self.organizations) - # return Owner.objects.none() - - # @property - # def active_repos(self): - # return Repository.objects.filter(active=True, author=self.ownerid).order_by( - # "-updatestamp" - # ) - - # @property - # def activated_user_count(self): - # if not self.plan_activated_users: - # return 0 - # return Owner.objects.filter( - # ownerid__in=self.plan_activated_users, student=False - # ).count() - - # @property - # def activated_student_count(self): - # if not self.plan_activated_users: - # return 0 - # return 
Owner.objects.filter( - # ownerid__in=self.plan_activated_users, student=True - # ).count() - - # @property - # def student_count(self): - # return Owner.objects.users_of(self).filter(student=True).count() - - # @property - # def inactive_user_count(self): - # return ( - # Owner.objects.users_of(self).filter(student=False).count() - # - self.activated_user_count - # ) - - # def is_admin(self, owner): - # return self.ownerid == owner.ownerid or ( - # bool(self.admins) and owner.ownerid in self.admins - # ) - - # @property - # def is_authenticated(self): - # # NOTE: this is here to support `UserTokenAuthentication` which still returns - # # an `Owner` as the authenticatable record. Since there is code that calls - # # `request.user.is_authenticated` we need to support that here. - # return True - - # def clean(self): - # if self.staff: - # domain = self.email.split("@")[1] if self.email else "" - # if domain not in ["codecov.io", "sentry.io"]: - # raise ValidationError( - # "User not part of Codecov or Sentry cannot be a staff member" - # ) - # if not self.plan: - # self.plan = None - # if not self.stripe_customer_id: - # self.stripe_customer_id = None - # if not self.stripe_subscription_id: - # self.stripe_subscription_id = None - - # @property - # def avatar_url(self, size=DEFAULT_AVATAR_SIZE): - # if self.service == SERVICE_GITHUB and self.service_id: - # return "{}/u/{}?v=3&s={}".format( - # AVATAR_GITHUB_BASE_URL, self.service_id, size - # ) - - # elif self.service == SERVICE_GITHUB_ENTERPRISE and self.service_id: - # return "{}/avatars/u/{}?v=3&s={}".format( - # get_config("github_enterprise", "url"), self.service_id, size - # ) - - # # Bitbucket - # elif self.service == SERVICE_BITBUCKET and self.username: - # return "{}/account/{}/avatar/{}".format( - # BITBUCKET_BASE_URL, self.username, size - # ) - - # elif ( - # self.service == SERVICE_BITBUCKET_SERVER - # and self.service_id - # and self.username - # ): - # if "U" in self.service_id: - # return "{}/users/{}/avatar.png?s={}".format( - # get_config("bitbucket_server", "url"), self.username, size - # ) - # else: - # return "{}/projects/{}/avatar.png?s={}".format( - # get_config("bitbucket_server", "url"), self.username, size - # ) - - # # Gitlab - # elif self.service == SERVICE_GITLAB and self.email: - # return get_gitlab_url(self.email, size) - - # # Codecov config - # elif get_config("services", "gravatar") and self.email: - # return "{}/avatar/{}?s={}".format( - # GRAVATAR_BASE_URL, md5(self.email.lower().encode()).hexdigest(), size - # ) - - # elif get_config("services", "avatars.io") and self.email: - # return "{}/avatar/{}/{}".format( - # AVATARIO_BASE_URL, md5(self.email.lower().encode()).hexdigest(), size - # ) - - # elif self.ownerid: - # return "{}/users/{}.png?size={}".format( - # get_config("setup", "codecov_url"), self.ownerid, size - # ) - - # elif os.getenv("APP_ENV") == SERVICE_CODECOV_ENTERPRISE: - # return "{}/media/images/gafsi/avatar.svg".format( - # get_config("setup", "codecov_url") - # ) - - # else: - # return "{}/media/images/gafsi/avatar.svg".format( - # get_config("setup", "media", "assets") - # ) - - # @property - # def pretty_plan(self): - # if self.plan in USER_PLAN_REPRESENTATIONS: - # plan_details = asdict(USER_PLAN_REPRESENTATIONS[self.plan]) - - # # update with quantity they've purchased - # # allows api users to update the quantity - # # by modifying the "plan", sidestepping - # # some iffy data modeling - - # plan_details.update({"quantity": self.plan_user_count}) - # return plan_details - - # def 
can_activate_user(self, user): - # return ( - # user.student or self.activated_user_count < self.plan_user_count + self.free - # ) - - # def activate_user(self, user): - # log.info(f"Activating user {user.ownerid} in ownerid {self.ownerid}") - # if isinstance(self.plan_activated_users, list): - # if user.ownerid not in self.plan_activated_users: - # self.plan_activated_users.append(user.ownerid) - # else: - # self.plan_activated_users = [user.ownerid] - # self.save() - - # def deactivate_user(self, user): - # log.info(f"Deactivating user {user.ownerid} in ownerid {self.ownerid}") - # if isinstance(self.plan_activated_users, list): - # try: - # self.plan_activated_users.remove(user.ownerid) - # except ValueError: - # pass - # self.save() - - # def add_admin(self, user): - # log.info( - # f"Granting admin permissions to user {user.ownerid} within owner {self.ownerid}" - # ) - # if isinstance(self.admins, list): - # if user.ownerid not in self.admins: - # self.admins.append(user.ownerid) - # else: - # self.admins = [user.ownerid] - # self.save() - - # def remove_admin(self, user): - # log.info( - # f"Revoking admin permissions for user {user.ownerid} within owner {self.ownerid}" - # ) - # if isinstance(self.admins, list): - # try: - # self.admins.remove(user.ownerid) - # except ValueError: - # pass - # self.save() + objects = OwnerManager() + + repository_set = RepositoryManager() + + def __str__(self): + return f"Owner<{self.service}/{self.username}>" + + def save(self, *args, **kwargs): + self.updatestamp = timezone.now() + super().save(*args, **kwargs) + + @property + def has_yaml(self): + return self.yaml is not None + + @property + def default_org(self): + try: + if self.profile: + return self.profile.default_org + except OwnerProfile.DoesNotExist: + return None + + @property + def has_legacy_plan(self): + return self.plan is None or not self.plan.startswith("users") + + @property + def repo_total_credits(self): + # Returns the number of private repo credits remaining + # Only meaningful for legacy plans + V4_PLAN_PREFIX = "v4-" + if not self.has_legacy_plan: + return INFINITY + if self.plan is None: + return int(1 + self.free or 0) + elif self.plan.startswith(V4_PLAN_PREFIX): + return int(self.plan[3:-1]) + else: + return int(self.plan[:-1]) + + @property + def root_organization(self): + """ + Find the root organization of Gitlab, by using the root_parent_service_id + if it exists, otherwise iterating through the parents and caches it in root_parent_service_id + """ + if self.root_parent_service_id: + return Owner.objects.get( + service_id=self.root_parent_service_id, service=self.service + ) + + root = None + if self.service == "gitlab" and self.parent_service_id: + root = self + while root.parent_service_id is not None: + root = Owner.objects.get( + service_id=root.parent_service_id, service=root.service + ) + self.root_parent_service_id = root.service_id + self.save() + return root + + @property + def nb_active_private_repos(self): + return self.repository_set.filter(active=True, private=True).count() + + @property + def has_private_repos(self): + return self.repository_set.filter(private=True).exists() + + @property + def repo_credits(self): + # Returns the number of private repo credits remaining + # Only meaningful for legacy plans + if not self.has_legacy_plan: + return INFINITY + return self.repo_total_credits - self.nb_active_private_repos + + @property + def orgs(self): + if self.organizations: + return Owner.objects.filter(ownerid__in=self.organizations) + return 
Owner.objects.none() + + @property + def active_repos(self): + return Repository.objects.filter(active=True, author=self.ownerid).order_by( + "-updatestamp" + ) + + @property + def activated_user_count(self): + if not self.plan_activated_users: + return 0 + return Owner.objects.filter( + ownerid__in=self.plan_activated_users, student=False + ).count() + + @property + def activated_student_count(self): + if not self.plan_activated_users: + return 0 + return Owner.objects.filter( + ownerid__in=self.plan_activated_users, student=True + ).count() + + @property + def student_count(self): + return Owner.objects.users_of(self).filter(student=True).count() + + @property + def inactive_user_count(self): + return ( + Owner.objects.users_of(self).filter(student=False).count() + - self.activated_user_count + ) + + def is_admin(self, owner): + return self.ownerid == owner.ownerid or ( + bool(self.admins) and owner.ownerid in self.admins + ) + + @property + def is_authenticated(self): + # NOTE: this is here to support `UserTokenAuthentication` which still returns + # an `Owner` as the authenticatable record. Since there is code that calls + # `request.user.is_authenticated` we need to support that here. + return True + + def clean(self): + if self.staff: + domain = self.email.split("@")[1] if self.email else "" + if domain not in ["codecov.io", "sentry.io"]: + raise ValidationError( + "User not part of Codecov or Sentry cannot be a staff member" + ) + if not self.plan: + self.plan = None + if not self.stripe_customer_id: + self.stripe_customer_id = None + if not self.stripe_subscription_id: + self.stripe_subscription_id = None + + @property + def avatar_url(self, size=DEFAULT_AVATAR_SIZE): + if self.service == SERVICE_GITHUB and self.service_id: + return "{}/u/{}?v=3&s={}".format( + AVATAR_GITHUB_BASE_URL, self.service_id, size + ) + + elif self.service == SERVICE_GITHUB_ENTERPRISE and self.service_id: + return "{}/avatars/u/{}?v=3&s={}".format( + get_config("github_enterprise", "url"), self.service_id, size + ) + + # Bitbucket + elif self.service == SERVICE_BITBUCKET and self.username: + return "{}/account/{}/avatar/{}".format( + BITBUCKET_BASE_URL, self.username, size + ) + + elif ( + self.service == SERVICE_BITBUCKET_SERVER + and self.service_id + and self.username + ): + if "U" in self.service_id: + return "{}/users/{}/avatar.png?s={}".format( + get_config("bitbucket_server", "url"), self.username, size + ) + else: + return "{}/projects/{}/avatar.png?s={}".format( + get_config("bitbucket_server", "url"), self.username, size + ) + + # Gitlab + # TODO: This is missing porting; required a lot more django apps + extra files that will ignore for this 1st pass + # elif self.service == SERVICE_GITLAB and self.email: + # return get_gitlab_url(self.email, size) + + # Codecov config + elif get_config("services", "gravatar") and self.email: + return "{}/avatar/{}?s={}".format( + GRAVATAR_BASE_URL, md5(self.email.lower().encode()).hexdigest(), size + ) + + elif get_config("services", "avatars.io") and self.email: + return "{}/avatar/{}/{}".format( + AVATARIO_BASE_URL, md5(self.email.lower().encode()).hexdigest(), size + ) + + elif self.ownerid: + return "{}/users/{}.png?size={}".format( + get_config("setup", "codecov_url"), self.ownerid, size + ) + + elif os.getenv("APP_ENV") == SERVICE_CODECOV_ENTERPRISE: + return "{}/media/images/gafsi/avatar.svg".format( + get_config("setup", "codecov_url") + ) + + else: + return "{}/media/images/gafsi/avatar.svg".format( + get_config("setup", "media", "assets") + ) + + 
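A minimal, self-contained sketch (not part of this change) of the Gravatar branch of avatar_url above; the email is hypothetical and GRAVATAR_BASE_URL matches the value in codecov_auth/constants.py:

    from hashlib import md5

    GRAVATAR_BASE_URL = "https://www.gravatar.com"

    def gravatar_url(email: str, size: int) -> str:
        # Gravatar keys avatars on the md5 hex digest of the lower-cased email address.
        digest = md5(email.lower().encode()).hexdigest()
        return "{}/avatar/{}?s={}".format(GRAVATAR_BASE_URL, digest, size)

    print(gravatar_url("user@example.com", 128))  # https://www.gravatar.com/avatar/<md5 digest>?s=128
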
@property + def pretty_plan(self): + if self.plan in USER_PLAN_REPRESENTATIONS: + plan_details = asdict(USER_PLAN_REPRESENTATIONS[self.plan]) + + # update with quantity they've purchased + # allows api users to update the quantity + # by modifying the "plan", sidestepping + # some iffy data modeling + + plan_details.update({"quantity": self.plan_user_count}) + return plan_details + + def can_activate_user(self, user): + return ( + user.student or self.activated_user_count < self.plan_user_count + self.free + ) + + def activate_user(self, user): + log.info(f"Activating user {user.ownerid} in ownerid {self.ownerid}") + if isinstance(self.plan_activated_users, list): + if user.ownerid not in self.plan_activated_users: + self.plan_activated_users.append(user.ownerid) + else: + self.plan_activated_users = [user.ownerid] + self.save() + + def deactivate_user(self, user): + log.info(f"Deactivating user {user.ownerid} in ownerid {self.ownerid}") + if isinstance(self.plan_activated_users, list): + try: + self.plan_activated_users.remove(user.ownerid) + except ValueError: + pass + self.save() + + def add_admin(self, user): + log.info( + f"Granting admin permissions to user {user.ownerid} within owner {self.ownerid}" + ) + if isinstance(self.admins, list): + if user.ownerid not in self.admins: + self.admins.append(user.ownerid) + else: + self.admins = [user.ownerid] + self.save() + + def remove_admin(self, user): + log.info( + f"Revoking admin permissions for user {user.ownerid} within owner {self.ownerid}" + ) + if isinstance(self.admins, list): + try: + self.admins.remove(user.ownerid) + except ValueError: + pass + self.save() GITHUB_APP_INSTALLATION_DEFAULT_NAME = "codecov_app_installation" @@ -492,24 +500,23 @@ class GithubAppInstallation( related_name="github_app_installations", ) - # Missing Key/Methods - # def repository_queryset(self) -> BaseManager[Repository]: - # """Returns a QuerySet of repositories covered by this installation""" - # if self.repository_service_ids is None: - # # All repos covered - # return Repository.objects.filter(author=self.owner) - # # Some repos covered - # return Repository.objects.filter( - # service_id__in=self.repository_service_ids, author=self.owner - # ) - - # def covers_all_repos(self) -> bool: - # return self.repository_service_ids is None - - # def is_repo_covered_by_integration(self, repo: Repository) -> bool: - # if self.covers_all_repos(): - # return repo.author.ownerid == self.owner.ownerid - # return repo.service_id in self.repository_service_ids + def repository_queryset(self) -> BaseManager[Repository]: + """Returns a QuerySet of repositories covered by this installation""" + if self.repository_service_ids is None: + # All repos covered + return Repository.objects.filter(author=self.owner) + # Some repos covered + return Repository.objects.filter( + service_id__in=self.repository_service_ids, author=self.owner + ) + + def covers_all_repos(self) -> bool: + return self.repository_service_ids is None + + def is_repo_covered_by_integration(self, repo: Repository) -> bool: + if self.covers_all_repos(): + return repo.author.ownerid == self.owner.ownerid + return repo.service_id in self.repository_service_ids class SentryUser( diff --git a/shared/django_apps/core/managers.py b/shared/django_apps/core/managers.py new file mode 100644 index 000000000..86f8f680d --- /dev/null +++ b/shared/django_apps/core/managers.py @@ -0,0 +1,347 @@ +import datetime + +from dateutil import parser +from django.db.models import ( + Avg, + Count, + DateTimeField, + F, + 
FloatField, + IntegerField, + Manager, + OuterRef, + Q, + QuerySet, + Subquery, + Sum, + Value, +) +from django.db.models.fields.json import KeyTextTransform +from django.db.models.functions import Cast, Coalesce +from django.utils import timezone + + +class RepositoryQuerySet(QuerySet): + def viewable_repos(self, owner): + """ + Filters queryset so that result only includes repos viewable by the + given owner. + """ + filters = Q(private=False) + + if owner is not None: + filters = filters | Q(author__ownerid=owner.ownerid) + if owner.permission: + filters = filters | Q(repoid__in=owner.permission) + + filters &= ~Q(deleted=True) + + return self.filter(filters).exclude(name=None) + + def exclude_uncovered(self): + """ + Excludes repositories with no latest-commit val. Requires calling + 'with_latest_commit_totals_before' on queryset first. + """ + return self.exclude(latest_commit_totals__isnull=True) + + def with_recent_coverage(self) -> QuerySet: + """ + Annotates queryset with recent commit totals from latest commit + that is more than an hour old. This ensures that the coverage totals + are not changing as the most recent commit is uploading coverage + reports. + """ + from core.models import Commit + + timestamp = timezone.now() - timezone.timedelta(hours=1) + + commits_queryset = Commit.objects.filter( + repository_id=OuterRef("pk"), + state=Commit.CommitStates.COMPLETE, + branch=OuterRef("branch"), + timestamp__lte=timestamp, + ).order_by("-timestamp") + + coverage = Cast( + KeyTextTransform("c", "recent_commit_totals"), + output_field=FloatField(), + ) + hits = Cast( + KeyTextTransform("h", "recent_commit_totals"), + output_field=IntegerField(), + ) + misses = Cast( + KeyTextTransform("m", "recent_commit_totals"), + output_field=IntegerField(), + ) + lines = Cast( + KeyTextTransform("n", "recent_commit_totals"), + output_field=IntegerField(), + ) + + return self.annotate( + recent_commit_totals=Subquery(commits_queryset.values("totals")[:1]), + coverage_sha=Subquery(commits_queryset.values("commitid")[:1]), + recent_coverage=coverage, + coverage=Coalesce( + coverage, + Value(-1), + output_field=FloatField(), + ), + hits=hits, + misses=misses, + lines=lines, + ) + + def with_latest_commit_totals_before( + self, before_date, branch, include_previous_totals=False + ): + """ + Annotates queryset with coverage of latest commit totals before cerain date. + """ + from core.models import Commit + + # Parsing the date given in parameters so we receive a datetime rather than a string + timestamp = parser.parse(before_date) + + commit_query_set = Commit.objects.filter( + repository_id=OuterRef("repoid"), + state=Commit.CommitStates.COMPLETE, + branch=branch or OuterRef("branch"), + # The __date cast function will case the datetime based timestamp on the commit to a date object that only + # contains the year, month and day. This allows us to filter through a daily granularity rather than + # a second granularity since this is the level of granularity we get from other parts of the API. 
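+        # For example (illustrative): before_date="2024-05-01T13:45:00" is parsed into a datetime,
+        # and the __date lookup below keeps commits made at any time on or before 2024-05-01.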
+ timestamp__date__lte=timestamp, + ).order_by("-timestamp") + + queryset = self.annotate( + latest_commit_totals=Subquery(commit_query_set.values("totals")[:1]) + ) + + if include_previous_totals: + queryset = queryset.annotate( + prev_commit_totals=Subquery(commit_query_set.values("totals")[1:2]) + ) + return queryset + + def with_latest_coverage_change(self): + """ + Annotates the queryset with the latest "coverage change" (cov of last commit + made to default branch, minus cov of second-to-last commit made to default + branch) of each repository. Depends on having called "with_latest_commit_totals_before" with + "include_previous_totals=True". + """ + from core.models import Commit + + return self.annotate( + latest_coverage=Cast( + KeyTextTransform("c", "latest_commit_totals"), output_field=FloatField() + ), + second_latest_coverage=Cast( + KeyTextTransform("c", "prev_commit_totals"), output_field=FloatField() + ), + ).annotate( + latest_coverage_change=F("latest_coverage") - F("second_latest_coverage") + ) + + def get_aggregated_coverage(self): + """ + Adds group_bys in the queryset to aggregate the repository coverage totals together to access + statistics on an organization repositories. Requires `with_latest_coverage_change` and + `with_latest_commit_before` to have been executed beforehand. + + Does not return a queryset and instead returns the aggregated values, fetched from the database. + """ + + return self.aggregate( + repo_count=Count("repoid"), + sum_hits=Sum( + Cast( + KeyTextTransform("h", "latest_commit_totals"), + output_field=FloatField(), + ) + ), + sum_lines=Sum( + Cast( + KeyTextTransform("n", "latest_commit_totals"), + output_field=FloatField(), + ) + ), + sum_partials=Sum( + Cast( + KeyTextTransform("p", "latest_commit_totals"), + output_field=FloatField(), + ) + ), + sum_misses=Sum( + Cast( + KeyTextTransform("m", "latest_commit_totals"), + output_field=FloatField(), + ) + ), + average_complexity=Avg( + Cast( + KeyTextTransform("C", "latest_commit_totals"), + output_field=FloatField(), + ) + ), + weighted_coverage=( + Sum( + Cast( + KeyTextTransform("h", "latest_commit_totals"), + output_field=FloatField(), + ) + ) + / Sum( + Cast( + KeyTextTransform("n", "latest_commit_totals"), + output_field=FloatField(), + ) + ) + * 100 + ), + # Function to get the weighted coverage change is to calculate the weighted coverage for the previous commit + # minus the weighted coverage from the current commit + weighted_coverage_change=( + Sum( + Cast( + KeyTextTransform("h", "latest_commit_totals"), + output_field=FloatField(), + ) + ) + / Sum( + Cast( + KeyTextTransform("n", "latest_commit_totals"), + output_field=FloatField(), + ) + ) + * 100 + ) + - ( + Sum( + Cast( + KeyTextTransform("h", "prev_commit_totals"), + output_field=FloatField(), + ) + ) + / Sum( + Cast( + KeyTextTransform("n", "prev_commit_totals"), + output_field=FloatField(), + ) + ) + * 100 + ), + ) + + def with_latest_commit_at(self): + """ + Annotates queryset with latest commit based on a Repository. We annotate: + - true_latest_commit_at as the real value from the table + - latest_commit_at as the true_coverage except NULL are transformed to 1/1/1900 + This make sure when we order the repo with no commit appears last. 
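A small arithmetic sketch (hypothetical totals, not part of this change) of the weighted_coverage aggregation above, which weights by line counts rather than averaging per-repo percentages:

    # Latest-commit totals for two hypothetical repos: (hits, lines)
    repos = [(900, 1000), (10, 100)]

    sum_hits = sum(h for h, _ in repos)    # 910
    sum_lines = sum(n for _, n in repos)   # 1100
    weighted_coverage = sum_hits / sum_lines * 100
    print(round(weighted_coverage, 2))     # 82.73, versus 50.0 for the unweighted mean of 90% and 10%
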
+ """ + from core.models import Commit + + latest_commit_at = Subquery( + Commit.objects.filter(repository_id=OuterRef("pk")) + .order_by("-timestamp") + .values("timestamp")[:1] + ) + return self.annotate( + true_latest_commit_at=latest_commit_at, + latest_commit_at=Coalesce( + latest_commit_at, Value(datetime.datetime(1900, 1, 1)) + ), + ) + + def with_oldest_commit_at(self): + """ + Annotates the queryset with the oldest commit timestamp. + """ + from core.models import Commit + + commits = Commit.objects.filter(repository_id=OuterRef("pk")).order_by( + "timestamp" + ) + return self.annotate( + oldest_commit_at=Subquery(commits.values("timestamp")[:1]), + ) + + def get_or_create_from_git_repo(self, git_repo, owner): + from codecov_auth.models import Owner + + repo, created = self.get_or_create( + author=owner, + service_id=git_repo.get("service_id") or git_repo.get("id"), + private=git_repo["private"], + branch=git_repo.get("branch") or git_repo.get("default_branch") or "master", + name=git_repo["name"], + ) + + # If this is a fork, create the forked repo and save it to the new repo. + # Depending on the source of this data, 'fork' may either be a boolean or a dict + # containing data of the fork. In the case it is a boolean, the forked repo's data + # is contained in the 'parent' field. + fork = git_repo.get("fork") + if fork: + if isinstance(fork, dict): + git_repo_fork = git_repo["fork"]["repo"] + git_repo_fork_owner = git_repo["fork"]["owner"] + + elif isinstance(fork, bool): + # This is supposed to indicate that the repo json comes + # in the form of a github API repo + # (https://docs.github.com/en/rest/reference/repos#get-a-repository) + # but sometimes this will unexpectedly be missing the 'parent' field, + # which contains information about a fork's parent. So we check again + # below. + parent = git_repo.get("parent") + if parent: + git_repo_fork_owner = { + "service_id": parent["owner"]["id"], + "username": parent["owner"]["login"], + } + git_repo_fork = { + "service_id": parent["id"], + "private": parent["private"], + "language": parent["language"], + "branch": parent["default_branch"], + "name": parent["name"], + } + else: + # If the parent data doesn't exist, there is nothing else to do. 
+ return repo, created + + fork_owner, _ = Owner.objects.get_or_create( + service=owner.service, + username=git_repo_fork_owner["username"], + service_id=git_repo_fork_owner["service_id"], + ) + fork, _ = self.get_or_create( + author=fork_owner, + service_id=git_repo_fork["service_id"], + private=git_repo_fork["private"], + branch=git_repo_fork.get("branch") + or git_repo_fork.get("default_branch"), + name=git_repo_fork["name"], + ) + repo.fork = fork + repo.save() + + return repo, created + + +# We cannot use `QuerySet.as_manager()` since it relies on the `inspect` module and will +# not play nicely with Cython (which we use for self-hosted): +# https://cython.readthedocs.io/en/latest/src/userguide/limitations.html#inspect-support +class RepositoryManager(Manager): + def get_queryset(self): + return RepositoryQuerySet(self.model, using=self._db) + + def viewable_repos(self, *args, **kwargs): + return self.get_queryset().viewable_repos(*args, **kwargs) + + def get_or_create_from_git_repo(self, *args, **kwargs): + return self.get_queryset().get_or_create_from_git_repo(*args, **kwargs) diff --git a/shared/django_apps/core/models.py b/shared/django_apps/core/models.py index 10cc7418f..ec3bbdbc8 100644 --- a/shared/django_apps/core/models.py +++ b/shared/django_apps/core/models.py @@ -8,8 +8,10 @@ from django.contrib.postgres.fields import ArrayField, CITextField from django.contrib.postgres.indexes import GinIndex, OpClass +from django.utils.functional import cached_property from django.db import models from django.db.models.functions import Lower, Substr, Upper +from django.forms import ValidationError from django.utils import timezone from django_prometheus.models import ExportModelOperationsMixin @@ -18,6 +20,8 @@ from shared.django_apps.codecov.models import BaseCodecovModel from shared.django_apps.core.encoders import ReportJSONEncoder +from shared.django_apps.core.managers import RepositoryManager +from shared.django_apps.utils.config import should_write_data_to_storage_config_check class DateTimeWithoutTZField(models.DateTimeField): def db_type(self, connection): @@ -150,6 +154,7 @@ class Meta: ] verbose_name_plural = "Repositories" + objects = RepositoryManager() def __str__(self): return f"Repo<{self.author}/{self.name}>" @@ -158,11 +163,9 @@ def __str__(self): def service(self): return self.author.service - # Missing Key/Methods - # objects = RepositoryManager() - # def clean(self): - # if self.using_integration is None: - # raise ValidationError("using_integration cannot be null") + def clean(self): + if self.using_integration is None: + raise ValidationError("using_integration cannot be null") class Branch(ExportModelOperationsMixin("core.branch"), models.Model): @@ -230,36 +233,33 @@ class CommitStates(models.TextChoices): state = models.TextField( null=True, choices=CommitStates.choices ) # Really an ENUM in db - # # Use custom JSON to properly serialize custom data classes on reports - _report = models.JSONField(null=True, db_column="report", encoder=ReportJSONEncoder) - _report_storage_path = models.URLField(null=True, db_column="report_storage_path") def save(self, *args, **kwargs): self.updatestamp = timezone.now() super().save(*args, **kwargs) - # Missing Key/Methods - # @cached_property - # def parent_commit(self): - # return Commit.objects.filter( - # repository=self.repository, commitid=self.parent_commit_id - # ).first() - - # @cached_property - # def commitreport(self): - # reports = list(self.reports.all()) - # # This is almost always prefetched w/ `filter(code=None)` 
and - # # `filter(Q(report_type=None) | Q(report_type=CommitReport.ReportType.COVERAGE))` - # # (in which case `.all()` returns the already filtered results) - # # In the case that the reports were not prefetched we'll filter again in memory. - # reports = [ - # report - # for report in reports - # if report.code is None - # and (report.report_type is None or report.report_type == "coverage") - # ] - # return reports[0] if reports else None + @cached_property + def parent_commit(self): + return Commit.objects.filter( + repository=self.repository, commitid=self.parent_commit_id + ).first() + + @cached_property + def commitreport(self): + reports = list(self.reports.all()) + # This is almost always prefetched w/ `filter(code=None)` and + # `filter(Q(report_type=None) | Q(report_type=CommitReport.ReportType.COVERAGE))` + # (in which case `.all()` returns the already filtered results) + # In the case that the reports were not prefetched we'll filter again in memory. + reports = [ + report + for report in reports + if report.code is None + and (report.report_type is None or report.report_type == "coverage") + ] + return reports[0] if reports else None + #TODO: needs porting; property heavily tethered to report service # @cached_property # def full_report(self) -> Optional[Report]: # # TODO: we should probably remove use of this method since it inverts the @@ -306,26 +306,28 @@ class Meta: ), ] - # Missing Key/Methods - # def get_repository(self): - # return self.repository - - # def get_commitid(self): - # return self.commitid + def get_repository(self): + return self.repository - # @property - # def external_id(self): - # return self.commitid + def get_commitid(self): + return self.commitid - # def should_write_to_storage(self) -> bool: - # if self.repository is None or self.repository.author is None: - # return False - # is_codecov_repo = self.repository.author.username == "codecov" - # return should_write_data_to_storage_config_check( - # "commit_report", is_codecov_repo, self.repository.repoid - # ) - - # Missing Key/Method + @property + def external_id(self): + return self.commitid + + def should_write_to_storage(self) -> bool: + if self.repository is None or self.repository.author is None: + return False + is_codecov_repo = self.repository.author.username == "codecov" + return should_write_data_to_storage_config_check( + "commit_report", is_codecov_repo, self.repository.repoid + ) + + # Use custom JSON to properly serialize custom data classes on reports + _report = models.JSONField(null=True, db_column="report", encoder=ReportJSONEncoder) + _report_storage_path = models.URLField(null=True, db_column="report_storage_path") + # TODO: This needs porting as it is very tethered to the archive service # report = ArchiveField( # should_write_to_storage_fn=should_write_to_storage, # json_encoder=ReportJSONEncoder, @@ -395,29 +397,29 @@ class Meta: ), ] - # Missing Key/Methods - # def get_repository(self): - # return self.repository - - # def get_commitid(self): - # return None + def get_repository(self): + return self.repository - # @property - # def external_id(self): - # return self.pullid + def get_commitid(self): + return None - # def should_write_to_storage(self) -> bool: - # if self.repository is None or self.repository.author is None: - # return False - # is_codecov_repo = self.repository.author.username == "codecov" - # return should_write_data_to_storage_config_check( - # master_switch_key="pull_flare", - # is_codecov_repo=is_codecov_repo, - # repoid=self.repository.repoid, - # ) + 
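Illustrative sketch (config values and repoid hypothetical, not part of this change) of the should_write_data_to_storage_config_check helper that the should_write_to_storage methods in this file delegate to; the helper itself is added in shared/django_apps/utils/config.py later in this patch:

    from shared.django_apps.utils.config import should_write_data_to_storage_config_check

    # With setup.save_report_data_in_storage.pull_flare set to "general_access" in the
    # install's config, any repository qualifies:
    should_write_data_to_storage_config_check(
        master_switch_key="pull_flare",
        is_codecov_repo=False,
        repoid=1234,  # hypothetical repoid
    )  # -> True

    # With "restricted_access", only repoids listed under
    # setup.save_report_data_in_storage.repo_ids (or the codecov repo itself) qualify.
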
@property + def external_id(self): + return self.pullid + + def should_write_to_storage(self) -> bool: + if self.repository is None or self.repository.author is None: + return False + is_codecov_repo = self.repository.author.username == "codecov" + return should_write_data_to_storage_config_check( + master_switch_key="pull_flare", + is_codecov_repo=is_codecov_repo, + repoid=self.repository.repoid, + ) _flare = models.JSONField(db_column="flare", null=True) _flare_storage_path = models.URLField(db_column="flare_storage_path", null=True) + # TODO: This needs porting as it is very tethered to the archive service # flare = ArchiveField( # should_write_to_storage_fn=should_write_to_storage, default_value_class=dict # ) diff --git a/shared/django_apps/reports/managers.py b/shared/django_apps/reports/managers.py new file mode 100644 index 000000000..0db5e5097 --- /dev/null +++ b/shared/django_apps/reports/managers.py @@ -0,0 +1,17 @@ +from django.db.models import Manager, Q, QuerySet + + +class CommitReportQuerySet(QuerySet): + def coverage_reports(self): + """ + Filters queryset such that only coverage reports are included. + """ + return self.filter(Q(report_type=None) | Q(report_type="coverage")) + + +class CommitReportManager(Manager): + def get_queryset(self): + return CommitReportQuerySet(self.model, using=self._db) + + def coverage_reports(self, *args, **kwargs): + return self.get_queryset().coverage_reports(*args, **kwargs) diff --git a/shared/django_apps/reports/models.py b/shared/django_apps/reports/models.py index bcbbddbb1..a0e49ec1e 100644 --- a/shared/django_apps/reports/models.py +++ b/shared/django_apps/reports/models.py @@ -3,15 +3,14 @@ from django.contrib.postgres.fields import ArrayField from django.db import models +from django.urls import reverse from django_prometheus.models import ExportModelOperationsMixin +from shared.django_apps.reports.managers import CommitReportManager +from shared.django_apps.utils.config import should_write_data_to_storage_config_check +from shared.django_apps.utils.services import get_short_service_name +from shared.django_apps.codecov.models import BaseCodecovModel from shared.reports.enums import UploadState, UploadType - -from codecov.models import BaseCodecovModel -# from reports.managers import CommitReportManager -# from upload.constants import ci -# from utils.config import should_write_data_to_storage_config_check -# from utils.model_utils import ArchiveField -# from utils.services import get_short_service_name +from shared.upload.constants import ci log = logging.getLogger(__name__) @@ -54,8 +53,7 @@ class ReportType(models.TextChoices): null=True, max_length=100, choices=ReportType.choices ) - # Missing Keys/Methods - # objects = CommitReportManager() + objects = CommitReportManager() class ReportResults( @@ -81,28 +79,28 @@ class ReportDetails( db_column="files_array_storage_path", null=True ) - # Missing Keys/Methods - # def get_repository(self): - # return self.report.commit.repository - - # def get_commitid(self): - # return self.report.commit.commitid - - # def should_write_to_storage(self) -> bool: - # if ( - # self.report is None - # or self.report.commit is None - # or self.report.commit.repository is None - # or self.report.commit.repository.author is None - # ): - # return False - # is_codecov_repo = self.report.commit.repository.author.username == "codecov" - # return should_write_data_to_storage_config_check( - # master_switch_key="report_details_files_array", - # is_codecov_repo=is_codecov_repo, - # 
repoid=self.report.commit.repository.repoid, - # ) - + def get_repository(self): + return self.report.commit.repository + + def get_commitid(self): + return self.report.commit.commitid + + def should_write_to_storage(self) -> bool: + if ( + self.report is None + or self.report.commit is None + or self.report.commit.repository is None + or self.report.commit.repository.author is None + ): + return False + is_codecov_repo = self.report.commit.repository.author.username == "codecov" + return should_write_data_to_storage_config_check( + master_switch_key="report_details_files_array", + is_codecov_repo=is_codecov_repo, + repoid=self.report.commit.repository.repoid, + ) + + # TODO: This needs porting as it is very tethered to the archive service # files_array = ArchiveField( # should_write_to_storage_fn=should_write_to_storage, # default_value_class=list, @@ -176,45 +174,44 @@ class ReportSession( class Meta: db_table = "reports_upload" - # Missing Keys/Methods - # @property - # def download_url(self): - # repository = self.report.commit.repository - # return ( - # reverse( - # "upload-download", - # kwargs={ - # "service": get_short_service_name(repository.author.service), - # "owner_username": repository.author.username, - # "repo_name": repository.name, - # }, - # ) - # + f"?path={self.storage_path}" - # ) - - # @property - # def ci_url(self): - # if self.build_url: - # # build_url was saved in the database - # return self.build_url - - # # otherwise we need to construct it ourself (if possible) - # build_url = ci.get(self.provider, {}).get("build_url") - # if not build_url: - # return - # repository = self.report.commit.repository - # data = { - # "service_short": get_short_service_name(repository.author.service), - # "owner": repository.author, - # "upload": self, - # "repo": repository, - # "commit": self.report.commit, - # } - # return build_url.format(**data) - - # @property - # def flag_names(self): - # return [flag.flag_name for flag in self.flags.all()] + @property + def download_url(self): + repository = self.report.commit.repository + return ( + reverse( + "upload-download", + kwargs={ + "service": get_short_service_name(repository.author.service), + "owner_username": repository.author.username, + "repo_name": repository.name, + }, + ) + + f"?path={self.storage_path}" + ) + + @property + def ci_url(self): + if self.build_url: + # build_url was saved in the database + return self.build_url + + # otherwise we need to construct it ourself (if possible) + build_url = ci.get(self.provider, {}).get("build_url") + if not build_url: + return + repository = self.report.commit.repository + data = { + "service_short": get_short_service_name(repository.author.service), + "owner": repository.author, + "upload": self, + "repo": repository, + "commit": self.report.commit, + } + return build_url.format(**data) + + @property + def flag_names(self): + return [flag.flag_name for flag in self.flags.all()] class UploadLevelTotals(AbstractTotals): diff --git a/shared/django_apps/utils/config.py b/shared/django_apps/utils/config.py new file mode 100644 index 000000000..893c98316 --- /dev/null +++ b/shared/django_apps/utils/config.py @@ -0,0 +1,21 @@ +from shared.config import get_config + +def should_write_data_to_storage_config_check( + master_switch_key: str, is_codecov_repo: bool, repoid: int +) -> bool: + master_write_switch = get_config( + "setup", + "save_report_data_in_storage", + master_switch_key, + default=False, + ) + if master_write_switch == "restricted_access": + allowed_repo_ids = 
get_config( + "setup", "save_report_data_in_storage", "repo_ids", default=[] + ) + is_in_allowed_repoids = repoid in allowed_repo_ids + elif master_write_switch == "general_access": + is_in_allowed_repoids = True + else: + is_in_allowed_repoids = False + return master_write_switch and (is_codecov_repo or is_in_allowed_repoids) \ No newline at end of file diff --git a/shared/django_apps/utils/services.py b/shared/django_apps/utils/services.py new file mode 100644 index 000000000..1c6edb298 --- /dev/null +++ b/shared/django_apps/utils/services.py @@ -0,0 +1,21 @@ +short_services = { + "gh": "github", + "bb": "bitbucket", + "gl": "gitlab", + "ghe": "github_enterprise", + "gle": "gitlab_enterprise", + "bbs": "bitbucket_server", +} +long_services = {value: key for (key, value) in short_services.items()} + + +def get_long_service_name(service): + if service in short_services: + return short_services[service] + return service + + +def get_short_service_name(service): + if service in long_services: + return long_services[service] + return service diff --git a/shared/upload/constants.py b/shared/upload/constants.py new file mode 100644 index 000000000..1d6808b1c --- /dev/null +++ b/shared/upload/constants.py @@ -0,0 +1,222 @@ +ci = { + "travis": { + "title": "Travis-CI", + "icon": "travis", + "require_token_when_public": False, + "instructions": "travis", + "build_url": "https://travis-ci.com/{owner.username}/{repo.name}/jobs/{upload.job_code}", + }, + "azure_pipelines": { + "title": "Azure", + "icon": "azure_pipelines", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, + }, + "docker": { + "title": "Docker", + "icon": "custom", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, + }, + "buildbot": { + "title": "Buildbot", + "icon": "buildbot", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, + }, + "cirrus-ci": { + "title": "Cirrus CI", + "icon": "cirrus-ci", + "require_token_when_public": False, + "instructions": "generic", + "build_url": "https://cirrus-ci.com/build/{upload.build_code}", + }, + "codebuild": { + "title": "AWS Codebuild", + "icon": "codebuild", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, + }, + "codefresh": { + "title": "Codefresh", + "icon": "custom", + "require_token_when_public": True, + "instructions": "generic", + "build_url": "https://g.codefresh.io/repositories/{owner.username}/{repo.name}/builds/{upload.build_code}", + }, + "bitbucket": { + "title": "Bitbucket Pipelines", + "icon": "bitbucket", + "require_token_when_public": False, + "instructions": "generic", + "build_url": "https://bitbucket.org/{owner.username}/{repo.name}/addon/pipelines/home#!/results/{upload.job_code}", + }, + "circleci": { + "title": "CircleCI", + "icon": "circleci", + "require_token_when_public": False, + "instructions": "circleci", + "build_url": "https://circleci.com/{service_short}/{owner.username}/{repo.name}/{upload.build_code}#tests/containers/{upload.job_code}", + }, + "buddybuild": { + "title": "buddybuild", + "icon": "custom", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, + }, + "buddy": { + "title": "buddy", + "icon": "custom", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, + }, + "github-actions": { + "title": "GitHub Actions", + "icon": "github-actions", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, + }, + "solano": 
{ + "title": "Solano", + "icon": "custom", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, + }, + "teamcity": { + "title": "TeamCity", + "icon": "teamcity", + "require_token_when_public": True, + "instructions": "teamcity", + "build_url": None, + }, + "appveyor": { + "title": "AppVeyor", + "icon": "appveyor", + "require_token_when_public": False, + "instructions": "appveyor", + "build_url": None, + }, + "wercker": { + "title": "Wercker", + "icon": "wercker", + "require_token_when_public": True, + "instructions": "generic", + "build_url": "https://app.wercker.com/#build/{upload.build_code}", + }, + "shippable": { + "title": "Shippable", + "icon": "shippable", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, # provided in upload, + }, + "codeship": { + "title": "Codeship", + "icon": "codeship", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, # provided in upload, + }, + "drone.io": { + "title": "Drone.io", + "icon": "drone.io", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, # provided in upload, + }, + "jenkins": { + "title": "Jenkins", + "icon": "jenkins", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, # provided in upload, + }, + "semaphore": { + "title": "Semaphore", + "icon": "semaphore", + "require_token_when_public": True, + "instructions": "generic", + "build_url": "https://semaphoreapp.com/{owner.username}/{repo.name}/branches/{commit.branch}/builds/{upload.build_code}", + }, + "gitlab": { + "title": "GitLab CI", + "icon": "gitlab", + "require_token_when_public": True, + "instructions": "generic", + "build_url": "https://gitlab.com/{owner.username}/{repo.name}/builds/{upload.build_code}", + }, + "bamboo": { + "title": "Bamboo", + "icon": "bamboo", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, + }, + "buildkite": { + "title": "BuildKite", + "icon": "buildkite", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, # provided in upload, + }, + "bitrise": { + "title": "Bitrise", + "icon": "bitrise", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, # provided in upload, + }, + "greenhouse": { + "title": "Greenhouse", + "icon": "greenhouse", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, + }, + "heroku": { + "title": "Heroku", + "icon": "heroku", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, + }, + "woodpecker": { + "title": "WoodpeckerCI", + "icon": "custom", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, # provided in upload, + }, + "custom": { + "title": "Custom", + "icon": "custom", + "require_token_when_public": True, + "instructions": "generic", + "build_url": None, # provided in upload, + }, +} + +errors = { + "travis": { + "tokenless-general-error": "\nERROR: Tokenless uploads are only supported for public repositories on Travis that can be verified through the Travis API. Please use an upload token if your repository is private and specify it via the -t flag. 
You can find the token for this repository at the url below on codecov.io (login required):\n\nRepo token: {}\nDocumentation: https://docs.codecov.io/docs/about-the-codecov-bash-uploader#section-upload-token", + "tokenless-stale-build": "\nERROR: The coverage upload was rejected because the build is out of date. Please make sure the build is not stale for uploads to process correctly.", + "tokenless-bad-status": "\nERROR: The build status does not indicate that the current build is in progress. Please make sure the build is in progress or was finished within the past 4 minutes to ensure reports upload properly.", + } +} + +global_upload_token_providers = [ + "github", + "github_enterprise", + "gitlab", + "gitlab_enterprise", + "bitbucket", + "bitbucket_server", +] From 5a1b7cfba87f97101788d89103609b0ba67dada7 Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 29 Feb 2024 17:04:09 -0800 Subject: [PATCH 05/36] lint --- setup.py | 2 +- .../migrations/0023_auto_20230214_1129.py | 1 + .../migrations/0034_alter_owner_trial_status.py | 1 + .../migrations/0035_owner_pretrial_users_count.py | 1 + .../migrations/0037_owner_uses_invoice.py | 1 + .../migrations/0038_alter_owner_uses_invoice.py | 1 + .../migrations/0039_alter_owner_uses_invoice.py | 1 + .../migrations/0043_sync_user_terms_agreement.py | 1 + .../0045_remove_ownerprofile_terms_agreement.py | 1 + .../migrations/0046_dedupe_owner_admin_values.py | 1 + shared/django_apps/codecov_auth/models.py | 14 +++++++++----- shared/django_apps/core/migrations/0001_initial.py | 4 +--- .../core/migrations/0002_auto_20210517_1223.py | 1 + .../core/migrations/0003_auto_20210520_0841.py | 1 + .../0014_pull_pulls_author_updatestamp.py | 1 + .../0017_branch_branches_repoid_updatestamp.py | 1 + .../migrations/0018_commit_all_commits_on_pull.py | 1 + .../0019_commit_commits_repoid_branch_state_ts.py | 1 + ...ommit_commits_repoid_commitid_short_and_more.py | 1 + .../migrations/0022_pull_pulls_repoid_pullid_ts.py | 1 + .../migrations/0028_repository_webhook_secret.py | 1 + .../core/migrations/0032_auto_20230731_1641.py | 1 + .../migrations/0034_remove_repository_cache.py | 1 + .../core/migrations/0039_pull_pulls_repoid_id.py | 1 + .../0041_pull_bundle_analysis_commentid.py | 1 + .../core/migrations/0042_repository_languages.py | 1 + .../0043_repository_bundle_analysis_enabled.py | 1 + ..._repository_bundle_analysis_enabled_and_more.py | 1 + .../0045_repository_languages_last_updated.py | 2 +- .../migrations/0046_repository_coverage_enabled.py | 1 + shared/django_apps/core/models.py | 9 +++------ .../reports/migrations/0007_auto_20230220_1245.py | 1 + shared/django_apps/reports/models.py | 3 ++- shared/django_apps/utils/config.py | 3 ++- shared/plan/service.py | 3 ++- shared/plan/test_plan.py | 5 ++--- 36 files changed, 50 insertions(+), 22 deletions(-) diff --git a/setup.py b/setup.py index 8dca13910..693d1a6ef 100644 --- a/setup.py +++ b/setup.py @@ -52,6 +52,6 @@ "django-better-admin-arrayfield", # API Deps "django-prometheus", - "django-model-utils" + "django-model-utils", ], ) diff --git a/shared/django_apps/codecov_auth/migrations/0023_auto_20230214_1129.py b/shared/django_apps/codecov_auth/migrations/0023_auto_20230214_1129.py index 61f1c1aa3..558ef6536 100644 --- a/shared/django_apps/codecov_auth/migrations/0023_auto_20230214_1129.py +++ b/shared/django_apps/codecov_auth/migrations/0023_auto_20230214_1129.py @@ -1,6 +1,7 @@ # Generated by Django 3.2.12 on 2023-02-14 11:29 from django.db import migrations + from shared.django_apps.migration_utils import 
RiskyRunSQL diff --git a/shared/django_apps/codecov_auth/migrations/0034_alter_owner_trial_status.py b/shared/django_apps/codecov_auth/migrations/0034_alter_owner_trial_status.py index 28d842f7a..8478f1f41 100644 --- a/shared/django_apps/codecov_auth/migrations/0034_alter_owner_trial_status.py +++ b/shared/django_apps/codecov_auth/migrations/0034_alter_owner_trial_status.py @@ -1,6 +1,7 @@ # Generated by Django 4.1.7 on 2023-07-27 00:38 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyRunSQL diff --git a/shared/django_apps/codecov_auth/migrations/0035_owner_pretrial_users_count.py b/shared/django_apps/codecov_auth/migrations/0035_owner_pretrial_users_count.py index 692e91390..73b7e217b 100644 --- a/shared/django_apps/codecov_auth/migrations/0035_owner_pretrial_users_count.py +++ b/shared/django_apps/codecov_auth/migrations/0035_owner_pretrial_users_count.py @@ -1,6 +1,7 @@ # Generated by Django 4.1.7 on 2023-07-27 23:40 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddField diff --git a/shared/django_apps/codecov_auth/migrations/0037_owner_uses_invoice.py b/shared/django_apps/codecov_auth/migrations/0037_owner_uses_invoice.py index 068906645..61eae6e75 100644 --- a/shared/django_apps/codecov_auth/migrations/0037_owner_uses_invoice.py +++ b/shared/django_apps/codecov_auth/migrations/0037_owner_uses_invoice.py @@ -1,6 +1,7 @@ # Generated by Django 4.2.2 on 2023-08-17 20:59 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddField diff --git a/shared/django_apps/codecov_auth/migrations/0038_alter_owner_uses_invoice.py b/shared/django_apps/codecov_auth/migrations/0038_alter_owner_uses_invoice.py index d9f32e107..b8b77a079 100644 --- a/shared/django_apps/codecov_auth/migrations/0038_alter_owner_uses_invoice.py +++ b/shared/django_apps/codecov_auth/migrations/0038_alter_owner_uses_invoice.py @@ -1,6 +1,7 @@ # Generated by Django 4.2.2 on 2023-08-28 18:27 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAlterField, RiskyRunSQL diff --git a/shared/django_apps/codecov_auth/migrations/0039_alter_owner_uses_invoice.py b/shared/django_apps/codecov_auth/migrations/0039_alter_owner_uses_invoice.py index 484f577a7..949d49140 100644 --- a/shared/django_apps/codecov_auth/migrations/0039_alter_owner_uses_invoice.py +++ b/shared/django_apps/codecov_auth/migrations/0039_alter_owner_uses_invoice.py @@ -1,6 +1,7 @@ # Generated by Django 4.2.2 on 2023-08-28 17:42 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAlterField diff --git a/shared/django_apps/codecov_auth/migrations/0043_sync_user_terms_agreement.py b/shared/django_apps/codecov_auth/migrations/0043_sync_user_terms_agreement.py index ec46c5239..a2408bd69 100644 --- a/shared/django_apps/codecov_auth/migrations/0043_sync_user_terms_agreement.py +++ b/shared/django_apps/codecov_auth/migrations/0043_sync_user_terms_agreement.py @@ -2,6 +2,7 @@ from django.db import migrations + from shared.django_apps.migration_utils import RiskyRunSQL diff --git a/shared/django_apps/codecov_auth/migrations/0045_remove_ownerprofile_terms_agreement.py b/shared/django_apps/codecov_auth/migrations/0045_remove_ownerprofile_terms_agreement.py index 37b8b231d..fa9f21a23 100644 --- a/shared/django_apps/codecov_auth/migrations/0045_remove_ownerprofile_terms_agreement.py +++ 
b/shared/django_apps/codecov_auth/migrations/0045_remove_ownerprofile_terms_agreement.py @@ -1,6 +1,7 @@ # Generated by Django 4.2.3 on 2023-09-21 14:24 from django.db import migrations + from shared.django_apps.migration_utils import RiskyRemoveField diff --git a/shared/django_apps/codecov_auth/migrations/0046_dedupe_owner_admin_values.py b/shared/django_apps/codecov_auth/migrations/0046_dedupe_owner_admin_values.py index a67214baf..5ae43feb2 100644 --- a/shared/django_apps/codecov_auth/migrations/0046_dedupe_owner_admin_values.py +++ b/shared/django_apps/codecov_auth/migrations/0046_dedupe_owner_admin_values.py @@ -2,6 +2,7 @@ from django.db import migrations + from shared.django_apps.migration_utils import RiskyRunSQL diff --git a/shared/django_apps/codecov_auth/models.py b/shared/django_apps/codecov_auth/models.py index 578398d46..9d606876b 100644 --- a/shared/django_apps/codecov_auth/models.py +++ b/shared/django_apps/codecov_auth/models.py @@ -4,24 +4,28 @@ import uuid from dataclasses import asdict from datetime import datetime +from hashlib import md5 from django.contrib.postgres.fields import ArrayField, CITextField from django.db import models from django.db.models.manager import BaseManager -from django_prometheus.models import ExportModelOperationsMixin from django.forms import ValidationError from django.utils import timezone -from shared.config import get_config -from hashlib import md5 +from django_prometheus.models import ExportModelOperationsMixin +from shared.config import get_config from shared.django_apps.codecov.models import BaseCodecovModel -from shared.django_apps.codecov_auth.constants import AVATAR_GITHUB_BASE_URL, AVATARIO_BASE_URL, BITBUCKET_BASE_URL, GRAVATAR_BASE_URL +from shared.django_apps.codecov_auth.constants import ( + AVATAR_GITHUB_BASE_URL, + AVATARIO_BASE_URL, + BITBUCKET_BASE_URL, + GRAVATAR_BASE_URL, +) from shared.django_apps.codecov_auth.managers import OwnerManager from shared.django_apps.core.managers import RepositoryManager from shared.django_apps.core.models import DateTimeWithoutTZField, Repository from shared.plan.constants import USER_PLAN_REPRESENTATIONS, PlanName - # Large number to represent Infinity as float('int') is not JSON serializable INFINITY = 99999999 diff --git a/shared/django_apps/core/migrations/0001_initial.py b/shared/django_apps/core/migrations/0001_initial.py index 7fa63c505..4d6118dd0 100644 --- a/shared/django_apps/core/migrations/0001_initial.py +++ b/shared/django_apps/core/migrations/0001_initial.py @@ -37,9 +37,7 @@ class Migration(migrations.Migration): ("totals", models.JSONField(null=True)), ( "report", - models.JSONField( - encoder=ReportJSONEncoder, null=True - ), + models.JSONField(encoder=ReportJSONEncoder, null=True), ), ("merged", models.BooleanField(null=True)), ("deleted", models.BooleanField(null=True)), diff --git a/shared/django_apps/core/migrations/0002_auto_20210517_1223.py b/shared/django_apps/core/migrations/0002_auto_20210517_1223.py index 737981253..e82e972f1 100644 --- a/shared/django_apps/core/migrations/0002_auto_20210517_1223.py +++ b/shared/django_apps/core/migrations/0002_auto_20210517_1223.py @@ -1,6 +1,7 @@ # Generated by Django 3.1.6 on 2021-05-17 12:23 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyRunSQL diff --git a/shared/django_apps/core/migrations/0003_auto_20210520_0841.py b/shared/django_apps/core/migrations/0003_auto_20210520_0841.py index 155ecffc4..59a27748d 100644 --- 
a/shared/django_apps/core/migrations/0003_auto_20210520_0841.py +++ b/shared/django_apps/core/migrations/0003_auto_20210520_0841.py @@ -1,6 +1,7 @@ # Generated by Django 3.1.6 on 2021-05-20 08:41 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyRunSQL diff --git a/shared/django_apps/core/migrations/0014_pull_pulls_author_updatestamp.py b/shared/django_apps/core/migrations/0014_pull_pulls_author_updatestamp.py index dbc08263d..4a661c597 100644 --- a/shared/django_apps/core/migrations/0014_pull_pulls_author_updatestamp.py +++ b/shared/django_apps/core/migrations/0014_pull_pulls_author_updatestamp.py @@ -1,6 +1,7 @@ # Generated by Django 3.2.12 on 2022-07-11 13:34 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddIndex diff --git a/shared/django_apps/core/migrations/0017_branch_branches_repoid_updatestamp.py b/shared/django_apps/core/migrations/0017_branch_branches_repoid_updatestamp.py index b492250ea..3551dfe9e 100644 --- a/shared/django_apps/core/migrations/0017_branch_branches_repoid_updatestamp.py +++ b/shared/django_apps/core/migrations/0017_branch_branches_repoid_updatestamp.py @@ -1,6 +1,7 @@ # Generated by Django 3.2.12 on 2023-01-13 16:44 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddIndex diff --git a/shared/django_apps/core/migrations/0018_commit_all_commits_on_pull.py b/shared/django_apps/core/migrations/0018_commit_all_commits_on_pull.py index f78cb994c..0acfa7534 100644 --- a/shared/django_apps/core/migrations/0018_commit_all_commits_on_pull.py +++ b/shared/django_apps/core/migrations/0018_commit_all_commits_on_pull.py @@ -1,6 +1,7 @@ # Generated by Django 3.2.12 on 2023-01-26 17:52 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddIndex diff --git a/shared/django_apps/core/migrations/0019_commit_commits_repoid_branch_state_ts.py b/shared/django_apps/core/migrations/0019_commit_commits_repoid_branch_state_ts.py index 1f5b1e2fb..7fa0de61f 100644 --- a/shared/django_apps/core/migrations/0019_commit_commits_repoid_branch_state_ts.py +++ b/shared/django_apps/core/migrations/0019_commit_commits_repoid_branch_state_ts.py @@ -1,6 +1,7 @@ # Generated by Django 3.2.12 on 2023-02-01 15:04 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddIndex diff --git a/shared/django_apps/core/migrations/0020_commit_commits_repoid_commitid_short_and_more.py b/shared/django_apps/core/migrations/0020_commit_commits_repoid_commitid_short_and_more.py index 1d0d3fa6b..fd403cc1b 100644 --- a/shared/django_apps/core/migrations/0020_commit_commits_repoid_commitid_short_and_more.py +++ b/shared/django_apps/core/migrations/0020_commit_commits_repoid_commitid_short_and_more.py @@ -4,6 +4,7 @@ import django.db.models.functions.text from django.contrib.postgres.operations import BtreeGinExtension, TrigramExtension from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddIndex diff --git a/shared/django_apps/core/migrations/0022_pull_pulls_repoid_pullid_ts.py b/shared/django_apps/core/migrations/0022_pull_pulls_repoid_pullid_ts.py index 14632bd24..3f8519f98 100644 --- a/shared/django_apps/core/migrations/0022_pull_pulls_repoid_pullid_ts.py +++ b/shared/django_apps/core/migrations/0022_pull_pulls_repoid_pullid_ts.py @@ -1,6 +1,7 @@ # Generated by Django 4.1.7 on 2023-04-24 18:59 from django.db import migrations, models + from 
shared.django_apps.migration_utils import RiskyAddIndex diff --git a/shared/django_apps/core/migrations/0028_repository_webhook_secret.py b/shared/django_apps/core/migrations/0028_repository_webhook_secret.py index d41a8cb84..b74641226 100644 --- a/shared/django_apps/core/migrations/0028_repository_webhook_secret.py +++ b/shared/django_apps/core/migrations/0028_repository_webhook_secret.py @@ -1,6 +1,7 @@ # Generated by Django 4.2.2 on 2023-07-24 16:38 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddField diff --git a/shared/django_apps/core/migrations/0032_auto_20230731_1641.py b/shared/django_apps/core/migrations/0032_auto_20230731_1641.py index 171f5e741..f77c719fd 100644 --- a/shared/django_apps/core/migrations/0032_auto_20230731_1641.py +++ b/shared/django_apps/core/migrations/0032_auto_20230731_1641.py @@ -1,6 +1,7 @@ # Generated by Django 4.2.2 on 2023-07-31 16:41 from django.db import migrations + from shared.django_apps.migration_utils import RiskyRunSQL diff --git a/shared/django_apps/core/migrations/0034_remove_repository_cache.py b/shared/django_apps/core/migrations/0034_remove_repository_cache.py index adea85487..558c5f406 100644 --- a/shared/django_apps/core/migrations/0034_remove_repository_cache.py +++ b/shared/django_apps/core/migrations/0034_remove_repository_cache.py @@ -1,6 +1,7 @@ # Generated by Django 4.2.2 on 2023-08-14 13:23 from django.db import migrations + from shared.django_apps.migration_utils import RiskyRemoveField diff --git a/shared/django_apps/core/migrations/0039_pull_pulls_repoid_id.py b/shared/django_apps/core/migrations/0039_pull_pulls_repoid_id.py index 07dd6247f..6d09ed6b7 100644 --- a/shared/django_apps/core/migrations/0039_pull_pulls_repoid_id.py +++ b/shared/django_apps/core/migrations/0039_pull_pulls_repoid_id.py @@ -1,6 +1,7 @@ # Generated by Django 4.2.3 on 2023-10-30 16:16 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddIndex diff --git a/shared/django_apps/core/migrations/0041_pull_bundle_analysis_commentid.py b/shared/django_apps/core/migrations/0041_pull_bundle_analysis_commentid.py index f7c2edb3e..7fbfda690 100644 --- a/shared/django_apps/core/migrations/0041_pull_bundle_analysis_commentid.py +++ b/shared/django_apps/core/migrations/0041_pull_bundle_analysis_commentid.py @@ -1,6 +1,7 @@ # Generated by Django 4.2.7 on 2023-12-27 17:00 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddField diff --git a/shared/django_apps/core/migrations/0042_repository_languages.py b/shared/django_apps/core/migrations/0042_repository_languages.py index ee067ee5f..875635bc3 100644 --- a/shared/django_apps/core/migrations/0042_repository_languages.py +++ b/shared/django_apps/core/migrations/0042_repository_languages.py @@ -2,6 +2,7 @@ import django.contrib.postgres.fields from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddField diff --git a/shared/django_apps/core/migrations/0043_repository_bundle_analysis_enabled.py b/shared/django_apps/core/migrations/0043_repository_bundle_analysis_enabled.py index 99dbb0178..b051f3bf2 100644 --- a/shared/django_apps/core/migrations/0043_repository_bundle_analysis_enabled.py +++ b/shared/django_apps/core/migrations/0043_repository_bundle_analysis_enabled.py @@ -1,6 +1,7 @@ # Generated by Django 4.2.7 on 2024-01-09 21:10 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddField diff 
--git a/shared/django_apps/core/migrations/0044_alter_repository_bundle_analysis_enabled_and_more.py b/shared/django_apps/core/migrations/0044_alter_repository_bundle_analysis_enabled_and_more.py index c370cbc9a..2dff49fff 100644 --- a/shared/django_apps/core/migrations/0044_alter_repository_bundle_analysis_enabled_and_more.py +++ b/shared/django_apps/core/migrations/0044_alter_repository_bundle_analysis_enabled_and_more.py @@ -2,6 +2,7 @@ import django.contrib.postgres.fields from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAlterField diff --git a/shared/django_apps/core/migrations/0045_repository_languages_last_updated.py b/shared/django_apps/core/migrations/0045_repository_languages_last_updated.py index aa442726a..30f35e9d0 100644 --- a/shared/django_apps/core/migrations/0045_repository_languages_last_updated.py +++ b/shared/django_apps/core/migrations/0045_repository_languages_last_updated.py @@ -1,9 +1,9 @@ # Generated by Django 4.2.7 on 2024-01-11 05:32 from django.db import migrations -from shared.django_apps.migration_utils import RiskyAddField from shared.django_apps.core.models import DateTimeWithoutTZField +from shared.django_apps.migration_utils import RiskyAddField class Migration(migrations.Migration): diff --git a/shared/django_apps/core/migrations/0046_repository_coverage_enabled.py b/shared/django_apps/core/migrations/0046_repository_coverage_enabled.py index 90fda0cc7..dc71c90e7 100644 --- a/shared/django_apps/core/migrations/0046_repository_coverage_enabled.py +++ b/shared/django_apps/core/migrations/0046_repository_coverage_enabled.py @@ -1,6 +1,7 @@ # Generated by Django 4.2.7 on 2024-01-15 20:36 from django.db import migrations, models + from shared.django_apps.migration_utils import RiskyAddField, RiskyRunSQL diff --git a/shared/django_apps/core/models.py b/shared/django_apps/core/models.py index ec3bbdbc8..d72b4c72e 100644 --- a/shared/django_apps/core/models.py +++ b/shared/django_apps/core/models.py @@ -1,5 +1,3 @@ -from django.db import models - # Create your models here. 
import random import string @@ -8,21 +6,20 @@ from django.contrib.postgres.fields import ArrayField, CITextField from django.contrib.postgres.indexes import GinIndex, OpClass -from django.utils.functional import cached_property from django.db import models from django.db.models.functions import Lower, Substr, Upper from django.forms import ValidationError from django.utils import timezone +from django.utils.functional import cached_property from django_prometheus.models import ExportModelOperationsMixin - from model_utils import FieldTracker from shared.django_apps.codecov.models import BaseCodecovModel - from shared.django_apps.core.encoders import ReportJSONEncoder from shared.django_apps.core.managers import RepositoryManager from shared.django_apps.utils.config import should_write_data_to_storage_config_check + class DateTimeWithoutTZField(models.DateTimeField): def db_type(self, connection): return "timestamp" @@ -259,7 +256,7 @@ def commitreport(self): ] return reports[0] if reports else None - #TODO: needs porting; property heavily tethered to report service + # TODO: needs porting; property heavily tethered to report service # @cached_property # def full_report(self) -> Optional[Report]: # # TODO: we should probably remove use of this method since it inverts the diff --git a/shared/django_apps/reports/migrations/0007_auto_20230220_1245.py b/shared/django_apps/reports/migrations/0007_auto_20230220_1245.py index 848e903ae..b9b3977ce 100644 --- a/shared/django_apps/reports/migrations/0007_auto_20230220_1245.py +++ b/shared/django_apps/reports/migrations/0007_auto_20230220_1245.py @@ -1,6 +1,7 @@ # Generated by Django 3.2.12 on 2023-02-20 12:45 from django.db import migrations + from shared.django_apps.migration_utils import RiskyRunSQL diff --git a/shared/django_apps/reports/models.py b/shared/django_apps/reports/models.py index a0e49ec1e..f7ff0fc19 100644 --- a/shared/django_apps/reports/models.py +++ b/shared/django_apps/reports/models.py @@ -5,10 +5,11 @@ from django.db import models from django.urls import reverse from django_prometheus.models import ExportModelOperationsMixin + +from shared.django_apps.codecov.models import BaseCodecovModel from shared.django_apps.reports.managers import CommitReportManager from shared.django_apps.utils.config import should_write_data_to_storage_config_check from shared.django_apps.utils.services import get_short_service_name -from shared.django_apps.codecov.models import BaseCodecovModel from shared.reports.enums import UploadState, UploadType from shared.upload.constants import ci diff --git a/shared/django_apps/utils/config.py b/shared/django_apps/utils/config.py index 893c98316..976eab1d4 100644 --- a/shared/django_apps/utils/config.py +++ b/shared/django_apps/utils/config.py @@ -1,5 +1,6 @@ from shared.config import get_config + def should_write_data_to_storage_config_check( master_switch_key: str, is_codecov_repo: bool, repoid: int ) -> bool: @@ -18,4 +19,4 @@ def should_write_data_to_storage_config_check( is_in_allowed_repoids = True else: is_in_allowed_repoids = False - return master_write_switch and (is_codecov_repo or is_in_allowed_repoids) \ No newline at end of file + return master_write_switch and (is_codecov_repo or is_in_allowed_repoids) diff --git a/shared/plan/service.py b/shared/plan/service.py index 67fa21a30..62ab72e19 100644 --- a/shared/plan/service.py +++ b/shared/plan/service.py @@ -3,7 +3,6 @@ from typing import List, Optional from codecov.commands.exceptions import ValidationError -from 
shared.django_apps.codecov_auth.models import Owner from plan.constants import ( BASIC_PLAN, FREE_PLAN, @@ -21,6 +20,8 @@ ) from services import sentry +from shared.django_apps.codecov_auth.models import Owner + log = logging.getLogger(__name__) diff --git a/shared/plan/test_plan.py b/shared/plan/test_plan.py index 4bef3d5d6..ce9bcb62e 100644 --- a/shared/plan/test_plan.py +++ b/shared/plan/test_plan.py @@ -1,11 +1,10 @@ from datetime import datetime, timedelta from unittest.mock import patch -from django.test import TestCase -from freezegun import freeze_time - from codecov.commands.exceptions import ValidationError from codecov_auth.tests.factories import OwnerFactory +from django.test import TestCase +from freezegun import freeze_time from plan.constants import ( BASIC_PLAN, FREE_PLAN, From b6e140c41f381e5b6f4ed488cb2390e1032be839 Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 29 Feb 2024 19:42:35 -0800 Subject: [PATCH 06/36] feat: add legacy_migrations app --- shared/django_apps/dummy_settings.py | 5 + .../django_apps/legacy_migrations/__init__.py | 0 .../migrations/0001_initial.py | 31 ++ .../migrations/0002_yaml_history_table.py | 96 +++++ .../migrations/0003_auto_20230120_1837.py | 38 ++ .../migrations/0004_auto_20231024_1937.py | 78 ++++ .../legacy_migrations/migrations/__init__.py | 0 .../migrations/legacy_sql/main/__init__.py | 0 .../legacy_sql/main/functions/__init__.py | 0 .../legacy_sql/main/functions/aggregates.py | 71 ++++ .../main/functions/array_append_unique.py | 11 + .../legacy_sql/main/functions/coverage.py | 23 ++ .../main/functions/get_access_token.py | 14 + .../legacy_sql/main/functions/get_author.py | 13 + .../legacy_sql/main/functions/get_commit.py | 139 +++++++ .../legacy_sql/main/functions/get_customer.py | 133 +++++++ .../main/functions/get_graph_for.py | 240 ++++++++++++ .../legacy_sql/main/functions/get_ownerid.py | 93 +++++ .../legacy_sql/main/functions/get_repo.py | 79 ++++ .../legacy_sql/main/functions/get_user.py | 33 ++ .../main/functions/insert_commit.py | 29 ++ .../legacy_sql/main/functions/main.py | 33 ++ .../main/functions/refresh_repos.py | 160 ++++++++ .../legacy_sql/main/functions/update_json.py | 57 +++ .../main/functions/verify_session.py | 15 + .../migrations/legacy_sql/main/main.py | 24 ++ .../legacy_sql/main/tables/__init__.py | 0 .../legacy_sql/main/tables/branches.py | 17 + .../main/tables/commit_notifications.py | 17 + .../legacy_sql/main/tables/commits.py | 31 ++ .../migrations/legacy_sql/main/tables/main.py | 21 ++ .../legacy_sql/main/tables/owners.py | 51 +++ .../legacy_sql/main/tables/pulls.py | 24 ++ .../legacy_sql/main/tables/reports.py | 142 +++++++ .../legacy_sql/main/tables/repos.py | 31 ++ .../legacy_sql/main/tables/sessions.py | 15 + .../legacy_sql/main/tables/users.py | 15 + .../legacy_sql/main/triggers/__init__.py | 0 .../legacy_sql/main/triggers/branches.py | 32 ++ .../legacy_sql/main/triggers/commits.py | 108 ++++++ .../legacy_sql/main/triggers/main.py | 13 + .../legacy_sql/main/triggers/owners.py | 97 +++++ .../legacy_sql/main/triggers/pulls.py | 17 + .../legacy_sql/main/triggers/repos.py | 73 ++++ .../migrations/legacy_sql/main/types.py | 26 ++ .../migrations/legacy_sql/upgrades/main.py | 55 +++ .../migrations/legacy_sql/upgrades/v440.py | 353 ++++++++++++++++++ .../migrations/legacy_sql/upgrades/v4410.py | 29 ++ .../migrations/legacy_sql/upgrades/v442.py | 104 ++++++ .../migrations/legacy_sql/upgrades/v443.py | 191 ++++++++++ .../migrations/legacy_sql/upgrades/v446.py | 67 ++++ .../migrations/legacy_sql/upgrades/v447.py 
| 25 ++ .../migrations/legacy_sql/upgrades/v448.py | 27 ++ .../migrations/legacy_sql/upgrades/v449.py | 41 ++ .../migrations/legacy_sql/upgrades/v451.py | 43 +++ .../migrations/legacy_sql/upgrades/v4510.py | 7 + .../migrations/legacy_sql/upgrades/v452.py | 14 + .../migrations/legacy_sql/upgrades/v453.py | 20 + .../migrations/legacy_sql/upgrades/v454.py | 148 ++++++++ .../migrations/legacy_sql/upgrades/v455.py | 14 + .../migrations/legacy_sql/upgrades/v461.py | 12 + .../django_apps/legacy_migrations/models.py | 25 ++ 62 files changed, 3320 insertions(+) create mode 100644 shared/django_apps/legacy_migrations/__init__.py create mode 100644 shared/django_apps/legacy_migrations/migrations/0001_initial.py create mode 100644 shared/django_apps/legacy_migrations/migrations/0002_yaml_history_table.py create mode 100644 shared/django_apps/legacy_migrations/migrations/0003_auto_20230120_1837.py create mode 100644 shared/django_apps/legacy_migrations/migrations/0004_auto_20231024_1937.py create mode 100644 shared/django_apps/legacy_migrations/migrations/__init__.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/__init__.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/__init__.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/aggregates.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/array_append_unique.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/coverage.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_access_token.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_author.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_commit.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_customer.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_graph_for.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_ownerid.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_repo.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_user.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/insert_commit.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/main.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/refresh_repos.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/update_json.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/verify_session.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/main.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/__init__.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/branches.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/commit_notifications.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/commits.py create mode 100644 
shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/main.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/owners.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/pulls.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/reports.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/repos.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/sessions.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/users.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/__init__.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/branches.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/commits.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/main.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/owners.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/pulls.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/repos.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/main/types.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/main.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v440.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v4410.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v442.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v443.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v446.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v447.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v448.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v449.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v451.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v4510.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v452.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v453.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v454.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v455.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v461.py create mode 100644 shared/django_apps/legacy_migrations/models.py diff --git a/shared/django_apps/dummy_settings.py b/shared/django_apps/dummy_settings.py index 525fe18cc..18d80bd19 100644 --- a/shared/django_apps/dummy_settings.py +++ b/shared/django_apps/dummy_settings.py @@ -15,6 +15,7 @@ "shared.django_apps.codecov_auth", "shared.django_apps.core", "shared.django_apps.reports", + "shared.django_apps.legacy_migrations", ] MIDDLEWARE = [] @@ -24,8 +25,12 @@ TELEMETRY_VANILLA_DB = "default" TELEMETRY_TIMESCALE_DB = "timeseries" +# Needed 
for migrations that depend on settings.auth_user_model AUTH_USER_MODEL = "codecov_auth.User" +# Needed as certain migrations refer to it +SKIP_RISKY_MIGRATION_STEPS = get_config("migrations", "skip_risky_steps", default=False) + TEST = True # Database diff --git a/shared/django_apps/legacy_migrations/__init__.py b/shared/django_apps/legacy_migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/legacy_migrations/migrations/0001_initial.py b/shared/django_apps/legacy_migrations/migrations/0001_initial.py new file mode 100644 index 000000000..2f110bdf9 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/0001_initial.py @@ -0,0 +1,31 @@ +# Generated by Django 3.1.6 on 2021-03-15 20:15 + +from django.conf import settings +from django.db import migrations + +from .legacy_sql.main.main import run_sql as main_run_sql +from .legacy_sql.upgrades.main import run_sql as upgrade_run_sql + +BASE_VERSION = "base" + + +def forwards_func(apps, schema_editor): + Version = apps.get_model("core", "Version") + + schema_editor.execute("create table if not exists version (version text);") + + db_version = Version.objects.first() + current_version = db_version.version if db_version else BASE_VERSION + + if current_version == BASE_VERSION: + main_run_sql(schema_editor) + return + + upgrade_run_sql(schema_editor, current_version) + + +class Migration(migrations.Migration): + + dependencies = [("core", "0001_initial")] + + operations = [migrations.RunPython(forwards_func)] diff --git a/shared/django_apps/legacy_migrations/migrations/0002_yaml_history_table.py b/shared/django_apps/legacy_migrations/migrations/0002_yaml_history_table.py new file mode 100644 index 000000000..ac832f838 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/0002_yaml_history_table.py @@ -0,0 +1,96 @@ +# Generated by Django 3.2.12 on 2022-04-19 20:22 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("legacy_migrations", "0001_initial"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[ + migrations.RunSQL( + sql=""" + create table if not exists yaml_history + ( + id serial primary key, + ownerid integer not null + references owners + on delete cascade, + timestamp timestamp with time zone not null, + author integer + references owners + on delete cascade, + message text, + source text not null, + diff text + ); + """, + reverse_sql="drop table yaml_history;", + ), + migrations.RunSQL( + sql=""" + create index if not exists yaml_history_ownerid_timestamp + on yaml_history (ownerid, timestamp); + """, + reverse_sql="drop index if exists yaml_history_ownerid_timestamp;", + ), + ], + state_operations=[ + migrations.CreateModel( + name="YamlHistory", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("timestamp", models.DateTimeField()), + ("message", models.TextField(blank=True, null=True)), + ("source", models.TextField()), + ("diff", models.TextField(null=True)), + ( + "author", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="authors", + db_column="author", + to="codecov_auth.owner", + ), + ), + ( + "ownerid", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="ownerids", 
+ db_column="ownerid", + to="codecov_auth.owner", + ), + ), + ], + options={ + "db_table": "yaml_history", + }, + ), + migrations.AddIndex( + model_name="yamlhistory", + index=models.Index( + fields=["ownerid", "timestamp"], + name="yaml_histor_ownerid_74e79b_idx", + ), + ), + ], + ) + ] diff --git a/shared/django_apps/legacy_migrations/migrations/0003_auto_20230120_1837.py b/shared/django_apps/legacy_migrations/migrations/0003_auto_20230120_1837.py new file mode 100644 index 000000000..a3a2c8d24 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/0003_auto_20230120_1837.py @@ -0,0 +1,38 @@ +# Generated by Django 3.2.12 on 2023-01-20 18:37 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("legacy_migrations", "0002_yaml_history_table"), + ] + + # These 2 triggers were wrong in the database and did not match what is found + # in the codebase. They were casting the values to ::text types which was breaking + # the case-insensitive comparisons of ::citext. These migrations just drop and + # recreate the triggers exactly as they appear in the `legacy_sql` files. + + operations = [ + migrations.RunSQL( + """ + drop trigger owners_before_update on owners; + + create trigger owners_before_update before update on owners + for each row + when (new.username is not null and new.username is distinct from old.username) + execute procedure owners_before_insert_or_update(); + """ + ), + migrations.RunSQL( + """ + drop trigger repos_before_update on repos; + + create trigger repos_before_update before update on repos + for each row + when (new.name is not null and new.name is distinct from old.name) + execute procedure repos_before_insert_or_update(); + """ + ), + ] diff --git a/shared/django_apps/legacy_migrations/migrations/0004_auto_20231024_1937.py b/shared/django_apps/legacy_migrations/migrations/0004_auto_20231024_1937.py new file mode 100644 index 000000000..c99005586 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/0004_auto_20231024_1937.py @@ -0,0 +1,78 @@ +# Generated by Django 4.2.3 on 2023-10-24 19:37 + +from django.db import migrations +from shared.django_apps.migration_utils import RiskyRunSQL + +# from `legacy_migrations/migrations/legacy_sql/main/triggers/commits.py` +original_function = """ +create or replace function commits_insert_pr_branch() returns trigger as $$ +begin + if new.pullid is not null and new.merged is not true then + begin + insert into pulls (repoid, pullid, author, head) + values (new.repoid, new.pullid, new.author, new.commitid); + exception when unique_violation then + end; + end if; + + if new.branch is not null then + begin + insert into branches (repoid, updatestamp, branch, authors, head) + values (new.repoid, new.timestamp, + new.branch, + case when new.author is not null then array[new.author] else null end, + new.commitid); + exception when unique_violation then + end; + end if; + + update repos + set updatestamp=now() + where repoid=new.repoid; + + return null; +end; +$$ language plpgsql; +""" + +# we're removing the `update repos` part since it can be very slow +replacement_function = """ +create or replace function commits_insert_pr_branch() returns trigger as $$ +begin + if new.pullid is not null and new.merged is not true then + begin + insert into pulls (repoid, pullid, author, head) + values (new.repoid, new.pullid, new.author, new.commitid); + exception when unique_violation then + end; + end if; + + if new.branch is not null then + begin + insert into branches 
(repoid, updatestamp, branch, authors, head) + values (new.repoid, new.timestamp, + new.branch, + case when new.author is not null then array[new.author] else null end, + new.commitid); + exception when unique_violation then + end; + end if; + + return null; +end; +$$ language plpgsql; +""" + + +class Migration(migrations.Migration): + + dependencies = [ + ("legacy_migrations", "0003_auto_20230120_1837"), + ] + + operations = [ + RiskyRunSQL( + replacement_function, + reverse_sql=original_function, + ), + ] diff --git a/shared/django_apps/legacy_migrations/migrations/__init__.py b/shared/django_apps/legacy_migrations/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/__init__.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/__init__.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/aggregates.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/aggregates.py new file mode 100644 index 000000000..85354b128 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/aggregates.py @@ -0,0 +1,71 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + drop function if exists _pop_first_as_json(jsonb[]) cascade; + drop function if exists _max_coverage(jsonb[]) cascade; + drop function if exists _min_coverage(jsonb[]) cascade; + + create or replace function _pop_first_as_json(jsonb[]) returns jsonb as $$ + select $1[1]::jsonb; + $$ language sql immutable; + + + create or replace function _max_coverage(jsonb[], jsonb) returns jsonb[] as $$ + select case when $1 is null then array[$2] + when ($1[1]->>'c')::numeric > ($2->>'c')::numeric then $1 + else array[$2] end; + $$ language sql immutable; + + + create aggregate max_coverage(jsonb) ( + SFUNC = _max_coverage, + STYPE = jsonb[], + FINALFUNC = _pop_first_as_json + ); + + + create or replace function _min_coverage(jsonb[], jsonb) returns jsonb[] as $$ + select case when $1 is null then array[$2] + when ($1[1]->>'c')::numeric < ($2->>'c')::numeric then $1 + else array[$2] end; + $$ language sql immutable; + + + create aggregate min_coverage(jsonb) ( + SFUNC = _min_coverage, + STYPE = jsonb[], + FINALFUNC = _pop_first_as_json + ); + + + create or replace function ratio(int, int) returns text as $$ + select case when $2 = 0 then '0' else round(($1::numeric/$2::numeric)*100.0, 5)::text end; + $$ language sql immutable; + + + create or replace function _agg_report_totals(text[], jsonb) returns text[] as $$ + -- fnhmpcbdMs + select case when $1 is null + then array[$2->>0, $2->>1, $2->>2, $2->>3, + $2->>4, $2->>5, $2->>6, $2->>7, + $2->>8, $2->>9] + else array[($1[1]::int + ($2->>0)::int)::text, + ($1[2]::int + ($2->>1)::int)::text, + ($1[3]::int + ($2->>2)::int)::text, + ($1[4]::int + ($2->>3)::int)::text, + ($1[5]::int + ($2->>4)::int)::text, + ratio(($1[3]::int + ($2->>2)::int), ($1[2]::int + ($2->>1)::int)), + ($1[7]::int + ($2->>6)::int)::text, + ($1[8]::int + ($2->>7)::int)::text, + ($1[9]::int + ($2->>8)::int)::text, + ($1[10]::int + ($2->>9)::int)::text] end; + $$ language sql immutable; + + + create aggregate agg_totals(jsonb) ( + SFUNC = 
_agg_report_totals, + STYPE = text[] + ); + + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/array_append_unique.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/array_append_unique.py new file mode 100644 index 000000000..85e6b7adc --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/array_append_unique.py @@ -0,0 +1,11 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function array_append_unique(anyarray, anyelement) returns anyarray as $$ + select case when $2 is null + then $1 + else array_remove($1, $2) || array[$2] + end; + $$ language sql immutable; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/coverage.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/coverage.py new file mode 100644 index 000000000..4d3b73766 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/coverage.py @@ -0,0 +1,23 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function get_coverage(service, citext, citext, citext default null) returns jsonb as $$ + -- floor is temporary here + with d as ( + select floor((c.totals->>'c')::numeric) as c, + coalesce((r.yaml->'coverage'->'range')::jsonb, + (o.yaml->'coverage'->'range')::jsonb) as r, + case when r.private then r.image_token else null end as t + from repos r + inner join owners o using (ownerid) + left join branches b using (repoid) + inner join commits c on b.repoid=c.repoid and c.commitid=b.head + where o.service = $1 + and o.username = $2 + and r.name = $3 + and b.branch = coalesce($4, r.branch) + limit 1 + ) select to_jsonb(d) from d; + $$ language sql stable; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_access_token.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_access_token.py new file mode 100644 index 000000000..9f0cd688c --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_access_token.py @@ -0,0 +1,14 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function get_access_token(int) returns jsonb as $$ + with data as ( + select ownerid, oauth_token, username + from owners o + where ownerid = $1 + and oauth_token is not null + limit 1 + ) select to_jsonb(data) from data; + $$ language sql stable strict; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_author.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_author.py new file mode 100644 index 000000000..1cd068679 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_author.py @@ -0,0 +1,13 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function get_author(int) returns jsonb as $$ + with data as ( + select service, service_id, username, email, name + from owners + where ownerid=$1 + limit 1 + ) select to_jsonb(data) from data; + $$ language sql stable strict; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_commit.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_commit.py new file mode 100644 index 000000000..ffa3633f9 --- /dev/null +++ 
b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_commit.py @@ -0,0 +1,139 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function get_commitid_from_short(int, text) returns text as $$ + select commitid + from commits + where repoid = $1 + and commitid like $2||'%%'; + $$ language sql immutable; + + + -- pull + create or replace function get_tip_of_pull(int, int) returns text as $$ + select head + from pulls + where repoid = $1 + and pullid = $2 + limit 1; + $$ language sql stable; + + + -- tips + create or replace function get_tip(int, text) returns text as $$ + select case when char_length($2) = 40 then $2 + else coalesce((select head from branches where repoid=$1 and branch=$2 limit 1), + (select commitid from commits where repoid=$1 and commitid like $2||'%%' limit 1)) end + limit 1; + $$ language sql stable; + + + -- branch + create or replace function get_tip_of_branch(int, text) returns text as $$ + select head + from branches + where repoid = $1 + and branch = $2 + limit 1; + $$ language sql stable; + + + create or replace function get_commit_totals(int, text) returns jsonb as $$ + select totals + from commits + where repoid = $1 + and commitid = $2 + limit 1; + $$ language sql stable; + + + create or replace function get_commit_totals(int, text, text) returns jsonb as $$ + select report->'files'->$3->1 + from commits + where repoid = $1 + and commitid = $2 + limit 1; + $$ language sql stable; + + + create or replace function get_commit(repoid integer, _commitid text) returns jsonb as $$ + with d as ( + select timestamp, commitid, branch, pullid::text, parent, + ci_passed, updatestamp, message, deleted, totals, + get_author(author) as author, state, merged, + get_commit_totals($1, c.parent) as parent_totals, notified, + report + from commits c + where c.repoid = $1 + and commitid = (case when char_length(_commitid) < 40 then get_commitid_from_short($1, _commitid) else _commitid end) + limit 1 + ) select to_jsonb(d) from d; + $$ language sql stable; + + + create or replace function get_commit_minimum(int, text) returns jsonb as $$ + with d as ( + select timestamp, commitid, ci_passed, message, + get_author(author) as author, totals + from commits + where repoid = $1 + and commitid = $2 + limit 1 + ) select to_jsonb(d) from d; + $$ language sql stable; + + + create or replace function get_commit_on_branch(int, text) returns jsonb as $$ + select get_commit($1, head) + from branches + where repoid = $1 and branch = $2 + limit 1; + $$ language sql stable; + + + create or replace function find_parent_commit(_repoid int, + _this_commitid text, + _this_timestamp timestamp, + _parent_commitids text[], + _branch text, + _pullid int) returns text as $$ + declare commitid_ text default null; + begin + if array_length(_parent_commitids, 1) > 0 then + -- first: find a direct decendant + select commitid into commitid_ + from commits + where repoid = _repoid + and array[commitid] <@ _parent_commitids + limit 1; + end if; + + if commitid_ is null then + -- second: find latest on branch + select commitid into commitid_ + from commits + where repoid = _repoid + and branch = _branch + and pullid is not distinct from _pullid + and commitid != _this_commitid + and ci_passed + and deleted is not true + and timestamp < _this_timestamp + order by timestamp desc + limit 1; + + if commitid_ is null then + -- third: use pull base + select base into commitid_ + from pulls + where repoid = _repoid + and pullid = _pullid + limit 1; + end if; + end 
if; + + return commitid_; + end; + $$ language plpgsql stable; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_customer.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_customer.py new file mode 100644 index 000000000..61e55a0fc --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_customer.py @@ -0,0 +1,133 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function get_gitlab_root_group(int) returns jsonb as $$ + /* get root group by following parent_service_id to highest level */ + with recursive tree as ( + select o.service_id, + o.parent_service_id, + o.ownerid, + 1 as depth + from owners o + where o.ownerid = $1 + and o.service = 'gitlab' + and o.parent_service_id is not null + + union all + + select o.service_id, + o.parent_service_id, + o.ownerid, + depth + 1 as depth + from tree t + join owners o + on o.service_id = t.parent_service_id + /* avoid infinite loop in case of cycling (2 > 5 > 3 > 2 > 5...) up to Gitlab max subgroup depth of 20 */ + where depth <= 20 + ), data as ( + select t.ownerid, + t.service_id + from tree t + where t.parent_service_id is null + ) + select to_jsonb(data) from data limit 1; + $$ language sql stable strict; + + create or replace function get_gitlab_repos_activated(int, text) returns int as $$ + declare _repos_activated int; + declare _decendents_owner_ids int[]; + begin + /* get array of owner ids for all subgroups under this group */ + select array( + with recursive tree as ( + /* seed the recursive query */ + select ownerid, + service_id, + array[]::text[] as ancestors_service_id, + 1 as depth + from owners + where parent_service_id is null + and service = 'gitlab' + and ownerid = $1 + + union all + + /* find the descendents */ + select owners.ownerid, + owners.service_id, + tree.ancestors_service_id || owners.parent_service_id, + depth + 1 as depth + from owners, tree + where owners.parent_service_id = tree.service_id + /* avoid infinite loop in case of cycling (2 > 5 > 3 > 2 > 5...) 
up to Gitlab max subgroup depth of 20 */ + and depth <= 20 + ) + select ownerid + from tree + where $2 = any(tree.ancestors_service_id) + ) into _decendents_owner_ids; + + /* get count of all repos that are active and private owned by this gitlab group and all of its subgroups */ + select count(*) into _repos_activated + from repos + where ownerid in (select unnest(array_append(_decendents_owner_ids, $1))) + and private + and activated; + + return _repos_activated; + end; + $$ language plpgsql stable; + + create or replace function get_repos_activated(int) returns int as $$ + declare _repos_activated int; + declare _service text; + declare _service_id text; + begin + select o.service, o.service_id into _service, _service_id + from owners o where o.ownerid = $1; + + if _service = 'gitlab' then + select get_gitlab_repos_activated($1, _service_id) into _repos_activated; + else + select count(*) into _repos_activated + from repos + where ownerid=$1 + and private + and activated; + end if; + + return _repos_activated; + end; + $$ language plpgsql stable; + + create or replace function get_customer(int) returns jsonb as $$ + with data as ( + select t.stripe_customer_id, + t.stripe_subscription_id, + t.ownerid::text, + t.service, + t.service_id, + t.plan_user_count, + t.plan_provider, + t.plan_auto_activate, + t.plan_activated_users, + t.plan, + t.email, + t.free, + t.did_trial, + t.invoice_details, + t.yaml, + t.student, + t.student_created_at, + t.student_updated_at, + b.username as bot_username, + get_users(t.admins) as admins, + get_repos_activated($1::int) as repos_activated + from owners t + LEFT JOIN owners b ON (b.ownerid = t.bot) + where t.ownerid = $1 + limit 1 + ) select to_jsonb(data) from data limit 1; + $$ language sql stable strict; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_graph_for.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_graph_for.py new file mode 100644 index 000000000..2cc54f7f1 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_graph_for.py @@ -0,0 +1,240 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function sum_of_file_totals_filtering_sessionids(jsonb, int[]) returns text[] as $$ + -- sum totals for filtered flags + -- in [, , ], [1, 2] + -- out ( + ) = + with totals as ( + select $1->i as t from unnest($2) as i + ) select agg_totals(totals.t) from totals; + $$ language sql immutable; + + + create or replace function extract_totals(files jsonb, sessionids int[]) returns jsonb as $$ + -- return {"filename": , ...} + with files as ( + select case + when sessionids is not null then (select jsonb_agg(row(key, sum_of_file_totals_filtering_sessionids(value->2, sessionids))) from jsonb_each(files)) + else (select jsonb_agg(row(key, value->1)) from jsonb_each(files)) + end as data + ) select to_jsonb(data) from files; + $$ language sql immutable; + + + create or replace function list_sessionid_by_filtering_flags(sessions jsonb, flags text[]) returns int[] as $$ + -- return session index where flags overlap $1 + with indexes as ( + select (session.key)::int as key + from jsonb_each(sessions) as session + where (session.value->>'f')::text is not null + and flags <@ (select array_agg(trim(f::text, '"')) from jsonb_array_elements((session.value->'f')) f)::text[] + ) select array_agg(key) from indexes; + $$ language sql strict immutable; + + + create or replace function total_list_to_json(totals text[]) 
returns jsonb as $$ + select ('{"f":'||totals[1]||','|| + '"n":'||totals[2]||','|| + '"h":'||totals[3]||','|| + '"m":'||totals[4]||','|| + '"p":'||totals[5]||','|| + '"c":'||totals[6]||','|| + '"b":'||totals[7]||','|| + '"d":'||totals[8]||','|| + '"M":'||totals[9]||','|| + '"s":'||totals[10]||'}')::jsonb; + $$ language sql strict immutable; + + + create or replace function sum_session_totals(sessions jsonb, flags text[]) returns jsonb as $$ + -- sum totals for filtered flags + -- in {"0": {"t": }, "1": {"t": }, "2", {"t": }], [1, 2] + -- out ( + ) = + with totals as ( + select sessions->(i::text)->'t' as t from unnest(list_sessionid_by_filtering_flags(sessions, flags)) as i + ) select total_list_to_json(agg_totals(totals.t)) from totals; + $$ language sql strict immutable; + + + create or replace function get_graph_for_flare_pull(int, text, text, text[]) returns jsonb as $$ + with data as ( + select r.repoid, r.service_id, p.head as commitid, r.branch, + p.flare, + case when p.flare is null + then extract_totals(c.report->'files', list_sessionid_by_filtering_flags(c.report->'sessions', $4)) + else null + end as files_by_total, + coalesce((r.yaml->'coverage'->'range')::jsonb, + (o.yaml->'coverage'->'range')::jsonb) as coverage_range + from repos r + inner join owners o using (ownerid) + inner join pulls p using (repoid) + inner join commits c on c.repoid = r.repoid and c.commitid = p.head + where r.repoid = $1 + and p.pullid = $2::int + and (not r.private or r.image_token = $3) + limit 1 + ) select to_jsonb(data) from data limit 1; + $$ language sql stable; + + + create or replace function get_graph_for_flare_commit(int, text, text, text[]) returns jsonb as $$ + with data as ( + select r.repoid, r.service_id, c.commitid, r.branch, + extract_totals(c.report->'files', list_sessionid_by_filtering_flags(c.report->'sessions', $4)) as files_by_total, + coalesce((r.yaml->'coverage'->'range')::jsonb, + (o.yaml->'coverage'->'range')::jsonb) as coverage_range + from repos r + inner join owners o using (ownerid) + inner join commits c using (repoid) + where r.repoid = $1 + and c.commitid = $2 + and (not r.private or r.image_token = $3) + limit 1 + ) select to_jsonb(data) from data limit 1; + $$ language sql stable; + + + create or replace function get_graph_for_flare_branch(int, text, text, text[]) returns jsonb as $$ + with data as ( + select r.repoid, r.service_id, c.commitid, r.branch, + extract_totals(c.report->'files', list_sessionid_by_filtering_flags(c.report->'sessions', $4)) as files_by_total, + coalesce((r.yaml->'coverage'->'range')::jsonb, + (o.yaml->'coverage'->'range')::jsonb) as coverage_range + from repos r + inner join owners o using (ownerid) + inner join branches b using (repoid) + inner join commits c on c.repoid = r.repoid and c.commitid = b.head + where r.repoid = $1 + and b.branch = case when $2 is null then r.branch else $2 end + and (not r.private or r.image_token = $3) + limit 1 + ) select to_jsonb(data) from data limit 1; + $$ language sql stable; + + + create or replace function get_graph_for_totals_pull(int, text, text, text[]) returns jsonb as $$ + with data as ( + select r.repoid, r.service_id, r.branch, + p.base as base_commitid, + case when $4 is null + then (select totals from commits where repoid=p.repoid and commitid=p.base limit 1) + else (select sum_session_totals(report->'sessions', $4) + from commits + where repoid=$1 + and commitid=p.base + limit 1) + end as base_totals, + p.head as head_commitid, + case when $4 is null + then (select totals from commits where 
repoid=p.repoid and commitid=p.head limit 1) + else (select sum_session_totals(report->'sessions', $4) + from commits + where repoid=$1 + and commitid=p.head + limit 1) + end as head_totals, + coalesce((r.yaml->'coverage'->'range')::jsonb, + (o.yaml->'coverage'->'range')::jsonb) as coverage_range + from repos r + inner join owners o using (ownerid) + inner join pulls p using (repoid) + where r.repoid = $1 + and p.pullid = $2::int + and (not r.private or r.image_token = $3) + limit 1 + ) select to_jsonb(data) from data limit 1; + $$ language sql stable; + + + create or replace function get_graph_for_totals_commit(int, text, text, text[]) returns jsonb as $$ + with data as ( + select r.repoid, r.service_id, r.branch, + base.commitid as base_commitid, + case when $4 is null + then base.totals + else sum_session_totals(base.report->'sessions', $4) + end as base_totals, + head.commitid as head_commitid, + case when $4 is null + then head.totals + else sum_session_totals(head.report->'sessions', $4) + end as head_totals, + coalesce((r.yaml->'coverage'->'range')::jsonb, + (o.yaml->'coverage'->'range')::jsonb) as coverage_range + from repos r + inner join owners o using (ownerid) + inner join commits head using (repoid) + left join commits base on base.repoid = r.repoid + and base.commitid = head.parent + where r.repoid = $1 + and head.commitid = $2 + and (not r.private or r.image_token = $3) + limit 1 + ) select to_jsonb(data) from data limit 1; + $$ language sql stable; + + + create or replace function get_graph_for_totals_branch(int, text, text, text[]) returns jsonb as $$ + with data as ( + select r.repoid, r.service_id, r.branch, + base.commitid as base_commitid, + case when $4 is null + then base.totals + else sum_session_totals(base.report->'sessions', $4) + end as base_totals, + head.commitid as head_commitid, + case when $4 is null + then head.totals + else sum_session_totals(head.report->'sessions', $4) + end as head_totals, + coalesce((r.yaml->'coverage'->'range')::jsonb, + (o.yaml->'coverage'->'range')::jsonb) as coverage_range + from repos r + inner join owners o using (ownerid) + inner join branches b using (repoid) + left join commits base on base.repoid = r.repoid + and base.commitid = b.base + inner join commits head on head.repoid = r.repoid + and head.commitid = b.head + where r.repoid = $1 + and b.branch = case when $2 is null then r.branch else $2 end + and (not r.private or r.image_token = $3) + limit 1 + ) select to_jsonb(data) from data limit 1; + $$ language sql stable; + + + create or replace function get_graph_for_commits_pull(int, text, text, text[]) returns jsonb as $$ + with data as ( + select r.repoid, r.service_id, r.branch, + coalesce((r.yaml->'coverage'->'range')::jsonb, + (o.yaml->'coverage'->'range')::jsonb) as coverage_range + from repos r + inner join owners o using (ownerid) + inner join pulls p using (repoid) + where r.repoid = $1 + and p.pullid = $2::int + and (not r.private or r.image_token = $3) + limit 1 + ) select to_jsonb(data) from data limit 1; + $$ language sql stable; + + + create or replace function get_graph_for_commits_branch(int, text, text, text[]) returns jsonb as $$ + with data as ( + select r.repoid, r.service_id, r.branch, + coalesce((r.yaml->'coverage'->'range')::jsonb, + (o.yaml->'coverage'->'range')::jsonb) as coverage_range + from repos r + inner join owners o using (ownerid) + inner join branches b using (repoid) + where r.repoid = $1 + and b.branch = case when $2 is null then r.branch else $2 end + and (not r.private or r.image_token 
= $3) + limit 1 + ) select to_jsonb(data) from data limit 1; + $$ language sql stable; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_ownerid.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_ownerid.py new file mode 100644 index 000000000..fdf0fc13a --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_ownerid.py @@ -0,0 +1,93 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function get_ownerid_if_member(service, citext, int) returns int as $$ + select ownerid + from owners + where service=$1 + and username=$2::citext + and array[$3] <@ organizations + and private_access is true + limit 1; + $$ language sql stable strict; + + + create or replace function get_ownerid(service, text, citext, text, text) returns int as $$ + declare _ownerid int; + begin + + select ownerid into _ownerid + from owners + where service=$1 + and service_id=$2 + limit 1; + + if not found and $2 is not null then + insert into owners (service, service_id, username, name, email) + values ($1, $2, $3::citext, $4, $5) + returning ownerid into _ownerid; + end if; + + return _ownerid; + end; + $$ language plpgsql; + + + create or replace function try_to_auto_activate(int, int) returns boolean as $$ + update owners + set plan_activated_users = ( + case when coalesce(array_length(plan_activated_users, 1), 0) < plan_user_count -- we have credits + then array_append_unique(plan_activated_users, $2) -- add user + else plan_activated_users + end) + where ownerid=$1 + returning (plan_activated_users @> array[$2]); + $$ language sql volatile strict; + + + create or replace function get_owner(service, citext) returns jsonb as $$ + with data as ( + select service_id, service, ownerid::text, username, avatar_url, + updatestamp, plan, name, integration_id, free, + plan_activated_users, plan_auto_activate, plan_user_count + from owners + where service=$1 + and username=$2::citext + limit 1 + ) select to_jsonb(data) + from data + limit 1; + $$ language sql stable strict; + + + create or replace function get_teams(service, integer[]) returns jsonb as $$ + with data as ( + select service_id, service, ownerid::text, username, name + from owners + where service=$1 + and array[ownerid] <@ $2 + ) select jsonb_agg(data) from data; + $$ language sql stable strict; + + + create or replace function get_or_create_owner(service, text, text, text, text) returns int as $$ + declare _ownerid int; + begin + update owners + set username = $3, avatar_url = $4, parent_service_id = $5 + where service = $1 + and service_id = $2 + returning ownerid into _ownerid; + + if not found then + insert into owners (service, service_id, username, avatar_url, parent_service_id) + values ($1, $2, $3, $4, $5) + returning ownerid into _ownerid; + end if; + + return _ownerid; + + end; + $$ language plpgsql volatile; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_repo.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_repo.py new file mode 100644 index 000000000..2da4965f7 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_repo.py @@ -0,0 +1,79 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + -- used for app/tasks + create or replace function get_repo(int) returns jsonb as $$ + with d as (select o.service, o.username, o.service_id as owner_service_id, 
r.ownerid::text, + r.name, r.repoid::text, r.service_id, r.updatestamp, + r.branch, r.private, hookid, image_token, b.username as bot_username, + r.yaml, o.yaml as org_yaml, r.using_integration, o.plan, + (r.cache->>'yaml') as _yaml_location, + case when r.using_integration then o.integration_id else null end as integration_id, + get_access_token(coalesce(r.bot, o.bot, o.ownerid)) as token, + case when private and activated is not true and forkid is not null + then (select rr.activated from repos rr where rr.repoid = r.forkid limit 1) + else activated end as activated + from repos r + inner join owners o using (ownerid) + left join owners b ON (r.bot=b.ownerid) + where r.repoid = $1 + limit 1) select to_jsonb(d) from d; + $$ language sql stable strict; + + + -- used for app/handlers + create or replace function get_repo(int, citext) returns jsonb as $$ + with repo as ( + select r.yaml, r.name, "language", repoid::text, r.private, r.deleted, r.active, r.cache, b.username as bot_username, + r.branch, r.service_id, r.updatestamp, upload_token, image_token, hookid, using_integration, + case when private and activated is not true and forkid is not null + then (select rr.activated from repos rr where rr.repoid = r.forkid limit 1) + else activated end as activated + from repos r + left join owners b ON (r.bot=b.ownerid) + where r.ownerid = $1 and r.name = $2::citext + limit 1 + ) select to_jsonb(repo) from repo; + $$ language sql stable; + + + -- used for app/handlers/upload + create or replace function get_repo_by_token(uuid) returns jsonb as $$ + with d as ( + select get_repo(r.repoid) as repo, o.service + from repos r + inner join owners o using (ownerid) + where r.upload_token = $1 + limit 1 + ) select to_jsonb(d) from d limit 1; + $$ language sql stable; + + + -- used for app/handlers/teams + create or replace function get_repos(int, int default 0, int default 5) returns jsonb as $$ + with _repos as ( + select private, cache, name, updatestamp, upload_token, branch, + language, repoid::text, get_repo(forkid) as fork, yaml, + case when private and activated is not true and forkid is not null + then (select rr.activated from repos rr where rr.repoid = r.forkid limit 1) + else activated end as activated + from repos r + where ownerid = $1 + and active + offset $2 + limit $3 + ) select coalesce(jsonb_agg(_repos), '[]'::jsonb) from _repos; + $$ language sql stable; + + + create or replace function get_repoid(service, citext, citext) returns int as $$ + select repoid + from repos r + inner join owners o using (ownerid) + where o.service = $1 + and o.username = $2::citext + and r.name = $3::citext + limit 1 + $$ language sql stable; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_user.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_user.py new file mode 100644 index 000000000..95103add4 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/get_user.py @@ -0,0 +1,33 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function get_user(int) returns jsonb as $$ + with data as ( + select ownerid::text, private_access, staff, service, service_id, + username, organizations, avatar_url, + oauth_token, plan, permission, + free, email, name, createstamp + from owners + where ownerid=$1 + limit 1 + ) select to_jsonb(data) from data; + $$ language sql stable; + + + create or replace function get_username(int) returns citext as $$ + select username from 
owners where ownerid=$1 limit 1; + $$ language sql stable strict; + + + create or replace function get_users(int[]) returns jsonb as $$ + with data as ( + select service, service_id::text, ownerid::text, username, name, email, avatar_url + from owners + where array[ownerid] <@ $1 + limit array_length($1, 1) + ) select jsonb_agg(data) + from data + limit array_length($1, 1); + $$ language sql stable strict; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/insert_commit.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/insert_commit.py new file mode 100644 index 000000000..7bafbd6e3 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/insert_commit.py @@ -0,0 +1,29 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function insert_commit(int, text, text, int) returns void as $$ + begin + + update commits + set state='pending' + where repoid = $1 + and commitid = $2; + + if not found then + insert into commits (repoid, commitid, branch, pullid, merged, timestamp, state) + values ($1, $2, $3, $4, case when $4 is not null then false else null end, now(), 'pending') + on conflict (repoid, commitid) do update + set branch=$3, pullid=$4, + merged=(case when $4 is not null then false else null end), + state='pending'; + end if; + + update repos + set active=true, deleted=false, updatestamp=now() + where repoid = $1 + and (active is not true or deleted is true); + + end; + $$ language plpgsql volatile; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/main.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/main.py new file mode 100644 index 000000000..f404a7902 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/main.py @@ -0,0 +1,33 @@ +from .aggregates import run_sql as aggregates_run_sql +from .array_append_unique import run_sql as array_append_unique_run_sql +from .coverage import run_sql as coverage_run_sql +from .get_access_token import run_sql as get_access_token_run_sql +from .get_author import run_sql as get_author_run_sql +from .get_commit import run_sql as get_commit_run_sql +from .get_customer import run_sql as get_customer_run_sql +from .get_graph_for import run_sql as get_graph_for_run_sql +from .get_ownerid import run_sql as get_ownerid_run_sql +from .get_repo import run_sql as get_repo_run_sql +from .get_user import run_sql as get_user_run_sql +from .insert_commit import run_sql as insert_commit_run_sql +from .refresh_repos import run_sql as refresh_repos_run_sql +from .update_json import run_sql as update_json_run_sql +from .verify_session import run_sql as verify_session_run_sql + + +def run_sql(schema_editor): + aggregates_run_sql(schema_editor) + update_json_run_sql(schema_editor) + get_author_run_sql(schema_editor) + array_append_unique_run_sql(schema_editor) + coverage_run_sql(schema_editor) + get_access_token_run_sql(schema_editor) + get_repo_run_sql(schema_editor) + get_user_run_sql(schema_editor) + get_customer_run_sql(schema_editor) + get_commit_run_sql(schema_editor) + get_ownerid_run_sql(schema_editor) + verify_session_run_sql(schema_editor) + refresh_repos_run_sql(schema_editor) + insert_commit_run_sql(schema_editor) + get_graph_for_run_sql(schema_editor) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/refresh_repos.py 
b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/refresh_repos.py new file mode 100644 index 000000000..1037f5ba1 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/refresh_repos.py @@ -0,0 +1,160 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function refresh_teams(service, jsonb) returns int[] as $$ + declare ownerids int[]; + declare _ownerid int; + declare _team record; + begin + for _team in select d from jsonb_array_elements($2) d loop + update owners o + set username = (_team.d->>'username')::citext, + name = (_team.d->>'name')::text, + email = (_team.d->>'email')::text, + avatar_url = (_team.d->>'avatar_url')::text, + parent_service_id = (_team.d->>'parent_id')::text, + updatestamp = now() + where service = $1 + and service_id = (_team.d->>'id')::text + returning ownerid into _ownerid; + + if not found then + insert into owners (service, service_id, username, name, email, avatar_url, parent_service_id) + values ($1, + (_team.d->>'id')::text, + (_team.d->>'username')::citext, + (_team.d->>'name')::text, + (_team.d->>'email')::text, + (_team.d->>'avatar_url')::text, + (_team.d->>'parent_id')::text + ) + returning ownerid into _ownerid; + end if; + + select array_append(ownerids, _ownerid) into ownerids; + + end loop; + + return ownerids; + + end; + $$ language plpgsql volatile strict; + + + create or replace function refresh_repos(service, jsonb, int, boolean) returns text[] as $$ + declare _ text; + declare _branch text; + declare _forkid int; + declare _previous_ownerid int; + declare _ownerid int; + declare _repo record; + declare _repoid int; + declare _bot int; + declare repos text[]; + begin + + for _repo in select d from jsonb_array_elements($2) d loop + + select r.ownerid into _previous_ownerid + from repos r + inner join owners o using (ownerid) + where o.service = $1 + and r.service_id = (_repo.d->'repo'->>'service_id')::text + limit 1; + + -- owner + -- ===== + -- it's important to check all three below. otherwise update the record.
+ select ownerid, bot, (yaml->'codecov'->>'branch')::text + into _ownerid, _bot, _branch + from owners + where service = $1 + and service_id = (_repo.d->'owner'->>'service_id')::text + and username = (_repo.d->'owner'->>'username')::citext + limit 1; + + if not found then + update owners + set username = (_repo.d->'owner'->>'username')::citext, + updatestamp = now() + where service = $1 + and service_id = (_repo.d->'owner'->>'service_id')::text + returning ownerid, bot, (yaml->'codecov'->>'branch')::text + into _ownerid, _bot, _branch; + + if not found then + insert into owners (service, service_id, username, bot) + values ($1, (_repo.d->'owner'->>'service_id')::text, (_repo.d->'owner'->>'username')::citext, $3) + returning ownerid, bot into _ownerid, _bot; + end if; + + end if; + + -- fork + -- ==== + if (_repo.d->'repo'->>'fork') is not null then + -- converts fork into array + select refresh_repos($1, (select jsonb_agg(d.d::jsonb)::jsonb + from (select (_repo.d->'repo'->>'fork')::jsonb d limit 1) d + limit 1), null, null) + into _ + limit 1; + + -- get owner + select r.repoid into _forkid + from repos r + inner join owners o using (ownerid) + where o.service = $1 + and o.username = (_repo.d->'repo'->'fork'->'owner'->>'username')::citext + and r.name = (_repo.d->'repo'->'fork'->'repo'->>'name')::citext + limit 1; + else + _forkid := null; + end if; + + -- update repo + -- =========== + if _previous_ownerid is not null then + -- repo already existed with this service_id, update it + update repos set + private = ((_repo.d)->'repo'->>'private')::boolean, + forkid = _forkid, + language = ((_repo.d)->'repo'->>'language')::languages, + ownerid = _ownerid, + using_integration=(using_integration or $4), + name = (_repo.d->'repo'->>'name')::citext, + deleted = false, + updatestamp=now() + where ownerid = _previous_ownerid + and service_id = (_repo.d->'repo'->>'service_id')::text + returning repoid + into _repoid; + + -- new repo + -- ======== + else + insert into repos (service_id, ownerid, private, forkid, name, branch, language, using_integration) + values ((_repo.d->'repo'->>'service_id')::text, + _ownerid, + (_repo.d->'repo'->>'private')::boolean, + _forkid, + (_repo.d->'repo'->>'name')::citext, + coalesce(_branch, (_repo.d->'repo'->>'branch')), + (_repo.d->'repo'->>'language')::languages, + $4) + returning repoid into _repoid; + + end if; + + -- return private repoids + if (_repo.d->'repo'->>'private')::boolean then + repos = array_append(repos, _repoid::text); + end if; + + end loop; + + return repos; + end; + $$ language plpgsql volatile; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/update_json.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/update_json.py new file mode 100644 index 000000000..2e9dfe6fd --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/update_json.py @@ -0,0 +1,57 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function add_key_to_json(jsonb, text, jsonb) returns jsonb as $$ + select case when $1 is null and $3 is null then ('{"'||$2||'":null}')::jsonb + when $1 is null or $1::text = '{}' then ('{"'||$2||'":'||$3||'}')::jsonb + when $3 is null then (left($1::text, -1)||',"'||$2||'":null}')::jsonb + else (left($1::text, -1)||',"'||$2||'":'||$3::text||'}')::jsonb end; + $$ language sql stable; + + + create or replace function add_key_to_json(jsonb, text, integer) returns jsonb as $$ + select case when $1 is null and 
$3 is null then ('{"'||$2||'":null}')::jsonb + when $1 is null or $1::text = '{}' then ('{"'||$2||'":'||$3||'}')::jsonb + when $3 is null then (left($1::text, -1)||',"'||$2||'":null}')::jsonb + else (left($1::text, -1)||',"'||$2||'":'||$3::text||'}')::jsonb end; + $$ language sql stable; + + + create or replace function add_key_to_json(jsonb, text, text) returns jsonb as $$ + select case when $1 is null and $3 is null then ('{"'||$2||'":null}')::jsonb + when $1 is null or $1::text = '{}' then ('{"'||$2||'":"'||$3||'"}')::jsonb + when $3 is null then (left($1::text, -1)||',"'||$2||'":null}')::jsonb + else (left($1::text, -1)||',"'||$2||'":"'||$3::text||'"}')::jsonb end; + $$ language sql stable; + + + create or replace function remove_key_from_json(jsonb, text) returns jsonb as $$ + with drop_key as ( + select key, value::text + from jsonb_each($1::jsonb) + where key != $2::text and value is not null + ) select ('{'||array_to_string((select array_agg('"'||key||'":'||value) from drop_key), ',')||'}')::jsonb; + $$ language sql stable; + + + create or replace function update_json(jsonb, text, jsonb) returns jsonb as $$ + select case when $1 is not null then add_key_to_json(coalesce(remove_key_from_json($1, $2), '{}'::jsonb), $2, $3) + when $3 is null then ('{"'||$2||'":null}')::jsonb + else ('{"'||$2||'":'||coalesce($3::text, 'null')::text||'}')::jsonb end; + $$ language sql stable; + + + create or replace function update_json(jsonb, text, integer) returns jsonb as $$ + select case when $1 is not null then add_key_to_json(coalesce(remove_key_from_json($1, $2), '{}'::jsonb), $2, $3) + when $3 is null then ('{"'||$2||'":null}')::jsonb + else ('{"'||$2||'":'||$3::text||'}')::jsonb end; + $$ language sql stable; + + + create or replace function update_json(jsonb, text, text) returns jsonb as $$ + select case when $1 is not null then add_key_to_json(coalesce(remove_key_from_json($1, $2), '{}'::jsonb), $2, $3) + when $3 is null then ('{"'||$2||'":null}')::jsonb + else ('{"'||$2||'":"'||$3||'"}')::jsonb end; + $$ language sql stable; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/verify_session.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/verify_session.py new file mode 100644 index 000000000..d743f1e27 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/functions/verify_session.py @@ -0,0 +1,15 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function verify_session(text, text, uuid, sessiontype) returns jsonb as $$ + -- try any members + update sessions + set lastseen = now(), + ip = $1, + useragent = $2 + where token = $3 + and type = $4 + returning get_user(ownerid); + $$ language sql volatile; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/main.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/main.py new file mode 100644 index 000000000..2164ad1a8 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/main.py @@ -0,0 +1,24 @@ +from .functions.main import run_sql as functions_run_sql +from .tables.main import run_sql as tables_run_sql +from .triggers.main import run_sql as triggers_run_sql +from .types import run_sql as types_run_sql + + +def run_sql(schema_editor): + schema_editor.execute( + """ + create extension if not exists "uuid-ossp"; + create extension if not exists "citext"; + + create table if not exists version (version text); + + create or replace 
function random_string(int) returns char as $$ + select string_agg(((string_to_array('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890', null))[floor(random()*62)+1])::text, '') + from generate_series(1, $1); + $$ language sql; + """ + ) + types_run_sql(schema_editor) + tables_run_sql(schema_editor) + functions_run_sql(schema_editor) + triggers_run_sql(schema_editor) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/__init__.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/branches.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/branches.py new file mode 100644 index 000000000..26a384950 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/branches.py @@ -0,0 +1,17 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create table branches( + repoid int references repos on delete cascade not null, + updatestamp timestamptz not null, + branch text not null, + base text, + head text not null, + authors int[] + ); + + create index branches_repoid on branches (repoid); + + create unique index branches_repoid_branch on branches (repoid, branch); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/commit_notifications.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/commit_notifications.py new file mode 100644 index 000000000..975dc80c6 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/commit_notifications.py @@ -0,0 +1,17 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create table commit_notifications( + id bigserial primary key, + commit_id bigint references commits(id) on delete cascade not null, + notification_type notifications not null, + decoration_type decorations, + created_at timestamp, + updated_at timestamp, + state commit_notification_state, + CONSTRAINT commit_notifications_commit_id_notification_type UNIQUE(commit_id, notification_type) + ); + + create index commit_notifications_commit_id on commit_notifications (commit_id); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/commits.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/commits.py new file mode 100644 index 000000000..8c2d80bec --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/commits.py @@ -0,0 +1,31 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create table commits( + commitid text not null, + id bigserial primary key, + timestamp timestamp not null, + repoid int references repos on delete cascade not null, + branch text, + pullid int, + author int references owners on delete set null, + ci_passed boolean, + updatestamp timestamp, + message text, + state commit_state, + merged boolean, + deleted boolean, + notified boolean, + version smallint, -- will be removed after migrations + parent text, + totals jsonb, + report jsonb + ); + + create unique index commits_repoid_commitid on commits (repoid, commitid); + + create index commits_repoid_timestamp_desc on commits (repoid, timestamp desc); + + create index commits_on_pull on commits (repoid, pullid) where deleted is not true; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/main.py 
b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/main.py new file mode 100644 index 000000000..d1cc1224d --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/main.py @@ -0,0 +1,21 @@ +from .branches import run_sql as branches_run_sql +from .commit_notifications import run_sql as commit_notifications_run_sql +from .commits import run_sql as commits_run_sql +from .owners import run_sql as owners_run_sql +from .pulls import run_sql as pulls_run_sql +from .reports import run_sql as reports_run_sql +from .repos import run_sql as repos_run_sql +from .sessions import run_sql as sessions_run_sql +from .users import run_sql as users_run_sql + + +def run_sql(schema_editor): + users_run_sql(schema_editor) + owners_run_sql(schema_editor) + sessions_run_sql(schema_editor) + repos_run_sql(schema_editor) + branches_run_sql(schema_editor) + pulls_run_sql(schema_editor) + commits_run_sql(schema_editor) + commit_notifications_run_sql(schema_editor) + reports_run_sql(schema_editor) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/owners.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/owners.py new file mode 100644 index 000000000..2c3fa7b6e --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/owners.py @@ -0,0 +1,51 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create table owners( + ownerid serial primary key, + service service not null, + username citext, + email text, + name text, + oauth_token text, + stripe_customer_id text, + stripe_subscription_id text, + createstamp timestamptz, + service_id text not null, + private_access boolean, + staff boolean default false, -- codecov staff + cache jsonb, -- {"stats": {}} + plan plans default null, + plan_provider plan_providers, + plan_user_count smallint, + plan_auto_activate boolean, + plan_activated_users int[], + did_trial boolean, + free smallint default 0 not null, + invoice_details text, + student boolean default false not null, + student_created_at timestamp default null, + student_updated_at timestamp default null, + -- bot int, SEE BELOW + delinquent boolean, + yaml jsonb, + updatestamp timestamp, + organizations int[], -- what teams I'm member of + admins int[], -- who can edit my billing + integration_id int, -- github integration id + permission int[] + ); + + create unique index owner_service_username on owners (service, username); + + create unique index owner_service_ids on owners (service, service_id); + + alter table owners add column bot int references owners on delete set null; + + alter table owners add column avatar_url text; + + alter table owners add column parent_service_id text; + + alter table owners add column root_parent_service_id text; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/pulls.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/pulls.py new file mode 100644 index 000000000..ca326f120 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/pulls.py @@ -0,0 +1,24 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create table pulls( + repoid int references repos on delete cascade not null, + pullid int not null, + issueid int, -- gitlab + updatestamp timestamp, + state pull_state not null default 'open', + title text, + base text, + compared_to text, + head text, + commentid text, + diff jsonb, + flare jsonb, -- only when 
pull is open + author int references owners on delete set null + ); + + create unique index pulls_repoid_pullid on pulls (repoid, pullid); + + create index pulls_repoid_state_open on pulls (repoid) where state = 'open'; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/reports.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/reports.py new file mode 100644 index 000000000..84291c643 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/reports.py @@ -0,0 +1,142 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + -- EOF + -- + -- Create model CommitReport + -- + CREATE TABLE "reports_commitreport" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "commit_id" bigint NOT NULL + ); + -- + -- Create model ReportDetails + -- + CREATE TABLE "reports_reportdetails" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "files_array" jsonb[] NOT NULL, + "report_id" bigint NOT NULL UNIQUE + ); + -- + -- Create model ReportLevelTotals + -- + CREATE TABLE "reports_reportleveltotals" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "branches" integer NOT NULL, + "coverage" numeric(7, 2) NOT NULL, + "hits" integer NOT NULL, + "lines" integer NOT NULL, + "methods" integer NOT NULL, + "misses" integer NOT NULL, + "partials" integer NOT NULL, + "files" integer NOT NULL, + "report_id" bigint NOT NULL UNIQUE + ); + -- + -- Create model ReportSession + -- + CREATE TABLE "reports_upload" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "build_code" text NULL, + "build_url" text NULL, + "env" jsonb NULL, + "job_code" text NULL, + "name" varchar(100) NULL, + "provider" varchar(50) NULL, + "state" varchar(100) NOT NULL, + "storage_path" text NOT NULL, + "order_number" integer NULL, + "upload_extras" jsonb NOT NULL, + "upload_type" varchar(100) NOT NULL + ); + -- + -- Create model ReportSessionError + -- + CREATE TABLE "reports_uploaderror" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "error_code" varchar(100) NOT NULL, + "error_params" jsonb NOT NULL, + "upload_id" bigint NOT NULL + ); + -- + -- Create model ReportSessionFlagMembership + -- + CREATE TABLE "reports_uploadflagmembership" ( + "id" bigserial NOT NULL PRIMARY KEY + ); + -- + -- Create model RepositoryFlag + -- + CREATE TABLE "reports_repositoryflag" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "flag_name" varchar(255) NOT NULL, + "repository_id" integer NOT NULL + ); + -- + -- Create model SessionLevelTotals + -- + CREATE TABLE "reports_uploadleveltotals" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "branches" integer NOT NULL, + "coverage" numeric(7, 2) NOT NULL, + 
"hits" integer NOT NULL, + "lines" integer NOT NULL, + "methods" integer NOT NULL, + "misses" integer NOT NULL, + "partials" integer NOT NULL, + "files" integer NOT NULL, + "upload_id" bigint NOT NULL UNIQUE + ); + -- + -- Add field flag to reportsessionflagmembership + -- + ALTER TABLE "reports_uploadflagmembership" ADD COLUMN "flag_id" bigint NOT NULL; + -- + -- Add field report_session to reportsessionflagmembership + -- + ALTER TABLE "reports_uploadflagmembership" ADD COLUMN "upload_id" bigint NOT NULL; + -- + -- Add field flags to reportsession + -- + -- + -- Add field report to reportsession + -- + ALTER TABLE "reports_upload" ADD COLUMN "report_id" bigint NOT NULL; + ALTER TABLE "reports_commitreport" ADD CONSTRAINT "reports_commitreport_commit_id_06d0bd39_fk_commits_id" FOREIGN KEY ("commit_id") REFERENCES "commits" ("id") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_commitreport_commit_id_06d0bd39" ON "reports_commitreport" ("commit_id"); + ALTER TABLE "reports_reportdetails" ADD CONSTRAINT "reports_reportdetail_report_id_4681bfd3_fk_reports_c" FOREIGN KEY ("report_id") REFERENCES "reports_commitreport" ("id") DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE "reports_reportleveltotals" ADD CONSTRAINT "reports_reportlevelt_report_id_b690dffa_fk_reports_c" FOREIGN KEY ("report_id") REFERENCES "reports_commitreport" ("id") DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE "reports_uploaderror" ADD CONSTRAINT "reports_reportsessio_report_session_id_bb6563f1_fk_reports_r" FOREIGN KEY ("upload_id") REFERENCES "reports_upload" ("id") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_uploaderror_report_session_id_bb6563f1" ON "reports_uploaderror" ("upload_id"); + ALTER TABLE "reports_repositoryflag" ADD CONSTRAINT "reports_repositoryflag_repository_id_9b64b64c_fk_repos_repoid" FOREIGN KEY ("repository_id") REFERENCES "repos" ("repoid") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_repositoryflag_repository_id_9b64b64c" ON "reports_repositoryflag" ("repository_id"); + ALTER TABLE "reports_uploadleveltotals" ADD CONSTRAINT "reports_sessionlevel_report_session_id_e2cd6669_fk_reports_r" FOREIGN KEY ("upload_id") REFERENCES "reports_upload" ("id") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_uploadflagmembership_flag_id_59edee69" ON "reports_uploadflagmembership" ("flag_id"); + ALTER TABLE "reports_uploadflagmembership" ADD CONSTRAINT "reports_reportsessio_flag_id_59edee69_fk_reports_r" FOREIGN KEY ("flag_id") REFERENCES "reports_repositoryflag" ("id") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_uploadflagmembership_report_session_id_7d7f9546" ON "reports_uploadflagmembership" ("upload_id"); + ALTER TABLE "reports_uploadflagmembership" ADD CONSTRAINT "reports_reportsessio_report_session_id_7d7f9546_fk_reports_r" FOREIGN KEY ("upload_id") REFERENCES "reports_upload" ("id") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_upload_report_id_f6b4ffae" ON "reports_upload" ("report_id"); + ALTER TABLE "reports_upload" ADD CONSTRAINT "reports_reportsessio_report_id_f6b4ffae_fk_reports_c" FOREIGN KEY ("report_id") REFERENCES "reports_commitreport" ("id") DEFERRABLE INITIALLY DEFERRED; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/repos.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/repos.py new file mode 100644 index 000000000..af83a9f52 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/repos.py @@ -0,0 +1,31 @@ +def run_sql(schema_editor): + 
schema_editor.execute( + """ + create table repos( + repoid serial primary key, + ownerid int references owners on delete cascade not null, + service_id text not null, + name citext, + private boolean not null, + branch text default 'master' not null, + upload_token uuid unique default uuid_generate_v4(), + image_token text default random_string(10), + updatestamp timestamptz, + language languages, + active boolean, + deleted boolean default false not null, + activated boolean default false, + bot int references owners on delete set null, + yaml jsonb, + cache jsonb, -- {"totals": {}, "trends": [], "commit": {}, "yaml": ""} + hookid text, + using_integration boolean -- using github integration + ); + + create unique index repos_slug on repos (ownerid, name); + + create unique index repos_service_ids on repos (ownerid, service_id); + + alter table repos add column forkid int references repos; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/sessions.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/sessions.py new file mode 100644 index 000000000..9c17b22e6 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/sessions.py @@ -0,0 +1,15 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create table sessions( + sessionid serial primary key, + token uuid unique default uuid_generate_v4() not null, + name text, + ownerid int references owners on delete cascade not null, + type sessiontype not null, + lastseen timestamptz, + useragent text, + ip text + ); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/users.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/users.py new file mode 100644 index 000000000..7c0528803 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/tables/users.py @@ -0,0 +1,15 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + CREATE TABLE IF NOT EXISTS "users" ( + "id" bigint NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + "external_id" uuid NOT NULL UNIQUE, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "email" citext NULL, + "name" text NULL, + "is_staff" boolean NULL, + "is_superuser" boolean NULL + ); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/__init__.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/branches.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/branches.py new file mode 100644 index 000000000..2d377df3b --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/branches.py @@ -0,0 +1,32 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function branches_update() returns trigger as $$ + declare _ownerid int; + begin + -- update repos cache if main branch + update repos + set updatestamp = now(), + cache = update_json(cache::jsonb, 'commit', get_commit_minimum(new.repoid, new.head)::jsonb) + where repoid = new.repoid + and branch = new.branch + returning ownerid into _ownerid; + + if found then + -- default branch updated, so we can update the owners timestamp + -- to refresh the team list + update owners + set updatestamp=now() + where 
ownerid=_ownerid; + end if; + + return null; + end; + $$ language plpgsql; + + create trigger branch_update after update on branches + for each row + when (new.head is distinct from old.head) + execute procedure branches_update(); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/commits.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/commits.py new file mode 100644 index 000000000..231d9e156 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/commits.py @@ -0,0 +1,108 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function commits_update_heads() returns trigger as $$ + begin + + if new.pullid is not null and new.merged is not true then + -- update head of pulls + update pulls p + set updatestamp = now(), + head = case when head is not null + and (select timestamp > new.timestamp + from commits c + where c.repoid=new.repoid + and c.commitid=p.head + and c.deleted is not true + limit 1) + then head + else new.commitid + end, + author = coalesce(author, new.author) + where repoid = new.repoid + and pullid = new.pullid; + + end if; + + -- update head of branches + if new.branch is not null then + update branches + set updatestamp = now(), + authors = array_append_unique(coalesce(authors, '{}'::int[]), new.author), + head = case + when head is null then new.commitid + when ( + head != new.commitid + and new.timestamp >= coalesce((select timestamp + from commits + where commitid=head + and deleted is not true + and repoid=new.repoid + limit 1), '-infinity'::timestamp) + ) then new.commitid + else head end + where repoid = new.repoid + and branch = new.branch; + if not found then + insert into branches (repoid, updatestamp, branch, head, authors) + values (new.repoid, new.timestamp, new.branch, new.commitid, + case when new.author is not null then array[new.author] else null end); + end if; + end if; + + return null; + end; + $$ language plpgsql; + + create trigger commits_update_heads after update on commits + for each row + when (( + new.deleted is distinct from old.deleted + ) or ( + new.state = 'complete'::commit_state + and new.deleted is not true + and + ( + new.state is distinct from old.state + or new.pullid is distinct from old.pullid + or new.merged is distinct from old.merged + or new.branch is distinct from old.branch + ) + )) + execute procedure commits_update_heads(); + + + create or replace function commits_insert_pr_branch() returns trigger as $$ + begin + if new.pullid is not null and new.merged is not true then + begin + insert into pulls (repoid, pullid, author, head) + values (new.repoid, new.pullid, new.author, new.commitid); + exception when unique_violation then + end; + end if; + + if new.branch is not null then + begin + insert into branches (repoid, updatestamp, branch, authors, head) + values (new.repoid, new.timestamp, + new.branch, + case when new.author is not null then array[new.author] else null end, + new.commitid); + exception when unique_violation then + end; + end if; + + update repos + set updatestamp=now() + where repoid=new.repoid; + + return null; + end; + $$ language plpgsql; + + create trigger commits_insert_pr_branch after insert on commits + for each row + execute procedure commits_insert_pr_branch(); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/main.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/main.py new file mode 
100644 index 000000000..1e00e5c55 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/main.py @@ -0,0 +1,13 @@ +from .branches import run_sql as branches_run_sql +from .commits import run_sql as commits_run_sql +from .owners import run_sql as owners_run_sql +from .pulls import run_sql as pulls_run_sql +from .repos import run_sql as repos_run_sql + + +def run_sql(schema_editor): + commits_run_sql(schema_editor) + branches_run_sql(schema_editor) + owners_run_sql(schema_editor) + repos_run_sql(schema_editor) + pulls_run_sql(schema_editor) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/owners.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/owners.py new file mode 100644 index 000000000..4f47270e5 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/owners.py @@ -0,0 +1,97 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function owner_yaml_updated() returns trigger as $$ + begin + if (new.yaml->'codecov'->'bot')::citext is distinct from 'null' then + new.bot = coalesce( + get_ownerid_if_member( + new.service, + (new.yaml->'codecov'->>'bot')::citext, + new.ownerid + ), + old.bot + ); + else + new.bot = null; + end if; + + -- update repo branches + update repos r + set branch = coalesce((r.yaml->'codecov'->>'branch'), (new.yaml->'codecov'->>'branch'), branch) + where ownerid = new.ownerid; + + return new; + end; + $$ language plpgsql; + + create trigger owner_yaml_updated before update on owners + for each row + when ( + ((new.yaml->'codecov'->>'bot')::text is distinct from (old.yaml->'codecov'->>'bot')::text) + or ((new.yaml->'codecov'->>'branch')::text is distinct from (old.yaml->'codecov'->>'branch')::text) + ) + execute procedure owner_yaml_updated(); + + + create or replace function owner_cache_state_update() returns trigger as $$ + declare _ownerid int; + begin + -- update cache of number of repos + for _ownerid in (select unnest from unnest(new.organizations)) loop + update owners o + set cache=update_json(cache, 'stats', update_json(cache->'stats', 'users', (select count(*) + from owners + where organizations @> array[_ownerid])::int)) + where ownerid=_ownerid; + end loop; + return null; + end; + $$ language plpgsql; + + create trigger owner_cache_state_update after update on owners + for each row + when (new.organizations is distinct from old.organizations) + execute procedure owner_cache_state_update(); + + create trigger owner_cache_state_insert after insert on owners + for each row + execute procedure owner_cache_state_update(); + + -- clear the user sessions when the token is set to null, requiring login + create or replace function owner_token_clered() returns trigger as $$ + begin + delete from sessions where ownerid=new.ownerid and type='login'; + return new; + end; + $$ language plpgsql; + + create trigger owner_token_clered after update on owners + for each row + when (new.oauth_token is distinct from old.oauth_token and new.oauth_token is null) + execute procedure owner_token_clered(); + + + create or replace function owners_before_insert_or_update() returns trigger as $$ + begin + -- user has changed name or deleted and invalidate sessions + with _owners as (update owners + set username = null + where service = new.service + and username = new.username::citext + returning ownerid) + delete from sessions where ownerid in (select ownerid from _owners); + return new; + end; + $$ language plpgsql; + 
+ create trigger owners_before_insert before insert on owners + for each row + execute procedure owners_before_insert_or_update(); + + create trigger owners_before_update before update on owners + for each row + when (new.username is not null and new.username is distinct from old.username) + execute procedure owners_before_insert_or_update(); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/pulls.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/pulls.py new file mode 100644 index 000000000..39c0e8afa --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/pulls.py @@ -0,0 +1,17 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function pulls_drop_flare() returns trigger as $$ + begin + new.flare = null; + return new; + end; + $$ language plpgsql; + + + create trigger pulls_before_update_drop_flare before update on pulls + for each row + when (new.state != 'open'::pull_state) + execute procedure pulls_drop_flare(); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/repos.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/repos.py new file mode 100644 index 000000000..c99307a4e --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/triggers/repos.py @@ -0,0 +1,73 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function repo_yaml_update() returns trigger as $$ + declare _service service; + declare _branch text; + begin + select service, (yaml->'codecov'->>'branch') into _service, _branch + from owners + where ownerid=new.ownerid + limit 1; + + -- update repo bot and branch + update repos + set bot = case when (yaml->'codecov'->>'bot') is not null + then coalesce(get_ownerid_if_member(_service, (yaml->'codecov'->>'bot')::citext, ownerid), bot) + else null end, + branch = coalesce((yaml->'codecov'->>'branch'), _branch, branch) + where repoid=new.repoid; + return null; + end; + $$ language plpgsql; + + create trigger repo_yaml_update after update on repos + for each row + when ( + ((new.yaml->'codecov'->>'bot')::text is distinct from (old.yaml->'codecov'->>'bot')::text) + or ((new.yaml->'codecov'->>'branch')::text is distinct from (old.yaml->'codecov'->>'branch')::text) + ) + execute procedure repo_yaml_update(); + + + create or replace function repo_cache_state_update() returns trigger as $$ + begin + -- update cache of number of repos + update owners o + set cache=update_json(cache, 'stats', update_json(cache->'stats', 'repos', (select count(*) from repos r where r.ownerid=o.ownerid and active)::int)), + updatestamp=now() + where ownerid=new.ownerid; + return null; + end; + $$ language plpgsql; + + create trigger repo_cache_state_update after update on repos + for each row + when (new.active is distinct from old.active) + execute procedure repo_cache_state_update(); + + + create or replace function repos_before_insert_or_update() returns trigger as $$ + begin + -- repo name changed or deleted + update repos + set name = null, + deleted = true, + active = false, + activated = false + where ownerid = new.ownerid + and name = new.name; + return new; + end; + $$ language plpgsql; + + create trigger repos_before_insert before insert on repos + for each row + execute procedure repos_before_insert_or_update(); + + create trigger repos_before_update before update on repos + for each row + when (new.name is not null and 
new.name is distinct from old.name) + execute procedure repos_before_insert_or_update(); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/types.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/types.py new file mode 100644 index 000000000..0e799c999 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/main/types.py @@ -0,0 +1,26 @@ +def run_sql(schema_editor): + schema_editor.execute( + """ + create type service as enum ('github', 'bitbucket', 'gitlab', 'github_enterprise', 'gitlab_enterprise', 'bitbucket_server'); + + create type plans as enum('5m', '5y', '25m', '25y', '50m', '50y', '100m', '100y', '250m', '250y', '500m', '500y', '1000m', '1000y', '1m', '1y', + 'v4-10m', 'v4-10y', 'v4-20m', 'v4-20y', 'v4-50m', 'v4-50y', 'v4-125m', 'v4-125y', 'v4-300m', 'v4-300y', + 'users', 'users-inappm', 'users-inappy', 'users-pr-inappm', 'users-pr-inappy', 'users-free'); + + create type sessiontype as enum('api', 'login'); + + create type languages as enum('javascript', 'shell', 'python', 'ruby', 'perl', 'dart', 'java', 'c', 'clojure', 'd', 'fortran', 'go', 'groovy', 'kotlin', 'php', 'r', 'scala', 'swift', 'objective-c', 'xtend'); + + create type pull_state as enum('open', 'closed', 'merged'); + + create type commit_state as enum('pending', 'complete', 'error', 'skipped'); + + create type plan_providers as enum('github'); + + create type notifications as enum('comment', 'gitter', 'hipchat', 'irc', 'slack', 'status_changes', 'status_patch', 'status_project', 'webhook', 'checks_patch', 'checks_project', 'checks_changes'); + + create type decorations as enum('standard', 'upgrade'); + + create type commit_notification_state as enum('pending', 'success', 'error'); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/main.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/main.py new file mode 100644 index 000000000..52bf531fe --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/main.py @@ -0,0 +1,55 @@ +from .v440 import run_sql as v440_run_sql +from .v442 import run_sql as v442_run_sql +from .v443 import run_sql as v443_run_sql +from .v446 import run_sql as v446_run_sql +from .v447 import run_sql as v447_run_sql +from .v448 import run_sql as v448_run_sql +from .v449 import run_sql as v449_run_sql +from .v451 import run_sql as v451_run_sql +from .v452 import run_sql as v452_run_sql +from .v453 import run_sql as v453_run_sql +from .v454 import run_sql as v454_run_sql +from .v455 import run_sql as v455_run_sql +from .v461 import run_sql as v461_run_sql +from .v4410 import run_sql as v4410_run_sql +from .v4510 import run_sql as v4510_run_sql + +UPGRADE_MIGRATIONS_BY_VERSION = ( + ((4, 4, 0), v440_run_sql), + ((4, 4, 2), v442_run_sql), + ((4, 4, 3), v443_run_sql), + ((4, 4, 6), v446_run_sql), + ((4, 4, 7), v447_run_sql), + ((4, 4, 8), v448_run_sql), + ((4, 4, 9), v449_run_sql), + ((4, 4, 10), v4410_run_sql), + ((4, 5, 1), v451_run_sql), + ((4, 5, 2), v452_run_sql), + ((4, 5, 3), v453_run_sql), + ((4, 5, 4), v454_run_sql), + ((4, 5, 5), v455_run_sql), + ((4, 5, 10), v4510_run_sql), + ((4, 6, 1), v461_run_sql), +) + + +def _version_normalize(version): + return tuple(int(x or 0) for x in version.replace("v", "").split(".")) + + +def run_sql(schema_editor, current_version): + normalized_current_version = _version_normalize(current_version) + upgrade_migration_index_to_start_from = None + + for idx, (upgrade_version, _) in 
enumerate(UPGRADE_MIGRATIONS_BY_VERSION): + if upgrade_version > normalized_current_version: + upgrade_migration_index_to_start_from = idx + break + + if upgrade_migration_index_to_start_from is None: + return + + for (_, upgrade_migration) in UPGRADE_MIGRATIONS_BY_VERSION[ + upgrade_migration_index_to_start_from: + ]: + upgrade_migration(schema_editor) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v440.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v440.py new file mode 100644 index 000000000..ba30a10f7 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v440.py @@ -0,0 +1,353 @@ +from ..main.functions.aggregates import run_sql as aggregates_run_sql +from ..main.functions.coverage import run_sql as coverage_run_sql +from ..main.functions.get_access_token import run_sql as get_access_token_run_sql +from ..main.functions.get_author import run_sql as get_author_run_sql +from ..main.functions.get_commit import run_sql as get_commit_run_sql +from ..main.functions.get_customer import run_sql as get_customer_run_sql +from ..main.functions.get_graph_for import run_sql as get_graph_for_run_sql +from ..main.functions.get_ownerid import run_sql as get_ownerid_run_sql +from ..main.functions.get_repo import run_sql as get_repo_run_sql +from ..main.functions.get_user import run_sql as get_user_run_sql +from ..main.functions.insert_commit import run_sql as insert_commit_run_sql +from ..main.functions.refresh_repos import run_sql as refresh_repos_run_sql +from ..main.functions.update_json import run_sql as update_json_run_sql +from ..main.functions.verify_session import run_sql as verify_session_run_sql + + +# v4.4.0 +def run_sql(schema_editor): + schema_editor.execute( + """ + ---- Column Updates ----- + drop trigger repo_yaml_update on repos; + drop trigger owner_yaml_updated on owners; + + alter table owners drop column if exists errors; + alter table owners drop column if exists yaml_repoid; + alter table commits drop column if exists logs; + alter table commits drop column if exists archived; + alter table pulls rename column totals to diff; + alter table pulls drop column if exists changes; + alter table pulls drop column if exists base_branch; + alter table pulls drop column if exists head_branch; + alter table repos alter column yaml set data type jsonb; + alter table repos alter column cache set data type jsonb; + alter table owners alter column cache set data type jsonb; + alter table owners alter column yaml set data type jsonb; + alter table commits alter column totals set data type jsonb; + alter table commits alter column report set data type jsonb; + alter table pulls alter column diff set data type jsonb; + alter table pulls alter column flare set data type jsonb; + alter table owners alter column integration_id set data type integer; + + create trigger repo_yaml_update after update on repos + for each row + when ( + ((new.yaml->'codecov'->>'bot')::text is distinct from (old.yaml->'codecov'->>'bot')::text) + or ((new.yaml->'codecov'->>'branch')::text is distinct from (old.yaml->'codecov'->>'branch')::text) + ) + execute procedure repo_yaml_update(); + + + create trigger owner_yaml_updated before update on owners + for each row + when ( + ((new.yaml->'codecov'->>'bot')::text is distinct from (old.yaml->'codecov'->>'bot')::text) + or ((new.yaml->'codecov'->>'branch')::text is distinct from (old.yaml->'codecov'->>'branch')::text) + ) + execute procedure owner_yaml_updated(); + + + drop trigger
pulls_before_insert on pulls; + drop function pulls_insert(); + drop trigger pulls_before_update on pulls; + drop function pulls_update(); + + create or replace function pulls_drop_flare() returns trigger as $$ + begin + new.flare = null; + return new; + end; + $$ language plpgsql; + + create trigger pulls_before_update_drop_flare before update on pulls + for each row + when (new.state != 'open'::pull_state) + execute procedure pulls_drop_flare(); + + ---- Function Changes ----- + drop function if exists get_new_repos(int); + drop function if exists get_pull(int, int); + drop function if exists coverage(service, text, text, text, text); + drop function if exists extract_totals(version smallint, files json, sessionids integer[]); + drop function if exists get_commit(repoid, _commitid, path, tree_only); + drop function if exists get_commit_on_branch(integer, text, text, boolean); + drop function if exists get_totals_for_file(smallint, json); + drop function if exists refresh_teams(service, json, integer); + drop function if exists get_commit(integer, text, text, boolean); + + -- insert_commit.sql + drop function if exists insert_commit(integer, text, text, integer, json); + """ + ) + insert_commit_run_sql(schema_editor) + + schema_editor.execute( + """ + -- aggregates.sql + drop function if exists _pop_first_as_json(json[]) cascade; + drop function if exists _max_coverage(json[]) cascade; + drop function if exists _min_coverage(json[]) cascade; + drop function _max_coverage(json[], json); + drop function _min_coverage(json[], json); + drop aggregate agg_totals(json); + drop function _agg_report_totals(text[], json); + """ + ) + aggregates_run_sql(schema_editor) + + schema_editor.execute( + """ + -- coverage.sql + drop function if exists get_coverage(service,citext,citext,citext); + """ + ) + coverage_run_sql(schema_editor) + + schema_editor.execute( + """ + -- get_access_token.sql + drop function if exists get_access_token(int); + """ + ) + get_access_token_run_sql(schema_editor) + + schema_editor.execute( + """ + -- get_author.sql + drop function if exists get_author(int); + """ + ) + get_author_run_sql(schema_editor) + + schema_editor.execute( + """ + -- get_commit.sql + drop function if exists get_commit_totals(int, text); + drop function if exists get_commit_totals(int, text, text); + drop function if exists get_commit(repoid integer, _commitid text); + drop function if exists get_commit_minimum(int, text); + drop function if exists get_ + commit_on_branch(int, text); + """ + ) + get_commit_run_sql(schema_editor) + + schema_editor.execute( + """ + -- get_customer.sql + drop function if exists get_customer(int); + """ + ) + get_customer_run_sql(schema_editor) + + schema_editor.execute( + """ + -- get_graph_for.sql + drop function if exists sum_of_file_totals_filtering_sessionids(json, int[]); + drop function if exists extract_totals(files json, sessionids int[]); + drop function if exists list_sessionid_by_filtering_flags(sessions json, flags text[]); + drop function if exists total_list_to_json(totals text[]); + drop function if exists sum_session_totals(sessions json, flags text[]); + drop function if exists get_graph_for_flare_pull(int, text, text, text[]); + drop function if exists get_graph_for_flare_commit(int, text, text, text[]); + drop function if exists get_graph_for_flare_branch(int, text, text, text[]); + drop function if exists get_graph_for_totals_pull(int, text, text, text[]); + drop function if exists get_graph_for_totals_commit(int, text, text, text[]); + drop function if 
exists get_graph_for_totals_branch(int, text, text, text[]); + drop function if exists get_graph_for_commits_pull(int, text, text, text[]); + drop function if exists get_graph_for_commits_branch(int, text, text, text[]); + """ + ) + get_graph_for_run_sql(schema_editor) + + schema_editor.execute( + """ + -- get_ownerid.sql + drop function if exists get_owner(service, citext); + drop function if exists get_teams(service, integer[]); + """ + ) + get_ownerid_run_sql(schema_editor) + + schema_editor.execute( + """ + -- get_repo.sql + drop function if exists get_repo(int); + drop function if exists get_repo(int, citext); + drop function if exists get_repo_by_token(uuid); + drop function if exists get_repos(int, int, int); + """ + ) + get_repo_run_sql(schema_editor) + + schema_editor.execute( + """ + -- get_user.sql + drop function if exists get_user(int); + drop function if exists get_username(int); + drop function if exists get_users(int[]); + """ + ) + get_user_run_sql(schema_editor) + + schema_editor.execute( + """ + -- refresh_repos.sql + drop function if exists refresh_teams(service, json); + drop function if exists refresh_repos(service, json, int, boolean); + """ + ) + refresh_repos_run_sql(schema_editor) + + schema_editor.execute( + """ + -- update_json.sql + drop function if exists add_key_to_json(json, text, json); + drop function if exists add_key_to_json(json, text, integer); + drop function if exists add_key_to_json(json, text, text); + drop function if exists remove_key_from_json(json, text); + drop function if exists update_json(json, text, json); + drop function if exists update_json(json, text, integer); + drop function if exists update_json(json, text, text); + """ + ) + update_json_run_sql(schema_editor) + + schema_editor.execute( + """ + -- verify_session.sql + drop function if exists verify_session(text, text, uuid, sessiontype); + """ + ) + verify_session_run_sql(schema_editor) + + schema_editor.execute( + """ + -- Trigger Changes -- + create or replace function commits_update_heads() returns trigger as $$ + begin + + if new.pullid is not null and new.merged is not true then + -- update head of pulls + update pulls p + set updatestamp = now(), + head = case when head is not null + and (select timestamp > new.timestamp + from commits c + where c.repoid=new.repoid + and c.commitid=p.head + and c.deleted is not true + limit 1) + then head + else new.commitid + end, + author = coalesce(author, new.author) + where repoid = new.repoid + and pullid = new.pullid; + + end if; + + -- update head of branches + if new.branch is not null then + update branches + set updatestamp = now(), + authors = array_append_unique(coalesce(authors, '{}'::int[]), new.author), + head = case + when head is null then new.commitid + when ( + head != new.commitid + and new.timestamp >= coalesce((select timestamp + from commits + where commitid=head + and deleted is not true + and repoid=new.repoid + limit 1), '-infinity'::timestamp) + ) then new.commitid + else head end + where repoid = new.repoid + and branch = new.branch; + if not found then + insert into branches (repoid, updatestamp, branch, head, authors) + values (new.repoid, new.timestamp, new.branch, new.commitid, + case when new.author is not null then array[new.author] else null end); + end if; + end if; + + return null; + end; + $$ language plpgsql; + + create or replace function branches_update() returns trigger as $$ + declare _ownerid int; + begin + -- update repos cache if main branch + update repos + set updatestamp = now(), + cache = 
update_json(cache::jsonb, 'commit', get_commit_minimum(new.repoid, new.head)::jsonb) + where repoid = new.repoid + and branch = new.branch + returning ownerid into _ownerid; + + if found then + -- default branch updated, so we can update the owners timestamp + -- to refresh the team list + update owners + set updatestamp=now() + where ownerid=_ownerid; + end if; + + return null; + end; + $$ language plpgsql; + + + create or replace function repos_before_insert_or_update() returns trigger as $$ + begin + -- repo name changed or deleted + update repos + set name = null, + deleted = true, + active = false, + activated = false + where ownerid = new.ownerid + and name = new.name; + return new; + end; + $$ language plpgsql; + + + create index commits_on_pull on commits (repoid, pullid) where deleted is not true; + + alter table commits drop column chunks; + + drop trigger commits_update_heads on commits; + + create trigger commits_update_heads after update on commits + for each row + when (( + new.deleted is distinct from old.deleted + ) or ( + new.state = 'complete'::commit_state + and new.deleted is not true + and + ( + new.state is distinct from old.state + or new.pullid is distinct from old.pullid + or new.merged is distinct from old.merged + or new.branch is distinct from old.branch + ) + )) + execute procedure commits_update_heads(); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v4410.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v4410.py new file mode 100644 index 000000000..4223b56e6 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v4410.py @@ -0,0 +1,29 @@ +# v4.4.10 +def run_sql(schema_editor): + schema_editor.execute( + """ + create or replace function owner_yaml_updated() returns trigger as $$ + begin + if (new.yaml->'codecov'->'bot')::citext is distinct from 'null' then + new.bot = coalesce( + get_ownerid_if_member( + new.service, + (new.yaml->'codecov'->>'bot')::citext, + new.ownerid + ), + old.bot + ); + else + new.bot = null; + end if; + + -- update repo branches + update repos r + set branch = coalesce((r.yaml->'codecov'->>'branch'), (new.yaml->'codecov'->>'branch'), branch) + where ownerid = new.ownerid; + + return new; + end; + $$ language plpgsql; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v442.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v442.py new file mode 100644 index 000000000..3bad7a519 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v442.py @@ -0,0 +1,104 @@ +# v4.4.2 +def run_sql(schema_editor): + schema_editor.execute( + """ + ---- Column Updates ----- + alter table owners add column avatar_url text; + + + ---- Function Changes ----- + + -- get_ownerid.sql + create or replace function get_owner(service, citext) returns jsonb as $$ + with data as ( + select service_id, service, ownerid::text, username, avatar_url, + updatestamp, plan, name, integration_id, free, + plan_activated_users, plan_auto_activate, plan_user_count + from owners + where service=$1 + and username=$2::citext + limit 1 + ) select to_jsonb(data) + from data + limit 1; + $$ language sql stable strict; + + -- get_ownerid.sql + create or replace function get_or_create_owner(service, text, text, text) returns int as $$ + declare _ownerid int; + begin + update owners + set username = $3, avatar_url = $4 + where service = $1 + and service_id = $2 + returning ownerid into _ownerid; 
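+ -- if the update above matched no existing (service, service_id) row, fall through and insert a new owner instead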
+ + if not found then + insert into owners (service, service_id, username, avatar_url) + values ($1, $2, $3, $4) + returning ownerid into _ownerid; + end if; + + return _ownerid; + + end; + $$ language plpgsql volatile; + + -- get_user.sql + create or replace function get_user(int) returns jsonb as $$ + with data as ( + select ownerid::text, private_access, staff, service, service_id, + username, organizations, avatar_url, + oauth_token, plan, permission, + free, email, name, createstamp + from owners + where ownerid=$1 + limit 1 + ) select to_jsonb(data) from data; + $$ language sql stable; + + -- get_user.sql + create or replace function get_users(int[]) returns jsonb as $$ + with data as ( + select service, service_id::text, ownerid::text, username, name, email, avatar_url + from owners + where array[ownerid] <@ $1 + limit array_length($1, 1) + ) select jsonb_agg(data) + from data + limit array_length($1, 1); + $$ language sql stable strict; + + -- refresh_repos.sql + create or replace function refresh_teams(service, jsonb) returns int[] as $$ + declare ownerids int[]; + declare _ownerid int; + declare _team record; + begin + for _team in select d from jsonb_array_elements($2) d loop + update owners o + set username = (_team.d->>'username')::citext, + name = (_team.d->>'name')::text, + email = (_team.d->>'email')::text, + avatar_url = (_team.d->>'avatar_url')::text, + updatestamp = now() + where service = $1 + and service_id = (_team.d->>'id')::text + returning ownerid into _ownerid; + + if not found then + insert into owners (service, service_id, username, name, email, avatar_url) + values ($1, (_team.d->>'id')::text, (_team.d->>'username')::citext, (_team.d->>'name')::text, (_team.d->>'email')::text, (_team.d->>'avatar_url')::text) + returning ownerid into _ownerid; + end if; + + select array_append(ownerids, _ownerid) into ownerids; + + end loop; + + return ownerids; + + end; + $$ language plpgsql volatile strict; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v443.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v443.py new file mode 100644 index 000000000..1e60eb31a --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v443.py @@ -0,0 +1,191 @@ +# v4.4.3 +def run_sql(schema_editor): + schema_editor.execute( + """ + ---- Table Changes ----- + alter table owners add column parent_service_id text; + + + ----- Functions Created ----- + + -- get_customer.sql + create or replace function get_gitlab_root_group(int) returns jsonb as $$ + with recursive tree as ( + select o.service_id, + o.parent_service_id, + o.ownerid, + 1 as depth + from owners o + where o.ownerid = $1 + and o.service = 'gitlab' + and o.parent_service_id is not null + union all + select o.service_id, + o.parent_service_id, + o.ownerid, + depth + 1 as depth + from tree t + join owners o + on o.service_id = t.parent_service_id + where depth <= 20 + ), data as ( + select t.ownerid, + t.service_id + from tree t + where t.parent_service_id is null + ) + select to_jsonb(data) from data limit 1; + $$ language sql stable strict; + + -- get_customer.sql + create or replace function get_gitlab_repos_activated(int, text) returns int as $$ + declare _repos_activated int; + declare _decendents_owner_ids int[]; + begin + select array( + with recursive tree as ( + select ownerid, + service_id, + array[]::text[] as ancestors_service_id, + 1 as depth + from owners + where parent_service_id is null + and service = 'gitlab' + and ownerid = 
$1 + union all + select owners.ownerid, + owners.service_id, + tree.ancestors_service_id || owners.parent_service_id, + depth + 1 as depth + from owners, tree + where owners.parent_service_id = tree.service_id + and depth <= 20 + ) + select ownerid + from tree + where $2 = any(tree.ancestors_service_id) + ) into _decendents_owner_ids; + + select count(*) into _repos_activated + from repos + where ownerid in (select unnest(array_append(_decendents_owner_ids, $1))) + and private + and activated; + + return _repos_activated; + end; + $$ language plpgsql stable; + + -- get_customer.sql + create or replace function get_repos_activated(int) returns int as $$ + declare _repos_activated int; + declare _service text; + declare _service_id text; + begin + select o.service, o.service_id into _service, _service_id + from owners o where o.ownerid = $1; + + if _service = 'gitlab' then + select get_gitlab_repos_activated($1, _service_id) into _repos_activated; + else + select count(*) into _repos_activated + from repos + where ownerid=$1 + and private + and activated; + end if; + + return _repos_activated; + end; + $$ language plpgsql stable; + + + ---- Functions Modified ----- + + drop function if exists get_or_create_owner(service, text, text, text); -- signature change + + -- get_customer.sql + create or replace function get_customer(int) returns jsonb as $$ + with data as ( + select t.stripe_customer_id, + t.stripe_subscription_id, + t.ownerid::text, + t.service, + t.service_id, + t.plan_user_count, + t.plan_provider, + t.plan_auto_activate, + t.plan_activated_users, + t.plan, t.email, + t.free, t.did_trial, + t.invoice_details, + get_users(t.admins) as admins, + get_repos_activated($1) as repos_activated + from owners t + where t.ownerid = $1 + limit 1 + ) select to_jsonb(data) from data limit 1; + $$ language sql stable strict; + + -- refresh_repos.sql + create or replace function refresh_teams(service, jsonb) returns int[] as $$ + declare ownerids int[]; + declare _ownerid int; + declare _team record; + begin + for _team in select d from jsonb_array_elements($2) d loop + update owners o + set username = (_team.d->>'username')::citext, + name = (_team.d->>'name')::text, + email = (_team.d->>'email')::text, + avatar_url = (_team.d->>'avatar_url')::text, + parent_service_id = (_team.d->>'parent_id')::text, + updatestamp = now() + where service = $1 + and service_id = (_team.d->>'id')::text + returning ownerid into _ownerid; + + if not found then + insert into owners (service, service_id, username, name, email, avatar_url, parent_service_id) + values ($1, + (_team.d->>'id')::text, + (_team.d->>'username')::citext, + (_team.d->>'name')::text, + (_team.d->>'email')::text, + (_team.d->>'avatar_url')::text, + (_team.d->>'parent_id')::text + ) + returning ownerid into _ownerid; + end if; + + select array_append(ownerids, _ownerid) into ownerids; + + end loop; + + return ownerids; + + end; + $$ language plpgsql volatile strict; + + -- get_ownerid.sql + create or replace function get_or_create_owner(service, text, text, text, text) returns int as $$ + declare _ownerid int; + begin + update owners + set username = $3, avatar_url = $4, parent_service_id = $5 + where service = $1 + and service_id = $2 + returning ownerid into _ownerid; + + if not found then + insert into owners (service, service_id, username, avatar_url, parent_service_id) + values ($1, $2, $3, $4, $5) + returning ownerid into _ownerid; + end if; + + return _ownerid; + + end; + $$ language plpgsql volatile; + """ + ) diff --git 
a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v446.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v446.py new file mode 100644 index 000000000..d7e4253e3 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v446.py @@ -0,0 +1,67 @@ +# v4.4.6 +def run_sql(schema_editor): + schema_editor.execute( + """ + -- used for app/tasks + create or replace function get_repo(int) returns jsonb as $$ + with d as (select o.service, o.username, o.service_id as owner_service_id, r.ownerid::text, + r.name, r.repoid::text, r.service_id, r.updatestamp, + r.branch, r.private, hookid, image_token, b.username as bot_username, + r.yaml, o.yaml as org_yaml, r.using_integration, o.plan, + (r.cache->>'yaml') as _yaml_location, + case when r.using_integration then o.integration_id else null end as integration_id, + get_access_token(coalesce(r.bot, o.bot, o.ownerid)) as token, + case when private and activated is not true and forkid is not null + then (select rr.activated from repos rr where rr.repoid = r.forkid limit 1) + else activated end as activated + from repos r + inner join owners o using (ownerid) + left join owners b ON (r.bot=b.ownerid) + where r.repoid = $1 + limit 1) select to_jsonb(d) from d; + $$ language sql stable strict; + + + -- used for app/handlers + create or replace function get_repo(int, citext) returns jsonb as $$ + with repo as ( + select r.yaml, r.name, "language", repoid::text, r.private, r.deleted, r.active, r.cache, b.username as bot_username, + r.branch, r.service_id, r.updatestamp, upload_token, image_token, hookid, using_integration, + case when private and activated is not true and forkid is not null + then (select rr.activated from repos rr where rr.repoid = r.forkid limit 1) + else activated end as activated + from repos r + left join owners b ON (r.bot=b.ownerid) + where r.ownerid = $1 and r.name = $2::citext + limit 1 + ) select to_jsonb(repo) from repo; + $$ language sql stable; + + create or replace function get_customer(int) returns jsonb as $$ + with data as ( + select t.stripe_customer_id, + t.stripe_subscription_id, + t.ownerid::text, + t.service, + t.service_id, + t.plan_user_count, + t.plan_provider, + t.plan_auto_activate, + t.plan_activated_users, + t.plan, + t.email, + t.free, + t.did_trial, + t.invoice_details, + t.yaml, + b.username as bot_username, + get_users(t.admins) as admins, + get_repos_activated($1::int) as repos_activated + from owners t + LEFT JOIN owners b ON (b.ownerid = t.bot) + where t.ownerid = $1 + limit 1 + ) select to_jsonb(data) from data limit 1; + $$ language sql stable strict; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v447.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v447.py new file mode 100644 index 000000000..e1a7d256c --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v447.py @@ -0,0 +1,25 @@ +# v4.4.7 +def run_sql(schema_editor): + schema_editor.execute( + """ + drop trigger repo_yaml_update on repos; + drop trigger owner_yaml_updated on owners; + + create trigger repo_yaml_update after update on repos + for each row + when ( + ((new.yaml->'codecov'->>'bot')::text is distinct from (old.yaml->'codecov'->>'bot')::text) + or ((new.yaml->'codecov'->>'branch')::text is distinct from (old.yaml->'codecov'->>'branch')::text) + ) + execute procedure repo_yaml_update(); + + + create trigger owner_yaml_updated before update on owners + for each row + when ( 
+ ((new.yaml->'codecov'->>'bot')::text is distinct from (old.yaml->'codecov'->>'bot')::text) + or ((new.yaml->'codecov'->>'branch')::text is distinct from (old.yaml->'codecov'->>'branch')::text) + ) + execute procedure owner_yaml_updated(); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v448.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v448.py new file mode 100644 index 000000000..11265e621 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v448.py @@ -0,0 +1,27 @@ +# v4.4.8 +def run_sql(schema_editor): + schema_editor.execute( + """ + --- transaction friendly enum column upates. See: https://stackoverflow.com/questions/1771543/adding-a-new-value-to-an-existing-enum-type#7834949 -- + + + -- rename the old enum -- rename the old enum + alter type plans rename to plans__; -- alter type plans rename to plans__; + -- create the new enum -- -- create the new enum + create type plans as enum('5m', '5y', '25m', '25y', '50m', '50y', '100m', '100y', '250m', '250y', '500m', '500y', '1000m', '1000y', '1m', '1y', -- create type plans as enum('5m', '5y', '25m', '25y', '50m', '50y', '100m', '100y', '250m', '250y', '500m', '500y', '1000m', '1000y', '1m', '1y', + 'v4-10m', 'v4-10y', 'v4-20m', 'v4-20y', 'v4-50m', 'v4-50y', 'v4-125m', 'v4-125y', 'v4-300m', 'v4-300y', -- 'v4-10m', 'v4-10y', 'v4-20m', 'v4-20y', 'v4-50m', 'v4-50y', 'v4-125m', 'v4-125y', 'v4-300m', 'v4-300y', + 'users', 'users-inappm', 'users-inappy', 'users-free'); -- 'users', 'users-inappm', 'users-inappy', 'users-free'); + -- alter all enum columns + alter table owners + alter column plan type plans using plan::text::plans; + + + -- drop the old enum + drop type plans__; + + + ALTER TABLE ONLY owners ALTER COLUMN plan SET DEFAULT 'users-free'; -- ALTER TABLE ONLY owners ALTER COLUMN plan SET DEFAULT 'users-free'; + ALTER TABLE ONLY owners ALTER COLUMN plan_user_count SET DEFAULT 5; -- ALTER TABLE ONLY owners ALTER COLUMN plan_user_count SET DEFAULT 5; + ALTER TABLE ONLY owners ALTER COLUMN plan_auto_activate SET DEFAULT true; -- ALTER TABLE ONLY owners ALTER COLUMN plan_auto_activate SET DEFAULT true; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v449.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v449.py new file mode 100644 index 000000000..7bf480d97 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v449.py @@ -0,0 +1,41 @@ +# v4.4.9 +def run_sql(schema_editor): + schema_editor.execute( + """ + alter table owners add column student boolean null; + alter table owners add column student_updated_at timestamp; + alter table owners add column student_created_at timestamp; + + + -- new get customer to return student status + create or replace function get_customer(int) returns jsonb as $$ + with data as ( + select t.stripe_customer_id, + t.stripe_subscription_id, + t.ownerid::text, + t.service, + t.service_id, + t.plan_user_count, + t.plan_provider, + t.plan_auto_activate, + t.plan_activated_users, + t.plan, + t.email, + t.free, + t.did_trial, + t.invoice_details, + t.yaml, + t.student, + t.student_created_at, + t.student_updated_at, + b.username as bot_username, + get_users(t.admins) as admins, + get_repos_activated($1::int) as repos_activated + from owners t + LEFT JOIN owners b ON (b.ownerid = t.bot) + where t.ownerid = $1 + limit 1 + ) select to_jsonb(data) from data limit 1; + $$ language sql stable strict; + """ + ) diff 
--git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v451.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v451.py new file mode 100644 index 000000000..3369a6168 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v451.py @@ -0,0 +1,43 @@ +# 4.5.1 +def run_sql(schema_editor): + schema_editor.execute( + """ + -- create enums used by commit_notifications table + create type notifications as enum('comment', 'gitter', 'hipchat', 'irc', 'slack', 'status_changes', 'status_patch', 'status_project', 'webhook', 'checks_patch', 'checks_project', 'checks_changes'); + create type decorations as enum('standard', 'upgrade'); + create type commit_notification_state as enum('pending', 'success', 'error'); + + -- Here we're commenting out all plan related migrations below because they break on enterprise + -- these migrations have been run already for production, but can break some production + -- deployments. Specifically the setting of the plan column to a new default causes problems with + -- web's ability to migrate effectively in some scenarios. + + -- If you're starting from scratch in dev, you will need to run the below migrations manually, + -- or comment out these migrations before starting up codecov.io for the first time. + + -- This isn't ideal, and will hopefully be addressed when we move all migrations to Django. + + -- Transaction friendly enum column upates. See: https://stackoverflow.com/questions/1771543/adding-a-new-value-to-an-existing-enum-type#7834949 + -- NOTE: we will not change the plan default yet + + -- first remove the default from plan column otherwise we'll get an error below with trying to cast the default + -- alter table owners alter column plan drop default; + + -- rename the old enum + -- alter type plans rename to plans__; + + -- create the new enum adding users-pr-inappm and users-pr-inappy plans + -- create type plans as enum('5m', '5y', '25m', '25y', '50m', '50y', '100m', '100y', '250m', '250y', '500m', '500y', '1000m', '1000y', '1m', '1y', + -- 'v4-10m', 'v4-10y', 'v4-20m', 'v4-20y', 'v4-50m', 'v4-50y', 'v4-125m', 'v4-125y', 'v4-300m', 'v4-300y', + -- 'users', 'users-inappm', 'users-inappy', 'users-pr-inappm', 'users-pr-inappy', 'users-free'); + + -- use the new enum + -- alter table owners alter column plan type plans using plan::text::plans; + + + --ALTER TABLE ONLY owners ALTER COLUMN plan SET DEFAULT 'users-free'; + + -- drop the old enum + -- drop type plans__; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v4510.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v4510.py new file mode 100644 index 000000000..5659ee811 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v4510.py @@ -0,0 +1,7 @@ +# v4.5.10 +def run_sql(schema_editor): + schema_editor.execute( + """ + alter table owners add column root_parent_service_id text; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v452.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v452.py new file mode 100644 index 000000000..eec8e575d --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v452.py @@ -0,0 +1,14 @@ +# v4.5.2 +def run_sql(schema_editor): + schema_editor.execute( + """ + ALTER TABLE commits ADD COLUMN id bigint; + COMMIT; + -- EOF + CREATE SEQUENCE commits_id_seq OWNED BY commits.id; + COMMIT; + -- EOF + ALTER TABLE commits 
ALTER COLUMN id SET DEFAULT nextval('commits_id_seq'); + COMMIT; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v453.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v453.py new file mode 100644 index 000000000..21575d0ae --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v453.py @@ -0,0 +1,20 @@ +# v4.5.3 +def run_sql(schema_editor): + schema_editor.execute( + """ + CREATE UNIQUE INDEX IF NOT EXISTS commits_pkey on commits (id); + + create table commit_notifications( + id bigserial primary key, + commit_id bigint references commits(id) on delete cascade not null, + notification_type notifications not null, + decoration_type decorations, + created_at timestamp, + updated_at timestamp, + state commit_notification_state, + CONSTRAINT commit_notifications_commit_id_notification_type UNIQUE(commit_id, notification_type) + ); + + create index commit_notifications_commit_id on commit_notifications (commit_id); + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v454.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v454.py new file mode 100644 index 000000000..cb9bb707b --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v454.py @@ -0,0 +1,148 @@ +# v4.5.4 +def run_sql(schema_editor): + schema_editor.execute( + """ + ALTER TABLE commit_notifications drop CONSTRAINT IF EXISTS commit_notifications_commit_id_fkey; + ALTER TABLE commits drop CONSTRAINT IF EXISTS commits_pkey; + CREATE UNIQUE INDEX IF NOT EXISTS commits_pkey on commits (id); + ALTER TABLE commits ADD PRIMARY KEY USING INDEX commits_pkey; + ALTER TABLE commit_notifications ADD CONSTRAINT commit_notifications_commit_id_fkey FOREIGN KEY (commit_id) REFERENCES commits(id) ON DELETE CASCADE + -- EOF + BEGIN; + -- + -- Create model CommitReport + -- + CREATE TABLE "reports_commitreport" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "commit_id" bigint NOT NULL + ); + -- + -- Create model ReportDetails + -- + CREATE TABLE "reports_reportdetails" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "files_array" jsonb[] NOT NULL, + "report_id" bigint NOT NULL UNIQUE + ); + -- + -- Create model ReportLevelTotals + -- + CREATE TABLE "reports_reportleveltotals" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "branches" integer NOT NULL, + "coverage" numeric(7, 2) NOT NULL, + "hits" integer NOT NULL, + "lines" integer NOT NULL, + "methods" integer NOT NULL, + "misses" integer NOT NULL, + "partials" integer NOT NULL, + "files" integer NOT NULL, + "report_id" bigint NOT NULL UNIQUE + ); + -- + -- Create model ReportSession + -- + CREATE TABLE "reports_upload" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "build_code" text NULL, + "build_url" text NULL, + "env" jsonb NULL, + "job_code" text NULL, + "name" varchar(100) NULL, + "provider" varchar(50) NULL, + "state" varchar(100) NOT NULL, + "storage_path" text NOT NULL, + "order_number" integer NULL + ); + -- 
+ -- Create model ReportSessionError + -- + CREATE TABLE "reports_uploaderror" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "error_code" varchar(100) NOT NULL, + "error_params" jsonb NOT NULL, + "report_session_id" bigint NOT NULL + ); + -- + -- Create model ReportSessionFlagMembership + -- + CREATE TABLE "reports_uploadflagmembership" ( + "id" bigserial NOT NULL PRIMARY KEY + ); + -- + -- Create model RepositoryFlag + -- + CREATE TABLE "reports_repositoryflag" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "flag_name" varchar(255) NOT NULL, + "repository_id" integer NOT NULL + ); + -- + -- Create model SessionLevelTotals + -- + CREATE TABLE "reports_sessionleveltotals" ( + "id" bigserial NOT NULL PRIMARY KEY, + "external_id" uuid NOT NULL, + "created_at" timestamp with time zone NOT NULL, + "updated_at" timestamp with time zone NOT NULL, + "branches" integer NOT NULL, + "coverage" numeric(7, 2) NOT NULL, + "hits" integer NOT NULL, + "lines" integer NOT NULL, + "methods" integer NOT NULL, + "misses" integer NOT NULL, + "partials" integer NOT NULL, + "files" integer NOT NULL, + "report_session_id" bigint NOT NULL UNIQUE + ); + -- + -- Add field flag to reportsessionflagmembership + -- + ALTER TABLE "reports_uploadflagmembership" ADD COLUMN "flag_id" bigint NOT NULL; + -- + -- Add field report_session to reportsessionflagmembership + -- + ALTER TABLE "reports_uploadflagmembership" ADD COLUMN "report_session_id" bigint NOT NULL; + -- + -- Add field flags to reportsession + -- + -- + -- Add field report to reportsession + -- + ALTER TABLE "reports_upload" ADD COLUMN "report_id" bigint NOT NULL; + ALTER TABLE "reports_commitreport" ADD CONSTRAINT "reports_commitreport_commit_id_06d0bd39_fk_commits_id" FOREIGN KEY ("commit_id") REFERENCES "commits" ("id") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_commitreport_commit_id_06d0bd39" ON "reports_commitreport" ("commit_id"); + ALTER TABLE "reports_reportdetails" ADD CONSTRAINT "reports_reportdetail_report_id_4681bfd3_fk_reports_c" FOREIGN KEY ("report_id") REFERENCES "reports_commitreport" ("id") DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE "reports_reportleveltotals" ADD CONSTRAINT "reports_reportlevelt_report_id_b690dffa_fk_reports_c" FOREIGN KEY ("report_id") REFERENCES "reports_commitreport" ("id") DEFERRABLE INITIALLY DEFERRED; + ALTER TABLE "reports_uploaderror" ADD CONSTRAINT "reports_reportsessio_report_session_id_bb6563f1_fk_reports_r" FOREIGN KEY ("report_session_id") REFERENCES "reports_upload" ("id") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_uploaderror_report_session_id_bb6563f1" ON "reports_uploaderror" ("report_session_id"); + ALTER TABLE "reports_repositoryflag" ADD CONSTRAINT "reports_repositoryflag_repository_id_9b64b64c_fk_repos_repoid" FOREIGN KEY ("repository_id") REFERENCES "repos" ("repoid") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_repositoryflag_repository_id_9b64b64c" ON "reports_repositoryflag" ("repository_id"); + ALTER TABLE "reports_sessionleveltotals" ADD CONSTRAINT "reports_sessionlevel_report_session_id_e2cd6669_fk_reports_r" FOREIGN KEY ("report_session_id") REFERENCES "reports_upload" ("id") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_uploadflagmembership_flag_id_59edee69" ON "reports_uploadflagmembership" 
("flag_id"); + ALTER TABLE "reports_uploadflagmembership" ADD CONSTRAINT "reports_reportsessio_flag_id_59edee69_fk_reports_r" FOREIGN KEY ("flag_id") REFERENCES "reports_repositoryflag" ("id") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_uploadflagmembership_report_session_id_7d7f9546" ON "reports_uploadflagmembership" ("report_session_id"); + ALTER TABLE "reports_uploadflagmembership" ADD CONSTRAINT "reports_reportsessio_report_session_id_7d7f9546_fk_reports_r" FOREIGN KEY ("report_session_id") REFERENCES "reports_upload" ("id") DEFERRABLE INITIALLY DEFERRED; + CREATE INDEX "reports_upload_report_id_f6b4ffae" ON "reports_upload" ("report_id"); + ALTER TABLE "reports_upload" ADD CONSTRAINT "reports_reportsessio_report_id_f6b4ffae_fk_reports_c" FOREIGN KEY ("report_id") REFERENCES "reports_commitreport" ("id") DEFERRABLE INITIALLY DEFERRED; + COMMIT; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v455.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v455.py new file mode 100644 index 000000000..dc5c81589 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v455.py @@ -0,0 +1,14 @@ +# v4.5.5 +def run_sql(schema_editor): + schema_editor.execute( + """ + ALTER TABLE "reports_uploaderror" RENAME COLUMN "report_session_id" TO "upload_id"; + ALTER TABLE "reports_uploadflagmembership" RENAME COLUMN "report_session_id" TO "upload_id"; + ALTER TABLE "reports_sessionleveltotals" RENAME COLUMN "report_session_id" TO "upload_id"; + + ALTER TABLE "reports_upload" ADD COLUMN "upload_extras" jsonb NOT NULL; + ALTER TABLE "reports_upload" ADD COLUMN "upload_type" varchar(100) NOT NULL; + + ALTER TABLE "reports_sessionleveltotals" RENAME TO "reports_uploadleveltotals"; + """ + ) diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v461.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v461.py new file mode 100644 index 000000000..a78b08908 --- /dev/null +++ b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/v461.py @@ -0,0 +1,12 @@ +# v4.6.1 +def run_sql(schema_editor): + schema_editor.execute( + """ + ALTER TABLE reports_uploadleveltotals ALTER COLUMN coverage DROP NOT NULL; + ALTER TABLE reports_reportleveltotals ALTER COLUMN coverage DROP NOT NULL; + + ALTER TABLE owners ALTER COLUMN student SET DEFAULT FALSE; + + UPDATE owners SET student=false WHERE student is NULL; + """ + ) diff --git a/shared/django_apps/legacy_migrations/models.py b/shared/django_apps/legacy_migrations/models.py new file mode 100644 index 000000000..caff392c3 --- /dev/null +++ b/shared/django_apps/legacy_migrations/models.py @@ -0,0 +1,25 @@ +from django.db import models +from django_prometheus.models import ExportModelOperationsMixin + +from shared.django_apps.codecov_auth.models import Owner + + +# Create your models here. 
+class YamlHistory( + ExportModelOperationsMixin("legacy_migrations.yaml_history"), models.Model +): + id = models.AutoField(primary_key=True) + ownerid = models.ForeignKey( + Owner, on_delete=models.CASCADE, related_name="ownerids", db_column="ownerid" + ) + author = models.ForeignKey( + Owner, on_delete=models.CASCADE, related_name="authors", db_column="author" + ) + timestamp = models.DateTimeField() + message = models.TextField(blank=True, null=True) + source = models.TextField() + diff = models.TextField(null=True) + + class Meta: + db_table = "yaml_history" + indexes = [models.Index(fields=["ownerid", "timestamp"])] From 1454b4fcef48d0d566d8b71add29ac1890a7b965 Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 29 Feb 2024 19:48:51 -0800 Subject: [PATCH 07/36] lint --- .../legacy_migrations/migrations/0004_auto_20231024_1937.py | 1 + 1 file changed, 1 insertion(+) diff --git a/shared/django_apps/legacy_migrations/migrations/0004_auto_20231024_1937.py b/shared/django_apps/legacy_migrations/migrations/0004_auto_20231024_1937.py index c99005586..f9555b0b9 100644 --- a/shared/django_apps/legacy_migrations/migrations/0004_auto_20231024_1937.py +++ b/shared/django_apps/legacy_migrations/migrations/0004_auto_20231024_1937.py @@ -1,6 +1,7 @@ # Generated by Django 4.2.3 on 2023-10-24 19:37 from django.db import migrations + from shared.django_apps.migration_utils import RiskyRunSQL # from `legacy_migrations/migrations/legacy_sql/main/triggers/commits.py` From e038da8991c84228ec35eb8215407818b8aa29a0 Mon Sep 17 00:00:00 2001 From: Adrian Date: Fri, 1 Mar 2024 15:04:53 -0800 Subject: [PATCH 08/36] feat: adjust DB connection name --- docker-compose.yml | 2 +- shared/django_apps/dummy_settings.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 1e83f3edd..04c01aa17 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -22,6 +22,7 @@ services: postgres: image: postgres:14.7-alpine environment: + - POSTGRES_DB=postgres - POSTGRES_USER=postgres - POSTGRES_PASSWORD=password - POSTGRES_HOST_AUTH_METHOD=trust @@ -31,7 +32,6 @@ services: tmpfs: size: 1024M - timescale: image: timescale/timescaledb-ha:pg14-latest environment: diff --git a/shared/django_apps/dummy_settings.py b/shared/django_apps/dummy_settings.py index 18d80bd19..2c7705543 100644 --- a/shared/django_apps/dummy_settings.py +++ b/shared/django_apps/dummy_settings.py @@ -38,7 +38,7 @@ DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql", - "NAME": "POSTGRES_USER", + "NAME": "postgres", "USER": "postgres", "PASSWORD": "password", "HOST": "postgres", @@ -46,7 +46,7 @@ }, "timeseries": { "ENGINE": "django.db.backends.postgresql", - "NAME": "POSTGRES_USER", + "NAME": "postgres", "USER": "postgres", "PASSWORD": "postgres", "HOST": "timescale", From d2b6ebece25e2d61ef24140de32543144e91c137 Mon Sep 17 00:00:00 2001 From: Adrian Date: Fri, 1 Mar 2024 15:51:10 -0800 Subject: [PATCH 09/36] fix tests for plan service --- setup.py | 1 + .../django_apps/codecov/commands/__init__.py | 0 .../codecov/commands/exceptions.py | 24 ++++ .../codecov_auth/tests/__init__.py | 0 .../codecov_auth/tests/factories.py | 125 ++++++++++++++++++ shared/plan/service.py | 16 ++- shared/plan/test_plan.py | 8 +- 7 files changed, 165 insertions(+), 9 deletions(-) create mode 100644 shared/django_apps/codecov/commands/__init__.py create mode 100644 shared/django_apps/codecov/commands/exceptions.py create mode 100644 shared/django_apps/codecov_auth/tests/__init__.py create mode 100644 
shared/django_apps/codecov_auth/tests/factories.py diff --git a/setup.py b/setup.py index 693d1a6ef..2730c6a99 100644 --- a/setup.py +++ b/setup.py @@ -53,5 +53,6 @@ # API Deps "django-prometheus", "django-model-utils", + "factory-boy" ], ) diff --git a/shared/django_apps/codecov/commands/__init__.py b/shared/django_apps/codecov/commands/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/codecov/commands/exceptions.py b/shared/django_apps/codecov/commands/exceptions.py new file mode 100644 index 000000000..965450c02 --- /dev/null +++ b/shared/django_apps/codecov/commands/exceptions.py @@ -0,0 +1,24 @@ +class BaseException(Exception): + pass + + +class Unauthenticated(BaseException): + message = "You are not authenticated" + + +class ValidationError(BaseException): + @property + def message(self): + return str(self) + + +class Unauthorized(BaseException): + message = "You are not authorized" + + +class NotFound(BaseException): + message = "Cant find the requested resource" + + +class MissingService(BaseException): + message = "Missing required service" diff --git a/shared/django_apps/codecov_auth/tests/__init__.py b/shared/django_apps/codecov_auth/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/codecov_auth/tests/factories.py b/shared/django_apps/codecov_auth/tests/factories.py new file mode 100644 index 000000000..8d86f651a --- /dev/null +++ b/shared/django_apps/codecov_auth/tests/factories.py @@ -0,0 +1,125 @@ +from uuid import uuid4 + +import factory +from django.utils import timezone +from factory.django import DjangoModelFactory + +from shared.django_apps.codecov_auth.models import ( + OktaUser, + OrganizationLevelToken, + Owner, + OwnerProfile, + SentryUser, + Session, + TokenTypeChoices, + User, + UserToken, +) +from shared.plan.constants import TrialStatus +from shared.encryption.oauth import get_encryptor_from_configuration + +encryptor = get_encryptor_from_configuration() + + +class UserFactory(DjangoModelFactory): + class Meta: + model = User + + email = factory.Faker("email") + name = factory.Faker("name") + terms_agreement = False + terms_agreement_at = None + customer_intent = "Business" + + +class OwnerFactory(DjangoModelFactory): + class Meta: + model = Owner + exclude = ("unencrypted_oauth_token",) + + name = factory.Faker("name") + email = factory.Faker("email") + username = factory.Faker("user_name") + service = "github" + service_id = factory.Sequence(lambda n: f"{n}") + updatestamp = factory.LazyFunction(timezone.now) + plan_activated_users = [] + admins = [] + permission = [] + free = 0 + onboarding_completed = False + unencrypted_oauth_token = factory.LazyFunction(lambda: uuid4().hex) + cache = {"stats": {"repos": 1, "members": 2, "users": 1}} + oauth_token = factory.LazyAttribute( + lambda o: encryptor.encode(o.unencrypted_oauth_token).decode() + ) + user = factory.SubFactory(UserFactory) + trial_status = TrialStatus.NOT_STARTED.value + + +class SentryUserFactory(DjangoModelFactory): + class Meta: + model = SentryUser + + email = factory.Faker("email") + name = factory.Faker("name") + sentry_id = factory.LazyFunction(lambda: uuid4().hex) + access_token = factory.LazyFunction(lambda: uuid4().hex) + refresh_token = factory.LazyFunction(lambda: uuid4().hex) + user = factory.SubFactory(UserFactory) + + +class OktaUserFactory(DjangoModelFactory): + class Meta: + model = OktaUser + + email = factory.Faker("email") + name = factory.Faker("name") + okta_id = 
factory.LazyFunction(lambda: uuid4().hex) + access_token = factory.LazyFunction(lambda: uuid4().hex) + user = factory.SubFactory(UserFactory) + + +class OwnerProfileFactory(DjangoModelFactory): + class Meta: + model = OwnerProfile + + owner = factory.SubFactory(OwnerFactory) + default_org = factory.SubFactory(OwnerFactory) + + +class SessionFactory(DjangoModelFactory): + class Meta: + model = Session + + owner = factory.SubFactory(OwnerFactory) + lastseen = timezone.now() + type = Session.SessionType.API.value + token = factory.Faker("uuid4") + + +class OrganizationLevelTokenFactory(DjangoModelFactory): + class Meta: + model = OrganizationLevelToken + + owner = factory.SubFactory(OwnerFactory) + token = uuid4() + token_type = TokenTypeChoices.UPLOAD + + +class GetAdminProviderAdapter: + def __init__(self, result=False): + self.result = result + self.last_call_args = None + + async def get_is_admin(self, user): + self.last_call_args = user + return self.result + + +class UserTokenFactory(DjangoModelFactory): + class Meta: + model = UserToken + + owner = factory.SubFactory(OwnerFactory) + token = factory.LazyAttribute(lambda _: uuid4()) diff --git a/shared/plan/service.py b/shared/plan/service.py index 62ab72e19..70d188cd0 100644 --- a/shared/plan/service.py +++ b/shared/plan/service.py @@ -2,8 +2,9 @@ from datetime import datetime, timedelta from typing import List, Optional -from codecov.commands.exceptions import ValidationError -from plan.constants import ( +from shared.django_apps.codecov_auth.models import Owner +from shared.django_apps.codecov.commands.exceptions import ValidationError +from shared.plan.constants import ( BASIC_PLAN, FREE_PLAN, FREE_PLAN_REPRESENTATIONS, @@ -18,12 +19,17 @@ TrialDaysAmount, TrialStatus, ) -from services import sentry -from shared.django_apps.codecov_auth.models import Owner log = logging.getLogger(__name__) +# This originally belongs to the sentry service in API but this is a temporary fn to avoid importing the whole service +def is_sentry_user(owner: Owner) -> bool: + """ + Returns true if the given owner has been linked with a Sentry user. 
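+ + For example, an Owner whose `sentry_user_id` column is set (e.g. one built in tests via + OwnerFactory(sentry_user_id="abc123")) counts as a Sentry user; owners without it do not.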
+ """ + return owner.sentry_user_id is not None + # TODO: Consider moving some of these methods to the billing directory as they overlap billing functionality class PlanService: @@ -127,7 +133,7 @@ def available_plans(self, owner: Owner) -> List[PlanData]: available_plans += PR_AUTHOR_PAID_USER_PLAN_REPRESENTATIONS.values() - if owner and sentry.is_sentry_user(owner=owner): + if owner and is_sentry_user(owner=owner): available_plans += SENTRY_PAID_USER_PLAN_REPRESENTATIONS.values() # If number of activated users is less than or equal to TEAM_PLAN_MAX_USERS diff --git a/shared/plan/test_plan.py b/shared/plan/test_plan.py index ce9bcb62e..b8e34cbde 100644 --- a/shared/plan/test_plan.py +++ b/shared/plan/test_plan.py @@ -1,11 +1,11 @@ from datetime import datetime, timedelta from unittest.mock import patch -from codecov.commands.exceptions import ValidationError -from codecov_auth.tests.factories import OwnerFactory +from shared.django_apps.codecov.commands.exceptions import ValidationError +from shared.django_apps.codecov_auth.tests.factories import OwnerFactory from django.test import TestCase from freezegun import freeze_time -from plan.constants import ( +from shared.plan.constants import ( BASIC_PLAN, FREE_PLAN, FREE_PLAN_REPRESENTATIONS, @@ -18,7 +18,7 @@ TrialDaysAmount, TrialStatus, ) -from plan.service import PlanService +from shared.plan.service import PlanService @freeze_time("2023-06-19") From 1f7dcdd9de9ddf7e6c5b7485b079db4683b6132e Mon Sep 17 00:00:00 2001 From: Adrian Date: Mon, 4 Mar 2024 12:47:30 -0800 Subject: [PATCH 10/36] feat: add more necessary code for migrations --- docker-compose.yml | 8 +- setup.py | 3 +- .../codecov_auth/tests/factories.py | 2 +- shared/django_apps/dummy_settings.py | 3 +- .../management/commands/__init__.py | 0 .../management/commands/migrate.py | 109 ++++++++++++++++++ shared/django_apps/utils/config.py | 4 + shared/plan/service.py | 3 +- shared/plan/test_plan.py | 5 +- shared/torngit/cache/__init__.py | 9 ++ 10 files changed, 138 insertions(+), 8 deletions(-) create mode 100644 shared/django_apps/legacy_migrations/management/commands/__init__.py create mode 100644 shared/django_apps/legacy_migrations/management/commands/migrate.py diff --git a/docker-compose.yml b/docker-compose.yml index 04c01aa17..c1ef2f336 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,6 +3,7 @@ version: "3" volumes: postgres-volume: timescale-volume: + redis-volume: services: shared: @@ -20,7 +21,7 @@ services: - "86400" # so the container doesn't exit postgres: - image: postgres:14.7-alpine + image: postgres:14.4-alpine environment: - POSTGRES_DB=postgres - POSTGRES_USER=postgres @@ -32,6 +33,11 @@ services: tmpfs: size: 1024M + redis: + image: redis:4.0-alpine + volumes: + - redis-volume:/data + timescale: image: timescale/timescaledb-ha:pg14-latest environment: diff --git a/setup.py b/setup.py index 2730c6a99..1c8d8e8b4 100644 --- a/setup.py +++ b/setup.py @@ -53,6 +53,7 @@ # API Deps "django-prometheus", "django-model-utils", - "factory-boy" + "factory-boy", + "python-redis-lock", ], ) diff --git a/shared/django_apps/codecov_auth/tests/factories.py b/shared/django_apps/codecov_auth/tests/factories.py index 8d86f651a..1a941b1b6 100644 --- a/shared/django_apps/codecov_auth/tests/factories.py +++ b/shared/django_apps/codecov_auth/tests/factories.py @@ -15,8 +15,8 @@ User, UserToken, ) -from shared.plan.constants import TrialStatus from shared.encryption.oauth import get_encryptor_from_configuration +from shared.plan.constants import TrialStatus encryptor = 
get_encryptor_from_configuration() diff --git a/shared/django_apps/dummy_settings.py b/shared/django_apps/dummy_settings.py index 2c7705543..cc48e3922 100644 --- a/shared/django_apps/dummy_settings.py +++ b/shared/django_apps/dummy_settings.py @@ -8,14 +8,15 @@ # Install apps so that you can make migrations for them INSTALLED_APPS = [ + "shared.django_apps.legacy_migrations", "shared.django_apps.pg_telemetry", "shared.django_apps.ts_telemetry", "shared.django_apps.rollouts", # API models + "django.contrib.postgres", "shared.django_apps.codecov_auth", "shared.django_apps.core", "shared.django_apps.reports", - "shared.django_apps.legacy_migrations", ] MIDDLEWARE = [] diff --git a/shared/django_apps/legacy_migrations/management/commands/__init__.py b/shared/django_apps/legacy_migrations/management/commands/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/legacy_migrations/management/commands/migrate.py b/shared/django_apps/legacy_migrations/management/commands/migrate.py new file mode 100644 index 000000000..06dd48d9d --- /dev/null +++ b/shared/django_apps/legacy_migrations/management/commands/migrate.py @@ -0,0 +1,109 @@ +import logging + +import redis_lock +from django.conf import settings +from django.core.management.commands.migrate import Command as MigrateCommand +from django.db import connections +from django.db.utils import ProgrammingError + +from shared.django_apps.utils.config import RUN_ENV +from shared.torngit.cache import get_redis_connection + +log = logging.getLogger(__name__) + +MIGRATION_LOCK_NAME = "djang-migrations-lock" + + +class MockLock: + def release(self): + pass + + +""" +We need to override the base Django migrate command to handle the legacy migrations we have in the "legacy_migrations" app. +Those migrations are the source of truth for the initial db state, which is captured in Django migrations 0001 for the +core, codecov_auth and reports apps. Thus we need to fake out the initial migrations for those apps to apply duplicate migration +steps eg. creating the same table twice. The source of truth for all other state is captured in the standard Django migrations +and can be safely applied after runnin the legacy migrations. +""" + + +class Command(MigrateCommand): + def _fake_initial_migrations(self, cursor, args, options): + try: + cursor.execute("SELECT * FROM django_migrations;") + except ProgrammingError: + codecov_auth_options = {**options} + codecov_auth_options["fake"] = True + codecov_auth_options["app_label"] = "codecov_auth" + codecov_auth_options["migration_name"] = "0001" + + core_options = {**options} + core_options["fake"] = True + core_options["app_label"] = "core" + core_options["migration_name"] = "0001" + + reports_options = {**options} + reports_options["fake"] = True + reports_options["app_label"] = "reports" + reports_options["migration_name"] = "0001" + + legacy_options = {**options} + legacy_options["app_label"] = "legacy_migrations" + legacy_options["migration_name"] = None + + super().handle(*args, **codecov_auth_options) + super().handle(*args, **core_options) + super().handle(*args, **reports_options) + super().handle(*args, **legacy_options) + + def _obtain_lock(self): + """ + In certain environments we might be running mutliple servers that will try and run the migrations at the same time. This is + not safe to do. So we have the command obtain a lock to try and run the migration. If it cannot get a lock, it will wait + until it is able to do so before continuing to run. 
We need to wait for the lock instead of hard exiting on seeing another + server running the migrations because we write code in such a way that the server expects for migrations to be applied before + new code is deployed (but the opposite of new db with old code is fine). + """ + # If we're running in a non-server environment, we don't need to worry about acquiring a lock + if RUN_ENV == "DEV": + return MockLock() + + redis_connection = get_redis_connection() + lock = redis_lock.Lock( + redis_connection, MIGRATION_LOCK_NAME, expire=180, auto_renewal=True + ) + log.info("Trying to acquire migrations lock...") + acquired = lock.acquire(timeout=180) + + if not acquired: + return None + + return lock + + def handle(self, *args, **options): + log.info("Codecov is starting migrations...") + print("inside the overwritten migrate comman") + database = options["database"] + db_connection = connections[database] + options["run_syncdb"] = False + + lock = self._obtain_lock() + + # Failed to acquire lock due to timeout + if not lock: + log.error("Potential deadlock detected in api migrations.") + raise Exception("Failed to obtain lock for api migration.") + + try: + with db_connection.cursor() as cursor: + self._fake_initial_migrations(cursor, args, options) + + super().handle(*args, **options) + except: + log.info("Codecov migrations failed.") + raise + else: + log.info("Codecov migrations succeeded.") + finally: + lock.release() diff --git a/shared/django_apps/utils/config.py b/shared/django_apps/utils/config.py index 976eab1d4..b87bc6e45 100644 --- a/shared/django_apps/utils/config.py +++ b/shared/django_apps/utils/config.py @@ -1,5 +1,9 @@ +import os + from shared.config import get_config +RUN_ENV = os.environ.get("RUN_ENV", "PRODUCTION") + def should_write_data_to_storage_config_check( master_switch_key: str, is_codecov_repo: bool, repoid: int diff --git a/shared/plan/service.py b/shared/plan/service.py index 70d188cd0..2de82312b 100644 --- a/shared/plan/service.py +++ b/shared/plan/service.py @@ -2,8 +2,8 @@ from datetime import datetime, timedelta from typing import List, Optional -from shared.django_apps.codecov_auth.models import Owner from shared.django_apps.codecov.commands.exceptions import ValidationError +from shared.django_apps.codecov_auth.models import Owner from shared.plan.constants import ( BASIC_PLAN, FREE_PLAN, @@ -20,7 +20,6 @@ TrialStatus, ) - log = logging.getLogger(__name__) # This originally belongs to the sentry service in API but this is a temporary fn to avoid importing the whole service diff --git a/shared/plan/test_plan.py b/shared/plan/test_plan.py index b8e34cbde..f1052a614 100644 --- a/shared/plan/test_plan.py +++ b/shared/plan/test_plan.py @@ -1,10 +1,11 @@ from datetime import datetime, timedelta from unittest.mock import patch -from shared.django_apps.codecov.commands.exceptions import ValidationError -from shared.django_apps.codecov_auth.tests.factories import OwnerFactory from django.test import TestCase from freezegun import freeze_time + +from shared.django_apps.codecov.commands.exceptions import ValidationError +from shared.django_apps.codecov_auth.tests.factories import OwnerFactory from shared.plan.constants import ( BASIC_PLAN, FREE_PLAN, diff --git a/shared/torngit/cache/__init__.py b/shared/torngit/cache/__init__.py index ad1318701..8ecd92341 100644 --- a/shared/torngit/cache/__init__.py +++ b/shared/torngit/cache/__init__.py @@ -16,6 +16,15 @@ def get_redis_url() -> str: return f"redis://{hostname}:{port}" +def get_redis_connection() -> Redis: + url = 
get_redis_url() + return _get_redis_instance_from_url(url) + + +def _get_redis_instance_from_url(url): + return Redis.from_url(url) + + CachedEndpoint = Union[Literal["check"], Literal["compare"], Literal["status"]] From 812713336dec7bae9ec79e7eeaf5d3631080a6ed Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 7 Mar 2024 11:05:41 -0800 Subject: [PATCH 11/36] add init for utils module --- shared/django_apps/utils/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 shared/django_apps/utils/__init__.py diff --git a/shared/django_apps/utils/__init__.py b/shared/django_apps/utils/__init__.py new file mode 100644 index 000000000..e69de29bb From 0ae209a4a6a5aadb5db347c21e07b0dcad1e1464 Mon Sep 17 00:00:00 2001 From: Adrian Date: Mon, 11 Mar 2024 15:24:29 -0700 Subject: [PATCH 12/36] try adding app_label to Core --- shared/django_apps/core/models.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/shared/django_apps/core/models.py b/shared/django_apps/core/models.py index d72b4c72e..48af1c2bf 100644 --- a/shared/django_apps/core/models.py +++ b/shared/django_apps/core/models.py @@ -25,14 +25,20 @@ def db_type(self, connection): return "timestamp" -class Version(ExportModelOperationsMixin("core.version"), models.Model): +class BaseCoreModel(models.Model): + class Meta: + abstract = True + app_label = "shared-core" + + +class Version(ExportModelOperationsMixin("core.version"), BaseCoreModel): version = models.TextField(primary_key=True) class Meta: db_table = "version" -class Constants(ExportModelOperationsMixin("core.constants"), models.Model): +class Constants(ExportModelOperationsMixin("core.constants"), BaseCoreModel): key = models.CharField(primary_key=True) value = models.CharField() @@ -46,7 +52,7 @@ def _gen_image_token(): ) -class Repository(ExportModelOperationsMixin("core.repository"), models.Model): +class Repository(ExportModelOperationsMixin("core.repository"), BaseCoreModel): class Languages(models.TextChoices): JAVASCRIPT = "javascript" SHELL = "shell" @@ -165,7 +171,7 @@ def clean(self): raise ValidationError("using_integration cannot be null") -class Branch(ExportModelOperationsMixin("core.branch"), models.Model): +class Branch(ExportModelOperationsMixin("core.branch"), BaseCoreModel): name = models.TextField(primary_key=True, db_column="branch") repository = models.ForeignKey( "core.Repository", @@ -198,7 +204,7 @@ class Meta: ] -class Commit(ExportModelOperationsMixin("core.commit"), models.Model): +class Commit(ExportModelOperationsMixin("core.commit"), BaseCoreModel): class CommitStates(models.TextChoices): COMPLETE = "complete" PENDING = "pending" @@ -338,7 +344,7 @@ class PullStates(models.TextChoices): CLOSED = "closed" -class Pull(ExportModelOperationsMixin("core.pull"), models.Model): +class Pull(ExportModelOperationsMixin("core.pull"), BaseCoreModel): repository = models.ForeignKey( "core.Repository", db_column="repoid", @@ -427,7 +433,7 @@ def save(self, *args, **kwargs): class CommitNotification( - ExportModelOperationsMixin("core.commit_notification"), models.Model + ExportModelOperationsMixin("core.commit_notification"), BaseCoreModel ): class NotificationTypes(models.TextChoices): COMMENT = "comment" @@ -476,7 +482,9 @@ class Meta: db_table = "commit_notifications" -class CommitError(ExportModelOperationsMixin("core.commit_error"), BaseCodecovModel): +class CommitError( + ExportModelOperationsMixin("core.commit_error"), BaseCoreModel, BaseCodecovModel +): commit = models.ForeignKey( "Commit", 
related_name="errors", From 96f2a6a5d5ce2630a90ab6952e07147c2e608f61 Mon Sep 17 00:00:00 2001 From: Adrian Date: Mon, 11 Mar 2024 16:20:24 -0700 Subject: [PATCH 13/36] Added individual app_lablels to all models --- shared/django_apps/core/models.py | 37 ++++++++++++++++++------------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/shared/django_apps/core/models.py b/shared/django_apps/core/models.py index 48af1c2bf..7940e8e66 100644 --- a/shared/django_apps/core/models.py +++ b/shared/django_apps/core/models.py @@ -19,30 +19,29 @@ from shared.django_apps.core.managers import RepositoryManager from shared.django_apps.utils.config import should_write_data_to_storage_config_check +# Added to avoid 'doesn't declare an explicit app_label and isn't in an application in INSTALLED_APPS' error +CORE_APP_LABEL = "shared-core" + class DateTimeWithoutTZField(models.DateTimeField): def db_type(self, connection): return "timestamp" -class BaseCoreModel(models.Model): - class Meta: - abstract = True - app_label = "shared-core" - - -class Version(ExportModelOperationsMixin("core.version"), BaseCoreModel): +class Version(ExportModelOperationsMixin("core.version"), models.Model): version = models.TextField(primary_key=True) class Meta: + app_label = CORE_APP_LABEL db_table = "version" -class Constants(ExportModelOperationsMixin("core.constants"), BaseCoreModel): +class Constants(ExportModelOperationsMixin("core.constants"), models.Model): key = models.CharField(primary_key=True) value = models.CharField() class Meta: + app_label = CORE_APP_LABEL db_table = "constants" @@ -52,7 +51,7 @@ def _gen_image_token(): ) -class Repository(ExportModelOperationsMixin("core.repository"), BaseCoreModel): +class Repository(ExportModelOperationsMixin("core.repository"), models.Model): class Languages(models.TextChoices): JAVASCRIPT = "javascript" SHELL = "shell" @@ -142,6 +141,7 @@ class Languages(models.TextChoices): class Meta: db_table = "repos" + app_label = CORE_APP_LABEL ordering = ["-repoid"] indexes = [ models.Index( @@ -171,7 +171,7 @@ def clean(self): raise ValidationError("using_integration cannot be null") -class Branch(ExportModelOperationsMixin("core.branch"), BaseCoreModel): +class Branch(ExportModelOperationsMixin("core.branch"), models.Model): name = models.TextField(primary_key=True, db_column="branch") repository = models.ForeignKey( "core.Repository", @@ -191,6 +191,7 @@ class Branch(ExportModelOperationsMixin("core.branch"), BaseCoreModel): class Meta: db_table = "branches" + app_label = CORE_APP_LABEL constraints = [ models.UniqueConstraint( fields=["name", "repository"], name="branches_repoid_branch" @@ -204,7 +205,7 @@ class Meta: ] -class Commit(ExportModelOperationsMixin("core.commit"), BaseCoreModel): +class Commit(ExportModelOperationsMixin("core.commit"), models.Model): class CommitStates(models.TextChoices): COMPLETE = "complete" PENDING = "pending" @@ -274,6 +275,7 @@ def commitreport(self): class Meta: db_table = "commits" + app_label = CORE_APP_LABEL constraints = [ models.UniqueConstraint( fields=["repository", "commitid"], name="commits_repoid_commitid" @@ -344,7 +346,7 @@ class PullStates(models.TextChoices): CLOSED = "closed" -class Pull(ExportModelOperationsMixin("core.pull"), BaseCoreModel): +class Pull(ExportModelOperationsMixin("core.pull"), models.Model): repository = models.ForeignKey( "core.Repository", db_column="repoid", @@ -374,6 +376,7 @@ class Pull(ExportModelOperationsMixin("core.pull"), BaseCoreModel): class Meta: db_table = "pulls" + app_label = 
CORE_APP_LABEL ordering = ["-pullid"] constraints = [ models.UniqueConstraint( @@ -433,7 +436,7 @@ def save(self, *args, **kwargs): class CommitNotification( - ExportModelOperationsMixin("core.commit_notification"), BaseCoreModel + ExportModelOperationsMixin("core.commit_notification"), models.Model ): class NotificationTypes(models.TextChoices): COMMENT = "comment" @@ -479,12 +482,11 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) class Meta: + app_label = CORE_APP_LABEL db_table = "commit_notifications" -class CommitError( - ExportModelOperationsMixin("core.commit_error"), BaseCoreModel, BaseCodecovModel -): +class CommitError(ExportModelOperationsMixin("core.commit_error"), BaseCodecovModel): commit = models.ForeignKey( "Commit", related_name="errors", @@ -492,3 +494,6 @@ class CommitError( ) error_code = models.CharField(max_length=100) error_params = models.JSONField(default=dict) + + class Meta: + app_label = CORE_APP_LABEL From 1b6accc422da917d788095bc5e65dba2a553950a Mon Sep 17 00:00:00 2001 From: Adrian Date: Tue, 12 Mar 2024 14:44:57 -0700 Subject: [PATCH 14/36] added archive, storage, report_service code + some model additions - no tests yet --- setup.py | 3 + shared/api_archive/__init__.py | 0 shared/api_archive/archive.py | 277 ++++++++++++++++++ shared/api_archive/storage.py | 52 ++++ shared/django_apps/codecov_auth/helpers.py | 61 ++++ ...erinstallationnametousefortask_and_more.py | 63 ++++ shared/django_apps/codecov_auth/models.py | 35 ++- shared/django_apps/core/models.py | 38 +-- shared/django_apps/dummy_settings.py | 3 + shared/django_apps/utils/config.py | 21 ++ shared/django_apps/utils/model_utils.py | 146 +++++++++ shared/reports/api_report_service.py | 276 +++++++++++++++++ 12 files changed, 953 insertions(+), 22 deletions(-) create mode 100644 shared/api_archive/__init__.py create mode 100644 shared/api_archive/archive.py create mode 100644 shared/api_archive/storage.py create mode 100644 shared/django_apps/codecov_auth/helpers.py create mode 100644 shared/django_apps/codecov_auth/migrations/0053_ownerinstallationnametousefortask_and_more.py create mode 100644 shared/django_apps/utils/model_utils.py create mode 100644 shared/reports/api_report_service.py diff --git a/setup.py b/setup.py index 1c8d8e8b4..8f8992069 100644 --- a/setup.py +++ b/setup.py @@ -55,5 +55,8 @@ "django-model-utils", "factory-boy", "python-redis-lock", + "django-model-utils==4.3.1", + "requests==2.31.0", + "sentry-sdk>=1.40.0", ], ) diff --git a/shared/api_archive/__init__.py b/shared/api_archive/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/api_archive/archive.py b/shared/api_archive/archive.py new file mode 100644 index 000000000..d03c8788d --- /dev/null +++ b/shared/api_archive/archive.py @@ -0,0 +1,277 @@ +import json +import logging +from base64 import b16encode +from enum import Enum +from hashlib import md5 +from uuid import uuid4 + +from django.conf import settings +from django.utils import timezone +from minio import Minio + +from shared.api_archive.storage import StorageService +from shared.config import get_config +from shared.utils.ReportEncoder import ReportEncoder + +log = logging.getLogger(__name__) + + +class MinioEndpoints(Enum): + chunks = "{version}/repos/{repo_hash}/commits/{commitid}/chunks.txt" + json_data = "{version}/repos/{repo_hash}/commits/{commitid}/json_data/{table}/{field}/{external_id}.json" + json_data_no_commit = ( + "{version}/repos/{repo_hash}/json_data/{table}/{field}/{external_id}.json" + ) + raw = 
"v4/raw/{date}/{repo_hash}/{commit_sha}/{reportid}.txt" + raw_with_upload_id = ( + "v4/raw/{date}/{repo_hash}/{commit_sha}/{reportid}/{uploadid}.txt" + ) + profiling_upload = ( + "{version}/repos/{repo_hash}/profilinguploads/{profiling_version}/{location}" + ) + static_analysis_single_file = ( + "{version}/repos/{repo_hash}/static_analysis/files/{location}" + ) + + test_results = "test_results/v1/raw/{date}/{repo_hash}/{commit_sha}/{uploadid}.txt" + + def get_path(self, **kwaargs): + return self.value.format(**kwaargs) + + +def get_minio_client(): + return Minio( + settings.MINIO_LOCATION, + access_key=settings.MINIO_SECRET_KEY, + secret_key=settings.MINIO_ACCESS_KEY, + secure=True, + ) + + +# Service class for performing archive operations. Meant to work against the +# underlying StorageService +class ArchiveService(object): + + """ + The root level of the archive. In s3 terms, + this would be the name of the bucket + """ + + root = None + + """ + Region where the storage is located. + """ + region = None + + """ + A hash key of the repo for internal storage + """ + storage_hash = None + + """ + Time to life, how long presigned PUTs/GETs should live + """ + ttl = 10 + + def __init__(self, repository, ttl=None): + self.root = get_config("services", "minio", "bucket", default="archive") + self.region = get_config("services", "minio", "region", default="us-east-1") + # Set TTL from config and default to existing value + self.ttl = ttl or int(get_config("services", "minio", "ttl", default=self.ttl)) + self.storage = StorageService() + self.storage_hash = self.get_archive_hash(repository) + + """ + Accessor for underlying StorageService. You typically shouldn't need + this for anything. + """ + + def storage_client(self): + return self.storage + + """ + Getter. Returns true if the current configuration is enterprise. + """ + + def is_enterprise(self): + return settings.IS_ENTERPRISE + + """ + Generates a hash key from repo specific information. + Provides slight obfuscation of data in minio storage + """ + + @classmethod + def get_archive_hash(cls, repository): + _hash = md5() + hash_key = get_config("services", "minio", "hash_key", default="") + val = "".join( + map( + str, + ( + repository.repoid, + repository.service, + repository.service_id, + hash_key, + ), + ) + ).encode() + _hash.update(val) + return b16encode(_hash.digest()).decode() + + def write_json_data_to_storage( + self, + commit_id, + table: str, + field: str, + external_id: str, + data: dict, + *, + encoder=ReportEncoder, + ): + if commit_id is None: + # Some classes don't have a commit associated with them + # For example Pull belongs to multiple commits. 
+ path = MinioEndpoints.json_data_no_commit.get_path( + version="v4", + repo_hash=self.storage_hash, + table=table, + field=field, + external_id=external_id, + ) + else: + path = MinioEndpoints.json_data.get_path( + version="v4", + repo_hash=self.storage_hash, + commitid=commit_id, + table=table, + field=field, + external_id=external_id, + ) + stringified_data = json.dumps(data, cls=encoder) + self.write_file(path, stringified_data) + return path + + """ + Grabs path from storage, adds data to path object + writes back to path, overwriting the original contents + """ + + def update_archive(self, path, data): + self.storage.append_to_file(self.root, path, data) + + """ + Writes a generic file to the archive -- it's typically recommended to + not use this in lieu of the convenience methods write_raw_upload and + write_chunks + """ + + def write_file(self, path, data, reduced_redundancy=False, gzipped=False): + self.storage.write_file( + self.root, + path, + data, + reduced_redundancy=reduced_redundancy, + gzipped=gzipped, + ) + + """ + Convenience write method, writes a raw upload to a destination. + Returns the path it writes. + """ + + def write_raw_upload(self, commit_sha, report_id, data, gzipped=False): + # create a custom report path for a raw upload. + # write the file. + path = "/".join( + ( + "v4/raw", + timezone.now().strftime("%Y-%m-%d"), + self.storage_hash, + commit_sha, + "%s.txt" % report_id, + ) + ) + + self.write_file(path, data, gzipped=gzipped) + + return path + + """ + Convenience method to write a chunks.txt file to storage. + """ + + def write_chunks(self, commit_sha, data): + path = MinioEndpoints.chunks.get_path( + version="v4", repo_hash=self.storage_hash, commitid=commit_sha + ) + + self.write_file(path, data) + return path + + """ + Generic method to read a file from the archive + """ + + def read_file(self, path): + contents = self.storage.read_file(self.root, path) + return contents.decode() + + """ + Generic method to delete a file from the archive. + """ + + def delete_file(self, path): + self.storage.delete_file(self.root, path) + + """ + Deletes an entire repository's contents + """ + + def delete_repo_files(self): + path = "v4/repos/{}".format(self.storage_hash) + objects = self.storage.list_folder_contents(self.root, path) + for obj in objects: + self.storage.delete_file(self.root, obj.object_name) + + """ + Convenience method to read a chunks file from the archive. 
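    A minimal usage sketch (this mirrors the call made in
    shared/reports/api_report_service.py, added later in this same patch):

        archive_service = ArchiveService(commit.repository)
        chunks = archive_service.read_chunks(commit.commitid)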
+ """ + + def read_chunks(self, commit_sha): + path = MinioEndpoints.chunks.get_path( + version="v4", repo_hash=self.storage_hash, commitid=commit_sha + ) + log.info("Downloading chunks from path %s for commit %s", path, commit_sha) + return self.read_file(path) + + """ + Delete a chunk file from the archive + """ + + def delete_chunk_from_archive(self, commit_sha): + path = "v4/repos/{}/commits/{}/chunks.txt".format(self.storage_hash, commit_sha) + + self.delete_file(path) + + def create_presigned_put(self, path): + return self.storage.create_presigned_put(self.root, path, self.ttl) + + def create_raw_upload_presigned_put( + self, commit_sha, repo_hash=None, filename=None, expires=None + ): + if repo_hash is None: + repo_hash = self.storage_hash + + if not filename: + filename = "{}.txt".format(uuid4()) + + path = "v4/raw/{}/{}/{}/{}".format( + timezone.now().strftime("%Y-%m-%d"), self.storage_hash, commit_sha, filename + ) + + if expires is None: + expires = self.ttl + + return self.storage.create_presigned_put(self.root, path, expires) diff --git a/shared/api_archive/storage.py b/shared/api_archive/storage.py new file mode 100644 index 000000000..c1ae943d1 --- /dev/null +++ b/shared/api_archive/storage.py @@ -0,0 +1,52 @@ +import logging +from datetime import timedelta + +from shared.config import get_config +from shared.storage.minio import MinioStorageService + +log = logging.getLogger(__name__) + + +MINIO_CLIENT = None + + +# Service class for interfacing with codecov's underlying storage layer, minio +class StorageService(MinioStorageService): + def __init__(self, in_config=None): + global MINIO_CLIENT + + # init minio + if in_config is None: + self.minio_config = get_config("services", "minio", default={}) + else: + self.minio_config = in_config + + if "host" not in self.minio_config: + self.minio_config["host"] = "minio" + if "port" not in self.minio_config: + self.minio_config["port"] = 9000 + if "iam_auth" not in self.minio_config: + self.minio_config["iam_auth"] = False + if "iam_endpoint" not in self.minio_config: + self.minio_config["iam_endpoint"] = None + + if not MINIO_CLIENT: + MINIO_CLIENT = self.init_minio_client( + self.minio_config["host"], + self.minio_config["port"], + self.minio_config["access_key_id"], + self.minio_config["secret_access_key"], + self.minio_config["verify_ssl"], + self.minio_config["iam_auth"], + self.minio_config["iam_endpoint"], + ) + log.info("----- created minio_client: ---- ") + self.minio_client = MINIO_CLIENT + + def create_presigned_put(self, bucket, path, expires): + expires = timedelta(seconds=expires) + return self.minio_client.presigned_put_object(bucket, path, expires) + + def create_presigned_get(self, bucket, path, expires): + expires = timedelta(seconds=expires) + return self.minio_client.presigned_get_object(bucket, path, expires) diff --git a/shared/django_apps/codecov_auth/helpers.py b/shared/django_apps/codecov_auth/helpers.py new file mode 100644 index 000000000..09e91aa38 --- /dev/null +++ b/shared/django_apps/codecov_auth/helpers.py @@ -0,0 +1,61 @@ +from traceback import format_stack + +import requests +from django.contrib.admin.models import CHANGE, LogEntry +from django.contrib.contenttypes.models import ContentType + +from shared.django_apps.codecov_auth.constants import GITLAB_BASE_URL + +GITLAB_PAYLOAD_AVATAR_URL_KEY = "avatar_url" + + +def get_gitlab_url(email, size): + res = requests.get( + "{}/api/v4/avatar?email={}&size={}".format(GITLAB_BASE_URL, email, size) + ) + url = "" + if res.status_code == 200: + data = 
res.json() + try: + url = data[GITLAB_PAYLOAD_AVATAR_URL_KEY] + except KeyError: + pass + + return url + + +# https://stackoverflow.com/questions/7905106/adding-a-log-entry-for-an-action-by-a-user-in-a-django-ap + + +class History: + @staticmethod + def log(objects, message, user, action_flag=None, add_traceback=False): + """ + Log an action in the admin log + :param objects: Objects being operated on + :param message: Message to log + :param user: User performing action + :param action_flag: Type of action being performed + :param add_traceback: Add the stack trace to the message + """ + if action_flag is None: + action_flag = CHANGE + + if type(objects) is not list: + objects = [objects] + + if add_traceback: + message = f"{message}: {format_stack()}" + + for obj in objects: + if not obj: + continue + + LogEntry.objects.log_action( + user_id=user.pk, + content_type_id=ContentType.objects.get_for_model(obj).pk, + object_repr=str(obj), + object_id=obj.ownerid, + change_message=message, + action_flag=action_flag, + ) diff --git a/shared/django_apps/codecov_auth/migrations/0053_ownerinstallationnametousefortask_and_more.py b/shared/django_apps/codecov_auth/migrations/0053_ownerinstallationnametousefortask_and_more.py new file mode 100644 index 000000000..90febb82f --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0053_ownerinstallationnametousefortask_and_more.py @@ -0,0 +1,63 @@ +# Generated by Django 4.2.7 on 2024-02-21 16:03 + +import uuid + +import django.db.models.deletion +import django_prometheus.models +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0052_githubappinstallation_app_id_and_more"), + ] + + # BEGIN; + # -- + # -- Create model OwnerInstallationNameToUseForTask + # -- + # CREATE TABLE "codecov_auth_ownerinstallationnametousefortask" ("id" bigint NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, "external_id" uuid NOT NULL, "created_at" timestamp with time zone NOT NULL, "updated_at" timestamp with time zone NOT NULL, "installation_name" text NOT NULL, "task_name" text NOT NULL, "owner_id" integer NOT NULL); + # -- + # -- Create constraint single_task_name_per_owner on model ownerinstallationnametousefortask + # -- + # CREATE UNIQUE INDEX "single_task_name_per_owner" ON "codecov_auth_ownerinstallationnametousefortask" ("owner_id", "task_name"); + # ALTER TABLE "codecov_auth_ownerinstallationnametousefortask" ADD CONSTRAINT "codecov_auth_ownerin_owner_id_8bf0ce9b_fk_owners_ow" FOREIGN KEY ("owner_id") REFERENCES "owners" ("ownerid") DEFERRABLE INITIALLY DEFERRED; + # CREATE INDEX "codecov_auth_ownerinstalla_owner_id_8bf0ce9b" ON "codecov_auth_ownerinstallationnametousefortask" ("owner_id"); + # COMMIT; + + operations = [ + migrations.CreateModel( + name="OwnerInstallationNameToUseForTask", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("installation_name", models.TextField()), + ("task_name", models.TextField()), + ( + "owner", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="installation_name_to_use_for_tasks", + to="codecov_auth.owner", + ), + ), + ], + bases=( + django_prometheus.models.ExportModelOperationsMixin( + "codecov_auth.github_app_installation" + ), + models.Model, + ), + ), + migrations.AddConstraint( + 
model_name="ownerinstallationnametousefortask", + constraint=models.UniqueConstraint( + models.F("owner_id"), + models.F("task_name"), + name="single_task_name_per_owner", + ), + ), + ] diff --git a/shared/django_apps/codecov_auth/models.py b/shared/django_apps/codecov_auth/models.py index 9d606876b..4e4bdbc7d 100644 --- a/shared/django_apps/codecov_auth/models.py +++ b/shared/django_apps/codecov_auth/models.py @@ -21,6 +21,7 @@ BITBUCKET_BASE_URL, GRAVATAR_BASE_URL, ) +from shared.django_apps.codecov_auth.helpers import get_gitlab_url from shared.django_apps.codecov_auth.managers import OwnerManager from shared.django_apps.core.managers import RepositoryManager from shared.django_apps.core.models import DateTimeWithoutTZField, Repository @@ -387,9 +388,8 @@ def avatar_url(self, size=DEFAULT_AVATAR_SIZE): ) # Gitlab - # TODO: This is missing porting; required a lot more django apps + extra files that will ignore for this 1st pass - # elif self.service == SERVICE_GITLAB and self.email: - # return get_gitlab_url(self.email, size) + elif self.service == SERVICE_GITLAB and self.email: + return get_gitlab_url(self.email, size) # Codecov config elif get_config("services", "gravatar") and self.email: @@ -504,6 +504,13 @@ class GithubAppInstallation( related_name="github_app_installations", ) + def is_configured(self) -> bool: + """Returns whether this installation is properly configured and can be used""" + if self.name == GITHUB_APP_INSTALLATION_DEFAULT_NAME: + # The default app is configured in the installation YAML + return True + return self.app_id is not None and self.pem_path is not None + def repository_queryset(self) -> BaseManager[Repository]: """Returns a QuerySet of repositories covered by this installation""" if self.repository_service_ids is None: @@ -523,6 +530,28 @@ def is_repo_covered_by_integration(self, repo: Repository) -> bool: return repo.service_id in self.repository_service_ids +class OwnerInstallationNameToUseForTask( + ExportModelOperationsMixin("codecov_auth.github_app_installation"), BaseCodecovModel +): + owner = models.ForeignKey( + Owner, + null=False, + on_delete=models.CASCADE, + blank=False, + related_name="installation_name_to_use_for_tasks", + ) + installation_name = models.TextField(null=False, blank=False) + task_name = models.TextField(null=False, blank=False) + + class Meta: + constraints = [ + # Only 1 app name per task per owner_id + models.UniqueConstraint( + "owner_id", "task_name", name="single_task_name_per_owner" + ) + ] + + class SentryUser( ExportModelOperationsMixin("codecov_auth.sentry_user"), BaseCodecovModel ): diff --git a/shared/django_apps/core/models.py b/shared/django_apps/core/models.py index 7940e8e66..d71747ab4 100644 --- a/shared/django_apps/core/models.py +++ b/shared/django_apps/core/models.py @@ -3,6 +3,7 @@ import string import uuid from datetime import datetime +from typing import Optional from django.contrib.postgres.fields import ArrayField, CITextField from django.contrib.postgres.indexes import GinIndex, OpClass @@ -18,9 +19,11 @@ from shared.django_apps.core.encoders import ReportJSONEncoder from shared.django_apps.core.managers import RepositoryManager from shared.django_apps.utils.config import should_write_data_to_storage_config_check +from shared.django_apps.utils.model_utils import ArchiveField +from shared.reports.resources import Report # Added to avoid 'doesn't declare an explicit app_label and isn't in an application in INSTALLED_APPS' error -CORE_APP_LABEL = "shared-core" +CORE_APP_LABEL = "core" class 
DateTimeWithoutTZField(models.DateTimeField): @@ -263,15 +266,14 @@ def commitreport(self): ] return reports[0] if reports else None - # TODO: needs porting; property heavily tethered to report service - # @cached_property - # def full_report(self) -> Optional[Report]: - # # TODO: we should probably remove use of this method since it inverts the - # # dependency tree (services should be importing models and not the other - # # way around). The caching should be preserved somehow though. - # from services.report import build_report_from_commit + @cached_property + def full_report(self) -> Optional[Report]: + # TODO: we should probably remove use of this method since it inverts the + # dependency tree (services should be importing models and not the other + # way around). The caching should be preserved somehow though. + from shared.reports.api_report_service import build_report_from_commit - # return build_report_from_commit(self) + return build_report_from_commit(self) class Meta: db_table = "commits" @@ -332,12 +334,11 @@ def should_write_to_storage(self) -> bool: # Use custom JSON to properly serialize custom data classes on reports _report = models.JSONField(null=True, db_column="report", encoder=ReportJSONEncoder) _report_storage_path = models.URLField(null=True, db_column="report_storage_path") - # TODO: This needs porting as it is very tethered to the archive service - # report = ArchiveField( - # should_write_to_storage_fn=should_write_to_storage, - # json_encoder=ReportJSONEncoder, - # default_value_class=dict, - # ) + report = ArchiveField( + should_write_to_storage_fn=should_write_to_storage, + json_encoder=ReportJSONEncoder, + default_value_class=dict, + ) class PullStates(models.TextChoices): @@ -425,10 +426,9 @@ def should_write_to_storage(self) -> bool: _flare = models.JSONField(db_column="flare", null=True) _flare_storage_path = models.URLField(db_column="flare_storage_path", null=True) - # TODO: This needs porting as it is very tethered to the archive service - # flare = ArchiveField( - # should_write_to_storage_fn=should_write_to_storage, default_value_class=dict - # ) + flare = ArchiveField( + should_write_to_storage_fn=should_write_to_storage, default_value_class=dict + ) def save(self, *args, **kwargs): self.updatestamp = timezone.now() diff --git a/shared/django_apps/dummy_settings.py b/shared/django_apps/dummy_settings.py index cc48e3922..8b4a116cd 100644 --- a/shared/django_apps/dummy_settings.py +++ b/shared/django_apps/dummy_settings.py @@ -13,6 +13,9 @@ "shared.django_apps.ts_telemetry", "shared.django_apps.rollouts", # API models + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", "django.contrib.postgres", "shared.django_apps.codecov_auth", "shared.django_apps.core", diff --git a/shared/django_apps/utils/config.py b/shared/django_apps/utils/config.py index b87bc6e45..dc7d416af 100644 --- a/shared/django_apps/utils/config.py +++ b/shared/django_apps/utils/config.py @@ -1,9 +1,30 @@ import os +from enum import Enum from shared.config import get_config + +class SettingsModule(Enum): + DEV = "codecov.settings_dev" + STAGING = "codecov.settings_staging" + TESTING = "codecov.settings_test" + ENTERPRISE = "codecov.settings_enterprise" + PRODUCTION = "codecov.settings_prod" + + RUN_ENV = os.environ.get("RUN_ENV", "PRODUCTION") +if RUN_ENV == "DEV": + settings_module = SettingsModule.DEV.value +elif RUN_ENV == "STAGING": + settings_module = SettingsModule.STAGING.value +elif RUN_ENV == "TESTING": + settings_module = 
SettingsModule.TESTING.value +elif RUN_ENV == "ENTERPRISE": + settings_module = SettingsModule.ENTERPRISE.value +else: + settings_module = SettingsModule.PRODUCTION.value + def should_write_data_to_storage_config_check( master_switch_key: str, is_codecov_repo: bool, repoid: int diff --git a/shared/django_apps/utils/model_utils.py b/shared/django_apps/utils/model_utils.py new file mode 100644 index 000000000..4712b2b62 --- /dev/null +++ b/shared/django_apps/utils/model_utils.py @@ -0,0 +1,146 @@ +import json +import logging +from typing import Any, Callable, Optional + +from shared.api_archive.archive import ArchiveService +from shared.storage.exceptions import FileNotInStorageError +from shared.utils.ReportEncoder import ReportEncoder + +log = logging.getLogger(__name__) + + +class ArchiveFieldInterfaceMeta(type): + def __subclasscheck__(cls, subclass): + return ( + hasattr(subclass, "get_repository") + and callable(subclass.get_repository) + and hasattr(subclass, "get_commitid") + and callable(subclass.get_commitid) + and hasattr(subclass, "external_id") + ) + + +class ArchiveFieldInterface(metaclass=ArchiveFieldInterfaceMeta): + """Any class that uses ArchiveField must implement this interface""" + + external_id: str + + def get_repository(self): + """Returns the repository object associated with self""" + raise NotImplementedError() + + def get_commitid(self) -> Optional[str]: + """Returns the commitid associated with self. + If no commitid is associated return None. + """ + raise NotImplementedError() + + +class ArchiveField: + """This is a helper class that transparently handles models' fields that are saved in storage. + Classes that use the ArchiveField MUST implement ArchiveFieldInterface. It ill throw an error otherwise. + It uses the Descriptor pattern: https://docs.python.org/3/howto/descriptor.html + + Arguments: + should_write_to_storage_fn: Callable function that decides if data should be written to storage. + It should take 1 argument: the object instance. + + rehydrate_fn: Callable function to allow you to decode your saved data into internal representations. + The default value does nothing. + Data retrieved both from DB and storage pass through this function to guarantee consistency. + It should take 2 arguments: the object instance and the encoded data. 
+ + default_value: Any value that will be returned if we can't save the data for whatever reason + + Example: + archive_field = ArchiveField( + should_write_to_storage_fn=should_write_data, + rehydrate_fn=rehidrate_data, + default_value='default' + ) + For a full example check utils/tests/unit/test_model_utils.py + """ + + def __init__( + self, + should_write_to_storage_fn: Callable[[object], bool], + rehydrate_fn: Callable[[object, object], Any] = lambda self, x: x, + json_encoder=ReportEncoder, + default_value_class=lambda: None, + ): + self.default_value_class = default_value_class + self.rehydrate_fn = rehydrate_fn + self.should_write_to_storage_fn = should_write_to_storage_fn + self.json_encoder = json_encoder + + def __set_name__(self, owner, name): + # Validate that the owner class has the methods we need + assert issubclass( + owner, ArchiveFieldInterface + ), "Missing some required methods to use AchiveField" + self.public_name = name + self.db_field_name = "_" + name + self.archive_field_name = "_" + name + "_storage_path" + self.cached_value_property_name = f"__{self.public_name}_cached_value" + + def _get_value_from_archive(self, obj): + repository = obj.get_repository() + archive_service = ArchiveService(repository=repository) + archive_field = getattr(obj, self.archive_field_name) + if archive_field: + try: + file_str = archive_service.read_file(archive_field) + return self.rehydrate_fn(obj, json.loads(file_str)) + except FileNotInStorageError: + log.error( + "Archive enabled field not in storage", + extra=dict( + storage_path=archive_field, + object_id=obj.id, + commit=obj.get_commitid(), + ), + ) + else: + log.debug( + "Both db_field and archive_field are None", + extra=dict( + object_id=obj.id, + commit=obj.get_commitid(), + ), + ) + return self.default_value_class() + + def __get__(self, obj, objtype=None): + cached_value = getattr(obj, self.cached_value_property_name, None) + if cached_value: + return cached_value + db_field = getattr(obj, self.db_field_name) + if db_field is not None: + value = self.rehydrate_fn(obj, db_field) + else: + value = self._get_value_from_archive(obj) + setattr(obj, self.cached_value_property_name, value) + return value + + def __set__(self, obj, value): + # Set the new value + if self.should_write_to_storage_fn(obj): + repository = obj.get_repository() + archive_service = ArchiveService(repository=repository) + old_file_path = getattr(obj, self.archive_field_name) + table_name = obj._meta.db_table + path = archive_service.write_json_data_to_storage( + commit_id=obj.get_commitid(), + table=table_name, + field=self.public_name, + external_id=obj.external_id, + data=value, + encoder=self.json_encoder, + ) + if old_file_path is not None and path != old_file_path: + archive_service.delete_file(old_file_path) + setattr(obj, self.archive_field_name, path) + setattr(obj, self.db_field_name, None) + else: + setattr(obj, self.db_field_name, value) + setattr(obj, self.cached_value_property_name, value) diff --git a/shared/reports/api_report_service.py b/shared/reports/api_report_service.py new file mode 100644 index 000000000..1e15a8318 --- /dev/null +++ b/shared/reports/api_report_service.py @@ -0,0 +1,276 @@ +import logging +from typing import List, Optional + +import sentry_sdk +from django.conf import settings +from django.db.models import Prefetch, Q +from django.utils.functional import cached_property + +from shared.api_archive.archive import ArchiveService +from shared.django_apps.core.models import Commit +from shared.django_apps.reports.models 
import ( + AbstractTotals, + CommitReport, + ReportSession, +) +from shared.django_apps.utils.config import RUN_ENV +from shared.helpers.flag import Flag +from shared.reports.readonly import ReadOnlyReport as SharedReadOnlyReport +from shared.reports.resources import Report +from shared.reports.types import ReportFileSummary, ReportTotals +from shared.storage.exceptions import FileNotInStorageError +from shared.utils.sessions import Session, SessionType + +log = logging.getLogger(__name__) + + +class ReportMixin: + def file_reports(self): + for f in self.files: + yield self.get(f) + + @cached_property + def flags(self): + """returns dict(:name=)""" + flags_dict = {} + for sid, session in self.sessions.items(): + if session.flags is not None: + carriedforward = session.session_type.value == "carriedforward" + carriedforward_from = session.session_extras.get("carriedforward_from") + for flag in session.flags: + flags_dict[flag] = Flag( + self, + flag, + carriedforward=carriedforward, + carriedforward_from=carriedforward_from, + ) + return flags_dict + + +class SerializableReport(ReportMixin, Report): + pass + + +class ReadOnlyReport(ReportMixin, SharedReadOnlyReport): + pass + + +@sentry_sdk.trace +def build_report(chunks, files, sessions, totals, report_class=None): + if report_class is None: + report_class = SerializableReport + return report_class.from_chunks( + chunks=chunks, files=files, sessions=sessions, totals=totals + ) + + +@sentry_sdk.trace +def build_report_from_commit(commit: Commit, report_class=None): + """ + Builds a `shared.reports.resources.Report` from a given commit. + + Chunks are fetched from archive storage and the rest of the data is sourced + from various `reports_*` tables in the database. + """ + + # TODO: this can be removed once confirmed working well on prod + new_report_builder_enabled = ( + RUN_ENV == "DEV" + or RUN_ENV == "STAGING" + or RUN_ENV == "TESTING" + or commit.repository_id in settings.REPORT_BUILDER_REPO_IDS + ) + + with sentry_sdk.start_span(description="Fetch files/sessions/totals"): + commit_report = fetch_commit_report(commit) + if commit_report and new_report_builder_enabled: + files = build_files(commit_report) + sessions = build_sessions(commit_report) + try: + totals = build_totals(commit_report.reportleveltotals) + except CommitReport.reportleveltotals.RelatedObjectDoesNotExist: + totals = None + else: + if not commit.report: + return None + + files = commit.report["files"] + sessions = commit.report["sessions"] + totals = commit.totals + + try: + with sentry_sdk.start_span(description="Fetch chunks"): + chunks = ArchiveService(commit.repository).read_chunks(commit.commitid) + return build_report(chunks, files, sessions, totals, report_class=report_class) + except FileNotInStorageError: + log.warning( + "File for chunks not found in storage", + extra=dict( + commit=commit.commitid, + repo=commit.repository_id, + ), + ) + return None + + +def fetch_commit_report(commit: Commit) -> Optional[CommitReport]: + """ + Fetch a single `CommitReport` for the given commit. + All the necessary report relations are prefetched. 
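    Concretely, the queryset below prefetches the report's sessions (together
    with their flags and upload-level totals) and selects the related
    reportdetails and reportleveltotals rows, so build_sessions, build_files and
    build_totals can run without issuing additional queries.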
+ """ + return ( + commit.reports.coverage_reports() + .filter(code=None) + .prefetch_related( + Prefetch( + "sessions", + queryset=ReportSession.objects.prefetch_related("flags").select_related( + "uploadleveltotals" + ), + ), + ) + .select_related("reportdetails", "reportleveltotals") + .first() + ) + + +def build_totals(totals: AbstractTotals) -> ReportTotals: + """ + Build a `shared.reports.types.ReportTotals` instance from one of the + various database totals records. + """ + return ReportTotals( + files=totals.files, + lines=totals.lines, + hits=totals.hits, + misses=totals.misses, + partials=totals.partials, + coverage=totals.coverage, + branches=totals.branches, + methods=totals.methods, + ) + + +def build_session(upload: ReportSession) -> Session: + """ + Build a `shared.utils.sessions.Session` from a database `reports_upload` record. + """ + try: + upload_totals = build_totals(upload.uploadleveltotals) + except ReportSession.uploadleveltotals.RelatedObjectDoesNotExist: + # upload does not have any totals - maybe the processing failed + # or the upload was empty? + upload_totals = None + flags = [flag.flag_name for flag in upload.flags.all()] + + return Session( + id=upload.id, + totals=upload_totals, + time=upload.created_at.timestamp, + archive=upload.storage_path, + flags=flags, + provider=upload.provider, + build=upload.build_code, + job=upload.job_code, + url=upload.build_url, + state=upload.state, + env=upload.env, + name=upload.name, + session_type=SessionType.get_from_string(upload.upload_type), + session_extras=upload.upload_extras, + ) + + +def build_sessions(commit_report: CommitReport) -> dict[int, Session]: + """ + Build mapping of report number -> session that can be passed to the report class. + Does not include CF sessions if there is also an upload session with the same + flag name. + """ + sessions = {} + + carryforward_sessions = {} + uploaded_flags = set() + + for upload in commit_report.sessions.filter( + Q(state="complete") | Q(state="processed") + ): + session = build_session(upload) + if session.session_type == SessionType.carriedforward: + carryforward_sessions[upload.order_number] = session + else: + sessions[upload.order_number] = session + uploaded_flags |= set(session.flags) + + for sid, session in carryforward_sessions.items(): + # we only ever expect 1 flag for CF sessions + overlapping_flags = uploaded_flags & set(session.flags) + + if len(overlapping_flags) == 0: + # we can include this CF session since there are no direct uploads + # with the same flag name + sessions[sid] = session + + return sessions + + +def build_files(commit_report: CommitReport) -> dict[str, ReportFileSummary]: + """ + Construct a files dictionary in a format compatible with `shared.reports.resources.Report` + from data in the `reports_reportdetails.files_array` column in the database. 
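    For illustration only (the values here are made up), each files_array entry
    is expected to carry the keys used below, roughly:

        {
            "filename": "src/app.py",
            "file_index": 0,
            "file_totals": [0, 10, 8, 2, 0, "80.00000", 0, 0, 0, 0, 0, 0, 0],
            "session_totals": None,
            "diff_totals": None,
        }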
+ """ + try: + report_details = commit_report.reportdetails + except CommitReport.reportdetails.RelatedObjectDoesNotExist: + # we don't expect this but something could have gone wrong in the worker + # we can't really recover here + return {} + + return { + file["filename"]: ReportFileSummary( + file_index=file["file_index"], + file_totals=ReportTotals(*file["file_totals"]), + session_totals=file["session_totals"], + diff_totals=file["diff_totals"], + ) + for file in report_details.files_array + } + + +def files_belonging_to_flags(commit_report: Report, flags: List[str]) -> List[str]: + sessions_for_specific_flags = sessions_with_specific_flags( + commit_report=commit_report, flags=flags + ) + session_ids = list(sessions_for_specific_flags.keys()) + files_in_specific_sessions = files_in_sessions( + commit_report=commit_report, session_ids=session_ids + ) + return files_in_specific_sessions + + +def sessions_with_specific_flags( + commit_report: Report, flags: List[str] +) -> dict[int, Session]: + sessions = [ + (sid, session) + for sid, session in commit_report.sessions.items() + if session.flags and set(session.flags) & set(flags) + ] + return dict(sessions) + + +def files_in_sessions(commit_report: Report, session_ids: List[int]) -> List[str]: + files, session_ids = [], set(session_ids) + for file in commit_report: + found = False + for line in file: + if line: + for session in line.sessions: + if session.id in session_ids: + found = True + break + if found: + break + if found: + files.append(file.name) + return files From edd361a7813c361de817ea6a45f8ec4c4bc31105 Mon Sep 17 00:00:00 2001 From: Adrian Date: Tue, 12 Mar 2024 15:04:37 -0700 Subject: [PATCH 15/36] lets see if this fixes it --- shared/django_apps/codecov_auth/managers.py | 4 ++-- shared/django_apps/core/managers.py | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/shared/django_apps/codecov_auth/managers.py b/shared/django_apps/codecov_auth/managers.py index 0a65e868d..67dfff0bd 100644 --- a/shared/django_apps/codecov_auth/managers.py +++ b/shared/django_apps/codecov_auth/managers.py @@ -23,7 +23,7 @@ def annotate_activated_in(self, owner): if a given user is activated in organization "owner", false otherwise. """ - from codecov_auth.models import Owner + from shared.django_apps.codecov_auth.models import Owner return self.annotate( activated=Coalesce( @@ -47,7 +47,7 @@ def annotate_is_admin_in(self, owner): if a given user is an admin in organization "owner", and false otherwise. """ - from codecov_auth.models import Owner + from shared.django_apps.codecov_auth.models import Owner return self.annotate( is_admin=Coalesce( diff --git a/shared/django_apps/core/managers.py b/shared/django_apps/core/managers.py index 86f8f680d..8d54a50bd 100644 --- a/shared/django_apps/core/managers.py +++ b/shared/django_apps/core/managers.py @@ -52,7 +52,7 @@ def with_recent_coverage(self) -> QuerySet: are not changing as the most recent commit is uploading coverage reports. """ - from core.models import Commit + from shared.django_apps.core.models import Commit timestamp = timezone.now() - timezone.timedelta(hours=1) @@ -100,7 +100,7 @@ def with_latest_commit_totals_before( """ Annotates queryset with coverage of latest commit totals before cerain date. 
""" - from core.models import Commit + from shared.django_apps.core.models import Commit # Parsing the date given in parameters so we receive a datetime rather than a string timestamp = parser.parse(before_date) @@ -132,7 +132,7 @@ def with_latest_coverage_change(self): branch) of each repository. Depends on having called "with_latest_commit_totals_before" with "include_previous_totals=True". """ - from core.models import Commit + from shared.django_apps.core.models import Commit return self.annotate( latest_coverage=Cast( @@ -242,7 +242,7 @@ def with_latest_commit_at(self): - latest_commit_at as the true_coverage except NULL are transformed to 1/1/1900 This make sure when we order the repo with no commit appears last. """ - from core.models import Commit + from shared.django_apps.core.models import Commit latest_commit_at = Subquery( Commit.objects.filter(repository_id=OuterRef("pk")) @@ -260,7 +260,7 @@ def with_oldest_commit_at(self): """ Annotates the queryset with the oldest commit timestamp. """ - from core.models import Commit + from shared.django_apps.core.models import Commit commits = Commit.objects.filter(repository_id=OuterRef("pk")).order_by( "timestamp" @@ -270,7 +270,7 @@ def with_oldest_commit_at(self): ) def get_or_create_from_git_repo(self, git_repo, owner): - from codecov_auth.models import Owner + from shared.django_apps.codecov_auth.models import Owner repo, created = self.get_or_create( author=owner, From 6ddac404ba4c1b81c05d0aecee112b4dcc2fd852 Mon Sep 17 00:00:00 2001 From: Adrian Date: Tue, 12 Mar 2024 19:41:03 -0700 Subject: [PATCH 16/36] add __init__ to shared.upload folder --- shared/django_apps/core/models.py | 3 ++- shared/upload/__init__.py | 0 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 shared/upload/__init__.py diff --git a/shared/django_apps/core/models.py b/shared/django_apps/core/models.py index d71747ab4..09f0c457b 100644 --- a/shared/django_apps/core/models.py +++ b/shared/django_apps/core/models.py @@ -22,7 +22,8 @@ from shared.django_apps.utils.model_utils import ArchiveField from shared.reports.resources import Report -# Added to avoid 'doesn't declare an explicit app_label and isn't in an application in INSTALLED_APPS' error +# Added to avoid 'doesn't declare an explicit app_label and isn't in an application in INSTALLED_APPS' error\ +# Needs to be called the same as the API app CORE_APP_LABEL = "core" diff --git a/shared/upload/__init__.py b/shared/upload/__init__.py new file mode 100644 index 000000000..e69de29bb From dda6cf9426f00362a7b994b6f194f92928445e07 Mon Sep 17 00:00:00 2001 From: Adrian Date: Tue, 12 Mar 2024 21:19:58 -0700 Subject: [PATCH 17/36] feat: add labels to all new models --- shared/django_apps/codecov_auth/models.py | 32 +++++++++++++++++ .../django_apps/legacy_migrations/models.py | 4 +++ shared/django_apps/reports/models.py | 35 ++++++++++++++++--- shared/plan/__init__.py | 0 4 files changed, 66 insertions(+), 5 deletions(-) create mode 100644 shared/plan/__init__.py diff --git a/shared/django_apps/codecov_auth/models.py b/shared/django_apps/codecov_auth/models.py index 4e4bdbc7d..ad9b9c80f 100644 --- a/shared/django_apps/codecov_auth/models.py +++ b/shared/django_apps/codecov_auth/models.py @@ -27,6 +27,10 @@ from shared.django_apps.core.models import DateTimeWithoutTZField, Repository from shared.plan.constants import USER_PLAN_REPRESENTATIONS, PlanName +# Added to avoid 'doesn't declare an explicit app_label and isn't in an application in INSTALLED_APPS' error\ +# Needs to be called the same as the 
API app +CODECOV_AUTH_APP_LABEL = "codecov_auth" + # Large number to represent Infinity as float('int') is not JSON serializable INFINITY = 99999999 @@ -37,10 +41,13 @@ SERVICE_GITLAB = "gitlab" SERVICE_CODECOV_ENTERPRISE = "enterprise" + DEFAULT_AVATAR_SIZE = 55 + log = logging.getLogger(__name__) + # TODO use this to refactor avatar_url class Service(models.TextChoices): GITHUB = "github" @@ -82,6 +89,7 @@ class CustomerIntent(models.TextChoices): class Meta: db_table = "users" + app_label = CODECOV_AUTH_APP_LABEL @property def is_active(self): @@ -118,6 +126,7 @@ def get_username(self): class Owner(ExportModelOperationsMixin("codecov_auth.owner"), models.Model): class Meta: db_table = "owners" + app_label = CODECOV_AUTH_APP_LABEL ordering = ["ownerid"] constraints = [ models.UniqueConstraint( @@ -504,6 +513,9 @@ class GithubAppInstallation( related_name="github_app_installations", ) + class Meta: + app_label = CODECOV_AUTH_APP_LABEL + def is_configured(self) -> bool: """Returns whether this installation is properly configured and can be used""" if self.name == GITHUB_APP_INSTALLATION_DEFAULT_NAME: @@ -544,6 +556,7 @@ class OwnerInstallationNameToUseForTask( task_name = models.TextField(null=False, blank=False) class Meta: + app_label = CODECOV_AUTH_APP_LABEL constraints = [ # Only 1 app name per task per owner_id models.UniqueConstraint( @@ -567,6 +580,9 @@ class SentryUser( email = models.TextField(null=True) name = models.TextField(null=True) + class Meta: + app_label = CODECOV_AUTH_APP_LABEL + class OktaUser(ExportModelOperationsMixin("codecov_auth.okta_user"), BaseCodecovModel): user = models.ForeignKey( @@ -580,6 +596,9 @@ class OktaUser(ExportModelOperationsMixin("codecov_auth.okta_user"), BaseCodecov email = models.TextField(null=True) name = models.TextField(null=True) + class Meta: + app_label = CODECOV_AUTH_APP_LABEL + class TokenTypeChoices(models.TextChoices): UPLOAD = "upload" @@ -601,6 +620,9 @@ class OrganizationLevelToken( max_length=50, choices=TokenTypeChoices.choices, default=TokenTypeChoices.UPLOAD ) + class Meta: + app_label = CODECOV_AUTH_APP_LABEL + def save(self, *args, **kwargs): super().save(*args, **kwargs) @@ -633,9 +655,13 @@ class Goal(models.TextChoices): Owner, on_delete=models.CASCADE, null=True, related_name="profiles_with_default" ) + class Meta: + app_label = CODECOV_AUTH_APP_LABEL + class Session(ExportModelOperationsMixin("codecov_auth.session"), models.Model): class Meta: + app_label = CODECOV_AUTH_APP_LABEL db_table = "sessions" ordering = ["-lastseen"] @@ -678,6 +704,9 @@ class TokenType(models.TextChoices): max_length=40, unique=True, editable=False, default=_generate_key ) + class Meta: + app_label = CODECOV_AUTH_APP_LABEL + @classmethod def generate_key(cls): return _generate_key() @@ -689,6 +718,9 @@ class UserToken( class TokenType(models.TextChoices): API = "api" + class Meta: + app_label = CODECOV_AUTH_APP_LABEL + name = models.CharField(max_length=100, null=False, blank=False) owner = models.ForeignKey( "Owner", diff --git a/shared/django_apps/legacy_migrations/models.py b/shared/django_apps/legacy_migrations/models.py index caff392c3..3c56f04be 100644 --- a/shared/django_apps/legacy_migrations/models.py +++ b/shared/django_apps/legacy_migrations/models.py @@ -3,6 +3,9 @@ from shared.django_apps.codecov_auth.models import Owner +# Added to avoid 'doesn't declare an explicit app_label and isn't in an application in INSTALLED_APPS' error\ +# Needs to be called the same as the API app +LEGACY_MIGRATIONS_APP_LABEL = "legacy_migrations" # 
Create your models here. class YamlHistory( @@ -22,4 +25,5 @@ class YamlHistory( class Meta: db_table = "yaml_history" + app_label = LEGACY_MIGRATIONS_APP_LABEL indexes = [models.Index(fields=["ownerid", "timestamp"])] diff --git a/shared/django_apps/reports/models.py b/shared/django_apps/reports/models.py index f7ff0fc19..5128483de 100644 --- a/shared/django_apps/reports/models.py +++ b/shared/django_apps/reports/models.py @@ -9,12 +9,17 @@ from shared.django_apps.codecov.models import BaseCodecovModel from shared.django_apps.reports.managers import CommitReportManager from shared.django_apps.utils.config import should_write_data_to_storage_config_check +from shared.django_apps.utils.model_utils import ArchiveField from shared.django_apps.utils.services import get_short_service_name from shared.reports.enums import UploadState, UploadType from shared.upload.constants import ci log = logging.getLogger(__name__) +# Added to avoid 'doesn't declare an explicit app_label and isn't in an application in INSTALLED_APPS' error\ +# Needs to be called the same as the API app +REPORTS_APP_LABEL = "reports" + class ReportType(models.TextChoices): COVERAGE = "coverage" @@ -54,6 +59,9 @@ class ReportType(models.TextChoices): null=True, max_length=100, choices=ReportType.choices ) + class Meta: + app_label = REPORTS_APP_LABEL + objects = CommitReportManager() @@ -70,6 +78,9 @@ class ReportResultsStates(models.TextChoices): completed_at = models.DateTimeField(null=True) result = models.JSONField(default=dict) + class Meta: + app_label = REPORTS_APP_LABEL + class ReportDetails( ExportModelOperationsMixin("reports.report_details"), BaseCodecovModel @@ -80,6 +91,9 @@ class ReportDetails( db_column="files_array_storage_path", null=True ) + class Meta: + app_label = REPORTS_APP_LABEL + def get_repository(self): return self.report.commit.repository @@ -101,16 +115,18 @@ def should_write_to_storage(self) -> bool: repoid=self.report.commit.repository.repoid, ) - # TODO: This needs porting as it is very tethered to the archive service - # files_array = ArchiveField( - # should_write_to_storage_fn=should_write_to_storage, - # default_value_class=list, - # ) + files_array = ArchiveField( + should_write_to_storage_fn=should_write_to_storage, + default_value_class=list, + ) class ReportLevelTotals(AbstractTotals): report = models.OneToOneField(CommitReport, on_delete=models.CASCADE) + class Meta: + app_label = REPORTS_APP_LABEL + class UploadError(ExportModelOperationsMixin("reports.upload_error"), BaseCodecovModel): report_session = models.ForeignKey( @@ -123,6 +139,7 @@ class UploadError(ExportModelOperationsMixin("reports.upload_error"), BaseCodeco error_params = models.JSONField(default=dict) class Meta: + app_label = REPORTS_APP_LABEL db_table = "reports_uploaderror" @@ -136,6 +153,7 @@ class UploadFlagMembership( id = models.BigAutoField(primary_key=True) class Meta: + app_label = REPORTS_APP_LABEL db_table = "reports_uploadflagmembership" @@ -148,6 +166,9 @@ class RepositoryFlag( flag_name = models.CharField(max_length=1024) deleted = models.BooleanField(null=True) + class Meta: + app_label = REPORTS_APP_LABEL + class ReportSession( ExportModelOperationsMixin("reports.report_session"), BaseCodecovModel @@ -173,6 +194,7 @@ class ReportSession( upload_type_id = models.IntegerField(null=True, choices=UploadType.choices()) class Meta: + app_label = REPORTS_APP_LABEL db_table = "reports_upload" @property @@ -221,6 +243,7 @@ class UploadLevelTotals(AbstractTotals): ) class Meta: + app_label = REPORTS_APP_LABEL 
db_table = "reports_uploadleveltotals" @@ -251,6 +274,7 @@ class Test(models.Model): flags_hash = models.TextField() class Meta: + app_label = REPORTS_APP_LABEL db_table = "reports_test" constraints = [ models.UniqueConstraint( @@ -285,4 +309,5 @@ class Outcome(models.TextChoices): failure_message = models.TextField(null=True) class Meta: + app_label = REPORTS_APP_LABEL db_table = "reports_testinstance" diff --git a/shared/plan/__init__.py b/shared/plan/__init__.py new file mode 100644 index 000000000..e69de29bb From 939a3b24b708f8c49669342006c6d045e07fa754 Mon Sep 17 00:00:00 2001 From: Adrian Date: Wed, 13 Mar 2024 13:03:25 -0700 Subject: [PATCH 18/36] add missing model to reports folder + its migration --- .../migrations/0015_testresultreporttotals.py | 38 +++++++++++++++++++ shared/django_apps/reports/models.py | 12 ++++++ 2 files changed, 50 insertions(+) create mode 100644 shared/django_apps/reports/migrations/0015_testresultreporttotals.py diff --git a/shared/django_apps/reports/migrations/0015_testresultreporttotals.py b/shared/django_apps/reports/migrations/0015_testresultreporttotals.py new file mode 100644 index 000000000..bdda85b39 --- /dev/null +++ b/shared/django_apps/reports/migrations/0015_testresultreporttotals.py @@ -0,0 +1,38 @@ +# Generated by Django 4.2.7 on 2024-02-08 21:30 + +import uuid + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("reports", "0014_rename_env_test_flags_hash_and_more"), + ] + + operations = [ + migrations.CreateModel( + name="TestResultReportTotals", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("external_id", models.UUIDField(default=uuid.uuid4, editable=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("passed", models.IntegerField()), + ("skipped", models.IntegerField()), + ("failed", models.IntegerField()), + ( + "report", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + to="reports.commitreport", + ), + ), + ], + options={ + "db_table": "reports_testresultreporttotals", + }, + ), + ] diff --git a/shared/django_apps/reports/models.py b/shared/django_apps/reports/models.py index 5128483de..f4b446761 100644 --- a/shared/django_apps/reports/models.py +++ b/shared/django_apps/reports/models.py @@ -311,3 +311,15 @@ class Outcome(models.TextChoices): class Meta: app_label = REPORTS_APP_LABEL db_table = "reports_testinstance" + + +class TestResultReportTotals(BaseCodecovModel): + passed = models.IntegerField() + skipped = models.IntegerField() + failed = models.IntegerField() + + report = models.OneToOneField(CommitReport, on_delete=models.CASCADE) + + class Meta: + app_label = REPORTS_APP_LABEL + db_table = "reports_testresultreporttotals" From bb3a218fc9faad1258ce5b75479c1dbafd8edc26 Mon Sep 17 00:00:00 2001 From: Adrian Date: Wed, 13 Mar 2024 17:39:55 -0700 Subject: [PATCH 19/36] add more test code + factories --- .../services/org_level_token_service.py | 71 +++ .../codecov_auth/tests/test_models.py | 545 ++++++++++++++++++ shared/django_apps/core/tests/__init__.py | 0 shared/django_apps/core/tests/factories.py | 256 ++++++++ .../management/commands/migrate.py | 2 +- shared/django_apps/reports/tests/__init__.py | 0 shared/django_apps/reports/tests/factories.py | 105 ++++ 7 files changed, 978 insertions(+), 1 deletion(-) create mode 100644 shared/django_apps/codecov_auth/services/org_level_token_service.py create mode 
100644 shared/django_apps/codecov_auth/tests/test_models.py create mode 100644 shared/django_apps/core/tests/__init__.py create mode 100644 shared/django_apps/core/tests/factories.py create mode 100644 shared/django_apps/reports/tests/__init__.py create mode 100644 shared/django_apps/reports/tests/factories.py diff --git a/shared/django_apps/codecov_auth/services/org_level_token_service.py b/shared/django_apps/codecov_auth/services/org_level_token_service.py new file mode 100644 index 000000000..508b3de81 --- /dev/null +++ b/shared/django_apps/codecov_auth/services/org_level_token_service.py @@ -0,0 +1,71 @@ +import logging +import uuid + +from django.db.models.signals import post_save +from django.dispatch import receiver +from django.forms import ValidationError + +from shared.django_apps.codecov_auth.models import OrganizationLevelToken, Owner +from shared.plan.constants import USER_PLAN_REPRESENTATIONS + +log = logging.getLogger(__name__) + + +class OrgLevelTokenService(object): + """ + Groups some basic CRUD functionality to create and delete OrganizationLevelToken. + Restrictions: + -- only 1 token per Owner + """ + + @classmethod + def org_can_have_upload_token(cls, org: Owner): + return org.plan in USER_PLAN_REPRESENTATIONS + + @classmethod + def get_or_create_org_token(cls, org: Owner): + if not cls.org_can_have_upload_token(org): + raise ValidationError( + "Organization-wide upload tokens are not available for your organization." + ) + token, created = OrganizationLevelToken.objects.get_or_create(owner=org) + if created: + log.info( + "New OrgLevelToken created", + extra=dict( + ownerid=org.ownerid, + valid_until=token.valid_until, + token_type=token.token_type, + ), + ) + return token + + @classmethod + def refresh_token(cls, tokenid: int): + try: + token = OrganizationLevelToken.objects.get(id=tokenid) + token.token = uuid.uuid4() + token.save() + except OrganizationLevelToken.DoesNotExist: + raise ValidationError( + "Token to refresh was not found", params=dict(tokenid=tokenid) + ) + + @classmethod + def delete_org_token_if_exists(cls, org: Owner): + try: + org_token = OrganizationLevelToken.objects.get(owner=org) + org_token.delete() + except OrganizationLevelToken.DoesNotExist: + pass + + +@receiver(post_save, sender=Owner) +def manage_org_tokens_if_owner_plan_changed(sender, instance: Owner, **kwargs): + """ + Gets executed after saving an Owner instance to DB.
+ Manages OrganizationLevelToken according to Owner plan, either creating or deleting them as necessary + """ + owner_can_have_org_token = OrgLevelTokenService.org_can_have_upload_token(instance) + if not owner_can_have_org_token: + OrgLevelTokenService.delete_org_token_if_exists(instance) diff --git a/shared/django_apps/codecov_auth/tests/test_models.py b/shared/django_apps/codecov_auth/tests/test_models.py new file mode 100644 index 000000000..40ae2e7d3 --- /dev/null +++ b/shared/django_apps/codecov_auth/tests/test_models.py @@ -0,0 +1,545 @@ +from unittest.mock import patch + +from django.forms import ValidationError +from django.test import TransactionTestCase + +from shared.django_apps.codecov_auth.models import ( + DEFAULT_AVATAR_SIZE, + INFINITY, + SERVICE_BITBUCKET, + SERVICE_BITBUCKET_SERVER, + SERVICE_CODECOV_ENTERPRISE, + SERVICE_GITHUB, + SERVICE_GITHUB_ENTERPRISE, + GithubAppInstallation, + OrganizationLevelToken, + Service, +) +from shared.django_apps.codecov_auth.tests.factories import ( + OrganizationLevelTokenFactory, + OwnerFactory, +) +from shared.django_apps.core.tests.factories import RepositoryFactory + + +class TestOwnerModel(TransactionTestCase): + def setUp(self): + self.owner = OwnerFactory(username="codecov_name", email="name@codecov.io") + + def test_repo_total_credits_returns_correct_repos_for_legacy_plan(self): + self.owner.plan = "5m" + assert self.owner.repo_total_credits == 5 + + def test_repo_total_credits_returns_correct_repos_for_v4_plan(self): + self.owner.plan = "v4-100m" + assert self.owner.repo_total_credits == 100 + + def test_repo_total_credits_returns_infinity_for_user_plans(self): + users_plans = ("users", "users-inappm", "users-inappy", "users-free") + for plan in users_plans: + self.owner.plan = plan + assert self.owner.repo_total_credits == INFINITY + + def test_repo_credits_accounts_for_currently_active_private_repos(self): + self.owner.plan = "5m" + RepositoryFactory(author=self.owner, active=True, private=True) + + assert self.owner.repo_credits == 4 + + def test_repo_credits_ignores_active_public_repos(self): + self.owner.plan = "5m" + RepositoryFactory(author=self.owner, active=True, private=True) + RepositoryFactory(author=self.owner, active=True, private=False) + + assert self.owner.repo_credits == 4 + + def test_repo_credits_returns_infinity_for_user_plans(self): + users_plans = ("users", "users-inappm", "users-inappy", "users-free") + for plan in users_plans: + self.owner.plan = plan + assert self.owner.repo_credits == INFINITY + + def test_repo_credits_treats_null_plan_as_free_plan(self): + self.owner.plan = None + self.owner.save() + assert self.owner.repo_credits == 1 + self.owner.free or 0 + + def test_nb_active_private_repos(self): + owner = OwnerFactory() + RepositoryFactory(author=owner, active=True, private=True) + RepositoryFactory(author=owner, active=True, private=False) + RepositoryFactory(author=owner, active=False, private=True) + RepositoryFactory(author=owner, active=False, private=False) + + assert owner.nb_active_private_repos == 1 + + def test_plan_is_null_when_validating_form(self): + owner = OwnerFactory() + owner.plan = "" + owner.stripe_customer_id = "" + owner.stripe_subscription_id = "" + owner.clean() + assert owner.plan == None + assert owner.stripe_customer_id == None + assert owner.stripe_subscription_id == None + + def test_setting_staff_on_for_not_a_codecov_member(self): + user_not_part_of_codecov = OwnerFactory(email="user@notcodecov.io", staff=True) + with self.assertRaises(ValidationError): + 
user_not_part_of_codecov.clean() + + def test_setting_staff_on_with_email_null(self): + user_with_null_email = OwnerFactory(email=None, staff=True) + with self.assertRaises(ValidationError): + user_with_null_email.clean() + + @patch("shared.django_apps.codecov_auth.models.get_config") + def test_main_avatar_url_services(self, mock_get_config): + test_cases = [ + { + "service": SERVICE_GITHUB, + "get_config": None, + "expected": f"https://avatars0.githubusercontent.com/u/{self.owner.service_id}?v=3&s={DEFAULT_AVATAR_SIZE}", + }, + { + "service": SERVICE_GITHUB_ENTERPRISE, + "get_config": "github_enterprise", + "expected": f"github_enterprise/avatars/u/{self.owner.service_id}?v=3&s={DEFAULT_AVATAR_SIZE}", + }, + { + "service": SERVICE_BITBUCKET, + "get_config": None, + "expected": f"https://bitbucket.org/account/codecov_name/avatar/{DEFAULT_AVATAR_SIZE}", + }, + ] + for i in range(0, len(test_cases)): + with self.subTest(i=i): + mock_get_config.return_value = test_cases[i]["get_config"] + self.owner.service = test_cases[i]["service"] + self.assertEqual(self.owner.avatar_url, test_cases[i]["expected"]) + + @patch("shared.django_apps.codecov_auth.models.get_config") + def test_bitbucket_without_u_url(self, mock_get_config): + def side_effect(*args): + if ( + len(args) == 2 + and args[0] == SERVICE_BITBUCKET_SERVER + and args[1] == "url" + ): + return SERVICE_BITBUCKET_SERVER + + mock_get_config.side_effect = side_effect + self.owner.service = SERVICE_BITBUCKET_SERVER + self.assertEqual( + self.owner.avatar_url, + f"bitbucket_server/projects/codecov_name/avatar.png?s={DEFAULT_AVATAR_SIZE}", + ) + + @patch("shared.django_apps.codecov_auth.models.get_config") + def test_bitbucket_with_u_url(self, mock_get_config): + def side_effect(*args): + if ( + len(args) == 2 + and args[0] == SERVICE_BITBUCKET_SERVER + and args[1] == "url" + ): + return SERVICE_BITBUCKET_SERVER + + mock_get_config.side_effect = side_effect + self.owner.service = SERVICE_BITBUCKET_SERVER + self.owner.service_id = "U1234" + self.assertEqual( + self.owner.avatar_url, + f"bitbucket_server/users/codecov_name/avatar.png?s={DEFAULT_AVATAR_SIZE}", + ) + + @patch("shared.django_apps.codecov_auth.models.get_gitlab_url") + def test_gitlab_service(self, mock_gitlab_url): + mock_gitlab_url.return_value = "gitlab_url" + self.owner.service = "gitlab" + self.assertEqual(self.owner.avatar_url, "gitlab_url") + self.assertTrue(mock_gitlab_url.called_once()) + + @patch("shared.django_apps.codecov_auth.models.get_config") + def test_gravatar_url(self, mock_get_config): + def side_effect(*args): + if len(args) == 2 and args[0] == "services" and args[1] == "gravatar": + return "gravatar" + + mock_get_config.side_effect = side_effect + self.owner.service = None + self.assertEqual( + self.owner.avatar_url, + f"https://www.gravatar.com/avatar/9a74a018e6162103a2845e22ec5d88ef?s={DEFAULT_AVATAR_SIZE}", + ) + + @patch("shared.django_apps.codecov_auth.models.get_config") + def test_avatario_url(self, mock_get_config): + def side_effect(*args): + if len(args) == 2 and args[0] == "services" and args[1] == "avatars.io": + return "avatars.io" + + mock_get_config.side_effect = side_effect + self.owner.service = None + self.assertEqual( + self.owner.avatar_url, + f"https://avatars.io/avatar/9a74a018e6162103a2845e22ec5d88ef/{DEFAULT_AVATAR_SIZE}", + ) + + @patch("shared.django_apps.codecov_auth.models.get_config") + def test_ownerid_url(self, mock_get_config): + def side_effect(*args): + if len(args) == 2 and args[0] == "setup" and args[1] == "codecov_url": + 
return "codecov_url" + + mock_get_config.side_effect = side_effect + self.owner.service = None + self.assertEqual( + self.owner.avatar_url, + f"codecov_url/users/{self.owner.ownerid}.png?size={DEFAULT_AVATAR_SIZE}", + ) + + @patch("shared.django_apps.codecov_auth.models.get_config") + @patch("shared.django_apps.codecov_auth.models.os.getenv") + def test_service_codecov_enterprise_url(self, mock_getenv, mock_get_config): + def side_effect(*args): + if len(args) == 2 and args[0] == "setup" and args[1] == "codecov_url": + return "codecov_url" + + mock_get_config.side_effect = side_effect + mock_getenv.return_value = SERVICE_CODECOV_ENTERPRISE + self.owner.service = None + self.owner.ownerid = None + self.assertEqual( + self.owner.avatar_url, "codecov_url/media/images/gafsi/avatar.svg" + ) + + @patch("shared.django_apps.codecov_auth.models.get_config") + def test_service_codecov_media_url(self, mock_get_config): + def side_effect(*args): + if ( + len(args) == 3 + and args[0] == "setup" + and args[1] == "media" + and args[2] == "assets" + ): + return "codecov_url_media" + + mock_get_config.side_effect = side_effect + self.owner.service = None + self.owner.ownerid = None + self.assertEqual( + self.owner.avatar_url, "codecov_url_media/media/images/gafsi/avatar.svg" + ) + + def test_is_admin_returns_false_if_admin_array_is_null(self): + assert self.owner.is_admin(OwnerFactory()) is False + + def test_is_admin_returns_true_when_comparing_with_self(self): + assert self.owner.is_admin(self.owner) is True + + def test_is_admin_returns_true_if_ownerid_in_admin_array(self): + owner = OwnerFactory() + self.owner.admins = [owner.ownerid] + assert self.owner.is_admin(owner) is True + + def test_is_admin_returns_false_if_ownerid_not_in_admin_array(self): + owner = OwnerFactory() + self.owner.admins = [] + assert self.owner.is_admin(owner) is False + + def test_activated_user_count_returns_num_activated_users(self): + owner = OwnerFactory( + plan_activated_users=[OwnerFactory().ownerid, OwnerFactory().ownerid] + ) + assert owner.activated_user_count == 2 + + def test_activated_user_count_returns_0_if_plan_activated_users_is_null(self): + owner = OwnerFactory(plan_activated_users=None) + assert owner.plan_activated_users == None + assert owner.activated_user_count == 0 + + def test_activated_user_count_ignores_students(self): + student = OwnerFactory(student=True) + self.owner.plan_activated_users = [student.ownerid] + self.owner.save() + assert self.owner.activated_user_count == 0 + + def test_activate_user_adds_ownerid_to_plan_activated_users(self): + to_activate = OwnerFactory() + self.owner.activate_user(to_activate) + self.owner.refresh_from_db() + assert to_activate.ownerid in self.owner.plan_activated_users + + def test_activate_user_does_nothing_if_user_is_activated(self): + to_activate = OwnerFactory() + self.owner.plan_activated_users = [to_activate.ownerid] + self.owner.save() + self.owner.activate_user(to_activate) + self.owner.refresh_from_db() + assert self.owner.plan_activated_users == [to_activate.ownerid] + + def test_deactivate_removes_ownerid_from_plan_activated_users(self): + to_deactivate = OwnerFactory() + self.owner.plan_activated_users = [3, 4, to_deactivate.ownerid] + self.owner.save() + self.owner.deactivate_user(to_deactivate) + self.owner.refresh_from_db() + assert to_deactivate.ownerid not in self.owner.plan_activated_users + + def test_deactivate_non_activated_user_doesnt_crash(self): + to_deactivate = OwnerFactory() + self.owner.plan_activated_users = [] + self.owner.save() + 
self.owner.deactivate_user(to_deactivate) + + def test_can_activate_user_returns_true_if_user_is_student(self): + student = OwnerFactory(student=True) + assert self.owner.can_activate_user(student) is True + + def test_can_activate_user_returns_true_if_activated_user_count_not_maxed(self): + to_activate = OwnerFactory() + existing_user = OwnerFactory(ownerid=1000, student=False) + self.owner.plan_activated_users = [existing_user.ownerid] + self.owner.plan_user_count = 2 + self.owner.save() + assert self.owner.can_activate_user(to_activate) is True + + def test_can_activate_user_factors_free_seats_into_total_allowed(self): + to_activate = OwnerFactory() + self.owner.free = 1 + self.owner.plan_user_count = 0 + self.owner.save() + assert self.owner.can_activate_user(to_activate) is True + + def test_add_admin_adds_ownerid_to_admin_array(self): + self.owner.admins = [] + self.owner.save() + admin = OwnerFactory() + self.owner.add_admin(admin) + + self.owner.refresh_from_db() + assert admin.ownerid in self.owner.admins + + def test_add_admin_creates_array_if_null(self): + self.owner.admins = None + self.owner.save() + admin = OwnerFactory() + self.owner.add_admin(admin) + + self.owner.refresh_from_db() + assert self.owner.admins == [admin.ownerid] + + def test_add_admin_doesnt_add_if_ownerid_already_in_admins(self): + admin = OwnerFactory() + self.owner.admins = [admin.ownerid] + self.owner.save() + + self.owner.add_admin(admin) + + self.owner.refresh_from_db() + assert self.owner.admins == [admin.ownerid] + + def test_remove_admin_removes_ownerid_from_admins(self): + admin1 = OwnerFactory() + admin2 = OwnerFactory() + self.owner.admins = [admin1.ownerid, admin2.ownerid] + self.owner.save() + + self.owner.remove_admin(admin1) + + self.owner.refresh_from_db() + assert self.owner.admins == [admin2.ownerid] + + def test_remove_admin_does_nothing_if_user_not_admin(self): + admin1 = OwnerFactory() + admin2 = OwnerFactory() + self.owner.admins = [admin1.ownerid] + self.owner.save() + + self.owner.remove_admin(admin2) + + self.owner.refresh_from_db() + assert self.owner.admins == [admin1.ownerid] + + def test_access_no_root_organization(self): + assert self.owner.root_organization == None + + def test_access_root_organization(self): + root = OwnerFactory(service="gitlab") + parent = OwnerFactory(parent_service_id=root.service_id, service="gitlab") + self.owner.parent_service_id = parent.service_id + self.owner.service = "gitlab" + self.owner.save() + + # In some cases, there will be a 4th query from OrganizationLevelToken. There's a hook that runs after Owner is saved + # to see if an org-wide token should be deleted. 
For cases when it should be deleted, the number of queries becomes 4 + with self.assertNumQueries(3): + assert self.owner.root_organization == root + + # cache the root organization id + assert self.owner.root_parent_service_id == root.service_id + + with self.assertNumQueries(1): + self.owner.root_organization + + def test_inactive_users_count(self): + org = OwnerFactory() + + activated_user = OwnerFactory() + activated_user_in_org = OwnerFactory(organizations=[org.ownerid]) + activated_student = OwnerFactory(student=True) + activated_student_in_org = OwnerFactory( + organizations=[org.ownerid], student=True + ) + + inactive_student_in_org = OwnerFactory( + organizations=[org.ownerid], student=True + ) + inactive_user_in_org = OwnerFactory(organizations=[org.ownerid]) + + org.plan_activated_users = [ + activated_user.ownerid, + activated_user_in_org.ownerid, + activated_student.ownerid, + activated_student_in_org.ownerid, + ] + org.save() + + self.assertEqual(org.inactive_user_count, 1) + + def test_student_count(self): + org = OwnerFactory(service=Service.GITHUB.value, service_id="1") + + activated_user = OwnerFactory() + activated_user_in_org = OwnerFactory(organizations=[org.ownerid]) + activated_student = OwnerFactory(student=True) + activated_student_in_org = OwnerFactory( + organizations=[org.ownerid], student=True + ) + + inactive_student_in_org = OwnerFactory( + organizations=[org.ownerid], student=True + ) + inactive_user_in_org = OwnerFactory(organizations=[org.ownerid]) + + org.plan_activated_users = [ + activated_user.ownerid, + activated_user_in_org.ownerid, + activated_student.ownerid, + activated_student_in_org.ownerid, + ] + org.save() + + self.assertEqual(org.student_count, 3) + + def test_has_yaml(self): + org = OwnerFactory(yaml=None) + assert org.has_yaml is False + org.yaml = {"require_ci_to_pass": True} + org.save() + assert org.has_yaml is True + + +class TestOrganizationLevelTokenModel(TransactionTestCase): + def test_can_save_org_token_for_org_basic_plan(self): + owner = OwnerFactory(plan="users-basic") + owner.save() + token = OrganizationLevelToken(owner=owner) + token.save() + assert OrganizationLevelToken.objects.filter(owner=owner).count() == 1 + + @patch( + "shared.django_apps.codecov_auth.services.org_level_token_service.OrgLevelTokenService.org_can_have_upload_token" + ) + def test_token_is_deleted_when_changing_user_plan( + self, mocked_org_can_have_upload_token + ): + mocked_org_can_have_upload_token.return_value = False + owner = OwnerFactory(plan="users-enterprisem") + org_token = OrganizationLevelTokenFactory(owner=owner) + owner.save() + org_token.save() + assert OrganizationLevelToken.objects.filter(owner=owner).count() == 1 + owner.plan = "users-basic" + owner.save() + assert OrganizationLevelToken.objects.filter(owner=owner).count() == 0 + + +class TestGithubAppInstallationModel(TransactionTestCase): + def test_covers_all_repos(self): + owner = OwnerFactory() + repo1 = RepositoryFactory(author=owner) + repo2 = RepositoryFactory(author=owner) + repo3 = RepositoryFactory(author=owner) + other_repo_different_owner = RepositoryFactory() + installation_obj = GithubAppInstallation( + owner=owner, + repository_service_ids=None, + installation_id=100, + ) + installation_obj.save() + assert installation_obj.name == "codecov_app_installation" + assert installation_obj.covers_all_repos() == True + assert installation_obj.is_repo_covered_by_integration(repo1) == True + assert ( + installation_obj.is_repo_covered_by_integration(other_repo_different_owner) + == 
False + ) + assert list(owner.github_app_installations.all()) == [installation_obj] + assert installation_obj.repository_queryset().exists() + assert set(installation_obj.repository_queryset().all()) == set( + [repo1, repo2, repo3] + ) + + def test_covers_some_repos(self): + owner = OwnerFactory() + repo = RepositoryFactory(author=owner) + same_owner_other_repo = RepositoryFactory(author=owner) + other_repo_different_owner = RepositoryFactory() + installation_obj = GithubAppInstallation( + owner=owner, + repository_service_ids=[repo.service_id], + installation_id=100, + ) + installation_obj.save() + assert installation_obj.covers_all_repos() == False + assert installation_obj.is_repo_covered_by_integration(repo) == True + assert ( + installation_obj.is_repo_covered_by_integration(other_repo_different_owner) + == False + ) + assert ( + installation_obj.is_repo_covered_by_integration(same_owner_other_repo) + == False + ) + assert list(owner.github_app_installations.all()) == [installation_obj] + assert installation_obj.repository_queryset().exists() + assert list(installation_obj.repository_queryset().all()) == [repo] + + def test_is_configured(self): + owner = OwnerFactory() + installation_default = GithubAppInstallation( + owner=owner, repository_service_ids=None, installation_id=100 + ) + installation_configured = GithubAppInstallation( + owner=owner, + repository_service_ids=None, + name="my_installation", + installation_id=100, + app_id=123, + pem_path="some_path", + ) + installation_not_configured = GithubAppInstallation( + owner=owner, + repository_service_ids=None, + installation_id=100, + name="my_other_installation", + app_id=1234, + ) + installation_default.save() + installation_configured.save() + installation_not_configured.save() + assert installation_default.is_configured() == True + assert installation_configured.is_configured() == True + assert installation_not_configured.is_configured() == False diff --git a/shared/django_apps/core/tests/__init__.py b/shared/django_apps/core/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/core/tests/factories.py b/shared/django_apps/core/tests/factories.py new file mode 100644 index 000000000..c50baf143 --- /dev/null +++ b/shared/django_apps/core/tests/factories.py @@ -0,0 +1,256 @@ +import random +from hashlib import sha1 + +import factory +from django.utils import timezone +from factory.django import DjangoModelFactory + +from shared.django_apps.codecov_auth.models import RepositoryToken +from shared.django_apps.codecov_auth.tests.factories import OwnerFactory +from shared.django_apps.core import models + + +class RepositoryFactory(DjangoModelFactory): + class Meta: + model = models.Repository + + private = True + name = factory.Faker("word") + service_id = factory.Sequence(lambda n: f"{n}") + author = factory.SubFactory(OwnerFactory) + language = factory.Iterator( + [language.value for language in models.Repository.Languages] + ) + languages = [] + fork = None + branch = "master" + upload_token = factory.Faker("uuid4") + image_token = factory.Faker("pystr", min_chars=10, max_chars=10) + using_integration = False + + +class CommitFactory(DjangoModelFactory): + class Meta: + model = models.Commit + + commitid = factory.LazyAttribute( + lambda o: sha1(o.message.encode("utf-8")).hexdigest() + ) + message = factory.Faker("sentence", nb_words=7) + ci_passed = True + pullid = 1 + author = factory.SubFactory(OwnerFactory) + repository = factory.SubFactory(RepositoryFactory) + branch = "master" + 
totals = { + "C": 0, + "M": 0, + "N": 0, + "b": 0, + "c": "85.00000", + "d": 0, + "diff": [1, 2, 1, 1, 0, "50.00000", 0, 0, 0, 0, 0, 0, 0], + "f": 3, + "h": 17, + "m": 3, + "n": 20, + "p": 0, + "s": 1, + } + parent_commit_id = factory.LazyAttribute( + lambda o: sha1((o.message + "parent").encode("utf-8")).hexdigest() + ) + state = "complete" + + +class CommitWithReportFactory(CommitFactory): + @classmethod + def _create(cls, model_class, *args, **kwargs): + commit = super()._create(model_class, *args, **kwargs) + + # The following replaces the old `commits.report` JSON column + # TODO: we may want to find another way to create this since the imports below + # create a cyclic dependency + + from reports.tests.factories import ( + CommitReportFactory, + ReportDetailsFactory, + ReportLevelTotalsFactory, + UploadFactory, + UploadFlagMembershipFactory, + UploadLevelTotalsFactory, + ) + + commit_report = CommitReportFactory(commit=commit) + ReportDetailsFactory( + report=commit_report, + _files_array=[ + { + "filename": "tests/__init__.py", + "file_index": 0, + "file_totals": [0, 3, 2, 1, 0, "66.66667", 0, 0, 0, 0, 0, 0, 0], + "session_totals": [ + [0, 3, 2, 1, 0, "66.66667", 0, 0, 0, 0, 0, 0, 0] + ], + "diff_totals": None, + }, + { + "filename": "tests/test_sample.py", + "file_index": 1, + "file_totals": [0, 7, 7, 0, 0, "100", 0, 0, 0, 0, 0, 0, 0], + "session_totals": [[0, 7, 7, 0, 0, "100", 0, 0, 0, 0, 0, 0, 0]], + "diff_totals": None, + }, + { + "filename": "awesome/__init__.py", + "file_index": 2, + "file_totals": [0, 10, 8, 2, 0, "80.00000", 0, 0, 0, 0, 0, 0, 0], + "session_totals": [ + [0, 10, 8, 2, 0, "80.00000", 0, 0, 0, 0, 0, 0, 0] + ], + "diff_totals": [0, 2, 1, 1, 0, "50.00000", 0, 0, 0, 0, 0, 0, 0], + }, + ], + ) + ReportLevelTotalsFactory( + report=commit_report, + files=3, + lines=20, + hits=17, + misses=3, + partials=0, + coverage=85.0, + branches=0, + methods=0, + ) + + flag_unittests, created = commit.repository.flags.get_or_create( + flag_name="unittests" + ) + flag_integrations, created = commit.repository.flags.get_or_create( + flag_name="integrations" + ) + + upload1 = UploadFactory( + report=commit_report, + order_number=0, + storage_path="v4/raw/2019-01-10/4434BC2A2EC4FCA57F77B473D83F928C/abf6d4df662c47e32460020ab14abf9303581429/9ccc55a1-8b41-4bb1-a946-ee7a33a7fb56.txt", + ) + UploadLevelTotalsFactory( + report_session=upload1, + files=3, + lines=20, + hits=17, + misses=3, + partials=0, + coverage=85.0, + branches=0, + methods=0, + ) + UploadFlagMembershipFactory( + report_session=upload1, + flag=flag_unittests, + ) + + upload2 = UploadFactory( + report=commit_report, + order_number=1, + storage_path="v4/raw/2019-01-10/4434BC2A2EC4FCA57F77B473D83F928C/abf6d4df662c47e32460020ab14abf9303581429/9ccc55a1-8b41-4bb1-a946-ee7a33a7fb56.txt", + ) + UploadLevelTotalsFactory( + report_session=upload2, + files=3, + lines=20, + hits=17, + misses=3, + partials=0, + coverage=85.0, + branches=0, + methods=0, + ) + UploadFlagMembershipFactory( + report_session=upload2, + flag=flag_integrations, + ) + + return commit + + +class PullFactory(DjangoModelFactory): + class Meta: + model = models.Pull + + pullid = factory.Sequence(lambda n: n) + issueid = random.randint(1, 1000) + commentid = factory.LazyAttribute( + lambda o: sha1(o.title.encode("utf-8")).hexdigest() + ) + _flare = { + "name": "", + "color": "#e05d44", + "lines": 14, + "_class": None, + "children": [ + { + "name": "tests.py", + "color": "#baaf1b", + "lines": 7, + "_class": None, + "coverage": "85.71429", + } + ], + } + diff = 
[2, 3, 0, 3, 0, "0", 0, 0, 0, 0, 0, 0, 0] + title = factory.Faker("sentence", nb_words=7) + head = factory.LazyAttribute(lambda o: sha1(o.title.encode("utf-8")).hexdigest()) + base = factory.LazyAttribute(lambda o: sha1(o.title.encode("utf-8")).hexdigest()) + compared_to = factory.LazyAttribute( + lambda o: sha1(o.title.encode("utf-8")).hexdigest() + ) + updatestamp = factory.LazyFunction(timezone.now) + + +class BranchFactory(DjangoModelFactory): + class Meta: + model = models.Branch + + repository = factory.SubFactory(RepositoryFactory) + name = factory.Faker("sentence", nb_words=1) + head = factory.LazyAttribute(lambda o: sha1(o.name.encode("utf-8")).hexdigest()) + + +class ConstantsFactory(DjangoModelFactory): + class Meta: + model = models.Constants + + +class VersionFactory(DjangoModelFactory): + class Meta: + model = models.Version + + +class RepositoryTokenFactory(DjangoModelFactory): + repository = factory.SubFactory(RepositoryFactory) + key = factory.LazyFunction(RepositoryToken.generate_key) + token_type = "profiling" + + class Meta: + model = RepositoryToken + + +class CommitErrorFactory(DjangoModelFactory): + class Meta: + model = models.CommitError + + commit = factory.SubFactory(CommitFactory) + error_code = factory.Faker("") + + +class CommitNotificationFactory(DjangoModelFactory): + commit = factory.SubFactory(CommitFactory) + notification_type = models.CommitNotification.NotificationTypes.COMMENT + decoration_type = models.CommitNotification.DecorationTypes.STANDARD + state = models.CommitNotification.States.SUCCESS + + class Meta: + model = models.CommitNotification diff --git a/shared/django_apps/legacy_migrations/management/commands/migrate.py b/shared/django_apps/legacy_migrations/management/commands/migrate.py index 06dd48d9d..97300aef4 100644 --- a/shared/django_apps/legacy_migrations/management/commands/migrate.py +++ b/shared/django_apps/legacy_migrations/management/commands/migrate.py @@ -83,7 +83,7 @@ def _obtain_lock(self): def handle(self, *args, **options): log.info("Codecov is starting migrations...") - print("inside the overwritten migrate comman") + print("inside the overwritten migrate command") database = options["database"] db_connection = connections[database] options["run_syncdb"] = False diff --git a/shared/django_apps/reports/tests/__init__.py b/shared/django_apps/reports/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/reports/tests/factories.py b/shared/django_apps/reports/tests/factories.py new file mode 100644 index 000000000..8e712f898 --- /dev/null +++ b/shared/django_apps/reports/tests/factories.py @@ -0,0 +1,105 @@ +import enum + +import factory +from core.tests.factories import CommitFactory, RepositoryFactory +from factory.django import DjangoModelFactory + +from shared.django_apps.reports import models +from shared.django_apps.reports.models import ReportResults + + +# TODO: deduplicate this from graphql_api.types.enums +class UploadErrorEnum(enum.Enum): + FILE_NOT_IN_STORAGE = "file_not_in_storage" + REPORT_EXPIRED = "report_expired" + REPORT_EMPTY = "report_empty" + + +class CommitReportFactory(DjangoModelFactory): + class Meta: + model = models.CommitReport + + commit = factory.SubFactory(CommitFactory) + + +class UploadFactory(DjangoModelFactory): + class Meta: + model = models.ReportSession + + build_code = factory.Sequence(lambda n: f"{n}") + report = factory.SubFactory(CommitReportFactory) + state = "processed" + + +class RepositoryFlagFactory(DjangoModelFactory): + class Meta: + 
model = models.RepositoryFlag + + repository = factory.SubFactory(RepositoryFactory) + flag_name = factory.Faker("word") + + +class UploadFlagMembershipFactory(DjangoModelFactory): + class Meta: + model = models.UploadFlagMembership + + flag = factory.SubFactory(RepositoryFlagFactory) + report_session = factory.SubFactory(UploadFactory) + + +class ReportLevelTotalsFactory(DjangoModelFactory): + class Meta: + model = models.ReportLevelTotals + + report = factory.SubFactory(CommitReportFactory) + branches = factory.Faker("pyint") + coverage = factory.Faker("pydecimal", min_value=10, max_value=90, right_digits=2) + hits = factory.Faker("pyint") + lines = factory.Faker("pyint") + methods = factory.Faker("pyint") + misses = factory.Faker("pyint") + partials = factory.Faker("pyint") + files = factory.Faker("pyint") + + +class UploadLevelTotalsFactory(DjangoModelFactory): + class Meta: + model = models.UploadLevelTotals + + report_session = factory.SubFactory(UploadFactory) + + +class ReportDetailsFactory(DjangoModelFactory): + class Meta: + model = models.ReportDetails + + report = factory.SubFactory(CommitReportFactory) + _files_array = factory.LazyAttribute(lambda _: []) + _files_array_storage_path = None + + +class UploadErrorFactory(DjangoModelFactory): + class Meta: + model = models.UploadError + + report_session = factory.SubFactory(UploadFactory) + error_code = factory.Iterator( + [ + UploadErrorEnum.FILE_NOT_IN_STORAGE, + UploadErrorEnum.REPORT_EMPTY, + UploadErrorEnum.REPORT_EXPIRED, + ] + ) + + +class ReportResultsFactory(DjangoModelFactory): + class Meta: + model = ReportResults + + report = factory.SubFactory(CommitReportFactory) + state = factory.Iterator( + [ + ReportResults.ReportResultsStates.PENDING, + ReportResults.ReportResultsStates.COMPLETED, + ] + ) From d7b4a4b67ef760c6075bf84d9e1765c3cb3bcfc8 Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 14 Mar 2024 13:34:17 -0700 Subject: [PATCH 20/36] add test model field to Owner --- .../migrations/0054_owner_fake_field.py | 18 +++++++++++++ shared/django_apps/codecov_auth/models.py | 2 ++ shared/django_apps/dummy_settings.py | 25 +++++++++++++++++-- 3 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 shared/django_apps/codecov_auth/migrations/0054_owner_fake_field.py diff --git a/shared/django_apps/codecov_auth/migrations/0054_owner_fake_field.py b/shared/django_apps/codecov_auth/migrations/0054_owner_fake_field.py new file mode 100644 index 000000000..ea7e4c772 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0054_owner_fake_field.py @@ -0,0 +1,18 @@ +# Generated by Django 5.0.3 on 2024-03-14 20:33 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0053_ownerinstallationnametousefortask_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="owner", + name="fake_field", + field=models.TextField(blank=True, null=True, unique=True), + ), + ] diff --git a/shared/django_apps/codecov_auth/models.py b/shared/django_apps/codecov_auth/models.py index ad9b9c80f..30cc5e098 100644 --- a/shared/django_apps/codecov_auth/models.py +++ b/shared/django_apps/codecov_auth/models.py @@ -212,6 +212,8 @@ class Meta: sentry_user_id = models.TextField(null=True, blank=True, unique=True) sentry_user_data = models.JSONField(null=True) + fake_field = models.TextField(null=True, blank=True, unique=True) + user = models.ForeignKey( User, null=True, diff --git a/shared/django_apps/dummy_settings.py b/shared/django_apps/dummy_settings.py 
index 8b4a116cd..efad3e5ab 100644 --- a/shared/django_apps/dummy_settings.py +++ b/shared/django_apps/dummy_settings.py @@ -16,15 +16,36 @@ "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", + "django.contrib.messages", "django.contrib.postgres", "shared.django_apps.codecov_auth", "shared.django_apps.core", "shared.django_apps.reports", ] -MIDDLEWARE = [] +MIDDLEWARE = [ + "django.contrib.sessions.middleware.SessionMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", +] + +# Migrated from API +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ] + }, + } +] -TEMPLATES = [] TELEMETRY_VANILLA_DB = "default" TELEMETRY_TIMESCALE_DB = "timeseries" From 8c16a790a46660e5dfeb741b1456cee2a84b300a Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 14 Mar 2024 17:39:58 -0700 Subject: [PATCH 21/36] add more tests + settings to shared from api --- codecov.yml | 14 +- docker-compose.yml | 13 ++ shared/bundle_analysis/models.py | 3 +- shared/django_apps/dummy_settings.py | 7 + .../migrations/0001_initial.py | 8 +- shared/django_apps/reports/tests/factories.py | 2 +- .../django_apps/reports/tests/test_models.py | 142 ++++++++++++++++++ .../codecov_auth/test_codecov_auth_models.py | 0 .../unit/django_apps/core/test_core_models.py | 119 +++++++++++++++ .../reports/test_reports_models.py | 141 +++++++++++++++++ 10 files changed, 444 insertions(+), 5 deletions(-) create mode 100644 shared/django_apps/reports/tests/test_models.py rename shared/django_apps/codecov_auth/tests/test_models.py => tests/unit/django_apps/codecov_auth/test_codecov_auth_models.py (100%) create mode 100644 tests/unit/django_apps/core/test_core_models.py create mode 100644 tests/unit/django_apps/reports/test_reports_models.py diff --git a/codecov.yml b/codecov.yml index 869be920e..b5fb38d1d 100644 --- a/codecov.yml +++ b/codecov.yml @@ -11,8 +11,20 @@ ignore: beta_groups: - "labels" +services: + minio: + hash_key: 12345bf3f7d947f2a0681b2154067890 + verify_ssl: false + host: "minio" + port: 9000 + # bucket: + # region: + access_key_id: codecov-default-key + secret_access_key: codecov-default-secret + client_uploads: true + flag_management: individual_flags: - name: "smart-labels" carryforward: true - carryforward_mode: "labels" \ No newline at end of file + carryforward_mode: "labels" diff --git a/docker-compose.yml b/docker-compose.yml index c1ef2f336..bd2ab695e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -4,6 +4,7 @@ volumes: postgres-volume: timescale-volume: redis-volume: + archive-volume: services: shared: @@ -13,6 +14,7 @@ services: depends_on: - postgres - timescale + - minio volumes: - ./shared/:/app/shared - ./tests/:/app/tests @@ -49,3 +51,14 @@ services: target: /var/lib/postgresql/data tmpfs: size: 1024M + + minio: + image: minio/minio:RELEASE.2019-04-09T01-22-30Z + command: server /export + ports: + - "9000:9000" + environment: + - MINIO_ACCESS_KEY=codecov-default-key + - MINIO_SECRET_KEY=codecov-default-secret + volumes: + - archive-volume:/export diff --git a/shared/bundle_analysis/models.py b/shared/bundle_analysis/models.py index b7f80ba54..4e9f05d75 100644 --- a/shared/bundle_analysis/models.py +++ 
b/shared/bundle_analysis/models.py @@ -86,7 +86,6 @@ Base = declarative_base() - """ Create a custom context manager for SQLAlchemy session because worker is currently stuck on SQLAlchemy version <1.4, and built in context manager for session is introduced @@ -98,6 +97,8 @@ For now if the SQLAlchemy version is <1.4 it will go through the custom LegacySessionManager context manager object to handle opening and closing its sessions. """ + + class LegacySessionManager: def __init__(self, session: DbSession): self.session = session diff --git a/shared/django_apps/dummy_settings.py b/shared/django_apps/dummy_settings.py index efad3e5ab..8941f5d4b 100644 --- a/shared/django_apps/dummy_settings.py +++ b/shared/django_apps/dummy_settings.py @@ -23,6 +23,13 @@ "shared.django_apps.reports", ] +# Migrated from API to get Minio working +MINIO_ACCESS_KEY = get_config("services", "minio", "access_key_id") +MINIO_SECRET_KEY = get_config("services", "minio", "secret_access_key") +MINIO_LOCATION = "codecov.s3.amazonaws.com" +MINIO_HASH_KEY = get_config("services", "minio", "hash_key") +ARCHIVE_BUCKET_NAME = "codecov" + MIDDLEWARE = [ "django.contrib.sessions.middleware.SessionMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", diff --git a/shared/django_apps/legacy_migrations/migrations/0001_initial.py b/shared/django_apps/legacy_migrations/migrations/0001_initial.py index 2f110bdf9..7cfa95aa1 100644 --- a/shared/django_apps/legacy_migrations/migrations/0001_initial.py +++ b/shared/django_apps/legacy_migrations/migrations/0001_initial.py @@ -3,8 +3,12 @@ from django.conf import settings from django.db import migrations -from .legacy_sql.main.main import run_sql as main_run_sql -from .legacy_sql.upgrades.main import run_sql as upgrade_run_sql +from shared.django_apps.legacy_migrations.migrations.legacy_sql.main.main import ( + run_sql as main_run_sql, +) +from shared.django_apps.legacy_migrations.migrations.legacy_sql.upgrades.main import ( + run_sql as upgrade_run_sql, +) BASE_VERSION = "base" diff --git a/shared/django_apps/reports/tests/factories.py b/shared/django_apps/reports/tests/factories.py index 8e712f898..2e622b6c5 100644 --- a/shared/django_apps/reports/tests/factories.py +++ b/shared/django_apps/reports/tests/factories.py @@ -1,9 +1,9 @@ import enum import factory -from core.tests.factories import CommitFactory, RepositoryFactory from factory.django import DjangoModelFactory +from shared.django_apps.core.tests.factories import CommitFactory, RepositoryFactory from shared.django_apps.reports import models from shared.django_apps.reports.models import ReportResults diff --git a/shared/django_apps/reports/tests/test_models.py b/shared/django_apps/reports/tests/test_models.py new file mode 100644 index 000000000..d8df50148 --- /dev/null +++ b/shared/django_apps/reports/tests/test_models.py @@ -0,0 +1,142 @@ +import json +from unittest.mock import MagicMock, patch + +from django.test import TestCase + +from shared.django_apps.reports.models import ReportDetails +from shared.django_apps.reports.tests.factories import ( + ReportDetailsFactory, + RepositoryFlagFactory, + UploadFactory, + UploadFlagMembershipFactory, +) +from shared.storage.exceptions import FileNotInStorageError + + +class UploadTests(TestCase): + def test_get_download_url(self): + storage_path = "v4/123/123.txt" + session = UploadFactory(storage_path=storage_path) + repository = session.report.commit.repository + assert ( + session.download_url + == 
f"/upload/gh/{repository.author.username}/{repository.name}/download?path={storage_path}" + ) + + def test_ci_url_when_no_provider(self): + session = UploadFactory(provider=None) + assert session.ci_url is None + + def test_ci_url_when_provider_do_not_have_build_url(self): + session = UploadFactory(provider="azure_pipelines") + assert session.ci_url is None + + def test_ci_url_when_provider_has_build_url(self): + session = UploadFactory(provider="travis", job_code="123") + repo = session.report.commit.repository + assert ( + session.ci_url + == f"https://travis-ci.com/{repo.author.username}/{repo.name}/jobs/{session.job_code}" + ) + + def test_ci_url_when_db_has_build_url(self): + session = UploadFactory(build_url="http://example.com") + assert session.ci_url == "http://example.com" + + def test_flags(self): + session = UploadFactory() + flag_one = RepositoryFlagFactory() + flag_two = RepositoryFlagFactory() + # connect the flag and membership + UploadFlagMembershipFactory(flag=flag_one, report_session=session) + UploadFlagMembershipFactory(flag=flag_two, report_session=session) + + assert ( + session.flag_names.sort() == [flag_one.flag_name, flag_two.flag_name].sort() + ) + + +# @pytest.mark.skip(reason="moved to shared") +class ReportDetailsTests(TestCase): + sample_files_array = [ + { + "filename": "test_file_1.py", + "file_index": 2, + "file_totals": [1, 10, 8, 2, 5, "80.00000", 6, 7, 9, 8, 20, 40, 13], + "session_totals": [[0, 10, 8, 2, 0, "80.00000", 0, 0, 0, 0, 0, 0, 0]], + "diff_totals": [0, 2, 1, 1, 0, "50.00000", 0, 0, 0, 0, 0, 0, 0], + }, + { + "filename": "test_file_2.py", + "file_index": 0, + "file_totals": [1, 3, 2, 1, 0, "66.66667", 0, 0, 0, 0, 0, 0, 0], + "session_totals": [[0, 3, 2, 1, 0, "66.66667", 0, 0, 0, 0, 0, 0, 0]], + "diff_totals": None, + }, + ] + + @patch("shared.django_apps.utils.model_utils.ArchiveService") + def test_get_files_array_from_db(self, mock_archive): + details = ReportDetailsFactory() + mock_read_file = MagicMock() + mock_archive.return_value.read_file = mock_read_file + details._files_array = self.sample_files_array + details._files_array_storage_path = None + details.save() + + fetched = ReportDetails.objects.get(id=details.id) + assert fetched.files_array == self.sample_files_array + mock_archive.assert_not_called() + mock_read_file.assert_not_called() + + @patch("shared.django_apps.utils.model_utils.ArchiveService") + def test_get_files_array_from_storage(self, mock_archive): + details = ReportDetailsFactory() + storage_path = "https://storage/path/files_array.json" + mock_read_file = MagicMock(return_value=json.dumps(self.sample_files_array)) + mock_archive.return_value.read_file = mock_read_file + details._files_array = None + details._files_array_storage_path = storage_path + details.save() + + fetched = ReportDetails.objects.get(id=details.id) + assert fetched.files_array == self.sample_files_array + mock_archive.assert_called() + mock_read_file.assert_called_with(storage_path) + # Calls it again to test caching + assert fetched.files_array == self.sample_files_array + assert mock_archive.call_count == 1 + assert mock_read_file.call_count == 1 + # This one to help us understand caching across different instances + assert details.files_array == self.sample_files_array + assert mock_archive.call_count == 2 + assert mock_read_file.call_count == 2 + # Let's see for objects with different IDs + diff_details = ReportDetailsFactory() + storage_path = "https://storage/path/files_array.json" + diff_details._files_array = None + 
diff_details._files_array_storage_path = storage_path + diff_details.save() + assert diff_details.files_array == self.sample_files_array + assert mock_archive.call_count == 3 + assert mock_read_file.call_count == 3 + + @patch("shared.django_apps.utils.model_utils.ArchiveService") + def test_get_files_array_from_storage_file_not_found(self, mock_archive): + details = ReportDetailsFactory() + storage_path = "https://storage/path/files_array.json" + + def side_effect(*args, **kwargs): + raise FileNotInStorageError() + + mock_read_file = MagicMock(side_effect=side_effect) + mock_archive.return_value.read_file = mock_read_file + details._files_array = None + details._files_array_storage_path = storage_path + details.save() + + fetched = ReportDetails.objects.get(id=details.id) + assert fetched._files_array_storage_path == storage_path + assert fetched.files_array == [] + mock_archive.assert_called() + mock_read_file.assert_called_with(storage_path) diff --git a/shared/django_apps/codecov_auth/tests/test_models.py b/tests/unit/django_apps/codecov_auth/test_codecov_auth_models.py similarity index 100% rename from shared/django_apps/codecov_auth/tests/test_models.py rename to tests/unit/django_apps/codecov_auth/test_codecov_auth_models.py diff --git a/tests/unit/django_apps/core/test_core_models.py b/tests/unit/django_apps/core/test_core_models.py new file mode 100644 index 000000000..56ee75bfd --- /dev/null +++ b/tests/unit/django_apps/core/test_core_models.py @@ -0,0 +1,119 @@ +import json +from unittest.mock import MagicMock, patch + +from django.forms import ValidationError +from django.test import TestCase + +from shared.django_apps.core.models import Commit +from shared.django_apps.core.tests.factories import CommitFactory, RepositoryFactory +from shared.django_apps.reports.tests.factories import CommitReportFactory +from shared.storage.exceptions import FileNotInStorageError + + +class RepoTests(TestCase): + def test_clean_repo(self): + repo = RepositoryFactory(using_integration=None) + with self.assertRaises(ValidationError): + repo.clean() + + +class CommitTests(TestCase): + def test_commitreport_no_code(self): + commit = CommitFactory() + report1 = CommitReportFactory( + commit=commit, code="testing" + ) # this is a report for a "local upload" + report2 = CommitReportFactory(commit=commit, code=None) + assert commit.commitreport == report2 + + sample_report = { + "files": { + "different/test_file.py": [ + 2, + [0, 10, 8, 2, 0, "80.00000", 0, 0, 0, 0, 0, 0, 0], + [[0, 10, 8, 2, 0, "80.00000", 0, 0, 0, 0, 0, 0, 0]], + [0, 2, 1, 1, 0, "50.00000", 0, 0, 0, 0, 0, 0, 0], + ], + }, + "sessions": { + "0": { + "N": None, + "a": "v4/raw/2019-01-10/4434BC2A2EC4FCA57F77B473D83F928C/abf6d4df662c47e32460020ab14abf9303581429/9ccc55a1-8b41-4bb1-a946-ee7a33a7fb56.txt", + "c": None, + "d": 1547084427, + "e": None, + "f": ["unittests"], + "j": None, + "n": None, + "p": None, + "t": [3, 20, 17, 3, 0, "85.00000", 0, 0, 0, 0, 0, 0, 0], + "": None, + } + }, + } + + @patch("shared.django_apps.utils.model_utils.ArchiveService") + def test_get_report_from_db(self, mock_archive): + commit = CommitFactory() + mock_read_file = MagicMock() + mock_archive.return_value.read_file = mock_read_file + commit._report = self.sample_report + commit._files_array_storage_path = None + commit.save() + + fetched = Commit.objects.get(id=commit.id) + assert fetched.report == self.sample_report + mock_archive.assert_not_called() + mock_read_file.assert_not_called() + + @patch("shared.django_apps.utils.model_utils.ArchiveService") + 
def test_get_report_from_storage(self, mock_archive): + commit = CommitFactory() + storage_path = "https://storage/path/report.json" + mock_read_file = MagicMock(return_value=json.dumps(self.sample_report)) + mock_archive.return_value.read_file = mock_read_file + commit._report = None + commit._report_storage_path = storage_path + commit.save() + + fetched = Commit.objects.get(id=commit.id) + assert fetched.report == self.sample_report + mock_archive.assert_called() + mock_read_file.assert_called_with(storage_path) + # Calls it again to test caching + assert fetched.report == self.sample_report + assert mock_archive.call_count == 1 + assert mock_read_file.call_count == 1 + # This one to help us understand caching across different instances + assert commit.report == self.sample_report + assert mock_archive.call_count == 2 + assert mock_read_file.call_count == 2 + # Let's see for objects with different IDs + diff_commit = CommitFactory() + storage_path = "https://storage/path/files_array.json" + diff_commit._report = None + diff_commit._report_storage_path = storage_path + diff_commit.save() + assert diff_commit.report == self.sample_report + assert mock_archive.call_count == 3 + assert mock_read_file.call_count == 3 + + @patch("shared.django_apps.utils.model_utils.ArchiveService") + def test_get_report_from_storage_file_not_found(self, mock_archive): + commit = CommitFactory() + storage_path = "https://storage/path/files_array.json" + + def side_effect(*args, **kwargs): + raise FileNotInStorageError() + + mock_read_file = MagicMock(side_effect=side_effect) + mock_archive.return_value.read_file = mock_read_file + commit._report = None + commit._report_storage_path = storage_path + commit.save() + + fetched = Commit.objects.get(id=commit.id) + assert fetched._report_storage_path == storage_path + assert fetched.report == {} + mock_archive.assert_called() + mock_read_file.assert_called_with(storage_path) diff --git a/tests/unit/django_apps/reports/test_reports_models.py b/tests/unit/django_apps/reports/test_reports_models.py new file mode 100644 index 000000000..d12fdd787 --- /dev/null +++ b/tests/unit/django_apps/reports/test_reports_models.py @@ -0,0 +1,141 @@ +import json +from unittest.mock import MagicMock, patch + +from django.test import TestCase + +from shared.django_apps.reports.models import ReportDetails +from shared.django_apps.reports.tests.factories import ( + ReportDetailsFactory, + RepositoryFlagFactory, + UploadFactory, + UploadFlagMembershipFactory, +) +from shared.storage.exceptions import FileNotInStorageError + + +class UploadTests(TestCase): + def test_get_download_url(self): + storage_path = "v4/123/123.txt" + session = UploadFactory(storage_path=storage_path) + repository = session.report.commit.repository + assert ( + session.download_url + == f"/upload/gh/{repository.author.username}/{repository.name}/download?path={storage_path}" + ) + + def test_ci_url_when_no_provider(self): + session = UploadFactory(provider=None) + assert session.ci_url is None + + def test_ci_url_when_provider_do_not_have_build_url(self): + session = UploadFactory(provider="azure_pipelines") + assert session.ci_url is None + + def test_ci_url_when_provider_has_build_url(self): + session = UploadFactory(provider="travis", job_code="123") + repo = session.report.commit.repository + assert ( + session.ci_url + == f"https://travis-ci.com/{repo.author.username}/{repo.name}/jobs/{session.job_code}" + ) + + def test_ci_url_when_db_has_build_url(self): + session = 
UploadFactory(build_url="http://example.com") + assert session.ci_url == "http://example.com" + + def test_flags(self): + session = UploadFactory() + flag_one = RepositoryFlagFactory() + flag_two = RepositoryFlagFactory() + # connect the flag and membership + UploadFlagMembershipFactory(flag=flag_one, report_session=session) + UploadFlagMembershipFactory(flag=flag_two, report_session=session) + + assert ( + session.flag_names.sort() == [flag_one.flag_name, flag_two.flag_name].sort() + ) + + +class ReportDetailsTests(TestCase): + sample_files_array = [ + { + "filename": "test_file_1.py", + "file_index": 2, + "file_totals": [1, 10, 8, 2, 5, "80.00000", 6, 7, 9, 8, 20, 40, 13], + "session_totals": [[0, 10, 8, 2, 0, "80.00000", 0, 0, 0, 0, 0, 0, 0]], + "diff_totals": [0, 2, 1, 1, 0, "50.00000", 0, 0, 0, 0, 0, 0, 0], + }, + { + "filename": "test_file_2.py", + "file_index": 0, + "file_totals": [1, 3, 2, 1, 0, "66.66667", 0, 0, 0, 0, 0, 0, 0], + "session_totals": [[0, 3, 2, 1, 0, "66.66667", 0, 0, 0, 0, 0, 0, 0]], + "diff_totals": None, + }, + ] + + @patch("shared.django_apps.utils.model_utils.ArchiveService") + def test_get_files_array_from_db(self, mock_archive): + details = ReportDetailsFactory() + mock_read_file = MagicMock() + mock_archive.return_value.read_file = mock_read_file + details._files_array = self.sample_files_array + details._files_array_storage_path = None + details.save() + + fetched = ReportDetails.objects.get(id=details.id) + assert fetched.files_array == self.sample_files_array + mock_archive.assert_not_called() + mock_read_file.assert_not_called() + + @patch("shared.django_apps.utils.model_utils.ArchiveService") + def test_get_files_array_from_storage(self, mock_archive): + details = ReportDetailsFactory() + storage_path = "https://storage/path/files_array.json" + mock_read_file = MagicMock(return_value=json.dumps(self.sample_files_array)) + mock_archive.return_value.read_file = mock_read_file + details._files_array = None + details._files_array_storage_path = storage_path + details.save() + + fetched = ReportDetails.objects.get(id=details.id) + assert fetched.files_array == self.sample_files_array + mock_archive.assert_called() + mock_read_file.assert_called_with(storage_path) + # Calls it again to test caching + assert fetched.files_array == self.sample_files_array + assert mock_archive.call_count == 1 + assert mock_read_file.call_count == 1 + # This one to help us understand caching across different instances + assert details.files_array == self.sample_files_array + assert mock_archive.call_count == 2 + assert mock_read_file.call_count == 2 + # Let's see for objects with different IDs + diff_details = ReportDetailsFactory() + storage_path = "https://storage/path/files_array.json" + diff_details._files_array = None + diff_details._files_array_storage_path = storage_path + diff_details.save() + assert diff_details.files_array == self.sample_files_array + assert mock_archive.call_count == 3 + assert mock_read_file.call_count == 3 + + @patch("shared.django_apps.utils.model_utils.ArchiveService") + def test_get_files_array_from_storage_file_not_found(self, mock_archive): + details = ReportDetailsFactory() + storage_path = "https://storage/path/files_array.json" + + def side_effect(*args, **kwargs): + raise FileNotInStorageError() + + mock_read_file = MagicMock(side_effect=side_effect) + mock_archive.return_value.read_file = mock_read_file + details._files_array = None + details._files_array_storage_path = storage_path + details.save() + + fetched = 
ReportDetails.objects.get(id=details.id) + assert fetched._files_array_storage_path == storage_path + assert fetched.files_array == [] + mock_archive.assert_called() + mock_read_file.assert_called_with(storage_path) From 3450b88b5f31c42266b651c266dd1039f8585906 Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 14 Mar 2024 18:32:08 -0700 Subject: [PATCH 22/36] get rid of download_url as it's tethered to api code --- shared/django_apps/reports/models.py | 15 --------------- shared/django_apps/reports/tests/factories.py | 8 +++++++- 2 files changed, 7 insertions(+), 16 deletions(-) diff --git a/shared/django_apps/reports/models.py b/shared/django_apps/reports/models.py index f4b446761..3e2d1bb5a 100644 --- a/shared/django_apps/reports/models.py +++ b/shared/django_apps/reports/models.py @@ -197,21 +197,6 @@ class Meta: app_label = REPORTS_APP_LABEL db_table = "reports_upload" - @property - def download_url(self): - repository = self.report.commit.repository - return ( - reverse( - "upload-download", - kwargs={ - "service": get_short_service_name(repository.author.service), - "owner_username": repository.author.username, - "repo_name": repository.name, - }, - ) - + f"?path={self.storage_path}" - ) - @property def ci_url(self): if self.build_url: diff --git a/shared/django_apps/reports/tests/factories.py b/shared/django_apps/reports/tests/factories.py index 2e622b6c5..319dbecb9 100644 --- a/shared/django_apps/reports/tests/factories.py +++ b/shared/django_apps/reports/tests/factories.py @@ -5,7 +5,9 @@ from shared.django_apps.core.tests.factories import CommitFactory, RepositoryFactory from shared.django_apps.reports import models -from shared.django_apps.reports.models import ReportResults +from shared.django_apps.reports.models import ReportResults\ + +from reports import models as ProxyModels # TODO: deduplicate this from graphql_api.types.enums @@ -30,6 +32,10 @@ class Meta: report = factory.SubFactory(CommitReportFactory) state = "processed" +class ProxyUploadFactory(DjangoModelFactory): + class Meta: + model = ProxyModels.ProxyReportSession + class RepositoryFlagFactory(DjangoModelFactory): class Meta: From a7c56c81f198e052870833d41f4b4babe7e01a37 Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 14 Mar 2024 18:38:38 -0700 Subject: [PATCH 23/36] get rid of unecessary code --- shared/django_apps/reports/models.py | 1 - shared/django_apps/reports/tests/factories.py | 8 - .../django_apps/reports/tests/test_models.py | 142 ------------------ 3 files changed, 151 deletions(-) delete mode 100644 shared/django_apps/reports/tests/test_models.py diff --git a/shared/django_apps/reports/models.py b/shared/django_apps/reports/models.py index 3e2d1bb5a..4536041dd 100644 --- a/shared/django_apps/reports/models.py +++ b/shared/django_apps/reports/models.py @@ -3,7 +3,6 @@ from django.contrib.postgres.fields import ArrayField from django.db import models -from django.urls import reverse from django_prometheus.models import ExportModelOperationsMixin from shared.django_apps.codecov.models import BaseCodecovModel diff --git a/shared/django_apps/reports/tests/factories.py b/shared/django_apps/reports/tests/factories.py index 319dbecb9..8f42b5d34 100644 --- a/shared/django_apps/reports/tests/factories.py +++ b/shared/django_apps/reports/tests/factories.py @@ -7,9 +7,6 @@ from shared.django_apps.reports import models from shared.django_apps.reports.models import ReportResults\ -from reports import models as ProxyModels - - # TODO: deduplicate this from graphql_api.types.enums class UploadErrorEnum(enum.Enum): 
FILE_NOT_IN_STORAGE = "file_not_in_storage" @@ -32,11 +29,6 @@ class Meta: report = factory.SubFactory(CommitReportFactory) state = "processed" -class ProxyUploadFactory(DjangoModelFactory): - class Meta: - model = ProxyModels.ProxyReportSession - - class RepositoryFlagFactory(DjangoModelFactory): class Meta: model = models.RepositoryFlag diff --git a/shared/django_apps/reports/tests/test_models.py b/shared/django_apps/reports/tests/test_models.py deleted file mode 100644 index d8df50148..000000000 --- a/shared/django_apps/reports/tests/test_models.py +++ /dev/null @@ -1,142 +0,0 @@ -import json -from unittest.mock import MagicMock, patch - -from django.test import TestCase - -from shared.django_apps.reports.models import ReportDetails -from shared.django_apps.reports.tests.factories import ( - ReportDetailsFactory, - RepositoryFlagFactory, - UploadFactory, - UploadFlagMembershipFactory, -) -from shared.storage.exceptions import FileNotInStorageError - - -class UploadTests(TestCase): - def test_get_download_url(self): - storage_path = "v4/123/123.txt" - session = UploadFactory(storage_path=storage_path) - repository = session.report.commit.repository - assert ( - session.download_url - == f"/upload/gh/{repository.author.username}/{repository.name}/download?path={storage_path}" - ) - - def test_ci_url_when_no_provider(self): - session = UploadFactory(provider=None) - assert session.ci_url is None - - def test_ci_url_when_provider_do_not_have_build_url(self): - session = UploadFactory(provider="azure_pipelines") - assert session.ci_url is None - - def test_ci_url_when_provider_has_build_url(self): - session = UploadFactory(provider="travis", job_code="123") - repo = session.report.commit.repository - assert ( - session.ci_url - == f"https://travis-ci.com/{repo.author.username}/{repo.name}/jobs/{session.job_code}" - ) - - def test_ci_url_when_db_has_build_url(self): - session = UploadFactory(build_url="http://example.com") - assert session.ci_url == "http://example.com" - - def test_flags(self): - session = UploadFactory() - flag_one = RepositoryFlagFactory() - flag_two = RepositoryFlagFactory() - # connect the flag and membership - UploadFlagMembershipFactory(flag=flag_one, report_session=session) - UploadFlagMembershipFactory(flag=flag_two, report_session=session) - - assert ( - session.flag_names.sort() == [flag_one.flag_name, flag_two.flag_name].sort() - ) - - -# @pytest.mark.skip(reason="moved to shared") -class ReportDetailsTests(TestCase): - sample_files_array = [ - { - "filename": "test_file_1.py", - "file_index": 2, - "file_totals": [1, 10, 8, 2, 5, "80.00000", 6, 7, 9, 8, 20, 40, 13], - "session_totals": [[0, 10, 8, 2, 0, "80.00000", 0, 0, 0, 0, 0, 0, 0]], - "diff_totals": [0, 2, 1, 1, 0, "50.00000", 0, 0, 0, 0, 0, 0, 0], - }, - { - "filename": "test_file_2.py", - "file_index": 0, - "file_totals": [1, 3, 2, 1, 0, "66.66667", 0, 0, 0, 0, 0, 0, 0], - "session_totals": [[0, 3, 2, 1, 0, "66.66667", 0, 0, 0, 0, 0, 0, 0]], - "diff_totals": None, - }, - ] - - @patch("shared.django_apps.utils.model_utils.ArchiveService") - def test_get_files_array_from_db(self, mock_archive): - details = ReportDetailsFactory() - mock_read_file = MagicMock() - mock_archive.return_value.read_file = mock_read_file - details._files_array = self.sample_files_array - details._files_array_storage_path = None - details.save() - - fetched = ReportDetails.objects.get(id=details.id) - assert fetched.files_array == self.sample_files_array - mock_archive.assert_not_called() - mock_read_file.assert_not_called() - - 
@patch("shared.django_apps.utils.model_utils.ArchiveService") - def test_get_files_array_from_storage(self, mock_archive): - details = ReportDetailsFactory() - storage_path = "https://storage/path/files_array.json" - mock_read_file = MagicMock(return_value=json.dumps(self.sample_files_array)) - mock_archive.return_value.read_file = mock_read_file - details._files_array = None - details._files_array_storage_path = storage_path - details.save() - - fetched = ReportDetails.objects.get(id=details.id) - assert fetched.files_array == self.sample_files_array - mock_archive.assert_called() - mock_read_file.assert_called_with(storage_path) - # Calls it again to test caching - assert fetched.files_array == self.sample_files_array - assert mock_archive.call_count == 1 - assert mock_read_file.call_count == 1 - # This one to help us understand caching across different instances - assert details.files_array == self.sample_files_array - assert mock_archive.call_count == 2 - assert mock_read_file.call_count == 2 - # Let's see for objects with different IDs - diff_details = ReportDetailsFactory() - storage_path = "https://storage/path/files_array.json" - diff_details._files_array = None - diff_details._files_array_storage_path = storage_path - diff_details.save() - assert diff_details.files_array == self.sample_files_array - assert mock_archive.call_count == 3 - assert mock_read_file.call_count == 3 - - @patch("shared.django_apps.utils.model_utils.ArchiveService") - def test_get_files_array_from_storage_file_not_found(self, mock_archive): - details = ReportDetailsFactory() - storage_path = "https://storage/path/files_array.json" - - def side_effect(*args, **kwargs): - raise FileNotInStorageError() - - mock_read_file = MagicMock(side_effect=side_effect) - mock_archive.return_value.read_file = mock_read_file - details._files_array = None - details._files_array_storage_path = storage_path - details.save() - - fetched = ReportDetails.objects.get(id=details.id) - assert fetched._files_array_storage_path == storage_path - assert fetched.files_array == [] - mock_archive.assert_called() - mock_read_file.assert_called_with(storage_path) From e111513db853d42be5405ab18eb2b988c54b0e51 Mon Sep 17 00:00:00 2001 From: Adrian Date: Fri, 15 Mar 2024 11:40:10 -0700 Subject: [PATCH 24/36] get rid of download_url in reportsession model --- shared/django_apps/reports/tests/factories.py | 4 +++- tests/unit/django_apps/reports/test_reports_models.py | 9 --------- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/shared/django_apps/reports/tests/factories.py b/shared/django_apps/reports/tests/factories.py index 8f42b5d34..2e622b6c5 100644 --- a/shared/django_apps/reports/tests/factories.py +++ b/shared/django_apps/reports/tests/factories.py @@ -5,7 +5,8 @@ from shared.django_apps.core.tests.factories import CommitFactory, RepositoryFactory from shared.django_apps.reports import models -from shared.django_apps.reports.models import ReportResults\ +from shared.django_apps.reports.models import ReportResults + # TODO: deduplicate this from graphql_api.types.enums class UploadErrorEnum(enum.Enum): @@ -29,6 +30,7 @@ class Meta: report = factory.SubFactory(CommitReportFactory) state = "processed" + class RepositoryFlagFactory(DjangoModelFactory): class Meta: model = models.RepositoryFlag diff --git a/tests/unit/django_apps/reports/test_reports_models.py b/tests/unit/django_apps/reports/test_reports_models.py index d12fdd787..f35d6ba40 100644 --- a/tests/unit/django_apps/reports/test_reports_models.py +++ 
b/tests/unit/django_apps/reports/test_reports_models.py @@ -14,15 +14,6 @@ class UploadTests(TestCase): - def test_get_download_url(self): - storage_path = "v4/123/123.txt" - session = UploadFactory(storage_path=storage_path) - repository = session.report.commit.repository - assert ( - session.download_url - == f"/upload/gh/{repository.author.username}/{repository.name}/download?path={storage_path}" - ) - def test_ci_url_when_no_provider(self): session = UploadFactory(provider=None) assert session.ci_url is None From ed60fcc556bce53e8a9f299ae79242c94518c377 Mon Sep 17 00:00:00 2001 From: Adrian Date: Tue, 2 Apr 2024 13:45:37 -0600 Subject: [PATCH 25/36] add model code that was added since last pr --- shared/django_apps/codecov_auth/models.py | 7 +-- .../codecov_auth/test_codecov_auth_models.py | 48 ++++++++++++++++++- 2 files changed, 51 insertions(+), 4 deletions(-) diff --git a/shared/django_apps/codecov_auth/models.py b/shared/django_apps/codecov_auth/models.py index 30cc5e098..a92321154 100644 --- a/shared/django_apps/codecov_auth/models.py +++ b/shared/django_apps/codecov_auth/models.py @@ -520,10 +520,11 @@ class Meta: def is_configured(self) -> bool: """Returns whether this installation is properly configured and can be used""" - if self.name == GITHUB_APP_INSTALLATION_DEFAULT_NAME: - # The default app is configured in the installation YAML + if self.app_id is not None and self.pem_path is not None: return True - return self.app_id is not None and self.pem_path is not None + # The default app is configured in the installation YAML + installation_default_app_id = get_config("github", "integration", "id") + return str(self.app_id) == str(installation_default_app_id) def repository_queryset(self) -> BaseManager[Repository]: """Returns a QuerySet of repositories covered by this installation""" diff --git a/tests/unit/django_apps/codecov_auth/test_codecov_auth_models.py b/tests/unit/django_apps/codecov_auth/test_codecov_auth_models.py index 40ae2e7d3..ae46250a6 100644 --- a/tests/unit/django_apps/codecov_auth/test_codecov_auth_models.py +++ b/tests/unit/django_apps/codecov_auth/test_codecov_auth_models.py @@ -1,10 +1,12 @@ from unittest.mock import patch +import pytest from django.forms import ValidationError from django.test import TransactionTestCase from shared.django_apps.codecov_auth.models import ( DEFAULT_AVATAR_SIZE, + GITHUB_APP_INSTALLATION_DEFAULT_NAME, INFINITY, SERVICE_BITBUCKET, SERVICE_BITBUCKET_SERVER, @@ -20,6 +22,7 @@ OwnerFactory, ) from shared.django_apps.core.tests.factories import RepositoryFactory +from shared.utils.test_utils import mock_config_helper class TestOwnerModel(TransactionTestCase): @@ -467,6 +470,14 @@ def test_token_is_deleted_when_changing_user_plan( class TestGithubAppInstallationModel(TransactionTestCase): + DEFAULT_APP_ID = 12345 + + @pytest.fixture(autouse=True) + def mock_default_app_id(self, mocker): + mock_config_helper( + mocker, configs={"github.integration.id": self.DEFAULT_APP_ID} + ) + def test_covers_all_repos(self): owner = OwnerFactory() repo1 = RepositoryFactory(author=owner) @@ -520,7 +531,11 @@ def test_covers_some_repos(self): def test_is_configured(self): owner = OwnerFactory() installation_default = GithubAppInstallation( - owner=owner, repository_service_ids=None, installation_id=100 + owner=owner, + repository_service_ids=None, + installation_id=123, + app_id=self.DEFAULT_APP_ID, + name=GITHUB_APP_INSTALLATION_DEFAULT_NAME, ) installation_configured = GithubAppInstallation( owner=owner, @@ -537,9 +552,40 @@ def 
test_is_configured(self): name="my_other_installation", app_id=1234, ) + installation_default_name_not_configured = GithubAppInstallation( + owner=owner, + repository_service_ids=None, + installation_id=100, + app_id=121212, + name=GITHUB_APP_INSTALLATION_DEFAULT_NAME, + ) + installation_default_name_not_default_id_configured = GithubAppInstallation( + owner=owner, + repository_service_ids=None, + installation_id=100, + app_id=121212, + name=GITHUB_APP_INSTALLATION_DEFAULT_NAME, + pem_path="some_path", + ) installation_default.save() + installation_configured.save() installation_not_configured.save() + installation_default_name_not_configured.save() + installation_default_name_not_default_id_configured.save() + + assert installation_default.is_configured() == True + installation_default.app_id = str(self.DEFAULT_APP_ID) assert installation_default.is_configured() == True + assert installation_configured.is_configured() == True assert installation_not_configured.is_configured() == False + assert installation_default_name_not_configured.app_id != self.DEFAULT_APP_ID + assert installation_default_name_not_configured.is_configured() == False + assert ( + installation_default_name_not_default_id_configured.app_id + != self.DEFAULT_APP_ID + ) + assert ( + installation_default_name_not_default_id_configured.is_configured() == True + ) From d2a6dc40ba0247bb43259826844796769574b75a Mon Sep 17 00:00:00 2001 From: Adrian Date: Tue, 2 Apr 2024 14:25:02 -0600 Subject: [PATCH 26/36] delete print and add csrf middleware --- shared/django_apps/dummy_settings.py | 4 +++- .../legacy_migrations/management/commands/migrate.py | 1 - 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/shared/django_apps/dummy_settings.py b/shared/django_apps/dummy_settings.py index 8941f5d4b..2114976f3 100644 --- a/shared/django_apps/dummy_settings.py +++ b/shared/django_apps/dummy_settings.py @@ -31,9 +31,11 @@ ARCHIVE_BUCKET_NAME = "codecov" MIDDLEWARE = [ - "django.contrib.sessions.middleware.SessionMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", "django.contrib.messages.middleware.MessageMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", ] # Migrated from API diff --git a/shared/django_apps/legacy_migrations/management/commands/migrate.py b/shared/django_apps/legacy_migrations/management/commands/migrate.py index 97300aef4..b6ad2b39d 100644 --- a/shared/django_apps/legacy_migrations/management/commands/migrate.py +++ b/shared/django_apps/legacy_migrations/management/commands/migrate.py @@ -83,7 +83,6 @@ def _obtain_lock(self): def handle(self, *args, **options): log.info("Codecov is starting migrations...") - print("inside the overwritten migrate command") database = options["database"] db_connection = connections[database] options["run_syncdb"] = False From e80424c20033ca96077002d7097d53f9aaec83e4 Mon Sep 17 00:00:00 2001 From: Adrian Date: Tue, 2 Apr 2024 15:17:31 -0600 Subject: [PATCH 27/36] test: move tests to a valid folder --- {shared => tests/unit}/plan/test_plan.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) rename {shared => tests/unit}/plan/test_plan.py (98%) diff --git a/shared/plan/test_plan.py b/tests/unit/plan/test_plan.py similarity index 98% rename from shared/plan/test_plan.py rename to tests/unit/plan/test_plan.py index f1052a614..f8059642d 100644 --- a/shared/plan/test_plan.py +++ b/tests/unit/plan/test_plan.py @@ -396,7 +396,7 @@ 
def test_available_plans_for_pro_plan_non_trial(self): assert plan_service.available_plans(owner=self.owner) == expected_result - @patch("services.sentry.is_sentry_user") + @patch("shared.plan.service.is_sentry_user") def test_available_plans_for_sentry_customer_basic_plan_non_trial( self, is_sentry_user ): @@ -414,7 +414,7 @@ def test_available_plans_for_sentry_customer_basic_plan_non_trial( assert plan_service.available_plans(owner=self.owner) == expected_result - @patch("services.sentry.is_sentry_user") + @patch("shared.plan.service.is_sentry_user") def test_available_plans_for_sentry_customer_team_plan_non_trial( self, is_sentry_user ): @@ -432,7 +432,7 @@ def test_available_plans_for_sentry_customer_team_plan_non_trial( assert plan_service.available_plans(owner=self.owner) == expected_result - @patch("services.sentry.is_sentry_user") + @patch("shared.plan.service.is_sentry_user") def test_available_plans_for_sentry_plan_non_trial(self, is_sentry_user): is_sentry_user.return_value = True self.current_org.plan = PlanName.SENTRY_MONTHLY.value @@ -512,7 +512,7 @@ def test_available_plans_for_pro_plan_expired_trial_less_than_10_users(self): assert plan_service.available_plans(owner=self.owner) == expected_result - @patch("services.sentry.is_sentry_user") + @patch("shared.plan.service.is_sentry_user") def test_available_plans_for_sentry_customer_basic_plan_expired_trial_less_than_10_users( self, is_sentry_user ): @@ -530,7 +530,7 @@ def test_available_plans_for_sentry_customer_basic_plan_expired_trial_less_than_ assert plan_service.available_plans(owner=self.owner) == expected_result - @patch("services.sentry.is_sentry_user") + @patch("shared.plan.service.is_sentry_user") def test_available_plans_for_sentry_customer_team_plan_expired_trial_less_than_10_users( self, is_sentry_user ): @@ -548,7 +548,7 @@ def test_available_plans_for_sentry_customer_team_plan_expired_trial_less_than_1 assert plan_service.available_plans(owner=self.owner) == expected_result - @patch("services.sentry.is_sentry_user") + @patch("shared.plan.service.is_sentry_user") def test_available_plans_for_sentry_plan_expired_trial_less_than_10_users( self, is_sentry_user ): @@ -597,7 +597,7 @@ def test_available_plans_for_pro_plan_expired_trial_more_than_10_users(self): assert plan_service.available_plans(owner=self.owner) == expected_result - @patch("services.sentry.is_sentry_user") + @patch("shared.plan.service.is_sentry_user") def test_available_plans_for_sentry_customer_basic_plan_expired_trial_more_than_10_users( self, is_sentry_user ): @@ -614,7 +614,7 @@ def test_available_plans_for_sentry_customer_basic_plan_expired_trial_more_than_ assert plan_service.available_plans(owner=self.owner) == expected_result - @patch("services.sentry.is_sentry_user") + @patch("shared.plan.service.is_sentry_user") def test_available_plans_for_sentry_plan_expired_trial_more_than_10_users( self, is_sentry_user ): @@ -755,7 +755,7 @@ def test_non_sentry_user(self): # Can not do Team plan when at 11 activated users assert self.plan_service.available_plans(owner=self.owner) == expected_result - @patch("services.sentry.is_sentry_user") + @patch("shared.plan.service.is_sentry_user") def test_sentry_user(self, is_sentry_user): is_sentry_user.return_value = True From d6b18582fd21ab6a90b58d99ef289c847a9e75ea Mon Sep 17 00:00:00 2001 From: Adrian Date: Tue, 2 Apr 2024 17:39:07 -0600 Subject: [PATCH 28/36] make legacy_sql a module --- .../legacy_migrations/migrations/legacy_sql/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create 
mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/__init__.py diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/__init__.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/__init__.py new file mode 100644 index 000000000..e69de29bb From 6b613da19d2ce1a8411c4bbe529aa4ca20562cf4 Mon Sep 17 00:00:00 2001 From: Adrian Date: Tue, 2 Apr 2024 18:17:20 -0600 Subject: [PATCH 29/36] tweak imports for django migrations --- .../legacy_migrations/migrations/0001_initial.py | 8 ++------ .../legacy_migrations/migrations/legacy_sql/__init__.py | 0 2 files changed, 2 insertions(+), 6 deletions(-) delete mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/__init__.py diff --git a/shared/django_apps/legacy_migrations/migrations/0001_initial.py b/shared/django_apps/legacy_migrations/migrations/0001_initial.py index 7cfa95aa1..2f110bdf9 100644 --- a/shared/django_apps/legacy_migrations/migrations/0001_initial.py +++ b/shared/django_apps/legacy_migrations/migrations/0001_initial.py @@ -3,12 +3,8 @@ from django.conf import settings from django.db import migrations -from shared.django_apps.legacy_migrations.migrations.legacy_sql.main.main import ( - run_sql as main_run_sql, -) -from shared.django_apps.legacy_migrations.migrations.legacy_sql.upgrades.main import ( - run_sql as upgrade_run_sql, -) +from .legacy_sql.main.main import run_sql as main_run_sql +from .legacy_sql.upgrades.main import run_sql as upgrade_run_sql BASE_VERSION = "base" diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/__init__.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/__init__.py deleted file mode 100644 index e69de29bb..000000000 From ec31b1bed2487cc708f24c761cc5431bc6bfdd5c Mon Sep 17 00:00:00 2001 From: Adrian Date: Wed, 3 Apr 2024 10:09:39 -0600 Subject: [PATCH 30/36] keep adjusting imports --- .../legacy_migrations/migrations/0001_initial.py | 8 ++++++-- .../legacy_migrations/migrations/legacy_sql/__init__.py | 0 .../migrations/legacy_sql/upgrades/__init__.py | 0 3 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/__init__.py create mode 100644 shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/__init__.py diff --git a/shared/django_apps/legacy_migrations/migrations/0001_initial.py b/shared/django_apps/legacy_migrations/migrations/0001_initial.py index 2f110bdf9..7cfa95aa1 100644 --- a/shared/django_apps/legacy_migrations/migrations/0001_initial.py +++ b/shared/django_apps/legacy_migrations/migrations/0001_initial.py @@ -3,8 +3,12 @@ from django.conf import settings from django.db import migrations -from .legacy_sql.main.main import run_sql as main_run_sql -from .legacy_sql.upgrades.main import run_sql as upgrade_run_sql +from shared.django_apps.legacy_migrations.migrations.legacy_sql.main.main import ( + run_sql as main_run_sql, +) +from shared.django_apps.legacy_migrations.migrations.legacy_sql.upgrades.main import ( + run_sql as upgrade_run_sql, +) BASE_VERSION = "base" diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/__init__.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/__init__.py b/shared/django_apps/legacy_migrations/migrations/legacy_sql/upgrades/__init__.py new file mode 100644 index 000000000..e69de29bb From 
9234599057c78bc935e4577ab2b4bd17cb0b8cc5 Mon Sep 17 00:00:00 2001 From: Adrian Date: Wed, 3 Apr 2024 13:30:50 -0600 Subject: [PATCH 31/36] add init + remove test field --- .../migrations/0054_owner_fake_field.py | 18 ------------------ shared/django_apps/codecov_auth/models.py | 4 ++-- .../legacy_migrations/management/__init__.py | 0 .../codecov_auth/test_codecov_auth_models.py | 3 +++ 4 files changed, 5 insertions(+), 20 deletions(-) delete mode 100644 shared/django_apps/codecov_auth/migrations/0054_owner_fake_field.py create mode 100644 shared/django_apps/legacy_migrations/management/__init__.py diff --git a/shared/django_apps/codecov_auth/migrations/0054_owner_fake_field.py b/shared/django_apps/codecov_auth/migrations/0054_owner_fake_field.py deleted file mode 100644 index ea7e4c772..000000000 --- a/shared/django_apps/codecov_auth/migrations/0054_owner_fake_field.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 5.0.3 on 2024-03-14 20:33 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("codecov_auth", "0053_ownerinstallationnametousefortask_and_more"), - ] - - operations = [ - migrations.AddField( - model_name="owner", - name="fake_field", - field=models.TextField(blank=True, null=True, unique=True), - ), - ] diff --git a/shared/django_apps/codecov_auth/models.py b/shared/django_apps/codecov_auth/models.py index a92321154..07f71f3d3 100644 --- a/shared/django_apps/codecov_auth/models.py +++ b/shared/django_apps/codecov_auth/models.py @@ -212,8 +212,6 @@ class Meta: sentry_user_id = models.TextField(null=True, blank=True, unique=True) sentry_user_data = models.JSONField(null=True) - fake_field = models.TextField(null=True, blank=True, unique=True) - user = models.ForeignKey( User, null=True, @@ -522,6 +520,8 @@ def is_configured(self) -> bool: """Returns whether this installation is properly configured and can be used""" if self.app_id is not None and self.pem_path is not None: return True + if self.name == "unconfigured_app": + return False # The default app is configured in the installation YAML installation_default_app_id = get_config("github", "integration", "id") return str(self.app_id) == str(installation_default_app_id) diff --git a/shared/django_apps/legacy_migrations/management/__init__.py b/shared/django_apps/legacy_migrations/management/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit/django_apps/codecov_auth/test_codecov_auth_models.py b/tests/unit/django_apps/codecov_auth/test_codecov_auth_models.py index ae46250a6..049b232f1 100644 --- a/tests/unit/django_apps/codecov_auth/test_codecov_auth_models.py +++ b/tests/unit/django_apps/codecov_auth/test_codecov_auth_models.py @@ -577,6 +577,9 @@ def test_is_configured(self): assert installation_default.is_configured() == True installation_default.app_id = str(self.DEFAULT_APP_ID) assert installation_default.is_configured() == True + # Unconfigured apps are not configured + installation_default.name = "unconfigured_app" + assert installation_default.is_configured() == False assert installation_configured.is_configured() == True assert installation_not_configured.is_configured() == False From c3bf9569a27f52a4f16d186e949ff67315afe7fe Mon Sep 17 00:00:00 2001 From: Adrian Date: Wed, 3 Apr 2024 15:55:43 -0600 Subject: [PATCH 32/36] hopefully last update --- .../0054_update_owners_column_defaults.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 
shared/django_apps/codecov_auth/migrations/0054_update_owners_column_defaults.py diff --git a/shared/django_apps/codecov_auth/migrations/0054_update_owners_column_defaults.py b/shared/django_apps/codecov_auth/migrations/0054_update_owners_column_defaults.py new file mode 100644 index 000000000..ce945dd71 --- /dev/null +++ b/shared/django_apps/codecov_auth/migrations/0054_update_owners_column_defaults.py @@ -0,0 +1,43 @@ +# Generated by Django 4.2.11 on 2024-03-28 19:25 + +from django.db import migrations + +from shared.django_apps.migration_utils import RiskyRunSQL + + +class Migration(migrations.Migration): + + dependencies = [ + ("codecov_auth", "0053_ownerinstallationnametousefortask_and_more"), + ] + + operations = [ + RiskyRunSQL( + "ALTER TABLE owners ALTER COLUMN plan_user_count SET DEFAULT 1;", + reverse_sql="ALTER TABLE owners ALTER COLUMN plan_user_count SET DEFAULT NULL;", + ), + RiskyRunSQL( + "ALTER TABLE owners ALTER COLUMN updatestamp SET DEFAULT now();", + reverse_sql="ALTER TABLE owners ALTER COLUMN updatestamp SET DEFAULT NULL;", + ), + RiskyRunSQL( + "ALTER TABLE owners ALTER COLUMN is_superuser SET DEFAULT false;", + reverse_sql="ALTER TABLE owners ALTER COLUMN is_superuser SET DEFAULT NULL;", + ), + RiskyRunSQL( + "ALTER TABLE owners ALTER COLUMN createstamp SET DEFAULT now();", + reverse_sql="ALTER TABLE owners ALTER COLUMN createstamp SET DEFAULT NULL;", + ), + RiskyRunSQL( + "UPDATE owners SET plan_user_count=1 WHERE plan_user_count IS NULL;", + reverse_sql=migrations.RunSQL.noop, + ), + RiskyRunSQL( + "UPDATE owners SET updatestamp=now() WHERE updatestamp IS NULL;", + reverse_sql=migrations.RunSQL.noop, + ), + RiskyRunSQL( + "UPDATE owners SET is_superuser=false WHERE is_superuser IS NULL;", + reverse_sql=migrations.RunSQL.noop, + ), + ] From 27c0170d8f5008bbc798647d619894091bb49a24 Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 4 Apr 2024 13:58:50 -0600 Subject: [PATCH 33/36] remove path also removed in another PR in api for api_archive --- shared/api_archive/archive.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/shared/api_archive/archive.py b/shared/api_archive/archive.py index d03c8788d..59f0c3955 100644 --- a/shared/api_archive/archive.py +++ b/shared/api_archive/archive.py @@ -39,15 +39,6 @@ def get_path(self, **kwaargs): return self.value.format(**kwaargs) -def get_minio_client(): - return Minio( - settings.MINIO_LOCATION, - access_key=settings.MINIO_SECRET_KEY, - secret_key=settings.MINIO_ACCESS_KEY, - secure=True, - ) - - # Service class for performing archive operations. 
Meant to work against the # underlying StorageService class ArchiveService(object): From c607c8663ef56cb2c32438e5c7a07e4194b82a3d Mon Sep 17 00:00:00 2001 From: Adrian Date: Thu, 4 Apr 2024 23:08:51 -0600 Subject: [PATCH 34/36] more changes for big migration --- setup.py | 2 -- shared/api_archive/archive.py | 1 + shared/django_apps/dummy_settings.py | 34 ++-------------------------- shared/storage/minio.py | 6 ++++- tests/requirements.in | 1 + tests/requirements.txt | 15 +++++++++--- tests/unit/storage/test_minio.py | 7 ++++-- 7 files changed, 26 insertions(+), 40 deletions(-) diff --git a/setup.py b/setup.py index 88f7ea0fe..5e8aac654 100644 --- a/setup.py +++ b/setup.py @@ -52,8 +52,6 @@ "django-better-admin-arrayfield", # API Deps "django-prometheus", - "django-model-utils", - "factory-boy", "python-redis-lock", "django-model-utils==4.3.1", "requests==2.31.0", diff --git a/shared/api_archive/archive.py b/shared/api_archive/archive.py index 59f0c3955..2622c39ce 100644 --- a/shared/api_archive/archive.py +++ b/shared/api_archive/archive.py @@ -16,6 +16,7 @@ log = logging.getLogger(__name__) +# TODO deduplicate this logic from worker, api, shared class MinioEndpoints(Enum): chunks = "{version}/repos/{repo_hash}/commits/{commitid}/chunks.txt" json_data = "{version}/repos/{repo_hash}/commits/{commitid}/json_data/{table}/{field}/{external_id}.json" diff --git a/shared/django_apps/dummy_settings.py b/shared/django_apps/dummy_settings.py index 2114976f3..c4d2df06d 100644 --- a/shared/django_apps/dummy_settings.py +++ b/shared/django_apps/dummy_settings.py @@ -14,46 +14,16 @@ "shared.django_apps.rollouts", # API models "django.contrib.admin", - "django.contrib.auth", "django.contrib.contenttypes", - "django.contrib.messages", "django.contrib.postgres", "shared.django_apps.codecov_auth", "shared.django_apps.core", "shared.django_apps.reports", ] -# Migrated from API to get Minio working -MINIO_ACCESS_KEY = get_config("services", "minio", "access_key_id") -MINIO_SECRET_KEY = get_config("services", "minio", "secret_access_key") -MINIO_LOCATION = "codecov.s3.amazonaws.com" -MINIO_HASH_KEY = get_config("services", "minio", "hash_key") -ARCHIVE_BUCKET_NAME = "codecov" - -MIDDLEWARE = [ - "django.contrib.auth.middleware.AuthenticationMiddleware", - "django.middleware.common.CommonMiddleware", - "django.middleware.csrf.CsrfViewMiddleware", - "django.contrib.messages.middleware.MessageMiddleware", - "django.contrib.sessions.middleware.SessionMiddleware", -] +MIDDLEWARE = [] -# Migrated from API -TEMPLATES = [ - { - "BACKEND": "django.template.backends.django.DjangoTemplates", - "DIRS": [], - "APP_DIRS": True, - "OPTIONS": { - "context_processors": [ - "django.template.context_processors.debug", - "django.template.context_processors.request", - "django.contrib.auth.context_processors.auth", - "django.contrib.messages.context_processors.messages", - ] - }, - } -] +TEMPLATES = [] TELEMETRY_VANILLA_DB = "default" diff --git a/shared/storage/minio.py b/shared/storage/minio.py index 43280f6a7..17e772e18 100644 --- a/shared/storage/minio.py +++ b/shared/storage/minio.py @@ -95,7 +95,11 @@ def init_minio_client( ), ) return Minio( - host, access_key=access_key, secret_key=secret_key, secure=verify_ssl, region=region + host, + access_key=access_key, + secret_key=secret_key, + secure=verify_ssl, + region=region, ) # writes the initial storage bucket to storage via minio. 
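A note on the storage change above: the `region` keyword now threaded through `init_minio_client` is handed straight to the underlying `Minio` client. A minimal usage sketch, assuming the same config keys exercised by `tests/unit/storage/test_minio.py`; every concrete value below is a placeholder, not something taken from the patch:

from minio import Minio

# Placeholder config; key names follow the minio settings used in this series,
# the values are illustrative only.
minio_config = {
    "host": "minio.internal",
    "port": "9000",
    "access_key_id": "codecov-default-key",
    "secret_access_key": "codecov-default-secret",
    "verify_ssl": False,
    "region": "us-east-1",
}

# Mirrors the reformatted Minio(...) call in shared/storage/minio.py: the
# endpoint is "host:port" and region is forwarded as a keyword argument.
client = Minio(
    f"{minio_config['host']}:{minio_config['port']}",
    access_key=minio_config["access_key_id"],
    secret_key=minio_config["secret_access_key"],
    secure=minio_config["verify_ssl"],
    region=minio_config["region"],
)
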
diff --git a/tests/requirements.in b/tests/requirements.in index d9ad80846..20c2023c7 100644 --- a/tests/requirements.in +++ b/tests/requirements.in @@ -15,3 +15,4 @@ urllib3>=1.26.18 pyyaml>=6.0.1 pytest-freezegun psycopg2 +factory-boy diff --git a/tests/requirements.txt b/tests/requirements.txt index 8cc049ce3..6ebe640de 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.9 # by the following command: # # pip-compile tests/requirements.in @@ -23,6 +23,10 @@ ddt==1.4.4 # via -r tests/requirements.in exceptiongroup==1.2.0 # via pytest +factory-boy==3.3.0 + # via -r tests/requirements.in +faker==24.4.0 + # via factory-boy freezegun==1.4.0 # via pytest-freezegun h11==0.12.0 @@ -72,7 +76,9 @@ pytest-freezegun==0.4.2 pytest-mock==3.6.1 # via -r tests/requirements.in python-dateutil==2.8.2 - # via freezegun + # via + # faker + # freezegun pyyaml==6.0.1 # via # -r tests/requirements.in @@ -82,7 +88,9 @@ requests==2.31.0 respx==0.19.2 # via -r tests/requirements.in rfc3986[idna2008]==1.5.0 - # via httpx + # via + # httpx + # rfc3986 six==1.16.0 # via python-dateutil sniffio==1.2.0 @@ -100,6 +108,7 @@ urllib3==1.26.18 # via # -r tests/requirements.in # requests + # vcrpy vcrpy==5.1.0 # via -r tests/requirements.in wrapt==1.15.0 diff --git a/tests/unit/storage/test_minio.py b/tests/unit/storage/test_minio.py index 02b7179c6..ee79dfad7 100644 --- a/tests/unit/storage/test_minio.py +++ b/tests/unit/storage/test_minio.py @@ -242,10 +242,13 @@ def test_minio_with_region(self, mocker): "port": "9000", "iam_auth": True, "iam_endpoint": None, - "region": "example" + "region": "example", } storage = MinioStorageService(minio_no_ports_config) assert storage.minio_config == minio_no_ports_config mocked_minio_client.assert_called_with( - "cute_url_no_ports:9000", credentials=mocker.ANY, secure=False, region="example" + "cute_url_no_ports:9000", + credentials=mocker.ANY, + secure=False, + region="example", ) From 082bc4938fb716d6c26a956dd9b7859d0d996fee Mon Sep 17 00:00:00 2001 From: Adrian Date: Fri, 5 Apr 2024 09:47:16 -0600 Subject: [PATCH 35/36] add changes from other API prs --- shared/django_apps/codecov_auth/models.py | 4 ---- shared/django_apps/core/managers.py | 1 + .../core/migrations/0048_increment_version.py | 19 +++++++++++++++++++ shared/torngit/github.py | 4 +++- 4 files changed, 23 insertions(+), 5 deletions(-) create mode 100644 shared/django_apps/core/migrations/0048_increment_version.py diff --git a/shared/django_apps/codecov_auth/models.py b/shared/django_apps/codecov_auth/models.py index 07f71f3d3..58ec75431 100644 --- a/shared/django_apps/codecov_auth/models.py +++ b/shared/django_apps/codecov_auth/models.py @@ -152,11 +152,7 @@ class Meta: stripe_customer_id = models.TextField(null=True, blank=True) stripe_subscription_id = models.TextField(null=True, blank=True) stripe_coupon_id = models.TextField(null=True, blank=True) - - # createstamp seems to be used by legacy to track first login - # so we shouldn't touch this outside login createstamp = models.DateTimeField(null=True) - service_id = models.TextField(null=False) parent_service_id = models.TextField(null=True) root_parent_service_id = models.TextField(null=True) diff --git a/shared/django_apps/core/managers.py b/shared/django_apps/core/managers.py index 8d54a50bd..c47cb4bec 100644 --- a/shared/django_apps/core/managers.py +++ b/shared/django_apps/core/managers.py @@ -318,6 +318,7 @@ 
def get_or_create_from_git_repo(self, git_repo, owner): service=owner.service, username=git_repo_fork_owner["username"], service_id=git_repo_fork_owner["service_id"], + defaults={"createstamp": timezone.now()}, ) fork, _ = self.get_or_create( author=fork_owner, diff --git a/shared/django_apps/core/migrations/0048_increment_version.py b/shared/django_apps/core/migrations/0048_increment_version.py new file mode 100644 index 000000000..6e00c0df1 --- /dev/null +++ b/shared/django_apps/core/migrations/0048_increment_version.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.11 on 2024-04-01 19:36 + +from django.db import migrations + + +def update_version(apps, schema): + Constants = apps.get_model("core", "Constants") + version = Constants.objects.get(key="version") + version.value = "24.4.1" + version.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0047_increment_version"), + ] + + operations = [migrations.RunPython(update_version)] diff --git a/shared/torngit/github.py b/shared/torngit/github.py index 1bf4c60ad..f898046ef 100644 --- a/shared/torngit/github.py +++ b/shared/torngit/github.py @@ -1272,7 +1272,9 @@ async def get_compare( ( "\ndeleted file mode 100644" if f["status"] == "removed" - else "\nnew file mode 100644" if f["status"] == "added" else "" + else "\nnew file mode 100644" + if f["status"] == "added" + else "" ), "--- " + ( From b73c97853d6221bf9ab7c412f6858b5c36b30634 Mon Sep 17 00:00:00 2001 From: Adrian Date: Fri, 5 Apr 2024 15:05:42 -0600 Subject: [PATCH 36/36] get rid of unecessary stuff --- codecov.yml | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/codecov.yml b/codecov.yml index b5fb38d1d..37999bc6d 100644 --- a/codecov.yml +++ b/codecov.yml @@ -11,18 +11,6 @@ ignore: beta_groups: - "labels" -services: - minio: - hash_key: 12345bf3f7d947f2a0681b2154067890 - verify_ssl: false - host: "minio" - port: 9000 - # bucket: - # region: - access_key_id: codecov-default-key - secret_access_key: codecov-default-secret - client_uploads: true - flag_management: individual_flags: - name: "smart-labels"
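
One behavioural detail worth calling out from the core/managers.py change above: defaults={"createstamp": timezone.now()} only takes effect when get_or_create actually inserts a new owner; existing rows keep whatever createstamp they already have. A minimal sketch of that pattern, with illustrative service/username/service_id values that are not from the patch:

from django.utils import timezone

from shared.django_apps.codecov_auth.models import Owner

owner, created = Owner.objects.get_or_create(
    service="github",          # illustrative lookup values
    username="fork-owner",
    service_id="12345",
    # applied only if a new row is created, never on a plain fetch
    defaults={"createstamp": timezone.now()},
)
# Re-running the same call returns created=False and leaves the existing
# owner's createstamp (possibly NULL for legacy rows) untouched.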