diff --git a/_integration-test/conftest.py b/_integration-test/conftest.py index b36097d605..e80cef95e6 100644 --- a/_integration-test/conftest.py +++ b/_integration-test/conftest.py @@ -1,58 +1,17 @@ import os +from os.path import join import subprocess -import time -import httpx import pytest SENTRY_CONFIG_PY = "sentry/sentry.conf.py" SENTRY_TEST_HOST = os.getenv("SENTRY_TEST_HOST", "http://localhost:9000") TEST_USER = "test@example.com" TEST_PASS = "test123TEST" -TIMEOUT_SECONDS = 60 - - -def pytest_addoption(parser): - parser.addoption("--customizations", default="disabled") @pytest.fixture(scope="session", autouse=True) def configure_self_hosted_environment(request): - subprocess.run( - ["docker", "compose", "--ansi", "never", "up", "-d"], - check=True, - capture_output=True, - ) - for i in range(TIMEOUT_SECONDS): - try: - response = httpx.get(SENTRY_TEST_HOST, follow_redirects=True) - except httpx.RequestError: - time.sleep(1) - else: - if response.status_code == 200: - break - else: - raise AssertionError("timeout waiting for self-hosted to come up") - - if request.config.getoption("--customizations") == "enabled": - os.environ["TEST_CUSTOMIZATIONS"] = "enabled" - script_content = """\ -#!/bin/bash -touch /created-by-enhance-image -apt-get update -apt-get install -y gcc libsasl2-dev python-dev-is-python3 libldap2-dev libssl-dev -""" - - with open("sentry/enhance-image.sh", "w") as script_file: - script_file.write(script_content) - # Set executable permissions for the shell script - os.chmod("sentry/enhance-image.sh", 0o755) - - # Write content to the requirements.txt file - with open("sentry/requirements.txt", "w") as req_file: - req_file.write("python-ldap\n") - os.environ["MINIMIZE_DOWNTIME"] = "1" - subprocess.run(["./install.sh"], check=True, capture_output=True) # Create test user subprocess.run( [ diff --git a/_integration-test/test_backup.py b/_integration-test/test_backup.py index 41c099741a..b73e0cfb16 100644 --- a/_integration-test/test_backup.py 
+++ b/_integration-test/test_backup.py @@ -1,4 +1,5 @@ import os +from os.path import join import subprocess @@ -20,7 +21,7 @@ def test_sentry_admin(setup_backup_restore_env_variables): def test_backup(setup_backup_restore_env_variables): - # Docker was giving me permissioning issues when trying to create this file and write to it even after giving read + write access + # Docker was giving me permission issues when trying to create this file and write to it even after giving read + write access # to group and owner. Instead, try creating the empty file and then give everyone write access to the backup file file_path = os.path.join(os.getcwd(), "sentry", "backup.json") sentry_admin_sh = os.path.join(os.getcwd(), "sentry-admin.sh") @@ -42,21 +43,46 @@ def test_import(setup_backup_restore_env_variables): # Bring postgres down and recreate the docker volume + subprocess.run(["docker", "compose", "--ansi", "never", "down"], check=True) + # We reset all DB-related volumes here and not just Postgres although the backups + # are only for Postgres. The reason is to get a "clean slate" as we need the Kafka + # and Clickhouse volumes to be back to their initial state as well (without any events). + # We cannot just rm and create them as they still need migrations. 
+ for name in ("postgres", "clickhouse", "kafka"): + subprocess.run(["docker", "volume", "rm", f"sentry-{name}"], check=True) + subprocess.run( + [ + "rsync", + "-aW", + "--no-compress", + "--mkpath", + join(os.environ["RUNNER_TEMP"], "volumes", f"sentry-{name}", ""), + f"/var/lib/docker/volumes/sentry-{name}/", + ], + check=True, + capture_output=True, + ) + subprocess.run(["docker", "volume", "create", f"sentry-{name}"], check=True) + subprocess.run( - ["docker", "compose", "--ansi", "never", "stop", "postgres"], check=True - ) - subprocess.run( - ["docker", "compose", "--ansi", "never", "rm", "-f", "-v", "postgres"], - check=True, - ) - subprocess.run(["docker", "volume", "rm", "sentry-postgres"], check=True) - subprocess.run(["docker", "volume", "create", "--name=sentry-postgres"], check=True) - subprocess.run( - ["docker", "compose", "--ansi", "never", "run", "web", "upgrade", "--noinput"], + [ + "docker", + "run", + "--rm", + "-v", + "sentry-kafka:/data", + "busybox", + "chown", + "-R", + "1000:1000", + "/data", + ], check=True, + capture_output=True, ) + subprocess.run( - ["docker", "compose", "--ansi", "never", "up", "-d"], + ["docker", "compose", "--ansi", "never", "up", "--wait"], check=True, capture_output=True, ) diff --git a/_integration-test/test_run.py b/_integration-test/test_run.py index d1c8f4547c..110de08385 100644 --- a/_integration-test/test_run.py +++ b/_integration-test/test_run.py @@ -326,7 +326,15 @@ def test_custom_certificate_authorities(): ) subprocess.run( - ["docker", "compose", "--ansi", "never", "up", "-d", "fixture-custom-ca-roots"], + [ + "docker", + "compose", + "--ansi", + "never", + "up", + "--wait", + "fixture-custom-ca-roots", + ], check=True, ) subprocess.run( @@ -448,7 +456,4 @@ def test_customizations(): ] for command in commands: result = subprocess.run(command, check=False) - if os.getenv("TEST_CUSTOMIZATIONS", "disabled") == "enabled": - assert result.returncode == 0 - else: - assert result.returncode != 0 + assert 
result.returncode == 0 diff --git a/action.yaml b/action.yaml index 6ca973e98c..24baf35d6b 100644 --- a/action.yaml +++ b/action.yaml @@ -132,7 +132,7 @@ runs: "$RUNNER_TEMP/volumes/" cd ${{ github.action_path }} docker compose up --wait - TEST_CUSTOMIZATIONS=enabled pytest -x --cov --junitxml=junit.xml _integration-test/ + pytest -x --cov --junitxml=junit.xml _integration-test/ - name: Upload coverage to Codecov uses: codecov/codecov-action@v5 diff --git a/install/bootstrap-snuba.sh b/install/bootstrap-snuba.sh index 489c4da0e1..20666302ad 100644 --- a/install/bootstrap-snuba.sh +++ b/install/bootstrap-snuba.sh @@ -1,5 +1,9 @@ echo "${_group}Bootstrapping and migrating Snuba ..." -$dcr snuba-api bootstrap --force +if [[ -z "${SKIP_DB_MIGRATIONS:-}" ]]; then + $dcr snuba-api bootstrap --force +else + echo "Skipped DB migrations due to SKIP_DB_MIGRATIONS=$SKIP_DB_MIGRATIONS" +fi echo "${_endgroup}"