diff --git a/.github/workflows/integration-tests-against-emulator.yaml b/.github/workflows/integration-tests-against-emulator.yaml
index 3a4390219d..6a5cd59511 100644
--- a/.github/workflows/integration-tests-against-emulator.yaml
+++ b/.github/workflows/integration-tests-against-emulator.yaml
@@ -29,4 +29,4 @@ jobs:
         env:
           SPANNER_EMULATOR_HOST: localhost:9010
           GOOGLE_CLOUD_PROJECT: emulator-test-project
-          GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE: true
+          GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE: false
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index b278d3723f..e863f7952d 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -33,8 +33,8 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
 # Setup project id.
 export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
 
-# Set up creating a new instance for each system test run
-export GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE=true
+# use existing instance for every system test run
+export GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE=false
 
 # Remove old nox
 python3 -m pip uninstall --yes --quiet nox-automation
diff --git a/noxfile.py b/noxfile.py
index 9b71c55a7a..8461dab8fd 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -217,6 +217,7 @@ def install_systemtest_dependencies(session, *constraints):
     # Exclude version 1.52.0rc1 which has a known issue.
     # See https://github.com/grpc/grpc/issues/32163
     session.install("--pre", "grpcio!=1.52.0rc1")
+    session.install("pytest-xdist")
 
     session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints)
 
@@ -282,7 +283,7 @@ def system(session, database_dialect):
     if system_test_exists:
         session.run(
             "py.test",
-            "--quiet",
+            "-n=12",
             f"--junitxml=system_{session.python}_sponge_log.xml",
             system_test_path,
             *session.posargs,
@@ -294,7 +295,7 @@ def system(session, database_dialect):
     if system_test_folder_exists:
         session.run(
             "py.test",
-            "--quiet",
+            "-n=12",
             f"--junitxml=system_{session.python}_sponge_log.xml",
             system_test_folder_path,
             *session.posargs,
diff --git a/owlbot.py b/owlbot.py
index 7c249527b2..5e728e7f74 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -145,8 +145,6 @@ def get_staging_dirs(
     ".kokoro/build.sh",
     "# Remove old nox",
     """\
-# Set up creating a new instance for each system test run
-export GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE=true
 # Remove old nox""",
 )
 
diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py
index 5b1af63876..4943c94ed4 100644
--- a/samples/samples/conftest.py
+++ b/samples/samples/conftest.py
@@ -15,6 +15,7 @@
 
 import time
 import uuid
+from random import randrange
 
 from google.api_core import exceptions
@@ -26,11 +27,16 @@
 from google.cloud.spanner_v1 import instance
 import pytest
 from test_utils import retry
+from test_utils import system
 
 INSTANCE_CREATION_TIMEOUT = 560  # seconds
 
 OPERATION_TIMEOUT_SECONDS = 120  # seconds
 
+CREATE_INSTANCE = False
+
+INSTANCE_ID_DEFAULT = "test-instance"
+
 retry_429 = retry.RetryErrors(exceptions.ResourceExhausted, delay=15)
 
 
@@ -76,23 +82,26 @@ def scrub_instance_ignore_not_found(to_scrub):
 @pytest.fixture(scope="session")
 def cleanup_old_instances(spanner_client):
     """Delete instances, created by samples, that are older than an hour."""
-    cutoff = int(time.time()) - 1 * 60 * 60
-    instance_filter = "labels.cloud_spanner_samples:true"
+    if CREATE_INSTANCE:
+        cutoff = int(time.time()) - 1 * 60 * 60
+        instance_filter = "labels.cloud_spanner_samples:true"
 
-    for instance_pb in spanner_client.list_instances(filter_=instance_filter):
-        inst = instance.Instance.from_pb(instance_pb, spanner_client)
+        for instance_pb in spanner_client.list_instances(filter_=instance_filter):
+            inst = instance.Instance.from_pb(instance_pb, spanner_client)
 
-        if "created" in inst.labels:
-            create_time = int(inst.labels["created"])
+            if "created" in inst.labels:
+                create_time = int(inst.labels["created"])
 
-            if create_time <= cutoff:
-                scrub_instance_ignore_not_found(inst)
+                if create_time <= cutoff:
+                    scrub_instance_ignore_not_found(inst)
 
 
 @pytest.fixture(scope="module")
 def instance_id():
     """Unique id for the instance used in samples."""
-    return f"test-instance-{uuid.uuid4().hex[:10]}"
+    if CREATE_INSTANCE:
+        return f"test-instance-{uuid.uuid4().hex[:10]}"
+    return INSTANCE_ID_DEFAULT
 
 
 @pytest.fixture(scope="module")
@@ -121,31 +130,36 @@ def sample_instance(
     instance_config,
     sample_name,
 ):
-    sample_instance = spanner_client.instance(
-        instance_id,
-        instance_config,
-        labels={
-            "cloud_spanner_samples": "true",
-            "sample_name": sample_name,
-            "created": str(int(time.time())),
-        },
-    )
-    op = retry_429(sample_instance.create)()
-    op.result(INSTANCE_CREATION_TIMEOUT)  # block until completion
+    if CREATE_INSTANCE:
+        sample_instance = spanner_client.instance(
+            instance_id,
+            instance_config,
+            labels={
+                "cloud_spanner_samples": "true",
+                "sample_name": sample_name,
+                "created": str(int(time.time())),
+            },
+        )
+        op = retry_429(sample_instance.create)()
+        op.result(INSTANCE_CREATION_TIMEOUT)  # block until completion
 
-    # Eventual consistency check
-    retry_found = retry.RetryResult(bool)
-    retry_found(sample_instance.exists)()
+        # Eventual consistency check
+        retry_found = retry.RetryResult(bool)
+        retry_found(sample_instance.exists)()
+    else:
+        sample_instance = spanner_client.instance(instance_id)
+        sample_instance.reload()
 
     yield sample_instance
 
-    for database_pb in sample_instance.list_databases():
-        database.Database.from_pb(database_pb, sample_instance).drop()
+    if CREATE_INSTANCE:
+        for database_pb in sample_instance.list_databases():
+            database.Database.from_pb(database_pb, sample_instance).drop()
 
-    for backup_pb in sample_instance.list_backups():
-        backup.Backup.from_pb(backup_pb, sample_instance).delete()
+        for backup_pb in sample_instance.list_backups():
+            backup.Backup.from_pb(backup_pb, sample_instance).delete()
 
-    sample_instance.delete()
+        sample_instance.delete()
 
 
 @pytest.fixture(scope="module")
@@ -189,7 +203,7 @@ def database_id():
 
     Sample testcase modules can override as needed.
     """
-    return "my-database-id"
+    return unique_id("dbapi-txn")
 
 
 @pytest.fixture(scope="module")
@@ -291,3 +305,6 @@ def kms_key_name(spanner_client):
         "spanner-test-keyring",
         "spanner-test-cmek",
     )
+
+def unique_id(prefix, separator="-"):
+    return f"{prefix}{system.unique_resource_id(separator)}{randrange(100)}"
\ No newline at end of file
diff --git a/samples/samples/noxfile.py b/samples/samples/noxfile.py
index 483b559017..b24aafcc2e 100644
--- a/samples/samples/noxfile.py
+++ b/samples/samples/noxfile.py
@@ -230,6 +230,7 @@ def _session_tests(
 
     session.run(
         "pytest",
+        "-n=12",
         *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args),
         # Pytest will return 5 when no tests are collected. This can happen
         # on travis where slow and flaky tests are excluded.
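
Note (editorial, not part of the patch): the nox changes above swap py.test's "--quiet" flag for pytest-xdist's "-n" option, and that option only exists once pytest-xdist is installed, which is why install_systemtest_dependencies() gains session.install("pytest-xdist") and requirements-test.txt (below) gains a pin. A minimal, hypothetical nox session sketching the same wiring; the session name and test path are illustrative and not taken from this repository:

    # sketch_noxfile.py: illustrative only, not the project's noxfile.py
    import nox


    @nox.session(python="3.12")
    def system_parallel(session):
        """Hypothetical session mirroring the patched system-test invocation."""
        # pytest-xdist provides the -n option used below.
        session.install("pytest", "pytest-xdist")
        session.run(
            "py.test",
            "-n=12",  # fan tests out across 12 worker processes
            "--junitxml=system_sponge_log.xml",
            "tests/system",  # placeholder path
            *session.posargs,
        )
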
diff --git a/samples/samples/requirements-test.txt b/samples/samples/requirements-test.txt
index 915735b7fd..90b7a96a57 100644
--- a/samples/samples/requirements-test.txt
+++ b/samples/samples/requirements-test.txt
@@ -2,3 +2,4 @@ pytest==8.0.0
 pytest-dependency==0.6.0
 mock==5.1.0
 google-cloud-testutils==1.4.0
+pytest-xdist==3.5.0
diff --git a/tests/system/_helpers.py b/tests/system/_helpers.py
index 60926b216e..e1c7918024 100644
--- a/tests/system/_helpers.py
+++ b/tests/system/_helpers.py
@@ -15,6 +15,7 @@
 import operator
 import os
 import time
+from random import randrange
 
 from google.api_core import exceptions
 from google.cloud.spanner_v1 import instance as instance_mod
@@ -24,7 +25,7 @@
 
 CREATE_INSTANCE_ENVVAR = "GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE"
-CREATE_INSTANCE = os.getenv(CREATE_INSTANCE_ENVVAR) is not None
+CREATE_INSTANCE = os.getenv(CREATE_INSTANCE_ENVVAR) == "true"
 
 INSTANCE_ID_ENVVAR = "GOOGLE_CLOUD_TESTS_SPANNER_INSTANCE"
 INSTANCE_ID_DEFAULT = "google-cloud-python-systest"
 
@@ -134,4 +135,4 @@ def cleanup_old_instances(spanner_client):
 
 
 def unique_id(prefix, separator="-"):
-    return f"{prefix}{system.unique_resource_id(separator)}"
+    return f"{prefix}{system.unique_resource_id(separator)}{randrange(100)}"
diff --git a/tests/system/test_database_api.py b/tests/system/test_database_api.py
index 052e628188..f7a233ff57 100644
--- a/tests/system/test_database_api.py
+++ b/tests/system/test_database_api.py
@@ -342,7 +342,7 @@ def test_update_ddl_w_pitr_invalid(
     databases_to_delete,
 ):
     pool = spanner_v1.BurstyPool(labels={"testcase": "update_database_ddl_pitr"})
-    temp_db_id = _helpers.unique_id("pitr_upd_ddl_inv", separator="_")
+    temp_db_id = _helpers.unique_id("pitr_upd_ddl", separator="_")
     retention_period = "0d"
     temp_db = shared_instance.database(temp_db_id, pool=pool)
 
@@ -367,7 +367,7 @@ def test_update_ddl_w_pitr_success(
     databases_to_delete,
 ):
     pool = spanner_v1.BurstyPool(labels={"testcase": "update_database_ddl_pitr"})
-    temp_db_id = _helpers.unique_id("pitr_upd_ddl_inv", separator="_")
+    temp_db_id = _helpers.unique_id("pitr_upd_ddl", separator="_")
     retention_period = "7d"
     temp_db = shared_instance.database(temp_db_id, pool=pool)
 
@@ -399,7 +399,7 @@ def test_update_ddl_w_default_leader_success(
         labels={"testcase": "update_database_ddl_default_leader"},
     )
 
-    temp_db_id = _helpers.unique_id("dfl_ldrr_upd_ddl", separator="_")
+    temp_db_id = _helpers.unique_id("dfl_upd_ddl", separator="_")
     default_leader = "us-east4"
     temp_db = multiregion_instance.database(temp_db_id, pool=pool)
 
@@ -427,7 +427,7 @@ def test_create_role_grant_access_success(
     creator_role_parent = _helpers.unique_id("role_parent", separator="_")
     creator_role_orphan = _helpers.unique_id("role_orphan", separator="_")
 
-    temp_db_id = _helpers.unique_id("dfl_ldrr_upd_ddl", separator="_")
+    temp_db_id = _helpers.unique_id("dfl_ldrr_ddl", separator="_")
 
     temp_db = shared_instance.database(temp_db_id, database_dialect=database_dialect)
     create_op = temp_db.create()
@@ -487,7 +487,7 @@ def test_list_database_role_success(
     creator_role_parent = _helpers.unique_id("role_parent", separator="_")
     creator_role_orphan = _helpers.unique_id("role_orphan", separator="_")
 
-    temp_db_id = _helpers.unique_id("dfl_ldrr_upd_ddl", separator="_")
+    temp_db_id = _helpers.unique_id("dfl_ldrr_ddl", separator="_")
 
     temp_db = shared_instance.database(temp_db_id, database_dialect=database_dialect)
     create_op = temp_db.create()
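
Note (editorial, not part of the patch): two behavioural details in tests/system/_helpers.py are easy to miss. First, CREATE_INSTANCE now requires the environment variable to be exactly "true"; under the old "is not None" check, exporting it as "false" (as build.sh now does) would still have enabled instance creation. Second, unique_id() gains a randrange(100) suffix so parallel xdist workers are less likely to generate colliding database names. A small self-contained sketch of both points; the timestamp below merely stands in for test_utils' unique_resource_id helper, which the real code uses:

    # sketch_helpers.py: standalone illustration, not tests/system/_helpers.py
    import os
    import time
    from random import randrange

    CREATE_INSTANCE_ENVVAR = "GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE"

    os.environ[CREATE_INSTANCE_ENVVAR] = "false"
    old_check = os.getenv(CREATE_INSTANCE_ENVVAR) is not None  # True: any value enabled creation
    new_check = os.getenv(CREATE_INSTANCE_ENVVAR) == "true"    # False: only the literal "true" does
    print(old_check, new_check)  # -> True False


    def unique_id(prefix, separator="-"):
        # The real helper appends test_utils.system.unique_resource_id(separator);
        # a plain timestamp stands in for it here. The trailing randrange(100) is
        # the new part: it reduces name collisions between parallel test workers.
        return f"{prefix}{separator}{int(time.time())}{randrange(100)}"


    print(unique_id("pitr_upd_ddl", separator="_"))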