Skip to content

Commit

Permalink
refactor
Browse files Browse the repository at this point in the history
  • Loading branch information
mwojtyczka committed Feb 1, 2025
1 parent 625819e commit 8a37122
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 2 deletions.
1 change: 0 additions & 1 deletion .github/workflows/acceptance.yml
Original file line number Diff line number Diff line change
Expand Up @@ -52,4 +52,3 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }}
ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }}
DATABRICKS_CLUSTER_ID: ${{ secrets.TEST_USER_ISOLATION_CLUSTER_ID }}
7 changes: 6 additions & 1 deletion tests/integration/fixtures/test_connect.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,11 @@ def serverless_env():
os.environ.pop('DATABRICKS_SERVERLESS_COMPUTE_ID')


@fixture
def set_shared_cluster(monkeypatch, debug_env, env_or_skip):
    """Point the test environment at the shared user-isolation cluster.

    Overrides ``DATABRICKS_CLUSTER_ID`` in the ``debug_env`` mapping with the
    value of ``TEST_USER_ISOLATION_CLUSTER_ID``. ``monkeypatch.setitem`` keeps
    the override scoped to the requesting test, so the original entry is
    restored on teardown.

    NOTE(review): ``env_or_skip`` presumably skips the test when
    TEST_USER_ISOLATION_CLUSTER_ID is not configured — confirm against the
    fixture's definition elsewhere in the suite.
    """
    monkeypatch.setitem(debug_env, "DATABRICKS_CLUSTER_ID", env_or_skip("TEST_USER_ISOLATION_CLUSTER_ID"))


@fixture
def spark_serverless_cluster_id(ws):
# get new spark session with serverless cluster outside the actual spark fixture under test
Expand All @@ -23,7 +28,7 @@ def spark_serverless_cluster_id(ws):
spark_serverless.stop()


def test_databricks_connect(ws, spark):
def test_databricks_connect(set_shared_cluster, ws, spark):
    """Smoke-test a Databricks Connect session on a classic (non-serverless) cluster.

    The ``set_shared_cluster`` fixture is requested first so the cluster-id
    override is in place when the ``spark`` session fixture is created
    (NOTE(review): this relies on pytest's left-to-right fixture
    instantiation order — confirm ``spark`` reads DATABRICKS_CLUSTER_ID at
    setup time).
    """
    # Trivial round-trip query proves the Connect session is live.
    rows = spark.sql("SELECT 1").collect()
    assert rows[0][0] == 1
    # The shared user-isolation cluster must not be serverless compute.
    assert not is_serverless_cluster(spark, ws)
Expand Down

0 comments on commit 8a37122

Please sign in to comment.