From e6e1e1039fbbfa9096329dbce12bdb3be191e1dd Mon Sep 17 00:00:00 2001
From: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com>
Date: Tue, 15 Oct 2024 11:42:52 -0700
Subject: [PATCH 1/2] Setup upper bound on numpy import in TestChangingSchemaSpark (#1120)

---
 tests/functional/adapter/test_python_model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/functional/adapter/test_python_model.py b/tests/functional/adapter/test_python_model.py
index 50132b883..2ca1c2211 100644
--- a/tests/functional/adapter/test_python_model.py
+++ b/tests/functional/adapter/test_python_model.py
@@ -67,7 +67,7 @@ def model(dbt, spark):
                 "ResourceClass": "SingleNode"
             }
         },
-        packages=['spacy', 'torch', 'pydantic>=1.10.8']
+        packages=['spacy', 'torch', 'pydantic>=1.10.8', 'numpy<2']
     )
     data = [[1,2]] * 10
     return spark.createDataFrame(data, schema=['test', 'test2'])

From 805a08e6e0442da43851ae2511d2b1532c8637ea Mon Sep 17 00:00:00 2001
From: Gerda Shank
Date: Tue, 15 Oct 2024 17:31:32 -0400
Subject: [PATCH 2/2] Enable setting dbt_valid_to snapshot column to new setting dbt_valid_to_current (#1113)

---
 .changes/unreleased/Features-20240927-133927.yaml      | 6 ++++++
 dbt/include/spark/macros/materializations/snapshot.sql | 7 ++++++-
 tests/functional/adapter/test_python_model.py          | 6 +++++-
 3 files changed, 17 insertions(+), 2 deletions(-)
 create mode 100644 .changes/unreleased/Features-20240927-133927.yaml

diff --git a/.changes/unreleased/Features-20240927-133927.yaml b/.changes/unreleased/Features-20240927-133927.yaml
new file mode 100644
index 000000000..ce04ac073
--- /dev/null
+++ b/.changes/unreleased/Features-20240927-133927.yaml
@@ -0,0 +1,6 @@
+kind: Features
+body: Enable setting current value of dbt_valid_to
+time: 2024-09-27T13:39:27.268886-04:00
+custom:
+  Author: gshank
+  Issue: "1112"

diff --git a/dbt/include/spark/macros/materializations/snapshot.sql b/dbt/include/spark/macros/materializations/snapshot.sql
index 43c4750f6..b4ef6e5d4 100644
--- a/dbt/include/spark/macros/materializations/snapshot.sql
+++ b/dbt/include/spark/macros/materializations/snapshot.sql
@@ -24,7 +24,12 @@
   {% endif %}
    on DBT_INTERNAL_SOURCE.{{ columns.dbt_scd_id }} = DBT_INTERNAL_DEST.{{ columns.dbt_scd_id }}
    when matched
-     and DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null
+     {% if config.get("dbt_valid_to_current") %}
+       and ( DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} = {{ config.get('dbt_valid_to_current') }} or
+             DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null )
+     {% else %}
+       and DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null
+     {% endif %}
      and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')
        then update
        set {{ columns.dbt_valid_to }} = DBT_INTERNAL_SOURCE.{{ columns.dbt_valid_to }}

diff --git a/tests/functional/adapter/test_python_model.py b/tests/functional/adapter/test_python_model.py
index 2ca1c2211..2ecce2662 100644
--- a/tests/functional/adapter/test_python_model.py
+++ b/tests/functional/adapter/test_python_model.py
@@ -85,7 +85,11 @@ def model(dbt, spark):


 @pytest.mark.skip_profile(
-    "apache_spark", "spark_session", "databricks_sql_endpoint", "spark_http_odbc"
+    "apache_spark",
+    "spark_session",
+    "databricks_sql_endpoint",
+    "spark_http_odbc",
+    "databricks_http_cluster",
 )
 class TestChangingSchemaSpark:
     """
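
For reference, a minimal sketch of a snapshot configuration that would exercise the new
dbt_valid_to_current setting read by the macro change above, assuming dbt-core 1.9's YAML
snapshot spec; the snapshot name, source reference, and the chosen end-of-time literal are
hypothetical and not taken from this patch:

    snapshots:
      - name: orders_snapshot                       # hypothetical snapshot name
        relation: source('jaffle_shop', 'orders')   # hypothetical source
        config:
          unique_key: id
          strategy: timestamp
          updated_at: updated_at
          # When set, current rows carry this literal in dbt_valid_to instead of NULL;
          # the merge condition in snapshot.sql matches either value.
          dbt_valid_to_current: "to_date('9999-12-31')"

The extra "or ... is null" branch in the merge condition keeps rows written before the
setting was enabled (which still have NULL dbt_valid_to) eligible for updates.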