[Backport 1.5.latest] persist view column comments #931

Merged: 2 commits merged on Nov 30, 2023
6 changes: 6 additions & 0 deletions .changes/unreleased/Features-20230817-130731.yaml
@@ -0,0 +1,6 @@
kind: Features
body: Persist Column level comments when creating views
time: 2023-08-17T13:07:31.6812862Z
custom:
  Author: jurasan
  Issue: 372
21 changes: 21 additions & 0 deletions dbt/include/spark/macros/adapters.sql
@@ -223,9 +223,30 @@
{% endfor %}
{% endmacro %}

{% macro get_column_comment_sql(column_name, column_dict) -%}
  {% if column_name in column_dict and column_dict[column_name]["description"] -%}
    {% set escaped_description = column_dict[column_name]["description"] | replace("'", "\\'") %}
    {% set column_comment_clause = "comment '" ~ escaped_description ~ "'" %}
  {%- endif -%}
  {{ adapter.quote(column_name) }} {{ column_comment_clause }}
{% endmacro %}

{% macro get_persist_docs_column_list(model_columns, query_columns) %}
  {% for column_name in query_columns %}
    {{ get_column_comment_sql(column_name, model_columns) }}
    {{- ", " if not loop.last else "" }}
  {% endfor %}
{% endmacro %}

{% macro spark__create_view_as(relation, sql) -%}
  create or replace view {{ relation }}
  {% if config.persist_column_docs() -%}
    {% set model_columns = model.columns %}
    {% set query_columns = get_columns_in_query(sql) %}
    (
    {{ get_persist_docs_column_list(model_columns, query_columns) }}
    )
  {% endif %}
  {{ comment_clause() }}
  {%- set contract_config = config.get('contract') -%}
  {%- if contract_config.enforced -%}
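For orientation, an illustrative sketch rather than output captured from this PR: with persist_docs.columns enabled, spark__create_view_as renders a parenthesized column list via get_persist_docs_column_list, and get_column_comment_sql attaches a comment '...' clause to each documented column, backslash-escaping single quotes in the description. Assuming backtick quoting from adapter.quote and invented schema, model, and column names (the trailing as <sql> comes from the part of the macro not shown in this hunk), the rendered DDL would look roughly like:

create or replace view my_schema.my_view (
  `id` comment 'the customer\'s identifier',
  `name`
)
comment 'hypothetical view-level description'
as
  select id, name from my_schema.source_table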
28 changes: 28 additions & 0 deletions tests/functional/adapter/persist_docs/fixtures.py
@@ -21,11 +21,39 @@
select 1 as id, 'Joe' as name
"""

_MODELS__VIEW_DELTA_MODEL = """
{{ config(materialized='view') }}
select id, count(*) as count from {{ ref('table_delta_model') }} group by id
"""

_MODELS__TABLE_DELTA_MODEL_MISSING_COLUMN = """
{{ config(materialized='table', file_format='delta') }}
select 1 as id, 'Joe' as different_name
"""
_VIEW_PROPERTIES_MODELS = """
version: 2

models:
- name: view_delta_model
description: |
View model description "with double quotes"
and with 'single quotes' as welll as other;
'''abc123'''
reserved -- characters
--
/* comment */
Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
columns:
- name: id
description: |
id Column description "with double quotes"
and with 'single quotes' as welll as other;
'''abc123'''
reserved -- characters
--
/* comment */
Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
"""
_PROPERTIES__MODELS = """
version: 2

44 changes: 44 additions & 0 deletions tests/functional/adapter/persist_docs/test_persist_docs.py
@@ -10,6 +10,8 @@
    _PROPERTIES__MODELS,
    _PROPERTIES__SEEDS,
    _SEEDS__BASIC,
    _MODELS__VIEW_DELTA_MODEL,
    _VIEW_PROPERTIES_MODELS,
)


@@ -76,6 +78,48 @@ def test_delta_comments(self, project):
assert result[2].startswith("Some stuff here and then a call to")


@pytest.mark.skip_profile("apache_spark", "spark_session")
class TestPersistDocsDeltaView:
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "table_delta_model.sql": _MODELS__TABLE_DELTA_MODEL,
            "view_delta_model.sql": _MODELS__VIEW_DELTA_MODEL,
            "schema.yml": _VIEW_PROPERTIES_MODELS,
        }

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "models": {
                "test": {
                    "+persist_docs": {
                        "relation": True,
                        "columns": True,
                    },
                }
            },
        }

    def test_delta_comments(self, project):
        run_dbt(["run"])

        results = project.run_sql(
            "describe extended {schema}.{table}".format(
                schema=project.test_schema, table="view_delta_model"
            ),
            fetch="all",
        )

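        # `describe extended` returns rows of (col_name, data_type, comment): column comments
        # appear in the third field, while the view-level comment appears in a detail row
        # whose first field is "Comment".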
        for result in results:
            if result[0] == "Comment":
                assert result[1].startswith("View model description")
            if result[0] == "id":
                assert result[2].startswith("id Column description")
            if result[0] == "count":
                assert result[2] is None


@pytest.mark.skip_profile("apache_spark", "spark_session")
class TestPersistDocsMissingColumn:
    @pytest.fixture(scope="class")