
Commit

add condition based on create_if_not_exists flag to set log level and run black
femilian-6582 committed Sep 27, 2024
1 parent 45b6fe6 commit 3a2ae30
Showing 4 changed files with 16 additions and 2 deletions.
1 change: 1 addition & 0 deletions src/koheesio/integrations/spark/tableau/hyper.py
@@ -301,6 +301,7 @@ class HyperFileDataFrameWriter(HyperFileWriter):
     hw.hyper_path
     ```
     """
+
     df: DataFrame = Field(default=..., description="Spark DataFrame to write to the Hyper file")
     table_definition: Optional[TableDefinition] = None  # table_definition is not required for this class
 
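The only change in this file is the blank line inserted after the class docstring; the same applies to the two hunks in server.py and the extra blank line in the snowflake test below. This is the "run black" part of the commit: black, as run for this commit, inserts one empty line between a class docstring and the first class-level attribute, and keeps two blank lines before top-level functions. A small before/after illustration (hypothetical `Example` classes, not taken from the repo):

```python
# Before black: the attribute sits directly under the class docstring.
class ExampleBefore:
    """Hypothetical class, used only to illustrate the blank-line rule."""
    value: int = 0


# After black: one empty line between the docstring and the first attribute.
class ExampleAfter:
    """Hypothetical class, used only to illustrate the blank-line rule."""

    value: int = 0
```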
2 changes: 2 additions & 0 deletions src/koheesio/integrations/spark/tableau/server.py
@@ -23,6 +23,7 @@ class TableauServer(Step):
     """
     Base class for Tableau server interactions. Class provides authentication and project identification functionality.
     """
+
     url: str = Field(
         default=...,
         alias="url",
@@ -190,6 +191,7 @@ class TableauHyperPublisher(TableauServer):
     """
     Publish the given Hyper file to the Tableau server. Hyper file will be treated by Tableau server as a datasource.
     """
+
     datasource_name: str = Field(default=..., description="Name of the datasource to publish")
     hyper_path: PurePath = Field(default=..., description="Path to Hyper file")
     publish_mode: TableauHyperPublishMode = Field(
14 changes: 12 additions & 2 deletions src/koheesio/spark/delta.py
@@ -295,7 +295,8 @@ def has_change_type(self) -> bool:
 
     @property
     def exists(self) -> bool:
-        """Check if table exists"""
+        """Check if table exists.
+        Depending on the value of the boolean flag `create_if_not_exists` a different logging level is provided."""
         result = False
 
         try:
@@ -304,7 +305,16 @@ def exists(self) -> bool:
         except AnalysisException as e:
             err_msg = str(e).lower()
             if err_msg.startswith("[table_or_view_not_found]") or err_msg.startswith("table or view not found"):
-                self.log.error(f"Table `{self.table}` doesn't exist.")
+                if self.create_if_not_exists:
+                    self.log.info(
+                        f"Table `{self.table}` doesn't exist. "
+                        f"The `create_if_not_exists` flag is set to True, therefore the table will be created."
+                    )
+                else:
+                    self.log.error(
+                        f"Table `{self.table}` doesn't exist. "
+                        f"The `create_if_not_exists` flag is set to False, therefore the table will not be created."
+                    )
             else:
                 raise e
 
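The substance of the commit is the branch above: when the step is allowed to create the missing table, the situation is logged at INFO; otherwise it stays an ERROR. Below is a minimal, self-contained sketch of that behaviour. The class name `DeltaTableStep`, the plain `logging` setup, and the simulated lookup failure are illustrative assumptions; koheesio's real implementation runs against Spark and catches `AnalysisException`.

```python
import logging

logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s")


class DeltaTableStep:
    """Illustrative stand-in for the class whose `exists` property is patched above."""

    def __init__(self, table: str, create_if_not_exists: bool = False):
        self.table = table
        self.create_if_not_exists = create_if_not_exists
        self.log = logging.getLogger(self.__class__.__name__)

    def _describe_table(self) -> None:
        # Stand-in for the Spark catalog lookup; in koheesio this raises an
        # AnalysisException whose message starts with [TABLE_OR_VIEW_NOT_FOUND].
        raise RuntimeError(f"[TABLE_OR_VIEW_NOT_FOUND] The table `{self.table}` cannot be found.")

    @property
    def exists(self) -> bool:
        result = False
        try:
            self._describe_table()
            result = True
        except RuntimeError as e:
            err_msg = str(e).lower()
            if err_msg.startswith("[table_or_view_not_found]") or err_msg.startswith("table or view not found"):
                if self.create_if_not_exists:
                    # A missing table is expected and will be created downstream: log at INFO.
                    self.log.info(f"Table `{self.table}` doesn't exist; it will be created.")
                else:
                    # A missing table is a genuine problem: log at ERROR.
                    self.log.error(f"Table `{self.table}` doesn't exist and will not be created.")
            else:
                raise e
        return result


DeltaTableStep("catalog.schema.tbl", create_if_not_exists=True).exists   # INFO line
DeltaTableStep("catalog.schema.tbl", create_if_not_exists=False).exists  # ERROR line
```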
1 change: 1 addition & 0 deletions tests/spark/integrations/snowflake/test_snowflake.py
@@ -39,6 +39,7 @@
     "warehouse": "warehouse",
 }
 
+
 def test_snowflake_module_import():
     # test that the pass-through imports in the koheesio.spark snowflake modules are working
     from koheesio.spark.writers import snowflake as snowflake_readers
