Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

bump black in dev-requirements and pre-commit-config #10407

Merged
merged 5 commits into from
Jul 17, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions .changes/unreleased/Under the Hood-20240716-184859.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
kind: Under the Hood
body: bump black to 24.3.0
time: 2024-07-16T18:48:59.651834-04:00
custom:
  Author: michelleark
  Issue: "10454"
1 change: 1 addition & 0 deletions .flake8
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ ignore =
W503 # makes Flake8 work like black
W504
E203 # makes Flake8 work like black
E704 # makes Flake8 work like black
E741
E501 # long line checking is done in black
exclude = test/
Expand Down
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ repos:
- id: isort
- repo: https://github.com/psf/black
# rev must match what's in dev-requirements.txt
rev: 22.3.0
rev: 24.3.0
hooks:
- id: black
- id: black
Expand Down
3 changes: 2 additions & 1 deletion core/dbt/artifacts/schemas/run/v5/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,7 +158,8 @@ def compatible_previous_versions(cls) -> Iterable[Tuple[str, int]]:
@classmethod
def upgrade_schema_version(cls, data):
"""This overrides the "upgrade_schema_version" call in VersionedSchema (via
ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results."""
ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results.
"""
run_results_schema_version = get_artifact_schema_version(data)
# If less than the current version (v5), preprocess contents to match latest schema version
if run_results_schema_version <= 5:
Expand Down
8 changes: 5 additions & 3 deletions core/dbt/cli/requires.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,9 +179,11 @@ def wrapper(*args, **kwargs):
process_in_blocks=rusage.ru_inblock,
process_out_blocks=rusage.ru_oublock,
),
EventLevel.INFO
if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
else None,
(
EventLevel.INFO
if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
else None
),
)

fire_event(
Expand Down
6 changes: 3 additions & 3 deletions core/dbt/config/runtime.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,9 +290,9 @@ def get_metadata(self) -> ManifestMetadata:
project_name=self.project_name,
project_id=self.hashed_name(),
user_id=tracking.active_user.id if tracking.active_user else None,
send_anonymous_usage_stats=get_flags().SEND_ANONYMOUS_USAGE_STATS
if tracking.active_user
else None,
send_anonymous_usage_stats=(
get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None
),
adapter_type=self.credentials.type,
)

Expand Down
14 changes: 6 additions & 8 deletions core/dbt/context/context_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,7 @@ class ConfigSource:
def __init__(self, project):
self.project = project

def get_config_dict(self, resource_type: NodeType):
...
def get_config_dict(self, resource_type: NodeType): ...


class UnrenderedConfig(ConfigSource):
Expand Down Expand Up @@ -130,12 +129,12 @@ def _active_project_configs(
return self._project_configs(self._active_project, fqn, resource_type)

@abstractmethod
def _update_from_config(self, result: T, partial: Dict[str, Any], validate: bool = False) -> T:
...
def _update_from_config(
self, result: T, partial: Dict[str, Any], validate: bool = False
) -> T: ...

@abstractmethod
def initial_result(self, resource_type: NodeType, base: bool) -> T:
...
def initial_result(self, resource_type: NodeType, base: bool) -> T: ...

def calculate_node_config(
self,
Expand Down Expand Up @@ -181,8 +180,7 @@ def calculate_node_config_dict(
project_name: str,
base: bool,
patch_config_dict: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
...
) -> Dict[str, Any]: ...


class ContextConfigGenerator(BaseContextConfigGenerator[C]):
Expand Down
9 changes: 3 additions & 6 deletions core/dbt/context/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -239,8 +239,7 @@ class BaseRefResolver(BaseResolver):
@abc.abstractmethod
def resolve(
self, name: str, package: Optional[str] = None, version: Optional[NodeVersion] = None
) -> RelationProxy:
...
) -> RelationProxy: ...

def _repack_args(
self, name: str, package: Optional[str], version: Optional[NodeVersion]
Expand Down Expand Up @@ -306,8 +305,7 @@ def __call__(self, *args: str) -> RelationProxy:

class BaseMetricResolver(BaseResolver):
@abc.abstractmethod
def resolve(self, name: str, package: Optional[str] = None) -> MetricReference:
...
def resolve(self, name: str, package: Optional[str] = None) -> MetricReference: ...

def _repack_args(self, name: str, package: Optional[str]) -> List[str]:
if package is None:
Expand Down Expand Up @@ -341,8 +339,7 @@ def __call__(self, *args: str) -> MetricReference:


class Config(Protocol):
def __init__(self, model, context_config: Optional[ContextConfig]):
...
def __init__(self, model, context_config: Optional[ContextConfig]): ...


# Implementation of "config(..)" calls in models
Expand Down
6 changes: 3 additions & 3 deletions core/dbt/contracts/graph/manifest.py
Original file line number Diff line number Diff line change
Expand Up @@ -1676,9 +1676,9 @@ def __init__(self, macros) -> None:
self.macros = macros
self.metadata = ManifestMetadata(
user_id=tracking.active_user.id if tracking.active_user else None,
send_anonymous_usage_stats=get_flags().SEND_ANONYMOUS_USAGE_STATS
if tracking.active_user
else None,
send_anonymous_usage_stats=(
get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None
),
)
# This is returned by the 'graph' context property
# in the ProviderContext class.
Expand Down
8 changes: 4 additions & 4 deletions core/dbt/contracts/graph/nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -636,9 +636,9 @@
contract_enforced_disabled: bool = False
columns_removed: List[str] = []
column_type_changes: List[Dict[str, str]] = []
enforced_column_constraint_removed: List[
Dict[str, str]
] = [] # column_name, constraint_type
enforced_column_constraint_removed: List[Dict[str, str]] = (
    []
) # column_name, constraint_type
(Codecov annotation: added line 639 in core/dbt/contracts/graph/nodes.py was not covered by tests)
enforced_model_constraint_removed: List[Dict[str, Any]] = [] # constraint_type, columns
materialization_changed: List[str] = []

Expand Down Expand Up @@ -1554,7 +1554,7 @@
return False

# exports should be in the same order, so we zip them for easy iteration
for (old_export, new_export) in zip(old.exports, self.exports):
for old_export, new_export in zip(old.exports, self.exports):
if not (
old_export.name == new_export.name
and old_export.config.export_as == new_export.config.export_as
Expand Down
4 changes: 1 addition & 3 deletions core/dbt/events/logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,9 +74,7 @@ def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = [])
log_level = (
EventLevel.ERROR
if flags.QUIET
else EventLevel.DEBUG
if flags.DEBUG
else EventLevel(flags.LOG_LEVEL)
else EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL)
)
console_config = get_stdout_config(
line_format,
Expand Down
2 changes: 1 addition & 1 deletion core/dbt/parser/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ def parse_python_model(self, node, config, context):
dbt_parser = PythonParseVisitor(node)
dbt_parser.visit(tree)

for (func, args, kwargs) in dbt_parser.dbt_function_calls:
for func, args, kwargs in dbt_parser.dbt_function_calls:
if func == "get":
num_args = len(args)
if num_args == 0:
Expand Down
8 changes: 5 additions & 3 deletions core/dbt/task/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -274,9 +274,11 @@ def from_run_result(self, result, start_time, timing_info):

def compile_and_execute(self, manifest, ctx):
result = None
with self.adapter.connection_named(
self.node.unique_id, self.node
) if get_flags().INTROSPECT else nullcontext():
with (
self.adapter.connection_named(self.node.unique_id, self.node)
if get_flags().INTROSPECT
else nullcontext()
):
ctx.node.update_event_status(node_status=RunningStatus.Compiling)
fire_event(
NodeCompiling(
Expand Down
1 change: 1 addition & 0 deletions core/dbt/tests/fixtures/project.py
Original file line number Diff line number Diff line change
Expand Up @@ -341,6 +341,7 @@ def write_project_files_recursively(path, file_dict):
# Provide a dictionary of file names to contents. Nested directories
# are handle by nested dictionaries.


# models directory
@pytest.fixture(scope="class")
def models():
Expand Down
1 change: 1 addition & 0 deletions core/dbt/tests/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,6 +291,7 @@ class TestProcessingException(Exception):

# Testing utilities that use adapter code


# Uses:
# adapter.config.credentials
# adapter.quote
Expand Down
2 changes: 1 addition & 1 deletion dev-requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-ada
git+https://github.com/dbt-labs/dbt-common.git@main
git+https://github.com/dbt-labs/dbt-postgres.git@main
# black must match what's in .pre-commit-config.yaml to be sure local env matches CI
black==22.3.0
black==24.3.0
bumpversion
ddtrace==2.3.0
docutils
Expand Down
4 changes: 2 additions & 2 deletions tests/functional/adapter/constraints/test_constraints.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def test__constraints_wrong_column_names(self, project, string_type, int_type):
def test__constraints_wrong_column_data_types(
self, project, string_type, int_type, schema_string_type, schema_int_type, data_types
):
for (sql_column_value, schema_data_type, error_data_type) in data_types:
for sql_column_value, schema_data_type, error_data_type in data_types:
# Write parametrized data_type to sql file
write_file(
my_model_data_type_sql.format(sql_value=sql_column_value),
Expand Down Expand Up @@ -146,7 +146,7 @@ def test__constraints_wrong_column_data_types(
assert all([(exp in log_output or exp.upper() in log_output) for exp in expected])

def test__constraints_correct_column_data_types(self, project, data_types):
for (sql_column_value, schema_data_type, _) in data_types:
for sql_column_value, schema_data_type, _ in data_types:
# Write parametrized data_type to sql file
write_file(
my_model_data_type_sql.format(sql_value=sql_column_value),
Expand Down
6 changes: 3 additions & 3 deletions tests/functional/list/test_list.py
Original file line number Diff line number Diff line change
Expand Up @@ -697,9 +697,9 @@ def expect_resource_type_env_var(self):
"test.unique_outer_id",
}
del os.environ["DBT_RESOURCE_TYPES"]
os.environ[
"DBT_EXCLUDE_RESOURCE_TYPES"
] = "test saved_query metric source semantic_model snapshot seed"
os.environ["DBT_EXCLUDE_RESOURCE_TYPES"] = (
"test saved_query metric source semantic_model snapshot seed"
)
results = self.run_dbt_ls()
assert set(results) == {
"test.ephemeral",
Expand Down
6 changes: 3 additions & 3 deletions tests/unit/parser/test_manifest.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,9 +124,9 @@ def set_required_mocks(
mocker.patch("dbt.parser.manifest.get_adapter").return_value = mock_adapter
mocker.patch("dbt.parser.manifest.ManifestLoader.load").return_value = manifest
mocker.patch("dbt.parser.manifest._check_manifest").return_value = None
mocker.patch(
"dbt.parser.manifest.ManifestLoader.save_macros_to_adapter"
).return_value = None
mocker.patch("dbt.parser.manifest.ManifestLoader.save_macros_to_adapter").return_value = (
None
)
mocker.patch("dbt.tracking.active_user").return_value = User(None)

def test_write_perf_info(
Expand Down
6 changes: 3 additions & 3 deletions tests/unit/parser/test_partial.py
Original file line number Diff line number Diff line change
Expand Up @@ -193,9 +193,9 @@ class TestFileDiff:
def partial_parsing(self, manifest, files):
safe_set_invocation_context()
saved_files = deepcopy(files)
saved_files[
"my_test://models/python_model_untouched.py"
].checksum = FileHash.from_contents("something new")
saved_files["my_test://models/python_model_untouched.py"].checksum = (
FileHash.from_contents("something new")
)
return PartialParsing(manifest, saved_files)

def test_build_file_diff_basic(self, partial_parsing):
Expand Down
10 changes: 5 additions & 5 deletions tests/unit/test_compilation.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ def _get_graph_queue(
def test_linker_add_dependency(self, linker: Linker) -> None:
actual_deps = [("A", "B"), ("A", "C"), ("B", "C")]

for (l, r) in actual_deps:
for l, r in actual_deps:
linker.dependency(l, r)

queue = self._get_graph_queue(_mock_manifest("ABC"), linker)
Expand Down Expand Up @@ -119,7 +119,7 @@ def test_linker_add_disjoint_dependencies(self, linker: Linker) -> None:
actual_deps = [("A", "B")]
additional_node = "Z"

for (l, r) in actual_deps:
for l, r in actual_deps:
linker.dependency(l, r)
linker.add_node(additional_node)

Expand Down Expand Up @@ -150,7 +150,7 @@ def test_linker_add_disjoint_dependencies(self, linker: Linker) -> None:
def test_linker_dependencies_limited_to_some_nodes(self, linker: Linker) -> None:
actual_deps = [("A", "B"), ("B", "C"), ("C", "D")]

for (l, r) in actual_deps:
for l, r in actual_deps:
linker.dependency(l, r)

queue = self._get_graph_queue(_mock_manifest("ABCD"), linker, ["B"])
Expand Down Expand Up @@ -181,15 +181,15 @@ def test_linker_dependencies_limited_to_some_nodes(self, linker: Linker) -> None
def test__find_cycles__cycles(self, linker: Linker) -> None:
actual_deps = [("A", "B"), ("B", "C"), ("C", "A")]

for (l, r) in actual_deps:
for l, r in actual_deps:
linker.dependency(l, r)

assert linker.find_cycles() is not None

def test__find_cycles__no_cycles(self, linker: Linker) -> None:
actual_deps = [("A", "B"), ("B", "C"), ("C", "D")]

for (l, r) in actual_deps:
for l, r in actual_deps:
linker.dependency(l, r)

assert linker.find_cycles() is None
1 change: 1 addition & 0 deletions tests/unit/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
Note that all imports should be inside the functions to avoid import/mocking
issues.
"""

import os
import string
from unittest import TestCase, mock
Expand Down
6 changes: 3 additions & 3 deletions tests/unit/utils/adapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,9 +42,9 @@ def postgres_adapter(
adapter = get_adapter(runtime_config)
assert isinstance(adapter, PostgresAdapter)

mocker.patch(
"dbt.parser.manifest.ManifestLoader.build_manifest_state_check"
).return_value = ManifestStateCheck()
mocker.patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check").return_value = (
ManifestStateCheck()
)
manifest = ManifestLoader.load_macros(
runtime_config,
adapter.connections.set_query_header,
Expand Down
1 change: 1 addition & 0 deletions third-party-stubs/mashumaro/jsonschema/models.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ class JSONSchema(DataClassJSONMixin):
serialize_by_alias: bool
aliases: Incomplete
serialization_strategy: Incomplete

def __pre_serialize__(self, context: Optional[Dict]) -> JSONSchema: ...
def __post_serialize__(self, d: Dict[Any, Any], context: Optional[Dict]) -> Dict[Any, Any]: ...
def __init__(
Expand Down
Loading