
Commit b6ef234

upgrade black (#1949)
* upgrade black

* adding additional env vars to capture error (#1946)

* changes

* changes

---------

Co-authored-by: Saurabh Garg <[email protected]>
savingoyal and iamsgarg-ob authored Aug 8, 2024
1 parent 6a40c20 commit b6ef234
Showing 30 changed files with 219 additions and 171 deletions.
5 changes: 2 additions & 3 deletions .pre-commit-config.yaml
@@ -6,13 +6,12 @@ repos:
       - id: check-yaml
       - id: check-json
   - repo: https://github.com/ambv/black
-    rev: 22.10.0
+    rev: 24.4.2
     hooks:
       - id: black
         language_version: python3
         exclude: "^metaflow/_vendor/"
         additional_dependencies: ["click<8.1.0"]
-        # python3.12 is not supported in black 22.10.0
-        args: [-t, py34, -t, py35, -t, py36, -t, py37, -t, py38, -t, py39, -t, py310, -t, py311]
+        args: [-t, py34, -t, py35, -t, py36, -t, py37, -t, py38, -t, py39, -t, py310, -t, py311, -t, py312]
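
For context, the -t flags select black's target Python versions; the new entry adds 3.12 now that black 24.x understands it. A minimal sketch of the same knob through black's Python API (illustrative only, not part of this commit):

    import black

    # TargetVersion.PY312 exists in black >= 24; this mirrors "-t py312" on the CLI.
    mode = black.Mode(target_versions={black.TargetVersion.PY312})
    print(black.format_str("x = {'a': 1}", mode=mode))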


12 changes: 6 additions & 6 deletions metaflow/client/core.py
@@ -391,9 +391,9 @@ def __iter__(self) -> Iterator["MetaflowObject"]:
                     _object=obj,
                     _parent=self,
                     _namespace_check=self._namespace_check,
-                    _current_namespace=self._current_namespace
-                    if self._namespace_check
-                    else None,
+                    _current_namespace=(
+                        self._current_namespace if self._namespace_check else None
+                    ),
                 )
                 for obj in unfiltered_children
             ),
@@ -506,9 +506,9 @@ def __getitem__(self, id: str) -> "MetaflowObject":
                 _object=obj,
                 _parent=self,
                 _namespace_check=self._namespace_check,
-                _current_namespace=self._current_namespace
-                if self._namespace_check
-                else None,
+                _current_namespace=(
+                    self._current_namespace if self._namespace_check else None
+                ),
             )
         else:
             raise KeyError(id)
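
Most of the churn in this commit comes from one stable-style change in black 24.x: a multiline conditional expression is wrapped in parentheses rather than left dangling under the assignment or keyword argument. A minimal illustrative sketch (the names here are made up, not from this repo):

    def lookup(namespace=None):
        return namespace

    namespace_check = True
    current = "user:alice"

    # black 22.10 split the bare conditional across lines inside the call:
    #     lookup(
    #         namespace=current
    #         if namespace_check
    #         else None,
    #     )
    # black 24.x parenthesizes it instead, as in the hunks above:
    result = lookup(namespace=(current if namespace_check else None))
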
109 changes: 62 additions & 47 deletions metaflow/cmd/develop/stub_generator.py
@@ -292,9 +292,11 @@ def _add_to_typing_check(name, is_module=False):
             try:
                 potential_element = eval(
                     element,
-                    self._current_parent_module.__dict__
-                    if self._current_parent_module
-                    else None,
+                    (
+                        self._current_parent_module.__dict__
+                        if self._current_parent_module
+                        else None
+                    ),
                 )
                 if potential_element:
                     element = potential_element
@@ -555,14 +557,14 @@ def _extract_signature_from_decorator(
                 inspect.Parameter(
                     name=arg_name,
                     kind=inspect.Parameter.KEYWORD_ONLY,
-                    default=default
-                    if default_set
-                    else None
-                    if is_optional
-                    else inspect.Parameter.empty,
-                    annotation=Optional[type_name]
-                    if is_optional
-                    else type_name,
+                    default=(
+                        default
+                        if default_set
+                        else None if is_optional else inspect.Parameter.empty
+                    ),
+                    annotation=(
+                        Optional[type_name] if is_optional else type_name
+                    ),
                 )
             )
             if not default_set:
@@ -706,24 +708,31 @@ def _add():
         result = result + [
             (
                 inspect.Signature(
-                    parameters=[
-                        inspect.Parameter(
-                            name="f",
-                            kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,
-                            annotation=Optional[typing.Type[FlowSpecDerived]],
-                            default=None
-                            if no_arg_version
-                            else inspect.Parameter.empty,
-                        )
-                    ]
-                    + parameters
-                    if no_arg_version
-                    else [] + parameters,
-                    return_annotation=inspect.Signature.empty
-                    if no_arg_version
-                    else Callable[
-                        [typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]
-                    ],
+                    parameters=(
+                        [
+                            inspect.Parameter(
+                                name="f",
+                                kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,
+                                annotation=Optional[typing.Type[FlowSpecDerived]],
+                                default=(
+                                    None
+                                    if no_arg_version
+                                    else inspect.Parameter.empty
+                                ),
+                            )
+                        ]
+                        + parameters
+                        if no_arg_version
+                        else [] + parameters
+                    ),
+                    return_annotation=(
+                        inspect.Signature.empty
+                        if no_arg_version
+                        else Callable[
+                            [typing.Type[FlowSpecDerived]],
+                            typing.Type[FlowSpecDerived],
+                        ]
+                    ),
                 ),
                 "",
             ),
@@ -732,24 +741,30 @@ def _add():
         result = result + [
             (
                 inspect.Signature(
-                    parameters=[
-                        inspect.Parameter(
-                            name="f",
-                            kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,
-                            annotation=Optional[MetaflowStepFunction],
-                            default=None
-                            if no_arg_version
-                            else inspect.Parameter.empty,
-                        )
-                    ]
-                    + parameters
-                    if no_arg_version
-                    else [] + parameters,
-                    return_annotation=inspect.Signature.empty
-                    if no_arg_version
-                    else typing.Callable[
-                        [MetaflowStepFunction], MetaflowStepFunction
-                    ],
+                    parameters=(
+                        [
+                            inspect.Parameter(
+                                name="f",
+                                kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,
+                                annotation=Optional[MetaflowStepFunction],
+                                default=(
+                                    None
+                                    if no_arg_version
+                                    else inspect.Parameter.empty
+                                ),
+                            )
+                        ]
+                        + parameters
+                        if no_arg_version
+                        else [] + parameters
+                    ),
+                    return_annotation=(
+                        inspect.Signature.empty
+                        if no_arg_version
+                        else typing.Callable[
+                            [MetaflowStepFunction], MetaflowStepFunction
+                        ]
+                    ),
                 ),
                 "",
            ),
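
Worth noting on the parameters= hunks above: a conditional expression binds more loosely than +, so the old and new spellings are equivalent; the parentheses black 24.x adds only make the grouping visible. A quick illustrative check:

    parameters = ["a", "b"]
    no_arg_version = True

    implicit = ["f"] + parameters if no_arg_version else [] + parameters
    explicit = (["f"] + parameters) if no_arg_version else ([] + parameters)
    assert implicit == explicit == ["f", "a", "b"]
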
2 changes: 1 addition & 1 deletion metaflow/datastore/content_addressed_store.py
@@ -133,7 +133,7 @@ def load_blobs(self, keys, force_raw=False):
             load_paths.append((key, path))

         with self._storage_impl.load_bytes([p for _, p in load_paths]) as loaded:
-            for (path_key, file_path, meta) in loaded:
+            for path_key, file_path, meta in loaded:
                 key = self._storage_impl.path_split(path_key)[-1]
                 # At this point, we either return the object as is (if raw) or
                 # decode it according to the encoding version
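
This hunk (and the task_datastore.py one below) reflects another rule in newer black: redundant parentheses around tuple targets in for loops are dropped; behavior is identical. An illustrative sketch:

    loaded = [("k1", "/tmp/a", None), ("k2", "/tmp/b", None)]

    # Old style: for (path_key, file_path, meta) in loaded:
    for path_key, file_path, meta in loaded:
        print(path_key, file_path, meta)
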
2 changes: 1 addition & 1 deletion metaflow/datastore/task_datastore.py
@@ -381,7 +381,7 @@ def load_artifacts(self, names):
             # We assume that if we have one "old" style artifact, all of them are
             # like that which is an easy assumption to make since artifacts are all
             # stored by the same implementation of the datastore for a given task.
-            for (key, blob) in self._ca_store.load_blobs(to_load.keys()):
+            for key, blob in self._ca_store.load_blobs(to_load.keys()):
                 names = to_load[key]
                 for name in names:
                     # We unpickle everytime to have fully distinct objects (the user
6 changes: 2 additions & 4 deletions metaflow/decorators.py
@@ -575,15 +575,13 @@ def _init_step_decorators(flow, graph, environment, flow_datastore, logger):
 @overload
 def step(
     f: Callable[[FlowSpecDerived], None]
-) -> Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
+) -> Callable[[FlowSpecDerived, StepFlag], None]: ...


 @overload
 def step(
     f: Callable[[FlowSpecDerived, Any], None],
-) -> Callable[[FlowSpecDerived, Any, StepFlag], None]:
-    ...
+) -> Callable[[FlowSpecDerived, Any, StepFlag], None]: ...


 def step(
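
The shrinking @overload stubs come from black 24.x formatting "dummy" bodies consisting only of ... onto the def line. A minimal runnable sketch of the pattern (simplified signatures, not Metaflow's):

    from typing import overload

    @overload
    def step(f: int) -> int: ...
    @overload
    def step(f: str) -> str: ...
    def step(f):
        # Single real implementation backing both overloads.
        return f
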
6 changes: 3 additions & 3 deletions metaflow/extension_support/__init__.py
@@ -980,9 +980,9 @@ def exec_module(self, module):
         if self._previously_loaded_module:
             sys.modules[self._orig_name] = self._previously_loaded_module
         if self._previously_loaded_parent_module:
-            sys.modules[
-                ".".join(self._orig_name.split(".")[:-1])
-            ] = self._previously_loaded_parent_module
+            sys.modules[".".join(self._orig_name.split(".")[:-1])] = (
+                self._previously_loaded_parent_module
+            )


 class _LazyFinder(MetaPathFinder):
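
Here, and in parameters.py and airflow.py below, black 24.x prefers splitting the right-hand side of an assignment over breaking inside the subscript or the chained targets on the left. A sketch of the rule with illustrative names, assuming the whole statement is too long for one line (as written here, black would collapse it back):

    registry = {}
    part_one, part_two = "extension", "module"

    # Old: the subscript itself was broken across lines:
    #     registry[
    #         ".".join([part_one, part_two])
    #     ] = "value"
    # New: the subscript stays on one line and the RHS is parenthesized:
    registry[".".join([part_one, part_two])] = (
        "value"
    )
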
6 changes: 3 additions & 3 deletions metaflow/extension_support/plugins.py
@@ -186,9 +186,9 @@ def resolve_plugins(category):
     "logging_sidecar": None,
     "monitor_sidecar": None,
     "aws_client_provider": lambda x: x.name,
-    "cli": lambda x: list(x.commands)[0]
-    if len(x.commands) == 1
-    else "too many commands",
+    "cli": lambda x: (
+        list(x.commands)[0] if len(x.commands) == 1 else "too many commands"
+    ),
 }

48 changes: 30 additions & 18 deletions metaflow/metaflow_config.py
@@ -104,9 +104,11 @@
 DATATOOLS_SUFFIX = from_conf("DATATOOLS_SUFFIX", "data")
 DATATOOLS_S3ROOT = from_conf(
     "DATATOOLS_S3ROOT",
-    os.path.join(DATASTORE_SYSROOT_S3, DATATOOLS_SUFFIX)
-    if DATASTORE_SYSROOT_S3
-    else None,
+    (
+        os.path.join(DATASTORE_SYSROOT_S3, DATATOOLS_SUFFIX)
+        if DATASTORE_SYSROOT_S3
+        else None
+    ),
 )

 TEMPDIR = from_conf("TEMPDIR", ".")
@@ -124,25 +126,31 @@
 # Similar to DATATOOLS_LOCALROOT, this is used ONLY by the IncludeFile's internal implementation.
 DATATOOLS_AZUREROOT = from_conf(
     "DATATOOLS_AZUREROOT",
-    os.path.join(DATASTORE_SYSROOT_AZURE, DATATOOLS_SUFFIX)
-    if DATASTORE_SYSROOT_AZURE
-    else None,
+    (
+        os.path.join(DATASTORE_SYSROOT_AZURE, DATATOOLS_SUFFIX)
+        if DATASTORE_SYSROOT_AZURE
+        else None
+    ),
 )
 # GS datatools root location
 # Note: we do not expose an actual datatools library for GS (like we do for S3)
 # Similar to DATATOOLS_LOCALROOT, this is used ONLY by the IncludeFile's internal implementation.
 DATATOOLS_GSROOT = from_conf(
     "DATATOOLS_GSROOT",
-    os.path.join(DATASTORE_SYSROOT_GS, DATATOOLS_SUFFIX)
-    if DATASTORE_SYSROOT_GS
-    else None,
+    (
+        os.path.join(DATASTORE_SYSROOT_GS, DATATOOLS_SUFFIX)
+        if DATASTORE_SYSROOT_GS
+        else None
+    ),
 )
 # Local datatools root location
 DATATOOLS_LOCALROOT = from_conf(
     "DATATOOLS_LOCALROOT",
-    os.path.join(DATASTORE_SYSROOT_LOCAL, DATATOOLS_SUFFIX)
-    if DATASTORE_SYSROOT_LOCAL
-    else None,
+    (
+        os.path.join(DATASTORE_SYSROOT_LOCAL, DATATOOLS_SUFFIX)
+        if DATASTORE_SYSROOT_LOCAL
+        else None
+    ),
 )

 # Secrets Backend - AWS Secrets Manager configuration
@@ -176,9 +184,11 @@
 )
 CARD_AZUREROOT = from_conf(
     "CARD_AZUREROOT",
-    os.path.join(DATASTORE_SYSROOT_AZURE, CARD_SUFFIX)
-    if DATASTORE_SYSROOT_AZURE
-    else None,
+    (
+        os.path.join(DATASTORE_SYSROOT_AZURE, CARD_SUFFIX)
+        if DATASTORE_SYSROOT_AZURE
+        else None
+    ),
 )
 CARD_GSROOT = from_conf(
     "CARD_GSROOT",
@@ -311,9 +321,11 @@
 # Amazon S3 path for storing the results of AWS Step Functions Distributed Map
 SFN_S3_DISTRIBUTED_MAP_OUTPUT_PATH = from_conf(
     "SFN_S3_DISTRIBUTED_MAP_OUTPUT_PATH",
-    os.path.join(DATASTORE_SYSROOT_S3, "sfn_distributed_map_output")
-    if DATASTORE_SYSROOT_S3
-    else None,
+    (
+        os.path.join(DATASTORE_SYSROOT_S3, "sfn_distributed_map_output")
+        if DATASTORE_SYSROOT_S3
+        else None
+    ),
 )
 ###
 # Kubernetes configuration
6 changes: 3 additions & 3 deletions metaflow/parameters.py
@@ -115,9 +115,9 @@ def __init__(
         self.parameter_name = parameter_name
         self.parameter_type = parameter_type
         self.return_str = return_str
-        self.print_representation = (
-            self.user_print_representation
-        ) = print_representation
+        self.print_representation = self.user_print_representation = (
+            print_representation
+        )
         if self.print_representation is None:
             self.print_representation = str(self.fun)
12 changes: 6 additions & 6 deletions metaflow/plugins/airflow/airflow.py
@@ -399,17 +399,17 @@ def _to_job(self, node):
             "METAFLOW_CARD_GSROOT": CARD_GSROOT,
             "METAFLOW_S3_ENDPOINT_URL": S3_ENDPOINT_URL,
         }
-        env[
-            "METAFLOW_AZURE_STORAGE_BLOB_SERVICE_ENDPOINT"
-        ] = AZURE_STORAGE_BLOB_SERVICE_ENDPOINT
+        env["METAFLOW_AZURE_STORAGE_BLOB_SERVICE_ENDPOINT"] = (
+            AZURE_STORAGE_BLOB_SERVICE_ENDPOINT
+        )
         env["METAFLOW_DATASTORE_SYSROOT_AZURE"] = DATASTORE_SYSROOT_AZURE
         env["METAFLOW_CARD_AZUREROOT"] = CARD_AZUREROOT
         if DEFAULT_SECRETS_BACKEND_TYPE:
             env["METAFLOW_DEFAULT_SECRETS_BACKEND_TYPE"] = DEFAULT_SECRETS_BACKEND_TYPE
         if AWS_SECRETS_MANAGER_DEFAULT_REGION:
-            env[
-                "METAFLOW_AWS_SECRETS_MANAGER_DEFAULT_REGION"
-            ] = AWS_SECRETS_MANAGER_DEFAULT_REGION
+            env["METAFLOW_AWS_SECRETS_MANAGER_DEFAULT_REGION"] = (
+                AWS_SECRETS_MANAGER_DEFAULT_REGION
+            )
         if GCP_SECRET_MANAGER_PREFIX:
             env["METAFLOW_GCP_SECRET_MANAGER_PREFIX"] = GCP_SECRET_MANAGER_PREFIX
8 changes: 5 additions & 3 deletions metaflow/plugins/airflow/airflow_utils.py
@@ -567,9 +567,11 @@ def from_dict(cls, task_dict, flow_name=None, flow_contains_foreach=False):
         return cls(
             task_dict["name"],
             is_mapper_node=is_mapper_node,
-            operator_type=task_dict["operator_type"]
-            if "operator_type" in task_dict
-            else "kubernetes",
+            operator_type=(
+                task_dict["operator_type"]
+                if "operator_type" in task_dict
+                else "kubernetes"
+            ),
             flow_name=flow_name,
             flow_contains_foreach=flow_contains_foreach,
         ).set_operator_args(**op_args)
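
As an aside, the ternary black wrapped here is behavior-identical to a dict lookup with a default, should this ever be simplified:

    task_dict = {"name": "start"}

    # Same result as:
    #     task_dict["operator_type"] if "operator_type" in task_dict else "kubernetes"
    operator_type = task_dict.get("operator_type", "kubernetes")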
(Diffs for the remaining changed files were not loaded.)