
Commit f2271b0

Update Python formatting with ruff 0.9.3

1 parent 4979319 commit f2271b0

File tree

7 files changed (+75 -78 lines)
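Most of the hunks below follow one pattern: when an assert with a message has to be wrapped, the formatter now parenthesizes the message rather than the condition. This appears to come from the 2025 style that ruff 0.9.x applies by default (stated here as background, not taken from the commit itself). A minimal before/after sketch with illustrative names:

# formatted by earlier ruff releases
assert (
    value == expected
), f"unexpected value {value}"

# formatted by ruff 0.9.3
assert value == expected, (
    f"unexpected value {value}"
)

The remaining hunks join implicitly concatenated string literals that fit on one line and normalize quotes inside f-strings.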

org.knime.python3.arrow.tests/src/test/python/tests_launcher.py

Lines changed: 15 additions & 15 deletions
@@ -281,15 +281,15 @@ def assert_value(r, b, v):
 
     elif data_type == "float":
         # Compare with epsilon
-        assert (
-            abs(v.as_py() - expected) < FLOAT_COMPARISON_EPSILON
-        ), wrong_value_message(r, b, expected, v)
+        assert abs(v.as_py() - expected) < FLOAT_COMPARISON_EPSILON, (
+            wrong_value_message(r, b, expected, v)
+        )
 
     elif data_type == "double":
         # Compare with epsilon
-        assert (
-            abs(v.as_py() - expected) < DOUBLE_COMPARISON_EPSILON
-        ), wrong_value_message(r, b, expected, v)
+        assert abs(v.as_py() - expected) < DOUBLE_COMPARISON_EPSILON, (
+            wrong_value_message(r, b, expected, v)
+        )
 
     elif data_type in ["localdatetime", "localtime"]:
         # We do not compare python values but arrow values for them

@@ -315,22 +315,22 @@ def check_batch(batch, b):
     array = batch.column(0)
 
     # Check length
-    assert (
-        len(array) == NUM_ROWS
-    ), f"Array has the wrong length. Expected {NUM_ROWS} got {len(array)}."
+    assert len(array) == NUM_ROWS, (
+        f"Array has the wrong length. Expected {NUM_ROWS} got {len(array)}."
+    )
     # Check array type
-    assert isinstance(
-        array, expected_array_type
-    ), f"Array has wrong type. Expected '{expected_array_type}', got '{type(array)}'."
+    assert isinstance(array, expected_array_type), (
+        f"Array has wrong type. Expected '{expected_array_type}', got '{type(array)}'."
+    )
     for r, v in enumerate(array):
         if r % 13 == 0:
             # Check that every 13th value is missing
             assert v.as_py() is None, wrong_value_message(r, b, None, v)
         else:
             # Check value type
-            assert (
-                v.type == expected_value_type
-            ), f"Value has wrong type. Expected '{expected_value_type}', got '{v.type}'"
+            assert v.type == expected_value_type, (
+                f"Value has wrong type. Expected '{expected_value_type}', got '{v.type}'"
+            )
             # Check value
             assert_value(r, b, v)

org.knime.python3.arrow.types.tests/src/test/python/extension_tests_launcher.py

Lines changed: 15 additions & 15 deletions
@@ -103,21 +103,21 @@ def assertIntListEquals(self, data_source, a, b, c, d, e):
         py_array = array.to_pylist()
         values = [[a, b, c, d, e]]
 
-        assert (
-            py_array == values
-        ), f"Wrong list of ints, expected '{values}' got '{py_array}'"
-        assert (
-            pd_array.shape == (1,)
-        ), f"Wrong shape returned from pandas, expected '(1,)', got '{pd_array.shape}'"
-        assert (
-            type(pd_array[0]) == np.ndarray
-        ), f"Wrong type returned from pandas, expected 'numpy.ndarray' got '{type(pd_array[0])}'"
-        assert (
-            len(pd_array[0]) == 5
-        ), f"Wrong length of list returned from pandas, expected '5', got '{len(pd_array[0])}'"
-        assert np.all(
-            pd_array[0] == values[0]
-        ), f"Wrong list of ints returned from pandas, expected '{values}' got '{pd_array[0]}'"
+        assert py_array == values, (
+            f"Wrong list of ints, expected '{values}' got '{py_array}'"
+        )
+        assert pd_array.shape == (1,), (
+            f"Wrong shape returned from pandas, expected '(1,)', got '{pd_array.shape}'"
+        )
+        assert type(pd_array[0]) == np.ndarray, (
+            f"Wrong type returned from pandas, expected 'numpy.ndarray' got '{type(pd_array[0])}'"
+        )
+        assert len(pd_array[0]) == 5, (
+            f"Wrong length of list returned from pandas, expected '5', got '{len(pd_array[0])}'"
+        )
+        assert np.all(pd_array[0] == values[0]), (
+            f"Wrong list of ints returned from pandas, expected '{values}' got '{pd_array[0]}'"
+        )
 
     def writeFsLocationViaPandas(self, data_sink, category, specifier, path):
         with kg.data_sink_mapper(data_sink) as sink:

org.knime.python3.nodes.tests/src/test/python/unittest/test_knime_parameter.py

Lines changed: 3 additions & 3 deletions
@@ -367,7 +367,7 @@ def generate_versioned_schema_dict(extension_version):
                 "description": "An EnumSet Parameter\n\n**Available options:**\n\n- Foo: The "
                 "foo\n- Bar: The bar\n- Baz: The baz\n",
                 "items": {"type": "string"},
-                "title": "EnumSet " "Parameter",
+                "title": "EnumSet Parameter",
                 "type": "array",
             },
             "group": {

@@ -418,7 +418,7 @@ def generate_versioned_schema_dict(extension_version):
                 "description": "An EnumSet Parameter\n\n**Available options:**\n\n- Foo: The "
                 "foo\n- Bar: The bar\n- Baz: The baz\n",
                 "items": {"type": "string"},
-                "title": "EnumSet " "Parameter",
+                "title": "EnumSet Parameter",
                 "type": "array",
             },
             "group": {

@@ -1490,7 +1490,7 @@ def test_extract_schema(self):
                 "description": "An EnumSet Parameter\n\n**Available options:**\n\n- Foo: The "
                 "foo\n- Bar: The bar\n- Baz: The baz\n",
                 "items": {"type": "string"},
-                "title": "EnumSet " "Parameter",
+                "title": "EnumSet Parameter",
                 "type": "array",
             },
             "parameter_group": {

org.knime.python3.nodes/src/main/python/_node_backend_launcher.py

Lines changed: 29 additions & 29 deletions
@@ -517,9 +517,9 @@ def deserialize_custom_spec() -> kn.PortObjectSpec:
         ):
             if port.type == kn.PortType.BINARY:
                 bpos = ks.BinaryPortObjectSpec.deserialize(data)
-                assert (
-                    bpos.id == port.id
-                ), f"Expected binary input port ID {port.id} but got {bpos.id}"
+                assert bpos.id == port.id, (
+                    f"Expected binary input port ID {port.id} but got {bpos.id}"
+                )
                 return bpos
             else:  # custom spec
                 return deserialize_custom_spec()

@@ -533,9 +533,9 @@ def deserialize_custom_spec() -> kn.PortObjectSpec:
         elif (
             class_name == "org.knime.core.node.workflow.capture.WorkflowPortObjectSpec"
         ):
-            assert (
-                port.type == kn.PortType.WORKFLOW
-            ), f"Expected a {port.type} but got a Workflow instead."
+            assert port.type == kn.PortType.WORKFLOW, (
+                f"Expected a {port.type} but got a Workflow instead."
+            )
             return ks.WorkflowPortObjectSpec.deserialize(data)
 
         raise TypeError("Unsupported PortObjectSpec found in Python, got " + class_name)

@@ -613,17 +613,17 @@ def spec_from_python(
             class_name = "org.knime.core.data.DataTableSpec"
         elif port.type == kn.PortType.BINARY:
             assert isinstance(spec, ks.BinaryPortObjectSpec)
-            assert (
-                port.id == spec.id
-            ), f"Expected binary output port ID {port.id} but got {spec.id}"
+            assert port.id == spec.id, (
+                f"Expected binary output port ID {port.id} but got {spec.id}"
+            )
 
             data = spec.serialize()
             class_name = "org.knime.python3.nodes.ports.PythonBinaryBlobPortObjectSpec"
         elif port.type == kn.PortType.IMAGE:
             assert isinstance(spec, ks.ImagePortObjectSpec)
-            assert any(
-                spec.format == option.value for option in kn.ImageFormat
-            ), f"Expected image formats are: {kn.ImageFormat.available_options()}."
+            assert any(spec.format == option.value for option in kn.ImageFormat), (
+                f"Expected image formats are: {kn.ImageFormat.available_options()}."
+            )
 
             data = spec.serialize()
             class_name = "org.knime.core.node.port.image.ImagePortObjectSpec"

@@ -632,12 +632,12 @@ def spec_from_python(
                 "WorkflowPortObjectSpecs can't be created in a Python node."
             )
         else:  # custom spec
-            assert (
-                port.type.id in self._port_types_by_id
-            ), f"Invalid output spec, no port type with id '{port.type.id}' registered. Please register the port type."
-            assert isinstance(
-                spec, port.type.spec_class
-            ), f"Expected output spec of type {port.type.spec_class} but got spec of type {type(spec)}"
+            assert port.type.id in self._port_types_by_id, (
+                f"Invalid output spec, no port type with id '{port.type.id}' registered. Please register the port type."
+            )
+            assert isinstance(spec, port.type.spec_class), (
+                f"Expected output spec of type {port.type.spec_class} but got spec of type {type(spec)}"
+            )
             data = {"id": port.type.id, "data": spec.serialize()}
 
         if issubclass(port.type.object_class, kn.ConnectionPortObject):

@@ -690,16 +690,16 @@ def read_port_object_data() -> Union[Any, kn.PortObject]:
             class_name
             == "org.knime.python3.nodes.ports.PythonTransientConnectionPortObject"
         ):
-            assert issubclass(
-                port.type.object_class, kn.ConnectionPortObject
-            ), f"unexpected port type {port.type}"
+            assert issubclass(port.type.object_class, kn.ConnectionPortObject), (
+                f"unexpected port type {port.type}"
+            )
             spec = self.spec_to_python(port_object.getSpec(), port, java_callback)
 
             data = json.loads(port_object.getSpec().toJsonString())
-            key = f'{data["node_id"]}:{data["port_idx"]}'
+            key = f"{data['node_id']}:{data['port_idx']}"
             if key not in _PortTypeRegistry._connection_port_data:
                 raise KeyError(
-                    f'No connection data found for node {data["node_id"]}, port {data["port_idx"]}. '
+                    f"No connection data found for node {data['node_id']}, port {data['port_idx']}. "
                     + "Please re-execute the upstream node providing the connection."
                 )

@@ -769,12 +769,12 @@ def port_object_from_python(
         elif port.type == kn.PortType.WORKFLOW:
             raise AssertionError("WorkflowPortObjects can't be created in Python.")
         else:
-            assert (
-                port.type.id in self._port_types_by_id
-            ), f"Invalid output port value, no port type with id '{id}' registered. Please register the port type."
-            assert isinstance(
-                obj, port.type.object_class
-            ), f"Expected output object of type {port.type.object_class}, got object of type {type(obj)}"
+            assert port.type.id in self._port_types_by_id, (
+                f"Invalid output port value, no port type with id '{id}' registered. Please register the port type."
+            )
+            assert isinstance(obj, port.type.object_class), (
+                f"Expected output object of type {port.type.object_class}, got object of type {type(obj)}"
+            )
             spec = self.spec_from_python(obj.spec, port, node_id, port_idx)
 
         if issubclass(port.type.object_class, kn.ConnectionPortObject):
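The two quote changes in this file (the key assignment and the KeyError message) are not assert rewrites: the formatter now also formats inside f-strings, so the outer literal keeps the project's double quotes and the nested dictionary keys switch to single quotes, turning f'{data["node_id"]}:{data["port_idx"]}' into f"{data['node_id']}:{data['port_idx']}". The rendered string is identical; only the quoting style changes. Attributing this to ruff's f-string formatting in the 2025 style is an assumption here; the diff itself only shows the quote swap.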

org.knime.python3.nodes/src/main/python/knime/extension/parameter.py

Lines changed: 3 additions & 1 deletion
@@ -2770,7 +2770,9 @@ def __get__(self, obj, obj_type=None):
 
         Composed: return this instance.
         """
-        assert self._is_descriptor(), "__get__ should only be called if the paramter_group is used as a descriptor."
+        assert self._is_descriptor(), (
+            "__get__ should only be called if the paramter_group is used as a descriptor."
+        )
         return self._get_param_holder(obj)
 
     def _get_value(self, obj, name, for_dialog: bool = False) -> Dict[str, Any]:

org.knime.python3.tests/src/test/python/unittest/test_knime_schema.py

Lines changed: 1 addition & 1 deletion
@@ -542,7 +542,7 @@ def test_to_str(self):
         sep = ",\n\t"
         self.assertEqual(
             str(s),
-            f"Schema<\n\t{sep.join(str(k.Column(t,n,None)) for t,n in zip(types, names))}>",
+            f"Schema<\n\t{sep.join(str(k.Column(t, n, None)) for t, n in zip(types, names))}>",
         )
 
     def test_logical_type_wrapping(self):
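The single change here is another effect of formatting expressions inside f-strings: spaces are added after the commas in the embedded call, k.Column(t,n,None) becoming k.Column(t, n, None), with no change to the string the test compares against.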

org.knime.python3/src/main/python/dataclasses_py36backport.py

Lines changed: 9 additions & 14 deletions
@@ -366,7 +366,7 @@ def _tuple_str(obj_name, fields):
     if not fields:
         return "()"
     # Note the trailing comma, needed if this turns out to be a 1-tuple.
-    return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)'
+    return f"({','.join([f'{obj_name}.{f.name}' for f in fields])},)"
 
 
 def _create_fn(name, args, body, *, globals=None, locals=None, return_type=MISSING):

@@ -412,9 +412,7 @@ def _field_init(f, frozen, globals, self_name):
             # given, use it. If not, call the factory.
             globals[default_name] = f.default_factory
             value = (
-                f"{default_name}() "
-                f"if {f.name} is _HAS_DEFAULT_FACTORY "
-                f"else {f.name}"
+                f"{default_name}() if {f.name} is _HAS_DEFAULT_FACTORY else {f.name}"
             )
         else:
             # This is a field that's not in the __init__ params, but

@@ -492,7 +490,7 @@ def _init_fn(fields, frozen, has_post_init, self_name):
             seen_default = True
         elif seen_default:
             raise TypeError(
-                f"non-default argument {f.name!r} " "follows default argument"
+                f"non-default argument {f.name!r} follows default argument"
             )
 
     globals = {"MISSING": MISSING, "_HAS_DEFAULT_FACTORY": _HAS_DEFAULT_FACTORY}

@@ -729,7 +727,7 @@ def _get_field(cls, a_name, a_type):
     # Special restrictions for ClassVar and InitVar.
     if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR):
         if f.default_factory is not MISSING:
-            raise TypeError(f"field {f.name} cannot have a " "default factory")
+            raise TypeError(f"field {f.name} cannot have a default factory")
         # Should I check for other field settings? default_factory
         # seems the most serious to check for. Maybe add others. For
         # example, how about init=False (or really,

@@ -772,7 +770,7 @@ def _hash_add(cls, fields):
 
 def _hash_exception(cls, fields):
     # Raise an exception.
-    raise TypeError(f"Cannot overwrite attribute __hash__ " f"in class {cls.__name__}")
+    raise TypeError(f"Cannot overwrite attribute __hash__ in class {cls.__name__}")
 
 
 #

@@ -878,11 +876,11 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
     if has_dataclass_bases:
         # Raise an exception if any of our bases are frozen, but we're not.
         if any_frozen_base and not frozen:
-            raise TypeError("cannot inherit non-frozen dataclass from a " "frozen one")
+            raise TypeError("cannot inherit non-frozen dataclass from a frozen one")
 
         # Raise an exception if we're frozen, but none of our bases are.
         if not any_frozen_base and frozen:
-            raise TypeError("cannot inherit frozen dataclass from a " "non-frozen one")
+            raise TypeError("cannot inherit frozen dataclass from a non-frozen one")
 
     # Remember all of the fields on our class (including bases). This
     # also marks this class as being a dataclass.

@@ -965,8 +963,7 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
         for fn in _frozen_get_del_attr(cls, field_list):
             if _set_new_attribute(cls, fn.__name__, fn):
                 raise TypeError(
-                    f"Cannot overwrite attribute {fn.__name__} "
-                    f"in class {cls.__name__}"
+                    f"Cannot overwrite attribute {fn.__name__} in class {cls.__name__}"
                 )
 
     # Decide if/how we're going to create a hash function.

@@ -1262,9 +1259,7 @@ class C:
 
         if f.name not in changes:
             if f._field_type is _FIELD_INITVAR:
-                raise ValueError(
-                    f"InitVar {f.name!r} " "must be specified with replace()"
-                )
+                raise ValueError(f"InitVar {f.name!r} must be specified with replace()")
             changes[f.name] = getattr(obj, f.name)
 
     # Create the new object, which calls __init__() and
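Apart from the first hunk, which is an f-string quote normalization like the one above, every change in this vendored backport joins implicitly concatenated string literals that now fit on one line (the _field_init hunk collapses a three-part concatenation the same way). Taking one example from the diff:

raise TypeError(f"field {f.name} cannot have a " "default factory")  # before: two adjacent literals
raise TypeError(f"field {f.name} cannot have a default factory")     # after: one literal

Runtime behavior is unchanged; only the layout of the literals differs.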
