[patch] Better error messages (#365)
* Introduce `HasLabel.full_label` for providing more detail

In nodes it uses the semantic path; in channels it combines the owner's full label with the channel's own label

* Extend error messages to use the full label
liamhuber authored Jun 18, 2024
1 parent 7f48546 commit 05ebef7
Showing 12 changed files with 89 additions and 56 deletions.
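
Before the per-file diffs, a sketch of the payoff. The names below (`wf`, `hinted_example`, `x`) are hypothetical, and the snippet only mimics how the new properties compose their strings -- a node's `full_label` is its semantic path, a channel's is its owner's full label plus its own label -- it does not call the real pyiron_workflow API.

# Hypothetical labels, composed the way the new `full_label` properties compose them
node_full_label = "/wf/hinted_example"            # Semantic.full_label -> semantic_path
channel_full_label = node_full_label + "." + "x"  # Channel.full_label -> owner full label + "." + label

# Old-style message referenced only the bare channel label:
#     The channel x cannot take the value `...` because ...
# New-style message pinpoints the channel in the graph and reports the offending type:
#     The channel /wf/hinted_example.x cannot take the value `...` (<class 'str'>) because ...
print(channel_full_label)  # /wf/hinted_example.x
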
46 changes: 28 additions & 18 deletions pyiron_workflow/channels.py
@@ -103,6 +103,11 @@ def scoped_label(self) -> str:
"""A label combining the channel's usual label and its owner's label"""
return f"{self.owner.label}__{self.label}"

@property
def full_label(self) -> str:
"""A label combining the channel's usual label and its owner's semantic path"""
return f"{self.owner.full_label}.{self.label}"

def _valid_connection(self, other: Channel) -> bool:
"""
Logic for determining if a connection is valid.
@@ -142,16 +147,19 @@ def connect(self, *others: Channel) -> None:
else:
if isinstance(other, self.connection_partner_type):
raise ChannelConnectionError(
f"{other.label} ({other.__class__.__name__}) has the correct "
f"type ({self.connection_partner_type.__name__} to connect "
f"with {self.label} ({self.__class__.__name__}), but is not a "
f"valid connection. Please check type hints, etc."
)
f"The channel {other.full_label} ({other.__class__.__name__}"
f") has the correct type "
f"({self.connection_partner_type.__name__}) to connect with "
f"{self.full_label} ({self.__class__.__name__}), but is not "
f"a valid connection. Please check type hints, etc."
f"{other.full_label}.type_hint = {other.type_hint}; "
f"{self.full_label}.type_hint = {self.type_hint}"
) from None
else:
raise TypeError(
f"Can only connect to {self.connection_partner_type.__name__} "
f"objects, but {self.label} ({self.__class__.__name__}) got "
f"{other} ({type(other)})"
f"objects, but {self.full_label} ({self.__class__.__name__}) "
f"got {other} ({type(other)})"
)

def disconnect(self, *others: Channel) -> list[tuple[Channel, Channel]]:
@@ -369,8 +377,9 @@ def _type_check_new_value(self, new_value):
and not valid_value(new_value, self.type_hint)
):
raise TypeError(
f"The channel {self.label} cannot take the value `{new_value}` because "
f"it is not compliant with the type hint {self.type_hint}"
f"The channel {self.full_label} cannot take the value `{new_value}` "
f"({type(new_value)}) because it is not compliant with the type hint "
f"{self.type_hint}"
)

@property
@@ -389,24 +398,25 @@ def value_receiver(self, new_partner: InputData | OutputData | None):
if new_partner is not None:
if not isinstance(new_partner, self.__class__):
raise TypeError(
f"The {self.__class__.__name__} {self.label} got a coupling "
f"The {self.__class__.__name__} {self.full_label} got a coupling "
f"partner {new_partner} but requires something of the same type"
)

if new_partner is self:
raise ValueError(
f"{self.__class__.__name__} {self.label} cannot couple to itself"
f"{self.__class__.__name__} {self.full_label} cannot couple to "
f"itself"
)

if self._both_typed(new_partner) and new_partner.strict_hints:
if not type_hint_is_as_or_more_specific_than(
self.type_hint, new_partner.type_hint
):
raise ValueError(
f"The channel {self.label} cannot take {new_partner.label} as "
f"a value receiver because this type hint ({self.type_hint}) "
f"is not as or more specific than the receiving type hint "
f"({new_partner.type_hint})."
f"The channel {self.full_label} cannot take "
f"{new_partner.full_label} as a value receiver because this "
f"type hint ({self.type_hint}) is not as or more specific than "
f"the receiving type hint ({new_partner.type_hint})."
)

new_partner.value = self.value
@@ -528,7 +538,7 @@ def value(self):
def value(self, new_value):
if self.owner.data_input_locked():
raise RuntimeError(
f"Owner {self.owner.label} of {self.label} has its data input locked, "
f"Owner {self.full_label} has its data input locked, "
f"so value cannot be updated."
)
self._type_check_new_value(new_value)
@@ -592,8 +602,8 @@ def __init__(
self._callback: str = callback.__name__
else:
raise BadCallbackError(
f"The channel {self.label} on {self.owner.label} got an unexpected "
f"callback: {callback}. "
f"The channel {self.full_label} got an unexpected callback: "
f"{callback}. "
f"Lives on owner: {self._is_method_on_owner(callback)}; "
f"all args are optional: {self._all_args_arg_optional(callback)} "
)
6 changes: 3 additions & 3 deletions pyiron_workflow/io.py
@@ -220,9 +220,9 @@ def _channel_class(self) -> type(OutputData):
class SignalIO(IO, ABC):
def _assign_a_non_channel_value(self, channel: SignalChannel, value) -> None:
raise TypeError(
f"Tried to assign {value} ({type(value)} to the {channel.label}, which is "
f"already a {type(channel)}. Only other signal channels may be connected "
f"in this way."
f"Tried to assign {value} ({type(value)} to the {channel.full_label}, "
f"which is already a {type(channel)}. Only other signal channels may be "
f"connected in this way."
)


8 changes: 8 additions & 0 deletions pyiron_workflow/mixin/has_interface_mixins.py
@@ -44,6 +44,14 @@ class HasLabel(ABC):
def label(self) -> str:
"""A label for the object."""

@property
def full_label(self) -> str:
"""
A more verbose label based off the underlying label attribute (and possibly
other data) -- in the root class, it's just the same as the label.
"""
return self.label


class HasParent(ABC):
"""
8 changes: 4 additions & 4 deletions pyiron_workflow/mixin/injection.py
@@ -113,10 +113,10 @@ def __getattr__(self, name):
)
if name.startswith("_"):
raise AttributeError(
f"{OutputDataWithInjection.__name__} {self.label} tried to inject on "
f"the attribute {name}, but injecting on private attributes is "
f"forbidden -- if you really need it create a {GetAttr.__name__} node "
f"manually."
f"{self.full_label} ({OutputDataWithInjection.__name__}) tried to "
f"inject on the attribute {name}, but injecting on private attributes "
f"is forbidden -- if you really need it create a {GetAttr.__name__} "
f"node manually."
)
return self._node_injection(GetAttr, name)

7 changes: 7 additions & 0 deletions pyiron_workflow/mixin/semantics.py
@@ -85,6 +85,13 @@ def semantic_path(self) -> str:
prefix = self.parent.semantic_path if isinstance(self.parent, Semantic) else ""
return prefix + self.semantic_delimiter + self.label

@property
def full_label(self) -> str:
"""
A shortcut that combines the semantic path and label into a single string.
"""
return self.semantic_path

@property
def semantic_root(self) -> Semantic:
"""The parent-most object in this semantic path; may be self."""
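
Taken together, the three `full_label` definitions form a small override chain: the `HasLabel` base falls back to the plain label, `Semantic` objects report their semantic path, and channels (see the channels.py diff above) prefix their owner's full label. Below is a minimal, self-contained sketch of that chain; the `*Sketch` classes are simplified illustrative stand-ins, not the real pyiron_workflow mixins.

class HasLabelSketch:
    """Base behaviour: full_label is just the plain label."""

    def __init__(self, label):
        self.label = label

    @property
    def full_label(self):
        return self.label


class SemanticSketch(HasLabelSketch):
    """Node-like behaviour: full_label is the delimiter-joined semantic path."""

    semantic_delimiter = "/"

    def __init__(self, label, parent=None):
        super().__init__(label)
        self.parent = parent

    @property
    def semantic_path(self):
        prefix = self.parent.semantic_path if self.parent is not None else ""
        return prefix + self.semantic_delimiter + self.label

    @property
    def full_label(self):
        return self.semantic_path


class ChannelSketch(HasLabelSketch):
    """Channel-like behaviour: full_label prefixes the owner's full label."""

    def __init__(self, label, owner):
        super().__init__(label)
        self.owner = owner

    @property
    def full_label(self):
        return f"{self.owner.full_label}.{self.label}"


wf = SemanticSketch("wf")
node = SemanticSketch("hinted_example", parent=wf)
x = ChannelSketch("x", owner=node)
print(x.full_label)  # -> /wf/hinted_example.x, the form the new error messages report
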
16 changes: 9 additions & 7 deletions pyiron_workflow/node.py
@@ -369,14 +369,15 @@ def _after_node_setup(

if do_load and run_after_init:
raise ValueError(
"Can't both load _and_ run after init -- either delete the save file "
"(e.g. with with the `overwrite_save=True` kwarg), change the node "
"label to work in a new space, or give up on running after init."
f"{self.full_label} can't both load _and_ run after init -- either"
f" delete the save file (e.g. with with the `overwrite_save=True` "
f"kwarg), change the node label to work in a new space, or give up on "
f"running after init."
)
elif do_load:
logger.info(
f"A saved file was found for the node {self.label} -- attempting to "
f"load it...(To delete the saved file instead, use "
f"A saved file was found for the node {self.full_label} -- "
f"attempting to load it...(To delete the saved file instead, use "
f"`overwrite_save=True`)"
)
self.load()
@@ -522,8 +523,9 @@ def run_data_tree(self, run_parent_trees_too=False) -> None:
if node.executor is not None:
raise ValueError(
f"Running the data tree is pull-paradigm action, and is "
f"incompatible with using executors. An executor request was found "
f"on {node.label}"
f"incompatible with using executors. While running "
f"{self.full_label}, an executor request was found on "
f"{node.full_label}"
)

for node in data_tree_nodes:
2 changes: 1 addition & 1 deletion pyiron_workflow/nodes/function.py
@@ -151,7 +151,7 @@ class Function(StaticNode, ScrapesIO, ABC):
... plus_minus_1.inputs.x = "not an int or float"
... except TypeError as e:
... print("TypeError:", e.args[0])
TypeError: The channel x cannot take the value `not an int or float` because it is not compliant with the type hint typing.Union[int, float]
TypeError: The channel /hinted_example.x cannot take the value `not an int or float` (<class 'str'>) because it is not compliant with the type hint typing.Union[int, float]
We can turn off type hinting with the `strict_hints` boolean property, or just
circumvent the type hinting by applying the new data directly to the private
2 changes: 1 addition & 1 deletion pyiron_workflow/nodes/macro.py
@@ -398,7 +398,7 @@ def _configure_graph_execution(self, ui_nodes):
self.set_run_signals_to_dag_execution()
else:
raise ValueError(
f"The macro '{self.label}' has {len(run_signals)} run signals "
f"The macro {self.full_label} has {len(run_signals)} run signals "
f"internally and {len(self.starting_nodes)} starting nodes. Either "
f"the entire execution graph must be specified manually, or both run "
f"signals and starting nodes must be left entirely unspecified for "
4 changes: 2 additions & 2 deletions pyiron_workflow/nodes/static_io.py
@@ -141,6 +141,6 @@ def _guarantee_names_are_input_channels(self, presumed_input_keys: tuple[str]):
non_input_kwargs = set(presumed_input_keys).difference(self.inputs.labels)
if len(non_input_kwargs) > 0:
raise ValueError(
f"{self.label} cannot iterate on {non_input_kwargs} because they are "
f"not among input channels {self.inputs.labels}"
f"{self.full_label} cannot iterate on {non_input_kwargs} because "
f"they are not among input channels {self.inputs.labels}"
)
25 changes: 13 additions & 12 deletions pyiron_workflow/topology.py
@@ -45,9 +45,10 @@ def nodes_to_data_digraph(nodes: dict[str, Node]) -> dict[str, set[str]]:

parent = next(iter(nodes.values())).parent # Just grab any one
if not all(n.parent is parent for n in nodes.values()):
node_identifiers = "\n".join([n.full_label for n in nodes.values()])
raise ValueError(
"Nodes in a data digraph must all be siblings -- i.e. have the same "
"`parent` attribute."
f"Nodes in a data digraph must all be siblings -- i.e. have the same "
f"`parent` attribute. Some of these do not: {node_identifiers}"
)

for node in nodes.values():
@@ -59,18 +60,17 @@ def nodes_to_data_digraph(nodes: dict[str, Node]) -> dict[str, set[str]]:
upstream_node = nodes[upstream.owner.label]
except KeyError as e:
raise KeyError(
f"The {channel.label} channel of {node.label} has a connection "
f"to {upstream.label} channel of {upstream.owner.label}, but "
f"{upstream.owner.label} was not found among nodes. All nodes "
f"in the data flow dependency tree must be included."
f"The channel {channel.full_label} has a connection to the "
f"upstream channel {upstream.full_label}, but the upstream "
f"owner {upstream.owner.label} was not found among nodes. "
f"All nodes in the data flow dependency tree must be included."
)
if upstream_node is not upstream.owner:
raise ValueError(
f"The {channel.label} channel of {node.label} has a connection "
f"to {upstream.label} channel of {upstream.owner.label}, but "
f"that channel's node is not the same as the nodes passed "
f"here. All nodes in the data flow dependency tree must be "
f"included."
f"The channel {channel.full_label} has a connection to the "
f"upstream channel {upstream.full_label}, but that channel's "
f"node is not the same as the nodes passed here. All nodes in "
f"the data flow dependency tree must be included."
)
locally_scoped_dependencies.append(upstream.owner.label)
node_dependencies.extend(locally_scoped_dependencies)
@@ -81,7 +81,8 @@ def nodes_to_data_digraph(nodes: dict[str, Node]) -> dict[str, set[str]]:
# That self-dependency isn't caught, so we catch it manually here.
raise CircularDataFlowError(
f"Detected a cycle in the data flow topology, unable to automate "
f"the execution of non-DAGs: {node.label} appears in its own input."
f"the execution of non-DAGs: {node.full_label} appears in its own "
f"input."
)
digraph[node.label] = node_dependencies

17 changes: 9 additions & 8 deletions pyiron_workflow/workflow.py
@@ -441,7 +441,7 @@ def _rebuild_data_io(self):
self._inputs = old_inputs
self._outputs = old_outputs
e.message = (
f"Unable to rebuild IO for {self.label}; reverting to old IO."
f"Unable to rebuild IO for {self.full_label}; reverting to old IO."
f"{e.message}"
)
raise e
@@ -545,13 +545,14 @@ def save(self):
node.package_identifier is None for node in self
):
raise NotImplementedError(
f"{self.__class__.__name__} can currently only save itself to file if "
f"_all_ of its child nodes were created via the creator and have an "
f"associated `package_identifier` -- otherwise we won't know how to "
f"re-instantiate them at load time! Right now this is as easy as "
f"moving your custom nodes to their own .py file and registering it "
f"like any other node package. Remember that this new module needs to "
f"be in your python path and importable at load time too."
f"{self.full_label} ({self.__class__.__name__}) can currently only "
f"save itself to file if _all_ of its child nodes were created via the "
f"creator and have an associated `package_identifier` -- otherwise we "
f"won't know how to re-instantiate them at load time! Right now this "
f"is as easy as moving your custom nodes to their own .py file and "
f"registering it like any other node package. Remember that this new "
f"module needs to be in your python path and importable at load time "
f"too."
)
super().save()

4 changes: 4 additions & 0 deletions tests/unit/test_channels.py
@@ -12,6 +12,10 @@ def __init__(self):
self.locked = False
self.label = "owner_label"

@property
def full_label(self):
return self.label

def update(self):
self.foo.append(self.foo[-1] + 1)
