From 9b3735c3b50fda374840104652c5fdd451a02c80 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 10 Jan 2024 13:44:24 -0800 Subject: [PATCH 001/166] Expose the set of package identifiers for child nodes on a composite --- pyiron_workflow/composite.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 663f47c8..2b955e23 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -561,3 +561,10 @@ def __dir__(self): def color(self) -> str: """For drawing the graph""" return SeabornColors.brown + + @property + def package_requirements(self) -> set[str]: + """ + A list of node package identifiers for children. + """ + return set(n.package_identifier for n in self) From c4a457d4848918ef8504874c3d8774f349cbc275 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 10 Jan 2024 13:48:06 -0800 Subject: [PATCH 002/166] Add methods for interacting with `tinybase.storage.H5ioStorage` --- pyiron_workflow/channels.py | 12 ++++++++ pyiron_workflow/composite.py | 20 +++++++++++++ pyiron_workflow/node.py | 27 +++++++++++++++++ pyiron_workflow/workflow.py | 56 ++++++++++++++++++++++++++++++++++++ 4 files changed, 115 insertions(+) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index d60853cb..ab9af4ca 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -457,6 +457,18 @@ def activate_strict_hints(self) -> None: def deactivate_strict_hints(self) -> None: self.strict_hints = False + def to_storage(self, storage): + storage["strict_hints"] = self.strict_hints + storage["type_hint"] = self.type_hint + storage["default"] = self.default + storage["value"] = self.value + + def from_storage(self, storage): + self.strict_hints = storage["strict_hints"] + self.type_hint = storage["type_hint"] + self.default = storage["default"] + self.value = storage["value"] + class InputData(DataChannel): @property diff --git a/pyiron_workflow/composite.py 
b/pyiron_workflow/composite.py index 2b955e23..38cdbbde 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -568,3 +568,23 @@ def package_requirements(self) -> set[str]: A list of node package identifiers for children. """ return set(n.package_identifier for n in self) + + def to_storage(self, storage): + nodes_storage = storage.create_group("child_nodes") + for label, node in self.nodes.items(): + node.to_storage(nodes_storage.create_group(label)) + + storage["inputs_map"] = self.inputs_map + storage["outputs_map"] = self.outputs_map + + super().to_storage(storage) + + def from_storage(self, storage): + for node in self: + node.from_storage(storage["child_nodes"][node.label]) + + self.inputs_map = storage["inputs_map"] + self.outputs_map = storage["outputs_map"] + self._rebuild_data_io() # To apply any map that was saved + + super().from_storage(storage) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index a4751550..68179b02 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -980,3 +980,30 @@ def executor_shutdown(self, wait=True, *, cancel_futures=False): self.executor.shutdown(wait=wait, cancel_futures=cancel_futures) except AttributeError: pass + + def to_storage(self, storage): + storage["package_identifier"] = self.package_identifier + storage["class_name"] = self.__class__.__name__ + storage["label"] = self.label + storage["running"] = self.running + storage["failed"] = self.failed + + data_inputs = storage.create_group("data_inputs") + for label, channel in self.inputs.items(): + channel.to_storage(data_inputs.create_group(label)) + + data_outputs = storage.create_group("data_outputs") + for label, channel in self.outputs.items(): + channel.to_storage(data_outputs.create_group(label)) + + def from_storage(self, storage): + self.running = storage["running"] + self.failed = storage["failed"] + + data_inputs = storage["data_inputs"] + for label in data_inputs.list_groups(): + 
self.inputs[label].from_storage(data_inputs[label]) + + data_outputs = storage["data_outputs"] + for label in data_outputs.list_groups(): + self.outputs[label].from_storage(data_outputs[label]) diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 8e89229e..92a25bb4 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -278,3 +278,59 @@ def _parent(self, new_parent: None): f"{self.__class__} may only take None as a parent but got " f"{type(new_parent)}" ) + + def to_storage(self, storage): + storage["package_requirements"] = self.package_requirements + storage["automate_execution"] = self.automate_execution + super().to_storage(storage) + + data_connections = [] + for node in self: + for inp_label, inp in node.inputs.items(): + for conn in inp.connections: + data_connections.append( + ((node.label, inp_label), (conn.node.label, conn.label)) + ) + storage["data_connections"] = data_connections + + if not self.automate_execution: + signal_connections = [] + for node in self: + for inp_label, inp in node.signals.input.items(): + for conn in inp.connections: + signal_connections.append( + ((node.label, inp_label), (conn.node.label, conn.label)) + ) + storage["signal_connections"] = signal_connections + storage["starting_nodes_labels"] = [n.label for n in self.starting_nodes] + + def from_storage(self, storage): + for package_identifier in storage["package_requirements"]: + self.register(package_identifier) + + nodes_storage = storage["child_nodes"] + for child_label in nodes_storage.list_groups(): + child_data = nodes_storage[child_label] + pid = child_data["package_identifier"] + cls = child_data["class_name"] + self.create[pid][cls](label=child_label, parent=self) + + self.automate_execution = storage["automate_execution"] + + super().from_storage(storage) + + for data_connection in storage["data_connections"]: + (inp_label, inp_channel), (out_label, out_channel) = data_connection + 
self.nodes[inp_label].inputs[inp_channel].connect( + self.nodes[out_label].outputs[out_channel] + ) + + if not self.automate_execution: + for signal_connection in storage["signal_connections"]: + (inp_label, inp_channel), (out_label, out_channel) = signal_connection + self.nodes[inp_label].signals.input[inp_channel].connect( + self.nodes[out_label].signals.output[out_channel] + ) + self.starting_nodes = [ + self.nodes[label] for label in storage["starting_nodes_labels"] + ] From 5de74c5703fffe0d1e9a6e982da2b6d6d38bb1ac Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 10 Jan 2024 13:49:20 -0800 Subject: [PATCH 003/166] Give public-facing save and load methods --- pyiron_workflow/node.py | 60 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 68179b02..e737865f 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1007,3 +1007,63 @@ def from_storage(self, storage): data_outputs = storage["data_outputs"] for label in data_outputs.list_groups(): self.outputs[label].from_storage(data_outputs[label]) + + _save_load_warnings = """ + HERE BE DRAGONS!!! + + Warning: + This almost certainly only fails for subclasses of :class:`Node` that don't + override `node_function` or `macro_creator` directly, as these are expected + to be part of the class itself (and thus already present on our instantiated + object) and are never stored. Nodes created using the provided decorators + should all work. + + Warning: + If you modify a `Macro` class in any way (changing its IO maps, rewiring + internal connections, or replacing internal nodes), don't expect + saving/loading to work. + + Warning: + If the underlying source code has changed since saving (i.e. the node doing + the loading does not use the same code as the node doing the saving, or the + nodes in some node package have been modified), then all bets are off. 
+ + Note: + Saving and loading `Workflows` only works when all child nodes were created + via the creator (and thus have a `package_identifier`). Right now, this is + not a big barrier to custom nodes as all you need to do is move them into a + .py file, make sure it's in your python path, and :func:`register` it as + usual. + """ + + def save(self): + """ + Writes the node to file (using HDF5) such that a new node instance of the same + type can :meth:`load()` the data to return to the same state as the save point, + i.e. the same data IO channel values, the same flags, etc. + """ + self.to_storage(self.storage) + save.__doc__ += _save_load_warnings + + def load(self): + """ + Loads the node file (from HDF5) such that this node restores its state at time + of loading. + + Raises: + TypeError) when the saved node has a different class name. + """ + if self.storage["class_name"] != self.__class__.__name__: + raise TypeError( + f"{self.label} cannot load, as it has type {self.__class__.__name__}, " + f"but the saved node has type {self.storage['class_name']}" + ) + self.from_storage(self.storage) + save.__doc__ += _save_load_warnings + + @property + def storage(self): + from pyiron_contrib.tinybase.project.h5io import SingleHdfProject + return SingleHdfProject.open_location( + str(self.working_directory.path.resolve()) + ).create_storage(self.label) From 57b06d4abdc8b0c97dd74ba9fc640de7519c522e Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 10 Jan 2024 13:49:41 -0800 Subject: [PATCH 004/166] Fail early on Workflow saves if any children are not from a package --- pyiron_workflow/workflow.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 92a25bb4..30db654e 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -267,6 +267,19 @@ def deserialize(self, source): def _parent(self) -> None: return None + def save(self): + if any(node.package_identifier is None for 
node in self): + raise NotImplementedError( + f"{self.__class__.__name__} can currently only save itself to file if " + f"_all_ of its child nodes were created via the creator and have an " + f"associated `package_identifier` -- otherwise we won't know how to " + f"re-instantiate them at load time! Right now this is as easy as " + f"moving your custom nodes to their own .py file and registering it " + f"like any other node package. Remember that this new module needs to " + f"be in your python path and importable at load time too." + ) + self.to_storage(self.storage) + @_parent.setter def _parent(self, new_parent: None): # Currently workflows are not allowed to have a parent -- maybe we want to From 8606a63bc2fd8bf1287ab4375781cc3af1f4d0d3 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 10 Jan 2024 13:50:32 -0800 Subject: [PATCH 005/166] Load discovered save files automatically Unless instructed to delete them --- pyiron_workflow/composite.py | 1 + pyiron_workflow/function.py | 1 + pyiron_workflow/macro.py | 1 + pyiron_workflow/node.py | 36 ++++++++++++++++++++++++++++++++++-- pyiron_workflow/workflow.py | 1 + 5 files changed, 38 insertions(+), 2 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 38cdbbde..a2f3173e 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -106,6 +106,7 @@ def __init__( label: str, *args, parent: Optional[Composite] = None, + overwrite_save: bool = False, run_after_init: bool = False, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, diff --git a/pyiron_workflow/function.py b/pyiron_workflow/function.py index 97408469..d5906284 100644 --- a/pyiron_workflow/function.py +++ b/pyiron_workflow/function.py @@ -331,6 +331,7 @@ def __init__( *args, label: Optional[str] = None, parent: Optional[Composite] = None, + overwrite_save: bool = False, run_after_init: bool = False, output_labels: Optional[str | list[str] | tuple[str]] = None, **kwargs, diff 
--git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index af3bd4e9..70602ee5 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -265,6 +265,7 @@ def __init__( graph_creator: callable[[Macro], None], label: Optional[str] = None, parent: Optional[Composite] = None, + overwrite_save: bool = False, run_after_init: bool = False, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index e737865f..c0d965c1 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -230,6 +230,7 @@ def __init__( label: str, *args, parent: Optional[Composite] = None, + overwrite_save: bool = False, run_after_init: bool = False, **kwargs, ): @@ -259,8 +260,39 @@ def __init__( # (or create) an executor process without ever trying to pickle a `_thread.lock` self.future: None | Future = None - def __post__(self, *args, run_after_init: bool = False, **kwargs): - if run_after_init: + def __post__( + self, + *args, + overwrite_save: bool = False, + run_after_init: bool = False, + **kwargs + ): + hardcoded_tinybase_filename = "project.h5" + save_exists = self.working_directory.file_exists(hardcoded_tinybase_filename) + + if save_exists and overwrite_save: + self.working_directory.remove_file(hardcoded_tinybase_filename) + + self.working_directory.delete(only_if_empty=True) + # Touching the working directory may have created it -- if it's there and empty + # just clean it up + + do_load = save_exists and not overwrite_save + + if do_load and run_after_init: + raise ValueError( + "Can't both load _and_ run after init -- either delete the save file " + "(e.g. with with the `overwrite_save=True` kwarg), change the node " + "label to work in a new space, or give up on running after init." 
+ ) + elif do_load: + warnings.warn( + f"A saved file was found for the node {self.label} -- attempting to " + f"load it...(To delete the saved file instead, use " + f"`overwrite_save=True`)" + ) + self.load() + elif run_after_init: try: self.run() except ReadinessError: diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 30db654e..b294a74d 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -188,6 +188,7 @@ def __init__( self, label: str, *nodes: Node, + overwrite_save: bool = False, run_after_init: bool = False, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, From b2999730536e1575e01d5f1318de76f6ec98dd68 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 10 Jan 2024 14:14:27 -0800 Subject: [PATCH 006/166] :bug: use correct method name --- pyiron_workflow/node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index c0d965c1..05a83a1a 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -271,7 +271,7 @@ def __post__( save_exists = self.working_directory.file_exists(hardcoded_tinybase_filename) if save_exists and overwrite_save: - self.working_directory.remove_file(hardcoded_tinybase_filename) + self.working_directory.remove(hardcoded_tinybase_filename) self.working_directory.delete(only_if_empty=True) # Touching the working directory may have created it -- if it's there and empty From c1d107bd8a4c9abef892913c42ac14aba5db941f Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 10 Jan 2024 14:17:46 -0800 Subject: [PATCH 007/166] Save list not set The storage can't handle the set and leaves empty contents --- pyiron_workflow/workflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index b294a74d..ec780c4c 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -294,7 +294,7 @@ def _parent(self, new_parent: 
None): ) def to_storage(self, storage): - storage["package_requirements"] = self.package_requirements + storage["package_requirements"] = list(self.package_requirements) storage["automate_execution"] = self.automate_execution super().to_storage(storage) From 87ae9f9bea234c9d7b1b75f57b66d8289528d137 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Wed, 10 Jan 2024 22:32:01 +0000 Subject: [PATCH 008/166] Format black --- pyiron_workflow/node.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 05a83a1a..d97693fa 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -265,7 +265,7 @@ def __post__( *args, overwrite_save: bool = False, run_after_init: bool = False, - **kwargs + **kwargs, ): hardcoded_tinybase_filename = "project.h5" save_exists = self.working_directory.file_exists(hardcoded_tinybase_filename) @@ -1075,6 +1075,7 @@ def save(self): i.e. the same data IO channel values, the same flags, etc. 
""" self.to_storage(self.storage) + save.__doc__ += _save_load_warnings def load(self): @@ -1091,11 +1092,13 @@ def load(self): f"but the saved node has type {self.storage['class_name']}" ) self.from_storage(self.storage) + save.__doc__ += _save_load_warnings @property def storage(self): from pyiron_contrib.tinybase.project.h5io import SingleHdfProject + return SingleHdfProject.open_location( str(self.working_directory.path.resolve()) ).create_storage(self.label) From 583cb33d8f066b057dfaca2536334ba8a87040d7 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 15 Jan 2024 13:36:40 -0800 Subject: [PATCH 009/166] Use renamed method --- pyiron_workflow/node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index d97693fa..87b18a65 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -271,7 +271,7 @@ def __post__( save_exists = self.working_directory.file_exists(hardcoded_tinybase_filename) if save_exists and overwrite_save: - self.working_directory.remove(hardcoded_tinybase_filename) + self.working_directory.remove_files(hardcoded_tinybase_filename) self.working_directory.delete(only_if_empty=True) # Touching the working directory may have created it -- if it's there and empty From f377971c18e6fa7ea1ac25be0a22d1cfcc980ac3 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 16 Jan 2024 13:47:15 -0800 Subject: [PATCH 010/166] Clean the working directory attribute --- pyiron_workflow/node.py | 2 +- pyiron_workflow/snippets/files.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 87b18a65..7707cbfb 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -273,7 +273,7 @@ def __post__( if save_exists and overwrite_save: self.working_directory.remove_files(hardcoded_tinybase_filename) - self.working_directory.delete(only_if_empty=True) + self._working_directory = 
self.working_directory.delete(only_if_empty=True) # Touching the working directory may have created it -- if it's there and empty # just clean it up diff --git a/pyiron_workflow/snippets/files.py b/pyiron_workflow/snippets/files.py index 8914913b..cdf1fb60 100644 --- a/pyiron_workflow/snippets/files.py +++ b/pyiron_workflow/snippets/files.py @@ -46,6 +46,8 @@ def create(self): def delete(self, only_if_empty: bool = False): if self.is_empty() or not only_if_empty: delete_files_and_directories_recursively(self.path) + else: + return self def list_content(self): return categorize_folder_items(self.path) From 9f6153adabb147e9ca7f1b2b0cb702ed067ad845 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 16 Jan 2024 13:47:40 -0800 Subject: [PATCH 011/166] Use H5ioStorage directly Instead of the project --- pyiron_workflow/node.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 7707cbfb..9ad724d4 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1097,8 +1097,9 @@ def load(self): @property def storage(self): - from pyiron_contrib.tinybase.project.h5io import SingleHdfProject + from pyiron_contrib.tinybase.storage import H5ioStorage + from h5io_browser import Pointer - return SingleHdfProject.open_location( - str(self.working_directory.path.resolve()) - ).create_storage(self.label) + # UGLY -- make sure it exists, as accessing .path directly doesn't! 
+ storage_file = str((self.working_directory.path / "project.h5").resolve()) # self.label + return H5ioStorage(Pointer(storage_file), None) From f541fa957b1e7e103587fe3a5feb1f8ebc8f2794 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 17 Jan 2024 16:24:05 -0800 Subject: [PATCH 012/166] Don't rely on `return` behaviour on `DirectoryObject.delete` --- pyiron_workflow/node.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 9ad724d4..dc1db5d8 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -273,9 +273,11 @@ def __post__( if save_exists and overwrite_save: self.working_directory.remove_files(hardcoded_tinybase_filename) - self._working_directory = self.working_directory.delete(only_if_empty=True) - # Touching the working directory may have created it -- if it's there and empty - # just clean it up + if self.working_directory.is_empty(): + self.working_directory.delete() + self._working_directory = None + # Touching the working directory may have created it -- if it's there and + # empty just clean it up do_load = save_exists and not overwrite_save From b6b47ce95ddc47d412907533e53cb8c829a4f68f Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 17 Jan 2024 16:25:06 -0800 Subject: [PATCH 013/166] Revert the return behaviour on `DirectoryObject.delete` It was just weird. Just explicitly check for empty. Honestly, we can probably revert the `only_if_empty` kwarg too, but that can be its own PR. 
--- pyiron_workflow/snippets/files.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyiron_workflow/snippets/files.py b/pyiron_workflow/snippets/files.py index cdf1fb60..8914913b 100644 --- a/pyiron_workflow/snippets/files.py +++ b/pyiron_workflow/snippets/files.py @@ -46,8 +46,6 @@ def create(self): def delete(self, only_if_empty: bool = False): if self.is_empty() or not only_if_empty: delete_files_and_directories_recursively(self.path) - else: - return self def list_content(self): return categorize_folder_items(self.path) From b55702c4f2f89a90c34dcbb49c37f21d967ec7e3 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 17 Jan 2024 16:28:06 -0800 Subject: [PATCH 014/166] Remove comment --- pyiron_workflow/node.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index dc1db5d8..fbb5b8ad 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1102,6 +1102,5 @@ def storage(self): from pyiron_contrib.tinybase.storage import H5ioStorage from h5io_browser import Pointer - # UGLY -- make sure it exists, as accessing .path directly doesn't! 
storage_file = str((self.working_directory.path / "project.h5").resolve()) # self.label return H5ioStorage(Pointer(storage_file), None) From ff0463d37302642792fb3eaac4fd2772f43c7549 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 17 Jan 2024 16:36:08 -0800 Subject: [PATCH 015/166] No magic strings --- pyiron_workflow/node.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index fbb5b8ad..12ec4516 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -225,6 +225,10 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): package_identifier = None + _STORAGE_FILE_NAME = "project.h5" + # This isn't nice, just a technical necessity in the current implementation + # Eventually, of course, this needs to be _at least_ file-format independent + def __init__( self, label: str, @@ -267,11 +271,10 @@ def __post__( run_after_init: bool = False, **kwargs, ): - hardcoded_tinybase_filename = "project.h5" - save_exists = self.working_directory.file_exists(hardcoded_tinybase_filename) + save_exists = self.working_directory.file_exists(self._STORAGE_FILE_NAME) if save_exists and overwrite_save: - self.working_directory.remove_files(hardcoded_tinybase_filename) + self.working_directory.remove_files(self._STORAGE_FILE_NAME) if self.working_directory.is_empty(): self.working_directory.delete() @@ -1102,5 +1105,7 @@ def storage(self): from pyiron_contrib.tinybase.storage import H5ioStorage from h5io_browser import Pointer - storage_file = str((self.working_directory.path / "project.h5").resolve()) # self.label + storage_file = str( + (self.working_directory.path / self._STORAGE_FILE_NAME).resolve() + ) return H5ioStorage(Pointer(storage_file), None) From 6309516a557450dc5ba7e379e5659c68b3edf94d Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 18 Jan 2024 09:49:45 -0800 Subject: [PATCH 016/166] Revert name changes on the DirectoryObject removing files method --- 
pyiron_workflow/node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 12ec4516..ba11724c 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -274,7 +274,7 @@ def __post__( save_exists = self.working_directory.file_exists(self._STORAGE_FILE_NAME) if save_exists and overwrite_save: - self.working_directory.remove_files(self._STORAGE_FILE_NAME) + self.working_directory.remove_file(self._STORAGE_FILE_NAME) if self.working_directory.is_empty(): self.working_directory.delete() From 8edc5a0fb61fdf93254fa1d636e52713af548a48 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 18 Jan 2024 09:54:36 -0800 Subject: [PATCH 017/166] Use FileObject directly --- pyiron_workflow/node.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index ba11724c..4191fcd4 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -19,7 +19,7 @@ NotData, ) from pyiron_workflow.draw import Node as GraphvizNode -from pyiron_workflow.snippets.files import DirectoryObject +from pyiron_workflow.snippets.files import FileObject, DirectoryObject from pyiron_workflow.has_to_dict import HasToDict from pyiron_workflow.io import Signals from pyiron_workflow.topology import ( @@ -274,7 +274,7 @@ def __post__( save_exists = self.working_directory.file_exists(self._STORAGE_FILE_NAME) if save_exists and overwrite_save: - self.working_directory.remove_file(self._STORAGE_FILE_NAME) + FileObject(self._STORAGE_FILE_NAME, self.working_directory).delete() if self.working_directory.is_empty(): self.working_directory.delete() From 404d876bbde1fb74b4fdb6fb2be25709e303d604 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 18 Jan 2024 13:29:09 -0800 Subject: [PATCH 018/166] Store child nodes under their own label, not in a substorage This will let us use the semantic path to access storage later --- pyiron_workflow/composite.py | 6 +++--- 
pyiron_workflow/workflow.py | 5 ++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index a2f3173e..27c4177e 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -571,9 +571,9 @@ def package_requirements(self) -> set[str]: return set(n.package_identifier for n in self) def to_storage(self, storage): - nodes_storage = storage.create_group("child_nodes") + storage["child_node_labels"] = list(self.nodes.keys()) for label, node in self.nodes.items(): - node.to_storage(nodes_storage.create_group(label)) + node.to_storage(storage.create_group(label)) storage["inputs_map"] = self.inputs_map storage["outputs_map"] = self.outputs_map @@ -582,7 +582,7 @@ def to_storage(self, storage): def from_storage(self, storage): for node in self: - node.from_storage(storage["child_nodes"][node.label]) + node.from_storage(storage[node.label]) self.inputs_map = storage["inputs_map"] self.outputs_map = storage["outputs_map"] diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index ec780c4c..22838658 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -322,9 +322,8 @@ def from_storage(self, storage): for package_identifier in storage["package_requirements"]: self.register(package_identifier) - nodes_storage = storage["child_nodes"] - for child_label in nodes_storage.list_groups(): - child_data = nodes_storage[child_label] + for child_label in storage["child_node_labels"]: + child_data = storage[child_label] pid = child_data["package_identifier"] cls = child_data["class_name"] self.create[pid][cls](label=child_label, parent=self) From 359c3a5e55fa82c2b37b57a8436cc00de05f04fa Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 18 Jan 2024 14:01:43 -0800 Subject: [PATCH 019/166] Stick the signals information into properties This way there is an attribute for the data, which prevents the field from being shared by a child node --- 
pyiron_workflow/workflow.py | 62 ++++++++++++++++++++++++++++--------- 1 file changed, 47 insertions(+), 15 deletions(-) diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 22838658..28d374ad 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -293,11 +293,20 @@ def _parent(self, new_parent: None): f"{type(new_parent)}" ) - def to_storage(self, storage): - storage["package_requirements"] = list(self.package_requirements) - storage["automate_execution"] = self.automate_execution - super().to_storage(storage) + @property + def _data_connections(self) -> list[tuple[tuple[str, str], tuple[str, str]]]: + """ + A string-tuple representation of all connections between the data channels of + child nodes. + Intended for internal use during storage, so that connections can be + represented in plain strings, and stored on an attribute to guarantee that the + name does not conflict with a child node label. + + Returns: + (list): Nested-pair tuples of (node label, channel label) data for + (input, output) channels of data connections between children. + """ data_connections = [] for node in self: for inp_label, inp in node.inputs.items(): @@ -305,17 +314,40 @@ def to_storage(self, storage): data_connections.append( ((node.label, inp_label), (conn.node.label, conn.label)) ) - storage["data_connections"] = data_connections + return data_connections + + @property + def _signal_connections(self) -> list[tuple[tuple[str, str], tuple[str, str]]]: + """ + A string-tuple representation of all connections between the signal channels of + child nodes. + + Intended for internal use during storage, so that connections can be + represented in plain strings, and stored on an attribute to guarantee that the + name does not conflict + + Returns: + (list): Nested-pair tuples of (node label, channel label) data for + (input, output) channels of signal connections between children. 
+ """ + signal_connections = [] + for node in self: + for inp_label, inp in node.signals.input.items(): + for conn in inp.connections: + signal_connections.append( + ((node.label, inp_label), (conn.node.label, conn.label)) + ) + return signal_connections + + def to_storage(self, storage): + storage["package_requirements"] = list(self.package_requirements) + storage["automate_execution"] = self.automate_execution + super().to_storage(storage) + + storage["_data_connections"] = self._data_connections if not self.automate_execution: - signal_connections = [] - for node in self: - for inp_label, inp in node.signals.input.items(): - for conn in inp.connections: - signal_connections.append( - ((node.label, inp_label), (conn.node.label, conn.label)) - ) - storage["signal_connections"] = signal_connections + storage["_signal_connections"] = self._signal_connections storage["starting_nodes_labels"] = [n.label for n in self.starting_nodes] def from_storage(self, storage): @@ -332,14 +364,14 @@ def from_storage(self, storage): super().from_storage(storage) - for data_connection in storage["data_connections"]: + for data_connection in storage["_data_connections"]: (inp_label, inp_channel), (out_label, out_channel) = data_connection self.nodes[inp_label].inputs[inp_channel].connect( self.nodes[out_label].outputs[out_channel] ) if not self.automate_execution: - for signal_connection in storage["signal_connections"]: + for signal_connection in storage["_signal_connections"]: (inp_label, inp_channel), (out_label, out_channel) = signal_connection self.nodes[inp_label].signals.input[inp_channel].connect( self.nodes[out_label].signals.output[out_channel] From 53b3d7ecb9fc83db5897985ca02db90970ddba23 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 18 Jan 2024 14:11:46 -0800 Subject: [PATCH 020/166] Pull the same trick with the node class name --- pyiron_workflow/node.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/node.py 
b/pyiron_workflow/node.py index 4191fcd4..ce298dc8 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1018,9 +1018,16 @@ def executor_shutdown(self, wait=True, *, cancel_futures=False): except AttributeError: pass + @property + def class_name(self) -> str: + """The class name of the node""" + # Since we want this directly in storage, put it in an attribute so it is + # guaranteed not to conflict with a child node label + return self.__class__.__name__ + def to_storage(self, storage): storage["package_identifier"] = self.package_identifier - storage["class_name"] = self.__class__.__name__ + storage["class_name"] = self.class_name storage["label"] = self.label storage["running"] = self.running storage["failed"] = self.failed From e2883835d383b1bbfb40993c79a13a37682e5c39 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 18 Jan 2024 14:12:07 -0800 Subject: [PATCH 021/166] Modify remaining storage fields to use existing class fields --- pyiron_workflow/composite.py | 2 +- pyiron_workflow/node.py | 8 ++++---- pyiron_workflow/workflow.py | 6 +++--- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 27c4177e..6f76b1bb 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -571,7 +571,7 @@ def package_requirements(self) -> set[str]: return set(n.package_identifier for n in self) def to_storage(self, storage): - storage["child_node_labels"] = list(self.nodes.keys()) + storage["nodes"] = list(self.nodes.keys()) for label, node in self.nodes.items(): node.to_storage(storage.create_group(label)) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index ce298dc8..da4a6a8f 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1032,11 +1032,11 @@ def to_storage(self, storage): storage["running"] = self.running storage["failed"] = self.failed - data_inputs = storage.create_group("data_inputs") + data_inputs = 
storage.create_group("inputs") for label, channel in self.inputs.items(): channel.to_storage(data_inputs.create_group(label)) - data_outputs = storage.create_group("data_outputs") + data_outputs = storage.create_group("outputs") for label, channel in self.outputs.items(): channel.to_storage(data_outputs.create_group(label)) @@ -1044,11 +1044,11 @@ def from_storage(self, storage): self.running = storage["running"] self.failed = storage["failed"] - data_inputs = storage["data_inputs"] + data_inputs = storage["inputs"] for label in data_inputs.list_groups(): self.inputs[label].from_storage(data_inputs[label]) - data_outputs = storage["data_outputs"] + data_outputs = storage["outputs"] for label in data_outputs.list_groups(): self.outputs[label].from_storage(data_outputs[label]) diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 28d374ad..15aeba96 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -348,13 +348,13 @@ def to_storage(self, storage): if not self.automate_execution: storage["_signal_connections"] = self._signal_connections - storage["starting_nodes_labels"] = [n.label for n in self.starting_nodes] + storage["starting_nodes"] = [n.label for n in self.starting_nodes] def from_storage(self, storage): for package_identifier in storage["package_requirements"]: self.register(package_identifier) - for child_label in storage["child_node_labels"]: + for child_label in storage["nodes"]: child_data = storage[child_label] pid = child_data["package_identifier"] cls = child_data["class_name"] @@ -377,5 +377,5 @@ def from_storage(self, storage): self.nodes[out_label].signals.output[out_channel] ) self.starting_nodes = [ - self.nodes[label] for label in storage["starting_nodes_labels"] + self.nodes[label] for label in storage["starting_nodes"] ] From 89515288ad19e3235adbc70117ae0ac5afb3c103 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 18 Jan 2024 14:18:56 -0800 Subject: [PATCH 022/166] Refactor: slide --- 
pyiron_workflow/workflow.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 15aeba96..622e72c6 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -268,19 +268,6 @@ def deserialize(self, source): def _parent(self) -> None: return None - def save(self): - if any(node.package_identifier is None for node in self): - raise NotImplementedError( - f"{self.__class__.__name__} can currently only save itself to file if " - f"_all_ of its child nodes were created via the creator and have an " - f"associated `package_identifier` -- otherwise we won't know how to " - f"re-instantiate them at load time! Right now this is as easy as " - f"moving your custom nodes to their own .py file and registering it " - f"like any other node package. Remember that this new module needs to " - f"be in your python path and importable at load time too." - ) - self.to_storage(self.storage) - @_parent.setter def _parent(self, new_parent: None): # Currently workflows are not allowed to have a parent -- maybe we want to @@ -379,3 +366,16 @@ def from_storage(self, storage): self.starting_nodes = [ self.nodes[label] for label in storage["starting_nodes"] ] + + def save(self): + if any(node.package_identifier is None for node in self): + raise NotImplementedError( + f"{self.__class__.__name__} can currently only save itself to file if " + f"_all_ of its child nodes were created via the creator and have an " + f"associated `package_identifier` -- otherwise we won't know how to " + f"re-instantiate them at load time! Right now this is as easy as " + f"moving your custom nodes to their own .py file and registering it " + f"like any other node package. Remember that this new module needs to " + f"be in your python path and importable at load time too." 
+ ) + self.to_storage(self.storage) \ No newline at end of file From 7777c34a2a78efda2e1fc727de94240220e3a799 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 18 Jan 2024 14:20:10 -0800 Subject: [PATCH 023/166] Remove serialization placeholders --- pyiron_workflow/workflow.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 622e72c6..27fbef47 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -257,13 +257,6 @@ def to_node(self): """ raise NotImplementedError - # (De)serialization is necessary throughout these classes, but not implemented here - def serialize(self): - raise NotImplementedError - - def deserialize(self, source): - raise NotImplementedError - @property def _parent(self) -> None: return None From 49924fbce27a322deaf7b0fbe59b5d230b40f324 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Thu, 18 Jan 2024 23:13:16 +0000 Subject: [PATCH 024/166] Format black --- pyiron_workflow/workflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 27fbef47..31a56f03 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -371,4 +371,4 @@ def save(self): f"like any other node package. Remember that this new module needs to " f"be in your python path and importable at load time too." 
) - self.to_storage(self.storage) \ No newline at end of file + self.to_storage(self.storage) From a5bea04eb2c29842ed02ae51e674b073160077b3 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 18 Jan 2024 15:14:02 -0800 Subject: [PATCH 025/166] Point storage to that node's particular storage spot --- pyiron_workflow/node.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 3b162545..bd3e4ea2 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1144,6 +1144,6 @@ def storage(self): from h5io_browser import Pointer storage_file = str( - (self.working_directory.path / self._STORAGE_FILE_NAME).resolve() + (self.graph_root.working_directory.path / self._STORAGE_FILE_NAME).resolve() ) - return H5ioStorage(Pointer(storage_file), None) + return H5ioStorage(Pointer(storage_file, h5_path=self.graph_path), None) From 64f6d505dab6fcae3894fc61dab02723ff65fde5 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 08:56:25 -0800 Subject: [PATCH 026/166] Break out the storage file path as a property --- pyiron_workflow/node.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index bd3e4ea2..27ce9202 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1138,12 +1138,18 @@ def load(self): save.__doc__ += _save_load_warnings + @property + def _storage_file_path(self) -> str: + return str( + (self.graph_root.working_directory.path / self._STORAGE_FILE_NAME).resolve() + ) + @property def storage(self): from pyiron_contrib.tinybase.storage import H5ioStorage from h5io_browser import Pointer - storage_file = str( - (self.graph_root.working_directory.path / self._STORAGE_FILE_NAME).resolve() + return H5ioStorage( + Pointer(self._storage_file_path, h5_path=self.graph_path), + None ) - return H5ioStorage(Pointer(storage_file, h5_path=self.graph_path), None) From 
afc2ff1e62b49b0923d7af1aad3ed19fc75d964c Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 09:13:04 -0800 Subject: [PATCH 027/166] Use the save file property --- pyiron_workflow/node.py | 32 ++++++++++++++++++++++---------- 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 27ce9202..1e44d34b 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -10,6 +10,7 @@ import warnings from abc import ABC, abstractmethod from concurrent.futures import Executor as StdLibExecutor, Future +import os from typing import Any, Literal, Optional, TYPE_CHECKING from pyiron_workflow.channels import ( @@ -276,16 +277,22 @@ def __post__( run_after_init: bool = False, **kwargs, ): - save_exists = self.working_directory.file_exists(self._STORAGE_FILE_NAME) - - if save_exists and overwrite_save: - FileObject(self._STORAGE_FILE_NAME, self.working_directory).delete() - - if self.working_directory.is_empty(): - self.working_directory.delete() - self._working_directory = None - # Touching the working directory may have created it -- if it's there and - # empty just clean it up + save_file_exists = os.path.isfile(self._storage_file_path) + if save_file_exists: + save_exists = self.storage_has_contents + if save_exists and overwrite_save: + up = self.storage.close() + del up[self.label] + if self.parent is None: + FileObject(self._STORAGE_FILE_NAME, self.working_directory).delete() + + if self.working_directory.is_empty(): + self.working_directory.delete() + self._working_directory = None + # Touching the working directory may have created it -- if it's there and + # empty just clean it up + else: + save_exists = False do_load = save_exists and not overwrite_save @@ -1153,3 +1160,8 @@ def storage(self): Pointer(self._storage_file_path, h5_path=self.graph_path), None ) + + @property + def storage_has_contents(self) -> bool: + n_items = len(self.storage.list_groups()) + len(self.storage.list_nodes()) 
+ return n_items > 0 From dd3bb737016b110343af7cf17cb19c7ab6542b61 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 09:14:30 -0800 Subject: [PATCH 028/166] Refactor: extract method for cleaning empty working directory --- pyiron_workflow/node.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 1e44d34b..fa2388d9 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -285,12 +285,8 @@ def __post__( del up[self.label] if self.parent is None: FileObject(self._STORAGE_FILE_NAME, self.working_directory).delete() + self.tidy_working_directory() - if self.working_directory.is_empty(): - self.working_directory.delete() - self._working_directory = None - # Touching the working directory may have created it -- if it's there and - # empty just clean it up else: save_exists = False @@ -1165,3 +1161,13 @@ def storage(self): def storage_has_contents(self) -> bool: n_items = len(self.storage.list_groups()) + len(self.storage.list_nodes()) return n_items > 0 + + def tidy_working_directory(self): + """ + If the working directory is completely empty, deletes it. 
+ """ + if self.working_directory.is_empty(): + self.working_directory.delete() + self._working_directory = None + # Touching the working directory may have created it -- if it's there and + # empty just clean it up From b00fd9e82954185ea45c2d0acd2d57a6f40a49f5 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 09:18:36 -0800 Subject: [PATCH 029/166] Refactor: extract method for deleting storage --- pyiron_workflow/node.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index fa2388d9..049091a0 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -279,14 +279,11 @@ def __post__( ): save_file_exists = os.path.isfile(self._storage_file_path) if save_file_exists: - save_exists = self.storage_has_contents - if save_exists and overwrite_save: - up = self.storage.close() - del up[self.label] - if self.parent is None: - FileObject(self._STORAGE_FILE_NAME, self.working_directory).delete() - self.tidy_working_directory() - + if overwrite_save: + self.delete_storage() + save_exists = False + else: + save_exists = self.storage_has_contents else: save_exists = False @@ -1171,3 +1168,11 @@ def tidy_working_directory(self): self._working_directory = None # Touching the working directory may have created it -- if it's there and # empty just clean it up + + def delete_storage(self): + if self.storage_has_contents: + up = self.storage.close() + del up[self.label] + if self.parent is None: + FileObject(self._STORAGE_FILE_NAME, self.working_directory).delete() + self.tidy_working_directory() From 18f147746144a067cd1c6a99df928badce57399b Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 09:21:24 -0800 Subject: [PATCH 030/166] Simplify logic clauses (a little) --- pyiron_workflow/node.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 049091a0..83e1df9f 100644 --- 
a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -277,17 +277,14 @@ def __post__( run_after_init: bool = False, **kwargs, ): - save_file_exists = os.path.isfile(self._storage_file_path) - if save_file_exists: + if os.path.isfile(self._storage_file_path): if overwrite_save: self.delete_storage() - save_exists = False + do_load = False else: - save_exists = self.storage_has_contents + do_load = self.storage_has_contents else: - save_exists = False - - do_load = save_exists and not overwrite_save + do_load = False if do_load and run_after_init: raise ValueError( From 61470aadd172cdc19e4616c30196084752084f8a Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 09:24:54 -0800 Subject: [PATCH 031/166] Simplify logic clauses (actually) --- pyiron_workflow/node.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 83e1df9f..80ff2a8b 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -277,14 +277,11 @@ def __post__( run_after_init: bool = False, **kwargs, ): - if os.path.isfile(self._storage_file_path): - if overwrite_save: - self.delete_storage() - do_load = False - else: - do_load = self.storage_has_contents - else: + if overwrite_save: + self.delete_storage() do_load = False + else: + do_load = self.storage_has_contents if do_load and run_after_init: raise ValueError( @@ -1153,8 +1150,12 @@ def storage(self): @property def storage_has_contents(self) -> bool: - n_items = len(self.storage.list_groups()) + len(self.storage.list_nodes()) - return n_items > 0 + return ( + os.path.isfile(self._storage_file_path) + and ( + len(self.storage.list_groups()) + len(self.storage.list_nodes()) + ) > 0 + ) def tidy_working_directory(self): """ From 39e943d47538e730233967c50451ed7138342c04 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 09:25:13 -0800 Subject: [PATCH 032/166] Add comment --- pyiron_workflow/node.py | 1 + 1 file changed, 1 
insertion(+) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 80ff2a8b..4b131117 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -301,6 +301,7 @@ def __post__( self.run() except ReadinessError: pass + # Else neither loading nor running now -- no action required! @property def label(self) -> str: From 681ea49f5ff46d950ecd2aac8ce1d1bb00963571 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 09:27:49 -0800 Subject: [PATCH 033/166] Rely on children loading themselves at instantiation --- pyiron_workflow/composite.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 6f76b1bb..e9170550 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -581,9 +581,6 @@ def to_storage(self, storage): super().to_storage(storage) def from_storage(self, storage): - for node in self: - node.from_storage(storage[node.label]) - self.inputs_map = storage["inputs_map"] self.outputs_map = storage["outputs_map"] self._rebuild_data_io() # To apply any map that was saved From e9cac1e2a22420e2729d9bbc80b51f3205f45095 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 09:29:30 -0800 Subject: [PATCH 034/166] Refactor: extract method for readability --- pyiron_workflow/workflow.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 31a56f03..2de7ef3d 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -331,15 +331,7 @@ def to_storage(self, storage): storage["starting_nodes"] = [n.label for n in self.starting_nodes] def from_storage(self, storage): - for package_identifier in storage["package_requirements"]: - self.register(package_identifier) - - for child_label in storage["nodes"]: - child_data = storage[child_label] - pid = child_data["package_identifier"] - cls = child_data["class_name"] - 
self.create[pid][cls](label=child_label, parent=self) - + self._reinstantiate_children(storage) self.automate_execution = storage["automate_execution"] super().from_storage(storage) @@ -360,6 +352,18 @@ def from_storage(self, storage): self.nodes[label] for label in storage["starting_nodes"] ] + def _reinstantiate_children(self, storage): + # Parents attempt to reload their data on instantiation, + # so there is no need to explicitly load any of these children + for package_identifier in storage["package_requirements"]: + self.register(package_identifier) + + for child_label in storage["nodes"]: + child_data = storage[child_label] + pid = child_data["package_identifier"] + cls = child_data["class_name"] + self.create[pid][cls](label=child_label, parent=self) + def save(self): if any(node.package_identifier is None for node in self): raise NotImplementedError( From 02b416f067a7d3e1ad7d93da6274772f44cb7a92 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 09:30:06 -0800 Subject: [PATCH 035/166] Add comment --- pyiron_workflow/workflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 2de7ef3d..66413435 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -333,7 +333,7 @@ def to_storage(self, storage): def from_storage(self, storage): self._reinstantiate_children(storage) self.automate_execution = storage["automate_execution"] - + # Super call will rebuild the IO, so first get our automate_execution flag super().from_storage(storage) for data_connection in storage["_data_connections"]: From 00ef4927c8d2883325feb8e172ebb6726798efbf Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 09:32:41 -0800 Subject: [PATCH 036/166] Refactor: extract methods for readability --- pyiron_workflow/workflow.py | 39 ++++++++++++++++++++++--------------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/pyiron_workflow/workflow.py 
b/pyiron_workflow/workflow.py index 66413435..42552178 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -335,22 +335,7 @@ def from_storage(self, storage): self.automate_execution = storage["automate_execution"] # Super call will rebuild the IO, so first get our automate_execution flag super().from_storage(storage) - - for data_connection in storage["_data_connections"]: - (inp_label, inp_channel), (out_label, out_channel) = data_connection - self.nodes[inp_label].inputs[inp_channel].connect( - self.nodes[out_label].outputs[out_channel] - ) - - if not self.automate_execution: - for signal_connection in storage["_signal_connections"]: - (inp_label, inp_channel), (out_label, out_channel) = signal_connection - self.nodes[inp_label].signals.input[inp_channel].connect( - self.nodes[out_label].signals.output[out_channel] - ) - self.starting_nodes = [ - self.nodes[label] for label in storage["starting_nodes"] - ] + self._rebuild_connections(storage) def _reinstantiate_children(self, storage): # Parents attempt to reload their data on instantiation, @@ -364,6 +349,28 @@ def _reinstantiate_children(self, storage): cls = child_data["class_name"] self.create[pid][cls](label=child_label, parent=self) + def _rebuild_connections(self, storage): + self._rebuild_data_connections(storage) + if not self.automate_execution: + self._rebuild_execution_graph(storage) + + def _rebuild_data_connections(self, storage): + for data_connection in storage["_data_connections"]: + (inp_label, inp_channel), (out_label, out_channel) = data_connection + self.nodes[inp_label].inputs[inp_channel].connect( + self.nodes[out_label].outputs[out_channel] + ) + + def _rebuild_execution_graph(self, storage): + for signal_connection in storage["_signal_connections"]: + (inp_label, inp_channel), (out_label, out_channel) = signal_connection + self.nodes[inp_label].signals.input[inp_channel].connect( + self.nodes[out_label].signals.output[out_channel] + ) + self.starting_nodes = [ + 
self.nodes[label] for label in storage["starting_nodes"] + ] + def save(self): if any(node.package_identifier is None for node in self): raise NotImplementedError( From 5388e1910b638477526974a637aa34ff8d611781 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 09:42:38 -0800 Subject: [PATCH 037/166] Tidy the working directory after checking for storage contents --- pyiron_workflow/node.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 4b131117..87278260 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1151,12 +1151,14 @@ def storage(self): @property def storage_has_contents(self) -> bool: - return ( + has_contents = ( os.path.isfile(self._storage_file_path) and ( len(self.storage.list_groups()) + len(self.storage.list_nodes()) ) > 0 ) + self.tidy_working_directory() + return has_contents def tidy_working_directory(self): """ From 032642adacc8c59905da9eed81b2773c95b94e50 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 09:53:01 -0800 Subject: [PATCH 038/166] Tidy the working directory recursively for composites --- pyiron_workflow/composite.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index e9170550..0bb9db7a 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -586,3 +586,8 @@ def from_storage(self, storage): self._rebuild_data_io() # To apply any map that was saved super().from_storage(storage) + + def tidy_working_directory(self): + for node in self: + node.tidy_working_directory() + super().tidy_working_directory() From eda307ab9b99b2b1ac52954402fd9ef7e0345793 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 12:56:56 -0800 Subject: [PATCH 039/166] Clean up at the very end too --- pyiron_workflow/node.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 
87278260..cdfeb836 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -302,6 +302,7 @@ def __post__( except ReadinessError: pass # Else neither loading nor running now -- no action required! + self.graph_root.tidy_working_directory() @property def label(self) -> str: From af0313a591c442d21d0ed56ad83234b2ed4293d0 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 13:22:52 -0800 Subject: [PATCH 040/166] Document saving as an alpha feature --- pyiron_workflow/node.py | 33 ++++++++++++++++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index cdfeb836..9def20cf 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -150,6 +150,35 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): context when you're done with them; we give a convenience method for this. - Nodes created from a registered package store their package identifier as a class attribute. + - [ALPHA FEATURE] Nodes can be saved to and loaded from file. + - On instantiation, nodes will look in the working directory of their + parent-most node for a save file; they will search within this along their + relative semantic path (i.e. the path of node labels) for stored data; if + found, they will use it to load their state. + - Found save files can be deleted and ignored with an initialization kwarg + - You can't load a saved node _and_ run that node after instantiation during + the same instantiation. + - To save a composite graph, _all_ children need to be created from a + registered module or saving will raise an error; + - [ALPHA ISSUE?] Right now that means moving any nodes defined in-notebook + off to a `.py` file. + - [ALPHA ISSUE] Modifications to macros (e.g. replacing a child node) are not + reflected in the saved data -- saving and loading such a graph is likely to + _silently_ misbehave, as the loaded macro will just reinstantiate its + original nodes and connections. 
+ - [ALPHA ISSUE] If the source code (i.e. `.py` files) for a saved graph is + altered between saving and loading the graph, there are no guarnatees about + the loaded state; depending on the nature of the changes everything may + work fine with the new node definition, the graph may load but silently + behave unexpectedly (e.g. if node functionality has changed but the + interface is the same), or may crash on loading (e.g. if IO channel labels + have changed). + - [ALPHA ISSUE] There is no filtering available, saving a node stores all of + its IO and does the same thing recursively for its children; depending on + your graph this could be expensive in terms of storage space and/or time. + - Since nodes store their IO data, all data is expected to be serializable; as + a fallback, the save process will attempt to `pickle` the data. + - While loading is attempted at instantiation, saving only happens on request. This is an abstract class. Children *must* define how :attr:`inputs` and :attr:`outputs` are constructed, what will @@ -159,7 +188,9 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): TODO: - - Everything with (de)serialization for storage + - Allow saving/loading at locations _other_ than the interpreter's working + directory combined with the node's working directory, i.e. decouple the + working directory from the interpreter's `cwd`. 
- Integration with more powerful tools for remote execution (anything obeying the standard interface of a :meth:`submit` method taking the callable and arguments and returning a futures object should work, as long as it can From 38b4b45ad7ae0ea9ce959fe7fcc26a1e6e3034c9 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 13:41:13 -0800 Subject: [PATCH 041/166] Test saving and loading a node --- tests/unit/test_node.py | 47 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 46 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_node.py b/tests/unit/test_node.py index 6b03ae86..198c7b93 100644 --- a/tests/unit/test_node.py +++ b/tests/unit/test_node.py @@ -17,7 +17,7 @@ def add_one(x): class ANode(Node): """To de-abstract the class""" - def __init__(self, label, run_after_init=False, x=None): + def __init__(self, label, run_after_init=False, overwrite_save=False, x=None): super().__init__(label=label) self._inputs = Inputs(InputData("x", self, type_hint=int)) self._outputs = Outputs(OutputData("y", self, type_hint=int)) @@ -361,6 +361,51 @@ def test_graph_info(self): "above." 
) + def test_storage(self): + self.assertIs( + self.n1.outputs.y.value, + NotData, + msg="Sanity check on initial state" + ) + y = self.n1() + self.n1.save() + + x = self.n1.inputs.x.value + reloaded = ANode(self.n1.label, x=x) + self.assertEqual( + y, + reloaded.outputs.y.value, + msg="Nodes should load by default if they find a save file" + ) + + clean_slate = ANode(self.n1.label, x=x, overwrite_save=True) + self.assertIs( + clean_slate.outputs.y.value, + NotData, + msg="Users should be able to ignore a save" + ) + + run_right_away = ANode(self.n1.label, x=x, run_after_init=True) + self.assertEqual( + y, + run_right_away.outputs.y.value, + msg="With nothing to load, running after init is fine" + ) + + run_right_away.save() + with self.assertRaises( + ValueError, + msg="Should be able to both immediately run _and_ load a node at once" + ): + ANode(self.n1.label, x=x, run_after_init=True) + + force_run = ANode(self.n1.label, x=x, run_after_init=True, overwrite_save=True) + self.assertEqual( + y, + force_run.outputs.y.value, + msg="Destroying the save should allow immediate re-running" + ) + if __name__ == '__main__': unittest.main() From 3e604c77f170ad923d62ec23dcd133ac7729dc6d Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 14:52:15 -0800 Subject: [PATCH 042/166] :bug: hack around the fact we're not inheriting from storable And to_object stuff that is complex enough to be coming back as storage (mostly things that are getting cast into `PickleStorable` or whatever it's called --- pyiron_workflow/channels.py | 7 ++++++- pyiron_workflow/composite.py | 13 +++++++++++-- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index ab9af4ca..c9673927 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -467,7 +467,12 @@ def from_storage(self, storage): self.strict_hints = storage["strict_hints"] self.type_hint = storage["type_hint"] self.default = 
storage["default"] - self.value = storage["value"] + from pyiron_contrib.tinybase.storage import GenericStorage + self.value = ( + storage["value"].to_object() + if isinstance(storage["value"], GenericStorage) + else storage["value"] + ) class InputData(DataChannel): diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 0bb9db7a..84e94525 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -581,8 +581,17 @@ def to_storage(self, storage): super().to_storage(storage) def from_storage(self, storage): - self.inputs_map = storage["inputs_map"] - self.outputs_map = storage["outputs_map"] + from pyiron_contrib.tinybase.storage import GenericStorage + self.inputs_map = ( + storage["inputs_map"].to_object() + if isinstance(storage["inputs_map"], GenericStorage) + else storage["inputs_map"] + ) + self.outputs_map = ( + storage["outputs_map"].to_object() + if isinstance(storage["outputs_map"], GenericStorage) + else storage["outputs_map"] + ) self._rebuild_data_io() # To apply any map that was saved super().from_storage(storage) From 133c6d01de23dd7b4b5624fae3868a4e3f16d36c Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 15:48:16 -0800 Subject: [PATCH 043/166] Add a macro to the test demo nodes --- tests/static/demo_nodes.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/static/demo_nodes.py b/tests/static/demo_nodes.py index 533e1005..4a4d7b88 100644 --- a/tests/static/demo_nodes.py +++ b/tests/static/demo_nodes.py @@ -13,4 +13,12 @@ def OptionallyAdd(x: int, y: Optional[int] = None) -> int: return x + y -nodes = [OptionallyAdd] +@Workflow.wrap_as.macro_node("add_three") +def AddThree(macro, x: int) -> int: + macro.one = macro.create.standard.Add(x, 1) + macro.two = macro.create.standard.Add(macro.one, 1) + macro.three = macro.create.standard.Add(macro.two, 1) + return macro.three + + +nodes = [OptionallyAdd, AddThree] From 0504f314b719f305ebb822186258b42e6da4dc47 Mon Sep 
17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 15:58:58 -0800 Subject: [PATCH 044/166] Reload macro children explicitly :bug: the macro children aren't necessarily being given their parent information at instantiation time, so they can't rely on __post__ to reload their data. We do assume that if a macro has been saved, all its children have been saved, so just re-load manually --- pyiron_workflow/macro.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index 70602ee5..3372fadc 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -502,6 +502,14 @@ def _reconnect_run(self, run_signal_pairs_to_restore): def to_workfow(self): raise NotImplementedError + def from_storage(self, storage): + super().from_storage(storage) + # Nodes instantiated in macros probably aren't aware of their parent at + # instantiation time, and thus may be clean (un-loaded) objects -- + # reload their data + for label, node in self.nodes.items(): + node.from_storage(storage[label]) + def macro_node(*output_labels, **node_class_kwargs): """ From a342d34551263e8772a34fc5ca0e8dccef774e0e Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 15:59:26 -0800 Subject: [PATCH 045/166] Test reloading composed workflows --- tests/unit/test_workflow.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index 7b0d64c1..faa9d37f 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -331,6 +331,36 @@ def add_three_macro(macro): wf.m.two.pull(run_parent_trees_too=False) wf.executor_shutdown() + def test_storage(self): + with self.subTest("Fail when nodes have no package"): + wf = Workflow("wf") + wf.n1 = wf.create.Function(plus_one) + with self.assertRaises( + NotImplementedError, msg="We can't handle nodes without a package yet" + ): + wf.save() + + wf = Workflow("wf") + 
wf.register("static.demo_nodes", domain="demo") + wf.inp = wf.create.demo.AddThree(x=0) + wf.out = wf.inp.outputs.add_three + 1 + wf_out = wf() + three_result = wf.inp.three.outputs.add.value + + wf.save() + + reloaded = Workflow("wf") + self.assertEqual( + wf_out.out__add, + reloaded.outputs.out__add.value, + msg="Workflow-level data should get reloaded" + ) + self.assertEqual( + three_result, + reloaded.inp.three.value, + msg="Child data arbitrarily deep should get reloaded" + ) + if __name__ == '__main__': unittest.main() From e40dafac5c9be3611278ca3c4dd96db0e61b3405 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 16:00:53 -0800 Subject: [PATCH 046/166] Clean up after test --- tests/unit/test_workflow.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index faa9d37f..90d4a550 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -361,6 +361,9 @@ def test_storage(self): msg="Child data arbitrarily deep should get reloaded" ) + # Clean up after ourselves + reloaded.delete_storage() + if __name__ == '__main__': unittest.main() From f8dbeea2e7a35d98fe2390be60a42290e0570109 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 16:14:18 -0800 Subject: [PATCH 047/166] Update node package test to reflect new demo node --- tests/unit/test_node_package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_node_package.py b/tests/unit/test_node_package.py index 836450f4..0ef1b3f6 100644 --- a/tests/unit/test_node_package.py +++ b/tests/unit/test_node_package.py @@ -36,7 +36,7 @@ def test_nodes(self): def test_length(self): package = NodePackage("static.demo_nodes") - self.assertEqual(1, len(package)) + self.assertEqual(2, len(package)) if __name__ == '__main__': From 1a5b9536377919e17b4d52c6f8650a56d48a68dd Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 16:15:05 -0800 Subject: [PATCH 048/166] Only allow saving from 
the root For efficiency, we'll want to allow saving just a particular node, but that is not working robustly yet so just explain the current limitations and keep moving --- pyiron_workflow/node.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 9def20cf..24fb5698 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -176,6 +176,8 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): - [ALPHA ISSUE] There is no filtering available, saving a node stores all of its IO and does the same thing recursively for its children; depending on your graph this could be expensive in terms of storage space and/or time. + - [ALPHA ISSUE] Similarly, there is no way to save only part of a graph; only + the entire graph may be saved at once. - Since nodes store their IO data, all data is expected to be serializable; as a fallback, the save process will attempt to `pickle` the data. - While loading is attempted at instantiation, saving only happens on request. @@ -1144,7 +1146,11 @@ def save(self): type can :meth:`load()` the data to return to the same state as the save point, i.e. the same data IO channel values, the same flags, etc. 
""" - self.to_storage(self.storage) + if self.parent is None: + self.to_storage(self.storage) + else: + root = self.graph_root + root.to_storage(root.storage) save.__doc__ += _save_load_warnings From dfd8217845f5878079927598407401031338c825 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Sat, 20 Jan 2024 03:59:48 +0000 Subject: [PATCH 049/166] Format black --- pyiron_workflow/channels.py | 1 + pyiron_workflow/composite.py | 1 + pyiron_workflow/node.py | 7 ++----- pyiron_workflow/workflow.py | 4 +--- 4 files changed, 5 insertions(+), 8 deletions(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index c9673927..6c9f69d6 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -468,6 +468,7 @@ def from_storage(self, storage): self.type_hint = storage["type_hint"] self.default = storage["default"] from pyiron_contrib.tinybase.storage import GenericStorage + self.value = ( storage["value"].to_object() if isinstance(storage["value"], GenericStorage) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 84e94525..c9189615 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -582,6 +582,7 @@ def to_storage(self, storage): def from_storage(self, storage): from pyiron_contrib.tinybase.storage import GenericStorage + self.inputs_map = ( storage["inputs_map"].to_object() if isinstance(storage["inputs_map"], GenericStorage) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 24fb5698..3ce740fc 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1183,17 +1183,14 @@ def storage(self): from h5io_browser import Pointer return H5ioStorage( - Pointer(self._storage_file_path, h5_path=self.graph_path), - None + Pointer(self._storage_file_path, h5_path=self.graph_path), None ) @property def storage_has_contents(self) -> bool: has_contents = ( os.path.isfile(self._storage_file_path) - and ( - len(self.storage.list_groups()) + 
len(self.storage.list_nodes()) - ) > 0 + and (len(self.storage.list_groups()) + len(self.storage.list_nodes())) > 0 ) self.tidy_working_directory() return has_contents diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 42552178..a53c2506 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -367,9 +367,7 @@ def _rebuild_execution_graph(self, storage): self.nodes[inp_label].signals.input[inp_channel].connect( self.nodes[out_label].signals.output[out_channel] ) - self.starting_nodes = [ - self.nodes[label] for label in storage["starting_nodes"] - ] + self.starting_nodes = [self.nodes[label] for label in storage["starting_nodes"]] def save(self): if any(node.package_identifier is None for node in self): From 59c20b3beafcfdd46d887af51da1ed4e0a6436f7 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 20:00:35 -0800 Subject: [PATCH 050/166] Add storage dependencies --- .ci_support/environment.yml | 2 ++ setup.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/.ci_support/environment.yml b/.ci_support/environment.yml index 5491c58f..f8db3b39 100644 --- a/.ci_support/environment.yml +++ b/.ci_support/environment.yml @@ -6,7 +6,9 @@ dependencies: - bidict =0.22.1 - cloudpickle =3.0.0 - graphviz =8.1.0 +- h5io_browser =0.0.6 - matplotlib =3.8.2 +- pyiron_contrib =0.1.13 - pympipool =0.7.9 - python-graphviz =0.20.1 - toposort =1.10 diff --git a/setup.py b/setup.py index b3db3fe9..15231308 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,9 @@ 'bidict==0.22.1', 'cloudpickle==3.0.0', 'graphviz==0.20.1', + 'h5io_browser==0.0.6', 'matplotlib==3.8.2', + 'pyiron_contrib==0.1.13', 'pympipool==0.7.9', 'toposort==1.10', 'typeguard==4.1.5', From 90556712dd5344dfd418cfcf313fea8aedb99e0f Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Sat, 20 Jan 2024 04:00:56 +0000 Subject: [PATCH 051/166] [dependabot skip] Update env file --- .binder/environment.yml | 2 ++ docs/environment.yml | 2 ++ 2 files changed, 4 insertions(+) 
diff --git a/.binder/environment.yml b/.binder/environment.yml index 2124b284..bc9f0619 100644 --- a/.binder/environment.yml +++ b/.binder/environment.yml @@ -6,7 +6,9 @@ dependencies: - bidict =0.22.1 - cloudpickle =3.0.0 - graphviz =8.1.0 +- h5io_browser =0.0.6 - matplotlib =3.8.2 +- pyiron_contrib =0.1.13 - pympipool =0.7.9 - python-graphviz =0.20.1 - toposort =1.10 diff --git a/docs/environment.yml b/docs/environment.yml index 3facc4ff..b551b862 100644 --- a/docs/environment.yml +++ b/docs/environment.yml @@ -11,7 +11,9 @@ dependencies: - bidict =0.22.1 - cloudpickle =3.0.0 - graphviz =8.1.0 +- h5io_browser =0.0.6 - matplotlib =3.8.2 +- pyiron_contrib =0.1.13 - pympipool =0.7.9 - python-graphviz =0.20.1 - toposort =1.10 From 23b2145c9b6546d31774a103569f75b528f2f255 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 19 Jan 2024 20:21:58 -0800 Subject: [PATCH 052/166] Add boto to componsate for contrib --- .ci_support/environment.yml | 1 + setup.py | 1 + 2 files changed, 2 insertions(+) diff --git a/.ci_support/environment.yml b/.ci_support/environment.yml index f8db3b39..c34f2156 100644 --- a/.ci_support/environment.yml +++ b/.ci_support/environment.yml @@ -4,6 +4,7 @@ dependencies: - coveralls - coverage - bidict =0.22.1 +- boto3 - cloudpickle =3.0.0 - graphviz =8.1.0 - h5io_browser =0.0.6 diff --git a/setup.py b/setup.py index 15231308..ef32e8dd 100644 --- a/setup.py +++ b/setup.py @@ -29,6 +29,7 @@ packages=find_packages(exclude=["*tests*", "*docs*", "*binder*", "*conda*", "*notebooks*", "*.ci_support*"]), install_requires=[ 'bidict==0.22.1', + 'boto3', # Just because pyiron_contrib is not making sure it's there 'cloudpickle==3.0.0', 'graphviz==0.20.1', 'h5io_browser==0.0.6', From 6d85b1f32b1bc0632547b01015fdfdc20742ce25 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Sat, 20 Jan 2024 04:22:34 +0000 Subject: [PATCH 053/166] [dependabot skip] Update env file --- .binder/environment.yml | 1 + docs/environment.yml | 1 + 2 files changed, 2 insertions(+) diff 
--git a/.binder/environment.yml b/.binder/environment.yml index bc9f0619..b3f806c2 100644 --- a/.binder/environment.yml +++ b/.binder/environment.yml @@ -4,6 +4,7 @@ dependencies: - coveralls - coverage - bidict =0.22.1 +- boto3 - cloudpickle =3.0.0 - graphviz =8.1.0 - h5io_browser =0.0.6 diff --git a/docs/environment.yml b/docs/environment.yml index b551b862..5c1fef1e 100644 --- a/docs/environment.yml +++ b/docs/environment.yml @@ -9,6 +9,7 @@ dependencies: - coveralls - coverage - bidict =0.22.1 +- boto3 - cloudpickle =3.0.0 - graphviz =8.1.0 - h5io_browser =0.0.6 From b95d38568287bae8e156c8a716f888a899b12cbb Mon Sep 17 00:00:00 2001 From: liamhuber Date: Sat, 20 Jan 2024 15:45:42 -0800 Subject: [PATCH 054/166] Add a flag to save at the end of a run Since the current implementation saves the entire graph, this is equivalent to introducing an `is_checkpoint` flag, but these two functionalities can be separated later when `save` truly only saves the actual node and `.graph_root.save()` checkpoints things. 
--- pyiron_workflow/composite.py | 5 ++++- pyiron_workflow/function.py | 2 ++ pyiron_workflow/macro.py | 2 ++ pyiron_workflow/node.py | 11 +++++++++++ pyiron_workflow/workflow.py | 2 ++ tests/unit/test_node.py | 32 ++++++++++++++++++++++++++++++-- 6 files changed, 51 insertions(+), 3 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index c9189615..39e5a45f 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -108,12 +108,15 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, + save_after_run: bool = False, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, outputs_map: Optional[dict | bidict] = None, **kwargs, ): - super().__init__(*args, label=label, parent=parent, **kwargs) + super().__init__( + *args, label=label, parent=parent, save_after_run=save_after_run, **kwargs + ) self.strict_naming: bool = strict_naming self._inputs_map = None self._outputs_map = None diff --git a/pyiron_workflow/function.py b/pyiron_workflow/function.py index d5906284..866583bc 100644 --- a/pyiron_workflow/function.py +++ b/pyiron_workflow/function.py @@ -333,6 +333,7 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, + save_after_run: bool = False, output_labels: Optional[str | list[str] | tuple[str]] = None, **kwargs, ): @@ -358,6 +359,7 @@ def __init__( super().__init__( label=label if label is not None else self.node_function.__name__, parent=parent, + save_after_run=save_after_run, # **kwargs, ) diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index 3372fadc..56da65fd 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -267,6 +267,7 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, + save_after_run: bool = False, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = 
None, outputs_map: Optional[dict | bidict] = None, @@ -294,6 +295,7 @@ def __init__( super().__init__( label=label if label is not None else self.graph_creator.__name__, parent=parent, + save_after_run=save_after_run, strict_naming=strict_naming, inputs_map=inputs_map, outputs_map=outputs_map, diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 3ce740fc..90fc5551 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -151,6 +151,8 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): - Nodes created from a registered package store their package identifier as a class attribute. - [ALPHA FEATURE] Nodes can be saved to and loaded from file. + - Saving is triggered manually, or by setting a flag to save after the nodes + runs. - On instantiation, nodes will look in the working directory of their parent-most node for a save file; they will search within this along their relative semantic path (i.e. the path of node labels) for stored data; if @@ -224,6 +226,8 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): node. Must be specified in child classes. running (bool): Whether the node has called :meth:`run` and has not yet received output from this call. (Default is False.) + save_after_run (bool): Whether to trigger a save after each run of the node + (currently causes the entire graph to save). (Default is False.) signals (pyiron_workflow.io.Signals): A container for input and output signals, which are channels for controlling execution flow. 
By default, has a :attr:`signals.inputs.run` channel which has a callback to the :meth:`run` method @@ -274,6 +278,7 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, + save_after_run: bool = False, **kwargs, ): """ @@ -302,6 +307,7 @@ def __init__( # This is a simply stop-gap as we work out more sophisticated ways to reference # (or create) an executor process without ever trying to pickle a `_thread.lock` self.future: None | Future = None + self.save_after_run = save_after_run def __post__( self, @@ -637,6 +643,9 @@ def _finish_run(self, run_output: tuple | Future) -> Any | tuple: except Exception as e: self.failed = True raise e + finally: + if self.save_after_run: + self.save() def _finish_run_and_emit_ran(self, run_output: tuple | Future) -> Any | tuple: processed_output = self._finish_run(run_output) @@ -1091,6 +1100,7 @@ def to_storage(self, storage): storage["label"] = self.label storage["running"] = self.running storage["failed"] = self.failed + storage["save_after_run"] = self.save_after_run data_inputs = storage.create_group("inputs") for label, channel in self.inputs.items(): @@ -1103,6 +1113,7 @@ def to_storage(self, storage): def from_storage(self, storage): self.running = storage["running"] self.failed = storage["failed"] + self.save_after_run = storage["save_after_run"] data_inputs = storage["inputs"] for label in data_inputs.list_groups(): diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index a53c2506..86b0c013 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -190,6 +190,7 @@ def __init__( *nodes: Node, overwrite_save: bool = False, run_after_init: bool = False, + save_after_run: bool = False, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, outputs_map: Optional[dict | bidict] = None, @@ -198,6 +199,7 @@ def __init__( super().__init__( label=label, parent=None, + save_after_run=save_after_run, 
strict_naming=strict_naming, inputs_map=inputs_map, outputs_map=outputs_map, diff --git a/tests/unit/test_node.py b/tests/unit/test_node.py index 198c7b93..12a91a4d 100644 --- a/tests/unit/test_node.py +++ b/tests/unit/test_node.py @@ -17,8 +17,15 @@ def add_one(x): class ANode(Node): """To de-abstract the class""" - def __init__(self, label, run_after_init=False, overwrite_save=False, x=None): - super().__init__(label=label) + def __init__( + self, + label, + overwrite_save=False, + run_after_init=False, + save_after_run=False, + x=None, + ): + super().__init__(label=label, save_after_run=save_after_run) self._inputs = Inputs(InputData("x", self, type_hint=int)) self._outputs = Outputs(OutputData("y", self, type_hint=int)) if x is not None: @@ -406,6 +413,27 @@ def test_storage(self): msg="Destroying the save should allow immediate re-running" ) + def test_save_after_run(self): + ANode("just_run", x=0, run_after_init=True) + saves = ANode("run_and_save", x=0, run_after_init=True, save_after_run=True) + y = saves.outputs.y.value + + not_reloaded = ANode("just_run") + self.assertIs( + NotData, + not_reloaded.outputs.y.value, + msg="Should not have saved, therefore should have been nothing to load" + ) + + find_saved = ANode("run_and_save") + self.assertEqual( + y, + find_saved.outputs.y.value, + msg="Should have saved automatically after run, and reloaded on " + "instantiation" + ) + find_saved.delete_storage() # Clean up + if __name__ == '__main__': unittest.main() From 2a6152768ef35d97b3b90ab0ee9f920b6f28731c Mon Sep 17 00:00:00 2001 From: liamhuber Date: Sat, 20 Jan 2024 15:47:32 -0800 Subject: [PATCH 055/166] Cast known bools as bools on load To compensate for the way json(?) 
is converting them to 0/1 --- pyiron_workflow/channels.py | 2 +- pyiron_workflow/node.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index 6c9f69d6..6648eb54 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -464,7 +464,7 @@ def to_storage(self, storage): storage["value"] = self.value def from_storage(self, storage): - self.strict_hints = storage["strict_hints"] + self.strict_hints = bool(storage["strict_hints"]) self.type_hint = storage["type_hint"] self.default = storage["default"] from pyiron_contrib.tinybase.storage import GenericStorage diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 90fc5551..8d0b615b 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1111,9 +1111,9 @@ def to_storage(self, storage): channel.to_storage(data_outputs.create_group(label)) def from_storage(self, storage): - self.running = storage["running"] - self.failed = storage["failed"] - self.save_after_run = storage["save_after_run"] + self.running = bool(storage["running"]) + self.failed = bool(storage["failed"]) + self.save_after_run = bool(storage["save_after_run"]) data_inputs = storage["inputs"] for label in data_inputs.list_groups(): From 1c9d541063a4538c4d8e5076b7145121269884fb Mon Sep 17 00:00:00 2001 From: liamhuber Date: Sat, 20 Jan 2024 15:53:55 -0800 Subject: [PATCH 056/166] Update README --- docs/README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/README.md b/docs/README.md index 9b06a860..4efb7a21 100644 --- a/docs/README.md +++ b/docs/README.md @@ -26,7 +26,9 @@ Individual node computations can be shipped off to parallel processes for scalab Once you're happy with a workflow, it can be easily turned it into a macro for use in other workflows. This allows the clean construction of increasingly complex computation graphs by composing simpler graphs. 
-Nodes (including macros) can be stored in plain text, and registered by future workflows for easy access. This encourages and supports an ecosystem of useful nodes, so you don't need to re-invent the wheel. (This is an alpha-feature, with full support of [FAIR](https://en.wikipedia.org/wiki/FAIR_data) principles for node packages planned.) +Nodes (including macros) can be stored in plain text, and registered by future workflows for easy access. This encourages and supports an ecosystem of useful nodes, so you don't need to re-invent the wheel. (This is a beta-feature, with full support of [FAIR](https://en.wikipedia.org/wiki/FAIR_data) principles for node packages planned.) + +Executed or partially-executed graphs can be stored to file, either by explicit call or automatically after running. When creating a new node(/macro/workflow), the working directory is automatically inspected for a save-file and the node will try to reload itself if one is found. (This is an alpha-feature, so it is currently only possible to save entire graphs at once and not individual nodes within a graph, all the child nodes in a saved graph must have been instantiated by `Workflow.create` (or equivalent, i.e. their code lives in a `.py` file that has been registered), and there are no safety rails to protect you from changing the node source code between saving and loading (which may cause errors/inconsistencies depending on the nature of the changes).) 
## Example From 7f2df33eb824518c79e2a3288927d06af33bc260 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Sat, 20 Jan 2024 16:16:29 -0800 Subject: [PATCH 057/166] Update deepdive --- notebooks/deepdive.ipynb | 349 +++++++++++++++++++++++++-------------- 1 file changed, 222 insertions(+), 127 deletions(-) diff --git a/notebooks/deepdive.ipynb b/notebooks/deepdive.ipynb index b913fd02..488acdbc 100644 --- a/notebooks/deepdive.ipynb +++ b/notebooks/deepdive.ipynb @@ -989,7 +989,7 @@ }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAmQklEQVR4nO3df1Dd1Z3/8dflItzownVJCtyYlGKqaZCtCgwpxGymajDRoZOd7ZhuNj90486Sthsjq9Ok2RFxnGHstra1BjRtomvzY7Ot2jWzKZWZrZEYu2wSslMku+kathBzkSWsF9RCmsv5/sEXJncB5XO595774/mY+fxxD+eTz/vMTbivfM7nnOsyxhgBAABYkma7AAAAkNoIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsSrddwEyMjo7qwoULysrKksvlsl0OAACYAWOMhoaGNH/+fKWlTX//IyHCyIULF7Rw4ULbZQAAgDD09PRowYIF0/48IcJIVlaWpLHBZGdnW64GAADMxODgoBYuXDjxOT6dhAgj41Mz2dnZhBEAABLMJz1iwQOsAADAKsIIAACwijACAACsIowAAACrCCMAAMAqx2HkjTfeUHV1tebPny+Xy6Wf/exnn3jO0aNHVVpaKo/Ho+uvv17PPvtsOLUCAIAk5DiMfPjhh7r55pv1zDPPzKh/V1eX7r77bi1fvlzt7e365je/qa1bt+qll15yXCwAAEg+jvcZWb16tVavXj3j/s8++6w+/elP63vf+54kacmSJTpx4oS+/e1v60//9E+dXh4AACSZqG969tZbb6mqqiqk7a677tKePXv0+9//XlddddWkc0ZGRjQyMjLxenBwMNplAgCQcoKjRm1dA+obGlZulkflhTlyp8X+O+CiHkZ6e3uVl5cX0paXl6fLly+rv79fPp9v0jkNDQ2qr6+PdmkAAKSs5g6/6g93yh8YnmjzeT2qqy7SquLJn83RFJPVNP93G1hjzJTt43bs2KFAIDBx9PT0RL1GAABSRXOHX1v2nQoJIpLUGxjWln2n1Nzhj2k9Ub8zkp+fr97e3pC2vr4+paena+7cuVOek5mZqczMzGiXBgBAygmOGtUf7pSZ4mdGkktS/eFOrSzKj9mUTdTvjFRUVKilpSWk7bXXXlNZWdmUz4sAAIDoaesamHRH5EpGkj8wrLaugZjV5DiMfPDBBzp9+rROnz4taWzp7unTp9Xd3S1pbIpl48aNE/1ramr029/+VrW1tTpz5oz27t2rPXv26OGHH47MCAAAwIz1DU0fRMLpFwmOp2lOnDihL37xixOva2trJUmbNm3SCy+8IL/fPxFMJKmwsFBHjhzRQw89pF27dmn+/Pl6+umnWdYLAIAFuVmeiPaLBJcZf5o0jg0ODsrr9SoQCCg7O9t2OQAAJKzgqNFtT/6LegPDUz434pKU7/Xo2Ddun/UzIzP9/Oa7aQAASC
HuNJfqqoskjQWPK42/rqsuiul+I4QRAABSzKpin5rWlyjfGzoVk+/1qGl9Scz3GYn60l4AABB/VhX7tLIoPzV2YAUAAPHJneZSxaKp9/yKJaZpAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWJVuuwBETnDUqK1rQH1Dw8rN8qi8MEfuNJftsgAA+FiEkSTR3OFX/eFO+QPDE20+r0d11UVaVeyzWBkAAB+PaZok0Nzh15Z9p0KCiCT1Boa1Zd8pNXf4LVUGAMAnI4wkuOCoUf3hTpkpfjbeVn+4U8HRqXoAAGAfYSTBtXUNTLojciUjyR8YVlvXQOyKAgDAAcJIgusbmj6IhNMPAIBY4wHWBJeb5YloPwDJj5V3iDeEkQRXXpgjn9ej3sDwlM+NuCTle8d+2QAAK+8Qj5imSXDuNJfqqoskjQWPK42/rqsu4n89AFh5h7hFGEkCq4p9alpfonxv6FRMvtejpvUl/G8HACvvENeYpkkSq4p9WlmUzzwwgCk5WXlXsWhu7AoDRBhJKu40F79EAEyJlXeIZ0zTAEAKYOUd4hlhBABSwPjKu+kmbl0aW1XDyjvYQBgBgBTAyjvEM8IIAKQIVt4hXvEAKwCkEFbeIR4RRgAgxbDyDvGGaRoAAGAVYQQAAFgVVhhpbGxUYWGhPB6PSktL1dra+rH99+/fr5tvvllXX321fD6f7r//fl28eDGsggEAQHJxHEYOHTqkbdu2aefOnWpvb9fy5cu1evVqdXd3T9n/2LFj2rhxozZv3qy3335bP/nJT/Rv//ZveuCBB2ZdPAAASHyOw8hTTz2lzZs364EHHtCSJUv0ve99TwsXLlRTU9OU/X/1q1/pM5/5jLZu3arCwkLddttt+qu/+iudOHFi1sUDAIDE5yiMXLp0SSdPnlRVVVVIe1VVlY4fPz7lOZWVlTp//ryOHDkiY4zee+89/fSnP9U999wz7XVGRkY0ODgYcgAAgOTkKIz09/crGAwqLy8vpD0vL0+9vb1TnlNZWan9+/dr7dq1ysjIUH5+vq699lr94Ac/mPY6DQ0N8nq9E8fChQudlAkAABJIWA+wulyhm+MYYya1jevs7NTWrVv16KOP6uTJk2publZXV5dqamqm/fN37NihQCAwcfT09IRTJgAASACONj2bN2+e3G73pLsgfX19k+6WjGtoaNCyZcv0yCOPSJI+//nP65prrtHy5cv1xBNPyOebvP1wZmamMjMznZQGAAASlKM7IxkZGSotLVVLS0tIe0tLiyorK6c856OPPlJaWuhl3G63pLE7KgAAILU5nqapra3Vj370I+3du1dnzpzRQw89pO7u7olplx07dmjjxo0T/aurq/Xyyy+rqalJ586d05tvvqmtW7eqvLxc8+fPj9xIAABAQnL83TRr167VxYsX9fjjj8vv96u4uFhHjhxRQUGBJMnv94fsOXLfffdpaGhIzzzzjP7mb/5G1157rW6//XY9+eSTkRsFAABIWC6TAHMlg4OD8nq9CgQCys7Otl0OAACYgZl+fvPdNAAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMCqdNsFJKvgqFFb14D6hoaVm+VReWGO3Gku22UBABB3CCNR0NzhV/3hTvkDwxNtPq9HddVFWl
Xss1gZAADxh2maCGvu8GvLvlMhQUSSegPD2rLvlJo7/JYqAwAgPhFGIig4alR/uFNmip+Nt9Uf7lRwdKoeAACkJsJIBLV1DUy6I3IlI8kfGFZb10DsigIAIM4RRiKob2j6IBJOPwAAUgFhJIJyszwR7QcAQCogjERQeWGOfF6PplvA69LYqprywpxYlgUAQFwjjESQO82luuoiSZoUSMZf11UXsd8IAABXIIxE2Kpin5rWlyjfGzoVk+/1qGl9CfuMAADwf7DpWRSsKvZpZVE+O7ACADADhJEocae5VLForu0yAACIe0zTAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwKqww0tjYqMLCQnk8HpWWlqq1tfVj+4+MjGjnzp0qKChQZmamFi1apL1794ZVMAAASC7pTk84dOiQtm3bpsbGRi1btkzPPfecVq9erc7OTn3605+e8px7771X7733nvbs2aPPfvaz6uvr0+XLl2ddPAAASHwuY4xxcsLSpUtVUlKipqamibYlS5ZozZo1amhomNS/ublZX/nKV3Tu3Dnl5OSEVeTg4KC8Xq8CgYCys7PD+jMAAEBszfTz29E0zaVLl3Ty5ElVVVWFtFdVVen48eNTnvPqq6+qrKxM3/rWt3Tdddfpxhtv1MMPP6zf/e53015nZGREg4ODIQcAAEhOjqZp+vv7FQwGlZeXF9Kel5en3t7eKc85d+6cjh07Jo/Ho1deeUX9/f366le/qoGBgWmfG2loaFB9fb2T0gAAQIIK6wFWl8sV8toYM6lt3OjoqFwul/bv36/y8nLdfffdeuqpp/TCCy9Me3dkx44dCgQCE0dPT084ZQIAgATg6M7IvHnz5Ha7J90F6evrm3S3ZJzP59N1110nr9c70bZkyRIZY3T+/HndcMMNk87JzMxUZmamk9IAAECCcnRnJCMjQ6WlpWppaQlpb2lpUWVl5ZTnLFu2TBcuXNAHH3ww0Xb27FmlpaVpwYIFYZQMAACSieNpmtraWv3oRz/S3r17debMGT300EPq7u5WTU2NpLEplo0bN070X7dunebOnav7779fnZ2deuONN/TII4/oL/7iLzRnzpzIjQQAACQkx/uMrF27VhcvXtTjjz8uv9+v4uJiHTlyRAUFBZIkv9+v7u7uif5/8Ad/oJaWFv31X/+1ysrKNHfuXN1777164oknIjcKAACQsBzvM2ID+4wAAJB4orLPCAAAQKQRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWJVuuwBgKsFRo7auAfUNDSs3y6Pywhy501y2ywIARAFhBHGnucOv+sOd8geGJ9p8Xo/qqou0qthnsTIAQDQwTYO40tzh15Z9p0KCiCT1Boa1Zd8pNXf4LVUGAIgWwgjiRnDUqP5wp8wUPxtvqz/cqeDoVD0AAImKMIK40dY1MOmOyJWMJH9gWG1dA7ErCgAQdYQRxI2+oemDSDj9AACJgTCCuJGb5YloPwBAYiCMIG6UF+bI5/VougW8Lo2tqikvzIllWQCAKCOMIG6401yqqy6SpEmBZPx1XXUR+40AQJIhjCCurCr2qWl9ifK9oVMx+V6PmtaXsM8IACQhNj1D3FlV7NPKonx2YAWAFEEYQVxyp7lUsWiu7TIAADHANA0AALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAK
wijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAq8IKI42NjSosLJTH41FpaalaW1tndN6bb76p9PR03XLLLeFcFgAAJCHHYeTQoUPatm2bdu7cqfb2di1fvlyrV69Wd3f3x54XCAS0ceNG3XHHHWEXCwAAko/LGGOcnLB06VKVlJSoqalpom3JkiVas2aNGhoapj3vK1/5im644Qa53W797Gc/0+nTp2d8zcHBQXm9XgUCAWVnZzspFwAAWDLTz29Hd0YuXbqkkydPqqqqKqS9qqpKx48fn/a8559/Xu+8847q6upmdJ2RkRENDg6GHAAAIDk5CiP9/f0KBoPKy8sLac/Ly1Nvb++U5/zmN7/R9u3btX//fqWnp8/oOg0NDfJ6vRPHwoULnZQJAAASSFgPsLpcrpDXxphJbZIUDAa1bt061dfX68Ybb5zxn79jxw4FAoGJo6enJ5wyAQBAApjZrYr/b968eXK73ZPugvT19U26WyJJQ0NDOnHihNrb2/X1r39dkjQ6OipjjNLT0/Xaa6/p9ttvn3ReZmamMjMznZQGAAASlKM7IxkZGSotLVVLS0tIe0tLiyorKyf1z87O1q9//WudPn164qipqdHixYt1+vRpLV26dHbVAwCAhOfozogk1dbWasOGDSorK1NFRYV2796t7u5u1dTUSBqbYnn33Xf14osvKi0tTcXFxSHn5+bmyuPxTGoHAACpyXEYWbt2rS5evKjHH39cfr9fxcXFOnLkiAoKCiRJfr//E/ccAQAAGOd4nxEb2GcEAIDEE5V9RgAAACKNMAIAAKwijAAAAKsIIwAAwCrCCAAAsMrx0t5kERw1ausaUN/QsHKzPCovzJE7bfKW9gAAILpSMow0d/hVf7hT/sDwRJvP61FddZFWFfssVgYAQOpJuWma5g6/tuw7FRJEJKk3MKwt+06pucNvqTIAAFJTSoWR4KhR/eFOTbXL23hb/eFOBUfjfh84AACSRkqFkbaugUl3RK5kJPkDw2rrGohdUQAApLiUCiN9Q9MHkXD6AQCA2UupMJKb5YloPwAAMHspFUbKC3Pk83o03QJel8ZW1ZQX5sSyLAAAUlpKhRF3mkt11UWSNCmQjL+uqy5ivxEAAGIopcKIJK0q9qlpfYnyvaFTMflej5rWl7DPCAAAMZaSm56tKvZpZVE+O7ACABAHUjKMSGNTNhWL5touAwCAlJdy0zQAACC+EEYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFXptgsAAERWcNSorWtAfUPDys3yqLwwR+40l+2ygGkRRgAgiTR3+FV/uFP+wPBEm8/rUV11kVYV+yxWBkyPaRoASBLNHX5t2XcqJIhIUm9gWFv2nVJzh99SZcDHI4wAQBIIjhrVH+6UmeJn4231hzsVHJ2qB2AXYQQAkkBb18CkOyJXMpL8gWG1dQ3ErihghggjAJAE+oamDyLh9ANiiTACAEkgN8sT0X5ALBFGACAJlBfmyOf1aLoFvC6NraopL8yJZVnAjBBGACAJuNNcqqsukqRJgWT8dV11EfuNIC4RRgAgSawq9qlpfYnyvaFTMflej5rWl7DPCOIWm54BQBJZVezTyqJ8dmBFQiGMAECScae5VLForu0ygBkLa5qmsbFRhYWF8ng8Ki0tVWtr67R9X375Za1cuVKf+tSnlJ2drYqKCv3iF78Iu2AAAJBcHIeRQ4cOadu2bdq5c6
fa29u1fPlyrV69Wt3d3VP2f+ONN7Ry5UodOXJEJ0+e1Be/+EVVV1ervb191sUDAIDE5zLGONobeOnSpSopKVFTU9NE25IlS7RmzRo1NDTM6M+46aabtHbtWj366KMz6j84OCiv16tAIKDs7Gwn5QIAAEtm+vnt6M7IpUuXdPLkSVVVVYW0V1VV6fjx4zP6M0ZHRzU0NKScHNa6AwAAhw+w9vf3KxgMKi8vL6Q9Ly9Pvb29M/ozvvOd7+jDDz/UvffeO22fkZERjYyMTLweHBx0UiYAAEggYT3A6nKFLhEzxkxqm8rBgwf12GOP6dChQ8rNzZ22X0NDg7xe78SxcOHCcMoEAAAJwFEYmTdvntxu96S7IH19fZPulvxfhw4d0ubNm/WP//iPuvPOOz+2744dOxQIBCaOnp4eJ2UCAIAE4iiMZGRkqLS0VC0tLSHtLS0tqqysnPa8gwcP6r777tOBAwd0zz33fOJ1MjMzlZ2dHXIAAIDk5HjTs9raWm3YsEFlZWWqqKjQ7t271d3drZqaGkljdzXeffddvfjii5LGgsjGjRv1/e9/X1/4whcm7qrMmTNHXq83gkMBAACJyHEYWbt2rS5evKjHH39cfr9fxcXFOnLkiAoKCiRJfr8/ZM+R5557TpcvX9bXvvY1fe1rX5to37Rpk1544YXZjwAAACQ0x/uM2MA+IwAAJJ6o7DMCAAAQaYQRAABgFWEEAABYRRgBAABWEUYAAIBVjpf2AgCA2AuOGrV1DahvaFi5WR6VF+bInfbJX8WSCAgjAADEueYOv+oPd8ofGJ5o83k9qqsu0qpin8XKIoNpGgAA4lhzh19b9p0KCSKS1BsY1pZ9p9Tc4bdUWeQQRgAAiFPBUaP6w52aanfS8bb6w50Kjsb9/qUfizACAECcausamHRH5EpGkj8wrLaugdgVFQWEEQAA4lTf0PRBJJx+8YowAgBAnMrN8kS0X7wijAAAEKfKC3Pk83o03QJel8ZW1ZQX5sSyrIgjjAAAEKfcaS7VVRdJ0qRAMv66rroo4fcbIYwAABDHVhX71LS+RPne0KmYfK9HTetLkmKfETY9AwAgzq0q9mllUT47sAIAAHvcaS5VLJpru4yoYJoGAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgVbrtAoBYCI4atXUNqG9oWLlZHpUX5sid5rJdFgBAhBGkgOYOv+oPd8ofGJ5o83k9qqsu0qpin8XKAAAS0zRIcs0dfm3ZdyokiEhSb2BYW/adUnOH31JlAIBxhBEkreCoUf3hTpkpfjbeVn+4U8HRqXoAAGKFMIKk1dY1MOmOyJWMJH9gWG1dA7ErCgAwCWEESatvaPogEk4/AEB08AArklZuliei/RD/WDUFJCbCCJJWeWGOfF6PegPDUz434pKU7x37wELiY9UUkLiYpkHScqe5VFddJGkseFxp/HVddRH/c04CrJoCEhthBEltVbFPTetLlO8NnYrJ93rUtL6E/zEnAVZNAYmPaRokvVXFPq0syudZgiTlZNVUxaK5sSsMwIwRRpAS3GkuPoiSFKumgMTHNA2AhMaqKSDxEUYAJLTxVVPTTbq5NLaqhlVTQPwijABIaKyaAhIfYQRAwmPVFJDYeIAVQFJg1RSQuAgjAJIGq6aAxMQ0DQAAsCqsMNLY2KjCwkJ5PB6VlpaqtbX1Y/sfPXpUpaWl8ng8uv766/Xss8+GVSwAAEg+jsPIoUOHtG3bNu3cuVPt7e1avny5Vq9ere7u7in7d3V16e6779by5cvV3t6ub37zm9q6dateeumlWRcPAAASn8sY4+gLG5YuXaqSkhI1NTVNtC1ZskRr1qxRQ0PDpP7f+M
Y39Oqrr+rMmTMTbTU1Nfr3f/93vfXWWzO65uDgoLxerwKBgLKzs52UCwAALJnp57ejOyOXLl3SyZMnVVVVFdJeVVWl48ePT3nOW2+9Nan/XXfdpRMnTuj3v//9lOeMjIxocHAw5AAAAMnJURjp7+9XMBhUXl5eSHteXp56e3unPKe3t3fK/pcvX1Z/f/+U5zQ0NMjr9U4cCxcudFImAABIIGE9wOpyha7bN8ZMavuk/lO1j9uxY4cCgcDE0dPTE06ZAAAgATjaZ2TevHlyu92T7oL09fVNuvsxLj8/f8r+6enpmjt36v0AMjMzlZmZ6aQ0AACQoBzdGcnIyFBpaalaWlpC2ltaWlRZWTnlORUVFZP6v/baayorK9NVV13lsFwAAJBsHO/AWltbqw0bNqisrEwVFRXavXu3uru7VVNTI2lsiuXdd9/Viy++KGls5cwzzzyj2tpa/eVf/qXeeust7dmzRwcPHpzxNcendXiQFQCAxDH+uf2JC3dNGHbt2mUKCgpMRkaGKSkpMUePHp342aZNm8yKFStC+r/++uvm1ltvNRkZGeYzn/mMaWpqcnS9np4eI4mDg4ODg4MjAY+enp6P/Zx3vM+IDaOjo7pw4YKysrI+9kHZRDE4OKiFCxeqp6cnpfZNScVxp+KYpdQcdyqOWWLcqTTucMZsjNHQ0JDmz5+vtLTpnwxJiC/KS0tL04IFC2yXEXHZ2dkp85f4Sqk47lQcs5Sa407FMUuMO5U4HbPX6/3EPnxRHgAAsIowAgAArCKMWJCZmam6urqU20slFcedimOWUnPcqThmiXGn0rijOeaEeIAVAAAkL+6MAAAAqwgjAADAKsIIAACwijACAACsIoxESWNjowoLC+XxeFRaWqrW1tZp+/r9fq1bt06LFy9WWlqatm3bFrtCI8zJuF9++WWtXLlSn/rUp5Sdna2Kigr94he/iGG1keFkzMeOHdOyZcs0d+5czZkzR5/73Of03e9+N4bVRo6TcV/pzTffVHp6um655ZboFhgFTsb8+uuvy+VyTTr+4z/+I4YVR4bT93pkZEQ7d+5UQUGBMjMztWjRIu3duzdG1UaGkzHfd999U77XN910Uwwrjgyn7/X+/ft188036+qrr5bP59P999+vixcvOr+woy+JwYz8wz/8g7nqqqvMD3/4Q9PZ2WkefPBBc80115jf/va3U/bv6uoyW7duNX//939vbrnlFvPggw/GtuAIcTruBx980Dz55JOmra3NnD171uzYscNcddVV5tSpUzGuPHxOx3zq1Clz4MAB09HRYbq6usyPf/xjc/XVV5vnnnsuxpXPjtNxj3v//ffN9ddfb6qqqszNN98cm2IjxOmYf/nLXxpJ5j//8z+N3++fOC5fvhzjymcnnPf6S1/6klm6dKlpaWkxXV1d5l//9V/Nm2++GcOqZ8fpmN9///2Q97inp8fk5OSYurq62BY+S07H3draatLS0sz3v/99c+7cOdPa2mpuuukms2bNGsfXJoxEQXl5uampqQlp+9znPme2b9/+ieeuWLEiYcPIbMY9rqioyNTX10e6tKiJxJj/5E/+xKxfvz7SpUVVuONeu3at+du//VtTV1eXcGHE6ZjHw8j//u//xqC66HE67p///OfG6/WaixcvxqK8qJjtv+tXXnnFuFwu89///d/RKC9qnI777/7u78z1118f0vb000+bBQsWOL420zQRdunSJZ08eVJVVVUh7VVVVTp+/LilqqIvEuMeHR3V0NCQcnJyolFixEVizO3t7Tp+/LhWrFgRjRKjItxxP//883rnnXdUV1cX7RIjbjbv9a233iqfz6c77rhDv/zlL6NZZsSFM+5XX31VZWVl+ta3vqXrrrtON954ox5++GH97ne/i0XJsxaJf9d79uzRnXfeqYKCgmiUGBXhjLuyslLnz5/XkSNHZIzRe++9p5/+9Ke65557HF8/Ib4oL5H09/crGAwqLy8vpD0vL0+9vb2Wqoq+SI
z7O9/5jj788EPde++90Sgx4mYz5gULFuh//ud/dPnyZT322GN64IEHollqRIUz7t/85jfavn27WltblZ6eeL92whmzz+fT7t27VVpaqpGREf34xz/WHXfcoddff11//Md/HIuyZy2ccZ87d07Hjh2Tx+PRK6+8ov7+fn31q1/VwMBAQjw3MtvfZX6/Xz//+c914MCBaJUYFeGMu7KyUvv379fatWs1PDysy5cv60tf+pJ+8IMfOL5+4v1WSBAulyvktTFmUlsyCnfcBw8e1GOPPaZ/+qd/Um5ubrTKi4pwxtza2qoPPvhAv/rVr7R9+3Z99rOf1Z/92Z9Fs8yIm+m4g8Gg1q1bp/r6et14442xKi8qnLzXixcv1uLFiydeV1RUqKenR9/+9rcTJoyMczLu0dFRuVwu7d+/f+LbWp966il9+ctf1q5duzRnzpyo1xsJ4f4ue+GFF3TttddqzZo1UaosupyMu7OzU1u3btWjjz6qu+66S36/X4888ohqamq0Z88eR9cljETYvHnz5Ha7JyXJvr6+SYkzmcxm3IcOHdLmzZv1k5/8RHfeeWc0y4yo2Yy5sLBQkvRHf/RHeu+99/TYY48lTBhxOu6hoSGdOHFC7e3t+vrXvy5p7APLGKP09HS99tpruv3222NSe7gi9e/6C1/4gvbt2xfp8qImnHH7fD5dd911IV8bv2TJEhljdP78ed1www1RrXm2ZvNeG2O0d+9ebdiwQRkZGdEsM+LCGXdDQ4OWLVumRx55RJL0+c9/Xtdcc42WL1+uJ554Qj6fb8bX55mRCMvIyFBpaalaWlpC2ltaWlRZWWmpqugLd9wHDx7UfffdpwMHDoQ1z2hTpN5rY4xGRkYiXV7UOB13dna2fv3rX+v06dMTR01NjRYvXqzTp09r6dKlsSo9bJF6r9vb2x39grYtnHEvW7ZMFy5c0AcffDDRdvbsWaWlpWnBggVRrTcSZvNeHz16VP/1X/+lzZs3R7PEqAhn3B999JHS0kJjhNvtljT2e80Rx4+84hONL4/as2eP6ezsNNu2bTPXXHPNxJPV27dvNxs2bAg5p7293bS3t5vS0lKzbt06097ebt5++20b5YfN6bgPHDhg0tPTza5du0KWxb3//vu2huCY0zE/88wz5tVXXzVnz541Z8+eNXv37jXZ2dlm586dtoYQlnD+jl8pEVfTOB3zd7/7XfPKK6+Ys2fPmo6ODrN9+3Yjybz00ku2hhAWp+MeGhoyCxYsMF/+8pfN22+/bY4ePWpuuOEG88ADD9gagmPh/v1ev369Wbp0aazLjRin437++edNenq6aWxsNO+88445duyYKSsrM+Xl5Y6vTRiJkl27dpmCggKTkZFhSkpKzNGjRyd+tmnTJrNixYqQ/pImHQUFBbEtOgKcjHvFihVTjnvTpk2xL3wWnIz56aefNjfddJO5+uqrTXZ2trn11ltNY2OjCQaDFiqfHad/x6+UiGHEGGdjfvLJJ82iRYuMx+Mxf/iHf2huu+0288///M8Wqp49p+/1mTNnzJ133mnmzJljFixYYGpra81HH30U46pnx+mY33//fTNnzhyze/fuGFcaWU7H/fTTT5uioiIzZ84c4/P5zJ//+Z+b8+fPO76uyxin91IAAAAih2dGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVv0/s+IfGSmJreoAAAAASUVORK5CYII=", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAh+UlEQVR4nO3df1Bc1d3H8c+yBDam2XVIDKwJjSQ1GmTUAkOENOPUx2Cig02nTvCxSdTGjkRtTKi2SdMRyTjDaEen/gj4K9FxElOq1VZmKIZ/GsmPloaQjpHM6BhaErPIAOOCPyAGzvNHCo8rS8LdwJ7s5v2a2T84nMt+994w95Nzzj24jDFGAAAAliTYLgAAAFzYCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArEq0XcBYDA4O6sSJE5o6dapcLpftcgAAwBgYY9Tb26tLL71UCQmjj3/ERBg5ceKE0tPTbZcBAAAicOzYMc2aNWvU78dEGJk6daqk0x/G6/VargYAAIxFT0+P0tPTh+/jo4mJMDI0NeP1egkjAADEmLMtsWABKwAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMCqmNj0DBgYNGps7VZHb59mTPUoLyNF7gT+ThEAxAPCyDniJjnx6g4HVF7TokCwb7jN7/OorChTS7L8FisDAIwHwsg54CY58eoOB7Rm+0GZb7W3B/u0ZvtBVa3I5lwDQIxjzUiEhm6S3wwi0v/fJOsOByxVFj8GBo3Ka1pGBBFJw23lNS0aGAzXAwAQKwgjEeAmGR2Nrd0jwt43GUmBYJ8aW7ujVxQAYNwRRiLATTI6OnpHP8eR9AMAnJ8IIxHgJhkdM6Z6xrUfAOD8RBiJADfJ6MjLSJHf59Fozya5dHrBcF5GSjTLAgCMM8JIBLhJRoc7waWyokxJGnGuh74uK8rkUWoAiHGEkQhwk4yeJVl+Va3IVpovdJQpzefhsV4AiBMuY8x5/8hHT0+PfD6fgsGgvF6v7XKGsc9I9LC5HADEnrHevwkj54ibJAAA4Y31/s0OrOfIneBS/txptssAACBmsWYEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVRGFkcrKSmVkZMjj8SgnJ0cNDQ1n7L9jxw5dc801uuiii+T3+3X33Xerq6srooIBAEB8cRxGqqurtW7dOm3atEnNzc1atGiRli5dqra2trD99+zZo1WrVmn16tX64IMP9MYbb+if//yn7rnnnnMuHgAAxD7HYeSpp57S6tWrdc8992j+/Pn6/e9/r/T0dFVVVYXt//e//12XXXaZ1q5dq4yMDP3gBz/QvffeqwMHDpxz8QAAIPY5CiMnT55UU1OTCgsLQ9oLCwu1b9++sMcUFBTo+PHjqq2tlTFGn376qd58803dcssto75Pf3+/enp6Ql4AACA+OQojnZ2dGhgYUGpqakh7amqq2tvbwx5TUFCgHTt2qLi4WElJSUpLS9PFF1+sZ599dtT3qaiokM/nG36lp6c7KRMAAMSQiBawulyukK+NMSPahrS0tGjt2rV65JFH1NTUpLq6OrW2tqqkpGTUn79x40YFg8Hh17FjxyIpEwAAxIBEJ52nT58ut9s9YhSko6NjxGjJkIqKCi1cuFAPP/ywJOnqq6/WlClTtGjRIj322GPy+/0jjklOTlZycrKT0gAAQIxyNDKSlJSknJwc1dfXh7TX19eroKAg7DFffvmlEhJC38b
tdks6PaICAAAubI6naUpLS/Xyyy9r27ZtOnLkiNavX6+2trbhaZeNGzdq1apVw/2Lior01ltvqaqqSkePHtXevXu1du1a5eXl6dJLLx2/TwIAAGKSo2kaSSouLlZXV5c2b96sQCCgrKws1dbWavbs2ZKkQCAQsufIXXfdpd7eXj333HP65S9/qYsvvlg33HCDHn/88fH7FAAAIGa5TAzMlfT09Mjn8ykYDMrr9douBwAAjMFY79/8bRoAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFgVURiprKxURkaGPB6PcnJy1NDQcMb+/f392rRpk2bPnq3k5GTNnTtX27Zti6hgAAAQXxKdHlBdXa1169apsrJSCxcu1AsvvKClS5eqpaVF3/3ud8Mes3z5cn366afaunWrvve976mjo0OnTp065+IBAEDscxljjJMDFixYoOzsbFVVVQ23zZ8/X8uWLVNFRcWI/nV1dbr99tt19OhRpaSkRFRkT0+PfD6fgsGgvF5vRD8DAABE11jv346maU6ePKmmpiYVFhaGtBcWFmrfvn1hj3nnnXeUm5urJ554QjNnztS8efP00EMP6auvvhr1ffr7+9XT0xPyAgAA8cnRNE1nZ6cGBgaUmpoa0p6amqr29vawxxw9elR79uyRx+PR22+/rc7OTt13333q7u4edd1IRUWFysvLnZQGAMC4Ghg0amztVkdvn2ZM9SgvI0XuBJftsuKS4zUjkuRyhV4MY8yItiGDg4NyuVzasWOHfD6fJOmpp57Sbbfdpi1btmjy5Mkjjtm4caNKS0uHv+7p6VF6enokpQIA4Fjd4YDKa1oUCPYNt/l9HpUVZWpJlt9iZfHJ0TTN9OnT5Xa7R4yCdHR0jBgtGeL3+zVz5szhICKdXmNijNHx48fDHpOcnCyv1xvyAgAgGuoOB7Rm+8GQICJJ7cE+rdl+UHWHA5Yqi1+OwkhSUpJycnJUX18f0l5fX6+CgoKwxyxcuFAnTpzQ559/Ptz24YcfKiEhQbNmzYqgZAAAJsbAoFF5TYvCPdkx1FZe06KBQUfPfuAsHO8zUlpaqpdfflnbtm3TkSNHtH79erW1tamkpETS6SmWVatWDfe/4447NG3aNN19991qaWnRe++9p4cfflg/+9nPwk7RAABgS2Nr94gRkW8ykgLBPjW2dkevqAuA4zUjxcXF6urq0ubNmxUIBJSVlaXa2lrNnj1bkhQIBNTW1jbc/zvf+Y7q6+v1i1/8Qrm5uZo2bZqWL1+uxx57bPw+BQAA46Cjd/QgEkk/jI3jfUZsYJ8RAEA07P+4S//70t/P2m/nz69T/txpUagotk3IPiMAAMSzvIwU+X0ejfYAr0unn6rJy4hsE0+ERxgBAOC/3AkulRVlStKIQDL0dVlRJvuNjDPCCAAA37Aky6+qFdlK83lC2tN8HlWtyGafkQkQ0aZnAADEsyVZfi3OTGMH1ighjAAAEIY7wcUi1ShhmgYAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAAB
WEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYFWi7QIAAIAdA4NGja3d6ujt04ypHuVlpMid4Ip6HYQRAAAuQHWHAyqvaVEg2Dfc5vd5VFaUqSVZ/qjWwjQNAAAXmLrDAa3ZfjAkiEhSe7BPa7YfVN3hQFTrIYwAAHABGRg0Kq9pkQnzvaG28poWDQyG6zExCCMAAFxAGlu7R4yIfJORFAj2qbG1O2o1EUYAALiAdPSOHkQi6TceCCMAAFxAZkz1jGu/8cDTNGNwvjz6BADAucrLSJHf51F7sC/suhGXpDTf6XtdtBBGzuJ8evQJAIBz5U5wqawoU2u2H5RLCgkkQ//NLivKjOp/upmmOYPz7dEnAADGw5Isv6pWZCvNFzoVk+bzqGpFdtT/s83IyCjO9uiTS6cffVqcmcaUDQAg5izJ8mtxZtp5sQyBMDIKJ48+5c+dFr3CAAAYJ+4E13lxD2OaZhTn46NPAADEI8LIKM7HR58AAIhHhJFRDD36NNrMmUunn6qJ5qNPAADEI8LIKIYefZI0IpDYevQJAIB4RBg5g/Pt0ScAAOIRT9Ocxfn06BMAAPGIMDIG58ujTwAAxCOmaQAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGBVRGGksrJSGRkZ8ng8ysnJUUNDw5iO27t3rxITE3XttddG8rYAACAOOQ4j1dXVWrdunTZt2qTm5mYtWrRIS5cuVVtb2xmPCwaDWrVqlf7nf/4n4mIBAED8cRljjJMDFixYoOzsbFVVVQ23zZ8/X8uWLVNFRcWox91+++26/PLL5Xa79ec//1mHDh0a83v29PTI5/MpGAzK6/U6KRcAAFgy1vu3o5GRkydPqqmpSYWFhSHthYWF2rdv36jHvfLKK/r4449VVlY2pvfp7+9XT09PyAsAAMQnR2Gks7NTAwMDSk1NDWlPTU1Ve3t72GM++ugjbdiwQTt27FBiYuKY3qeiokI+n2/4lZ6e7qRMAAAQQyJawOpyuUK+NsaMaJOkgYEB3XHHHSovL9e8efPG/PM3btyoYDA4/Dp27FgkZQIAgBgwtqGK/5o+fbrcbveIUZCOjo4RoyWS1NvbqwMHDqi5uVkPPPCAJGlwcFDGGCUmJmrXrl264YYbRhyXnJys5ORkJ6UBAIAY5WhkJCkpSTk5Oaqvrw9pr6+vV0FBwYj+Xq9X77//vg4dOjT8Kikp0RVXXKFDhw5pwYIF51Y9AACIeY5GRiSptLRUK1euVG5urvLz8/Xiiy+qra1NJSUlkk5PsXzyySd67bXXlJCQoKysrJDjZ8yYIY/HM6IdAABcmByHkeLiYnV1dWnz5s0KBALKyspSbW2tZs+eLUkKBAJn3XMEAABgiON9RmxgnxEAAGLPhOwzAgAAMN4IIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArEq0XUCsGhg0amztVkdvn2ZM9SgvI0XuBJftsgAAiDmEkQjUHQ6ovKZFgWDfcJvf51FZUaaWZPktVgYAQOxhmsahusMBrdl+MCSISFJ7sE9rth9U3eGApcoAAIhNhBEHBgaNymtaZMJ8b6itvKZFA4PhegAAgHAIIw40tnaPGBH5JiMpEOxTY2t39IoCACDGEUYc6OgdPYhE0g8AABBGHJkx1TOu/QAAAGHEkbyMFPl9Ho32AK9Lp5+qyctIiWZZAAD
ENMKIA+4El8qKMiVpRCAZ+rqsKJP9RgAAcIAw4tCSLL+qVmQrzRc6FZPm86hqRTb7jAAA4BCbnkVgSZZfizPT2IEVAIBxQBiJkDvBpfy502yXAQBAzGOaBgAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFgVURiprKxURkaGPB6PcnJy1NDQMGrft956S4sXL9Yll1wir9er/Px8vfvuuxEXDAAA4ovjMFJdXa1169Zp06ZNam5u1qJFi7R06VK1tbWF7f/ee+9p8eLFqq2tVVNTk374wx+qqKhIzc3N51w8AACIfS5jjHFywIIFC5Sdna2qqqrhtvnz52vZsmWqqKgY08+46qqrVFxcrEceeWRM/Xt6euTz+RQMBuX1ep2UCwAALBnr/dvRyMjJkyfV1NSkwsLCkPbCwkLt27dvTD9jcHBQvb29SklJcfLWAAAgTiU66dzZ2amBgQGlpqaGtKempqq9vX1MP+PJJ5/UF198oeXLl4/ap7+/X/39/cNf9/T0OCkT42hg0KixtVsdvX2aMdWjvIwUuRNctssCAMQRR2FkiMsVejMyxoxoC2fnzp169NFH9Ze//EUzZswYtV9FRYXKy8sjKQ3jqO5wQOU1LQoE+4bb/D6PyooytSTLb7EyAEA8cTRNM336dLnd7hGjIB0dHSNGS76turpaq1ev1h//+EfdeOONZ+y7ceNGBYPB4dexY8eclIlxUHc4oDXbD4YEEUlqD/ZpzfaDqjscsFQZACDeOAojSUlJysnJUX19fUh7fX29CgoKRj1u586duuuuu/T666/rlltuOev7JCcny+v1hrwQPQODRuU1LQq3snmorbymRQODjtY+AwAQluNpmtLSUq1cuVK5ubnKz8/Xiy++qLa2NpWUlEg6ParxySef6LXXXpN0OoisWrVKTz/9tK677rrhUZXJkyfL5/ON40fBeGls7R4xIvJNRlIg2KfG1m7lz50WvcIAAHHJcRgpLi5WV1eXNm/erEAgoKysLNXW1mr27NmSpEAgELLnyAsvvKBTp07p/vvv1/333z/cfuedd+rVV18990+AcdfRO3oQiaQfAABn4nifERvYZyS69n/cpf996e9n7bfz59cxMgIAGNWE7DOCC0NeRor8Po9Gez7KpdNP1eRlsFcMAODcEUYwgjvBpbKiTEkaEUiGvi4rymS/EQDAuCCMIKwlWX5VrchWms8T0p7m86hqRTb7jAAAxk1Em57hwrAky6/FmWnswAoAmFCEEZyRO8HFIlUAwIRimgYAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWJdou4EI1MGjU2Nqtjt4+zZjqUV5GitwJLttlAQAQdYQRC+oOB1Re06JAsG+4ze/zqKwoU0uy/BYrAwAg+pimibK6wwGt2X4wJIhIUnuwT2u2H1Td4YClygAAsIMwEkUDg0blNS0yYb431FZe06KBwXA9AACIT4SRKGps7R4xIvJNRlIg2KfG1u7oFQUAgGWEkSjq6B09iETSDwCAeEAYiaIZUz3j2g8AgHhAGImivIwU+X0ejfYAr0unn6rJy0iJZlkAAFhFGIkid4JLZUWZkjQikAx9XVaUyX4jAIALCmEkypZk+VW1IltpvtCpmDSfR1UrstlnBABwwWHTMwuWZPm1ODONHVgBABBhxBp3gkv5c6fZLgMAAOuYpgEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRg
BAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYFWi7QIQewYGjRpbu9XR26cZUz3Ky0iRO8FluywAQIwijMCRusMBlde0KBDsG27z+zwqK8rUkiy/xcoAALGKaRqMWd3hgNZsPxgSRCSpPdinNdsPqu5wwFJlAIBYRhjBmAwMGpXXtMiE+d5QW3lNiwYGw/UAAGB0hBGMSWNr94gRkW8ykgLBPjW2dkevKABAXLhg14ywCNOZjt7Rg0gk/QAAGHJBhhEWYTo3Y6pnXPsBADDkgpumYRFmZPIyUuT3eTTa2JFLpwNdXkZKNMsCAMSBiMJIZWWlMjIy5PF4lJOTo4aGhjP23717t3JycuTxeDRnzhw9//zzERV7rliEGTl3gktlRZmSNCKQDH1dVpTJVBcAwDHHYaS6ulrr1q3Tpk2b1NzcrEWLFmnp0qVqa2sL27+1tVU333yzFi1apObmZv3mN7/R2rVr9ac//emci3eKRZjnZkmWX1UrspXmC52KSfN5VLUimykuAEBEXMYYR8MACxYsUHZ2tqqqqobb5s+fr2XLlqmiomJE/1//+td65513dOTIkeG2kpIS/etf/9L+/fvH9J49PT3y+XwKBoPyer1Oyg3xl0Of6ME/HDprv6dvv1Y/unZmxO8T71j8CwAYi7Hevx0tYD158qSampq0YcOGkPbCwkLt27cv7DH79+9XYWFhSNtNN92krVu36uuvv9akSZNGHNPf36/+/v6QDzMeWIQ5PtwJLuXPnWa7DABAnHA0TdPZ2amBgQGlpqaGtKempqq9vT3sMe3t7WH7nzp1Sp2dnWGPqaiokM/nG36lp6c7KXNULMIEAOD8E9ECVpcr9HZujBnRdrb+4dqHbNy4UcFgcPh17NixSMocgUWYAACcfxyFkenTp8vtdo8YBeno6Bgx+jEkLS0tbP/ExERNmxZ+qD85OVlerzfkNV5YhAkAwPnF0ZqRpKQk5eTkqL6+Xj/+8Y+H2+vr6/WjH/0o7DH5+fmqqakJadu1a5dyc3PDrheJhiVZfi3OTGMRJgAA5wHHO7CWlpZq5cqVys3NVX5+vl588UW1tbWppKRE0ukplk8++USvvfaapNNPzjz33HMqLS3Vz3/+c+3fv19bt27Vzp07x/eTOMQiTAAAzg+Ow0hxcbG6urq0efNmBQIBZWVlqba2VrNnz5YkBQKBkD1HMjIyVFtbq/Xr12vLli269NJL9cwzz+gnP/nJ+H0KAAAQsxzvM2LDeO0zAgAAomes9+8L7m/TAACA8wthBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWOd70zIahrVB6enosVwIAAMZq6L59ti3NYiKM9Pb2SpLS09MtVwIAAJzq7e2Vz+cb9fsxsQPr4OCgTpw4oalTp8rlGtsfs+vp6VF6erqOHTvGrq2WcA3s4vzbxzWwj2tglzFGvb29uvTSS5WQMPrKkJgYGUlISNCsWbMiOtbr9fIP0DKugV2cf/u4BvZxDew504jIEBawAgAAqwgjAADAqrgNI8nJySorK1NycrLtUi5YXAO7OP/2cQ3s4xrEhphYwAoAAOJX3I6MAACA2EAYAQAAVhFGAACAVYQRAABgVUyHkcrKSmVkZMjj8SgnJ0cNDQ1n7L97927l5OTI4/Fozpw5ev7556NUafxycg3eeustLV68WJdccom8Xq/y8/P17rvvRrHa+OP0d2DI3r17lZiYqGuvvXZiC7wAOL0G/f392rRpk2bPnq3k5GTNnTtX27Zti1K18cfp+d+xY4euueYaXXTRRfL7/br77rvV1dUVpWoxKhOj/vCHP5hJkyaZl156ybS0tJgHH3zQTJkyxfznP/8J2//o0aPmoosuMg8++KBpaWkxL730kpk0aZJ58803o1x5/HB6DR588EHz+OOPm8bGRvPhhx+ajRs3mkmTJpmDBw9GufL44PT8D/nss8/MnDlzTGFhobnmmmuiU2y
ciuQa3HrrrWbBggWmvr7etLa2mn/84x9m7969Uaw6fjg9/w0NDSYhIcE8/fTT5ujRo6ahocFcddVVZtmyZVGuHN8Ws2EkLy/PlJSUhLRdeeWVZsOGDWH7/+pXvzJXXnllSNu9995rrrvuugmrMd45vQbhZGZmmvLy8vEu7YIQ6fkvLi42v/3tb01ZWRlh5Bw5vQZ//etfjc/nM11dXdEoL+45Pf+/+93vzJw5c0LannnmGTNr1qwJqxFjE5PTNCdPnlRTU5MKCwtD2gsLC7Vv376wx+zfv39E/5tuukkHDhzQ119/PWG1xqtIrsG3DQ4Oqre3VykpKRNRYlyL9Py/8sor+vjjj1VWVjbRJca9SK7BO++8o9zcXD3xxBOaOXOm5s2bp4ceekhfffVVNEqOK5Gc/4KCAh0/fly1tbUyxujTTz/Vm2++qVtuuSUaJeMMYuIP5X1bZ2enBgYGlJqaGtKempqq9vb2sMe0t7eH7X/q1Cl1dnbK7/dPWL3xKJJr8G1PPvmkvvjiCy1fvnwiSoxrkZz/jz76SBs2bFBDQ4MSE2PyV/+8Esk1OHr0qPbs2SOPx6O3335bnZ2duu+++9Td3c26EYciOf8FBQXasWOHiouL1dfXp1OnTunWW2/Vs88+G42ScQYxOTIyxOVyhXxtjBnRdrb+4doxdk6vwZCdO3fq0UcfVXV1tWbMmDFR5cW9sZ7/gYEB3XHHHSovL9e8efOiVd4FwcnvwODgoFwul3bs2KG8vDzdfPPNeuqpp/Tqq68yOhIhJ+e/paVFa9eu1SOPPKKmpibV1dWptbVVJSUl0SgVZxCT/z2aPn263G73iPTb0dExIiUPSUtLC9s/MTFR06ZNm7Ba41Uk12BIdXW1Vq9erTfeeEM33njjRJYZt5ye/97eXh04cEDNzc164IEHJJ2+MRpjlJiYqF27dumGG26ISu3xIpLfAb/fr5kzZ4b8SfX58+fLGKPjx4/r8ssvn9Ca40kk57+iokILFy7Uww8/LEm6+uqrNWXKFC1atEiPPfYYI+QWxeTISFJSknJyclRfXx/SXl9fr4KCgrDH5Ofnj+i/a9cu5ebmatKkSRNWa7yK5BpIp0dE7rrrLr3++uvM054Dp+ff6/Xq/fff16FDh4ZfJSUluuKKK3To0CEtWLAgWqXHjUh+BxYuXKgTJ07o888/H2778MMPlZCQoFmzZk1ovfEmkvP/5ZdfKiEh9Lbndrsl/f9IOSyxtXL2XA090rV161bT0tJi1q1bZ6ZMmWL+/e9/G2OM2bBhg1m5cuVw/6FHe9evX29aWlrM1q1bebT3HDm9Bq+//rpJTEw0W7ZsMYFAYPj12Wef2foIMc3p+f82nqY5d06vQW9vr5k1a5a57bbbzAcffGB2795tLr/8cnPPPffY+ggxzen5f+WVV0xiYqKprKw0H3/8sdmzZ4/Jzc01eXl5tj4C/itmw4gxxmzZssXMnj3bJCUlmezsbLN79+7h7915553m+uuvD+n/t7/9zXz/+983SUlJ5rLLLjNVVVVRrjj+OLkG119/vZE04nXnnXdGv/A44fR34JsII+PD6TU4cuSIufHGG83kyZPNrFmzTGlpqfnyyy+jXHX8cHr+n3nmGZOZmWkmT55s/H6/+elPf2qOHz8e5arxbS5jGJsCAAD2xOSaEQAAED8IIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKz6PygrOcUH0mZqAAAAAElFTkSuQmCC", "text/plain": [ "
" ] @@ -1249,7 +1249,7 @@ "\n", "\n", - "\n", "\n", "\n", "\n", "clustersimpleInputsax->clustersimpleaInputsx\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1432,9 +1432,9 @@ "\n", "\n", "clustersimpleInputsb__x->clustersimplebInputsx\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1478,7 +1478,7 @@ "clustersimpleaOutputsran->clustersimplesumInputsaccumulate_and_run\n", "\n", "\n", - "\n", + "\n", "\n", "\n", "\n", @@ -1489,9 +1489,9 @@ "\n", "\n", "clustersimpleaOutputsy->clustersimpleOutputsay\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1502,9 +1502,9 @@ "\n", "\n", "clustersimpleaOutputsy->clustersimplesumInputsx\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1528,9 +1528,9 @@ "\n", "\n", "clustersimplebOutputsran->clustersimplesumInputsaccumulate_and_run\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1547,9 +1547,9 @@ "\n", "\n", "clustersimplebOutputsy->clustersimplesumInputsy\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1573,15 +1573,15 @@ "\n", "\n", "clustersimplesumOutputssum->clustersimpleOutputsa + b + 2\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 41, @@ -1618,7 +1618,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "49b823d50af54026a277c866f96814fb", + "model_id": "5bd9e89a0aa74920939a5c6d01de2db7", "version_major": 2, "version_minor": 0 }, @@ -1645,7 +1645,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 42, @@ -1701,7 +1701,7 @@ "\n", "\n", - "\n", "\n", "clusterwith_prebuilt\n", "\n", "with_prebuilt: Workflow\n", - "\n", - "clusterwith_prebuiltOutputs\n", + "\n", + "clusterwith_prebuiltInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", - "Outputs\n", + "\n", + "Inputs\n", "\n", - "\n", - "clusterwith_prebuiltInputs\n", + "\n", + 
"clusterwith_prebuiltOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", - "Inputs\n", + "\n", + "Outputs\n", "\n", "\n", "\n", @@ -1905,7 +1905,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 43, @@ -2146,7 +2146,7 @@ "\n", "\n", - "\n", "\n", "\n", "\n", "clusterphase_preferenceInputselement->clusterphase_preferenceelementInputsuser_input\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2560,9 +2560,9 @@ "\n", "\n", "clusterphase_preferenceInputsphase1->clusterphase_preferencemin_phase1Inputscrystalstructure\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2579,9 +2579,9 @@ "\n", "\n", "clusterphase_preferenceInputslattice_guess1->clusterphase_preferencemin_phase1Inputslattice_guess\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2598,9 +2598,9 @@ "\n", "\n", "clusterphase_preferenceInputsphase2->clusterphase_preferencemin_phase2Inputscrystalstructure\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2617,9 +2617,9 @@ "\n", "\n", "clusterphase_preferenceInputslattice_guess2->clusterphase_preferencemin_phase2Inputslattice_guess\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2636,9 +2636,9 @@ "\n", "\n", "clusterphase_preferenceInputse1__item->clusterphase_preferencee1Inputsitem\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2655,9 +2655,9 @@ "\n", "\n", "clusterphase_preferenceInputse2__item->clusterphase_preferencee2Inputsitem\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2699,9 +2699,9 @@ "\n", "\n", "clusterphase_preferenceelementOutputsuser_input->clusterphase_preferencemin_phase1Inputselement\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2712,9 +2712,9 @@ "\n", "\n", "clusterphase_preferenceelementOutputsuser_input->clusterphase_preferencemin_phase2Inputselement\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", 
"\n", @@ -2750,9 +2750,9 @@ "\n", "\n", "clusterphase_preferencemin_phase1Outputsstructure->clusterphase_preferencen1Inputsobj\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2769,9 +2769,9 @@ "\n", "\n", "clusterphase_preferencemin_phase1Outputsenergy->clusterphase_preferencee1Inputsobj\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2807,9 +2807,9 @@ "\n", "\n", "clusterphase_preferencemin_phase2Outputsstructure->clusterphase_preferencen2Inputsobj\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2828,7 +2828,7 @@ "clusterphase_preferencemin_phase2Outputsenergy->clusterphase_preferencee2Inputsobj\n", "\n", "\n", - "\n", + "\n", "\n", "\n", "\n", @@ -2864,9 +2864,9 @@ "\n", "\n", "clusterphase_preferencee1Outputsgetitem->clusterphase_preferencee1__getitem_Divide_n1__lenInputsobj\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2902,9 +2902,9 @@ "\n", "\n", "clusterphase_preferencen1Outputslen->clusterphase_preferencee1__getitem_Divide_n1__lenInputsother\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2940,9 +2940,9 @@ "\n", "\n", "clusterphase_preferencee2Outputsgetitem->clusterphase_preferencee2__getitem_Divide_n2__lenInputsobj\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -2978,9 +2978,9 @@ "\n", "\n", "clusterphase_preferencen2Outputslen->clusterphase_preferencee2__getitem_Divide_n2__lenInputsother\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -3016,9 +3016,9 @@ "\n", "\n", "clusterphase_preferencee2__getitem_Divide_n2__lenOutputstruediv->clusterphase_preferencecompareInputsobj\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -3054,9 +3054,9 @@ "\n", "\n", "clusterphase_preferencee1__getitem_Divide_n1__lenOutputstruediv->clusterphase_preferencecompareInputsother\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -3086,15 +3086,15 @@ "\n", 
"\n", "clusterphase_preferencecompareOutputssub->clusterphase_preferenceOutputscompare__sub\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 49, @@ -3377,7 +3377,7 @@ "output_type": "stream", "text": [ "None 1\n", - " \n" + " \n" ] } ], @@ -3459,7 +3459,7 @@ "output_type": "stream", "text": [ "None 1\n", - " \n", + " \n", "Finally 5\n", "b (Add) output single-value: 6\n" ] @@ -3521,7 +3521,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "6.019378479017178\n" + "6.015147686994169\n" ] } ], @@ -3553,7 +3553,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "3.3633984430052806\n" + "2.5117498940089718\n" ] } ], @@ -3612,6 +3612,111 @@ "While everything in the workflows sub-module is under development, the following complex features are _even more likely_ to see substantial modifications to their interface and behaviour. Nonetheless, they're fun so let's look at them." ] }, + { + "cell_type": "markdown", + "id": "6fe95ca3-95c4-4c82-9a45-c38275dabb22", + "metadata": {}, + "source": [ + "## Saving and loading\n", + "\n", + "Graphs can be saved and loaded on request -- either by manually invoking the `.save()` method, or by setting the `save_after_run` attribute to `True` (on the object or at instantiation by kwarg). This creates a save file (currently using HDF5 as a backend) in the parent-most node's working directory.\n", + "\n", + "Subsequently instantiating a node with the same name in the same place will attempt to reload the saved graph automatically. \n", + "\n", + "Since data IO is also saved, all IO must be serializable. 
As a fallback, we attempt to store data IO values using pickle, so any pickle-able object should be fine.\n", + "\n", + "As an alpha feature, this functionality comes with the following caveats:\n", + "- Before saving, any child nodes in the graph must have been created via the `Workflow.create` creator -- right now that means they must live in their own `.py` file which has been subject to `.register(...)`, so not a terribly high bar, but it means any nodes defined in-notebook need to be moved over to a file in the python path.\n", + "- It is not yet possible to save just one node in a composite graph, the entire graph gets saved at once regardless of which node calls `.save()`. This may negatively impact performance depending on how much data is being saved (size of graph/size of data IO objects), and how frequently saving is done.\n", + "- There are no safety rails to check whether node source code has changed between save and load time; i.e. if the code defining a particular node has changed, it may crash on load or the saved data may silently fail to reflect the new behaviour of the node.\n", + " - Related, if a `Macro` instance is modified (e.g. by changing internal connections, or `.replace`-ing child nodes, this will be lost on load; the loaded macro will silently re-instantiate its _original_ children and connections reflected in it's `graph_creator`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 62, + "id": "ffd741a3-b086-4ed0-9a62-76143a3705b2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'out__user_input': 42}" + ] + }, + "execution_count": 62, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "wf = Workflow(\"save_demo\")\n", + "wf.inp = wf.create.standard.UserInput(42)\n", + "wf.middle = 2 * wf.inp\n", + "wf.end = wf.middle - 42\n", + "wf.out = wf.create.standard.UserInput(wf.end, save_after_run=True)\n", + "wf()\n", + "# wf.save() # Not needed, since `wf.out` saves after running" + ] + }, + { + "cell_type": "code", + "execution_count": 63, + "id": "3a22c622-f8c1-449b-a910-c52beb6a09c3", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:332: UserWarning: A saved file was found for the node save_demo -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", + " warnings.warn(\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:332: UserWarning: A saved file was found for the node inp -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", + " warnings.warn(\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:332: UserWarning: A saved file was found for the node middle -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", + " warnings.warn(\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:332: UserWarning: A saved file was found for the node end -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", + " warnings.warn(\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:332: UserWarning: A saved file was found for the node out -- attempting to load it...(To delete the saved file instead, use 
`overwrite_save=True`)\n", + " warnings.warn(\n" + ] + }, + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 63, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "reloaded = Workflow(\"save_demo\")\n", + "reloaded.out.value == wf.out.value" + ] + }, + { + "cell_type": "markdown", + "id": "f9238cf7-3050-4607-a542-010e3bb83f41", + "metadata": {}, + "source": [ + "You can force a newly instantiated node to ignore (and delete!) an existing save file by setting the `overwrite_save` kwarg to `True`.\n", + "\n", + "Finally, let's clean up our save to keep this demo director clean:" + ] + }, + { + "cell_type": "code", + "execution_count": 64, + "id": "0999d3e8-3a5a-451d-8667-a01dae7c1193", + "metadata": {}, + "outputs": [], + "source": [ + "reloaded.delete_storage()" + ] + }, { "cell_type": "markdown", "id": "1f012460-19af-45f7-98aa-a0ad5b8e6faa", @@ -3635,7 +3740,7 @@ }, { "cell_type": "code", - "execution_count": 62, + "execution_count": 65, "id": "0b373764-b389-4c24-8086-f3d33a4f7fd7", "metadata": {}, "outputs": [ @@ -3649,7 +3754,7 @@ " 17.230249999999995]" ] }, - "execution_count": 62, + "execution_count": 65, "metadata": {}, "output_type": "execute_result" } @@ -3686,7 +3791,7 @@ }, { "cell_type": "code", - "execution_count": 63, + "execution_count": 66, "id": "0dd04b4c-e3e7-4072-ad34-58f2c1e4f596", "metadata": {}, "outputs": [ @@ -3745,7 +3850,7 @@ }, { "cell_type": "code", - "execution_count": 64, + "execution_count": 67, "id": "2dfb967b-41ac-4463-b606-3e315e617f2a", "metadata": {}, "outputs": [ @@ -3769,7 +3874,7 @@ }, { "cell_type": "code", - "execution_count": 65, + "execution_count": 68, "id": "2e87f858-b327-4f6b-9237-c8a557f29aeb", "metadata": {}, "outputs": [ @@ -3777,14 +3882,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "0.879 > 0.2\n", - "0.214 > 0.2\n", - "0.550 > 0.2\n", - "0.477 > 0.2\n", - "0.826 > 0.2\n", - "0.288 > 0.2\n", - "0.028 <= 0.2\n", - "Finally 0.028\n" + "0.696 > 0.2\n", + 
"0.855 > 0.2\n", + "0.355 > 0.2\n", + "0.690 > 0.2\n", + "0.439 > 0.2\n", + "0.527 > 0.2\n", + "0.172 <= 0.2\n", + "Finally 0.172\n" ] } ], @@ -3822,16 +3927,6 @@ "print(f\"Finally {wf(threshold=0.2).capped_result:.3f}\")" ] }, - { - "cell_type": "markdown", - "id": "1f29fde8-1645-444e-99dc-3ec465461c7e", - "metadata": {}, - "source": [ - "## Serialization for data storage\n", - "\n", - "Serialization for storage doesn't exist yet. There are a million little things to do for `pyiron_workflow`, but data persistence is the last big missing piece for 1.0.0." - ] - }, { "cell_type": "code", "execution_count": null, @@ -3857,7 +3952,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.4" + "version": "3.11.7" } }, "nbformat": 4, From 5e0097f47f96a9e7400693dd5e486bca4f5bb228 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 22 Jan 2024 12:43:23 -0800 Subject: [PATCH 058/166] Propagate change to environment-notebooks.yml Because I still haven't fixed the CI to find this automatically --- .ci_support/environment-notebooks.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.ci_support/environment-notebooks.yml b/.ci_support/environment-notebooks.yml index 63d17e59..c7520f58 100644 --- a/.ci_support/environment-notebooks.yml +++ b/.ci_support/environment-notebooks.yml @@ -5,6 +5,6 @@ dependencies: - atomistics =0.1.20 - lammps - phonopy =2.21.0 - - pyiron_atomistics =0.4.6 + - pyiron_atomistics =0.4.7 - pyiron-data =0.0.27 - numpy =1.26.3 \ No newline at end of file From 3b405fd54ac9bc593c88073f04062fa486098b05 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Mon, 22 Jan 2024 20:43:57 +0000 Subject: [PATCH 059/166] [dependabot skip] Update env file --- .binder/environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.binder/environment.yml b/.binder/environment.yml index b3f806c2..c5b5363d 100644 --- a/.binder/environment.yml +++ b/.binder/environment.yml @@ -18,6 +18,6 @@ 
dependencies: - atomistics =0.1.20 - lammps - phonopy =2.21.0 -- pyiron_atomistics =0.4.6 +- pyiron_atomistics =0.4.7 - pyiron-data =0.0.27 - numpy =1.26.3 From 181c88462cc2d705c0c14e169c2706c062f0ed9c Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 22 Jan 2024 14:38:16 -0800 Subject: [PATCH 060/166] Accumulate run signals with scoped labels not objects --- pyiron_workflow/channels.py | 12 +++++++++--- tests/unit/test_channels.py | 2 +- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index 6648eb54..dfb2da0b 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -807,7 +807,7 @@ def __init__( callback: callable, ): super().__init__(label=label, node=node, callback=callback) - self.received_signals: set[OutputSignal] = set() + self.received_signals: set[str] = set() def __call__(self, other: OutputSignal) -> None: """ @@ -816,8 +816,14 @@ def __call__(self, other: OutputSignal) -> None: Resets the collection of received signals when firing. 
""" - self.received_signals.update([other]) - if len(set(self.connections).difference(self.received_signals)) == 0: + self.received_signals.update([other.scoped_label]) + if len( + set( + c.scoped_label for c in self.connections + ).difference( + self.received_signals + ) + ) == 0: self.reset() self.callback() diff --git a/tests/unit/test_channels.py b/tests/unit/test_channels.py index b501875b..341badc8 100644 --- a/tests/unit/test_channels.py +++ b/tests/unit/test_channels.py @@ -381,7 +381,7 @@ def test_aggregating_call(self): ): agg() - out2 = OutputSignal(label="out", node=DummyNode()) + out2 = OutputSignal(label="out2", node=DummyNode()) agg.connect(self.out, out2) self.assertEqual( From ffae6398aaebcd84b2cb5a1e0e76f97d14b28c88 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 23 Jan 2024 20:45:36 -0800 Subject: [PATCH 061/166] Require the callback to always be a method of the owning node And clarify in the docstring that it must have no arguments --- pyiron_workflow/channels.py | 35 ++++++++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index 72d9b27d..abbe6e89 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -10,6 +10,7 @@ import typing from abc import ABC, abstractmethod +import inspect from warnings import warn from pyiron_workflow.has_channel import HasChannel @@ -740,6 +741,7 @@ class SignalChannel(Channel, ABC): """ Signal channels give the option control execution flow by triggering callback functions when the channel is called. + Callbacks must be methods on the parent node that require no positional arguments. Inputs optionally accept an output signal on call, which output signals always send when they call their input connections. 
@@ -755,6 +757,10 @@ def __call__(self) -> None: pass +class BadCallbackError(ValueError): + pass + + class InputSignal(SignalChannel): @property def connection_partner_type(self): @@ -777,7 +783,34 @@ def __init__( object. """ super().__init__(label=label, node=node) - self.callback: callable = callback + if self._is_node_method(callback) and self._takes_zero_arguments(callback): + self.callback: callable = callback + else: + raise BadCallbackError( + f"The channel {self.label} on {self.node.label} got an unexpected " + f"callback: {callback}. " + f"Lives on node: {self._is_node_method(callback)}; " + f"take no args: {self._takes_zero_arguments(callback)} " + ) + + def _is_node_method(self, callback): + try: + return callback == getattr(self.node, callback.__name__) + except AttributeError: + return False + + def _takes_zero_arguments(self, callback): + return callable(callback) and self._no_positional_args(callback) + + @staticmethod + def _no_positional_args(func): + return sum( + 1 for parameter in inspect.signature(func).parameters.values() + if ( + parameter.default == inspect.Parameter.empty + and parameter.kind != inspect._ParameterKind.VAR_KEYWORD + ) + ) == 0 def __call__(self, other: typing.Optional[OutputSignal] = None) -> None: self.callback() From 0e66e0300b26a0509b07be22f5da4b165eb5582f Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 23 Jan 2024 20:46:31 -0800 Subject: [PATCH 062/166] Just store the name of the callback instead of the object Since we've guaranteed it lives on the owning node, this is safe now --- pyiron_workflow/channels.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index abbe6e89..bcced171 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -784,7 +784,7 @@ def __init__( """ super().__init__(label=label, node=node) if self._is_node_method(callback) and self._takes_zero_arguments(callback): - self.callback: callable = 
callback + self._callback: str = callback.__name__ else: raise BadCallbackError( f"The channel {self.label} on {self.node.label} got an unexpected " @@ -812,6 +812,10 @@ def _no_positional_args(func): ) ) == 0 + @property + def callback(self) -> callable: + return getattr(self.node, self._callback) + def __call__(self, other: typing.Optional[OutputSignal] = None) -> None: self.callback() From 3fa241981fe6cec829559b1d0f22db37ffb42dff Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 23 Jan 2024 20:46:43 -0800 Subject: [PATCH 063/166] Add tests --- tests/unit/test_channels.py | 51 +++++++++++++++++++++++++++++++++++-- 1 file changed, 49 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_channels.py b/tests/unit/test_channels.py index 341badc8..05472438 100644 --- a/tests/unit/test_channels.py +++ b/tests/unit/test_channels.py @@ -2,7 +2,7 @@ from pyiron_workflow.channels import ( Channel, InputData, OutputData, InputSignal, AccumulatingInputSignal, OutputSignal, - NotData, ChannelConnectionError + NotData, ChannelConnectionError, BadCallbackError ) @@ -15,7 +15,6 @@ def __init__(self): def update(self): self.foo.append(self.foo[-1] + 1) - class InputChannel(Channel): """Just to de-abstract the base class""" def __str__(self): @@ -451,6 +450,54 @@ def test_aggregating_call(self): msg="All signals, including vestigial ones, should get cleared on call" ) + def test_callbacks(self): + class Extended(DummyNode): + def method_with_args(self, x): + return x + 1 + + def method_with_only_kwargs(self, x=0): + return x + 1 + + @staticmethod + def staticmethod_without_args(): + return 42 + + @staticmethod + def staticmethod_with_args(x): + return x + 1 + + @classmethod + def classmethod_without_args(cls): + return 42 + + @classmethod + def classmethod_with_args(cls, x): + return x + 1 + + def doesnt_belong_to_node(): + return 42 + + node = Extended() + with self.subTest("Callbacks that belong to the node and take no arguments"): + for callback in [ + node.update, + 
node.method_with_only_kwargs, + node.staticmethod_without_args, + node.classmethod_without_args + ]: + with self.subTest(callback.__name__): + InputSignal(label="inp", node=node, callback=callback) + + with self.subTest("Invalid callbacks"): + for callback in [ + node.method_with_args, + node.staticmethod_with_args, + node.classmethod_with_args, + doesnt_belong_to_node, + ]: + with self.subTest(callback.__name__): + with self.assertRaises(BadCallbackError): + InputSignal(label="inp", node=node, callback=callback) if __name__ == '__main__': unittest.main() From b93f585dd127cfae1c9f439ffc1d05a3c8a1cc8f Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 23 Jan 2024 20:46:55 -0800 Subject: [PATCH 064/166] Fix tests where callback was not owned by the node --- tests/unit/test_io.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/tests/unit/test_io.py b/tests/unit/test_io.py index efae3620..8eb85e61 100644 --- a/tests/unit/test_io.py +++ b/tests/unit/test_io.py @@ -152,14 +152,18 @@ def test_to_list(self): class TestSignalIO(unittest.TestCase): def setUp(self) -> None: - node = DummyNode() + class Extended(DummyNode): + @staticmethod + def do_nothing(): + pass + + node = Extended() + - def do_nothing(): - pass signals = Signals() - signals.input.run = InputSignal("run", node, do_nothing) - signals.input.foo = InputSignal("foo", node, do_nothing) + signals.input.run = InputSignal("run", node, node.do_nothing) + signals.input.foo = InputSignal("foo", node, node.do_nothing) signals.output.ran = OutputSignal("ran", node) signals.output.bar = OutputSignal("bar", node) From c7975f20d1464f23f1ad331c3e74d41100fcc42e Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Wed, 24 Jan 2024 17:27:30 +0000 Subject: [PATCH 065/166] Format black --- pyiron_workflow/channels.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index bcced171..b2cde355 100644 --- 
a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -804,13 +804,17 @@ def _takes_zero_arguments(self, callback): @staticmethod def _no_positional_args(func): - return sum( - 1 for parameter in inspect.signature(func).parameters.values() - if ( - parameter.default == inspect.Parameter.empty - and parameter.kind != inspect._ParameterKind.VAR_KEYWORD + return ( + sum( + 1 + for parameter in inspect.signature(func).parameters.values() + if ( + parameter.default == inspect.Parameter.empty + and parameter.kind != inspect._ParameterKind.VAR_KEYWORD + ) ) - ) == 0 + == 0 + ) @property def callback(self) -> callable: From bf3f50638a1726c5c299233e48f8b506ec65b9cd Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 24 Jan 2024 11:21:38 -0800 Subject: [PATCH 066/166] :bug: Fix typo in __getstate__ return Actually use the state we create! --- pyiron_workflow/node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 8d0b615b..d73cc859 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1073,7 +1073,7 @@ def __getstate__(self): # _but_ if the user is just passing instructions on how to _build_ an executor, # we'll trust that those serialize OK (this way we can, hopefully, eventually # support nesting executors!) 
- return self.__dict__ + return state def __setstate__(self, state): # Update instead of overriding in case some other attributes were added on the From d3b1a2318289ce6396f8b91415390ee1d837b1eb Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 24 Jan 2024 11:50:15 -0800 Subject: [PATCH 067/166] Fix typo (missing words) in test message --- tests/unit/test_macro.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_macro.py b/tests/unit/test_macro.py index 4afd1f3a..afc45ac1 100644 --- a/tests/unit/test_macro.py +++ b/tests/unit/test_macro.py @@ -270,7 +270,7 @@ def test_with_executor(self): self.assertIs( downstream.inputs.x.connections[0], macro.outputs.three__result, - msg="The macro should still be connected to " + msg=f"The macro output should still be connected to downstream" ) sleep(0.2) # Give a moment for the ran signal to emit and downstream to run # I'm a bit surprised this sleep is necessary From e44958306ff6b7081dcac32de74ae565e2a049bb Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 24 Jan 2024 12:21:58 -0800 Subject: [PATCH 068/166] Have composite pass its entire self for remote execution Instead of just the children. 
In the case of macros, which are not parent-most objects, we need to be sure to restore any connections the local object had when parsing the remote result --- pyiron_workflow/composite.py | 23 +++++++++++++++------- pyiron_workflow/macro.py | 38 +++++++++++++++++++++++++++++++++++- 2 files changed, 53 insertions(+), 8 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 39e5a45f..30ec78e2 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -177,21 +177,23 @@ def on_run(self): return self.run_graph @staticmethod - def run_graph(_nodes: dict[Node], _starting_nodes: list[Node]): - for node in _starting_nodes: + def run_graph(_composite: Composite): + for node in _composite.starting_nodes: node.run() - return _nodes + return _composite @property def run_args(self) -> dict: - return {"_nodes": self.nodes, "_starting_nodes": self.starting_nodes} + return {"_composite": self} def process_run_result(self, run_output): - if run_output is not self.nodes: - # Then we probably ran on a parallel process and have an unpacked future - self._update_children(run_output) + if run_output is not self: + self._parse_remotely_executed_self(run_output) return DotDict(self.outputs.to_value_dict()) + def _parse_remotely_executed_self(self, other_self): + self.__setstate__(other_self.__getstate__()) + def _update_children(self, children_from_another_process: DotDict[str, Node]): """ If you receive a new dictionary of children, e.g. 
from unpacking a futures @@ -604,3 +606,10 @@ def tidy_working_directory(self): for node in self: node.tidy_working_directory() super().tidy_working_directory() + + def __setstate__(self, state): + super().__setstate__(state) + # Nodes purge their _parent information in their __getstate__ + # so return it to them: + for node in self: + node._parent = self diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index 56da65fd..84028024 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -7,7 +7,7 @@ from functools import partialmethod import inspect -from typing import get_type_hints, Literal, Optional +from typing import get_type_hints, Literal, Optional, TYPE_CHECKING from bidict import bidict @@ -17,6 +17,9 @@ from pyiron_workflow.io import Outputs, Inputs from pyiron_workflow.output_parser import ParseOutput +if TYPE_CHECKING: + from pyiron_workflow.channels import Channel + class Macro(Composite): """ @@ -471,6 +474,39 @@ def inputs(self) -> Inputs: def outputs(self) -> Outputs: return self._outputs + def _parse_remotely_executed_self(self, other_self): + local_connection_data = [ + [(c, c.label, c.connections) for c in io_panel] + for io_panel + in [self.inputs, self.outputs, self.signals.input, self.signals.output] + ] + + super()._parse_remotely_executed_self(other_self) + + for old_data, io_panel in zip( + local_connection_data, + [self.inputs, self.outputs, self.signals.input, self.signals.output] + # Get fresh copies of the IO panels post-update + ): + for original_channel, label, connections in old_data: + new_channel = io_panel[label] # Fetch it from the fresh IO panel + new_channel.connections = connections + for other_channel in connections: + self._replace_connection( + other_channel, original_channel, new_channel + ) + + @staticmethod + def _replace_connection( + channel: Channel, old_connection: Channel, new_connection: Channel + ): + """Brute-force replace an old connection in a channel with a new one""" + 
channel.connections = [ + c if c is not old_connection + else new_connection + for c in channel + ] + def _update_children(self, children_from_another_process): super()._update_children(children_from_another_process) self._rebuild_data_io() From dc09c824d2cf4aea671cbb02b5ad3cd22b062f8a Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 24 Jan 2024 12:23:06 -0800 Subject: [PATCH 069/166] Remove unused methods We parse the whole composite now, not just its children --- pyiron_workflow/composite.py | 10 ---------- pyiron_workflow/macro.py | 4 ---- 2 files changed, 14 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 30ec78e2..a3224745 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -194,16 +194,6 @@ def process_run_result(self, run_output): def _parse_remotely_executed_self(self, other_self): self.__setstate__(other_self.__getstate__()) - def _update_children(self, children_from_another_process: DotDict[str, Node]): - """ - If you receive a new dictionary of children, e.g. from unpacking a futures - object of your own children you sent off to another process for computation, - replace your own nodes with them, and set yourself as their parent. - """ - for child in children_from_another_process.values(): - child._parent = self - self.nodes = children_from_another_process - def disconnect_run(self) -> list[tuple[Channel, Channel]]: """ Disconnect all `signals.input.run` connections on all child nodes. 
diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index 84028024..b64742cb 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -507,10 +507,6 @@ def _replace_connection( for c in channel ] - def _update_children(self, children_from_another_process): - super()._update_children(children_from_another_process) - self._rebuild_data_io() - def _configure_graph_execution(self): run_signals = self.disconnect_run() From 6685d7a79c508b80ab6db041f16af8375c00ef68 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Wed, 24 Jan 2024 20:28:34 +0000 Subject: [PATCH 070/166] Format black --- pyiron_workflow/macro.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index b64742cb..1790af94 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -477,8 +477,12 @@ def outputs(self) -> Outputs: def _parse_remotely_executed_self(self, other_self): local_connection_data = [ [(c, c.label, c.connections) for c in io_panel] - for io_panel - in [self.inputs, self.outputs, self.signals.input, self.signals.output] + for io_panel in [ + self.inputs, + self.outputs, + self.signals.input, + self.signals.output, + ] ] super()._parse_remotely_executed_self(other_self) @@ -502,9 +506,7 @@ def _replace_connection( ): """Brute-force replace an old connection in a channel with a new one""" channel.connections = [ - c if c is not old_connection - else new_connection - for c in channel + c if c is not old_connection else new_connection for c in channel ] def _configure_graph_execution(self): From d7369126cf40fad019696f569db3037237ab1c32 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 24 Jan 2024 13:19:39 -0800 Subject: [PATCH 071/166] Be consistent about copying and updating dict Sometimes we mess with stuff in __getstate__, e.g. 
to avoid reflexive relationships between nodes and channels, but getting the state should never modify the current state, so always make a fresh dictionary. Similarly, the returned state might possibly not have all the content our current state does, so use update instead of overwriting. --- pyiron_workflow/channels.py | 4 +--- pyiron_workflow/interfaces.py | 4 ++-- pyiron_workflow/io.py | 2 +- pyiron_workflow/node.py | 2 +- 4 files changed, 5 insertions(+), 7 deletions(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index b2cde355..16fd3923 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -729,11 +729,9 @@ def __round__(self): # Because we override __getattr__ we need to get and set state for serialization def __getstate__(self): - return self.__dict__ + return dict(self.__dict__) def __setstate__(self, state): - # Update instead of overriding in case some other attributes were added on the - # main process while a remote process was working away self.__dict__.update(**state) diff --git a/pyiron_workflow/interfaces.py b/pyiron_workflow/interfaces.py index 90e19338..9e30390f 100644 --- a/pyiron_workflow/interfaces.py +++ b/pyiron_workflow/interfaces.py @@ -147,10 +147,10 @@ def __getitem__(self, item): ) from e def __getstate__(self): - return self.__dict__ + return dict(self.__dict__) def __setstate__(self, state): - self.__dict__ = state + self.__dict__.update(**state) def register(self, package_identifier: str, domain: Optional[str] = None) -> None: """ diff --git a/pyiron_workflow/io.py b/pyiron_workflow/io.py index 18ed8d01..f1cf6232 100644 --- a/pyiron_workflow/io.py +++ b/pyiron_workflow/io.py @@ -157,7 +157,7 @@ def to_dict(self): def __getstate__(self): # Compatibility with python <3.11 - return self.__dict__ + return dict(self.__dict__) def __setstate__(self, state): # Because we override getattr, we need to use __dict__ assignment directly in diff --git a/pyiron_workflow/node.py 
b/pyiron_workflow/node.py index d73cc859..9bc949b1 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1040,7 +1040,7 @@ def replace_with(self, other: Node | type[Node]): warnings.warn(f"Could not replace_node {self.label}, as it has no parent.") def __getstate__(self): - state = self.__dict__ + state = dict(self.__dict__) state["_parent"] = None # I am not at all confident that removing the parent here is the _right_ # solution. From 1ce79ce3d450991fa36c5020b89555888b6a69db Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 24 Jan 2024 13:20:03 -0800 Subject: [PATCH 072/166] Unparent local nodes before taking remote In the case of running on an executor --- pyiron_workflow/composite.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index a3224745..68a4cc2e 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -192,6 +192,9 @@ def process_run_result(self, run_output): return DotDict(self.outputs.to_value_dict()) def _parse_remotely_executed_self(self, other_self): + # Un-parent existing nodes before ditching them + for node in self: + node._parent = None self.__setstate__(other_self.__getstate__()) def disconnect_run(self) -> list[tuple[Channel, Channel]]: From 1f3893d294b8ce87bac07b57ab5fc8d737403140 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 24 Jan 2024 13:45:22 -0800 Subject: [PATCH 073/166] :bug: stop running By setting `running=False` on the local instance _before_ updating the state, we were then re-writing `running=True` when updating the state. Update the flag on the received instance as well, and add a test so the same issue doesn't crop up again. 
--- pyiron_workflow/composite.py | 1 + tests/unit/test_macro.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 68a4cc2e..8f4f0cd8 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -195,6 +195,7 @@ def _parse_remotely_executed_self(self, other_self): # Un-parent existing nodes before ditching them for node in self: node._parent = None + other_self.running = False # It's done now self.__setstate__(other_self.__getstate__()) def disconnect_run(self) -> list[tuple[Channel, Channel]]: diff --git a/tests/unit/test_macro.py b/tests/unit/test_macro.py index afc45ac1..3b9cdcb0 100644 --- a/tests/unit/test_macro.py +++ b/tests/unit/test_macro.py @@ -238,6 +238,10 @@ def test_with_executor(self): returned_nodes = result.result(timeout=120) # Wait for the process to finish sleep(1) + self.assertFalse( + macro.running, + msg="Macro should be done running" + ) self.assertIsNot( original_one, returned_nodes.one, From 2f6972a7bcc30e093eb8c21a6b00572b02319640 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 24 Jan 2024 10:12:28 -0800 Subject: [PATCH 074/166] Don't reciprocally store the owning node in channel state Leave it up to the owning node to (re)store this --- pyiron_workflow/channels.py | 13 +++++++++++++ pyiron_workflow/node.py | 8 ++++++++ 2 files changed, 21 insertions(+) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index 16fd3923..1e40a8da 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -215,6 +215,19 @@ def to_dict(self) -> dict: "connections": [f"{c.node.label}.{c.label}" for c in self.connections], } + def __getstate__(self): + state = self.__dict__ + # To avoid cyclic storage and avoid storing complex objects, purge some + # properties from the state + state["node"] = None + # It is the responsibility of the owning node to restore the node property + return state + + def __setstate__(self, state): + # 
Update instead of overriding in case some other attributes were added on the + # main process while a remote process was working away + self.__dict__.update(**state) + class NotData: """ diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 9bc949b1..6484ecc3 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1079,6 +1079,14 @@ def __setstate__(self, state): # Update instead of overriding in case some other attributes were added on the # main process while a remote process was working away self.__dict__.update(**state) + for io_panel in [ + self.inputs, + self.outputs, + self.signals.input, + self.signals.output, + ]: + for channel in io_panel: + channel.node = self def executor_shutdown(self, wait=True, *, cancel_futures=False): """Invoke shutdown on the executor (if present).""" From e0d1f46962d85c98026ac2f105abd947b094546e Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 24 Jan 2024 10:14:11 -0800 Subject: [PATCH 075/166] Don't store the owner-scope value receiver on channels Leave it up to the owning macro to (re)store this --- pyiron_workflow/channels.py | 7 ++++++ pyiron_workflow/macro.py | 47 +++++++++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index 1e40a8da..d3b0c573 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -489,6 +489,13 @@ def from_storage(self, storage): else storage["value"] ) + def __getstate__(self): + state = super().__getstate__() + state["_value_receiver"] = None + # Value receivers live in the scope of Macros, so (re)storing them is the + # owning macro's responsibility + return state + class InputData(DataChannel): @property diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index 1790af94..2754b5cc 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -546,6 +546,53 @@ def from_storage(self, storage): for label, node in self.nodes.items(): 
node.from_storage(storage[label]) + @property + def _input_value_links(self): + """ + Value connections between child output and macro in string representation based + on labels. + + The string representation helps storage, and having it as a property ensures + the name is protected. + """ + return [ + (c.label, (c.value_receiver.node.label, c.value_receiver.label)) + for c + in self.inputs + ] + + @property + def _output_value_links(self): + """ + Value connections between macro and child input in string representation based + on labels. + + The string representation helps storage, and having it as a property ensures + the name is protected. + """ + return [ + ((c.node.label, c.label), c.value_receiver.label) + for child in self + for c in child.outputs + if c.value_receiver is not None + ] + + def __getstate__(self): + state = super().__getstate__() + state["_input_value_links"] = self._input_value_links + state["_output_value_links"] = self._output_value_links + return state + + def __setstate__(self, state): + # Purge value links from the state and re-forge them + for (inp, (child, child_inp)) in state.pop("_input_value_links"): + self.inputs[inp].value_receiver = self.nodes[child].inputs[child_inp] + + for ((child, child_out), out) in state.pop("_output_value_links"): + self.nodes[child].outputs[child_out].value_receiver = self.outputs[out] + + super().__setstate__(state) + def macro_node(*output_labels, **node_class_kwargs): """ From 1d0047f93b32e442a19a0d184a035cb5b0223d52 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 24 Jan 2024 15:00:57 -0800 Subject: [PATCH 076/166] :bug: workflows don't need to re-parent data IO They only hold pointers, so these guys already have parents --- pyiron_workflow/node.py | 16 +++++++++++----- pyiron_workflow/workflow.py | 11 +++++++++++ 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 6484ecc3..8ffed211 100644 --- a/pyiron_workflow/node.py +++ 
b/pyiron_workflow/node.py @@ -22,7 +22,7 @@ from pyiron_workflow.draw import Node as GraphvizNode from pyiron_workflow.snippets.files import FileObject, DirectoryObject from pyiron_workflow.has_to_dict import HasToDict -from pyiron_workflow.io import Signals +from pyiron_workflow.io import Signals, IO from pyiron_workflow.topology import ( get_nodes_in_data_tree, set_run_connections_according_to_linear_dag, @@ -1079,14 +1079,20 @@ def __setstate__(self, state): # Update instead of overriding in case some other attributes were added on the # main process while a remote process was working away self.__dict__.update(**state) - for io_panel in [ + + # Channels don't store their own node in their state, so repopulate it + for io_panel in self._owned_io_panels: + for channel in io_panel: + channel.node = self + + @property + def _owned_io_panels(self) -> list[IO]: + return [ self.inputs, self.outputs, self.signals.input, self.signals.output, - ]: - for channel in io_panel: - channel.node = self + ] def executor_shutdown(self, wait=True, *, cancel_futures=False): """Invoke shutdown on the executor (if present).""" diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 86b0c013..1ba79d4c 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -16,6 +16,7 @@ from bidict import bidict from pyiron_workflow.channels import InputData, OutputData + from pyiron_workflow.io import IO from pyiron_workflow.node import Node @@ -383,3 +384,13 @@ def save(self): f"be in your python path and importable at load time too." ) self.to_storage(self.storage) + + @property + def _owned_io_panels(self) -> list[IO]: + # Workflow data IO is just pointers to child IO, not actually owned directly + # by the workflow; this is used in re-parenting channels, and we don't want to + # override the real parent with this workflow! 
+ return [ + self.signals.input, + self.signals.output, + ] From e2dcd5c8fb45805523a1da2cb17dd1a40db3e517 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 24 Jan 2024 15:01:48 -0800 Subject: [PATCH 077/166] Don't store the connections on channels Leave it up to the owning composite to (re)store this --- pyiron_workflow/channels.py | 5 +- pyiron_workflow/composite.py | 105 +++++++++++++++++++++++++++++++++++ pyiron_workflow/macro.py | 15 +++-- 3 files changed, 118 insertions(+), 7 deletions(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index d3b0c573..bb032daa 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -216,11 +216,14 @@ def to_dict(self) -> dict: } def __getstate__(self): - state = self.__dict__ + state = dict(self.__dict__) # To avoid cyclic storage and avoid storing complex objects, purge some # properties from the state state["node"] = None # It is the responsibility of the owning node to restore the node property + state["connections"] = [] + # It is the responsibility of the owning node's parent to store and restore + # connections (if any) return state def __setstate__(self, state): diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 8f4f0cd8..0348ede6 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -601,9 +601,114 @@ def tidy_working_directory(self): node.tidy_working_directory() super().tidy_working_directory() + def _get_connections_as_strings( + self, panel_getter: callable + ) -> list[tuple[tuple[str, str], tuple[str, str]]]: + """ + Connections between children in string representation based on labels. + + The string representation helps storage, and having it as a property ensures + the name is protected. 
+ """ + return [ + ( + (inp.node.label, inp.label), + (out.node.label, out.label) + ) + for child in self + for inp in panel_getter(child) + for out in inp.connections + ] + + @staticmethod + def _get_data_inputs(node: Node): + return node.inputs + + @staticmethod + def _get_signals_input(node: Node): + return node.signals.input + + @property + def _child_data_connections(self) -> list[tuple[tuple[str, str], tuple[str, str]]]: + return self._get_connections_as_strings(self._get_data_inputs) + + @property + def _child_signal_connections( + self + ) -> list[tuple[tuple[str, str], tuple[str, str]]]: + return self._get_connections_as_strings(self._get_signals_input) + + def __getstate__(self): + state = super().__getstate__() + state["_child_data_connections"] = self._child_data_connections + state["_child_signal_connections"] = self._child_signal_connections + return state + def __setstate__(self, state): + # Purge child connection info from the state + child_data_connections = state.pop("_child_data_connections") + child_signal_connections = state.pop("_child_signal_connections") + super().__setstate__(state) + # Nodes purge their _parent information in their __getstate__ # so return it to them: for node in self: node._parent = self + # Nodes don't store connection information, so restore it to them + self._restore_data_connections_from_strings(child_data_connections) + self._restore_signal_connections_from_strings(child_signal_connections) + + @staticmethod + def _restore_connections_from_strings( + nodes: dict[str, Node] | DotDict[str, Node], + connections: list[tuple[tuple[str, str], tuple[str, str]]], + input_panel_getter: callable, + output_panel_getter: callable, + ) -> None: + """ + Set connections among a dictionary of nodes. + + This is useful for recreating node connections after (de)serialization of the + individual nodes, which don't know about their connections (that information is + the responsibility of their parent `Composite`). 
+ + Args: + nodes (dict[Node]): The nodes to connect. + connections (list[tuple[tuple[str, str], tuple[str, str]]]): Connections + among these nodes in the format ((input node label, input channel label + ), (output node label, output channel label)). + """ + for ((inp_node, inp), (out_node, out)) in connections: + input_panel_getter(nodes[inp_node])[inp].connect( + output_panel_getter(nodes[out_node])[out] + ) + + @staticmethod + def _get_data_outputs(node: Node): + return node.outputs + + @staticmethod + def _get_signals_output(node: Node): + return node.signals.output + + def _restore_data_connections_from_strings( + self, connections: list[tuple[tuple[str, str], tuple[str, str]]] + ) -> None: + self._restore_connections_from_strings( + self.nodes, + connections, + self._get_data_inputs, + self._get_data_outputs, + ) + + def _restore_signal_connections_from_strings( + self, connections: list[tuple[tuple[str, str], tuple[str, str]]] + ) -> None: + self._restore_connections_from_strings( + self.nodes, + connections, + self._get_signals_input, + self._get_signals_output, + ) + diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index 2754b5cc..6d24f41b 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -484,7 +484,6 @@ def _parse_remotely_executed_self(self, other_self): self.signals.output, ] ] - super()._parse_remotely_executed_self(other_self) for old_data, io_panel in zip( @@ -584,15 +583,19 @@ def __getstate__(self): return state def __setstate__(self, state): - # Purge value links from the state and re-forge them - for (inp, (child, child_inp)) in state.pop("_input_value_links"): + # Purge value links from the state + input_links = state.pop("_input_value_links") + output_links = state.pop("_output_value_links") + + super().__setstate__(state) + + # Re-forge value links + for (inp, (child, child_inp)) in input_links: self.inputs[inp].value_receiver = self.nodes[child].inputs[child_inp] - for ((child, child_out), out) in 
state.pop("_output_value_links"): + for ((child, child_out), out) in output_links: self.nodes[child].outputs[child_out].value_receiver = self.outputs[out] - super().__setstate__(state) - def macro_node(*output_labels, **node_class_kwargs): """ From 95727c65808b142807c269eabc2961c302dff6c5 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Thu, 25 Jan 2024 01:12:05 +0000 Subject: [PATCH 078/166] Format black --- pyiron_workflow/composite.py | 10 +++------- pyiron_workflow/macro.py | 7 +++---- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 0348ede6..3a66ad1c 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -611,10 +611,7 @@ def _get_connections_as_strings( the name is protected. """ return [ - ( - (inp.node.label, inp.label), - (out.node.label, out.label) - ) + ((inp.node.label, inp.label), (out.node.label, out.label)) for child in self for inp in panel_getter(child) for out in inp.connections @@ -634,7 +631,7 @@ def _child_data_connections(self) -> list[tuple[tuple[str, str], tuple[str, str] @property def _child_signal_connections( - self + self, ) -> list[tuple[tuple[str, str], tuple[str, str]]]: return self._get_connections_as_strings(self._get_signals_input) @@ -679,7 +676,7 @@ def _restore_connections_from_strings( among these nodes in the format ((input node label, input channel label ), (output node label, output channel label)). 
""" - for ((inp_node, inp), (out_node, out)) in connections: + for (inp_node, inp), (out_node, out) in connections: input_panel_getter(nodes[inp_node])[inp].connect( output_panel_getter(nodes[out_node])[out] ) @@ -711,4 +708,3 @@ def _restore_signal_connections_from_strings( self._get_signals_input, self._get_signals_output, ) - diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index 6d24f41b..bce59392 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -556,8 +556,7 @@ def _input_value_links(self): """ return [ (c.label, (c.value_receiver.node.label, c.value_receiver.label)) - for c - in self.inputs + for c in self.inputs ] @property @@ -590,10 +589,10 @@ def __setstate__(self, state): super().__setstate__(state) # Re-forge value links - for (inp, (child, child_inp)) in input_links: + for inp, (child, child_inp) in input_links: self.inputs[inp].value_receiver = self.nodes[child].inputs[child_inp] - for ((child, child_out), out) in output_links: + for (child, child_out), out in output_links: self.nodes[child].outputs[child_out].value_receiver = self.outputs[out] From dbbae003dc1d54cedd92e9b64c1fc8a9349ea9b8 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 25 Jan 2024 13:36:41 -0800 Subject: [PATCH 079/166] :bug: Don't override __get/setstate__ for `OutputData` This was just an oversight, as this class was one of the first to have these methods. 
--- pyiron_workflow/channels.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index bb032daa..e08c859c 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -750,13 +750,6 @@ def __round__(self): return self._node_injection(Round) - # Because we override __getattr__ we need to get and set state for serialization - def __getstate__(self): - return dict(self.__dict__) - - def __setstate__(self, state): - self.__dict__.update(**state) - class SignalChannel(Channel, ABC): """ From bd26858a108e0f12f5a51007e2299c277d5beb90 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 25 Jan 2024 13:20:16 -0800 Subject: [PATCH 080/166] Use a singleton for NotData instead of the class --- pyiron_workflow/channels.py | 21 ++++++++++++++------- pyiron_workflow/function.py | 6 +++--- pyiron_workflow/macro.py | 4 ++-- pyiron_workflow/node.py | 4 ++-- pyiron_workflow/node_library/standard.py | 8 ++++---- tests/integration/test_parallel_speedup.py | 6 +++--- tests/unit/test_channels.py | 8 ++++---- tests/unit/test_composite.py | 4 ++-- tests/unit/test_function.py | 16 ++++++++-------- tests/unit/test_macro.py | 8 ++++---- tests/unit/test_node.py | 10 +++++----- tests/unit/test_workflow.py | 6 +++--- 12 files changed, 54 insertions(+), 47 deletions(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index e08c859c..81c6649b 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -15,6 +15,7 @@ from pyiron_workflow.has_channel import HasChannel from pyiron_workflow.has_to_dict import HasToDict +from pyiron_workflow.snippets.singleton import Singleton from pyiron_workflow.type_hinting import ( valid_value, type_hint_is_as_or_more_specific_than, @@ -232,7 +233,7 @@ def __setstate__(self, state): self.__dict__.update(**state) -class NotData: +class NotData(metaclass=Singleton): """ This class exists purely to initialize data channel values where no default 
value is provided; it lets the channel know that it has _no data in it_ and thus should @@ -243,7 +244,13 @@ class NotData: def __repr__(cls): # We use the class directly (not instances of it) where there is not yet data # So give it a decent repr, even as just a class - return cls.__name__ + return "NOT_DATA" + + def __reduce__(self): + return "NOT_DATA" + + +NOT_DATA = NotData() class DataChannel(Channel, ABC): @@ -338,13 +345,13 @@ def __init__( self, label: str, node: Node, - default: typing.Optional[typing.Any] = NotData, + default: typing.Optional[typing.Any] = NOT_DATA, type_hint: typing.Optional[typing.Any] = None, strict_hints: bool = True, value_receiver: typing.Optional[InputData] = None, ): super().__init__(label=label, node=node) - self._value = NotData + self._value = NOT_DATA self._value_receiver = None self.type_hint = type_hint self.strict_hints = strict_hints @@ -366,7 +373,7 @@ def value(self, new_value): def _type_check_new_value(self, new_value): if ( self.strict_hints - and new_value is not NotData + and new_value is not NOT_DATA and self._has_hint and not valid_value(new_value, self.type_hint) ): @@ -430,7 +437,7 @@ def ready(self) -> bool: @property def _value_is_data(self) -> bool: - return self.value is not NotData + return self.value is not NOT_DATA @property def _has_hint(self) -> bool: @@ -518,7 +525,7 @@ def fetch(self) -> None: RuntimeError: If the parent node is :attr:`running`. 
""" for out in self.connections: - if out.value is not NotData: + if out.value is not NOT_DATA: self.value = out.value break diff --git a/pyiron_workflow/function.py b/pyiron_workflow/function.py index 866583bc..dffacce0 100644 --- a/pyiron_workflow/function.py +++ b/pyiron_workflow/function.py @@ -5,7 +5,7 @@ from functools import partialmethod from typing import Any, get_args, get_type_hints, Optional, TYPE_CHECKING -from pyiron_workflow.channels import InputData, OutputData, NotData +from pyiron_workflow.channels import InputData, OutputData, NOT_DATA from pyiron_workflow.has_channel import HasChannel from pyiron_workflow.io import Inputs, Outputs from pyiron_workflow.node import Node @@ -78,7 +78,7 @@ class Function(Node): >>> plus_minus_1 = Function(mwe) >>> >>> print(plus_minus_1.outputs["x+1"]) - + NOT_DATA There is no output because we haven't given our function any input, it has no defaults, and we never ran it! So outputs have the channel default value of @@ -445,7 +445,7 @@ def _build_input_channels(self): except KeyError: type_hint = None - default = NotData # The standard default in DataChannel + default = NOT_DATA # The standard default in DataChannel if value.default is not inspect.Parameter.empty: if is_self: warnings.warn("default value for self ignored") diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index bce59392..c83a605d 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -11,7 +11,7 @@ from bidict import bidict -from pyiron_workflow.channels import InputData, OutputData, NotData +from pyiron_workflow.channels import InputData, OutputData, NOT_DATA from pyiron_workflow.composite import Composite from pyiron_workflow.has_channel import HasChannel from pyiron_workflow.io import Outputs, Inputs @@ -364,7 +364,7 @@ def _prepopulate_ui_nodes_from_graph_creator_signature(self): continue # Skip the macro argument itself, it's like `self` here default = ( - NotData + NOT_DATA if inspected_value.default is 
inspect.Parameter.empty else inspected_value.default ) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 8ffed211..521ea7a7 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -17,7 +17,7 @@ InputSignal, AccumulatingInputSignal, OutputSignal, - NotData, + NOT_DATA, ) from pyiron_workflow.draw import Node as GraphvizNode from pyiron_workflow.snippets.files import FileObject, DirectoryObject @@ -1006,7 +1006,7 @@ def _copy_values( (self.outputs, other.outputs), ]: for key, to_copy in other_panel.items(): - if to_copy.value is not NotData: + if to_copy.value is not NOT_DATA: try: old_value = my_panel[key].value my_panel[key].value = to_copy.value # Gets hint-checked diff --git a/pyiron_workflow/node_library/standard.py b/pyiron_workflow/node_library/standard.py index 1e7820df..9f4bb5bb 100644 --- a/pyiron_workflow/node_library/standard.py +++ b/pyiron_workflow/node_library/standard.py @@ -6,7 +6,7 @@ from inspect import isclass -from pyiron_workflow.channels import NotData, OutputSignal +from pyiron_workflow.channels import NOT_DATA, OutputSignal from pyiron_workflow.function import SingleValue, single_value_node @@ -28,9 +28,9 @@ def __init__(self, **kwargs): @staticmethod def if_(condition): - if isclass(condition) and issubclass(condition, NotData): + if condition is NOT_DATA: raise TypeError( - f"Logic 'If' node expected data otherut got NotData as input." + f"Logic 'If' node expected data other but got NotData as input." 
) return bool(condition) @@ -47,7 +47,7 @@ def process_run_result(self, function_output): @single_value_node("slice") -def Slice(start=None, stop=NotData, step=None): +def Slice(start=None, stop=NOT_DATA, step=None): if start is None: if stop is None: raise ValueError( diff --git a/tests/integration/test_parallel_speedup.py b/tests/integration/test_parallel_speedup.py index c656af74..985426cc 100644 --- a/tests/integration/test_parallel_speedup.py +++ b/tests/integration/test_parallel_speedup.py @@ -2,7 +2,7 @@ import unittest from pyiron_workflow import Workflow -from pyiron_workflow.channels import NotData +from pyiron_workflow.channels import NOT_DATA class TestParallelSpeedup(unittest.TestCase): @@ -28,7 +28,7 @@ def make_workflow(label): wf.starting_nodes = [wf.a] t0 = perf_counter() wf() - while wf.outputs.d__user_input.value is NotData: + while wf.outputs.d__user_input.value is NOT_DATA: sleep(0.001) dt_serial = perf_counter() - t0 @@ -43,7 +43,7 @@ def make_workflow(label): t1 = perf_counter() wf() - while wf.outputs.d__user_input.value is NotData: + while wf.outputs.d__user_input.value is NOT_DATA: sleep(0.001) dt_parallel = perf_counter() - t1 diff --git a/tests/unit/test_channels.py b/tests/unit/test_channels.py index 05472438..35cd2267 100644 --- a/tests/unit/test_channels.py +++ b/tests/unit/test_channels.py @@ -2,7 +2,7 @@ from pyiron_workflow.channels import ( Channel, InputData, OutputData, InputSignal, AccumulatingInputSignal, OutputSignal, - NotData, ChannelConnectionError, BadCallbackError + NOT_DATA, ChannelConnectionError, BadCallbackError ) @@ -140,7 +140,7 @@ def test_mutable_defaults(self): ) def test_fetch(self): - self.no.value = NotData + self.no.value = NOT_DATA self.ni1.value = 1 self.ni1.connect(self.no_empty) @@ -285,7 +285,7 @@ def test_value_receiver(self): def test_value_assignment(self): self.ni1.value = 2 # Should be fine when value matches hint - self.ni1.value = NotData # Should be able to clear the data + self.ni1.value = 
NOT_DATA # Should be able to clear the data self.ni1.node.running = True with self.assertRaises( @@ -313,7 +313,7 @@ def test_ready(self): without_default = InputData(label="without_default", node=DummyNode()) self.assertIs( without_default.value, - NotData, + NOT_DATA, msg=f"Without a default, spec is to have a NotData value but got " f"{type(without_default.value)}" ) diff --git a/tests/unit/test_composite.py b/tests/unit/test_composite.py index 64185c74..4c4e4d3b 100644 --- a/tests/unit/test_composite.py +++ b/tests/unit/test_composite.py @@ -3,7 +3,7 @@ from bidict import ValueDuplicationError from pyiron_workflow._tests import ensure_tests_in_python_path -from pyiron_workflow.channels import NotData +from pyiron_workflow.channels import NOT_DATA from pyiron_workflow.composite import Composite from pyiron_workflow.io import Outputs, Inputs from pyiron_workflow.topology import CircularDataFlowError @@ -423,7 +423,7 @@ def test_run(self): msg="Expected to start from starting node and propagate" ) self.assertIs( - NotData, + NOT_DATA, self.comp.n3.outputs.y.value, msg="n3 was omitted from the execution diagram, it should not have run" ) diff --git a/tests/unit/test_function.py b/tests/unit/test_function.py index 285aa586..7ffacc99 100644 --- a/tests/unit/test_function.py +++ b/tests/unit/test_function.py @@ -2,7 +2,7 @@ import unittest import warnings -from pyiron_workflow.channels import NotData, ChannelConnectionError +from pyiron_workflow.channels import NOT_DATA, ChannelConnectionError from pyiron_workflow.function import Function, SingleValue, function_node from pyiron_workflow.interfaces import Executor @@ -44,7 +44,7 @@ def test_instantiation(self): with self.subTest("Args and kwargs at initialization"): node = Function(plus_one) self.assertIs( - NotData, + NOT_DATA, node.outputs.y.value, msg="Sanity check that output just has the standard not-data value at " "instantiation", @@ -52,7 +52,7 @@ def test_instantiation(self): node.inputs.x = 10 self.assertIs( 
node.outputs.y.value, - NotData, + NOT_DATA, msg="Nodes should not run on input updates", ) node.run() @@ -104,8 +104,8 @@ def test_defaults(self): without_defaults = Function(no_default) self.assertIs( without_defaults.inputs.x.value, - NotData, - msg=f"Expected values with no default specified to start as {NotData} but " + NOT_DATA, + msg=f"Expected values with no default specified to start as {NOT_DATA} but " f"got {without_defaults.inputs.x.value}", ) self.assertFalse( @@ -383,7 +383,7 @@ def reference(x=0, y: int = 0, z: int | float = 0, omega=None, extra_here=None): return out @function_node() - def all_floats(x=1.1, y=1.1, z=1.1, omega=NotData, extra_there=None) -> float: + def all_floats(x=1.1, y=1.1, z=1.1, omega=NOT_DATA, extra_there=None) -> float: out = 42.1 return out @@ -531,9 +531,9 @@ def test_repr(self): with self.subTest("Not data"): svn = SingleValue(no_default, output_labels="output") - self.assertIs(svn.outputs.output.value, NotData) + self.assertIs(svn.outputs.output.value, NOT_DATA) self.assertTrue( - svn.__repr__().endswith(NotData.__name__), + svn.__repr__().endswith(NOT_DATA.__repr__()), msg="When the output is still not data, the representation should " "indicate this" ) diff --git a/tests/unit/test_macro.py b/tests/unit/test_macro.py index 3b9cdcb0..054c3e62 100644 --- a/tests/unit/test_macro.py +++ b/tests/unit/test_macro.py @@ -4,7 +4,7 @@ from time import sleep import unittest -from pyiron_workflow.channels import NotData +from pyiron_workflow.channels import NOT_DATA from pyiron_workflow.function import SingleValue from pyiron_workflow.macro import Macro, macro_node from pyiron_workflow.topology import CircularDataFlowError @@ -142,7 +142,7 @@ def test_creation_from_decorator(self): self.assertIs( m.outputs.three__result.value, - NotData, + NOT_DATA, msg="Output should be accessible with the usual naming convention, but we " "have not run yet so there shouldn't be any data" ) @@ -218,7 +218,7 @@ def test_with_executor(self): 
macro.executor = macro.create.Executor() self.assertIs( - NotData, + NOT_DATA, macro.outputs.three__result.value, msg="Sanity check that test is in right starting condition" ) @@ -230,7 +230,7 @@ def test_with_executor(self): msg="Should be running as a parallel process" ) self.assertIs( - NotData, + NOT_DATA, downstream.outputs.result.value, msg="Downstream events should not yet have triggered either, we should wait" "for the callback when the result is ready" diff --git a/tests/unit/test_node.py b/tests/unit/test_node.py index 12a91a4d..aab4c763 100644 --- a/tests/unit/test_node.py +++ b/tests/unit/test_node.py @@ -3,7 +3,7 @@ import unittest -from pyiron_workflow.channels import InputData, OutputData, NotData +from pyiron_workflow.channels import InputData, OutputData, NOT_DATA from pyiron_workflow.snippets.files import DirectoryObject from pyiron_workflow.interfaces import Executor from pyiron_workflow.io import Inputs, Outputs @@ -342,7 +342,7 @@ def test_draw(self): def test_run_after_init(self): self.assertIs( self.n1.outputs.y.value, - NotData, + NOT_DATA, msg="By default, nodes should not be getting run until asked" ) self.assertEqual( @@ -371,7 +371,7 @@ def test_graph_info(self): def test_storage(self): self.assertIs( self.n1.outputs.y.value, - NotData, + NOT_DATA, msg="Sanity check on initial state" ) y = self.n1() @@ -388,7 +388,7 @@ def test_storage(self): clean_slate = ANode(self.n1.label, x=x, overwrite_save=True) self.assertIs( clean_slate.outputs.y.value, - NotData, + NOT_DATA, msg="Users should be able to ignore a save" ) @@ -420,7 +420,7 @@ def test_save_after_run(self): not_reloaded = ANode("just_run") self.assertIs( - NotData, + NOT_DATA, not_reloaded.outputs.y.value, msg="Should not have saved, therefore should have been nothing to load" ) diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index 90d4a550..2f47cb34 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -4,7 +4,7 @@ import unittest 
from pyiron_workflow._tests import ensure_tests_in_python_path -from pyiron_workflow.channels import NotData +from pyiron_workflow.channels import NOT_DATA from pyiron_workflow.snippets.dotdict import DotDict from pyiron_workflow.workflow import Workflow @@ -83,7 +83,7 @@ def test_with_executor(self): wf.executor = wf.create.Executor() self.assertIs( - NotData, + NOT_DATA, wf.outputs.b__y.value, msg="Sanity check that test is in right starting condition" ) @@ -151,7 +151,7 @@ def sum(a, b): ) self.assertEqual( wf.sum.outputs.sum.value, - NotData, + NOT_DATA, msg="The slow node _should_ hold up the downstream node to which it inputs" ) From f3c390900955ab350a16829889018a2c9d5b532a Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 25 Jan 2024 13:31:36 -0800 Subject: [PATCH 081/166] Update references to `NotData` in docs and strings --- notebooks/deepdive.ipynb | 8 ++++---- pyiron_workflow/channels.py | 14 +++++++------- pyiron_workflow/function.py | 6 +++--- pyiron_workflow/node_library/standard.py | 2 +- tests/unit/test_channels.py | 8 ++++---- tests/unit/test_function.py | 4 ++-- 6 files changed, 21 insertions(+), 21 deletions(-) diff --git a/notebooks/deepdive.ipynb b/notebooks/deepdive.ipynb index 488acdbc..a0c4a2f6 100644 --- a/notebooks/deepdive.ipynb +++ b/notebooks/deepdive.ipynb @@ -90,7 +90,7 @@ "id": "22ee2a49-47d1-4cec-bb25-8441ea01faf7", "metadata": {}, "source": [ - "The output is still empty (`NotData`) because we haven't `run()` the node:" + "The output is still empty (`NOT_DATA`) because we haven't `run()` the node:" ] }, { @@ -3501,7 +3501,7 @@ "source": [ "from time import perf_counter, sleep\n", "\n", - "from pyiron_workflow.channels import NotData\n", + "from pyiron_workflow.channels import NOT_DATA\n", "\n", "@Workflow.wrap_as.single_value_node()\n", "def Wait(t):\n", @@ -3536,7 +3536,7 @@ "wf.starting_nodes = [wf.a]\n", "t0 = perf_counter()\n", "out = wf()\n", - "while wf.outputs.d__user_input.value is NotData:\n", + "while 
wf.outputs.d__user_input.value is NOT_DATA:\n", " sleep(0.001)\n", "dt_serial = perf_counter() - t0\n", "\n", @@ -3575,7 +3575,7 @@ "\n", " t1 = perf_counter()\n", " out = wf()\n", - " while wf.outputs.d__user_input.value is NotData:\n", + " while wf.outputs.d__user_input.value is NOT_DATA:\n", " sleep(0.001)\n", " dt_parallel = perf_counter() - t1\n", "\n", diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index 81c6649b..8d46201c 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -260,7 +260,7 @@ class DataChannel(Channel, ABC): They store data persistently (:attr:`value`). This value may have a default (:attr:`default`) and the default-default is to be - `NotData`. + `NOT_DATA`. They may optionally have a type hint (:attr:`type_hint`). @@ -284,8 +284,8 @@ class DataChannel(Channel, ABC): in production runs. Channels can indicate whether they hold data they are happy with - (:attr:`ready: bool`), which is to say it is data (not :class:`NotData`) and that - it conforms to the type hint (if one is provided and checking is active). + (:attr:`ready: bool`), which is to say it is data (not the singleton `NOT_DATA`) + and that it conforms to the type hint (if one is provided and checking is active). Output data facilitates many (but not all) python operators by injecting a new node to perform that operation. Where the operator is not supported, we try to @@ -331,7 +331,7 @@ class DataChannel(Channel, ABC): label (str): The label for the channel. node (pyiron_workflow.node.Node): The node to which this channel belongs. default (typing.Any|None): The default value to initialize to. - (Default is the class `NotData`.) + (Default is the singleton `NOT_DATA`.) type_hint (typing.Any|None): A type hint for values. (Default is None.) strict_hints (bool): Whether to check new values, connections, and partners when this node is a value receiver. 
This can potentially be expensive, so @@ -514,9 +514,9 @@ def connection_partner_type(self): def fetch(self) -> None: """ - Sets :attr:`value` to the first value among connections that is something other than - :class:`NotData`; if no such value exists (e.g. because there are no connections or - because all the connected output channels have `NotData` as their value), + Sets :attr:`value` to the first value among connections that is something other + than `NOT_DATA`; if no such value exists (e.g. because there are no connections + or because all the connected output channels have `NOT_DATA` as their value), :attr:`value` remains unchanged. I.e., the connection with the highest priority for updating input data is the 0th connection; build graphs accordingly. diff --git a/pyiron_workflow/function.py b/pyiron_workflow/function.py index dffacce0..3d3dd58b 100644 --- a/pyiron_workflow/function.py +++ b/pyiron_workflow/function.py @@ -82,8 +82,8 @@ class Function(Node): There is no output because we haven't given our function any input, it has no defaults, and we never ran it! So outputs have the channel default value of - `NotData` -- a special non-data class (since `None` is sometimes a meaningful - value in python). + `NOT_DATA` -- a special non-data singleton (since `None` is sometimes a + meaningful value in python). We'll run into a hiccup if we try to set only one of the inputs and force the run: @@ -115,7 +115,7 @@ class Function(Node): y ready: False This is because the second input (`y`) still has no input value -- indicated in - the error message -- so we can't do the sum between `NotData` and `2`. + the error message -- so we can't do the sum between `NOT_DATA` and `2`. Once we update `y`, all the input is ready we will be allowed to proceed to a `run()` call, which succeeds and updates the output. 
diff --git a/pyiron_workflow/node_library/standard.py b/pyiron_workflow/node_library/standard.py index 9f4bb5bb..98daa916 100644 --- a/pyiron_workflow/node_library/standard.py +++ b/pyiron_workflow/node_library/standard.py @@ -30,7 +30,7 @@ def __init__(self, **kwargs): def if_(condition): if condition is NOT_DATA: raise TypeError( - f"Logic 'If' node expected data other but got NotData as input." + f"Logic 'If' node expected data other but got NOT_DATA as input." ) return bool(condition) diff --git a/tests/unit/test_channels.py b/tests/unit/test_channels.py index 35cd2267..7bb3a77f 100644 --- a/tests/unit/test_channels.py +++ b/tests/unit/test_channels.py @@ -156,7 +156,7 @@ def test_fetch(self): self.assertEqual( self.ni1.value, 1, - msg="NotData values should not be getting pulled, so no update expected" + msg="NOT_DATA values should not be getting pulled, so no update expected" ) self.no.value = 3 @@ -314,13 +314,13 @@ def test_ready(self): self.assertIs( without_default.value, NOT_DATA, - msg=f"Without a default, spec is to have a NotData value but got " + msg=f"Without a default, spec is to have a NOT_DATA value but got " f"{type(without_default.value)}" ) self.assertFalse( without_default.ready, - msg="Even without type hints, readiness should be false when the value" - "is NotData" + msg="Even without type hints, readiness should be false when the value " + "is NOT_DATA" ) self.ni1.value = 1 diff --git a/tests/unit/test_function.py b/tests/unit/test_function.py index 7ffacc99..e69d71ad 100644 --- a/tests/unit/test_function.py +++ b/tests/unit/test_function.py @@ -394,7 +394,7 @@ def all_floats(x=1.1, y=1.1, z=1.1, omega=NOT_DATA, extra_there=None) -> float: floats.run( check_readiness=False, # We force-skip the readiness check since we are explicitly _trying_ to - # have one of the inputs be `NotData` -- a value which triggers the channel + # have one of the inputs be `NOT_DATA` -- a value which triggers the channel # to be "not ready" ) @@ -417,7 +417,7 @@ 
def all_floats(x=1.1, y=1.1, z=1.1, omega=NOT_DATA, extra_there=None) -> float: self.assertEqual( ref.inputs.omega.value, None, - msg="NotData should be ignored when copying" + msg="NOT_DATA should be ignored when copying" ) self.assertEqual( ref.outputs.out.value, From 81c70894e46bd304cd33b83aeaa3a0a8ae663a7b Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 25 Jan 2024 13:49:20 -0800 Subject: [PATCH 082/166] Re-execute the deepdive To update "NotData" output --- notebooks/deepdive.ipynb | 59 ++++++++++++++++++---------------------- 1 file changed, 27 insertions(+), 32 deletions(-) diff --git a/notebooks/deepdive.ipynb b/notebooks/deepdive.ipynb index a0c4a2f6..640f81c7 100644 --- a/notebooks/deepdive.ipynb +++ b/notebooks/deepdive.ipynb @@ -103,7 +103,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "{'x+1': , 'x-1': }\n" + "{'x+1': NOT_DATA, 'x-1': NOT_DATA}\n" ] } ], @@ -524,7 +524,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", " warn(\n" ] }, @@ -983,13 +983,13 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", " warn(\n" ] }, { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAh+UlEQVR4nO3df1Bc1d3H8c+yBDam2XVIDKwJjSQ1GmTUAkOENOPUx2Cig02nTvCxSdTGjkRtTKi2SdMRyTjDaEen/gj4K9FxElOq1VZmKIZ/GsmPloaQjpHM6BhaErPIAOOCPyAGzvNHCo8rS8LdwJ7s5v2a2T84nMt+994w95Nzzj24jDFGAAAAliTYLgAAAFzYCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArEq0XcBYDA4O6sSJE5o6dapcLpftcgAAwBgYY9Tb26tLL71UCQmjj3/ERBg5ceKE0tPTbZcBAAAicOzYMc2aNWvU78dEGJk6daqk0x/G6/VargYAAIxFT0+P0tPTh+/jo4mJMDI0NeP1egkjAADEmLMtsWABKwAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMCqmNj0DBgYNGps7VZHb59mTPUoLyNF7gT+ThEAxAPCyDniJjnx6g4HVF7TokCwb7jN7/OorChTS7L8FisDAIwHwsg54CY58eoOB7Rm+0GZb7W3B/u0ZvtBVa3I5lwDQIxjzUiEhm6S3wwi0v/fJOsOByxVFj8GBo3Ka1pGBBFJw23lNS0aGAzXAwAQKwgjEeAmGR2Nrd0jwt43GUmBYJ8aW7ujVxQAYNwRRiLATTI6OnpHP8eR9AMAnJ8IIxHgJhkdM6Z6xrUfAOD8RBiJADfJ6MjLSJHf59Fozya5dHrBcF5GSjTLAgCMM8JIBLhJRoc7waWyokxJGnGuh74uK8rkUWoAiHGEkQhwk4yeJVl+Va3IVpovdJQpzefhsV4AiBMuY8x5/8hHT0+PfD6fgsGgvF6v7XKGsc9I9LC5HADEnrHevwkj54ibJAAA4Y31/s0OrOfIneBS/txptssAACBmsWYEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVRGFkcrKSmVkZMjj8SgnJ0cNDQ1n7L9jxw5dc801uuiii+T3+3X33Xerq6srooIBAEB8cRxGqqurtW7dOm3atEnNzc1atGiRli5dqra2trD99+zZo1WrVmn16tX64IMP9MYbb+if//yn7rnnnnMuHgAAxD7HYeSpp57S6tWrdc8992j+/Pn6/e9/r/T0dFVVVYXt//e//12XXXaZ1q5dq4yMDP3gBz/QvffeqwMHDpxz8QAAIPY5CiMnT55UU1OTCgsLQ9oLCwu1b9++sMcUFBTo+PHjqq2tlTFGn376qd58803dcssto75Pf3+/enp6Ql4AACA+OQojnZ2dGhgYUGpqakh7amqq2tvbwx5TUFCgHTt2qLi4WElJSUpLS9PFF1+sZ599dtT3qaiokM/nG36lp6c7KRMAAMSQiBawulyukK+NMSPahrS0tGjt2rV65JFH1NTUpLq6OrW2tqqkpGTUn79x40YFg8Hh17FjxyIpEwAAxIBEJ52nT58ut9s9YhSko6NjxGjJkIqKCi1cuFAPP/ywJOnqq6/WlClTtGjRIj322GPy+/0jjklOTlZycrKT0gAAQIxyNDKSlJSknJwc1dfXh7TX19eroKAg7DFffvmlEhJC38b
tdks6PaICAAAubI6naUpLS/Xyyy9r27ZtOnLkiNavX6+2trbhaZeNGzdq1apVw/2Lior01ltvqaqqSkePHtXevXu1du1a5eXl6dJLLx2/TwIAAGKSo2kaSSouLlZXV5c2b96sQCCgrKws1dbWavbs2ZKkQCAQsufIXXfdpd7eXj333HP65S9/qYsvvlg33HCDHn/88fH7FAAAIGa5TAzMlfT09Mjn8ykYDMrr9douBwAAjMFY79/8bRoAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFgVURiprKxURkaGPB6PcnJy1NDQcMb+/f392rRpk2bPnq3k5GTNnTtX27Zti6hgAAAQXxKdHlBdXa1169apsrJSCxcu1AsvvKClS5eqpaVF3/3ud8Mes3z5cn366afaunWrvve976mjo0OnTp065+IBAEDscxljjJMDFixYoOzsbFVVVQ23zZ8/X8uWLVNFRcWI/nV1dbr99tt19OhRpaSkRFRkT0+PfD6fgsGgvF5vRD8DAABE11jv346maU6ePKmmpiYVFhaGtBcWFmrfvn1hj3nnnXeUm5urJ554QjNnztS8efP00EMP6auvvhr1ffr7+9XT0xPyAgAA8cnRNE1nZ6cGBgaUmpoa0p6amqr29vawxxw9elR79uyRx+PR22+/rc7OTt13333q7u4edd1IRUWFysvLnZQGAMC4Ghg0amztVkdvn2ZM9SgvI0XuBJftsuKS4zUjkuRyhV4MY8yItiGDg4NyuVzasWOHfD6fJOmpp57Sbbfdpi1btmjy5Mkjjtm4caNKS0uHv+7p6VF6enokpQIA4Fjd4YDKa1oUCPYNt/l9HpUVZWpJlt9iZfHJ0TTN9OnT5Xa7R4yCdHR0jBgtGeL3+zVz5szhICKdXmNijNHx48fDHpOcnCyv1xvyAgAgGuoOB7Rm+8GQICJJ7cE+rdl+UHWHA5Yqi1+OwkhSUpJycnJUX18f0l5fX6+CgoKwxyxcuFAnTpzQ559/Ptz24YcfKiEhQbNmzYqgZAAAJsbAoFF5TYvCPdkx1FZe06KBQUfPfuAsHO8zUlpaqpdfflnbtm3TkSNHtH79erW1tamkpETS6SmWVatWDfe/4447NG3aNN19991qaWnRe++9p4cfflg/+9nPwk7RAABgS2Nr94gRkW8ykgLBPjW2dkevqAuA4zUjxcXF6urq0ubNmxUIBJSVlaXa2lrNnj1bkhQIBNTW1jbc/zvf+Y7q6+v1i1/8Qrm5uZo2bZqWL1+uxx57bPw+BQAA46Cjd/QgEkk/jI3jfUZsYJ8RAEA07P+4S//70t/P2m/nz69T/txpUagotk3IPiMAAMSzvIwU+X0ejfYAr0unn6rJy4hsE0+ERxgBAOC/3AkulRVlStKIQDL0dVlRJvuNjDPCCAAA37Aky6+qFdlK83lC2tN8HlWtyGafkQkQ0aZnAADEsyVZfi3OTGMH1ighjAAAEIY7wcUi1ShhmgYAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAAB
WEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYFWi7QIAAIAdA4NGja3d6ujt04ypHuVlpMid4Ip6HYQRAAAuQHWHAyqvaVEg2Dfc5vd5VFaUqSVZ/qjWwjQNAAAXmLrDAa3ZfjAkiEhSe7BPa7YfVN3hQFTrIYwAAHABGRg0Kq9pkQnzvaG28poWDQyG6zExCCMAAFxAGlu7R4yIfJORFAj2qbG1O2o1EUYAALiAdPSOHkQi6TceCCMAAFxAZkz1jGu/8cDTNGNwvjz6BADAucrLSJHf51F7sC/suhGXpDTf6XtdtBBGzuJ8evQJAIBz5U5wqawoU2u2H5RLCgkkQ//NLivKjOp/upmmOYPz7dEnAADGw5Isv6pWZCvNFzoVk+bzqGpFdtT/s83IyCjO9uiTS6cffVqcmcaUDQAg5izJ8mtxZtp5sQyBMDIKJ48+5c+dFr3CAAAYJ+4E13lxD2OaZhTn46NPAADEI8LIKM7HR58AAIhHhJFRDD36NNrMmUunn6qJ5qNPAADEI8LIKIYefZI0IpDYevQJAIB4RBg5g/Pt0ScAAOIRT9Ocxfn06BMAAPGIMDIG58ujTwAAxCOmaQAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGBVRGGksrJSGRkZ8ng8ysnJUUNDw5iO27t3rxITE3XttddG8rYAACAOOQ4j1dXVWrdunTZt2qTm5mYtWrRIS5cuVVtb2xmPCwaDWrVqlf7nf/4n4mIBAED8cRljjJMDFixYoOzsbFVVVQ23zZ8/X8uWLVNFRcWox91+++26/PLL5Xa79ec//1mHDh0a83v29PTI5/MpGAzK6/U6KRcAAFgy1vu3o5GRkydPqqmpSYWFhSHthYWF2rdv36jHvfLKK/r4449VVlY2pvfp7+9XT09PyAsAAMQnR2Gks7NTAwMDSk1NDWlPTU1Ve3t72GM++ugjbdiwQTt27FBiYuKY3qeiokI+n2/4lZ6e7qRMAAAQQyJawOpyuUK+NsaMaJOkgYEB3XHHHSovL9e8efPG/PM3btyoYDA4/Dp27FgkZQIAgBgwtqGK/5o+fbrcbveIUZCOjo4RoyWS1NvbqwMHDqi5uVkPPPCAJGlwcFDGGCUmJmrXrl264YYbRhyXnJys5ORkJ6UBAIAY5WhkJCkpSTk5Oaqvrw9pr6+vV0FBwYj+Xq9X77//vg4dOjT8Kikp0RVXXKFDhw5pwYIF51Y9AACIeY5GRiSptLRUK1euVG5urvLz8/Xiiy+qra1NJSUlkk5PsXzyySd67bXXlJCQoKysrJDjZ8yYIY/HM6IdAABcmByHkeLiYnV1dWnz5s0KBALKyspSbW2tZs+eLUkKBAJn3XMEAABgiON9RmxgnxEAAGLPhOwzAgAAMN4IIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArEq0XUCsGhg0amztVkdvn2ZM9SgvI0XuBJftsgAAiDmEkQjUHQ6ovKZFgWDfcJvf51FZUaaWZPktVgYAQOxhmsahusMBrdl+MCSISFJ7sE9rth9U3eGApcoAAIhNhBEHBgaNymtaZMJ8b6itvKZFA4PhegAAgHAIIw40tnaPGBH5JiMpEOxTY2t39IoCACDGEUYc6OgdPYhE0g8AABBGHJkx1TOu/QAAAGHEkbyMFPl9Ho32AK9Lp5+qyctIiWZZAAD
ENMKIA+4El8qKMiVpRCAZ+rqsKJP9RgAAcIAw4tCSLL+qVmQrzRc6FZPm86hqRTb7jAAA4BCbnkVgSZZfizPT2IEVAIBxQBiJkDvBpfy502yXAQBAzGOaBgAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFgVURiprKxURkaGPB6PcnJy1NDQMGrft956S4sXL9Yll1wir9er/Px8vfvuuxEXDAAA4ovjMFJdXa1169Zp06ZNam5u1qJFi7R06VK1tbWF7f/ee+9p8eLFqq2tVVNTk374wx+qqKhIzc3N51w8AACIfS5jjHFywIIFC5Sdna2qqqrhtvnz52vZsmWqqKgY08+46qqrVFxcrEceeWRM/Xt6euTz+RQMBuX1ep2UCwAALBnr/dvRyMjJkyfV1NSkwsLCkPbCwkLt27dvTD9jcHBQvb29SklJcfLWAAAgTiU66dzZ2amBgQGlpqaGtKempqq9vX1MP+PJJ5/UF198oeXLl4/ap7+/X/39/cNf9/T0OCkT42hg0KixtVsdvX2aMdWjvIwUuRNctssCAMQRR2FkiMsVejMyxoxoC2fnzp169NFH9Ze//EUzZswYtV9FRYXKy8sjKQ3jqO5wQOU1LQoE+4bb/D6PyooytSTLb7EyAEA8cTRNM336dLnd7hGjIB0dHSNGS76turpaq1ev1h//+EfdeOONZ+y7ceNGBYPB4dexY8eclIlxUHc4oDXbD4YEEUlqD/ZpzfaDqjscsFQZACDeOAojSUlJysnJUX19fUh7fX29CgoKRj1u586duuuuu/T666/rlltuOev7JCcny+v1hrwQPQODRuU1LQq3snmorbymRQODjtY+AwAQluNpmtLSUq1cuVK5ubnKz8/Xiy++qLa2NpWUlEg6ParxySef6LXXXpN0OoisWrVKTz/9tK677rrhUZXJkyfL5/ON40fBeGls7R4xIvJNRlIg2KfG1m7lz50WvcIAAHHJcRgpLi5WV1eXNm/erEAgoKysLNXW1mr27NmSpEAgELLnyAsvvKBTp07p/vvv1/333z/cfuedd+rVV18990+AcdfRO3oQiaQfAABn4nifERvYZyS69n/cpf996e9n7bfz59cxMgIAGNWE7DOCC0NeRor8Po9Gez7KpdNP1eRlsFcMAODcEUYwgjvBpbKiTEkaEUiGvi4rymS/EQDAuCCMIKwlWX5VrchWms8T0p7m86hqRTb7jAAAxk1Em57hwrAky6/FmWnswAoAmFCEEZyRO8HFIlUAwIRimgYAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWJdou4EI1MGjU2Nqtjt4+zZjqUV5GitwJLttlAQAQdYQRC+oOB1Re06JAsG+4ze/zqKwoU0uy/BYrAwAg+pimibK6wwGt2X4wJIhIUnuwT2u2H1Td4YClygAAsIMwEkUDg0blNS0yYb431FZe06KBwXA9AACIT4SRKGps7R4xIvJNRlIg2KfG1u7oFQUAgGWEkSjq6B09iETSDwCAeEAYiaIZUz3j2g8AgHhAGImivIwU+X0ejfYAr0unn6rJy0iJZlkAAFhFGIkid4JLZUWZkjQikAx9XVaUyX4jAIALCmEkypZk+VW1IltpvtCpmDSfR1UrstlnBABwwWHTMwuWZPm1ODONHVgBABBhxBp3gkv5c6fZLgMAAOuYpgEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRg
BAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYFWi7QIQewYGjRpbu9XR26cZUz3Ky0iRO8FluywAQIwijMCRusMBlde0KBDsG27z+zwqK8rUkiy/xcoAALGKaRqMWd3hgNZsPxgSRCSpPdinNdsPqu5wwFJlAIBYRhjBmAwMGpXXtMiE+d5QW3lNiwYGw/UAAGB0hBGMSWNr94gRkW8ykgLBPjW2dkevKABAXLhg14ywCNOZjt7Rg0gk/QAAGHJBhhEWYTo3Y6pnXPsBADDkgpumYRFmZPIyUuT3eTTa2JFLpwNdXkZKNMsCAMSBiMJIZWWlMjIy5PF4lJOTo4aGhjP23717t3JycuTxeDRnzhw9//zzERV7rliEGTl3gktlRZmSNCKQDH1dVpTJVBcAwDHHYaS6ulrr1q3Tpk2b1NzcrEWLFmnp0qVqa2sL27+1tVU333yzFi1apObmZv3mN7/R2rVr9ac//emci3eKRZjnZkmWX1UrspXmC52KSfN5VLUimykuAEBEXMYYR8MACxYsUHZ2tqqqqobb5s+fr2XLlqmiomJE/1//+td65513dOTIkeG2kpIS/etf/9L+/fvH9J49PT3y+XwKBoPyer1Oyg3xl0Of6ME/HDprv6dvv1Y/unZmxO8T71j8CwAYi7Hevx0tYD158qSampq0YcOGkPbCwkLt27cv7DH79+9XYWFhSNtNN92krVu36uuvv9akSZNGHNPf36/+/v6QDzMeWIQ5PtwJLuXPnWa7DABAnHA0TdPZ2amBgQGlpqaGtKempqq9vT3sMe3t7WH7nzp1Sp2dnWGPqaiokM/nG36lp6c7KXNULMIEAOD8E9ECVpcr9HZujBnRdrb+4dqHbNy4UcFgcPh17NixSMocgUWYAACcfxyFkenTp8vtdo8YBeno6Bgx+jEkLS0tbP/ExERNmxZ+qD85OVlerzfkNV5YhAkAwPnF0ZqRpKQk5eTkqL6+Xj/+8Y+H2+vr6/WjH/0o7DH5+fmqqakJadu1a5dyc3PDrheJhiVZfi3OTGMRJgAA5wHHO7CWlpZq5cqVys3NVX5+vl588UW1tbWppKRE0ukplk8++USvvfaapNNPzjz33HMqLS3Vz3/+c+3fv19bt27Vzp07x/eTOMQiTAAAzg+Ow0hxcbG6urq0efNmBQIBZWVlqba2VrNnz5YkBQKBkD1HMjIyVFtbq/Xr12vLli269NJL9cwzz+gnP/nJ+H0KAAAQsxzvM2LDeO0zAgAAomes9+8L7m/TAACA8wthBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWOd70zIahrVB6enosVwIAAMZq6L59ti3NYiKM9Pb2SpLS09MtVwIAAJzq7e2Vz+cb9fsxsQPr4OCgTpw4oalTp8rlGtsfs+vp6VF6erqOHTvGrq2WcA3s4vzbxzWwj2tglzFGvb29uvTSS5WQMPrKkJgYGUlISNCsWbMiOtbr9fIP0DKugV2cf/u4BvZxDew504jIEBawAgAAqwgjAADAqrgNI8nJySorK1NycrLtUi5YXAO7OP/2cQ3s4xrEhphYwAoAAOJX3I6MAACA2EAYAQAAVhFGAACAVYQRAABgVUyHkcrKSmVkZMjj8SgnJ0cNDQ1n7L97927l5OTI4/Fozpw5ev7556NUafxycg3eeustLV68WJdccom8Xq/y8/P17rvvRrHa+OP0d2DI3r17lZiYqGuvvXZiC7wAOL0G/f392rRpk2bPnq3k5GTNnTtX27Zti1K18cfp+d+xY4euueYaXXTRRfL7/br77rvV1dUVpWoxKhOj/vCHP5hJkyaZl156ybS0tJgHH3zQTJkyxfznP/8J2//o0aPmoosuMg8++KBpaWkxL730kpk0aZJ58803o1x5/HB6DR588EHz+OOPm8bGRvPhhx+ajRs3mkmTJpmDBw9GufL44PT8D/nss8/MnDlzTGFhobnmmmuiU2y
ciuQa3HrrrWbBggWmvr7etLa2mn/84x9m7969Uaw6fjg9/w0NDSYhIcE8/fTT5ujRo6ahocFcddVVZtmyZVGuHN8Ws2EkLy/PlJSUhLRdeeWVZsOGDWH7/+pXvzJXXnllSNu9995rrrvuugmrMd45vQbhZGZmmvLy8vEu7YIQ6fkvLi42v/3tb01ZWRlh5Bw5vQZ//etfjc/nM11dXdEoL+45Pf+/+93vzJw5c0LannnmGTNr1qwJqxFjE5PTNCdPnlRTU5MKCwtD2gsLC7Vv376wx+zfv39E/5tuukkHDhzQ119/PWG1xqtIrsG3DQ4Oqre3VykpKRNRYlyL9Py/8sor+vjjj1VWVjbRJca9SK7BO++8o9zcXD3xxBOaOXOm5s2bp4ceekhfffVVNEqOK5Gc/4KCAh0/fly1tbUyxujTTz/Vm2++qVtuuSUaJeMMYuIP5X1bZ2enBgYGlJqaGtKempqq9vb2sMe0t7eH7X/q1Cl1dnbK7/dPWL3xKJJr8G1PPvmkvvjiCy1fvnwiSoxrkZz/jz76SBs2bFBDQ4MSE2PyV/+8Esk1OHr0qPbs2SOPx6O3335bnZ2duu+++9Td3c26EYciOf8FBQXasWOHiouL1dfXp1OnTunWW2/Vs88+G42ScQYxOTIyxOVyhXxtjBnRdrb+4doxdk6vwZCdO3fq0UcfVXV1tWbMmDFR5cW9sZ7/gYEB3XHHHSovL9e8efOiVd4FwcnvwODgoFwul3bs2KG8vDzdfPPNeuqpp/Tqq68yOhIhJ+e/paVFa9eu1SOPPKKmpibV1dWptbVVJSUl0SgVZxCT/z2aPn263G73iPTb0dExIiUPSUtLC9s/MTFR06ZNm7Ba41Uk12BIdXW1Vq9erTfeeEM33njjRJYZt5ye/97eXh04cEDNzc164IEHJJ2+MRpjlJiYqF27dumGG26ISu3xIpLfAb/fr5kzZ4b8SfX58+fLGKPjx4/r8ssvn9Ca40kk57+iokILFy7Uww8/LEm6+uqrNWXKFC1atEiPPfYYI+QWxeTISFJSknJyclRfXx/SXl9fr4KCgrDH5Ofnj+i/a9cu5ebmatKkSRNWa7yK5BpIp0dE7rrrLr3++uvM054Dp+ff6/Xq/fff16FDh4ZfJSUluuKKK3To0CEtWLAgWqXHjUh+BxYuXKgTJ07o888/H2778MMPlZCQoFmzZk1ovfEmkvP/5ZdfKiEh9Lbndrsl/f9IOSyxtXL2XA090rV161bT0tJi1q1bZ6ZMmWL+/e9/G2OM2bBhg1m5cuVw/6FHe9evX29aWlrM1q1bebT3HDm9Bq+//rpJTEw0W7ZsMYFAYPj12Wef2foIMc3p+f82nqY5d06vQW9vr5k1a5a57bbbzAcffGB2795tLr/8cnPPPffY+ggxzen5f+WVV0xiYqKprKw0H3/8sdmzZ4/Jzc01eXl5tj4C/itmw4gxxmzZssXMnj3bJCUlmezsbLN79+7h7915553m+uuvD+n/t7/9zXz/+983SUlJ5rLLLjNVVVVRrjj+OLkG119/vZE04nXnnXdGv/A44fR34JsII+PD6TU4cuSIufHGG83kyZPNrFmzTGlpqfnyyy+jXHX8cHr+n3nmGZOZmWkmT55s/H6/+elPf2qOHz8e5arxbS5jGJsCAAD2xOSaEQAAED8IIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKz6PygrOcUH0mZqAAAAAElFTkSuQmCC", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAkVklEQVR4nO3dfUzd5f3/8dfhUDjalWNoBU5v1h0bb4pkOiB04BozZ7HV4K+JixhXq06XUbfVlulW1kWkMSHuxkw3i1NbjWl1RKf7ScKwZL+s0puNlcIi0kxjmbT2MAJkB7yB2nOu3x/9wtfjOVTOKZzrcM7zkZw/znWuD+d9ctmcl9d1ruvjMMYYAQAAWJJmuwAAAJDaCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArEq3XcB0BINBnTp1SgsWLJDD4bBdDgAAmAZjjEZHR7V48WKlpU09/zEnwsipU6e0bNky22UAAIAYnDhxQkuXLp3y9ajDyJtvvqlf/vKX6ujokM/n02uvvab169ef85r9+/erurpab7/9thYvXqyf/OQnqqqqmvZ7LliwQNLZD5OVlRVtyQAAwIKRkREtW7Zs8nt8KlGHkY8++khXXXWV7r77bt1yyy1f2L+3t1c33nijvve972nPnj06ePCg7rvvPl188cXTul7S5NJMVlYWYQQAgDnmi35iEXUYWbdundatWzft/k899ZS+/OUv6ze/+Y0kaeXKlTpy5Ih+9atfTTuMAACA5DXru2kOHz6s8vLykLYbbrhBR44c0aeffhrxmvHxcY2MjIQ8AABAcpr1MNLf36/c3NyQttzcXJ05c0aDg4MRr6mvr5fb7Z588ONVAACSV1zOGfn8WpExJmL7hJqaGvn9/snHiRMnZr1GAABgx6xv7c3Ly1N/f39I28DAgNLT07Vw4cKI12RmZiozM3O2SwMAAAlg1mdGSktL1draGtK2b98+FRcXa968ebP99gAAIMFFHUY+/PBDdXV1qaurS9LZrbtdXV3q6+uTdHaJZePGjZP9q6qq9P7776u6ulrHjh3T7t27tWvXLj3wwAMz8wkAAMCcFvUyzZEjR/TNb35z8nl1dbUk6c4779Tzzz8vn883GUwkyev1qrm5WVu3btWTTz6pxYsX64knnmBbLwAAkCQ5zMSvSRPYyMiI3G63/H4/h54BQIoIBI3ae4c1MDqmnAUulXiz5Uzj/mRzyXS/v+fEvWkAAKmlpdunuqYe+fxjk20et0u1FflaW+CxWBlmQ1y29gIAMF0t3T5t2nM0JIhIUr9/TJv2HFVLt89SZZgthBEAQMIIBI3qmnoU6fcDE211TT0KBBP+FwaIAmEEAJAw2nuHw2ZEPstI8vnH1N47HL+iMOsIIwCAhDEwOnUQiaUf5gbCCAAgYeQscM1oP8wNhBEAQMIo8WbL43Zpqg28Dp3dVVPizY5nWZhlhBEAQMJwpjlUW5EvSWGBZOJ5bUU+540kGcIIACChrC3wqGFDofLcoUsxeW6XGjYUcs5IEuLQMwBAwllb4NGa/DxOYE0RhBEAQEJypjlUumKh7TIQByzTAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALAq3XYBtgSCRu29wxoYHVPOApdKvNlypjlslwUAQMpJyTDS0u1TXVOPfP6xyTaP26XainytLfBYrAwAgNSTcss0Ld0+bdpzNCSISFK/f0yb9hxVS7fPUmUAAKSmlAojgaBRXVOPTITXJtrqmnoUCEbqAQAAZkNKhZH23uGwGZHPMpJ8/jG19w7
HrygAAFJcSoWRgdGpg0gs/QAAwPlLqTCSs8A1o/0AAMD5S6kwUuLNlsft0lQbeB06u6umxJsdz7IAAEhpKRVGnGkO1VbkS1JYIJl4XluRz3kjAADEUUqFEUlaW+BRw4ZC5blDl2Ly3C41bCjknBEAAOIsJQ89W1vg0Zr8PE5gBQAgAaRkGJHOLtmUrlhouwwAAFJeyi3TAACAxEIYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGBVTGFk586d8nq9crlcKioqUltb2zn77927V1dddZUuvPBCeTwe3X333RoaGoqpYAAAkFyiDiONjY3asmWLtm/frs7OTq1evVrr1q1TX19fxP4HDhzQxo0bdc899+jtt9/Wyy+/rH/84x+69957z7t4AAAw90UdRh577DHdc889uvfee7Vy5Ur95je/0bJly9TQ0BCx/9/+9jd95Stf0ebNm+X1evWNb3xD3//+93XkyJHzLh4AAMx9UYWR06dPq6OjQ+Xl5SHt5eXlOnToUMRrysrKdPLkSTU3N8sYo//85z965ZVXdNNNN035PuPj4xoZGQl5AACA5BRVGBkcHFQgEFBubm5Ie25urvr7+yNeU1ZWpr1796qyslIZGRnKy8vTRRddpN/+9rdTvk99fb3cbvfkY9myZdGUCQAA5pCYfsDqcDhCnhtjwtom9PT0aPPmzXrooYfU0dGhlpYW9fb2qqqqasq/X1NTI7/fP/k4ceJELGUCAIA5ID2azosWLZLT6QybBRkYGAibLZlQX1+va665Rg8++KAk6atf/armz5+v1atX65FHHpHH4wm7JjMzU5mZmdGUBgAA5qioZkYyMjJUVFSk1tbWkPbW1laVlZVFvObjjz9WWlro2zidTklnZ1QAAEBqi3qZprq6Ws8++6x2796tY8eOaevWrerr65tcdqmpqdHGjRsn+1dUVOjVV19VQ0ODjh8/roMHD2rz5s0qKSnR4sWLZ+6TAACAOSmqZRpJqqys1NDQkHbs2CGfz6eCggI1Nzdr+fLlkiSfzxdy5shdd92l0dFR/e53v9OPf/xjXXTRRbruuuv06KOPztynAAAAc5bDzIG1kpGREbndbvn9fmVlZdkuBwAATMN0v7+5Nw0AALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArIr6nBHETyBo1N47rIHRMeUscKnEmy1nWuR7AAEAMFcRRhJUS7dPdU098vnHJts8bpdqK/K1tiD8fj4AAMxVLNMkoJZunzbtORoSRCSp3z+mTXuOqqXbZ6kyAABmHmEkwQSCRnVNPYp0LO5EW11TjwLBhD84FwCAaSGMJJj23uGwGZHPMpJ8/jG19w7HrygAAGYRYSTBDIxOHURi6QcAQKIjjCSYnAWuGe0HAECiI4wkmBJvtjxul6bawOvQ2V01Jd7seJYFAMCsIYwkGGeaQ7UV+ZIUFkgmntdW5HPeCAAgaRBGEtDaAo8aNhQqzx26FJPndqlhQyHnjAAAkgqHniWotQUercnP4wRWAEDSI4wkMGeaQ6UrFtouAwCAWcUyDQAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwKp02wUAAJCKAkGj9t5hDYyOKWeBSyXebDnTHLbLsoIwAgBAnLV0+1TX1COff2yyzeN2qbYiX2sLPBYrs4NlGgAA4qil26dNe46GBBFJ6vePadOeo2rp9lmqzB7CCAAAcRIIGtU19chEeG2ira6pR4FgpB7JizA
CAECctPcOh82IfJaR5POPqb13OH5FJQDCCAAAcTIwOnUQiaVfsiCMAAAQJzkLXDPaL1kQRgAAiJMSb7Y8bpem2sDr0NldNSXe7HiWZV1MYWTnzp3yer1yuVwqKipSW1vbOfuPj49r+/btWr58uTIzM7VixQrt3r07poIBAJirnGkO1VbkS1JYIJl4XluRn3LnjUQdRhobG7VlyxZt375dnZ2dWr16tdatW6e+vr4pr7n11lv1l7/8Rbt27dK//vUvvfTSS7riiivOq3AAAOaitQUeNWwoVJ47dCkmz+1Sw4bClDxnxGGMiWr/0KpVq1RYWKiGhobJtpUrV2r9+vWqr68P69/S0qLbbrtNx48fV3Z2bNNOIyMjcrvd8vv9ysrKiulvAACQSFLhBNbpfn9HNTNy+vRpdXR0qLy8PKS9vLxchw4dinjN66+/ruLiYv3iF7/QkiVLdNlll+mBBx7QJ598MuX7jI+Pa2RkJOQBAEAycaY5VLpiof7P1UtUumJh0gWRaER1HPzg4KACgYByc3ND2nNzc9Xf3x/xmuPHj+vAgQNyuVx67bXXNDg4qPvuu0/Dw8NT/m6kvr5edXV10ZQGAADmqJh+wOpwhKY3Y0xY24RgMCiHw6G9e/eqpKREN954ox577DE9//zzU86O1NTUyO/3Tz5OnDgRS5kAAGAOiGpmZNGiRXI6nWGzIAMDA2GzJRM8Ho+WLFkit9s92bZy5UoZY3Ty5EldeumlYddkZmYqMzMzmtIAAMAcFdXMSEZGhoqKitTa2hrS3traqrKysojXXHPNNTp16pQ+/PDDybZ33nlHaWlpWrp0aQwlAwCAZBL1Mk11dbWeffZZ7d69W8eOHdPWrVvV19enqqoqSWeXWDZu3DjZ//bbb9fChQt19913q6enR2+++aYefPBBffe739UFF1wwc58EAADMSVEt00hSZWWlhoaGtGPHDvl8PhUUFKi5uVnLly+XJPl8vpAzR770pS+ptbVVP/rRj1RcXKyFCxfq1ltv1SOPPDJznwIpIxW2wgFAqon6nBEbOGcEktTS7VNdU0/IHS89bpdqK/JT8pAgAEh0s3LOCGBLS7dPm/YcDbv1dr9/TJv2HFVLt89SZQCA80UYQcILBI3qmnoUaQpvoq2uqUeBYMJP8gEAIiCMIOG19w6HzYh8lpHk84+pvXc4fkUBAGYMYQQJb2B06iASSz8AQGIhjCDh5SxwfXGnKPoBABILYQQJr8SbLY/bpak28Dp0dldNiTe2u0IDAOwijCDhOdMcqq3Il6SwQDLxvLYin/NGAGCOIoxgTlhb4FHDhkLluUOXYvLcLjVsKOScEQCYw6I+gRWwZW2BR2vy8ziBFQCSDGEEc4ozzaHSFQttlwEAmEEs0wAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwinNGACAFBIKGAwORsAgjAJDkWrp9qmvqkc8/NtnmcbtUW5HPrRSQEFimAYAk1tLt06Y9R0OCiCT1+8e0ac9RtXT7LFUG/C/CCAAkqUDQqK6pRybCaxNtdU09CgQj9QDihzACAEmqvXc4bEbks4wkn39M7b3D8SsKiIAwAgBJamB06iASSz9gthBGACBJ5SxwzWg/YLYQRgAgSZV4s+VxuzTVBl6Hzu6qKfFmx7MsIAxhBACSlDPNodqKfEkKCyQTz2sr8jlvBNYRRgAgia0t8KhhQ6Hy3KFLMXlulxo2FHLOCBICh54BQJJbW+DRmvw8TmBFwiKMAEAKcKY5VLpioe0ygIhYpgEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYFW67QI
AAIAdgaBRe++wBkbHlLPApRJvtpxpjrjXQRgBACAFtXT7VNfUI59/bLLN43aptiJfaws8ca2FZRoAAFJMS7dPm/YcDQkiktTvH9OmPUfV0u2Laz2EEQAAUkggaFTX1CMT4bWJtrqmHgWCkXrMDsIIAAAppL13OGxG5LOMJJ9/TO29w3GrKaYwsnPnTnm9XrlcLhUVFamtrW1a1x08eFDp6em6+uqrY3lbAABwngZGpw4isfSbCVGHkcbGRm3ZskXbt29XZ2enVq9erXXr1qmvr++c1/n9fm3cuFHf+ta3Yi4WAIBYBYJGh98b0v/t+kCH3xuK6zJEIslZ4JrRfjPBYYyJajRWrVqlwsJCNTQ0TLatXLlS69evV319/ZTX3Xbbbbr00kvldDr1pz/9SV1dXdN+z5GREbndbvn9fmVlZUVTLgAACbVzxLZA0Ogbj/4/9fvHIv5uxCEpz+3SgZ9ed97bfKf7/R3VzMjp06fV0dGh8vLykPby8nIdOnRoyuuee+45vffee6qtrZ3W+4yPj2tkZCTkAQBALBJt54htzjSHaivyJZ0NHp818by2Ij+u541EFUYGBwcVCASUm5sb0p6bm6v+/v6I17z77rvatm2b9u7dq/T06R1rUl9fL7fbPflYtmxZNGUCACApMXeOJIK1BR41bChUnjt0KSbP7VLDhsK4zxbFdOiZwxGalowxYW2SFAgEdPvtt6uurk6XXXbZtP9+TU2NqqurJ5+PjIwQSAAAUYtm50jpioXxKywBrC3waE1+3tw7gXXRokVyOp1hsyADAwNhsyWSNDo6qiNHjqizs1M//OEPJUnBYFDGGKWnp2vfvn267rrrwq7LzMxUZmZmNKUBABAmEXeOJBJnmiMhQlhUyzQZGRkqKipSa2trSHtra6vKysrC+mdlZemtt95SV1fX5KOqqkqXX365urq6tGrVqvOrHgCAc0jEnSMIF/UyTXV1te644w4VFxertLRUTz/9tPr6+lRVVSXp7BLLBx98oBdeeEFpaWkqKCgIuT4nJ0culyusHQCAmVbizZbH7frCnSMl3ux4l4bPiDqMVFZWamhoSDt27JDP51NBQYGam5u1fPlySZLP5/vCM0cAAIiHiZ0jm/YclUMKCSS2do4gXNTnjNjAOSMAgPPBOSN2TPf7O6bdNAAAzCWJtHME4QgjAICUkCg7RxCOu/YCAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKvSbRcAILEFgkbtvcMaGB1TzgKXSrzZcqY5bJcFIIkQRgBMqaXbp7qmHvn8Y5NtHrdLtRX5WlvgsVgZgGTCMg2AiFq6fdq052hIEJGkfv+YNu05qpZun6XKACQbwgiAMIGgUV1Tj0yE1yba6pp6FAhG6gEA0SGMAAjT3jscNiPyWUaSzz+m9t7h+BUFIGkRRgCEGRidOojE0g8AzoUwAiBMzgLXjPYDgHMhjAAIU+LNlsft0lQbeB06u6umxJsdz7IAJCnCCIAwzjSHaivyJSkskEw8r63I57wRADOCMAIgorUFHjVsKFSeO3QpJs/tUsOGQs4ZATBjOPQMwJTWFni0Jj+PE1gBzCrCCIBzcqY5VLpioe0yACQxlmkAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWBVTGNm5c6e8Xq9cLpeKiorU1tY2Zd9XX31Va9as0cUXX6ysrCyVlpbqjTfeiLl
gAACQXKIOI42NjdqyZYu2b9+uzs5OrV69WuvWrVNfX1/E/m+++abWrFmj5uZmdXR06Jvf/KYqKirU2dl53sUDAIC5z2GMMdFcsGrVKhUWFqqhoWGybeXKlVq/fr3q6+un9TeuvPJKVVZW6qGHHppW/5GREbndbvn9fmVlZUVTLgAAsGS6399RzYycPn1aHR0dKi8vD2kvLy/XoUOHpvU3gsGgRkdHlZ2dPWWf8fFxjYyMhDwAAEByiiqMDA4OKhAIKDc3N6Q9NzdX/f390/obv/71r/XRRx/p1ltvnbJPfX293G735GPZsmXRlAkAAOaQmH7A6nA4Qp4bY8LaInnppZf08MMPq7GxUTk5OVP2q6mpkd/vn3ycOHEiljIBAMAckB5N50WLFsnpdIbNggwMDITNlnxeY2Oj7rnnHr388su6/vrrz9k3MzNTmZmZ0ZQGAADmqKhmRjIyMlRUVKTW1taQ9tbWVpWVlU153UsvvaS77rpLL774om666abYKgUAAEkpqpkRSaqurtYdd9yh4uJilZaW6umnn1ZfX5+qqqoknV1i+eCDD/TCCy9IOhtENm7cqMcff1xf//rXJ2dVLrjgArnd7hn8KAAAYC6KOoxUVlZqaGhIO3bskM/nU0FBgZqbm7V8+XJJks/nCzlz5Pe//73OnDmjH/zgB/rBD34w2X7nnXfq+eefP/9PAAAA5rSozxmxgXNGAACYe2blnBEAAICZRhgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYFW67QIAINUFgkbtvcMaGB1TzgKXSrzZcqY5bJcFxA1hBAAsaun2qa6pRz7/2GSbx+1SbUW+1hZ4LFYGxA/LNABgSUu3T5v2HA0JIpLU7x/Tpj1H1dLts1QZEF+EEQCwIBA0qmvqkYnw2kRbXVOPAsFIPYDkQhgBAAvae4fDZkQ+y0jy+cfU3jscv6IASwgjAGDBwOjUQSSWfsBcRhgBAAtyFrhmtB8wlxFGAMCCEm+2PG6XptrA69DZXTUl3ux4lgVYQRgBAAucaQ7VVuRLUlggmXheW5HPeSNICYQRALBkbYFHDRsKlecOXYrJc7vUsKGQc0aQMjj0DAAsWlvg0Zr8PE5gRUojjACAZc40h0pXLLRdBmANyzQAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsCqmMLJz5055vV65XC4VFRWpra3tnP3379+voqIiuVwuXXLJJXrqqadiKhYAACSfqMNIY2OjtmzZou3bt6uzs1OrV6/WunXr1NfXF7F/b2+vbrzxRq1evVqdnZ362c9+ps2bN+uPf/zjeRcPAADmPocxxkRzwapVq1RYWKiGhobJtpUrV2r9+vWqr68P6//Tn/5Ur7/+uo4dOzbZVlVVpX/+8586fPjwtN5zZGREbrdbfr9fWVlZ0ZQLAAAsme73d1QnsJ4+fVodHR3atm1bSHt5ebkOHToU8ZrDhw+rvLw8pO2GG27Qrl279Omnn2revHlh14yPj2t8fHzyud/vl3T2QwEAgLlh4nv7i+Y9ogojg4ODCgQCys3NDWnPzc1Vf39/xGv6+/sj9j9z5owGBwfl8YTfCKq+vl51dXVh7cuWLYumXAAAkABGR0fldrunfD2me9M4HKE3cDLGhLV9Uf9I7RNqampUXV09+TwYDOr999/X1VdfrRMnTrBUkyBGRka0bNkyxiS
BMCaJhfFIPIxJfBljNDo6qsWLF5+zX1RhZNGiRXI6nWGzIAMDA2GzHxPy8vIi9k9PT9fChZFvDJWZmanMzMyQtrS0s7+1zcrK4j+gBMOYJB7GJLEwHomHMYmfc82ITIhqN01GRoaKiorU2toa0t7a2qqysrKI15SWlob137dvn4qLiyP+XgQAAKSWqLf2VldX69lnn9Xu3bt17Ngxbd26VX19faqqqpJ0doll48aNk/2rqqr0/vvvq7q6WseOHdPu3bu1a9cuPfDAAzP3KQAAwJwV9W9GKisrNTQ0pB07dsjn86mgoEDNzc1avny5JMnn84WcOeL1etXc3KytW7fqySef1OLFi/XEE0/olltuiep9MzMzVVtbG7Z8A3sYk8TDmCQWxiPxMCaJKepzRgAAAGYS96YBAABWEUYAAIBVhBEAAGAVYQQAAFiVUGFk586d8nq9crlcKioqUltb2zn779+/X0VFRXK5XLrkkkv01FNPxanS1BHNmLz66qtas2aNLr74YmVlZam0tFRvvPFGHKtNftH+G5lw8OBBpaen6+qrr57dAlNQtGMyPj6u7du3a/ny5crMzNSKFSu0e/fuOFWbGqIdk7179+qqq67ShRdeKI/Ho7vvvltDQ0NxqhaSJJMg/vCHP5h58+aZZ555xvT09Jj777/fzJ8/37z//vsR+x8/ftxceOGF5v777zc9PT3mmWeeMfPmzTOvvPJKnCtPXtGOyf33328effRR097ebt555x1TU1Nj5s2bZ44ePRrnypNTtOMx4b///a+55JJLTHl5ubnqqqviU2yKiGVMbr75ZrNq1SrT2tpqent7zd///ndz8ODBOFad3KIdk7a2NpOWlmYef/xxc/z4cdPW1mauvPJKs379+jhXntoSJoyUlJSYqqqqkLYrrrjCbNu2LWL/n/zkJ+aKK64Iafv+979vvv71r89ajakm2jGJJD8/39TV1c10aSkp1vGorKw0P//5z01tbS1hZIZFOyZ//vOfjdvtNkNDQ/EoLyVFOya//OUvzSWXXBLS9sQTT5ilS5fOWo0IlxDLNKdPn1ZHR4fKy8tD2svLy3Xo0KGI1xw+fDis/w033KAjR47o008/nbVaU0UsY/J5wWBQo6Ojys7Ono0SU0qs4/Hcc8/pvffeU21t7WyXmHJiGZPXX39dxcXF+sUvfqElS5bosssu0wMPPKBPPvkkHiUnvVjGpKysTCdPnlRzc7OMMfrPf/6jV155RTfddFM8Ssb/iOmuvTNtcHBQgUAg7GZ7ubm5YTfZm9Df3x+x/5kzZzQ4OCiPxzNr9aaCWMbk837961/ro48+0q233jobJaaUWMbj3Xff1bZt29TW1qb09IT4p55UYhmT48eP68CBA3K5XHrttdc0ODio++67T8PDw/xuZAbEMiZlZWXau3evKisrNTY2pjNnzujmm2/Wb3/723iUjP+REDMjExwOR8hzY0xY2xf1j9SO2EU7JhNeeuklPfzww2psbFROTs5slZdypjsegUBAt99+u+rq6nTZZZfFq7yUFM2/kWAwKIfDob1796qkpEQ33nijHnvsMT3//PPMjsygaMakp6dHmzdv1kMPPaSOjg61tLSot7d38n5riI+E+N+lRYsWyel0hiXXgYGBsIQ7IS8vL2L/9PR0LVy4cNZqTRWxjMmExsZG3XPPPXr55Zd1/fXXz2aZKSPa8RgdHdWRI0fU2dmpH/7wh5LOfhEaY5Senq59+/bpuuuui0vtySqWfyMej0dLliwJuaX6ypUrZYzRyZMndemll85qzckuljGpr6/XNddcowcffFCS9NWvflXz58/X6tWr9cgjjzDLHicJMTOSkZGhoqIitba2hrS3traqrKws4jWlpaVh/fft26fi4mLNmzdv1mpNFbGMiXR2RuSuu+7Siy++yJrrDIp2PLKysvTWW2+pq6tr8lFVVaXLL79cXV1dWrVqVbxKT1qx/Bu55pprdOrUKX344YeTbe+8847
S0tK0dOnSWa03FcQyJh9//LHS0kK/Cp1Op6T/nW1HHNj65eznTWzH2rVrl+np6TFbtmwx8+fPN//+97+NMcZs27bN3HHHHZP9J7b2bt261fT09Jhdu3axtXeGRTsmL774oklPTzdPPvmk8fl8k4///ve/tj5CUol2PD6P3TQzL9oxGR0dNUuXLjXf/va3zdtvv232799vLr30UnPvvffa+ghJJ9oxee6550x6errZuXOnee+998yBAwdMcXGxKSkpsfURUlLChBFjjHnyySfN8uXLTUZGhiksLDT79++ffO3OO+801157bUj/v/71r+ZrX/uaycjIMF/5yldMQ0NDnCtOftGMybXXXmskhT3uvPPO+BeepKL9N/JZhJHZEe2YHDt2zFx//fXmggsuMEuXLjXV1dXm448/jnPVyS3aMXniiSdMfn6+ueCCC4zH4zHf+c53zMmTJ+NcdWpzGMM8FAAAsCchfjMCAABSF2EEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVf8fBF+wjk5nx9AAAAAASUVORK5CYII=", "text/plain": [ "
" ] @@ -1581,7 +1581,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 41, @@ -1618,7 +1618,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "5bd9e89a0aa74920939a5c6d01de2db7", + "model_id": "4fef19c0ced14384945efb5eaa3a64a5", "version_major": 2, "version_minor": 0 }, @@ -1645,7 +1645,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 42, @@ -1905,7 +1905,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 43, @@ -1963,7 +1963,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", " warn(\n" ] }, @@ -3094,7 +3094,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 49, @@ -3159,7 +3159,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel ran was not connected to accumulate_and_run, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel ran was not connected to accumulate_and_run, andthus could not disconnect from it.\n", " warn(\n", "/Users/huber/work/pyiron/pyiron_atomistics/pyiron_atomistics/lammps/base.py:294: UserWarning: No potential set via job.potential - use default potential, 1997--Liu-X-Y--Al-Mg--LAMMPS--ipr1\n", " warnings.warn(\n" @@ -3216,15 +3216,15 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel job was not connected to job, andthus could not disconnect from it.\n", + 
"/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel job was not connected to job, andthus could not disconnect from it.\n", " warn(\n", - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel accumulate_and_run was not connected to ran, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel accumulate_and_run was not connected to ran, andthus could not disconnect from it.\n", " warn(\n", - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel element was not connected to user_input, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel element was not connected to user_input, andthus could not disconnect from it.\n", " warn(\n", - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel structure was not connected to obj, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel structure was not connected to obj, andthus could not disconnect from it.\n", " warn(\n", - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel energy was not connected to obj, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel energy was not connected to obj, andthus could not disconnect from it.\n", " warn(\n" ] } @@ -3254,7 +3254,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel ran was not connected to accumulate_and_run, andthus could not disconnect from it.\n", + 
"/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel ran was not connected to accumulate_and_run, andthus could not disconnect from it.\n", " warn(\n", "/Users/huber/work/pyiron/pyiron_atomistics/pyiron_atomistics/lammps/base.py:294: UserWarning: No potential set via job.potential - use default potential, 1995--Angelo-J-E--Ni-Al-H--LAMMPS--ipr1\n", " warnings.warn(\n" @@ -3300,7 +3300,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel ran was not connected to accumulate_and_run, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel ran was not connected to accumulate_and_run, andthus could not disconnect from it.\n", " warn(\n", "/Users/huber/work/pyiron/pyiron_atomistics/pyiron_atomistics/lammps/base.py:294: UserWarning: No potential set via job.potential - use default potential, 1995--Angelo-J-E--Ni-Al-H--LAMMPS--ipr1\n", " warnings.warn(\n" @@ -3377,7 +3377,7 @@ "output_type": "stream", "text": [ "None 1\n", - " \n" + " NOT_DATA\n" ] } ], @@ -3459,7 +3459,7 @@ "output_type": "stream", "text": [ "None 1\n", - " \n", + " NOT_DATA\n", "Finally 5\n", "b (Add) output single-value: 6\n" ] @@ -3521,7 +3521,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "6.015147686994169\n" + "6.011735447998944\n" ] } ], @@ -3553,7 +3553,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "2.5117498940089718\n" + "2.7731033529998967\n" ] } ], @@ -3799,9 +3799,9 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel run was not connected to true, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel run was not connected to true, 
andthus could not disconnect from it.\n", " warn(\n", - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", " warn(\n" ] } @@ -3882,14 +3882,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "0.696 > 0.2\n", - "0.855 > 0.2\n", - "0.355 > 0.2\n", - "0.690 > 0.2\n", - "0.439 > 0.2\n", - "0.527 > 0.2\n", - "0.172 <= 0.2\n", - "Finally 0.172\n" + "0.785 > 0.2\n", + "0.073 <= 0.2\n", + "Finally 0.073\n" ] } ], From 2bbc61d760af9644d8341186d038fce3ccbb6025 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 25 Jan 2024 13:55:06 -0800 Subject: [PATCH 083/166] Parent decorated nodes to the module their function came from Instead of `abc` --- pyiron_workflow/function.py | 1 + pyiron_workflow/macro.py | 1 + 2 files changed, 2 insertions(+) diff --git a/pyiron_workflow/function.py b/pyiron_workflow/function.py index 3d3dd58b..eb2a1914 100644 --- a/pyiron_workflow/function.py +++ b/pyiron_workflow/function.py @@ -764,6 +764,7 @@ def as_node(node_function: callable): ), "node_function": staticmethod(node_function), "_type_hints": get_type_hints(node_function), + "__module__": node_function.__module__, }, ) diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index c83a605d..e4f143b8 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -625,6 +625,7 @@ def as_node(graph_creator: callable[[Macro, ...], Optional[tuple[HasChannel]]]): **node_class_kwargs, ), "graph_creator": staticmethod(graph_creator), + "__module__": graph_creator.__module__, }, ) From ced66f7c1e595a87e2832bad9d412a240af608d2 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 25 Jan 2024 16:48:35 -0800 Subject: [PATCH 084/166] Use h5io as the default storage routine --- 
pyiron_workflow/composite.py | 13 +++++ pyiron_workflow/function.py | 3 +- pyiron_workflow/macro.py | 1 + pyiron_workflow/node.py | 95 +++++++++++++++++++++++++++++------- pyiron_workflow/workflow.py | 9 ++-- tests/unit/test_node.py | 3 +- tests/unit/test_workflow.py | 15 +++--- 7 files changed, 108 insertions(+), 31 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 3a66ad1c..27313bc4 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -639,12 +639,25 @@ def __getstate__(self): state = super().__getstate__() state["_child_data_connections"] = self._child_data_connections state["_child_signal_connections"] = self._child_signal_connections + # Bidict implements a custom reconstructor that is not playing well with h5io + state["_inputs_map"] = ( + None if self._inputs_map is None else dict(self._inputs_map) + ) + state["_outputs_map"] = ( + None if self._outputs_map is None else dict(self._outputs_map) + ) return state def __setstate__(self, state): # Purge child connection info from the state child_data_connections = state.pop("_child_data_connections") child_signal_connections = state.pop("_child_signal_connections") + state["_inputs_map"] = ( + None if state["_inputs_map"] is None else bidict(state["_inputs_map"]) + ) + state["_outputs_map"] = ( + None if state["_outputs_map"] is None else bidict(state["_outputs_map"]) + ) super().__setstate__(state) diff --git a/pyiron_workflow/function.py b/pyiron_workflow/function.py index eb2a1914..2d3e525c 100644 --- a/pyiron_workflow/function.py +++ b/pyiron_workflow/function.py @@ -3,7 +3,7 @@ import inspect import warnings from functools import partialmethod -from typing import Any, get_args, get_type_hints, Optional, TYPE_CHECKING +from typing import Any, get_args, get_type_hints, Literal, Optional, TYPE_CHECKING from pyiron_workflow.channels import InputData, OutputData, NOT_DATA from pyiron_workflow.has_channel import HasChannel @@ -333,6 +333,7 @@ def 
__init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, + storage_mode: Literal["h5io", "tinybase"] = "h5io", save_after_run: bool = False, output_labels: Optional[str | list[str] | tuple[str]] = None, **kwargs, diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index e4f143b8..f9923e96 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -270,6 +270,7 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, + storage_mode: Literal["h5io", "tinybase"] = "h5io", save_after_run: bool = False, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 521ea7a7..aec97c73 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -13,6 +13,8 @@ import os from typing import Any, Literal, Optional, TYPE_CHECKING +import h5io + from pyiron_workflow.channels import ( InputSignal, AccumulatingInputSignal, @@ -267,7 +269,8 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): package_identifier = None _semantic_delimiter = "/" - _STORAGE_FILE_NAME = "project.h5" + _TINYBASE_STORAGE_FILE_NAME = "project.h5" + _H5IO_STORAGE_FILE_NAME = "h5io.h5" # This isn't nice, just a technical necessity in the current implementation # Eventually, of course, this needs to be _at least_ file-format independent @@ -278,6 +281,7 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, + storage_mode: Literal["h5io", "tinybase"] = "h5io", save_after_run: bool = False, **kwargs, ): @@ -314,6 +318,7 @@ def __post__( *args, overwrite_save: bool = False, run_after_init: bool = False, + storage_mode: Literal["h5io", "tinybase"] = "h5io", **kwargs, ): if overwrite_save: @@ -334,7 +339,7 @@ def __post__( f"load it...(To delete the saved file instead, use " f"`overwrite_save=True`)" ) - self.load() + 
self.load(mode=storage_mode) elif run_after_init: try: self.run() @@ -1165,21 +1170,37 @@ def from_storage(self, storage): usual. """ - def save(self): + def save(self, mode: Literal["h5io", "tinybase"] = "h5io"): """ Writes the node to file (using HDF5) such that a new node instance of the same type can :meth:`load()` the data to return to the same state as the save point, i.e. the same data IO channel values, the same flags, etc. """ if self.parent is None: - self.to_storage(self.storage) + self._save(mode=mode) else: root = self.graph_root - root.to_storage(root.storage) + root._save(mode=mode) save.__doc__ += _save_load_warnings - def load(self): + def _save(self, mode: Literal["h5io", "tinybase"] = "h5io"): + if mode == "h5io": + h5io.write_hdf5( + fname=self._h5io_storage_file_path, + data=self, + title=self.label, + use_state=True, + overwrite=True, # Don't worry about efficiency or updating yet + ) + elif mode == "tinybase": + self.to_storage(self.storage) + else: + raise ValueError( + f"Mode {mode} not recognized, please use 'h5io' or 'tinybase'." + ) + + def load(self, mode: Literal["h5io", "tinybase"] = "h5io"): """ Loads the node file (from HDF5) such that this node restores its state at time of loading. @@ -1187,19 +1208,40 @@ def load(self): Raises: TypeError) when the saved node has a different class name. 
""" - if self.storage["class_name"] != self.__class__.__name__: - raise TypeError( - f"{self.label} cannot load, as it has type {self.__class__.__name__}, " - f"but the saved node has type {self.storage['class_name']}" + if mode == "h5io": + inst = h5io.read_hdf5(fname=self._h5io_storage_file_path, title=self.label) + self.__setstate__(inst.__getstate__()) + elif mode == "tinybase": + if self.storage["class_name"] != self.__class__.__name__: + raise TypeError( + f"{self.label} cannot load, as it has type " + f"{self.__class__.__name__}, but the saved node has type " + f"{self.storage['class_name']}" + ) + self.from_storage(self.storage) + else: + raise ValueError( + f"Mode {mode} not recognized, please use 'h5io' or 'tinybase'." ) - self.from_storage(self.storage) save.__doc__ += _save_load_warnings @property - def _storage_file_path(self) -> str: + def _tinybase_storage_file_path(self) -> str: return str( - (self.graph_root.working_directory.path / self._STORAGE_FILE_NAME).resolve() + ( + self.graph_root.working_directory.path / + self._TINYBASE_STORAGE_FILE_NAME + ).resolve() + ) + + @property + def _h5io_storage_file_path(self) -> str: + return str( + ( + self.graph_root.working_directory.path / + self._H5IO_STORAGE_FILE_NAME + ).resolve() ) @property @@ -1208,14 +1250,14 @@ def storage(self): from h5io_browser import Pointer return H5ioStorage( - Pointer(self._storage_file_path, h5_path=self.graph_path), None + Pointer(self._tinybase_storage_file_path, h5_path=self.graph_path), None ) @property def storage_has_contents(self) -> bool: has_contents = ( - os.path.isfile(self._storage_file_path) - and (len(self.storage.list_groups()) + len(self.storage.list_nodes())) > 0 + self._tinybase_storage_is_there + or self._h5io_storage_is_there ) self.tidy_working_directory() return has_contents @@ -1230,10 +1272,27 @@ def tidy_working_directory(self): # Touching the working directory may have created it -- if it's there and # empty just clean it up + @property + def 
_tinybase_storage_is_there(self) -> bool: + return ( + os.path.isfile(self._tinybase_storage_file_path) + and (len(self.storage.list_groups()) + len(self.storage.list_nodes())) > 0 + ) + + @property + def _h5io_storage_is_there(self) -> bool: + return os.path.isfile(self._h5io_storage_file_path) + def delete_storage(self): - if self.storage_has_contents: + if self._tinybase_storage_is_there: up = self.storage.close() del up[self.label] if self.parent is None: - FileObject(self._STORAGE_FILE_NAME, self.working_directory).delete() + FileObject( + self._TINYBASE_STORAGE_FILE_NAME, self.working_directory + ).delete() + if self._h5io_storage_is_there: + FileObject( + self._H5IO_STORAGE_FILE_NAME, self.working_directory + ).delete() self.tidy_working_directory() diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 1ba79d4c..6ff08d95 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -6,7 +6,7 @@ from __future__ import annotations -from typing import Optional, TYPE_CHECKING +from typing import Literal, Optional, TYPE_CHECKING from pyiron_workflow.composite import Composite from pyiron_workflow.io import Inputs, Outputs @@ -191,6 +191,7 @@ def __init__( *nodes: Node, overwrite_save: bool = False, run_after_init: bool = False, + storage_mode: Literal["h5io", "tinybase"] = "h5io", save_after_run: bool = False, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, @@ -372,8 +373,8 @@ def _rebuild_execution_graph(self, storage): ) self.starting_nodes = [self.nodes[label] for label in storage["starting_nodes"]] - def save(self): - if any(node.package_identifier is None for node in self): + def save(self, mode: Literal["h5io", "tinybase"] = "h5io"): + if mode == "tinybase" and any(node.package_identifier is None for node in self): raise NotImplementedError( f"{self.__class__.__name__} can currently only save itself to file if " f"_all_ of its child nodes were created via the creator and have an " @@ -383,7 
+384,7 @@ def save(self): f"like any other node package. Remember that this new module needs to " f"be in your python path and importable at load time too." ) - self.to_storage(self.storage) + super().save(mode=mode) @property def _owned_io_panels(self) -> list[IO]: diff --git a/tests/unit/test_node.py b/tests/unit/test_node.py index aab4c763..bf51d9ff 100644 --- a/tests/unit/test_node.py +++ b/tests/unit/test_node.py @@ -1,6 +1,6 @@ from concurrent.futures import Future import os - +from typing import Literal import unittest from pyiron_workflow.channels import InputData, OutputData, NOT_DATA @@ -22,6 +22,7 @@ def __init__( label, overwrite_save=False, run_after_init=False, + storage_mode: Literal["h5io", "tinybase"] = "h5io", save_after_run=False, x=None, ): diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index 2f47cb34..d7396a7c 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -332,13 +332,14 @@ def add_three_macro(macro): wf.executor_shutdown() def test_storage(self): - with self.subTest("Fail when nodes have no package"): - wf = Workflow("wf") - wf.n1 = wf.create.Function(plus_one) - with self.assertRaises( - NotImplementedError, msg="We can't handle nodes without a package yet" - ): - wf.save() + # Only do the package check when using tinybase, which isn't available + # with self.subTest("Fail when nodes have no package"): + # wf = Workflow("wf") + # wf.n1 = wf.create.Function(plus_one) + # with self.assertRaises( + # NotImplementedError, msg="We can't handle nodes without a package yet" + # ): + # wf.save() wf = Workflow("wf") wf.register("static.demo_nodes", domain="demo") From b1adbd4b2dfe1013b075b812cd5b4e01ead192cd Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 25 Jan 2024 16:51:01 -0800 Subject: [PATCH 085/166] Move tinybase requirements to optional --- .ci_support/environment-tinybase.yml | 6 ++++++ .ci_support/environment.yml | 3 --- setup.py | 8 +++++--- 3 files changed, 11 insertions(+), 6 
deletions(-) create mode 100644 .ci_support/environment-tinybase.yml diff --git a/.ci_support/environment-tinybase.yml b/.ci_support/environment-tinybase.yml new file mode 100644 index 00000000..f77ff0d9 --- /dev/null +++ b/.ci_support/environment-tinybase.yml @@ -0,0 +1,6 @@ +channels: +- conda-forge +dependencies: +- boto3 +- h5io_browser =0.0.6 +- pyiron_contrib =0.1.13 \ No newline at end of file diff --git a/.ci_support/environment.yml b/.ci_support/environment.yml index c34f2156..5491c58f 100644 --- a/.ci_support/environment.yml +++ b/.ci_support/environment.yml @@ -4,12 +4,9 @@ dependencies: - coveralls - coverage - bidict =0.22.1 -- boto3 - cloudpickle =3.0.0 - graphviz =8.1.0 -- h5io_browser =0.0.6 - matplotlib =3.8.2 -- pyiron_contrib =0.1.13 - pympipool =0.7.9 - python-graphviz =0.20.1 - toposort =1.10 diff --git a/setup.py b/setup.py index 3cef4732..d722797f 100644 --- a/setup.py +++ b/setup.py @@ -29,12 +29,9 @@ packages=find_packages(exclude=["*tests*", "*docs*", "*binder*", "*conda*", "*notebooks*", "*.ci_support*"]), install_requires=[ 'bidict==0.22.1', - 'boto3', # Just because pyiron_contrib is not making sure it's there 'cloudpickle==3.0.0', 'graphviz==0.20.1', - 'h5io_browser==0.0.6', 'matplotlib==3.8.2', - 'pyiron_contrib==0.1.13', 'pympipool==0.7.9', 'toposort==1.10', 'typeguard==4.1.5', @@ -47,6 +44,11 @@ 'phonopy==2.21.0', 'pyiron_atomistics==0.4.7', ], + "tinybase": [ + 'boto3', # Just because pyiron_contrib is not making sure it's there + 'h5io_browser==0.0.6', + 'pyiron_contrib==0.1.13', + ] }, cmdclass=versioneer.get_cmdclass(), From e396668a6014c97b95d183c3fe2de04aafa267c6 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 25 Jan 2024 16:51:52 -0800 Subject: [PATCH 086/166] Depend on a version of h5io that doesn't exist yet --- .ci_support/environment.yml | 1 + setup.py | 1 + 2 files changed, 2 insertions(+) diff --git a/.ci_support/environment.yml b/.ci_support/environment.yml index 5491c58f..f51dd566 100644 --- 
a/.ci_support/environment.yml +++ b/.ci_support/environment.yml @@ -6,6 +6,7 @@ dependencies: - bidict =0.22.1 - cloudpickle =3.0.0 - graphviz =8.1.0 +- h5io =0.2.2 - matplotlib =3.8.2 - pympipool =0.7.9 - python-graphviz =0.20.1 diff --git a/setup.py b/setup.py index d722797f..dc58de45 100644 --- a/setup.py +++ b/setup.py @@ -31,6 +31,7 @@ 'bidict==0.22.1', 'cloudpickle==3.0.0', 'graphviz==0.20.1', + 'h5io==0.2.2', 'matplotlib==3.8.2', 'pympipool==0.7.9', 'toposort==1.10', From b4f479f1a3766ea53cf2b417cba7048fe99278f3 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Fri, 26 Jan 2024 01:08:40 +0000 Subject: [PATCH 087/166] [dependabot skip] Update env file --- .binder/environment.yml | 4 +--- docs/environment.yml | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/.binder/environment.yml b/.binder/environment.yml index c5b5363d..692a91c9 100644 --- a/.binder/environment.yml +++ b/.binder/environment.yml @@ -4,12 +4,10 @@ dependencies: - coveralls - coverage - bidict =0.22.1 -- boto3 - cloudpickle =3.0.0 - graphviz =8.1.0 -- h5io_browser =0.0.6 +- h5io =0.2.2 - matplotlib =3.8.2 -- pyiron_contrib =0.1.13 - pympipool =0.7.9 - python-graphviz =0.20.1 - toposort =1.10 diff --git a/docs/environment.yml b/docs/environment.yml index 5c1fef1e..20a9e197 100644 --- a/docs/environment.yml +++ b/docs/environment.yml @@ -9,12 +9,10 @@ dependencies: - coveralls - coverage - bidict =0.22.1 -- boto3 - cloudpickle =3.0.0 - graphviz =8.1.0 -- h5io_browser =0.0.6 +- h5io =0.2.2 - matplotlib =3.8.2 -- pyiron_contrib =0.1.13 - pympipool =0.7.9 - python-graphviz =0.20.1 - toposort =1.10 From 5ff842470b287fb8edf4f12a60bb324efed98ad4 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 10:57:58 -0800 Subject: [PATCH 088/166] Test tinybase backend The promises are slightly different, but the saving of aggregate workflows with all children from packages should work the same for both back ends, so test it --- tests/unit/test_workflow.py | 49 
+++++++++++++++++++------------------ 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index d7396a7c..46ecc284 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -340,30 +340,31 @@ def test_storage(self): # NotImplementedError, msg="We can't handle nodes without a package yet" # ): # wf.save() - - wf = Workflow("wf") - wf.register("static.demo_nodes", domain="demo") - wf.inp = wf.create.demo.AddThree(x=0) - wf.out = wf.inp.outputs.add_three + 1 - wf_out = wf() - three_result = wf.inp.three.outputs.add.value - - wf.save() - - reloaded = Workflow("wf") - self.assertEqual( - wf_out.out__add, - reloaded.outputs.out__add.value, - msg="Workflow-level data should get reloaded" - ) - self.assertEqual( - three_result, - reloaded.inp.three.value, - msg="Child data arbitrarily deep should get reloaded" - ) - - # Clean up after ourselves - reloaded.delete_storage() + for storage_mode in ["h5io", "tinybase"]: + with self.subTest(storage_mode): + wf = Workflow("wf") + wf.register("static.demo_nodes", domain="demo") + wf.inp = wf.create.demo.AddThree(x=0) + wf.out = wf.inp.outputs.add_three + 1 + wf_out = wf() + three_result = wf.inp.three.outputs.add.value + + wf.save(mode=storage_mode) + + reloaded = Workflow("wf", storage_mode=storage_mode) + self.assertEqual( + wf_out.out__add, + reloaded.outputs.out__add.value, + msg="Workflow-level data should get reloaded" + ) + self.assertEqual( + three_result, + reloaded.inp.three.value, + msg="Child data arbitrarily deep should get reloaded" + ) + + # Clean up after ourselves + reloaded.delete_storage() if __name__ == '__main__': From 721d44d83fb665bdfb4a213233c6f3af4609a91f Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 10:58:11 -0800 Subject: [PATCH 089/166] :bug: Get the test passing by propagating storage mode flag --- pyiron_workflow/macro.py | 12 +++++++++--- pyiron_workflow/workflow.py | 4 +++- 2 files changed, 12 
insertions(+), 4 deletions(-) diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index f9923e96..9e582027 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -306,7 +306,9 @@ def __init__( ) output_labels = self._validate_output_labels(output_labels) - ui_nodes = self._prepopulate_ui_nodes_from_graph_creator_signature() + ui_nodes = self._prepopulate_ui_nodes_from_graph_creator_signature( + storage_mode=storage_mode + ) returned_has_channel_objects = self.graph_creator(self, *ui_nodes) self._configure_graph_execution() @@ -355,7 +357,9 @@ def _validate_output_labels(self, output_labels) -> tuple[str]: ) return () if output_labels is None else tuple(output_labels) - def _prepopulate_ui_nodes_from_graph_creator_signature(self): + def _prepopulate_ui_nodes_from_graph_creator_signature( + self, storage_mode: Literal["h5io", "tinybase"] + ): hints_dict = get_type_hints(self.graph_creator) interface_nodes = () for i, (arg_name, inspected_value) in enumerate( @@ -369,7 +373,9 @@ def _prepopulate_ui_nodes_from_graph_creator_signature(self): if inspected_value.default is inspect.Parameter.empty else inspected_value.default ) - node = self.create.standard.UserInput(default, label=arg_name, parent=self) + node = self.create.standard.UserInput( + default, label=arg_name, parent=self, storage_mode=storage_mode + ) node.inputs.user_input.default = default try: node.inputs.user_input.type_hint = hints_dict[arg_name] diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 6ff08d95..6add91ef 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -351,7 +351,9 @@ def _reinstantiate_children(self, storage): child_data = storage[child_label] pid = child_data["package_identifier"] cls = child_data["class_name"] - self.create[pid][cls](label=child_label, parent=self) + self.create[pid][cls]( + label=child_label, parent=self, storage_mode="tinybase" + ) def _rebuild_connections(self, storage): 
self._rebuild_data_connections(storage) From 0d506702e41e87115908e9c03106e4fc4a26e118 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Mon, 29 Jan 2024 19:02:03 +0000 Subject: [PATCH 090/166] Format black --- pyiron_workflow/__init__.py | 1 + pyiron_workflow/macro.py | 2 +- pyiron_workflow/node.py | 22 +++++++------------ .../node_library/pyiron_atomistics.py | 21 +++++++++++------- pyiron_workflow/snippets/testcase.py | 2 -- 5 files changed, 23 insertions(+), 25 deletions(-) diff --git a/pyiron_workflow/__init__.py b/pyiron_workflow/__init__.py index 4bdf2160..41231fb5 100644 --- a/pyiron_workflow/__init__.py +++ b/pyiron_workflow/__init__.py @@ -28,4 +28,5 @@ - Ontological hinting for data channels in order to provide guided workflow design - GUI on top for code-lite/code-free visual scripting """ + from pyiron_workflow.workflow import Workflow diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index 9e582027..04e2b898 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -495,7 +495,7 @@ def _parse_remotely_executed_self(self, other_self): for old_data, io_panel in zip( local_connection_data, - [self.inputs, self.outputs, self.signals.input, self.signals.output] + [self.inputs, self.outputs, self.signals.input, self.signals.output], # Get fresh copies of the IO panels post-update ): for original_channel, label, connections in old_data: diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index aec97c73..bdd513a8 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -508,9 +508,9 @@ def run( ) return self._run( - finished_callback=self._finish_run_and_emit_ran - if emit_ran_signal - else self._finish_run, + finished_callback=( + self._finish_run_and_emit_ran if emit_ran_signal else self._finish_run + ), force_local_execution=force_local_execution, ) @@ -1230,8 +1230,8 @@ def load(self, mode: Literal["h5io", "tinybase"] = "h5io"): def _tinybase_storage_file_path(self) -> str: return str( ( - 
self.graph_root.working_directory.path / - self._TINYBASE_STORAGE_FILE_NAME + self.graph_root.working_directory.path + / self._TINYBASE_STORAGE_FILE_NAME ).resolve() ) @@ -1239,8 +1239,7 @@ def _tinybase_storage_file_path(self) -> str: def _h5io_storage_file_path(self) -> str: return str( ( - self.graph_root.working_directory.path / - self._H5IO_STORAGE_FILE_NAME + self.graph_root.working_directory.path / self._H5IO_STORAGE_FILE_NAME ).resolve() ) @@ -1255,10 +1254,7 @@ def storage(self): @property def storage_has_contents(self) -> bool: - has_contents = ( - self._tinybase_storage_is_there - or self._h5io_storage_is_there - ) + has_contents = self._tinybase_storage_is_there or self._h5io_storage_is_there self.tidy_working_directory() return has_contents @@ -1292,7 +1288,5 @@ def delete_storage(self): self._TINYBASE_STORAGE_FILE_NAME, self.working_directory ).delete() if self._h5io_storage_is_there: - FileObject( - self._H5IO_STORAGE_FILE_NAME, self.working_directory - ).delete() + FileObject(self._H5IO_STORAGE_FILE_NAME, self.working_directory).delete() self.tidy_working_directory() diff --git a/pyiron_workflow/node_library/pyiron_atomistics.py b/pyiron_workflow/node_library/pyiron_atomistics.py index 24669e94..8b660268 100644 --- a/pyiron_workflow/node_library/pyiron_atomistics.py +++ b/pyiron_workflow/node_library/pyiron_atomistics.py @@ -1,6 +1,7 @@ """ Nodes wrapping a subset of pyiron_atomistics functionality """ + from __future__ import annotations from typing import Literal, Optional @@ -126,10 +127,12 @@ def CalcMd( n_ionic_steps: int = 1000, n_print: int = 100, temperature: int | float = 300.0, - pressure: float - | tuple[float, float, float] - | tuple[float, float, float, float, float, float] - | None = None, + pressure: ( + float + | tuple[float, float, float] + | tuple[float, float, float, float, float, float] + | None + ) = None, ): def calc_md(job, n_ionic_steps, n_print, temperature, pressure): job.calc_md( @@ -169,10 +172,12 @@ def CalcMin( job: 
AtomisticGenericJob, n_ionic_steps: int = 1000, n_print: int = 100, - pressure: float - | tuple[float, float, float] - | tuple[float, float, float, float, float, float] - | None = None, + pressure: ( + float + | tuple[float, float, float] + | tuple[float, float, float, float, float, float] + | None + ) = None, ): def calc_min(job, n_ionic_steps, n_print, pressure): job.calc_minimize( diff --git a/pyiron_workflow/snippets/testcase.py b/pyiron_workflow/snippets/testcase.py index 5d7c2b2a..5be6b89a 100644 --- a/pyiron_workflow/snippets/testcase.py +++ b/pyiron_workflow/snippets/testcase.py @@ -3,7 +3,6 @@ numpy arrays (if numpy is available). """ - from abc import ABC from contextlib import redirect_stdout import doctest @@ -30,7 +29,6 @@ class PyironTestCase(unittest.TestCase, ABC): - """ Base class for all pyiron unit tets. From 9fffaf84976aef75a0fbeff5ffb9d9b039542fdb Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 12:00:55 -0800 Subject: [PATCH 091/166] Refactor: break out a storage interface Ultimately, I want to lean directly on tinybase for this, but in the intermediate where we have both tinybase _and_ pure h5io back ends, let's pull things out to keep the base node class tidier. 
--- pyiron_workflow/node.py | 103 ++-------------------------- pyiron_workflow/storage.py | 129 ++++++++++++++++++++++++++++++++++++ tests/unit/test_node.py | 2 +- tests/unit/test_workflow.py | 2 +- 4 files changed, 138 insertions(+), 98 deletions(-) create mode 100644 pyiron_workflow/storage.py diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index bdd513a8..9e9043b5 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -10,11 +10,8 @@ import warnings from abc import ABC, abstractmethod from concurrent.futures import Executor as StdLibExecutor, Future -import os from typing import Any, Literal, Optional, TYPE_CHECKING -import h5io - from pyiron_workflow.channels import ( InputSignal, AccumulatingInputSignal, @@ -22,9 +19,10 @@ NOT_DATA, ) from pyiron_workflow.draw import Node as GraphvizNode -from pyiron_workflow.snippets.files import FileObject, DirectoryObject +from pyiron_workflow.snippets.files import DirectoryObject from pyiron_workflow.has_to_dict import HasToDict from pyiron_workflow.io import Signals, IO +from pyiron_workflow.storage import StorageInterface from pyiron_workflow.topology import ( get_nodes_in_data_tree, set_run_connections_according_to_linear_dag, @@ -269,8 +267,6 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): package_identifier = None _semantic_delimiter = "/" - _TINYBASE_STORAGE_FILE_NAME = "project.h5" - _H5IO_STORAGE_FILE_NAME = "h5io.h5" # This isn't nice, just a technical necessity in the current implementation # Eventually, of course, this needs to be _at least_ file-format independent @@ -322,10 +318,10 @@ def __post__( **kwargs, ): if overwrite_save: - self.delete_storage() + self.storage.delete() do_load = False else: - do_load = self.storage_has_contents + do_load = self.storage.has_contents if do_load and run_after_init: raise ValueError( @@ -1176,30 +1172,10 @@ def save(self, mode: Literal["h5io", "tinybase"] = "h5io"): type can :meth:`load()` the data to return to the same state as the 
save point, i.e. the same data IO channel values, the same flags, etc. """ - if self.parent is None: - self._save(mode=mode) - else: - root = self.graph_root - root._save(mode=mode) + self.storage.save(backend=mode) save.__doc__ += _save_load_warnings - def _save(self, mode: Literal["h5io", "tinybase"] = "h5io"): - if mode == "h5io": - h5io.write_hdf5( - fname=self._h5io_storage_file_path, - data=self, - title=self.label, - use_state=True, - overwrite=True, # Don't worry about efficiency or updating yet - ) - elif mode == "tinybase": - self.to_storage(self.storage) - else: - raise ValueError( - f"Mode {mode} not recognized, please use 'h5io' or 'tinybase'." - ) - def load(self, mode: Literal["h5io", "tinybase"] = "h5io"): """ Loads the node file (from HDF5) such that this node restores its state at time @@ -1208,55 +1184,13 @@ def load(self, mode: Literal["h5io", "tinybase"] = "h5io"): Raises: TypeError) when the saved node has a different class name. """ - if mode == "h5io": - inst = h5io.read_hdf5(fname=self._h5io_storage_file_path, title=self.label) - self.__setstate__(inst.__getstate__()) - elif mode == "tinybase": - if self.storage["class_name"] != self.__class__.__name__: - raise TypeError( - f"{self.label} cannot load, as it has type " - f"{self.__class__.__name__}, but the saved node has type " - f"{self.storage['class_name']}" - ) - self.from_storage(self.storage) - else: - raise ValueError( - f"Mode {mode} not recognized, please use 'h5io' or 'tinybase'." 
- ) + self.storage.load(backend=mode) save.__doc__ += _save_load_warnings - @property - def _tinybase_storage_file_path(self) -> str: - return str( - ( - self.graph_root.working_directory.path - / self._TINYBASE_STORAGE_FILE_NAME - ).resolve() - ) - - @property - def _h5io_storage_file_path(self) -> str: - return str( - ( - self.graph_root.working_directory.path / self._H5IO_STORAGE_FILE_NAME - ).resolve() - ) - @property def storage(self): - from pyiron_contrib.tinybase.storage import H5ioStorage - from h5io_browser import Pointer - - return H5ioStorage( - Pointer(self._tinybase_storage_file_path, h5_path=self.graph_path), None - ) - - @property - def storage_has_contents(self) -> bool: - has_contents = self._tinybase_storage_is_there or self._h5io_storage_is_there - self.tidy_working_directory() - return has_contents + return StorageInterface(self) def tidy_working_directory(self): """ @@ -1267,26 +1201,3 @@ def tidy_working_directory(self): self._working_directory = None # Touching the working directory may have created it -- if it's there and # empty just clean it up - - @property - def _tinybase_storage_is_there(self) -> bool: - return ( - os.path.isfile(self._tinybase_storage_file_path) - and (len(self.storage.list_groups()) + len(self.storage.list_nodes())) > 0 - ) - - @property - def _h5io_storage_is_there(self) -> bool: - return os.path.isfile(self._h5io_storage_file_path) - - def delete_storage(self): - if self._tinybase_storage_is_there: - up = self.storage.close() - del up[self.label] - if self.parent is None: - FileObject( - self._TINYBASE_STORAGE_FILE_NAME, self.working_directory - ).delete() - if self._h5io_storage_is_there: - FileObject(self._H5IO_STORAGE_FILE_NAME, self.working_directory).delete() - self.tidy_working_directory() diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py new file mode 100644 index 00000000..e5d8dd5e --- /dev/null +++ b/pyiron_workflow/storage.py @@ -0,0 +1,129 @@ +""" +A bit of abstraction to declutter the 
node class while we support two very different +back ends. +""" + +from __future__ import annotations + +import os +from typing import Literal, TYPE_CHECKING + +import h5io + +from pyiron_workflow.snippets.files import FileObject + +if TYPE_CHECKING: + from pyiron_workflow.node import Node + + +class StorageInterface: + + _TINYBASE_STORAGE_FILE_NAME = "project.h5" + _H5IO_STORAGE_FILE_NAME = "h5io.h5" + + def __init__(self, node: Node): + self.node = node + + def save(self, backend: Literal["h5io", "tinybase"]): + if self.node.parent is None: + self._save(backend=backend) + else: + root = self.node.graph_root + root.storage.save(backend=backend) + + def _save(self, backend: Literal["h5io", "tinybase"]): + if backend == "h5io": + h5io.write_hdf5( + fname=self._h5io_storage_file_path, + data=self.node, + title=self.node.label, + use_state=True, + overwrite=True, # Don't worry about efficiency or updating yet + ) + elif backend == "tinybase": + self.node.to_storage(self._tinybase_storage) + else: + raise ValueError( + f"Backend {backend} not recognized, please use 'h5io' or 'tinybase'." + ) + + def load(self, backend: Literal["h5io", "tinybase"]): + if backend == "h5io": + inst = h5io.read_hdf5( + fname=self._h5io_storage_file_path, + title=self.node.label + ) + self.node.__setstate__(inst.__getstate__()) + elif backend == "tinybase": + tinybase_storage = self._tinybase_storage + if tinybase_storage["class_name"] != self.node.class_name: + raise TypeError( + f"{self.node.label} cannot load, as it has type " + f"{self.node.class_name}, but the saved node has type " + f"{tinybase_storage['class_name']}" + ) + self.node.from_storage(tinybase_storage) + else: + raise ValueError( + f"Backend {backend} not recognized, please use 'h5io' or 'tinybase'." 
+ ) + + @property + def has_contents(self) -> bool: + has_contents = self._tinybase_storage_is_there or self._h5io_storage_is_there + self.node.tidy_working_directory() + return has_contents + + def delete(self): + if self._tinybase_storage_is_there: + up = self._tinybase_storage.close() + del up[self.node.label] + if self.node.parent is None: + FileObject( + self._TINYBASE_STORAGE_FILE_NAME, self.node.working_directory + ).delete() + if self._h5io_storage_is_there: + FileObject( + self._H5IO_STORAGE_FILE_NAME, self.node.working_directory + ).delete() + self.node.tidy_working_directory() + + @property + def _h5io_storage_file_path(self) -> str: + return str( + ( + self.node.graph_root.working_directory.path + / self._H5IO_STORAGE_FILE_NAME + ).resolve() + ) + + @property + def _h5io_storage_is_there(self) -> bool: + return os.path.isfile(self._h5io_storage_file_path) + + @property + def _tinybase_storage_file_path(self) -> str: + return str( + ( + self.node.graph_root.working_directory.path + / self._TINYBASE_STORAGE_FILE_NAME + ).resolve() + ) + + @property + def _tinybase_storage(self): + from pyiron_contrib.tinybase.storage import H5ioStorage + from h5io_browser import Pointer + + return H5ioStorage( + Pointer(self._tinybase_storage_file_path, h5_path=self.node.graph_path), + None + ) + + @property + def _tinybase_storage_is_there(self) -> bool: + storage = self._tinybase_storage + return ( + os.path.isfile(self._tinybase_storage_file_path) + and (len(storage.list_groups()) + len(storage.list_nodes())) > 0 + ) diff --git a/tests/unit/test_node.py b/tests/unit/test_node.py index bf51d9ff..ad5c5a6e 100644 --- a/tests/unit/test_node.py +++ b/tests/unit/test_node.py @@ -433,7 +433,7 @@ def test_save_after_run(self): msg="Should have saved automatically after run, and reloaded on " "instantiation" ) - find_saved.delete_storage() # Clean up + find_saved.storage.delete() # Clean up if __name__ == '__main__': diff --git a/tests/unit/test_workflow.py 
b/tests/unit/test_workflow.py index 46ecc284..44a1b884 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -364,7 +364,7 @@ def test_storage(self): ) # Clean up after ourselves - reloaded.delete_storage() + reloaded.storage.delete() if __name__ == '__main__': From d2ec42ebedbc68bddab0c0829ab59359e9604a91 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 12:07:53 -0800 Subject: [PATCH 092/166] Check for allowable back ends --- pyiron_workflow/storage.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index e5d8dd5e..bd07673f 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -15,6 +15,8 @@ if TYPE_CHECKING: from pyiron_workflow.node import Node +ALLOWED_BACKENDS = ["h5io", "tinybase"] + class StorageInterface: @@ -25,6 +27,12 @@ def __init__(self, node: Node): self.node = node def save(self, backend: Literal["h5io", "tinybase"]): + if backend not in ALLOWED_BACKENDS: + raise ValueError( + f"Backend {backend} not recognized, please use one of " + f"{ALLOWED_BACKENDS}." + ) + if self.node.parent is None: self._save(backend=backend) else: @@ -48,7 +56,12 @@ def _save(self, backend: Literal["h5io", "tinybase"]): ) def load(self, backend: Literal["h5io", "tinybase"]): - if backend == "h5io": + if backend not in ALLOWED_BACKENDS: + raise ValueError( + f"Backend {backend} not recognized, please use one of " + f"{ALLOWED_BACKENDS}." + ) + elif backend == "h5io": inst = h5io.read_hdf5( fname=self._h5io_storage_file_path, title=self.node.label @@ -63,10 +76,6 @@ def load(self, backend: Literal["h5io", "tinybase"]): f"{tinybase_storage['class_name']}" ) self.node.from_storage(tinybase_storage) - else: - raise ValueError( - f"Backend {backend} not recognized, please use 'h5io' or 'tinybase'." 
- ) @property def has_contents(self) -> bool: From 262bdb66ee2a8cb23d2e714c746f2db8bf4def54 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 12:25:17 -0800 Subject: [PATCH 093/166] Refactor: rename Call it the backend instead of the mode throughout, prepending with "storage_" if the context is not 100% clear (e.g. in node inits) --- pyiron_workflow/function.py | 2 +- pyiron_workflow/macro.py | 8 ++++---- pyiron_workflow/node.py | 10 +++++----- pyiron_workflow/workflow.py | 10 +++++----- tests/unit/test_node.py | 2 +- tests/unit/test_workflow.py | 8 ++++---- 6 files changed, 20 insertions(+), 20 deletions(-) diff --git a/pyiron_workflow/function.py b/pyiron_workflow/function.py index 2d3e525c..1ef14a24 100644 --- a/pyiron_workflow/function.py +++ b/pyiron_workflow/function.py @@ -333,7 +333,7 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, - storage_mode: Literal["h5io", "tinybase"] = "h5io", + storage_backend: Literal["h5io", "tinybase"] = "h5io", save_after_run: bool = False, output_labels: Optional[str | list[str] | tuple[str]] = None, **kwargs, diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index 04e2b898..d7a89424 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -270,7 +270,7 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, - storage_mode: Literal["h5io", "tinybase"] = "h5io", + storage_backend: Literal["h5io", "tinybase"] = "h5io", save_after_run: bool = False, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, @@ -307,7 +307,7 @@ def __init__( output_labels = self._validate_output_labels(output_labels) ui_nodes = self._prepopulate_ui_nodes_from_graph_creator_signature( - storage_mode=storage_mode + storage_backend=storage_backend ) returned_has_channel_objects = self.graph_creator(self, *ui_nodes) self._configure_graph_execution() @@ -358,7 +358,7 @@ def 
_validate_output_labels(self, output_labels) -> tuple[str]: return () if output_labels is None else tuple(output_labels) def _prepopulate_ui_nodes_from_graph_creator_signature( - self, storage_mode: Literal["h5io", "tinybase"] + self, storage_backend: Literal["h5io", "tinybase"] ): hints_dict = get_type_hints(self.graph_creator) interface_nodes = () @@ -374,7 +374,7 @@ def _prepopulate_ui_nodes_from_graph_creator_signature( else inspected_value.default ) node = self.create.standard.UserInput( - default, label=arg_name, parent=self, storage_mode=storage_mode + default, label=arg_name, parent=self, storage_backend=storage_backend ) node.inputs.user_input.default = default try: diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 9e9043b5..bb8efb65 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -277,7 +277,7 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, - storage_mode: Literal["h5io", "tinybase"] = "h5io", + storage_backend: Literal["h5io", "tinybase"] = "h5io", save_after_run: bool = False, **kwargs, ): @@ -314,7 +314,7 @@ def __post__( *args, overwrite_save: bool = False, run_after_init: bool = False, - storage_mode: Literal["h5io", "tinybase"] = "h5io", + storage_backend: Literal["h5io", "tinybase"] = "h5io", **kwargs, ): if overwrite_save: @@ -335,7 +335,7 @@ def __post__( f"load it...(To delete the saved file instead, use " f"`overwrite_save=True`)" ) - self.load(mode=storage_mode) + self.load(mode=storage_backend) elif run_after_init: try: self.run() @@ -1166,13 +1166,13 @@ def from_storage(self, storage): usual. """ - def save(self, mode: Literal["h5io", "tinybase"] = "h5io"): + def save(self, backend: Literal["h5io", "tinybase"] = "h5io"): """ Writes the node to file (using HDF5) such that a new node instance of the same type can :meth:`load()` the data to return to the same state as the save point, i.e. 
the same data IO channel values, the same flags, etc. """ - self.storage.save(backend=mode) + self.storage.save(backend=backend) save.__doc__ += _save_load_warnings diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 6add91ef..55014a5a 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -191,7 +191,7 @@ def __init__( *nodes: Node, overwrite_save: bool = False, run_after_init: bool = False, - storage_mode: Literal["h5io", "tinybase"] = "h5io", + storage_backend: Literal["h5io", "tinybase"] = "h5io", save_after_run: bool = False, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, @@ -352,7 +352,7 @@ def _reinstantiate_children(self, storage): pid = child_data["package_identifier"] cls = child_data["class_name"] self.create[pid][cls]( - label=child_label, parent=self, storage_mode="tinybase" + label=child_label, parent=self, storage_backend="tinybase" ) def _rebuild_connections(self, storage): @@ -375,8 +375,8 @@ def _rebuild_execution_graph(self, storage): ) self.starting_nodes = [self.nodes[label] for label in storage["starting_nodes"]] - def save(self, mode: Literal["h5io", "tinybase"] = "h5io"): - if mode == "tinybase" and any(node.package_identifier is None for node in self): + def save(self, backend: Literal["h5io", "tinybase"] = "h5io"): + if backend == "tinybase" and any(node.package_identifier is None for node in self): raise NotImplementedError( f"{self.__class__.__name__} can currently only save itself to file if " f"_all_ of its child nodes were created via the creator and have an " @@ -386,7 +386,7 @@ def save(self, mode: Literal["h5io", "tinybase"] = "h5io"): f"like any other node package. Remember that this new module needs to " f"be in your python path and importable at load time too." 
) - super().save(mode=mode) + super().save(backend=backend) @property def _owned_io_panels(self) -> list[IO]: diff --git a/tests/unit/test_node.py b/tests/unit/test_node.py index ad5c5a6e..b0827a46 100644 --- a/tests/unit/test_node.py +++ b/tests/unit/test_node.py @@ -22,7 +22,7 @@ def __init__( label, overwrite_save=False, run_after_init=False, - storage_mode: Literal["h5io", "tinybase"] = "h5io", + storage_backend: Literal["h5io", "tinybase"] = "h5io", save_after_run=False, x=None, ): diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index 44a1b884..9f697161 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -340,8 +340,8 @@ def test_storage(self): # NotImplementedError, msg="We can't handle nodes without a package yet" # ): # wf.save() - for storage_mode in ["h5io", "tinybase"]: - with self.subTest(storage_mode): + for storage_backend in ["h5io", "tinybase"]: + with self.subTest(storage_backend): wf = Workflow("wf") wf.register("static.demo_nodes", domain="demo") wf.inp = wf.create.demo.AddThree(x=0) @@ -349,9 +349,9 @@ def test_storage(self): wf_out = wf() three_result = wf.inp.three.outputs.add.value - wf.save(mode=storage_mode) + wf.save(backend=storage_backend) - reloaded = Workflow("wf", storage_mode=storage_mode) + reloaded = Workflow("wf", storage_backend=storage_backend) self.assertEqual( wf_out.out__add, reloaded.outputs.out__add.value, From f80d7abc04040c798a260a45f00f0ede63ef2a72 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 12:35:23 -0800 Subject: [PATCH 094/166] Refactor to delay contrib import --- pyiron_workflow/storage.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index bd07673f..e520a63f 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -131,8 +131,8 @@ def _tinybase_storage(self): @property def _tinybase_storage_is_there(self) -> bool: - storage = 
self._tinybase_storage - return ( - os.path.isfile(self._tinybase_storage_file_path) - and (len(storage.list_groups()) + len(storage.list_nodes())) > 0 - ) + if os.path.isfile(self._tinybase_storage_file_path): + storage = self._tinybase_storage + return (len(storage.list_groups()) + len(storage.list_nodes())) > 0 + else: + return False From 7aec1575f1941dd2b24be97a0f0a7f3d1f9b4c5f Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 13:10:08 -0800 Subject: [PATCH 095/166] Update docstring --- pyiron_workflow/node.py | 61 +++++++++++++++++++++++++++-------------- 1 file changed, 41 insertions(+), 20 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index bb8efb65..12fe4f93 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -153,23 +153,11 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): - [ALPHA FEATURE] Nodes can be saved to and loaded from file. - Saving is triggered manually, or by setting a flag to save after the nodes runs. - - On instantiation, nodes will look in the working directory of their - parent-most node for a save file; they will search within this along their - relative semantic path (i.e. the path of node labels) for stored data; if - found, they will use it to load their state. - - Found save files can be deleted and ignored with an initialization kwarg - - You can't load a saved node _and_ run that node after instantiation during - the same instantiation. - - To save a composite graph, _all_ children need to be created from a - registered module or saving will raise an error; - - [ALPHA ISSUE?] Right now that means moving any nodes defined in-notebook - off to a `.py` file. - - [ALPHA ISSUE] Modifications to macros (e.g. replacing a child node) are not - reflected in the saved data -- saving and loading such a graph is likely to - _silently_ misbehave, as the loaded macro will just reinstantiate its - original nodes and connections. - - [ALPHA ISSUE] If the source code (i.e. 
`.py` files) for a saved graph is - altered between saving and loading the graph, there are no guarnatees about + - On instantiation, nodes will load automatically if they find saved content. + - Discovered content can instead be deleted with a kwarg. + - You can't load saved content _and_ run after instantiation at once. + - [ALPHA ISSUE] If the source code (cells, `.py` files...) for a saved graph is + altered between saving and loading the graph, there are no guarantees about the loaded state; depending on the nature of the changes everything may work fine with the new node definition, the graph may load but silently behave unexpectedly (e.g. if node functionality has changed but the @@ -180,9 +168,42 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): your graph this could be expensive in terms of storage space and/or time. - [ALPHA ISSUE] Similarly, there is no way to save only part of a graph; only the entire graph may be saved at once. - - Since nodes store their IO data, all data is expected to be serializable; as - a fallback, the save process will attempt to `pickle` the data. - - While loading is attempted at instantiation, saving only happens on request. + - There are two possible back-ends for saving: one leaning on + `tinybase.storage.GenericStorage` (in practice, + `H5ioStorage(GenericStorage)`), and the other, default back-end that uses + the `h5io` module directly. + - [ALPHA ISSUE] Restrictions on data: + - For the `h5io` backend: Most data that can be pickled will be fine, but + some classes will hit an edge case and throw an exception from `h5io` + (e.g. the `Calculator` class and its children from `ase`). + - For the `tinybase` backend: Any data that can be pickled will be fine, + although it might get stored in a pickled state, which is not ideal for + long-term storage or sharing. + - [ALPHA ISSUE] Restrictions on composites: + - For the `h5io` backend: all child nodes must be defined in an importable + location. 
This includes `__main__` in a jupyter notebook (as long as + the same `__main__` cells get executed prior to trying to load!) but + not, e.g., inside functions in `__main__`. + - For the `tinybase` backend: all child nodes must have been created via + the creator (i.e. `wf.create...`), which is to say they come from a + registered node package. The composite will run a check and fail early + in the save process if this is not the case. Fulfilling this + requirement is as simple as moving all the desired nodes off to a `.py` + file, registering it, and building the composite from there. + - [ALPHA ISSUE] Restrictions to macros: + - For the `h5io` backend: there are none; if a macro is modified, saved, + and reloaded, the modifications will be reflected in the loaded state. + Note there is a little bit of danger here, as the macro class still + corresponds to the un-modified macro class. + - For the `tinybase` backend: the macro will re-instantiate its original + nodes and try to update their data. Any modifications to the macro + prior to saving are completely disregarded; if the interface to the + macro was modified (e.g. different channel names in the IO), then this + will save fine but throw an exception on load; if the interface was + unchanged but the functionality changed (e.g. replacing a child node), + the original, unmodified macro will cleanly load and the loaded data + will _silently_ mis-represent the macro functionality (insofaras the + internal changes would cause a difference in the output data). This is an abstract class. Children *must* define how :attr:`inputs` and :attr:`outputs` are constructed, what will From b030571f17bdb20c39f4af460d809dc27f993259 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 13:19:49 -0800 Subject: [PATCH 096/166] h5io storage file should always be in the working directory Saving and loading is done all at once by the parent; new child instances do not look for their own saved data. 
--- pyiron_workflow/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index e520a63f..21cc4805 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -101,7 +101,7 @@ def delete(self): def _h5io_storage_file_path(self) -> str: return str( ( - self.node.graph_root.working_directory.path + self.node.working_directory.path / self._H5IO_STORAGE_FILE_NAME ).resolve() ) From c1ca1d68ef9f2145ed11e78a60855e9124b7e862 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 13:28:26 -0800 Subject: [PATCH 097/166] Add tests for modified macro storage --- tests/unit/test_macro.py | 54 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/tests/unit/test_macro.py b/tests/unit/test_macro.py index 054c3e62..9315dad4 100644 --- a/tests/unit/test_macro.py +++ b/tests/unit/test_macro.py @@ -23,6 +23,24 @@ def add_three_macro(macro): # although these are more thoroughly tested in Workflow tests +@Macro.wrap_as.single_value_node("result") +def AddOne(x): + return x + 1 + + +@Macro.wrap_as.macro_node("result") +def AddThreeMacro(macro, x=0): + macro.one = AddOne(x) + macro.two = AddOne(macro.one) + macro.three = AddOne(macro.two) + return macro.three + + +@Macro.wrap_as.single_value_node("result") +def AddTwo(x): + return x + 2 + + class TestMacro(unittest.TestCase): def test_static_input(self): @@ -517,6 +535,42 @@ def LikeAFunction(macro, lin: list, n: int = 2): self.assertListEqual(override_io_maps.inputs.labels, ["my_lin"]) self.assertDictEqual(override_io_maps(), {"the_input_list": [1, 2, 3, 4]}) + def test_storage_for_modified_macros(self): + + macro = AddThreeMacro(label="m") + macro.replace_node(macro.two, AddTwo()) + macro.remove_node(macro.three) + macro.five = AddOne(macro.two) + macro.two >> macro.five + macro._rebuild_data_io() # Need this because of the explicitly created node + # Note that it destroys our output labeling, since 
the new output never existed + modified_result = macro(x=1) + + try: + macro.save() + reloaded = AddThreeMacro(label="m") + self.assertDictEqual( + macro.outputs.to_value_dict(), + reloaded.outputs.to_value_dict(), + msg="Updated IO should have been (de)serialized" + ) + self.assertSetEqual( + set(macro.nodes.keys()), + set(reloaded.nodes.keys()), + msg="All nodes, including the new one, should have been (de)serialized." + ) + self.assertEqual( + AddThreeMacro.__name__, + reloaded.class_name, + msg=f"LOOK OUT! This all (de)serialized nicely, but what we loaded is " + f"_falsely_ claiming to be an {AddThreeMacro.__name__}. This is " + f"not any sort of technical error -- what other class name would " + f"we load? -- but is a deeper problem with saving modified objects " + f"that we need ot figure out some better solution for later." + ) + finally: + macro.storage.delete() + if __name__ == '__main__': unittest.main() From 0477cf7f0f9ec8d0c1200dd908eba6d8e48eccc2 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 13:52:33 -0800 Subject: [PATCH 098/166] Test modified macro storage for the backends separately Required moving the macro nodes over to a node package --- tests/static/demo_nodes.py | 8 ++- tests/unit/test_macro.py | 122 +++++++++++++++++++++---------------- 2 files changed, 77 insertions(+), 53 deletions(-) diff --git a/tests/static/demo_nodes.py b/tests/static/demo_nodes.py index 4a4d7b88..ae6c4d91 100644 --- a/tests/static/demo_nodes.py +++ b/tests/static/demo_nodes.py @@ -21,4 +21,10 @@ def AddThree(macro, x: int) -> int: return macro.three -nodes = [OptionallyAdd, AddThree] +@Workflow.wrap_as.single_value_node("add") +def AddPlusOne(obj, other): + """The same IO labels as `standard.Add`, but with type hints and a boost.""" + return obj + other + 1 + + +nodes = [OptionallyAdd, AddThree, AddPlusOne] diff --git a/tests/unit/test_macro.py b/tests/unit/test_macro.py index 9315dad4..ea6b9a81 100644 --- a/tests/unit/test_macro.py +++ 
b/tests/unit/test_macro.py @@ -4,6 +4,8 @@ from time import sleep import unittest + +from pyiron_workflow._tests import ensure_tests_in_python_path from pyiron_workflow.channels import NOT_DATA from pyiron_workflow.function import SingleValue from pyiron_workflow.macro import Macro, macro_node @@ -23,24 +25,6 @@ def add_three_macro(macro): # although these are more thoroughly tested in Workflow tests -@Macro.wrap_as.single_value_node("result") -def AddOne(x): - return x + 1 - - -@Macro.wrap_as.macro_node("result") -def AddThreeMacro(macro, x=0): - macro.one = AddOne(x) - macro.two = AddOne(macro.one) - macro.three = AddOne(macro.two) - return macro.three - - -@Macro.wrap_as.single_value_node("result") -def AddTwo(x): - return x + 2 - - class TestMacro(unittest.TestCase): def test_static_input(self): @@ -536,40 +520,74 @@ def LikeAFunction(macro, lin: list, n: int = 2): self.assertDictEqual(override_io_maps(), {"the_input_list": [1, 2, 3, 4]}) def test_storage_for_modified_macros(self): - - macro = AddThreeMacro(label="m") - macro.replace_node(macro.two, AddTwo()) - macro.remove_node(macro.three) - macro.five = AddOne(macro.two) - macro.two >> macro.five - macro._rebuild_data_io() # Need this because of the explicitly created node - # Note that it destroys our output labeling, since the new output never existed - modified_result = macro(x=1) - - try: - macro.save() - reloaded = AddThreeMacro(label="m") - self.assertDictEqual( - macro.outputs.to_value_dict(), - reloaded.outputs.to_value_dict(), - msg="Updated IO should have been (de)serialized" - ) - self.assertSetEqual( - set(macro.nodes.keys()), - set(reloaded.nodes.keys()), - msg="All nodes, including the new one, should have been (de)serialized." - ) - self.assertEqual( - AddThreeMacro.__name__, - reloaded.class_name, - msg=f"LOOK OUT! This all (de)serialized nicely, but what we loaded is " - f"_falsely_ claiming to be an {AddThreeMacro.__name__}. 
This is " - f"not any sort of technical error -- what other class name would " - f"we load? -- but is a deeper problem with saving modified objects " - f"that we need ot figure out some better solution for later." - ) - finally: - macro.storage.delete() + ensure_tests_in_python_path() + Macro.register("static.demo_nodes", domain="demo") + + for backend in ["h5io", "tinybase"]: + with self.subTest(backend): + try: + macro = Macro.create.demo.AddThree(label="m", x=0) + original_result = macro() + macro.replace_node(macro.two, Macro.create.demo.AddPlusOne()) + + if backend == "h5io": + # Go really wild and actually change the interface to the node + # By replacing one of the terminal nodes + macro.remove_node(macro.three) + macro.five = Macro.create.standard.Add(macro.two, 1) + macro.two >> macro.five + macro._rebuild_data_io() # Need this because of the + # explicitly created node! + # Note that it destroys our output labeling, since the new + # output never existed + + modified_result = macro() + + macro.save(backend=backend) + reloaded = Macro.create.demo.AddThree( + label="m", storage_backend=backend + ) + self.assertDictEqual( + modified_result, + reloaded.outputs.to_value_dict(), + msg="Updated IO should have been (de)serialized" + ) + self.assertSetEqual( + set(macro.nodes.keys()), + set(reloaded.nodes.keys()), + msg="All nodes should have been (de)serialized." + ) # Note that this snags the _new_ one in the case of h5io! + self.assertEqual( + Macro.create.demo.AddThree.__name__, + reloaded.class_name, + msg=f"LOOK OUT! This all (de)serialized nicely, but what we " + f"loaded is _falsely_ claiming to be an " + f"{Macro.create.demo.AddThree.__name__}. This is " + f"not any sort of technical error -- what other class name " + f"would we load? -- but is a deeper problem with saving " + f"modified objects that we need ot figure out some better " + f"solution for later." 
+ ) + rerun = reloaded() + + if backend == "h5io": + self.assertDictEqual( + modified_result, + rerun, + msg="Rerunning should re-execute the _modified_ " + "functionality" + ) + elif backend == "tinybase": + self.assertDictEqual( + original_result, + rerun, + msg="Rerunning should re-execute the _original_ " + "functionality" + ) + else: + raise ValueError(f"Unexpected backend {backend}?") + finally: + macro.storage.delete() if __name__ == '__main__': From 189c204ae795fb472cd64502ded64746953a7253 Mon Sep 17 00:00:00 2001 From: samwaseda Date: Mon, 29 Jan 2024 21:59:53 +0000 Subject: [PATCH 099/166] NOT_DATA should work like None in the if-statement --- pyiron_workflow/channels.py | 3 +++ tests/unit/test_channels.py | 7 +++++++ 2 files changed, 10 insertions(+) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index 8d46201c..05c0447a 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -249,6 +249,9 @@ def __repr__(cls): def __reduce__(self): return "NOT_DATA" + def __bool__(self): + return False + NOT_DATA = NotData() diff --git a/tests/unit/test_channels.py b/tests/unit/test_channels.py index 7bb3a77f..0643ccc4 100644 --- a/tests/unit/test_channels.py +++ b/tests/unit/test_channels.py @@ -329,6 +329,13 @@ def test_ready(self): self.ni1._value = "Not numeric at all" # Bypass type checking self.assertFalse(self.ni1.ready) + def test_if_not_data(self): + if NOT_DATA: + a = 0 + else: + a = 1 + self.assertEqual(a, 1) + class TestSignalChannels(unittest.TestCase): def setUp(self) -> None: From 1908914e6caa192a20d059d888fcfed0e9be1d33 Mon Sep 17 00:00:00 2001 From: samwaseda Date: Mon, 29 Jan 2024 22:02:27 +0000 Subject: [PATCH 100/166] add error message --- tests/unit/test_channels.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_channels.py b/tests/unit/test_channels.py index 0643ccc4..ca3fb950 100644 --- a/tests/unit/test_channels.py +++ b/tests/unit/test_channels.py @@ -334,7 
+334,9 @@ def test_if_not_data(self): a = 0 else: a = 1 - self.assertEqual(a, 1) + self.assertEqual( + a, 1, msg="NOT_DATA failed behave like None in the if-statement" + ) class TestSignalChannels(unittest.TestCase): From 9de61c8bce5582af86b29466f35d8260162c73a7 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 14:24:32 -0800 Subject: [PATCH 101/166] Update spec --- pyiron_workflow/node.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 12fe4f93..7a8e6759 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -175,11 +175,13 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): - [ALPHA ISSUE] Restrictions on data: - For the `h5io` backend: Most data that can be pickled will be fine, but some classes will hit an edge case and throw an exception from `h5io` - (e.g. the `Calculator` class and its children from `ase`). + (at a minimum, those classes which define a custom reconstructor hit, + this, but there also seems to be issues with dynamic methods, e.g. the + `Calculator` class and its children from `ase`). - For the `tinybase` backend: Any data that can be pickled will be fine, although it might get stored in a pickled state, which is not ideal for long-term storage or sharing. - - [ALPHA ISSUE] Restrictions on composites: + - [ALPHA ISSUE] Restrictions on workflows: - For the `h5io` backend: all child nodes must be defined in an importable location. This includes `__main__` in a jupyter notebook (as long as the same `__main__` cells get executed prior to trying to load!) 
but From 63c7e639504a1901d3678edaac492aaf4b8e2290 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 14:24:55 -0800 Subject: [PATCH 102/166] Test claims about saving workflows with children from different places --- tests/unit/test_workflow.py | 77 ++++++++++++++++++++++++++++++++----- 1 file changed, 68 insertions(+), 9 deletions(-) diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index 9f697161..c2f58b8b 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -14,6 +14,11 @@ def plus_one(x=0): return y +@Workflow.wrap_as.single_value_node("y") +def PlusOne(x: int = 0): + return x + 1 + + class TestWorkflow(unittest.TestCase): @classmethod def setUpClass(cls) -> None: @@ -331,15 +336,7 @@ def add_three_macro(macro): wf.m.two.pull(run_parent_trees_too=False) wf.executor_shutdown() - def test_storage(self): - # Only do the package check when using tinybase, which isn't available - # with self.subTest("Fail when nodes have no package"): - # wf = Workflow("wf") - # wf.n1 = wf.create.Function(plus_one) - # with self.assertRaises( - # NotImplementedError, msg="We can't handle nodes without a package yet" - # ): - # wf.save() + def test_storage_values(self): for storage_backend in ["h5io", "tinybase"]: with self.subTest(storage_backend): wf = Workflow("wf") @@ -365,6 +362,68 @@ def test_storage(self): # Clean up after ourselves reloaded.storage.delete() + + def test_storage_scopes(self): + wf = Workflow("wf") + wf.register("static.demo_nodes", "demo") + + # Test invocation + wf.add_node(wf.create.demo.AddPlusOne(label="by_add")) + # Note that the type hint `Optional[int]` from OptionallyAdd defines a custom + # reconstructor, which borks h5io + + for backend in ["h5io", "tinybase"]: + with self.subTest(backend): + try: + wf.save(backend=backend) + Workflow(wf.label, storage_backend=backend) + finally: + wf.storage.delete() + + wf.add_node(PlusOne(label="local_but_importable")) + try: + wf.save(backend="h5io") + 
Workflow(wf.label, storage_backend="h5io") + finally: + wf.storage.delete() + + with self.assertRaises( + NotImplementedError, + msg="Storage docs for tinybase claim all children must be registered nodes" + ): + wf.save(backend="tinybase") + + with self.subTest("Instanced node"): + wf.direct_instance = Workflow.create.Function(plus_one) + try: + with self.assertRaises( + TypeError, + msg="No direct node instances, only children with functions as " + "_class_ attribtues" + ): + wf.save(backend="h5io") + finally: + wf.remove_node(wf.direct_instance) + wf.storage.delete() + + with self.subTest("Unimportable node"): + @Workflow.wrap_as.single_value_node("y") + def UnimportableScope(x): + return x + + wf.unimportable_scope = UnimportableScope() + + try: + wf.save(backend="h5io") + with self.assertRaises( + AttributeError, + msg="Nodes must live in an importable scope to save with the h5io " + "backend" + ): + Workflow(wf.label, storage_backend="h5io") + finally: + wf.remove_node(wf.unimportable_scope) + wf.storage.delete() if __name__ == '__main__': From dd7f06ca86855e94dbc656a587a4cc429658dd27 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 14:40:31 -0800 Subject: [PATCH 103/166] Update deepdive --- notebooks/deepdive.ipynb | 251 ++++++++++++++++++++------------------- 1 file changed, 127 insertions(+), 124 deletions(-) diff --git a/notebooks/deepdive.ipynb b/notebooks/deepdive.ipynb index 640f81c7..337ff46b 100644 --- a/notebooks/deepdive.ipynb +++ b/notebooks/deepdive.ipynb @@ -524,6 +524,8 @@ "name": "stderr", "output_type": "stream", "text": [ + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel ran was not connected to run, andthus could not disconnect from it.\n", + " warn(\n", "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", " warn(\n" ] @@ -989,7 +991,7 @@ }, { "data": { - 
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAkVklEQVR4nO3dfUzd5f3/8dfhUDjalWNoBU5v1h0bb4pkOiB04BozZ7HV4K+JixhXq06XUbfVlulW1kWkMSHuxkw3i1NbjWl1RKf7ScKwZL+s0puNlcIi0kxjmbT2MAJkB7yB2nOu3x/9wtfjOVTOKZzrcM7zkZw/znWuD+d9ctmcl9d1ruvjMMYYAQAAWJJmuwAAAJDaCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArEq3XcB0BINBnTp1SgsWLJDD4bBdDgAAmAZjjEZHR7V48WKlpU09/zEnwsipU6e0bNky22UAAIAYnDhxQkuXLp3y9ajDyJtvvqlf/vKX6ujokM/n02uvvab169ef85r9+/erurpab7/9thYvXqyf/OQnqqqqmvZ7LliwQNLZD5OVlRVtyQAAwIKRkREtW7Zs8nt8KlGHkY8++khXXXWV7r77bt1yyy1f2L+3t1c33nijvve972nPnj06ePCg7rvvPl188cXTul7S5NJMVlYWYQQAgDnmi35iEXUYWbdundatWzft/k899ZS+/OUv6ze/+Y0kaeXKlTpy5Ih+9atfTTuMAACA5DXru2kOHz6s8vLykLYbbrhBR44c0aeffhrxmvHxcY2MjIQ8AABAcpr1MNLf36/c3NyQttzcXJ05c0aDg4MRr6mvr5fb7Z588ONVAACSV1zOGfn8WpExJmL7hJqaGvn9/snHiRMnZr1GAABgx6xv7c3Ly1N/f39I28DAgNLT07Vw4cKI12RmZiozM3O2SwMAAAlg1mdGSktL1draGtK2b98+FRcXa968ebP99gAAIMFFHUY+/PBDdXV1qaurS9LZrbtdXV3q6+uTdHaJZePGjZP9q6qq9P7776u6ulrHjh3T7t27tWvXLj3wwAMz8wkAAMCcFvUyzZEjR/TNb35z8nl1dbUk6c4779Tzzz8vn883GUwkyev1qrm5WVu3btWTTz6pxYsX64knnmBbLwAAkCQ5zMSvSRPYyMiI3G63/H4/h54BQIoIBI3ae4c1MDqmnAUulXiz5Uzj/mRzyXS/v+fEvWkAAKmlpdunuqYe+fxjk20et0u1FflaW+CxWBlmQ1y29gIAMF0t3T5t2nM0JIhIUr9/TJv2HFVLt89SZZgthBEAQMIIBI3qmnoU6fcDE211TT0KBBP+FwaIAmEEAJAw2nuHw2ZEPstI8vnH1N47HL+iMOsIIwCAhDEwOnUQiaUf5gbCCAAgYeQscM1oP8wNhBEAQMIo8WbL43Zpqg28Dp3dVVPizY5nWZhlhBEAQMJwpjlUW5EvSWGBZOJ5bUU+540kGcIIACChrC3wqGFDofLcoUsxeW6XGjYUcs5IEuLQMwBAwllb4NGa/DxOYE0RhBEAQEJypjlUumKh7TIQByzTAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALAq3XYBtgSCRu29wxoYHVPOApdKvNlypjlslwUAQMpJyTDS0u1TXVOPfP6xyTaP26XainytLfBYrAwAgNSTcss0Ld0+bdpzNCSISFK/f0yb9hxVS7fPUmUAAKSmlAojgaBRXVOPTITXJtrqmnoUCEbqAQAAZkNKhZH23uGwGZ
HPMpJ8/jG19w7HrygAAFJcSoWRgdGpg0gs/QAAwPlLqTCSs8A1o/0AAMD5S6kwUuLNlsft0lQbeB06u6umxJsdz7IAAEhpKRVGnGkO1VbkS1JYIJl4XluRz3kjAADEUUqFEUlaW+BRw4ZC5blDl2Ly3C41bCjknBEAAOIsJQ89W1vg0Zr8PE5gBQAgAaRkGJHOLtmUrlhouwwAAFJeyi3TAACAxEIYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGBVTGFk586d8nq9crlcKioqUltb2zn77927V1dddZUuvPBCeTwe3X333RoaGoqpYAAAkFyiDiONjY3asmWLtm/frs7OTq1evVrr1q1TX19fxP4HDhzQxo0bdc899+jtt9/Wyy+/rH/84x+69957z7t4AAAw90UdRh577DHdc889uvfee7Vy5Ur95je/0bJly9TQ0BCx/9/+9jd95Stf0ebNm+X1evWNb3xD3//+93XkyJHzLh4AAMx9UYWR06dPq6OjQ+Xl5SHt5eXlOnToUMRrysrKdPLkSTU3N8sYo//85z965ZVXdNNNN035PuPj4xoZGQl5AACA5BRVGBkcHFQgEFBubm5Ie25urvr7+yNeU1ZWpr1796qyslIZGRnKy8vTRRddpN/+9rdTvk99fb3cbvfkY9myZdGUCQAA5pCYfsDqcDhCnhtjwtom9PT0aPPmzXrooYfU0dGhlpYW9fb2qqqqasq/X1NTI7/fP/k4ceJELGUCAIA5ID2azosWLZLT6QybBRkYGAibLZlQX1+va665Rg8++KAk6atf/armz5+v1atX65FHHpHH4wm7JjMzU5mZmdGUBgAA5qioZkYyMjJUVFSk1tbWkPbW1laVlZVFvObjjz9WWlro2zidTklnZ1QAAEBqi3qZprq6Ws8++6x2796tY8eOaevWrerr65tcdqmpqdHGjRsn+1dUVOjVV19VQ0ODjh8/roMHD2rz5s0qKSnR4sWLZ+6TAACAOSmqZRpJqqys1NDQkHbs2CGfz6eCggI1Nzdr+fLlkiSfzxdy5shdd92l0dFR/e53v9OPf/xjXXTRRbruuuv06KOPztynAAAAc5bDzIG1kpGREbndbvn9fmVlZdkuBwAATMN0v7+5Nw0AALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArIr6nBHETyBo1N47rIHRMeUscKnEmy1nWuR7AAEAMFcRRhJUS7dPdU098vnHJts8bpdqK/K1tiD8fj4AAMxVLNMkoJZunzbtORoSRCSp3z+mTXuOqqXbZ6kyAABmHmEkwQSCRnVNPYp0LO5EW11TjwLBhD84FwCAaSGMJJj23uGwGZHPMpJ8/jG19w7HrygAAGYRYSTBDIxOHURi6QcAQKIjjCSYnAWuGe0HAECiI4wkmBJvtjxul6bawOvQ2V01Jd7seJYFAMCsIYwkGGeaQ7UV+ZIUFkgmntdW5HPeCAAgaRBGEtDaAo8aNhQqzx26FJPndqlhQyHnjAAAkgqHniWotQUercnP4wRWAEDSI4wkMGeaQ6UrFtouAwCAWcUyDQAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwKp02wUAAJCKAkGj9t5hDYyOKWeBSyXebDnTHLbLsoIwAgBAnLV0+1TX1COff2yyzeN2qbYiX2sLPBYrs4NlGgAA4qil26dNe46GBBFJ6vePadOeo2rp9lmqzB7CCAAAcRIIGtU19chEeG2ira
6pR4FgpB7JizACAECctPcOh82IfJaR5POPqb13OH5FJQDCCAAAcTIwOnUQiaVfsiCMAAAQJzkLXDPaL1kQRgAAiJMSb7Y8bpem2sDr0NldNSXe7HiWZV1MYWTnzp3yer1yuVwqKipSW1vbOfuPj49r+/btWr58uTIzM7VixQrt3r07poIBAJirnGkO1VbkS1JYIJl4XluRn3LnjUQdRhobG7VlyxZt375dnZ2dWr16tdatW6e+vr4pr7n11lv1l7/8Rbt27dK//vUvvfTSS7riiivOq3AAAOaitQUeNWwoVJ47dCkmz+1Sw4bClDxnxGGMiWr/0KpVq1RYWKiGhobJtpUrV2r9+vWqr68P69/S0qLbbrtNx48fV3Z2bNNOIyMjcrvd8vv9ysrKiulvAACQSFLhBNbpfn9HNTNy+vRpdXR0qLy8PKS9vLxchw4dinjN66+/ruLiYv3iF7/QkiVLdNlll+mBBx7QJ598MuX7jI+Pa2RkJOQBAEAycaY5VLpiof7P1UtUumJh0gWRaER1HPzg4KACgYByc3ND2nNzc9Xf3x/xmuPHj+vAgQNyuVx67bXXNDg4qPvuu0/Dw8NT/m6kvr5edXV10ZQGAADmqJh+wOpwhKY3Y0xY24RgMCiHw6G9e/eqpKREN954ox577DE9//zzU86O1NTUyO/3Tz5OnDgRS5kAAGAOiGpmZNGiRXI6nWGzIAMDA2GzJRM8Ho+WLFkit9s92bZy5UoZY3Ty5EldeumlYddkZmYqMzMzmtIAAMAcFdXMSEZGhoqKitTa2hrS3traqrKysojXXHPNNTp16pQ+/PDDybZ33nlHaWlpWrp0aQwlAwCAZBL1Mk11dbWeffZZ7d69W8eOHdPWrVvV19enqqoqSWeXWDZu3DjZ//bbb9fChQt19913q6enR2+++aYefPBBffe739UFF1wwc58EAADMSVEt00hSZWWlhoaGtGPHDvl8PhUUFKi5uVnLly+XJPl8vpAzR770pS+ptbVVP/rRj1RcXKyFCxfq1ltv1SOPPDJznwIpIxW2wgFAqon6nBEbOGcEktTS7VNdU0/IHS89bpdqK/JT8pAgAEh0s3LOCGBLS7dPm/YcDbv1dr9/TJv2HFVLt89SZQCA80UYQcILBI3qmnoUaQpvoq2uqUeBYMJP8gEAIiCMIOG19w6HzYh8lpHk84+pvXc4fkUBAGYMYQQJb2B06iASSz8AQGIhjCDh5SxwfXGnKPoBABILYQQJr8SbLY/bpak28Dp0dldNiTe2u0IDAOwijCDhOdMcqq3Il6SwQDLxvLYin/NGAGCOIoxgTlhb4FHDhkLluUOXYvLcLjVsKOScEQCYw6I+gRWwZW2BR2vy8ziBFQCSDGEEc4ozzaHSFQttlwEAmEEs0wAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwinNGACAFBIKGAwORsAgjAJDkWrp9qmvqkc8/NtnmcbtUW5HPrRSQEFimAYAk1tLt06Y9R0OCiCT1+8e0ac9RtXT7LFUG/C/CCAAkqUDQqK6pRybCaxNtdU09CgQj9QDihzACAEmqvXc4bEbks4wkn39M7b3D8SsKiIAwAgBJamB06iASSz9gthBGACBJ5SxwzWg/YLYQRgAgSZV4s+VxuzTVBl6Hzu6qKfFmx7MsIAxhBACSlDPNodqKfEkKCyQTz2sr8jlvBNYRRgAgia0t8KhhQ6Hy3KFLMXlulxo2FHLOCBICh54BQJJbW+DRmvw8TmBFwiKMAEAKcKY5VLpioe0ygIhYpgEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgF
WEEQAAYFW67QIAAIAdgaBRe++wBkbHlLPApRJvtpxpjrjXQRgBACAFtXT7VNfUI59/bLLN43aptiJfaws8ca2FZRoAAFJMS7dPm/YcDQkiktTvH9OmPUfV0u2Laz2EEQAAUkggaFTX1CMT4bWJtrqmHgWCkXrMDsIIAAAppL13OGxG5LOMJJ9/TO29w3GrKaYwsnPnTnm9XrlcLhUVFamtrW1a1x08eFDp6em6+uqrY3lbAABwngZGpw4isfSbCVGHkcbGRm3ZskXbt29XZ2enVq9erXXr1qmvr++c1/n9fm3cuFHf+ta3Yi4WAIBYBYJGh98b0v/t+kCH3xuK6zJEIslZ4JrRfjPBYYyJajRWrVqlwsJCNTQ0TLatXLlS69evV319/ZTX3Xbbbbr00kvldDr1pz/9SV1dXdN+z5GREbndbvn9fmVlZUVTLgAACbVzxLZA0Ogbj/4/9fvHIv5uxCEpz+3SgZ9ed97bfKf7/R3VzMjp06fV0dGh8vLykPby8nIdOnRoyuuee+45vffee6qtrZ3W+4yPj2tkZCTkAQBALBJt54htzjSHaivyJZ0NHp818by2Ij+u541EFUYGBwcVCASUm5sb0p6bm6v+/v6I17z77rvatm2b9u7dq/T06R1rUl9fL7fbPflYtmxZNGUCACApMXeOJIK1BR41bChUnjt0KSbP7VLDhsK4zxbFdOiZwxGalowxYW2SFAgEdPvtt6uurk6XXXbZtP9+TU2NqqurJ5+PjIwQSAAAUYtm50jpioXxKywBrC3waE1+3tw7gXXRokVyOp1hsyADAwNhsyWSNDo6qiNHjqizs1M//OEPJUnBYFDGGKWnp2vfvn267rrrwq7LzMxUZmZmNKUBABAmEXeOJBJnmiMhQlhUyzQZGRkqKipSa2trSHtra6vKysrC+mdlZemtt95SV1fX5KOqqkqXX365urq6tGrVqvOrHgCAc0jEnSMIF/UyTXV1te644w4VFxertLRUTz/9tPr6+lRVVSXp7BLLBx98oBdeeEFpaWkqKCgIuT4nJ0culyusHQCAmVbizZbH7frCnSMl3ux4l4bPiDqMVFZWamhoSDt27JDP51NBQYGam5u1fPlySZLP5/vCM0cAAIiHiZ0jm/YclUMKCSS2do4gXNTnjNjAOSMAgPPBOSN2TPf7O6bdNAAAzCWJtHME4QgjAICUkCg7RxCOu/YCAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsIowAgAArCKMAAAAqwgjAADAKsIIAACwijACAACsIowAAACrCCMAAMAqwggAALCKMAIAAKwijAAAAKvSbRcAILEFgkbtvcMaGB1TzgKXSrzZcqY5bJcFIIkQRgBMqaXbp7qmHvn8Y5NtHrdLtRX5WlvgsVgZgGTCMg2AiFq6fdq052hIEJGkfv+YNu05qpZun6XKACQbwgiAMIGgUV1Tj0yE1yba6pp6FAhG6gEA0SGMAAjT3jscNiPyWUaSzz+m9t7h+BUFIGkRRgCEGRidOojE0g8AzoUwAiBMzgLXjPYDgHMhjAAIU+LNlsft0lQbeB06u6umxJsdz7IAJCnCCIAwzjSHaivyJSkskEw8r63I57wRADOCMAIgorUFHjVsKFSeO3QpJs/tUsOGQs4ZATBjOPQMwJTWFni0Jj+PE1gBzCrCCIBzcqY5VLpioe0yACQxlmkAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWBVTGNm5c6e8Xq9cLpeKiorU1tY2Zd9XX31Va9as0cUXX6ysrC
yVlpbqjTfeiLlgAACQXKIOI42NjdqyZYu2b9+uzs5OrV69WuvWrVNfX1/E/m+++abWrFmj5uZmdXR06Jvf/KYqKirU2dl53sUDAIC5z2GMMdFcsGrVKhUWFqqhoWGybeXKlVq/fr3q6+un9TeuvPJKVVZW6qGHHppW/5GREbndbvn9fmVlZUVTLgAAsGS6399RzYycPn1aHR0dKi8vD2kvLy/XoUOHpvU3gsGgRkdHlZ2dPWWf8fFxjYyMhDwAAEByiiqMDA4OKhAIKDc3N6Q9NzdX/f390/obv/71r/XRRx/p1ltvnbJPfX293G735GPZsmXRlAkAAOaQmH7A6nA4Qp4bY8LaInnppZf08MMPq7GxUTk5OVP2q6mpkd/vn3ycOHEiljIBAMAckB5N50WLFsnpdIbNggwMDITNlnxeY2Oj7rnnHr388su6/vrrz9k3MzNTmZmZ0ZQGAADmqKhmRjIyMlRUVKTW1taQ9tbWVpWVlU153UsvvaS77rpLL774om666abYKgUAAEkpqpkRSaqurtYdd9yh4uJilZaW6umnn1ZfX5+qqqoknV1i+eCDD/TCCy9IOhtENm7cqMcff1xf//rXJ2dVLrjgArnd7hn8KAAAYC6KOoxUVlZqaGhIO3bskM/nU0FBgZqbm7V8+XJJks/nCzlz5Pe//73OnDmjH/zgB/rBD34w2X7nnXfq+eefP/9PAAAA5rSozxmxgXNGAACYe2blnBEAAICZRhgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVYQRAABgFWEEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYFW67QIAINUFgkbtvcMaGB1TzgKXSrzZcqY5bJcFxA1hBAAsaun2qa6pRz7/2GSbx+1SbUW+1hZ4LFYGxA/LNABgSUu3T5v2HA0JIpLU7x/Tpj1H1dLts1QZEF+EEQCwIBA0qmvqkYnw2kRbXVOPAsFIPYDkQhgBAAvae4fDZkQ+y0jy+cfU3jscv6IASwgjAGDBwOjUQSSWfsBcRhgBAAtyFrhmtB8wlxFGAMCCEm+2PG6XptrA69DZXTUl3ux4lgVYQRgBAAucaQ7VVuRLUlggmXheW5HPeSNICYQRALBkbYFHDRsKlecOXYrJc7vUsKGQc0aQMjj0DAAsWlvg0Zr8PE5gRUojjACAZc40h0pXLLRdBmANyzQAAMAqwggAALCKMAIAAKwijAAAAKsIIwAAwCrCCAAAsCqmMLJz5055vV65XC4VFRWpra3tnP3379+voqIiuVwuXXLJJXrqqadiKhYAACSfqMNIY2OjtmzZou3bt6uzs1OrV6/WunXr1NfXF7F/b2+vbrzxRq1evVqdnZ362c9+ps2bN+uPf/zjeRcPAADmPocxxkRzwapVq1RYWKiGhobJtpUrV2r9+vWqr68P6//Tn/5Ur7/+uo4dOzbZVlVVpX/+8586fPjwtN5zZGREbrdbfr9fWVlZ0ZQLAAAsme73d1QnsJ4+fVodHR3atm1bSHt5ebkOHToU8ZrDhw+rvLw8pO2GG27Qrl279Omnn2revHlh14yPj2t8fHzyud/vl3T2QwEAgLlh4nv7i+Y9ogojg4ODCgQCys3NDWnPzc1Vf39/xGv6+/sj9j9z5owGBwfl8YTfCKq+vl51dXVh7cuWLYumXAAAkABGR0fldrunfD2me9M4HKE3cDLGhLV9Uf9I7RNqampUXV09+TwYDOr999/X1VdfrRMnTrBUky
BGRka0bNkyxiSBMCaJhfFIPIxJfBljNDo6qsWLF5+zX1RhZNGiRXI6nWGzIAMDA2GzHxPy8vIi9k9PT9fChZFvDJWZmanMzMyQtrS0s7+1zcrK4j+gBMOYJB7GJLEwHomHMYmfc82ITIhqN01GRoaKiorU2toa0t7a2qqysrKI15SWlob137dvn4qLiyP+XgQAAKSWqLf2VldX69lnn9Xu3bt17Ngxbd26VX19faqqqpJ0doll48aNk/2rqqr0/vvvq7q6WseOHdPu3bu1a9cuPfDAAzP3KQAAwJwV9W9GKisrNTQ0pB07dsjn86mgoEDNzc1avny5JMnn84WcOeL1etXc3KytW7fqySef1OLFi/XEE0/olltuiep9MzMzVVtbG7Z8A3sYk8TDmCQWxiPxMCaJKepzRgAAAGYS96YBAABWEUYAAIBVhBEAAGAVYQQAAFiVUGFk586d8nq9crlcKioqUltb2zn779+/X0VFRXK5XLrkkkv01FNPxanS1BHNmLz66qtas2aNLr74YmVlZam0tFRvvPFGHKtNftH+G5lw8OBBpaen6+qrr57dAlNQtGMyPj6u7du3a/ny5crMzNSKFSu0e/fuOFWbGqIdk7179+qqq67ShRdeKI/Ho7vvvltDQ0NxqhaSJJMg/vCHP5h58+aZZ555xvT09Jj777/fzJ8/37z//vsR+x8/ftxceOGF5v777zc9PT3mmWeeMfPmzTOvvPJKnCtPXtGOyf33328effRR097ebt555x1TU1Nj5s2bZ44ePRrnypNTtOMx4b///a+55JJLTHl5ubnqqqviU2yKiGVMbr75ZrNq1SrT2tpqent7zd///ndz8ODBOFad3KIdk7a2NpOWlmYef/xxc/z4cdPW1mauvPJKs379+jhXntoSJoyUlJSYqqqqkLYrrrjCbNu2LWL/n/zkJ+aKK64Iafv+979vvv71r89ajakm2jGJJD8/39TV1c10aSkp1vGorKw0P//5z01tbS1hZIZFOyZ//vOfjdvtNkNDQ/EoLyVFOya//OUvzSWXXBLS9sQTT5ilS5fOWo0IlxDLNKdPn1ZHR4fKy8tD2svLy3Xo0KGI1xw+fDis/w033KAjR47o008/nbVaU0UsY/J5wWBQo6Ojys7Ono0SU0qs4/Hcc8/pvffeU21t7WyXmHJiGZPXX39dxcXF+sUvfqElS5bosssu0wMPPKBPPvkkHiUnvVjGpKysTCdPnlRzc7OMMfrPf/6jV155RTfddFM8Ssb/iOmuvTNtcHBQgUAg7GZ7ubm5YTfZm9Df3x+x/5kzZzQ4OCiPxzNr9aaCWMbk837961/ro48+0q233jobJaaUWMbj3Xff1bZt29TW1qb09IT4p55UYhmT48eP68CBA3K5XHrttdc0ODio++67T8PDw/xuZAbEMiZlZWXau3evKisrNTY2pjNnzujmm2/Wb3/723iUjP+REDMjExwOR8hzY0xY2xf1j9SO2EU7JhNeeuklPfzww2psbFROTs5slZdypjsegUBAt99+u+rq6nTZZZfFq7yUFM2/kWAwKIfDob1796qkpEQ33nijHnvsMT3//PPMjsygaMakp6dHmzdv1kMPPaSOjg61tLSot7d38n5riI+E+N+lRYsWyel0hiXXgYGBsIQ7IS8vL2L/9PR0LVy4cNZqTRWxjMmExsZG3XPPPXr55Zd1/fXXz2aZKSPa8RgdHdWRI0fU2dmpH/7wh5LOfhEaY5Senq59+/bpuuuui0vtySqWfyMej0dLliwJuaX6ypUrZYzRyZMndemll85qzckuljGpr6/XNddcowcffFCS9NWvflXz58/X6tWr9cgjjzDLHicJMTOSkZGhoqIitba2hrS3traqrKws4jWlpaVh/fft26fi4mLNmzdv1mpNFbGMiXR2RuSuu+7Siy++yJrrDIp2PLKysvTWW2+pq6tr8lFVVaXLL79cXV1dWrVqVbxKT1qx/Bu55pprdOrUKX
344YeTbe+8847S0tK0dOnSWa03FcQyJh9//LHS0kK/Cp1Op6T/nW1HHNj65eznTWzH2rVrl+np6TFbtmwx8+fPN//+97+NMcZs27bN3HHHHZP9J7b2bt261fT09Jhdu3axtXeGRTsmL774oklPTzdPPvmk8fl8k4///ve/tj5CUol2PD6P3TQzL9oxGR0dNUuXLjXf/va3zdtvv232799vLr30UnPvvffa+ghJJ9oxee6550x6errZuXOnee+998yBAwdMcXGxKSkpsfURUlLChBFjjHnyySfN8uXLTUZGhiksLDT79++ffO3OO+801157bUj/v/71r+ZrX/uaycjIMF/5yldMQ0NDnCtOftGMybXXXmskhT3uvPPO+BeepKL9N/JZhJHZEe2YHDt2zFx//fXmggsuMEuXLjXV1dXm448/jnPVyS3aMXniiSdMfn6+ueCCC4zH4zHf+c53zMmTJ+NcdWpzGMM8FAAAsCchfjMCAABSF2EEAABYRRgBAABWEUYAAIBVhBEAAGAVYQQAAFhFGAEAAFYRRgAAgFWEEQAAYBVhBAAAWEUYAQAAVhFGAACAVf8fBF+wjk5nx9AAAAAASUVORK5CYII=", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiMAAAGgCAYAAAB45mdaAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAqoElEQVR4nO3df1RcdX7/8dcwBCamYSyJgYlBgmlMIHTdBUoC2XSPP4KJlt2cbSutNTE28UhWV5HqfuXEiuR4Dt0fptFtQOMmpmmiy9Gouzll0Tnnu5sQsaUh5JzNYldr2IXEQQrpzqAuYOB+/8gX6jiQcEeYDzM8H+fcP+bD586853Oi9zWfe+/nOizLsgQAAGBInOkCAADAzEYYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEbZDiPHjh1TcXGxFi5cKIfDoddff/2y+xw9elS5ublyuVy69tpr9eyzz4ZTKwAAiEHxdnf4+OOPdf311+vuu+/Wn//5n1+2f3t7u2699Vbdc889OnjwoN566y1961vf0lVXXTWh/SVpeHhYH3zwgebOnSuHw2G3ZAAAYIBlWerr69PChQsVF3eJ+Q/rC5Bkvfbaa5fs853vfMdavnx5UNu9995rrVq1asKf09nZaUliY2NjY2Nji8Kts7Pzksd52zMjdr399tsqKioKarvlllu0d+9effrpp5o1a1bIPgMDAxoYGBh9bf3/Bwt3dnYqKSlpagsGAACTIhAIKC0tTXPnzr1kvykPI11dXUpJSQlqS0lJ0YULF9TT0yOPxxOyT3V1taqqqkLak5KSCCMAAESZy11iEZG7aT5fxMhMx3jFVVRUyO/3j26dnZ1TXiMAADBjymdGUlNT1dXVFdTW3d2t+Ph4zZs3b8x9EhMTlZiYONWlAQCAaWDKZ0YKCgrk9XqD2t58803l5eWNeb0IAACYWWyHkY8++kinTp3SqVOnJF28dffUqVPq6OiQdPEUy6ZNm0b7l5aW6re//a3Ky8v1zjvvaN++fdq7d68efvjhyfkGAAAgqtk+TXPixAndcMMNo6/Ly8slSXfddZf2798vn883GkwkKSMjQ/X19XrooYe0e/duLVy4UM8888yE1xgBAACxzWGNXE06jQUCAbndbvn9fu6mAQAgSkz0+M2zaQAAgFGEEQAAYNSU39oLmDA0bKm5/by6+/q1YK5L+RnJcsbxXCMAmI4II4g5Dad9qjrSJp+/f7TN43apsjhL67JDV/wFgJlquvxwI4wgpjSc9mnbwZP6/FXZXf5+bTt4UrV35hBIAEDT64cb14wgZgwNW6o60hYSRCSNtlUdadPQ8LS/gQw
AptTID7fPBhHpf3+4NZz2RbQewghiRnP7+ZD/sD7LkuTz96u5/XzkigKAaWY6/nAjjCBmdPeNH0TC6QcAsWg6/nAjjCBmLJjrmtR+ABCLpuMPN8IIYkZ+RrI8bpfGuw7coYsXZ+VnJEeyLACYVqbjDzfCCGKGM86hyuIsSQoJJCOvK4uzWG8EwIw2HX+4EUYQU9Zle1R7Z45S3cGJPtXt4rZeAND0/OHGg/IQk6bLQj4AMF1FYp2RiR6/CSMAAMxQU/3DbaLHb1ZgBQBghnLGOVSwZJ7pMrhmBAAAmEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgVFhhpKamRhkZGXK5XMrNzVVjY+Ml++/evVuZmZmaPXu2li1bpgMHDoRVLAAAiD3xdneoq6tTWVmZampqtHr1aj333HNav3692tradM0114T0r62tVUVFhZ5//nn9yZ/8iZqbm3XPPffoD//wD1VcXDwpXwIAAEQvh2VZlp0dVq5cqZycHNXW1o62ZWZmasOGDaqurg7pX1hYqNWrV+v73//+aFtZWZlOnDih48ePT+gzA4GA3G63/H6/kpKS7JQLAAAMmejx29ZpmsHBQbW0tKioqCiovaioSE1NTWPuMzAwIJfLFdQ2e/ZsNTc369NPPx13n0AgELQBAIDYZCuM9PT0aGhoSCkpKUHtKSkp6urqGnOfW265RT/60Y/U0tIiy7J04sQJ7du3T59++ql6enrG3Ke6ulput3t0S0tLs1MmAACIImFdwOpwOIJeW5YV0jbi7//+77V+/XqtWrVKs2bN0je+8Q1t3rxZkuR0Osfcp6KiQn6/f3Tr7OwMp0wAABAFbIWR+fPny+l0hsyCdHd3h8yWjJg9e7b27dunTz75RL/5zW/U0dGhxYsXa+7cuZo/f/6Y+yQmJiopKSloAwAAsclWGElISFBubq68Xm9Qu9frVWFh4SX3nTVrlhYtWiSn06kf//jH+rM/+zPFxbHMCQAAM53tW3vLy8u1ceNG5eXlqaCgQHv27FFHR4dKS0slXTzFcu7cudG1RN599101Nzdr5cqV+p//+R/t3LlTp0+f1j//8z9P7jcBAABRyXYYKSkpUW9vr3bs2CGfz6fs7GzV19crPT1dkuTz+dTR0THaf2hoSE899ZR+/etfa9asWbrhhhvU1NSkxYsXT9qXAAAA0cv2OiMmsM4IAADRZ0rWGQEAAJhshBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARoUVRmpqapSRkSGXy6Xc3Fw1NjZesv+hQ4d0/fXX64orrpDH49Hdd9+t3t7esAoGAACxxXYYqaurU1lZmbZv367W1latWbNG69evV0dHx5j9jx8/rk2bNmnLli361a9+pZdffln/8R//oa1bt37h4gEAQPSzHUZ27typLVu2aOvWrcrMzNSuXbuUlpam2traMfv/27/9mxYvXqwHHnhAGRkZ+upXv6p7771XJ06c+MLFAwCA6GcrjAwODqqlpUVFRUVB7UVFRWpqahpzn8LCQp09e1b19fWyLEsffvihXnnlFd12223jfs7AwIACgUDQBgAAYpOtMNLT06OhoSGlpKQEtaekpKirq2vMfQoLC3Xo0CGVlJQoISFBqampuvLKK/XDH/5w3M+prq6W2+0e3dLS0uyUCQAAokhYF7A6HI6g15ZlhbSNaGtr0wMPPKDHH39cLS0tamhoUHt7u0p
LS8d9/4qKCvn9/tGts7MznDIBAEAUiLfTef78+XI6nSGzIN3d3SGzJSOqq6u1evVqPfLII5KkL33pS5ozZ47WrFmjJ598Uh6PJ2SfxMREJSYm2ikNAABEKVszIwkJCcrNzZXX6w1q93q9KiwsHHOfTz75RHFxwR/jdDolXZxRAQAAFw0NW3r7/V795NQ5vf1+r4aGZ8Zx0tbMiCSVl5dr48aNysvLU0FBgfbs2aOOjo7R0y4VFRU6d+6cDhw4IEkqLi7WPffco9raWt1yyy3y+XwqKytTfn6+Fi5cOLnfBgCAKNVw2qeqI23y+ftH2zxulyqLs7QuO/QsQiyxHUZKSkrU29urHTt2yOfzKTs7W/X19UpPT5ck+Xy+oDVHNm/erL6+Pv3TP/2T/u7v/k5XXnmlbrzxRn33u9+dvG8BAEAUazjt07aDJ/X5eZAuf7+2HTyp2jtzYjqQOKwoOFcSCATkdrvl9/uVlJRkuhwAACbN0LClr373/wbNiHyWQ1Kq26Xj/+dGOePGvllkupro8Ztn0wAAYFBz+/lxg4gkWZJ8/n41t5+PXFERRhgBAMCg7r7xg0g4/aIRYQQAAIMWzHVNar9oRBgBAMCg/IxkedwujXc1iEMX76rJz0iOZFkRRRgBAMAgZ5xDlcVZkhQSSEZeVxZnRd3Fq3YQRgAAMGxdtke1d+Yo1R18KibV7Yr523qlMNYZAQAAk29dtkdrs1LV3H5e3X39WjD34qmZWJ4RGUEYAQBgmnDGOVSwZJ7pMiKO0zQAAMAowggAADCKMAIAAIwijAAAAKMIIwAAwCjCCAAAMIowAgAAjGKdkRloaNiakYvqAACmJ8LIDNNw2qeqI23y+f/3UdQet0uVxVkxv9wwAGB64jTNDNJw2qdtB08GBRFJ6vL3a9vBk2o47TNUGQBgJiOMzBBDw5aqjrTJGuNvI21VR9o0NDxWDwAApg5hZIZobj8fMiPyWZYkn79fze3nI1cUAAAijMwY3X3jB5Fw+gEAMFkIIzPEgrmuSe0HAMBkIYzMEPkZyfK4XRrvBl6HLt5Vk5+RHMmyAAAgjMwUzjiHKouzJCkkkIy8rizOYr0RAEDEEUZmkHXZHtXemaNUd/CpmFS3S7V35rDOCADACBY9m2HWZXu0NiuVFVgBANMGYWQGcsY5VLBknukyAACQxGkaAABgGGEEAAAYRRgBAABGEUYAAIBRYYWRmpoaZWRkyOVyKTc3V42NjeP23bx5sxwOR8i2YsWKsIsGAACxw3YYqaurU1lZmbZv367W1latWbNG69evV0dHx5j9n376afl8vtGts7NTycnJ+su//MsvXDwAAIh+DsuybD0zfuXKlcrJyVFtbe1oW2ZmpjZs2KDq6urL7v/666/rm9/8ptrb25Wenj6hzwwEAnK73fL7/UpKSrJTLgAAMGSix29bMyODg4NqaWlRUVFRUHtRUZGampom9B579+7VzTfffMkgMjAwoEAgELQBAIDYZCuM9PT0aGhoSCkpKUHtKSkp6urquuz+Pp9PP/vZz7R169ZL9quurpbb7R7d0tLS7JQJAACiSFgXsDocwUuHW5YV0jaW/fv368orr9SGDRsu2a+iokJ+v3906+zsDKdMAAAQBWwtBz9//nw5nc6QWZDu7u6Q2ZLPsyxL+/bt08aNG5WQkHDJvomJiUpMTLRTGgAAiFK2ZkYSEhKUm5srr9cb1O71elVYWHjJfY8ePar/+q//0pYtW+xXCQAAYpbtB+WVl5dr48aNysvLU0FBgfbs2aOOjg6VlpZKuniK5dy5czpw4EDQfnv37tXKlSuVnZ09OZUDAICYYDuMlJSUqLe3Vzt27JDP51N2drbq6+tH747x+Xwha474/X4dPnxYTz/99ORUDQAAYobtdUZMYJ0RAACiz5SsMwIAADDZCCMAAMAowggAADCKMAIAAIwijAAAAKMIIwAAwCjCCAAAMIowAgAAjCKMAAAAowgjAADAKMIIAAAwijA
CAACMIowAAACjCCMAAMAowggAADCKMAIAAIyKN10AgMkxNGypuf28uvv6tWCuS/kZyXLGOUyXBQCXRRgBYkDDaZ+qjrTJ5+8fbfO4XaosztK6bI/BygDg8jhNA0S5htM+bTt4MiiISFKXv1/bDp5Uw2mfocoAYGIII0AUGxq2VHWkTdYYfxtpqzrSpqHhsXoAwPRAGAGiWHP7+ZAZkc+yJPn8/WpuPx+5ogDAJsIIEMW6+8YPIuH0AwATCCNAFFsw1zWp/QDABMIIEMXyM5Llcbs03g28Dl28qyY/IzmSZQGALYQRIIo54xyqLM6SpJBAMvK6sjiL9UYATGuEESDKrcv2qPbOHKW6g0/FpLpdqr0zh3VGAEx7LHoGxIB12R6tzUplBVYAUYkwAsQIZ5xDBUvmmS4DAGwjjEQpnkMCAIgVhJEoxHNIAACxhAtYowzPIQEAxBrCSBThOSQAgFgUVhipqalRRkaGXC6XcnNz1djYeMn+AwMD2r59u9LT05WYmKglS5Zo3759YRU8k/EcEgBALLJ9zUhdXZ3KyspUU1Oj1atX67nnntP69evV1tama665Zsx9br/9dn344Yfau3ev/uiP/kjd3d26cOHCFy5+puE5JACAWGQ7jOzcuVNbtmzR1q1bJUm7du3SG2+8odraWlVXV4f0b2ho0NGjR3XmzBklJ19cknrx4sVfrOoZiueQAABika3TNIODg2ppaVFRUVFQe1FRkZqamsbc56c//any8vL0ve99T1dffbWuu+46Pfzww/r9738/7ucMDAwoEAgEbeA5JACA2GQrjPT09GhoaEgpKSlB7SkpKerq6hpznzNnzuj48eM6ffq0XnvtNe3atUuvvPKK7rvvvnE/p7q6Wm63e3RLS0uzU2bM4jkkAIBYFNYFrA5H8MHOsqyQthHDw8NyOBw6dOiQ8vPzdeutt2rnzp3av3//uLMjFRUV8vv9o1tnZ2c4ZcYknkMCAIg1tq4ZmT9/vpxOZ8gsSHd3d8hsyQiPx6Orr75abrd7tC0zM1OWZens2bNaunRpyD6JiYlKTEy0U9qMwnNIAACxxNbMSEJCgnJzc+X1eoPavV6vCgsLx9xn9erV+uCDD/TRRx+Ntr377ruKi4vTokWLwigZ0v8+h+QbX75aBUvmEUQAAFHL9mma8vJy/ehHP9K+ffv0zjvv6KGHHlJHR4dKS0slXTzFsmnTptH+d9xxh+bNm6e7775bbW1tOnbsmB555BH97d/+rWbPnj153wQAAEQl27f2lpSUqLe3Vzt27JDP51N2drbq6+uVnp4uSfL5fOro6Bjt/wd/8Afyer369re/rby8PM2bN0+33367nnzyycn7FgAAIGo5LMua9muHBwIBud1u+f1+JSUlmS4HAABMwESP3zybBgAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRYYWRmpoaZWRkyOVyKTc3V42NjeP2/cUvfiGHwxGy/ed//mfYRQMAgNhhO4zU1dWprKxM27dvV2trq9asWaP169ero6Pjkvv9+te/ls/nG92WLl0adtEAACB22A4jO3fu1JYtW7R161ZlZmZq165dSktLU21t7SX3W7BggVJTU0c3p9MZdtEAACB22Aojg4ODamlpUVFRUVB7UVGRmpqaLrnvV77yFXk8Ht100036+c9/fsm+AwMDCgQCQRsAAIhNtsJIT0+PhoaGlJKSEtSekpKirq6uMffxeDzas2ePDh8+rFdffVXLli3TTTfdpGPHjo37OdXV1XK73aNbWlqanTIBAEAUiQ9nJ4fDEfT
asqyQthHLli3TsmXLRl8XFBSos7NTP/jBD/Snf/qnY+5TUVGh8vLy0deBQIBAAgBAjLI1MzJ//nw5nc6QWZDu7u6Q2ZJLWbVqld57771x/56YmKikpKSgDQAAxCZbYSQhIUG5ubnyer1B7V6vV4WFhRN+n9bWVnk8HjsfDQAAYpTt0zTl5eXauHGj8vLyVFBQoD179qijo0OlpaWSLp5iOXfunA4cOCBJ2rVrlxYvXqwVK1ZocHBQBw8e1OHDh3X48OHJ/SYAACAq2Q4jJSUl6u3t1Y4dO+Tz+ZSdna36+nqlp6dLknw+X9CaI4ODg3r44Yd17tw5zZ49WytWrNC//uu/6tZbb528bwEAAKKWw7Isy3QRlxMIBOR2u+X3+7l+BACAKDHR4zfPpgEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYFW+6ACAaDQ1bam4/r+6+fi2Y61J+RrKccQ7TZQFAVCKMADY1nPap6kibfP7+0TaP26XK4iyty/YYrAwAohOnaQAbGk77tO3gyaAgIkld/n5tO3hSDad9hioDgOhFGAEmaGjYUtWRNllj/G2krepIm4aGx+oBABgPYQSYoOb28yEzIp9lSfL5+9Xcfj5yRQFADCCMABPU3Td+EAmnHwDgIsIIMEEL5romtR8A4CLCCDBB+RnJ8rhdGu8GXocu3lWTn5EcybIAIOoRRoAJcsY5VFmcJUkhgWTkdWVxFuuNAIBNhBHAhnXZHtXemaNUd/CpmFS3S7V35rDOCACEgUXPAJvWZXu0NiuVFVgBYJIQRoAwOOMcKlgyz3QZABATwjpNU1NTo4yMDLlcLuXm5qqxsXFC+7311luKj4/Xl7/85XA+FgAAxCDbYaSurk5lZWXavn27WltbtWbNGq1fv14dHR2X3M/v92vTpk266aabwi4WAADEHodlWbbWrl65cqVycnJUW1s72paZmakNGzaourp63P3+6q/+SkuXLpXT6dTrr7+uU6dOjdt3YGBAAwMDo68DgYDS0tLk9/uVlJRkp1wAAGBIIBCQ2+2+7PHb1szI4OCgWlpaVFRUFNReVFSkpqamcfd74YUX9P7776uysnJCn1NdXS232z26paWl2SkTAABEEVthpKenR0NDQ0pJSQlqT0lJUVdX15j7vPfee3r00Ud16NAhxcdP7HrZiooK+f3+0a2zs9NOmQAAIIqEdTeNwxF8C6NlWSFtkjQ0NKQ77rhDVVVVuu666yb8/omJiUpMTAynNAAAEGVshZH58+fL6XSGzIJ0d3eHzJZIUl9fn06cOKHW1lbdf//9kqTh4WFZlqX4+Hi9+eabuvHGG79A+QAAINrZOk2TkJCg3Nxceb3eoHav16vCwsKQ/klJSfrlL3+pU6dOjW6lpaVatmyZTp06pZUrV36x6gEAQNSzfZqmvLxcGzduVF5engoKCrRnzx51dHSotLRU0sXrPc6dO6cDBw4oLi5O2dnZQfsvWLBALpcrpB0AAMxMtsNISUmJent7tWPHDvl8PmVnZ6u+vl7p6emSJJ/Pd9k1RwAAAEbYXmfEhInepwwAAKaPKVlnBAAAYLIRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABgVb7oAYMTQsKXm9vPq7uvXgrku5WckyxnnMF0WAGCKEUYwLTSc9qnqSJt8/v7RNo/bpcriLK3L9hisDAAw1ThNA+MaTvu07eDJoCAiSV3+fm07eFINp32GKgMARAJhBEYNDVu
qOtIma4y/jbRVHWnT0PBYPQAAsYAwAqOa28+HzIh8liXJ5+9Xc/v5yBUFAIgowgiM6u4bP4iE0w8AEH0IIzBqwVzXpPYDAESfsMJITU2NMjIy5HK5lJubq8bGxnH7Hj9+XKtXr9a8efM0e/ZsLV++XP/4j/8YdsGILfkZyfK4XRrvBl6HLt5Vk5+RHMmyAAARZDuM1NXVqaysTNu3b1dra6vWrFmj9evXq6OjY8z+c+bM0f33369jx47pnXfe0WOPPabHHntMe/bs+cLFI/o54xyqLM6SpJBAMvK6sjiL9UYAIIY5LMuydZvCypUrlZOTo9ra2tG2zMxMbdiwQdXV1RN6j29+85uaM2eO/uVf/mVC/QOBgNxut/x+v5KSkuyUiyjBOiMAEHsmevy2tejZ4OCgWlpa9Oijjwa1FxUVqampaULv0draqqamJj355JPj9hkYGNDAwMDo60AgYKdMRKF12R6tzUplBVYAmIFshZGenh4NDQ0pJSUlqD0lJUVdXV2X3HfRokX67//+b124cEFPPPGEtm7dOm7f6upqVVVV2SkNMcAZ51DBknmmywAARFhYF7A6HMG/Vi3LCmn7vMbGRp04cULPPvusdu3apZdeemncvhUVFfL7/aNbZ2dnOGUCAIAoYGtmZP78+XI6nSGzIN3d3SGzJZ+XkZEhSfrjP/5jffjhh3riiSf013/912P2TUxMVGJiop3SAABAlLI1M5KQkKDc3Fx5vd6gdq/Xq8LCwgm/j2VZQdeEAACAmcv2U3vLy8u1ceNG5eXlqaCgQHv27FFHR4dKS0slXTzFcu7cOR04cECStHv3bl1zzTVavny5pIvrjvzgBz/Qt7/97Un8GgAAIFrZDiMlJSXq7e3Vjh075PP5lJ2drfr6eqWnp0uSfD5f0Jojw8PDqqioUHt7u+Lj47VkyRL9wz/8g+69997J+xYAACBq2V5nxATWGQEAIPpM9PjNs2kAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABG2X5qb6wYGrbU3H5e3X39WjDXpfyMZDnjHKbLAgBgxpmRYaThtE9VR9rk8/ePtnncLlUWZ2ldtsdgZQAAzDwz7jRNw2mfth08GRREJKnL369tB0+q4bTPUGUAAMxMMyqMDA1bqjrSJmuMv420VR1p09DwWD0AAMBUmFFhpLn9fMiMyGdZknz+fjW3n49cUQAAzHAzKox0940fRMLpBwAAvrgZFUYWzHVNaj8AAPDFzagwkp+RLI/bpfFu4HXo4l01+RnJkSwLAIAZbUaFEWecQ5XFWZIUEkhGXlcWZ7HeCAAAETSjwogkrcv2qPbOHKW6g0/FpLpdqr0zh3VGAACIsBm56Nm6bI/WZqWyAisAANPAjAwj0sVTNgVL5pkuAwCAGW/GnaYBAADTC2EEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFFhhZGamhplZGTI5XIpNzdXjY2N4/Z99dVXtXbtWl111VVKSkpSQUGB3njjjbALBgAAscV2GKmrq1NZWZm2b9+u1tZWrVmzRuvXr1dHR8eY/Y8dO6a1a9eqvr5eLS0tuuGGG1RcXKzW1tYvXDwAAIh+DsuyLDs7rFy5Ujk5OaqtrR1ty8zM1IYNG1RdXT2h91ixYoVKSkr0+OOPj/n3gYEBDQwMjL4OBAJKS0uT3+9XUlKSnXIBAIAhgUBAbrf7ssdvWzMjg4ODamlpUVFRUVB7UVGRmpqaJvQew8PD6uvrU3Jy8rh9qqur5Xa7R7e0tDQ7ZQIAgChiK4z09PRoaGhIKSkpQe0pKSnq6uqa0Hs89dRT+vjjj3X77beP26eiokJ+v3906+zstFMmAACIImE9tdfhcAS9tiwrpG0sL730kp544gn95Cc/0YIFC8btl5iYqMTExHBKAwAAUcZWGJk/f76cTmfILEh
3d3fIbMnn1dXVacuWLXr55Zd18803268UAADEJFunaRISEpSbmyuv1xvU7vV6VVhYOO5+L730kjZv3qwXX3xRt912W3iVAgCAmGT7NE15ebk2btyovLw8FRQUaM+ePero6FBpaamki9d7nDt3TgcOHJB0MYhs2rRJTz/9tFatWjU6qzJ79my53e5J/CoAACAa2Q4jJSUl6u3t1Y4dO+Tz+ZSdna36+nqlp6dLknw+X9CaI88995wuXLig++67T/fdd99o+1133aX9+/d/8W8AAAAkSUPDlprbz6u7r18L5rqUn5EsZ9zlr+k0zfY6IyZM9D5lAABmqobTPlUdaZPP3z/a5nG7VFmcpXXZHiM1Tck6IwAAYPppOO3TtoMng4KIJHX5+7Xt4Ek1nPYZqmxiCCMAAESxoWFLVUfaNNZpjpG2qiNtGhqevidCCCMAAESx5vbzITMin2VJ8vn71dx+PnJF2UQYAQAginX3jR9EwulnAmEEAIAotmCua1L7mUAYAQAgiuVnJMvjdmm8G3gdunhXTX7G+A+oNY0wAgBAFHPGOVRZnCVJIYFk5HVlcda0Xm+EMAIAQJRbl+1R7Z05SnUHn4pJdbtUe2eOsXVGJiqsp/YCAIDpZV22R2uzUqNyBVbCCAAAMcIZ51DBknmmy7CN0zQAAMAowggAADCKMAIAAIwijAAAAKMIIwAAwCjCCAAAMIowAgAAjCKMAAAAowgjAADAqKhYgdWyLElSIBAwXAkAAJiokeP2yHF8PFERRvr6+iRJaWlphisBAAB29fX1ye12j/t3h3W5uDINDA8P64MPPtDcuXPlcIT3wJ9AIKC0tDR1dnYqKSlpkivEZzHWkcV4Rw5jHTmMdWRN1XhblqW+vj4tXLhQcXHjXxkSFTMjcXFxWrRo0aS8V1JSEv+wI4SxjizGO3IY68hhrCNrKsb7UjMiI7iAFQAAGEUYAQAARs2YMJKYmKjKykolJiaaLiXmMdaRxXhHDmMdOYx1ZJke76i4gBUAAMSuGTMzAgAApifCCAAAMIowAgAAjCKMAAAAowgjAADAqJgKIzU1NcrIyJDL5VJubq4aGxsv2f/o0aPKzc2Vy+XStddeq2effTZClUY/O2P96quvau3atbrqqquUlJSkgoICvfHGGxGsNrrZ/Xc94q233lJ8fLy+/OUvT22BMcbueA8MDGj79u1KT09XYmKilixZon379kWo2uhmd6wPHTqk66+/XldccYU8Ho/uvvtu9fb2Rqja6HXs2DEVFxdr4cKFcjgcev311y+7T8SPj1aM+PGPf2zNmjXLev755622tjbrwQcftObMmWP99re/HbP/mTNnrCuuuMJ68MEHrba2Nuv555+3Zs2aZb3yyisRrjz62B3rBx980Prud79rNTc3W++++65VUVFhzZo1yzp58mSEK48+dsd6xO9+9zvr2muvtYqKiqzrr78+MsXGgHDG++tf/7q1cuVKy+v1Wu3t7da///u/W2+99VYEq45Odse6sbHRiouLs55++mnrzJkzVmNjo7VixQprw4YNEa48+tTX11vbt2+3Dh8+bEmyXnvttUv2N3F8jJkwkp+fb5WWlga1LV++3Hr00UfH7P+d73zHWr58eVDbvffea61atWrKaowVdsd6LFlZWVZVVdVklxZzwh3rkpIS67HHHrMqKysJIzbYHe+f/exnltvttnp7eyNRXkyxO9bf//73rWuvvTao7ZlnnrEWLVo0ZTXGoomEERPHx5g4TTM4OKiWlhYVFRUFtRcVFampqWnMfd5+++2Q/rfccotOnDihTz/9dMpqjXbhjPXnDQ8Pq6+vT8nJyVNRYswId6xfeOEFvf/++6qsrJzqEmNKOOP905/+VHl5efre976nq6++Wtddd50efvhh/f73v49EyVErnLEuLCzU2bNnVV9fL8uy9OGHH+qVV17RbbfdFomSZxQTx8eoeGrv5fT09GhoaEgpKSlB7SkpKerq6hpzn66urjH
7X7hwQT09PfJ4PFNWbzQLZ6w/76mnntLHH3+s22+/fSpKjBnhjPV7772nRx99VI2NjYqPj4n/vCMmnPE+c+aMjh8/LpfLpddee009PT361re+pfPnz3PdyCWEM9aFhYU6dOiQSkpK1N/frwsXLujrX/+6fvjDH0ai5BnFxPExJmZGRjgcjqDXlmWFtF2u/1jtCGV3rEe89NJLeuKJJ1RXV6cFCxZMVXkxZaJjPTQ0pDvuuENVVVW67rrrIlVezLHzb3t4eFgOh0OHDh1Sfn6+br31Vu3cuVP79+9ndmQC7Ix1W1ubHnjgAT3++ONqaWlRQ0OD2tvbVVpaGolSZ5xIHx9j4qfT/Pnz5XQ6QxJ1d3d3SLobkZqaOmb/+Ph4zZs3b8pqjXbhjPWIuro6bdmyRS+//LJuvvnmqSwzJtgd676+Pp04cUKtra26//77JV08WFqWpfj4eL355pu68cYbI1J7NArn37bH49HVV18tt9s92paZmSnLsnT27FktXbp0SmuOVuGMdXV1tVavXq1HHnlEkvSlL31Jc+bM0Zo1a/Tkk08ymz2JTBwfY2JmJCEhQbm5ufJ6vUHtXq9XhYWFY+5TUFAQ0v/NN99UXl6eZs2aNWW1Rrtwxlq6OCOyefNmvfjii5zjnSC7Y52UlKRf/vKXOnXq1OhWWlqqZcuW6dSpU1q5cmWkSo9K4fzbXr16tT744AN99NFHo23vvvuu4uLitGjRoimtN5qFM9affPKJ4uKCD1lOp1PS//5qx+QwcnycsktjI2zkNrG9e/dabW1tVllZmTVnzhzrN7/5jWVZlvXoo49aGzduHO0/cuvSQw89ZLW1tVl79+7l1t4JsjvWL774ohUfH2/t3r3b8vl8o9vvfvc7U18hatgd68/jbhp77I53X1+ftWjRIusv/uIvrF/96lfW0aNHraVLl1pbt2419RWiht2xfuGFF6z4+HirpqbGev/9963jx49beXl5Vn5+vqmvEDX6+vqs1tZWq7W11ZJk7dy502ptbR29jXo6HB9jJoxYlmXt3r3bSk9PtxISEqycnBzr6NGjo3+76667rK997WtB/X/xi19YX/nKV6yEhARr8eLFVm1tbYQrjl52xvprX/uaJSlku+uuuyJfeBSy++/6swgj9tkd73feece6+eabrdmzZ1uLFi2yysvLrU8++STCVUcnu2P9zDPPWFlZWdbs2bMtj8dj/c3f/I119uzZCFcdfX7+859f8v/B0+H46LAs5rcAAIA5MXHNCAAAiF6EEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABj1/wC1Y7WUuFmB1QAAAABJRU5ErkJggg==", "text/plain": [ "
" ] @@ -1581,7 +1583,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 41, @@ -1618,7 +1620,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "4fef19c0ced14384945efb5eaa3a64a5", + "model_id": "65f5f31b56ce41c6bb1e992beaf73b8f", "version_major": 2, "version_minor": 0 }, @@ -1645,7 +1647,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 42, @@ -1905,7 +1907,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 43, @@ -2155,357 +2157,357 @@ "clusterphase_preference\n", "\n", "phase_preference: Workflow\n", + "\n", + "clusterphase_preferencecompare\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "compare: Subtract\n", + "\n", + "\n", + "clusterphase_preferencecompareInputs\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "Inputs\n", + "\n", + "\n", + "clusterphase_preferencecompareOutputs\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "Outputs\n", + "\n", "\n", "clusterphase_preferenceInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferenceOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferenceelement\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "element: UserInput\n", "\n", "\n", "clusterphase_preferenceelementInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferenceelementOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencemin_phase1\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "min_phase1: LammpsMinimize\n", "\n", "\n", "clusterphase_preferencemin_phase1Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencemin_phase1Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", 
"Outputs\n", "\n", "\n", "clusterphase_preferencemin_phase2\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "min_phase2: LammpsMinimize\n", "\n", "\n", "clusterphase_preferencemin_phase2Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencemin_phase2Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencee1\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "e1: GetItem\n", "\n", "\n", "clusterphase_preferencee1Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencee1Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencen1\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "n1: Length\n", "\n", "\n", "clusterphase_preferencen1Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencen1Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencee2\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "e2: GetItem\n", "\n", "\n", "clusterphase_preferencee2Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencee2Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencen2\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "n2: Length\n", "\n", "\n", "clusterphase_preferencen2Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencen2Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencee2__getitem_Divide_n2__len\n", "\n", - "\n", + "\n", "\n", 
"\n", "\n", "\n", - "\n", + "\n", "e2__getitem_Divide_n2__len: Divide\n", "\n", "\n", "clusterphase_preferencee2__getitem_Divide_n2__lenInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencee2__getitem_Divide_n2__lenOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencee1__getitem_Divide_n1__len\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "e1__getitem_Divide_n1__len: Divide\n", "\n", - "\n", - "clusterphase_preferencee1__getitem_Divide_n1__lenInputs\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "Inputs\n", - "\n", "\n", "clusterphase_preferencee1__getitem_Divide_n1__lenOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", - "\n", - "clusterphase_preferencecompare\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "compare: Subtract\n", - "\n", - "\n", - "clusterphase_preferencecompareInputs\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "Inputs\n", - "\n", - "\n", - "clusterphase_preferencecompareOutputs\n", + "\n", + "clusterphase_preferencee1__getitem_Divide_n1__lenInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", - "Outputs\n", + "\n", + "Inputs\n", "\n", "\n", "\n", @@ -3094,7 +3096,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 49, @@ -3377,7 +3379,7 @@ "output_type": "stream", "text": [ "None 1\n", - " NOT_DATA\n" + " NOT_DATA\n" ] } ], @@ -3459,7 +3461,7 @@ "output_type": "stream", "text": [ "None 1\n", - " NOT_DATA\n", + " NOT_DATA\n", "Finally 5\n", "b (Add) output single-value: 6\n" ] @@ -3521,7 +3523,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "6.011735447998944\n" + "6.006511921004858\n" ] } ], @@ -3553,7 +3555,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "2.7731033529998967\n" + "2.4266665390023263\n" ] } ], @@ -3619,17 +3621,27 @@ "source": [ "## Saving 
and loading\n", "\n", - "Graphs can be saved and loaded on request -- either by manually invoking the `.save()` method, or by setting the `save_after_run` attribute to `True` (on the object or at instantiation by kwarg). This creates a save file (currently using HDF5 as a backend) in the parent-most node's working directory.\n", + "Graphs can be saved and loaded on request -- either by manually invoking the `.save()` method, or by setting the `save_after_run` attribute to `True` (on the object or at instantiation by kwarg). This creates a save file (currently using HDF5 as a format) in the parent-most node's working directory.\n", "\n", "Subsequently instantiating a node with the same name in the same place will attempt to reload the saved graph automatically. \n", "\n", - "Since data IO is also saved, all IO must be serializable. As a fallback, we attempt to store data IO values using pickle, so any pickle-able object should be fine.\n", + "Since data IO is also saved, all IO must be serializable. 
As a fallback, we attempt to store data IO values using pickle, so most pickle-able objects will work.\n", + "\n", + "You can look at the `Node` class docstring for a full and detailed list of all storage features as they currently are in this alpha feature, but here are a few key caveats:\n", + "- The _entire graph_ is always saved, regardless of which node is calling `save()`\n", + "- Similarly, you can only reload the entire graph at once\n", + "- Also related, there is currently zero filtering of which data, or to which depth the graph gets stored -- it's all or nothing\n", + "- If the source code for nodes gets modified between saving and loading, weird stuff is likely to happen, and some of it may happen silently.\n", "\n", - "As an alpha feature, this functionality comes with the following caveats:\n", - "- Before saving, any child nodes in the graph must have been created via the `Workflow.create` creator -- right now that means they must live in their own `.py` file which has been subject to `.register(...)`, so not a terribly high bar, but it means any nodes defined in-notebook need to be moved over to a file in the python path.\n", - "- It is not yet possible to save just one node in a composite graph, the entire graph gets saved at once regardless of which node calls `.save()`. This may negatively impact performance depending on how much data is being saved (size of graph/size of data IO objects), and how frequently saving is done.\n", - "- There are no safety rails to check whether node source code has changed between save and load time; i.e. if the code defining a particular node has changed, it may crash on load or the saved data may silently fail to reflect the new behaviour of the node.\n", - " - Related, if a `Macro` instance is modified (e.g. 
by changing internal connections, or `.replace`-ing child nodes, this will be lost on load; the loaded macro will silently re-instantiate its _original_ children and connections reflected in it's `graph_creator`." + "Lastly, we currently use two backends: `tinybase.storage.H5ioStorage` and `h5io` directly. They have slightly different strengths:\n", + "- `\"h5io\"` (the default) \n", + " - Will let you save and load any nodes that defined by subclassing (this includes all nodes defined using the decorators)\n", + " - Will preserve changes to a macro (replace/add/remove/rewire)\n", + " - Has trouble with some data\n", + "- `\"tinybase\"`\n", + " - Requires all nodes to have been instantiated with the creator (`wf.create...`; this means moving node definitions to a `.py` file in your pythonpath and registering it as a node package -- not particularly difficult!)\n", + " - _Ignores_ changes to a macro (will crash nicely if the macro IO changed)\n", + " - Falls back to `pickle` for data failures, so can handle a wider variety of data IO objects" ] }, { @@ -3669,15 +3681,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:332: UserWarning: A saved file was found for the node save_demo -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", - " warnings.warn(\n", - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:332: UserWarning: A saved file was found for the node inp -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", - " warnings.warn(\n", - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:332: UserWarning: A saved file was found for the node middle -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", - " warnings.warn(\n", - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:332: UserWarning: A saved file was found for the node 
end -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", - " warnings.warn(\n", - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:332: UserWarning: A saved file was found for the node out -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:356: UserWarning: A saved file was found for the node save_demo -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", " warnings.warn(\n" ] }, @@ -3714,7 +3718,7 @@ "metadata": {}, "outputs": [], "source": [ - "reloaded.delete_storage()" + "reloaded.storage.delete()" ] }, { @@ -3882,9 +3886,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "0.785 > 0.2\n", - "0.073 <= 0.2\n", - "Finally 0.073\n" + "0.118 <= 0.2\n", + "Finally 0.118\n" ] } ], From 4199a33e97552abed0a38ff82dc91e513f3b7c06 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Mon, 29 Jan 2024 22:42:26 +0000 Subject: [PATCH 104/166] Format black --- pyiron_workflow/storage.py | 10 +++------- pyiron_workflow/workflow.py | 4 +++- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index 21cc4805..94937e14 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -63,8 +63,7 @@ def load(self, backend: Literal["h5io", "tinybase"]): ) elif backend == "h5io": inst = h5io.read_hdf5( - fname=self._h5io_storage_file_path, - title=self.node.label + fname=self._h5io_storage_file_path, title=self.node.label ) self.node.__setstate__(inst.__getstate__()) elif backend == "tinybase": @@ -100,10 +99,7 @@ def delete(self): @property def _h5io_storage_file_path(self) -> str: return str( - ( - self.node.working_directory.path - / self._H5IO_STORAGE_FILE_NAME - ).resolve() + (self.node.working_directory.path / self._H5IO_STORAGE_FILE_NAME).resolve() ) @property @@ -126,7 +122,7 @@ def 
_tinybase_storage(self): return H5ioStorage( Pointer(self._tinybase_storage_file_path, h5_path=self.node.graph_path), - None + None, ) @property diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index 55014a5a..ffe6b64e 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -376,7 +376,9 @@ def _rebuild_execution_graph(self, storage): self.starting_nodes = [self.nodes[label] for label in storage["starting_nodes"]] def save(self, backend: Literal["h5io", "tinybase"] = "h5io"): - if backend == "tinybase" and any(node.package_identifier is None for node in self): + if backend == "tinybase" and any( + node.package_identifier is None for node in self + ): raise NotImplementedError( f"{self.__class__.__name__} can currently only save itself to file if " f"_all_ of its child nodes were created via the creator and have an " From e5311c1131593486e0496b34a59ada882bdb0ba1 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Mon, 29 Jan 2024 22:46:02 +0000 Subject: [PATCH 105/166] Format black --- pyiron_workflow/__init__.py | 1 + pyiron_workflow/macro.py | 2 +- pyiron_workflow/node.py | 6 +++--- .../node_library/pyiron_atomistics.py | 21 ++++++++++++------- pyiron_workflow/snippets/testcase.py | 2 -- 5 files changed, 18 insertions(+), 14 deletions(-) diff --git a/pyiron_workflow/__init__.py b/pyiron_workflow/__init__.py index 4bdf2160..41231fb5 100644 --- a/pyiron_workflow/__init__.py +++ b/pyiron_workflow/__init__.py @@ -28,4 +28,5 @@ - Ontological hinting for data channels in order to provide guided workflow design - GUI on top for code-lite/code-free visual scripting """ + from pyiron_workflow.workflow import Workflow diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index c83a605d..697adcc2 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -488,7 +488,7 @@ def _parse_remotely_executed_self(self, other_self): for old_data, io_panel in zip( local_connection_data, - [self.inputs, self.outputs, 
self.signals.input, self.signals.output] + [self.inputs, self.outputs, self.signals.input, self.signals.output], # Get fresh copies of the IO panels post-update ): for original_channel, label, connections in old_data: diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 521ea7a7..2f2698f1 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -503,9 +503,9 @@ def run( ) return self._run( - finished_callback=self._finish_run_and_emit_ran - if emit_ran_signal - else self._finish_run, + finished_callback=( + self._finish_run_and_emit_ran if emit_ran_signal else self._finish_run + ), force_local_execution=force_local_execution, ) diff --git a/pyiron_workflow/node_library/pyiron_atomistics.py b/pyiron_workflow/node_library/pyiron_atomistics.py index 24669e94..8b660268 100644 --- a/pyiron_workflow/node_library/pyiron_atomistics.py +++ b/pyiron_workflow/node_library/pyiron_atomistics.py @@ -1,6 +1,7 @@ """ Nodes wrapping a subset of pyiron_atomistics functionality """ + from __future__ import annotations from typing import Literal, Optional @@ -126,10 +127,12 @@ def CalcMd( n_ionic_steps: int = 1000, n_print: int = 100, temperature: int | float = 300.0, - pressure: float - | tuple[float, float, float] - | tuple[float, float, float, float, float, float] - | None = None, + pressure: ( + float + | tuple[float, float, float] + | tuple[float, float, float, float, float, float] + | None + ) = None, ): def calc_md(job, n_ionic_steps, n_print, temperature, pressure): job.calc_md( @@ -169,10 +172,12 @@ def CalcMin( job: AtomisticGenericJob, n_ionic_steps: int = 1000, n_print: int = 100, - pressure: float - | tuple[float, float, float] - | tuple[float, float, float, float, float, float] - | None = None, + pressure: ( + float + | tuple[float, float, float] + | tuple[float, float, float, float, float, float] + | None + ) = None, ): def calc_min(job, n_ionic_steps, n_print, pressure): job.calc_minimize( diff --git a/pyiron_workflow/snippets/testcase.py 
b/pyiron_workflow/snippets/testcase.py index 5d7c2b2a..5be6b89a 100644 --- a/pyiron_workflow/snippets/testcase.py +++ b/pyiron_workflow/snippets/testcase.py @@ -3,7 +3,6 @@ numpy arrays (if numpy is available). """ - from abc import ABC from contextlib import redirect_stdout import doctest @@ -30,7 +29,6 @@ class PyironTestCase(unittest.TestCase, ABC): - """ Base class for all pyiron unit tets. From d9cfcba8283b6e20f066a4d94f6c6ab5f66c1afe Mon Sep 17 00:00:00 2001 From: liamhuber Date: Mon, 29 Jan 2024 14:53:57 -0800 Subject: [PATCH 106/166] Fix expected package length Now that I added another node --- tests/unit/test_node_package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_node_package.py b/tests/unit/test_node_package.py index 0ef1b3f6..c74445fe 100644 --- a/tests/unit/test_node_package.py +++ b/tests/unit/test_node_package.py @@ -36,7 +36,7 @@ def test_nodes(self): def test_length(self): package = NodePackage("static.demo_nodes") - self.assertEqual(2, len(package)) + self.assertEqual(3, len(package)) if __name__ == '__main__': From 819a8d5f1bd7544ae19b7a413cb1397885a40dfe Mon Sep 17 00:00:00 2001 From: Liam Huber Date: Tue, 30 Jan 2024 07:58:11 -0800 Subject: [PATCH 107/166] Beautify _no_positional_args logic And make it more efficient with early stopping Co-authored-by: Sam Dareska <37879103+samwaseda@users.noreply.github.com> --- pyiron_workflow/channels.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index b2cde355..b3acca88 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -804,17 +804,11 @@ def _takes_zero_arguments(self, callback): @staticmethod def _no_positional_args(func): - return ( - sum( - 1 - for parameter in inspect.signature(func).parameters.values() - if ( - parameter.default == inspect.Parameter.empty - and parameter.kind != inspect._ParameterKind.VAR_KEYWORD - ) - ) - == 0 - ) + 
return all([ + parameter.default != inspect.Parameter.empty + or parameter.kind == inspect.Parameter.VAR_KEYWORD + for parameter in inspect.signature(func).parameters.values() + ]) @property def callback(self) -> callable: From df0c6175f6426ea285c9b2022c124e552f065146 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 30 Jan 2024 10:38:03 -0800 Subject: [PATCH 108/166] Clean up the save file even if the test fails --- tests/unit/test_workflow.py | 45 +++++++++++++++++++------------------ 1 file changed, 23 insertions(+), 22 deletions(-) diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index c2f58b8b..9db3d8e2 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -340,28 +340,29 @@ def test_storage_values(self): for storage_backend in ["h5io", "tinybase"]: with self.subTest(storage_backend): wf = Workflow("wf") - wf.register("static.demo_nodes", domain="demo") - wf.inp = wf.create.demo.AddThree(x=0) - wf.out = wf.inp.outputs.add_three + 1 - wf_out = wf() - three_result = wf.inp.three.outputs.add.value - - wf.save(backend=storage_backend) - - reloaded = Workflow("wf", storage_backend=storage_backend) - self.assertEqual( - wf_out.out__add, - reloaded.outputs.out__add.value, - msg="Workflow-level data should get reloaded" - ) - self.assertEqual( - three_result, - reloaded.inp.three.value, - msg="Child data arbitrarily deep should get reloaded" - ) - - # Clean up after ourselves - reloaded.storage.delete() + try: + wf.register("static.demo_nodes", domain="demo") + wf.inp = wf.create.demo.AddThree(x=0) + wf.out = wf.inp.outputs.add_three + 1 + wf_out = wf() + three_result = wf.inp.three.outputs.add.value + + wf.save(backend=storage_backend) + + reloaded = Workflow("wf", storage_backend=storage_backend) + self.assertEqual( + wf_out.out__add, + reloaded.outputs.out__add.value, + msg="Workflow-level data should get reloaded" + ) + self.assertEqual( + three_result, + reloaded.inp.three.value, + msg="Child data arbitrarily deep 
should get reloaded" + ) + finally: + # Clean up after ourselves + wf.storage.delete() def test_storage_scopes(self): wf = Workflow("wf") From 991765d8558ec56892160aed7aa45fc97ef4046a Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 30 Jan 2024 10:38:40 -0800 Subject: [PATCH 109/166] Flatten the state path so it matches the semantic path i.e. remove the "nodes" element between composites and their children --- pyiron_workflow/composite.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 27313bc4..b66c96bf 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -646,6 +646,14 @@ def __getstate__(self): state["_outputs_map"] = ( None if self._outputs_map is None else dict(self._outputs_map) ) + + # Remove the nodes container from the state and store each element (node) right + # in the state -- the labels are guaranteed to not be attributes already so + # this is safe, and it makes sure that the storage path matches the graph path + del state["nodes"] + state["node_labels"] = list(self.nodes.keys()) + for node in self: + state[node.label] = node return state def __setstate__(self, state): @@ -659,6 +667,11 @@ def __setstate__(self, state): None if state["_outputs_map"] is None else bidict(state["_outputs_map"]) ) + # Reconstruct nodes from state + state["nodes"] = DotDict( + {label: state[label] for label in state.pop("node_labels")} + ) + super().__setstate__(state) # Nodes purge their _parent information in their __getstate__ From 8ff5181828aef49a7ca6a77bb84005dbbf61881e Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 30 Jan 2024 10:40:41 -0800 Subject: [PATCH 110/166] Add comments for the other state modifications For consistency with our new comments --- pyiron_workflow/composite.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index b66c96bf..6a08bbee 100644 --- 
a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -637,9 +637,12 @@ def _child_signal_connections( def __getstate__(self): state = super().__getstate__() + # Store connections as strings state["_child_data_connections"] = self._child_data_connections state["_child_signal_connections"] = self._child_signal_connections - # Bidict implements a custom reconstructor that is not playing well with h5io + + # Transform the IO maps into a datatype that plays well with h5io + # (Bidict implements a custom reconstructor, which hurts us) state["_inputs_map"] = ( None if self._inputs_map is None else dict(self._inputs_map) ) @@ -660,6 +663,8 @@ def __setstate__(self, state): # Purge child connection info from the state child_data_connections = state.pop("_child_data_connections") child_signal_connections = state.pop("_child_signal_connections") + + # Transform the IO maps back into the right class (bidict) state["_inputs_map"] = ( None if state["_inputs_map"] is None else bidict(state["_inputs_map"]) ) From 636dcb07e658073666d62140ae3945c95c70ba34 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Tue, 30 Jan 2024 20:15:32 +0000 Subject: [PATCH 111/166] Format black --- pyiron_workflow/channels.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/pyiron_workflow/channels.py b/pyiron_workflow/channels.py index fb75fc91..da735497 100644 --- a/pyiron_workflow/channels.py +++ b/pyiron_workflow/channels.py @@ -828,11 +828,13 @@ def _takes_zero_arguments(self, callback): @staticmethod def _no_positional_args(func): - return all([ - parameter.default != inspect.Parameter.empty - or parameter.kind == inspect.Parameter.VAR_KEYWORD - for parameter in inspect.signature(func).parameters.values() - ]) + return all( + [ + parameter.default != inspect.Parameter.empty + or parameter.kind == inspect.Parameter.VAR_KEYWORD + for parameter in inspect.signature(func).parameters.values() + ] + ) @property def callback(self) -> callable: From 
26620b229a9b02751a2239175dfb15ac512f5313 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 30 Jan 2024 12:16:59 -0800 Subject: [PATCH 112/166] Purge and restore starting nodes from the state To minimize the number of complex instance objects stored in the state --- pyiron_workflow/composite.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 6a08bbee..ee36ca2f 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -657,6 +657,11 @@ def __getstate__(self): state["node_labels"] = list(self.nodes.keys()) for node in self: state[node.label] = node + + # Also remove the starting node instances + del state["starting_nodes"] + state["starting_node_labels"] = [n.label for n in self.starting_nodes] + return state def __setstate__(self, state): @@ -677,6 +682,11 @@ def __setstate__(self, state): {label: state[label] for label in state.pop("node_labels")} ) + # Restore starting nodes + state["starting_nodes"] = [ + state[label] for label in state.pop("starting_node_labels") + ] + super().__setstate__(state) # Nodes purge their _parent information in their __getstate__ From ff65a32dbc02444c583ea9d9e603ab5bcb99df2c Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 31 Jan 2024 09:08:04 -0800 Subject: [PATCH 113/166] Add promise about child name namespace uniqueness --- pyiron_workflow/composite.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 6a08bbee..a024841d 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -50,6 +50,8 @@ class Composite(Node, ABC): - Have no other parent - Can be replaced in-place with another node that has commensurate IO - Have their working directory nested inside the composite's + - Are disallowed from having a label that conflicts with any of the parent's + other methods or attributes - The length of a composite instance is its number of child 
nodes - Running the composite... - Runs the child nodes (either using manually specified execution signals, or From 60de0b80fa7346b7debeb2de974ffda39d17bf99 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 31 Jan 2024 09:09:29 -0800 Subject: [PATCH 114/166] Clarify availability for future devs --- pyiron_workflow/composite.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index a024841d..16c76f85 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -659,6 +659,8 @@ def __getstate__(self): state["node_labels"] = list(self.nodes.keys()) for node in self: state[node.label] = node + # This key is guaranteed to be available in the state, since children are + # forbidden from having labels that clash with their parent's __dir__ return state def __setstate__(self, state): From 9e389cb10d32fd2f0e8d45f31485d79a82943a89 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 31 Jan 2024 09:11:27 -0800 Subject: [PATCH 115/166] Guarantee storage key availability By refactoring the node labels to a property (which then shows up in __dir__) --- pyiron_workflow/composite.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 16c76f85..0748c432 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -637,6 +637,10 @@ def _child_signal_connections( ) -> list[tuple[tuple[str, str], tuple[str, str]]]: return self._get_connections_as_strings(self._get_signals_input) + @property + def node_labels(self) -> tuple[str]: + return (n.label for n in self) + def __getstate__(self): state = super().__getstate__() # Store connections as strings @@ -656,7 +660,7 @@ def __getstate__(self): # in the state -- the labels are guaranteed to not be attributes already so # this is safe, and it makes sure that the storage path matches the graph path del state["nodes"] - state["node_labels"] = 
list(self.nodes.keys()) + state["node_labels"] = self.node_labels for node in self: state[node.label] = node # This key is guaranteed to be available in the state, since children are From eb862b1741a6c5e9896d1b295644dcc3df3e9970 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 31 Jan 2024 09:25:19 -0800 Subject: [PATCH 116/166] Guarantee storage key availability --- pyiron_workflow/composite.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 6bbb732b..e21b8bed 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -641,6 +641,12 @@ def _child_signal_connections( def node_labels(self) -> tuple[str]: return (n.label for n in self) + @property + def _starting_node_labels(self): + # As a property so it appears in `__dir__` and thus is guaranteed to not + # conflict with a child node name in the state + return tuple(n.label for n in self.starting_nodes) + def __getstate__(self): state = super().__getstate__() # Store connections as strings @@ -668,7 +674,7 @@ def __getstate__(self): # Also remove the starting node instances del state["starting_nodes"] - state["starting_node_labels"] = [n.label for n in self.starting_nodes] + state["_starting_node_labels"] = self._starting_node_labels return state @@ -692,7 +698,7 @@ def __setstate__(self, state): # Restore starting nodes state["starting_nodes"] = [ - state[label] for label in state.pop("starting_node_labels") + state[label] for label in state.pop("_starting_node_labels") ] super().__setstate__(state) From 48141ac76cfe90f30e39289785821098f1d866e0 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 31 Jan 2024 09:25:44 -0800 Subject: [PATCH 117/166] :bug: hotfix generator to tuple conversion --- pyiron_workflow/composite.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index e21b8bed..e63bc00b 100644 --- 
a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -639,7 +639,7 @@ def _child_signal_connections( @property def node_labels(self) -> tuple[str]: - return (n.label for n in self) + return tuple(n.label for n in self) @property def _starting_node_labels(self): From 21cd42e5f2a8198ca258e5811ec7a5d53fe3549e Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 31 Jan 2024 17:07:25 -0800 Subject: [PATCH 118/166] Add and test wrappers for sticking nodes in a pyiron job --- pyiron_workflow/job.py | 213 +++++++++++++++++++++++++++++++++++++++++ tests/unit/test_job.py | 195 +++++++++++++++++++++++++++++++++++++ 2 files changed, 408 insertions(+) create mode 100644 pyiron_workflow/job.py create mode 100644 tests/unit/test_job.py diff --git a/pyiron_workflow/job.py b/pyiron_workflow/job.py new file mode 100644 index 00000000..df1abfbd --- /dev/null +++ b/pyiron_workflow/job.py @@ -0,0 +1,213 @@ +""" +Wrapper for running a node as a pyiron base job. + +Two approaches are provided while we work out which is more convenient and how some +edge cases may be handled differently: +- A direct sub-class of :class:`TemplateJob`, created using the usual job creation. +- A helper method for using :meth:`Project.wrap_python_function`. + +The wrapper function appears to be slightly slower (presumably because of the extra +layer of serialization). + +The intent of this module is to provide immediate access to pyiron's queue submission +functionality, while in the long run this should be integrated more directly with the +workflows. E.g., this solution doesn't permit individual nodes in a workflow to be +submitted to the queue, but only standalone nodes/macros, or entire workflows. + +Parallel processing inside node job will not be possible until the node executor can +take values _other_ than an actual executor instance (which don't serialize). The +infrastructure is in place for this in :meth:`Node._parse_executor`, but it is not yet +leveraged. 
+""" + +from __future__ import annotations + +import os + +from pyiron_base import TemplateJob, JOB_CLASS_DICT +from pyiron_workflow.node import Node +from h5io._h5io import _import_class + +_WARNINGS_STRING = """ + Warnings: + The job can be run with `run_mode="non_modal"`, but _only_ if all the nodes + being run are defined in an importable file location -- i.e. copying and + pasting the example above into a jupyter notebook works fine in modal mode, but + will throw an exception if you try to run it non-modally. + + This hasn't been tested for running on a remote queue. It should work, but it's + _possible_ the same requirement from non-modal mode (importable nodes) will + apply. +""" + + +class NodeJob(TemplateJob): + __doc__ = """ + This job is an intermediate feature for accessing pyiron's queue submission + infrastructure for nodes (function nodes, macros, or entire workflows). + + It leans directly on the storage capabilities of the node itself, except for + the node class and name, and the storage backend mode, all of which are held in the + traditional job input. (Only the storage backend ever needs to be specified, the + node information gets populated automatically). + + The job provides direct access to its owned node (as both input and output) on the + :attr:`node` attribute. The only requirement is that the node have an untouched + working directory (so we can make sure its files get stored _inside_ the job's + directory tree), and that it be compatible with the storage backend used. + + Examples: + >>> from pyiron_base import Project + >>> from pyiron_workflow import Workflow + >>> import pyiron_workflow.job # To get the job registered in JOB_CLASS_DICT + >>> + >>> @Workflow.wrap_as.single_value_node("t") + ... def Sleep(t): + ... from time import sleep + ... sleep(t) + ... 
return t + >>> + >>> wf = Workflow("pyiron_node", overwrite_save=True) + >>> wf.sleep = Sleep(0) + >>> wf.out = wf.create.standard.UserInput(wf.sleep) + >>> + >>> pr = Project("test") + >>> + >>> nj = pr.create.job.NodeJob("my_node") + >>> nj.node = wf + >>> nj.run() + >>> print(nj.node.outputs.to_value_dict()) + {'out__user_input': 0} + + >>> lj = pr.load(nj.job_name) + >>> print(nj.node.outputs.to_value_dict()) + {'out__user_input': 0} + + >>> pr.remove_jobs(recursive=True, silently=True) + >>> pr.remove(enable=True) + + """ + _WARNINGS_STRING + + def __init__(self, project, job_name): + super().__init__(project, job_name) + self._python_only_job = True + self._write_work_dir_warnings = False + self._node = None + self.input._label = None + self.input._class_type = None + self.input.storage_backend = "h5io" # Or "tinybase" + + @property + def node(self) -> Node: + if self._node is None and self.status.finished: + self._load_node() + return self._node + + @node.setter + def node(self, new_node: Node): + if self._node is not None: + raise ValueError("Node already set, make a new job") + elif self._node_working_directory_already_there(new_node): + self.raise_working_directory_error() + else: + self._node = new_node + self.input._class_type = f"{new_node.__class__.__module__}." 
\ + f"{new_node.__class__.__name__}" + self.input._label = new_node.label + + @staticmethod + def _node_working_directory_already_there(node): + return node._working_directory is not None + + @staticmethod + def raise_working_directory_error(): + raise ValueError("Only nodes with un-touched working directories!") + + def _save_node(self): + here = os.getcwd() + os.makedirs(self.working_directory, exist_ok=True) + os.chdir(self.working_directory) + self.node.save(backend=self.input.storage_backend) + os.chdir(here) + + def _load_node(self): + here = os.getcwd() + os.chdir(self.working_directory) + self._node = _import_class(self.input._class_type)(self.input._label) + os.chdir(here) + + def to_hdf(self, hdf=None, group_name=None): + super().to_hdf(hdf=hdf, group_name=group_name) + self._save_node() + + def from_hdf(self, hdf=None, group_name=None): + super().from_hdf(hdf=hdf, group_name=group_name) + self._load_node() + + def validate_ready_to_run(self): + if self._node_working_directory_already_there(self.node): + self.raise_working_directory_error() + + def run_static(self): + self.status.running = True + self.node.run() + self.to_hdf() + self.status.finished = True + + +JOB_CLASS_DICT[NodeJob.__name__] = NodeJob.__module__ + + +def _run_node(node): + node.run() + return node + + +def create_job_with_python_wrapper(project, node): + __doc__ = """ + A convenience wrapper around :meth:`pyiron_base.Project.wrap_python_function` for + running a `pyiron_workflow.Workflow`. (And _only_ workflows, `Function` and `Macro` + children will fail.) + + Args: + project (pyiron_base.Project): A pyiron project. + node (pyiron_workflow.node.Node): The node to run. + + Returns: + (pyiron_base.jobs.flex.pythonfunctioncontainer.PythonFunctionContainerJob): + A job which wraps a function for running the node, with the `"node"` input + pre-populated with the provided node. 
+ + Examples: + >>> from pyiron_base import Project + >>> from pyiron_workflow import Workflow + >>> from pyiron_workflow.job import create_job_with_python_wrapper + >>> + >>> @Workflow.wrap_as.single_value_node("t") + ... def Sleep(t): + ... from time import sleep + ... sleep(t) + ... return t + >>> + >>> wf = Workflow("pyiron_node", overwrite_save=True) + >>> wf.sleep = Sleep(0) + >>> wf.out = wf.create.standard.UserInput(wf.sleep) + >>> + >>> pr = Project("test") + >>> + >>> nj = create_job_with_python_wrapper(pr, wf) + >>> nj.run() + >>> print(nj.output["result"].outputs.to_value_dict()) + {'out__user_input': 0} + + >>> lj = pr.load(nj.job_name) + >>> print(nj.output["result"].outputs.to_value_dict()) + {'out__user_input': 0} + + >>> pr.remove_jobs(recursive=True, silently=True) + >>> pr.remove(enable=True) + + """ + _WARNINGS_STRING + job = project.wrap_python_function(_run_node) + job.input["node"] = node + return job diff --git a/tests/unit/test_job.py b/tests/unit/test_job.py new file mode 100644 index 00000000..8e1dce04 --- /dev/null +++ b/tests/unit/test_job.py @@ -0,0 +1,195 @@ +from abc import ABC, abstractmethod +from time import sleep +import unittest + +from pyiron_base import Project +from pyiron_workflow import Workflow +from pyiron_workflow.channels import NOT_DATA +from pyiron_workflow.job import create_job_with_python_wrapper + + +@Workflow.wrap_as.single_value_node("t") +def Sleep(t): + sleep(t) + return t + + +class _WithAJob(unittest.TestCase, ABC): + @abstractmethod + def make_a_job_from_node(self, node): + pass + + def setUp(self) -> None: + self.pr = Project("test") + + def tearDown(self) -> None: + self.pr.remove_jobs(recursive=True, silently=True) + self.pr.remove(enable=True) + + +class TestNodeJob(_WithAJob): + def make_a_job_from_node(self, node): + job = self.pr.create.job.NodeJob(node.label) + job.node = node + return job + + def test_modal(self): + modal_wf = Workflow("modal_wf") + modal_wf.sleep = Sleep(0) + modal_wf.out = 
modal_wf.create.standard.UserInput(modal_wf.sleep) + nj = self.make_a_job_from_node(modal_wf) + + nj.run() + self.assertTrue( + nj.status.finished, + msg="The interpreter should not release until the job is done" + ) + self.assertEqual( + 0, + nj.node.outputs.out__user_input.value, + msg="The node should have run, and since it's modal there's no need to " + "update the instance" + ) + + lj = self.pr.load(nj.job_name) + self.assertIsNot( + lj, + nj, + msg="The loaded job should be a new instance." + ) + self.assertEqual( + nj.node.outputs.out__user_input.value, + lj.node.outputs.out__user_input.value, + msg="The loaded job should still have all the same values" + ) + + def test_nonmodal(self): + nonmodal_node = Workflow("non_modal") + nonmodal_node.out = Workflow.create.standard.UserInput(42) + + nj = self.make_a_job_from_node(nonmodal_node) + nj.run(run_mode="non_modal") + self.assertFalse( + nj.status.finished, + msg=f"The local process should released immediately per non-modal " + f"style, but got status {nj.status}" + ) + while not nj.status.finished: + sleep(0.1) + self.assertTrue( + nj.status.finished, + msg="The job status should update on completion" + ) + self.assertIs( + nj.node.outputs.out__user_input.value, + NOT_DATA, + msg="As usual with remote processes, we expect to require a data read " + "before the local instance reflects its new state." 
+ ) + + lj = self.pr.load(nj.job_name) + self.assertEqual( + 42, + lj.node.outputs.out__user_input.value, + msg="The loaded job should have the finished values" + ) + + def test_bad_workflow(self): + has_wd_wf = Workflow("not_empty") + try: + has_wd_wf.working_directory # Touch the working directory, creating it + with self.assertRaises( + ValueError, + msg="To make sure the node gets stored _inside_ the job, we only " + "accept the assignment of nodes who haven't looked at their working " + "directory yet" + ): + self.make_a_job_from_node(has_wd_wf) + finally: + has_wd_wf.working_directory.delete() + + +class TestWrapperFunction(_WithAJob): + def make_a_job_from_node(self, node): + return create_job_with_python_wrapper(self.pr, node) + + def test_modal(self): + modal_wf = Workflow("modal_wf") + modal_wf.sleep = Sleep(0) + modal_wf.out = modal_wf.create.standard.UserInput(modal_wf.sleep) + nj = self.make_a_job_from_node(modal_wf) + + nj.run() + self.assertTrue( + nj.status.finished, + msg="The interpreter should not release until the job is done" + ) + self.assertEqual( + 0, + nj.output["result"].outputs.out__user_input.value, + msg="The node should have run, and since it's modal there's no need to " + "update the instance" + ) + + lj = self.pr.load(nj.job_name) + self.assertIsNot( + lj, + nj, + msg="The loaded job should be a new instance." 
+ ) + self.assertEqual( + nj.output["result"].outputs.out__user_input.value, + lj.output["result"].outputs.out__user_input.value, + msg="The loaded job should still have all the same values" + ) + + def test_node(self): + node = Workflow.create.standard.UserInput(42) + nj = self.make_a_job_from_node(node) + nj.run() + self.assertEqual( + 42, + nj.node.outputs.user_input, + msg="A single node should run just as well as a workflow" + ) + + def test_nonmodal(self): + nonmodal_node = Workflow("non_modal") + nonmodal_node.out = Workflow.create.standard.UserInput(42) + + nj = self.make_a_job_from_node(nonmodal_node) + nj.run(run_mode="non_modal") + self.assertFalse( + nj.status.finished, + msg=f"The local process should released immediately per non-modal " + f"style, but got status {nj.status}" + ) + while not nj.status.finished: + sleep(0.1) + self.assertTrue( + nj.status.finished, + msg="The job status should update on completion" + ) + with self.assertRaises( + KeyError, + msg="As usual with remote processes, we expect to require a data read " + "before the local instance reflects its new state." 
+ ): + nj.output["result"] + + lj = self.pr.load(nj.job_name) + self.assertEqual( + 42, + lj.output["result"].outputs.out__user_input.value, + msg="The loaded job should have the finished values" + ) + + def test_node(self): + node = Workflow.create.standard.UserInput(42) + nj = self.make_a_job_from_node(node) + with self.assertRaises( + AttributeError, + msg="The wrapping routine doesn't interact well with getattr overrides on " + "node state elements (output data)" + ): + nj.run() From d7c780179ccf55671f4b84463db56c5e781278a1 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 6 Feb 2024 15:03:19 -0800 Subject: [PATCH 119/166] Introduce and test an importability property --- pyiron_workflow/composite.py | 4 ++++ pyiron_workflow/node.py | 22 ++++++++++++++++++++ tests/static/demo_nodes.py | 8 +++++++- tests/unit/test_composite.py | 40 ++++++++++++++++++++++++++++++++++++ 4 files changed, 73 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index e63bc00b..476b54e3 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -763,3 +763,7 @@ def _restore_signal_connections_from_strings( self._get_signals_input, self._get_signals_output, ) + + @property + def import_ready(self) -> bool: + return super().import_ready and all(node.import_ready for node in self) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 7a8e6759..6e5b479a 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -10,6 +10,7 @@ import warnings from abc import ABC, abstractmethod from concurrent.futures import Executor as StdLibExecutor, Future +from importlib import import_module from typing import Any, Literal, Optional, TYPE_CHECKING from pyiron_workflow.channels import ( @@ -231,6 +232,8 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): connected. future (concurrent.futures.Future | None): A futures object, if the node is currently running or has already run using an executor. 
+ import_ready (bool): Whether importing the node's class from its class's module + returns the same thing as its type. (Recursive on sub-nodes for composites.) inputs (pyiron_workflow.io.Inputs): **Abstract.** Children must define a property returning an :class:`Inputs` object. label (str): A name for the node. @@ -1224,3 +1227,22 @@ def tidy_working_directory(self): self._working_directory = None # Touching the working directory may have created it -- if it's there and # empty just clean it up + + @property + def import_ready(self) -> bool: + """ + Checks whether `importlib` can find this node's class, and if so whether the + imported object matches the node's type. + + Returns: + (bool): Whether the imported module and name of this node's class match + its type. + """ + try: + module = self.__class__.__module__ + class_ = getattr(import_module(module), self.__class__.__name__) + if module == "__main__": + warnings.warn(f"{self.label} is only defined in __main__") + return type(self) is class_ + except (ModuleNotFoundError, AttributeError): + return False diff --git a/tests/static/demo_nodes.py b/tests/static/demo_nodes.py index ae6c4d91..201ab514 100644 --- a/tests/static/demo_nodes.py +++ b/tests/static/demo_nodes.py @@ -27,4 +27,10 @@ def AddPlusOne(obj, other): return obj + other + 1 -nodes = [OptionallyAdd, AddThree, AddPlusOne] +def dynamic(x): + return x + 1 + + +Dynamic = Workflow.wrap_as.single_value_node()(dynamic) + +nodes = [OptionallyAdd, AddThree, AddPlusOne, Dynamic] diff --git a/tests/unit/test_composite.py b/tests/unit/test_composite.py index 4c4e4d3b..87321639 100644 --- a/tests/unit/test_composite.py +++ b/tests/unit/test_composite.py @@ -652,6 +652,46 @@ def test_graph_info(self): "from all depths." 
) + def test_import_ready(self): + self.comp.register("static.demo_nodes", "demo") + + totally_findable = Composite.create.demo.OptionallyAdd() + self.assertTrue( + totally_findable.import_ready, + msg="The node class is well defined and in an importable module" + ) + bad_class = Composite.create.demo.dynamic() + self.assertFalse( + bad_class.import_ready, + msg="The node is in an importable location, but the imported object is not " + "the node class (but rather the node function)" + ) + og_module = totally_findable.__class__.__module__ + totally_findable.__class__.__module__ = "something I totally made up" + self.assertFalse( + totally_findable.import_ready, + msg="The node class is well defined, but the module is not in the python " + "path so import fails" + ) + totally_findable.__class__.__module__ = og_module # Fix what you broke + + self.assertTrue( + self.comp.import_ready, + msg="Sanity check on initial condition -- tests are in the path, so this " + "is importable" + ) + self.comp.totally_findable = totally_findable + print(self.comp.import_ready, self.comp.import_ready, self.comp.node_labels) + self.assertTrue( + self.comp.import_ready, + msg="Adding importable children should leave the parent import-ready" + ) + self.comp.bad_class = bad_class + self.assertFalse( + self.comp.import_ready, + msg="Adding un-importable children should make the parent not import ready" + ) + if __name__ == '__main__': unittest.main() From b361523a73cd167577e6b25f743a03f78f757a49 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 6 Feb 2024 15:03:37 -0800 Subject: [PATCH 120/166] Extend node package count --- tests/unit/test_node_package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_node_package.py b/tests/unit/test_node_package.py index c74445fe..3abb7797 100644 --- a/tests/unit/test_node_package.py +++ b/tests/unit/test_node_package.py @@ -36,7 +36,7 @@ def test_nodes(self): def test_length(self): package = 
NodePackage("static.demo_nodes") - self.assertEqual(3, len(package)) + self.assertEqual(4, len(package)) if __name__ == '__main__': From bd388b1dbcacab1912dd04ef77ae70b8a7df6a1e Mon Sep 17 00:00:00 2001 From: liamhuber Date: Tue, 6 Feb 2024 15:09:02 -0800 Subject: [PATCH 121/166] Fail saving early if h5io won't be able to import your nodes --- pyiron_workflow/storage.py | 14 ++++++++++++++ tests/unit/test_workflow.py | 6 +++--- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index 94937e14..e7aef538 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -18,6 +18,13 @@ ALLOWED_BACKENDS = ["h5io", "tinybase"] +class TypeNotFoundError(ImportError): + """ + Raised when you try to save a node, but importing its module and class give + something other than its type. + """ + + class StorageInterface: _TINYBASE_STORAGE_FILE_NAME = "project.h5" @@ -41,6 +48,13 @@ def save(self, backend: Literal["h5io", "tinybase"]): def _save(self, backend: Literal["h5io", "tinybase"]): if backend == "h5io": + if not self.node.import_ready: + raise TypeNotFoundError( + f"{self.node.label} cannot be saved with h5io because it (or one " + f"of its child nodes) has a type that cannot be imported. Did you " + f"dynamically define this node? Try using the node wrapper as a " + f"decorator instead." 
+ ) h5io.write_hdf5( fname=self._h5io_storage_file_path, data=self.node, diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index 9db3d8e2..6b03bec0 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -6,6 +6,7 @@ from pyiron_workflow._tests import ensure_tests_in_python_path from pyiron_workflow.channels import NOT_DATA from pyiron_workflow.snippets.dotdict import DotDict +from pyiron_workflow.storage import TypeNotFoundError from pyiron_workflow.workflow import Workflow @@ -415,13 +416,12 @@ def UnimportableScope(x): wf.unimportable_scope = UnimportableScope() try: - wf.save(backend="h5io") with self.assertRaises( - AttributeError, + TypeNotFoundError, msg="Nodes must live in an importable scope to save with the h5io " "backend" ): - Workflow(wf.label, storage_backend="h5io") + wf.save(backend="h5io") finally: wf.remove_node(wf.unimportable_scope) wf.storage.delete() From 8ced849525ccea94461f8bbd40532a25bd9b089c Mon Sep 17 00:00:00 2001 From: Liam Huber Date: Wed, 7 Feb 2024 10:10:24 -0800 Subject: [PATCH 122/166] try...finally changing the classname Co-authored-by: Niklas Siemer <70580458+niklassiemer@users.noreply.github.com> --- tests/unit/test_composite.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/tests/unit/test_composite.py b/tests/unit/test_composite.py index 87321639..bb748512 100644 --- a/tests/unit/test_composite.py +++ b/tests/unit/test_composite.py @@ -666,14 +666,17 @@ def test_import_ready(self): msg="The node is in an importable location, but the imported object is not " "the node class (but rather the node function)" ) - og_module = totally_findable.__class__.__module__ - totally_findable.__class__.__module__ = "something I totally made up" - self.assertFalse( - totally_findable.import_ready, - msg="The node class is well defined, but the module is not in the python " - "path so import fails" - ) - totally_findable.__class__.__module__ = og_module # Fix what 
you broke + with self.subTest(msg="Made up class"): + try: + og_module = totally_findable.__class__.__module__ + totally_findable.__class__.__module__ = "something I totally made up" + self.assertFalse( + totally_findable.import_ready, + msg="The node class is well defined, but the module is not in the python " + "path so import fails" + ) + finally: + totally_findable.__class__.__module__ = og_module # Fix what you broke self.assertTrue( self.comp.import_ready, From 6b0bf50d484547aed87a85ee61524627983c1312 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 10:12:01 -0800 Subject: [PATCH 123/166] Slide var used in finally outside the try --- tests/unit/test_composite.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_composite.py b/tests/unit/test_composite.py index bb748512..80c19843 100644 --- a/tests/unit/test_composite.py +++ b/tests/unit/test_composite.py @@ -667,8 +667,8 @@ def test_import_ready(self): "the node class (but rather the node function)" ) with self.subTest(msg="Made up class"): + og_module = totally_findable.__class__.__module__ try: - og_module = totally_findable.__class__.__module__ totally_findable.__class__.__module__ = "something I totally made up" self.assertFalse( totally_findable.import_ready, From 604a8cec685165173d232d34a61ff1b8a1b52eae Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 10:12:14 -0800 Subject: [PATCH 124/166] Fix line length --- tests/unit/test_composite.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_composite.py b/tests/unit/test_composite.py index 80c19843..ee07eab2 100644 --- a/tests/unit/test_composite.py +++ b/tests/unit/test_composite.py @@ -672,8 +672,8 @@ def test_import_ready(self): totally_findable.__class__.__module__ = "something I totally made up" self.assertFalse( totally_findable.import_ready, - msg="The node class is well defined, but the module is not in the python " - "path so import fails" + msg="The 
node class is well defined, but the module is not in the " + "python path so import fails" ) finally: totally_findable.__class__.__module__ = og_module # Fix what you broke From 6d0903aeacd54de781a440d8d77384869d23bed7 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 10:12:30 -0800 Subject: [PATCH 125/166] Remove debug print --- tests/unit/test_composite.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/unit/test_composite.py b/tests/unit/test_composite.py index ee07eab2..00b077f4 100644 --- a/tests/unit/test_composite.py +++ b/tests/unit/test_composite.py @@ -684,7 +684,6 @@ def test_import_ready(self): "is importable" ) self.comp.totally_findable = totally_findable - print(self.comp.import_ready, self.comp.import_ready, self.comp.node_labels) self.assertTrue( self.comp.import_ready, msg="Adding importable children should leave the parent import-ready" From 519f176ca6018d14b3f7f0ab2ab4c7591f3355c6 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 10:13:00 -0800 Subject: [PATCH 126/166] Fix test message --- tests/unit/test_composite.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_composite.py b/tests/unit/test_composite.py index 00b077f4..03574259 100644 --- a/tests/unit/test_composite.py +++ b/tests/unit/test_composite.py @@ -666,7 +666,7 @@ def test_import_ready(self): msg="The node is in an importable location, but the imported object is not " "the node class (but rather the node function)" ) - with self.subTest(msg="Made up class"): + with self.subTest(msg="Made up module"): og_module = totally_findable.__class__.__module__ try: totally_findable.__class__.__module__ = "something I totally made up" From 284b9ca7b0c02af9b9aaa3bdbaa4b6604c4a341a Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 10:25:02 -0800 Subject: [PATCH 127/166] Protect tinybase from object type-import type mismatches --- pyiron_workflow/storage.py | 14 +++++++------- tests/unit/test_workflow.py | 14 
++++++++++++++ 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index e7aef538..22c5d795 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -47,14 +47,14 @@ def save(self, backend: Literal["h5io", "tinybase"]): root.storage.save(backend=backend) def _save(self, backend: Literal["h5io", "tinybase"]): + if not self.node.import_ready: + raise TypeNotFoundError( + f"{self.node.label} cannot be saved with h5io because it (or one " + f"of its child nodes) has a type that cannot be imported. Did you " + f"dynamically define this node? Try using the node wrapper as a " + f"decorator instead." + ) if backend == "h5io": - if not self.node.import_ready: - raise TypeNotFoundError( - f"{self.node.label} cannot be saved with h5io because it (or one " - f"of its child nodes) has a type that cannot be imported. Did you " - f"dynamically define this node? Try using the node wrapper as a " - f"decorator instead." 
- ) h5io.write_hdf5( fname=self._h5io_storage_file_path, data=self.node, diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index 6b03bec0..f3a59a84 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -382,6 +382,20 @@ def test_storage_scopes(self): finally: wf.storage.delete() + with self.subTest("No unimportable nodes for either back-end"): + try: + wf.import_type_mismatch = wf.create.demo.dynamic() + for backend in ["h5io", "tinybase"]: + with self.subTest(backend): + with self.assertRaises( + TypeNotFoundError, + msg="Imported object is function but node type is node -- " + "should fail early on save" + ): + wf.save(backend=backend) + finally: + wf.remove_node(wf.import_type_mismatch) + wf.add_node(PlusOne(label="local_but_importable")) try: wf.save(backend="h5io") From 9af564a33a8f944dc17073a4399f3e744b7b6309 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 10:25:15 -0800 Subject: [PATCH 128/166] Update the storage docs on Node --- pyiron_workflow/node.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 6e5b479a..44d2ecd0 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -157,6 +157,13 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): - On instantiation, nodes will load automatically if they find saved content. - Discovered content can instead be deleted with a kwarg. - You can't load saved content _and_ run after instantiation at once. + - The nodes must be somewhere importable, and the imported object must match + the type of the node being saved. This basically just rules out one edge + case where a node class is defined like + `SomeFunctionNode = Workflow.wrap_as.function_node()(some_function)`, since + then the new class gets the name `some_function`, which when imported is + the _function_ "some_function" and not the desired class "SomeFunctionNode". 
+ This is checked for at save-time and will cause a nice early failure. - [ALPHA ISSUE] If the source code (cells, `.py` files...) for a saved graph is altered between saving and loading the graph, there are no guarantees about the loaded state; depending on the nature of the changes everything may From a59e01ffcc1f69b3e04e62103584e70a9a912b6c Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 10:35:37 -0800 Subject: [PATCH 129/166] Add a convenience method for reporting importability --- pyiron_workflow/composite.py | 5 +++++ pyiron_workflow/node.py | 6 ++++++ 2 files changed, 11 insertions(+) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 476b54e3..5a2adc70 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -767,3 +767,8 @@ def _restore_signal_connections_from_strings( @property def import_ready(self) -> bool: return super().import_ready and all(node.import_ready for node in self) + + def import_readiness_report(self, tabs=0): + super().import_readiness_report(tabs=tabs) + for node in self: + node.import_readiness_report(tabs=tabs + 1) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 6e5b479a..8bc7e12a 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1246,3 +1246,9 @@ def import_ready(self) -> bool: return type(self) is class_ except (ModuleNotFoundError, AttributeError): return False + + def import_readiness_report(self, tabs=0): + tabspace = tabs * "\t" + print( + f"{tabspace}{self.label}: {'ok' if self.import_ready else 'NOT IMPORTABLE'}" + ) From 1a99af7c551afc936deca1bb8c86168c9c281f6d Mon Sep 17 00:00:00 2001 From: Liam Huber Date: Wed, 7 Feb 2024 12:29:30 -0800 Subject: [PATCH 130/166] Good catch by Niklas Co-authored-by: Niklas Siemer <70580458+niklassiemer@users.noreply.github.com> --- pyiron_workflow/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index 
22c5d795..cc977cff 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -49,7 +49,7 @@ def save(self, backend: Literal["h5io", "tinybase"]): def _save(self, backend: Literal["h5io", "tinybase"]): if not self.node.import_ready: raise TypeNotFoundError( - f"{self.node.label} cannot be saved with h5io because it (or one " + f"{self.node.label} cannot be saved because it (or one " f"of its child nodes) has a type that cannot be imported. Did you " f"dynamically define this node? Try using the node wrapper as a " f"decorator instead." From 87366936cd1c6403a6daa72b89abf8b9efa4ab05 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 12:40:34 -0800 Subject: [PATCH 131/166] Return a string instead of printing --- pyiron_workflow/composite.py | 7 ++++--- pyiron_workflow/node.py | 8 ++++---- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 5a2adc70..e82cb5e0 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -768,7 +768,8 @@ def _restore_signal_connections_from_strings( def import_ready(self) -> bool: return super().import_ready and all(node.import_ready for node in self) - def import_readiness_report(self, tabs=0): - super().import_readiness_report(tabs=tabs) + def import_readiness_report(self, tabs=0, report_so_far=""): + report = super().import_readiness_report(tabs=tabs, report_so_far=report_so_far) for node in self: - node.import_readiness_report(tabs=tabs + 1) + report = node.import_readiness_report(tabs=tabs + 1, report_so_far=report) + return report diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 8bc7e12a..526a25c2 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1247,8 +1247,8 @@ def import_ready(self) -> bool: except (ModuleNotFoundError, AttributeError): return False - def import_readiness_report(self, tabs=0): + def import_readiness_report(self, tabs=0, report_so_far=""): + newline 
= "\n" if len(report_so_far) > 0 else "" tabspace = tabs * "\t" - print( - f"{tabspace}{self.label}: {'ok' if self.import_ready else 'NOT IMPORTABLE'}" - ) + return report_so_far + f"{newline}{tabspace}{self.label}: " \ + f"{'ok' if self.import_ready else 'NOT IMPORTABLE'}" From 88a0f8a11894d81d0704750990e6ffb0b85ce66f Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 12:40:47 -0800 Subject: [PATCH 132/166] Add the report to the error message --- pyiron_workflow/storage.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index e7aef538..404cbf2d 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -53,7 +53,9 @@ def _save(self, backend: Literal["h5io", "tinybase"]): f"{self.node.label} cannot be saved with h5io because it (or one " f"of its child nodes) has a type that cannot be imported. Did you " f"dynamically define this node? Try using the node wrapper as a " - f"decorator instead." + f"decorator instead. 
\n" + f"Import readiness report: \n" + f"{self.node.import_readiness_report()}" ) h5io.write_hdf5( fname=self._h5io_storage_file_path, From efafe97a84128e493eb1b8a4472ebdaf3acaffb3 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 12:42:07 -0800 Subject: [PATCH 133/166] Refactor: rename method --- pyiron_workflow/composite.py | 6 +++--- pyiron_workflow/node.py | 2 +- pyiron_workflow/storage.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index e82cb5e0..22a37ff9 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -768,8 +768,8 @@ def _restore_signal_connections_from_strings( def import_ready(self) -> bool: return super().import_ready and all(node.import_ready for node in self) - def import_readiness_report(self, tabs=0, report_so_far=""): - report = super().import_readiness_report(tabs=tabs, report_so_far=report_so_far) + def _report_import_readiness(self, tabs=0, report_so_far=""): + report = super()._report_import_readiness(tabs=tabs, report_so_far=report_so_far) for node in self: - report = node.import_readiness_report(tabs=tabs + 1, report_so_far=report) + report = node._report_import_readiness(tabs=tabs + 1, report_so_far=report) return report diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 526a25c2..d3eeefbc 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1247,7 +1247,7 @@ def import_ready(self) -> bool: except (ModuleNotFoundError, AttributeError): return False - def import_readiness_report(self, tabs=0, report_so_far=""): + def _report_import_readiness(self, tabs=0, report_so_far=""): newline = "\n" if len(report_so_far) > 0 else "" tabspace = tabs * "\t" return report_so_far + f"{newline}{tabspace}{self.label}: " \ diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index 404cbf2d..02a7023c 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -55,7 
+55,7 @@ def _save(self, backend: Literal["h5io", "tinybase"]): f"dynamically define this node? Try using the node wrapper as a " f"decorator instead. \n" f"Import readiness report: \n" - f"{self.node.import_readiness_report()}" + f"{self.node._report_import_readiness()}" ) h5io.write_hdf5( fname=self._h5io_storage_file_path, From 51ec2e4eaddc240b1eb4fa87aa1fa7fd6dade8ab Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 12:43:29 -0800 Subject: [PATCH 134/166] Add back a property on the old name Which prints so it formats nicely without "\n"/"\t" characters when called --- pyiron_workflow/node.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index d3eeefbc..6a8e502a 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1247,6 +1247,10 @@ def import_ready(self) -> bool: except (ModuleNotFoundError, AttributeError): return False + @property + def import_readiness_report(self): + print(self._report_import_readiness()) + def _report_import_readiness(self, tabs=0, report_so_far=""): newline = "\n" if len(report_so_far) > 0 else "" tabspace = tabs * "\t" From 8c463dcabe7792296fff972af830cae5cf3a2292 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 12:56:35 -0800 Subject: [PATCH 135/166] :bug: Fix a string typo that snuck in on the github web merger Would have noticed this in the CI prior to merging the stack, but I've been completely ignoring the CI while we wait for the dependencies to actually release --- pyiron_workflow/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index e6e2e2c8..545e6541 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -54,7 +54,7 @@ def _save(self, backend: Literal["h5io", "tinybase"]): f"dynamically define this node? Try using the node wrapper as a " f"decorator instead. 
\n" f"Import readiness report: \n" - f"{self.node._report_import_readiness()}"" + f"{self.node._report_import_readiness()}" ) if backend == "h5io": h5io.write_hdf5( From 8bbaf32eed7ed24bb8b8cd20084c018e08f4a29b Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Wed, 7 Feb 2024 20:58:10 +0000 Subject: [PATCH 136/166] Format black --- pyiron_workflow/composite.py | 4 +++- pyiron_workflow/node.py | 6 ++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 22a37ff9..bef971dc 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -769,7 +769,9 @@ def import_ready(self) -> bool: return super().import_ready and all(node.import_ready for node in self) def _report_import_readiness(self, tabs=0, report_so_far=""): - report = super()._report_import_readiness(tabs=tabs, report_so_far=report_so_far) + report = super()._report_import_readiness( + tabs=tabs, report_so_far=report_so_far + ) for node in self: report = node._report_import_readiness(tabs=tabs + 1, report_so_far=report) return report diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index cf423dd6..b7096d41 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -1261,5 +1261,7 @@ def import_readiness_report(self): def _report_import_readiness(self, tabs=0, report_so_far=""): newline = "\n" if len(report_so_far) > 0 else "" tabspace = tabs * "\t" - return report_so_far + f"{newline}{tabspace}{self.label}: " \ - f"{'ok' if self.import_ready else 'NOT IMPORTABLE'}" + return ( + report_so_far + f"{newline}{tabspace}{self.label}: " + f"{'ok' if self.import_ready else 'NOT IMPORTABLE'}" + ) From 8f56a2b4da6a04ff4ba7885430c55bb50ec06794 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 13:07:46 -0800 Subject: [PATCH 137/166] Bump pyiron_contrib --- .ci_support/environment-tinybase.yml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/.ci_support/environment-tinybase.yml b/.ci_support/environment-tinybase.yml index f77ff0d9..a766e7ca 100644 --- a/.ci_support/environment-tinybase.yml +++ b/.ci_support/environment-tinybase.yml @@ -3,4 +3,4 @@ channels: dependencies: - boto3 - h5io_browser =0.0.6 -- pyiron_contrib =0.1.13 \ No newline at end of file +- pyiron_contrib =0.1.14 \ No newline at end of file diff --git a/setup.py b/setup.py index 66f3c788..0b8c35a6 100644 --- a/setup.py +++ b/setup.py @@ -48,7 +48,7 @@ "tinybase": [ 'boto3', # Just because pyiron_contrib is not making sure it's there 'h5io_browser==0.0.6', - 'pyiron_contrib==0.1.13', + 'pyiron_contrib==0.1.14', ] }, cmdclass=versioneer.get_cmdclass(), From e6e3b831b439871357d1b9d1375475da6ba4d86c Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 7 Feb 2024 13:22:58 -0800 Subject: [PATCH 138/166] Update docstring examples Saving now requires that the saved nodes be importable, which would be the case if you copied and pasted the example code into a notebook, but is not the case in the dynamic place doctest is running things. So just don't define a custom "Sleep" node here, instead use a standard node. --- pyiron_workflow/job.py | 29 ++++++++--------------------- 1 file changed, 8 insertions(+), 21 deletions(-) diff --git a/pyiron_workflow/job.py b/pyiron_workflow/job.py index df1abfbd..68bd9195 100644 --- a/pyiron_workflow/job.py +++ b/pyiron_workflow/job.py @@ -61,27 +61,21 @@ class NodeJob(TemplateJob): >>> from pyiron_workflow import Workflow >>> import pyiron_workflow.job # To get the job registered in JOB_CLASS_DICT >>> - >>> @Workflow.wrap_as.single_value_node("t") - ... def Sleep(t): - ... from time import sleep - ... sleep(t) - ... 
return t - >>> >>> wf = Workflow("pyiron_node", overwrite_save=True) - >>> wf.sleep = Sleep(0) - >>> wf.out = wf.create.standard.UserInput(wf.sleep) + >>> wf.answer = Workflow.create.standard.UserInput(42) # Or your nodes >>> >>> pr = Project("test") >>> >>> nj = pr.create.job.NodeJob("my_node") >>> nj.node = wf - >>> nj.run() + >>> nj.run() # doctest:+ELLIPSIS + The job my_node was saved and received the ID: ... >>> print(nj.node.outputs.to_value_dict()) - {'out__user_input': 0} + {'answer__user_input': 42} >>> lj = pr.load(nj.job_name) >>> print(nj.node.outputs.to_value_dict()) - {'out__user_input': 0} + {'answer__user_input': 42} >>> pr.remove_jobs(recursive=True, silently=True) >>> pr.remove(enable=True) @@ -183,26 +177,19 @@ def create_job_with_python_wrapper(project, node): >>> from pyiron_workflow import Workflow >>> from pyiron_workflow.job import create_job_with_python_wrapper >>> - >>> @Workflow.wrap_as.single_value_node("t") - ... def Sleep(t): - ... from time import sleep - ... sleep(t) - ... 
return t - >>> >>> wf = Workflow("pyiron_node", overwrite_save=True) - >>> wf.sleep = Sleep(0) - >>> wf.out = wf.create.standard.UserInput(wf.sleep) + >>> wf.answer = Workflow.create.standard.UserInput(42) # Or your nodes >>> >>> pr = Project("test") >>> >>> nj = create_job_with_python_wrapper(pr, wf) >>> nj.run() >>> print(nj.output["result"].outputs.to_value_dict()) - {'out__user_input': 0} + {'answer__user_input': 42} >>> lj = pr.load(nj.job_name) >>> print(nj.output["result"].outputs.to_value_dict()) - {'out__user_input': 0} + {'answer__user_input': 42} >>> pr.remove_jobs(recursive=True, silently=True) >>> pr.remove(enable=True) From 9cc5fa5f0a0f40ddeaa81fdc1ad6f7bb5292918e Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Wed, 7 Feb 2024 21:28:16 +0000 Subject: [PATCH 139/166] Format black --- pyiron_workflow/job.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/pyiron_workflow/job.py b/pyiron_workflow/job.py index 68bd9195..873b80f5 100644 --- a/pyiron_workflow/job.py +++ b/pyiron_workflow/job.py @@ -42,7 +42,8 @@ class NodeJob(TemplateJob): - __doc__ = """ + __doc__ = ( + """ This job is an intermediate feature for accessing pyiron's queue submission infrastructure for nodes (function nodes, macros, or entire workflows). @@ -80,7 +81,9 @@ class NodeJob(TemplateJob): >>> pr.remove_jobs(recursive=True, silently=True) >>> pr.remove(enable=True) - """ + _WARNINGS_STRING + """ + + _WARNINGS_STRING + ) def __init__(self, project, job_name): super().__init__(project, job_name) @@ -105,8 +108,9 @@ def node(self, new_node: Node): self.raise_working_directory_error() else: self._node = new_node - self.input._class_type = f"{new_node.__class__.__module__}." \ - f"{new_node.__class__.__name__}" + self.input._class_type = ( + f"{new_node.__class__.__module__}." 
f"{new_node.__class__.__name__}" + ) self.input._label = new_node.label @staticmethod @@ -158,7 +162,8 @@ def _run_node(node): def create_job_with_python_wrapper(project, node): - __doc__ = """ + __doc__ = ( + """ A convenience wrapper around :meth:`pyiron_base.Project.wrap_python_function` for running a `pyiron_workflow.Workflow`. (And _only_ workflows, `Function` and `Macro` children will fail.) @@ -194,7 +199,9 @@ def create_job_with_python_wrapper(project, node): >>> pr.remove_jobs(recursive=True, silently=True) >>> pr.remove(enable=True) - """ + _WARNINGS_STRING + """ + + _WARNINGS_STRING + ) job = project.wrap_python_function(_run_node) job.input["node"] = node return job From bb2647ba70bc0465ed9ccecb798ee3ab27bcde80 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 14 Feb 2024 13:05:11 -0800 Subject: [PATCH 140/166] Move contrib and h5io_browser to the main dependencies --- .ci_support/environment-tinybase.yml | 6 ------ .ci_support/environment.yml | 2 ++ setup.py | 7 ++----- 3 files changed, 4 insertions(+), 11 deletions(-) delete mode 100644 .ci_support/environment-tinybase.yml diff --git a/.ci_support/environment-tinybase.yml b/.ci_support/environment-tinybase.yml deleted file mode 100644 index a766e7ca..00000000 --- a/.ci_support/environment-tinybase.yml +++ /dev/null @@ -1,6 +0,0 @@ -channels: -- conda-forge -dependencies: -- boto3 -- h5io_browser =0.0.6 -- pyiron_contrib =0.1.14 \ No newline at end of file diff --git a/.ci_support/environment.yml b/.ci_support/environment.yml index 978884a7..acbc180e 100644 --- a/.ci_support/environment.yml +++ b/.ci_support/environment.yml @@ -7,7 +7,9 @@ dependencies: - cloudpickle =3.0.0 - graphviz =8.1.0 - h5io =0.2.2 +- h5io_browser =0.0.6 - matplotlib =3.8.2 +- pyiron_contrib =0.1.14 - pympipool =0.7.13 - python-graphviz =0.20.1 - toposort =1.10 diff --git a/setup.py b/setup.py index e8d02b61..835f8959 100644 --- a/setup.py +++ b/setup.py @@ -32,7 +32,9 @@ 'cloudpickle==3.0.0', 'graphviz==0.20.1', 
'h5io==0.2.2', + 'h5io_browser==0.0.6', 'matplotlib==3.8.2', + 'pyiron_contrib==0.1.14', 'pympipool==0.7.13', 'toposort==1.10', 'typeguard==4.1.5', @@ -45,11 +47,6 @@ 'phonopy==2.21.0', 'pyiron_atomistics==0.4.14', ], - "tinybase": [ - 'boto3', # Just because pyiron_contrib is not making sure it's there - 'h5io_browser==0.0.6', - 'pyiron_contrib==0.1.14', - ] }, cmdclass=versioneer.get_cmdclass(), From 4a8eec2dd6df64229af52e988a9ba4b4047b4eda Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Wed, 14 Feb 2024 21:05:38 +0000 Subject: [PATCH 141/166] [dependabot skip] Update env file --- .binder/environment.yml | 2 ++ docs/environment.yml | 2 ++ 2 files changed, 4 insertions(+) diff --git a/.binder/environment.yml b/.binder/environment.yml index b7f1e24d..1f04e8bb 100644 --- a/.binder/environment.yml +++ b/.binder/environment.yml @@ -7,7 +7,9 @@ dependencies: - cloudpickle =3.0.0 - graphviz =8.1.0 - h5io =0.2.2 +- h5io_browser =0.0.6 - matplotlib =3.8.2 +- pyiron_contrib =0.1.14 - pympipool =0.7.13 - python-graphviz =0.20.1 - toposort =1.10 diff --git a/docs/environment.yml b/docs/environment.yml index 4d501b22..ec1ef77f 100644 --- a/docs/environment.yml +++ b/docs/environment.yml @@ -12,7 +12,9 @@ dependencies: - cloudpickle =3.0.0 - graphviz =8.1.0 - h5io =0.2.2 +- h5io_browser =0.0.6 - matplotlib =3.8.2 +- pyiron_contrib =0.1.14 - pympipool =0.7.13 - python-graphviz =0.20.1 - toposort =1.10 From 83b7db4ef2ea211e0dcaa4841ea4b82e427e1af6 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 14 Feb 2024 13:09:58 -0800 Subject: [PATCH 142/166] Update h5io_browser dep --- .ci_support/environment.yml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.ci_support/environment.yml b/.ci_support/environment.yml index acbc180e..955addd5 100644 --- a/.ci_support/environment.yml +++ b/.ci_support/environment.yml @@ -7,7 +7,7 @@ dependencies: - cloudpickle =3.0.0 - graphviz =8.1.0 - h5io =0.2.2 -- h5io_browser =0.0.6 +- h5io_browser =0.0.8 - 
matplotlib =3.8.2 - pyiron_contrib =0.1.14 - pympipool =0.7.13 diff --git a/setup.py b/setup.py index 835f8959..6c44aaa5 100644 --- a/setup.py +++ b/setup.py @@ -32,7 +32,7 @@ 'cloudpickle==3.0.0', 'graphviz==0.20.1', 'h5io==0.2.2', - 'h5io_browser==0.0.6', + 'h5io_browser==0.0.8', 'matplotlib==3.8.2', 'pyiron_contrib==0.1.14', 'pympipool==0.7.13', From 0b65f3d50e6620c7bd9c3f4f46f6cb5dcfedc983 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 14 Feb 2024 13:43:21 -0800 Subject: [PATCH 143/166] Make sure the tinybase storage directory exists before saving there Or you can get a weird h5py error about not being able to synchronously create file --- pyiron_workflow/storage.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index 545e6541..107e807b 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -65,6 +65,10 @@ def _save(self, backend: Literal["h5io", "tinybase"]): overwrite=True, # Don't worry about efficiency or updating yet ) elif backend == "tinybase": + os.makedirs( + os.path.dirname(self._tinybase_storage_file_path), + exist_ok=True + ) # Make sure the path to the storage location exists self.node.to_storage(self._tinybase_storage) else: raise ValueError( From 9392a1e141d81e1bcfbadc6797b7239b33843649 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 14 Feb 2024 13:47:20 -0800 Subject: [PATCH 144/166] Make available backend depend on python version h5io_browser used in the tinybase backend borks hard if the version is <3.11. 
We'll see if h5io has performance issues once this hits the CI --- pyiron_workflow/storage.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index 107e807b..771c19d9 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -6,6 +6,7 @@ from __future__ import annotations import os +import sys from typing import Literal, TYPE_CHECKING import h5io @@ -15,7 +16,7 @@ if TYPE_CHECKING: from pyiron_workflow.node import Node -ALLOWED_BACKENDS = ["h5io", "tinybase"] +ALLOWED_BACKENDS = ["h5io", "tinybase"] if sys.version_info >= (3, 11) else ["h5io"] class TypeNotFoundError(ImportError): From d874c87c2c7fc666d4f844ceb965d68bfbbcfed6 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 14 Feb 2024 14:13:37 -0800 Subject: [PATCH 145/166] Force save_after_run to carry the backend --- pyiron_workflow/composite.py | 2 +- pyiron_workflow/function.py | 2 +- pyiron_workflow/macro.py | 2 +- pyiron_workflow/node.py | 11 ++++++----- pyiron_workflow/workflow.py | 2 +- 5 files changed, 10 insertions(+), 9 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index bef971dc..c5a25279 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -110,7 +110,7 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, - save_after_run: bool = False, + save_after_run: Literal["h5io", "tinybase"] | None = None, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, outputs_map: Optional[dict | bidict] = None, diff --git a/pyiron_workflow/function.py b/pyiron_workflow/function.py index 1ef14a24..16b99092 100644 --- a/pyiron_workflow/function.py +++ b/pyiron_workflow/function.py @@ -334,7 +334,7 @@ def __init__( overwrite_save: bool = False, run_after_init: bool = False, storage_backend: Literal["h5io", "tinybase"] = "h5io", - save_after_run: bool = False, + save_after_run: 
Literal["h5io", "tinybase"] | None = None, output_labels: Optional[str | list[str] | tuple[str]] = None, **kwargs, ): diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index d7a89424..ec4fcead 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -271,7 +271,7 @@ def __init__( overwrite_save: bool = False, run_after_init: bool = False, storage_backend: Literal["h5io", "tinybase"] = "h5io", - save_after_run: bool = False, + save_after_run: Literal["h5io", "tinybase"] | None = None, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, outputs_map: Optional[dict | bidict] = None, diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index b7096d41..9deee0f3 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -259,8 +259,9 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): node. Must be specified in child classes. running (bool): Whether the node has called :meth:`run` and has not yet received output from this call. (Default is False.) - save_after_run (bool): Whether to trigger a save after each run of the node - (currently causes the entire graph to save). (Default is False.) + save_after_run (Literal["h5io" | "tinybase"] | None): Whether to trigger a save + after each run of the node (currently causes the entire graph to save). + (Default is None, which does not save after running.) signals (pyiron_workflow.io.Signals): A container for input and output signals, which are channels for controlling execution flow. 
By default, has a :attr:`signals.inputs.run` channel which has a callback to the :meth:`run` method @@ -311,7 +312,7 @@ def __init__( overwrite_save: bool = False, run_after_init: bool = False, storage_backend: Literal["h5io", "tinybase"] = "h5io", - save_after_run: bool = False, + save_after_run: Literal["h5io", "tinybase"] | None = None, **kwargs, ): """ @@ -678,8 +679,8 @@ def _finish_run(self, run_output: tuple | Future) -> Any | tuple: self.failed = True raise e finally: - if self.save_after_run: - self.save() + if self.save_after_run is not None: + self.save(backend=self.save_after_run) def _finish_run_and_emit_ran(self, run_output: tuple | Future) -> Any | tuple: processed_output = self._finish_run(run_output) diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index ffe6b64e..bfabd55b 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -192,7 +192,7 @@ def __init__( overwrite_save: bool = False, run_after_init: bool = False, storage_backend: Literal["h5io", "tinybase"] = "h5io", - save_after_run: bool = False, + save_after_run: Literal["h5io", "tinybase"] | None = None, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, outputs_map: Optional[dict | bidict] = None, From b04dc8b0f774befd5b5ffae9d8a09f88aae93c12 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 14 Feb 2024 15:01:36 -0800 Subject: [PATCH 146/166] Make the backend an attribute and return save_after_run to bool The storage backend is always grabbed from the graph root --- pyiron_workflow/composite.py | 10 +++++-- pyiron_workflow/function.py | 5 ++-- pyiron_workflow/macro.py | 5 ++-- pyiron_workflow/node.py | 55 +++++++++++++++++++++++++++--------- pyiron_workflow/workflow.py | 11 ++++---- 5 files changed, 61 insertions(+), 25 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index c5a25279..98c99926 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -110,14 +110,20 
@@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, - save_after_run: Literal["h5io", "tinybase"] | None = None, + storage_backend: Optional[Literal["h5io", "tinybase"]] = None, + save_after_run: bool = False, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, outputs_map: Optional[dict | bidict] = None, **kwargs, ): super().__init__( - *args, label=label, parent=parent, save_after_run=save_after_run, **kwargs + *args, + label=label, + parent=parent, + save_after_run=save_after_run, + storage_backend=storage_backend, + **kwargs ) self.strict_naming: bool = strict_naming self._inputs_map = None diff --git a/pyiron_workflow/function.py b/pyiron_workflow/function.py index 16b99092..abda650c 100644 --- a/pyiron_workflow/function.py +++ b/pyiron_workflow/function.py @@ -333,8 +333,8 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, - storage_backend: Literal["h5io", "tinybase"] = "h5io", - save_after_run: Literal["h5io", "tinybase"] | None = None, + storage_backend: Optional[Literal["h5io", "tinybase"]] = None, + save_after_run: bool = False, output_labels: Optional[str | list[str] | tuple[str]] = None, **kwargs, ): @@ -361,6 +361,7 @@ def __init__( label=label if label is not None else self.node_function.__name__, parent=parent, save_after_run=save_after_run, + storage_backend=storage_backend, # **kwargs, ) diff --git a/pyiron_workflow/macro.py b/pyiron_workflow/macro.py index ec4fcead..dabefc70 100644 --- a/pyiron_workflow/macro.py +++ b/pyiron_workflow/macro.py @@ -270,8 +270,8 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, - storage_backend: Literal["h5io", "tinybase"] = "h5io", - save_after_run: Literal["h5io", "tinybase"] | None = None, + storage_backend: Optional[Literal["h5io", "tinybase"]] = None, + save_after_run: bool = False, strict_naming: bool = 
True, inputs_map: Optional[dict | bidict] = None, outputs_map: Optional[dict | bidict] = None, @@ -300,6 +300,7 @@ def __init__( label=label if label is not None else self.graph_creator.__name__, parent=parent, save_after_run=save_after_run, + storage_backend=storage_backend, strict_naming=strict_naming, inputs_map=inputs_map, outputs_map=outputs_map, diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 9deee0f3..af7b3b2e 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -176,10 +176,11 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): your graph this could be expensive in terms of storage space and/or time. - [ALPHA ISSUE] Similarly, there is no way to save only part of a graph; only the entire graph may be saved at once. - - There are two possible back-ends for saving: one leaning on + - [ALPHA ISSUE] There are two possible back-ends for saving: one leaning on `tinybase.storage.GenericStorage` (in practice, `H5ioStorage(GenericStorage)`), and the other, default back-end that uses - the `h5io` module directly. + the `h5io` module directly. The backend used is always the one on the graph + root. - [ALPHA ISSUE] Restrictions on data: - For the `h5io` backend: Most data that can be pickled will be fine, but some classes will hit an edge case and throw an exception from `h5io` @@ -259,9 +260,11 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): node. Must be specified in child classes. running (bool): Whether the node has called :meth:`run` and has not yet received output from this call. (Default is False.) - save_after_run (Literal["h5io" | "tinybase"] | None): Whether to trigger a save - after each run of the node (currently causes the entire graph to save). - (Default is None, which does not save after running.) + save_after_run (bool): Whether to trigger a save after each run of the node + (currently causes the entire graph to save). (Default is False.) 
+ storage_backend (Literal["h5io" | "tinybase"] | None): The flag for the + backend to use for saving and loading; for nodes in a graph the value on + the root node is always used. signals (pyiron_workflow.io.Signals): A container for input and output signals, which are channels for controlling execution flow. By default, has a :attr:`signals.inputs.run` channel which has a callback to the :meth:`run` method @@ -311,8 +314,8 @@ def __init__( parent: Optional[Composite] = None, overwrite_save: bool = False, run_after_init: bool = False, - storage_backend: Literal["h5io", "tinybase"] = "h5io", - save_after_run: Literal["h5io", "tinybase"] | None = None, + storage_backend: Optional[Literal["h5io", "tinybase"]] = None, + save_after_run: bool = False, **kwargs, ): """ @@ -341,6 +344,8 @@ # This is a simply stop-gap as we work out more sophisticated ways to reference # (or create) an executor process without ever trying to pickle a `_thread.lock` self.future: None | Future = None + self._storage_backend = None + self.storage_backend = storage_backend self.save_after_run = save_after_run def __post__( @@ -348,7 +353,6 @@ *args, overwrite_save: bool = False, run_after_init: bool = False, - storage_backend: Literal["h5io", "tinybase"] = "h5io", **kwargs, ): if overwrite_save: @@ -369,7 +373,7 @@ f"load it...(To delete the saved file instead, use " f"`overwrite_save=True`)" ) - self.load(mode=storage_backend) + self.load() elif run_after_init: try: self.run() @@ -679,8 +683,8 @@ def _finish_run(self, run_output: tuple | Future) -> Any | tuple: self.failed = True raise e finally: - if self.save_after_run is not None: - self.save(backend=self.save_after_run) + if self.save_after_run: + self.save() def _finish_run_and_emit_ran(self, run_output: tuple | Future) -> Any | tuple: processed_output = self._finish_run(run_output) @@ -1200,17 +1204,18 @@ def from_storage(self, storage): usual. 
""" - def save(self, backend: Literal["h5io", "tinybase"] = "h5io"): + def save(self): """ Writes the node to file (using HDF5) such that a new node instance of the same type can :meth:`load()` the data to return to the same state as the save point, i.e. the same data IO channel values, the same flags, etc. """ + backend = "h5io" if self.storage_backend is None else self.storage_backend self.storage.save(backend=backend) save.__doc__ += _save_load_warnings - def load(self, mode: Literal["h5io", "tinybase"] = "h5io"): + def load(self): """ Loads the node file (from HDF5) such that this node restores its state at time of loading. @@ -1218,10 +1223,32 @@ def load(self, mode: Literal["h5io", "tinybase"] = "h5io"): Raises: TypeError) when the saved node has a different class name. """ - self.storage.load(backend=mode) + backend = "h5io" if self.storage_backend is None else self.storage_backend + self.storage.load(backend=backend) save.__doc__ += _save_load_warnings + @property + def storage_backend(self): + if self.parent is None: + return self._storage_backend + else: + return self.graph_root.storage_backend + + @storage_backend.setter + def storage_backend(self, new_backend): + if ( + new_backend is not None + and self.parent is not None + and new_backend != self.graph_root.storage_backend + ): + raise ValueError( + f"Storage backends should only be set on the graph root " + f"({self.graph_root.label}), not on child ({self.label})" + ) + else: + self._storage_backend = new_backend + @property def storage(self): return StorageInterface(self) diff --git a/pyiron_workflow/workflow.py b/pyiron_workflow/workflow.py index bfabd55b..bb2c6a6f 100644 --- a/pyiron_workflow/workflow.py +++ b/pyiron_workflow/workflow.py @@ -191,8 +191,8 @@ def __init__( *nodes: Node, overwrite_save: bool = False, run_after_init: bool = False, - storage_backend: Literal["h5io", "tinybase"] = "h5io", - save_after_run: Literal["h5io", "tinybase"] | None = None, + storage_backend: 
Optional[Literal["h5io", "tinybase"]] = None, + save_after_run: bool = False, strict_naming: bool = True, inputs_map: Optional[dict | bidict] = None, outputs_map: Optional[dict | bidict] = None, @@ -202,6 +202,7 @@ def __init__( label=label, parent=None, save_after_run=save_after_run, + storage_backend=storage_backend, strict_naming=strict_naming, inputs_map=inputs_map, outputs_map=outputs_map, @@ -375,8 +376,8 @@ def _rebuild_execution_graph(self, storage): ) self.starting_nodes = [self.nodes[label] for label in storage["starting_nodes"]] - def save(self, backend: Literal["h5io", "tinybase"] = "h5io"): - if backend == "tinybase" and any( + def save(self): + if self.storage_backend == "tinybase" and any( node.package_identifier is None for node in self ): raise NotImplementedError( @@ -388,7 +389,7 @@ def save(self, backend: Literal["h5io", "tinybase"] = "h5io"): f"like any other node package. Remember that this new module needs to " f"be in your python path and importable at load time too." 
) - super().save(backend=backend) + super().save() @property def _owned_io_panels(self) -> list[IO]: From 22954eb1db6a30f55b4c76f652a7bf4884d7316c Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 14 Feb 2024 15:01:45 -0800 Subject: [PATCH 147/166] Update tests --- tests/unit/test_macro.py | 9 ++- tests/unit/test_node.py | 143 +++++++++++++++++++++--------------- tests/unit/test_workflow.py | 95 +++++++++++++----------- 3 files changed, 143 insertions(+), 104 deletions(-) diff --git a/tests/unit/test_macro.py b/tests/unit/test_macro.py index ea6b9a81..2d6a4a79 100644 --- a/tests/unit/test_macro.py +++ b/tests/unit/test_macro.py @@ -9,6 +9,7 @@ from pyiron_workflow.channels import NOT_DATA from pyiron_workflow.function import SingleValue from pyiron_workflow.macro import Macro, macro_node +from pyiron_workflow.storage import ALLOWED_BACKENDS from pyiron_workflow.topology import CircularDataFlowError @@ -523,10 +524,12 @@ def test_storage_for_modified_macros(self): ensure_tests_in_python_path() Macro.register("static.demo_nodes", domain="demo") - for backend in ["h5io", "tinybase"]: + for backend in ALLOWED_BACKENDS: with self.subTest(backend): try: - macro = Macro.create.demo.AddThree(label="m", x=0) + macro = Macro.create.demo.AddThree( + label="m", x=0, storage_backend=backend + ) original_result = macro() macro.replace_node(macro.two, Macro.create.demo.AddPlusOne()) @@ -543,7 +546,7 @@ def test_storage_for_modified_macros(self): modified_result = macro() - macro.save(backend=backend) + macro.save() reloaded = Macro.create.demo.AddThree( label="m", storage_backend=backend ) diff --git a/tests/unit/test_node.py b/tests/unit/test_node.py index b0827a46..b0bee1a9 100644 --- a/tests/unit/test_node.py +++ b/tests/unit/test_node.py @@ -1,6 +1,6 @@ from concurrent.futures import Future import os -from typing import Literal +from typing import Literal, Optional import unittest from pyiron_workflow.channels import InputData, OutputData, NOT_DATA @@ -8,6 +8,7 @@ from 
pyiron_workflow.interfaces import Executor from pyiron_workflow.io import Inputs, Outputs from pyiron_workflow.node import Node +from pyiron_workflow.storage import ALLOWED_BACKENDS def add_one(x): @@ -22,11 +23,13 @@ def __init__( label, overwrite_save=False, run_after_init=False, - storage_backend: Literal["h5io", "tinybase"] = "h5io", - save_after_run=False, + storage_backend: Optional[Literal["h5io", "tinybase"]] = None, + save_after_run: bool = False, x=None, ): - super().__init__(label=label, save_after_run=save_after_run) + super().__init__( + label=label, save_after_run=save_after_run, storage_backend=storage_backend + ) self._inputs = Inputs(InputData("x", self, type_hint=int)) self._outputs = Outputs(OutputData("y", self, type_hint=int)) if x is not None: @@ -376,64 +379,86 @@ def test_storage(self): msg="Sanity check on initial state" ) y = self.n1() - self.n1.save() - - x = self.n1.inputs.x.value - reloaded = ANode(self.n1.label, x=x) - self.assertEqual( - y, - reloaded.outputs.y.value, - msg="Nodes should load by default if they find a save file" - ) - - clean_slate = ANode(self.n1.label, x=x, overwrite_save=True) - self.assertIs( - clean_slate.outputs.y.value, - NOT_DATA, - msg="Users should be able to ignore a save" - ) - - run_right_away = ANode(self.n1.label, x=x, run_after_init=True) - self.assertEqual( - y, - run_right_away.outputs.y.value, - msg="With nothing to load, running after init is fine" - ) - - run_right_away.save() - with self.assertRaises( - ValueError, - msg="Should be able to both immediately run _and_ load a node at once" - ): - ANode(self.n1.label, x=x, run_after_init=True) - force_run = ANode(self.n1.label, x=x, run_after_init=True, overwrite_save=True) - self.assertEqual( - y, - force_run.outputs.y.value, - msg="Destroying the save should allow immediate re-running" - ) + for backend in ALLOWED_BACKENDS: + with self.subTest(backend): + self.n1.storage_backend = backend + self.n1.save() + + x = self.n1.inputs.x.value + reloaded = 
ANode(self.n1.label, x=x, storage_backend=backend) + self.assertEqual( + y, + reloaded.outputs.y.value, + msg="Nodes should load by default if they find a save file" + ) + + clean_slate = ANode(self.n1.label, x=x, overwrite_save=True) + self.assertIs( + clean_slate.outputs.y.value, + NOT_DATA, + msg="Users should be able to ignore a save" + ) + + run_right_away = ANode( + self.n1.label, x=x, run_after_init=True, storage_backend=backend + ) + self.assertEqual( + y, + run_right_away.outputs.y.value, + msg="With nothing to load, running after init is fine" + ) + + run_right_away.save() + with self.assertRaises( + ValueError, + msg="Should be able to both immediately run _and_ load a node at " + "once" + ): + ANode( + self.n1.label, x=x, run_after_init=True, storage_backend=backend + ) + + force_run = ANode( + self.n1.label, x=x, run_after_init=True, overwrite_save=True + ) + self.assertEqual( + y, + force_run.outputs.y.value, + msg="Destroying the save should allow immediate re-running" + ) def test_save_after_run(self): - ANode("just_run", x=0, run_after_init=True) - saves = ANode("run_and_save", x=0, run_after_init=True, save_after_run=True) - y = saves.outputs.y.value - - not_reloaded = ANode("just_run") - self.assertIs( - NOT_DATA, - not_reloaded.outputs.y.value, - msg="Should not have saved, therefore should have been nothing to load" - ) - - find_saved = ANode("run_and_save") - self.assertEqual( - y, - find_saved.outputs.y.value, - msg="Should have saved automatically after run, and reloaded on " - "instantiation" - ) - find_saved.storage.delete() # Clean up + for backend in ALLOWED_BACKENDS: + with self.subTest(backend): + try: + ANode("just_run", x=0, run_after_init=True, storage_backend=backend) + saves = ANode( + "run_and_save", + x=0, + run_after_init=True, + save_after_run=True, + storage_backend=backend + ) + y = saves.outputs.y.value + + not_reloaded = ANode("just_run", storage_backend=backend) + self.assertIs( + NOT_DATA, + not_reloaded.outputs.y.value, 
+ msg="Should not have saved, therefore should have been nothing " + "to load" + ) + + find_saved = ANode("run_and_save", storage_backend=backend) + self.assertEqual( + y, + find_saved.outputs.y.value, + msg="Should have saved automatically after run, and reloaded " + "on instantiation" + ) + finally: + saves.storage.delete() # Clean up if __name__ == '__main__': diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index f3a59a84..69221612 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -6,7 +6,7 @@ from pyiron_workflow._tests import ensure_tests_in_python_path from pyiron_workflow.channels import NOT_DATA from pyiron_workflow.snippets.dotdict import DotDict -from pyiron_workflow.storage import TypeNotFoundError +from pyiron_workflow.storage import TypeNotFoundError, ALLOWED_BACKENDS from pyiron_workflow.workflow import Workflow @@ -338,9 +338,9 @@ def add_three_macro(macro): wf.executor_shutdown() def test_storage_values(self): - for storage_backend in ["h5io", "tinybase"]: - with self.subTest(storage_backend): - wf = Workflow("wf") + for backend in ALLOWED_BACKENDS: + with self.subTest(backend): + wf = Workflow("wf", storage_backend=backend) try: wf.register("static.demo_nodes", domain="demo") wf.inp = wf.create.demo.AddThree(x=0) @@ -348,9 +348,9 @@ def test_storage_values(self): wf_out = wf() three_result = wf.inp.three.outputs.add.value - wf.save(backend=storage_backend) + wf.save() - reloaded = Workflow("wf", storage_backend=storage_backend) + reloaded = Workflow("wf", storage_backend=backend) self.assertEqual( wf_out.out__add, reloaded.outputs.out__add.value, @@ -374,10 +374,11 @@ def test_storage_scopes(self): # Note that the type hint `Optional[int]` from OptionallyAdd defines a custom # reconstructor, which borks h5io - for backend in ["h5io", "tinybase"]: + for backend in ALLOWED_BACKENDS: with self.subTest(backend): try: - wf.save(backend=backend) + wf.storage_backend = backend + wf.save() 
Workflow(wf.label, storage_backend=backend) finally: wf.storage.delete() @@ -385,43 +386,51 @@ def test_storage_scopes(self): with self.subTest("No unimportable nodes for either back-end"): try: wf.import_type_mismatch = wf.create.demo.dynamic() - for backend in ["h5io", "tinybase"]: + for backend in ALLOWED_BACKENDS: with self.subTest(backend): with self.assertRaises( TypeNotFoundError, msg="Imported object is function but node type is node -- " "should fail early on save" ): - wf.save(backend=backend) + wf.storage_backend = backend + wf.save() finally: wf.remove_node(wf.import_type_mismatch) - wf.add_node(PlusOne(label="local_but_importable")) - try: - wf.save(backend="h5io") - Workflow(wf.label, storage_backend="h5io") - finally: - wf.storage.delete() - - with self.assertRaises( - NotImplementedError, - msg="Storage docs for tinybase claim all children must be registered nodes" - ): - wf.save(backend="tinybase") - - with self.subTest("Instanced node"): - wf.direct_instance = Workflow.create.Function(plus_one) + if "h5io" in ALLOWED_BACKENDS: + wf.add_node(PlusOne(label="local_but_importable")) try: - with self.assertRaises( - TypeError, - msg="No direct node instances, only children with functions as " - "_class_ attribtues" - ): - wf.save(backend="h5io") + wf.storage_backend = "h5io" + wf.save() + Workflow(wf.label, storage_backend="h5io") finally: - wf.remove_node(wf.direct_instance) wf.storage.delete() + if "tinybase" in ALLOWED_BACKENDS: + with self.assertRaises( + NotImplementedError, + msg="Storage docs for tinybase claim all children must be registered " + "nodes" + ): + wf.storage_backend = "tinybase" + wf.save() + + if "h5io" in ALLOWED_BACKENDS: + with self.subTest("Instanced node"): + wf.direct_instance = Workflow.create.Function(plus_one) + try: + with self.assertRaises( + TypeError, + msg="No direct node instances, only children with functions as " + "_class_ attribtues" + ): + wf.storage_backend = "h5io" + wf.save() + finally: + 
wf.remove_node(wf.direct_instance) + wf.storage.delete() + with self.subTest("Unimportable node"): @Workflow.wrap_as.single_value_node("y") def UnimportableScope(x): @@ -429,16 +438,18 @@ def UnimportableScope(x): wf.unimportable_scope = UnimportableScope() - try: - with self.assertRaises( - TypeNotFoundError, - msg="Nodes must live in an importable scope to save with the h5io " - "backend" - ): - wf.save(backend="h5io") - finally: - wf.remove_node(wf.unimportable_scope) - wf.storage.delete() + if "h5io" in ALLOWED_BACKENDS: + try: + with self.assertRaises( + TypeNotFoundError, + msg="Nodes must live in an importable scope to save with the " + "h5io backend" + ): + wf.storage_backend = "h5io" + wf.save() + finally: + wf.remove_node(wf.unimportable_scope) + wf.storage.delete() if __name__ == '__main__': From aebd83556803e8bf7c5097b331c4f678f1a016f4 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Wed, 14 Feb 2024 15:01:56 -0800 Subject: [PATCH 148/166] Rerun notebooks --- notebooks/atomistics_nodes.ipynb | 12 +- notebooks/deepdive.ipynb | 402 ++++++++++++++++++------------ notebooks/quickstart.ipynb | 412 +++++++++++++++---------------- 3 files changed, 462 insertions(+), 364 deletions(-) diff --git a/notebooks/atomistics_nodes.ipynb b/notebooks/atomistics_nodes.ipynb index 4b34fb26..933155f4 100644 --- a/notebooks/atomistics_nodes.ipynb +++ b/notebooks/atomistics_nodes.ipynb @@ -91,18 +91,18 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel accumulate_and_run was not connected to ran, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel ran was not connected to accumulate_and_run, andthus could not disconnect from it.\n", " warn(\n", - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel ran was not connected to 
accumulate_and_run, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel accumulate_and_run was not connected to ran, andthus could not disconnect from it.\n", " warn(\n", - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", " warn(\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 4, @@ -168,7 +168,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 6, @@ -207,7 +207,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.4" + "version": "3.11.7" } }, "nbformat": 4, diff --git a/notebooks/deepdive.ipynb b/notebooks/deepdive.ipynb index 337ff46b..bc98496b 100644 --- a/notebooks/deepdive.ipynb +++ b/notebooks/deepdive.ipynb @@ -524,8 +524,6 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel ran was not connected to run, andthus could not disconnect from it.\n", - " warn(\n", "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", " warn(\n" ] @@ -991,7 +989,7 @@ }, { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAiMAAAGgCAYAAAB45mdaAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAqoElEQVR4nO3df1RcdX7/8dcwBCamYSyJgYlBgmlMIHTdBUoC2XSPP4KJlt2cbSutNTE28UhWV5HqfuXEiuR4Dt0fptFtQOMmpmmiy9Gouzll0Tnnu5sQsaUh5JzNYldr2IXEQQrpzqAuYOB+/8gX6jiQcEeYDzM8H+fcP+bD586853Oi9zWfe+/nOizLsgQAAGBInOkCAADAzEYYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEbZDiPHjh1TcXGxFi5cKIfDoddff/2y+xw9elS5ublyuVy69tpr9eyzz4ZTKwAAiEHxdnf4+OOPdf311+vuu+/Wn//5n1+2f3t7u2699Vbdc889OnjwoN566y1961vf0lVXXTWh/SVpeHhYH3zwgebOnSuHw2G3ZAAAYIBlWerr69PChQsVF3eJ+Q/rC5Bkvfbaa5fs853vfMdavnx5UNu9995rrVq1asKf09nZaUliY2NjY2Nji8Kts7Pzksd52zMjdr399tsqKioKarvlllu0d+9effrpp5o1a1bIPgMDAxoYGBh9bf3/Bwt3dnYqKSlpagsGAACTIhAIKC0tTXPnzr1kvykPI11dXUpJSQlqS0lJ0YULF9TT0yOPxxOyT3V1taqqqkLak5KSCCMAAESZy11iEZG7aT5fxMhMx3jFVVRUyO/3j26dnZ1TXiMAADBjymdGUlNT1dXVFdTW3d2t+Ph4zZs3b8x9EhMTlZiYONWlAQCAaWDKZ0YKCgrk9XqD2t58803l5eWNeb0IAACYWWyHkY8++kinTp3SqVOnJF28dffUqVPq6OiQdPEUy6ZNm0b7l5aW6re//a3Ky8v1zjvvaN++fdq7d68efvjhyfkGAAAgqtk+TXPixAndcMMNo6/Ly8slSXfddZf2798vn883GkwkKSMjQ/X19XrooYe0e/duLVy4UM8888yE1xgBAACxzWGNXE06jQUCAbndbvn9fu6mAQAgSkz0+M2zaQAAgFGEEQAAYNSU39oLmDA0bKm5/by6+/q1YK5L+RnJcsbxXCMAmI4II4g5Dad9qjrSJp+/f7TN43apsjhL67JDV/wFgJlquvxwI4wgpjSc9mnbwZP6/FXZXf5+bTt4UrV35hBIAEDT64cb14wgZgwNW6o60hYSRCSNtlUdadPQ8LS/gQwAptTID7fPBhHpf3+4NZz2RbQewghiRnP7+ZD/sD7LkuTz96u5/XzkigKAaWY6/nAjjCBmdPeNH0TC6QcAsWg6/nAjjCBmLJjrmtR+ABCLpuMPN8IIYkZ+RrI8bpfGuw7coYsXZ+VnJEeyLACYVqbjDzfCCGKGM86hyuIsSQoJJCOvK4uzWG8EwIw2HX+4EUYQU9Zle1R7Z45S3cGJPtXt4rZeAND0/OHGg/IQk6bLQj4AMF1FYp2RiR6/CSMAAMxQU/3DbaLHb1ZgBQBghnLGOVSwZJ7pMrhmBAAAmEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgVFhhpKamRhkZGXK5XMrNzVVjY+Ml++/evVuZmZmaPXu2li1bpgMHDoRVLAAAiD3xdneoq6tTWVmZampqtHr1aj333HNav3692tradM0114T0r62tVUVFhZ5//nn9yZ/8iZqbm3XPPffoD//wD1VcXDwpXwIAAEQvh2VZlp0dVq5cqZycHNXW1o62ZWZmasOGDaqurg7pX1hYqNWrV+v73//+aFtZWZl
OnDih48ePT+gzA4GA3G63/H6/kpKS7JQLAAAMmejx29ZpmsHBQbW0tKioqCiovaioSE1NTWPuMzAwIJfLFdQ2e/ZsNTc369NPPx13n0AgELQBAIDYZCuM9PT0aGhoSCkpKUHtKSkp6urqGnOfW265RT/60Y/U0tIiy7J04sQJ7du3T59++ql6enrG3Ke6ulput3t0S0tLs1MmAACIImFdwOpwOIJeW5YV0jbi7//+77V+/XqtWrVKs2bN0je+8Q1t3rxZkuR0Osfcp6KiQn6/f3Tr7OwMp0wAABAFbIWR+fPny+l0hsyCdHd3h8yWjJg9e7b27dunTz75RL/5zW/U0dGhxYsXa+7cuZo/f/6Y+yQmJiopKSloAwAAsclWGElISFBubq68Xm9Qu9frVWFh4SX3nTVrlhYtWiSn06kf//jH+rM/+zPFxbHMCQAAM53tW3vLy8u1ceNG5eXlqaCgQHv27FFHR4dKS0slXTzFcu7cudG1RN599101Nzdr5cqV+p//+R/t3LlTp0+f1j//8z9P7jcBAABRyXYYKSkpUW9vr3bs2CGfz6fs7GzV19crPT1dkuTz+dTR0THaf2hoSE899ZR+/etfa9asWbrhhhvU1NSkxYsXT9qXAAAA0cv2OiMmsM4IAADRZ0rWGQEAAJhshBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARoUVRmpqapSRkSGXy6Xc3Fw1NjZesv+hQ4d0/fXX64orrpDH49Hdd9+t3t7esAoGAACxxXYYqaurU1lZmbZv367W1latWbNG69evV0dHx5j9jx8/rk2bNmnLli361a9+pZdffln/8R//oa1bt37h4gEAQPSzHUZ27typLVu2aOvWrcrMzNSuXbuUlpam2traMfv/27/9mxYvXqwHHnhAGRkZ+upXv6p7771XJ06c+MLFAwCA6GcrjAwODqqlpUVFRUVB7UVFRWpqahpzn8LCQp09e1b19fWyLEsffvihXnnlFd12223jfs7AwIACgUDQBgAAYpOtMNLT06OhoSGlpKQEtaekpKirq2vMfQoLC3Xo0CGVlJQoISFBqampuvLKK/XDH/5w3M+prq6W2+0e3dLS0uyUCQAAokhYF7A6HI6g15ZlhbSNaGtr0wMPPKDHH39cLS0tamhoUHt7u0pLS8d9/4qKCvn9/tGts7MznDIBAEAUiLfTef78+XI6nSGzIN3d3SGzJSOqq6u1evVqPfLII5KkL33pS5ozZ47WrFmjJ598Uh6PJ2SfxMREJSYm2ikNAABEKVszIwkJCcrNzZXX6w1q93q9KiwsHHOfTz75RHFxwR/jdDolXZxRAQAAFw0NW3r7/V795NQ5vf1+r4aGZ8Zx0tbMiCSVl5dr48aNysvLU0FBgfbs2aOOjo7R0y4VFRU6d+6cDhw4IEkqLi7WPffco9raWt1yyy3y+XwqKytTfn6+Fi5cOLnfBgCAKNVw2qeqI23y+ftH2zxulyqLs7QuO/QsQiyxHUZKSkrU29urHTt2yOfzKTs7W/X19UpPT5ck+Xy+oDVHNm/erL6+Pv3TP/2T/u7v/k5XXnmlbrzxRn33u9+dvG8BAEAUazjt07aDJ/X5eZAuf7+2HTyp2jtzYjqQOKwoOFcSCATkdrvl9/uVlJRkuhwAACbN0LClr373/wbNiHyWQ1Kq26Xj/+dGOePGvllkupro8Ztn0wAAYFBz+/lxg4gkWZJ8/n41t5+PXFERRhgBAMCg7r7xg0g4/aIRYQQAAIMWzHVNar9oRBgBAMCg/IxkedwujXc1iEMX76r
Jz0iOZFkRRRgBAMAgZ5xDlcVZkhQSSEZeVxZnRd3Fq3YQRgAAMGxdtke1d+Yo1R18KibV7Yr523qlMNYZAQAAk29dtkdrs1LV3H5e3X39WjD34qmZWJ4RGUEYAQBgmnDGOVSwZJ7pMiKO0zQAAMAowggAADCKMAIAAIwijAAAAKMIIwAAwCjCCAAAMIowAgAAjGKdkRloaNiakYvqAACmJ8LIDNNw2qeqI23y+f/3UdQet0uVxVkxv9wwAGB64jTNDNJw2qdtB08GBRFJ6vL3a9vBk2o47TNUGQBgJiOMzBBDw5aqjrTJGuNvI21VR9o0NDxWDwAApg5hZIZobj8fMiPyWZYkn79fze3nI1cUAAAijMwY3X3jB5Fw+gEAMFkIIzPEgrmuSe0HAMBkIYzMEPkZyfK4XRrvBl6HLt5Vk5+RHMmyAAAgjMwUzjiHKouzJCkkkIy8rizOYr0RAEDEEUZmkHXZHtXemaNUd/CpmFS3S7V35rDOCADACBY9m2HWZXu0NiuVFVgBANMGYWQGcsY5VLBknukyAACQxGkaAABgGGEEAAAYRRgBAABGEUYAAIBRYYWRmpoaZWRkyOVyKTc3V42NjeP23bx5sxwOR8i2YsWKsIsGAACxw3YYqaurU1lZmbZv367W1latWbNG69evV0dHx5j9n376afl8vtGts7NTycnJ+su//MsvXDwAAIh+DsuybD0zfuXKlcrJyVFtbe1oW2ZmpjZs2KDq6urL7v/666/rm9/8ptrb25Wenj6hzwwEAnK73fL7/UpKSrJTLgAAMGSix29bMyODg4NqaWlRUVFRUHtRUZGampom9B579+7VzTfffMkgMjAwoEAgELQBAIDYZCuM9PT0aGhoSCkpKUHtKSkp6urquuz+Pp9PP/vZz7R169ZL9quurpbb7R7d0tLS7JQJAACiSFgXsDocwUuHW5YV0jaW/fv368orr9SGDRsu2a+iokJ+v3906+zsDKdMAAAQBWwtBz9//nw5nc6QWZDu7u6Q2ZLPsyxL+/bt08aNG5WQkHDJvomJiUpMTLRTGgAAiFK2ZkYSEhKUm5srr9cb1O71elVYWHjJfY8ePar/+q//0pYtW+xXCQAAYpbtB+WVl5dr48aNysvLU0FBgfbs2aOOjg6VlpZKuniK5dy5czpw4EDQfnv37tXKlSuVnZ09OZUDAICYYDuMlJSUqLe3Vzt27JDP51N2drbq6+tH747x+Xwha474/X4dPnxYTz/99ORUDQAAYobtdUZMYJ0RAACiz5SsMwIAADDZCCMAAMAowggAADCKMAIAAIwijAAAAKMIIwAAwCjCCAAAMIowAgAAjCKMAAAAowgjAADAKMIIAAAwijACAACMIowAAACjCCMAAMAowggAADCKMAIAAIyKN10AgMkxNGypuf28uvv6tWCuS/kZyXLGOUyXBQCXRRgBYkDDaZ+qjrTJ5+8fbfO4XaosztK6bI/BygDg8jhNA0S5htM+bTt4MiiISFKXv1/bDp5Uw2mfocoAYGIII0AUGxq2VHWkTdYYfxtpqzrSpqHhsXoAwPRAGAGiWHP7+ZAZkc+yJPn8/WpuPx+5ogDAJsIIEMW6+8YPIuH0AwATCCNAFFsw1zWp/QDABMIIEMXyM5Llcbs03g28Dl28qyY/IzmSZQGALYQRIIo54xyqLM6SpJBAMvK6sjiL9UYATGuEESDKrcv2qPbOHKW6g0/FpLpdqr0zh3VGAEx7LHoGxIB12R6tzUplBVYAUYkwAsQIZ5xDBUvmmS4DAGwjjEQpnkMCAIgVhJEoxHNIAACxhAtYowzPIQEAxBrCSBThOSQAgFgUVhipqalRRkaGXC6XcnNz1djYeMn+AwMD2r59u9LT05WYmKglS5Zo3759YRU8k/EcEgBALLJ9zUhdXZ3KyspUU1Oj1atX67nnntP69evV1tama665Zsx9br/9dn344Yfau3ev/uiP/kjd3d26cOHCFy5+puE5JACAWGQ
7jOzcuVNbtmzR1q1bJUm7du3SG2+8odraWlVXV4f0b2ho0NGjR3XmzBklJ19cknrx4sVfrOoZiueQAABika3TNIODg2ppaVFRUVFQe1FRkZqamsbc56c//any8vL0ve99T1dffbWuu+46Pfzww/r9738/7ucMDAwoEAgEbeA5JACA2GQrjPT09GhoaEgpKSlB7SkpKerq6hpznzNnzuj48eM6ffq0XnvtNe3atUuvvPKK7rvvvnE/p7q6Wm63e3RLS0uzU2bM4jkkAIBYFNYFrA5H8MHOsqyQthHDw8NyOBw6dOiQ8vPzdeutt2rnzp3av3//uLMjFRUV8vv9o1tnZ2c4ZcYknkMCAIg1tq4ZmT9/vpxOZ8gsSHd3d8hsyQiPx6Orr75abrd7tC0zM1OWZens2bNaunRpyD6JiYlKTEy0U9qMwnNIAACxxNbMSEJCgnJzc+X1eoPavV6vCgsLx9xn9erV+uCDD/TRRx+Ntr377ruKi4vTokWLwigZ0v8+h+QbX75aBUvmEUQAAFHL9mma8vJy/ehHP9K+ffv0zjvv6KGHHlJHR4dKS0slXTzFsmnTptH+d9xxh+bNm6e7775bbW1tOnbsmB555BH97d/+rWbPnj153wQAAEQl27f2lpSUqLe3Vzt27JDP51N2drbq6+uVnp4uSfL5fOro6Bjt/wd/8Afyer369re/rby8PM2bN0+33367nnzyycn7FgAAIGo5LMua9muHBwIBud1u+f1+JSUlmS4HAABMwESP3zybBgAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRYYWRmpoaZWRkyOVyKTc3V42NjeP2/cUvfiGHwxGy/ed//mfYRQMAgNhhO4zU1dWprKxM27dvV2trq9asWaP169ero6Pjkvv9+te/ls/nG92WLl0adtEAACB22A4jO3fu1JYtW7R161ZlZmZq165dSktLU21t7SX3W7BggVJTU0c3p9MZdtEAACB22Aojg4ODamlpUVFRUVB7UVGRmpqaLrnvV77yFXk8Ht100036+c9/fsm+AwMDCgQCQRsAAIhNtsJIT0+PhoaGlJKSEtSekpKirq6uMffxeDzas2ePDh8+rFdffVXLli3TTTfdpGPHjo37OdXV1XK73aNbWlqanTIBAEAUiQ9nJ4fDEfTasqyQthHLli3TsmXLRl8XFBSos7NTP/jBD/Snf/qnY+5TUVGh8vLy0deBQIBAAgBAjLI1MzJ//nw5nc6QWZDu7u6Q2ZJLWbVqld57771x/56YmKikpKSgDQAAxCZbYSQhIUG5ubnyer1B7V6vV4WFhRN+n9bWVnk8HjsfDQAAYpTt0zTl5eXauHGj8vLyVFBQoD179qijo0OlpaWSLp5iOXfunA4cOCBJ2rVrlxYvXqwVK1ZocHBQBw8e1OHDh3X48OHJ/SYAACAq2Q4jJSUl6u3t1Y4dO+Tz+ZSdna36+nqlp6dLknw+X9CaI4ODg3r44Yd17tw5zZ49WytWrNC//uu/6tZbb528bwEAAKKWw7Isy3QRlxMIBOR2u+X3+7l+BACAKDHR4zfPpgEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYFW+6ACAaDQ1bam4/r+6+fi2Y61J+RrKccQ7TZQFAVCKMADY1nPap6kibfP7+0TaP26XK4iyty/YYrAwAohO
naQAbGk77tO3gyaAgIkld/n5tO3hSDad9hioDgOhFGAEmaGjYUtWRNllj/G2krepIm4aGx+oBABgPYQSYoOb28yEzIp9lSfL5+9Xcfj5yRQFADCCMABPU3Td+EAmnHwDgIsIIMEEL5romtR8A4CLCCDBB+RnJ8rhdGu8GXocu3lWTn5EcybIAIOoRRoAJcsY5VFmcJUkhgWTkdWVxFuuNAIBNhBHAhnXZHtXemaNUd/CpmFS3S7V35rDOCACEgUXPAJvWZXu0NiuVFVgBYJIQRoAwOOMcKlgyz3QZABATwjpNU1NTo4yMDLlcLuXm5qqxsXFC+7311luKj4/Xl7/85XA+FgAAxCDbYaSurk5lZWXavn27WltbtWbNGq1fv14dHR2X3M/v92vTpk266aabwi4WAADEHodlWbbWrl65cqVycnJUW1s72paZmakNGzaourp63P3+6q/+SkuXLpXT6dTrr7+uU6dOjdt3YGBAAwMDo68DgYDS0tLk9/uVlJRkp1wAAGBIIBCQ2+2+7PHb1szI4OCgWlpaVFRUFNReVFSkpqamcfd74YUX9P7776uysnJCn1NdXS232z26paWl2SkTAABEEVthpKenR0NDQ0pJSQlqT0lJUVdX15j7vPfee3r00Ud16NAhxcdP7HrZiooK+f3+0a2zs9NOmQAAIIqEdTeNwxF8C6NlWSFtkjQ0NKQ77rhDVVVVuu666yb8/omJiUpMTAynNAAAEGVshZH58+fL6XSGzIJ0d3eHzJZIUl9fn06cOKHW1lbdf//9kqTh4WFZlqX4+Hi9+eabuvHGG79A+QAAINrZOk2TkJCg3Nxceb3eoHav16vCwsKQ/klJSfrlL3+pU6dOjW6lpaVatmyZTp06pZUrV36x6gEAQNSzfZqmvLxcGzduVF5engoKCrRnzx51dHSotLRU0sXrPc6dO6cDBw4oLi5O2dnZQfsvWLBALpcrpB0AAMxMtsNISUmJent7tWPHDvl8PmVnZ6u+vl7p6emSJJ/Pd9k1RwAAAEbYXmfEhInepwwAAKaPKVlnBAAAYLIRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABgVb7oAYMTQsKXm9vPq7uvXgrku5WckyxnnMF0WAGCKEUYwLTSc9qnqSJt8/v7RNo/bpcriLK3L9hisDAAw1ThNA+MaTvu07eDJoCAiSV3+fm07eFINp32GKgMARAJhBEYNDVuqOtIma4y/jbRVHWnT0PBYPQAAsYAwAqOa28+HzIh8liXJ5+9Xc/v5yBUFAIgowgiM6u4bP4iE0w8AEH0IIzBqwVzXpPYDAESfsMJITU2NMjIy5HK5lJubq8bGxnH7Hj9+XKtXr9a8efM0e/ZsLV++XP/4j/8YdsGILfkZyfK4XRrvBl6HLt5Vk5+RHMmyAAARZDuM1NXVqaysTNu3b1dra6vWrFmj9evXq6OjY8z+c+bM0f33369jx47pnXfe0WOPPabHHntMe/bs+cLFI/o54xyqLM6SpJBAMvK6sjiL9UYAIIY5LMuydZvCypUrlZOTo9ra2tG2zMxMbdiwQdXV1RN6j29+85uaM2eO/uVf/mVC/QOBgNxut/x+v5KSkuyUiyjBOiMAEHsmevy2tejZ4OCgWlpa9Oijjwa1FxUVqampaULv0draqqamJj355JPj9hkYGNDAwMDo60AgYKdMRKF12R6tzUplBVYAmIFshZGenh4NDQ0pJSUlqD0lJUVdXV2X3HfRokX67//+b124cEFPPPGEtm7dOm7f6upqVVVV2SkNMcAZ51DBknmmywAARFhYF7A6HMG/Vi3LCmn7vMbGRp04cULPPvusdu3apZdeemncvhUVFfL
7/aNbZ2dnOGUCAIAoYGtmZP78+XI6nSGzIN3d3SGzJZ+XkZEhSfrjP/5jffjhh3riiSf013/912P2TUxMVGJiop3SAABAlLI1M5KQkKDc3Fx5vd6gdq/Xq8LCwgm/j2VZQdeEAACAmcv2U3vLy8u1ceNG5eXlqaCgQHv27FFHR4dKS0slXTzFcu7cOR04cECStHv3bl1zzTVavny5pIvrjvzgBz/Qt7/97Un8GgAAIFrZDiMlJSXq7e3Vjh075PP5lJ2drfr6eqWnp0uSfD5f0Jojw8PDqqioUHt7u+Lj47VkyRL9wz/8g+69997J+xYAACBq2V5nxATWGQEAIPpM9PjNs2kAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFGEEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABG2X5qb6wYGrbU3H5e3X39WjDXpfyMZDnjHKbLAgBgxpmRYaThtE9VR9rk8/ePtnncLlUWZ2ldtsdgZQAAzDwz7jRNw2mfth08GRREJKnL369tB0+q4bTPUGUAAMxMMyqMDA1bqjrSJmuMv420VR1p09DwWD0AAMBUmFFhpLn9fMiMyGdZknz+fjW3n49cUQAAzHAzKox0940fRMLpBwAAvrgZFUYWzHVNaj8AAPDFzagwkp+RLI/bpfFu4HXo4l01+RnJkSwLAIAZbUaFEWecQ5XFWZIUEkhGXlcWZ7HeCAAAETSjwogkrcv2qPbOHKW6g0/FpLpdqr0zh3VGAACIsBm56Nm6bI/WZqWyAisAANPAjAwj0sVTNgVL5pkuAwCAGW/GnaYBAADTC2EEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABhFGAEAAEYRRgAAgFFhhZGamhplZGTI5XIpNzdXjY2N4/Z99dVXtXbtWl111VVKSkpSQUGB3njjjbALBgAAscV2GKmrq1NZWZm2b9+u1tZWrVmzRuvXr1dHR8eY/Y8dO6a1a9eqvr5eLS0tuuGGG1RcXKzW1tYvXDwAAIh+DsuyLDs7rFy5Ujk5OaqtrR1ty8zM1IYNG1RdXT2h91ixYoVKSkr0+OOPj/n3gYEBDQwMjL4OBAJKS0uT3+9XUlKSnXIBAIAhgUBAbrf7ssdvWzMjg4ODamlpUVFRUVB7UVGRmpqaJvQew8PD6uvrU3Jy8rh9qqur5Xa7R7e0tDQ7ZQIAgChiK4z09PRoaGhIKSkpQe0pKSnq6uqa0Hs89dRT+vjjj3X77beP26eiokJ+v3906+zstFMmAACIImE9tdfhcAS9tiwrpG0sL730kp544gn95Cc/0YIFC8btl5iYqMTExHBKAwAAUcZWGJk/f76cTmfILEh3d3fIbMnn1dXVacuWLXr55Zd18803268UAADEJFunaRISEpSbmyuv1xvU7vV6VVhYOO5+L730kjZv3qwXX3xRt912W3iVAgCAmGT7NE15ebk2btyovLw8FRQUaM+ePero6FBpaamki9d7nDt3TgcOHJB0MYhs2rRJTz/9tFatWjU6qzJ79my53e5J/CoAACAa2Q4jJSUl6u3t1Y4dO+Tz+ZSdna36+nqlp6dLknw+X9CaI88995wuXLig++67T/fdd99o+1133aX9+/d/8W8AAAAkSUPDlprbz6u7r18L5rqUn5EsZ9zlr+k0zfY6IyZM9D5lAABmqobTPlUdaZPP3z/a5nG7VFmcpXXZHiM1Tck6IwAAYPppOO3TtoMng4KIJHX5+7Xt4Ek1nPYZqmxiCCMAAESxoWFLVUfaNNZpjpG2qiNtGhqevidCCCMAAESx5vbzITMin2VJ8vn71dx+PnJF2UQYAQAginX3jR9EwulnAmEEAIAotmCua1L7mUAYAQAgiuVnJMvjdmm8G3gdunhXTX7G+A+oNY0wAgBAFHPGOVRZnCVJIYFk5HVlcda0Xm+EMAIAQJRbl+1R7Z05SnUHn4pJdbtUe2eOsXVGJiqsp/YCAIDpZV2
2R2uzUqNyBVbCCAAAMcIZ51DBknmmy7CN0zQAAMAowggAADCKMAIAAIwijAAAAKMIIwAAwCjCCAAAMIowAgAAjCKMAAAAowgjAADAqKhYgdWyLElSIBAwXAkAAJiokeP2yHF8PFERRvr6+iRJaWlphisBAAB29fX1ye12j/t3h3W5uDINDA8P64MPPtDcuXPlcIT3wJ9AIKC0tDR1dnYqKSlpkivEZzHWkcV4Rw5jHTmMdWRN1XhblqW+vj4tXLhQcXHjXxkSFTMjcXFxWrRo0aS8V1JSEv+wI4SxjizGO3IY68hhrCNrKsb7UjMiI7iAFQAAGEUYAQAARs2YMJKYmKjKykolJiaaLiXmMdaRxXhHDmMdOYx1ZJke76i4gBUAAMSuGTMzAgAApifCCAAAMIowAgAAjCKMAAAAowgjAADAqJgKIzU1NcrIyJDL5VJubq4aGxsv2f/o0aPKzc2Vy+XStddeq2effTZClUY/O2P96quvau3atbrqqquUlJSkgoICvfHGGxGsNrrZ/Xc94q233lJ8fLy+/OUvT22BMcbueA8MDGj79u1KT09XYmKilixZon379kWo2uhmd6wPHTqk66+/XldccYU8Ho/uvvtu9fb2Rqja6HXs2DEVFxdr4cKFcjgcev311y+7T8SPj1aM+PGPf2zNmjXLev755622tjbrwQcftObMmWP99re/HbP/mTNnrCuuuMJ68MEHrba2Nuv555+3Zs2aZb3yyisRrjz62B3rBx980Prud79rNTc3W++++65VUVFhzZo1yzp58mSEK48+dsd6xO9+9zvr2muvtYqKiqzrr78+MsXGgHDG++tf/7q1cuVKy+v1Wu3t7da///u/W2+99VYEq45Odse6sbHRiouLs55++mnrzJkzVmNjo7VixQprw4YNEa48+tTX11vbt2+3Dh8+bEmyXnvttUv2N3F8jJkwkp+fb5WWlga1LV++3Hr00UfH7P+d73zHWr58eVDbvffea61atWrKaowVdsd6LFlZWVZVVdVklxZzwh3rkpIS67HHHrMqKysJIzbYHe+f/exnltvttnp7eyNRXkyxO9bf//73rWuvvTao7ZlnnrEWLVo0ZTXGoomEERPHx5g4TTM4OKiWlhYVFRUFtRcVFampqWnMfd5+++2Q/rfccotOnDihTz/9dMpqjXbhjPXnDQ8Pq6+vT8nJyVNRYswId6xfeOEFvf/++6qsrJzqEmNKOOP905/+VHl5efre976nq6++Wtddd50efvhh/f73v49EyVErnLEuLCzU2bNnVV9fL8uy9OGHH+qVV17RbbfdFomSZxQTx8eoeGrv5fT09GhoaEgpKSlB7SkpKerq6hpzn66urjH7X7hwQT09PfJ4PFNWbzQLZ6w/76mnntLHH3+s22+/fSpKjBnhjPV7772nRx99VI2NjYqPj4n/vCMmnPE+c+aMjh8/LpfLpddee009PT361re+pfPnz3PdyCWEM9aFhYU6dOiQSkpK1N/frwsXLujrX/+6fvjDH0ai5BnFxPExJmZGRjgcjqDXlmWFtF2u/1jtCGV3rEe89NJLeuKJJ1RXV6cFCxZMVXkxZaJjPTQ0pDvuuENVVVW67rrrIlVezLHzb3t4eFgOh0OHDh1Sfn6+br31Vu3cuVP79+9ndmQC7Ix1W1ubHnjgAT3++ONqaWlRQ0OD2tvbVVpaGolSZ5xIHx9j4qfT/Pnz5XQ6QxJ1d3d3SLobkZqaOmb/+Ph4zZs3b8pqjXbhjPWIuro6bdmyRS+//LJuvvnmqSwzJtgd676+Pp04cUKtra26//77JV08WFqWpfj4eL355pu68cYbI1J7NArn37bH49HVV18tt9s92paZmSnLsnT27FktXbp0SmuOVuGMdXV1tVavXq1HHnlEkvSlL31Jc+bM0Zo1a/Tkk08ymz2JTBwfY2JmJCEhQbm5ufJ6vUHtXq9XhYWFY+5TUFAQ0v/NN99UXl6eZs2aNWW1Rrtwxlq
6OCOyefNmvfjii5zjnSC7Y52UlKRf/vKXOnXq1OhWWlqqZcuW6dSpU1q5cmWkSo9K4fzbXr16tT744AN99NFHo23vvvuu4uLitGjRoimtN5qFM9affPKJ4uKCD1lOp1PS//5qx+QwcnycsktjI2zkNrG9e/dabW1tVllZmTVnzhzrN7/5jWVZlvXoo49aGzduHO0/cuvSQw89ZLW1tVl79+7l1t4JsjvWL774ohUfH2/t3r3b8vl8o9vvfvc7U18hatgd68/jbhp77I53X1+ftWjRIusv/uIvrF/96lfW0aNHraVLl1pbt2419RWiht2xfuGFF6z4+HirpqbGev/9963jx49beXl5Vn5+vqmvEDX6+vqs1tZWq7W11ZJk7dy502ptbR29jXo6HB9jJoxYlmXt3r3bSk9PtxISEqycnBzr6NGjo3+76667rK997WtB/X/xi19YX/nKV6yEhARr8eLFVm1tbYQrjl52xvprX/uaJSlku+uuuyJfeBSy++/6swgj9tkd73feece6+eabrdmzZ1uLFi2yysvLrU8++STCVUcnu2P9zDPPWFlZWdbs2bMtj8dj/c3f/I119uzZCFcdfX7+859f8v/B0+H46LAs5rcAAIA5MXHNCAAAiF6EEQAAYBRhBAAAGEUYAQAARhFGAACAUYQRAABgFGEEAAAYRRgBAABGEUYAAIBRhBEAAGAUYQQAABj1/wC1Y7WUuFmB1QAAAABJRU5ErkJggg==", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAfuklEQVR4nO3db0zd5f3/8dc5B+HUDo6hFTgWxo5d63okaoDQQdeYOcuoBtMbSzGurbq6jKrTrtOkTReRxoToopk6Ibr5J6bYEY3uJwlDuaXUfjcmtIl4TDQtG/1zkADxcPxDG8+5fjcYpEdAOadwrh7O85Fwgw+fA2/yScOzn+uc6ziMMUYAAACWOG0PAAAA0hsxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKsybA8wH9FoVGfOnFF2drYcDoftcQAAwDwYYxQOh3XFFVfI6Zz7/kdKxMiZM2dUVFRkewwAAJCAkydPqrCwcM6vp0SMZGdnS5r8ZXJycixPAwAA5mN8fFxFRUXTf8fnkhIxMrU0k5OTQ4wAAJBivuspFjyBFQAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwKqU2PQMS0skatQzMKbh8ITyst2q8OXK5eQ9hwAgXREjSKrO/qAa2wMKhiamj3k9bjXU+lVT4rU4GQDAFpZpkDSd/UHtOtgXEyKSNBSa0K6DfersD1qaDABgEzGCpIhEjRrbAzKzfG3qWGN7QJHobGcAAJYyYgRJ0TMwNuOOyPmMpGBoQj0DY8kbCgBwUSBGkBTD4blDJJHzAABLBzGCpMjLdi/oeQCApYMYQVJU+HLl9bg11wt4HZp8VU2FLzeZYwEALgLECJLC5XSoodYvSTOCZOrzhlo/+40AQBoiRpA0NSVetWwrVYEndimmwONWy7ZS9hkBgDTFpmdIqpoSrzb5C9iBFQAwjRhB0rmcDlWuXmF7DADARYJlGgAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKsybA8AAEC6iESNegbGNByeUF62WxW+XLmcDttjWUeMAACQBJ39QTW2
BxQMTUwf83rcaqj1q6bEa3Ey+xJapmlubpbP55Pb7VZZWZm6u7u/9fzW1lZde+21uvTSS+X1enXnnXdqdHQ0oYEBAEg1nf1B7TrYFxMikjQUmtCug33q7A9amuziEHeMtLW1affu3dq/f7+OHj2qjRs3avPmzRocHJz1/MOHD2vHjh3auXOnPvzwQ7366qv697//rbvuuuuChwcA4GIXiRo1tgdkZvna1LHG9oAi0dnOSA9xx8gTTzyhnTt36q677tK6dev0pz/9SUVFRWppaZn1/H/+85/6wQ9+oPvuu08+n08/+clP9Jvf/Ebvv//+BQ8PAMDFrmdgbMYdkfMZScHQhHoGxpI31EUmrhg5d+6cent7VV1dHXO8urpaR44cmfUxVVVVOnXqlDo6OmSM0aeffqrXXntNN99885w/5+zZsxofH4/5AAAgFQ2H5w6RRM5biuKKkZGREUUiEeXn58ccz8/P19DQ0KyPqaqqUmtrq+rq6pSZmamCggJddtllevrpp+f8OU1NTfJ4PNMfRUVF8YwJAMBFIy/bvaDnLUUJPYHV4Yh9GZIxZsaxKYFAQPfdd58eeugh9fb2qrOzUwMDA6qvr5/z++/bt0+hUGj64+TJk4mMCQCAdRW+XHk9bs31Al6HJl9VU+HLTeZYF5W4Xtq7cuVKuVyuGXdBhoeHZ9wtmdLU1KQNGzbowQcflCRdc801Wr58uTZu3KhHHnlEXu/MlzNlZWUpKysrntEAALgouZwONdT6tetgnxxSzBNZpwKlodaf1vuNxHVnJDMzU2VlZerq6oo53tXVpaqqqlkf8+WXX8rpjP0xLpdL0uQdFQAAlrqaEq9atpWqwBO7FFPgcatlW2na7zMS96Zne/bs0fbt21VeXq7Kyko999xzGhwcnF522bdvn06fPq2XX35ZklRbW6tf//rXamlp0c9//nMFg0Ht3r1bFRUVuuKKKxb2twEA4CJVU+LVJn8BO7DOIu4Yqaur0+joqA4cOKBgMKiSkhJ1dHSouLhYkhQMBmP2HLnjjjsUDof15z//Wb///e912WWX6YYbbtCjjz66cL8FAAApwOV0qHL1CttjXHQcJgXWSsbHx+XxeBQKhZSTk2N7HAAAMA/z/fvNu/YCAACriBEAAGAVMQIAAKwiRgAAgFXECAAAsIoYAQAAVhEjAADAKmIEAABYRYwAAACriBEAAGAVMQIAAKwiRgAAgFXECAAAsIoYAQAAVhEjAADAKmIEAABYRYwAAACriBEAAGAVMQIAAKwiRgAAgFXECAAAsIoYAQAAVhEjAADAqgzbA9gSiRr1DIxpODyhvGy3Kny5cjkdtscCACDtpGWMdPYH1dgeUDA0MX3M63GrodavmhKvxckAAEg/abdM09kf1K6DfTEhIklDoQntOtinzv6gpckAAEhPaRUjkahRY3tAZpavTR1rbA8oEp3tDAAAJv+W/N/xUf2/Y6f1f8dH+ZuxANJqmaZnYGzGHZHzGUnB0IR6BsZUuXpF8gYDAKQElvkXR1rdGRkOzx0iiZwHAEgfLPMvnrSKkbxs94KeBwBIDyzzL660ipEKX668HrfmegGvQ5O32yp8uckcCwBwkYtnmR/xS6sYcTkdaqj1S9KMIJn6vKHWz34jAIAYLPMvrrSKEUmqKfGqZVupCjyxSzEFHrdatpXyBCQAwAws8y+utHo1zZSaEq82+QvYgRUAMC9Ty/xDoYlZnzfi0OR/alnmT0xaxog0uWTDy3cBAPMxtcy/62CfHFJMkLDMf+HSbpkGAIBEsMy/eNL2zggAAPFimX9xECMAAMSBZf6FxzINAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKt4117AskjU8HbkANIaMQJY1NkfVGN7QMHQxPQxr8ethlq/akq8FicDgORhmQawpLM/qF0H+2JCRJKGQhPadbBP
nf1BS5MBQHIRI4AFkahRY3tAZpavTR1rbA8oEp3tDABYWogRwIKegbEZd0TOZyQFQxPqGRhL3lAAYAkxAlgwHJ47RBI5DwBSGTECWJCX7V7Q8wAglREjgAUVvlx5PW7N9QJehyZfVVPhy03mWABgBTECWOByOtRQ65ekGUEy9XlDrZ/9RgCkBWIEsKSmxKuWbaUq8MQuxRR43GrZVso+IwDSBpueARbVlHi1yV/ADqwA0hoxAljmcjpUuXqF7TEAwBqWaQAAgFXECAAAsIoYAQAAVhEjAADAKmIEAABYRYwAAACriBEAAGAVMQIAAKxKKEaam5vl8/nkdrtVVlam7u7ubz3/7Nmz2r9/v4qLi5WVlaXVq1frhRdeSGhgAACwtMS9A2tbW5t2796t5uZmbdiwQc8++6w2b96sQCCg73//+7M+ZuvWrfr000/1/PPP64c//KGGh4f19ddfX/DwAAAg9TmMMSaeB6xfv16lpaVqaWmZPrZu3Tpt2bJFTU1NM87v7OzUrbfeqhMnTig3N7G3Qx8fH5fH41EoFFJOTk5C3wMAACTXfP9+x7VMc+7cOfX29qq6ujrmeHV1tY4cOTLrY958802Vl5frscce06pVq7R27Vo98MAD+uqrr+b8OWfPntX4+HjMBwAAWJriWqYZGRlRJBJRfn5+zPH8/HwNDQ3N+pgTJ07o8OHDcrvdeuONNzQyMqK7775bY2Njcz5vpKmpSY2NjfGMBgAAUlRCT2B1OGLf3twYM+PYlGg0KofDodbWVlVUVOimm27SE088oZdeemnOuyP79u1TKBSa/jh58mQiYwIAgBQQ152RlStXyuVyzbgLMjw8PONuyRSv16tVq1bJ4/FMH1u3bp2MMTp16pTWrFkz4zFZWVnKysqKZzQAAJCi4rozkpmZqbKyMnV1dcUc7+rqUlVV1ayP2bBhg86cOaPPP/98+tjHH38sp9OpwsLCBEYGAABLSdzLNHv27NFf//pXvfDCC/roo4/0u9/9ToODg6qvr5c0ucSyY8eO6fNvu+02rVixQnfeeacCgYDeffddPfjgg/rVr36lZcuWLdxvAgAAUlLc+4zU1dVpdHRUBw4cUDAYVElJiTo6OlRcXCxJCgaDGhwcnD7/e9/7nrq6uvTb3/5W5eXlWrFihbZu3apHHnlk4X4LAACQsuLeZ8QG9hkBACD1LMo+IwAAAAuNGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWJVhewAsDZGoUc/AmIbDE8rLdqvClyuX02F7LABACiBGcME6+4NqbA8oGJqYPub1uNVQ61dNidfiZACAVMAyDS5IZ39Quw72xYSIJA2FJrTrYJ86+4OWJgMApApiBAmLRI0a2wMys3xt6lhje0CR6GxnAAAwiRhBwnoGxmbcETmfkRQMTahnYCx5QwEAUg4xgoQNh+cOkUTOAwCkJ2IECcvLdi/oeQCA9ESMIGEVvlx5PW7N9QJehyZfVVPhy03mWACAFEOMIGEup0MNtX5JmhEkU5831PrZbwQA8K2IEVyQmhKvWraVqsATuxRT4HGrZVsp+4wAAL4Tm57hgtWUeLXJX8AOrACAhBAjWBAup0OVq1fYHgMAkIJYpgEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABg
FTECAACsIkYAAIBVCcVIc3OzfD6f3G63ysrK1N3dPa/Hvffee8rIyNB1112XyI8FAABLUNwx0tbWpt27d2v//v06evSoNm7cqM2bN2twcPBbHxcKhbRjxw797Gc/S3hYAACw9DiMMSaeB6xfv16lpaVqaWmZPrZu3Tpt2bJFTU1Ncz7u1ltv1Zo1a+RyufT3v/9dx44dm/fPHB8fl8fjUSgUUk5OTjzjAgAAS+b79zuuOyPnzp1Tb2+vqqurY45XV1fryJEjcz7uxRdf1PHjx9XQ0DCvn3P27FmNj4/HfAAAgKUprhgZGRlRJBJRfn5+zPH8/HwNDQ3N+phPPvlEe/fuVWtrqzIyMub1c5qamuTxeKY/ioqK4hkTAACkkISewOpwOGI+N8bMOCZJkUhEt912mxobG7V27dp5f/99+/YpFApNf5w8eTKRMQEAQAqY362K/1m5cqVcLteMuyDDw8Mz7pZIUjgc1vvvv6+jR4/q3nvvlSRFo1EZY5SRkaG3335bN9xww4zHZWVlKSsrK57RAABAiorrzkhmZqbKysrU1dUVc7yrq0tVVVUzzs/JydEHH3ygY8eOTX/U19frqquu0rFjx7R+/foLmx4AAKS8uO6MSNKePXu0fft2lZeXq7KyUs8995wGBwdVX18vaXKJ5fTp03r55ZfldDpVUlIS8/i8vDy53e4ZxwEAQHqKO0bq6uo0OjqqAwcOKBgMqqSkRB0dHSouLpYkBYPB79xzBAAAYErc+4zYwD4jAACknkXZZwQAAGChESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwKsP2AAAApLNI1KhnYEzD4QnlZbtV4cuVy+mwPVZSESMAAFjS2R9UY3tAwdDE9DGvx62GWr9qSrwWJ0sulmkAALCgsz+oXQf7YkJEkoZCE9p1sE+d/UFLkyUfMQIAQJJFokaN7QGZWb42dayxPaBIdLYzlh5iBACAJOsZGJtxR+R8RlIwNKGegbHkDWURMQIAQJINh+cOkUTOS3XECAAASZaX7V7Q81IdMQIAQJJV+HLl9bg11wt4HZp8VU2FLzeZY1lDjAAAkGQup0MNtX5JmhEkU5831PrTZr8RYgQAAAtqSrxq2VaqAk/sUkyBx62WbaVptc8Im54BAGBJTYlXm/wF7MBqewAAANKZy+lQ5eoVtsewimUaAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq9j0DACANBWJmoti91diBACANNTZH1Rje0DB0MT0Ma/HrYZaf9LfF4dlGgAA0kxnf1C7DvbFhIgkDYUmtOtgnzr7g0mdhxgBACCNRKJGje0BmVm+NnWssT2gSHS2MxYHMQIAQBrpGRibcUfkfEZSMDShnoGxpM1EjAAAkEaGw3OHSCLnLQRiBACANJKX7V7Q8xYCMQIAQBqp8OXK63FrrhfwOjT5qpoKX27SZiJGAABIIy6nQw21fkmaESRTnzfU+pO63wgxAgBAmqkp8aplW6kKPLFLMQUet1q2lSZ9nxE2PQMAIA3VlHi1yV/ADqwAAMAel9OhytUrbI/BMg0AALCLGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsSihGmpub5fP55Ha7VVZWpu7u7jnPff3117Vp0yZdfvnlysnJUWVlpd56662EBwYAAEtL3DHS1tam3bt3a//+/Tp69Kg2btyozZs3a3BwcNbz3333XW3atEkdHR3q7e3VT3/6U9XW1uro0aMXPDwAAEh9DmOMiecB69evV2lpqVpa
WqaPrVu3Tlu2bFFTU9O8vsfVV1+turo6PfTQQ/M6f3x8XB6PR6FQSDk5OfGMCwAALJnv3++47oycO3dOvb29qq6ujjleXV2tI0eOzOt7RKNRhcNh5ebO/dbEZ8+e1fj4eMwHAABYmuKKkZGREUUiEeXn58ccz8/P19DQ0Ly+x+OPP64vvvhCW7dunfOcpqYmeTye6Y+ioqJ4xgQAACkkoSewOhyx7+hnjJlxbDaHDh3Sww8/rLa2NuXl5c153r59+xQKhaY/Tp48mciYAAAgBcT1rr0rV66Uy+WacRdkeHh4xt2Sb2pra9POnTv16quv6sYbb/zWc7OyspSVlRXPaAAAIEXFdWckMzNTZWVl6urqijne1dWlqqqqOR936NAh3XHHHXrllVd08803JzYpAABYkuK6MyJJe/bs0fbt21VeXq7Kyko999xzGhwcVH19vaTJJZbTp0/r5ZdfljQZIjt27NCTTz6pH//4x9N3VZYtWyaPx7OAvwoAAEhFccdIXV2dRkdHdeDAAQWDQZWUlKijo0PFxcWSpGAwGLPnyLPPPquvv/5a99xzj+65557p47fffrteeumlC/8NAABASot7nxEb2GcEAIDUsyj7jAAAACw0YgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWxb0DK4CFE4ka9QyMaTg8obxstyp8uXI5v/sdsAFgKSFGAEs6+4NqbA8oGJqYPub1uNVQ61dNidfiZACQXCzTABZ09ge162BfTIhI0lBoQrsO9qmzP2hpMgBIPmIESLJI1KixPaDZ3hRq6lhje0CR6EX/tlEAsCCIESDJegbGZtwROZ+RFAxNqGdgLHlDAYBFxAiQZMPhuUMkkfMAINURI0CS5WW7F/Q8AEh1xAiQZBW+XHk9bs31Al6HJl9VU+HLTeZYAGANMQIkmcvpUEOtX5JmBMnU5w21fvYbAZA2iBHAgpoSr1q2larAE7sUU+Bxq2VbKfuMAEgrbHoGWFJT4tUmfwE7sAJIe8QIYJHL6VDl6hW2xwAAq1imAQAAVhEjAADAKmIEAABYRYwAAACriBEAAGAVMQIAAKwiRgAAgFXECAAAsIoYAQAAVhEjAADAKmIEAABYRYwAAACriBEAAGAVMQIAAKwiRgAAgFXECAAAsIoYAQAAVhEjAADAKmIEAABYRYwAAACriBEAAGBVhu0BgFQUiRr1DIxpODyhvGy3Kny5cjkdtscCgJREjABx6uwPqrE9oGBoYvqY1+NWQ61fNSVei5MBQGpimQaIQ2d/ULsO9sWEiCQNhSa062CfOvuDliYDgNRFjADzFIkaNbYHZGb52tSxxvaAItHZzgAAzIUYAeapZ2Bsxh2R8xlJwdCEegbGkjcUACwBxAgwT8PhuUMkkfMAAJOIEWCe8rLdC3oeAGASMQLMU4UvV16PW3O9gNehyVfVVPhykzkWAKQ8YgSYJ5fToYZavyTNCJKpzxtq/ew3AgBxIkaAONSUeNWyrVQFntilmAKPWy3bStlnBAASwKZnQJxqSrza5C9gB1YAWCDECJAAl9OhytUrbI8BAEsCyzQAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMCqlNiB1RgjSRofH7c8CQAAmK+pv9tTf8fnkhIxEg6HJUlFRUWWJwEAAPEKh8PyeDxzft1hvitXLgLRaFRnzpxRdna2HA7ejMyG8fFxFRUV6eTJk8rJybE9Dr4F1yp1cK1SB9cqMcYYhcNhXXHFFXI6535mSErcGXE6nSosLLQ9BiTl5OTwDzFFcK1SB9cqdXCt4vdtd0Sm8ARWAABgFTECAACsIkYwL1lZWWpoaFBWVpbtUfAduFapg2uVOrhWiyslnsAKAACWLu6MAAAAq4gRAABgFTECAACsIkYAAIBVxAimNTc3y+fzye12q6ysTN3d3XOe+/rrr2vTpk26/PLLlZOTo8rKSr31
1ltJnDa9xXOtzvfee+8pIyND11133eIOiGnxXquzZ89q//79Ki4uVlZWllavXq0XXnghSdOmt3ivVWtrq6699lpdeuml8nq9uvPOOzU6OpqkaZcYAxhj/va3v5lLLrnE/OUvfzGBQMDcf//9Zvny5ea///3vrOfff//95tFHHzU9PT3m448/Nvv27TOXXHKJ6evrS/Lk6SfeazXls88+M1deeaWprq421157bXKGTXOJXKtbbrnFrF+/3nR1dZmBgQHzr3/9y7z33ntJnDo9xXuturu7jdPpNE8++aQ5ceKE6e7uNldffbXZsmVLkidfGogRGGOMqaioMPX19THHfvSjH5m9e/fO+3v4/X7T2Ni40KPhGxK9VnV1deYPf/iDaWhoIEaSJN5r9Y9//MN4PB4zOjqajPFwnniv1R//+Edz5ZVXxhx76qmnTGFh4aLNuJSxTAOdO3dOvb29qq6ujjleXV2tI0eOzOt7RKNRhcNh5ebmLsaI+J9Er9WLL76o48ePq6GhYbFHxP8kcq3efPNNlZeX67HHHtOqVau0du1aPfDAA/rqq6+SMXLaSuRaVVVV6dSpU+ro6JAxRp9++qlee+013XzzzckYeclJiTfKw+IaGRlRJBJRfn5+zPH8/HwNDQ3N63s8/vjj+uKLL7R169bFGBH/k8i1+uSTT7R37151d3crI4N/8smSyLU6ceKEDh8+LLfbrTfeeEMjIyO6++67NTY2xvNGFlEi16qqqkqtra2qq6vTxMSEvv76a91yyy16+umnkzHyksOdEUxzOBwxnxtjZhybzaFDh/Twww+rra1NeXl5izUezjPfaxWJRHTbbbepsbFRa9euTdZ4OE88/66i0agcDodaW1tVUVGhm266SU888YReeukl7o4kQTzXKhAI6L777tNDDz2k3t5edXZ2amBgQPX19ckYdcnhv0nQypUr5XK5ZvwPYHh4eMb/FL6pra1NO3fu1Kuvvqobb7xxMceE4r9W4XBY77//vo4ePap7771X0uQfPGOMMjIy9Pbbb+uGG25IyuzpJpF/V16vV6tWrYp5y/V169bJGKNTp05pzZo1izpzukrkWjU1NWnDhg168MEHJUnXXHONli9fro0bN+qRRx6R1+td9LmXEu6MQJmZmSorK1NXV1fM8a6uLlVVVc35uEOHDumOO+7QK6+8wjppksR7rXJycvTBBx/o2LFj0x/19fW66qqrdOzYMa1fvz5Zo6edRP5dbdiwQWfOnNHnn38+fezjjz+W0+lUYWHhos6bzhK5Vl9++aWcztg/oS6XS9LkHRXEyd5zZ3ExmXpZ2/PPP28CgYDZvXu3Wb58ufnPf/5jjDFm7969Zvv27dPnv/LKKyYjI8M888wzJhgMTn989tlntn6FtBHvtfomXk2TPPFeq3A4bAoLC80vfvEL8+GHH5p33nnHrFmzxtx11122foW0Ee+1evHFF01GRoZpbm42x48fN4cPHzbl5eWmoqLC1q+Q0ogRTHvmmWdMcXGxyczMNKWlpeadd96Z/trtt99urr/++unPr7/+eiNpxsftt9+e/MHTUDzX6puIkeSK91p99NFH5sYbbzTLli0zhYWFZs+ePebLL79M8tTpKd5r9dRTTxm/32+WLVtmvF6v+eUvf2lOnTqV5KmXBocx3E8CAAD28JwRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALDq/wM44josBsaCygAAAABJRU5ErkJggg==", "text/plain": [ "
" ] @@ -1583,7 +1581,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 41, @@ -1620,7 +1618,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "65f5f31b56ce41c6bb1e992beaf73b8f", + "model_id": "378652557fef4e3e945db1f49236d114", "version_major": 2, "version_minor": 0 }, @@ -1630,24 +1628,30 @@ "output_type": "display_data" }, { - "name": "stderr", + "name": "stdout", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_atomistics/pyiron_atomistics/lammps/base.py:294: UserWarning: No potential set via job.potential - use default potential, 1995--Angelo-J-E--Ni-Al-H--LAMMPS--ipr1\n", - " warnings.warn(\n" + "The job JUSTAJOBNAME was saved and received the ID: 9568\n" ] }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "The job JUSTAJOBNAME was saved and received the ID: 9563\n" - ] + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "09d110c4b0224a4586f39121ab594118", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 42, @@ -1907,7 +1911,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 43, @@ -2157,357 +2161,357 @@ "clusterphase_preference\n", "\n", "phase_preference: Workflow\n", - "\n", - "clusterphase_preferencecompare\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "compare: Subtract\n", - "\n", - "\n", - "clusterphase_preferencecompareInputs\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "Inputs\n", - "\n", - "\n", - "clusterphase_preferencecompareOutputs\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "Outputs\n", - "\n", "\n", "clusterphase_preferenceInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferenceOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", 
"Outputs\n", "\n", "\n", "clusterphase_preferenceelement\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "element: UserInput\n", "\n", "\n", "clusterphase_preferenceelementInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferenceelementOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencemin_phase1\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "min_phase1: LammpsMinimize\n", "\n", "\n", "clusterphase_preferencemin_phase1Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencemin_phase1Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencemin_phase2\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "min_phase2: LammpsMinimize\n", "\n", "\n", "clusterphase_preferencemin_phase2Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencemin_phase2Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencee1\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "e1: GetItem\n", "\n", "\n", "clusterphase_preferencee1Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencee1Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencen1\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "n1: Length\n", "\n", "\n", "clusterphase_preferencen1Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencen1Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", 
"clusterphase_preferencee2\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "e2: GetItem\n", "\n", "\n", "clusterphase_preferencee2Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencee2Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencen2\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "n2: Length\n", "\n", "\n", "clusterphase_preferencen2Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencen2Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencee2__getitem_Divide_n2__len\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "e2__getitem_Divide_n2__len: Divide\n", "\n", "\n", "clusterphase_preferencee2__getitem_Divide_n2__lenInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clusterphase_preferencee2__getitem_Divide_n2__lenOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clusterphase_preferencee1__getitem_Divide_n1__len\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "e1__getitem_Divide_n1__len: Divide\n", "\n", + "\n", + "clusterphase_preferencee1__getitem_Divide_n1__lenInputs\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "Inputs\n", + "\n", "\n", "clusterphase_preferencee1__getitem_Divide_n1__lenOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", - "\n", - "clusterphase_preferencee1__getitem_Divide_n1__lenInputs\n", + "\n", + "clusterphase_preferencecompare\n", "\n", - "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "compare: Subtract\n", + "\n", + "\n", + "clusterphase_preferencecompareInputs\n", + "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", - "Inputs\n", + "\n", 
+ "Inputs\n", + "\n", + "\n", + "clusterphase_preferencecompareOutputs\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "Outputs\n", "\n", "\n", "\n", @@ -3096,7 +3100,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 49, @@ -3115,33 +3119,51 @@ "metadata": {}, "outputs": [ { - "name": "stderr", + "name": "stdout", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_atomistics/pyiron_atomistics/lammps/base.py:294: UserWarning: No potential set via job.potential - use default potential, 1995--Angelo-J-E--Ni-Al-H--LAMMPS--ipr1\n", - " warnings.warn(\n" + "The job JUSTAJOBNAME was saved and received the ID: 9568\n" ] }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "a11221f6a85c4ebd8e05052f719e0236", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, { "name": "stdout", "output_type": "stream", "text": [ - "The job JUSTAJOBNAME was saved and received the ID: 9563\n" + "The job JUSTAJOBNAME was saved and received the ID: 9568\n" ] }, { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/huber/work/pyiron/pyiron_atomistics/pyiron_atomistics/lammps/base.py:294: UserWarning: No potential set via job.potential - use default potential, 1995--Angelo-J-E--Ni-Al-H--LAMMPS--ipr1\n", - " warnings.warn(\n" - ] + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "82e56419b1594fecbf9c19a2745709c6", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" }, { "name": "stdout", "output_type": "stream", "text": [ - "The job JUSTAJOBNAME was saved and received the ID: 9563\n", "Al: E(hcp) - E(fcc) = 1.17 eV/atom\n" ] } @@ -3162,31 +3184,55 @@ "output_type": "stream", "text": [ "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel ran was 
not connected to accumulate_and_run, andthus could not disconnect from it.\n", - " warn(\n", - "/Users/huber/work/pyiron/pyiron_atomistics/pyiron_atomistics/lammps/base.py:294: UserWarning: No potential set via job.potential - use default potential, 1997--Liu-X-Y--Al-Mg--LAMMPS--ipr1\n", - " warnings.warn(\n" + " warn(\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "The job JUSTAJOBNAME was saved and received the ID: 9563\n" + "The job JUSTAJOBNAME was saved and received the ID: 9568\n" ] }, { - "name": "stderr", + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "1000698e2eb147f28c59cd4e2382348c", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_atomistics/pyiron_atomistics/lammps/base.py:294: UserWarning: No potential set via job.potential - use default potential, 1997--Liu-X-Y--Al-Mg--LAMMPS--ipr1\n", - " warnings.warn(\n" + "The job JUSTAJOBNAME was saved and received the ID: 9568\n" ] }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "c6f6b36355da488d801dfeb7b25b8260", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, { "name": "stdout", "output_type": "stream", "text": [ - "The job JUSTAJOBNAME was saved and received the ID: 9563\n", "Mg: E(hcp) - E(fcc) = -4.54 eV/atom\n" ] } @@ -3257,31 +3303,55 @@ "output_type": "stream", "text": [ "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel ran was not connected to accumulate_and_run, andthus could not disconnect from it.\n", - " warn(\n", - "/Users/huber/work/pyiron/pyiron_atomistics/pyiron_atomistics/lammps/base.py:294: UserWarning: No potential set via job.potential - use default potential, 
1995--Angelo-J-E--Ni-Al-H--LAMMPS--ipr1\n", - " warnings.warn(\n" + " warn(\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "The job JUSTAJOBNAME was saved and received the ID: 9563\n" + "The job JUSTAJOBNAME was saved and received the ID: 9568\n" ] }, { - "name": "stderr", + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "b160a5c60936418988d5cc94c3e63640", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_atomistics/pyiron_atomistics/lammps/base.py:294: UserWarning: No potential set via job.potential - use default potential, 1995--Angelo-J-E--Ni-Al-H--LAMMPS--ipr1\n", - " warnings.warn(\n" + "The job JUSTAJOBNAME was saved and received the ID: 9568\n" ] }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "e72528ca94c84f0f815a3a428bd1395b", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, { "name": "stdout", "output_type": "stream", "text": [ - "The job JUSTAJOBNAME was saved and received the ID: 9563\n", "Al: E(hcp) - E(fcc) = -5.57 eV/atom\n" ] } @@ -3303,31 +3373,55 @@ "output_type": "stream", "text": [ "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel ran was not connected to accumulate_and_run, andthus could not disconnect from it.\n", - " warn(\n", - "/Users/huber/work/pyiron/pyiron_atomistics/pyiron_atomistics/lammps/base.py:294: UserWarning: No potential set via job.potential - use default potential, 1995--Angelo-J-E--Ni-Al-H--LAMMPS--ipr1\n", - " warnings.warn(\n" + " warn(\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "The job JUSTAJOBNAME was saved and received the ID: 9563\n" + "The job JUSTAJOBNAME was saved and 
received the ID: 9568\n" ] }, { - "name": "stderr", + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "87f030b7c77f495ca364f1123d2cd1c7", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_atomistics/pyiron_atomistics/lammps/base.py:294: UserWarning: No potential set via job.potential - use default potential, 1995--Angelo-J-E--Ni-Al-H--LAMMPS--ipr1\n", - " warnings.warn(\n" + "The job JUSTAJOBNAME was saved and received the ID: 9568\n" ] }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "9f8f038b79a4437aa12965f2b9364e3f", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, { "name": "stdout", "output_type": "stream", "text": [ - "The job JUSTAJOBNAME was saved and received the ID: 9563\n", "Al: E(hcp) - E(fcc) = 0.03 eV/atom\n" ] } @@ -3379,7 +3473,7 @@ "output_type": "stream", "text": [ "None 1\n", - " NOT_DATA\n" + " NOT_DATA\n" ] } ], @@ -3461,7 +3555,7 @@ "output_type": "stream", "text": [ "None 1\n", - " NOT_DATA\n", + " NOT_DATA\n", "Finally 5\n", "b (Add) output single-value: 6\n" ] @@ -3523,7 +3617,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "6.006511921004858\n" + "6.011244249995798\n" ] } ], @@ -3555,7 +3649,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "2.4266665390023263\n" + "2.496095469017746\n" ] } ], @@ -3681,7 +3775,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:356: UserWarning: A saved file was found for the node save_demo -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:370: 
UserWarning: A saved file was found for the node save_demo -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", " warnings.warn(\n" ] }, @@ -3886,8 +3980,12 @@ "name": "stdout", "output_type": "stream", "text": [ - "0.118 <= 0.2\n", - "Finally 0.118\n" + "0.325 > 0.2\n", + "0.481 > 0.2\n", + "0.635 > 0.2\n", + "0.286 > 0.2\n", + "0.153 <= 0.2\n", + "Finally 0.153\n" ] } ], diff --git a/notebooks/quickstart.ipynb b/notebooks/quickstart.ipynb index 2944dcb7..79f1d2fe 100644 --- a/notebooks/quickstart.ipynb +++ b/notebooks/quickstart.ipynb @@ -123,7 +123,7 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:166: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/channels.py:168: UserWarning: The channel run was not connected to ran, andthus could not disconnect from it.\n", " warn(\n" ] }, @@ -206,7 +206,7 @@ "\n", "\n", - "\n", "\n", "\n", "\n", "clustermy_workflowInputsarange__n->clustermy_workflowarangeInputsn\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -488,9 +488,9 @@ "\n", "\n", "clustermy_workflowInputsarange__len_Subtract_1__other->clustermy_workflowarange__len_Subtract_1Inputsother\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -507,9 +507,9 @@ "\n", "\n", "clustermy_workflowInputsarange__arange_Slice_None_arange__len_Subtract_1__sub_None__start->clustermy_workflowarange__arange_Slice_None_arange__len_Subtract_1__sub_NoneInputsstart\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -527,8 +527,8 @@ "\n", "clustermy_workflowInputsarange__arange_Slice_None_arange__len_Subtract_1__sub_None__step->clustermy_workflowarange__arange_Slice_None_arange__len_Subtract_1__sub_NoneInputsstep\n", "\n", - "\n", - "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -545,9 +545,9 @@ "\n", 
"\n", "clustermy_workflowInputsarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2__other->clustermy_workflowarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2Inputsother\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -589,9 +589,9 @@ "\n", "\n", "clustermy_workflowarangeOutputsarange->clustermy_workflowarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceInputsobj\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -608,9 +608,9 @@ "\n", "\n", "clustermy_workflowarangeOutputslen->clustermy_workflowarange__len_Subtract_1Inputsobj\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -646,9 +646,9 @@ "\n", "\n", "clustermy_workflowarange__len_Subtract_1Outputssub->clustermy_workflowarange__arange_Slice_None_arange__len_Subtract_1__sub_NoneInputsstop\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -684,9 +684,9 @@ "\n", "\n", "clustermy_workflowarange__arange_Slice_None_arange__len_Subtract_1__sub_NoneOutputsslice->clustermy_workflowarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceInputsitem\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -722,9 +722,9 @@ "\n", "\n", "clustermy_workflowarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceOutputsgetitem->clustermy_workflowarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2Inputsobj\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -735,9 +735,9 @@ "\n", "\n", "clustermy_workflowarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceOutputsgetitem->clustermy_workflowplotInputsx\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -773,9 +773,9 @@ "\n", "\n", 
"clustermy_workflowarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2Outputspow->clustermy_workflowplotInputsy\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -805,15 +805,15 @@ "\n", "\n", "clustermy_workflowplotOutputsfig->clustermy_workflowOutputsplot__fig\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 7, @@ -864,14 +864,14 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:621: UserWarning: The keyword 'arrays__x' was not found among input labels. If you are trying to update a node keyword, please use attribute assignment directly instead of calling\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:765: UserWarning: The keyword 'arrays__x' was not found among input labels. If you are trying to update a node keyword, please use attribute assignment directly instead of calling\n", " warnings.warn(\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 8, @@ -998,8 +998,8 @@ "data": { "text/plain": [ "{'square_plot__n': 10,\n", - " 'square_plot__fig': ,\n", - " 'plus_one_square_plot__fig': }" + " 'square_plot__fig': ,\n", + " 'plus_one_square_plot__fig': }" ] }, "execution_count": 12, @@ -1049,7 +1049,7 @@ "\n", "\n", - "\n", "\n", "clustersquare_plot\n", "\n", "square_plot: MySquarePlot\n", + "\n", + "clustersquare_plotplot\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "plot: Scatter\n", + "\n", + "\n", + "clustersquare_plotplotInputs\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "Inputs\n", + "\n", + "\n", + "clustersquare_plotplotOutputs\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "Outputs\n", + "\n", + "\n", + "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2\n", + "\n", + 
"\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "arange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2: Power\n", + "\n", + "\n", + "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2Outputs\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "Outputs\n", + "\n", + "\n", + "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2Inputs\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "Inputs\n", + "\n", "\n", "clustersquare_plotInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clustersquare_plotOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clustersquare_plotn\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "n: UserInput\n", "\n", "\n", "clustersquare_plotnInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clustersquare_plotnOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clustersquare_plotarange\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "arange: Arange\n", "\n", "\n", "clustersquare_plotarangeInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clustersquare_plotarangeOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clustersquare_plotarange__len_Subtract_1\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "arange__len_Subtract_1: Subtract\n", "\n", "\n", "clustersquare_plotarange__len_Subtract_1Inputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clustersquare_plotarange__len_Subtract_1Outputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", 
"Outputs\n", "\n", "\n", "clustersquare_plotarange__arange_Slice_None_arange__len_Subtract_1__sub_None\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "arange__arange_Slice_None_arange__len_Subtract_1__sub_None: Slice\n", "\n", "\n", "clustersquare_plotarange__arange_Slice_None_arange__len_Subtract_1__sub_NoneInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clustersquare_plotarange__arange_Slice_None_arange__len_Subtract_1__sub_NoneOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", "\n", "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "arange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice: GetItem\n", "\n", "\n", "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceInputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Inputs\n", "\n", "\n", "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceOutputs\n", "\n", - "\n", + "\n", "\n", "\n", "\n", "\n", - "\n", + "\n", "Outputs\n", "\n", - "\n", - "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "arange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2: Power\n", - "\n", - "\n", - "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2Inputs\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "Inputs\n", - "\n", - "\n", - "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2Outputs\n", - "\n", - 
"\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "Outputs\n", - "\n", - "\n", - "clustersquare_plotplot\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "plot: Scatter\n", - "\n", - "\n", - "clustersquare_plotplotInputs\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "Inputs\n", - "\n", - "\n", - "clustersquare_plotplotOutputs\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "Outputs\n", - "\n", "\n", "\n", "clustersquare_plotInputsrun\n", @@ -1345,9 +1345,9 @@ "\n", "\n", "clustersquare_plotInputsn->clustersquare_plotnInputsuser_input\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1395,9 +1395,9 @@ "\n", "\n", "clustersquare_plotnOutputsran->clustersquare_plotarangeInputsaccumulate_and_run\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1414,9 +1414,9 @@ "\n", "\n", "clustersquare_plotnOutputsuser_input->clustersquare_plotarangeInputsn\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1440,9 +1440,9 @@ "\n", "\n", "clustersquare_plotarangeOutputsran->clustersquare_plotarange__len_Subtract_1Inputsaccumulate_and_run\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1453,9 +1453,9 @@ "\n", "\n", "clustersquare_plotarangeOutputsran->clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceInputsaccumulate_and_run\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1466,9 +1466,9 @@ "\n", "\n", "clustersquare_plotarangeOutputsarange->clustersquare_plotOutputsx\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1479,9 +1479,9 @@ "\n", "\n", "clustersquare_plotarangeOutputsarange->clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceInputsobj\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1492,9 +1492,9 @@ "\n", "\n", 
"clustersquare_plotarangeOutputslen->clustersquare_plotOutputsn\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1505,9 +1505,9 @@ "\n", "\n", "clustersquare_plotarangeOutputslen->clustersquare_plotarange__len_Subtract_1Inputsobj\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1537,9 +1537,9 @@ "\n", "\n", "clustersquare_plotarange__len_Subtract_1Outputsran->clustersquare_plotarange__arange_Slice_None_arange__len_Subtract_1__sub_NoneInputsaccumulate_and_run\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1556,9 +1556,9 @@ "\n", "\n", "clustersquare_plotarange__len_Subtract_1Outputssub->clustersquare_plotarange__arange_Slice_None_arange__len_Subtract_1__sub_NoneInputsstop\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1588,9 +1588,9 @@ "\n", "\n", "clustersquare_plotarange__arange_Slice_None_arange__len_Subtract_1__sub_NoneOutputsran->clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceInputsaccumulate_and_run\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1607,9 +1607,9 @@ "\n", "\n", "clustersquare_plotarange__arange_Slice_None_arange__len_Subtract_1__sub_NoneOutputsslice->clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceInputsitem\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1633,9 +1633,9 @@ "\n", "\n", "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceOutputsran->clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2Inputsaccumulate_and_run\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1648,7 +1648,7 @@ 
"clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceOutputsran->clustersquare_plotplotInputsaccumulate_and_run\n", "\n", "\n", - "\n", + "\n", "\n", "\n", "\n", @@ -1665,9 +1665,9 @@ "\n", "\n", "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceOutputsgetitem->clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2Inputsobj\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1678,9 +1678,9 @@ "\n", "\n", "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__sliceOutputsgetitem->clustersquare_plotplotInputsx\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1704,9 +1704,9 @@ "\n", "\n", "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2Outputsran->clustersquare_plotplotInputsaccumulate_and_run\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1723,9 +1723,9 @@ "\n", "\n", "clustersquare_plotarange__arange_GetItem_arange__arange_Slice_None_arange__len_Subtract_1__sub_None__slice__getitem_Power_2Outputspow->clustersquare_plotplotInputsy\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n", @@ -1749,15 +1749,15 @@ "\n", "\n", "clustersquare_plotplotOutputsfig->clustersquare_plotOutputsfig\n", - "\n", - "\n", - "\n", + "\n", + "\n", + "\n", "\n", "\n", "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 13, @@ -1821,7 +1821,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.4" + "version": "3.11.7" } }, "nbformat": 4, From 2871a7b73ef019abe2e29f75cdde0abb48a0376a Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Wed, 14 Feb 2024 23:02:27 +0000 Subject: [PATCH 149/166] [dependabot skip] Update env file --- .binder/environment.yml | 
2 +- docs/environment.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.binder/environment.yml b/.binder/environment.yml index 1f04e8bb..ef2a8443 100644 --- a/.binder/environment.yml +++ b/.binder/environment.yml @@ -7,7 +7,7 @@ dependencies: - cloudpickle =3.0.0 - graphviz =8.1.0 - h5io =0.2.2 -- h5io_browser =0.0.6 +- h5io_browser =0.0.8 - matplotlib =3.8.2 - pyiron_contrib =0.1.14 - pympipool =0.7.13 diff --git a/docs/environment.yml b/docs/environment.yml index ec1ef77f..e89c3c60 100644 --- a/docs/environment.yml +++ b/docs/environment.yml @@ -12,7 +12,7 @@ dependencies: - cloudpickle =3.0.0 - graphviz =8.1.0 - h5io =0.2.2 -- h5io_browser =0.0.6 +- h5io_browser =0.0.8 - matplotlib =3.8.2 - pyiron_contrib =0.1.14 - pympipool =0.7.13 From e3c609dd5ba7e4add0e1c66dbe6baa34221a33e8 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Thu, 15 Feb 2024 06:00:26 -0800 Subject: [PATCH 150/166] Bumpy h5io_browser --- .ci_support/environment.yml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.ci_support/environment.yml b/.ci_support/environment.yml index 955addd5..7585b553 100644 --- a/.ci_support/environment.yml +++ b/.ci_support/environment.yml @@ -7,7 +7,7 @@ dependencies: - cloudpickle =3.0.0 - graphviz =8.1.0 - h5io =0.2.2 -- h5io_browser =0.0.8 +- h5io_browser =0.0.9 - matplotlib =3.8.2 - pyiron_contrib =0.1.14 - pympipool =0.7.13 diff --git a/setup.py b/setup.py index 6c44aaa5..e8ffe05a 100644 --- a/setup.py +++ b/setup.py @@ -32,7 +32,7 @@ 'cloudpickle==3.0.0', 'graphviz==0.20.1', 'h5io==0.2.2', - 'h5io_browser==0.0.8', + 'h5io_browser==0.0.9', 'matplotlib==3.8.2', 'pyiron_contrib==0.1.14', 'pympipool==0.7.13', From 4561724d54ce41548008867bc3f3ec763668c51a Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Thu, 15 Feb 2024 14:00:50 +0000 Subject: [PATCH 151/166] [dependabot skip] Update env file --- .binder/environment.yml | 2 +- docs/environment.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) 
diff --git a/.binder/environment.yml b/.binder/environment.yml index ef2a8443..07e19476 100644 --- a/.binder/environment.yml +++ b/.binder/environment.yml @@ -7,7 +7,7 @@ dependencies: - cloudpickle =3.0.0 - graphviz =8.1.0 - h5io =0.2.2 -- h5io_browser =0.0.8 +- h5io_browser =0.0.9 - matplotlib =3.8.2 - pyiron_contrib =0.1.14 - pympipool =0.7.13 diff --git a/docs/environment.yml b/docs/environment.yml index e89c3c60..94c634b5 100644 --- a/docs/environment.yml +++ b/docs/environment.yml @@ -12,7 +12,7 @@ dependencies: - cloudpickle =3.0.0 - graphviz =8.1.0 - h5io =0.2.2 -- h5io_browser =0.0.8 +- h5io_browser =0.0.9 - matplotlib =3.8.2 - pyiron_contrib =0.1.14 - pympipool =0.7.13 From 394ccd4fee19ba32e396c6aac322f49cb6fee8a7 Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Thu, 15 Feb 2024 22:06:01 +0000 Subject: [PATCH 152/166] Format black --- pyiron_workflow/composite.py | 2 +- pyiron_workflow/storage.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/pyiron_workflow/composite.py b/pyiron_workflow/composite.py index 98c99926..7e6dceb7 100644 --- a/pyiron_workflow/composite.py +++ b/pyiron_workflow/composite.py @@ -123,7 +123,7 @@ def __init__( parent=parent, save_after_run=save_after_run, storage_backend=storage_backend, - **kwargs + **kwargs, ) self.strict_naming: bool = strict_naming self._inputs_map = None diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index 771c19d9..e5dc2f76 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -67,8 +67,7 @@ def _save(self, backend: Literal["h5io", "tinybase"]): ) elif backend == "tinybase": os.makedirs( - os.path.dirname(self._tinybase_storage_file_path), - exist_ok=True + os.path.dirname(self._tinybase_storage_file_path), exist_ok=True ) # Make sure the path to the storage location exists self.node.to_storage(self._tinybase_storage) else: From e685c84ae292d9384f1686e19c5998d6109df41e Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 16 Feb 2024 10:38:16 
-0800 Subject: [PATCH 153/166] Bump atomistics and contrib --- .ci_support/environment-notebooks.yml | 2 +- .ci_support/environment.yml | 2 +- setup.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.ci_support/environment-notebooks.yml b/.ci_support/environment-notebooks.yml index d24aae09..9c9b587f 100644 --- a/.ci_support/environment-notebooks.yml +++ b/.ci_support/environment-notebooks.yml @@ -5,6 +5,6 @@ dependencies: - atomistics =0.1.23 - lammps - phonopy =2.21.0 - - pyiron_atomistics =0.4.14 + - pyiron_atomistics =0.4.15 - pyiron-data =0.0.27 - numpy =1.26.4 \ No newline at end of file diff --git a/.ci_support/environment.yml b/.ci_support/environment.yml index 7585b553..f47948b4 100644 --- a/.ci_support/environment.yml +++ b/.ci_support/environment.yml @@ -9,7 +9,7 @@ dependencies: - h5io =0.2.2 - h5io_browser =0.0.9 - matplotlib =3.8.2 -- pyiron_contrib =0.1.14 +- pyiron_contrib =0.1.15 - pympipool =0.7.13 - python-graphviz =0.20.1 - toposort =1.10 diff --git a/setup.py b/setup.py index e8ffe05a..9b4ee326 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,7 @@ 'h5io==0.2.2', 'h5io_browser==0.0.9', 'matplotlib==3.8.2', - 'pyiron_contrib==0.1.14', + 'pyiron_contrib==0.1.15', 'pympipool==0.7.13', 'toposort==1.10', 'typeguard==4.1.5', @@ -45,7 +45,7 @@ 'atomistics==0.1.23', 'numpy==1.26.4', 'phonopy==2.21.0', - 'pyiron_atomistics==0.4.14', + 'pyiron_atomistics==0.4.15', ], }, cmdclass=versioneer.get_cmdclass(), From 648cfdc08c8af6198b9883a046eb9ca27a1db47a Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Fri, 16 Feb 2024 18:38:42 +0000 Subject: [PATCH 154/166] [dependabot skip] Update env file --- .binder/environment.yml | 4 ++-- docs/environment.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.binder/environment.yml b/.binder/environment.yml index 07e19476..19b35b35 100644 --- a/.binder/environment.yml +++ b/.binder/environment.yml @@ -9,7 +9,7 @@ dependencies: - h5io =0.2.2 - h5io_browser =0.0.9 - matplotlib 
=3.8.2 -- pyiron_contrib =0.1.14 +- pyiron_contrib =0.1.15 - pympipool =0.7.13 - python-graphviz =0.20.1 - toposort =1.10 @@ -18,6 +18,6 @@ dependencies: - atomistics =0.1.23 - lammps - phonopy =2.21.0 -- pyiron_atomistics =0.4.14 +- pyiron_atomistics =0.4.15 - pyiron-data =0.0.27 - numpy =1.26.4 diff --git a/docs/environment.yml b/docs/environment.yml index 94c634b5..30a62d29 100644 --- a/docs/environment.yml +++ b/docs/environment.yml @@ -14,7 +14,7 @@ dependencies: - h5io =0.2.2 - h5io_browser =0.0.9 - matplotlib =3.8.2 -- pyiron_contrib =0.1.14 +- pyiron_contrib =0.1.15 - pympipool =0.7.13 - python-graphviz =0.20.1 - toposort =1.10 From 0370f52a1f5e4f375268520714e5a8441ce03983 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 16 Feb 2024 14:00:34 -0800 Subject: [PATCH 155/166] Make NodeJob compliant with the storage interface --- pyiron_workflow/job.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/pyiron_workflow/job.py b/pyiron_workflow/job.py index 873b80f5..443997ee 100644 --- a/pyiron_workflow/job.py +++ b/pyiron_workflow/job.py @@ -49,13 +49,14 @@ class NodeJob(TemplateJob): It leans directly on the storage capabilities of the node itself, except for the node class and name, and the storage backend mode, all of which are held in the - traditional job input. (Only the storage backend ever needs to be specified, the - node information gets populated automatically). + traditional job input. (WARNING: This might be fragile to adjusting the storage + backend on the node _after_ the node has been assign to the job.) The job provides direct access to its owned node (as both input and output) on the :attr:`node` attribute. The only requirement is that the node have an untouched working directory (so we can make sure its files get stored _inside_ the job's - directory tree), and that it be compatible with the storage backend used. 
+ directory tree), and that it be saveable (not all objects work with the "h5io" + storage backend, e.g. `ase.Calculator` objects may break it). Examples: >>> from pyiron_base import Project @@ -92,7 +93,7 @@ def __init__(self, project, job_name): self._node = None self.input._label = None self.input._class_type = None - self.input.storage_backend = "h5io" # Or "tinybase" + self.input._storage_backend = None @property def node(self) -> Node: @@ -112,6 +113,7 @@ def node(self, new_node: Node): f"{new_node.__class__.__module__}." f"{new_node.__class__.__name__}" ) self.input._label = new_node.label + self.input._storage_backend = new_node.storage_backend @staticmethod def _node_working_directory_already_there(node): @@ -125,13 +127,18 @@ def _save_node(self): here = os.getcwd() os.makedirs(self.working_directory, exist_ok=True) os.chdir(self.working_directory) - self.node.save(backend=self.input.storage_backend) + self.node.save() os.chdir(here) def _load_node(self): here = os.getcwd() os.chdir(self.working_directory) - self._node = _import_class(self.input._class_type)(self.input._label) + self._node = _import_class( + self.input._class_type + )( + label=self.input._label, + storage_backend=self.input._storage_backend, + ) os.chdir(here) def to_hdf(self, hdf=None, group_name=None): From 84c326c74d6667826ba9da3d60c8d9a4db6fb3b0 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 16 Feb 2024 20:58:08 -0800 Subject: [PATCH 156/166] Wrap tests that use storage in a version check --- tests/unit/test_job.py | 8 ++++++++ tests/unit/test_macro.py | 2 ++ tests/unit/test_node.py | 2 ++ tests/unit/test_workflow.py | 6 ++++-- 4 files changed, 16 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_job.py b/tests/unit/test_job.py index 8e1dce04..bd4ebb94 100644 --- a/tests/unit/test_job.py +++ b/tests/unit/test_job.py @@ -1,4 +1,5 @@ from abc import ABC, abstractmethod +import sys from time import sleep import unittest @@ -33,6 +34,7 @@ def make_a_job_from_node(self, 
node): job.node = node return job + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_modal(self): modal_wf = Workflow("modal_wf") modal_wf.sleep = Sleep(0) @@ -63,6 +65,7 @@ def test_modal(self): msg="The loaded job should still have all the same values" ) + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_nonmodal(self): nonmodal_node = Workflow("non_modal") nonmodal_node.out = Workflow.create.standard.UserInput(42) @@ -94,6 +97,7 @@ def test_nonmodal(self): msg="The loaded job should have the finished values" ) + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_bad_workflow(self): has_wd_wf = Workflow("not_empty") try: @@ -113,6 +117,7 @@ class TestWrapperFunction(_WithAJob): def make_a_job_from_node(self, node): return create_job_with_python_wrapper(self.pr, node) + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_modal(self): modal_wf = Workflow("modal_wf") modal_wf.sleep = Sleep(0) @@ -143,6 +148,7 @@ def test_modal(self): msg="The loaded job should still have all the same values" ) + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_node(self): node = Workflow.create.standard.UserInput(42) nj = self.make_a_job_from_node(node) @@ -153,6 +159,7 @@ def test_node(self): msg="A single node should run just as well as a workflow" ) + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_nonmodal(self): nonmodal_node = Workflow("non_modal") nonmodal_node.out = Workflow.create.standard.UserInput(42) @@ -184,6 +191,7 @@ def test_nonmodal(self): msg="The loaded job should have the finished values" ) + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_node(self): node = Workflow.create.standard.UserInput(42) nj = self.make_a_job_from_node(node) diff --git a/tests/unit/test_macro.py 
b/tests/unit/test_macro.py index 2d6a4a79..bd8009bd 100644 --- a/tests/unit/test_macro.py +++ b/tests/unit/test_macro.py @@ -1,3 +1,4 @@ +import sys from concurrent.futures import Future from functools import partialmethod @@ -520,6 +521,7 @@ def LikeAFunction(macro, lin: list, n: int = 2): self.assertListEqual(override_io_maps.inputs.labels, ["my_lin"]) self.assertDictEqual(override_io_maps(), {"the_input_list": [1, 2, 3, 4]}) + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_storage_for_modified_macros(self): ensure_tests_in_python_path() Macro.register("static.demo_nodes", domain="demo") diff --git a/tests/unit/test_node.py b/tests/unit/test_node.py index b0bee1a9..9bda7309 100644 --- a/tests/unit/test_node.py +++ b/tests/unit/test_node.py @@ -1,5 +1,6 @@ from concurrent.futures import Future import os +import sys from typing import Literal, Optional import unittest @@ -372,6 +373,7 @@ def test_graph_info(self): "above." ) + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_storage(self): self.assertIs( self.n1.outputs.y.value, diff --git a/tests/unit/test_workflow.py b/tests/unit/test_workflow.py index 69221612..06dbde91 100644 --- a/tests/unit/test_workflow.py +++ b/tests/unit/test_workflow.py @@ -1,5 +1,5 @@ from concurrent.futures import Future - +import sys from time import sleep import unittest @@ -337,6 +337,7 @@ def add_three_macro(macro): wf.m.two.pull(run_parent_trees_too=False) wf.executor_shutdown() + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_storage_values(self): for backend in ALLOWED_BACKENDS: with self.subTest(backend): @@ -364,7 +365,8 @@ def test_storage_values(self): finally: # Clean up after ourselves wf.storage.delete() - + + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_storage_scopes(self): wf = Workflow("wf") wf.register("static.demo_nodes", "demo") From 
f8314205826835ebb2a631e1b583e83147ef9c27 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 16 Feb 2024 21:00:35 -0800 Subject: [PATCH 157/166] :bug: de-double and de-bug the node test It was just not showing up because the test name was doubled up --- tests/unit/test_job.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/unit/test_job.py b/tests/unit/test_job.py index bd4ebb94..b31f39a2 100644 --- a/tests/unit/test_job.py +++ b/tests/unit/test_job.py @@ -34,6 +34,17 @@ def make_a_job_from_node(self, node): job.node = node return job + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") + def test_node(self): + node = Workflow.create.standard.UserInput(42) + nj = self.make_a_job_from_node(node) + nj.run() + self.assertEqual( + 42, + nj.node.outputs.user_input.value, + msg="A single node should run just as well as a workflow" + ) + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_modal(self): modal_wf = Workflow("modal_wf") @@ -148,17 +159,6 @@ def test_modal(self): msg="The loaded job should still have all the same values" ) - @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") - def test_node(self): - node = Workflow.create.standard.UserInput(42) - nj = self.make_a_job_from_node(node) - nj.run() - self.assertEqual( - 42, - nj.node.outputs.user_input, - msg="A single node should run just as well as a workflow" - ) - @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_nonmodal(self): nonmodal_node = Workflow("non_modal") From 6abc23add7f12001b27f49535dab0c9ad9338766 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 16 Feb 2024 21:10:10 -0800 Subject: [PATCH 158/166] Fail hard and clean when the python version is too low And test for it, hopefully, we'll find out on the CI I guess --- pyiron_workflow/job.py | 7 +++++++ pyiron_workflow/storage.py | 2 ++ tests/unit/test_job.py | 20 
++++++++++++++++++++ tests/unit/test_node.py | 9 +++++++++ 4 files changed, 38 insertions(+) diff --git a/pyiron_workflow/job.py b/pyiron_workflow/job.py index 443997ee..7c1e355b 100644 --- a/pyiron_workflow/job.py +++ b/pyiron_workflow/job.py @@ -23,6 +23,7 @@ from __future__ import annotations import os +import sys from pyiron_base import TemplateJob, JOB_CLASS_DICT from pyiron_workflow.node import Node @@ -87,6 +88,9 @@ class NodeJob(TemplateJob): ) def __init__(self, project, job_name): + if sys.version_info < (3, 11): + raise NotImplementedError("Node jobs are only available in python 3.11+") + super().__init__(project, job_name) self._python_only_job = True self._write_work_dir_warnings = False @@ -209,6 +213,9 @@ def create_job_with_python_wrapper(project, node): """ + _WARNINGS_STRING ) + if sys.version_info < (3, 11): + raise NotImplementedError("Node jobs are only available in python 3.11+") + job = project.wrap_python_function(_run_node) job.input["node"] = node return job diff --git a/pyiron_workflow/storage.py b/pyiron_workflow/storage.py index e5dc2f76..9567a8ca 100644 --- a/pyiron_workflow/storage.py +++ b/pyiron_workflow/storage.py @@ -32,6 +32,8 @@ class StorageInterface: _H5IO_STORAGE_FILE_NAME = "h5io.h5" def __init__(self, node: Node): + if sys.version_info < (3, 11): + raise NotImplementedError("Storage is only available in python 3.11+") self.node = node def save(self, backend: Literal["h5io", "tinybase"]): diff --git a/tests/unit/test_job.py b/tests/unit/test_job.py index b31f39a2..17b94ae9 100644 --- a/tests/unit/test_job.py +++ b/tests/unit/test_job.py @@ -34,6 +34,16 @@ def make_a_job_from_node(self, node): job.node = node return job + @unittest.skipIf(sys.version_info >= (3, 11), "Storage should only work in 3.11+") + def test_clean_failure(self): + with self.assertRaises( + NotImplementedError, + msg="Storage, and therefore node jobs, are only available in python 3.11+, " + "so we should fail hard and clean here" + ): + node = 
Workflow.create.standard.UserInput(42) + self.make_a_job_from_node(node) + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_node(self): node = Workflow.create.standard.UserInput(42) @@ -128,6 +138,16 @@ class TestWrapperFunction(_WithAJob): def make_a_job_from_node(self, node): return create_job_with_python_wrapper(self.pr, node) + @unittest.skipIf(sys.version_info >= (3, 11), "Storage should only work in 3.11+") + def test_clean_failure(self): + with self.assertRaises( + NotImplementedError, + msg="Storage, and therefore node jobs, are only available in python 3.11+, " + "so we should fail hard and clean here" + ): + node = Workflow.create.standard.UserInput(42) + self.make_a_job_from_node(node) + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_modal(self): modal_wf = Workflow("modal_wf") diff --git a/tests/unit/test_node.py b/tests/unit/test_node.py index 9bda7309..373cb9d5 100644 --- a/tests/unit/test_node.py +++ b/tests/unit/test_node.py @@ -373,6 +373,15 @@ def test_graph_info(self): "above." 
) + @unittest.skipIf(sys.version_info >= (3, 11), "Storage should only work in 3.11+") + def test_storage_failure(self): + with self.assertRaises( + NotImplementedError, + msg="Storage is only available in python 3.11+, so we should fail hard and " + "clean here" + ): + self.n1.storage + @unittest.skipIf(sys.version_info < (3, 11), "Storage will only work in 3.11+") def test_storage(self): self.assertIs( From 740e236dac10a506c90af631e463148d61487749 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 16 Feb 2024 21:15:43 -0800 Subject: [PATCH 159/166] Don't even try loading if python <3.11 Otherwise we check to see if storage is there and get an error --- pyiron_workflow/node.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index af7b3b2e..07515bad 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -7,6 +7,7 @@ from __future__ import annotations +import sys import warnings from abc import ABC, abstractmethod from concurrent.futures import Executor as StdLibExecutor, Future @@ -355,11 +356,11 @@ def __post__( run_after_init: bool = False, **kwargs, ): - if overwrite_save: + if overwrite_save and sys.version_info >= (3, 11): self.storage.delete() do_load = False else: - do_load = self.storage.has_contents + do_load = sys.version_info >= (3, 11) and self.storage.has_contents if do_load and run_after_init: raise ValueError( From 8fbd0706f7f1d6ab5781f42bc187444e72c00ad9 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 16 Feb 2024 21:19:55 -0800 Subject: [PATCH 160/166] :bug: Add missed skipIf --- tests/unit/test_node.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/unit/test_node.py b/tests/unit/test_node.py index 373cb9d5..1cb42031 100644 --- a/tests/unit/test_node.py +++ b/tests/unit/test_node.py @@ -439,6 +439,7 @@ def test_storage(self): msg="Destroying the save should allow immediate re-running" ) + @unittest.skipIf(sys.version_info < (3, 11), "Storage will 
only work in 3.11+") def test_save_after_run(self): for backend in ALLOWED_BACKENDS: with self.subTest(backend): From dc1ea55966a81c13495d48c953cd7a3b3566349e Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 16 Feb 2024 21:27:09 -0800 Subject: [PATCH 161/166] Add a warning to the node job tests --- pyiron_workflow/job.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyiron_workflow/job.py b/pyiron_workflow/job.py index 7c1e355b..b0cce8fa 100644 --- a/pyiron_workflow/job.py +++ b/pyiron_workflow/job.py @@ -31,6 +31,9 @@ _WARNINGS_STRING = """ Warnings: + Node jobs rely on storing the node to file, which means these are also only + available for python >= 3.11. + The job can be run with `run_mode="non_modal"`, but _only_ if all the nodes being run are defined in an importable file location -- i.e. copying and pasting the example above into a jupyter notebook works fine in modal mode, but From e03ab9393c161164c83decdf6f3eec46b1a52c0c Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 16 Feb 2024 21:27:24 -0800 Subject: [PATCH 162/166] Only test node job docs on valid python versions --- tests/unit/test_docs.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/unit/test_docs.py b/tests/unit/test_docs.py index bd297ea0..afa8c601 100644 --- a/tests/unit/test_docs.py +++ b/tests/unit/test_docs.py @@ -1,5 +1,6 @@ import doctest import pkgutil +import sys import unittest import pyiron_workflow @@ -11,6 +12,8 @@ def load_tests(loader, tests, ignore): ): if "node_library" in name: continue + if sys.version_info >= (3, 11) and "job" in name: + continue tests.addTests(doctest.DocTestSuite(name)) return tests From afc7d906047147fbdd62de58d334f670853cedd1 Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 16 Feb 2024 21:28:40 -0800 Subject: [PATCH 163/166] Add the version requirement right to the top of the docs about storage --- pyiron_workflow/node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyiron_workflow/node.py 
b/pyiron_workflow/node.py index 07515bad..4bff36a6 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -152,7 +152,7 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): context when you're done with them; we give a convenience method for this. - Nodes created from a registered package store their package identifier as a class attribute. - - [ALPHA FEATURE] Nodes can be saved to and loaded from file. + - [ALPHA FEATURE] Nodes can be saved to and loaded from file if python >= 3.11. - Saving is triggered manually, or by setting a flag to save after the nodes runs. - On instantiation, nodes will load automatically if they find saved content. From c7474f38374bdc05d48831eedb71ca0405b9c86c Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 16 Feb 2024 21:28:40 -0800 Subject: [PATCH 164/166] Add the version requirement right to the top of the docs about storage --- pyiron_workflow/node.py | 2 +- tests/unit/test_docs.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyiron_workflow/node.py b/pyiron_workflow/node.py index 07515bad..4bff36a6 100644 --- a/pyiron_workflow/node.py +++ b/pyiron_workflow/node.py @@ -152,7 +152,7 @@ class Node(HasToDict, ABC, metaclass=AbstractHasPost): context when you're done with them; we give a convenience method for this. - Nodes created from a registered package store their package identifier as a class attribute. - - [ALPHA FEATURE] Nodes can be saved to and loaded from file. + - [ALPHA FEATURE] Nodes can be saved to and loaded from file if python >= 3.11. - Saving is triggered manually, or by setting a flag to save after the nodes runs. - On instantiation, nodes will load automatically if they find saved content. 
diff --git a/tests/unit/test_docs.py b/tests/unit/test_docs.py index afa8c601..9f85b7f1 100644 --- a/tests/unit/test_docs.py +++ b/tests/unit/test_docs.py @@ -12,7 +12,7 @@ def load_tests(loader, tests, ignore): ): if "node_library" in name: continue - if sys.version_info >= (3, 11) and "job" in name: + if sys.version_info < (3, 11) and "job" in name: continue tests.addTests(doctest.DocTestSuite(name)) return tests From fabf4650b3e925c80788d709170e9a9485ef3bab Mon Sep 17 00:00:00 2001 From: liamhuber Date: Fri, 16 Feb 2024 21:35:51 -0800 Subject: [PATCH 165/166] Add version filtering to the deepdive Out of kindness --- notebooks/deepdive.ipynb | 121 ++++++++++++++++++--------------------- 1 file changed, 55 insertions(+), 66 deletions(-) diff --git a/notebooks/deepdive.ipynb b/notebooks/deepdive.ipynb index bc98496b..fb33ca50 100644 --- a/notebooks/deepdive.ipynb +++ b/notebooks/deepdive.ipynb @@ -989,7 +989,7 @@ }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAfuklEQVR4nO3db0zd5f3/8dc5B+HUDo6hFTgWxo5d63okaoDQQdeYOcuoBtMbSzGurbq6jKrTrtOkTReRxoToopk6Ibr5J6bYEY3uJwlDuaXUfjcmtIl4TDQtG/1zkADxcPxDG8+5fjcYpEdAOadwrh7O85Fwgw+fA2/yScOzn+uc6ziMMUYAAACWOG0PAAAA0hsxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKsybA8wH9FoVGfOnFF2drYcDoftcQAAwDwYYxQOh3XFFVfI6Zz7/kdKxMiZM2dUVFRkewwAAJCAkydPqrCwcM6vp0SMZGdnS5r8ZXJycixPAwAA5mN8fFxFRUXTf8fnkhIxMrU0k5OTQ4wAAJBivuspFjyBFQAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwKqU2PQMS0skatQzMKbh8ITyst2q8OXK5eQ9hwAgXREjSKrO/qAa2wMKhiamj3k9bjXU+lVT4rU4GQDAFpZpkDSd/UHtOtgXEyKSNBSa0K6DfersD1qaDABgEzGCpIhEjRrbAzKzfG3qWGN7QJHobGcAAJYyYgRJ0TMwNuOOyPmMpGBoQj0DY8kbCgBwUSBGkBTD4blDJJHzAABLBzGCpMjLdi/oeQCApYMYQVJU+HLl9bg11wt4HZp8VU2FLzeZYwEALgLECJLC5XSoodYvSTOCZOrzhlo/+40AQBoiRpA0NSVetWwrVYEndimmwONWy7ZS9hkBgDTFpmdIqpoSrzb5C9iBFQAwjRhB0rmcDlWuXmF7DADARYJlGgAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAM
AqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKsybA8AAEC6iESNegbGNByeUF62WxW+XLmcDttjWUeMAACQBJ39QTW2BxQMTUwf83rcaqj1q6bEa3Ey+xJapmlubpbP55Pb7VZZWZm6u7u/9fzW1lZde+21uvTSS+X1enXnnXdqdHQ0oYEBAEg1nf1B7TrYFxMikjQUmtCug33q7A9amuziEHeMtLW1affu3dq/f7+OHj2qjRs3avPmzRocHJz1/MOHD2vHjh3auXOnPvzwQ7366qv697//rbvuuuuChwcA4GIXiRo1tgdkZvna1LHG9oAi0dnOSA9xx8gTTzyhnTt36q677tK6dev0pz/9SUVFRWppaZn1/H/+85/6wQ9+oPvuu08+n08/+clP9Jvf/Ebvv//+BQ8PAMDFrmdgbMYdkfMZScHQhHoGxpI31EUmrhg5d+6cent7VV1dHXO8urpaR44cmfUxVVVVOnXqlDo6OmSM0aeffqrXXntNN99885w/5+zZsxofH4/5AAAgFQ2H5w6RRM5biuKKkZGREUUiEeXn58ccz8/P19DQ0KyPqaqqUmtrq+rq6pSZmamCggJddtllevrpp+f8OU1NTfJ4PNMfRUVF8YwJAMBFIy/bvaDnLUUJPYHV4Yh9GZIxZsaxKYFAQPfdd58eeugh9fb2qrOzUwMDA6qvr5/z++/bt0+hUGj64+TJk4mMCQCAdRW+XHk9bs31Al6HJl9VU+HLTeZYF5W4Xtq7cuVKuVyuGXdBhoeHZ9wtmdLU1KQNGzbowQcflCRdc801Wr58uTZu3KhHHnlEXu/MlzNlZWUpKysrntEAALgouZwONdT6tetgnxxSzBNZpwKlodaf1vuNxHVnJDMzU2VlZerq6oo53tXVpaqqqlkf8+WXX8rpjP0xLpdL0uQdFQAAlrqaEq9atpWqwBO7FFPgcatlW2na7zMS96Zne/bs0fbt21VeXq7Kyko999xzGhwcnF522bdvn06fPq2XX35ZklRbW6tf//rXamlp0c9//nMFg0Ht3r1bFRUVuuKKKxb2twEA4CJVU+LVJn8BO7DOIu4Yqaur0+joqA4cOKBgMKiSkhJ1dHSouLhYkhQMBmP2HLnjjjsUDof15z//Wb///e912WWX6YYbbtCjjz66cL8FAAApwOV0qHL1CttjXHQcJgXWSsbHx+XxeBQKhZSTk2N7HAAAMA/z/fvNu/YCAACriBEAAGAVMQIAAKwiRgAAgFXECAAAsIoYAQAAVhEjAADAKmIEAABYRYwAAACriBEAAGAVMQIAAKwiRgAAgFXECAAAsIoYAQAAVhEjAADAKmIEAABYRYwAAACriBEAAGAVMQIAAKwiRgAAgFXECAAAsIoYAQAAVhEjAADAqgzbA9gSiRr1DIxpODyhvGy3Kny5cjkdtscCACDtpGWMdPYH1dgeUDA0MX3M63GrodavmhKvxckAAEg/abdM09kf1K6DfTEhIklDoQntOtinzv6gpckAAEhPaRUjkahRY3tAZpavTR1rbA8oEp3tDAAAJv+W/N/xUf2/Y6f1f8dH+ZuxANJqmaZnYGzGHZHzGUnB0IR6BsZUuXpF8gYDAKQElvkXR1rdGRkOzx0iiZwHAEgfLPMvnrSKkbxs94KeBwBIDyzzL660ipEKX668HrfmegGvQ5O32yp8uckcCwBwkYtnmR/xS6sYcTkdaqj1S9KMIJn6vKHWz34jAIAYLPMvrrSKEUmqKfGqZVupCjyxSzEFHrdatpXyBCQAwAws8y+utHo1zZSaEq82+QvYgRUAMC9Ty/xDoYlZnzfi0OR/alnmT0xaxog0uWTDy3cBAPMxtcy/62CfHFJMkLDMf+HSbpkGAIBEsMy/eNL2zggAAPFimX9xECMAAM
SBZf6FxzINAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKt4117AskjU8HbkANIaMQJY1NkfVGN7QMHQxPQxr8ethlq/akq8FicDgORhmQawpLM/qF0H+2JCRJKGQhPadbBPnf1BS5MBQHIRI4AFkahRY3tAZpavTR1rbA8oEp3tDABYWogRwIKegbEZd0TOZyQFQxPqGRhL3lAAYAkxAlgwHJ47RBI5DwBSGTECWJCX7V7Q8wAglREjgAUVvlx5PW7N9QJehyZfVVPhy03mWABgBTECWOByOtRQ65ekGUEy9XlDrZ/9RgCkBWIEsKSmxKuWbaUq8MQuxRR43GrZVso+IwDSBpueARbVlHi1yV/ADqwA0hoxAljmcjpUuXqF7TEAwBqWaQAAgFXECAAAsIoYAQAAVhEjAADAKmIEAABYRYwAAACriBEAAGAVMQIAAKxKKEaam5vl8/nkdrtVVlam7u7ubz3/7Nmz2r9/v4qLi5WVlaXVq1frhRdeSGhgAACwtMS9A2tbW5t2796t5uZmbdiwQc8++6w2b96sQCCg73//+7M+ZuvWrfr000/1/PPP64c//KGGh4f19ddfX/DwAAAg9TmMMSaeB6xfv16lpaVqaWmZPrZu3Tpt2bJFTU1NM87v7OzUrbfeqhMnTig3N7G3Qx8fH5fH41EoFFJOTk5C3wMAACTXfP9+x7VMc+7cOfX29qq6ujrmeHV1tY4cOTLrY958802Vl5frscce06pVq7R27Vo98MAD+uqrr+b8OWfPntX4+HjMBwAAWJriWqYZGRlRJBJRfn5+zPH8/HwNDQ3N+pgTJ07o8OHDcrvdeuONNzQyMqK7775bY2Njcz5vpKmpSY2NjfGMBgAAUlRCT2B1OGLf3twYM+PYlGg0KofDodbWVlVUVOimm27SE088oZdeemnOuyP79u1TKBSa/jh58mQiYwIAgBQQ152RlStXyuVyzbgLMjw8PONuyRSv16tVq1bJ4/FMH1u3bp2MMTp16pTWrFkz4zFZWVnKysqKZzQAAJCi4rozkpmZqbKyMnV1dcUc7+rqUlVV1ayP2bBhg86cOaPPP/98+tjHH38sp9OpwsLCBEYGAABLSdzLNHv27NFf//pXvfDCC/roo4/0u9/9ToODg6qvr5c0ucSyY8eO6fNvu+02rVixQnfeeacCgYDeffddPfjgg/rVr36lZcuWLdxvAgAAUlLc+4zU1dVpdHRUBw4cUDAYVElJiTo6OlRcXCxJCgaDGhwcnD7/e9/7nrq6uvTb3/5W5eXlWrFihbZu3apHHnlk4X4LAACQsuLeZ8QG9hkBACD1LMo+IwAAAAuNGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWJVhewAsDZGoUc/AmIbDE8rLdqvClyuX02F7LABACiBGcME6+4NqbA8oGJqYPub1uNVQ61dNidfiZACAVMAyDS5IZ39Quw72xYSIJA2FJrTrYJ86+4OWJgMApApiBAmLRI0a2wMys3xt6lhje0CR6GxnAAAwiRhBwnoGxmbcETmfkRQMTahnYCx5QwEAUg4xgoQNh+cOkUTOAwCkJ2IECcvLdi/oeQCA9ESMIGEVvlx5PW7N9QJehyZfVVPhy03mWACAFEOMIGEup0MNtX5JmhEkU5831PrZbwQA8K2IEVyQmhKvWraVqsATuxRT4HGrZVsp+4wAAL4Tm57hgtWUeLXJX8AOrACAhBAjWBAup0OVq1fYHgMAkIJYpgEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAA
AAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVCcVIc3OzfD6f3G63ysrK1N3dPa/Hvffee8rIyNB1112XyI8FAABLUNwx0tbWpt27d2v//v06evSoNm7cqM2bN2twcPBbHxcKhbRjxw797Gc/S3hYAACw9DiMMSaeB6xfv16lpaVqaWmZPrZu3Tpt2bJFTU1Ncz7u1ltv1Zo1a+RyufT3v/9dx44dm/fPHB8fl8fjUSgUUk5OTjzjAgAAS+b79zuuOyPnzp1Tb2+vqqurY45XV1fryJEjcz7uxRdf1PHjx9XQ0DCvn3P27FmNj4/HfAAAgKUprhgZGRlRJBJRfn5+zPH8/HwNDQ3N+phPPvlEe/fuVWtrqzIyMub1c5qamuTxeKY/ioqK4hkTAACkkISewOpwOGI+N8bMOCZJkUhEt912mxobG7V27dp5f/99+/YpFApNf5w8eTKRMQEAQAqY362K/1m5cqVcLteMuyDDw8Mz7pZIUjgc1vvvv6+jR4/q3nvvlSRFo1EZY5SRkaG3335bN9xww4zHZWVlKSsrK57RAABAiorrzkhmZqbKysrU1dUVc7yrq0tVVVUzzs/JydEHH3ygY8eOTX/U19frqquu0rFjx7R+/foLmx4AAKS8uO6MSNKePXu0fft2lZeXq7KyUs8995wGBwdVX18vaXKJ5fTp03r55ZfldDpVUlIS8/i8vDy53e4ZxwEAQHqKO0bq6uo0OjqqAwcOKBgMqqSkRB0dHSouLpYkBYPB79xzBAAAYErc+4zYwD4jAACknkXZZwQAAGChESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwKsP2AAAApLNI1KhnYEzD4QnlZbtV4cuVy+mwPVZSESMAAFjS2R9UY3tAwdDE9DGvx62GWr9qSrwWJ0sulmkAALCgsz+oXQf7YkJEkoZCE9p1sE+d/UFLkyUfMQIAQJJFokaN7QGZWb42dayxPaBIdLYzlh5iBACAJOsZGJtxR+R8RlIwNKGegbHkDWURMQIAQJINh+cOkUTOS3XECAAASZaX7V7Q81IdMQIAQJJV+HLl9bg11wt4HZp8VU2FLzeZY1lDjAAAkGQup0MNtX5JmhEkU5831PrTZr8RYgQAAAtqSrxq2VaqAk/sUkyBx62WbaVptc8Im54BAGBJTYlXm/wF7MBqewAAANKZy+lQ5eoVtsewimUaAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq9j0DACANBWJmoti91diBACANNTZH1Rje0DB0MT0Ma/HrYZaf9LfF4dlGgAA0kxnf1C7DvbFhIgkDYUmtOtgnzr7g0mdhxgBACCNRKJGje0BmVm+NnWssT2gSHS2MxYHMQIAQBrpGRibcUfkfEZSMDShnoGxpM1EjAAAkEaGw3OHSCLnLQRiBACANJKX7V7Q8xYCMQIAQBqp8OXK63FrrhfwOjT5qpoKX27SZiJGAABIIy6nQw21fkmaESRTnzfU+pO63wgxAgBAmqkp8aplW6kKPLFLMQUet1q2lSZ9nxE2PQMAIA3VlHi1yV/ADqwAAMAel9OhytUrbI/BMg0AALCLGAEAAFYRIwAAwCpiBAAAWEWMAA
AAq4gRAABgFTECAACsSihGmpub5fP55Ha7VVZWpu7u7jnPff3117Vp0yZdfvnlysnJUWVlpd56662EBwYAAEtL3DHS1tam3bt3a//+/Tp69Kg2btyozZs3a3BwcNbz3333XW3atEkdHR3q7e3VT3/6U9XW1uro0aMXPDwAAEh9DmOMiecB69evV2lpqVpaWqaPrVu3Tlu2bFFTU9O8vsfVV1+turo6PfTQQ/M6f3x8XB6PR6FQSDk5OfGMCwAALJnv3++47oycO3dOvb29qq6ujjleXV2tI0eOzOt7RKNRhcNh5ebO/dbEZ8+e1fj4eMwHAABYmuKKkZGREUUiEeXn58ccz8/P19DQ0Ly+x+OPP64vvvhCW7dunfOcpqYmeTye6Y+ioqJ4xgQAACkkoSewOhyx7+hnjJlxbDaHDh3Sww8/rLa2NuXl5c153r59+xQKhaY/Tp48mciYAAAgBcT1rr0rV66Uy+WacRdkeHh4xt2Sb2pra9POnTv16quv6sYbb/zWc7OyspSVlRXPaAAAIEXFdWckMzNTZWVl6urqijne1dWlqqqqOR936NAh3XHHHXrllVd08803JzYpAABYkuK6MyJJe/bs0fbt21VeXq7Kyko999xzGhwcVH19vaTJJZbTp0/r5ZdfljQZIjt27NCTTz6pH//4x9N3VZYtWyaPx7OAvwoAAEhFccdIXV2dRkdHdeDAAQWDQZWUlKijo0PFxcWSpGAwGLPnyLPPPquvv/5a99xzj+65557p47fffrteeumlC/8NAABASot7nxEb2GcEAIDUsyj7jAAAACw0YgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWxb0DK4CFE4ka9QyMaTg8obxstyp8uXI5v/sdsAFgKSFGAEs6+4NqbA8oGJqYPub1uNVQ61dNidfiZACQXCzTABZ09ge162BfTIhI0lBoQrsO9qmzP2hpMgBIPmIESLJI1KixPaDZ3hRq6lhje0CR6EX/tlEAsCCIESDJegbGZtwROZ+RFAxNqGdgLHlDAYBFxAiQZMPhuUMkkfMAINURI0CS5WW7F/Q8AEh1xAiQZBW+XHk9bs31Al6HJl9VU+HLTeZYAGANMQIkmcvpUEOtX5JmBMnU5w21fvYbAZA2iBHAgpoSr1q2larAE7sUU+Bxq2VbKfuMAEgrbHoGWFJT4tUmfwE7sAJIe8QIYJHL6VDl6hW2xwAAq1imAQAAVhEjAADAKmIEAABYRYwAAACriBEAAGAVMQIAAKwiRgAAgFXECAAAsIoYAQAAVhEjAADAKmIEAABYRYwAAACriBEAAGAVMQIAAKwiRgAAgFXECAAAsIoYAQAAVhEjAADAKmIEAABYRYwAAACriBEAAGBVhu0BgFQUiRr1DIxpODyhvGy3Kny5cjkdtscCgJREjABx6uwPqrE9oGBoYvqY1+NWQ61fNSVei5MBQGpimQaIQ2d/ULsO9sWEiCQNhSa062CfOvuDliYDgNRFjADzFIkaNbYHZGb52tSxxvaAItHZzgAAzIUYAeapZ2Bsxh2R8xlJwdCEegbGkjcUACwBxAgwT8PhuUMkkfMAAJOIEWCe8rLdC3oeAGASMQLMU4UvV16PW3O9gNehyVfVVPhykzkWAKQ8YgSYJ5fToYZavyTNCJKpzxtq/ew3AgBxIkaAONSUeNWyrVQFntilmAKPWy3bStlnBAASwKZnQJxqSrza5C9gB1YAWCDECJAAl9OhytUrbI8BAEsCyzQAAMAqYgQAAFhFjAAAAKuIEQAAYBUxAgAArCJGAACAVcQIAACwihgBAABWESMAAMCqlNiB1RgjSRofH7c8CQAAmK+pv9tTf8fnkhIxEg6HJUlFRUWWJwEAAPEKh8PyeDxzft1hvitXLgLRaFRnzpxRdna2HA7ejMyG8fFxFRUV6eTJk8rJybE9Dr4F1yp1cK1SB9cqMcYYhcNhXXHFFXI6535mSErcGXE6nS
osLLQ9BiTl5OTwDzFFcK1SB9cqdXCt4vdtd0Sm8ARWAABgFTECAACsIkYwL1lZWWpoaFBWVpbtUfAduFapg2uVOrhWiyslnsAKAACWLu6MAAAAq4gRAABgFTECAACsIkYAAIBVxAimNTc3y+fzye12q6ysTN3d3XOe+/rrr2vTpk26/PLLlZOTo8rKSr311ltJnDa9xXOtzvfee+8pIyND11133eIOiGnxXquzZ89q//79Ki4uVlZWllavXq0XXnghSdOmt3ivVWtrq6699lpdeuml8nq9uvPOOzU6OpqkaZcYAxhj/va3v5lLLrnE/OUvfzGBQMDcf//9Zvny5ea///3vrOfff//95tFHHzU9PT3m448/Nvv27TOXXHKJ6evrS/Lk6SfeazXls88+M1deeaWprq421157bXKGTXOJXKtbbrnFrF+/3nR1dZmBgQHzr3/9y7z33ntJnDo9xXuturu7jdPpNE8++aQ5ceKE6e7uNldffbXZsmVLkidfGogRGGOMqaioMPX19THHfvSjH5m9e/fO+3v4/X7T2Ni40KPhGxK9VnV1deYPf/iDaWhoIEaSJN5r9Y9//MN4PB4zOjqajPFwnniv1R//+Edz5ZVXxhx76qmnTGFh4aLNuJSxTAOdO3dOvb29qq6ujjleXV2tI0eOzOt7RKNRhcNh5ebmLsaI+J9Er9WLL76o48ePq6GhYbFHxP8kcq3efPNNlZeX67HHHtOqVau0du1aPfDAA/rqq6+SMXLaSuRaVVVV6dSpU+ro6JAxRp9++qlee+013XzzzckYeclJiTfKw+IaGRlRJBJRfn5+zPH8/HwNDQ3N63s8/vjj+uKLL7R169bFGBH/k8i1+uSTT7R37151d3crI4N/8smSyLU6ceKEDh8+LLfbrTfeeEMjIyO6++67NTY2xvNGFlEi16qqqkqtra2qq6vTxMSEvv76a91yyy16+umnkzHyksOdEUxzOBwxnxtjZhybzaFDh/Twww+rra1NeXl5izUezjPfaxWJRHTbbbepsbFRa9euTdZ4OE88/66i0agcDodaW1tVUVGhm266SU888YReeukl7o4kQTzXKhAI6L777tNDDz2k3t5edXZ2amBgQPX19ckYdcnhv0nQypUr5XK5ZvwPYHh4eMb/FL6pra1NO3fu1Kuvvqobb7xxMceE4r9W4XBY77//vo4ePap7771X0uQfPGOMMjIy9Pbbb+uGG25IyuzpJpF/V16vV6tWrYp5y/V169bJGKNTp05pzZo1izpzukrkWjU1NWnDhg168MEHJUnXXHONli9fro0bN+qRRx6R1+td9LmXEu6MQJmZmSorK1NXV1fM8a6uLlVVVc35uEOHDumOO+7QK6+8wjppksR7rXJycvTBBx/o2LFj0x/19fW66qqrdOzYMa1fvz5Zo6edRP5dbdiwQWfOnNHnn38+fezjjz+W0+lUYWHhos6bzhK5Vl9++aWcztg/oS6XS9LkHRXEyd5zZ3ExmXpZ2/PPP28CgYDZvXu3Wb58ufnPf/5jjDFm7969Zvv27dPnv/LKKyYjI8M888wzJhgMTn989tlntn6FtBHvtfomXk2TPPFeq3A4bAoLC80vfvEL8+GHH5p33nnHrFmzxtx11122foW0Ee+1evHFF01GRoZpbm42x48fN4cPHzbl5eWmoqLC1q+Q0ogRTHvmmWdMcXGxyczMNKWlpeadd96Z/trtt99urr/++unPr7/+eiNpxsftt9+e/MHTUDzX6puIkeSK91p99NFH5sYbbzTLli0zhYWFZs+ePebLL79M8tTpKd5r9dRTTxm/32+WLVtmvF6v+eUvf2lOnTqV5KmXBocx3E8CAAD28JwRAABgFTECAACsIkYAAIBVxAgAALCKGAEAAFYRIwAAwCpiBAAAWEWMAAAAq4gRAABgFTECAACsIkYAAIBVxAgAALDq/wM44josBsaCygAAAABJRU5ErkJggg==", + 
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAioAAAGdCAYAAAA8F1jjAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAApxUlEQVR4nO3df3TUVX7/8dckITNIyXggkgwSY6SICaloJicxodTWH1nQwy7d9hhrDWqhNayKkWqXlB5jOJ6T7naXolsTZQU9FKQ5ilo5zbLOH7sYjC0lhJ5lY6uV2AScmCacnYmrSZbkfv+gmS/DJJjPkB+fmXk+zvmc49zcz8x7zj2eeXHv53M/DmOMEQAAgA0lzXQBAAAA4yGoAAAA2yKoAAAA2yKoAAAA2yKoAAAA2yKoAAAA2yKoAAAA2yKoAAAA20qZ6QImYmRkRJ999pnmzp0rh8Mx0+UAAIAJMMaov79fCxcuVFJSdHMjMRFUPvvsM2VlZc10GQAAIApdXV1atGhRVOfGRFCZO3eupPNfNC0tbYarAQAAExEMBpWVlRX6HY9GTASV0eWetLQ0ggoAADHmci7b4GJaAABgWwQVAABgWwQVAABgWwQVAABgWwQVAABgWwQVAABgWwQVAABgWwQVAABgWzGx4Rtg1fCI0dGOs+rpH9CCuS4V5cxTchLPiQKAWENQQdw5dNKv2oPt8gcGQm0et0s1a/K0Kt8zg5UBAKxi6Qdx5dBJvzbuPR4WUiSpOzCgjXuP69BJ/wxVBgCIBkEFcWN4xKj2YLvMGH8bbas92K7hkbF6AADsiKCCuHG042zETMqFjCR/YEBHO85OX1EAgMtCUEHc6OkfP6RE0w8AMPMIKogbC+a6JrUfAGDmEVQQN4py5snjdmm8m5AdOn/3T1HOvOksCwBwGQgqiBvJSQ7VrMmTpIiwMvq6Zk0e+6kAQAwhqCCurMr3qOH+AmW6w5d3Mt0uNdxfwD4qABBj2PANcWdVvkd35mWyMy0AxAGCCuJScpJDJYvnz3QZAIDLxNIPAACwLYIKAACwLYIKAACwLYIKAACwLYIKAACwLYIKAACwLYIKAACwLYIKAACwLYIKAACwLYIKAACwraiCSn19vXJycuRyueT1etXc3HzJ/vv27dPy5ct1xRVXyOPx6KGHHlJfX19UBQMAgMRhOag0NjaqqqpKW7duVVtbm1auXKnVq1ers7NzzP5HjhzRunXrtH79ev3yl7/U66+/rn//93/Xhg0bLrt4AAAQ3ywHle3bt2v9+vXasGGDcnNztWPHDmVlZamhoWHM/v/6r/+qa6+9Vps2bVJOTo5+93d/Vw8//LCOHTt22cUDAID4ZimoDA0NqbW1VWVlZWHtZWVlamlpGfOc0tJSnT59Wk1NTTLG6PPPP9cbb7yhu+++e9zPGRwcVDAYDDsAAEDisRRUent7NTw8rIyMjLD2jIwMdXd3j3lOaWmp9u3bp/LycqWmpiozM1NXXnmlfvSjH437OXV1dXK73aEjKyvLSpkAACBORHUxrcPhCHttjIloG9Xe3q5Nmzbp6aefVmtrqw4dOqSOjg5VVlaO+/7V1dUKBAKho6urK5oyAQDAJQyPGH3wSZ/++cQZffBJn4ZHzEyXFCHFSuf09HQlJydHzJ709PREzLKMqqur04oVK/TUU09Jkm688UbNmTNHK1eu1LPPPiuPxxNxjtPplNPptFIaAACw4NBJv2oPtssfGAi1edwu1azJ06r8yN/mmWJpRiU1NVVer1c+ny+s3efzqbS0dMxzvvzySyUlhX9McnKypPMzMQAAYHodOunXxr3Hw0KKJHUHBrRx73EdOumfocoiWV762bx5s15++WXt3r1bH374oZ544gl1dnaGlnKqq6u1bt26UP81a9bozTffVENDg06dOqX3339fmzZtUlFRkRYuXDh53wQAAHyt4RGj2oPtGmuqYLSt9mC7bZaBLC39SFJ5ebn6+vq0bd
s2+f1+5efnq6mpSdnZ2ZIkv98ftqfKgw8+qP7+fv3DP/yD/vIv/1JXXnmlbrvtNn3ve9+bvG8BAAAm5GjH2YiZlAsZSf7AgI52nFXJ4vnTV9g4HCYG1l+CwaDcbrcCgYDS0tJmuhwAAGLWP584o8f/6cTX9nvu3pv0rZuuvqzPmozfb571AwBAAlkw1zWp/aYaQQUAgARSlDNPHrdLY28qIjl0/u6fopx501nWuAgqAAAkkOQkh2rW5ElSRFgZfV2zJk/JSeNFmelFUAEAIMGsyveo4f4CZbrDl3cy3S413F9gq31ULN/1AwAAYt+qfI/uzMvU0Y6z6ukf0IK555d77DKTMoqgAgBAgkpOctjiFuRLYekHAADYFkEFAADYFkEFAADYFkEFAADYFkEFAADYFkEFAADYFkEFAADYFkEFAADYFkEFAADYFkEFAADYFkEFAADYFkEFAADYFkEFAADYFkEFAADYFkEFAADYVspMFwAA0RgeMTracVY9/QNaMNelopx5Sk5yzHRZACYZQQVAzDl00q/ag+3yBwZCbR63SzVr8rQq3zODlQGYbCz9AIgph076tXHv8bCQIkndgQFt3Htch076Z6gyAFOBoAIgZgyPGNUebJcZ42+jbbUH2zU8MlYPALGIoAIgZhztOBsxk3IhI8kfGNDRjrPTVxSAKUVQARAzevrHDynR9ANgfwQVADFjwVzXpPYDYH8EFQAxoyhnnjxul8a7Cdmh83f/FOXMm86yAEwhggqAmJGc5FDNmjxJiggro69r1uSxnwoQR6IKKvX19crJyZHL5ZLX61Vzc/O4fR988EE5HI6IY9myZVEXDSBxrcr3qOH+AmW6w5d3Mt0uNdxfwD4qQJxxGGMs3cfX2NioiooK1dfXa8WKFXrppZf08ssvq729Xddcc01E/0AgoK+++ir0+ty5c1q+fLkee+wxPfPMMxP6zGAwKLfbrUAgoLS0NCvlAohT7EwL2N9k/H5bDirFxcUqKChQQ0NDqC03N1dr165VXV3d157/9ttv69vf/rY6OjqUnZ09oc8kqAAAEHsm4/fb0tLP0NCQWltbVVZWFtZeVlamlpaWCb3Hrl27dMcdd1wypAwODioYDIYdAAAg8VgKKr29vRoeHlZGRkZYe0ZGhrq7u7/2fL/fr5/85CfasGHDJfvV1dXJ7XaHjqysLCtlAgCAOBHVxbQOR/g6sDEmom0sr776qq688kqtXbv2kv2qq6sVCARCR1dXVzRlAgCAGGfp6cnp6elKTk6OmD3p6emJmGW5mDFGu3fvVkVFhVJTUy/Z1+l0yul0WikNAADEIUszKqmpqfJ6vfL5fGHtPp9PpaWllzz38OHD+u///m+tX7/eepUAACAhWZpRkaTNmzeroqJChYWFKikp0c6dO9XZ2anKykpJ55dtzpw5oz179oSdt2vXLhUXFys/P39yKgcAAHHPclApLy9XX1+ftm3bJr/fr/z8fDU1NYXu4vH7/ers7Aw7JxAI6MCBA3ruuecmp2oAAJAQLO+jMhPYRwUAgNgz7fuoAAAATCeCCgAAsC2CCgAAsC2CCgAAsC2CCgAAsC2CCgAAsC2CCgAAsC2CCgAAsC2CCgAAsC2CCgAAsC2CCgAAsC2CCgAAsC2CCgAAsK2UmS5gpgyPGB3tOKue/gEtmOtSUc48JSc5ZrosAABwgYQMKodO+lV7sF3+wECozeN2qWZNnlble2awMgAAcKGEW/o5dNKvjXuPh4UUSeoODGjj3uM6dNI/Q5UBAICLJVRQGR4xqj3YLjPG30bbag+2a3hkrB4AAGC6JVRQOdpxNmIm5UJGkj8woKMdZ6evKAAAMK6ECio9/eOHlGj6AQCAqZVQQWXBXNek9gMAAFMroYJKUc48edwujXcTskPn7/4pypk3nWUBAIBxJFRQSU5yqGZNniRFhJXR1zVr8thPBQAAm0iooCJJq/I9ari/QJnu8OWdTLdLDfcXsI8KAAA2kpAbvq3K9+
jOvEx2pgUAwOYSMqhI55eBShbPn+kyAADAJSTc0g8AAIgdBBUAAGBbBBUAAGBbBBUAAGBbBBUAAGBbUQWV+vp65eTkyOVyyev1qrm5+ZL9BwcHtXXrVmVnZ8vpdGrx4sXavXt3VAUDAIDEYfn25MbGRlVVVam+vl4rVqzQSy+9pNWrV6u9vV3XXHPNmOfcc889+vzzz7Vr1y799m//tnp6enTu3LnLLh4AAMQ3hzHGWDmhuLhYBQUFamhoCLXl5uZq7dq1qquri+h/6NAh3XvvvTp16pTmzYvuGTrBYFBut1uBQEBpaWlRvQcAAJhek/H7bWnpZ2hoSK2trSorKwtrLysrU0tLy5jnvPPOOyosLNT3v/99XX311br++uv15JNP6quvvhr3cwYHBxUMBsMOAACQeCwt/fT29mp4eFgZGRlh7RkZGeru7h7znFOnTunIkSNyuVx666231Nvbq+985zs6e/bsuNep1NXVqba21kppAAAgDkV1Ma3DEf5MHGNMRNuokZERORwO7du3T0VFRbrrrru0fft2vfrqq+POqlRXVysQCISOrq6uaMoEAAAxztKMSnp6upKTkyNmT3p6eiJmWUZ5PB5dffXVcrvdobbc3FwZY3T69GktWbIk4hyn0ymn02mlNAAAEIcszaikpqbK6/XK5/OFtft8PpWWlo55zooVK/TZZ5/piy++CLV99NFHSkpK0qJFi6IoGQAAJArLSz+bN2/Wyy+/rN27d+vDDz/UE088oc7OTlVWVko6v2yzbt26UP/77rtP8+fP10MPPaT29na99957euqpp/Rnf/Znmj179uR9EwAAEHcs76NSXl6uvr4+bdu2TX6/X/n5+WpqalJ2drYkye/3q7OzM9T/t37rt+Tz+fTYY4+psLBQ8+fP1z333KNnn3128r4FAACIS5b3UZkJ7KMCAEDsmfZ9VAAAAKYTQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANgWQQUAANhWVEGlvr5eOTk5crlc8nq9am5uHrfvz3/+czkcjojjP//zP6MuGgAAJAbLQaWxsVFVVVXaunWr2tratHLlSq1evVqdnZ2XPO+//uu/5Pf7Q8eSJUuiLhoAACQGy0Fl+/btWr9+vTZs2KDc3Fzt2LFDWVlZamhouOR5CxYsUGZmZuhITk6OumgAAJAYLAWVoaEhtba2qqysLKy9rKxMLS0tlzz35ptvlsfj0e23366f/exnl+w7ODioYDAYdgAAgMRjKaj09vZqeHhYGRkZYe0ZGRnq7u4e8xyPx6OdO3fqwIEDevPNN7V06VLdfvvteu+998b9nLq6Ornd7tCRlZVlpUwAABAnUqI5yeFwhL02xkS0jVq6dKmWLl0ael1SUqKuri794Ac/0O/93u+NeU51dbU2b94ceh0MBgkrAAAkIEszKunp6UpOTo6YPenp6YmYZbmUW265RR9//PG4f3c6nUpLSws7AABA4rEUVFJTU+X1euXz+cLafT6fSktLJ/w+bW1t8ng8Vj4aAAAkIMtLP5s3b1ZFRYUKCwtVUlKinTt3qrOzU5WVlZLOL9ucOXNGe/bskSTt2LFD1157rZYtW6ahoSHt3btXBw4c0IEDByb3mwAAgLhjOaiUl5err69P27Ztk9/vV35+vpqampSdnS1J8vv9YXuqDA0N6cknn9SZM2c0e/ZsLVu2TP/yL/+iu+66a/K+BQAAiEsOY4yZ6SK+TjAYlNvtViAQ4HoVAABixGT8fvOsHwAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQ
AAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFsEFQAAYFtRBZX6+nrl5OTI5XLJ6/Wqubl5Que9//77SklJ0U033RTNxwIAgARjOag0NjaqqqpKW7duVVtbm1auXKnVq1ers7PzkucFAgGtW7dOt99+e9TFAgCAxOIwxhgrJxQXF6ugoEANDQ2httzcXK1du1Z1dXXjnnfvvfdqyZIlSk5O1ttvv60TJ05M+DODwaDcbrcCgYDS0tKslAsAAGbIZPx+W5pRGRoaUmtrq8rKysLay8rK1NLSMu55r7zyij755BPV1NRM6HMGBwcVDAbDDgAAkHgsBZXe3l4NDw8rIyMjrD0jI0Pd3d1jnvPxxx9ry5Yt2rdvn1JSUib0OXV1dXK73aEjKyvLSpkAACBORHUxrcPhCHttjIlok6Th4WHdd999qq2t1fXXXz/h96+urlYgEAgdXV1d0ZQJAABi3MSmOP5Penq6kpOTI2ZPenp6ImZZJKm/v1/Hjh1TW1ubHn30UUnSyMiIjDFKSUnRu+++q9tuuy3iPKfTKafTaaU0AAAQhyzNqKSmpsrr9crn84W1+3w+lZaWRvRPS0vTL37xC504cSJ0VFZWaunSpTpx4oSKi4svr3oAABDXLM2oSNLmzZtVUVGhwsJClZSUaOfOners7FRlZaWk88s2Z86c0Z49e5SUlKT8/Pyw8xcsWCCXyxXRDgAAcDHLQaW8vFx9fX3atm2b/H6/8vPz1dTUpOzsbEmS3+//2j1VAAAAJsLyPiozgX1UAACIPdO+jwoAAMB0IqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbIqgAAADbiiqo1NfXKycnRy6XS16vV83NzeP2PXLkiFasWKH58+dr9uzZuuGGG/T3f//3URcMAAASR4rVExobG1VVVaX6+nqtWLFCL730klavXq329nZdc801Ef3nzJmjRx99VDfeeKPmzJmjI0eO6OGHH9acOXP0F3/xF5PyJQAAQHxyGGOMlROKi4tVUFCghoaGUFtubq7Wrl2rurq6Cb3Ht7/9bc2ZM0f/+I//OKH+wWBQbrdbgUBAaWlpVsoFAAAzZDJ+vy0t/QwNDam1tVVlZWVh7WVlZWppaZnQe7S1tamlpUW33nrruH0GBwcVDAbDDgAAkHgsBZXe3l4NDw8rIyMjrD0jI0Pd3d2XPHfRokVyOp0qLCzUI488og0bNozbt66uTm63O3RkZWVZKRMAAMSJqC6mdTgcYa+NMRFtF2tubtaxY8f04osvaseOHdq/f/+4faurqxUIBEJHV1dXNGUCAIAYZ+li2vT0dCUnJ0fMnvT09ETMslwsJydHkvQ7v/M7+vzzz/XMM8/oT/7kT8bs63Q65XQ6rZQGAADikKUZldTUVHm9Xvl8vrB2n8+n0tLSCb+PMUaDg4NWPhoAACQgy7cnb968WRUVFSosLFRJSYl27typzs5OVVZWSjq/bHPmzBnt2bNHkvTCCy/ommuu0Q033CDp/L4qP/jBD/TYY49N4tcAAADxyHJQKS8vV19fn7Zt2ya/36/8/Hw1NTUpOztbkuT3+9XZ2RnqPzIyourqanV0dCglJUWLFy/W3/7t3+rhhx+evG8BAADikuV9VGYC+6gAABB7pn0fFQAAgOlEUAEAALZFUAEAALZFUAEAALZFUA
EAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALZFUAEAALaVMtMFwN6GR4yOdpxVT/+AFsx1qShnnpKTHDNdFgAgQRBUMK5DJ/2qPdguf2Ag1OZxu1SzJk+r8j0zWBkAIFGw9IMxHTrp18a9x8NCiiR1Bwa0ce9xHTrpn6HKAACJhKCCCMMjRrUH22XG+NtoW+3Bdg2PjNUDAIDJQ1BBhKMdZyNmUi5kJPkDAzracXb6igIAJCSCCiL09I8fUqLpBwBAtAgqiLBgrmtS+wEAEC2CCiIU5cyTx+3SeDchO3T+7p+inHnTWRYAIAFFFVTq6+uVk5Mjl8slr9er5ubmcfu++eabuvPOO3XVVVcpLS1NJSUl+ulPfxp1wZh6yUkO1azJk6SIsDL6umZNHvupAACmnOWg0tjYqKqqKm3dulVtbW1auXKlVq9erc7OzjH7v/fee7rzzjvV1NSk1tZW/cEf/IHWrFmjtra2yy4eU2dVvkcN9xco0x2+vJPpdqnh/gL2UQEATAuHMcbSPabFxcUqKChQQ0NDqC03N1dr165VXV3dhN5j2bJlKi8v19NPPz2h/sFgUG63W4FAQGlpaVbKxWViZ1oAQLQm4/fb0s60Q0NDam1t1ZYtW8Lay8rK1NLSMqH3GBkZUX9/v+bNG//6hsHBQQ0ODoZeB4NBK2ViEiUnOVSyeP5MlwEASFCWln56e3s1PDysjIyMsPaMjAx1d3dP6D1++MMf6te//rXuueeecfvU1dXJ7XaHjqysLCtlAgCAOBHVxbQOR/jUvzEmom0s+/fv1zPPPKPGxkYtWLBg3H7V1dUKBAKho6urK5oyAQBAjLO09JOenq7k5OSI2ZOenp6IWZaLNTY2av369Xr99dd1xx13XLKv0+mU0+m0UhoAAIhDlmZUUlNT5fV65fP5wtp9Pp9KS0vHPW///v168MEH9dprr+nuu++OrlIAAJBwLM2oSNLmzZtVUVGhwsJClZSUaOfOners7FRlZaWk88s2Z86c0Z49eySdDynr1q3Tc889p1tuuSU0GzN79my53e5J/CoAACDeWA4q5eXl6uvr07Zt2+T3+5Wfn6+mpiZlZ2dLkvx+f9ieKi+99JLOnTunRx55RI888kio/YEHHtCrr756+d8AAADELcv7qMwE9lEBACD2TMbvN8/6AQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtkVQAQAAtpUy0wUAAGAnwyNGRzvOqqd/QAvmulSUM0/JSY6ZLithEVQAAPg/h076VXuwXf7AQKjN43apZk2eVuV7ZrCyxMXSDwAAOh9SNu49HhZSJKk7MKCNe4/r0En/DFWW2AgqAICENzxiVHuwXWaMv4221R5s1/DIWD0wlQgqAICEd7TjbMRMyoWMJH9gQEc7zk5fUZBEUAEAQD3944eUaPph8hBUAAAJb8Fc16T2w+QhqAAAEl5Rzjx53C6NdxOyQ+fv/inKmTedZUEEFQAAlJzkUM2aPEmKCCujr2vW5LGfygwgqAAAIGlVvkcN9xco0x2+vJPpdqnh/gL2UZkhUQWV+vp65eTkyOVyyev1qrm5edy+fr9f9913n5YuXaqkpCRVVVVFWysAAFNqVb5HR757m/b/+S167t6btP/Pb9GR795GSJlBloNKY2OjqqqqtHXrVrW1tWnlypVavXq1Ojs7x+w/ODioq666Slu3btXy5csvu2AAAKZScpJDJYvn61s3Xa2SxfNZ7plhDmOMpd1riouLVVBQoIaGhlBbbm6u1q5dq7q6ukue+/u///u66a
abtGPHDktFBoNBud1uBQIBpaWlWToXAADMjMn4/bY0ozI0NKTW1laVlZWFtZeVlamlpSWqAsYyODioYDAYdgAAgMRjKaj09vZqeHhYGRkZYe0ZGRnq7u6etKLq6urkdrtDR1ZW1qS9NwAAiB1RXUzrcISv1xljItouR3V1tQKBQOjo6uqatPcGAACxI8VK5/T0dCUnJ0fMnvT09ETMslwOp9Mpp9M5ae8HAABik6UZldTUVHm9Xvl8vrB2n8+n0tLSSS0MAADA0oyKJG3evFkVFRUqLCxUSUmJdu7cqc7OTlVWVko6v2xz5swZ7dmzJ3TOiRMnJElffPGF/vd//1cnTpxQamqq8vLyJudbAACAuGQ5qJSXl6uvr0/btm2T3+9Xfn6+mpqalJ2dLen8Bm8X76ly8803h/67tbVVr732mrKzs/Xpp59eXvVIWMMjRkc7zqqnf0AL5p5//gZ7HQBA/LG8j8pMYB8VXOjQSb9qD7bLH/j/j1v3uF2qWZPH7pEAYCPTvo8KMNMOnfRr497jYSFFkroDA9q497gOnfTPUGUAgKlAUEHMGB4xqj3YrrGmAEfbag+2a3jE9pOEAIAJIqggZhztOBsxk3IhI8kfGNDRjrPTVxQAYEoRVBAzevrHDynR9AMA2B9BBTFjwVzXpPYDANgfQQUxoyhnnjxul8a7Cdmh83f/FOXMm86yAABTiKCCmJGc5FDNmvObBF4cVkZf16zJYz8VAIgjBBXElFX5HjXcX6BMd/jyTqbbpYb7C9hHBQDijOWdaYGZtirfozvzMtmZFgASAEEFMSk5yaGSxfNnugwAwBRj6QcAANgWMyoI4UF/AAC7IahAEg/6AwDYE0s/4EF/AADbIqiMY3jE6INP+vTPJ87og0/64vZBdzzoDwBgZyz9jCGRlkGsPOiPu2wAANONGZWLJNoyCA/6A+JfoswQIz4xo3KBr1sGcej8MsideZlxczcMD/oD4lsizRAjPjGjcgEryyDxggf9AfEr0WaIEZ8IKhdIxGUQHvQHxCculEe8IKhcIFGXQXjQHxB/EnGGGPGJa1QuMLoM0h0YGPNfIQ6d//GOx2UQHvQHxJdEnCFGfCKoXGB0GWTj3uNySGFhJRGWQXjQHxA/EnWGGPGHpZ+LsAwCIB5woTziBTMqY2AZBECsS/QZYsQPhzHG9pd8B4NBud1uBQIBpaWlzXQ5ABAz2EcFM2kyfr+ZUQGAOMYMMWIdQQUA4hwXyiOWcTEtAACwLYIKAACwraiCSn19vXJycuRyueT1etXc3HzJ/ocPH5bX65XL5dJ1112nF198MapiAQBAYrEcVBobG1VVVaWtW7eqra1NK1eu1OrVq9XZ2Tlm/46ODt11111auXKl2tra9Nd//dfatGmTDhw4cNnFAwCA+Gb59uTi4mIVFBSooaEh1Jabm6u1a9eqrq4uov93v/tdvfPOO/rwww9DbZWVlfqP//gPffDBBxP6TG5PBgAg9kzG77elGZWhoSG1traqrKwsrL2srEwtLS1jnvPBBx9E9P/GN76hY8eO6Te/+Y3FcgEAQCKxdHtyb2+vhoeHlZGREdaekZGh7u7uMc/p7u4es/+5c+fU29srjydyw6HBwUENDg6GXgeDQStlAgCAOBHVxbQOR/hGQcaYiLav6z9W+6i6ujq53e7QkZWVFU2ZAAAgxlkKKunp6UpOTo6YPenp6YmYNRmVmZk5Zv+UlBTNnz/2BkTV1dUKBAKho6ury0qZAAAgTlha+klNTZXX65XP59Mf/uEfhtp9Pp++9a1vjXlOSUmJDh48GNb27rvvqrCwULNmzRrzHKfTKafTGXo9OgPDEhAAALFj9Hf7sh4raCz6p3/6JzNr1iyza9cu097ebqqqqsycOXPMp59+aowxZsuWLaaioiLU/9SpU+aKK64wTzzxhGlvbze7du0ys2bNMm+88caEP7Orq8vo/MM/OTg4ODg4OGLs6Orqsho3Qiw/66e8vFx9fX3atm
2b/H6/8vPz1dTUpOzsbEmS3+8P21MlJydHTU1NeuKJJ/TCCy9o4cKFev755/VHf/RHE/7MhQsXqqurS3Pnzr3ktTCxKBgMKisrS11dXdx6HSMYs9jDmMUexiz2jDVmxhj19/dr4cKFUb+v5X1UMLnYIyb2MGaxhzGLPYxZ7JmqMeNZPwAAwLYIKgAAwLYIKjPM6XSqpqYm7C4n2BtjFnsYs9jDmMWeqRozrlEBAAC2xYwKAACwLYIKAACwLYIKAACwLYIKAACwLYLKNKivr1dOTo5cLpe8Xq+am5vH7fvmm2/qzjvv1FVXXaW0tDSVlJTopz/96TRWC8namF3o/fffV0pKim666aapLRARrI7Z4OCgtm7dquzsbDmdTi1evFi7d++epmohWR+zffv2afny5briiivk8Xj00EMPqa+vb5qqxXvvvac1a9Zo4cKFcjgcevvtt7/2nMOHD8vr9crlcum6667Tiy++aP2Do958HxMy+mykH//4x6a9vd08/vjjZs6cOeZ//ud/xuz/+OOPm+9973vm6NGj5qOPPjLV1dVm1qxZ5vjx49NceeKyOmajfvWrX5nrrrvOlJWVmeXLl09PsTDGRDdm3/zmN01xcbHx+Xymo6PD/Nu//Zt5//33p7HqxGZ1zJqbm01SUpJ57rnnzKlTp0xzc7NZtmyZWbt27TRXnriamprM1q1bzYEDB4wk89Zbb12y/+iz/h5//HHT3t5ufvzjH1t+1p8xxhBUplhRUZGprKwMa7vhhhvMli1bJvweeXl5pra2drJLwziiHbPy8nLzN3/zN6ampoagMs2sjtlPfvIT43a7TV9f33SUhzFYHbO/+7u/M9ddd11Y2/PPP28WLVo0ZTVifBMJKn/1V39lbrjhhrC2hx9+2Nxyyy2WPoulnyk0NDSk1tZWlZWVhbWXlZWppaVlQu8xMjKi/v5+zZs3bypKxEWiHbNXXnlFn3zyiWpqaqa6RFwkmjF75513VFhYqO9///u6+uqrdf311+vJJ5/UV199NR0lJ7xoxqy0tFSnT59WU1OTjDH6/PPP9cYbb+juu++ejpIRhQ8++CBijL/xjW/o2LFj+s1vfjPh97H89GRMXG9vr4aHh5WRkRHWnpGRoe7u7gm9xw9/+EP9+te/1j333DMVJeIi0YzZxx9/rC1btqi5uVkpKfwvNd2iGbNTp07pyJEjcrlceuutt9Tb26vvfOc7Onv2LNepTINoxqy0tFT79u1TeXm5BgYGdO7cOX3zm9/Uj370o+koGVHo7u4ec4zPnTun3t5eeTyeCb0PMyrTwOFwhL02xkS0jWX//v165pln1NjYqAULFkxVeRjDRMdseHhY9913n2pra3X99ddPV3kYg5X/z0ZGRuRwOLRv3z4VFRXprrvu0vbt2/Xqq68yqzKNrIxZe3u7Nm3apKefflqtra06dOiQOjo6VFlZOR2lIkpjjfFY7ZfCP/+mUHp6upKTkyP+hdDT0xORMi/W2Nio9evX6/XXX9cdd9wxlWXiAlbHrL+/X8eOHVNbW5seffRRSed/BI0xSklJ0bvvvqvbbrttWmpPVNH8f+bxeHT11VfL7XaH2nJzc2WM0enTp7VkyZIprTnRRTNmdXV1WrFihZ566ilJ0o033qg5c+Zo5cqVevbZZyf8r3NMn8zMzDHHOCUlRfPnz5/w+zCjMoVSU1Pl9Xrl8/nC2n0+n0pLS8c9b//+/XrwwQf12muvsf46zayOWVpamn7xi1/oxIkToaOyslJLly7ViRMnVFxcPF2lJ6xo/j9bsWKFPvvsM33xxRehto8++khJSUlatGjRlNaL6Mbsyy+/VFJS+E9WcnKypP//r3TYS0lJScQYv/vuuyosLNSsWbMm/kaWLr2FZaO34O3atcu0t7ebqqoqM2fOHPPpp58aY4zZsmWLqaioCPV/7bXXTEpKinnhhReM3+8PHb/61a9m6iskHKtjdjHu+pl+Vsesv7/fLFq0yPzxH/+x+e
Uvf2kOHz5slixZYjZs2DBTXyHhWB2zV155xaSkpJj6+nrzySefmCNHjpjCwkJTVFQ0U18h4fT395u2tjbT1tZmJJnt27ebtra20C3lF4/Z6O3JTzzxhGlvbze7du3i9mS7euGFF0x2drZJTU01BQUF5vDhw6G/PfDAA+bWW28Nvb711luNpIjjgQcemP7CE5iVMbsYQWVmWB2zDz/80Nxxxx1m9uzZZtGiRWbz5s3myy+/nOaqE5vVMXv++edNXl6emT17tvF4POZP//RPzenTp6e56sT1s5/97JK/T2ON2c9//nNz8803m9TUVHPttdeahoYGy5/rMIY5MwAAYE9cowIAAGyLoAIAAGyLoAIAAGyLoAIAAGyLoAIAAGyLoAIAAGyLoAIAAGyLoAIAAGyLoAIAAGyLoAIAAGyLoAIAAGyLoAIAAGzr/wFy8VSvF76pGgAAAABJRU5ErkJggg==", "text/plain": [ "
" ] @@ -1581,7 +1581,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 41, @@ -1618,7 +1618,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "378652557fef4e3e945db1f49236d114", + "model_id": "e8ae157d636644b183f97332649d5076", "version_major": 2, "version_minor": 0 }, @@ -1637,7 +1637,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "09d110c4b0224a4586f39121ab594118", + "model_id": "e564a4c3ac734deab79b0f1fb25e5893", "version_major": 2, "version_minor": 0 }, @@ -1651,7 +1651,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 42, @@ -1911,7 +1911,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 43, @@ -3100,7 +3100,7 @@ "\n" ], "text/plain": [ - "" + "" ] }, "execution_count": 49, @@ -3128,7 +3128,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "a11221f6a85c4ebd8e05052f719e0236", + "model_id": "2e9cbb2070c345a1add65ee3cc6ab714", "version_major": 2, "version_minor": 0 }, @@ -3149,7 +3149,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "82e56419b1594fecbf9c19a2745709c6", + "model_id": "89cb51049ead4bd8a6b3acccdca821ab", "version_major": 2, "version_minor": 0 }, @@ -3197,7 +3197,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "1000698e2eb147f28c59cd4e2382348c", + "model_id": "485138d8b7d5459fbd7da596eb120f91", "version_major": 2, "version_minor": 0 }, @@ -3218,7 +3218,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "c6f6b36355da488d801dfeb7b25b8260", + "model_id": "e0c24f219db0451091dabaa3b7d3bd2a", "version_major": 2, "version_minor": 0 }, @@ -3316,7 +3316,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "b160a5c60936418988d5cc94c3e63640", + "model_id": "c2945dbf0d7c497494b4680c1d6d955b", "version_major": 2, "version_minor": 0 }, @@ -3337,7 +3337,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": 
"e72528ca94c84f0f815a3a428bd1395b", + "model_id": "1711894637154100a71d43404d42a464", "version_major": 2, "version_minor": 0 }, @@ -3386,7 +3386,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "87f030b7c77f495ca364f1123d2cd1c7", + "model_id": "843ec24931b9442b99bf6755c70203c4", "version_major": 2, "version_minor": 0 }, @@ -3407,7 +3407,7 @@ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "9f8f038b79a4437aa12965f2b9364e3f", + "model_id": "6d3d0e83271f4ec78445d2a8c0a545ab", "version_major": 2, "version_minor": 0 }, @@ -3473,7 +3473,7 @@ "output_type": "stream", "text": [ "None 1\n", - " NOT_DATA\n" + " NOT_DATA\n" ] } ], @@ -3555,7 +3555,7 @@ "output_type": "stream", "text": [ "None 1\n", - " NOT_DATA\n", + " NOT_DATA\n", "Finally 5\n", "b (Add) output single-value: 6\n" ] @@ -3617,7 +3617,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "6.011244249995798\n" + "6.02082441499806\n" ] } ], @@ -3649,7 +3649,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "2.496095469017746\n" + "2.4505110399986734\n" ] } ], @@ -3715,7 +3715,7 @@ "source": [ "## Saving and loading\n", "\n", - "Graphs can be saved and loaded on request -- either by manually invoking the `.save()` method, or by setting the `save_after_run` attribute to `True` (on the object or at instantiation by kwarg). This creates a save file (currently using HDF5 as a format) in the parent-most node's working directory.\n", + "For python >= 3.11, graphs can be saved and loaded on request -- either by manually invoking the `.save()` method, or by setting the `save_after_run` attribute to `True` (on the object or at instantiation by kwarg). This creates a save file (currently using HDF5 as a format) in the parent-most node's working directory.\n", "\n", "Subsequently instantiating a node with the same name in the same place will attempt to reload the saved graph automatically. 
\n", "\n", @@ -3741,33 +3741,33 @@ { "cell_type": "code", "execution_count": 62, - "id": "ffd741a3-b086-4ed0-9a62-76143a3705b2", + "id": "c8196054-aff3-4d39-a872-b428d329dac9", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'out__user_input': 42}" - ] - }, - "execution_count": 62, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ - "wf = Workflow(\"save_demo\")\n", - "wf.inp = wf.create.standard.UserInput(42)\n", - "wf.middle = 2 * wf.inp\n", - "wf.end = wf.middle - 42\n", - "wf.out = wf.create.standard.UserInput(wf.end, save_after_run=True)\n", - "wf()\n", - "# wf.save() # Not needed, since `wf.out` saves after running" + "import sys" ] }, { "cell_type": "code", "execution_count": 63, + "id": "ffd741a3-b086-4ed0-9a62-76143a3705b2", + "metadata": {}, + "outputs": [], + "source": [ + "if sys.version_info >= (3, 11):\n", + " wf = Workflow(\"save_demo\")\n", + " wf.inp = wf.create.standard.UserInput(42)\n", + " wf.middle = 2 * wf.inp\n", + " wf.end = wf.middle - 42\n", + " wf.out = wf.create.standard.UserInput(wf.end, save_after_run=True)\n", + " wf()\n", + " # wf.save() # Not needed, since `wf.out` saves after running" + ] + }, + { + "cell_type": "code", + "execution_count": 64, "id": "3a22c622-f8c1-449b-a910-c52beb6a09c3", "metadata": {}, "outputs": [ @@ -3775,24 +3775,15 @@ "name": "stderr", "output_type": "stream", "text": [ - "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:370: UserWarning: A saved file was found for the node save_demo -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", + "/Users/huber/work/pyiron/pyiron_workflow/pyiron_workflow/node.py:372: UserWarning: A saved file was found for the node save_demo -- attempting to load it...(To delete the saved file instead, use `overwrite_save=True`)\n", " warnings.warn(\n" ] - }, - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 63, - "metadata": {}, - "output_type": 
"execute_result" } ], "source": [ - "reloaded = Workflow(\"save_demo\")\n", - "reloaded.out.value == wf.out.value" + "if sys.version_info >= (3, 11):\n", + " reloaded = Workflow(\"save_demo\")\n", + " reloaded.out.value == wf.out.value" ] }, { @@ -3807,12 +3798,13 @@ }, { "cell_type": "code", - "execution_count": 64, + "execution_count": 65, "id": "0999d3e8-3a5a-451d-8667-a01dae7c1193", "metadata": {}, "outputs": [], "source": [ - "reloaded.storage.delete()" + "if sys.version_info >= (3, 11):\n", + " reloaded.storage.delete()" ] }, { @@ -3838,7 +3830,7 @@ }, { "cell_type": "code", - "execution_count": 65, + "execution_count": 66, "id": "0b373764-b389-4c24-8086-f3d33a4f7fd7", "metadata": {}, "outputs": [ @@ -3852,7 +3844,7 @@ " 17.230249999999995]" ] }, - "execution_count": 65, + "execution_count": 66, "metadata": {}, "output_type": "execute_result" } @@ -3889,7 +3881,7 @@ }, { "cell_type": "code", - "execution_count": 66, + "execution_count": 67, "id": "0dd04b4c-e3e7-4072-ad34-58f2c1e4f596", "metadata": {}, "outputs": [ @@ -3948,7 +3940,7 @@ }, { "cell_type": "code", - "execution_count": 67, + "execution_count": 68, "id": "2dfb967b-41ac-4463-b606-3e315e617f2a", "metadata": {}, "outputs": [ @@ -3972,7 +3964,7 @@ }, { "cell_type": "code", - "execution_count": 68, + "execution_count": 69, "id": "2e87f858-b327-4f6b-9237-c8a557f29aeb", "metadata": {}, "outputs": [ @@ -3980,12 +3972,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "0.325 > 0.2\n", - "0.481 > 0.2\n", - "0.635 > 0.2\n", - "0.286 > 0.2\n", - "0.153 <= 0.2\n", - "Finally 0.153\n" + "0.834 > 0.2\n", + "0.186 <= 0.2\n", + "Finally 0.186\n" ] } ], From 3f0f8d938201262c9ed214b9d76a422ab8b8952e Mon Sep 17 00:00:00 2001 From: pyiron-runner Date: Sat, 17 Feb 2024 05:38:00 +0000 Subject: [PATCH 166/166] Format black --- pyiron_workflow/job.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pyiron_workflow/job.py b/pyiron_workflow/job.py index b0cce8fa..af33db62 100644 --- 
a/pyiron_workflow/job.py +++ b/pyiron_workflow/job.py @@ -140,9 +140,7 @@ def _save_node(self): def _load_node(self): here = os.getcwd() os.chdir(self.working_directory) - self._node = _import_class( - self.input._class_type - )( + self._node = _import_class(self.input._class_type)( label=self.input._label, storage_backend=self.input._storage_backend, )