From c4a3126c229fd31be67a0fafa9c104edb27e38db Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 10 Dec 2023 12:56:48 +0100 Subject: [PATCH 001/167] Sync commit for testing object proxy --- doc/source/examples/index.rst | 3 + doc/source/index.rst | 2 + hololinked/server/constants.py | 1 + hololinked/server/proxy_client.py | 138 +++++++++++------------ hololinked/server/remote_object.py | 1 + hololinked/server/utils.py | 12 ++ hololinked/server/zmq_message_brokers.py | 14 ++- 7 files changed, 95 insertions(+), 76 deletions(-) diff --git a/doc/source/examples/index.rst b/doc/source/examples/index.rst index fc990bf..dc57c5f 100644 --- a/doc/source/examples/index.rst +++ b/doc/source/examples/index.rst @@ -13,5 +13,8 @@ The code is hosted at the repository `hololinked-examples `_ to have an web-interface to interact with RemoteObjects (after you can run your example object) * hoppscotch or postman +GUI +--- + Some browser based client examples based on ReactJS & `react material UI `_ are hosted at `hololinked.dev `_ diff --git a/doc/source/index.rst b/doc/source/index.rst index 08ae350..b2484ab 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -55,3 +55,5 @@ Indices and tables .. note:: This project is under development and is an idealogical state. Please use it only for playtesting or exploring. 
+ +Documentation last built : |today| \ No newline at end of file diff --git a/hololinked/server/constants.py index d3f0ed1..90f7b23 100644 --- a/hololinked/server/constants.py +++ b/hololinked/server/constants.py @@ -14,6 +14,7 @@ # types FUNC = "FUNC" ATTRIBUTE = "ATTRIBUTE" +PARAMETER = "PARAMETER" IMAGE_STREAM = "IMAGE_STREAM" CALLABLE = "CALLABLE" FILE = "FILE" diff --git a/hololinked/server/proxy_client.py index d18560c..0f36d75 100644 --- a/hololinked/server/proxy_client.py +++ b/hololinked/server/proxy_client.py @@ -1,48 +1,36 @@ -from typing import Any, Callable, Any, Tuple, Type, Callable -from threading import get_ident import asyncio import typing +import logging from .zmq_message_brokers import SyncZMQClient -from .utils import current_datetime_ms_str -from .constants import SERIALIZABLE_WRAPPER_ASSIGNMENTS, FUNC +from .utils import current_datetime_ms_str, raise_local_exception +from .constants import PARAMETER, SERIALIZABLE_WRAPPER_ASSIGNMENTS, FUNC -def addMethod(cls : object, method : Callable, func_info : Tuple[Any] ): - for index, dunder in enumerate(SERIALIZABLE_WRAPPER_ASSIGNMENTS, 2): - if dunder == '__qualname__': - func_infor = '{}.{}'.format(cls.__class__.__name__, func_info[index].split('.')[1]) - else: - func_infor = func_info[index] - setattr(method, dunder, func_infor) - cls.__setattr__(method.__name__, method) - - -class ProxyClient: +class ObjectProxy: __own_attrs__ = frozenset([ '_client', '_server_methods', '_server_attrs', '_server_oneway', '_max_retries', - '_timeout', '_owner_thread', '_get_meta_data', 'instance_name', '__annotations__' + '_timeout', '_owner_thread', '_load_remote_object', 'instance_name', '__annotations__' ]) - def __init__(self, instance_name : str, **kwargs) -> None: + def __init__(self, instance_name : str, timeout : float = 3, max_retries = 1, **kwargs) -> None: self.instance_name = instance_name + self.logger = 
logging.Logger() # compose ZMQ client in Proxy client to give the idea that all sending and receiving is actually # done by the ZMQ client and not by the Proxy client directly self._client = SyncZMQClient(instance_name, instance_name+current_datetime_ms_str(), **kwargs) + self._max_retries = kwargs.get("max_retires", 1) + self._timeout = timeout + self._owner_thread = threading.get_ident() # the thread that owns this proxy + self._load_remote_object() - self._server_methods = set() # all methods of the remote object, gotten from meta-data - self._server_attrs = set() # attributes of the remote object, gotten from meta-data - self._server_oneway = set() # oneway-methods of the remote object, gotten from meta-data # self._pyroSeq = 0 # message sequence number # self._pyroRawWireResponse = False # internal switch to enable wire level responses # self._pyroHandshake = "hello" # the data object that should be sent in the initial connection handshake message - self._max_retries = kwargs.get("max_retires", 3) - self._timeout = kwargs.get("timeout", 3) - self._owner_thread = get_ident() # the thread that owns this proxy - self._get_meta_data() # if config.SERIALIZER not in serializers.serializers: # raise ValueError("unknown serializer configured") # # note: we're not clearing the client annotations dict here. 
@@ -55,30 +43,10 @@ def __init__(self, instance_name : str, **kwargs) -> None: def __del__(self): self.exit() - def __getattribute__(self, __name: str) -> Any: - if __name in ProxyClient.__own_attrs__: # self.__own_attrs__ will likely cause an infinite recursion - return super(ProxyClient, self).__getattribute__(__name) - if __name in self._server_attrs: - return super(ProxyClient, self).__getattribute__(__name)() - # elif __name in self._server_methods: - # # client side check if the requested attr actually exists - # return - return super().__getattribute__(__name) - - def __setattr__(self, __name : str, __value : Any): - if __name in ProxyClient.__own_attrs__ or __name in self._server_methods: # self.__own_attrs__ will likely cause an infinite recursion - return super(ProxyClient, self).__setattr__(__name, __value) - elif __name in self._server_attrs: - return _RemoteAttribute(self._client, __name, self._max_retries) - raise AttributeError("Cannot set foreign attribute to ProxyClient class for {}".format(self.instance_name)) - # remote attribute - # client side validation if the requested attr actually exists - # if __name in ProxyClient.__own_attrs__: - # return super().__setattr__(__name, __value) # one of the special pyro attributes - # # get metadata if it's not there yet - # if not self._pyroMethods and not self._pyroAttrs: - # self._pyroGetMetadata() - # raise AttributeError("remote object '%s' has no exposed attribute '%s'" % (self._pyroUri, name)) + def __setattr__(self, __name : str, __value : typing.Any): + if __name in ObjectProxy.__own_attrs__ or (__name not in self.__dict__ and isinstance(__value, __allowed_attribute_types__)): + return super(ObjectProxy, self).__setattr__(__name, __value) + raise AttributeError(f"Cannot set foreign attribute {__name} to ObjectProxy for {self.instance_name}. 
Given attribute not found in RemoteObject.") def __repr__(self): if self._pyroConnection: @@ -357,19 +325,20 @@ def exit(self): # self._pyroGetMetadata(uri.object) # return True - def _get_meta_data(self, objectId=None, known_metadata=None): + def _load_remote_object(self): """ - Get metadata from server (methods, attrs, oneway, ...) and remember them in some attributes of the proxy. + Get metadata from server (methods, parameters...) and remember them in some attributes of the proxy. Usually this will already be known due to the default behavior of the connect handshake, where the connect response also includes the metadata. """ - func = _RemoteMethod(self._client, '/proxy', 3) - reply = func()[4][self.instance_name]["returnValue"] + fetch = _RemoteMethod(self._client, '/resources/object-proxy', 3) + reply = fetch()[4][self.instance_name]["returnValue"] + for name, data in reply.items(): if data[1] == FUNC: - self._server_methods.add(data[3]) - addMethod(self, _RemoteMethod(self._client, data[0], self._max_retries), data) - + _add_method(self, _RemoteMethod(self._client, data[0], self._max_retries), data) + if data[1] == PARAMETER: + _add_parameter(self, _RemoteParameter(self._client, data[0], self._max_retries), data) # objectId = objectId or self._pyroUri.object # log.debug("getting metadata for object %s", objectId) # if self._pyroConnection is None and not known_metadata: @@ -487,38 +456,40 @@ def __init__(self, client : SyncZMQClient, instruction : str, max_retries : int) self._max_retries = max_retries self._loop = asyncio.get_event_loop() + @property # i.e. 
cannot have setter def last_return_value(self): return self._last_return_value - def __call__(self, *args, **kwargs) -> Any: + def __call__(self, *args, **kwargs) -> typing.Any: for attempt in range(self._max_retries + 1): - try: - self._last_return_value = self._client.execute_function(self._instruction, kwargs) - return self._last_return_value - except Exception as E: - print(E) + self._last_return_value : typing.Dict = self._client.execute(self._instruction, kwargs) + exception = self._last_return_value.get("exception", None) + if exception: + raise_local_exception(exception, "remote method") + return self._last_return_value + +class _RemoteParameter(object): + """parameter set & get abstraction""" - -class _RemoteAttribute(object): - """method call abstraction""" - - def __init__(self, client, instruction, max_retries): + def __init__(self, client : SyncZMQClient, instruction : str, max_retries : int): self._client = client self._instruction = instruction self._max_retries = max_retries + @property # i.e. 
cannot have setter def last_value(self): return self._last_value - def __call__(self, *args, **kwargs) -> Any: + def set(self, *args, **kwargs) -> typing.Any: for attempt in range(self._max_retries + 1): - try: - self._last_value = self._client.execute_function(self._instruction, kwargs) - return self._last_value - except Exception as E: - print(E) + self._last_value : typing.Dict = self._client.execute(self._instruction, kwargs) + exception = self._last_value.get("exception", None) + if exception: + raise_local_exception(exception, "remote method") + return self._last_value + @@ -575,5 +546,26 @@ def close(self): self.proxy = None -__all__ = ['ProxyClient'] +__allowed_attribute_types__ = (_RemoteParameter, _RemoteMethod) + +def _add_method(cls : ObjectProxy, method : _RemoteMethod, func_info : Tuple[Any] ) -> None: + for index, dunder in enumerate(SERIALIZABLE_WRAPPER_ASSIGNMENTS, 2): + if dunder == '__qualname__': + func_infor = '{}.{}'.format(cls.__class__.__name__, func_info[index].split('.')[1]) + else: + func_infor = func_info[index] + setattr(method, dunder, func_infor) + cls.__setattr__(method.__name__, method) + +def _add_parameter(cls : ObjectProxy, parameter : _RemoteParameter, parameter_info : typing.Tuple[typing.Any]) -> None: + for index, dunder in enumerate(SERIALIZABLE_WRAPPER_ASSIGNMENTS, 2): + if dunder == '__qualname__': + func_infor = '{}.{}'.format(cls.__class__.__name__, func_info[index].split('.')[1]) + else: + func_infor = func_info[index] + setattr(method, dunder, func_infor) + cls.__setattr__(method.__name__, method) + + +__all__ = ['ObjectProxy'] diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index 6935b63..893cacc 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -10,6 +10,7 @@ import datetime from enum import EnumMeta, Enum from dataclasses import asdict, dataclass + from sqlalchemy import (Integer as DBInteger, String as DBString, JSON as DB_JSON, 
LargeBinary as DBBinary) from sqlalchemy import select from sqlalchemy.orm import Mapped, mapped_column, DeclarativeBase, MappedAsDataclass diff --git a/hololinked/server/utils.py b/hololinked/server/utils.py index 052cd5b..4219d4a 100644 --- a/hololinked/server/utils.py +++ b/hololinked/server/utils.py @@ -209,6 +209,18 @@ def get_signature(function : typing.Callable): +def raise_local_exception(exception : typing.Dict[str, typing.Any], caller : str): + exception = getattr(__builtins__, exception["name"], None) + message = f"{caller} raised exception, check notes for traceback." + if exception is None: + E = Exception(message) + else: + E = exception(message) + E.__notes__ = exception["traceback"] + raise E + + + __all__ = ['current_datetime_ms_str', 'wrap_text', 'copy_parameters', 'dashed_URL'] diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py index 3ca1a8f..46b4c51 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -4,6 +4,7 @@ import asyncio import logging import typing +import threading from enum import Enum from typing import Union, List, Any, Dict, Sequence, Iterator, Set @@ -302,10 +303,16 @@ class AsyncPollingZMQServer(AsyncZMQServer): """ - def __init__(self, instance_name : str, server_type : Enum, context : Union[zmq.asyncio.Context, None] = None, - poll_timeout = 25, **kwargs) -> None: + def __init__(self, instance_name : str, executor_thread_event : threading.Event, + server_type : Enum, context : Union[zmq.asyncio.Context, None] = None, + poll_timeout = 25, **kwargs) -> None: super().__init__(instance_name, server_type, context, **kwargs) self.poller = zmq.asyncio.Poller() + self._inproc_socket = None # definitions to be used later + self._ipc_socket = None + self._tcp_socket = None + self._executor_thread_event = executor_thread_event + self._instructions = [] self.poller.register(self.socket, zmq.POLLIN) self.poll_timeout = poll_timeout @@ -336,7 
+343,8 @@ async def poll_instructions(self) -> List[List[bytes]]: instruction[4])) instructions.append(instruction) if len(instructions) > 0: - break + self._instructions.extend(instructions) + self._executor_thread_event.set() return instructions def stop_polling(self) -> None: From 012fe150dcd243b2aad1ad298879b4a7669d6459 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Sun, 10 Dec 2023 20:37:13 +0100 Subject: [PATCH 002/167] improvement of object-proxy, parameter get-set, method call barely possible. contains bugs, edge cases and still does not work for all serializers. --- hololinked/server/data_classes.py | 5 +- hololinked/server/handlers.py | 2 +- hololinked/server/proxy_client.py | 575 ++++++++--------------- hololinked/server/remote_object.py | 7 +- hololinked/server/serializers.py | 2 +- hololinked/server/utils.py | 14 +- hololinked/server/zmq_message_brokers.py | 13 +- 7 files changed, 217 insertions(+), 401 deletions(-) diff --git a/hololinked/server/data_classes.py b/hololinked/server/data_classes.py index 047b0f7..c930b2e 100644 --- a/hololinked/server/data_classes.py +++ b/hololinked/server/data_classes.py @@ -195,7 +195,8 @@ class ProxyResourceData: Used by Proxy objects to fill attributes & methods in a proxy class. 
""" what : str - instruction : str + instruction : str + # below are all dunders, when something else is added, be careful to remember to edit ObjectProxy logic when necessary module : typing.Union[str, None] name : str qualname : str @@ -206,6 +207,8 @@ class ProxyResourceData: def json(self): return asdict(self) + def get_dunder_attr(self, __dunder_name : str): + return getattr(self, __dunder_name.strip('_')) @dataclass class GUIResources: diff --git a/hololinked/server/handlers.py b/hololinked/server/handlers.py index 1f45cfc..06304a5 100644 --- a/hololinked/server/handlers.py +++ b/hololinked/server/handlers.py @@ -105,7 +105,7 @@ async def handle_bound_method(self, info : HTTPServerResourceData, arguments): }) async def handle_instruction(self, info : HTTPServerResourceData, path_arguments : typing.Optional[typing.Dict] = None) -> None: - self.set_status(202) + self.set_status(200) self.add_header("Access-Control-Allow-Origin", self.client_address) self.set_header("Content-Type" , "application/json") try: diff --git a/hololinked/server/proxy_client.py b/hololinked/server/proxy_client.py index 0f36d75..b152574 100644 --- a/hololinked/server/proxy_client.py +++ b/hololinked/server/proxy_client.py @@ -2,73 +2,126 @@ import asyncio import typing import logging +from typing import Any -from .zmq_message_brokers import SyncZMQClient +from .zmq_message_brokers import SyncZMQClient, EventConsumer, PROXY from .utils import current_datetime_ms_str, raise_local_exception -from .constants import PARAMETER, SERIALIZABLE_WRAPPER_ASSIGNMENTS, FUNC +from .constants import PARAMETER, SERIALIZABLE_WRAPPER_ASSIGNMENTS, FUNC, CALLABLE, ATTRIBUTE, EVENT +from .data_classes import ProxyResourceData +SingleLevelNestedJSON = typing.Dict[str, typing.Dict[str, typing.Any]] + + class ObjectProxy: __own_attrs__ = frozenset([ - '_client', '_server_methods', '_server_attrs', '_server_oneway', '_max_retries', - '_timeout', '_owner_thread', '_load_remote_object', 'instance_name', 
'__annotations__' + '_client', '_client_ID', '__annotations__', + 'instance_name', 'logger', 'timeout', '_timeout', ]) - def __init__(self, instance_name : str, timeout : float = 3, max_retries = 1, **kwargs) -> None: + def __init__(self, instance_name : str, timeout : float = 5, load_remote_object = True, **kwargs) -> None: self.instance_name = instance_name - self.logger = logging.Logger() - # compose ZMQ client in Proxy client to give the idea that all sending and receiving is actually - # done by the ZMQ client and not by the Proxy client directly - self._client = SyncZMQClient(instance_name, instance_name+current_datetime_ms_str(), **kwargs) - self._max_retries = kwargs.get("max_retires", 1) - self._timeout = timeout - self._owner_thread = threading.get_ident() # the thread that owns this proxy - self._load_remote_object() - - # self._pyroSeq = 0 # message sequence number - # self._pyroRawWireResponse = False # internal switch to enable wire level responses - # self._pyroHandshake = "hello" # the data object that should be sent in the initial connection handshake message - # if config.SERIALIZER not in serializers.serializers: - # raise ValueError("unknown serializer configured") - # # note: we're not clearing the client annotations dict here. - # that is because otherwise it will be wiped if a new proxy is needed to connect PYRONAME uris. - # clearing the response annotations is okay. - # current_context.response_annotations = {} - # if connected_socket: - # self.__pyroCreateConnection(False, connected_socket) + self._client_ID = instance_name+current_datetime_ms_str() + self.logger = logging.Logger(self._client_ID) + self.timeout = timeout + # compose ZMQ client in Proxy client so that all sending and receiving is + # done by the ZMQ client and not by the Proxy client directly. 
Proxy client only + # bothers mainly about __setattr__ and _getattr__ + self._client = SyncZMQClient(instance_name, self._client_ID, client_type=PROXY, **kwargs) + if load_remote_object: + self.load_remote_object() def __del__(self): - self.exit() + self._client.exit() + + def __getattribute__(self, __name: str) -> Any: + obj = super().__getattribute__(__name) + if isinstance(obj, _RemoteParameter): + return obj.get() + return obj def __setattr__(self, __name : str, __value : typing.Any): if __name in ObjectProxy.__own_attrs__ or (__name not in self.__dict__ and isinstance(__value, __allowed_attribute_types__)): + print(f"setting {__name}") return super(ObjectProxy, self).__setattr__(__name, __value) + elif __name in self.__dict__: + obj = self.__dict__[__name] + if isinstance(obj, _RemoteParameter): + obj.set(value=__value) + return + raise AttributeError(f"Cannot reset attribute {__name} again to ObjectProxy for {self.instance_name}.") raise AttributeError(f"Cannot set foreign attribute {__name} to ObjectProxy for {self.instance_name}. Given attribute not found in RemoteObject.") def __repr__(self): - if self._pyroConnection: - connected = "connected " + self._pyroConnection.family() - else: - connected = "not connected" - return "<%s.%s at 0x%x; %s; for %s; owner %s>" % (self.__class__.__module__, self.__class__.__name__, - id(self), connected, self._pyroUri, self.__pyroOwnerThread) + return f'ObjectProxy {self.instance_name}' - def invoke(self, method : str, oneway : bool = False, noblock : bool = False, **kwargs): - pass + def __enter__(self): + return self - async def async_invoke(self, method : str, oneway : bool = False, noblock : bool = False, **kwargs): - pass + def __exit__(self, exc_type, exc_value, traceback): + raise NotImplementedError("with statement exit is not yet implemented. 
Avoid.") + + def __bool__(self): return True - def set_parameter(self, parameter : str, value : typing.Any, oneway : bool, noblock : bool = False): - pass + def __eq__(self, other): + if other is self: + return True + return isinstance(other, ObjectProxy) and other.instance_name == self.instance_name + + def __ne__(self, other): + if other and isinstance(other, ObjectProxy): + return other.instance_name != self.instance_name + return True + + def __hash__(self): + return hash(self._client_ID) + + @property + def timeout(self) -> typing.Union[float, int]: + return self._timeout + + @timeout.setter + def timeout(self, value : typing.Union[float, int]): + if not isinstance(value, (float, int, type(None))): + raise TypeError(f"Timeout can only be float or int greater than 0, or None. Given type {type(value)}.") + elif value is not None and value < 0: + raise ValueError("Timeout must be at least 0 or None, not negative.") + self._timeout = value + + timeout.__doc__ = """Timeout in seconds on server side for execution of method. 
Defaults to 5 seconds and + network times not considered.""" + + def invoke(self, method : str, oneway : bool = False, **kwargs) -> typing.Any: + method : _RemoteMethod = getattr(self, method, None) + if not method: + raise AttributeError(f"No remote method named {method}") + if oneway: + method.oneway(**kwargs) + else: + return method(**kwargs) + + async def async_invoke(self, method : str, **kwargs): + method : _RemoteMethod = getattr(self, method, None) + if not method: + raise AttributeError(f"No remote method named {method}") + return await method.async_call(**kwargs) + + def set_parameter(self, parameter : str, value : typing.Any, oneway : bool) -> None: + parameter : _RemoteParameter = getattr(self, parameter, None) + if not parameter: + raise AttributeError(f"No remote parameter named {parameter}") + if oneway: + parameter.oneway(value) + else: + parameter.set(value) async def async_set_parameters(self, oneway : bool = False, noblock : bool = False, **parameters): pass - def subscribe_event(self, event_name : str): + def subscribe_event(self, event_name : str, callback : typing.Callable): pass def unsubscribe_event(self, event_name : str): @@ -102,24 +155,6 @@ def unsubscribe_event(self, event_name : str): # p._pyroMaxRetries = self._pyroMaxRetries # return p - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, traceback): - self.exit() - - # def __eq__(self, other): - # if other is self: - # return True - # return isinstance(other, Proxy) and other._pyroUri == self._pyroUri - - # def __ne__(self, other): - # if other and isinstance(other, Proxy): - # return other._pyroUri != self._pyroUri - # return True - - # def __hash__(self): - # return hash(self._pyroUri) # def __dir__(self): # result = dir(self.__class__) + list(self.__dict__.keys()) @@ -129,7 +164,7 @@ def __exit__(self, exc_type, exc_value, traceback): # # obj.__getitem__(index)), the special methods are not looked up via __getattr__ # # for efficiency reasons; instead, 
their presence is checked directly. # # Thus we need to define them here to force (remote) lookup through __getitem__. - # def __bool__(self): return True + # def __len__(self): return self.__getattr__('__len__')() # def __getitem__(self, index): return self.__getattr__('__getitem__')(index) # def __setitem__(self, index, val): return self.__getattr__('__setitem__')(index, val) @@ -146,352 +181,125 @@ def __exit__(self, exc_type, exc_value, traceback): # except (StopIteration, IndexError): # return - def exit(self): - """release the connection to the pyro daemon""" - self.__check_owner_thread() - self._client.exit() - - # def _pyroBind(self): - # """ - # Bind this proxy to the exact object from the uri. That means that the proxy's uri - # will be updated with a direct PYRO uri, if it isn't one yet. - # If the proxy is already bound, it will not bind again. - # """ - # return self.__pyroCreateConnection(True) - - # def __pyroGetTimeout(self): - # return self.__pyroTimeout - - # def __pyroSetTimeout(self, timeout): - # self.__pyroTimeout = timeout - # if self._pyroConnection is not None: - # self._pyroConnection.timeout = timeout - - # _pyroTimeout = property(__pyroGetTimeout, __pyroSetTimeout, doc=""" - # The timeout in seconds for calls on this proxy. Defaults to ``None``. 
- # If the timeout expires before the remote method call returns, - # Pyro will raise a :exc:`Pyro5.errors.TimeoutError`""") - - # async def _invoke(self, instruction, flags=0, objectId=None): - # """perform the remote method call communication""" - # self.__check_owner() - # current_context.response_annotations = {} - # if self._pyroConnection is None: - # self.__pyroCreateConnection() - # serializer = serializers.serializers[self._pyroSerializer or config.SERIALIZER] - # objectId = objectId or self._pyroConnection.objectId - # annotations = current_context.annotations - # if vargs and isinstance(vargs[0], SerializedBlob): - # # special serialization of a 'blob' that stays serialized - # data, flags = self.__serializeBlobArgs(vargs, kwargs, annotations, flags, objectId, methodname, serializer) - # else: - # # normal serialization of the remote call - # data = serializer.dumpsCall(objectId, methodname, vargs, kwargs) - # if methodname in self._pyroOneway: - # flags |= protocol.FLAGS_ONEWAY - # self._pyroSeq = (self._pyroSeq + 1) & 0xffff - # msg = protocol.SendingMessage(protocol.MSG_INVOKE, flags, self._pyroSeq, serializer.serializer_id, data, annotations=annotations) - # if config.LOGWIRE: - # protocol.log_wiredata(log, "proxy wiredata sending", msg) - # try: - # self._pyroConnection.send(msg.data) - # del msg # invite GC to collect the object, don't wait for out-of-scope - # if flags & protocol.FLAGS_ONEWAY: - # return None # oneway call, no response data - # else: - # msg = protocol.recv_stub(self._pyroConnection, [protocol.MSG_RESULT]) - # if config.LOGWIRE: - # protocol.log_wiredata(log, "proxy wiredata received", msg) - # self.__pyroCheckSequence(msg.seq) - # if msg.serializer_id != serializer.serializer_id: - # error = "invalid serializer in response: %d" % msg.serializer_id - # log.error(error) - # raise errors.SerializeError(error) - # if msg.annotations: - # current_context.response_annotations = msg.annotations - # if self._pyroRawWireResponse: - # 
return msg - # data = serializer.loads(msg.data) - # if msg.flags & protocol.FLAGS_ITEMSTREAMRESULT: - # streamId = bytes(msg.annotations.get("STRM", b"")).decode() - # if not streamId: - # raise errors.ProtocolError("result of call is an iterator, but the server is not configured to allow streaming") - # return _StreamResultIterator(streamId, self) - # if msg.flags & protocol.FLAGS_EXCEPTION: - # raise data # if you see this in your traceback, you should probably inspect the remote traceback as well - # else: - # return data - # except (errors.CommunicationError, KeyboardInterrupt): - # # Communication error during read. To avoid corrupt transfers, we close the connection. - # # Otherwise we might receive the previous reply as a result of a new method call! - # # Special case for keyboardinterrupt: people pressing ^C to abort the client - # # may be catching the keyboardinterrupt in their code. We should probably be on the - # # safe side and release the proxy connection in this case too, because they might - # # be reusing the proxy object after catching the exception... - # self._pyroRelease() - # raise - - # def __pyroCheckSequence(self, seq): - # if seq != self._pyroSeq: - # err = "invoke: reply sequence out of sync, got %d expected %d" % (seq, self._pyroSeq) - # log.error(err) - # raise errors.ProtocolError(err) - - # def __pyroCreateConnection(self, replaceUri=False, connected_socket=None): - # """ - # Connects this proxy to the remote Pyro daemon. Does connection handshake. - # Returns true if a new connection was made, false if an existing one was already present. 
- # """ - # def connect_and_handshake(conn): - # try: - # if self._pyroConnection is not None: - # return False # already connected - # if config.SSL: - # sslContext = socketutil.get_ssl_context(clientcert=config.SSL_CLIENTCERT, - # clientkey=config.SSL_CLIENTKEY, - # keypassword=config.SSL_CLIENTKEYPASSWD, - # cacerts=config.SSL_CACERTS) - # else: - # sslContext = None - # sock = socketutil.create_socket(connect=connect_location, - # reuseaddr=config.SOCK_REUSE, - # timeout=self.__pyroTimeout, - # nodelay=config.SOCK_NODELAY, - # sslContext=sslContext) - # conn = socketutil.SocketConnection(sock, uri.object) - # # Do handshake. - # serializer = serializers.serializers[self._pyroSerializer or config.SERIALIZER] - # data = {"handshake": self._pyroHandshake, "object": uri.object} - # data = serializer.dumps(data) - # msg = protocol.SendingMessage(protocol.MSG_CONNECT, 0, self._pyroSeq, serializer.serializer_id, - # data, annotations=current_context.annotations) - # if config.LOGWIRE: - # protocol.log_wiredata(log, "proxy connect sending", msg) - # conn.send(msg.data) - # msg = protocol.recv_stub(conn, [protocol.MSG_CONNECTOK, protocol.MSG_CONNECTFAIL]) - # if config.LOGWIRE: - # protocol.log_wiredata(log, "proxy connect response received", msg) - # except Exception as x: - # if conn: - # conn.close() - # err = "cannot connect to %s: %s" % (connect_location, x) - # log.error(err) - # if isinstance(x, errors.CommunicationError): - # raise - # else: - # raise errors.CommunicationError(err) from x - # else: - # handshake_response = "?" 
- # if msg.data: - # serializer = serializers.serializers_by_id[msg.serializer_id] - # handshake_response = serializer.loads(msg.data) - # if msg.type == protocol.MSG_CONNECTFAIL: - # error = "connection to %s rejected: %s" % (connect_location, handshake_response) - # conn.close() - # log.error(error) - # raise errors.CommunicationError(error) - # elif msg.type == protocol.MSG_CONNECTOK: - # self.__processMetadata(handshake_response["meta"]) - # handshake_response = handshake_response["handshake"] - # self._pyroConnection = conn - # self._pyroLocalSocket = conn.sock.getsockname() - # if replaceUri: - # self._pyroUri = uri - # self._pyroValidateHandshake(handshake_response) - # log.debug("connected to %s - %s - %s", self._pyroUri, conn.family(), "SSL" if sslContext else "unencrypted") - # if msg.annotations: - # current_context.response_annotations = msg.annotations - # else: - # conn.close() - # err = "cannot connect to %s: invalid msg type %d received" % (connect_location, msg.type) - # log.error(err) - # raise errors.ProtocolError(err) - - # self.__check_owner() - # if self._pyroConnection is not None: - # return False # already connected - # uri = core.resolve(self._pyroUri) - # # socket connection (normal or Unix domain socket) - # conn = None - # log.debug("connecting to %s", uri) - # connect_location = uri.sockname or (uri.host, uri.port) - # if connected_socket: - # self._pyroConnection = socketutil.SocketConnection(connected_socket, uri.object, True) - # self._pyroLocalSocket = connected_socket.getsockname() - # else: - # connect_and_handshake(conn) - # # obtain metadata if this feature is enabled, and the metadata is not known yet - # if not self._pyroMethods and not self._pyroAttrs: - # self._pyroGetMetadata(uri.object) - # return True - - def _load_remote_object(self): + + def load_remote_object(self): """ Get metadata from server (methods, parameters...) and remember them in some attributes of the proxy. 
Usually this will already be known due to the default behavior of the connect handshake, where the connect response also includes the metadata. """ - fetch = _RemoteMethod(self._client, '/resources/object-proxy', 3) - reply = fetch()[4][self.instance_name]["returnValue"] + fetch = _RemoteMethod(self._client, f'/{self.instance_name}/resources/object-proxy/read') + reply : SingleLevelNestedJSON = fetch()[5]["returnValue"] for name, data in reply.items(): - if data[1] == FUNC: - _add_method(self, _RemoteMethod(self._client, data[0], self._max_retries), data) - if data[1] == PARAMETER: - _add_parameter(self, _RemoteParameter(self._client, data[0], self._max_retries), data) - # objectId = objectId or self._pyroUri.object - # log.debug("getting metadata for object %s", objectId) - # if self._pyroConnection is None and not known_metadata: - # try: - # self.__pyroCreateConnection() - # except errors.PyroError: - # log.error("problem getting metadata: cannot connect") - # raise - # if self._pyroMethods or self._pyroAttrs: - # return # metadata has already been retrieved as part of creating the connection - # try: - # # invoke the get_metadata method on the daemon - # result = known_metadata or self._pyroInvoke("get_metadata", [objectId], {}, objectId=core.DAEMON_NAME) - # self.__processMetadata(result) - # except errors.PyroError: - # log.exception("problem getting metadata") - # raise - - # def __processMetadata(self, metadata): - # if not metadata: - # return - # self._pyroOneway = set(metadata["oneway"]) - # self._pyroMethods = set(metadata["methods"]) - # self._pyroAttrs = set(metadata["attrs"]) - # if log.isEnabledFor(logging.DEBUG): - # log.debug("from meta: methods=%s, oneway methods=%s, attributes=%s", - # sorted(self._pyroMethods), sorted(self._pyroOneway), sorted(self._pyroAttrs)) - # if not self._pyroMethods and not self._pyroAttrs: - # raise errors.PyroError("remote object doesn't expose any methods or attributes. 
Did you forget setting @expose on them?") - - # def _pyroReconnect(self, tries=100000000): - # """ - # (Re)connect the proxy to the daemon containing the pyro object which the proxy is for. - # In contrast to the _pyroBind method, this one first releases the connection (if the proxy is still connected) - # and retries making a new connection until it succeeds or the given amount of tries ran out. - # """ - # self._pyroRelease() - # while tries: - # try: - # self.__pyroCreateConnection() - # return - # except errors.CommunicationError: - # tries -= 1 - # if tries: - # time.sleep(2) - # msg = "failed to reconnect" - # log.error(msg) - # raise errors.ConnectionClosedError(msg) - + data = ProxyResourceData(**data) + if data.what == CALLABLE: + _add_method(self, _RemoteMethod(self._client, data.instruction), data) + elif data.what == ATTRIBUTE: + _add_parameter(self, _RemoteParameter(self._client, data.instruction), data) + elif data.what == EVENT: + pass + # def _pyroInvokeBatch(self, calls, oneway=False): # flags = protocol.FLAGS_BATCH # if oneway: # flags |= protocol.FLAGS_ONEWAY # return self._pyroInvoke("", calls, None, flags) - # def _pyroValidateHandshake(self, response): - # """ - # Process and validate the initial connection handshake response data received from the daemon. - # Simply return without error if everything is ok. - # Raise an exception if something is wrong and the connection should not be made. - # """ - # return - - # def _pyroClaimOwnership(self): - # """ - # The current thread claims the ownership of this proxy from another thread. - # Any existing connection will remain active! 
- # """ - # if get_ident() != self.__pyroOwnerThread: - # # if self._pyroConnection is not None: - # # self._pyroConnection.close() - # # self._pyroConnection = None - # self.__pyroOwnerThread = get_ident() - - # def __serializeBlobArgs(self, vargs, kwargs, annotations, flags, objectId, methodname, serializer): - # """ - # Special handling of a "blob" argument that has to stay serialized until explicitly deserialized in client code. - # This makes efficient, transparent gateways or dispatchers and such possible: - # they don't have to de/reserialize the message and are independent from the serialized class definitions. - # Annotations are passed in because some blob metadata is added. They're not part of the blob itself. - # """ - # if len(vargs) > 1 or kwargs: - # raise errors.SerializeError("if SerializedBlob is used, it must be the only argument") - # blob = vargs[0] - # flags |= protocol.FLAGS_KEEPSERIALIZED - # # Pass the objectId and methodname separately in an annotation because currently, - # # they are embedded inside the serialized message data. And we're not deserializing that, - # # so we have to have another means of knowing the object and method it is meant for... - # # A better solution is perhaps to split the actual remote method arguments from the - # # control data (object + methodname) but that requires a major protocol change. - # # The code below is not as nice but it works without any protocol change and doesn't - # # require a hack either - so it's actually not bad like this. 
- # import marshal - # annotations["BLBI"] = marshal.dumps((blob.info, objectId, methodname)) - # if blob._contains_blob: - # # directly pass through the already serialized msg data from within the blob - # protocol_msg = blob._data - # return protocol_msg.data, flags - # else: - # # replaces SerializedBlob argument with the data to be serialized - # return serializer.dumpsCall(objectId, methodname, blob._data, kwargs), flags - - def __check_owner_thread(self): - if get_ident() != self._owner_thread: - raise RuntimeError("the calling thread is not the owner of this proxy, \ - create a new proxy in this thread or transfer ownership.") - - -class _RemoteMethod(object): + + +class _RemoteMethod: """method call abstraction""" - def __init__(self, client : SyncZMQClient, instruction : str, max_retries : int) -> None: + def __init__(self, client : SyncZMQClient, instruction : str) -> None: self._client = client self._instruction = instruction - self._max_retries = max_retries self._loop = asyncio.get_event_loop() + + def __del__(self): + self._client = None # remove ref, as of now weakref is not used. @property # i.e. 
cannot have setter def last_return_value(self): return self._last_return_value + + def oneway(self, *args, **kwargs) -> None: + self._client.execute(self._instruction, kwargs) def __call__(self, *args, **kwargs) -> typing.Any: - for attempt in range(self._max_retries + 1): - self._last_return_value : typing.Dict = self._client.execute(self._instruction, kwargs) - exception = self._last_return_value.get("exception", None) - if exception: - raise_local_exception(exception, "remote method") - return self._last_return_value + self._last_return_value : typing.Dict = self._client.execute(self._instruction, kwargs, + raise_client_side_exception=True) + return self._last_return_value + async def async_call(self, *args, **kwargs) -> typing.Any: + self._last_return_value : typing.Dict = self._client.execute(self._instruction, kwargs, + raise_client_side_exception=True) + return self._last_return_value -class _RemoteParameter(object): +class _RemoteParameter: """parameter set & get abstraction""" - def __init__(self, client : SyncZMQClient, instruction : str, max_retries : int): + def __init__(self, client : SyncZMQClient, instruction : str): self._client = client - self._instruction = instruction - self._max_retries = max_retries + self._read_instruction = instruction + '/read' + self._write_instruction = instruction + '/write' + + def __del__(self): + self._client = None @property # i.e. 
cannot have setter def last_value(self): return self._last_value - def set(self, *args, **kwargs) -> typing.Any: - for attempt in range(self._max_retries + 1): - self._last_value : typing.Dict = self._client.execute(self._instruction, kwargs) - exception = self._last_value.get("exception", None) - if exception: - raise_local_exception(exception, "remote method") - return self._last_value - - + def set(self, value : typing.Any) -> typing.Any: + self._last_value : typing.Dict = self._client.execute(self._write_instruction, dict(value=value), + raise_client_side_exception=True) + + def get(self): + self._last_value : typing.Dict = self._client.execute(self._read_instruction, + raise_client_side_exception=True) + return self._last_value + + async def async_set(self, value : typing.Any) -> typing.Any: + self._last_value : typing.Dict = await self._client.execute(self._write_instruction, dict(value=value), + raise_client_side_exception=True) + async def async_get(self): + self._last_value : typing.Dict = await self._client.execute(self._read_instruction, + raise_client_side_exception=True) + return self._last_value + + +class _Event: + """event streaming""" + + def __init__(self, client : SyncZMQClient, event_name : str, event_socket : str) -> None: + self._client = client + self._event_name = event_name + self._event_socket = event_socket + + def _subscribe(self, callback : typing.Callable): + self._event_consumer = EventConsumer(request.path, event_info.socket_address, + f"{request.path}_HTTPEvent@"+current_datetime_ms_str()) + self._cb = callback + self._subscribed = True + self._thread = threading.Thread(target=self.listen) + self._thread.start() + + def listen(self): + while self._subscribed: + try: + data = self._event_consumer.receive_event(deserialize=True) + self._cb(data) + except Exception as E: + print(E) + self._event_consumer.exit() + + def _unsubscribe(self): + self._subscribed = False + + class _StreamResultIterator(object): """ @@ -546,25 +354,26 @@ def 
close(self): self.proxy = None -__allowed_attribute_types__ = (_RemoteParameter, _RemoteMethod) -def _add_method(cls : ObjectProxy, method : _RemoteMethod, func_info : Tuple[Any] ) -> None: - for index, dunder in enumerate(SERIALIZABLE_WRAPPER_ASSIGNMENTS, 2): - if dunder == '__qualname__': - func_infor = '{}.{}'.format(cls.__class__.__name__, func_info[index].split('.')[1]) - else: - func_infor = func_info[index] - setattr(method, dunder, func_infor) - cls.__setattr__(method.__name__, method) +__allowed_attribute_types__ = (_RemoteParameter, _RemoteMethod) -def _add_parameter(cls : ObjectProxy, parameter : _RemoteParameter, parameter_info : typing.Tuple[typing.Any]) -> None: - for index, dunder in enumerate(SERIALIZABLE_WRAPPER_ASSIGNMENTS, 2): +def _add_method(client_obj : ObjectProxy, method : _RemoteMethod, func_info : ProxyResourceData) -> None: + for dunder in SERIALIZABLE_WRAPPER_ASSIGNMENTS: if dunder == '__qualname__': - func_infor = '{}.{}'.format(cls.__class__.__name__, func_info[index].split('.')[1]) + info = '{}.{}'.format(client_obj.__class__.__name__, func_info.get_dunder_attr(dunder).split('.')[1]) else: - func_infor = func_info[index] - setattr(method, dunder, func_infor) - cls.__setattr__(method.__name__, method) + info = func_info.get_dunder_attr(dunder) + setattr(method, dunder, info) + client_obj.__setattr__(method.__name__, method) + +def _add_parameter(client_obj : ObjectProxy, parameter : _RemoteParameter, parameter_info : ProxyResourceData) -> None: + for attr in ['doc', 'name']: + # just to imitate _add_method logic + setattr(parameter, attr, getattr(parameter_info, attr)) + client_obj.__setattr__(parameter_info.name, parameter) + +def _add_event(client_obj : ObjectProxy, event, event_info) -> None: + pass __all__ = ['ObjectProxy'] diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index 893cacc..5631c28 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -332,7 
+332,7 @@ class RemoteSubobject(Parameterized, metaclass=RemoteObjectMetaclass): doc="returns a dictionary with two fields " ) # type: ignore httpserver_resources = RemoteParameter(readonly=True, URL_path='/resources/http', doc="""""" ) # type: ignore - proxy_resources = RemoteParameter(readonly=True, URL_path='/resources/proxy', + proxy_resources = RemoteParameter(readonly=True, URL_path='/resources/object-proxy', doc= """object's resources exposed to ProxyClient, similar to http_resources but differs in details.""") # type: ignore @@ -462,7 +462,7 @@ def _prepare_resources(self): resource._owner = self resource.full_URL_path_prefix = self.full_URL_path_prefix resource.publisher = self._event_publisher - httpserver_resources[GET]['{}/event{}'.format( + httpserver_resources[GET]['{}{}'.format( self.full_URL_path_prefix, resource.URL_path)] = HTTPServerEventData( # event URL_path has '/' prefix what=EVENT, @@ -491,7 +491,7 @@ def _prepare_resources(self): parameter.event._owner = self parameter.event.full_URL_path_prefix = self.full_URL_path_prefix parameter.event.publisher = self._event_publisher - httpserver_resources[GET]['{}/event{}'.format( + httpserver_resources[GET]['{}{}'.format( self.full_URL_path_prefix, parameter.event.URL_path)] = HTTPServerEventData( what=EVENT, event_name=parameter.event.name, @@ -658,6 +658,7 @@ def __post_init__(self): def _prepare_message_brokers(self): self.message_broker = AsyncPollingZMQServer( instance_name=self.instance_name, + executor_thread_event=threading.Event(), server_type=self.__server_type__, protocols=self.server_protocols, json_serializer=self.json_serializer, proxy_serializer=self.proxy_serializer diff --git a/hololinked/server/serializers.py b/hololinked/server/serializers.py index e184f7f..8ace9ba 100644 --- a/hololinked/server/serializers.py +++ b/hololinked/server/serializers.py @@ -256,7 +256,7 @@ def custom_serializer(obj, serpent_serializer, outputstream, indentlevel): 'pickle' : PickleSerializer, # 'dill' : 
DillSerializer, 'JSON' : JSONSerializer, - 'Serpent' : SerpentSerializer, + 'serpent' : SerpentSerializer, None : SerpentSerializer } diff --git a/hololinked/server/utils.py b/hololinked/server/utils.py index 4219d4a..ac57cf9 100644 --- a/hololinked/server/utils.py +++ b/hololinked/server/utils.py @@ -6,6 +6,8 @@ import asyncio import inspect import typing +import builtins +import types from typing import List from ..param.exceptions import wrap_error_text as wrap_text @@ -209,13 +211,15 @@ def get_signature(function : typing.Callable): -def raise_local_exception(exception : typing.Dict[str, typing.Any], caller : str): - exception = getattr(__builtins__, exception["name"], None) - message = f"{caller} raised exception, check notes for traceback." - if exception is None: +def raise_local_exception(exception : typing.Dict[str, typing.Any]): + exc = getattr(builtins, exception["type"], None) + message = f"server raised exception, check following for server side traceback & above for client side traceback : " + # tb = types.TracebackType() + if exc is None: E = Exception(message) else: - E = exception(message) + E = exc(message) + # E.with_traceback() E.__notes__ = exception["traceback"] raise E diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py index 46b4c51..644a6be 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -9,7 +9,8 @@ from typing import Union, List, Any, Dict, Sequence, Iterator, Set -from .utils import current_datetime_ms_str, create_default_logger, run_coro_somehow, run_coro_sync, wrap_text +from .utils import (current_datetime_ms_str, create_default_logger, run_coro_somehow, run_coro_sync, wrap_text, + raise_local_exception) from .config import global_config from .serializers import (JSONSerializer, PickleSerializer, BaseSerializer, SerpentSerializer, # DillSerializer, serializers) @@ -152,7 +153,7 @@ def parse_client_message(self, message : List[bytes]) 
-> Any: if client_type == PROXY: message[5] = self.proxy_serializer.loads(message[5]) # type: ignore message[6] = self.proxy_serializer.loads(message[6]) # type: ignore - message[7] = self.proxy_serializer.loads(message[6]) # type: ignore + message[7] = self.proxy_serializer.loads(message[7]) # type: ignore elif client_type == HTTP_SERVER: message[5] = self.json_serializer.loads(message[5]) # type: ignore message[6] = self.json_serializer.loads(message[6]) # type: ignore @@ -557,7 +558,7 @@ def __init__(self, server_instance_name : str, identity : str, client_type = HTT handshake : bool = True, protocol : str = "IPC", context : Union[zmq.asyncio.Context, None] = None, **serializer) -> None: BaseZMQClient.__init__(self, server_address = bytes(server_instance_name, encoding='utf-8'), - server_instance_name = server_instance_name, client_type = client_type, **serializer) + server_instance_name=server_instance_name, client_type=client_type, **serializer) BaseSyncZMQ.__init__(self) self.create_socket(context or zmq.Context(), server_instance_name, identity) self._terminate_context = context == None @@ -577,9 +578,7 @@ def recv_reply(self, raise_client_side_exception : bool = False) -> Sequence[Un if reply: self.logger.debug("received reply with msg-id {}".format(reply[3])) if reply[5].get('exception', None) is not None and raise_client_side_exception: - exc_info = reply[5]['exception'] - raise Exception("traceback : {},\nmessage : {},\ntype : {}".format('\n'.join(exc_info["traceback"]), - exc_info['message'], exc_info["type"])) + raise_local_exception(reply[5]['exception']) return reply def execute(self, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, context : Dict[str, Any] = EMPTY_DICT, @@ -1001,7 +1000,7 @@ def publisher(self) -> "EventPublisher": def publisher(self, value : "EventPublisher") -> None: if not hasattr(self, '_publisher'): self._publisher = value - self._event_unique_str = bytes(f"{self.full_URL_path_prefix}/event{self.URL_path}", 
encoding='utf-8') + self._event_unique_str = bytes(f"{self.full_URL_path_prefix}{self.URL_path}", encoding='utf-8') self._publisher.register_event(self) else: raise AttributeError("cannot reassign publisher attribute of event {}".format(self.name)) From 71ac90e5a505b6f105118ed00298ebd5f67166d3 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 6 Jan 2024 18:09:34 +0100 Subject: [PATCH 003/167] removed param non-git folder to add git submodule --- hololinked/param/__init__.py | 68 - hololinked/param/_async.py | 23 - hololinked/param/exceptions.py | 64 - hololinked/param/extensions.py | 171 --- hololinked/param/ipython.py | 357 ----- hololinked/param/logger.py | 47 - hololinked/param/parameterized.py | 2088 -------------------------- hololinked/param/parameters.py | 2101 --------------------------- hololinked/param/serializer.py | 343 ----- hololinked/param/utils.py | 89 -- hololinked/param/version.py | 771 ---------- hololinked/server/host_utilities.py | 14 +- 12 files changed, 7 insertions(+), 6129 deletions(-) delete mode 100644 hololinked/param/__init__.py delete mode 100644 hololinked/param/_async.py delete mode 100644 hololinked/param/exceptions.py delete mode 100644 hololinked/param/extensions.py delete mode 100644 hololinked/param/ipython.py delete mode 100644 hololinked/param/logger.py delete mode 100644 hololinked/param/parameterized.py delete mode 100644 hololinked/param/parameters.py delete mode 100644 hololinked/param/serializer.py delete mode 100644 hololinked/param/utils.py delete mode 100644 hololinked/param/version.py diff --git a/hololinked/param/__init__.py b/hololinked/param/__init__.py deleted file mode 100644 index 1fff831..0000000 --- a/hololinked/param/__init__.py +++ /dev/null @@ -1,68 +0,0 @@ -# adapted from param holoviz - https://github.com/holoviz/param - see following license -""" -Copyright (c) 2005-2022, HoloViz team. -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the - distribution. - - * Neither the name of the copyright holder nor the names of any - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""" - -from __future__ import print_function - -""" -Parameters are a kind of class attribute allowing special behavior, -including dynamically generated parameter values, documentation -strings, constant and read-only parameters, and type or range checking -at assignment time. - -Potentially useful for any large Python program that needs -user-modifiable object attributes; see the Parameter and Parameterized -classes for more information. 
If you do not want to add a dependency -on external code by importing from a separately installed param -package, you can simply save this file as param.py and copy it and -parameterized.py directly into your own package. - -This file contains subclasses of Parameter, implementing specific -parameter types (e.g. Number), and also imports the definition of -Parameters and Parameterized classes. -""" -from . import exceptions -from .parameterized import (Parameterized, ParameterizedFunction, ParamOverrides, Parameter, - depends_on, instance_descriptor, discard_events, edit_constant, ) - -from .logger import get_logger, logging_level, VERBOSE - -# Determine up-to-date version information, if possible, but with a -# safe fallback to ensure that this file and parameterized.py are the -# only two required files. -try: - from .version import Version - __version__ = str(Version(fpath=__file__, archive_commit="$Format:%h$", reponame="param")) -except: - __version__ = "0.0.0+unknown" - diff --git a/hololinked/param/_async.py b/hololinked/param/_async.py deleted file mode 100644 index ad95021..0000000 --- a/hololinked/param/_async.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Module that implements asyncio.coroutine function wrappers to be used -by param internal callbacks. These are defined in a separate file due -to py2 incompatibility with both `async/await` and `yield from` syntax. 
-""" - -# import asyncio - -# def generate_depends(func): -# @asyncio.coroutine -# def _depends(*args, **kw): -# yield from func(*args, **kw) # noqa: E999 -# return _depends - - - -# def generate_callback(func, dependencies, kw): -# @asyncio.coroutine -# def cb(*events): -# args = (getattr(dep.owner, dep.name) for dep in dependencies) -# dep_kwargs = {n: getattr(dep.owner, dep.name) for n, dep in kw.items()} -# yield from func(*args, **dep_kwargs) # noqa: E999 -# return cb diff --git a/hololinked/param/exceptions.py b/hololinked/param/exceptions.py deleted file mode 100644 index 864139a..0000000 --- a/hololinked/param/exceptions.py +++ /dev/null @@ -1,64 +0,0 @@ -import textwrap -import typing -from contextlib import contextmanager - -def wrap_error_text(text : str) -> str: - # return T.wrap(text) - #'\n'+'\n'.join([line.lstrip() - return textwrap.fill( - text = textwrap.dedent(text).lstrip(), - initial_indent='\n', - expand_tabs=True, - replace_whitespace= True, - ) - -def raise_TypeError(message, parameter) -> typing.NoReturn: - owner_str = '' - if isinstance(parameter, Parameter): - owner_str = f" Owner info : {parameter.owner}, parameter name : {parameter.name}." - elif issubclass(parameter, Parameter): - owner_str = '' - raise TypeError(message + owner_str) - -def raise_ValueError(message, parameter) -> typing.NoReturn: - owner_str = '' - if isinstance(parameter, Parameter): - owner_str = f" Owner info : {parameter.owner}, parameter name : {parameter.name}." - elif issubclass(parameter, Parameter): - owner_str = '' - raise ValueError(message + owner_str) - - -def get_iterable_printfriendly_repr(iterable): - # This method can be called before __init__ has called - # super's __init__, so there may not be any name set yet. 
- items = [] - limiter = ']' - length = 0 - for item in iterable: - string = str(item) - length += len(string) - if length < 200: - items.append(string) - else: - limiter = ', ...]' - break - items = '[' + ', '.join(items) + limiter - return items - - -@contextmanager -def exceptions_summarized(): - """Useful utility for writing docs that need to show expected errors. - Shows exception only, concisely, without a traceback. - """ - try: - yield - except Exception: - import sys - etype, value, tb = sys.exc_info() - print("{}: {}".format(etype.__name__,value), file=sys.stderr) - -from .parameterized import Parameter - -__all__ = ['wrap_error_text', 'raise_TypeError', 'raise_ValueError', 'get_iterable_printfriendly_repr'] \ No newline at end of file diff --git a/hololinked/param/extensions.py b/hololinked/param/extensions.py deleted file mode 100644 index 3a43ca5..0000000 --- a/hololinked/param/extensions.py +++ /dev/null @@ -1,171 +0,0 @@ -import typing -import numpy -from types import FunctionType - -from .parameterized import Parameterized, ParameterizedMetaclass -from .parameters import * - - - -class NumpyArray(ClassSelector): - """ - Parameter whose value is a numpy array. 
- """ - - def __init__(self, default=None, doc : typing.Union[str, None] = None, - constant : bool = False, readonly : bool = False, allow_None : bool = False, - label : typing.Union[str, None] = None, per_instance : bool = False, deep_copy : bool = False, - class_member : bool = False, fget : FunctionType = None, fset : FunctionType = None, - precedence : float = None) -> None: - - super(NumpyArray, self).__init__(class_=numpy.ndarray, default=default, doc=doc, - constant=constant, readonly=readonly, allow_None=allow_None, label=label, per_instance=per_instance, - deep_copy=deep_copy, class_member=class_member, fget=fget, fset=fset, precedence=precedence) - - @typing.overload - def __get__(self, obj : typing.Union[Parameterized, typing.Any], - objtype: typing.Union[ParameterizedMetaclass, typing.Any]) -> numpy.ndarray: - ... - - @classmethod - def serialize(cls, value : typing.Union[numpy.ndarray, None]): - if value is None: - return None - return value.tolist() - - @classmethod - def deserialize(cls, value): - if value == 'null' or value is None: - return None - return numpy.asarray(value) - - -from pandas import DataFrame as pdDFrame - - -class DataFrame(ClassSelector): - """ - Parameter whose value is a pandas DataFrame. - - The structure of the DataFrame can be constrained by the rows and - columns arguments: - - rows: If specified, may be a number or an integer bounds tuple to - constrain the allowable number of rows. - - columns: If specified, may be a number, an integer bounds tuple, a - list or a set. If the argument is numeric, constrains the number of - columns using the same semantics as used for rows. If either a list - or set of strings, the column names will be validated. If a set is - used, the supplied DataFrame must contain the specified columns and - if a list is given, the supplied DataFrame must contain exactly the - same columns and in the same order and no other columns. 
- """ - - __slots__ = ['rows', 'columns', 'ordered'] - - def __init__(self, default=None, rows=None, columns=None, ordered=None, **params): - self.rows = rows - self.columns = columns - self.ordered = ordered - super(DataFrame,self).__init__(pdDFrame, default=default, **params) - self._validate(self.default) - - def _length_bounds_check(self, bounds, length, name): - message = '{name} length {length} does not match declared bounds of {bounds}' - if not isinstance(bounds, tuple): - if (bounds != length): - raise ValueError(message.format(name=name, length=length, bounds=bounds)) - else: - return - (lower, upper) = bounds - failure = ((lower is not None and (length < lower)) - or (upper is not None and length > upper)) - if failure: - raise ValueError(message.format(name=name,length=length, bounds=bounds)) - - def _validate(self, val): - super(DataFrame, self)._validate(val) - - if isinstance(self.columns, set) and self.ordered is True: - raise ValueError('Columns cannot be ordered when specified as a set') - - if self.allow_None and val is None: - return - - if self.columns is None: - pass - elif (isinstance(self.columns, tuple) and len(self.columns)==2 - and all(isinstance(v, (type(None), numbers.Number)) for v in self.columns)): # Numeric bounds tuple - self._length_bounds_check(self.columns, len(val.columns), 'Columns') - elif isinstance(self.columns, (list, set)): - self.ordered = isinstance(self.columns, list) if self.ordered is None else self.ordered - difference = set(self.columns) - set([str(el) for el in val.columns]) - if difference: - msg = 'Provided DataFrame columns {found} does not contain required columns {expected}' - raise ValueError(msg.format(found=list(val.columns), expected=sorted(self.columns))) - else: - self._length_bounds_check(self.columns, len(val.columns), 'Column') - - if self.ordered: - if list(val.columns) != list(self.columns): - msg = 'Provided DataFrame columns {found} must exactly match {expected}' - raise 
ValueError(msg.format(found=list(val.columns), expected=self.columns)) - - if self.rows is not None: - self._length_bounds_check(self.rows, len(val), 'Row') - - @classmethod - def serialize(cls, value): - if value is None: - return 'null' - return value.to_dict('records') - - @classmethod - def deserialize(cls, value): - if value == 'null': - return None - from pandas import DataFrame as pdDFrame - return pdDFrame(value) - - - -class Series(ClassSelector): - """ - Parameter whose value is a pandas Series. - - The structure of the Series can be constrained by the rows argument - which may be a number or an integer bounds tuple to constrain the - allowable number of rows. - """ - - __slots__ = ['rows'] - - def __init__(self, default=None, rows=None, allow_None=False, **params): - from pandas import Series as pdSeries - self.rows = rows - super(Series,self).__init__(pdSeries, default=default, allow_None=allow_None, - **params) - self._validate(self.default) - - def _length_bounds_check(self, bounds, length, name): - message = '{name} length {length} does not match declared bounds of {bounds}' - if not isinstance(bounds, tuple): - if (bounds != length): - raise ValueError(message.format(name=name, length=length, bounds=bounds)) - else: - return - (lower, upper) = bounds - failure = ((lower is not None and (length < lower)) - or (upper is not None and length > upper)) - if failure: - raise ValueError(message.format(name=name,length=length, bounds=bounds)) - - def _validate(self, val): - super(Series, self)._validate(val) - - if self.allow_None and val is None: - return - - if self.rows is not None: - self._length_bounds_check(self.rows, len(val), 'Row') diff --git a/hololinked/param/ipython.py b/hololinked/param/ipython.py deleted file mode 100644 index 030875b..0000000 --- a/hololinked/param/ipython.py +++ /dev/null @@ -1,357 +0,0 @@ -""" -Optional IPython extension for working with Parameters. 
- -This extension offers extended but completely optional functionality -for IPython users. From within IPython, it may be loaded using: - -%load_ext param.ipython - -This will register the %params line magic to allow easy inspection of -all the parameters defined on a parameterized class or object: - -%params - -All parameters of the class or object will be listed in the IPython -pager together with all their corresponding attributes and -docstrings. Note that the class or object to be inspected must already -exist in the active namespace. -""" - -__author__ = "Jean-Luc Stevens" - -import re -import sys -import itertools -import textwrap -import param - - -# Whether to generate warnings when misformatted docstrings are found -WARN_MISFORMATTED_DOCSTRINGS = False - -# ANSI color codes for the IPython pager -red = '\x1b[1;31m%s\x1b[0m' -blue = '\x1b[1;34m%s\x1b[0m' -green = '\x1b[1;32m%s\x1b[0m' -cyan = '\x1b[1;36m%s\x1b[0m' - - - -class ParamPager(object): - """ - Callable class that displays information about the supplied - Parameterized object or class in the IPython pager. - """ - - def __init__(self, metaclass=False): - """ - If metaclass is set to True, the checks for Parameterized - classes objects are disabled. This option is for use in - ParameterizedMetaclass for automatic docstring generation. - """ - # Order of the information to be listed in the table (left to right) - self.order = ['name', 'changed', 'value', 'type', 'bounds', 'mode'] - self.metaclass = metaclass - - - def get_param_info(self, obj, include_super=True): - """ - Get the parameter dictionary, the list of modifed parameters - and the dictionary of parameter values. If include_super is - True, parameters are also collected from the super classes. 
- """ - - params = dict(obj.parameters.objects(existing=True)) - if isinstance(obj,type): - changed = [] - val_dict = dict((k,p.default) for (k,p) in params.items()) - self_class = obj - else: - changed = list(obj.param.values(onlychanged=True).keys()) - val_dict = obj.param.values() - self_class = obj.__class__ - - if not include_super: - params = dict((k,v) for (k,v) in params.items() - if k in self_class.__dict__) - - params.pop('name') # Already displayed in the title. - return (params, val_dict, changed) - - - def param_docstrings(self, info, max_col_len=100, only_changed=False): - """ - Build a string to that presents all of the parameter - docstrings in a clean format (alternating red and blue for - readability). - """ - - (params, val_dict, changed) = info - contents = [] - displayed_params = [] - for name in self.sort_by_precedence(params): - if only_changed and not (name in changed): - continue - displayed_params.append((name, params[name])) - - right_shift = max(len(name) for name, _ in displayed_params)+2 - - for i, (name, p) in enumerate(displayed_params): - heading = "%s: " % name - unindented = textwrap.dedent("< No docstring available >" if p.doc is None else p.doc) - - if (WARN_MISFORMATTED_DOCSTRINGS - and not unindented.startswith("\n") and len(unindented.splitlines()) > 1): - param.main.warning("Multi-line docstring for %r is incorrectly formatted " - " (should start with newline)", name) - # Strip any starting newlines - while unindented.startswith("\n"): - unindented = unindented[1:] - - lines = unindented.splitlines() - if len(lines) > 1: - tail = ['%s%s' % (' ' * right_shift, line) for line in lines[1:]] - all_lines = [ heading.ljust(right_shift) + lines[0]] + tail - elif len(lines) == 1: - all_lines = [ heading.ljust(right_shift) + lines[0]] - else: - all_lines = [] - - if i % 2: # Alternate red and blue for docstrings - contents.extend([red %el for el in all_lines]) - else: - contents.extend([blue %el for el in all_lines]) - - return 
"\n".join(contents) - - - def sort_by_precedence(self, parameters): - """ - Sort the provided dictionary of parameters by their precedence value. - In Python 3, preserves the original ordering for parameters with the - same precedence; for Python 2 sorts them lexicographically by name, - unless explicit precedences are provided. - """ - params = [(p, pobj) for p, pobj in parameters.items()] - key_fn = lambda x: x[1].precedence if x[1].precedence is not None else 1e-8 - sorted_params = sorted(params, key=key_fn) - groups = itertools.groupby(sorted_params, key=key_fn) - # Params preserve definition order in Python 3.6+ - dict_ordered = ( - (sys.version_info.major == 3 and sys.version_info.minor >= 6) or - (sys.version_info.major > 3) or - all(p.precedence is not None for p in parameters.values()) - ) - ordered_groups = [list(grp) if dict_ordered else sorted(grp) for (_, grp) in groups] - ordered_params = [el[0] for group in ordered_groups for el in group - if (el[0] != 'name' or el[0] in parameters)] - return ordered_params - - - def _build_table(self, info, order, max_col_len=40, only_changed=False): - """ - Collect the information about parameters needed to build a - properly formatted table and then tabulate it. - """ - - info_list, bounds_dict = [], {} - (params, val_dict, changed) = info - col_widths = dict((k,0) for k in order) - - ordering = self.sort_by_precedence(params) - for name in ordering: - p = params[name] - if only_changed and not (name in changed): - continue - - constant = 'C' if p.constant else 'V' - readonly = 'RO' if p.readonly else 'RW' - allow_None = ' AN' if hasattr(p, 'allow_None') and p.allow_None else '' - - mode = '%s %s%s' % (constant, readonly, allow_None) - - value = repr(val_dict[name]) - if len(value) > (max_col_len - 3): - value = value[:max_col_len-3] + '...' 
- - p_dict = {'name': name, 'type': p.__class__.__name__, - 'mode': mode, 'value': value} - - if hasattr(p, 'bounds'): - lbound, ubound = (None,None) if p.bounds is None else p.bounds - - mark_lbound, mark_ubound = False, False - # Use soft_bounds when bounds not defined. - if hasattr(p, 'get_soft_bounds'): - soft_lbound, soft_ubound = p.get_soft_bounds() - if lbound is None and soft_lbound is not None: - lbound = soft_lbound - mark_lbound = True - if ubound is None and soft_ubound is not None: - ubound = soft_ubound - mark_ubound = True - - if (lbound, ubound) != (None,None): - bounds_dict[name] = (mark_lbound, mark_ubound) - p_dict['bounds'] = '(%s, %s)' % (lbound, ubound) - - for col in p_dict: - max_width = max([col_widths[col], len(p_dict[col])]) - col_widths[col] = max_width - - info_list.append((name, p_dict)) - - return self._tabulate(info_list, col_widths, changed, order, bounds_dict) - - - def _tabulate(self, info_list, col_widths, changed, order, bounds_dict): - """ - Returns the supplied information as a table suitable for - printing or paging. - - info_list: List of the parameters name, type and mode. - col_widths: Dictionary of column widths in characters - changed: List of parameters modified from their defaults. 
- order: The order of the table columns - bound_dict: Dictionary of appropriately formatted bounds - """ - - contents, tail = [], [] - column_set = set(k for _, row in info_list for k in row) - columns = [col for col in order if col in column_set] - - title_row = [] - # Generate the column headings - for i, col in enumerate(columns): - width = col_widths[col]+2 - col = col.capitalize() - formatted = col.ljust(width) if i == 0 else col.center(width) - title_row.append(formatted) - contents.append(blue % ''.join(title_row)+"\n") - - # Format the table rows - for row, info in info_list: - row_list = [] - for i,col in enumerate(columns): - width = col_widths[col]+2 - val = info[col] if (col in info) else '' - formatted = val.ljust(width) if i==0 else val.center(width) - - if col == 'bounds' and bounds_dict.get(row,False): - (mark_lbound, mark_ubound) = bounds_dict[row] - lval, uval = formatted.rsplit(',') - lspace, lstr = lval.rsplit('(') - ustr, uspace = uval.rsplit(')') - lbound = lspace + '('+(cyan % lstr) if mark_lbound else lval - ubound = (cyan % ustr)+')'+uspace if mark_ubound else uval - formatted = "%s,%s" % (lbound, ubound) - row_list.append(formatted) - - row_text = ''.join(row_list) - if row in changed: - row_text = red % row_text - - contents.append(row_text) - - return '\n'.join(contents+tail) - - - def __call__(self, param_obj): - """ - Given a Parameterized object or class, display information - about the parameters in the IPython pager. 
- """ - title = None - if not self.metaclass: - parameterized_object = isinstance(param_obj, param.parameterized) - parameterized_class = (isinstance(param_obj,type) - and issubclass(param_obj,param.parameterized)) - - if not (parameterized_object or parameterized_class): - print("Object is not a Parameterized class or object.") - return - - if parameterized_object: - # Only show the name if not autogenerated - class_name = param_obj.__class__.__name__ - default_name = re.match('^'+class_name+'[0-9]+$', param_obj.name) - obj_name = '' if default_name else (' %r' % param_obj.name) - title = 'Parameters of %r instance%s' % (class_name, obj_name) - - if title is None: - title = 'Parameters of %r' % param_obj.name - - heading_line = '=' * len(title) - heading_text = "%s\n%s\n" % (title, heading_line) - - param_info = self.get_param_info(param_obj, include_super=True) - if not param_info[0]: - return "%s\n%s" % ((green % heading_text), "Object has no parameters.") - - table = self._build_table(param_info, self.order, max_col_len=40, - only_changed=False) - - docstrings = self.param_docstrings(param_info, max_col_len=100, only_changed=False) - dflt_msg = "Parameters changed from their default values are marked in red." - top_heading = (green % heading_text) - top_heading += "\n%s" % (red % dflt_msg) - top_heading += "\n%s" % (cyan % "Soft bound values are marked in cyan.") - top_heading += '\nC/V= Constant/Variable, RO/RW = ReadOnly/ReadWrite, AN=Allow None' - - heading_text = 'Parameter docstrings:' - heading_string = "%s\n%s" % (heading_text, '=' * len(heading_text)) - docstring_heading = (green % heading_string) - return "%s\n\n%s\n\n%s\n\n%s" % (top_heading, table, docstring_heading, docstrings) - - -message = """Welcome to the param IPython extension! 
(https://param.holoviz.org/)""" -message += '\nAvailable magics: %params' - -_loaded = False - -def load_ipython_extension(ip, verbose=True): - - from IPython.core.magic import Magics, magics_class, line_magic - from IPython.core import page - - - @magics_class - class ParamMagics(Magics): - """ - Implements the %params line magic used to inspect the parameters - of a parameterized class or object. - """ - def __init__(self, *args, **kwargs): - super(ParamMagics, self).__init__(*args, **kwargs) - self.param_pager = ParamPager() - - - @line_magic - def params(self, parameter_s='', namespaces=None): - """ - The %params line magic accepts a single argument which is a - handle on the parameterized object to be inspected. If the - object can be found in the active namespace, information about - the object's parameters is displayed in the IPython pager. - - Usage: %params - """ - if parameter_s=='': - print("Please specify an object to inspect.") - return - - # Beware! Uses IPython internals that may change in future... - obj = self.shell._object_find(parameter_s) - if obj.found is False: - print("Object %r not found in the namespace." 
% parameter_s) - return - - page.page(self.param_pager(obj.obj)) - - - if verbose: print(message) - - global _loaded - if not _loaded: - _loaded = True - ip.register_magics(ParamMagics) diff --git a/hololinked/param/logger.py b/hololinked/param/logger.py deleted file mode 100644 index 07378ff..0000000 --- a/hololinked/param/logger.py +++ /dev/null @@ -1,47 +0,0 @@ -import logging -from contextlib import contextmanager -from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL, Logger - - - -VERBOSE = INFO - 1 -logging.addLevelName(VERBOSE, "VERBOSE") - -def get_logger(name : str = None) -> Logger: - if name is None: - root_logger = logging.getLogger('param') - if not root_logger.handlers: - root_logger.setLevel(logging.INFO) - formatter = logging.Formatter( - fmt='%(levelname)s:%(name)s: %(message)s') - handler = logging.StreamHandler() - handler.setFormatter(formatter) - root_logger.addHandler(handler) - return root_logger - else: - return logging.getLogger('param.' + name) - - -@contextmanager -def logging_level(level : int): - """ - Temporarily modify param's logging level. - """ - level = level.upper() - levels = [DEBUG, INFO, WARNING, ERROR, CRITICAL, VERBOSE] - level_names = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL', 'VERBOSE'] - - if level not in level_names: - raise Exception("Level %r not in %r" % (level, levels)) - - param_logger = get_logger() - logging_level = param_logger.getEffectiveLevel() - param_logger.setLevel(levels[level_names.index(level)]) - try: - yield None - finally: - param_logger.setLevel(logging_level) - - - -__all__ = ['logging_level', 'get_logger'] \ No newline at end of file diff --git a/hololinked/param/parameterized.py b/hololinked/param/parameterized.py deleted file mode 100644 index 974a8ed..0000000 --- a/hololinked/param/parameterized.py +++ /dev/null @@ -1,2088 +0,0 @@ -""" -Generic support for objects with full-featured Parameters and -messaging. 
- -This file comes from the Param library (https://github.com/holoviz/param) -but can be taken out of the param module and used on its own if desired, -either alone (providing basic Parameter support) or with param's -__init__.py (providing specialized Parameter types). -""" - -import copy -import datetime -import re -import numbers -import operator -import inspect -import threading -import typing -from types import FunctionType, TracebackType -from enum import Enum -from dataclasses import dataclass, field -from collections import OrderedDict, defaultdict -from functools import partial, wraps -from operator import itemgetter, attrgetter -from contextlib import contextmanager - -from .utils import * -from .exceptions import * -from .serializer import serializers - -try: - # In case the optional ipython module is unavailable - from .ipython import ParamPager - param_pager = ParamPager(metaclass=True) # Generates param description -except: - param_pager = None - -dt_types = (datetime.datetime, datetime.date) - -try: - import numpy as np - dt_types = dt_types + (np.datetime64,) -except: - pass - -# External components can register an async executor which will run -# async functions -async_executor = None - -Undefined = NotImplemented - - -def instance_descriptor(f : typing.Callable[['Parameter', 'Parameterized', typing.Any], None]) -> typing.Callable[[ - 'Parameter', 'Parameterized', typing.Any], None]: - # If parameter has an instance Parameter, delegate setting - def fset(self : 'Parameter', obj : 'Parameterized', val : typing.Any) -> None: - if hasattr(obj, 'parameters'): - if hasattr(obj.parameters, '_instance_params'): - instance_param = obj.parameters._instance_params.get(self.name, None) - if instance_param is not None and self is not instance_param: - instance_param.__set__(obj, val) - return - return f(self, obj, val) - return fset - - - -class ParameterMetaclass(type): - """ - Metaclass allowing control over creation of Parameter classes. 
- """ - def __new__(mcs, classname : str, bases : typing.Tuple[typing.Any], - classdict : typing.Dict[str, typing.Any]) -> 'ParameterMetaclass': - - # store the class's docstring in __classdoc - if '__doc__' in classdict: - classdict['__classdoc'] = classdict['__doc__'] - - # when asking for help on Parameter *object*, return the doc slot - classdict['__doc__'] = property(attrgetter('doc')) - - # To get the benefit of slots, subclasses must themselves define - # __slots__, whether or not they define attributes not present in - # the base Parameter class. That's because a subclass will have - # a __dict__ unless it also defines __slots__. - if '__slots__' not in classdict: - classdict['__slots__'] = [] - if '__parent_slots__' not in classdict: - classdict['__parent_slots__'] = [] - - for base in bases: # there will almost always only one base because slots dont support multiple inheritance - for base_ in inspect.getmro(base): - if hasattr(base_, '__slots__'): - # check _post_slot_set in Parameter to understand the requirement - classdict['__parent_slots__'].extend(base_.__slots__) # type: ignore - - # No special handling for a __dict__ slot; should there be? - return type.__new__(mcs, classname, bases, classdict) - - def __getattribute__(mcs, name : str) -> typing.Any: - if name == '__doc__': - # when asking for help on Parameter *class*, return the - # stored class docstring - return type.__getattribute__(mcs, '__classdoc') - else: - return type.__getattribute__(mcs, name) - - - -class Parameter(metaclass=ParameterMetaclass): - """ - An attribute descriptor for declaring parameters. - - Parameters are a special kind of class attribute. Setting a - Parameterized class attribute to be a Parameter instance causes - that attribute of the class (and the class's instances) to be - treated as a Parameter. 
This allows special behavior, including - dynamically generated parameter values, documentation strings, - constant and read-only parameters, and type or range checking at - assignment time. - - For example, suppose someone wants to define two new kinds of - objects Foo and Bar, such that Bar has a parameter delta, Foo is a - subclass of Bar, and Foo has parameters alpha, sigma, and gamma - (and delta inherited from Bar). She would begin her class - definitions with something like this:: - - class Bar(Parameterized): - delta = Parameter(default=0.6, doc='The difference between steps.') - ... - class Foo(Bar): - alpha = Parameter(default=0.1, doc='The starting value.') - sigma = Parameter(default=0.5, doc='The standard deviation.', - constant=True) - gamma = Parameter(default=1.0, doc='The ending value.') - ... - - Class Foo would then have four parameters, with delta defaulting - to 0.6. - - Parameters have several advantages over plain attributes: - - 1. Parameters can be set automatically when an instance is - constructed: The default constructor for Foo (and Bar) will - accept arbitrary keyword arguments, each of which can be used - to specify the value of a Parameter of Foo (or any of Foo's - superclasses). E.g., if a script does this:: - - myfoo = Foo(alpha=0.5) - - myfoo.alpha will return 0.5, without the Foo constructor - needing special code to set alpha. - - If Foo implements its own constructor, keyword arguments will - still be accepted if the constructor accepts a dictionary of - keyword arguments (as in ``def __init__(self,**params):``), and - then each class calls its superclass (as in - ``super(Foo,self).__init__(**params)``) so that the - Parameterized constructor will process the keywords. - - 2. A Parameterized class need specify only the attributes of a - Parameter whose values differ from those declared in - superclasses; the other values will be inherited. E.g. 
if Foo - declares:: - - delta = Parameter(default=0.2) - - the default value of 0.2 will override the 0.6 inherited from - Bar, but the doc will be inherited from Bar. - - 3. The Parameter descriptor class can be subclassed to provide - more complex behavior, allowing special types of parameters - that, for example, require their values to be numbers in - certain ranges, generate their values dynamically from a random - distribution, or read their values from a file or other - external source. - - 4. The attributes associated with Parameters provide enough - information for automatically generating property sheets in - graphical user interfaces, allowing Parameterized instances to - be edited by users. - - Note that Parameters can only be used when set as class attributes - of Parameterized classes. Parameters used as standalone objects, - or as class attributes of non-Parameterized classes, will not have - the behavior described here. - """ - - # Be careful when referring to the 'name' of a Parameter: - # - # * A Parameterized class has a name for the attribute which is - # being represented by the Parameter in the code, - # this is called the 'attrib_name'. - # - # * When a Parameterized instance has its own local value for a - # parameter, it is stored as '_X_param_value' (where X is the - # attrib_name for the Parameter); in the code, this is called - # the internal_name. - - - # So that the extra features of Parameters do not require a lot of - # overhead, Parameters are implemented using __slots__ (see - # http://www.python.org/doc/2.4/ref/slots.html). - - __slots__ = ['default', 'doc', 'constant', 'readonly', 'allow_None', - 'per_instance_descriptor', 'deepcopy_default', 'class_member', 'precedence', - 'owner', 'name', '_internal_name', 'watchers', 'overloads', - '_disable_post_slot_set'] - - # Note: When initially created, a Parameter does not know which - # Parameterized class owns it. 
Once the owning Parameterized - # class is created, owner, name, and _internal_name are - # set. - - def __init__(self, default : typing.Any, *, doc : typing.Optional[str] = None, - constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, - fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: # pylint: disable-msg=R0913 - - """Initialize a new Parameter object and store the supplied attributes: - - default: the owning class's value for the attribute represented - by this Parameter, which can be overridden in an instance. - - doc: docstring explaining what this parameter represents. - - constant: if true, the Parameter value can be changed only at - the class level or in a Parameterized constructor call. The - value is otherwise constant on the Parameterized instance, - once it has been constructed. - - readonly: if true, the Parameter value cannot ordinarily be - changed by setting the attribute at the class or instance - levels at all. The value can still be changed in code by - temporarily overriding the value of this slot and then - restoring it, which is useful for reporting values that the - _user_ should never change but which do change during code - execution. - - allow_None: if True, None is accepted as a valid value for - this Parameter, in addition to any other values that are - allowed. If the default value is defined as None, allow_None - is set to True automatically. - - label: optional text label to be used when this Parameter is - shown in a listing. If no label is supplied, the attribute name - for this parameter in the owning Parameterized object is used. - - per_instance_descriptor: whether a separate Parameter instance will be - created for every Parameterized instance. 
True by default. - If False, all instances of a Parameterized class will share - the same Parameter object, including all validation - attributes (bounds, etc.). - - deepcopy_default: controls whether the value of this Parameter will - be deepcopied when a Parameterized object is instantiated (if - True), or if the single default value will be shared by all - Parameterized instances (if False). For an immutable Parameter - value, it is best to leave deepcopy_default at the default of - False, so that a user can choose to change the value at the - Parameterized instance level (affecting only that instance) or - at the Parameterized class or superclass level (affecting all - existing and future instances of that class or superclass). For - a mutable Parameter value, the default of False is also appropriate - if you want all instances to share the same value state, e.g. if - they are each simply referring to a single global object like - a singleton. If instead each Parameterized should have its own - independently mutable value, deepcopy_default should be set to - True, but note that there is then no simple way to change the - value of this Parameter at the class or superclass level, - because each instance, once created, will then have an - independently deepcopied value. - - class_member : To make a ... - - precedence: a numeric value, usually in the range 0.0 to 1.0, - which allows the order of Parameters in a class to be defined in - a listing or e.g. in GUI menus. A negative precedence indicates - a parameter that should be hidden in such listings. - - default, doc, and precedence all default to None, which allows - inheritance of Parameter slots (attributes) from the owning-class' - class hierarchy (see ParameterizedMetaclass). - - Note - parameter's own attributes are not type checked. if one sets - allow_None = 45 instead of allow_None = True, allow_None will be taken to be True. 
- """ - self._disable_post_slot_set = False - # the above slot should better to stay at top of init for __setattr__ to work uniformly - self.default = default - self.doc = doc - self.constant = constant # readonly is also constant however constants can be set once - self.readonly = readonly - self.allow_None = constant or allow_None - self.per_instance_descriptor = per_instance_descriptor - self.deepcopy_default = deepcopy_default - self.class_member = class_member - self.precedence = precedence - self.watchers : typing.Dict[str, typing.List] = {} - self.overloads : typing.Dict[str, typing.Union[typing.Callable, None]] = dict(fget=fget, - fset=fset, fdel=fdel) - - def __set_name__(self, owner : typing.Any, attrib_name : str) -> None: - self._internal_name = f"_{attrib_name}_param_value" - self.name = attrib_name - self.owner = owner - # This particular order is generally important - - def __setattr__(self, attribute : str, value : typing.Any) -> None: - if attribute == 'name' and getattr(self, 'name', None) and value != self.name: - raise AttributeError("Parameter name cannot be modified after " - "it has been bound to a Parameterized.") - - watched = (attribute != "default" and hasattr(self, 'watchers') and attribute in self.watchers) - slot_attribute = attribute in self.__slots__ or attribute in self.__parent_slots__ # type: ignore - try: - old = getattr(self, attribute) if watched else NotImplemented - except AttributeError as exc: - if slot_attribute: - # If Parameter slot is defined but an AttributeError was raised - # we are in __setstate__ and watchers should not be triggered - old = NotImplemented - else: - raise # exc , dont raise exc as it will cause multiple tracebacks - - super(Parameter, self).__setattr__(attribute, value) - - if slot_attribute and hasattr(self, '_disable_post_slot_set') and not self._disable_post_slot_set: - self._post_slot_set(attribute, old, value) - - if old is NotImplemented or not isinstance(self.owner, Parameterized): - return - 
- event_dispatcher = self.owner.parameters.event_dispatcher - event = Event(what=attribute, name=self.name, obj=None, cls=self.owner, - old=old, new=value, type=None) - for watcher in self.watchers[attribute]: - event_dispatcher.call_watcher(watcher, event) - if not event_dispatcher.state.BATCH_WATCH: - event_dispatcher.batch_call_watchers() - - def _post_slot_set(self, slot : str, old : typing.Any, value : typing.Any) -> None: - """ - Can be overridden on subclasses to handle changes when parameter - attribute is set. Be very careful of circular calls. - """ - # __parent_slots__ attribute is needed for entry into this function correctly otherwise - # slot_attribute in __setattr__ will have wrong boolean flag - if slot == 'owner' and self.owner is not None: - with disable_post_slot_set(self): - self.default = self.validate_and_adapt(self.default) - - def __get__(self, obj : typing.Union['Parameterized', typing.Any], - objtype : typing.Union['ParameterizedMetaclass', typing.Any]) -> typing.Any: # pylint: disable-msg=W0613 - """ - Return the value for this Parameter. - - If called for a Parameterized class, produce that - class's value (i.e. this Parameter object's 'default' - attribute). - - If called for a Parameterized instance, produce that - instance's value, if one has been set - otherwise produce the - class's value (default). - """ - if obj is None: - return self - fget = self.overloads['fget'] - if fget is not None: - return fget(obj) - return obj.__dict__.get(self._internal_name, self.default) - - @instance_descriptor - def __set__(self, obj : typing.Union['Parameterized', typing.Any], value : typing.Any) -> None: - """ - Set the value for this Parameter. - - If called for a Parameterized class, set that class's - value (i.e. set this Parameter object's 'default' attribute). - - If called for a Parameterized instance, set the value of - this Parameter on that instance (i.e. in the instance's - __dict__, under the parameter's internal_name). 
- - If the Parameter's constant attribute is True, only allows - the value to be set for a Parameterized class or on - uninitialized Parameterized instances. - - If the Parameter's readonly attribute is True, only allows the - value to be specified in the Parameter declaration inside the - Parameterized source code. A read-only parameter also - cannot be set on a Parameterized class. - - Note that until we support some form of read-only - object, it is still possible to change the attributes of the - object stored in a constant or read-only Parameter (e.g. one - item in a list). - """ - if self.readonly: - raise_TypeError("Read-only parameter cannot be set/modified.", self) - - value = self.validate_and_adapt(value) - - obj = obj if not self.class_member else self.owner - - old = NotImplemented - if self.constant: - old = None - if (obj.__dict__.get(self._internal_name, NotImplemented) != NotImplemented) or self.default is not None: - # Dont even entertain any type of setting, even if its the same value - raise_TypeError("Constant parameter cannot be modified.", self) - else: - old = obj.__dict__.get(self._internal_name, self.default) - - # The following needs to be optimised, probably through lambda functions? - fset = self.overloads['fset'] - if fset is not None: - fset(obj, value) - else: - obj.__dict__[self._internal_name] = value - - self._post_value_set(obj, value) - - if not isinstance(obj, (Parameterized, ParameterizedMetaclass)): - """ - dont deal with events, watchers etc when object is not a Parameterized class child. - Many variables like obj.param below will also raise AttributeError. - This will enable generic use of Parameters without adherence to Parameterized subclassing. 
- """ - return - - event_dispatcher = obj.parameters.event_dispatcher - event_dispatcher.update_dynamic_dependencies(self.name) - - if self.name in event_dispatcher.all_watchers: - watchers = event_dispatcher.all_watchers[self.name].get('value', None) - if watchers is None: - watchers = self.watchers.get('value', None) - if watchers is None: - return - - event = Event(what=parameter.VALUE, name=self.name, obj=obj, cls=self.owner, - old=old, new=value, type=None) - - # Copy watchers here since they may be modified inplace during iteration - for watcher in sorted(watchers, key=lambda w: w.precedence): - event_dispatcher.call_watcher(watcher, event) - if not event_dispatcher.state.BATCH_WATCH: - event_dispatcher.batch_call_watchers() - - def validate_and_adapt(self, value : typing.Any) -> typing.Any: - """ - modify the given value if a proper logical reasoning can be given. - returns modified value. Should not be mostly used unless the data stored is quite complex by structure. - """ - # raise NotImplementedError("overload this function in child class to validate your value and adapt it if necessary.") - return value - - def _post_value_set(self, obj : typing.Union['Parameterized', typing.Any], value : typing.Any) -> None: - """Called after the parameter value has been validated and set""" - - def __getstate__(self): - """ - All Parameters have slots, not a dict, so we have to support - pickle and deepcopy ourselves. 
- """ - - state = {} - for slot in self.__slots__ + self.__parent_slots__: - state[slot] = getattr(self, slot) - state.pop('_disable_post_slot_set') - return state - - def __setstate__(self, state : typing.Dict[str, typing.Any]): - """ - set values of __slots__ (instead of in non-existent __dict__) - """ - # Handle renamed slots introduced for instance params - # if '_attrib_name' in state: - # state['name'] = state.pop('_attrib_name') - # if '_owner' in state: - # state['owner'] = state.pop('_owner') - # if 'watchers' not in state: - # state['watchers'] = {} - # if 'per_instance_descriptor' not in state: - # state['per_instance_descriptor'] = False - # if '_label' not in state: - # state['_label'] = None - with disable_post_slot_set(self): - for (k,v) in state.items(): - setattr(self,k,v) - - def getter(self, func : typing.Callable) -> typing.Callable: - self.overloads['fget'] = func - return func - - def setter(self, func : typing.Callable) -> typing.Callable: - self.overloads['fset'] = func - return func - - def deleter(self, func : typing.Callable) -> typing.Callable: - self.overloads['fdel'] = func - return func - - @classmethod - def serialize(cls, value : typing.Any) -> typing.Any: - "Given the parameter value, return a Python value suitable for serialization" - return value - - @classmethod - def deserialize(cls, value : typing.Any) -> typing.Any: - "Given a serializable Python value, return a value that the parameter can be set to" - return value - - def schema(self, safe : bool = False, subset : typing.Optional[typing.List] = None, - mode : str = 'json') -> typing.Dict[str, typing.Any]: - if mode not in serializers: - raise KeyError(f"Mode {mode} not in available serialization formats {list(serializers.keys())}") - return serializers[mode].param_schema(self.__class__.__name__, self, safe=safe, subset=subset) - - - -class disable_post_slot_set: - def __init__(self, parameter : 'Parameter'): - self.parameter = parameter - - def __enter__(self): - 
self.parameter._disable_post_slot_set = True - - def __exit__(self, exc_type, exc_value, traceback): - self.parameter._disable_post_slot_set = False - - -class parameter(Enum): - VALUE = 'value' - DOC = 'doc' - CONSTANT = 'constant' - READONLY = 'readonly' - ALLOW_NONE = 'allow_None' - PER_INSTANCE_DESCRIPTORS = 'per_instance_descriptor' - DEEPCOPY_DEFAULT = 'deepcopy_default' - CLASS_MEMBER = 'class_member' - PRECEDENCE = 'precedence' - OWNER = 'owner' - NAME = 'name' - WATCHERS = 'watchers' - OVERLOADS = 'overload' - # small letters creates clashes with name and value attribute - - -@dataclass -class Event: - """ - Object representing an event that triggers a Watcher. - what : What is being watched on the Parameter (either value or a slot name) - name : Name of the Parameter that was set or triggered - obj : Parameterized instance owning the watched Parameter, or None - cls : Parameterized class owning the watched Parameter - old : Previous value of the item being watched - new : New value of the item being watched - type : 'triggered' if this event was triggered explicitly, 'changed' if - the item was set and watching for 'onlychanged', 'set' if the item was set, - or None if type not yet known - """ - what : typing.Union[str, Enum] - name : str - obj : typing.Optional[typing.Union["Parameterized", "ParameterizedMetaclass"]] - cls : typing.Union["Parameterized", "ParameterizedMetaclass"] - old : typing.Any - new : typing.Any - type: typing.Optional[str] - - -@contextmanager -def edit_constant(obj : typing.Union['Parameterized', 'Parameter']): - """ - Temporarily set parameters on Parameterized object to constant=False - to allow editing them. 
- """ - if isinstance(obj, Parameterized): - params = obj.parameters.descriptors.values() - constants = [p.constant for p in params] - for p in params: - p.constant = False - try: - yield - except: - raise - finally: - for (p, const) in zip(params, constants): - p.constant = const - elif isinstance(obj, Parameter): - constant = obj.constant - obj.constant = False - try: - yield - except: - raise - finally: - obj.constant = constant - else: - raise TypeError(f"argument to edit_constant must be a parameter or parameterized instance, given type : {type(obj)}") - - -@dataclass -class GeneralDependencyInfo: - """ - Dependency info attached to a method of a Parameterized subclass. - """ - dependencies : typing.Tuple[typing.Union[str, Parameter]] - queued : bool - on_init : bool - invoke : bool - - -@dataclass -class ParameterDependencyInfo: - """ - Object describing something being watched about a Parameter. - - inst: Parameterized instance owning the Parameter, or None - cls: Parameterized class owning the Parameter - name: Name of the Parameter being watched - pobj: Parameter object being watched - what: What is being watched on the Parameter (either 'value' or a slot name) - """ - inst : typing.Optional["Parameterized"] # optional while being unbound - cls : "ParameterizedMetaclass" - name : str - pobj : Parameter - what : typing.Union[str, Enum] - - -@dataclass -class DynamicDependencyInfo: - """ - Object describing dynamic dependencies. 
@dataclass
class SortedDependencies:
    """
    Container sorting resolved dependencies into statically known ones and
    dynamic ones that can only be resolved once sub-objects exist.
    """
    static : typing.List["ParameterDependencyInfo"] = field(default_factory=list)
    dynamic : typing.List["DynamicDependencyInfo"] = field(default_factory=list)

    def __iadd__(self, other : "SortedDependencies") -> "SortedDependencies":
        # message fixed: previously said "ResolvedDepedency ... iteself"
        assert isinstance(other, SortedDependencies), wrap_error_text(
            f"Can only add other SortedDependencies types to itself, given type {type(other)}")
        self.static += other.static
        self.dynamic += other.dynamic
        return self


def depends_on(*parameters, invoke : bool = True, on_init : bool = True, queued : bool = False) -> typing.Callable:
    """
    Annotates a function or Parameterized method to express its dependencies.

    The specified dependencies can either be Parameter instances or, if a
    method is supplied, strings referring to Parameters of the class or of
    sub-objects (Parameterized objects that are values of this object's
    parameters). Dependencies can be on Parameter values or on other metadata
    about the Parameter.

    invoke  : whether the decorated method is invoked when a dependency changes
    on_init : whether the decorated method is invoked once at instance creation
    queued  : whether triggered invocations are queued until the current event
              has been fully handled
    """
    def decorator(func):
        # *parameters is always a tuple - the old `isinstance(parameters, tuple)`
        # branch was dead code. Validate each entry up front.
        for dep in parameters:
            if not isinstance(dep, (str, Parameter)):
                raise ValueError(wrap_error_text(
                    f"""The depends_on decorator only accepts string types referencing a parameter or parameter
                    instances, found {type(dep).__name__} type instead."""))

        _dinfo = GeneralDependencyInfo(
            dependencies=parameters,
            queued=queued,
            on_init=on_init,
            invoke=invoke
        )
        # message fixed: it previously named a misspelled attribute
        # ('param_depency_info') that is not the one actually reserved
        if hasattr(func, 'param_dependency_info') and not isinstance(func.param_dependency_info, GeneralDependencyInfo):
            raise TypeError("attribute 'param_dependency_info' reserved by param library, please use another name.")
        func.param_dependency_info = _dinfo
        return func
    return decorator


@dataclass
class Watcher:
    """
    Object declaring a callback function to invoke when an Event is triggered
    on a watched item.

    inst : Parameterized instance owning the watched Parameter, or None
    cls  : Parameterized class owning the watched Parameter
    fn   : callback function to invoke when triggered by a watched Parameter
    mode : 'args' to call fn with Event positional args, 'kwargs' to call fn
           with name=new-value keywords
    onlychanged : if True, only trigger for actual changes, not for setting to
           the current value
    parameter_names : names of the Parameters to watch
    what : what to watch on the Parameters (either 'value' or a slot name)
    queued : invoke callbacks triggered while an Event is being processed
           immediately (False), or queue them until that event has been
           handled (True)
    precedence : numeric precedence; lower values execute with higher priority
    """
    inst : "Parameterized"
    cls : "ParameterizedMetaclass"
    fn : typing.Callable
    mode : str
    onlychanged : bool
    parameter_names : typing.Tuple[str]
    what : str
    queued : bool
    precedence : typing.Union[float, int] = field(default=0)
- """ - - inst : "Parameterized" - cls : "ParameterizedMetaclass" - fn : typing.Callable - mode : str - onlychanged : bool - parameter_names : typing.Tuple[str] - what : str - queued : bool - precedence : typing.Union[float, int] = field(default=0) - - -@contextmanager -def _batch_call_watchers(parameterized : typing.Union['Parameterized', 'ParameterizedMetaclass'], - queued : bool = True, run : bool = True): - """ - Internal version of batch_call_watchers, adding control over queueing and running. - Only actually batches events if enable=True; otherwise a no-op. Only actually - calls the accumulated watchers on exit if run=True; otherwise they remain queued. - """ - state = parameterized.parameters.event_dispatcher.state - BATCH_WATCH = state.BATCH_WATCH - state.BATCH_WATCH = queued or state.BATCH_WATCH - try: - yield - finally: - state.BATCH_WATCH = BATCH_WATCH - if run and not BATCH_WATCH: - parameterized.parameters.event_dispatcher.batch_call_watchers() - - -@contextmanager -def batch_call_watchers(parameterized : 'Parameterized'): - """ - Context manager to batch events to provide to Watchers on a - parameterized object. This context manager queues any events - triggered by setting a parameter on the supplied parameterized - object, saving them up to dispatch them all at once when the - context manager exits. - """ - state = parameterized.parameters.event_dispatcher.state - old_BATCH_WATCH = state.BATCH_WATCH - state.BATCH_WATCH = True - try: - yield - finally: - state.BATCH_WATCH = old_BATCH_WATCH - if not old_BATCH_WATCH: - parameterized.parameters.event_dispatcher.batch_call_watchers() - - -@contextmanager -def discard_events(parameterized : 'Parameterized'): - """ - Context manager that discards any events within its scope - triggered on the supplied parameterized object. 
- """ - state = parameterized.parameters.event_dispatcher.state - old_watchers = state.watchers - old_events = state.events - state.watchers = [] - state.events = [] - try: - yield - except: - raise - finally: - state.watchers = old_watchers - state.events = old_events - - -def _skip_event(*events : Event, **kwargs) -> bool: - """ - Checks whether a subobject event should be skipped. - Returns True if all the values on the new subobject - match the values on the previous subobject. - """ - what = kwargs.get('what', 'value') - changed = kwargs.get('changed') - if changed is None: - return False - for e in events: - for p in changed: - if what == 'value': - old = NotImplemented if e.old is None else get_dot_resolved_attr(e.old, p, None) - new = NotImplemented if e.new is None else get_dot_resolved_attr(e.new, p, None) - else: - old = NotImplemented if e.old is None else get_dot_resolved_attr(e.old.parameters[p], what, None) - new = NotImplemented if e.new is None else get_dot_resolved_attr(e.new.parameters[p], what, None) - if not Comparator.is_equal(old, new): - return False - return True - - - -class Comparator(object): - """ - Comparator defines methods for determining whether two objects - should be considered equal. It works by registering custom - comparison functions, which may either be registed by type or with - a predicate function. If no matching comparison can be found for - the two objects the comparison will return False. - - If registered by type the Comparator will check whether both - objects are of that type and apply the comparison. If the equality - function is instead registered with a function it will call the - function with each object individually to check if the comparison - applies. This is useful for defining comparisons for objects - without explicitly importing them. - - To use the Comparator simply call the is_equal function. 
- """ - - equalities = { - numbers.Number: operator.eq, - str: operator.eq, - bytes: operator.eq, - type(None): operator.eq, - type(NotImplemented) : operator.eq - } - equalities.update({ dtt : operator.eq for dtt in dt_types }) # type: ignore - - @classmethod - def is_equal(cls, obj1 : typing.Any, obj2 : typing.Any) -> bool: - for eq_type, eq in cls.equalities.items(): - if ((isinstance(eq_type, FunctionType) and eq_type(obj1) and eq_type(obj2)) - or (isinstance(obj1, eq_type) and isinstance(obj2, eq_type))): - return eq(obj1, obj2) - if isinstance(obj2, (list, set, tuple)): - return cls.compare_iterator(obj1, obj2) - elif isinstance(obj2, dict): - return cls.compare_mapping(obj1, obj2) - return False - - @classmethod - def compare_iterator(cls, obj1 : typing.Any, obj2 : typing.Any) -> bool: - if type(obj1) != type(obj2) or len(obj1) != len(obj2): return False - for o1, o2 in zip(obj1, obj2): - if not cls.is_equal(o1, o2): - return False - return True - - @classmethod - def compare_mapping(cls, obj1 : typing.Any, obj2 : typing.Any) -> bool: - if type(obj1) != type(obj2) or len(obj1) != len(obj2): return False - for k in obj1: - if k in obj2: - if not cls.is_equal(obj1[k], obj2[k]): - return False - else: - return False - return True - - - -@dataclass -class UnresolvedWatcherInfo: - method_name : str - invoke : bool - on_init : bool - static_dependencies : typing.List[ParameterDependencyInfo] - dynamic_dependencies : typing.List[DynamicDependencyInfo] - queued : bool = field(default = False) - - -class EventResolver: - - def __init__(self, owner_cls : 'ParameterizedMetaclass') -> None: - self.owner_cls = owner_cls - self._unresolved_watcher_info : typing.List[UnresolvedWatcherInfo] - - def create_unresolved_watcher_info(self, owner_class_members : dict): - # retrieve depends info from methods and store more conveniently - dependers : typing.List[typing.Tuple[str, typing.Callable, GeneralDependencyInfo]] = [ - (name, method, method.param_dependency_info) for (name, 
method) in owner_class_members.items() - if hasattr(method, 'param_dependency_info')] - - # Resolve dependencies of current class - _watch : typing.List[UnresolvedWatcherInfo] = [] - for name, method, dinfo in dependers: - if not dinfo.invoke: - continue - # No need MInfo - sorted_dependencies = self.method_depends_on(method, dynamic=False) - _watch.append(UnresolvedWatcherInfo( - method_name=name, - invoke=dinfo.invoke, - on_init=dinfo.on_init, - queued=dinfo.queued, - static_dependencies=sorted_dependencies.static, - dynamic_dependencies=sorted_dependencies.dynamic - )) - - # Resolve dependencies in class hierarchy - _inherited : typing.List[UnresolvedWatcherInfo] = [] - for mcs_super in classlist(self.owner_cls)[:-1][::-1]: - if isinstance(mcs_super, ParameterizedMetaclass): - for dep in mcs_super.parameters.event_resolver._unresolved_watcher_info: # type: ignore - why doesnt it work? - assert isinstance(dep, UnresolvedWatcherInfo), wrap_error_text( # dummy assertion to check types - f"""Parameters._unresolved_watcher_info only accept UnresolvedWatcherInfo type, given type {type(dep)}""") - method = getattr(mcs_super, dep.method_name, None) - if method is not None and hasattr(method, 'param_dependency_info'): - assert isinstance(method.param_dependency_info, GeneralDependencyInfo), wrap_error_text( - f"""attribute 'param_depency_info' reserved by param library, - please use another name for your attributes of type {type(method.param_dependency_info)}.""" - ) # dummy assertion to check types - dinfo : GeneralDependencyInfo = method.param_dependency_info - if (not any(dep.method_name == w.method_name for w in _watch+_inherited) and dinfo.invoke): - _inherited.append(dep) - - self._unresolved_watcher_info = _inherited + _watch - - - def method_depends_on(self, method : typing.Callable, dynamic : bool = True, intermediate : bool = True) -> SortedDependencies: - """ - Resolves dependencies declared on a method of Parameterized class. - Dynamic dependencies, i.e. 
dependencies on sub-objects which may - or may not yet be available, are only resolved if dynamic=True. - By default intermediate dependencies, i.e. dependencies on the - path to a sub-object are returned. For example for a dependency - on 'a.b.c' dependencies on 'a' and 'b' are returned as long as - intermediate=True. - - Returns lists of concrete dependencies on available parameters - and dynamic dependencies specifications which have to resolved - if the referenced sub-objects are defined. - """ - dependencies = SortedDependencies() - dinfo : GeneralDependencyInfo = method.param_dependency_info - for d in dinfo.dependencies: - _sorted_dependencies = self.convert_notation_to_dependency_info(d, dynamic, intermediate) - dependencies.dynamic += _sorted_dependencies.dynamic - for dep in _sorted_dependencies.static: - if isinstance(dep, ParameterDependencyInfo): - dependencies.static.append(dep) - else: - dependencies += self.method_depends_on(dep, dynamic, intermediate) - return dependencies - - - def convert_notation_to_dependency_info(self, depended_obj_notation : typing.Union[Parameter, str], - owner_inst : typing.Optional["Parameterized"] = None, dynamic : bool = True, - intermediate : bool = True) -> SortedDependencies: - """ - Resolves a dependency specification into lists of explicit - parameter dependencies and dynamic dependencies. - - Dynamic dependencies are specifications to be resolved when - the sub-object whose parameters are being depended on is - defined. - - During class creation set dynamic=False which means sub-object - dependencies are not resolved. At instance creation and - whenever a sub-object is set on an object this method will be - invoked to determine whether the dependency is available. - - For sub-object dependencies we also return dependencies for - every part of the path, e.g. 
for a dependency specification - like "a.b.c" we return dependencies for sub-object "a" and the - sub-sub-object "b" in addition to the dependency on the actual - parameter "c" on object "b". This is to ensure that if a - sub-object is swapped out we are notified and can update the - dynamic dependency to the new object. Even if a sub-object - dependency can only partially resolved, e.g. if object "a" - does not yet have a sub-object "b" we must watch for changes - to "b" on sub-object "a" in case such a subobject is put in "b". - """ - if isinstance(depended_obj_notation, Parameter): - if not intermediate: - inst = depended_obj_notation.owner if isinstance(depended_obj_notation.owner, Parameterized) else None - cls = depended_obj_notation.owner - if not isinstance(cls, ParameterizedMetaclass): - raise TypeError(wrap_error_text("""Currently dependencies of a parameter from another class except a subclass - of parameterized is not supported""")) - info = ParameterDependencyInfo(inst=inst, cls=cls, name=depended_obj_notation.name, - pobj=depended_obj_notation, what=parameter.VALUE) - return SortedDependencies(static=[info]) - return SortedDependencies() - - obj, attr, what = self.parse_notation(depended_obj_notation) - if obj is None: - src = owner_inst or self.owner_cls - elif not dynamic: - return SortedDependencies(dynamic=[DynamicDependencyInfo(notation=depended_obj_notation)]) - else: - src = get_dot_resolved_attr(owner_inst or self.owner_cls, obj[1::], NotImplemented) - if src == NotImplemented: - path = obj[1:].split('.') - static_deps = [] - # Attempt to partially resolve subobject path to ensure - # that if a subobject is later updated making the full - # subobject path available we have to be notified and - # set up watchers - if len(path) >= 1 and intermediate: - sub_src = None - subpath = path - while sub_src is None and subpath: - subpath = subpath[:-1] - sub_src = get_dot_resolved_attr(owner_inst or self.owner_cls, '.'.join(subpath), None) - if subpath: 
- static_deps += self.convert_notation_to_dependency_info( - '.'.join(path[:len(subpath)+1]), owner_inst, dynamic, intermediate).static - return SortedDependencies( - static=static_deps, - dynamic=[] if intermediate else [DynamicDependencyInfo(notation=depended_obj_notation)] - ) - - cls = (src, None) if isinstance(src, type) else (type(src), src) - if attr == 'parameters': - assert isinstance(obj, str), wrap_error_text("""object preceding parameters access (i.e. .parameters) - in dependency resolution became None due to internal error.""") - sorted_dependencies = self.convert_notation_to_dependency_info(obj[1:], - dynamic, intermediate) - for p in src.parameters: - sorted_dependencies += src.parameters.event_resolver.convert_notation_to_dependency_info(p, - dynamic, intermediate) - return sorted_dependencies - elif attr in src.parameters: - info = ParameterDependencyInfo(inst=owner_inst, cls=src, name=attr, - pobj=src.parameters[attr], what=what) - if obj is None or not intermediate: - return SortedDependencies(static=[info]) - sorted_dependencies = self.convert_notation_to_dependency_info(obj[1:], dynamic, intermediate) - if not intermediate: - sorted_dependencies.static.append(info) - return sorted_dependencies - elif hasattr(src, attr): - attr_obj = getattr(src, attr) - if isinstance(attr_obj, Parameterized): - return SortedDependencies() - elif isinstance(attr_obj, FunctionType): - raise NotImplementedError(wrap_error_text( - f"""In this version of param, support for dependency on other callbacks is removed. - Please divide your methods with your own logic. 
- """)) - else: - raise AttributeError(wrap_error_text( - f"""Attribute {attr!r} could not be resolved on {src} or resolved attribute not supported - for dependent events""")) - else: - raise AttributeError(f"Attribute {attr!r} could not be resolved on {src}.") - - - @classmethod - def parse_notation(cls, notation : str) -> typing.Tuple[typing.Union[str, None], str, str]: - """ - Parses param.depends specifications into three components: - - 1. The dotted path to the sub-object - 2. The attribute being depended on, i.e. either a parameter or method - 3. The parameter attribute being depended on - """ - assert notation.count(":") <= 1, "argument '{notation}' for depends has more than one colon" - notation = notation.strip() - m = re.match(r"(?P[^:]*):?(?P.*)", notation) - assert m is not None, f"could not parse object notation for finding dependecies {notation}" - what = m.group('what') - path = "."+m.group('path') - m = re.match(r"(?P.*)(\.)(?P.*)", path) - assert m is not None, f"could not parse object notation for finding dependecies {notation}" - obj = m.group('obj') - attr = m.group("attr") - return obj or None, attr, what or 'value' - - - def bind_static_dependencies(self, obj : "Parameterized", - static_dependencies : typing.List[ParameterDependencyInfo] = []) -> typing.List["ParameterDependencyInfo"]: - """ - Resolves constant and dynamic parameter dependencies previously - obtained using the method_depends_on function. Existing resolved - dependencies are updated with a supplied parameter instance while - dynamic dependencies are resolved if possible. 
- """ - dependencies = [] - for dep in static_dependencies: - if not issubclass(type(obj), dep.cls): - dependencies.append(dep) - continue - dep.inst = obj if dep.inst is None else dep.inst - dep.pobj = dep.inst.parameters[dep.name] - dependencies.append(dep) - return dependencies - - - def attempt_conversion_from_dynamic_to_static_dep(self, obj : "Parameterized", - dynamic_dependencies : typing.List[DynamicDependencyInfo] = [], - intermediate : bool = True): - dependencies = [] - for dep in dynamic_dependencies: - subresolved = obj.parameters.event_resolver.convert_notation_to_dependency_info(dep.notation, - intermediate=intermediate).static - for subdep in subresolved: - if isinstance(subdep, ParameterDependencyInfo): - subdep.inst = obj if subdep.inst is None else subdep.inst - subdep.pobj = subdep.inst.parameters[subdep.name] - dependencies.append(subdep) - else: - dependencies += self.method_depends_on(subdep, intermediate=intermediate).static - return dependencies - - - def resolve_dynamic_dependencies(self, obj : 'Parameterized', dynamic_dep : DynamicDependencyInfo, - param_dep : ParameterDependencyInfo, attribute : str) -> typing.Tuple: - """ - If a subobject whose parameters are being depended on changes - we should only trigger events if the actual parameter values - of the new object differ from those on the old subobject, - therefore we accumulate parameters to compare on a subobject - change event. - - Additionally we need to make sure to notify the parent object - if a subobject changes so the dependencies can be - reinitialized so we return a callback which updates the - dependencies. 
- """ - subobj = obj - subobjs : typing.List = [obj] - for subpath in dynamic_dep.notation.split('.')[:-1]: - subobj = getattr(subobj, subpath.split(':')[0], None) - subobjs.append(subobj) - - dep_obj = param_dep.inst or param_dep.cls - if dep_obj not in subobjs[:-1]: - return None, None, param_dep.what - - depth = subobjs.index(dep_obj) - callback = None - if depth > 0: - def callback(*events): - """ - If a subobject changes, we need to notify the main - object to update the dependencies. - """ - obj.parameters.event_dispatcher.update_dynamic_dependencies(attribute) - - p = '.'.join(dynamic_dep.notation.split(':')[0].split('.')[depth+1:]) - if p == 'param': - subparams = [sp for sp in list(subobjs[-1].parameters)] - else: - subparams = [p] - - if ':' in dynamic_dep.notation: - what = dynamic_dep.notation.split(':')[-1] - else: - what = param_dep.what - - return subparams, callback, what - - - -class EventDispatcherState: - - def __init__(self): - self._BATCH_WATCH : typing.Dict[int, bool] = {} # If true, Event and watcher objects are queued. 
class EventDispatcher:
    """
    Dispatches parameter-change Events to registered Watchers.

    This class is instantiated as a private member of the parameters
    namespace, therefore member names deliberately carry no leading
    underscores.
    """

    def __init__(self, owner_inst : typing.Union['Parameterized', 'ParameterizedMetaclass'],
                event_resolver : "EventResolver") -> None:
        self.owner_inst = owner_inst
        self.owner_class = event_resolver.owner_cls
        self.event_resolver = event_resolver
        # parameter name -> what -> instance-level watchers
        self.all_watchers : typing.Dict[str, typing.Dict[str, typing.List["Watcher"]]] = {}
        # method name -> watchers created for dynamic (sub-object) dependencies
        self.dynamic_watchers : typing.Dict[str, typing.List["Watcher"]] = defaultdict(list)
        self.state = EventDispatcherState()

    def prepare_instance_dependencies(self):
        """
        Bind class-level dependency declarations to the owning instance and
        invoke each on_init callback exactly once.
        """
        init_methods = []
        for watcher_info in self.event_resolver._unresolved_watcher_info:
            static = defaultdict(list)
            for dep in self.event_resolver.bind_static_dependencies(self.owner_inst,
                                                    watcher_info.static_dependencies):
                static[(id(dep.inst), id(dep.cls), dep.what)].append((dep, None))
            for group in static.values():
                # BUGFIX: watch_group's third parameter is `queued`; the old
                # code passed watcher_info.invoke (always True here)
                self.watch_group(self.owner_inst, watcher_info.method_name,
                                watcher_info.queued, group)
            m = getattr(self.owner_inst, watcher_info.method_name)
            if watcher_info.on_init and m not in init_methods:
                init_methods.append(m)

        self.update_dynamic_dependencies()
        for m in init_methods:
            m()

    def update_dynamic_dependencies(self, attribute : typing.Optional[str] = None) -> None:
        """
        (Re)create watchers for dynamic (sub-object) dependencies; when
        `attribute` is given, only those rooted at that attribute are rebuilt.
        """
        for watcher_info in self.event_resolver._unresolved_watcher_info:
            dynamic = [d for d in watcher_info.dynamic_dependencies
                       if attribute is None or d.notation.split(".")[0] == attribute]
            if len(dynamic) > 0:
                # clean up watchers created for the previous sub-object
                for w in self.dynamic_watchers.pop(watcher_info.method_name, []):
                    (w.inst or w.cls).parameters.event_dispatcher.deregister_watcher(w)
                # resolve dynamic dependencies one-by-one to be able to trace
                # their watchers
                grouped = defaultdict(list)
                for ddep in dynamic:
                    for dep in self.event_resolver.attempt_conversion_from_dynamic_to_static_dep(
                            self.owner_inst, dynamic_dependencies=[ddep]):
                        grouped[(id(dep.inst), id(dep.cls), dep.what)].append((dep, ddep))
                for group in grouped.values():
                    # BUGFIX: pass `queued` (not `invoke`) - see prepare_instance_dependencies
                    watcher = self.watch_group(self.owner_inst, watcher_info.method_name,
                                    watcher_info.queued, group, attribute)
                    self.dynamic_watchers[watcher_info.method_name].append(watcher)

    def watch_group(self, obj : "Parameterized", name : str, queued : bool,
                    group : typing.List[typing.Tuple], attribute : typing.Optional[str] = None):
        """
        Sets up a watcher for a group of dependencies. If the dependency was
        dynamically generated, ensure a sub-object change event actually
        causes a value change and that existing watchers are moved from the
        old sub-object to the new one.
        """
        some_param_dep, dynamic_dep = group[0]
        dep_obj = some_param_dep.inst or some_param_dep.cls
        params = []
        # BUGFIX: group holds (ParameterDependencyInfo, DynamicDependencyInfo|None)
        # tuples; the previous code accessed `.name` on the tuple itself
        for (param_dep, _) in group:
            if param_dep.name not in params:
                params.append(param_dep.name)

        # BUGFIX: dynamic_dep is a single DynamicDependencyInfo (or None);
        # calling len() on it raised TypeError
        if dynamic_dep is None:
            subparams, callback, what = None, None, some_param_dep.what
        else:
            subparams, callback, what = self.event_resolver.resolve_dynamic_dependencies(
                obj, dynamic_dep, some_param_dep, attribute)

        executor = self.create_method_caller(obj, name, what, subparams, callback)
        return dep_obj.parameters.event_dispatcher.watch(
            executor, params, some_param_dep.what, queued=queued, precedence=-1)

    def create_method_caller(self, bound_inst : typing.Union["ParameterizedMetaclass", "Parameterized"],
                method_name : str, what : str = 'value', changed : typing.Optional[typing.List] = None,
                callback=None):
        """
        Wraps a method call, scheduling an optional callback before it is
        executed and skipping events if a sub-object has changed but its
        watched values have not.
        """
        function = getattr(bound_inst, method_name)
        if iscoroutinefunction(function):
            async def caller(*events):  # type: ignore
                if callback:
                    callback(*events)
                if not _skip_event(*events, what=what, changed=changed):
                    await function()
        else:
            def caller(*events):
                if callback:
                    callback(*events)
                if not _skip_event(*events, what=what, changed=changed):
                    return function()
        caller._watcher_name = method_name
        return caller

    def watch(self, fn : typing.Callable, parameter_names : typing.Union[typing.List[str], str],
            what : str = 'value', onlychanged : bool = True, queued : bool = False,
            precedence : float = -1):
        """Create and register a Watcher for the given parameter name(s)."""
        parameter_names = tuple(parameter_names) if isinstance(parameter_names, list) else (parameter_names,)  # type: ignore
        watcher = Watcher(inst=self.owner_inst, cls=self.owner_class, fn=fn, mode='args',
                        onlychanged=onlychanged, parameter_names=parameter_names,  # type: ignore
                        what=what, queued=queued, precedence=precedence)
        self.register_watcher(watcher, what)
        return watcher

    def register_watcher(self, watcher : "Watcher", what='value'):
        """Store the watcher either on this (instance-level) dispatcher or on
        the Parameter descriptor for class-level/slot watchers."""
        for parameter_name in watcher.parameter_names:
            if self.owner_inst is not None and what == "value":
                if parameter_name not in self.all_watchers:
                    self.all_watchers[parameter_name] = {}
                if what not in self.all_watchers[parameter_name]:
                    self.all_watchers[parameter_name][what] = []
                self.all_watchers[parameter_name][what].append(watcher)
            else:
                # BUGFIX: this branch used self.owner_inst, which is None for
                # a class-level dispatcher
                watchers = (self.owner_inst or self.owner_class).parameters[parameter_name].watchers
                if what not in watchers:
                    watchers[what] = []
                watchers[what].append(watcher)

    def deregister_watcher(self, watcher : "Watcher", what='value'):
        """Remove a previously registered watcher; a no-op when it was never
        registered."""
        for parameter_name in watcher.parameter_names:
            if self.owner_inst is not None and what == "value":
                if parameter_name not in self.all_watchers or what not in self.all_watchers[parameter_name]:
                    return
                self.all_watchers[parameter_name][what].remove(watcher)
            else:
                # BUGFIX: same class-level fallback as register_watcher
                watchers = (self.owner_inst or self.owner_class).parameters[parameter_name].watchers
                if what not in watchers:
                    return
                watchers[what].remove(watcher)

    def call_watcher(self, watcher : "Watcher", event : "Event") -> None:
        """
        Invoke the given watcher appropriately for the given Event.
        """
        # BUGFIX: the condition used to be `not Comparator.is_equal(...)`,
        # which skipped exactly the real changes that `onlychanged` watchers
        # are meant to receive
        if watcher.onlychanged and Comparator.is_equal(event.old, event.new):
            return

        if self.state.BATCH_WATCH:
            self.state.events.append(event)
            if not any(watcher is w for w in self.state.watchers):
                self.state.watchers.append(watcher)
        else:
            with _batch_call_watchers(self.owner_inst or self.owner_class,
                                    queued=watcher.queued, run=False):
                self.execute_watcher(watcher, (event,))

    def batch_call_watchers(self):
        """
        Dispatch all queued events to the queued watchers (sorted by
        precedence), then clear both queues.
        """
        watchers = self.state.watchers
        events = self.state.events
        while len(events) > 0:
            event = events.pop(0)
            for watcher in sorted(watchers, key=lambda w: w.precedence):
                with _batch_call_watchers(self.owner_inst or self.owner_class,
                                        queued=watcher.queued, run=False):
                    self.execute_watcher(watcher, (event,))
        events.clear()
        watchers.clear()

    def execute_watcher(self, watcher : "Watcher", events : typing.Tuple["Event"]):
        """Call the watcher's function with the events, either positionally or
        as name=new-value keywords; coroutine functions are handed to the
        registered async executor."""
        if watcher.mode == 'args':
            args, kwargs = events, {}
        else:
            args, kwargs = (), {event.name: event.new for event in events}

        if iscoroutinefunction(watcher.fn):
            if async_executor is None:
                raise RuntimeError(wrap_error_text(f"""Could not execute {watcher.fn} coroutine function. Please
                    register a asynchronous executor on param.parameterized.async_executor, which
                    schedules the function on an event loop."""))
            async_executor(partial(watcher.fn, *args, **kwargs))
        else:
            watcher.fn(*args, **kwargs)

    def trigger(self, *parameters : str) -> None:
        """
        Trigger watchers for the given parameter names, whether or not their
        values have actually changed. As a special case, parameters carrying
        an `_autotrigger_value` (Event-type parameters) have their value set
        to it so that the triggered parameter is identifiable.

        NOTE(review): rewritten - the previous body referenced undefined
        upstream-param names (self_, param_names); confirm the value-setting
        semantics against the descriptor implementation.
        """
        owner = self.owner_inst or self.owner_class
        trigger_params = [p for p in owner.parameters
                        if hasattr(owner.parameters[p], '_autotrigger_value')]
        triggers = {p: owner.parameters[p]._autotrigger_value
                    for p in trigger_params if p in parameters}

        # hold queued events/watchers aside so the artificial re-set below
        # does not interleave with genuinely queued events
        events = self.state.events
        watchers = self.state.watchers
        self.state.events = []
        self.state.watchers = []

        values = {name: getattr(owner, name) for name in parameters if name not in triggers}
        self.state.TRIGGER = True
        for name, value in dict(values, **triggers).items():
            setattr(owner, name, value)
        self.state.TRIGGER = False

        self.state.events += events
        self.state.watchers += watchers
- """ - trigger_params = [p for p in self_.self_or_cls.param - if hasattr(self_.self_or_cls.param[p], '_autotrigger_value')] - triggers = {p:self_.self_or_cls.param[p]._autotrigger_value - for p in trigger_params if p in param_names} - - events = self_.self_or_cls.param._events - watchers = self_.self_or_cls.param._watchers - self_.self_or_cls.param._events = [] - self_.self_or_cls.param._watchers = [] - param_values = self_.values() - params = {name: param_values[name] for name in param_names} - self.self_or_cls.param._TRIGGER = True - self.update(dict(params, **triggers)) - self.self_or_cls.param._TRIGGER = False - self.self_or_cls.param._events += events - self.self_or_cls.param._watchers += watchers - - - -class ClassParameters(object): - """ - Object that holds the namespace and implementation of Parameterized - methods as well as any state that is not in __slots__ or the - Parameters themselves. - - Exists at metaclass level (instantiated by the metaclass) - and at the instance level. Contains state specific to the - class. - """ - - def __init__(self, owner_cls : 'ParameterizedMetaclass', owner_class_members : typing.Optional[dict] = None) -> None: - """ - cls is the Parameterized class which is always set. - self is the instance if set. 
- """ - self.owner_cls = owner_cls - self.owner_inst = None - if owner_class_members is not None: - self.event_resolver = EventResolver(owner_cls=owner_cls) - self.event_dispatcher = EventDispatcher(owner_cls, self.event_resolver) - self.event_resolver.create_unresolved_watcher_info(owner_class_members) - - def __getitem__(self, key : str) -> 'Parameter': - """ - Returns the class or instance parameter like a dictionary dict[key] syntax lookup - """ - # code change comment - - # metaclass instance has a param attribute remember, no need to repeat logic of self_.self_or_cls - # as we create only one instance of Parameters object - return self.descriptors[key] # if self.owner_inst is None else self.owner_inst.param.objects(False) - - def __dir__(self) -> typing.List[str]: - """ - Adds parameters to dir - """ - return super().__dir__() + self.descriptors().keys() # type: ignore - - def __iter__(self): - """ - Iterates over the parameters on this object. - """ - yield from self.descriptors - - def __contains__(self, param : 'Parameter') -> bool: - return param in list(self) - - @property - def owner(self): - return self.owner_inst if self.owner_inst is not None else self.owner_cls - - @property - def descriptors(self) -> typing.Dict[str, 'Parameter']: - try: - paramdict = getattr(self.owner_cls, '__%s_params__' % self.owner_cls.__name__) - except AttributeError: - paramdict = {} - for class_ in classlist(self.owner_cls): - if class_ == object or class_ == type: - continue - for name, val in class_.__dict__.items(): - if isinstance(val, Parameter): - paramdict[name] = val - # We only want the cache to be visible to the cls on which - # params() is called, so we mangle the name ourselves at - # runtime (if we were to mangle it now, it would be - # _Parameterized.__params for all classes). 
- # print(self.owner_cls, '__%s_params__' % self.owner_cls.__name__, paramdict) - setattr(self.owner_cls, '__%s_params__' % self.owner_cls.__name__, paramdict) - return paramdict - - @property - def names(self) -> typing.Iterable[str]: - return self.descriptors.keys() - - @property - def defaults(self): - """Print the default values of all cls's Parameters.""" - defaults = {} - for key, val in self.descriptors.items(): - defaults[key] = val.default - return defaults - - @property - def values(self, onlychanged : bool = False): - """ - Return a dictionary of name,value pairs for the Parameters of this - object. - - When called on an instance with onlychanged set to True, will - only return values that are not equal to the default value - (onlychanged has no effect when called on a class). - """ - self_or_cls = self_.self_or_cls - vals = [] - for name, val in self_or_cls.param.objects('existing').items(): - value = self_or_cls.param.get_value_generator(name) - if not onlychanged or not all_equal(value, val.default): - vals.append((name, value)) - - vals.sort(key=itemgetter(0)) - return dict(vals) - - def serialize(self, subset : typing.Optional[typing.List[str]] = None, - mode : typing.Optional[str] = 'json') -> typing.Dict[str, str]: - if mode not in serializers: - raise ValueError(f'Mode {mode} not in available serialization formats {serializers.keys()}') - serializer = serializers[mode] - return serializer.serialize_parameters(self.owner, subset=subset) - - def serialize_value(self, parameter_name : str, mode : typing.Optional[str] = 'json') -> str: - if mode not in serializers: - raise ValueError(f'Mode {mode} not in available serialization formats {serializers.keys()}') - serializer = serializers[mode] - return serializer.serialize_parameter_value(self.owner, parameter_name) - - def deserialize(self, serialization : str, subset : typing.Optional[typing.List[str]] = None, - mode : typing.Optional[str] = 'json') -> typing.Dict[str, typing.Any]: - if mode not in 
serializers: - raise ValueError(f'Mode {mode} not in available serialization formats {serializers.keys()}') - serializer = serializers[mode] - return serializer.deserialize_parameters(self.owner, serialization, subset=subset) - - def deserialize_value(self, parameter_name : str, value : str, mode : str = 'json'): - if mode not in serializers: - raise ValueError(f'Mode {mode} not in available serialization formats {serializers.keys()}') - serializer = serializers[mode] - return serializer.deserialize_parameter_value(self.owner, parameter_name, value) - - def schema(self, safe : bool = False, subset : typing.Optional[typing.List[str]] = None, - mode : typing.Optional[str] = 'json') -> typing.Dict[str, typing.Any]: - """ - Returns a schema for the parameters on this Parameterized object. - """ - if mode not in serializers: - raise ValueError(f'Mode {mode} not in available serialization formats {serializers.keys()}') - serializer = serializers[mode] - return serializer.schema(self.owner, safe=safe, subset=subset) - - - -class InstanceParameters(ClassParameters): - - def __init__(self, owner_cls : 'ParameterizedMetaclass', owner_inst : 'Parameterized') -> None: - super().__init__(owner_cls=owner_cls, owner_class_members=None) - self.owner_inst = owner_inst - self._instance_params = {} - self.event_resolver = self.owner_cls.parameters.event_resolver - self.event_dispatcher = EventDispatcher(owner_inst, self.event_resolver) - self.event_dispatcher.prepare_instance_dependencies() - - - def _setup_parameters(self, **parameters): - """ - Initialize default and keyword parameter values. - - First, ensures that all Parameters with 'deepcopy_default=True' - (typically used for mutable Parameters) are copied directly - into each object, to ensure that there is an independent copy - (to avoid surprising aliasing errors). Then sets each of the - keyword arguments, warning when any of them are not defined as - parameters. 
- - Constant Parameters can be set during calls to this method. - """ - ## Deepcopy all 'deepcopy_default=True' parameters - # (building a set of names first to avoid redundantly - # instantiating a later-overridden parent class's parameter) - param_default_values_to_deepcopy = {} - param_descriptors_to_deepcopy = {} - for (k, v) in self.owner_cls.parameters.descriptors.items(): - if v.deepcopy_default and k != "name": - # (avoid replacing name with the default of None) - param_default_values_to_deepcopy[k] = v - if v.per_instance_descriptor and k != "name": - param_descriptors_to_deepcopy[k] = v - - for p in param_default_values_to_deepcopy.values(): - self._deep_copy_param_default(p) - for p in param_descriptors_to_deepcopy.values(): - self._deep_copy_param_descriptor(p) - - ## keyword arg setting - if len(parameters) > 0: - descs = self.descriptors - for name, val in parameters.items(): - desc = descs.get(name, None) # pylint: disable-msg=E1101 - if desc: - setattr(self.owner_inst, name, val) - # Its erroneous to set a non-descriptor (& non-param-descriptor) with a value from init. 
- # we dont know what that value even means, so we silently ignore - - - def _deep_copy_param_default(self, param_obj : 'Parameter') -> None: - # deepcopy param_obj.default into self.__dict__ (or dict_ if supplied) - # under the parameter's _internal_name (or key if supplied) - _old = self.owner_inst.__dict__.get(param_obj._internal_name, NotImplemented) - _old = _old if _old is not NotImplemented else param_obj.default - new_object = copy.deepcopy(_old) - # remember : simply setting in the dict does not activate post setter and remaining logic which is sometimes important - self.owner_inst.__dict__[param_obj._internal_name] = new_object - - - def _deep_copy_param_descriptor(self, param_obj : Parameter): - param_obj_copy = copy.deepcopy(param_obj) - self._instance_params[param_obj.name] = param_obj_copy - - - def add_parameter(self, param_name: str, param_obj: Parameter) -> None: - setattr(self.owner_inst, param_name, param_obj) - if param_obj.deepcopy_default: - self._deep_copy_param_default(param_obj) - try: - delattr(self.owner_cls, '__%s_params__'%self.owner_cls.__name__) - except AttributeError: - pass - - - @property - def descriptors(self) -> typing.Dict[str, 'Parameter']: - """ - Returns the Parameters of this instance or class - - If instance=True and called on a Parameterized instance it - will create instance parameters for all Parameters defined on - the class. To force class parameters to be returned use - instance=False. Since classes avoid creating instance - parameters unless necessary you may also request only existing - instance parameters to be returned by setting - instance='existing'. - """ - # We cache the parameters because this method is called often, - # and parameters are rarely added (and cannot be deleted) - return dict(super().descriptors, **self._instance_params) - - - -class ParameterizedMetaclass(type): - """ - The metaclass of Parameterized (and all its descendents). 
- - The metaclass overrides type.__setattr__ to allow us to set - Parameter values on classes without overwriting the attribute - descriptor. That is, for a Parameterized class of type X with a - Parameter y, the user can type X.y=3, which sets the default value - of Parameter y to be 3, rather than overwriting y with the - constant value 3 (and thereby losing all other info about that - Parameter, such as the doc string, bounds, etc.). - - The __init__ method is used when defining a Parameterized class, - usually when the module where that class is located is imported - for the first time. That is, the __init__ in this metaclass - initializes the *class* object, while the __init__ method defined - in each Parameterized class is called for each new instance of - that class. - - Additionally, a class can declare itself abstract by having an - attribute __abstract set to True. The 'abstract' attribute can be - used to find out if a class is abstract or not. - """ - def __init__(mcs, name : str, bases : typing.Tuple, dict_ : dict) -> None: - """ - Initialize the class object (not an instance of the class, but - the class itself). - """ - type.__init__(mcs, name, bases, dict_) - mcs._create_param_container(dict_) - mcs._update_docstring_signature(dict_.get('parameterized_docstring_signature', False)) - - def _create_param_container(mcs, mcs_members : dict): - mcs._param_container = ClassParameters(mcs, mcs_members) # value return when accessing cls/self.param - - @property - def parameters(mcs) -> ClassParameters: - return mcs._param_container - - def _update_docstring_signature(mcs, do : bool = True) -> None: - """ - Autogenerate a keyword signature in the class docstring for - all available parameters. This is particularly useful in the - IPython Notebook as IPython will parse this signature to allow - tab-completion of keywords. - - max_repr_len: Maximum length (in characters) of value reprs. 
- """ - if do: - processed_kws, keyword_groups = set(), [] - for cls in reversed(mcs.mro()): - keyword_group = [] - for (k, v) in sorted(cls.__dict__.items()): - if isinstance(v, Parameter) and k not in processed_kws: - param_type = v.__class__.__name__ - keyword_group.append("%s=%s" % (k, param_type)) - processed_kws.add(k) - keyword_groups.append(keyword_group) - - keywords = [el for grp in reversed(keyword_groups) for el in grp] - class_docstr = "\n" + mcs.__doc__ if mcs.__doc__ else '' - signature = "params(%s)" % (", ".join(keywords)) - description = param_pager(mcs) if param_pager else '' - mcs.__doc__ = signature + class_docstr + '\n' + description # type: ignore - - def __setattr__(mcs, attribute_name : str, value : typing.Any) -> None: - """ - Implements 'self.attribute_name=value' in a way that also supports Parameters. - - If there is already a descriptor named attribute_name, and - that descriptor is a Parameter, and the new value is *not* a - Parameter, then call that Parameter's __set__ method with the - specified value. - - In all other cases set the attribute normally (i.e. overwrite - the descriptor). If the new value is a Parameter, once it has - been set we make sure that the value is inherited from - Parameterized superclasses as described in __param_inheritance(). - """ - # Find out if there's a Parameter called attribute_name as a - # class attribute of this class - if not, parameter is None. - if attribute_name != '_param_container' and attribute_name != '__%s_params__' % mcs.__name__: - parameter = mcs.parameters.descriptors.get(attribute_name, None) - # checking isinstance(value, Parameter) will not work for ClassSelector - # and besides value is anyway validated. 
On the downside, this does not allow - # altering of parameter instances if class already of the parameter with attribute_name - if parameter: # and not isinstance(value, Parameter): - # if owning_class != mcs: - # parameter = copy.copy(parameter) - # parameter.owner = mcs - # type.__setattr__(mcs, attribute_name, parameter) - mcs.__dict__[attribute_name].__set__(mcs, value) - return - # set with None should not supported as with mcs it supports - # class attributes which can be validated - type.__setattr__(mcs, attribute_name, value) - - - -class Parameterized(metaclass=ParameterizedMetaclass): - """ - Base class for named objects that support Parameters and message - formatting. - - Automatic object naming: Every Parameterized instance has a name - parameter. If the user doesn't designate a name= argument - when constructing the object, the object will be given a name - consisting of its class name followed by a unique 5-digit number. - - Automatic parameter setting: The Parameterized __init__ method - will automatically read the list of keyword parameters. If any - keyword matches the name of a Parameter (see Parameter class) - defined in the object's class or any of its superclasses, that - parameter in the instance will get the value given as a keyword - argument. For example: - - class Foo(Parameterized): - xx = Parameter(default=1) - - foo = Foo(xx=20) - - in this case foo.xx gets the value 20. - - When initializing a Parameterized instance ('foo' in the example - above), the values of parameters can be supplied as keyword - arguments to the constructor (using parametername=parametervalue); - these values will override the class default values for this one - instance. - - If no 'name' parameter is supplied, self.name defaults to the - object's class name with a unique number appended to it. - - Message formatting: Each Parameterized instance has several - methods for optionally printing output. 
This functionality is - based on the standard Python 'logging' module; using the methods - provided here, wraps calls to the 'logging' module's root logger - and prepends each message with information about the instance - from which the call was made. For more information on how to set - the global logging level and change the default message prefix, - see documentation for the 'logging' module. - """ - def __init__(self, **params): - self.create_param_containers(**params) - - def create_param_containers(self, **params): - self._param_container = InstanceParameters(self.__class__, self) - self._param_container._setup_parameters(**params) - - @property - def parameters(self) -> InstanceParameters: - return self._param_container - - # 'Special' methods - def __getstate__(self): - """ - Save the object's state: return a dictionary that is a shallow - copy of the object's __dict__ and that also includes the - object's __slots__ (if it has any). - """ - state = self.__dict__.copy() - for slot in get_occupied_slots(self): - state[slot] = getattr(self, slot) - # Note that Parameterized object pickling assumes that - # attributes to be saved are only in __dict__ or __slots__ - # (the standard Python places to store attributes, so that's a - # reasonable assumption). (Additionally, class attributes that - # are Parameters are also handled, even when they haven't been - # instantiated - see PickleableClassAttributes.) - return state - - def __setstate__(self, state): - """ - Restore objects from the state dictionary to this object. - - During this process the object is considered uninitialized. 
- """ - # When making a copy the internal watchers have to be - # recreated and point to the new instance - if '_param_watchers' in state: - param_watchers = state['_param_watchers'] - for p, attrs in param_watchers.items(): - for attr, watchers in attrs.items(): - new_watchers = [] - for watcher in watchers: - watcher_args = list(watcher) - if watcher.inst is not None: - watcher_args[0] = self - fn = watcher.fn - if hasattr(fn, '_watcher_name'): - watcher_args[2] = _m_caller(self, fn._watcher_name) - elif get_method_owner(fn) is watcher.inst: - watcher_args[2] = getattr(self, fn.__name__) - new_watchers.append(Watcher(*watcher_args)) - param_watchers[p][attr] = new_watchers - - if '_instance__params' not in state: - state['_instance__params'] = {} - if '_param_watchers' not in state: - state['_param_watchers'] = {} - state.pop('param', None) - - for name,value in state.items(): - setattr(self,name,value) - self.initialized=True - - - - -# As of Python 2.6+, a fn's **args no longer has to be a -# dictionary. This might allow us to use a decorator to simplify using -# ParamOverrides (if that does indeed make them simpler to use). -# http://docs.python.org/whatsnew/2.6.html -class ParamOverrides(dict): - """ - A dictionary that returns the attribute of a specified object if - that attribute is not present in itself. - - Used to override the parameters of an object. - """ - - # NOTE: Attribute names of this object block parameters of the - # same name, so all attributes of this object should have names - # starting with an underscore (_). - - def __init__(self, overridden : Parameterized, dict_ : typing.Dict[str, typing.Any], - allow_extra_keywords : bool = False) -> None: - """ - If allow_extra_keywords is False, then all keys in the - supplied dict_ must match parameter names on the overridden - object (otherwise a warning will be printed). 
- - If allow_extra_keywords is True, then any items in the - supplied dict_ that are not also parameters of the overridden - object will be available via the extra_keywords() method. - """ - # This method should be fast because it's going to be - # called a lot. This _might_ be faster (not tested): - # def __init__(self,overridden,**kw): - # ... - # dict.__init__(self,**kw) - self._overridden = overridden - dict.__init__(self, dict_) - if allow_extra_keywords: - self._extra_keywords = self._extract_extra_keywords(dict_) - else: - self._check_params(dict_) - - def extra_keywords(self): - """ - Return a dictionary containing items from the originally - supplied dict_ whose names are not parameters of the - overridden object. - """ - return self._extra_keywords - - def param_keywords(self): - """ - Return a dictionary containing items from the originally - supplied dict_ whose names are parameters of the - overridden object (i.e. not extra keywords/parameters). - """ - return dict((key, self[key]) for key in self if key not in self.extra_keywords()) - - def __missing__(self,name): - # Return 'name' from the overridden object - return getattr(self._overridden, name) - - def __repr__(self): - # As dict.__repr__, but indicate the overridden object - return dict.__repr__(self) + " overriding params from %s"%repr(self._overridden) - - def __getattr__(self,name): - # Provide 'dot' access to entries in the dictionary. - # (This __getattr__ method is called only if 'name' isn't an - # attribute of self.) - return self.__getitem__(name) - - def __setattr__(self,name,val): - # Attributes whose name starts with _ are set on self (as - # normal), but all other attributes are inserted into the - # dictionary. 
- if not name.startswith('_'): - self.__setitem__(name, val) - else: - dict.__setattr__(self, name, val) - - def get(self, key, default = None): - try: - return self[key] - except KeyError: - return default - - def __contains__(self, key): - return key in self.__dict__ or key in self._overridden.parameters - - def _check_params(self,params): - """ - Print a warning if params contains something that is not a - Parameter of the overridden object. - """ - overridden_object_params = list(self._overridden.parameters) - for item in params: - if item not in overridden_object_params: - self.param.warning("'%s' will be ignored (not a Parameter).",item) - - def _extract_extra_keywords(self,params): - """ - Return any items in params that are not also - parameters of the overridden object. - """ - extra_keywords = {} - overridden_object_params = list(self._overridden.parameters) - for name, val in params.items(): - if name not in overridden_object_params: - extra_keywords[name]=val - # Could remove name from params (i.e. del params[name]) - # so that it's only available via extra_keywords() - return extra_keywords - - -# Helper function required by ParameterizedFunction.__reduce__ -def _new_parameterized(cls): - return Parameterized.__new__(cls) - - -class ParameterizedFunction(Parameterized): - """ - Acts like a Python function, but with arguments that are Parameters. - - Implemented as a subclass of Parameterized that, when instantiated, - automatically invokes __call__ and returns the result, instead of - returning an instance of the class. - - To obtain an instance of this class, call instance(). 
- """ - def __str__(self): - return self.__class__.__name__ + "()" - - def __call__(self, *args, **kw): - raise NotImplementedError("Subclasses must implement __call__.") - - def __reduce__(self): - # Control reconstruction (during unpickling and copying): - # ensure that ParameterizedFunction.__new__ is skipped - state = ParameterizedFunction.__getstate__(self) - # Here it's necessary to use a function defined at the - # module level rather than Parameterized.__new__ directly - # because otherwise pickle will find .__new__'s module to be - # __main__. Pretty obscure aspect of pickle.py... - return (_new_parameterized, (self.__class__,), state) - - def __new__(cls, *args, **params): - # Create and __call__() an instance of this class. - inst = super().__new__(cls) - return inst.__call__(*args, **params) - - - -def descendents(class_ : type) -> typing.List[type]: - """ - Return a list of the class hierarchy below (and including) the given class. - - The list is ordered from least- to most-specific. Can be useful for - printing the contents of an entire class hierarchy. - """ - assert isinstance(class_,type) - q = [class_] - out = [] - while len(q): - x = q.pop(0) - out.insert(0,x) - for b in x.__subclasses__(): - if b not in q and b not in out: - q.append(b) - return out[::-1] - - -def param_union(*parameterizeds : Parameterized, warn_duplicate : bool = False): - """ - Given a set of Parameterized objects, returns a dictionary - with the union of all param name, value pairs across them. 
- If warn is True (default), prints a warning if the same parameter has - been given multiple values; otherwise uses the last value - """ - d = dict() - for o in parameterizeds: - for k in o.parameters: - if k != 'name': - if k in d and warn_duplicate: - print(f"overwriting parameter {k}") - d[k] = getattr(o, k) - return d - - -def parameterized_class(name, params, bases = Parameterized): - """ - Dynamically create a parameterized class with the given name and the - supplied parameters, inheriting from the specified base(s). - """ - if not (isinstance(bases, list) or isinstance(bases, tuple)): - bases=[bases] - return type(name, tuple(bases), params) - diff --git a/hololinked/param/parameters.py b/hololinked/param/parameters.py deleted file mode 100644 index aa5005a..0000000 --- a/hololinked/param/parameters.py +++ /dev/null @@ -1,2101 +0,0 @@ -import glob -import re -import os.path -import datetime as dt -import typing -import numbers -import sys -import collections.abc -from collections import OrderedDict - -from .utils import * -from .exceptions import * -from .parameterized import ParamOverrides, Parameterized, ParameterizedFunction, descendents, dt_types, Parameter - - - -class Infinity(object): - """ - An instance of this class represents an infinite value. Unlike - Python's float('inf') value, this object can be safely compared - with gmpy numeric types across different gmpy versions. - - All operators on Infinity() return Infinity(), apart from the - comparison and equality operators. Equality works by checking - whether the two objects are both instances of this class. 
- """ - def __eq__ (self, other): return isinstance(other,self.__class__) - def __ne__ (self, other): return not self == other - def __lt__ (self, other): return False - def __le__ (self, other): return False - def __gt__ (self, other): return True - def __ge__ (self, other): return True - def __add__ (self, other): return self - def __radd__(self, other): return self - def __ladd__(self, other): return self - def __sub__ (self, other): return self - def __iadd_ (self, other): return self - def __isub__(self, other): return self - def __repr__(self): return "Infinity()" - def __str__ (self): return repr(self) - - - -class String(Parameter): - """ - A string parameter with a default value and optional regular expression (regex) matching. - - Example of using a regex to implement IPv4 address matching:: - - class IPAddress(String): - '''IPv4 address as a string (dotted decimal notation)''' - def __init__(self, default="0.0.0.0", allow_None=False, **kwargs): - ip_regex = r'^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$' - super(IPAddress, self).__init__(default=default, regex=ip_regex, **kwargs) - """ - - __slots__ = ['regex'] - - def __init__(self, default : typing.Optional[str] = "", *, regex : typing.Optional[str] = None, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, - fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, - allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, - deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, - precedence=precedence) - self.regex = regex 
- - def validate_and_adapt(self, value : typing.Any) -> str: - self._assert(value, self.regex, self.allow_None) - return value - - @classmethod - def _assert(obj, value : typing.Any, regex : typing.Optional[str] = None, allow_None : bool = False) -> None: - """ - the method that implements the validator - """ - if value is None: - if allow_None: - return - else: - raise_ValueError(f"None not allowed for string type", obj) - if not isinstance(value, str): - raise_TypeError("given value is not string type, but {}.".format(type(value)), obj) - if regex is not None: - match = re.match(regex, value) - if match is None or match.group(0) != value: - # match should be original string, not some substring - raise_ValueError("given string value {} does not match regex {}.".format(value, regex), obj) - - @classmethod - def isinstance(cls, value : typing.Any, regex : typing.Optional[str] = None, allow_None : bool = False) -> bool: - """ - verify if given value is a string confirming to regex. - - Args: - value (Any): input value - regex (str, None): regex required to match, leave None if unnecessary - allow_None (bool): set True if None is tolerated - - Returns: - bool: True if conformant, else False. Any exceptions due to wrong inputs resulting in TypeError and ValueError - also lead to False - """ - try: - cls._assert(value, regex, allow_None) - return True - except (TypeError, ValueError): - return False - - - -class Bytes(String): - """ - A bytes parameter with a default value and optional regular - expression (regex) matching. - - Similar to the string parameter, but instead of type basestring - this parameter only allows objects of type bytes (e.g. b'bytes'). - """ - - @classmethod - def _assert(obj, value : typing.Any, regex : typing.Optional[bytes] = None, allow_None : bool = False) -> None: - """ - verify if given value is a bytes confirming to regex. 
- - Args: - value (Any): input value - regex (str, None): regex required to match, leave None if unnecessary - allow_None (bool): set True if None is tolerated - - Raises: - TypeError: if given type is not bytes - ValueError: if regex does not match - """ - if value is None: - if allow_None: - return - else: - raise_ValueError(f"None not allowed for string type", obj) - if not isinstance(value, bytes): - raise_TypeError("given value is not bytes type, but {}.".format(type(value)), obj) - if regex is not None: - match = re.match(regex, value) - if match is None or match.group(0) != value: - # match should be original string, not some substring - raise_ValueError("given bytes value {} does not match regex {}.".format(value, regex), obj) - - - -class IPAddress(Parameter): - - __slots__ = ['allow_localhost', 'allow_ipv4', 'allow_ipv6'] - - def __init__(self, default : typing.Optional[str] = "0.0.0.0", *, allow_ipv4 : bool = True, allow_ipv6 : bool = True, - allow_localhost : bool = True, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - allow_None : bool = False, per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, - allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, - deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, - precedence=precedence) - self.allow_localhost = allow_localhost - self.allow_ipv4 = allow_ipv4 - self.allow_ipv6 = allow_ipv6 - - def validate_and_adapt(self, value: typing.Any) -> str: - self._assert(value, self.allow_ipv4, self.allow_ipv6, self.allow_localhost, self.allow_None) - return value - - @classmethod - def 
_assert(obj, value : typing.Any, allow_ipv4 : bool = True, allow_ipv6 : bool = True, - allow_localhost : bool = True, allow_None : bool = False) -> None: - if value is None and allow_None: - return - if not isinstance(value, str): - raise_TypeError('given value for IP address not a string, but type {}'.format(type(value)), obj) - if allow_localhost and value == 'localhost': - return - if not ((allow_ipv4 and (obj.isipv4(value) or obj.isipv4cidr(value))) - or (allow_ipv6 and (obj.isipv6(value) or obj.isipv6cidr(value)))): - raise_ValueError("Given value {} is not a valid IP address.".format(value), obj) - - @classmethod - def isinstance(obj, value : typing.Any, allow_ipv4 : bool = True, allow_ipv6 : bool = True , - allow_localhost : bool = True, allow_None : bool = False) -> bool: - try: - obj._assert(value, allow_ipv4, allow_ipv6, allow_localhost, allow_None) - return True - except (TypeError, ValueError): - return False - - @classmethod - def isipv4(obj, value : str) -> bool: - """ - Return whether a given value is a valid IP version 4 address. - - This validator is based on `WTForms IPAddress validator`_ - - .. _WTForms IPAddress validator: - https://github.com/wtforms/wtforms/blob/master/wtforms/validators.py - - Args: - value (str): IP address string to validate, other types will raise unexpected errors (mostly attribute error) - - Returns: - bool : True if conformant - """ - groups = value.split(".") - if ( - len(groups) != 4 - or any(not x.isdigit() for x in groups) - or any(len(x) > 3 for x in groups) - ): - return False - return all(0 <= int(part) < 256 for part in groups) - - - @classmethod - def isipv4cidr(obj, value : str) -> bool: - """ - Return whether a given value is a valid CIDR-notated IP version 4 - address range. - - This validator is based on RFC4632 3.1. 
- - Args: - value (str): IP address string to validate, other types will raise unexpected errors (mostly attribute error) - - Returns: - bool : True if conformant - """ - try: - prefix, suffix = value.split('/', 2) - except ValueError: - return False - if not obj.is_ipv4(prefix) or not suffix.isdigit(): - return False - return 0 <= int(suffix) <= 32 - - @classmethod - def isipv6(obj, value : str) -> bool: - """ - Return whether a given value is a valid IP version 6 address - (including IPv4-mapped IPv6 addresses). - - This validator is based on `WTForms IPAddress validator`_. - - .. _WTForms IPAddress validator: - https://github.com/wtforms/wtforms/blob/master/wtforms/validators.py - - Examples:: - - >>> ipv6('abcd:ef::42:1') - True - - >>> ipv6('::ffff:192.0.2.128') - True - - >>> ipv6('::192.0.2.128') - True - - >>> ipv6('abc.0.0.1') - ValidationFailure(func=ipv6, args={'value': 'abc.0.0.1'}) - - .. versionadded:: 0.2 - - :param value: IP address string to validate - """ - ipv6_groups = value.split(':') - if len(ipv6_groups) == 1: - return False - ipv4_groups = ipv6_groups[-1].split('.') - - if len(ipv4_groups) > 1: - if not obj.is_ipv4(ipv6_groups[-1]): - return False - ipv6_groups = ipv6_groups[:-1] - else: - ipv4_groups = [] - - count_blank = 0 - for part in ipv6_groups: - if not part: - count_blank += 1 - continue - try: - num = int(part, 16) - except ValueError: - return False - else: - if not 0 <= num <= 65536 or len(part) > 4: - return False - - max_groups = 6 if ipv4_groups else 8 - part_count = len(ipv6_groups) - count_blank - if count_blank == 0 and part_count == max_groups: - # no :: -> must have size of max_groups - return True - elif count_blank == 1 and ipv6_groups[-1] and ipv6_groups[0] and part_count < max_groups: - # one :: inside the address or prefix or suffix : -> filter least two cases - return True - elif count_blank == 2 and part_count < max_groups and ( - ((ipv6_groups[0] and not ipv6_groups[-1]) or (not ipv6_groups[0] and 
ipv6_groups[-1])) or ipv4_groups): - # leading or trailing :: or : at end and begin -> filter last case - # Check if it has ipv4 groups because they get removed from the ipv6_groups - return True - elif count_blank == 3 and part_count == 0: - # :: is the address -> filter everything else - return True - return False - - @classmethod - def isipv6cidr(obj, value : str) -> bool: - """ - Returns whether a given value is a valid CIDR-notated IP version 6 - address range. - - This validator is based on RFC4632 3.1. - - Examples:: - - >>> ipv6_cidr('::1/128') - True - - >>> ipv6_cidr('::1') - ValidationFailure(func=ipv6_cidr, args={'value': '::1'}) - """ - try: - prefix, suffix = value.split('/', 2) - except ValueError: - return False - if not obj.is_ipv6(prefix) or not suffix.isdigit(): - return False - return 0 <= int(suffix) <= 128 - - - -class Number(Parameter): - """ - A numeric parameter with a default value and optional bounds. - - There are two types of bounds: ``bounds`` and - ``softbounds``. ``bounds`` are hard bounds: the parameter must - have a value within the specified range. The default bounds are - (None,None), meaning there are actually no hard bounds. One or - both bounds can be set by specifying a value - (e.g. bounds=(None,10) means there is no lower bound, and an upper - bound of 10). Bounds are inclusive by default, but exclusivity - can be specified for each bound by setting inclusive_bounds - (e.g. inclusive_bounds=(True,False) specifies an exclusive upper - bound). - - Using a default value outside the hard - bounds, or one that is not numeric, results in an exception. - - As a special case, if allow_None=True (which is true by default if - the parameter has a default of None when declared) then a value - of None is also allowed. - - A separate function set_in_bounds() is provided that will - silently crop the given value into the legal range, for use - in, for instance, a GUI. 
- - ``softbounds`` are present to indicate the typical range of - the parameter, but are not enforced. Setting the soft bounds - allows, for instance, a GUI to know what values to display on - sliders for the Number. - - Example of creating a Number:: - AB = Number(default=0.5, bounds=(None,10), softbounds=(0,1), doc='Distance from A to B.') - - """ - - __slots__ = ['bounds', 'inclusive_bounds', 'crop_to_bounds', 'dtype', 'step'] - - def __init__(self, default : typing.Optional[typing.Union[float, int]] = 0.0, *, bounds : typing.Optional[typing.Tuple] = None, - crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, - allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, - class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) - self.bounds = bounds - self.crop_to_bounds = crop_to_bounds - self.inclusive_bounds = inclusive_bounds - self.dtype = (float, int) - self.step = step - - def set_in_bounds(self, obj : typing.Union[Parameterized, typing.Any], value : typing.Union[float, int]) -> None: - """ - Set to the given value, but cropped to be within the legal bounds. - See crop_to_bounds for details on how cropping is done. 
- """ - self._assert(value, self.dtype, None, (False, False), self.allow_None) - bounded_value = self._crop_to_bounds(value) - super().__set__(obj, bounded_value) - - def _crop_to_bounds(self, value : typing.Union[int, float]) -> typing.Union[int, float]: - """ - Return the given value cropped to be within the hard bounds - for this parameter. - - If a numeric value is passed in, check it is within the hard - bounds. If it is larger than the high bound, return the high - bound. If it's smaller, return the low bound. In either case, the - returned value could be None. If a non-numeric value is passed - in, set to be the default value (which could be None). In no - case is an exception raised; all values are accepted. - """ - # Values outside the bounds are silently cropped to - # be inside the bounds. - assert self.bounds is not None, "Cannot crop to bounds when bounds is None" - vmin, vmax = self.bounds - incmin, incmax = self.inclusive_bounds - if vmin is not None: - if value < vmin: - if incmin: - return vmin - else: - return vmin + self.step - if vmax is not None: - if value > vmax: - if incmax: - return vmax - else: - return vmax - self.step - return value - - def validate_and_adapt(self, value: typing.Any) -> typing.Union[int, float]: - self._assert(value, self.dtype, None if self.crop_to_bounds else self.bounds, - self.inclusive_bounds, self.allow_None) - if self.crop_to_bounds and self.bounds and value is not None: - return self._crop_to_bounds(value) - return value - - @classmethod - def _assert(obj, value, dtype : typing.Tuple, bounds : typing.Optional[typing.Tuple] = None, - inclusive_bounds : typing.Tuple[bool, bool] = (True, True), allow_None : bool = False): - if allow_None and value is None: - return - if dtype is None: - if not obj.isnumber(value): - raise_TypeError("given value not of number type, but type {}.".format(type(value)), - obj) - elif not isinstance(value, dtype): - raise_TypeError("given value not of type {}, but type {}.".format(dtype, 
type(value)), obj) - if bounds: - vmin, vmax = bounds - incmin, incmax = inclusive_bounds - if vmax is not None: - if incmax is True: - if not value <= vmax: - raise_ValueError("given value must be at most {}, not {}.".format(vmax, value), obj) - else: - if not value < vmax: - raise_ValueError("Parameter must be less than {}, not {}.".format(vmax, value), obj) - - if vmin is not None: - if incmin is True: - if not value >= vmin: - raise_ValueError("Parameter must be at least {}, not {}.".format(vmin, value), obj) - else: - if not value > vmin: - raise_ValueError("Parameter must be greater than {}, not {}.".format(vmin, value), obj) - return value - - def _validate_step(self, value : typing.Any) -> None: - if value is not None: - if self.dtype: - if not isinstance(value, self.dtype): - raise_ValueError("Step can only be None or {}, not type {}.".format(self.dtype, type(value)), self) - elif not self.isnumber(self.step): - raise_ValueError("Step can only be None or numeric value, not type {}.".format(type(value)), self) - - def _post_slot_set(self, slot : str, old : typing.Any, value : typing.Any) -> None: - if slot == 'step': - self._validate_step(value) - return super()._post_slot_set(slot, old, value) - - @classmethod - def isinstance(obj, value, dtype : typing.Tuple, bounds : typing.Optional[typing.Tuple] = None, - inclusive_bounds : typing.Tuple[bool, bool] = (True, True), allow_None : bool = False): - try: - obj._assert(value, dtype, bounds, inclusive_bounds, allow_None) - return True - except (ValueError, TypeError): - return False - - @classmethod - def isnumber(cls, value : typing.Any) -> bool: - if isinstance(value, numbers.Number): return True - # The extra check is for classes that behave like numbers, such as those - # found in numpy, gmpy, etc. 
- elif (hasattr(value, '__int__') and hasattr(value, '__add__')): return True - # This is for older versions of gmpy - elif hasattr(value, 'qdiv'): return True - else: return False - - - -class Integer(Number): - """Numeric Parameter required to be an Integer""" - - def __init__(self, default : typing.Optional[int] = 0, *, bounds : typing.Optional[typing.Tuple] = None, - crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, - fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, bounds=bounds, crop_to_bounds=crop_to_bounds, inclusive_bounds=inclusive_bounds, - doc=doc, constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, - deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, - precedence=precedence) - self.dtype = (int,) - - def _validate_step(self, step : int): - if step is not None and not isinstance(step, int): - raise_ValueError("Step can only be None or an integer value, not type {}".format(type(step)), self) - - - -class Boolean(Parameter): - """Binary or tristate Boolean Parameter.""" - - def __init__(self, default : typing.Optional[bool] = False, *, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence 
: typing.Optional[float] = None) -> None: - super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, - allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, - class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) - - def validate_and_adapt(self, value : typing.Any) -> bool: - if self.allow_None and value is None: - return - elif not isinstance(value, bool): - raise_ValueError("given value not boolean type, but type {}".format(type(value)), self) - return value - - - -class Iterable(Parameter): - """A tuple or list Parameter (e.g. ('a',7.6,[3,5])) with a fixed tuple length.""" - - __slots__ = ['bounds', 'length', 'item_type', 'dtype'] - - def __init__(self, default : typing.Any, *, bounds : typing.Optional[typing.Tuple[int, int]] = None, - length : typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None, deepcopy_default : bool = False, - allow_None : bool = False, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - per_instance_descriptor : bool = False, class_member : bool = False, - fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - """ - Initialize a tuple parameter with a fixed length (number of - elements). The length is determined by the initial default - value, if any, and must be supplied explicitly otherwise. The - length is not allowed to change after instantiation. 
- """ - super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, - allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, - class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) - self.bounds = bounds - self.length = length - self.item_type = item_type - self.dtype = (list, tuple) - - def validate_and_adapt(self, value: typing.Any) -> typing.Union[typing.List, typing.Tuple]: - self._assert(value, self.bounds, self.length, self.dtype, self.item_type, self.allow_None) - return value - - @classmethod - def _assert(obj, value : typing.Any, bounds : typing.Optional[typing.Tuple[int, int]] = None, - length : typing.Optional[int] = None, dtype : typing.Union[type, typing.Tuple] = (list, tuple), - item_type : typing.Any = None, allow_None : bool = False) -> None: - if value is None and allow_None: - return - if not isinstance(value, dtype): - raise_ValueError("given value not of iterable type {}, but {}.".format(dtype, type(value)), obj) - if bounds is not None: - if not (len(value) >= bounds[0] and len(value) <= bounds[1]): - raise_ValueError("given iterable is not of the correct length ({} instead of between {} and {}).".format( - len(value), 0 if not bounds[0] else bounds[0], bounds[1]), obj) - elif length is not None and len(value) != length: - raise_ValueError("given iterable is not of correct length ({} instead of {})".format(len(value), length), - obj) - if item_type is not None: - for val in value: - if not isinstance(val, item_type): - raise_TypeError("not all elements of given iterable of item type {}, found object of type {}".format( - item_type, type(val)), obj) - - @classmethod - def isinstance(obj, value : typing.Any, bounds : typing.Optional[typing.Tuple[int, int]], - length : typing.Optional[int] = None, dtype : typing.Union[type, typing.Tuple] = (list, tuple), - item_type : typing.Any = None, allow_None : bool = False) -> bool: - try: - 
obj._assert(value, bounds, length, dtype, item_type, allow_None) - return True - except (ValueError, TypeError): - return False - - - -class Tuple(Iterable): - - __slots__ = ['accept_list', 'accept_item'] - - def __init__(self, default : typing.Any, *, bounds : typing.Optional[typing.Tuple[int, int]] = None, - length: typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None, - accept_list : bool = False, accept_item : bool = False, deepcopy_default : bool = False, - allow_None : bool = False, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - per_instance_descriptor : bool = False, class_member : bool = False, - fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, bounds=bounds, length=length, item_type=item_type, doc=doc, constant=constant, - readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, - deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, - precedence=precedence) - self.accept_list = accept_list - self.accept_item = accept_item - self.dtype = (tuple,) # re-assigned - - def validate_and_adapt(self, value: typing.Any) -> typing.Tuple: - if self.accept_list and isinstance(value, list): - value = tuple(value) - if self.accept_item and not isinstance(value, (list, tuple, type(None))): - value = (value,) - self._assert(value, self.bounds, self.length, self.dtype, self.item_type, self.allow_None) - return value - - @classmethod - def serialize(cls, value): - if value is None: - return None - return list(value) # As JSON has no tuple representation - - @classmethod - def deserialize(cls, value): - if value == 'null': - return None - return tuple(value) # As JSON has no tuple representation - - - -class List(Iterable): - """ - Parameter whose value 
is a list of objects, usually of a specified type. - - The bounds allow a minimum and/or maximum length of - list to be enforced. If the item_type is non-None, all - items in the list are checked to be of that type. - - `class_` is accepted as an alias for `item_type`, but is - deprecated due to conflict with how the `class_` slot is - used in Selector classes. - """ - - __slots__ = ['accept_tuple'] - - def __init__(self, default: typing.Any, *, bounds : typing.Optional[typing.Tuple[int, int]] = None, - length : typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None, - accept_tuple : bool = False, deepcopy_default : bool = False, - allow_None : bool = False, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - per_instance_descriptor : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, - fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, - precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, bounds=bounds, length=length, item_type=item_type, - doc=doc, constant=constant, readonly=readonly, allow_None=allow_None, - per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, - class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) - self.accept_tuple = accept_tuple - self.dtype = list - - def validate_and_adapt(self, value: typing.Any) -> typing.Tuple: - if self.accept_tuple and isinstance(value, tuple): - value = list(value) - self._assert(value, self.bounds, self.length, self.dtype, self.item_type, self.allow_None) - return value - - - -class Callable(Parameter): - """ - Parameter holding a value that is a callable object, such as a function. - - A keyword argument instantiate=True should be provided when a - function object is used that might have state. 
On the other hand, - regular standalone functions cannot be deepcopied as of Python - 2.4, so instantiate must be False for those values. - """ - - def validate_and_adapt(self, value : typing.Any) -> typing.Callable: - if (self.allow_None and value is None) or callable(value): - return value - raise_ValueError("given value not a callable object, but type {}.".format(type(value)), self) - - - -class Composite(Parameter): - """ - A Parameter that is a composite of a set of other attributes of the class. - - The constructor argument 'attribs' takes a list of attribute - names, which may or may not be Parameters. Getting the parameter - returns a list of the values of the constituents of the composite, - in the order specified. Likewise, setting the parameter takes a - sequence of values and sets the value of the constituent - attributes. - - This Parameter type has not been tested with watchers and - dependencies, and may not support them properly. - """ - - __slots__ = ['attribs'] - - def __init__(self, attribs : typing.List[typing.Union[str, Parameter]], *, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(None, doc=doc, constant=constant, readonly=readonly, allow_None=allow_None, - per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, - class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) - self.attribs = [] - if attribs is not None: - for attrib in attribs: - if isinstance(attrib, Parameter): - self.attribs.append(attrib.name) - else: - self.attribs.append(attrib) - - def __get__(self, obj, objtype) -> 
typing.List[typing.Any]: - """ - Return the values of all the attribs, as a list. - """ - return [getattr(obj, attr) for attr in self.attribs] - - def validate_and_adapt(self, value): - if not len(value) == len(self.attribs): - raise_ValueError("Compound parameter got the wrong number of values (needed {}, but got {}).".format( - len(self.attribs), len(value)), self) - return value - - def _post_setter(self, obj, val): - for a, v in zip(self.attribs, val): - setattr(obj, a, v) - - - -class SelectorBase(Parameter): - """ - Parameter whose value must be chosen from a list of possibilities. - - Subclasses must implement get_range(). - """ - - __abstract = True - - @property - def range(self): - raise NotImplementedError("get_range() must be implemented in subclasses.") - - - -class Selector(SelectorBase): - """ - Parameter whose value must be one object from a list of possible objects. - - By default, if no default is specified, picks the first object from - the provided set of objects, as long as the objects are in an - ordered data collection. - - check_on_set restricts the value to be among the current list of - objects. By default, if objects are initially supplied, - check_on_set is True, whereas if no objects are initially - supplied, check_on_set is False. This can be overridden by - explicitly specifying check_on_set initially. - - If check_on_set is True (either because objects are supplied - initially, or because it is explicitly specified), the default - (initial) value must be among the list of objects (unless the - default value is None). - - The list of objects can be supplied as a list (appropriate for - selecting among a set of strings, or among a set of objects with a - "name" parameter), or as a (preferably ordered) dictionary from - names to objects. If a dictionary is supplied, the objects - will need to be hashable so that their names can be looked - up from the object value. 
- """ - - __slots__ = ['objects', 'names'] - - # Selector is usually used to allow selection from a list of - # existing objects, therefore instantiate is False by default. - def __init__(self, *, objects : typing.List[typing.Any], default : typing.Any, empty_default : bool = False, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, - fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, - precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, - allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, - class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) - if objects is None: - objects = [] - autodefault = None - elif isinstance(objects, collections.abc.Mapping): - self.names = objects - self.objects = list(objects.values()) - autodefault = self.objects[0] - elif isinstance(objects, (list, tuple)): - self.names = None - self.objects = objects - autodefault = objects[0] - else: - raise TypeError("objects should be a list, tuple, mapping or None. Given type : {}".format(type(objects))) - default = autodefault if (not empty_default and default is None) else default - - def validate_and_adapt(self, value: typing.Any) -> typing.Any: - """ - val must be None or one of the objects in self.objects. - """ - if not (value in self.objects or (self.allow_None and value is None)): - raise_ValueError("given value not in list of possible objects, valid options include {}".format( - get_iterable_printfriendly_repr(self.objects)), self) - return value - - @property - def range(self): - """ - Return the possible objects to which this parameter could be set. 
- - (Returns the dictionary {object.name:object}.) - """ - if self.names is not None: - return named_objs(self.objects, self.names) - else: - return self.objects - - - -class ClassSelector(SelectorBase): - """ - Parameter allowing selection of either a subclass or an instance of a given set of classes. - By default, requires an instance, but if isinstance=False, accepts a class instead. - Both class and instance values respect the instantiate slot, though it matters only - for isinstance=True. - """ - - __slots__ = ['class_', 'isinstance'] - - def __init__(self, *, class_ , default : typing.Any, isinstance : bool = True, deepcopy_default : bool = False, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, class_member : bool = False, - fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, - allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, - class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) - self.class_ = class_ - self.isinstance = isinstance - - def _get_class_name(self): - if isinstance(self.class_, tuple): - return ('(%s)' % ', '.join(cl.__name__ for cl in self.class_)) - else: - return self.class_.__name__ - - def validate_and_adapt(self, value): - if (value is None and self.allow_None): - return - if self.isinstance: - if not isinstance(value, self.class_): - raise_ValueError("{} parameter {} value must be an instance of {}, not {}.".format( - self.__class__.__name__, self.name, self._get_class_name(), value), self) - else: - if not issubclass(value, self.class_): - raise_ValueError("{} parameter {} must be a subclass of {}, not {}.".format( - 
self.__class__.__name__, self.name, self._get_class_name(), value.__name__), self) - return value - - @property - def range(self): - """ - Return the possible types for this parameter's value. - - (I.e. return `{name: }` for all classes that are - concrete_descendents() of `self.class_`.) - - Only classes from modules that have been imported are added - (see concrete_descendents()). - """ - classes = self.class_ if isinstance(self.class_, tuple) else (self.class_,) - all_classes = {} - for cls in classes: - all_classes.update(concrete_descendents(cls)) - d = OrderedDict((name, class_) for name,class_ in all_classes.items()) - if self.allow_None: - d['None'] = None - return d - - - -class TupleSelector(Selector): - """ - Variant of Selector where the value can be multiple objects from - a list of possible objects. - """ - - # Changed from ListSelector. Iterables need to be frozen to prevent spurious addition. - # To prevent duplicates, use frozen set selector - - __slots__ = ['accept_list'] - - def __init__(self, *, objects : typing.List, default : typing.Any, accept_list : bool = True, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(objects=objects, default=default, empty_default=True, doc=doc, - constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, - deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, - precedence=precedence) - self.accept_list = accept_list - - def validate_and_adapt(self, value : typing.Any): - if value is None and self.allow_None: - return - if value not in 
self.objects: - # i.e. without iterating, we check that the value is not present in the objects - # This is useful to have list or iterables themselves as part of objects - # let objects = [[1,2], 3 ,4], if [1,2] is passed, then we should try to accept it plainly before moving to iterating - # and checking - if isinstance(value, list) and self.accept_list: - value = tuple(value) - if not isinstance(value, tuple): - raise_ValueError(f"object {value} not specified as a valid member of list of objects.", self) - else: - for obj in value: - if obj not in self.objects: - raise_ValueError("object {} not specified as a valid member of list of objects.".format(obj), self) - return value - - -# For portable code: -# - specify paths in unix (rather than Windows) style; -# - use resolve_path(path_to_file=True) for paths to existing files to be read, -# - use resolve_path(path_to_file=False) for paths to existing folders to be read, -# and normalize_path() for paths to new files to be written. - -class resolve_path(ParameterizedFunction): - """ - Find the path to an existing file, searching the paths specified - in the search_paths parameter if the filename is not absolute, and - converting a UNIX-style path to the current OS's format if - necessary. - - To turn a supplied relative path into an absolute one, the path is - appended to paths in the search_paths parameter, in order, until - the file is found. - - An IOError is raised if the file is not found. - - Similar to Python's os.path.abspath(), except more search paths - than just os.getcwd() can be used, and the file must exist. - """ - - search_paths = List(default=[os.getcwd()], doc=""" - Prepended to a non-relative path, in order, until a file is - found.""") - - path_to_file = Boolean(default=True, - allow_None=True, doc=""" - String specifying whether the path refers to a 'File' or a - 'Folder'. 
If None, the path may point to *either* a 'File' *or* - a 'Folder'.""") - - def __call__(self, path : str, **params) -> str: - p = ParamOverrides(self, params) - path = os.path.normpath(path) - ftype = "File" if p.path_to_file is True \ - else "Folder" if p.path_to_file is False else "Path" - - if not p.search_paths: - p.search_paths = [os.getcwd()] - - if os.path.isabs(path): - if ((p.path_to_file is None and os.path.exists(path)) or - (p.path_to_file is True and os.path.isfile(path)) or - (p.path_to_file is False and os.path.isdir( path))): - return path - raise IOError("%s '%s' not found." % (ftype,path)) - - else: - paths_tried = [] - for prefix in p.search_paths: - try_path = os.path.join(os.path.normpath(prefix), path) - - if ((p.path_to_file is None and os.path.exists(try_path)) or - (p.path_to_file is True and os.path.isfile(try_path)) or - (p.path_to_file is False and os.path.isdir( try_path))): - return try_path - - paths_tried.append(try_path) - - raise IOError(ftype + " " + os.path.split(path)[1] + " was not found in the following place(s): " + str(paths_tried) + ".") - - -class normalize_path(ParameterizedFunction): - """ - Convert a UNIX-style path to the current OS's format, - typically for creating a new file or directory. - - If the path is not already absolute, it will be made absolute - (using the prefix parameter). - - Should do the same as Python's os.path.abspath(), except using - prefix rather than os.getcwd). - """ - - prefix = String(default=os.getcwd(),doc=""" - Prepended to the specified path, if that path is not - absolute.""") - - def __call__(self, path : str = "", **params): - p = ParamOverrides(self,params) - - if not os.path.isabs(path): - path = os.path.join(os.path.normpath(p.prefix), path) - - return os.path.normpath(path) - - - -class Path(Parameter): - """ - Parameter that can be set to a string specifying the path of a file or folder. 
- - The string should be specified in UNIX style, but it will be - returned in the format of the user's operating system. Please use - the Filename or Foldername classes if you require discrimination - between the two possibilities. - - The specified path can be absolute, or relative to either: - - * any of the paths specified in the search_paths attribute (if - search_paths is not None); - - or - - * any of the paths searched by resolve_path() (if search_paths - is None). - """ - - __slots__ = ['search_paths'] - - def __init__(self, default : typing.Any = '', *, search_paths : typing.Optional[str] = None, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, doc=doc, - constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, - deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, - precedence=precedence) - if isinstance(search_paths, str): - self.search_paths = [search_paths] - elif isinstance(search_paths, list): - self.search_paths = search_paths - else: - self.search_paths = [] - - def _resolve(self, path): - return resolve_path(path, path_to_file=None, search_paths=self.search_paths) - - def validate_and_adapt(self, value : typing.Any) -> typing.Any: - if value is None and self.allow_None: - return - else: - return self._resolve(value) - - def __get__(self, obj, objtype) -> str: - """ - Return an absolute, normalized path (see resolve_path). 
- """ - raw_path = super().__get__(obj, objtype) - return None if raw_path is None else self._resolve(raw_path) - - def __getstate__(self): - # don't want to pickle the search_paths - state = super().__getstate__() - if 'search_paths' in state: - state['search_paths'] = [] - return state - - - -class Filename(Path): - """ - Parameter that can be set to a string specifying the path of a file. - - The string should be specified in UNIX style, but it will be - returned in the format of the user's operating system. - - The specified path can be absolute, or relative to either: - - * any of the paths specified in the search_paths attribute (if - search_paths is not None); - - or - - * any of the paths searched by resolve_path() (if search_paths - is None). - """ - - def _resolve(self, path): - return resolve_path(path, path_to_file=True, search_paths=self.search_paths) - - -class Foldername(Path): - """ - Parameter that can be set to a string specifying the path of a folder. - - The string should be specified in UNIX style, but it will be - returned in the format of the user's operating system. - - The specified path can be absolute, or relative to either: - - * any of the paths specified in the search_paths attribute (if - search_paths is not None); - - or - - * any of the paths searched by resolve_dir_path() (if search_paths - is None). - """ - - def _resolve(self, path): - return resolve_path(path, path_to_file=False, search_paths=self.search_paths) - - - -def abbreviate_paths(pathspec,named_paths): - """ - Given a dict of (pathname,path) pairs, removes any prefix shared by all pathnames. - Helps keep menu items short yet unambiguous. - """ - - prefix = os.path.commonprefix([os.path.dirname(name)+os.path.sep for name in named_paths.keys()]+[pathspec]) - return OrderedDict([(name[len(prefix):],path) for name,path in named_paths.items()]) - - - -class FileSelector(Selector): - """ - Given a path glob, allows one file to be selected from those matching. 
- """ - __slots__ = ['path'] - - def __init__(self, default : typing.Any, *, objects : typing.List, path : str = "", - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, objects=objects, empty_default=True, doc=doc, - constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, - deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, - precedence=precedence) - self.path = path # update is automatically called - - def _post_slot_set(self, slot: str, old : typing.Any, value : typing.Any) -> None: - super()._post_slot_set(slot, old, value) - if slot == 'path': - self.update() - - def update(self): - self.objects = sorted(glob.glob(self.path)) - if self.default in self.objects: - return - self.default = self.objects[0] if self.objects else None - - @property - def range(self): - return abbreviate_paths(self.path, super().range) - - - -class MultiFileSelector(FileSelector): - """ - Given a path glob, allows multiple files to be selected from the list of matches. 
- """ - __slots__ = ['path'] - - def __init__(self, default : typing.Any, *, path : str = "", - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - label : typing.Optional[str] = None, per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, objects=None, doc=doc, - constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, - deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, - fdel=fdel, precedence=precedence) - - def update(self): - self.objects = sorted(glob.glob(self.path)) - if self.default and all([o in self.objects for o in self.default]): - return - self.default = self.objects - - - -class Date(Number): - """ - Date parameter of datetime or date type. 
- """ - - def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None, - crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, bounds=bounds, crop_to_bounds=crop_to_bounds, - inclusive_bounds=inclusive_bounds, step=step, doc=doc, - constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, - deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, - precedence=precedence) - self.dtype = dt_types - - def _validate_step(self, val): - if self.step is not None and not isinstance(self.step, dt_types): - raise ValueError(f"Step can only be None, a datetime or datetime type, not type {type(val)}") - - @classmethod - def serialize(cls, value): - if value is None: - return None - if not isinstance(value, (dt.datetime, dt.date)): # i.e np.datetime64, note numpy is imported only on requirement - value = value.astype(dt.datetime) - return value.strftime("%Y-%m-%dT%H:%M:%S.%f") - - @classmethod - def deserialize(cls, value): - if value == None: - return None - return dt.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f") - - - -class CalendarDate(Number): - """ - Parameter specifically allowing dates (not datetimes). 
- """ - - def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None, - crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, bounds=bounds, crop_to_bounds=crop_to_bounds, - inclusive_bounds=inclusive_bounds, step=step, doc=doc, - constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, - deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, - precedence=precedence) - self.dtype = dt.date - - def _validate_step(self, step): - if step is not None and not isinstance(step, self.dtype): - raise ValueError("Step can only be None or a date type.") - - @classmethod - def serialize(cls, value): - if value is None: - return None - return value.strftime("%Y-%m-%d") - - @classmethod - def deserialize(cls, value): - if value is None: - return None - return dt.datetime.strptime(value, "%Y-%m-%d").date() - - - -class CSS3Color(Parameter): - """ - Color parameter defined as a hex RGB string with an optional # - prefix or (optionally) as a CSS3 color name. 
- """ - - # CSS3 color specification https://www.w3.org/TR/css-color-3/#svg-color - _named_colors = [ 'aliceblue', 'antiquewhite', 'aqua', - 'aquamarine', 'azure', 'beige', 'bisque', 'black', - 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', - 'cadetblue', 'chartreuse', 'chocolate', 'coral', - 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', - 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgrey', - 'darkgreen', 'darkkhaki', 'darkmagenta', 'darkolivegreen', - 'darkorange', 'darkorchid', 'darkred', 'darksalmon', - 'darkseagreen', 'darkslateblue', 'darkslategray', - 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', - 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', - 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', - 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', - 'grey', 'green', 'greenyellow', 'honeydew', 'hotpink', - 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', - 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', - 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', - 'lightgray', 'lightgrey', 'lightgreen', 'lightpink', - 'lightsalmon', 'lightseagreen', 'lightskyblue', - 'lightslategray', 'lightslategrey', 'lightsteelblue', - 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', - 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', - 'mediumpurple', 'mediumseagreen', 'mediumslateblue', - 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', - 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', - 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', - 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', - 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', - 'peru', 'pink', 'plum', 'powderblue', 'purple', 'red', - 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', - 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', - 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow', - 'springgreen', 'steelblue', 'tan', 'teal', 'thistle', - 'tomato', 'turquoise', 
'violet', 'wheat', 'white', - 'whitesmoke', 'yellow', 'yellowgreen'] - - __slots__ = ['allow_named'] - - def __init__(self, default, *, allow_named : bool = True, doc : typing.Optional[str] = None, constant : bool = False, - readonly : bool = False, allow_None : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - super().__init__(default=default, doc=doc, - constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, - deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, - precedence=precedence) - self.allow_named = allow_named - - def validate_and_adapt(self, value : typing.Any): - if (self.allow_None and value is None): - return - if not isinstance(value, str): - raise ValueError("Color parameter %r expects a string value, " - "not an object of type %s." % (self.name, type(value))) - if self.allow_named and value in self._named_colors: - return - is_hex = re.match('^#?(([0-9a-fA-F]{2}){3}|([0-9a-fA-F]){3})$', value) - if not is_hex: - raise ValueError("Color '%s' only takes RGB hex codes " - "or named colors, received '%s'." % (self.name, value)) - - - -class Range(Tuple): - """ - A numeric range with optional bounds and softbounds. 
- """ - - __slots__ = ['bounds', 'inclusive_bounds', 'softbounds', 'step'] - - def __init__(self, default : typing.Optional[typing.Tuple] = None, *, bounds: typing.Optional[typing.Tuple[int, int]] = None, - length : typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None, - softbounds=None, inclusive_bounds=(True,True), step=None, - doc : typing.Optional[str] = None, constant : bool = False, - readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - self.inclusive_bounds = inclusive_bounds - self.softbounds = softbounds - self.step = step - super().__init__(default=default, bounds=bounds, item_type=item_type, length=length, doc=doc, - constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, - deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, - precedence=precedence) - - def validate_and_adapt(self, value : typing.Any) -> typing.Tuple: - raise NotImplementedError("Range validation not implemented") - super()._validate(val) - self._validate_bounds(val, self.bounds, self.inclusive_bounds) - - def _validate_bounds(self, val, bounds, inclusive_bounds): - if bounds is None or (val is None and self.allow_None): - return - vmin, vmax = bounds - incmin, incmax = inclusive_bounds - for bound, v in zip(['lower', 'upper'], val): - too_low = (vmin is not None) and (v < vmin if incmin else v <= vmin) - too_high = (vmax is not None) and (v > vmax if incmax else v >= vmax) - if too_low or too_high: - raise ValueError("Range parameter %r's %s bound must be in range %s." 
- % (self.name, bound, self.rangestr())) - - @property - def rangestr(self): - vmin, vmax = self.bounds - incmin, incmax = self.inclusive_bounds - incmin = '[' if incmin else '(' - incmax = ']' if incmax else ')' - return '%s%s, %s%s' % (incmin, vmin, vmax, incmax) - - - -class DateRange(Range): - """ - A datetime or date range specified as (start, end). - - Bounds must be specified as datetime or date types (see param.dt_types). - """ - - def _validate_value(self, val, allow_None): - # Cannot use super()._validate_value as DateRange inherits from - # NumericTuple which check that the tuple values are numbers and - # datetime objects aren't numbers. - if allow_None and val is None: - return - - if not isinstance(val, tuple): - raise ValueError("DateRange parameter %r only takes a tuple value, " - "not %s." % (self.name, type(val).__name__)) - for n in val: - if isinstance(n, dt_types): - continue - raise ValueError("DateRange parameter %r only takes date/datetime " - "values, not type %s." % (self.name, type(n).__name__)) - - start, end = val - if not end >= start: - raise ValueError("DateRange parameter %r's end datetime %s " - "is before start datetime %s." % - (self.name, val[1], val[0])) - - @classmethod - def serialize(cls, value): - if value is None: - return 'null' - # List as JSON has no tuple representation - serialized = [] - for v in value: - if not isinstance(v, (dt.datetime, dt.date)): # i.e np.datetime64 - v = v.astype(dt.datetime) - # Separate date and datetime to deserialize to the right type. 
- if type(v) == dt.date: - v = v.strftime("%Y-%m-%d") - else: - v = v.strftime("%Y-%m-%dT%H:%M:%S.%f") - serialized.append(v) - return serialized - - def deserialize(cls, value): - if value == 'null': - return None - deserialized = [] - for v in value: - # Date - if len(v) == 10: - v = dt.datetime.strptime(v, "%Y-%m-%d").date() - # Datetime - else: - v = dt.datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f") - deserialized.append(v) - # As JSON has no tuple representation - return tuple(deserialized) - - - -class CalendarDateRange(Range): - """ - A date range specified as (start_date, end_date). - """ - def _validate_value(self, val, allow_None): - if allow_None and val is None: - return - - for n in val: - if not isinstance(n, dt.date): - raise ValueError("CalendarDateRange parameter %r only " - "takes date types, not %s." % (self.name, val)) - - start, end = val - if not end >= start: - raise ValueError("CalendarDateRange parameter %r's end date " - "%s is before start date %s." % - (self.name, val[1], val[0])) - - @classmethod - def serialize(cls, value): - if value is None: - return 'null' - # As JSON has no tuple representation - return [v.strftime("%Y-%m-%d") for v in value] - - @classmethod - def deserialize(cls, value): - if value == 'null': - return None - # As JSON has no tuple representation - return tuple([dt.datetime.strptime(v, "%Y-%m-%d").date() for v in value]) - - - - -def get_typed_iterable_bounds(bounds : tuple) -> tuple: - if bounds[0] is None and bounds[1] is None: - bounds = (0, 2*sys.maxsize + 1) - elif bounds[0] is None: - bounds = (0, bounds[1]) - elif bounds[1] is None: - bounds = (bounds[0], 2*sys.maxsize + 1) - return bounds - - -class BaseConstrainedList(collections.abc.MutableSequence): - - # Need to check mul - - def __init__(self, default : typing.List[typing.Any], *, bounds : tuple = (0, None), - constant : bool = False, skip_validate : bool = False) -> None: - super().__init__() - self.constant = constant - self.bounds = 
get_typed_iterable_bounds(bounds) - if not skip_validate: - self._validate_for_set(default) - self._inner = default - - def _validate_for_set(self, value : typing.Any) -> None: - self._validate_value(value) - self._validate_bounds_for_set(value) - self._validate_items(value) - - def _validate_for_extension(self, value : typing.List) -> None: - if self.constant: - raise ValueError(f"List {get_iterable_printfriendly_repr(self._inner)} is a constant, cannot be modified.") - self._validate_value(value) - self._validate_bounds_for_extension(value) - self._validate_items(value) - - def _validate_for_insertion(self, value : typing.Any) -> None: - if self.constant: - raise ValueError(f"List {get_iterable_printfriendly_repr(self._inner)} is a constant, cannot be modified.") - self._validate_bounds_for_extension() - self._validate_item(value) - - def _validate_value(self, value : typing.Any) -> None: - if not isinstance(value, list): - raise TypeError(f"Given value for a constrained list is not a list, but type {type(value)}") - - def _validate_items(self, value : typing.Any) -> None: - raise NotImplementedError("Please implement _validate_item in the child of BaseConstrainedList.") - - def _validate_item(self, value : typing.Any): - raise NotImplementedError("Please implement _validate_single_item in the child of BaseConstrainedList.") - - def _validate_bounds_for_set(self, value : typing.Any) -> None: - if not (value.__len__() >= self.bounds[0] and value.__len__() <= self.bounds[1]): - raise ValueError(wrap_error_text( - f"""given list {get_iterable_printfriendly_repr(value)} has length out of bounds {self.bounds}. 
- given length : {value.__len__()}""")) - - def _validate_bounds_for_extension(self, value : typing.Any = [None]) -> None: - if not (self._inner.__len__() + value.__len__() >= self.bounds[0] and - self._inner.__len__() + value.__len__() <= self.bounds[1]): - raise ValueError(wrap_error_text( - f"""given list for extending {get_iterable_printfriendly_repr(value)} extends existing list longer - than bounds {self.bounds}. given length : {self._inner.__len__() + value.__len__()}""")) - - def __len__(self) -> int: - return self._inner.__len__() - - def __iter__(self) -> typing.Any: - return self._inner.__iter__() - - def __str__(self) -> str: - return self._inner.__str__() - - def __contains__(self, item : typing.Any) -> bool: - return item in self._inner - - def __getitem__(self, index : int): - return self._inner[index] - - def __setitem__(self, index : int, value : typing.Any) -> None: - if self.constant: - raise ValueError(f"List {get_iterable_printfriendly_repr(self._inner)} is a constant, cannot be modified.") - self._validate_item(value) - self._inner[index] = value - - def __delitem__(self, index : int) -> None: - del self._inner[index] - - def __repr__(self) -> str: - return self._inner.__repr__() - - def __imul__(self, value : typing.Any) -> typing.List: - return self._inner.__imul__(value) - - def __mul__(self, value : typing.Any) -> typing.List: - return self._inner.__mul__(value) - - def __sizeof__(self) -> int: - return self._inner.__sizeof__() - - def __lt__(self, __x : typing.List[typing.Any]) -> bool: - return self._inner.__lt__(__x) - - def __le__(self, __x : typing.List[typing.Any]) -> bool: - return self._inner.__le__(__x) - - def __eq__(self, __x : typing.List[typing.Any]) -> bool: - return self._inner.__eq__(__x) - - def __ne__(self, __x : typing.List[typing.Any]) -> bool: - return self._inner.__ne__(__x) - - def __gt__(self, __x : typing.List[typing.Any]) -> bool: - return self._inner.__gt__(__x) - - def __ge__(self, __x : typing.List[typing.Any]) 
-> bool: - return self._inner.__ge__(__x) - - def __rmul__(self, __n : int) -> typing.List: - return self._inner.__rmul__(__n) - - def __reversed__(self) -> typing.Iterator: - return self._inner.__reversed__() - - def __add__(self, __x : typing.List[typing.Any]) -> typing.List: - if isinstance(__x, self.__class__): - return self._inner.__add__(__x._inner) - else: - return self._inner.__add__(__x) - - def __iadd__(self, values : typing.List[typing.Any] ) -> typing.List: - raise NotImplementedError("Please implement __iadd__ in the child of BaseConstrainedList.") - - def insert(self, __index : int, __object : typing.Any) -> None: - self._validate_for_insertion(__object) - self._inner.insert(__index, __object) - - def append(self, __object : typing.Any) -> None: - self._validate_for_insertion(__object) - self._inner.append(__object) - - def extend(self, __iterable) -> None: - self._validate_for_extension(__iterable) - self._inner.extend(__iterable) - - def reverse(self) -> None: - self._inner.reverse() - - def pop(self, __index: int) -> typing.Any: - return self._inner.pop(__index) - - def count(self, __value : typing.Any) -> int: - return self._inner.count(__value) - - def clear(self) -> None: - self._inner.clear() - - def index(self, __value : typing.Any, __start : int, __stop : int) -> int: - return self._inner.index(__value, __start, __stop) - - def remove(self, __value : typing.Any) -> None: - self._inner.remove(__value) - - def sort(self, key : typing.Any, reverse : bool): - self._inner.sort(key=key, reverse=reverse) - - def copy(self, return_as_typed_list : bool = False): - raise NotImplementedError("Please implement copy() in the child of BaseConstrainedList.") - - - -class TypeConstrainedList(BaseConstrainedList): - - def __init__(self, default : typing.List, *, item_type : typing.Any = None, - bounds : tuple = (0,None), constant : bool = False, skip_validate : bool = False) -> None: - self.item_type = item_type - super().__init__(default=default, 
bounds=bounds, constant=constant, skip_validate=skip_validate) - - def _validate_items(self, value : typing.Any) -> None: - if self.item_type is not None: - for val in value: - if not isinstance(val, self.item_type): - raise TypeError( - wrap_error_text(f""" - Not all elements of list {get_iterable_printfriendly_repr(value)} given are of allowed item type(s), - which are : {self.item_type}. Given type {type(val)}. Cannot set or extend typed list.""" - )) - - def _validate_item(self, value : typing.Any): - if self.item_type is not None and not isinstance(value, self.item_type): - raise TypeError( - wrap_error_text(f""" - Not all elements given are of allowed item type(s), which are : {self.item_type}. - Given type {type(value)}. Cannot append or insert in typed list.""" - )) - - def __iadd__(self, value : typing.List[typing.Any]): - self._validate_for_extension(value) - return TypeConstrainedList(default=self._inner.__iadd__(value), item_type=self.item_type, bounds=self.bounds, - constant=self.constant, skip_validate=True) - - def copy(self, return_as_typed_list : bool = False) -> typing.Union["TypeConstrainedList", typing.List[typing.Any]]: - if return_as_typed_list: - return TypeConstrainedList(default=self._inner.copy(), item_type=self.item_type, bounds=self.bounds, - constant=self.constant, skip_validate=True) - else: - return self._inner.copy() - - - -class TypedList(ClassSelector): - - __slots__ = ['item_type', 'bounds', 'accept_nonlist_object'] - - def __init__(self, default : typing.Optional[typing.List[typing.Any]] = None, *, item_type : typing.Any = None, - deepcopy_default : bool = True, accept_nonlist_object : bool = False, - allow_None : bool = True, bounds : tuple = (0,None), - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - per_instance_descriptor : bool = False, class_member : bool = False, - fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : 
typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - if default is not None: - default = TypeConstrainedList(default=default, item_type=item_type, bounds=bounds, constant=constant, - skip_validate=False) # type: ignore - super().__init__(class_ = TypeConstrainedList, default=default, isinstance=True, deepcopy_default=deepcopy_default, - doc=doc, constant=constant, readonly=readonly, allow_None=allow_None, - per_instance_descriptor=per_instance_descriptor, class_member=class_member, fget=fget, fset=fset, - fdel=fdel, precedence=precedence) - self.item_type = item_type - self.bounds = bounds - self.accept_nonlist_object = accept_nonlist_object - - # @instance_descriptor - super().__set__ takes care of instance descriptors - def validate_and_adapt(self, value : typing.Any): - if self.allow_None and value is None: - return - if value is not None and self.accept_nonlist_object and not isinstance(value, list): - value = [value] - return TypeConstrainedList(default=value, item_type=self.item_type, bounds=self.bounds, - constant=self.constant, skip_validate=False) - - @classmethod - def serialize(cls, value : TypeConstrainedList) -> typing.Any: - if value is None: - return None - return value._inner - - # no need for deserialize, when __set__ is called TypeConstrainedList is automatically created - - -class TypeConstrainedDict(collections.abc.MutableMapping): - """ A dictionary which contains only ``NewDict`` values. 
""" - - def __init__(self, default : typing.Dict, *, key_type : typing.Optional[typing.Union[type, typing.Tuple]] = None, - item_type : typing.Optional[typing.Union[type, typing.Tuple]] = None, - bounds : typing.Tuple = (0, None), constant : bool = False, skip_validate : bool = False): - super().__init__() - self.key_type = key_type - self.item_type = item_type - self.bounds = get_typed_iterable_bounds(bounds) - self.constant = constant - if not skip_validate: - self._validate_for_set(default) - self._inner = default - - def _validate_for_set(self, value : typing.Dict) -> None: - self._validate_value(value) - self._validate_bounds_for_set(value) - self._validate_items(value) - - def _validate_for_insertion(self, value : typing.Dict) -> None: - if self.constant: - raise ValueError(f"Dict {get_iterable_printfriendly_repr(self._inner)} is a constant and cannot be modified.") - self._validate_value(value) - self._validate_bounds_for_extension(value) - self._validate_items(value) - - def _validate_value(self, value) -> None: - if not isinstance(value, dict): - raise TypeError(wrap_error_text(f""" - Given value for typed dictionary is not a dictionary. Given type : {type(value)}. Expected dictionary.""")) - - def _validate_bounds_for_set(self, value : typing.Dict) -> None: - if not (self.bounds[0] <= value.__len__() <= self.bounds[1]): - raise ValueError(wrap_error_text(f""" - Given dictionary length outside bounds. Given length {value.__len__()}, expected length : {self.bounds}""")) - - def _validate_bounds_for_extension(self, value : typing.Dict = {"dummy" : "dummy"}) -> None: - if not (self.bounds[0] <= self._inner.__len__() + value.__len__() <= self.bounds[1]): - raise ValueError(wrap_error_text(f""" - Extending dictionary crosses bounds. 
Existing length {self._inner.__len__()}, - length of items to be added : {value.__len__()}, allowed bounds : {self.bounds}""")) - - def _validate_items(self, value : typing.Dict[typing.Any, typing.Any]) -> None: - keys = value.keys() - values = value.values() - if self.key_type is not None and len(keys) != 0: - for key in keys: - if not isinstance(key, self.key_type): - raise TypeError(wrap_error_text(f""" - Keys for typed dictionary contain incompatible types. - Allowed types : {self.key_type}, given type : {type(key)}""")) - if self.item_type is not None and len(values) != 0: - for value in values: - if not isinstance(value, self.item_type): - raise TypeError(wrap_error_text(f""" - Values for typed dictionary contain incompatible types. - Allowed types : {self.item_type}. given type : {type(value)}""")) - - def _validate_key_value_pair(self, __key : typing.Any, __value : typing.Any) -> None: - if self.key_type is not None: - if not isinstance(__key, self.key_type): - raise TypeError("given key {} is not of {}.".format(__key, self.key_type)) - if self.item_type is not None: - if not isinstance(__value, self.item_type): - raise TypeError("given item {} is not of {}.".format(__value, self.item_type)) - - def __iter__(self) -> typing.Iterator: - return self._inner.__iter__() - - def __setitem__(self, __key : typing.Any, __value : typing.Any) -> None: - if self.constant: - raise ValueError(f"Dict {get_iterable_printfriendly_repr(self._inner)} is a constant and cannot be modified.") - if __key not in self._inner: - self._validate_bounds_for_extension() - self._validate_key_value_pair(__key, __value) - self._inner.__setitem__(__key, __value) - - def __delitem__(self, __v : typing.Any) -> None: - self._inner.__delitem__(__v) - - def __getitem__(self, __k : typing.Any) -> typing.Any: - return self._inner.__getitem__(__k) - - def __str__(self) -> str: - return self._inner.__str__() - - def __len__(self) -> int: - return self._inner.__len__() - - def __contains__(self, __o 
: object) -> bool: - return self._inner.__contains__(__o) - - def __eq__(self, __o: object) -> bool: - return self._inner.__eq__(__o) - - def __ne__(self, __o: object) -> bool: - return self._inner.__ne__(__o) - - def __format__(self, __format_spec: str) -> str: - return self._inner.__format__(__format_spec) - - def __sizeof__(self) -> int: - return self._inner.__sizeof__() - - def __repr__(self) -> str: - return self._inner.__repr__() - - def fromkeys(self, __iterable, __value : typing.Any): - return self._inner.fromkeys(__iterable, __value) - - def keys(self) -> typing.Any: - return self._inner.keys() - - def items(self) -> typing.Any: - return self._inner.items() - - def values(self) -> typing.Any: - return self._inner.values() - - def get(self, __key : typing.Any, __default : typing.Any = None): - return self._inner.get(__key, __default) - - def setdefault(self, __key : typing.Any) -> None: - self._inner.setdefault(__key) - - def clear(self) -> None: - self._inner.clear() - - def copy(self, return_as_typed : bool = False) -> typing.Union["TypeConstrainedDict", typing.Dict]: - if return_as_typed: - return TypeConstrainedDict(default=self._inner.copy(), key_type=self.key_type, item_type=self.item_type, - bounds=self.bounds, constant=self.constant, skip_validate=True) - else: - return self._inner.copy() - - def popitem(self) -> tuple: - return self._inner.popitem() - - def pop(self, __key : typing.Any) -> typing.Any: - return self._inner.pop(__key) - - def update(self, __o : typing.Any) -> None: - self._validate_for_insertion(__o) - self._inner.update(__o) - - -class TypedKeyMappingsConstrainedDict(TypeConstrainedDict): - - def __init__(self, default: typing.Dict, *, type_mapping : typing.Dict, - allow_unspecified_keys : bool = False, bounds : tuple = (0, None), constant : bool = False, - skip_validate : bool = False) -> None: - self.type_mapping = type_mapping - self.allow_unspecified_keys = allow_unspecified_keys - self.key_list = self.type_mapping.keys() - 
super().__init__(default, key_type = None, item_type = None, bounds = bounds, constant = constant, - skip_validate=skip_validate) - - def _validate_items(self, value: typing.Dict) -> None: - for key, val in value.items(): - self._validate_key_value_pair(key, val) - - def _validate_key_value_pair(self, __key: typing.Any, __value: typing.Any) -> None: - if __key not in self.key_list: - if self.allow_unspecified_keys: - pass - else: - raise KeyError(f"Keys except {self.key_list} not allowed for typed dictionary. Given key : {__key}.") - elif not isinstance(__value, self.type_mapping[__key]): - raise TypeError(wrap_error_text(f""" - Value for key {__key} not of expected type : {self.type_mapping[__key]}. Given type : {type(__value)}.""")) - - def copy(self, return_as_typed : bool = False) -> typing.Union["TypedKeyMappingsConstrainedDict", typing.Dict]: - if return_as_typed: - return TypedKeyMappingsConstrainedDict(default=self._inner.copy(), type_mapping=self.type_mapping, - bounds=self.bounds, constant=self.constant, skip_validate=True) - else: - return self._inner.copy() - - -class TypedDict(ClassSelector): - - __slots__ = ['key_type', 'item_type', 'bounds'] - - def __init__(self, default : typing.Optional[typing.Dict] = None, *, key_type : typing.Any = None, - item_type : typing.Any = None, deepcopy_default : bool = True, allow_None : bool = True, - bounds : tuple = (0, None), doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - per_instance_descriptor : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, - fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, - precedence : typing.Optional[float] = None) -> None: - if default is not None: - default = TypeConstrainedDict(default, key_type=key_type, item_type=item_type, bounds=bounds, - constant=constant, skip_validate=False) # type: ignore - self.key_type = key_type - self.item_type = item_type - 
self.bounds = bounds - super().__init__(class_=TypeConstrainedDict, default=default, isinstance=True, deepcopy_default=deepcopy_default, - doc=doc, constant=constant, readonly=readonly, allow_None=allow_None, fget=fget, fset=fset, fdel=fdel, - per_instance_descriptor=per_instance_descriptor, class_member=class_member, precedence=precedence) - - def __set__(self, obj, value): - if value is not None: - container = TypeConstrainedDict(default=value, key_type=self.key_type, item_type=self.item_type, - bounds=self.bounds, constant=self.constant, skip_validate=False) - return super().__set__(obj, container) # re-set it to trigger param related activities - else: - return super().__set__(obj, value) # re-set it to trigger param related activities - - @classmethod - def serialize(cls, value: TypeConstrainedDict) -> typing.Any: - if value is None: - return None - return value._inner - - -class TypedKeyMappingsDict(ClassSelector): - - __slots__ = ['type_mapping', 'allow_unspecified_keys', 'bounds'] - - def __init__(self, default : typing.Optional[typing.Dict[typing.Any, typing.Any]] = None, *, - type_mapping : typing.Dict, - allow_unspecified_keys : bool = True, bounds : tuple = (0, None), - deepcopy_default : bool = True, allow_None : bool = True, - doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - per_instance_descriptor : bool = False, class_member : bool = False, - fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, - fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: - if default is not None: - default = TypedKeyMappingsConstrainedDict(default=default, type_mapping=type_mapping, - allow_unspecified_keys=allow_unspecified_keys, bounds=bounds, constant=constant, - skip_validate=False) # type: ignore - self.type_mapping = type_mapping - self.allow_unspecified_keys = allow_unspecified_keys - self.bounds = bounds - 
super().__init__(class_=TypedKeyMappingsConstrainedDict, default=default, - isinstance=True, deepcopy_default=deepcopy_default, doc=doc, constant=constant, readonly=readonly, - allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, class_member=class_member, - fget=fget, fset=fset, fdel=fdel, precedence=precedence) - - def __set__(self, obj, value): - if value is not None: - container = TypedKeyMappingsConstrainedDict(default=value, type_mapping=self.type_mapping, - allow_unspecified_keys=self.allow_unspecified_keys, bounds=self.bounds, constant=self.constant, - skip_validate=False) - return super().__set__(obj, container) - else: - return super().__set__(obj, value) # re-set it to trigger param related activities - - @classmethod - def serialize(cls, value: TypeConstrainedDict) -> typing.Any: - if value is None: - return None - return value._inner - - -def hashable(x): - """ - Return a hashable version of the given object x, with lists and - dictionaries converted to tuples. Allows mutable objects to be - used as a lookup key in cases where the object has not actually - been mutated. Lookup will fail (appropriately) in cases where some - part of the object has changed. Does not (currently) recursively - replace mutable subobjects. - """ - if isinstance(x, collections.abc.MutableSequence): - return tuple(x) - elif isinstance(x, collections.abc.MutableMapping): - return tuple([(k,v) for k,v in x.items()]) - else: - return x - - -def named_objs(objlist, namesdict=None): - """ - Given a list of objects, returns a dictionary mapping from - string name for the object to the object itself. Accepts - an optional name,obj dictionary, which will override any other - name if that item is present in the dictionary. 
- """ - objs = OrderedDict() - - objtoname = {} - unhashables = [] - if namesdict is not None: - for k, v in namesdict.items(): - try: - objtoname[hashable(v)] = k - except TypeError: - unhashables.append((k, v)) - - for obj in objlist: - if objtoname and hashable(obj) in objtoname: - k = objtoname[hashable(obj)] - elif any(obj is v for (_, v) in unhashables): - k = [k for (k, v) in unhashables if v is obj][0] - elif hasattr(obj, "name"): - k = obj.name - elif hasattr(obj, '__name__'): - k = obj.__name__ - else: - k = str(obj) - objs[k] = obj - return objs - - -def _is_abstract(class_): - try: - return class_.__abstract - except AttributeError: - return False - - -# Could be a method of ClassSelector. -def concrete_descendents(parentclass): - """ - Return a dictionary containing all subclasses of the specified - parentclass, including the parentclass. Only classes that are - defined in scripts that have been run or modules that have been - imported are included, so the caller will usually first do ``from - package import *``. - - Only non-abstract classes will be included. - """ - return dict((c.__name__, c) for c in descendents(parentclass) - if not _is_abstract(c)) - - - -__all__ = [ - 'String', 'Bytes', 'IPAddress', 'Number', 'Integer', 'Boolean', 'Iterable', 'Tuple', 'List', 'Callable', - 'CSS3Color', 'Composite', 'Selector', 'ClassSelector', 'TupleSelector', - 'Filename', 'Foldername', 'Path', 'FileSelector', 'MultiFileSelector', - 'Date', 'CalendarDate', 'Range', 'DateRange', 'CalendarDateRange', - 'TypeConstrainedList', 'TypeConstrainedDict', 'TypedKeyMappingsConstrainedDict', - 'TypedList', 'TypedDict', 'TypedKeyMappingsDict', - ] diff --git a/hololinked/param/serializer.py b/hololinked/param/serializer.py deleted file mode 100644 index c2409b6..0000000 --- a/hololinked/param/serializer.py +++ /dev/null @@ -1,343 +0,0 @@ -""" -Classes used to support string serialization of Parameters and -Parameterized objects. 
-""" - -import json -import textwrap - -class UnserializableException(Exception): - pass - -class UnsafeserializableException(Exception): - pass - -def JSONNullable(json_type): - "Express a JSON schema type as nullable to easily support Parameters that allow_None" - return {'anyOf': [ json_type, {'type': 'null'}] } - - - -class Serialization(object): - """ - Base class used to implement different types of serialization. - """ - - @classmethod - def schema(cls, pobj, subset=None): - raise NotImplementedError # noqa: unimplemented method - - @classmethod - def serialize_parameters(cls, pobj, subset=None): - """ - Serialize the parameters on a Parameterized object into a - single serialized object, e.g. a JSON string. - """ - raise NotImplementedError # noqa: unimplemented method - - @classmethod - def deserialize_parameters(cls, pobj, serialized, subset=None): - """ - Deserialize a serialized object representing one or - more Parameters into a dictionary of parameter values. - """ - raise NotImplementedError # noqa: unimplemented method - - @classmethod - def serialize_parameter_value(cls, pobj, pname): - """ - Serialize a single parameter value. - """ - raise NotImplementedError # noqa: unimplemented method - - @classmethod - def deserialize_parameter_value(cls, pobj, pname, value): - """ - Deserialize a single parameter value. - """ - raise NotImplementedError # noqa: unimplemented method - - -class JSONSerialization(Serialization): - """ - Class responsible for specifying JSON serialization, deserialization - and JSON schemas for Parameters and Parameterized classes and - objects. 
- """ - - unserializable_parameter_types = ['Callable'] - - json_schema_literal_types = { - int:'integer', float:'number', str:'string', - type(None): 'null' - } - - @classmethod - def loads(cls, serialized): - return json.loads(serialized) - - @classmethod - def dumps(cls, obj): - return json.dumps(obj) - - @classmethod - def schema(cls, pobj, safe=False, subset=None): - schema = {} - for name, p in pobj.param.objects('existing').items(): - if subset is not None and name not in subset: - continue - schema[name] = p.schema(safe=safe) - if p.doc: - schema[name]['description'] = textwrap.dedent(p.doc).replace('\n', ' ').strip() - if p.label: - schema[name]['title'] = p.label - return schema - - @classmethod - def serialize_parameters(cls, pobj, subset=None): - # components = {} - # for name, p in pobj.param.objects('existing').items(): - # if subset is not None and name not in subset: - # continue - # value = pobj.param.get_value_generator(name) - # components[name] = p.serialize(value) - # return cls.dumps(components) - JSON = {} - pobjtype = type(pobj) - for key, param in pobj.parameters.objects().items(): - if subset is not None and key not in subset: - pass - else: - value = param.__get__(pobj, pobjtype) - value = param.serialize(value) - JSON[key] = value - return JSON - - @classmethod - def deserialize_parameters(cls, pobj, serialization, subset=None): - deserialized = cls.loads(serialization) - components = {} - for name, value in deserialized.items(): - if subset is not None and name not in subset: - continue - deserialized = pobj.param[name].deserialize(value) - components[name] = deserialized - return components - - # Parameter level methods - - @classmethod - def _get_method(cls, ptype, suffix): - "Returns specialized method if available, otherwise None" - method_name = ptype.lower()+ '_' + suffix - return getattr(cls, method_name, None) - - @classmethod - def param_schema(cls, ptype, p, safe=False, subset=None): - if ptype in 
cls.unserializable_parameter_types: - raise UnserializableException - dispatch_method = cls._get_method(ptype, 'schema') - if dispatch_method: - schema = dispatch_method(p, safe=safe) - else: - schema = {'type': ptype.lower()} - return JSONNullable(schema) if p.allow_None else schema - - @classmethod - def serialize_parameter_value(cls, pobj, pname): - value = pobj.param.get_value_generator(pname) - return cls.dumps(pobj.param[pname].serialize(value)) - - @classmethod - def deserialize_parameter_value(cls, pobj, pname, value): - value = cls.loads(value) - return pobj.param[pname].deserialize(value) - - # Custom Schemas - - @classmethod - def class__schema(cls, class_, safe=False): - from .parameterized import Parameterized - if isinstance(class_, tuple): - return {'anyOf': [cls.class__schema(cls_) for cls_ in class_]} - elif class_ in cls.json_schema_literal_types: - return {'type': cls.json_schema_literal_types[class_]} - elif issubclass(class_, Parameterized): - return {'type': 'object', 'properties': class_.param.schema(safe)} - else: - return {'type': 'object'} - - @classmethod - def array_schema(cls, p, safe=False): - if safe is True: - msg = ('Array is not guaranteed to be safe for ' - 'serialization as the dtype is unknown') - raise UnsafeserializableException(msg) - return {'type': 'array'} - - @classmethod - def classselector_schema(cls, p, safe=False): - return cls.class__schema(p.class_, safe=safe) - - @classmethod - def dict_schema(cls, p, safe=False): - if safe is True: - msg = ('Dict is not guaranteed to be safe for ' - 'serialization as the key and value types are unknown') - raise UnsafeserializableException(msg) - return {'type': 'object'} - - @classmethod - def date_schema(cls, p, safe=False): - return {'type': 'string', 'format': 'date-time'} - - @classmethod - def calendardate_schema(cls, p, safe=False): - return {'type': 'string', 'format': 'date'} - - @classmethod - def tuple_schema(cls, p, safe=False): - schema = {'type': 'array'} - if 
p.length is not None: - schema['minItems'] = p.length - schema['maxItems'] = p.length - return schema - - @classmethod - def number_schema(cls, p, safe=False): - schema = {'type': p.__class__.__name__.lower() } - return cls.declare_numeric_bounds(schema, p.bounds, p.inclusive_bounds) - - @classmethod - def declare_numeric_bounds(cls, schema, bounds, inclusive_bounds): - "Given an applicable numeric schema, augment with bounds information" - if bounds is not None: - (low, high) = bounds - if low is not None: - key = 'minimum' if inclusive_bounds[0] else 'exclusiveMinimum' - schema[key] = low - if high is not None: - key = 'maximum' if inclusive_bounds[1] else 'exclusiveMaximum' - schema[key] = high - return schema - - @classmethod - def integer_schema(cls, p, safe=False): - return cls.number_schema(p) - - @classmethod - def numerictuple_schema(cls, p, safe=False): - schema = cls.tuple_schema(p, safe=safe) - schema['additionalItems'] = {'type': 'number'} - return schema - - @classmethod - def xycoordinates_schema(cls, p, safe=False): - return cls.numerictuple_schema(p, safe=safe) - - @classmethod - def range_schema(cls, p, safe=False): - schema = cls.tuple_schema(p, safe=safe) - bounded_number = cls.declare_numeric_bounds( - {'type': 'number'}, p.bounds, p.inclusive_bounds) - schema['additionalItems'] = bounded_number - return schema - - @classmethod - def list_schema(cls, p, safe=False): - schema = {'type': 'array'} - if safe is True and p.item_type is None: - msg = ('List without a class specified cannot be guaranteed ' - 'to be safe for serialization') - raise UnsafeserializableException(msg) - if p.class_ is not None: - schema['items'] = cls.class__schema(p.item_type, safe=safe) - return schema - - @classmethod - def objectselector_schema(cls, p, safe=False): - try: - allowed_types = [{'type': cls.json_schema_literal_types[type(obj)]} - for obj in p.objects] - schema = {'anyOf': allowed_types} - schema['enum'] = p.objects - return schema - except: - if safe is 
True: - msg = ('ObjectSelector cannot be guaranteed to be safe for ' - 'serialization due to unserializable type in objects') - raise UnsafeserializableException(msg) - return {} - - @classmethod - def selector_schema(cls, p, safe=False): - try: - allowed_types = [{'type': cls.json_schema_literal_types[type(obj)]} - for obj in p.objects.values()] - schema = {'anyOf': allowed_types} - schema['enum'] = p.objects - return schema - except: - if safe is True: - msg = ('Selector cannot be guaranteed to be safe for ' - 'serialization due to unserializable type in objects') - raise UnsafeserializableException(msg) - return {} - - @classmethod - def listselector_schema(cls, p, safe=False): - if p.objects is None: - if safe is True: - msg = ('ListSelector cannot be guaranteed to be safe for ' - 'serialization as allowed objects unspecified') - return {'type': 'array'} - for obj in p.objects: - if type(obj) not in cls.json_schema_literal_types: - msg = 'ListSelector cannot serialize type %s' % type(obj) - raise UnserializableException(msg) - return {'type': 'array', 'items': {'enum': p.objects}} - - @classmethod - def dataframe_schema(cls, p, safe=False): - schema = {'type': 'array'} - if safe is True: - msg = ('DataFrame is not guaranteed to be safe for ' - 'serialization as the column dtypes are unknown') - raise UnsafeserializableException(msg) - if p.columns is None: - schema['items'] = {'type': 'object'} - return schema - - mincols, maxcols = None, None - if isinstance(p.columns, int): - mincols, maxcols = p.columns, p.columns - elif isinstance(p.columns, tuple): - mincols, maxcols = p.columns - - if isinstance(p.columns, int) or isinstance(p.columns, tuple): - schema['items'] = {'type': 'object', 'minItems': mincols, - 'maxItems': maxcols} - - if isinstance(p.columns, list) or isinstance(p.columns, set): - literal_types = [{'type':el} for el in cls.json_schema_literal_types.values()] - allowable_types = {'anyOf': literal_types} - properties = {name: allowable_types for 
name in p.columns} - schema['items'] = {'type': 'object', 'properties': properties} - - minrows, maxrows = None, None - if isinstance(p.rows, int): - minrows, maxrows = p.rows, p.rows - elif isinstance(p.rows, tuple): - minrows, maxrows = p.rows - - if minrows is not None: - schema['minItems'] = minrows - if maxrows is not None: - schema['maxItems'] = maxrows - - return schema - - -serializers = dict( - json = JSONSerialization -) \ No newline at end of file diff --git a/hololinked/param/utils.py b/hololinked/param/utils.py deleted file mode 100644 index c4888f2..0000000 --- a/hololinked/param/utils.py +++ /dev/null @@ -1,89 +0,0 @@ -from collections import OrderedDict -import sys -import inspect -import typing -from functools import reduce, partial - - -def classlist(class_ : typing.Any) -> typing.Tuple[type]: - """ - Return a list of the class hierarchy above (and including) the given class. - - Same as `inspect.getmro(class_)[::-1]` - """ - return inspect.getmro(class_)[::-1] - - -def get_dot_resolved_attr(obj : typing.Any, attr : str, *args): - def _getattr(obj, attr): - return getattr(obj, attr, *args) - return reduce(_getattr, [obj] + attr.split('.')) - - -def iscoroutinefunction(function : typing.Callable) -> bool: - """ - Whether the function is an asynchronous coroutine function. - """ - import asyncio - try: - return ( - inspect.isasyncgenfunction(function) or - asyncio.iscoroutinefunction(function) - ) - except AttributeError: - return False - - -def get_method_owner(method : typing.Callable) -> typing.Any: - """ - Gets the instance that owns the supplied method - """ - if not inspect.ismethod(method): - return None - if isinstance(method, partial): - method = method.func - return method.__self__ if sys.version_info.major >= 3 else method.im_self - - -def is_ordered_dict(d): - """ - Predicate checking for ordered dictionaries. 
OrderedDict is always - ordered, and vanilla Python dictionaries are ordered for Python 3.6+ - """ - py3_ordered_dicts = (sys.version_info.major == 3) and (sys.version_info.minor >= 6) - vanilla_odicts = (sys.version_info.major > 3) or py3_ordered_dicts - return isinstance(d, OrderedDict)or (vanilla_odicts and isinstance(d, dict)) - - -def get_all_slots(class_): - """ - Return a list of slot names for slots defined in `class_` and its - superclasses. - """ - # A subclass's __slots__ attribute does not contain slots defined - # in its superclass (the superclass' __slots__ end up as - # attributes of the subclass). - all_slots = [] - parent_param_classes = [c for c in classlist(class_)[1::]] - for c in parent_param_classes: - if hasattr(c,'__slots__'): - all_slots+=c.__slots__ - return all_slots - - -def get_occupied_slots(instance): - """ - Return a list of slots for which values have been set. - - (While a slot might be defined, if a value for that slot hasn't - been set, then it's an AttributeError to request the slot's - value.) - """ - return [slot for slot in get_all_slots(type(instance)) - if hasattr(instance, slot)] - - - - -__all__ = ['classlist', 'get_dot_resolved_attr', 'iscoroutinefunction', 'get_method_owner', 'get_all_slots', - 'get_occupied_slots'] \ No newline at end of file diff --git a/hololinked/param/version.py b/hololinked/param/version.py deleted file mode 100644 index 0534ae0..0000000 --- a/hololinked/param/version.py +++ /dev/null @@ -1,771 +0,0 @@ -""" -Provide consistent and up-to-date ``__version__`` strings for -Python packages. - -See https://github.com/holoviz/autover for more information. -""" - -# The Version class is a copy of autover.version.Version v0.2.5, -# except as noted below. -# -# The current version of autover supports a workflow based on tagging -# a git repository, and reports PEP440 compliant version information. 
-# Previously, the workflow required editing of version numbers in -# source code, and the version was not necessarily PEP440 compliant. -# Version.__new__ is added here to provide the previous Version class -# (OldDeprecatedVersion) if Version is called in the old way. - - -__author__ = 'Jean-Luc Stevens' - -import os, subprocess, json - -def run_cmd(args, cwd=None): - proc = subprocess.Popen(args, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - cwd=cwd) - output, error = (str(s.decode()).strip() for s in proc.communicate()) - - # Detects errors as _either_ a non-zero return code _or_ messages - # printed to stderr, because the return code is erroneously fixed at - # zero in some cases (see https://github.com/holoviz/param/pull/389). - if proc.returncode != 0 or len(error) > 0: - raise Exception(proc.returncode, error) - return output - - - -class Version(object): - """ - A simple approach to Python package versioning that supports PyPI - releases and additional information when working with version - control. When obtaining a package from PyPI, the version returned - is a string-formatted rendering of the supplied release tuple. - For instance, release (1,0) tagged as ``v1.0`` in the version - control system will return ``1.0`` for ``str(__version__)``. Any - number of items can be supplied in the release tuple, with either - two or three numeric versioning levels typical. - - During development, a command like ``git describe`` will be used to - compute the number of commits since the last version tag, the short - commit hash, and whether the commit is dirty (has changes not yet - committed). Version tags must start with a lowercase 'v' and have a - period in them, e.g. v2.0, v0.9.8 or v0.1 and may include the PEP440 - prerelease identifiers of 'a' (alpha) 'b' (beta) or 'rc' (release - candidate) allowing tags such as v2.0.a3, v0.9.8.b3 or v0.1.rc5. 
- - Also note that when version control system (VCS) information is - used, the number of commits since the last version tag is - determined. This approach is often useful in practice to decide - which version is newer for a single developer, but will not - necessarily be reliable when comparing against a different fork or - branch in a distributed VCS. - - For git, if you want version control information available even in - an exported archive (e.g. a .zip file from GitHub), you can set - the following line in the .gitattributes file of your project:: - - __init__.py export-subst - - Note that to support pip installation directly from GitHub via git - archive, a .version file must be tracked by the repo to supply the - release number (otherwise only the short SHA is available). - - The PEP440 format returned is [N!]N(.N)*[{a|b|rc}N][.postN+SHA] - where everything before .postN is obtained from the tag, the N in - .postN is the number of commits since the last tag and the SHA is - obtained via git describe. This later portion is only shown if the - commit count since the last tag is non zero. Instead of '.post', an - alternate valid prefix such as '.rev', '_rev', '_r' or '.r' may be - supplied.""" - - def __new__(cls,**kw): - # If called in the old way, provide the previous class. Means - # PEP440/tag based workflow warning below will never appear. - if ('release' in kw and kw['release'] is not None) or \ - ('dev' in kw and kw['dev'] is not None) or \ - ('commit_count' in kw): - return OldDeprecatedVersion(**kw) - else: - return super(Version, cls).__new__(cls) - - - def __init__(self, release=None, fpath=None, commit=None, reponame=None, - commit_count_prefix='.post', archive_commit=None, **kwargs): - """ - :release: Release tuple (corresponding to the current VCS tag) - :commit Short SHA. Set to '$Format:%h$' for git archive support. - :fpath: Set to ``__file__`` to access version control information - :reponame: Used to verify VCS repository name. 
- """ - self.fpath = fpath - self._expected_commit = commit - - if release is not None or 'commit_count' in kwargs: - print('WARNING: param.Version now supports PEP440 and a new tag based workflow. See param/version.py for more details') - - self.expected_release = release - - self._commit = None if (commit is None or commit.startswith("$Format")) else commit - self._commit_count = None - self._release = None - self._dirty = False - self._prerelease = None - - self.archive_commit= archive_commit - - self.reponame = reponame - self.commit_count_prefix = commit_count_prefix - - @property - def prerelease(self): - """ - Either None or one of 'aN' (alpha), 'bN' (beta) or 'rcN' - (release candidate) where N is an integer. - """ - return self.fetch()._prerelease - - @property - def release(self): - "Return the release tuple" - return self.fetch()._release - - @property - def commit(self): - "A specification for this particular VCS version, e.g. a short git SHA" - return self.fetch()._commit - - @property - def commit_count(self): - "Return the number of commits since the last release" - return self.fetch()._commit_count - - @property - def dirty(self): - "True if there are uncommited changes, False otherwise" - return self.fetch()._dirty - - - def fetch(self): - """ - Returns a tuple of the major version together with the - appropriate SHA and dirty bit (for development version only). - """ - if self._release is not None: - return self - - self._release = self.expected_release - if not self.fpath: - self._commit = self._expected_commit - return self - - # Only git right now but easily extended to SVN, Mercurial, etc. 
- for cmd in ['git', 'git.cmd', 'git.exe']: - try: - self.git_fetch(cmd) - break - except EnvironmentError: - pass - return self - - - def git_fetch(self, cmd='git', as_string=False): - commit_argument = self._commit - output = None - try: - if self.reponame is not None: - # Verify this is the correct repository (since fpath could - # be an unrelated git repository, and autover could just have - # been copied/installed into it). - remotes = run_cmd([cmd, 'remote', '-v'], - cwd=os.path.dirname(self.fpath)) - repo_matches = ['/' + self.reponame + '.git' , - # A remote 'server:reponame.git' can also be referred - # to (i.e. cloned) as `server:reponame`. - '/' + self.reponame + ' '] - if not any(m in remotes for m in repo_matches): - try: - output = self._output_from_file() - if output is not None: - self._update_from_vcs(output) - except: pass - if output is None: - # glob pattern (not regexp) matching vX.Y.Z* tags - output = run_cmd([cmd, 'describe', '--long', '--match', - "v[0-9]*.[0-9]*.[0-9]*", '--dirty'], - cwd=os.path.dirname(self.fpath)) - if as_string: return output - except Exception as e1: - try: - output = self._output_from_file() - if output is not None: - self._update_from_vcs(output) - if self._known_stale(): - self._commit_count = None - if as_string: return output - - # If an explicit commit was supplied (e.g from git - # archive), it should take precedence over the file. - if commit_argument: - self._commit = commit_argument - return - - except IOError: - if e1.args[1] == 'fatal: No names found, cannot describe anything.': - raise Exception("Cannot find any git version tags of format v*.*") - # If there is any other error, return (release value still useful) - return self - - self._update_from_vcs(output) - - - def _known_stale(self): - """ - The commit is known to be from a file (and therefore stale) if a - SHA is supplied by git archive and doesn't match the parsed commit. 
- """ - if self._output_from_file() is None: - commit = None - else: - commit = self.commit - - known_stale = (self.archive_commit is not None - and not self.archive_commit.startswith('$Format') - and self.archive_commit != commit) - if known_stale: self._commit_count = None - return known_stale - - def _output_from_file(self, entry='git_describe'): - """ - Read the version from a .version file that may exist alongside __init__.py. - - This file can be generated by piping the following output to file: - - git describe --long --match v*.* - """ - try: - vfile = os.path.join(os.path.dirname(self.fpath), '.version') - with open(vfile, 'r') as f: - return json.loads(f.read()).get(entry, None) - except: # File may be missing if using pip + git archive - return None - - - def _update_from_vcs(self, output): - "Update state based on the VCS state e.g the output of git describe" - split = output[1:].split('-') - dot_split = split[0].split('.') - for prefix in ['a','b','rc']: - if prefix in dot_split[-1]: - prefix_split = dot_split[-1].split(prefix) - self._prerelease = prefix + prefix_split[-1] - dot_split[-1] = prefix_split[0] - - - self._release = tuple(int(el) for el in dot_split) - self._commit_count = int(split[1]) - - self._commit = str(split[2][1:]) # Strip out 'g' prefix ('g'=>'git') - - self._dirty = (split[-1]=='dirty') - return self - - def __str__(self): - """ - Version in x.y.z string format. Does not include the "v" - prefix of the VCS version tags, for pip compatibility. - - If the commit count is non-zero or the repository is dirty, - the string representation is equivalent to the output of:: - - git describe --long --match v*.* --dirty - - (with "v" prefix removed). 
- """ - known_stale = self._known_stale() - if self.release is None and not known_stale: - extracted_directory_tag = self._output_from_file(entry='extracted_directory_tag') - return 'None' if extracted_directory_tag is None else extracted_directory_tag - elif self.release is None and known_stale: - extracted_directory_tag = self._output_from_file(entry='extracted_directory_tag') - if extracted_directory_tag is not None: - return extracted_directory_tag - return '0.0.0+g{SHA}-gitarchive'.format(SHA=self.archive_commit) - - release = '.'.join(str(el) for el in self.release) - prerelease = '' if self.prerelease is None else self.prerelease - - if self.commit_count == 0 and not self.dirty: - return release + prerelease - - commit = self.commit - dirty = '-dirty' if self.dirty else '' - archive_commit = '' - if known_stale: - archive_commit = '-gitarchive' - commit = self.archive_commit - - if archive_commit != '': - postcount = self.commit_count_prefix + '0' - elif self.commit_count not in [0, None]: - postcount = self.commit_count_prefix + str(self.commit_count) - else: - postcount = '' - - components = [release, prerelease, postcount, - '' if commit is None else '+g' + commit, dirty, - archive_commit] - return ''.join(components) - - def __repr__(self): - return str(self) - - def abbrev(self): - """ - Abbreviated string representation of just the release number. - """ - return '.'.join(str(el) for el in self.release) - - def verify(self, string_version=None): - """ - Check that the version information is consistent with the VCS - before doing a release. If supplied with a string version, - this is also checked against the current version. Should be - called from setup.py with the declared package version before - releasing to PyPI. 
- """ - if string_version and string_version != str(self): - raise Exception("Supplied string version does not match current version.") - - if self.dirty: - raise Exception("Current working directory is dirty.") - - if self.expected_release is not None and self.release != self.expected_release: - raise Exception("Declared release does not match current release tag.") - - if self.commit_count !=0: - raise Exception("Please update the VCS version tag before release.") - - if (self._expected_commit is not None - and not self._expected_commit.startswith( "$Format")): - raise Exception("Declared release does not match the VCS version tag") - - - - @classmethod - def get_setup_version(cls, setup_path, reponame, describe=False, - dirty='report', pkgname=None, archive_commit=None): - """ - Helper for use in setup.py to get the version from the .version file (if available) - or more up-to-date information from git describe (if available). - - Assumes the __init__.py will be found in the directory - {reponame}/__init__.py relative to setup.py unless pkgname is - explicitly specified in which case that name is used instead. - - If describe is True, the raw string obtained from git described is - returned which is useful for updating the .version file. - - The dirty policy can be one of 'report', 'strip', 'raise'. If it is - 'report' the version string may end in '-dirty' if the repository is - in a dirty state. If the policy is 'strip', the '-dirty' suffix - will be stripped out if present. If the policy is 'raise', an - exception is raised if the repository is in a dirty state. This can - be useful if you want to make sure packages are not built from a - dirty repository state. 
- """ - pkgname = reponame if pkgname is None else pkgname - policies = ['raise','report', 'strip'] - if dirty not in policies: - raise AssertionError("get_setup_version dirty policy must be in %r" % policies) - - fpath = os.path.join(setup_path, pkgname, "__init__.py") - version = Version(fpath=fpath, reponame=reponame, archive_commit=archive_commit) - if describe: - vstring = version.git_fetch(as_string=True) - else: - vstring = str(version) - - if version.dirty and dirty == 'raise': - raise AssertionError('Repository is in a dirty state.') - elif version.dirty and dirty=='strip': - return vstring.replace('-dirty', '') - else: - return vstring - - - @classmethod - def extract_directory_tag(cls, setup_path, reponame): - setup_dir = os.path.split(setup_path)[-1] # Directory containing setup.py - prefix = reponame + '-' # Prefix to match - if setup_dir.startswith(prefix): - tag = setup_dir[len(prefix):] - # Assuming the tag is a version if it isn't empty, 'master' and has a dot in it - if tag not in ['', 'master'] and ('.' 
in tag): - return tag - return None - - - @classmethod - def setup_version(cls, setup_path, reponame, archive_commit=None, - pkgname=None, dirty='report'): - info = {} - git_describe = None - pkgname = reponame if pkgname is None else pkgname - try: - # Will only work if in a git repo and git is available - git_describe = Version.get_setup_version(setup_path, - reponame, - describe=True, - dirty=dirty, - pkgname=pkgname, - archive_commit=archive_commit) - - if git_describe is not None: - info['git_describe'] = git_describe - except: pass - - if git_describe is None: - extracted_directory_tag = Version.extract_directory_tag(setup_path, reponame) - if extracted_directory_tag is not None: - info['extracted_directory_tag'] = extracted_directory_tag - try: - with open(os.path.join(setup_path, pkgname, '.version'), 'w') as f: - f.write(json.dumps({'extracted_directory_tag':extracted_directory_tag})) - except: - print('Error in setup_version: could not write .version file.') - - - info['version_string'] = Version.get_setup_version(setup_path, - reponame, - describe=False, - dirty=dirty, - pkgname=pkgname, - archive_commit=archive_commit) - try: - with open(os.path.join(setup_path, pkgname, '.version'), 'w') as f: - f.write(json.dumps(info)) - except: - print('Error in setup_version: could not write .version file.') - - return info['version_string'] - - - -def get_setup_version(location, reponame, pkgname=None, archive_commit=None): - """Helper for use in setup.py to get the current version from either - git describe or the .version file (if available). - - Set pkgname to the package name if it is different from the - repository name. - - To ensure git information is included in a git archive, add - setup.py to .gitattributes (in addition to __init__): - ``` - __init__.py export-subst - setup.py export-subst - ``` - Then supply "$Format:%h$" for archive_commit. 
- - """ - import warnings - pkgname = reponame if pkgname is None else pkgname - if archive_commit is None: - warnings.warn("No archive commit available; git archives will not contain version information") - return Version.setup_version(os.path.dirname(os.path.abspath(location)),reponame,pkgname=pkgname,archive_commit=archive_commit) - - -def get_setupcfg_version(): - """As get_setup_version(), but configure via setup.cfg. - - If your project uses setup.cfg to configure setuptools, and hence has - at least a "name" key in the [metadata] section, you can - set the version as follows: - ``` - [metadata] - name = mypackage - version = attr: autover.version.get_setup_version2 - ``` - - If the repository name is different from the package name, specify - `reponame` as a [tool:autover] option: - ``` - [tool:autover] - reponame = mypackage - ``` - - To ensure git information is included in a git archive, add - setup.cfg to .gitattributes (in addition to __init__): - ``` - __init__.py export-subst - setup.cfg export-subst - ``` - - Then add the following to setup.cfg: - ``` - [tool:autover.configparser_workaround.archive_commit=$Format:%h$] - ``` - - The above being a section heading rather than just a key is - because setuptools requires % to be escaped with %, or it can't - parse setup.cfg...but then git export-subst would not work. 
- - """ - try: - import configparser - except ImportError: - import ConfigParser as configparser # python2 (also prevents dict-like access) - import re - cfg = "setup.cfg" - autover_section = 'tool:autover' - config = configparser.ConfigParser() - config.read(cfg) - pkgname = config.get('metadata','name') - reponame = config.get(autover_section,'reponame',vars={'reponame':pkgname}) if autover_section in config.sections() else pkgname - - ### - # hack archive_commit into section heading; see docstring - archive_commit = None - archive_commit_key = autover_section+'.configparser_workaround.archive_commit' - for section in config.sections(): - if section.startswith(archive_commit_key): - archive_commit = re.match(r".*=\s*(\S*)\s*",section).group(1) - ### - return get_setup_version(cfg,reponame=reponame,pkgname=pkgname,archive_commit=archive_commit) - - -# from param/version.py aa087db29976d9b7e0f59c29789dfd721c85afd0 -class OldDeprecatedVersion(object): - """ - A simple approach to Python package versioning that supports PyPI - releases and additional information when working with version - control. When obtaining a package from PyPI, the version returned - is a string-formatted rendering of the supplied release tuple. - For instance, release (1,0) tagged as ``v1.0`` in the version - control system will return ``1.0`` for ``str(__version__)``. Any - number of items can be supplied in the release tuple, with either - two or three numeric versioning levels typical. - - During development, a command like ``git describe`` will be used to - compute the number of commits since the last version tag, the - short commit hash, and whether the commit is dirty (has changes - not yet committed). Version tags must start with a lowercase 'v' - and have a period in them, e.g. v2.0, v0.9.8 or v0.1. - - Development versions are supported by setting the dev argument to an - appropriate dev version number. 
The corresponding tag can be PEP440 - compliant (using .devX) of the form v0.1.dev3, v1.9.0.dev2 etc but - it doesn't have to be as the dot may be omitted i.e v0.1dev3, - v1.9.0dev2 etc. - - Also note that when version control system (VCS) information is - used, the comparison operators take into account the number of - commits since the last version tag. This approach is often useful - in practice to decide which version is newer for a single - developer, but will not necessarily be reliable when comparing - against a different fork or branch in a distributed VCS. - - For git, if you want version control information available even in - an exported archive (e.g. a .zip file from GitHub), you can set - the following line in the .gitattributes file of your project:: - - __init__.py export-subst - """ - - def __init__(self, release=None, fpath=None, commit=None, - reponame=None, dev=None, commit_count=0): - """ - :release: Release tuple (corresponding to the current VCS tag) - :commit Short SHA. Set to '$Format:%h$' for git archive support. - :fpath: Set to ``__file__`` to access version control information - :reponame: Used to verify VCS repository name. - :dev: Development version number. None if not a development version. - :commit_count Commits since last release. Set for dev releases. - """ - self.fpath = fpath - self._expected_commit = commit - self.expected_release = release - - self._commit = None if commit in [None, "$Format:%h$"] else commit - self._commit_count = commit_count - self._release = None - self._dirty = False - self.reponame = reponame - self.dev = dev - - @property - def release(self): - "Return the release tuple" - return self.fetch()._release - - @property - def commit(self): - "A specification for this particular VCS version, e.g. 
a short git SHA" - return self.fetch()._commit - - @property - def commit_count(self): - "Return the number of commits since the last release" - return self.fetch()._commit_count - - @property - def dirty(self): - "True if there are uncommited changes, False otherwise" - return self.fetch()._dirty - - - def fetch(self): - """ - Returns a tuple of the major version together with the - appropriate SHA and dirty bit (for development version only). - """ - if self._release is not None: - return self - - self._release = self.expected_release - if not self.fpath: - self._commit = self._expected_commit - return self - - # Only git right now but easily extended to SVN, Mercurial, etc. - for cmd in ['git', 'git.cmd', 'git.exe']: - try: - self.git_fetch(cmd) - break - except EnvironmentError: - pass - return self - - - def git_fetch(self, cmd='git'): - try: - if self.reponame is not None: - # Verify this is the correct repository (since fpath could - # be an unrelated git repository, and param could just have - # been copied/installed into it). - output = run_cmd([cmd, 'remote', '-v'], - cwd=os.path.dirname(self.fpath)) - repo_matches = ['/' + self.reponame + '.git' , - # A remote 'server:reponame.git' can also be referred - # to (i.e. cloned) as `server:reponame`. 
- '/' + self.reponame + ' '] - if not any(m in output for m in repo_matches): - return self - - output = run_cmd([cmd, 'describe', '--long', '--match', 'v*.*', '--dirty'], - cwd=os.path.dirname(self.fpath)) - except Exception as e: - if e.args[1] == 'fatal: No names found, cannot describe anything.': - raise Exception("Cannot find any git version tags of format v*.*") - # If there is any other error, return (release value still useful) - return self - - self._update_from_vcs(output) - - def _update_from_vcs(self, output): - "Update state based on the VCS state e.g the output of git describe" - split = output[1:].split('-') - if 'dev' in split[0]: - dev_split = split[0].split('dev') - self.dev = int(dev_split[1]) - split[0] = dev_split[0] - # Remove the pep440 dot if present - if split[0].endswith('.'): - split[0] = dev_split[0][:-1] - - self._release = tuple(int(el) for el in split[0].split('.')) - self._commit_count = int(split[1]) - self._commit = str(split[2][1:]) # Strip out 'g' prefix ('g'=>'git') - self._dirty = (split[-1]=='dirty') - return self - - - def __str__(self): - """ - Version in x.y.z string format. Does not include the "v" - prefix of the VCS version tags, for pip compatibility. - - If the commit count is non-zero or the repository is dirty, - the string representation is equivalent to the output of:: - - git describe --long --match v*.* --dirty - - (with "v" prefix removed). 
- """ - if self.release is None: return 'None' - release = '.'.join(str(el) for el in self.release) - release = '%s.dev%d' % (release, self.dev) if self.dev is not None else release - - if (self._expected_commit is not None) and ("$Format" not in self._expected_commit): - pass # Concrete commit supplied - print full version string - elif (self.commit_count == 0 and not self.dirty): - return release - - dirty_status = '-dirty' if self.dirty else '' - return '%s-%s-g%s%s' % (release, self.commit_count if self.commit_count else 'x', - self.commit, dirty_status) - - def __repr__(self): - return str(self) - - def abbrev(self,dev_suffix=""): - """ - Abbreviated string representation, optionally declaring whether it is - a development version. - """ - return '.'.join(str(el) for el in self.release) + \ - (dev_suffix if self.commit_count > 0 or self.dirty else "") - - - def __eq__(self, other): - """ - Two versions are considered equivalent if and only if they are - from the same release, with the same commit count, and are not - dirty. Any dirty version is considered different from any - other version, since it could potentially have any arbitrary - changes even for the same release and commit count. - """ - if self.dirty or other.dirty: return False - return ((self.release, self.commit_count, self.dev) - == (other.release, other.commit_count, other.dev)) - - def __gt__(self, other): - if self.release == other.release: - if self.dev == other.dev: - return self.commit_count > other.commit_count - elif None in [self.dev, other.dev]: - return self.dev is None - else: - return self.dev > other.dev - else: - return (self.release, self.commit_count) > (other.release, other.commit_count) - - def __lt__(self, other): - if self==other: - return False - else: - return not (self > other) - - - def verify(self, string_version=None): - """ - Check that the version information is consistent with the VCS - before doing a release. 
If supplied with a string version, - this is also checked against the current version. Should be - called from setup.py with the declared package version before - releasing to PyPI. - """ - if string_version and string_version != str(self): - raise Exception("Supplied string version does not match current version.") - - if self.dirty: - raise Exception("Current working directory is dirty.") - - if self.release != self.expected_release: - raise Exception("Declared release does not match current release tag.") - - if self.commit_count !=0: - raise Exception("Please update the VCS version tag before release.") - - if self._expected_commit not in [None, "$Format:%h$"]: - raise Exception("Declared release does not match the VCS version tag") diff --git a/hololinked/server/host_utilities.py b/hololinked/server/host_utilities.py index 63e5a0a..1fa5d47 100644 --- a/hololinked/server/host_utilities.py +++ b/hololinked/server/host_utilities.py @@ -101,7 +101,7 @@ async def login(self, username : str, password : str): return True return False - @post("/app/settings/new") + @post("/app/settings") async def create_app_setting(self, field : str, value : typing.Any): async with self.async_session() as session, session.begin(): session.add(self.appsettings( @@ -111,7 +111,7 @@ async def create_app_setting(self, field : str, value : typing.Any): ) session.commit() - @post("/app/settings/edit") + @put("/app/settings") async def edit_app_setting(self, field : str, value : typing.Any): async with self.async_session() as session, session.begin(): stmt = select(self.appsettings).filter_by(field = field) @@ -121,7 +121,7 @@ async def edit_app_setting(self, field : str, value : typing.Any): session.commit() return setting - @get('/app/settings/all') + @get('/app/settings') async def all_app_settings(self): async with self.async_session() as session: stmt = select(self.appsettings) @@ -129,7 +129,7 @@ async def all_app_settings(self): return {result[self.appsettings.__name__].field : 
result[self.appsettings.__name__].value["value"] for result in data.mappings().all()} - @get('/app/info/all') + @get('/app') async def all_app_settings(self): async with self.async_session() as session: stmt = select(self.appsettings) @@ -139,7 +139,7 @@ async def all_app_settings(self): for result in data.mappings().all()} } - @post('/dashboards/add') + @post('/dashboards') async def add_dashboards(self, name : str, URL : str, description : str): async with self.async_session() as session, session.begin(): session.add(self.dashboards( @@ -149,7 +149,7 @@ async def add_dashboards(self, name : str, URL : str, description : str): )) await session.commit() - @get('/dashboards/list') + @get('/dashboards') async def query_pages(self): async with self.async_session() as session: stmt = select(self.dashboards) @@ -222,7 +222,7 @@ def __init__(self, db_config_file : typing.Union[str, None], zmq_client_pool : M self.remote_object_info = remote_object_info self._uninstantiated_remote_objects : typing.Dict[str, UninstantiatedRemoteObject] = {} - @post('/subscribe') + @post('/subscribers') async def subscribe_to_host(self, host : str, port : int): client = AsyncHTTPClient() try: From df145871a0a81379875effe3e5e755608c96b6e5 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 6 Jan 2024 22:47:49 +0100 Subject: [PATCH 004/167] Revert "removed param non-git folder to add git submodule" This reverts commit 71ac90e5a505b6f105118ed00298ebd5f67166d3. 
--- hololinked/param/__init__.py | 68 + hololinked/param/_async.py | 23 + hololinked/param/exceptions.py | 64 + hololinked/param/extensions.py | 171 +++ hololinked/param/ipython.py | 357 +++++ hololinked/param/logger.py | 47 + hololinked/param/parameterized.py | 2088 ++++++++++++++++++++++++++ hololinked/param/parameters.py | 2101 +++++++++++++++++++++++++++ hololinked/param/serializer.py | 343 +++++ hololinked/param/utils.py | 89 ++ hololinked/param/version.py | 771 ++++++++++ hololinked/server/host_utilities.py | 14 +- 12 files changed, 6129 insertions(+), 7 deletions(-) create mode 100644 hololinked/param/__init__.py create mode 100644 hololinked/param/_async.py create mode 100644 hololinked/param/exceptions.py create mode 100644 hololinked/param/extensions.py create mode 100644 hololinked/param/ipython.py create mode 100644 hololinked/param/logger.py create mode 100644 hololinked/param/parameterized.py create mode 100644 hololinked/param/parameters.py create mode 100644 hololinked/param/serializer.py create mode 100644 hololinked/param/utils.py create mode 100644 hololinked/param/version.py diff --git a/hololinked/param/__init__.py b/hololinked/param/__init__.py new file mode 100644 index 0000000..1fff831 --- /dev/null +++ b/hololinked/param/__init__.py @@ -0,0 +1,68 @@ +# adapted from param holoviz - https://github.com/holoviz/param - see following license +""" +Copyright (c) 2005-2022, HoloViz team. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the + distribution. 
+ + * Neither the name of the copyright holder nor the names of any + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" + +from __future__ import print_function + +""" +Parameters are a kind of class attribute allowing special behavior, +including dynamically generated parameter values, documentation +strings, constant and read-only parameters, and type or range checking +at assignment time. + +Potentially useful for any large Python program that needs +user-modifiable object attributes; see the Parameter and Parameterized +classes for more information. If you do not want to add a dependency +on external code by importing from a separately installed param +package, you can simply save this file as param.py and copy it and +parameterized.py directly into your own package. + +This file contains subclasses of Parameter, implementing specific +parameter types (e.g. Number), and also imports the definition of +Parameters and Parameterized classes. +""" +from . 
import exceptions +from .parameterized import (Parameterized, ParameterizedFunction, ParamOverrides, Parameter, + depends_on, instance_descriptor, discard_events, edit_constant, ) + +from .logger import get_logger, logging_level, VERBOSE + +# Determine up-to-date version information, if possible, but with a +# safe fallback to ensure that this file and parameterized.py are the +# only two required files. +try: + from .version import Version + __version__ = str(Version(fpath=__file__, archive_commit="$Format:%h$", reponame="param")) +except: + __version__ = "0.0.0+unknown" + diff --git a/hololinked/param/_async.py b/hololinked/param/_async.py new file mode 100644 index 0000000..ad95021 --- /dev/null +++ b/hololinked/param/_async.py @@ -0,0 +1,23 @@ +""" +Module that implements asyncio.coroutine function wrappers to be used +by param internal callbacks. These are defined in a separate file due +to py2 incompatibility with both `async/await` and `yield from` syntax. +""" + +# import asyncio + +# def generate_depends(func): +# @asyncio.coroutine +# def _depends(*args, **kw): +# yield from func(*args, **kw) # noqa: E999 +# return _depends + + + +# def generate_callback(func, dependencies, kw): +# @asyncio.coroutine +# def cb(*events): +# args = (getattr(dep.owner, dep.name) for dep in dependencies) +# dep_kwargs = {n: getattr(dep.owner, dep.name) for n, dep in kw.items()} +# yield from func(*args, **dep_kwargs) # noqa: E999 +# return cb diff --git a/hololinked/param/exceptions.py b/hololinked/param/exceptions.py new file mode 100644 index 0000000..864139a --- /dev/null +++ b/hololinked/param/exceptions.py @@ -0,0 +1,64 @@ +import textwrap +import typing +from contextlib import contextmanager + +def wrap_error_text(text : str) -> str: + # return T.wrap(text) + #'\n'+'\n'.join([line.lstrip() + return textwrap.fill( + text = textwrap.dedent(text).lstrip(), + initial_indent='\n', + expand_tabs=True, + replace_whitespace= True, + ) + +def raise_TypeError(message, parameter) 
-> typing.NoReturn: + owner_str = '' + if isinstance(parameter, Parameter): + owner_str = f" Owner info : {parameter.owner}, parameter name : {parameter.name}." + elif issubclass(parameter, Parameter): + owner_str = '' + raise TypeError(message + owner_str) + +def raise_ValueError(message, parameter) -> typing.NoReturn: + owner_str = '' + if isinstance(parameter, Parameter): + owner_str = f" Owner info : {parameter.owner}, parameter name : {parameter.name}." + elif issubclass(parameter, Parameter): + owner_str = '' + raise ValueError(message + owner_str) + + +def get_iterable_printfriendly_repr(iterable): + # This method can be called before __init__ has called + # super's __init__, so there may not be any name set yet. + items = [] + limiter = ']' + length = 0 + for item in iterable: + string = str(item) + length += len(string) + if length < 200: + items.append(string) + else: + limiter = ', ...]' + break + items = '[' + ', '.join(items) + limiter + return items + + +@contextmanager +def exceptions_summarized(): + """Useful utility for writing docs that need to show expected errors. + Shows exception only, concisely, without a traceback. + """ + try: + yield + except Exception: + import sys + etype, value, tb = sys.exc_info() + print("{}: {}".format(etype.__name__,value), file=sys.stderr) + +from .parameterized import Parameter + +__all__ = ['wrap_error_text', 'raise_TypeError', 'raise_ValueError', 'get_iterable_printfriendly_repr'] \ No newline at end of file diff --git a/hololinked/param/extensions.py b/hololinked/param/extensions.py new file mode 100644 index 0000000..3a43ca5 --- /dev/null +++ b/hololinked/param/extensions.py @@ -0,0 +1,171 @@ +import typing +import numpy +from types import FunctionType + +from .parameterized import Parameterized, ParameterizedMetaclass +from .parameters import * + + + +class NumpyArray(ClassSelector): + """ + Parameter whose value is a numpy array. 
+ """ + + def __init__(self, default=None, doc : typing.Union[str, None] = None, + constant : bool = False, readonly : bool = False, allow_None : bool = False, + label : typing.Union[str, None] = None, per_instance : bool = False, deep_copy : bool = False, + class_member : bool = False, fget : FunctionType = None, fset : FunctionType = None, + precedence : float = None) -> None: + + super(NumpyArray, self).__init__(class_=numpy.ndarray, default=default, doc=doc, + constant=constant, readonly=readonly, allow_None=allow_None, label=label, per_instance=per_instance, + deep_copy=deep_copy, class_member=class_member, fget=fget, fset=fset, precedence=precedence) + + @typing.overload + def __get__(self, obj : typing.Union[Parameterized, typing.Any], + objtype: typing.Union[ParameterizedMetaclass, typing.Any]) -> numpy.ndarray: + ... + + @classmethod + def serialize(cls, value : typing.Union[numpy.ndarray, None]): + if value is None: + return None + return value.tolist() + + @classmethod + def deserialize(cls, value): + if value == 'null' or value is None: + return None + return numpy.asarray(value) + + +from pandas import DataFrame as pdDFrame + + +class DataFrame(ClassSelector): + """ + Parameter whose value is a pandas DataFrame. + + The structure of the DataFrame can be constrained by the rows and + columns arguments: + + rows: If specified, may be a number or an integer bounds tuple to + constrain the allowable number of rows. + + columns: If specified, may be a number, an integer bounds tuple, a + list or a set. If the argument is numeric, constrains the number of + columns using the same semantics as used for rows. If either a list + or set of strings, the column names will be validated. If a set is + used, the supplied DataFrame must contain the specified columns and + if a list is given, the supplied DataFrame must contain exactly the + same columns and in the same order and no other columns. 
+ """ + + __slots__ = ['rows', 'columns', 'ordered'] + + def __init__(self, default=None, rows=None, columns=None, ordered=None, **params): + self.rows = rows + self.columns = columns + self.ordered = ordered + super(DataFrame,self).__init__(pdDFrame, default=default, **params) + self._validate(self.default) + + def _length_bounds_check(self, bounds, length, name): + message = '{name} length {length} does not match declared bounds of {bounds}' + if not isinstance(bounds, tuple): + if (bounds != length): + raise ValueError(message.format(name=name, length=length, bounds=bounds)) + else: + return + (lower, upper) = bounds + failure = ((lower is not None and (length < lower)) + or (upper is not None and length > upper)) + if failure: + raise ValueError(message.format(name=name,length=length, bounds=bounds)) + + def _validate(self, val): + super(DataFrame, self)._validate(val) + + if isinstance(self.columns, set) and self.ordered is True: + raise ValueError('Columns cannot be ordered when specified as a set') + + if self.allow_None and val is None: + return + + if self.columns is None: + pass + elif (isinstance(self.columns, tuple) and len(self.columns)==2 + and all(isinstance(v, (type(None), numbers.Number)) for v in self.columns)): # Numeric bounds tuple + self._length_bounds_check(self.columns, len(val.columns), 'Columns') + elif isinstance(self.columns, (list, set)): + self.ordered = isinstance(self.columns, list) if self.ordered is None else self.ordered + difference = set(self.columns) - set([str(el) for el in val.columns]) + if difference: + msg = 'Provided DataFrame columns {found} does not contain required columns {expected}' + raise ValueError(msg.format(found=list(val.columns), expected=sorted(self.columns))) + else: + self._length_bounds_check(self.columns, len(val.columns), 'Column') + + if self.ordered: + if list(val.columns) != list(self.columns): + msg = 'Provided DataFrame columns {found} must exactly match {expected}' + raise 
ValueError(msg.format(found=list(val.columns), expected=self.columns)) + + if self.rows is not None: + self._length_bounds_check(self.rows, len(val), 'Row') + + @classmethod + def serialize(cls, value): + if value is None: + return 'null' + return value.to_dict('records') + + @classmethod + def deserialize(cls, value): + if value == 'null': + return None + from pandas import DataFrame as pdDFrame + return pdDFrame(value) + + + +class Series(ClassSelector): + """ + Parameter whose value is a pandas Series. + + The structure of the Series can be constrained by the rows argument + which may be a number or an integer bounds tuple to constrain the + allowable number of rows. + """ + + __slots__ = ['rows'] + + def __init__(self, default=None, rows=None, allow_None=False, **params): + from pandas import Series as pdSeries + self.rows = rows + super(Series,self).__init__(pdSeries, default=default, allow_None=allow_None, + **params) + self._validate(self.default) + + def _length_bounds_check(self, bounds, length, name): + message = '{name} length {length} does not match declared bounds of {bounds}' + if not isinstance(bounds, tuple): + if (bounds != length): + raise ValueError(message.format(name=name, length=length, bounds=bounds)) + else: + return + (lower, upper) = bounds + failure = ((lower is not None and (length < lower)) + or (upper is not None and length > upper)) + if failure: + raise ValueError(message.format(name=name,length=length, bounds=bounds)) + + def _validate(self, val): + super(Series, self)._validate(val) + + if self.allow_None and val is None: + return + + if self.rows is not None: + self._length_bounds_check(self.rows, len(val), 'Row') diff --git a/hololinked/param/ipython.py b/hololinked/param/ipython.py new file mode 100644 index 0000000..030875b --- /dev/null +++ b/hololinked/param/ipython.py @@ -0,0 +1,357 @@ +""" +Optional IPython extension for working with Parameters. 
+ +This extension offers extended but completely optional functionality +for IPython users. From within IPython, it may be loaded using: + +%load_ext param.ipython + +This will register the %params line magic to allow easy inspection of +all the parameters defined on a parameterized class or object: + +%params + +All parameters of the class or object will be listed in the IPython +pager together with all their corresponding attributes and +docstrings. Note that the class or object to be inspected must already +exist in the active namespace. +""" + +__author__ = "Jean-Luc Stevens" + +import re +import sys +import itertools +import textwrap +import param + + +# Whether to generate warnings when misformatted docstrings are found +WARN_MISFORMATTED_DOCSTRINGS = False + +# ANSI color codes for the IPython pager +red = '\x1b[1;31m%s\x1b[0m' +blue = '\x1b[1;34m%s\x1b[0m' +green = '\x1b[1;32m%s\x1b[0m' +cyan = '\x1b[1;36m%s\x1b[0m' + + + +class ParamPager(object): + """ + Callable class that displays information about the supplied + Parameterized object or class in the IPython pager. + """ + + def __init__(self, metaclass=False): + """ + If metaclass is set to True, the checks for Parameterized + classes objects are disabled. This option is for use in + ParameterizedMetaclass for automatic docstring generation. + """ + # Order of the information to be listed in the table (left to right) + self.order = ['name', 'changed', 'value', 'type', 'bounds', 'mode'] + self.metaclass = metaclass + + + def get_param_info(self, obj, include_super=True): + """ + Get the parameter dictionary, the list of modifed parameters + and the dictionary of parameter values. If include_super is + True, parameters are also collected from the super classes. 
+ """ + + params = dict(obj.parameters.objects(existing=True)) + if isinstance(obj,type): + changed = [] + val_dict = dict((k,p.default) for (k,p) in params.items()) + self_class = obj + else: + changed = list(obj.param.values(onlychanged=True).keys()) + val_dict = obj.param.values() + self_class = obj.__class__ + + if not include_super: + params = dict((k,v) for (k,v) in params.items() + if k in self_class.__dict__) + + params.pop('name') # Already displayed in the title. + return (params, val_dict, changed) + + + def param_docstrings(self, info, max_col_len=100, only_changed=False): + """ + Build a string to that presents all of the parameter + docstrings in a clean format (alternating red and blue for + readability). + """ + + (params, val_dict, changed) = info + contents = [] + displayed_params = [] + for name in self.sort_by_precedence(params): + if only_changed and not (name in changed): + continue + displayed_params.append((name, params[name])) + + right_shift = max(len(name) for name, _ in displayed_params)+2 + + for i, (name, p) in enumerate(displayed_params): + heading = "%s: " % name + unindented = textwrap.dedent("< No docstring available >" if p.doc is None else p.doc) + + if (WARN_MISFORMATTED_DOCSTRINGS + and not unindented.startswith("\n") and len(unindented.splitlines()) > 1): + param.main.warning("Multi-line docstring for %r is incorrectly formatted " + " (should start with newline)", name) + # Strip any starting newlines + while unindented.startswith("\n"): + unindented = unindented[1:] + + lines = unindented.splitlines() + if len(lines) > 1: + tail = ['%s%s' % (' ' * right_shift, line) for line in lines[1:]] + all_lines = [ heading.ljust(right_shift) + lines[0]] + tail + elif len(lines) == 1: + all_lines = [ heading.ljust(right_shift) + lines[0]] + else: + all_lines = [] + + if i % 2: # Alternate red and blue for docstrings + contents.extend([red %el for el in all_lines]) + else: + contents.extend([blue %el for el in all_lines]) + + return 
"\n".join(contents) + + + def sort_by_precedence(self, parameters): + """ + Sort the provided dictionary of parameters by their precedence value. + In Python 3, preserves the original ordering for parameters with the + same precedence; for Python 2 sorts them lexicographically by name, + unless explicit precedences are provided. + """ + params = [(p, pobj) for p, pobj in parameters.items()] + key_fn = lambda x: x[1].precedence if x[1].precedence is not None else 1e-8 + sorted_params = sorted(params, key=key_fn) + groups = itertools.groupby(sorted_params, key=key_fn) + # Params preserve definition order in Python 3.6+ + dict_ordered = ( + (sys.version_info.major == 3 and sys.version_info.minor >= 6) or + (sys.version_info.major > 3) or + all(p.precedence is not None for p in parameters.values()) + ) + ordered_groups = [list(grp) if dict_ordered else sorted(grp) for (_, grp) in groups] + ordered_params = [el[0] for group in ordered_groups for el in group + if (el[0] != 'name' or el[0] in parameters)] + return ordered_params + + + def _build_table(self, info, order, max_col_len=40, only_changed=False): + """ + Collect the information about parameters needed to build a + properly formatted table and then tabulate it. + """ + + info_list, bounds_dict = [], {} + (params, val_dict, changed) = info + col_widths = dict((k,0) for k in order) + + ordering = self.sort_by_precedence(params) + for name in ordering: + p = params[name] + if only_changed and not (name in changed): + continue + + constant = 'C' if p.constant else 'V' + readonly = 'RO' if p.readonly else 'RW' + allow_None = ' AN' if hasattr(p, 'allow_None') and p.allow_None else '' + + mode = '%s %s%s' % (constant, readonly, allow_None) + + value = repr(val_dict[name]) + if len(value) > (max_col_len - 3): + value = value[:max_col_len-3] + '...' 
+ + p_dict = {'name': name, 'type': p.__class__.__name__, + 'mode': mode, 'value': value} + + if hasattr(p, 'bounds'): + lbound, ubound = (None,None) if p.bounds is None else p.bounds + + mark_lbound, mark_ubound = False, False + # Use soft_bounds when bounds not defined. + if hasattr(p, 'get_soft_bounds'): + soft_lbound, soft_ubound = p.get_soft_bounds() + if lbound is None and soft_lbound is not None: + lbound = soft_lbound + mark_lbound = True + if ubound is None and soft_ubound is not None: + ubound = soft_ubound + mark_ubound = True + + if (lbound, ubound) != (None,None): + bounds_dict[name] = (mark_lbound, mark_ubound) + p_dict['bounds'] = '(%s, %s)' % (lbound, ubound) + + for col in p_dict: + max_width = max([col_widths[col], len(p_dict[col])]) + col_widths[col] = max_width + + info_list.append((name, p_dict)) + + return self._tabulate(info_list, col_widths, changed, order, bounds_dict) + + + def _tabulate(self, info_list, col_widths, changed, order, bounds_dict): + """ + Returns the supplied information as a table suitable for + printing or paging. + + info_list: List of the parameters name, type and mode. + col_widths: Dictionary of column widths in characters + changed: List of parameters modified from their defaults. 
+ order: The order of the table columns + bound_dict: Dictionary of appropriately formatted bounds + """ + + contents, tail = [], [] + column_set = set(k for _, row in info_list for k in row) + columns = [col for col in order if col in column_set] + + title_row = [] + # Generate the column headings + for i, col in enumerate(columns): + width = col_widths[col]+2 + col = col.capitalize() + formatted = col.ljust(width) if i == 0 else col.center(width) + title_row.append(formatted) + contents.append(blue % ''.join(title_row)+"\n") + + # Format the table rows + for row, info in info_list: + row_list = [] + for i,col in enumerate(columns): + width = col_widths[col]+2 + val = info[col] if (col in info) else '' + formatted = val.ljust(width) if i==0 else val.center(width) + + if col == 'bounds' and bounds_dict.get(row,False): + (mark_lbound, mark_ubound) = bounds_dict[row] + lval, uval = formatted.rsplit(',') + lspace, lstr = lval.rsplit('(') + ustr, uspace = uval.rsplit(')') + lbound = lspace + '('+(cyan % lstr) if mark_lbound else lval + ubound = (cyan % ustr)+')'+uspace if mark_ubound else uval + formatted = "%s,%s" % (lbound, ubound) + row_list.append(formatted) + + row_text = ''.join(row_list) + if row in changed: + row_text = red % row_text + + contents.append(row_text) + + return '\n'.join(contents+tail) + + + def __call__(self, param_obj): + """ + Given a Parameterized object or class, display information + about the parameters in the IPython pager. 
+ """ + title = None + if not self.metaclass: + parameterized_object = isinstance(param_obj, param.parameterized) + parameterized_class = (isinstance(param_obj,type) + and issubclass(param_obj,param.parameterized)) + + if not (parameterized_object or parameterized_class): + print("Object is not a Parameterized class or object.") + return + + if parameterized_object: + # Only show the name if not autogenerated + class_name = param_obj.__class__.__name__ + default_name = re.match('^'+class_name+'[0-9]+$', param_obj.name) + obj_name = '' if default_name else (' %r' % param_obj.name) + title = 'Parameters of %r instance%s' % (class_name, obj_name) + + if title is None: + title = 'Parameters of %r' % param_obj.name + + heading_line = '=' * len(title) + heading_text = "%s\n%s\n" % (title, heading_line) + + param_info = self.get_param_info(param_obj, include_super=True) + if not param_info[0]: + return "%s\n%s" % ((green % heading_text), "Object has no parameters.") + + table = self._build_table(param_info, self.order, max_col_len=40, + only_changed=False) + + docstrings = self.param_docstrings(param_info, max_col_len=100, only_changed=False) + dflt_msg = "Parameters changed from their default values are marked in red." + top_heading = (green % heading_text) + top_heading += "\n%s" % (red % dflt_msg) + top_heading += "\n%s" % (cyan % "Soft bound values are marked in cyan.") + top_heading += '\nC/V= Constant/Variable, RO/RW = ReadOnly/ReadWrite, AN=Allow None' + + heading_text = 'Parameter docstrings:' + heading_string = "%s\n%s" % (heading_text, '=' * len(heading_text)) + docstring_heading = (green % heading_string) + return "%s\n\n%s\n\n%s\n\n%s" % (top_heading, table, docstring_heading, docstrings) + + +message = """Welcome to the param IPython extension! 
(https://param.holoviz.org/)""" +message += '\nAvailable magics: %params' + +_loaded = False + +def load_ipython_extension(ip, verbose=True): + + from IPython.core.magic import Magics, magics_class, line_magic + from IPython.core import page + + + @magics_class + class ParamMagics(Magics): + """ + Implements the %params line magic used to inspect the parameters + of a parameterized class or object. + """ + def __init__(self, *args, **kwargs): + super(ParamMagics, self).__init__(*args, **kwargs) + self.param_pager = ParamPager() + + + @line_magic + def params(self, parameter_s='', namespaces=None): + """ + The %params line magic accepts a single argument which is a + handle on the parameterized object to be inspected. If the + object can be found in the active namespace, information about + the object's parameters is displayed in the IPython pager. + + Usage: %params + """ + if parameter_s=='': + print("Please specify an object to inspect.") + return + + # Beware! Uses IPython internals that may change in future... + obj = self.shell._object_find(parameter_s) + if obj.found is False: + print("Object %r not found in the namespace." 
% parameter_s) + return + + page.page(self.param_pager(obj.obj)) + + + if verbose: print(message) + + global _loaded + if not _loaded: + _loaded = True + ip.register_magics(ParamMagics) diff --git a/hololinked/param/logger.py b/hololinked/param/logger.py new file mode 100644 index 0000000..07378ff --- /dev/null +++ b/hololinked/param/logger.py @@ -0,0 +1,47 @@ +import logging +from contextlib import contextmanager +from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL, Logger + + + +VERBOSE = INFO - 1 +logging.addLevelName(VERBOSE, "VERBOSE") + +def get_logger(name : str = None) -> Logger: + if name is None: + root_logger = logging.getLogger('param') + if not root_logger.handlers: + root_logger.setLevel(logging.INFO) + formatter = logging.Formatter( + fmt='%(levelname)s:%(name)s: %(message)s') + handler = logging.StreamHandler() + handler.setFormatter(formatter) + root_logger.addHandler(handler) + return root_logger + else: + return logging.getLogger('param.' + name) + + +@contextmanager +def logging_level(level : int): + """ + Temporarily modify param's logging level. + """ + level = level.upper() + levels = [DEBUG, INFO, WARNING, ERROR, CRITICAL, VERBOSE] + level_names = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL', 'VERBOSE'] + + if level not in level_names: + raise Exception("Level %r not in %r" % (level, levels)) + + param_logger = get_logger() + logging_level = param_logger.getEffectiveLevel() + param_logger.setLevel(levels[level_names.index(level)]) + try: + yield None + finally: + param_logger.setLevel(logging_level) + + + +__all__ = ['logging_level', 'get_logger'] \ No newline at end of file diff --git a/hololinked/param/parameterized.py b/hololinked/param/parameterized.py new file mode 100644 index 0000000..974a8ed --- /dev/null +++ b/hololinked/param/parameterized.py @@ -0,0 +1,2088 @@ +""" +Generic support for objects with full-featured Parameters and +messaging. 
+ +This file comes from the Param library (https://github.com/holoviz/param) +but can be taken out of the param module and used on its own if desired, +either alone (providing basic Parameter support) or with param's +__init__.py (providing specialized Parameter types). +""" + +import copy +import datetime +import re +import numbers +import operator +import inspect +import threading +import typing +from types import FunctionType, TracebackType +from enum import Enum +from dataclasses import dataclass, field +from collections import OrderedDict, defaultdict +from functools import partial, wraps +from operator import itemgetter, attrgetter +from contextlib import contextmanager + +from .utils import * +from .exceptions import * +from .serializer import serializers + +try: + # In case the optional ipython module is unavailable + from .ipython import ParamPager + param_pager = ParamPager(metaclass=True) # Generates param description +except: + param_pager = None + +dt_types = (datetime.datetime, datetime.date) + +try: + import numpy as np + dt_types = dt_types + (np.datetime64,) +except: + pass + +# External components can register an async executor which will run +# async functions +async_executor = None + +Undefined = NotImplemented + + +def instance_descriptor(f : typing.Callable[['Parameter', 'Parameterized', typing.Any], None]) -> typing.Callable[[ + 'Parameter', 'Parameterized', typing.Any], None]: + # If parameter has an instance Parameter, delegate setting + def fset(self : 'Parameter', obj : 'Parameterized', val : typing.Any) -> None: + if hasattr(obj, 'parameters'): + if hasattr(obj.parameters, '_instance_params'): + instance_param = obj.parameters._instance_params.get(self.name, None) + if instance_param is not None and self is not instance_param: + instance_param.__set__(obj, val) + return + return f(self, obj, val) + return fset + + + +class ParameterMetaclass(type): + """ + Metaclass allowing control over creation of Parameter classes. 
+ """ + def __new__(mcs, classname : str, bases : typing.Tuple[typing.Any], + classdict : typing.Dict[str, typing.Any]) -> 'ParameterMetaclass': + + # store the class's docstring in __classdoc + if '__doc__' in classdict: + classdict['__classdoc'] = classdict['__doc__'] + + # when asking for help on Parameter *object*, return the doc slot + classdict['__doc__'] = property(attrgetter('doc')) + + # To get the benefit of slots, subclasses must themselves define + # __slots__, whether or not they define attributes not present in + # the base Parameter class. That's because a subclass will have + # a __dict__ unless it also defines __slots__. + if '__slots__' not in classdict: + classdict['__slots__'] = [] + if '__parent_slots__' not in classdict: + classdict['__parent_slots__'] = [] + + for base in bases: # there will almost always only one base because slots dont support multiple inheritance + for base_ in inspect.getmro(base): + if hasattr(base_, '__slots__'): + # check _post_slot_set in Parameter to understand the requirement + classdict['__parent_slots__'].extend(base_.__slots__) # type: ignore + + # No special handling for a __dict__ slot; should there be? + return type.__new__(mcs, classname, bases, classdict) + + def __getattribute__(mcs, name : str) -> typing.Any: + if name == '__doc__': + # when asking for help on Parameter *class*, return the + # stored class docstring + return type.__getattribute__(mcs, '__classdoc') + else: + return type.__getattribute__(mcs, name) + + + +class Parameter(metaclass=ParameterMetaclass): + """ + An attribute descriptor for declaring parameters. + + Parameters are a special kind of class attribute. Setting a + Parameterized class attribute to be a Parameter instance causes + that attribute of the class (and the class's instances) to be + treated as a Parameter. 
This allows special behavior, including + dynamically generated parameter values, documentation strings, + constant and read-only parameters, and type or range checking at + assignment time. + + For example, suppose someone wants to define two new kinds of + objects Foo and Bar, such that Bar has a parameter delta, Foo is a + subclass of Bar, and Foo has parameters alpha, sigma, and gamma + (and delta inherited from Bar). She would begin her class + definitions with something like this:: + + class Bar(Parameterized): + delta = Parameter(default=0.6, doc='The difference between steps.') + ... + class Foo(Bar): + alpha = Parameter(default=0.1, doc='The starting value.') + sigma = Parameter(default=0.5, doc='The standard deviation.', + constant=True) + gamma = Parameter(default=1.0, doc='The ending value.') + ... + + Class Foo would then have four parameters, with delta defaulting + to 0.6. + + Parameters have several advantages over plain attributes: + + 1. Parameters can be set automatically when an instance is + constructed: The default constructor for Foo (and Bar) will + accept arbitrary keyword arguments, each of which can be used + to specify the value of a Parameter of Foo (or any of Foo's + superclasses). E.g., if a script does this:: + + myfoo = Foo(alpha=0.5) + + myfoo.alpha will return 0.5, without the Foo constructor + needing special code to set alpha. + + If Foo implements its own constructor, keyword arguments will + still be accepted if the constructor accepts a dictionary of + keyword arguments (as in ``def __init__(self,**params):``), and + then each class calls its superclass (as in + ``super(Foo,self).__init__(**params)``) so that the + Parameterized constructor will process the keywords. + + 2. A Parameterized class need specify only the attributes of a + Parameter whose values differ from those declared in + superclasses; the other values will be inherited. E.g. 
if Foo + declares:: + + delta = Parameter(default=0.2) + + the default value of 0.2 will override the 0.6 inherited from + Bar, but the doc will be inherited from Bar. + + 3. The Parameter descriptor class can be subclassed to provide + more complex behavior, allowing special types of parameters + that, for example, require their values to be numbers in + certain ranges, generate their values dynamically from a random + distribution, or read their values from a file or other + external source. + + 4. The attributes associated with Parameters provide enough + information for automatically generating property sheets in + graphical user interfaces, allowing Parameterized instances to + be edited by users. + + Note that Parameters can only be used when set as class attributes + of Parameterized classes. Parameters used as standalone objects, + or as class attributes of non-Parameterized classes, will not have + the behavior described here. + """ + + # Be careful when referring to the 'name' of a Parameter: + # + # * A Parameterized class has a name for the attribute which is + # being represented by the Parameter in the code, + # this is called the 'attrib_name'. + # + # * When a Parameterized instance has its own local value for a + # parameter, it is stored as '_X_param_value' (where X is the + # attrib_name for the Parameter); in the code, this is called + # the internal_name. + + + # So that the extra features of Parameters do not require a lot of + # overhead, Parameters are implemented using __slots__ (see + # http://www.python.org/doc/2.4/ref/slots.html). + + __slots__ = ['default', 'doc', 'constant', 'readonly', 'allow_None', + 'per_instance_descriptor', 'deepcopy_default', 'class_member', 'precedence', + 'owner', 'name', '_internal_name', 'watchers', 'overloads', + '_disable_post_slot_set'] + + # Note: When initially created, a Parameter does not know which + # Parameterized class owns it. 
Once the owning Parameterized + # class is created, owner, name, and _internal_name are + # set. + + def __init__(self, default : typing.Any, *, doc : typing.Optional[str] = None, + constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, + fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: # pylint: disable-msg=R0913 + + """Initialize a new Parameter object and store the supplied attributes: + + default: the owning class's value for the attribute represented + by this Parameter, which can be overridden in an instance. + + doc: docstring explaining what this parameter represents. + + constant: if true, the Parameter value can be changed only at + the class level or in a Parameterized constructor call. The + value is otherwise constant on the Parameterized instance, + once it has been constructed. + + readonly: if true, the Parameter value cannot ordinarily be + changed by setting the attribute at the class or instance + levels at all. The value can still be changed in code by + temporarily overriding the value of this slot and then + restoring it, which is useful for reporting values that the + _user_ should never change but which do change during code + execution. + + allow_None: if True, None is accepted as a valid value for + this Parameter, in addition to any other values that are + allowed. If the default value is defined as None, allow_None + is set to True automatically. + + label: optional text label to be used when this Parameter is + shown in a listing. If no label is supplied, the attribute name + for this parameter in the owning Parameterized object is used. + + per_instance_descriptor: whether a separate Parameter instance will be + created for every Parameterized instance. 
True by default. + If False, all instances of a Parameterized class will share + the same Parameter object, including all validation + attributes (bounds, etc.). + + deepcopy_default: controls whether the value of this Parameter will + be deepcopied when a Parameterized object is instantiated (if + True), or if the single default value will be shared by all + Parameterized instances (if False). For an immutable Parameter + value, it is best to leave deepcopy_default at the default of + False, so that a user can choose to change the value at the + Parameterized instance level (affecting only that instance) or + at the Parameterized class or superclass level (affecting all + existing and future instances of that class or superclass). For + a mutable Parameter value, the default of False is also appropriate + if you want all instances to share the same value state, e.g. if + they are each simply referring to a single global object like + a singleton. If instead each Parameterized should have its own + independently mutable value, deepcopy_default should be set to + True, but note that there is then no simple way to change the + value of this Parameter at the class or superclass level, + because each instance, once created, will then have an + independently deepcopied value. + + class_member : To make a ... + + precedence: a numeric value, usually in the range 0.0 to 1.0, + which allows the order of Parameters in a class to be defined in + a listing or e.g. in GUI menus. A negative precedence indicates + a parameter that should be hidden in such listings. + + default, doc, and precedence all default to None, which allows + inheritance of Parameter slots (attributes) from the owning-class' + class hierarchy (see ParameterizedMetaclass). + + Note - parameter's own attributes are not type checked. if one sets + allow_None = 45 instead of allow_None = True, allow_None will be taken to be True. 
+ """ + self._disable_post_slot_set = False + # the above slot should better to stay at top of init for __setattr__ to work uniformly + self.default = default + self.doc = doc + self.constant = constant # readonly is also constant however constants can be set once + self.readonly = readonly + self.allow_None = constant or allow_None + self.per_instance_descriptor = per_instance_descriptor + self.deepcopy_default = deepcopy_default + self.class_member = class_member + self.precedence = precedence + self.watchers : typing.Dict[str, typing.List] = {} + self.overloads : typing.Dict[str, typing.Union[typing.Callable, None]] = dict(fget=fget, + fset=fset, fdel=fdel) + + def __set_name__(self, owner : typing.Any, attrib_name : str) -> None: + self._internal_name = f"_{attrib_name}_param_value" + self.name = attrib_name + self.owner = owner + # This particular order is generally important + + def __setattr__(self, attribute : str, value : typing.Any) -> None: + if attribute == 'name' and getattr(self, 'name', None) and value != self.name: + raise AttributeError("Parameter name cannot be modified after " + "it has been bound to a Parameterized.") + + watched = (attribute != "default" and hasattr(self, 'watchers') and attribute in self.watchers) + slot_attribute = attribute in self.__slots__ or attribute in self.__parent_slots__ # type: ignore + try: + old = getattr(self, attribute) if watched else NotImplemented + except AttributeError as exc: + if slot_attribute: + # If Parameter slot is defined but an AttributeError was raised + # we are in __setstate__ and watchers should not be triggered + old = NotImplemented + else: + raise # exc , dont raise exc as it will cause multiple tracebacks + + super(Parameter, self).__setattr__(attribute, value) + + if slot_attribute and hasattr(self, '_disable_post_slot_set') and not self._disable_post_slot_set: + self._post_slot_set(attribute, old, value) + + if old is NotImplemented or not isinstance(self.owner, Parameterized): + return + 
+ event_dispatcher = self.owner.parameters.event_dispatcher + event = Event(what=attribute, name=self.name, obj=None, cls=self.owner, + old=old, new=value, type=None) + for watcher in self.watchers[attribute]: + event_dispatcher.call_watcher(watcher, event) + if not event_dispatcher.state.BATCH_WATCH: + event_dispatcher.batch_call_watchers() + + def _post_slot_set(self, slot : str, old : typing.Any, value : typing.Any) -> None: + """ + Can be overridden on subclasses to handle changes when parameter + attribute is set. Be very careful of circular calls. + """ + # __parent_slots__ attribute is needed for entry into this function correctly otherwise + # slot_attribute in __setattr__ will have wrong boolean flag + if slot == 'owner' and self.owner is not None: + with disable_post_slot_set(self): + self.default = self.validate_and_adapt(self.default) + + def __get__(self, obj : typing.Union['Parameterized', typing.Any], + objtype : typing.Union['ParameterizedMetaclass', typing.Any]) -> typing.Any: # pylint: disable-msg=W0613 + """ + Return the value for this Parameter. + + If called for a Parameterized class, produce that + class's value (i.e. this Parameter object's 'default' + attribute). + + If called for a Parameterized instance, produce that + instance's value, if one has been set - otherwise produce the + class's value (default). + """ + if obj is None: + return self + fget = self.overloads['fget'] + if fget is not None: + return fget(obj) + return obj.__dict__.get(self._internal_name, self.default) + + @instance_descriptor + def __set__(self, obj : typing.Union['Parameterized', typing.Any], value : typing.Any) -> None: + """ + Set the value for this Parameter. + + If called for a Parameterized class, set that class's + value (i.e. set this Parameter object's 'default' attribute). + + If called for a Parameterized instance, set the value of + this Parameter on that instance (i.e. in the instance's + __dict__, under the parameter's internal_name). 
+ + If the Parameter's constant attribute is True, only allows + the value to be set for a Parameterized class or on + uninitialized Parameterized instances. + + If the Parameter's readonly attribute is True, only allows the + value to be specified in the Parameter declaration inside the + Parameterized source code. A read-only parameter also + cannot be set on a Parameterized class. + + Note that until we support some form of read-only + object, it is still possible to change the attributes of the + object stored in a constant or read-only Parameter (e.g. one + item in a list). + """ + if self.readonly: + raise_TypeError("Read-only parameter cannot be set/modified.", self) + + value = self.validate_and_adapt(value) + + obj = obj if not self.class_member else self.owner + + old = NotImplemented + if self.constant: + old = None + if (obj.__dict__.get(self._internal_name, NotImplemented) != NotImplemented) or self.default is not None: + # Dont even entertain any type of setting, even if its the same value + raise_TypeError("Constant parameter cannot be modified.", self) + else: + old = obj.__dict__.get(self._internal_name, self.default) + + # The following needs to be optimised, probably through lambda functions? + fset = self.overloads['fset'] + if fset is not None: + fset(obj, value) + else: + obj.__dict__[self._internal_name] = value + + self._post_value_set(obj, value) + + if not isinstance(obj, (Parameterized, ParameterizedMetaclass)): + """ + dont deal with events, watchers etc when object is not a Parameterized class child. + Many variables like obj.param below will also raise AttributeError. + This will enable generic use of Parameters without adherence to Parameterized subclassing. 
+ """ + return + + event_dispatcher = obj.parameters.event_dispatcher + event_dispatcher.update_dynamic_dependencies(self.name) + + if self.name in event_dispatcher.all_watchers: + watchers = event_dispatcher.all_watchers[self.name].get('value', None) + if watchers is None: + watchers = self.watchers.get('value', None) + if watchers is None: + return + + event = Event(what=parameter.VALUE, name=self.name, obj=obj, cls=self.owner, + old=old, new=value, type=None) + + # Copy watchers here since they may be modified inplace during iteration + for watcher in sorted(watchers, key=lambda w: w.precedence): + event_dispatcher.call_watcher(watcher, event) + if not event_dispatcher.state.BATCH_WATCH: + event_dispatcher.batch_call_watchers() + + def validate_and_adapt(self, value : typing.Any) -> typing.Any: + """ + modify the given value if a proper logical reasoning can be given. + returns modified value. Should not be mostly used unless the data stored is quite complex by structure. + """ + # raise NotImplementedError("overload this function in child class to validate your value and adapt it if necessary.") + return value + + def _post_value_set(self, obj : typing.Union['Parameterized', typing.Any], value : typing.Any) -> None: + """Called after the parameter value has been validated and set""" + + def __getstate__(self): + """ + All Parameters have slots, not a dict, so we have to support + pickle and deepcopy ourselves. 
+ """ + + state = {} + for slot in self.__slots__ + self.__parent_slots__: + state[slot] = getattr(self, slot) + state.pop('_disable_post_slot_set') + return state + + def __setstate__(self, state : typing.Dict[str, typing.Any]): + """ + set values of __slots__ (instead of in non-existent __dict__) + """ + # Handle renamed slots introduced for instance params + # if '_attrib_name' in state: + # state['name'] = state.pop('_attrib_name') + # if '_owner' in state: + # state['owner'] = state.pop('_owner') + # if 'watchers' not in state: + # state['watchers'] = {} + # if 'per_instance_descriptor' not in state: + # state['per_instance_descriptor'] = False + # if '_label' not in state: + # state['_label'] = None + with disable_post_slot_set(self): + for (k,v) in state.items(): + setattr(self,k,v) + + def getter(self, func : typing.Callable) -> typing.Callable: + self.overloads['fget'] = func + return func + + def setter(self, func : typing.Callable) -> typing.Callable: + self.overloads['fset'] = func + return func + + def deleter(self, func : typing.Callable) -> typing.Callable: + self.overloads['fdel'] = func + return func + + @classmethod + def serialize(cls, value : typing.Any) -> typing.Any: + "Given the parameter value, return a Python value suitable for serialization" + return value + + @classmethod + def deserialize(cls, value : typing.Any) -> typing.Any: + "Given a serializable Python value, return a value that the parameter can be set to" + return value + + def schema(self, safe : bool = False, subset : typing.Optional[typing.List] = None, + mode : str = 'json') -> typing.Dict[str, typing.Any]: + if mode not in serializers: + raise KeyError(f"Mode {mode} not in available serialization formats {list(serializers.keys())}") + return serializers[mode].param_schema(self.__class__.__name__, self, safe=safe, subset=subset) + + + +class disable_post_slot_set: + def __init__(self, parameter : 'Parameter'): + self.parameter = parameter + + def __enter__(self): + 
self.parameter._disable_post_slot_set = True + + def __exit__(self, exc_type, exc_value, traceback): + self.parameter._disable_post_slot_set = False + + +class parameter(Enum): + VALUE = 'value' + DOC = 'doc' + CONSTANT = 'constant' + READONLY = 'readonly' + ALLOW_NONE = 'allow_None' + PER_INSTANCE_DESCRIPTORS = 'per_instance_descriptor' + DEEPCOPY_DEFAULT = 'deepcopy_default' + CLASS_MEMBER = 'class_member' + PRECEDENCE = 'precedence' + OWNER = 'owner' + NAME = 'name' + WATCHERS = 'watchers' + OVERLOADS = 'overload' + # small letters creates clashes with name and value attribute + + +@dataclass +class Event: + """ + Object representing an event that triggers a Watcher. + what : What is being watched on the Parameter (either value or a slot name) + name : Name of the Parameter that was set or triggered + obj : Parameterized instance owning the watched Parameter, or None + cls : Parameterized class owning the watched Parameter + old : Previous value of the item being watched + new : New value of the item being watched + type : 'triggered' if this event was triggered explicitly, 'changed' if + the item was set and watching for 'onlychanged', 'set' if the item was set, + or None if type not yet known + """ + what : typing.Union[str, Enum] + name : str + obj : typing.Optional[typing.Union["Parameterized", "ParameterizedMetaclass"]] + cls : typing.Union["Parameterized", "ParameterizedMetaclass"] + old : typing.Any + new : typing.Any + type: typing.Optional[str] + + +@contextmanager +def edit_constant(obj : typing.Union['Parameterized', 'Parameter']): + """ + Temporarily set parameters on Parameterized object to constant=False + to allow editing them. 
+ """ + if isinstance(obj, Parameterized): + params = obj.parameters.descriptors.values() + constants = [p.constant for p in params] + for p in params: + p.constant = False + try: + yield + except: + raise + finally: + for (p, const) in zip(params, constants): + p.constant = const + elif isinstance(obj, Parameter): + constant = obj.constant + obj.constant = False + try: + yield + except: + raise + finally: + obj.constant = constant + else: + raise TypeError(f"argument to edit_constant must be a parameter or parameterized instance, given type : {type(obj)}") + + +@dataclass +class GeneralDependencyInfo: + """ + Dependency info attached to a method of a Parameterized subclass. + """ + dependencies : typing.Tuple[typing.Union[str, Parameter]] + queued : bool + on_init : bool + invoke : bool + + +@dataclass +class ParameterDependencyInfo: + """ + Object describing something being watched about a Parameter. + + inst: Parameterized instance owning the Parameter, or None + cls: Parameterized class owning the Parameter + name: Name of the Parameter being watched + pobj: Parameter object being watched + what: What is being watched on the Parameter (either 'value' or a slot name) + """ + inst : typing.Optional["Parameterized"] # optional while being unbound + cls : "ParameterizedMetaclass" + name : str + pobj : Parameter + what : typing.Union[str, Enum] + + +@dataclass +class DynamicDependencyInfo: + """ + Object describing dynamic dependencies. 
+ spec: Dependency specification to resolve + """ + notation : str + + +@dataclass +class SortedDependencies: + static : typing.List[ParameterDependencyInfo] = field(default_factory = list) + dynamic : typing.List[DynamicDependencyInfo] = field(default_factory = list) + + def __iadd__(self, other : "SortedDependencies") -> "SortedDependencies": + assert isinstance(other, SortedDependencies), wrap_error_text( + f"Can only add other ResolvedDepedency types to iteself, given type {type(other)}") + self.static += other.static + self.dynamic += other.dynamic + return self + + + +def depends_on(*parameters, invoke : bool = True, on_init : bool = True, queued : bool = False) -> typing.Callable: + """ + Annotates a function or Parameterized method to express its + dependencies. The specified dependencies can be either be + Parameter instances or if a method is supplied they can be + defined as strings referring to Parameters of the class, + or Parameters of subobjects (Parameterized objects that are + values of this object's parameters). Dependencies can either be + on Parameter values, or on other metadata about the Parameter. 
+ """ + def decorator(func): + if not isinstance(parameters, tuple): + deps = tuple(parameters) + else: + deps = parameters + for dep in deps: + if not isinstance(dep, (str, Parameter)): + raise ValueError(wrap_error_text( + f"""The depends_on decorator only accepts string types referencing a parameter or parameter + instances, found {type(dep).__name__} type instead.""")) + + _dinfo = GeneralDependencyInfo( + dependencies=deps, + queued=queued, + on_init=on_init, + invoke=invoke + ) + if hasattr(func, 'param_dependency_info') and not isinstance(func.param_dependency_info, GeneralDependencyInfo): + raise TypeError(f"attribute 'param_depency_info' reserved by param library, please use another name.") + func.param_dependency_info = _dinfo + return func + return decorator + + + +@dataclass +class Watcher: + """ + Object declaring a callback function to invoke when an Event is + triggered on a watched item. + + inst : Parameterized instance owning the watched Parameter, or + None + + cls: Parameterized class owning the watched Parameter + + fn : Callback function to invoke when triggered by a watched + Parameter + + mode: 'args' for param.watch (call fn with PInfo object + positional args), or 'kwargs' for param.watch_values (call fn + with : keywords) + + onlychanged: If True, only trigger for actual changes, not + setting to the current value + + parameter_names: List of Parameters to watch, by name + + what: What to watch on the Parameters (either 'value' or a slot + name) + + queued: Immediately invoke callbacks triggered during processing + of an Event (if False), or queue them up for processing + later, after this event has been handled (if True) + + precedence: A numeric value which determines the precedence of + the watcher. Lower precedence values are executed + with higher priority. 
+ """ + + inst : "Parameterized" + cls : "ParameterizedMetaclass" + fn : typing.Callable + mode : str + onlychanged : bool + parameter_names : typing.Tuple[str] + what : str + queued : bool + precedence : typing.Union[float, int] = field(default=0) + + +@contextmanager +def _batch_call_watchers(parameterized : typing.Union['Parameterized', 'ParameterizedMetaclass'], + queued : bool = True, run : bool = True): + """ + Internal version of batch_call_watchers, adding control over queueing and running. + Only actually batches events if enable=True; otherwise a no-op. Only actually + calls the accumulated watchers on exit if run=True; otherwise they remain queued. + """ + state = parameterized.parameters.event_dispatcher.state + BATCH_WATCH = state.BATCH_WATCH + state.BATCH_WATCH = queued or state.BATCH_WATCH + try: + yield + finally: + state.BATCH_WATCH = BATCH_WATCH + if run and not BATCH_WATCH: + parameterized.parameters.event_dispatcher.batch_call_watchers() + + +@contextmanager +def batch_call_watchers(parameterized : 'Parameterized'): + """ + Context manager to batch events to provide to Watchers on a + parameterized object. This context manager queues any events + triggered by setting a parameter on the supplied parameterized + object, saving them up to dispatch them all at once when the + context manager exits. + """ + state = parameterized.parameters.event_dispatcher.state + old_BATCH_WATCH = state.BATCH_WATCH + state.BATCH_WATCH = True + try: + yield + finally: + state.BATCH_WATCH = old_BATCH_WATCH + if not old_BATCH_WATCH: + parameterized.parameters.event_dispatcher.batch_call_watchers() + + +@contextmanager +def discard_events(parameterized : 'Parameterized'): + """ + Context manager that discards any events within its scope + triggered on the supplied parameterized object. 
+ """ + state = parameterized.parameters.event_dispatcher.state + old_watchers = state.watchers + old_events = state.events + state.watchers = [] + state.events = [] + try: + yield + except: + raise + finally: + state.watchers = old_watchers + state.events = old_events + + +def _skip_event(*events : Event, **kwargs) -> bool: + """ + Checks whether a subobject event should be skipped. + Returns True if all the values on the new subobject + match the values on the previous subobject. + """ + what = kwargs.get('what', 'value') + changed = kwargs.get('changed') + if changed is None: + return False + for e in events: + for p in changed: + if what == 'value': + old = NotImplemented if e.old is None else get_dot_resolved_attr(e.old, p, None) + new = NotImplemented if e.new is None else get_dot_resolved_attr(e.new, p, None) + else: + old = NotImplemented if e.old is None else get_dot_resolved_attr(e.old.parameters[p], what, None) + new = NotImplemented if e.new is None else get_dot_resolved_attr(e.new.parameters[p], what, None) + if not Comparator.is_equal(old, new): + return False + return True + + + +class Comparator(object): + """ + Comparator defines methods for determining whether two objects + should be considered equal. It works by registering custom + comparison functions, which may either be registed by type or with + a predicate function. If no matching comparison can be found for + the two objects the comparison will return False. + + If registered by type the Comparator will check whether both + objects are of that type and apply the comparison. If the equality + function is instead registered with a function it will call the + function with each object individually to check if the comparison + applies. This is useful for defining comparisons for objects + without explicitly importing them. + + To use the Comparator simply call the is_equal function. 
+ """ + + equalities = { + numbers.Number: operator.eq, + str: operator.eq, + bytes: operator.eq, + type(None): operator.eq, + type(NotImplemented) : operator.eq + } + equalities.update({ dtt : operator.eq for dtt in dt_types }) # type: ignore + + @classmethod + def is_equal(cls, obj1 : typing.Any, obj2 : typing.Any) -> bool: + for eq_type, eq in cls.equalities.items(): + if ((isinstance(eq_type, FunctionType) and eq_type(obj1) and eq_type(obj2)) + or (isinstance(obj1, eq_type) and isinstance(obj2, eq_type))): + return eq(obj1, obj2) + if isinstance(obj2, (list, set, tuple)): + return cls.compare_iterator(obj1, obj2) + elif isinstance(obj2, dict): + return cls.compare_mapping(obj1, obj2) + return False + + @classmethod + def compare_iterator(cls, obj1 : typing.Any, obj2 : typing.Any) -> bool: + if type(obj1) != type(obj2) or len(obj1) != len(obj2): return False + for o1, o2 in zip(obj1, obj2): + if not cls.is_equal(o1, o2): + return False + return True + + @classmethod + def compare_mapping(cls, obj1 : typing.Any, obj2 : typing.Any) -> bool: + if type(obj1) != type(obj2) or len(obj1) != len(obj2): return False + for k in obj1: + if k in obj2: + if not cls.is_equal(obj1[k], obj2[k]): + return False + else: + return False + return True + + + +@dataclass +class UnresolvedWatcherInfo: + method_name : str + invoke : bool + on_init : bool + static_dependencies : typing.List[ParameterDependencyInfo] + dynamic_dependencies : typing.List[DynamicDependencyInfo] + queued : bool = field(default = False) + + +class EventResolver: + + def __init__(self, owner_cls : 'ParameterizedMetaclass') -> None: + self.owner_cls = owner_cls + self._unresolved_watcher_info : typing.List[UnresolvedWatcherInfo] + + def create_unresolved_watcher_info(self, owner_class_members : dict): + # retrieve depends info from methods and store more conveniently + dependers : typing.List[typing.Tuple[str, typing.Callable, GeneralDependencyInfo]] = [ + (name, method, method.param_dependency_info) for (name, 
method) in owner_class_members.items() + if hasattr(method, 'param_dependency_info')] + + # Resolve dependencies of current class + _watch : typing.List[UnresolvedWatcherInfo] = [] + for name, method, dinfo in dependers: + if not dinfo.invoke: + continue + # No need MInfo + sorted_dependencies = self.method_depends_on(method, dynamic=False) + _watch.append(UnresolvedWatcherInfo( + method_name=name, + invoke=dinfo.invoke, + on_init=dinfo.on_init, + queued=dinfo.queued, + static_dependencies=sorted_dependencies.static, + dynamic_dependencies=sorted_dependencies.dynamic + )) + + # Resolve dependencies in class hierarchy + _inherited : typing.List[UnresolvedWatcherInfo] = [] + for mcs_super in classlist(self.owner_cls)[:-1][::-1]: + if isinstance(mcs_super, ParameterizedMetaclass): + for dep in mcs_super.parameters.event_resolver._unresolved_watcher_info: # type: ignore - why doesnt it work? + assert isinstance(dep, UnresolvedWatcherInfo), wrap_error_text( # dummy assertion to check types + f"""Parameters._unresolved_watcher_info only accept UnresolvedWatcherInfo type, given type {type(dep)}""") + method = getattr(mcs_super, dep.method_name, None) + if method is not None and hasattr(method, 'param_dependency_info'): + assert isinstance(method.param_dependency_info, GeneralDependencyInfo), wrap_error_text( + f"""attribute 'param_depency_info' reserved by param library, + please use another name for your attributes of type {type(method.param_dependency_info)}.""" + ) # dummy assertion to check types + dinfo : GeneralDependencyInfo = method.param_dependency_info + if (not any(dep.method_name == w.method_name for w in _watch+_inherited) and dinfo.invoke): + _inherited.append(dep) + + self._unresolved_watcher_info = _inherited + _watch + + + def method_depends_on(self, method : typing.Callable, dynamic : bool = True, intermediate : bool = True) -> SortedDependencies: + """ + Resolves dependencies declared on a method of Parameterized class. + Dynamic dependencies, i.e. 
dependencies on sub-objects which may + or may not yet be available, are only resolved if dynamic=True. + By default intermediate dependencies, i.e. dependencies on the + path to a sub-object are returned. For example for a dependency + on 'a.b.c' dependencies on 'a' and 'b' are returned as long as + intermediate=True. + + Returns lists of concrete dependencies on available parameters + and dynamic dependencies specifications which have to resolved + if the referenced sub-objects are defined. + """ + dependencies = SortedDependencies() + dinfo : GeneralDependencyInfo = method.param_dependency_info + for d in dinfo.dependencies: + _sorted_dependencies = self.convert_notation_to_dependency_info(d, dynamic, intermediate) + dependencies.dynamic += _sorted_dependencies.dynamic + for dep in _sorted_dependencies.static: + if isinstance(dep, ParameterDependencyInfo): + dependencies.static.append(dep) + else: + dependencies += self.method_depends_on(dep, dynamic, intermediate) + return dependencies + + + def convert_notation_to_dependency_info(self, depended_obj_notation : typing.Union[Parameter, str], + owner_inst : typing.Optional["Parameterized"] = None, dynamic : bool = True, + intermediate : bool = True) -> SortedDependencies: + """ + Resolves a dependency specification into lists of explicit + parameter dependencies and dynamic dependencies. + + Dynamic dependencies are specifications to be resolved when + the sub-object whose parameters are being depended on is + defined. + + During class creation set dynamic=False which means sub-object + dependencies are not resolved. At instance creation and + whenever a sub-object is set on an object this method will be + invoked to determine whether the dependency is available. + + For sub-object dependencies we also return dependencies for + every part of the path, e.g. 
for a dependency specification + like "a.b.c" we return dependencies for sub-object "a" and the + sub-sub-object "b" in addition to the dependency on the actual + parameter "c" on object "b". This is to ensure that if a + sub-object is swapped out we are notified and can update the + dynamic dependency to the new object. Even if a sub-object + dependency can only partially resolved, e.g. if object "a" + does not yet have a sub-object "b" we must watch for changes + to "b" on sub-object "a" in case such a subobject is put in "b". + """ + if isinstance(depended_obj_notation, Parameter): + if not intermediate: + inst = depended_obj_notation.owner if isinstance(depended_obj_notation.owner, Parameterized) else None + cls = depended_obj_notation.owner + if not isinstance(cls, ParameterizedMetaclass): + raise TypeError(wrap_error_text("""Currently dependencies of a parameter from another class except a subclass + of parameterized is not supported""")) + info = ParameterDependencyInfo(inst=inst, cls=cls, name=depended_obj_notation.name, + pobj=depended_obj_notation, what=parameter.VALUE) + return SortedDependencies(static=[info]) + return SortedDependencies() + + obj, attr, what = self.parse_notation(depended_obj_notation) + if obj is None: + src = owner_inst or self.owner_cls + elif not dynamic: + return SortedDependencies(dynamic=[DynamicDependencyInfo(notation=depended_obj_notation)]) + else: + src = get_dot_resolved_attr(owner_inst or self.owner_cls, obj[1::], NotImplemented) + if src == NotImplemented: + path = obj[1:].split('.') + static_deps = [] + # Attempt to partially resolve subobject path to ensure + # that if a subobject is later updated making the full + # subobject path available we have to be notified and + # set up watchers + if len(path) >= 1 and intermediate: + sub_src = None + subpath = path + while sub_src is None and subpath: + subpath = subpath[:-1] + sub_src = get_dot_resolved_attr(owner_inst or self.owner_cls, '.'.join(subpath), None) + if subpath: 
+ static_deps += self.convert_notation_to_dependency_info( + '.'.join(path[:len(subpath)+1]), owner_inst, dynamic, intermediate).static + return SortedDependencies( + static=static_deps, + dynamic=[] if intermediate else [DynamicDependencyInfo(notation=depended_obj_notation)] + ) + + cls = (src, None) if isinstance(src, type) else (type(src), src) + if attr == 'parameters': + assert isinstance(obj, str), wrap_error_text("""object preceding parameters access (i.e. .parameters) + in dependency resolution became None due to internal error.""") + sorted_dependencies = self.convert_notation_to_dependency_info(obj[1:], + dynamic, intermediate) + for p in src.parameters: + sorted_dependencies += src.parameters.event_resolver.convert_notation_to_dependency_info(p, + dynamic, intermediate) + return sorted_dependencies + elif attr in src.parameters: + info = ParameterDependencyInfo(inst=owner_inst, cls=src, name=attr, + pobj=src.parameters[attr], what=what) + if obj is None or not intermediate: + return SortedDependencies(static=[info]) + sorted_dependencies = self.convert_notation_to_dependency_info(obj[1:], dynamic, intermediate) + if not intermediate: + sorted_dependencies.static.append(info) + return sorted_dependencies + elif hasattr(src, attr): + attr_obj = getattr(src, attr) + if isinstance(attr_obj, Parameterized): + return SortedDependencies() + elif isinstance(attr_obj, FunctionType): + raise NotImplementedError(wrap_error_text( + f"""In this version of param, support for dependency on other callbacks is removed. + Please divide your methods with your own logic. 
+ """)) + else: + raise AttributeError(wrap_error_text( + f"""Attribute {attr!r} could not be resolved on {src} or resolved attribute not supported + for dependent events""")) + else: + raise AttributeError(f"Attribute {attr!r} could not be resolved on {src}.") + + + @classmethod + def parse_notation(cls, notation : str) -> typing.Tuple[typing.Union[str, None], str, str]: + """ + Parses param.depends specifications into three components: + + 1. The dotted path to the sub-object + 2. The attribute being depended on, i.e. either a parameter or method + 3. The parameter attribute being depended on + """ + assert notation.count(":") <= 1, "argument '{notation}' for depends has more than one colon" + notation = notation.strip() + m = re.match(r"(?P[^:]*):?(?P.*)", notation) + assert m is not None, f"could not parse object notation for finding dependecies {notation}" + what = m.group('what') + path = "."+m.group('path') + m = re.match(r"(?P.*)(\.)(?P.*)", path) + assert m is not None, f"could not parse object notation for finding dependecies {notation}" + obj = m.group('obj') + attr = m.group("attr") + return obj or None, attr, what or 'value' + + + def bind_static_dependencies(self, obj : "Parameterized", + static_dependencies : typing.List[ParameterDependencyInfo] = []) -> typing.List["ParameterDependencyInfo"]: + """ + Resolves constant and dynamic parameter dependencies previously + obtained using the method_depends_on function. Existing resolved + dependencies are updated with a supplied parameter instance while + dynamic dependencies are resolved if possible. 
+ """ + dependencies = [] + for dep in static_dependencies: + if not issubclass(type(obj), dep.cls): + dependencies.append(dep) + continue + dep.inst = obj if dep.inst is None else dep.inst + dep.pobj = dep.inst.parameters[dep.name] + dependencies.append(dep) + return dependencies + + + def attempt_conversion_from_dynamic_to_static_dep(self, obj : "Parameterized", + dynamic_dependencies : typing.List[DynamicDependencyInfo] = [], + intermediate : bool = True): + dependencies = [] + for dep in dynamic_dependencies: + subresolved = obj.parameters.event_resolver.convert_notation_to_dependency_info(dep.notation, + intermediate=intermediate).static + for subdep in subresolved: + if isinstance(subdep, ParameterDependencyInfo): + subdep.inst = obj if subdep.inst is None else subdep.inst + subdep.pobj = subdep.inst.parameters[subdep.name] + dependencies.append(subdep) + else: + dependencies += self.method_depends_on(subdep, intermediate=intermediate).static + return dependencies + + + def resolve_dynamic_dependencies(self, obj : 'Parameterized', dynamic_dep : DynamicDependencyInfo, + param_dep : ParameterDependencyInfo, attribute : str) -> typing.Tuple: + """ + If a subobject whose parameters are being depended on changes + we should only trigger events if the actual parameter values + of the new object differ from those on the old subobject, + therefore we accumulate parameters to compare on a subobject + change event. + + Additionally we need to make sure to notify the parent object + if a subobject changes so the dependencies can be + reinitialized so we return a callback which updates the + dependencies. 
+ """ + subobj = obj + subobjs : typing.List = [obj] + for subpath in dynamic_dep.notation.split('.')[:-1]: + subobj = getattr(subobj, subpath.split(':')[0], None) + subobjs.append(subobj) + + dep_obj = param_dep.inst or param_dep.cls + if dep_obj not in subobjs[:-1]: + return None, None, param_dep.what + + depth = subobjs.index(dep_obj) + callback = None + if depth > 0: + def callback(*events): + """ + If a subobject changes, we need to notify the main + object to update the dependencies. + """ + obj.parameters.event_dispatcher.update_dynamic_dependencies(attribute) + + p = '.'.join(dynamic_dep.notation.split(':')[0].split('.')[depth+1:]) + if p == 'param': + subparams = [sp for sp in list(subobjs[-1].parameters)] + else: + subparams = [p] + + if ':' in dynamic_dep.notation: + what = dynamic_dep.notation.split(':')[-1] + else: + what = param_dep.what + + return subparams, callback, what + + + +class EventDispatcherState: + + def __init__(self): + self._BATCH_WATCH : typing.Dict[int, bool] = {} # If true, Event and watcher objects are queued. 
+ self._TRIGGER : typing.Dict[int, bool] = {} + self._events : typing.Dict[int, typing.List[Event]] = {} # Queue of batched events + self._watchers : typing.Dict[int, typing.List[Watcher]] = {} # Queue of batched watchers + + @property + def BATCH_WATCH(self) -> bool: + return self._BATCH_WATCH[threading.get_ident()] + + @BATCH_WATCH.setter + def BATCH_WATCH(self, value : bool): + self._BATCH_WATCH[threading.get_ident()] = value + + @property + def TRIGGER(self): + return self._TRIGGER[threading.get_ident()] + + @TRIGGER.setter + def TRIGGER(self, value): + self._TRIGGER[threading.get_ident()] = value + + @property + def events(self) -> typing.List[Event]: + thread_id = threading.get_ident() + try: + return self._events[thread_id] + except KeyError: + self._events[thread_id] = [] + return self._events[thread_id] + + @events.setter + def events(self, value): + self._events[threading.get_ident()] = value + + @property + def watchers(self) -> typing.List[Watcher]: + return self._watchers[threading.get_ident()] + + @watchers.setter + def watchers(self, value): + self._watchers[threading.get_ident()] = value + + + +class EventDispatcher: + + # This entire class is supposed to be instantiated as a private variable, therefore we dont use underscores + # for variables within this class + + def __init__(self, owner_inst : typing.Union['Parameterized', 'ParameterizedMetaclass'], + event_resolver : EventResolver) -> None: + self.owner_inst = owner_inst + self.owner_class = event_resolver.owner_cls + self.event_resolver = event_resolver + self.all_watchers : typing.Dict[str, typing.Dict[str, typing.List[Watcher]]] = {} + self.dynamic_watchers : typing.Dict[str, typing.List[Watcher]] = defaultdict(list) + self.state : EventDispatcherState = EventDispatcherState() + + + def prepare_instance_dependencies(self): + init_methods = [] + for watcher_info in self.event_resolver._unresolved_watcher_info: + static = defaultdict(list) + for dep in 
self.event_resolver.bind_static_dependencies(self.owner_inst, watcher_info.static_dependencies): + static[(id(dep.inst), id(dep.cls), dep.what)].append((dep, None)) + for group in static.values(): + self.watch_group(self.owner_inst, watcher_info.method_name, watcher_info.invoke, group) + m = getattr(self.owner_inst, watcher_info.method_name) + if watcher_info.on_init and m not in init_methods: + init_methods.append(m) + + self.update_dynamic_dependencies() + for m in init_methods: + m() + + + def update_dynamic_dependencies(self, attribute : typing.Optional[str] = None) -> None: + for watcher_info in self.event_resolver._unresolved_watcher_info: + # On initialization set up constant watchers; otherwise + # clean up previous dynamic watchers for the updated attribute + dynamic = [d for d in watcher_info.dynamic_dependencies if attribute is None or + d.notation.split(".")[0] == attribute] + if len(dynamic) > 0: + for w in self.dynamic_watchers.pop(watcher_info.method_name, []): + (w.inst or w.cls).parameters.event_dispatcher.deregister_watcher(w) + # Resolve dynamic dependencies one-by-one to be able to trace their watchers + grouped = defaultdict(list) + for ddep in dynamic: + for dep in self.event_resolver.attempt_conversion_from_dynamic_to_static_dep(self.owner_inst, + dynamic_dependencies=[ddep]): + grouped[(id(dep.inst), id(dep.cls), dep.what)].append((dep, ddep)) + + for group in grouped.values(): + watcher = self.watch_group(self.owner_inst, watcher_info.method_name, watcher_info.invoke, + group, attribute) + self.dynamic_watchers[watcher_info.method_name].append(watcher) + + + def watch_group(self, obj : "Parameterized", name : str, queued : bool, group : typing.List[typing.Tuple], + attribute : typing.Optional[str] = None): + """ + Sets up a watcher for a group of dependencies. Ensures that + if the dependency was dynamically generated we check whether + a subobject change event actually causes a value change and + that we update the existing watchers, i.e. 
clean up watchers + on the old subobject and create watchers on the new subobject. + """ + some_param_dep, dynamic_dep = group[0] + dep_obj : typing.Union[ParameterizedMetaclass, Parameterized] = some_param_dep.inst or some_param_dep.cls + params = [] + for p in group: + if p.name not in params: + params.append(p.name) + + if dynamic_dep is None or len(dynamic_dep) == 0: + subparams, callback, what = None, None, some_param_dep.what + else: + subparams, callback, what = self.event_resolver.resolve_dynamic_dependencies( + obj, dynamic_dep, some_param_dep, attribute) + + executor = self.create_method_caller(obj, name, what, subparams, callback) + return dep_obj.parameters.event_dispatcher.watch( + executor, params, some_param_dep.what, queued=queued, precedence=-1) + + + def create_method_caller(self, bound_inst : typing.Union["ParameterizedMetaclass", "Parameterized"], + method_name : str, what : str = 'value', changed : typing.Optional[typing.List] = None, callback=None): + """ + Wraps a method call adding support for scheduling a callback + before it is executed and skipping events if a subobject has + changed but its values have not. 
+ """ + function = getattr(bound_inst, method_name) + if iscoroutinefunction(function): + async def caller(*events): # type: ignore + if callback: callback(*events) + if not _skip_event(*events, what=what, changed=changed): + await function() + else: + def caller(*events): + if callback: callback(*events) + if not _skip_event(*events, what=what, changed=changed): + return function() + caller._watcher_name = method_name + return caller + + + def watch(self, fn : typing.Callable, parameter_names : typing.Union[typing.List[str], str], what : str = 'value', + onlychanged : bool = True, queued : bool = False, precedence : float = -1): + parameter_names = tuple(parameter_names) if isinstance(parameter_names, list) else (parameter_names,) # type: ignore + watcher = Watcher(inst=self.owner_inst, cls=self.owner_class, fn=fn, mode='args', + onlychanged=onlychanged, parameter_names=parameter_names, # type: ignore + what=what, queued=queued, precedence=precedence) + self.register_watcher(watcher, what) + return watcher + + + def register_watcher(self, watcher : Watcher, what = 'value'): + parameter_names = watcher.parameter_names + for parameter_name in parameter_names: + # Execution should never reach here if parameter is not found. 
+ # this should be solved in resolution itself - TODO - make sure + # if parameter_name not in self_.cls.param: + # raise ValueError("{} parameter was not found in list of " + # "parameters of class {}".format(parameter_name, self_.cls.__name__)) + if self.owner_inst is not None and what == "value": + if parameter_name not in self.all_watchers: + self.all_watchers[parameter_name] = {} + if what not in self.all_watchers[parameter_name]: + self.all_watchers[parameter_name][what] = [] + self.all_watchers[parameter_name][what].append(watcher) + else: + watchers = self.owner_inst.parameters[parameter_name].watchers + if what not in watchers: + watchers[what] = [] + watchers[what].append(watcher) + + + def deregister_watcher(self, watcher : Watcher, what = 'value'): + parameter_names = watcher.parameter_names + for parameter_name in parameter_names: + if self.owner_inst is not None and what == "value": + if parameter_name not in self.all_watchers or what not in self.all_watchers[parameter_name]: + return + self.all_watchers[parameter_name][what].remove(watcher) + else: + watchers = self.owner_inst.parameters[parameter_name].watchers + if what not in watchers: + return + watchers[what].remove(watcher) + + + def call_watcher(self, watcher : Watcher, event : Event) -> None: + """ + Invoke the given watcher appropriately given an Event object. + """ + if watcher.onlychanged and not Comparator.is_equal(event.old, event.new): + return + + if self.state.BATCH_WATCH: + self.state.events.append(event) + if not any(watcher is w for w in self.state.watchers): + self.state.watchers.append(watcher) + else: + with _batch_call_watchers(self.owner_inst or self.owner_class, + queued=watcher.queued, run=False): + self.execute_watcher(watcher, (event,)) + + + def batch_call_watchers(self): + """ + Batch call a set of watchers based on the parameter value + settings in kwargs using the queued Event and watcher objects. 
+ """ + watchers = self.state.watchers + events = self.state.events + while len(events) > 0: + event = events.pop(0) + for watcher in sorted(watchers, key=lambda w: w.precedence): + with _batch_call_watchers(self.owner_inst or self.owner_class, + queued=watcher.queued, run=False): + self.execute_watcher(watcher, (event,)) + events.clear() + watchers.clear() + + + def execute_watcher(self, watcher : Watcher, events : typing.Tuple[Event]): + if watcher.mode == 'args': + args, kwargs = events, {} + else: + args, kwargs = (), {event.name: event.new for event in events} + + if iscoroutinefunction(watcher.fn): + if async_executor is None: + raise RuntimeError(wrap_error_text(f"""Could not execute {watcher.fn} coroutine function. Please + register a asynchronous executor on param.parameterized.async_executor, which + schedules the function on an event loop.""")) + async_executor(partial(watcher.fn, *args, **kwargs)) + else: + watcher.fn(*args, **kwargs) + + + def trigger(self, *parameters : str) -> None: + """ + Trigger watchers for the given set of parameter names. Watchers + will be triggered whether or not the parameter values have + actually changed. As a special case, the value will actually be + changed for a Parameter of type Event, setting it to True so + that it is clear which Event parameter has been triggered. 
+ """ + trigger_params = [p for p in self_.self_or_cls.param + if hasattr(self_.self_or_cls.param[p], '_autotrigger_value')] + triggers = {p:self_.self_or_cls.param[p]._autotrigger_value + for p in trigger_params if p in param_names} + + events = self_.self_or_cls.param._events + watchers = self_.self_or_cls.param._watchers + self_.self_or_cls.param._events = [] + self_.self_or_cls.param._watchers = [] + param_values = self_.values() + params = {name: param_values[name] for name in param_names} + self.self_or_cls.param._TRIGGER = True + self.update(dict(params, **triggers)) + self.self_or_cls.param._TRIGGER = False + self.self_or_cls.param._events += events + self.self_or_cls.param._watchers += watchers + + + +class ClassParameters(object): + """ + Object that holds the namespace and implementation of Parameterized + methods as well as any state that is not in __slots__ or the + Parameters themselves. + + Exists at metaclass level (instantiated by the metaclass) + and at the instance level. Contains state specific to the + class. + """ + + def __init__(self, owner_cls : 'ParameterizedMetaclass', owner_class_members : typing.Optional[dict] = None) -> None: + """ + cls is the Parameterized class which is always set. + self is the instance if set. 
+ """ + self.owner_cls = owner_cls + self.owner_inst = None + if owner_class_members is not None: + self.event_resolver = EventResolver(owner_cls=owner_cls) + self.event_dispatcher = EventDispatcher(owner_cls, self.event_resolver) + self.event_resolver.create_unresolved_watcher_info(owner_class_members) + + def __getitem__(self, key : str) -> 'Parameter': + """ + Returns the class or instance parameter like a dictionary dict[key] syntax lookup + """ + # code change comment - + # metaclass instance has a param attribute remember, no need to repeat logic of self_.self_or_cls + # as we create only one instance of Parameters object + return self.descriptors[key] # if self.owner_inst is None else self.owner_inst.param.objects(False) + + def __dir__(self) -> typing.List[str]: + """ + Adds parameters to dir + """ + return super().__dir__() + self.descriptors().keys() # type: ignore + + def __iter__(self): + """ + Iterates over the parameters on this object. + """ + yield from self.descriptors + + def __contains__(self, param : 'Parameter') -> bool: + return param in list(self) + + @property + def owner(self): + return self.owner_inst if self.owner_inst is not None else self.owner_cls + + @property + def descriptors(self) -> typing.Dict[str, 'Parameter']: + try: + paramdict = getattr(self.owner_cls, '__%s_params__' % self.owner_cls.__name__) + except AttributeError: + paramdict = {} + for class_ in classlist(self.owner_cls): + if class_ == object or class_ == type: + continue + for name, val in class_.__dict__.items(): + if isinstance(val, Parameter): + paramdict[name] = val + # We only want the cache to be visible to the cls on which + # params() is called, so we mangle the name ourselves at + # runtime (if we were to mangle it now, it would be + # _Parameterized.__params for all classes). 
+ # print(self.owner_cls, '__%s_params__' % self.owner_cls.__name__, paramdict) + setattr(self.owner_cls, '__%s_params__' % self.owner_cls.__name__, paramdict) + return paramdict + + @property + def names(self) -> typing.Iterable[str]: + return self.descriptors.keys() + + @property + def defaults(self): + """Print the default values of all cls's Parameters.""" + defaults = {} + for key, val in self.descriptors.items(): + defaults[key] = val.default + return defaults + + @property + def values(self, onlychanged : bool = False): + """ + Return a dictionary of name,value pairs for the Parameters of this + object. + + When called on an instance with onlychanged set to True, will + only return values that are not equal to the default value + (onlychanged has no effect when called on a class). + """ + self_or_cls = self_.self_or_cls + vals = [] + for name, val in self_or_cls.param.objects('existing').items(): + value = self_or_cls.param.get_value_generator(name) + if not onlychanged or not all_equal(value, val.default): + vals.append((name, value)) + + vals.sort(key=itemgetter(0)) + return dict(vals) + + def serialize(self, subset : typing.Optional[typing.List[str]] = None, + mode : typing.Optional[str] = 'json') -> typing.Dict[str, str]: + if mode not in serializers: + raise ValueError(f'Mode {mode} not in available serialization formats {serializers.keys()}') + serializer = serializers[mode] + return serializer.serialize_parameters(self.owner, subset=subset) + + def serialize_value(self, parameter_name : str, mode : typing.Optional[str] = 'json') -> str: + if mode not in serializers: + raise ValueError(f'Mode {mode} not in available serialization formats {serializers.keys()}') + serializer = serializers[mode] + return serializer.serialize_parameter_value(self.owner, parameter_name) + + def deserialize(self, serialization : str, subset : typing.Optional[typing.List[str]] = None, + mode : typing.Optional[str] = 'json') -> typing.Dict[str, typing.Any]: + if mode not in 
serializers: + raise ValueError(f'Mode {mode} not in available serialization formats {serializers.keys()}') + serializer = serializers[mode] + return serializer.deserialize_parameters(self.owner, serialization, subset=subset) + + def deserialize_value(self, parameter_name : str, value : str, mode : str = 'json'): + if mode not in serializers: + raise ValueError(f'Mode {mode} not in available serialization formats {serializers.keys()}') + serializer = serializers[mode] + return serializer.deserialize_parameter_value(self.owner, parameter_name, value) + + def schema(self, safe : bool = False, subset : typing.Optional[typing.List[str]] = None, + mode : typing.Optional[str] = 'json') -> typing.Dict[str, typing.Any]: + """ + Returns a schema for the parameters on this Parameterized object. + """ + if mode not in serializers: + raise ValueError(f'Mode {mode} not in available serialization formats {serializers.keys()}') + serializer = serializers[mode] + return serializer.schema(self.owner, safe=safe, subset=subset) + + + +class InstanceParameters(ClassParameters): + + def __init__(self, owner_cls : 'ParameterizedMetaclass', owner_inst : 'Parameterized') -> None: + super().__init__(owner_cls=owner_cls, owner_class_members=None) + self.owner_inst = owner_inst + self._instance_params = {} + self.event_resolver = self.owner_cls.parameters.event_resolver + self.event_dispatcher = EventDispatcher(owner_inst, self.event_resolver) + self.event_dispatcher.prepare_instance_dependencies() + + + def _setup_parameters(self, **parameters): + """ + Initialize default and keyword parameter values. + + First, ensures that all Parameters with 'deepcopy_default=True' + (typically used for mutable Parameters) are copied directly + into each object, to ensure that there is an independent copy + (to avoid surprising aliasing errors). Then sets each of the + keyword arguments, warning when any of them are not defined as + parameters. 
+ + Constant Parameters can be set during calls to this method. + """ + ## Deepcopy all 'deepcopy_default=True' parameters + # (building a set of names first to avoid redundantly + # instantiating a later-overridden parent class's parameter) + param_default_values_to_deepcopy = {} + param_descriptors_to_deepcopy = {} + for (k, v) in self.owner_cls.parameters.descriptors.items(): + if v.deepcopy_default and k != "name": + # (avoid replacing name with the default of None) + param_default_values_to_deepcopy[k] = v + if v.per_instance_descriptor and k != "name": + param_descriptors_to_deepcopy[k] = v + + for p in param_default_values_to_deepcopy.values(): + self._deep_copy_param_default(p) + for p in param_descriptors_to_deepcopy.values(): + self._deep_copy_param_descriptor(p) + + ## keyword arg setting + if len(parameters) > 0: + descs = self.descriptors + for name, val in parameters.items(): + desc = descs.get(name, None) # pylint: disable-msg=E1101 + if desc: + setattr(self.owner_inst, name, val) + # Its erroneous to set a non-descriptor (& non-param-descriptor) with a value from init. 
+ # we dont know what that value even means, so we silently ignore + + + def _deep_copy_param_default(self, param_obj : 'Parameter') -> None: + # deepcopy param_obj.default into self.__dict__ (or dict_ if supplied) + # under the parameter's _internal_name (or key if supplied) + _old = self.owner_inst.__dict__.get(param_obj._internal_name, NotImplemented) + _old = _old if _old is not NotImplemented else param_obj.default + new_object = copy.deepcopy(_old) + # remember : simply setting in the dict does not activate post setter and remaining logic which is sometimes important + self.owner_inst.__dict__[param_obj._internal_name] = new_object + + + def _deep_copy_param_descriptor(self, param_obj : Parameter): + param_obj_copy = copy.deepcopy(param_obj) + self._instance_params[param_obj.name] = param_obj_copy + + + def add_parameter(self, param_name: str, param_obj: Parameter) -> None: + setattr(self.owner_inst, param_name, param_obj) + if param_obj.deepcopy_default: + self._deep_copy_param_default(param_obj) + try: + delattr(self.owner_cls, '__%s_params__'%self.owner_cls.__name__) + except AttributeError: + pass + + + @property + def descriptors(self) -> typing.Dict[str, 'Parameter']: + """ + Returns the Parameters of this instance or class + + If instance=True and called on a Parameterized instance it + will create instance parameters for all Parameters defined on + the class. To force class parameters to be returned use + instance=False. Since classes avoid creating instance + parameters unless necessary you may also request only existing + instance parameters to be returned by setting + instance='existing'. + """ + # We cache the parameters because this method is called often, + # and parameters are rarely added (and cannot be deleted) + return dict(super().descriptors, **self._instance_params) + + + +class ParameterizedMetaclass(type): + """ + The metaclass of Parameterized (and all its descendents). 
+ + The metaclass overrides type.__setattr__ to allow us to set + Parameter values on classes without overwriting the attribute + descriptor. That is, for a Parameterized class of type X with a + Parameter y, the user can type X.y=3, which sets the default value + of Parameter y to be 3, rather than overwriting y with the + constant value 3 (and thereby losing all other info about that + Parameter, such as the doc string, bounds, etc.). + + The __init__ method is used when defining a Parameterized class, + usually when the module where that class is located is imported + for the first time. That is, the __init__ in this metaclass + initializes the *class* object, while the __init__ method defined + in each Parameterized class is called for each new instance of + that class. + + Additionally, a class can declare itself abstract by having an + attribute __abstract set to True. The 'abstract' attribute can be + used to find out if a class is abstract or not. + """ + def __init__(mcs, name : str, bases : typing.Tuple, dict_ : dict) -> None: + """ + Initialize the class object (not an instance of the class, but + the class itself). + """ + type.__init__(mcs, name, bases, dict_) + mcs._create_param_container(dict_) + mcs._update_docstring_signature(dict_.get('parameterized_docstring_signature', False)) + + def _create_param_container(mcs, mcs_members : dict): + mcs._param_container = ClassParameters(mcs, mcs_members) # value return when accessing cls/self.param + + @property + def parameters(mcs) -> ClassParameters: + return mcs._param_container + + def _update_docstring_signature(mcs, do : bool = True) -> None: + """ + Autogenerate a keyword signature in the class docstring for + all available parameters. This is particularly useful in the + IPython Notebook as IPython will parse this signature to allow + tab-completion of keywords. + + max_repr_len: Maximum length (in characters) of value reprs. 
+ """ + if do: + processed_kws, keyword_groups = set(), [] + for cls in reversed(mcs.mro()): + keyword_group = [] + for (k, v) in sorted(cls.__dict__.items()): + if isinstance(v, Parameter) and k not in processed_kws: + param_type = v.__class__.__name__ + keyword_group.append("%s=%s" % (k, param_type)) + processed_kws.add(k) + keyword_groups.append(keyword_group) + + keywords = [el for grp in reversed(keyword_groups) for el in grp] + class_docstr = "\n" + mcs.__doc__ if mcs.__doc__ else '' + signature = "params(%s)" % (", ".join(keywords)) + description = param_pager(mcs) if param_pager else '' + mcs.__doc__ = signature + class_docstr + '\n' + description # type: ignore + + def __setattr__(mcs, attribute_name : str, value : typing.Any) -> None: + """ + Implements 'self.attribute_name=value' in a way that also supports Parameters. + + If there is already a descriptor named attribute_name, and + that descriptor is a Parameter, and the new value is *not* a + Parameter, then call that Parameter's __set__ method with the + specified value. + + In all other cases set the attribute normally (i.e. overwrite + the descriptor). If the new value is a Parameter, once it has + been set we make sure that the value is inherited from + Parameterized superclasses as described in __param_inheritance(). + """ + # Find out if there's a Parameter called attribute_name as a + # class attribute of this class - if not, parameter is None. + if attribute_name != '_param_container' and attribute_name != '__%s_params__' % mcs.__name__: + parameter = mcs.parameters.descriptors.get(attribute_name, None) + # checking isinstance(value, Parameter) will not work for ClassSelector + # and besides value is anyway validated. 
On the downside, this does not allow + # altering of parameter instances if class already of the parameter with attribute_name + if parameter: # and not isinstance(value, Parameter): + # if owning_class != mcs: + # parameter = copy.copy(parameter) + # parameter.owner = mcs + # type.__setattr__(mcs, attribute_name, parameter) + mcs.__dict__[attribute_name].__set__(mcs, value) + return + # set with None should not supported as with mcs it supports + # class attributes which can be validated + type.__setattr__(mcs, attribute_name, value) + + + +class Parameterized(metaclass=ParameterizedMetaclass): + """ + Base class for named objects that support Parameters and message + formatting. + + Automatic object naming: Every Parameterized instance has a name + parameter. If the user doesn't designate a name= argument + when constructing the object, the object will be given a name + consisting of its class name followed by a unique 5-digit number. + + Automatic parameter setting: The Parameterized __init__ method + will automatically read the list of keyword parameters. If any + keyword matches the name of a Parameter (see Parameter class) + defined in the object's class or any of its superclasses, that + parameter in the instance will get the value given as a keyword + argument. For example: + + class Foo(Parameterized): + xx = Parameter(default=1) + + foo = Foo(xx=20) + + in this case foo.xx gets the value 20. + + When initializing a Parameterized instance ('foo' in the example + above), the values of parameters can be supplied as keyword + arguments to the constructor (using parametername=parametervalue); + these values will override the class default values for this one + instance. + + If no 'name' parameter is supplied, self.name defaults to the + object's class name with a unique number appended to it. + + Message formatting: Each Parameterized instance has several + methods for optionally printing output. 
This functionality is + based on the standard Python 'logging' module; using the methods + provided here, wraps calls to the 'logging' module's root logger + and prepends each message with information about the instance + from which the call was made. For more information on how to set + the global logging level and change the default message prefix, + see documentation for the 'logging' module. + """ + def __init__(self, **params): + self.create_param_containers(**params) + + def create_param_containers(self, **params): + self._param_container = InstanceParameters(self.__class__, self) + self._param_container._setup_parameters(**params) + + @property + def parameters(self) -> InstanceParameters: + return self._param_container + + # 'Special' methods + def __getstate__(self): + """ + Save the object's state: return a dictionary that is a shallow + copy of the object's __dict__ and that also includes the + object's __slots__ (if it has any). + """ + state = self.__dict__.copy() + for slot in get_occupied_slots(self): + state[slot] = getattr(self, slot) + # Note that Parameterized object pickling assumes that + # attributes to be saved are only in __dict__ or __slots__ + # (the standard Python places to store attributes, so that's a + # reasonable assumption). (Additionally, class attributes that + # are Parameters are also handled, even when they haven't been + # instantiated - see PickleableClassAttributes.) + return state + + def __setstate__(self, state): + """ + Restore objects from the state dictionary to this object. + + During this process the object is considered uninitialized. 
+ """ + # When making a copy the internal watchers have to be + # recreated and point to the new instance + if '_param_watchers' in state: + param_watchers = state['_param_watchers'] + for p, attrs in param_watchers.items(): + for attr, watchers in attrs.items(): + new_watchers = [] + for watcher in watchers: + watcher_args = list(watcher) + if watcher.inst is not None: + watcher_args[0] = self + fn = watcher.fn + if hasattr(fn, '_watcher_name'): + watcher_args[2] = _m_caller(self, fn._watcher_name) + elif get_method_owner(fn) is watcher.inst: + watcher_args[2] = getattr(self, fn.__name__) + new_watchers.append(Watcher(*watcher_args)) + param_watchers[p][attr] = new_watchers + + if '_instance__params' not in state: + state['_instance__params'] = {} + if '_param_watchers' not in state: + state['_param_watchers'] = {} + state.pop('param', None) + + for name,value in state.items(): + setattr(self,name,value) + self.initialized=True + + + + +# As of Python 2.6+, a fn's **args no longer has to be a +# dictionary. This might allow us to use a decorator to simplify using +# ParamOverrides (if that does indeed make them simpler to use). +# http://docs.python.org/whatsnew/2.6.html +class ParamOverrides(dict): + """ + A dictionary that returns the attribute of a specified object if + that attribute is not present in itself. + + Used to override the parameters of an object. + """ + + # NOTE: Attribute names of this object block parameters of the + # same name, so all attributes of this object should have names + # starting with an underscore (_). + + def __init__(self, overridden : Parameterized, dict_ : typing.Dict[str, typing.Any], + allow_extra_keywords : bool = False) -> None: + """ + If allow_extra_keywords is False, then all keys in the + supplied dict_ must match parameter names on the overridden + object (otherwise a warning will be printed). 
+ + If allow_extra_keywords is True, then any items in the + supplied dict_ that are not also parameters of the overridden + object will be available via the extra_keywords() method. + """ + # This method should be fast because it's going to be + # called a lot. This _might_ be faster (not tested): + # def __init__(self,overridden,**kw): + # ... + # dict.__init__(self,**kw) + self._overridden = overridden + dict.__init__(self, dict_) + if allow_extra_keywords: + self._extra_keywords = self._extract_extra_keywords(dict_) + else: + self._check_params(dict_) + + def extra_keywords(self): + """ + Return a dictionary containing items from the originally + supplied dict_ whose names are not parameters of the + overridden object. + """ + return self._extra_keywords + + def param_keywords(self): + """ + Return a dictionary containing items from the originally + supplied dict_ whose names are parameters of the + overridden object (i.e. not extra keywords/parameters). + """ + return dict((key, self[key]) for key in self if key not in self.extra_keywords()) + + def __missing__(self,name): + # Return 'name' from the overridden object + return getattr(self._overridden, name) + + def __repr__(self): + # As dict.__repr__, but indicate the overridden object + return dict.__repr__(self) + " overriding params from %s"%repr(self._overridden) + + def __getattr__(self,name): + # Provide 'dot' access to entries in the dictionary. + # (This __getattr__ method is called only if 'name' isn't an + # attribute of self.) + return self.__getitem__(name) + + def __setattr__(self,name,val): + # Attributes whose name starts with _ are set on self (as + # normal), but all other attributes are inserted into the + # dictionary. 
+ if not name.startswith('_'): + self.__setitem__(name, val) + else: + dict.__setattr__(self, name, val) + + def get(self, key, default = None): + try: + return self[key] + except KeyError: + return default + + def __contains__(self, key): + return key in self.__dict__ or key in self._overridden.parameters + + def _check_params(self,params): + """ + Print a warning if params contains something that is not a + Parameter of the overridden object. + """ + overridden_object_params = list(self._overridden.parameters) + for item in params: + if item not in overridden_object_params: + self.param.warning("'%s' will be ignored (not a Parameter).",item) + + def _extract_extra_keywords(self,params): + """ + Return any items in params that are not also + parameters of the overridden object. + """ + extra_keywords = {} + overridden_object_params = list(self._overridden.parameters) + for name, val in params.items(): + if name not in overridden_object_params: + extra_keywords[name]=val + # Could remove name from params (i.e. del params[name]) + # so that it's only available via extra_keywords() + return extra_keywords + + +# Helper function required by ParameterizedFunction.__reduce__ +def _new_parameterized(cls): + return Parameterized.__new__(cls) + + +class ParameterizedFunction(Parameterized): + """ + Acts like a Python function, but with arguments that are Parameters. + + Implemented as a subclass of Parameterized that, when instantiated, + automatically invokes __call__ and returns the result, instead of + returning an instance of the class. + + To obtain an instance of this class, call instance(). 
+ """ + def __str__(self): + return self.__class__.__name__ + "()" + + def __call__(self, *args, **kw): + raise NotImplementedError("Subclasses must implement __call__.") + + def __reduce__(self): + # Control reconstruction (during unpickling and copying): + # ensure that ParameterizedFunction.__new__ is skipped + state = ParameterizedFunction.__getstate__(self) + # Here it's necessary to use a function defined at the + # module level rather than Parameterized.__new__ directly + # because otherwise pickle will find .__new__'s module to be + # __main__. Pretty obscure aspect of pickle.py... + return (_new_parameterized, (self.__class__,), state) + + def __new__(cls, *args, **params): + # Create and __call__() an instance of this class. + inst = super().__new__(cls) + return inst.__call__(*args, **params) + + + +def descendents(class_ : type) -> typing.List[type]: + """ + Return a list of the class hierarchy below (and including) the given class. + + The list is ordered from least- to most-specific. Can be useful for + printing the contents of an entire class hierarchy. + """ + assert isinstance(class_,type) + q = [class_] + out = [] + while len(q): + x = q.pop(0) + out.insert(0,x) + for b in x.__subclasses__(): + if b not in q and b not in out: + q.append(b) + return out[::-1] + + +def param_union(*parameterizeds : Parameterized, warn_duplicate : bool = False): + """ + Given a set of Parameterized objects, returns a dictionary + with the union of all param name, value pairs across them. 
+ If warn is True (default), prints a warning if the same parameter has + been given multiple values; otherwise uses the last value + """ + d = dict() + for o in parameterizeds: + for k in o.parameters: + if k != 'name': + if k in d and warn_duplicate: + print(f"overwriting parameter {k}") + d[k] = getattr(o, k) + return d + + +def parameterized_class(name, params, bases = Parameterized): + """ + Dynamically create a parameterized class with the given name and the + supplied parameters, inheriting from the specified base(s). + """ + if not (isinstance(bases, list) or isinstance(bases, tuple)): + bases=[bases] + return type(name, tuple(bases), params) + diff --git a/hololinked/param/parameters.py b/hololinked/param/parameters.py new file mode 100644 index 0000000..aa5005a --- /dev/null +++ b/hololinked/param/parameters.py @@ -0,0 +1,2101 @@ +import glob +import re +import os.path +import datetime as dt +import typing +import numbers +import sys +import collections.abc +from collections import OrderedDict + +from .utils import * +from .exceptions import * +from .parameterized import ParamOverrides, Parameterized, ParameterizedFunction, descendents, dt_types, Parameter + + + +class Infinity(object): + """ + An instance of this class represents an infinite value. Unlike + Python's float('inf') value, this object can be safely compared + with gmpy numeric types across different gmpy versions. + + All operators on Infinity() return Infinity(), apart from the + comparison and equality operators. Equality works by checking + whether the two objects are both instances of this class. 
+ """ + def __eq__ (self, other): return isinstance(other,self.__class__) + def __ne__ (self, other): return not self == other + def __lt__ (self, other): return False + def __le__ (self, other): return False + def __gt__ (self, other): return True + def __ge__ (self, other): return True + def __add__ (self, other): return self + def __radd__(self, other): return self + def __ladd__(self, other): return self + def __sub__ (self, other): return self + def __iadd_ (self, other): return self + def __isub__(self, other): return self + def __repr__(self): return "Infinity()" + def __str__ (self): return repr(self) + + + +class String(Parameter): + """ + A string parameter with a default value and optional regular expression (regex) matching. + + Example of using a regex to implement IPv4 address matching:: + + class IPAddress(String): + '''IPv4 address as a string (dotted decimal notation)''' + def __init__(self, default="0.0.0.0", allow_None=False, **kwargs): + ip_regex = r'^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$' + super(IPAddress, self).__init__(default=default, regex=ip_regex, **kwargs) + """ + + __slots__ = ['regex'] + + def __init__(self, default : typing.Optional[str] = "", *, regex : typing.Optional[str] = None, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, + fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, + allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, + deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, + precedence=precedence) + self.regex = regex 
+ + def validate_and_adapt(self, value : typing.Any) -> str: + self._assert(value, self.regex, self.allow_None) + return value + + @classmethod + def _assert(obj, value : typing.Any, regex : typing.Optional[str] = None, allow_None : bool = False) -> None: + """ + the method that implements the validator + """ + if value is None: + if allow_None: + return + else: + raise_ValueError(f"None not allowed for string type", obj) + if not isinstance(value, str): + raise_TypeError("given value is not string type, but {}.".format(type(value)), obj) + if regex is not None: + match = re.match(regex, value) + if match is None or match.group(0) != value: + # match should be original string, not some substring + raise_ValueError("given string value {} does not match regex {}.".format(value, regex), obj) + + @classmethod + def isinstance(cls, value : typing.Any, regex : typing.Optional[str] = None, allow_None : bool = False) -> bool: + """ + verify if given value is a string confirming to regex. + + Args: + value (Any): input value + regex (str, None): regex required to match, leave None if unnecessary + allow_None (bool): set True if None is tolerated + + Returns: + bool: True if conformant, else False. Any exceptions due to wrong inputs resulting in TypeError and ValueError + also lead to False + """ + try: + cls._assert(value, regex, allow_None) + return True + except (TypeError, ValueError): + return False + + + +class Bytes(String): + """ + A bytes parameter with a default value and optional regular + expression (regex) matching. + + Similar to the string parameter, but instead of type basestring + this parameter only allows objects of type bytes (e.g. b'bytes'). + """ + + @classmethod + def _assert(obj, value : typing.Any, regex : typing.Optional[bytes] = None, allow_None : bool = False) -> None: + """ + verify if given value is a bytes confirming to regex. 
+ + Args: + value (Any): input value + regex (str, None): regex required to match, leave None if unnecessary + allow_None (bool): set True if None is tolerated + + Raises: + TypeError: if given type is not bytes + ValueError: if regex does not match + """ + if value is None: + if allow_None: + return + else: + raise_ValueError(f"None not allowed for string type", obj) + if not isinstance(value, bytes): + raise_TypeError("given value is not bytes type, but {}.".format(type(value)), obj) + if regex is not None: + match = re.match(regex, value) + if match is None or match.group(0) != value: + # match should be original string, not some substring + raise_ValueError("given bytes value {} does not match regex {}.".format(value, regex), obj) + + + +class IPAddress(Parameter): + + __slots__ = ['allow_localhost', 'allow_ipv4', 'allow_ipv6'] + + def __init__(self, default : typing.Optional[str] = "0.0.0.0", *, allow_ipv4 : bool = True, allow_ipv6 : bool = True, + allow_localhost : bool = True, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, + allow_None : bool = False, per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, + allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, + deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, + precedence=precedence) + self.allow_localhost = allow_localhost + self.allow_ipv4 = allow_ipv4 + self.allow_ipv6 = allow_ipv6 + + def validate_and_adapt(self, value: typing.Any) -> str: + self._assert(value, self.allow_ipv4, self.allow_ipv6, self.allow_localhost, self.allow_None) + return value + + @classmethod + def 
_assert(obj, value : typing.Any, allow_ipv4 : bool = True, allow_ipv6 : bool = True, + allow_localhost : bool = True, allow_None : bool = False) -> None: + if value is None and allow_None: + return + if not isinstance(value, str): + raise_TypeError('given value for IP address not a string, but type {}'.format(type(value)), obj) + if allow_localhost and value == 'localhost': + return + if not ((allow_ipv4 and (obj.isipv4(value) or obj.isipv4cidr(value))) + or (allow_ipv6 and (obj.isipv6(value) or obj.isipv6cidr(value)))): + raise_ValueError("Given value {} is not a valid IP address.".format(value), obj) + + @classmethod + def isinstance(obj, value : typing.Any, allow_ipv4 : bool = True, allow_ipv6 : bool = True , + allow_localhost : bool = True, allow_None : bool = False) -> bool: + try: + obj._assert(value, allow_ipv4, allow_ipv6, allow_localhost, allow_None) + return True + except (TypeError, ValueError): + return False + + @classmethod + def isipv4(obj, value : str) -> bool: + """ + Return whether a given value is a valid IP version 4 address. + + This validator is based on `WTForms IPAddress validator`_ + + .. _WTForms IPAddress validator: + https://github.com/wtforms/wtforms/blob/master/wtforms/validators.py + + Args: + value (str): IP address string to validate, other types will raise unexpected errors (mostly attribute error) + + Returns: + bool : True if conformant + """ + groups = value.split(".") + if ( + len(groups) != 4 + or any(not x.isdigit() for x in groups) + or any(len(x) > 3 for x in groups) + ): + return False + return all(0 <= int(part) < 256 for part in groups) + + + @classmethod + def isipv4cidr(obj, value : str) -> bool: + """ + Return whether a given value is a valid CIDR-notated IP version 4 + address range. + + This validator is based on RFC4632 3.1. 
+
+        Args:
+            value (str): IP address string to validate, other types will raise unexpected errors (mostly attribute error)
+
+        Returns:
+            bool : True if conformant
+        """
+        try:
+            prefix, suffix = value.split('/', 2)
+        except ValueError:
+            return False
+        if not obj.isipv4(prefix) or not suffix.isdigit():
+            return False
+        return 0 <= int(suffix) <= 32
+
+    @classmethod
+    def isipv6(obj, value : str) -> bool:
+        """
+        Return whether a given value is a valid IP version 6 address
+        (including IPv4-mapped IPv6 addresses).
+
+        This validator is based on `WTForms IPAddress validator`_.
+
+        .. _WTForms IPAddress validator:
+            https://github.com/wtforms/wtforms/blob/master/wtforms/validators.py
+
+        Examples::
+
+            >>> ipv6('abcd:ef::42:1')
+            True
+
+            >>> ipv6('::ffff:192.0.2.128')
+            True
+
+            >>> ipv6('::192.0.2.128')
+            True
+
+            >>> ipv6('abc.0.0.1')
+            ValidationFailure(func=ipv6, args={'value': 'abc.0.0.1'})
+
+        .. versionadded:: 0.2
+
+        :param value: IP address string to validate
+        """
+        ipv6_groups = value.split(':')
+        if len(ipv6_groups) == 1:
+            return False
+        ipv4_groups = ipv6_groups[-1].split('.')
+
+        if len(ipv4_groups) > 1:
+            if not obj.isipv4(ipv6_groups[-1]):
+                return False
+            ipv6_groups = ipv6_groups[:-1]
+        else:
+            ipv4_groups = []
+
+        count_blank = 0
+        for part in ipv6_groups:
+            if not part:
+                count_blank += 1
+                continue
+            try:
+                num = int(part, 16)
+            except ValueError:
+                return False
+            else:
+                if not 0 <= num < 65536 or len(part) > 4:
+                    return False
+
+        max_groups = 6 if ipv4_groups else 8
+        part_count = len(ipv6_groups) - count_blank
+        if count_blank == 0 and part_count == max_groups:
+            # no :: -> must have size of max_groups
+            return True
+        elif count_blank == 1 and ipv6_groups[-1] and ipv6_groups[0] and part_count < max_groups:
+            # one :: inside the address or prefix or suffix : -> filter least two cases
+            return True
+        elif count_blank == 2 and part_count < max_groups and (
+            ((ipv6_groups[0] and not ipv6_groups[-1]) or (not ipv6_groups[0] and 
+                    ipv6_groups[-1])) or ipv4_groups):
+            # leading or trailing :: or : at end and begin -> filter last case
+            # Check if it has ipv4 groups because they get removed from the ipv6_groups
+            return True
+        elif count_blank == 3 and part_count == 0:
+            # :: is the address -> filter everything else
+            return True
+        return False
+
+    @classmethod
+    def isipv6cidr(obj, value : str) -> bool:
+        """
+        Returns whether a given value is a valid CIDR-notated IP version 6
+        address range.
+
+        This validator is based on RFC4632 3.1.
+
+        Examples::
+
+            >>> ipv6_cidr('::1/128')
+            True
+
+            >>> ipv6_cidr('::1')
+            ValidationFailure(func=ipv6_cidr, args={'value': '::1'})
+        """
+        try:
+            prefix, suffix = value.split('/', 2)
+        except ValueError:
+            return False
+        if not obj.isipv6(prefix) or not suffix.isdigit():
+            return False
+        return 0 <= int(suffix) <= 128
+
+
+
+class Number(Parameter):
+    """
+    A numeric parameter with a default value and optional bounds.
+
+    There are two types of bounds: ``bounds`` and
+    ``softbounds``. ``bounds`` are hard bounds: the parameter must
+    have a value within the specified range. The default bounds are
+    (None,None), meaning there are actually no hard bounds. One or
+    both bounds can be set by specifying a value
+    (e.g. bounds=(None,10) means there is no lower bound, and an upper
+    bound of 10). Bounds are inclusive by default, but exclusivity
+    can be specified for each bound by setting inclusive_bounds
+    (e.g. inclusive_bounds=(True,False) specifies an exclusive upper
+    bound).
+
+    Using a default value outside the hard
+    bounds, or one that is not numeric, results in an exception.
+
+    As a special case, if allow_None=True (which is true by default if
+    the parameter has a default of None when declared) then a value
+    of None is also allowed.
+
+    A separate function set_in_bounds() is provided that will
+    silently crop the given value into the legal range, for use
+    in, for instance, a GUI.
+ + ``softbounds`` are present to indicate the typical range of + the parameter, but are not enforced. Setting the soft bounds + allows, for instance, a GUI to know what values to display on + sliders for the Number. + + Example of creating a Number:: + AB = Number(default=0.5, bounds=(None,10), softbounds=(0,1), doc='Distance from A to B.') + + """ + + __slots__ = ['bounds', 'inclusive_bounds', 'crop_to_bounds', 'dtype', 'step'] + + def __init__(self, default : typing.Optional[typing.Union[float, int]] = 0.0, *, bounds : typing.Optional[typing.Tuple] = None, + crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, + allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, + class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) + self.bounds = bounds + self.crop_to_bounds = crop_to_bounds + self.inclusive_bounds = inclusive_bounds + self.dtype = (float, int) + self.step = step + + def set_in_bounds(self, obj : typing.Union[Parameterized, typing.Any], value : typing.Union[float, int]) -> None: + """ + Set to the given value, but cropped to be within the legal bounds. + See crop_to_bounds for details on how cropping is done. 
+ """ + self._assert(value, self.dtype, None, (False, False), self.allow_None) + bounded_value = self._crop_to_bounds(value) + super().__set__(obj, bounded_value) + + def _crop_to_bounds(self, value : typing.Union[int, float]) -> typing.Union[int, float]: + """ + Return the given value cropped to be within the hard bounds + for this parameter. + + If a numeric value is passed in, check it is within the hard + bounds. If it is larger than the high bound, return the high + bound. If it's smaller, return the low bound. In either case, the + returned value could be None. If a non-numeric value is passed + in, set to be the default value (which could be None). In no + case is an exception raised; all values are accepted. + """ + # Values outside the bounds are silently cropped to + # be inside the bounds. + assert self.bounds is not None, "Cannot crop to bounds when bounds is None" + vmin, vmax = self.bounds + incmin, incmax = self.inclusive_bounds + if vmin is not None: + if value < vmin: + if incmin: + return vmin + else: + return vmin + self.step + if vmax is not None: + if value > vmax: + if incmax: + return vmax + else: + return vmax - self.step + return value + + def validate_and_adapt(self, value: typing.Any) -> typing.Union[int, float]: + self._assert(value, self.dtype, None if self.crop_to_bounds else self.bounds, + self.inclusive_bounds, self.allow_None) + if self.crop_to_bounds and self.bounds and value is not None: + return self._crop_to_bounds(value) + return value + + @classmethod + def _assert(obj, value, dtype : typing.Tuple, bounds : typing.Optional[typing.Tuple] = None, + inclusive_bounds : typing.Tuple[bool, bool] = (True, True), allow_None : bool = False): + if allow_None and value is None: + return + if dtype is None: + if not obj.isnumber(value): + raise_TypeError("given value not of number type, but type {}.".format(type(value)), + obj) + elif not isinstance(value, dtype): + raise_TypeError("given value not of type {}, but type {}.".format(dtype, 
type(value)), obj) + if bounds: + vmin, vmax = bounds + incmin, incmax = inclusive_bounds + if vmax is not None: + if incmax is True: + if not value <= vmax: + raise_ValueError("given value must be at most {}, not {}.".format(vmax, value), obj) + else: + if not value < vmax: + raise_ValueError("Parameter must be less than {}, not {}.".format(vmax, value), obj) + + if vmin is not None: + if incmin is True: + if not value >= vmin: + raise_ValueError("Parameter must be at least {}, not {}.".format(vmin, value), obj) + else: + if not value > vmin: + raise_ValueError("Parameter must be greater than {}, not {}.".format(vmin, value), obj) + return value + + def _validate_step(self, value : typing.Any) -> None: + if value is not None: + if self.dtype: + if not isinstance(value, self.dtype): + raise_ValueError("Step can only be None or {}, not type {}.".format(self.dtype, type(value)), self) + elif not self.isnumber(self.step): + raise_ValueError("Step can only be None or numeric value, not type {}.".format(type(value)), self) + + def _post_slot_set(self, slot : str, old : typing.Any, value : typing.Any) -> None: + if slot == 'step': + self._validate_step(value) + return super()._post_slot_set(slot, old, value) + + @classmethod + def isinstance(obj, value, dtype : typing.Tuple, bounds : typing.Optional[typing.Tuple] = None, + inclusive_bounds : typing.Tuple[bool, bool] = (True, True), allow_None : bool = False): + try: + obj._assert(value, dtype, bounds, inclusive_bounds, allow_None) + return True + except (ValueError, TypeError): + return False + + @classmethod + def isnumber(cls, value : typing.Any) -> bool: + if isinstance(value, numbers.Number): return True + # The extra check is for classes that behave like numbers, such as those + # found in numpy, gmpy, etc. 
+ elif (hasattr(value, '__int__') and hasattr(value, '__add__')): return True + # This is for older versions of gmpy + elif hasattr(value, 'qdiv'): return True + else: return False + + + +class Integer(Number): + """Numeric Parameter required to be an Integer""" + + def __init__(self, default : typing.Optional[int] = 0, *, bounds : typing.Optional[typing.Tuple] = None, + crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, + fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, bounds=bounds, crop_to_bounds=crop_to_bounds, inclusive_bounds=inclusive_bounds, + doc=doc, constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, + deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, + precedence=precedence) + self.dtype = (int,) + + def _validate_step(self, step : int): + if step is not None and not isinstance(step, int): + raise_ValueError("Step can only be None or an integer value, not type {}".format(type(step)), self) + + + +class Boolean(Parameter): + """Binary or tristate Boolean Parameter.""" + + def __init__(self, default : typing.Optional[bool] = False, *, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence 
: typing.Optional[float] = None) -> None: + super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, + allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, + class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) + + def validate_and_adapt(self, value : typing.Any) -> bool: + if self.allow_None and value is None: + return + elif not isinstance(value, bool): + raise_ValueError("given value not boolean type, but type {}".format(type(value)), self) + return value + + + +class Iterable(Parameter): + """A tuple or list Parameter (e.g. ('a',7.6,[3,5])) with a fixed tuple length.""" + + __slots__ = ['bounds', 'length', 'item_type', 'dtype'] + + def __init__(self, default : typing.Any, *, bounds : typing.Optional[typing.Tuple[int, int]] = None, + length : typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None, deepcopy_default : bool = False, + allow_None : bool = False, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, + per_instance_descriptor : bool = False, class_member : bool = False, + fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + """ + Initialize a tuple parameter with a fixed length (number of + elements). The length is determined by the initial default + value, if any, and must be supplied explicitly otherwise. The + length is not allowed to change after instantiation. 
+ """ + super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, + allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, + class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) + self.bounds = bounds + self.length = length + self.item_type = item_type + self.dtype = (list, tuple) + + def validate_and_adapt(self, value: typing.Any) -> typing.Union[typing.List, typing.Tuple]: + self._assert(value, self.bounds, self.length, self.dtype, self.item_type, self.allow_None) + return value + + @classmethod + def _assert(obj, value : typing.Any, bounds : typing.Optional[typing.Tuple[int, int]] = None, + length : typing.Optional[int] = None, dtype : typing.Union[type, typing.Tuple] = (list, tuple), + item_type : typing.Any = None, allow_None : bool = False) -> None: + if value is None and allow_None: + return + if not isinstance(value, dtype): + raise_ValueError("given value not of iterable type {}, but {}.".format(dtype, type(value)), obj) + if bounds is not None: + if not (len(value) >= bounds[0] and len(value) <= bounds[1]): + raise_ValueError("given iterable is not of the correct length ({} instead of between {} and {}).".format( + len(value), 0 if not bounds[0] else bounds[0], bounds[1]), obj) + elif length is not None and len(value) != length: + raise_ValueError("given iterable is not of correct length ({} instead of {})".format(len(value), length), + obj) + if item_type is not None: + for val in value: + if not isinstance(val, item_type): + raise_TypeError("not all elements of given iterable of item type {}, found object of type {}".format( + item_type, type(val)), obj) + + @classmethod + def isinstance(obj, value : typing.Any, bounds : typing.Optional[typing.Tuple[int, int]], + length : typing.Optional[int] = None, dtype : typing.Union[type, typing.Tuple] = (list, tuple), + item_type : typing.Any = None, allow_None : bool = False) -> bool: + try: + 
obj._assert(value, bounds, length, dtype, item_type, allow_None) + return True + except (ValueError, TypeError): + return False + + + +class Tuple(Iterable): + + __slots__ = ['accept_list', 'accept_item'] + + def __init__(self, default : typing.Any, *, bounds : typing.Optional[typing.Tuple[int, int]] = None, + length: typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None, + accept_list : bool = False, accept_item : bool = False, deepcopy_default : bool = False, + allow_None : bool = False, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, + per_instance_descriptor : bool = False, class_member : bool = False, + fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, bounds=bounds, length=length, item_type=item_type, doc=doc, constant=constant, + readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, + deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, + precedence=precedence) + self.accept_list = accept_list + self.accept_item = accept_item + self.dtype = (tuple,) # re-assigned + + def validate_and_adapt(self, value: typing.Any) -> typing.Tuple: + if self.accept_list and isinstance(value, list): + value = tuple(value) + if self.accept_item and not isinstance(value, (list, tuple, type(None))): + value = (value,) + self._assert(value, self.bounds, self.length, self.dtype, self.item_type, self.allow_None) + return value + + @classmethod + def serialize(cls, value): + if value is None: + return None + return list(value) # As JSON has no tuple representation + + @classmethod + def deserialize(cls, value): + if value == 'null': + return None + return tuple(value) # As JSON has no tuple representation + + + +class List(Iterable): + """ + Parameter whose value 
is a list of objects, usually of a specified type. + + The bounds allow a minimum and/or maximum length of + list to be enforced. If the item_type is non-None, all + items in the list are checked to be of that type. + + `class_` is accepted as an alias for `item_type`, but is + deprecated due to conflict with how the `class_` slot is + used in Selector classes. + """ + + __slots__ = ['accept_tuple'] + + def __init__(self, default: typing.Any, *, bounds : typing.Optional[typing.Tuple[int, int]] = None, + length : typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None, + accept_tuple : bool = False, deepcopy_default : bool = False, + allow_None : bool = False, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, + per_instance_descriptor : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, + fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, + precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, bounds=bounds, length=length, item_type=item_type, + doc=doc, constant=constant, readonly=readonly, allow_None=allow_None, + per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, + class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) + self.accept_tuple = accept_tuple + self.dtype = list + + def validate_and_adapt(self, value: typing.Any) -> typing.Tuple: + if self.accept_tuple and isinstance(value, tuple): + value = list(value) + self._assert(value, self.bounds, self.length, self.dtype, self.item_type, self.allow_None) + return value + + + +class Callable(Parameter): + """ + Parameter holding a value that is a callable object, such as a function. + + A keyword argument instantiate=True should be provided when a + function object is used that might have state. 
On the other hand, + regular standalone functions cannot be deepcopied as of Python + 2.4, so instantiate must be False for those values. + """ + + def validate_and_adapt(self, value : typing.Any) -> typing.Callable: + if (self.allow_None and value is None) or callable(value): + return value + raise_ValueError("given value not a callable object, but type {}.".format(type(value)), self) + + + +class Composite(Parameter): + """ + A Parameter that is a composite of a set of other attributes of the class. + + The constructor argument 'attribs' takes a list of attribute + names, which may or may not be Parameters. Getting the parameter + returns a list of the values of the constituents of the composite, + in the order specified. Likewise, setting the parameter takes a + sequence of values and sets the value of the constituent + attributes. + + This Parameter type has not been tested with watchers and + dependencies, and may not support them properly. + """ + + __slots__ = ['attribs'] + + def __init__(self, attribs : typing.List[typing.Union[str, Parameter]], *, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(None, doc=doc, constant=constant, readonly=readonly, allow_None=allow_None, + per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, + class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) + self.attribs = [] + if attribs is not None: + for attrib in attribs: + if isinstance(attrib, Parameter): + self.attribs.append(attrib.name) + else: + self.attribs.append(attrib) + + def __get__(self, obj, objtype) -> 
typing.List[typing.Any]: + """ + Return the values of all the attribs, as a list. + """ + return [getattr(obj, attr) for attr in self.attribs] + + def validate_and_adapt(self, value): + if not len(value) == len(self.attribs): + raise_ValueError("Compound parameter got the wrong number of values (needed {}, but got {}).".format( + len(self.attribs), len(value)), self) + return value + + def _post_setter(self, obj, val): + for a, v in zip(self.attribs, val): + setattr(obj, a, v) + + + +class SelectorBase(Parameter): + """ + Parameter whose value must be chosen from a list of possibilities. + + Subclasses must implement get_range(). + """ + + __abstract = True + + @property + def range(self): + raise NotImplementedError("get_range() must be implemented in subclasses.") + + + +class Selector(SelectorBase): + """ + Parameter whose value must be one object from a list of possible objects. + + By default, if no default is specified, picks the first object from + the provided set of objects, as long as the objects are in an + ordered data collection. + + check_on_set restricts the value to be among the current list of + objects. By default, if objects are initially supplied, + check_on_set is True, whereas if no objects are initially + supplied, check_on_set is False. This can be overridden by + explicitly specifying check_on_set initially. + + If check_on_set is True (either because objects are supplied + initially, or because it is explicitly specified), the default + (initial) value must be among the list of objects (unless the + default value is None). + + The list of objects can be supplied as a list (appropriate for + selecting among a set of strings, or among a set of objects with a + "name" parameter), or as a (preferably ordered) dictionary from + names to objects. If a dictionary is supplied, the objects + will need to be hashable so that their names can be looked + up from the object value. 
+ """ + + __slots__ = ['objects', 'names'] + + # Selector is usually used to allow selection from a list of + # existing objects, therefore instantiate is False by default. + def __init__(self, *, objects : typing.List[typing.Any], default : typing.Any, empty_default : bool = False, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, + fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, + precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, + allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, + class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) + if objects is None: + objects = [] + autodefault = None + elif isinstance(objects, collections.abc.Mapping): + self.names = objects + self.objects = list(objects.values()) + autodefault = self.objects[0] + elif isinstance(objects, (list, tuple)): + self.names = None + self.objects = objects + autodefault = objects[0] + else: + raise TypeError("objects should be a list, tuple, mapping or None. Given type : {}".format(type(objects))) + default = autodefault if (not empty_default and default is None) else default + + def validate_and_adapt(self, value: typing.Any) -> typing.Any: + """ + val must be None or one of the objects in self.objects. + """ + if not (value in self.objects or (self.allow_None and value is None)): + raise_ValueError("given value not in list of possible objects, valid options include {}".format( + get_iterable_printfriendly_repr(self.objects)), self) + return value + + @property + def range(self): + """ + Return the possible objects to which this parameter could be set. 
+ + (Returns the dictionary {object.name:object}.) + """ + if self.names is not None: + return named_objs(self.objects, self.names) + else: + return self.objects + + + +class ClassSelector(SelectorBase): + """ + Parameter allowing selection of either a subclass or an instance of a given set of classes. + By default, requires an instance, but if isinstance=False, accepts a class instead. + Both class and instance values respect the instantiate slot, though it matters only + for isinstance=True. + """ + + __slots__ = ['class_', 'isinstance'] + + def __init__(self, *, class_ , default : typing.Any, isinstance : bool = True, deepcopy_default : bool = False, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, class_member : bool = False, + fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, + allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, + class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) + self.class_ = class_ + self.isinstance = isinstance + + def _get_class_name(self): + if isinstance(self.class_, tuple): + return ('(%s)' % ', '.join(cl.__name__ for cl in self.class_)) + else: + return self.class_.__name__ + + def validate_and_adapt(self, value): + if (value is None and self.allow_None): + return + if self.isinstance: + if not isinstance(value, self.class_): + raise_ValueError("{} parameter {} value must be an instance of {}, not {}.".format( + self.__class__.__name__, self.name, self._get_class_name(), value), self) + else: + if not issubclass(value, self.class_): + raise_ValueError("{} parameter {} must be a subclass of {}, not {}.".format( + 
self.__class__.__name__, self.name, self._get_class_name(), value.__name__), self) + return value + + @property + def range(self): + """ + Return the possible types for this parameter's value. + + (I.e. return `{name: }` for all classes that are + concrete_descendents() of `self.class_`.) + + Only classes from modules that have been imported are added + (see concrete_descendents()). + """ + classes = self.class_ if isinstance(self.class_, tuple) else (self.class_,) + all_classes = {} + for cls in classes: + all_classes.update(concrete_descendents(cls)) + d = OrderedDict((name, class_) for name,class_ in all_classes.items()) + if self.allow_None: + d['None'] = None + return d + + + +class TupleSelector(Selector): + """ + Variant of Selector where the value can be multiple objects from + a list of possible objects. + """ + + # Changed from ListSelector. Iterables need to be frozen to prevent spurious addition. + # To prevent duplicates, use frozen set selector + + __slots__ = ['accept_list'] + + def __init__(self, *, objects : typing.List, default : typing.Any, accept_list : bool = True, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(objects=objects, default=default, empty_default=True, doc=doc, + constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, + deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, + precedence=precedence) + self.accept_list = accept_list + + def validate_and_adapt(self, value : typing.Any): + if value is None and self.allow_None: + return + if value not in 
self.objects: + # i.e. without iterating, we check that the value is not present in the objects + # This is useful to have list or iterables themselves as part of objects + # let objects = [[1,2], 3 ,4], if [1,2] is passed, then we should try to accept it plainly before moving to iterating + # and checking + if isinstance(value, list) and self.accept_list: + value = tuple(value) + if not isinstance(value, tuple): + raise_ValueError(f"object {value} not specified as a valid member of list of objects.", self) + else: + for obj in value: + if obj not in self.objects: + raise_ValueError("object {} not specified as a valid member of list of objects.".format(obj), self) + return value + + +# For portable code: +# - specify paths in unix (rather than Windows) style; +# - use resolve_path(path_to_file=True) for paths to existing files to be read, +# - use resolve_path(path_to_file=False) for paths to existing folders to be read, +# and normalize_path() for paths to new files to be written. + +class resolve_path(ParameterizedFunction): + """ + Find the path to an existing file, searching the paths specified + in the search_paths parameter if the filename is not absolute, and + converting a UNIX-style path to the current OS's format if + necessary. + + To turn a supplied relative path into an absolute one, the path is + appended to paths in the search_paths parameter, in order, until + the file is found. + + An IOError is raised if the file is not found. + + Similar to Python's os.path.abspath(), except more search paths + than just os.getcwd() can be used, and the file must exist. + """ + + search_paths = List(default=[os.getcwd()], doc=""" + Prepended to a non-relative path, in order, until a file is + found.""") + + path_to_file = Boolean(default=True, + allow_None=True, doc=""" + String specifying whether the path refers to a 'File' or a + 'Folder'. 
If None, the path may point to *either* a 'File' *or* + a 'Folder'.""") + + def __call__(self, path : str, **params) -> str: + p = ParamOverrides(self, params) + path = os.path.normpath(path) + ftype = "File" if p.path_to_file is True \ + else "Folder" if p.path_to_file is False else "Path" + + if not p.search_paths: + p.search_paths = [os.getcwd()] + + if os.path.isabs(path): + if ((p.path_to_file is None and os.path.exists(path)) or + (p.path_to_file is True and os.path.isfile(path)) or + (p.path_to_file is False and os.path.isdir( path))): + return path + raise IOError("%s '%s' not found." % (ftype,path)) + + else: + paths_tried = [] + for prefix in p.search_paths: + try_path = os.path.join(os.path.normpath(prefix), path) + + if ((p.path_to_file is None and os.path.exists(try_path)) or + (p.path_to_file is True and os.path.isfile(try_path)) or + (p.path_to_file is False and os.path.isdir( try_path))): + return try_path + + paths_tried.append(try_path) + + raise IOError(ftype + " " + os.path.split(path)[1] + " was not found in the following place(s): " + str(paths_tried) + ".") + + +class normalize_path(ParameterizedFunction): + """ + Convert a UNIX-style path to the current OS's format, + typically for creating a new file or directory. + + If the path is not already absolute, it will be made absolute + (using the prefix parameter). + + Should do the same as Python's os.path.abspath(), except using + prefix rather than os.getcwd). + """ + + prefix = String(default=os.getcwd(),doc=""" + Prepended to the specified path, if that path is not + absolute.""") + + def __call__(self, path : str = "", **params): + p = ParamOverrides(self,params) + + if not os.path.isabs(path): + path = os.path.join(os.path.normpath(p.prefix), path) + + return os.path.normpath(path) + + + +class Path(Parameter): + """ + Parameter that can be set to a string specifying the path of a file or folder. 
+ + The string should be specified in UNIX style, but it will be + returned in the format of the user's operating system. Please use + the Filename or Foldername classes if you require discrimination + between the two possibilities. + + The specified path can be absolute, or relative to either: + + * any of the paths specified in the search_paths attribute (if + search_paths is not None); + + or + + * any of the paths searched by resolve_path() (if search_paths + is None). + """ + + __slots__ = ['search_paths'] + + def __init__(self, default : typing.Any = '', *, search_paths : typing.Optional[str] = None, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, doc=doc, + constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, + deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, + precedence=precedence) + if isinstance(search_paths, str): + self.search_paths = [search_paths] + elif isinstance(search_paths, list): + self.search_paths = search_paths + else: + self.search_paths = [] + + def _resolve(self, path): + return resolve_path(path, path_to_file=None, search_paths=self.search_paths) + + def validate_and_adapt(self, value : typing.Any) -> typing.Any: + if value is None and self.allow_None: + return + else: + return self._resolve(value) + + def __get__(self, obj, objtype) -> str: + """ + Return an absolute, normalized path (see resolve_path). 
+ """ + raw_path = super().__get__(obj, objtype) + return None if raw_path is None else self._resolve(raw_path) + + def __getstate__(self): + # don't want to pickle the search_paths + state = super().__getstate__() + if 'search_paths' in state: + state['search_paths'] = [] + return state + + + +class Filename(Path): + """ + Parameter that can be set to a string specifying the path of a file. + + The string should be specified in UNIX style, but it will be + returned in the format of the user's operating system. + + The specified path can be absolute, or relative to either: + + * any of the paths specified in the search_paths attribute (if + search_paths is not None); + + or + + * any of the paths searched by resolve_path() (if search_paths + is None). + """ + + def _resolve(self, path): + return resolve_path(path, path_to_file=True, search_paths=self.search_paths) + + +class Foldername(Path): + """ + Parameter that can be set to a string specifying the path of a folder. + + The string should be specified in UNIX style, but it will be + returned in the format of the user's operating system. + + The specified path can be absolute, or relative to either: + + * any of the paths specified in the search_paths attribute (if + search_paths is not None); + + or + + * any of the paths searched by resolve_dir_path() (if search_paths + is None). + """ + + def _resolve(self, path): + return resolve_path(path, path_to_file=False, search_paths=self.search_paths) + + + +def abbreviate_paths(pathspec,named_paths): + """ + Given a dict of (pathname,path) pairs, removes any prefix shared by all pathnames. + Helps keep menu items short yet unambiguous. + """ + + prefix = os.path.commonprefix([os.path.dirname(name)+os.path.sep for name in named_paths.keys()]+[pathspec]) + return OrderedDict([(name[len(prefix):],path) for name,path in named_paths.items()]) + + + +class FileSelector(Selector): + """ + Given a path glob, allows one file to be selected from those matching. 
+ """ + __slots__ = ['path'] + + def __init__(self, default : typing.Any, *, objects : typing.List, path : str = "", + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, objects=objects, empty_default=True, doc=doc, + constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, + deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, + precedence=precedence) + self.path = path # update is automatically called + + def _post_slot_set(self, slot: str, old : typing.Any, value : typing.Any) -> None: + super()._post_slot_set(slot, old, value) + if slot == 'path': + self.update() + + def update(self): + self.objects = sorted(glob.glob(self.path)) + if self.default in self.objects: + return + self.default = self.objects[0] if self.objects else None + + @property + def range(self): + return abbreviate_paths(self.path, super().range) + + + +class MultiFileSelector(FileSelector): + """ + Given a path glob, allows multiple files to be selected from the list of matches. 
+ """ + __slots__ = ['path'] + + def __init__(self, default : typing.Any, *, path : str = "", + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + label : typing.Optional[str] = None, per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, objects=None, doc=doc, + constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, + deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, + fdel=fdel, precedence=precedence) + + def update(self): + self.objects = sorted(glob.glob(self.path)) + if self.default and all([o in self.objects for o in self.default]): + return + self.default = self.objects + + + +class Date(Number): + """ + Date parameter of datetime or date type. 
+ """ + + def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None, + crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, bounds=bounds, crop_to_bounds=crop_to_bounds, + inclusive_bounds=inclusive_bounds, step=step, doc=doc, + constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, + deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, + precedence=precedence) + self.dtype = dt_types + + def _validate_step(self, val): + if self.step is not None and not isinstance(self.step, dt_types): + raise ValueError(f"Step can only be None, a datetime or datetime type, not type {type(val)}") + + @classmethod + def serialize(cls, value): + if value is None: + return None + if not isinstance(value, (dt.datetime, dt.date)): # i.e np.datetime64, note numpy is imported only on requirement + value = value.astype(dt.datetime) + return value.strftime("%Y-%m-%dT%H:%M:%S.%f") + + @classmethod + def deserialize(cls, value): + if value == None: + return None + return dt.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f") + + + +class CalendarDate(Number): + """ + Parameter specifically allowing dates (not datetimes). 
+ """ + + def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None, + crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, bounds=bounds, crop_to_bounds=crop_to_bounds, + inclusive_bounds=inclusive_bounds, step=step, doc=doc, + constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, + deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, + precedence=precedence) + self.dtype = dt.date + + def _validate_step(self, step): + if step is not None and not isinstance(step, self.dtype): + raise ValueError("Step can only be None or a date type.") + + @classmethod + def serialize(cls, value): + if value is None: + return None + return value.strftime("%Y-%m-%d") + + @classmethod + def deserialize(cls, value): + if value is None: + return None + return dt.datetime.strptime(value, "%Y-%m-%d").date() + + + +class CSS3Color(Parameter): + """ + Color parameter defined as a hex RGB string with an optional # + prefix or (optionally) as a CSS3 color name. 
+ """ + + # CSS3 color specification https://www.w3.org/TR/css-color-3/#svg-color + _named_colors = [ 'aliceblue', 'antiquewhite', 'aqua', + 'aquamarine', 'azure', 'beige', 'bisque', 'black', + 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', + 'cadetblue', 'chartreuse', 'chocolate', 'coral', + 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', + 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgrey', + 'darkgreen', 'darkkhaki', 'darkmagenta', 'darkolivegreen', + 'darkorange', 'darkorchid', 'darkred', 'darksalmon', + 'darkseagreen', 'darkslateblue', 'darkslategray', + 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', + 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', + 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', + 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', + 'grey', 'green', 'greenyellow', 'honeydew', 'hotpink', + 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', + 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', + 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', + 'lightgray', 'lightgrey', 'lightgreen', 'lightpink', + 'lightsalmon', 'lightseagreen', 'lightskyblue', + 'lightslategray', 'lightslategrey', 'lightsteelblue', + 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', + 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', + 'mediumpurple', 'mediumseagreen', 'mediumslateblue', + 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', + 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', + 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', + 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', + 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', + 'peru', 'pink', 'plum', 'powderblue', 'purple', 'red', + 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', + 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', + 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow', + 'springgreen', 'steelblue', 'tan', 'teal', 'thistle', + 'tomato', 'turquoise', 
'violet', 'wheat', 'white', + 'whitesmoke', 'yellow', 'yellowgreen'] + + __slots__ = ['allow_named'] + + def __init__(self, default, *, allow_named : bool = True, doc : typing.Optional[str] = None, constant : bool = False, + readonly : bool = False, allow_None : bool = False, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + super().__init__(default=default, doc=doc, + constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, + deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, + precedence=precedence) + self.allow_named = allow_named + + def validate_and_adapt(self, value : typing.Any): + if (self.allow_None and value is None): + return + if not isinstance(value, str): + raise ValueError("Color parameter %r expects a string value, " + "not an object of type %s." % (self.name, type(value))) + if self.allow_named and value in self._named_colors: + return + is_hex = re.match('^#?(([0-9a-fA-F]{2}){3}|([0-9a-fA-F]){3})$', value) + if not is_hex: + raise ValueError("Color '%s' only takes RGB hex codes " + "or named colors, received '%s'." % (self.name, value)) + + + +class Range(Tuple): + """ + A numeric range with optional bounds and softbounds. 
+ """ + + __slots__ = ['bounds', 'inclusive_bounds', 'softbounds', 'step'] + + def __init__(self, default : typing.Optional[typing.Tuple] = None, *, bounds: typing.Optional[typing.Tuple[int, int]] = None, + length : typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None, + softbounds=None, inclusive_bounds=(True,True), step=None, + doc : typing.Optional[str] = None, constant : bool = False, + readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None, + per_instance_descriptor : bool = False, deepcopy_default : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + self.inclusive_bounds = inclusive_bounds + self.softbounds = softbounds + self.step = step + super().__init__(default=default, bounds=bounds, item_type=item_type, length=length, doc=doc, + constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, + deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, + precedence=precedence) + + def validate_and_adapt(self, value : typing.Any) -> typing.Tuple: + raise NotImplementedError("Range validation not implemented") + super()._validate(val) + self._validate_bounds(val, self.bounds, self.inclusive_bounds) + + def _validate_bounds(self, val, bounds, inclusive_bounds): + if bounds is None or (val is None and self.allow_None): + return + vmin, vmax = bounds + incmin, incmax = inclusive_bounds + for bound, v in zip(['lower', 'upper'], val): + too_low = (vmin is not None) and (v < vmin if incmin else v <= vmin) + too_high = (vmax is not None) and (v > vmax if incmax else v >= vmax) + if too_low or too_high: + raise ValueError("Range parameter %r's %s bound must be in range %s." 
+ % (self.name, bound, self.rangestr())) + + @property + def rangestr(self): + vmin, vmax = self.bounds + incmin, incmax = self.inclusive_bounds + incmin = '[' if incmin else '(' + incmax = ']' if incmax else ')' + return '%s%s, %s%s' % (incmin, vmin, vmax, incmax) + + + +class DateRange(Range): + """ + A datetime or date range specified as (start, end). + + Bounds must be specified as datetime or date types (see param.dt_types). + """ + + def _validate_value(self, val, allow_None): + # Cannot use super()._validate_value as DateRange inherits from + # NumericTuple which check that the tuple values are numbers and + # datetime objects aren't numbers. + if allow_None and val is None: + return + + if not isinstance(val, tuple): + raise ValueError("DateRange parameter %r only takes a tuple value, " + "not %s." % (self.name, type(val).__name__)) + for n in val: + if isinstance(n, dt_types): + continue + raise ValueError("DateRange parameter %r only takes date/datetime " + "values, not type %s." % (self.name, type(n).__name__)) + + start, end = val + if not end >= start: + raise ValueError("DateRange parameter %r's end datetime %s " + "is before start datetime %s." % + (self.name, val[1], val[0])) + + @classmethod + def serialize(cls, value): + if value is None: + return 'null' + # List as JSON has no tuple representation + serialized = [] + for v in value: + if not isinstance(v, (dt.datetime, dt.date)): # i.e np.datetime64 + v = v.astype(dt.datetime) + # Separate date and datetime to deserialize to the right type. 
+ if type(v) == dt.date: + v = v.strftime("%Y-%m-%d") + else: + v = v.strftime("%Y-%m-%dT%H:%M:%S.%f") + serialized.append(v) + return serialized + + def deserialize(cls, value): + if value == 'null': + return None + deserialized = [] + for v in value: + # Date + if len(v) == 10: + v = dt.datetime.strptime(v, "%Y-%m-%d").date() + # Datetime + else: + v = dt.datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f") + deserialized.append(v) + # As JSON has no tuple representation + return tuple(deserialized) + + + +class CalendarDateRange(Range): + """ + A date range specified as (start_date, end_date). + """ + def _validate_value(self, val, allow_None): + if allow_None and val is None: + return + + for n in val: + if not isinstance(n, dt.date): + raise ValueError("CalendarDateRange parameter %r only " + "takes date types, not %s." % (self.name, val)) + + start, end = val + if not end >= start: + raise ValueError("CalendarDateRange parameter %r's end date " + "%s is before start date %s." % + (self.name, val[1], val[0])) + + @classmethod + def serialize(cls, value): + if value is None: + return 'null' + # As JSON has no tuple representation + return [v.strftime("%Y-%m-%d") for v in value] + + @classmethod + def deserialize(cls, value): + if value == 'null': + return None + # As JSON has no tuple representation + return tuple([dt.datetime.strptime(v, "%Y-%m-%d").date() for v in value]) + + + + +def get_typed_iterable_bounds(bounds : tuple) -> tuple: + if bounds[0] is None and bounds[1] is None: + bounds = (0, 2*sys.maxsize + 1) + elif bounds[0] is None: + bounds = (0, bounds[1]) + elif bounds[1] is None: + bounds = (bounds[0], 2*sys.maxsize + 1) + return bounds + + +class BaseConstrainedList(collections.abc.MutableSequence): + + # Need to check mul + + def __init__(self, default : typing.List[typing.Any], *, bounds : tuple = (0, None), + constant : bool = False, skip_validate : bool = False) -> None: + super().__init__() + self.constant = constant + self.bounds = 
get_typed_iterable_bounds(bounds) + if not skip_validate: + self._validate_for_set(default) + self._inner = default + + def _validate_for_set(self, value : typing.Any) -> None: + self._validate_value(value) + self._validate_bounds_for_set(value) + self._validate_items(value) + + def _validate_for_extension(self, value : typing.List) -> None: + if self.constant: + raise ValueError(f"List {get_iterable_printfriendly_repr(self._inner)} is a constant, cannot be modified.") + self._validate_value(value) + self._validate_bounds_for_extension(value) + self._validate_items(value) + + def _validate_for_insertion(self, value : typing.Any) -> None: + if self.constant: + raise ValueError(f"List {get_iterable_printfriendly_repr(self._inner)} is a constant, cannot be modified.") + self._validate_bounds_for_extension() + self._validate_item(value) + + def _validate_value(self, value : typing.Any) -> None: + if not isinstance(value, list): + raise TypeError(f"Given value for a constrained list is not a list, but type {type(value)}") + + def _validate_items(self, value : typing.Any) -> None: + raise NotImplementedError("Please implement _validate_item in the child of BaseConstrainedList.") + + def _validate_item(self, value : typing.Any): + raise NotImplementedError("Please implement _validate_single_item in the child of BaseConstrainedList.") + + def _validate_bounds_for_set(self, value : typing.Any) -> None: + if not (value.__len__() >= self.bounds[0] and value.__len__() <= self.bounds[1]): + raise ValueError(wrap_error_text( + f"""given list {get_iterable_printfriendly_repr(value)} has length out of bounds {self.bounds}. 
+ given length : {value.__len__()}""")) + + def _validate_bounds_for_extension(self, value : typing.Any = [None]) -> None: + if not (self._inner.__len__() + value.__len__() >= self.bounds[0] and + self._inner.__len__() + value.__len__() <= self.bounds[1]): + raise ValueError(wrap_error_text( + f"""given list for extending {get_iterable_printfriendly_repr(value)} extends existing list longer + than bounds {self.bounds}. given length : {self._inner.__len__() + value.__len__()}""")) + + def __len__(self) -> int: + return self._inner.__len__() + + def __iter__(self) -> typing.Any: + return self._inner.__iter__() + + def __str__(self) -> str: + return self._inner.__str__() + + def __contains__(self, item : typing.Any) -> bool: + return item in self._inner + + def __getitem__(self, index : int): + return self._inner[index] + + def __setitem__(self, index : int, value : typing.Any) -> None: + if self.constant: + raise ValueError(f"List {get_iterable_printfriendly_repr(self._inner)} is a constant, cannot be modified.") + self._validate_item(value) + self._inner[index] = value + + def __delitem__(self, index : int) -> None: + del self._inner[index] + + def __repr__(self) -> str: + return self._inner.__repr__() + + def __imul__(self, value : typing.Any) -> typing.List: + return self._inner.__imul__(value) + + def __mul__(self, value : typing.Any) -> typing.List: + return self._inner.__mul__(value) + + def __sizeof__(self) -> int: + return self._inner.__sizeof__() + + def __lt__(self, __x : typing.List[typing.Any]) -> bool: + return self._inner.__lt__(__x) + + def __le__(self, __x : typing.List[typing.Any]) -> bool: + return self._inner.__le__(__x) + + def __eq__(self, __x : typing.List[typing.Any]) -> bool: + return self._inner.__eq__(__x) + + def __ne__(self, __x : typing.List[typing.Any]) -> bool: + return self._inner.__ne__(__x) + + def __gt__(self, __x : typing.List[typing.Any]) -> bool: + return self._inner.__gt__(__x) + + def __ge__(self, __x : typing.List[typing.Any]) 
-> bool: + return self._inner.__ge__(__x) + + def __rmul__(self, __n : int) -> typing.List: + return self._inner.__rmul__(__n) + + def __reversed__(self) -> typing.Iterator: + return self._inner.__reversed__() + + def __add__(self, __x : typing.List[typing.Any]) -> typing.List: + if isinstance(__x, self.__class__): + return self._inner.__add__(__x._inner) + else: + return self._inner.__add__(__x) + + def __iadd__(self, values : typing.List[typing.Any] ) -> typing.List: + raise NotImplementedError("Please implement __iadd__ in the child of BaseConstrainedList.") + + def insert(self, __index : int, __object : typing.Any) -> None: + self._validate_for_insertion(__object) + self._inner.insert(__index, __object) + + def append(self, __object : typing.Any) -> None: + self._validate_for_insertion(__object) + self._inner.append(__object) + + def extend(self, __iterable) -> None: + self._validate_for_extension(__iterable) + self._inner.extend(__iterable) + + def reverse(self) -> None: + self._inner.reverse() + + def pop(self, __index: int) -> typing.Any: + return self._inner.pop(__index) + + def count(self, __value : typing.Any) -> int: + return self._inner.count(__value) + + def clear(self) -> None: + self._inner.clear() + + def index(self, __value : typing.Any, __start : int, __stop : int) -> int: + return self._inner.index(__value, __start, __stop) + + def remove(self, __value : typing.Any) -> None: + self._inner.remove(__value) + + def sort(self, key : typing.Any, reverse : bool): + self._inner.sort(key=key, reverse=reverse) + + def copy(self, return_as_typed_list : bool = False): + raise NotImplementedError("Please implement copy() in the child of BaseConstrainedList.") + + + +class TypeConstrainedList(BaseConstrainedList): + + def __init__(self, default : typing.List, *, item_type : typing.Any = None, + bounds : tuple = (0,None), constant : bool = False, skip_validate : bool = False) -> None: + self.item_type = item_type + super().__init__(default=default, 
bounds=bounds, constant=constant, skip_validate=skip_validate) + + def _validate_items(self, value : typing.Any) -> None: + if self.item_type is not None: + for val in value: + if not isinstance(val, self.item_type): + raise TypeError( + wrap_error_text(f""" + Not all elements of list {get_iterable_printfriendly_repr(value)} given are of allowed item type(s), + which are : {self.item_type}. Given type {type(val)}. Cannot set or extend typed list.""" + )) + + def _validate_item(self, value : typing.Any): + if self.item_type is not None and not isinstance(value, self.item_type): + raise TypeError( + wrap_error_text(f""" + Not all elements given are of allowed item type(s), which are : {self.item_type}. + Given type {type(value)}. Cannot append or insert in typed list.""" + )) + + def __iadd__(self, value : typing.List[typing.Any]): + self._validate_for_extension(value) + return TypeConstrainedList(default=self._inner.__iadd__(value), item_type=self.item_type, bounds=self.bounds, + constant=self.constant, skip_validate=True) + + def copy(self, return_as_typed_list : bool = False) -> typing.Union["TypeConstrainedList", typing.List[typing.Any]]: + if return_as_typed_list: + return TypeConstrainedList(default=self._inner.copy(), item_type=self.item_type, bounds=self.bounds, + constant=self.constant, skip_validate=True) + else: + return self._inner.copy() + + + +class TypedList(ClassSelector): + + __slots__ = ['item_type', 'bounds', 'accept_nonlist_object'] + + def __init__(self, default : typing.Optional[typing.List[typing.Any]] = None, *, item_type : typing.Any = None, + deepcopy_default : bool = True, accept_nonlist_object : bool = False, + allow_None : bool = True, bounds : tuple = (0,None), + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, + per_instance_descriptor : bool = False, class_member : bool = False, + fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : 
typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + if default is not None: + default = TypeConstrainedList(default=default, item_type=item_type, bounds=bounds, constant=constant, + skip_validate=False) # type: ignore + super().__init__(class_ = TypeConstrainedList, default=default, isinstance=True, deepcopy_default=deepcopy_default, + doc=doc, constant=constant, readonly=readonly, allow_None=allow_None, + per_instance_descriptor=per_instance_descriptor, class_member=class_member, fget=fget, fset=fset, + fdel=fdel, precedence=precedence) + self.item_type = item_type + self.bounds = bounds + self.accept_nonlist_object = accept_nonlist_object + + # @instance_descriptor - super().__set__ takes care of instance descriptors + def validate_and_adapt(self, value : typing.Any): + if self.allow_None and value is None: + return + if value is not None and self.accept_nonlist_object and not isinstance(value, list): + value = [value] + return TypeConstrainedList(default=value, item_type=self.item_type, bounds=self.bounds, + constant=self.constant, skip_validate=False) + + @classmethod + def serialize(cls, value : TypeConstrainedList) -> typing.Any: + if value is None: + return None + return value._inner + + # no need for deserialize, when __set__ is called TypeConstrainedList is automatically created + + +class TypeConstrainedDict(collections.abc.MutableMapping): + """ A dictionary which contains only ``NewDict`` values. 
""" + + def __init__(self, default : typing.Dict, *, key_type : typing.Optional[typing.Union[type, typing.Tuple]] = None, + item_type : typing.Optional[typing.Union[type, typing.Tuple]] = None, + bounds : typing.Tuple = (0, None), constant : bool = False, skip_validate : bool = False): + super().__init__() + self.key_type = key_type + self.item_type = item_type + self.bounds = get_typed_iterable_bounds(bounds) + self.constant = constant + if not skip_validate: + self._validate_for_set(default) + self._inner = default + + def _validate_for_set(self, value : typing.Dict) -> None: + self._validate_value(value) + self._validate_bounds_for_set(value) + self._validate_items(value) + + def _validate_for_insertion(self, value : typing.Dict) -> None: + if self.constant: + raise ValueError(f"Dict {get_iterable_printfriendly_repr(self._inner)} is a constant and cannot be modified.") + self._validate_value(value) + self._validate_bounds_for_extension(value) + self._validate_items(value) + + def _validate_value(self, value) -> None: + if not isinstance(value, dict): + raise TypeError(wrap_error_text(f""" + Given value for typed dictionary is not a dictionary. Given type : {type(value)}. Expected dictionary.""")) + + def _validate_bounds_for_set(self, value : typing.Dict) -> None: + if not (self.bounds[0] <= value.__len__() <= self.bounds[1]): + raise ValueError(wrap_error_text(f""" + Given dictionary length outside bounds. Given length {value.__len__()}, expected length : {self.bounds}""")) + + def _validate_bounds_for_extension(self, value : typing.Dict = {"dummy" : "dummy"}) -> None: + if not (self.bounds[0] <= self._inner.__len__() + value.__len__() <= self.bounds[1]): + raise ValueError(wrap_error_text(f""" + Extending dictionary crosses bounds. 
Existing length {self._inner.__len__()}, + length of items to be added : {value.__len__()}, allowed bounds : {self.bounds}""")) + + def _validate_items(self, value : typing.Dict[typing.Any, typing.Any]) -> None: + keys = value.keys() + values = value.values() + if self.key_type is not None and len(keys) != 0: + for key in keys: + if not isinstance(key, self.key_type): + raise TypeError(wrap_error_text(f""" + Keys for typed dictionary contain incompatible types. + Allowed types : {self.key_type}, given type : {type(key)}""")) + if self.item_type is not None and len(values) != 0: + for value in values: + if not isinstance(value, self.item_type): + raise TypeError(wrap_error_text(f""" + Values for typed dictionary contain incompatible types. + Allowed types : {self.item_type}. given type : {type(value)}""")) + + def _validate_key_value_pair(self, __key : typing.Any, __value : typing.Any) -> None: + if self.key_type is not None: + if not isinstance(__key, self.key_type): + raise TypeError("given key {} is not of {}.".format(__key, self.key_type)) + if self.item_type is not None: + if not isinstance(__value, self.item_type): + raise TypeError("given item {} is not of {}.".format(__value, self.item_type)) + + def __iter__(self) -> typing.Iterator: + return self._inner.__iter__() + + def __setitem__(self, __key : typing.Any, __value : typing.Any) -> None: + if self.constant: + raise ValueError(f"Dict {get_iterable_printfriendly_repr(self._inner)} is a constant and cannot be modified.") + if __key not in self._inner: + self._validate_bounds_for_extension() + self._validate_key_value_pair(__key, __value) + self._inner.__setitem__(__key, __value) + + def __delitem__(self, __v : typing.Any) -> None: + self._inner.__delitem__(__v) + + def __getitem__(self, __k : typing.Any) -> typing.Any: + return self._inner.__getitem__(__k) + + def __str__(self) -> str: + return self._inner.__str__() + + def __len__(self) -> int: + return self._inner.__len__() + + def __contains__(self, __o 
: object) -> bool: + return self._inner.__contains__(__o) + + def __eq__(self, __o: object) -> bool: + return self._inner.__eq__(__o) + + def __ne__(self, __o: object) -> bool: + return self._inner.__ne__(__o) + + def __format__(self, __format_spec: str) -> str: + return self._inner.__format__(__format_spec) + + def __sizeof__(self) -> int: + return self._inner.__sizeof__() + + def __repr__(self) -> str: + return self._inner.__repr__() + + def fromkeys(self, __iterable, __value : typing.Any): + return self._inner.fromkeys(__iterable, __value) + + def keys(self) -> typing.Any: + return self._inner.keys() + + def items(self) -> typing.Any: + return self._inner.items() + + def values(self) -> typing.Any: + return self._inner.values() + + def get(self, __key : typing.Any, __default : typing.Any = None): + return self._inner.get(__key, __default) + + def setdefault(self, __key : typing.Any) -> None: + self._inner.setdefault(__key) + + def clear(self) -> None: + self._inner.clear() + + def copy(self, return_as_typed : bool = False) -> typing.Union["TypeConstrainedDict", typing.Dict]: + if return_as_typed: + return TypeConstrainedDict(default=self._inner.copy(), key_type=self.key_type, item_type=self.item_type, + bounds=self.bounds, constant=self.constant, skip_validate=True) + else: + return self._inner.copy() + + def popitem(self) -> tuple: + return self._inner.popitem() + + def pop(self, __key : typing.Any) -> typing.Any: + return self._inner.pop(__key) + + def update(self, __o : typing.Any) -> None: + self._validate_for_insertion(__o) + self._inner.update(__o) + + +class TypedKeyMappingsConstrainedDict(TypeConstrainedDict): + + def __init__(self, default: typing.Dict, *, type_mapping : typing.Dict, + allow_unspecified_keys : bool = False, bounds : tuple = (0, None), constant : bool = False, + skip_validate : bool = False) -> None: + self.type_mapping = type_mapping + self.allow_unspecified_keys = allow_unspecified_keys + self.key_list = self.type_mapping.keys() + 
super().__init__(default, key_type = None, item_type = None, bounds = bounds, constant = constant, + skip_validate=skip_validate) + + def _validate_items(self, value: typing.Dict) -> None: + for key, val in value.items(): + self._validate_key_value_pair(key, val) + + def _validate_key_value_pair(self, __key: typing.Any, __value: typing.Any) -> None: + if __key not in self.key_list: + if self.allow_unspecified_keys: + pass + else: + raise KeyError(f"Keys except {self.key_list} not allowed for typed dictionary. Given key : {__key}.") + elif not isinstance(__value, self.type_mapping[__key]): + raise TypeError(wrap_error_text(f""" + Value for key {__key} not of expected type : {self.type_mapping[__key]}. Given type : {type(__value)}.""")) + + def copy(self, return_as_typed : bool = False) -> typing.Union["TypedKeyMappingsConstrainedDict", typing.Dict]: + if return_as_typed: + return TypedKeyMappingsConstrainedDict(default=self._inner.copy(), type_mapping=self.type_mapping, + bounds=self.bounds, constant=self.constant, skip_validate=True) + else: + return self._inner.copy() + + +class TypedDict(ClassSelector): + + __slots__ = ['key_type', 'item_type', 'bounds'] + + def __init__(self, default : typing.Optional[typing.Dict] = None, *, key_type : typing.Any = None, + item_type : typing.Any = None, deepcopy_default : bool = True, allow_None : bool = True, + bounds : tuple = (0, None), doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, + per_instance_descriptor : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, + fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, + precedence : typing.Optional[float] = None) -> None: + if default is not None: + default = TypeConstrainedDict(default, key_type=key_type, item_type=item_type, bounds=bounds, + constant=constant, skip_validate=False) # type: ignore + self.key_type = key_type + self.item_type = item_type + 
self.bounds = bounds + super().__init__(class_=TypeConstrainedDict, default=default, isinstance=True, deepcopy_default=deepcopy_default, + doc=doc, constant=constant, readonly=readonly, allow_None=allow_None, fget=fget, fset=fset, fdel=fdel, + per_instance_descriptor=per_instance_descriptor, class_member=class_member, precedence=precedence) + + def __set__(self, obj, value): + if value is not None: + container = TypeConstrainedDict(default=value, key_type=self.key_type, item_type=self.item_type, + bounds=self.bounds, constant=self.constant, skip_validate=False) + return super().__set__(obj, container) # re-set it to trigger param related activities + else: + return super().__set__(obj, value) # re-set it to trigger param related activities + + @classmethod + def serialize(cls, value: TypeConstrainedDict) -> typing.Any: + if value is None: + return None + return value._inner + + +class TypedKeyMappingsDict(ClassSelector): + + __slots__ = ['type_mapping', 'allow_unspecified_keys', 'bounds'] + + def __init__(self, default : typing.Optional[typing.Dict[typing.Any, typing.Any]] = None, *, + type_mapping : typing.Dict, + allow_unspecified_keys : bool = True, bounds : tuple = (0, None), + deepcopy_default : bool = True, allow_None : bool = True, + doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, + per_instance_descriptor : bool = False, class_member : bool = False, + fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, + fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: + if default is not None: + default = TypedKeyMappingsConstrainedDict(default=default, type_mapping=type_mapping, + allow_unspecified_keys=allow_unspecified_keys, bounds=bounds, constant=constant, + skip_validate=False) # type: ignore + self.type_mapping = type_mapping + self.allow_unspecified_keys = allow_unspecified_keys + self.bounds = bounds + 
super().__init__(class_=TypedKeyMappingsConstrainedDict, default=default, + isinstance=True, deepcopy_default=deepcopy_default, doc=doc, constant=constant, readonly=readonly, + allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, class_member=class_member, + fget=fget, fset=fset, fdel=fdel, precedence=precedence) + + def __set__(self, obj, value): + if value is not None: + container = TypedKeyMappingsConstrainedDict(default=value, type_mapping=self.type_mapping, + allow_unspecified_keys=self.allow_unspecified_keys, bounds=self.bounds, constant=self.constant, + skip_validate=False) + return super().__set__(obj, container) + else: + return super().__set__(obj, value) # re-set it to trigger param related activities + + @classmethod + def serialize(cls, value: TypeConstrainedDict) -> typing.Any: + if value is None: + return None + return value._inner + + +def hashable(x): + """ + Return a hashable version of the given object x, with lists and + dictionaries converted to tuples. Allows mutable objects to be + used as a lookup key in cases where the object has not actually + been mutated. Lookup will fail (appropriately) in cases where some + part of the object has changed. Does not (currently) recursively + replace mutable subobjects. + """ + if isinstance(x, collections.abc.MutableSequence): + return tuple(x) + elif isinstance(x, collections.abc.MutableMapping): + return tuple([(k,v) for k,v in x.items()]) + else: + return x + + +def named_objs(objlist, namesdict=None): + """ + Given a list of objects, returns a dictionary mapping from + string name for the object to the object itself. Accepts + an optional name,obj dictionary, which will override any other + name if that item is present in the dictionary. 
+ """ + objs = OrderedDict() + + objtoname = {} + unhashables = [] + if namesdict is not None: + for k, v in namesdict.items(): + try: + objtoname[hashable(v)] = k + except TypeError: + unhashables.append((k, v)) + + for obj in objlist: + if objtoname and hashable(obj) in objtoname: + k = objtoname[hashable(obj)] + elif any(obj is v for (_, v) in unhashables): + k = [k for (k, v) in unhashables if v is obj][0] + elif hasattr(obj, "name"): + k = obj.name + elif hasattr(obj, '__name__'): + k = obj.__name__ + else: + k = str(obj) + objs[k] = obj + return objs + + +def _is_abstract(class_): + try: + return class_.__abstract + except AttributeError: + return False + + +# Could be a method of ClassSelector. +def concrete_descendents(parentclass): + """ + Return a dictionary containing all subclasses of the specified + parentclass, including the parentclass. Only classes that are + defined in scripts that have been run or modules that have been + imported are included, so the caller will usually first do ``from + package import *``. + + Only non-abstract classes will be included. + """ + return dict((c.__name__, c) for c in descendents(parentclass) + if not _is_abstract(c)) + + + +__all__ = [ + 'String', 'Bytes', 'IPAddress', 'Number', 'Integer', 'Boolean', 'Iterable', 'Tuple', 'List', 'Callable', + 'CSS3Color', 'Composite', 'Selector', 'ClassSelector', 'TupleSelector', + 'Filename', 'Foldername', 'Path', 'FileSelector', 'MultiFileSelector', + 'Date', 'CalendarDate', 'Range', 'DateRange', 'CalendarDateRange', + 'TypeConstrainedList', 'TypeConstrainedDict', 'TypedKeyMappingsConstrainedDict', + 'TypedList', 'TypedDict', 'TypedKeyMappingsDict', + ] diff --git a/hololinked/param/serializer.py b/hololinked/param/serializer.py new file mode 100644 index 0000000..c2409b6 --- /dev/null +++ b/hololinked/param/serializer.py @@ -0,0 +1,343 @@ +""" +Classes used to support string serialization of Parameters and +Parameterized objects. 
+""" + +import json +import textwrap + +class UnserializableException(Exception): + pass + +class UnsafeserializableException(Exception): + pass + +def JSONNullable(json_type): + "Express a JSON schema type as nullable to easily support Parameters that allow_None" + return {'anyOf': [ json_type, {'type': 'null'}] } + + + +class Serialization(object): + """ + Base class used to implement different types of serialization. + """ + + @classmethod + def schema(cls, pobj, subset=None): + raise NotImplementedError # noqa: unimplemented method + + @classmethod + def serialize_parameters(cls, pobj, subset=None): + """ + Serialize the parameters on a Parameterized object into a + single serialized object, e.g. a JSON string. + """ + raise NotImplementedError # noqa: unimplemented method + + @classmethod + def deserialize_parameters(cls, pobj, serialized, subset=None): + """ + Deserialize a serialized object representing one or + more Parameters into a dictionary of parameter values. + """ + raise NotImplementedError # noqa: unimplemented method + + @classmethod + def serialize_parameter_value(cls, pobj, pname): + """ + Serialize a single parameter value. + """ + raise NotImplementedError # noqa: unimplemented method + + @classmethod + def deserialize_parameter_value(cls, pobj, pname, value): + """ + Deserialize a single parameter value. + """ + raise NotImplementedError # noqa: unimplemented method + + +class JSONSerialization(Serialization): + """ + Class responsible for specifying JSON serialization, deserialization + and JSON schemas for Parameters and Parameterized classes and + objects. 
+ """ + + unserializable_parameter_types = ['Callable'] + + json_schema_literal_types = { + int:'integer', float:'number', str:'string', + type(None): 'null' + } + + @classmethod + def loads(cls, serialized): + return json.loads(serialized) + + @classmethod + def dumps(cls, obj): + return json.dumps(obj) + + @classmethod + def schema(cls, pobj, safe=False, subset=None): + schema = {} + for name, p in pobj.param.objects('existing').items(): + if subset is not None and name not in subset: + continue + schema[name] = p.schema(safe=safe) + if p.doc: + schema[name]['description'] = textwrap.dedent(p.doc).replace('\n', ' ').strip() + if p.label: + schema[name]['title'] = p.label + return schema + + @classmethod + def serialize_parameters(cls, pobj, subset=None): + # components = {} + # for name, p in pobj.param.objects('existing').items(): + # if subset is not None and name not in subset: + # continue + # value = pobj.param.get_value_generator(name) + # components[name] = p.serialize(value) + # return cls.dumps(components) + JSON = {} + pobjtype = type(pobj) + for key, param in pobj.parameters.objects().items(): + if subset is not None and key not in subset: + pass + else: + value = param.__get__(pobj, pobjtype) + value = param.serialize(value) + JSON[key] = value + return JSON + + @classmethod + def deserialize_parameters(cls, pobj, serialization, subset=None): + deserialized = cls.loads(serialization) + components = {} + for name, value in deserialized.items(): + if subset is not None and name not in subset: + continue + deserialized = pobj.param[name].deserialize(value) + components[name] = deserialized + return components + + # Parameter level methods + + @classmethod + def _get_method(cls, ptype, suffix): + "Returns specialized method if available, otherwise None" + method_name = ptype.lower()+ '_' + suffix + return getattr(cls, method_name, None) + + @classmethod + def param_schema(cls, ptype, p, safe=False, subset=None): + if ptype in 
cls.unserializable_parameter_types: + raise UnserializableException + dispatch_method = cls._get_method(ptype, 'schema') + if dispatch_method: + schema = dispatch_method(p, safe=safe) + else: + schema = {'type': ptype.lower()} + return JSONNullable(schema) if p.allow_None else schema + + @classmethod + def serialize_parameter_value(cls, pobj, pname): + value = pobj.param.get_value_generator(pname) + return cls.dumps(pobj.param[pname].serialize(value)) + + @classmethod + def deserialize_parameter_value(cls, pobj, pname, value): + value = cls.loads(value) + return pobj.param[pname].deserialize(value) + + # Custom Schemas + + @classmethod + def class__schema(cls, class_, safe=False): + from .parameterized import Parameterized + if isinstance(class_, tuple): + return {'anyOf': [cls.class__schema(cls_) for cls_ in class_]} + elif class_ in cls.json_schema_literal_types: + return {'type': cls.json_schema_literal_types[class_]} + elif issubclass(class_, Parameterized): + return {'type': 'object', 'properties': class_.param.schema(safe)} + else: + return {'type': 'object'} + + @classmethod + def array_schema(cls, p, safe=False): + if safe is True: + msg = ('Array is not guaranteed to be safe for ' + 'serialization as the dtype is unknown') + raise UnsafeserializableException(msg) + return {'type': 'array'} + + @classmethod + def classselector_schema(cls, p, safe=False): + return cls.class__schema(p.class_, safe=safe) + + @classmethod + def dict_schema(cls, p, safe=False): + if safe is True: + msg = ('Dict is not guaranteed to be safe for ' + 'serialization as the key and value types are unknown') + raise UnsafeserializableException(msg) + return {'type': 'object'} + + @classmethod + def date_schema(cls, p, safe=False): + return {'type': 'string', 'format': 'date-time'} + + @classmethod + def calendardate_schema(cls, p, safe=False): + return {'type': 'string', 'format': 'date'} + + @classmethod + def tuple_schema(cls, p, safe=False): + schema = {'type': 'array'} + if 
p.length is not None: + schema['minItems'] = p.length + schema['maxItems'] = p.length + return schema + + @classmethod + def number_schema(cls, p, safe=False): + schema = {'type': p.__class__.__name__.lower() } + return cls.declare_numeric_bounds(schema, p.bounds, p.inclusive_bounds) + + @classmethod + def declare_numeric_bounds(cls, schema, bounds, inclusive_bounds): + "Given an applicable numeric schema, augment with bounds information" + if bounds is not None: + (low, high) = bounds + if low is not None: + key = 'minimum' if inclusive_bounds[0] else 'exclusiveMinimum' + schema[key] = low + if high is not None: + key = 'maximum' if inclusive_bounds[1] else 'exclusiveMaximum' + schema[key] = high + return schema + + @classmethod + def integer_schema(cls, p, safe=False): + return cls.number_schema(p) + + @classmethod + def numerictuple_schema(cls, p, safe=False): + schema = cls.tuple_schema(p, safe=safe) + schema['additionalItems'] = {'type': 'number'} + return schema + + @classmethod + def xycoordinates_schema(cls, p, safe=False): + return cls.numerictuple_schema(p, safe=safe) + + @classmethod + def range_schema(cls, p, safe=False): + schema = cls.tuple_schema(p, safe=safe) + bounded_number = cls.declare_numeric_bounds( + {'type': 'number'}, p.bounds, p.inclusive_bounds) + schema['additionalItems'] = bounded_number + return schema + + @classmethod + def list_schema(cls, p, safe=False): + schema = {'type': 'array'} + if safe is True and p.item_type is None: + msg = ('List without a class specified cannot be guaranteed ' + 'to be safe for serialization') + raise UnsafeserializableException(msg) + if p.class_ is not None: + schema['items'] = cls.class__schema(p.item_type, safe=safe) + return schema + + @classmethod + def objectselector_schema(cls, p, safe=False): + try: + allowed_types = [{'type': cls.json_schema_literal_types[type(obj)]} + for obj in p.objects] + schema = {'anyOf': allowed_types} + schema['enum'] = p.objects + return schema + except: + if safe is 
True: + msg = ('ObjectSelector cannot be guaranteed to be safe for ' + 'serialization due to unserializable type in objects') + raise UnsafeserializableException(msg) + return {} + + @classmethod + def selector_schema(cls, p, safe=False): + try: + allowed_types = [{'type': cls.json_schema_literal_types[type(obj)]} + for obj in p.objects.values()] + schema = {'anyOf': allowed_types} + schema['enum'] = p.objects + return schema + except: + if safe is True: + msg = ('Selector cannot be guaranteed to be safe for ' + 'serialization due to unserializable type in objects') + raise UnsafeserializableException(msg) + return {} + + @classmethod + def listselector_schema(cls, p, safe=False): + if p.objects is None: + if safe is True: + msg = ('ListSelector cannot be guaranteed to be safe for ' + 'serialization as allowed objects unspecified') + return {'type': 'array'} + for obj in p.objects: + if type(obj) not in cls.json_schema_literal_types: + msg = 'ListSelector cannot serialize type %s' % type(obj) + raise UnserializableException(msg) + return {'type': 'array', 'items': {'enum': p.objects}} + + @classmethod + def dataframe_schema(cls, p, safe=False): + schema = {'type': 'array'} + if safe is True: + msg = ('DataFrame is not guaranteed to be safe for ' + 'serialization as the column dtypes are unknown') + raise UnsafeserializableException(msg) + if p.columns is None: + schema['items'] = {'type': 'object'} + return schema + + mincols, maxcols = None, None + if isinstance(p.columns, int): + mincols, maxcols = p.columns, p.columns + elif isinstance(p.columns, tuple): + mincols, maxcols = p.columns + + if isinstance(p.columns, int) or isinstance(p.columns, tuple): + schema['items'] = {'type': 'object', 'minItems': mincols, + 'maxItems': maxcols} + + if isinstance(p.columns, list) or isinstance(p.columns, set): + literal_types = [{'type':el} for el in cls.json_schema_literal_types.values()] + allowable_types = {'anyOf': literal_types} + properties = {name: allowable_types for 
name in p.columns} + schema['items'] = {'type': 'object', 'properties': properties} + + minrows, maxrows = None, None + if isinstance(p.rows, int): + minrows, maxrows = p.rows, p.rows + elif isinstance(p.rows, tuple): + minrows, maxrows = p.rows + + if minrows is not None: + schema['minItems'] = minrows + if maxrows is not None: + schema['maxItems'] = maxrows + + return schema + + +serializers = dict( + json = JSONSerialization +) \ No newline at end of file diff --git a/hololinked/param/utils.py b/hololinked/param/utils.py new file mode 100644 index 0000000..c4888f2 --- /dev/null +++ b/hololinked/param/utils.py @@ -0,0 +1,89 @@ +from collections import OrderedDict +import sys +import inspect +import typing +from functools import reduce, partial + + +def classlist(class_ : typing.Any) -> typing.Tuple[type]: + """ + Return a list of the class hierarchy above (and including) the given class. + + Same as `inspect.getmro(class_)[::-1]` + """ + return inspect.getmro(class_)[::-1] + + +def get_dot_resolved_attr(obj : typing.Any, attr : str, *args): + def _getattr(obj, attr): + return getattr(obj, attr, *args) + return reduce(_getattr, [obj] + attr.split('.')) + + +def iscoroutinefunction(function : typing.Callable) -> bool: + """ + Whether the function is an asynchronous coroutine function. + """ + import asyncio + try: + return ( + inspect.isasyncgenfunction(function) or + asyncio.iscoroutinefunction(function) + ) + except AttributeError: + return False + + +def get_method_owner(method : typing.Callable) -> typing.Any: + """ + Gets the instance that owns the supplied method + """ + if not inspect.ismethod(method): + return None + if isinstance(method, partial): + method = method.func + return method.__self__ if sys.version_info.major >= 3 else method.im_self + + +def is_ordered_dict(d): + """ + Predicate checking for ordered dictionaries. 
OrderedDict is always + ordered, and vanilla Python dictionaries are ordered for Python 3.6+ + """ + py3_ordered_dicts = (sys.version_info.major == 3) and (sys.version_info.minor >= 6) + vanilla_odicts = (sys.version_info.major > 3) or py3_ordered_dicts + return isinstance(d, OrderedDict)or (vanilla_odicts and isinstance(d, dict)) + + +def get_all_slots(class_): + """ + Return a list of slot names for slots defined in `class_` and its + superclasses. + """ + # A subclass's __slots__ attribute does not contain slots defined + # in its superclass (the superclass' __slots__ end up as + # attributes of the subclass). + all_slots = [] + parent_param_classes = [c for c in classlist(class_)[1::]] + for c in parent_param_classes: + if hasattr(c,'__slots__'): + all_slots+=c.__slots__ + return all_slots + + +def get_occupied_slots(instance): + """ + Return a list of slots for which values have been set. + + (While a slot might be defined, if a value for that slot hasn't + been set, then it's an AttributeError to request the slot's + value.) + """ + return [slot for slot in get_all_slots(type(instance)) + if hasattr(instance, slot)] + + + + +__all__ = ['classlist', 'get_dot_resolved_attr', 'iscoroutinefunction', 'get_method_owner', 'get_all_slots', + 'get_occupied_slots'] \ No newline at end of file diff --git a/hololinked/param/version.py b/hololinked/param/version.py new file mode 100644 index 0000000..0534ae0 --- /dev/null +++ b/hololinked/param/version.py @@ -0,0 +1,771 @@ +""" +Provide consistent and up-to-date ``__version__`` strings for +Python packages. + +See https://github.com/holoviz/autover for more information. +""" + +# The Version class is a copy of autover.version.Version v0.2.5, +# except as noted below. +# +# The current version of autover supports a workflow based on tagging +# a git repository, and reports PEP440 compliant version information. 
+# Previously, the workflow required editing of version numbers in +# source code, and the version was not necessarily PEP440 compliant. +# Version.__new__ is added here to provide the previous Version class +# (OldDeprecatedVersion) if Version is called in the old way. + + +__author__ = 'Jean-Luc Stevens' + +import os, subprocess, json + +def run_cmd(args, cwd=None): + proc = subprocess.Popen(args, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=cwd) + output, error = (str(s.decode()).strip() for s in proc.communicate()) + + # Detects errors as _either_ a non-zero return code _or_ messages + # printed to stderr, because the return code is erroneously fixed at + # zero in some cases (see https://github.com/holoviz/param/pull/389). + if proc.returncode != 0 or len(error) > 0: + raise Exception(proc.returncode, error) + return output + + + +class Version(object): + """ + A simple approach to Python package versioning that supports PyPI + releases and additional information when working with version + control. When obtaining a package from PyPI, the version returned + is a string-formatted rendering of the supplied release tuple. + For instance, release (1,0) tagged as ``v1.0`` in the version + control system will return ``1.0`` for ``str(__version__)``. Any + number of items can be supplied in the release tuple, with either + two or three numeric versioning levels typical. + + During development, a command like ``git describe`` will be used to + compute the number of commits since the last version tag, the short + commit hash, and whether the commit is dirty (has changes not yet + committed). Version tags must start with a lowercase 'v' and have a + period in them, e.g. v2.0, v0.9.8 or v0.1 and may include the PEP440 + prerelease identifiers of 'a' (alpha) 'b' (beta) or 'rc' (release + candidate) allowing tags such as v2.0.a3, v0.9.8.b3 or v0.1.rc5. 
+
+    Also note that when version control system (VCS) information is
+    used, the number of commits since the last version tag is
+    determined. This approach is often useful in practice to decide
+    which version is newer for a single developer, but will not
+    necessarily be reliable when comparing against a different fork or
+    branch in a distributed VCS.
+
+    For git, if you want version control information available even in
+    an exported archive (e.g. a .zip file from GitHub), you can set
+    the following line in the .gitattributes file of your project::
+
+      __init__.py export-subst
+
+    Note that to support pip installation directly from GitHub via git
+    archive, a .version file must be tracked by the repo to supply the
+    release number (otherwise only the short SHA is available).
+
+    The PEP440 format returned is [N!]N(.N)*[{a|b|rc}N][.postN+SHA]
+    where everything before .postN is obtained from the tag, the N in
+    .postN is the number of commits since the last tag and the SHA is
+    obtained via git describe. This latter portion is only shown if the
+    commit count since the last tag is non zero. Instead of '.post', an
+    alternate valid prefix such as '.rev', '_rev', '_r' or '.r' may be
+    supplied."""
+
+    def __new__(cls,**kw):
+        # If called in the old way, provide the previous class. Means
+        # PEP440/tag based workflow warning below will never appear.
+        if ('release' in kw and kw['release'] is not None) or \
+           ('dev' in kw and kw['dev'] is not None) or \
+           ('commit_count' in kw):
+            return OldDeprecatedVersion(**kw)
+        else:
+            return super(Version, cls).__new__(cls)
+
+
+    def __init__(self, release=None, fpath=None, commit=None, reponame=None,
+                 commit_count_prefix='.post', archive_commit=None, **kwargs):
+        """
+        :release: Release tuple (corresponding to the current VCS tag)
+        :commit: Short SHA. Set to '$Format:%h$' for git archive support.
+        :fpath: Set to ``__file__`` to access version control information
+        :reponame: Used to verify VCS repository name.
+        """
+        self.fpath = fpath
+        self._expected_commit = commit
+
+        if release is not None or 'commit_count' in kwargs:
+            print('WARNING: param.Version now supports PEP440 and a new tag based workflow. See param/version.py for more details')
+
+        self.expected_release = release
+
+        self._commit = None if (commit is None or commit.startswith("$Format")) else commit
+        self._commit_count = None
+        self._release = None
+        self._dirty = False
+        self._prerelease = None
+
+        self.archive_commit= archive_commit
+
+        self.reponame = reponame
+        self.commit_count_prefix = commit_count_prefix
+
+    @property
+    def prerelease(self):
+        """
+        Either None or one of 'aN' (alpha), 'bN' (beta) or 'rcN'
+        (release candidate) where N is an integer.
+        """
+        return self.fetch()._prerelease
+
+    @property
+    def release(self):
+        "Return the release tuple"
+        return self.fetch()._release
+
+    @property
+    def commit(self):
+        "A specification for this particular VCS version, e.g. a short git SHA"
+        return self.fetch()._commit
+
+    @property
+    def commit_count(self):
+        "Return the number of commits since the last release"
+        return self.fetch()._commit_count
+
+    @property
+    def dirty(self):
+        "True if there are uncommitted changes, False otherwise"
+        return self.fetch()._dirty
+
+
+    def fetch(self):
+        """
+        Returns a tuple of the major version together with the
+        appropriate SHA and dirty bit (for development version only).
+        """
+        if self._release is not None:
+            return self
+
+        self._release = self.expected_release
+        if not self.fpath:
+            self._commit = self._expected_commit
+            return self
+
+        # Only git right now but easily extended to SVN, Mercurial, etc.
+ for cmd in ['git', 'git.cmd', 'git.exe']: + try: + self.git_fetch(cmd) + break + except EnvironmentError: + pass + return self + + + def git_fetch(self, cmd='git', as_string=False): + commit_argument = self._commit + output = None + try: + if self.reponame is not None: + # Verify this is the correct repository (since fpath could + # be an unrelated git repository, and autover could just have + # been copied/installed into it). + remotes = run_cmd([cmd, 'remote', '-v'], + cwd=os.path.dirname(self.fpath)) + repo_matches = ['/' + self.reponame + '.git' , + # A remote 'server:reponame.git' can also be referred + # to (i.e. cloned) as `server:reponame`. + '/' + self.reponame + ' '] + if not any(m in remotes for m in repo_matches): + try: + output = self._output_from_file() + if output is not None: + self._update_from_vcs(output) + except: pass + if output is None: + # glob pattern (not regexp) matching vX.Y.Z* tags + output = run_cmd([cmd, 'describe', '--long', '--match', + "v[0-9]*.[0-9]*.[0-9]*", '--dirty'], + cwd=os.path.dirname(self.fpath)) + if as_string: return output + except Exception as e1: + try: + output = self._output_from_file() + if output is not None: + self._update_from_vcs(output) + if self._known_stale(): + self._commit_count = None + if as_string: return output + + # If an explicit commit was supplied (e.g from git + # archive), it should take precedence over the file. + if commit_argument: + self._commit = commit_argument + return + + except IOError: + if e1.args[1] == 'fatal: No names found, cannot describe anything.': + raise Exception("Cannot find any git version tags of format v*.*") + # If there is any other error, return (release value still useful) + return self + + self._update_from_vcs(output) + + + def _known_stale(self): + """ + The commit is known to be from a file (and therefore stale) if a + SHA is supplied by git archive and doesn't match the parsed commit. 
+ """ + if self._output_from_file() is None: + commit = None + else: + commit = self.commit + + known_stale = (self.archive_commit is not None + and not self.archive_commit.startswith('$Format') + and self.archive_commit != commit) + if known_stale: self._commit_count = None + return known_stale + + def _output_from_file(self, entry='git_describe'): + """ + Read the version from a .version file that may exist alongside __init__.py. + + This file can be generated by piping the following output to file: + + git describe --long --match v*.* + """ + try: + vfile = os.path.join(os.path.dirname(self.fpath), '.version') + with open(vfile, 'r') as f: + return json.loads(f.read()).get(entry, None) + except: # File may be missing if using pip + git archive + return None + + + def _update_from_vcs(self, output): + "Update state based on the VCS state e.g the output of git describe" + split = output[1:].split('-') + dot_split = split[0].split('.') + for prefix in ['a','b','rc']: + if prefix in dot_split[-1]: + prefix_split = dot_split[-1].split(prefix) + self._prerelease = prefix + prefix_split[-1] + dot_split[-1] = prefix_split[0] + + + self._release = tuple(int(el) for el in dot_split) + self._commit_count = int(split[1]) + + self._commit = str(split[2][1:]) # Strip out 'g' prefix ('g'=>'git') + + self._dirty = (split[-1]=='dirty') + return self + + def __str__(self): + """ + Version in x.y.z string format. Does not include the "v" + prefix of the VCS version tags, for pip compatibility. + + If the commit count is non-zero or the repository is dirty, + the string representation is equivalent to the output of:: + + git describe --long --match v*.* --dirty + + (with "v" prefix removed). 
+ """ + known_stale = self._known_stale() + if self.release is None and not known_stale: + extracted_directory_tag = self._output_from_file(entry='extracted_directory_tag') + return 'None' if extracted_directory_tag is None else extracted_directory_tag + elif self.release is None and known_stale: + extracted_directory_tag = self._output_from_file(entry='extracted_directory_tag') + if extracted_directory_tag is not None: + return extracted_directory_tag + return '0.0.0+g{SHA}-gitarchive'.format(SHA=self.archive_commit) + + release = '.'.join(str(el) for el in self.release) + prerelease = '' if self.prerelease is None else self.prerelease + + if self.commit_count == 0 and not self.dirty: + return release + prerelease + + commit = self.commit + dirty = '-dirty' if self.dirty else '' + archive_commit = '' + if known_stale: + archive_commit = '-gitarchive' + commit = self.archive_commit + + if archive_commit != '': + postcount = self.commit_count_prefix + '0' + elif self.commit_count not in [0, None]: + postcount = self.commit_count_prefix + str(self.commit_count) + else: + postcount = '' + + components = [release, prerelease, postcount, + '' if commit is None else '+g' + commit, dirty, + archive_commit] + return ''.join(components) + + def __repr__(self): + return str(self) + + def abbrev(self): + """ + Abbreviated string representation of just the release number. + """ + return '.'.join(str(el) for el in self.release) + + def verify(self, string_version=None): + """ + Check that the version information is consistent with the VCS + before doing a release. If supplied with a string version, + this is also checked against the current version. Should be + called from setup.py with the declared package version before + releasing to PyPI. 
+ """ + if string_version and string_version != str(self): + raise Exception("Supplied string version does not match current version.") + + if self.dirty: + raise Exception("Current working directory is dirty.") + + if self.expected_release is not None and self.release != self.expected_release: + raise Exception("Declared release does not match current release tag.") + + if self.commit_count !=0: + raise Exception("Please update the VCS version tag before release.") + + if (self._expected_commit is not None + and not self._expected_commit.startswith( "$Format")): + raise Exception("Declared release does not match the VCS version tag") + + + + @classmethod + def get_setup_version(cls, setup_path, reponame, describe=False, + dirty='report', pkgname=None, archive_commit=None): + """ + Helper for use in setup.py to get the version from the .version file (if available) + or more up-to-date information from git describe (if available). + + Assumes the __init__.py will be found in the directory + {reponame}/__init__.py relative to setup.py unless pkgname is + explicitly specified in which case that name is used instead. + + If describe is True, the raw string obtained from git described is + returned which is useful for updating the .version file. + + The dirty policy can be one of 'report', 'strip', 'raise'. If it is + 'report' the version string may end in '-dirty' if the repository is + in a dirty state. If the policy is 'strip', the '-dirty' suffix + will be stripped out if present. If the policy is 'raise', an + exception is raised if the repository is in a dirty state. This can + be useful if you want to make sure packages are not built from a + dirty repository state. 
+ """ + pkgname = reponame if pkgname is None else pkgname + policies = ['raise','report', 'strip'] + if dirty not in policies: + raise AssertionError("get_setup_version dirty policy must be in %r" % policies) + + fpath = os.path.join(setup_path, pkgname, "__init__.py") + version = Version(fpath=fpath, reponame=reponame, archive_commit=archive_commit) + if describe: + vstring = version.git_fetch(as_string=True) + else: + vstring = str(version) + + if version.dirty and dirty == 'raise': + raise AssertionError('Repository is in a dirty state.') + elif version.dirty and dirty=='strip': + return vstring.replace('-dirty', '') + else: + return vstring + + + @classmethod + def extract_directory_tag(cls, setup_path, reponame): + setup_dir = os.path.split(setup_path)[-1] # Directory containing setup.py + prefix = reponame + '-' # Prefix to match + if setup_dir.startswith(prefix): + tag = setup_dir[len(prefix):] + # Assuming the tag is a version if it isn't empty, 'master' and has a dot in it + if tag not in ['', 'master'] and ('.' 
in tag): + return tag + return None + + + @classmethod + def setup_version(cls, setup_path, reponame, archive_commit=None, + pkgname=None, dirty='report'): + info = {} + git_describe = None + pkgname = reponame if pkgname is None else pkgname + try: + # Will only work if in a git repo and git is available + git_describe = Version.get_setup_version(setup_path, + reponame, + describe=True, + dirty=dirty, + pkgname=pkgname, + archive_commit=archive_commit) + + if git_describe is not None: + info['git_describe'] = git_describe + except: pass + + if git_describe is None: + extracted_directory_tag = Version.extract_directory_tag(setup_path, reponame) + if extracted_directory_tag is not None: + info['extracted_directory_tag'] = extracted_directory_tag + try: + with open(os.path.join(setup_path, pkgname, '.version'), 'w') as f: + f.write(json.dumps({'extracted_directory_tag':extracted_directory_tag})) + except: + print('Error in setup_version: could not write .version file.') + + + info['version_string'] = Version.get_setup_version(setup_path, + reponame, + describe=False, + dirty=dirty, + pkgname=pkgname, + archive_commit=archive_commit) + try: + with open(os.path.join(setup_path, pkgname, '.version'), 'w') as f: + f.write(json.dumps(info)) + except: + print('Error in setup_version: could not write .version file.') + + return info['version_string'] + + + +def get_setup_version(location, reponame, pkgname=None, archive_commit=None): + """Helper for use in setup.py to get the current version from either + git describe or the .version file (if available). + + Set pkgname to the package name if it is different from the + repository name. + + To ensure git information is included in a git archive, add + setup.py to .gitattributes (in addition to __init__): + ``` + __init__.py export-subst + setup.py export-subst + ``` + Then supply "$Format:%h$" for archive_commit. 
+ + """ + import warnings + pkgname = reponame if pkgname is None else pkgname + if archive_commit is None: + warnings.warn("No archive commit available; git archives will not contain version information") + return Version.setup_version(os.path.dirname(os.path.abspath(location)),reponame,pkgname=pkgname,archive_commit=archive_commit) + + +def get_setupcfg_version(): + """As get_setup_version(), but configure via setup.cfg. + + If your project uses setup.cfg to configure setuptools, and hence has + at least a "name" key in the [metadata] section, you can + set the version as follows: + ``` + [metadata] + name = mypackage + version = attr: autover.version.get_setup_version2 + ``` + + If the repository name is different from the package name, specify + `reponame` as a [tool:autover] option: + ``` + [tool:autover] + reponame = mypackage + ``` + + To ensure git information is included in a git archive, add + setup.cfg to .gitattributes (in addition to __init__): + ``` + __init__.py export-subst + setup.cfg export-subst + ``` + + Then add the following to setup.cfg: + ``` + [tool:autover.configparser_workaround.archive_commit=$Format:%h$] + ``` + + The above being a section heading rather than just a key is + because setuptools requires % to be escaped with %, or it can't + parse setup.cfg...but then git export-subst would not work. 
+ + """ + try: + import configparser + except ImportError: + import ConfigParser as configparser # python2 (also prevents dict-like access) + import re + cfg = "setup.cfg" + autover_section = 'tool:autover' + config = configparser.ConfigParser() + config.read(cfg) + pkgname = config.get('metadata','name') + reponame = config.get(autover_section,'reponame',vars={'reponame':pkgname}) if autover_section in config.sections() else pkgname + + ### + # hack archive_commit into section heading; see docstring + archive_commit = None + archive_commit_key = autover_section+'.configparser_workaround.archive_commit' + for section in config.sections(): + if section.startswith(archive_commit_key): + archive_commit = re.match(r".*=\s*(\S*)\s*",section).group(1) + ### + return get_setup_version(cfg,reponame=reponame,pkgname=pkgname,archive_commit=archive_commit) + + +# from param/version.py aa087db29976d9b7e0f59c29789dfd721c85afd0 +class OldDeprecatedVersion(object): + """ + A simple approach to Python package versioning that supports PyPI + releases and additional information when working with version + control. When obtaining a package from PyPI, the version returned + is a string-formatted rendering of the supplied release tuple. + For instance, release (1,0) tagged as ``v1.0`` in the version + control system will return ``1.0`` for ``str(__version__)``. Any + number of items can be supplied in the release tuple, with either + two or three numeric versioning levels typical. + + During development, a command like ``git describe`` will be used to + compute the number of commits since the last version tag, the + short commit hash, and whether the commit is dirty (has changes + not yet committed). Version tags must start with a lowercase 'v' + and have a period in them, e.g. v2.0, v0.9.8 or v0.1. + + Development versions are supported by setting the dev argument to an + appropriate dev version number. 
The corresponding tag can be PEP440 + compliant (using .devX) of the form v0.1.dev3, v1.9.0.dev2 etc but + it doesn't have to be as the dot may be omitted i.e v0.1dev3, + v1.9.0dev2 etc. + + Also note that when version control system (VCS) information is + used, the comparison operators take into account the number of + commits since the last version tag. This approach is often useful + in practice to decide which version is newer for a single + developer, but will not necessarily be reliable when comparing + against a different fork or branch in a distributed VCS. + + For git, if you want version control information available even in + an exported archive (e.g. a .zip file from GitHub), you can set + the following line in the .gitattributes file of your project:: + + __init__.py export-subst + """ + + def __init__(self, release=None, fpath=None, commit=None, + reponame=None, dev=None, commit_count=0): + """ + :release: Release tuple (corresponding to the current VCS tag) + :commit Short SHA. Set to '$Format:%h$' for git archive support. + :fpath: Set to ``__file__`` to access version control information + :reponame: Used to verify VCS repository name. + :dev: Development version number. None if not a development version. + :commit_count Commits since last release. Set for dev releases. + """ + self.fpath = fpath + self._expected_commit = commit + self.expected_release = release + + self._commit = None if commit in [None, "$Format:%h$"] else commit + self._commit_count = commit_count + self._release = None + self._dirty = False + self.reponame = reponame + self.dev = dev + + @property + def release(self): + "Return the release tuple" + return self.fetch()._release + + @property + def commit(self): + "A specification for this particular VCS version, e.g. 
a short git SHA" + return self.fetch()._commit + + @property + def commit_count(self): + "Return the number of commits since the last release" + return self.fetch()._commit_count + + @property + def dirty(self): + "True if there are uncommited changes, False otherwise" + return self.fetch()._dirty + + + def fetch(self): + """ + Returns a tuple of the major version together with the + appropriate SHA and dirty bit (for development version only). + """ + if self._release is not None: + return self + + self._release = self.expected_release + if not self.fpath: + self._commit = self._expected_commit + return self + + # Only git right now but easily extended to SVN, Mercurial, etc. + for cmd in ['git', 'git.cmd', 'git.exe']: + try: + self.git_fetch(cmd) + break + except EnvironmentError: + pass + return self + + + def git_fetch(self, cmd='git'): + try: + if self.reponame is not None: + # Verify this is the correct repository (since fpath could + # be an unrelated git repository, and param could just have + # been copied/installed into it). + output = run_cmd([cmd, 'remote', '-v'], + cwd=os.path.dirname(self.fpath)) + repo_matches = ['/' + self.reponame + '.git' , + # A remote 'server:reponame.git' can also be referred + # to (i.e. cloned) as `server:reponame`. 
+ '/' + self.reponame + ' '] + if not any(m in output for m in repo_matches): + return self + + output = run_cmd([cmd, 'describe', '--long', '--match', 'v*.*', '--dirty'], + cwd=os.path.dirname(self.fpath)) + except Exception as e: + if e.args[1] == 'fatal: No names found, cannot describe anything.': + raise Exception("Cannot find any git version tags of format v*.*") + # If there is any other error, return (release value still useful) + return self + + self._update_from_vcs(output) + + def _update_from_vcs(self, output): + "Update state based on the VCS state e.g the output of git describe" + split = output[1:].split('-') + if 'dev' in split[0]: + dev_split = split[0].split('dev') + self.dev = int(dev_split[1]) + split[0] = dev_split[0] + # Remove the pep440 dot if present + if split[0].endswith('.'): + split[0] = dev_split[0][:-1] + + self._release = tuple(int(el) for el in split[0].split('.')) + self._commit_count = int(split[1]) + self._commit = str(split[2][1:]) # Strip out 'g' prefix ('g'=>'git') + self._dirty = (split[-1]=='dirty') + return self + + + def __str__(self): + """ + Version in x.y.z string format. Does not include the "v" + prefix of the VCS version tags, for pip compatibility. + + If the commit count is non-zero or the repository is dirty, + the string representation is equivalent to the output of:: + + git describe --long --match v*.* --dirty + + (with "v" prefix removed). 
+ """ + if self.release is None: return 'None' + release = '.'.join(str(el) for el in self.release) + release = '%s.dev%d' % (release, self.dev) if self.dev is not None else release + + if (self._expected_commit is not None) and ("$Format" not in self._expected_commit): + pass # Concrete commit supplied - print full version string + elif (self.commit_count == 0 and not self.dirty): + return release + + dirty_status = '-dirty' if self.dirty else '' + return '%s-%s-g%s%s' % (release, self.commit_count if self.commit_count else 'x', + self.commit, dirty_status) + + def __repr__(self): + return str(self) + + def abbrev(self,dev_suffix=""): + """ + Abbreviated string representation, optionally declaring whether it is + a development version. + """ + return '.'.join(str(el) for el in self.release) + \ + (dev_suffix if self.commit_count > 0 or self.dirty else "") + + + def __eq__(self, other): + """ + Two versions are considered equivalent if and only if they are + from the same release, with the same commit count, and are not + dirty. Any dirty version is considered different from any + other version, since it could potentially have any arbitrary + changes even for the same release and commit count. + """ + if self.dirty or other.dirty: return False + return ((self.release, self.commit_count, self.dev) + == (other.release, other.commit_count, other.dev)) + + def __gt__(self, other): + if self.release == other.release: + if self.dev == other.dev: + return self.commit_count > other.commit_count + elif None in [self.dev, other.dev]: + return self.dev is None + else: + return self.dev > other.dev + else: + return (self.release, self.commit_count) > (other.release, other.commit_count) + + def __lt__(self, other): + if self==other: + return False + else: + return not (self > other) + + + def verify(self, string_version=None): + """ + Check that the version information is consistent with the VCS + before doing a release. 
If supplied with a string version, + this is also checked against the current version. Should be + called from setup.py with the declared package version before + releasing to PyPI. + """ + if string_version and string_version != str(self): + raise Exception("Supplied string version does not match current version.") + + if self.dirty: + raise Exception("Current working directory is dirty.") + + if self.release != self.expected_release: + raise Exception("Declared release does not match current release tag.") + + if self.commit_count !=0: + raise Exception("Please update the VCS version tag before release.") + + if self._expected_commit not in [None, "$Format:%h$"]: + raise Exception("Declared release does not match the VCS version tag") diff --git a/hololinked/server/host_utilities.py b/hololinked/server/host_utilities.py index 1fa5d47..63e5a0a 100644 --- a/hololinked/server/host_utilities.py +++ b/hololinked/server/host_utilities.py @@ -101,7 +101,7 @@ async def login(self, username : str, password : str): return True return False - @post("/app/settings") + @post("/app/settings/new") async def create_app_setting(self, field : str, value : typing.Any): async with self.async_session() as session, session.begin(): session.add(self.appsettings( @@ -111,7 +111,7 @@ async def create_app_setting(self, field : str, value : typing.Any): ) session.commit() - @put("/app/settings") + @post("/app/settings/edit") async def edit_app_setting(self, field : str, value : typing.Any): async with self.async_session() as session, session.begin(): stmt = select(self.appsettings).filter_by(field = field) @@ -121,7 +121,7 @@ async def edit_app_setting(self, field : str, value : typing.Any): session.commit() return setting - @get('/app/settings') + @get('/app/settings/all') async def all_app_settings(self): async with self.async_session() as session: stmt = select(self.appsettings) @@ -129,7 +129,7 @@ async def all_app_settings(self): return {result[self.appsettings.__name__].field : 
result[self.appsettings.__name__].value["value"] for result in data.mappings().all()} - @get('/app') + @get('/app/info/all') async def all_app_settings(self): async with self.async_session() as session: stmt = select(self.appsettings) @@ -139,7 +139,7 @@ async def all_app_settings(self): for result in data.mappings().all()} } - @post('/dashboards') + @post('/dashboards/add') async def add_dashboards(self, name : str, URL : str, description : str): async with self.async_session() as session, session.begin(): session.add(self.dashboards( @@ -149,7 +149,7 @@ async def add_dashboards(self, name : str, URL : str, description : str): )) await session.commit() - @get('/dashboards') + @get('/dashboards/list') async def query_pages(self): async with self.async_session() as session: stmt = select(self.dashboards) @@ -222,7 +222,7 @@ def __init__(self, db_config_file : typing.Union[str, None], zmq_client_pool : M self.remote_object_info = remote_object_info self._uninstantiated_remote_objects : typing.Dict[str, UninstantiatedRemoteObject] = {} - @post('/subscribers') + @post('/subscribe') async def subscribe_to_host(self, host : str, port : int): client = AsyncHTTPClient() try: From 6a834d95d3b3590e447e0dd1e4799bfd6fd418cb Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 21 Jan 2024 10:34:26 +0100 Subject: [PATCH 005/167] moving host utilities to plain HTTP server instead of remote object --- copy_param.bat | 2 + hololinked/server/HTTPServer.py | 7 +- hololinked/server/config.py | 4 +- hololinked/server/host_server.py | 13 +- hololinked/server/host_utilities.py | 578 ++++++++++++++++------------ 5 files changed, 352 insertions(+), 252 deletions(-) create mode 100644 copy_param.bat diff --git a/copy_param.bat b/copy_param.bat new file mode 100644 index 0000000..639c5fa --- /dev/null +++ b/copy_param.bat @@ -0,0 +1,2 @@ +@REM git submodule does not somehow allow flat heirarchy, so we copy the changes from param to 
commit it there. +robocopy .\\hololinked\\param ..\\param\\param package.json README.md LICENSE * /E diff --git a/hololinked/server/HTTPServer.py b/hololinked/server/HTTPServer.py index 45c6c06..122c558 100644 --- a/hololinked/server/HTTPServer.py +++ b/hololinked/server/HTTPServer.py @@ -12,7 +12,8 @@ from time import perf_counter from ..param import Parameterized -from ..param.parameters import Integer, IPAddress, ClassSelector, Selector, TypedList, Boolean, String +from ..param.parameters import (Integer, IPAddress, ClassSelector, Selector, + TypedList, Boolean, String) from .utils import create_default_logger @@ -20,7 +21,7 @@ from .data_classes import HTTPServerResourceData from .serializers import JSONSerializer from .constants import GET, PUT, POST, OPTIONS, DELETE, USE_OBJECT_NAME, CALLABLE -from .webserver_utils import log_resources, log_request, update_resources +from .webserver_utils import log_request, update_resources from .zmq_message_brokers import MessageMappedZMQClientPool from .handlers import (BaseRequestHandler, GetResource, PutResource, OptionsResource, PostResource, DeleteResource, FileHandlerResource) @@ -273,7 +274,7 @@ async def _setup_server(address : str, port : int, logger : logging.Logger, subs # S = TornadoHTTP2Server(Router, ssl_options=ssl_context) # else: S = TornadoHTTP1Server(Router, ssl_options=ssl_context) - S.listen(port = port, address = address) + S.listen(port=port, address=address) __all__ = ['HTTPServer'] \ No newline at end of file diff --git a/hololinked/server/config.py b/hololinked/server/config.py index 1c3f964..4e4b5b7 100644 --- a/hololinked/server/config.py +++ b/hololinked/server/config.py @@ -33,7 +33,9 @@ class Configuration: __slots__ = [ - "APPDATA_DIR", "PRIMARY_HOST", "LOCALHOST_PORT" + "APPDATA_DIR", "PRIMARY_HOST", "LOCALHOST_PORT", + "DB_CONFIG_FILE", "COOKIE_SECRET", + "PWD_HASHER_TIME_COST", "PWD_HASHER_MEMORY_COST" ] def __init__(self): diff --git a/hololinked/server/host_server.py 
b/hololinked/server/host_server.py index 5cfaa17..46d89c6 100644 --- a/hololinked/server/host_server.py +++ b/hololinked/server/host_server.py @@ -4,7 +4,7 @@ from ..param.parameters import String, TypedList from .HTTPServer import HTTPServer from .eventloop import Consumer -from .host_utilities import (ReactClientUtilities, PrimaryHostUtilities, create_client_tables, create_server_tables, +from .host_utilities import (create_tables, create_server_tables, SERVER_INSTANCE_NAME, CLIENT_HOST_INSTANCE_NAME) from .database import create_DB_URL @@ -63,16 +63,7 @@ def all_ok(self, boolean=False): self.consumers = [react_client_utilities, server_side_utilities] return True - def create_databases(self): - URL = create_DB_URL(self.db_config_file) - serverDB = f"{URL}/scadapyserver" - if not database_exists(serverDB): - create_database(serverDB) - create_server_tables(serverDB) - clientDB = f"{URL}/scadapyclient" - if not database_exists(clientDB): - create_database(clientDB) - create_client_tables(clientDB) + diff --git a/hololinked/server/host_utilities.py b/hololinked/server/host_utilities.py index 63e5a0a..5a6c177 100644 --- a/hololinked/server/host_utilities.py +++ b/hololinked/server/host_utilities.py @@ -1,16 +1,25 @@ +import secrets +import os +import base64 import socket import json import asyncio +import ssl import typing from dataclasses import dataclass, asdict, field +from typing import Any -from sqlalchemy import Integer, String, JSON, ARRAY, Boolean +from sqlalchemy import Engine, Integer, String, JSON, ARRAY, Boolean from sqlalchemy import select, create_engine -from sqlalchemy.orm import Session -from sqlalchemy.orm import Mapped, mapped_column, DeclarativeBase +from sqlalchemy.orm import Session, sessionmaker, Mapped, mapped_column, DeclarativeBase +from sqlalchemy_utils import database_exists, create_database, drop_database +from sqlalchemy.ext import asyncio as asyncio_ext from argon2 import PasswordHasher -from tornado.httputil import HTTPServerRequest 
from tornado.httpclient import AsyncHTTPClient, HTTPRequest +from tornado.web import RequestHandler, Application, authenticated +from tornado.escape import json_decode, json_encode +from tornado.httpserver import HTTPServer as TornadoHTTP1Server + from .serializers import JSONSerializer from .remote_parameters import TypedList @@ -20,7 +29,7 @@ from .decorators import post, get, put, delete from .eventloop import Consumer, EventLoop, fork_empty_eventloop from .remote_object import RemoteObject, RemoteObjectDB, RemoteObjectMetaclass -from .database import BaseAsyncDB +from .database import BaseAsyncDB, create_DB_URL SERVER_INSTANCE_NAME = 'server-util' @@ -45,117 +54,346 @@ # */ -class ReactClientUtilities(BaseAsyncDB, RemoteObject): - class TableBase(DeclarativeBase): - pass +global_engine : typing.Optional[Engine] = None +global_session : typing.Optional[Session] = None + + + +class TableBase(DeclarativeBase): + pass - class dashboards(TableBase): - __tablename__ = "dashboards" - - name : Mapped[str] = mapped_column(String(1024), primary_key = True) - URL : Mapped[str] = mapped_column(String(1024), unique = True) - description : Mapped[str] = mapped_column(String(16384)) - - def json(self): - return { - "name" : self.name, - "URL" : self.URL, - "description" : self.description - } +class Dashboards(TableBase): + __tablename__ = "dashboards" + + name : Mapped[str] = mapped_column(String(1024), primary_key=True) + URL : Mapped[str] = mapped_column(String(1024), unique=True) + description : Mapped[str] = mapped_column(String(16384)) + json_specfication : Mapped[typing.Dict[str, typing.Any]] = mapped_column(JSON) - class appsettings(TableBase): - __tablename__ = "appsettings" + def json(self): + return { + "name" : self.name, + "URL" : self.URL, + "description" : self.description, + "json" : self.json_specfication + } - field : Mapped[str] = mapped_column(String(8192), primary_key = True) - value : Mapped[typing.Dict[str, typing.Any]] = mapped_column(JSON) +class 
AppSettings(TableBase): + __tablename__ = "appsettings" - def json(self): - return { - "field" : self.field, - "value" : self.value - } + field : Mapped[str] = mapped_column(String(8192), primary_key=True) + value : Mapped[typing.Dict[str, typing.Any]] = mapped_column(JSON) + + def json(self): + return { + "field" : self.field, + "value" : self.value + } - class login_credentials(TableBase): - __tablename__ = "login_credentials" +class LoginCredentials(TableBase): + __tablename__ = "login_credentials" - username : Mapped[str] = mapped_column(String(1024), primary_key = True) - password : Mapped[str] = mapped_column(String(1024), unique = True) + email : Mapped[str] = mapped_column(String(1024), primary_key=True) + password : Mapped[str] = mapped_column(String(1024), unique=True) + +class Server(TableBase): + __tablename__ = "http_servers" + + hostname : Mapped[str] = mapped_column(String, primary_key=True) + type : Mapped[str] = mapped_column(String) + port : Mapped[int] = mapped_column(Integer) + IPAddress : Mapped[str] = mapped_column(String) + remote_objects : Mapped[typing.List[str]] = mapped_column(ARRAY(String)) + + +def for_authenticated_user(method): + def authenticated_method(self : RequestHandler): + if not self.current_user: + self.set_status(403) + self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.finish() + return + else: + print("current user is : ", self.current_user) + return method(self) + return authenticated_method + + +class PrimaryHostHandler(RequestHandler): + + def check_headers(self): + content_type = self.request.headers.get("Content-Type", None) + if content_type and content_type != "application/json": + self.set_status(500) + self.write({ "error" : "request body is not JSON." 
}) + self.finish() + + def get_current_user(self) -> Any: + return self.get_signed_cookie('user') + + def set_default_headers(self) -> None: + return super().set_default_headers() + + async def options(self): + self.set_status(200) + self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.set_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS") + self.set_header("Access-Control-Allow-Headers", "*") + self.finish() + + +class UsersHandler(PrimaryHostHandler): + + async def post(self): + self.set_status(200) + self.finish() + + async def get(self): + self.set_status(200) + self.finish() - def __init__(self, db_config_file : str, **kwargs) -> None: - RemoteObject.__init__(self, **kwargs) - BaseAsyncDB.__init__(self, database='scadapyclient', serializer=self.json_serializer, - config_file=db_config_file) - - @post('/user/add') - async def add_user(self, username : str, password : str): - pass - - @post('/login') - async def login(self, username : str, password : str): - async with self.async_session() as session: - ph = PasswordHasher(time_cost = 500, memory_cost = 2) - stmt = select(self.login_credentials).filter_by(username = username) - data = await session.execute(stmt) - if data["password"] == ph.hash(password): - return True - return False - @post("/app/settings/new") - async def create_app_setting(self, field : str, value : typing.Any): - async with self.async_session() as session, session.begin(): - session.add(self.appsettings( - field = field, - value = {"value" : value} +class LoginHandler(PrimaryHostHandler): + + async def post(self): + self.check_headers() + try: + body = json_decode(self.request.body) + email = body["email"] + password = body["password"] + async with global_session() as session: + stmt = select(LoginCredentials).filter_by(email=email) + data = await session.execute(stmt) + data : LoginCredentials = data.scalars().all() + if len(data) == 0: + self.set_status(403, "authentication failed") + 
self.write({"reason" : "no username found"}) + else: + ph = PasswordHasher(time_cost=500) + if ph.verify(data[0].password, password): + self.set_status(200) + self.set_signed_cookie("user", email) + else: + self.set_status(403, "authentication failed") + self.write({"reason" : ""}) + except Exception as ex: + self.set_status(500, str(ex)) + self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.finish() + + async def options(self): + self.set_status(200) + self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.set_header("Access-Control-Allow-Methods", "POST, OPTIONS") + self.set_header("Access-Control-Allow-Headers", "*") + self.set_header("Access-Control-Allow-Credentials", True) + self.finish() + + +class AppSettingsHandler(PrimaryHostHandler): + + @for_authenticated_user + async def post(self): + self.check_headers() + try: + value = json_decode(self.request.body["value"]) + async with global_session() as session, session.begin(): + session.add(AppSettings( + field = field, + value = {"value" : value} + ) ) - ) - session.commit() - - @post("/app/settings/edit") - async def edit_app_setting(self, field : str, value : typing.Any): - async with self.async_session() as session, session.begin(): - stmt = select(self.appsettings).filter_by(field = field) - data = await session.execute(stmt) - setting = data.scalar() - setting.value = {"value" : value} - session.commit() - return setting - - @get('/app/settings/all') - async def all_app_settings(self): - async with self.async_session() as session: - stmt = select(self.appsettings) - data = await session.execute(stmt) - return {result[self.appsettings.__name__].field : result[self.appsettings.__name__].value["value"] - for result in data.mappings().all()} - - @get('/app/info/all') - async def all_app_settings(self): - async with self.async_session() as session: - stmt = select(self.appsettings) - data = await session.execute(stmt) - return { - "appsettings" : 
{result[self.appsettings.__name__].field : result[self.appsettings.__name__].value["value"] - for result in data.mappings().all()} + await session.commit() + self.set_status(200) + except Exception as ex: + self.set_status(500) + self.finish() + + @for_authenticated_user + async def patch(self): + self.check_headers() + try: + value = json_decode(self.request.body) + field = value["field"] + value = value["value"] + async with global_session() as session, session.begin(): + stmt = select(AppSettings).filter_by(field = field) + data = await session.execute(stmt) + setting : AppSettings = data.scalar() + setting.value = {"value" : value} + await session.commit() + self.set_status(200) + except Exception as ex: + self.set_status(500) + self.finish() + + @for_authenticated_user + async def get(self): + self.check_headers() + try: + async with global_session() as session: + stmt = select(AppSettings) + data = await session.execute(stmt) + serialized_data = json_encode({result[AppSettings.__name__].field : result[AppSettings.__name__].value["value"] + for result in data.mappings().all()}) + self.set_status(200) + self.set_header("Content-Type", "application/json") + self.write(serialized_data) + except Exception as ex: + self.set_status(500, str(ex)) + self.finish() + + +class DashboardsHandler(PrimaryHostHandler): + + @for_authenticated_user + async def post(self): + self.check_headers() + try: + data = json_decode(self.request.body) + async with global_session() as session, session.begin(): + session.add(Dashboards(**data)) + await session.commit() + self.set_status(200) + self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + except Exception as ex: + self.set_status(500, str(ex)) + self.finish() + + @for_authenticated_user + async def get(self): + self.check_headers() + try: + async with global_session() as session: + stmt = select(Dashboards) + data = await session.execute(stmt) + serialized_data = json_encode([result[Dashboards.__name__]._json() 
for result + in data.mappings().all()]) + self.set_status(200) + self.set_header("Content-Type", "application/json") + self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.write(serialized_data) + except Exception as ex: + self.set_status(500, str(ex)) + self.finish() + + +class SubscribersHandler(PrimaryHostHandler): + + @for_authenticated_user + async def post(self): + if self.request.headers["Content-Type"] == "application/json": + self.set_status(200) + server = SubscribedHTTPServers(**json_decode(self.request.body)) + async with global_session() as session, session.begin(): + session.add(server) + await session.commit() + self.finish() + + @for_authenticated_user + async def get(self): + self.set_status(200) + self.set_header("Content-Type", "application/json") + async with global_session() as session: + result = select(Server) + self.write(json_encode(result.scalars().all())) + + +class MainHandler(PrimaryHostHandler): + + async def get(self): + self.check_headers() + self.set_status(200) + self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.write("

I am alive!!!

") + self.finish() + + +def create_primary_host(config_file : str, ssl_context : ssl.SSLContext, **server_settings) -> TornadoHTTP1Server: + URL = f"{create_DB_URL(config_file)}/hololinked-host" + if not database_exists(URL): + try: + create_database(URL) + sync_engine = create_engine(URL) + TableBase.metadata.create_all(sync_engine) + create_tables(sync_engine) + create_credentials(sync_engine) + except Exception as ex: + drop_database(URL) + raise ex from None + + global global_engine, global_session + URL = f"{create_DB_URL(config_file, True)}/hololinked-host" + global_engine = asyncio_ext.create_async_engine(URL, echo=True) + global_session = sessionmaker(global_engine, expire_on_commit=True, + class_=asyncio_ext.AsyncSession) # type: ignore + + app = Application([ + (r"/", MainHandler), + (r"/users", UsersHandler), + (r"/dashboards", DashboardsHandler), + (r"/settings", AppSettingsHandler), + (r"/subscribers", SubscribersHandler), + (r"/login", LoginHandler) + ], cookie_secret=base64.b64encode(os.urandom(32)).decode('utf-8') , **server_settings) + return TornadoHTTP1Server(app, ssl_options=ssl_context) + + + +def create_tables(engine): + with Session(engine) as session, session.begin(): + # Pages + session.add(AppSettings( + field = 'dashboards', + value = { + 'deleteWithoutAsking' : True, + 'showRecentlyUsed' : True} + )) + + # login page + session.add(AppSettings( + field = 'login', + value = { + 'footer' : '', + 'footerLink' : '', + 'displayFooter' : True } + )) + + # server + session.add(AppSettings( + field = 'servers', + value = { + 'allowHTTP' : False + } + )) - @post('/dashboards/add') - async def add_dashboards(self, name : str, URL : str, description : str): - async with self.async_session() as session, session.begin(): - session.add(self.dashboards( - name = name, - URL = URL, - description = description - )) - await session.commit() + # remote object wizard + session.add(AppSettings( + field = 'remoteObjectViewer' , + value = { + 
'stringifyConsoleOutput' : False, + 'consoleDefaultMaxEntries' : 15, + 'consoleDefaultWindowSize' : 500, + 'consoleDefaultFontSize' : 16, + 'stringifyLogViewerOutput' : False, + 'logViewerDefaultMaxEntries' : 10, + 'logViewerDefaultOutputWindowSize' : 1000, + 'logViewerDefaultFontSize' : 16 + } + )) + session.commit() - @get('/dashboards/list') - async def query_pages(self): - async with self.async_session() as session: - stmt = select(self.dashboards) - data = await session.execute(stmt) - return [result[self.dashboards.__name__] for result in data.mappings().all()] +def create_credentials(sync_engine): + print("Requested primary host seems to use a new database. Give username and password (not for database server, but for client logins from hololinked-portal) : ") + email = input("email-id (not collected anywhere else excepted your own database) : ") + password = input("password : ") + with Session(sync_engine) as session, session.begin(): + ph = PasswordHasher(time_cost=500) + session.add(LoginCredentials(email=email, password=ph.hash(password))) + session.commit() @dataclass @@ -354,147 +592,13 @@ def __init__(self, db_config_file : str, server_network_interface : str, port : https=False ) - @post('/subscription') - def subscription(self, hostname : str, port : int, type : str, https : bool, *, request : HTTPServerRequest): - server = SubscribedHTTPServers( - hostname=hostname, - IPAddress=request.remote_ip, - port=port, - type=type, - https=https - ) - self.subscribers.append(server) - - @get('/subscribers') - def get_subscribers(self): - return {"subscribers" : self.subscribers + [self.own_info]} - - @post('/starter/run') - async def starter(self): - pass - - -class PrimaryHostUtilities(PCHostUtilities): - - type : str = 'PRIMARY_HOST' - - class TableBase(DeclarativeBase): - pass - - class http_server(TableBase): - __tablename__ = "http_servers" - - hostname : Mapped[str] = mapped_column(String, primary_key = True) - type : Mapped[str] = mapped_column(String) 
- port : Mapped[int] = mapped_column(Integer) - IPAddress : Mapped[str] = mapped_column(String) - remote_objects : Mapped[typing.List[str]] = mapped_column(ARRAY(String)) - def __init__(self, db_config_file : str, server_network_interface : str, port : int, **kwargs) -> None: - super().__init__(db_config_file = db_config_file, server_network_interface = server_network_interface, - port = port, **kwargs) - self.own_info = SubscribedHTTPServers( - hostname = socket.gethostname(), - IPAddress = get_IP_from_interface(server_network_interface), - port = port, - type = self.type, - https=False - ) - -# remote_object_info = [dict( -# instance_name = 'server-util', -# **self.class_info() -# ), -# dict( -# instance_name = 'dashboard-util', -# classname = ReactClientUtilities.__name__, -# script = os.path.dirname(os.path.abspath(inspect.getfile(ReactClientUtilities))) -# )], - -# remote_object_info = [dict( -# instance_name = 'server-util', -# **self.class_info() -# )], - - - def create_server_tables(serverDB): engine = create_engine(serverDB) PrimaryHostUtilities.TableBase.metadata.create_all(engine) RemoteObjectDB.TableBase.metadata.create_all(engine) engine.dispose() -def create_client_tables(clientDB): - engine = create_engine(clientDB) - ReactClientUtilities.TableBase.metadata.create_all(engine) - with Session(engine) as session, session.begin(): - # Pages - session.add(ReactClientUtilities.appsettings( - field = 'dashboardsDeleteWithoutAsking', - value = {'value' : True} - )) - session.add(ReactClientUtilities.appsettings( - field = 'dashboardsShowRecentlyUsed', - value = {'value' : True} - )) - - # login page - session.add(ReactClientUtilities.appsettings( - field = 'loginFooter', - value = {'value' : ''} - )) - session.add(ReactClientUtilities.appsettings( - field = 'loginFooterLink', - value = {'value' : ''} - )) - session.add(ReactClientUtilities.appsettings( - field = 'loginDisplayFooter', - value = {'value' : True} - )) - - # server - 
session.add(ReactClientUtilities.appsettings( - field = 'serversAllowHTTP', - value = {'value' : False} - )) - - # remote object wizard - session.add(ReactClientUtilities.appsettings( - field = 'remoteObjectViewerConsoleStringifyOutput', - value = {'value' : False} - )) - session.add(ReactClientUtilities.appsettings( - field = 'remoteObjectViewerConsoleDefaultMaxEntries', - value = {'value' : 15} - )) - session.add(ReactClientUtilities.appsettings( - field = 'remoteObjectViewerConsoleDefaultWindowSize', - value = {'value' : 500} - )) - session.add(ReactClientUtilities.appsettings( - field = 'remoteObjectViewerConsoleDefaultFontSize', - value = {'value' : 16} - )) - - session.add(ReactClientUtilities.appsettings( - field = 'logViewerStringifyOutput', - value = {'value' : False} - )) - session.add(ReactClientUtilities.appsettings( - field = 'logViewerDefaultMaxEntries', - value = {'value' : 10} - )) - session.add(ReactClientUtilities.appsettings( - field = 'logViewerDefaultOutputWindowSize', - value = {'value' : 1000} - )) - session.add(ReactClientUtilities.appsettings( - field = 'logViewerDefaultFontSize', - value = {'value' : 16} - )) - - session.commit() - engine.dispose() -__all__ = ['ReactClientUtilities'] \ No newline at end of file +__all__ = ['create_primary_host'] \ No newline at end of file From 03ff6bbb8106208cdc2ad88cd0d3a20ad86949a3 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 27 Jan 2024 14:54:15 +0100 Subject: [PATCH 006/167] some rework of remote-object to get rid of useless parameters --- hololinked/server/constants.py | 4 +- hololinked/server/data_classes.py | 23 +- hololinked/server/proxy_client.py | 11 +- hololinked/server/remote_object.py | 347 ++++++++++------------- hololinked/server/zmq_message_brokers.py | 2 +- 5 files changed, 167 insertions(+), 220 deletions(-) diff --git a/hololinked/server/constants.py b/hololinked/server/constants.py index 90f7b23..4981f65 100644 --- 
a/hololinked/server/constants.py +++ b/hololinked/server/constants.py @@ -28,7 +28,7 @@ # regex logic states_regex : str = '[A-Za-z_]+[A-Za-z_ 0-9]*' url_regex : str = r'[\-a-zA-Z0-9@:%._\/\+~#=]{1,256}' -instance_name_regex : str = r'[A-Za-z]+[A-Za-z_0-9\-\/]*' + # HTTP request methods GET : str = 'GET' @@ -57,4 +57,4 @@ JSONSerializable = typing.Union[typing.Dict[str, typing.Any], list, str, int, float, None] # ZMQ -ZMQ_PROTOCOLS = Enum('ZMQ_PROTOCOLS', 'TCP IPC') \ No newline at end of file +ZMQ_PROTOCOLS = Enum('ZMQ_PROTOCOLS', 'TCP IPC INPROC') \ No newline at end of file diff --git a/hololinked/server/data_classes.py b/hololinked/server/data_classes.py index c930b2e..a27f46c 100644 --- a/hololinked/server/data_classes.py +++ b/hololinked/server/data_classes.py @@ -69,7 +69,7 @@ def create_dataclass(self, obj : typing.Optional[typing.Any] = None, __use_slots_for_dataclass = True -@dataclass(frozen=True, slots=__use_slots_for_dataclass) +@dataclass# (frozen=True, slots=__use_slots_for_dataclass) class ScadaInfoData: """ This container class is created by the RemoteObject instance because descriptors (used by ScadaInfoValidator) @@ -115,10 +115,10 @@ class HTTPServerResourceData: what : str instance_name : str instruction : str - http_request_as_argument : bool = field( default = False ) - path_format : typing.Optional[str] = field( default=None ) - path_regex : typing.Optional[typing.Pattern] = field( default = None ) - param_convertors : typing.Optional[typing.Dict] = field( default = None ) + http_request_as_argument : bool = field(default=False) + path_format : typing.Optional[str] = field(default=None) + path_regex : typing.Optional[typing.Pattern] = field(default=None) + param_convertors : typing.Optional[typing.Dict] = field( default=None) def __init__(self, *, what : str, instance_name : str, fullpath : str, instruction : str, http_request_as_argument : bool = False) -> None: @@ -173,7 +173,7 @@ def compile_path(self): self.param_convertors = 
param_convertors -@dataclass(frozen = True) +@dataclass class HTTPServerEventData: """ Used by the HTTPServer instance to subscribe to events published by RemoteObject at a certain address. @@ -189,8 +189,8 @@ def json(self): -@dataclass(frozen = True) -class ProxyResourceData: +@dataclass +class RPCResourceData: """ Used by Proxy objects to fill attributes & methods in a proxy class. """ @@ -210,6 +210,13 @@ def json(self): def get_dunder_attr(self, __dunder_name : str): return getattr(self, __dunder_name.strip('_')) + def __getstate__(self): + return self.json() + + def __setstate__(self, values): + for key, value in values.items(): + setattr(self, key, value) + @dataclass class GUIResources: instance_name : str diff --git a/hololinked/server/proxy_client.py b/hololinked/server/proxy_client.py index b152574..cabaf25 100644 --- a/hololinked/server/proxy_client.py +++ b/hololinked/server/proxy_client.py @@ -22,15 +22,15 @@ class ObjectProxy: 'instance_name', 'logger', 'timeout', '_timeout', ]) - def __init__(self, instance_name : str, timeout : float = 5, load_remote_object = True, **kwargs) -> None: + def __init__(self, instance_name : str, timeout : float = 5, load_remote_object = True, protocol : str = 'TCP', **serializer) -> None: self.instance_name = instance_name - self._client_ID = instance_name+current_datetime_ms_str() + self._client_ID = instance_name + current_datetime_ms_str() self.logger = logging.Logger(self._client_ID) self.timeout = timeout # compose ZMQ client in Proxy client so that all sending and receiving is # done by the ZMQ client and not by the Proxy client directly. 
Proxy client only # bothers mainly about __setattr__ and _getattr__ - self._client = SyncZMQClient(instance_name, self._client_ID, client_type=PROXY, **kwargs) + self._client = SyncZMQClient(instance_name, self._client_ID, client_type=PROXY, **serializer) if load_remote_object: self.load_remote_object() @@ -192,7 +192,10 @@ def load_remote_object(self): reply : SingleLevelNestedJSON = fetch()[5]["returnValue"] for name, data in reply.items(): - data = ProxyResourceData(**data) + if isinstance(data, dict): + data = ProxyResourceData(**data) + elif not isinstance(data, ProxyResourceData): + raise RuntimeError("Logic error - unpickled info about server not instance of ProxyResourceData") if data.what == CALLABLE: _add_method(self, _RemoteMethod(self._client, data.instruction), data) elif data.what == ATTRIBUTE: diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index 5631c28..e36441f 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -24,7 +24,7 @@ from .serializers import * from .exceptions import BreakInnerLoop from .decorators import get, post, remote_method -from .data_classes import (GUIResources, HTTPServerEventData, HTTPServerResourceData, ProxyResourceData, +from .data_classes import (GUIResources, HTTPServerEventData, HTTPServerResourceData, RPCResourceData, HTTPServerResourceData, FileServerData, ScadaInfoData, ScadaInfoValidator) from .api_platform_utils import postman_item, postman_itemgroup @@ -181,8 +181,8 @@ def set_state(self, value, push_event : bool = True, skip_callbacks : bool = Fal """.format(value, self.states) )) - current_state = property(get_state, set_state, None, doc = """ - read and write current state of the state machine""") + current_state = property(get_state, set_state, None, + doc = """read and write current state of the state machine""") def query(self, info : typing.Union[str, typing.List[str]] ) -> typing.Any: raise NotImplementedError("arbitrary quering of 
{} not possible".format(self.__class__.__name__)) @@ -314,29 +314,52 @@ def parameters(mcs) -> RemoteClassParameters: return mcs._param_container - -class RemoteSubobject(Parameterized, metaclass=RemoteObjectMetaclass): + +class RemoteObject(Parameterized, metaclass=RemoteObjectMetaclass): """ - Subclass from here for remote capable sub objects composed within remote object instance. Does not support - state machine, logger, serializers, dedicated message brokers etc. + Expose your python classes for HTTP methods by subclassing from here. """ + __server_type__ = ServerTypes.REMOTE_OBJECT + state_machine : StateMachine - instance_name = String(default=None, regex=instance_name_regex, constant = True, - doc = """Unique string identifier of the instance used for many operations, + # objects given by user which we need to validate: + instance_name = String(default=None, regex=r'[A-Za-z]+[A-Za-z_0-9\-\/]*', constant=True, + doc="""Unique string identifier of the instance. This value is used for many operations, for example - creating zmq socket address, tables in databases, and to identify the instance - in the HTTP Server & scadapy.webdashboard client - + in the HTTP Server & webdashboard clients - (http(s)://{domain and sub domain}/{instance name}). It is suggested to use - the class name along with a unique name {class name}/{some name}. Instance names must be unique + the class name along with a unique name {class name}/{some unique name}. Instance names must be unique in your entire system.""") # type: ignore - events = RemoteParameter(readonly=True, URL_path='/events', + logger = ClassSelector(class_=logging.Logger, default=None, allow_None=True, + doc = """Logger object to print log messages, should be instance of logging.Logger(). 
default + logger is created if none is supplied.""") # type: ignore + rpc_serializer = ClassSelector(class_=(SerpentSerializer, JSONSerializer, PickleSerializer, str), # DillSerializer, + default='json', + doc="""The serializer that will be used for passing messages in zmq. For custom data + types which have serialization problems, you can subclass the serializers and implement + your own serialization options. Recommended serializer for exchange messages between + Proxy clients and server is Serpent and for HTTP serializer and server is JSON.""") # type: ignore + json_serializer = ClassSelector(class_=JSONSerializer, default=None, allow_None=True, + doc = """Serializer used for sending messages between HTTP server and remote object, + subclass JSONSerializer to implement undealt serialization options.""") # type: ignore + + # remote paramaters + object_info = RemoteParameter(readonly=True, URL_path='/object-info', + doc="obtained information about this object like the class name, script location etc.") # type: ignore + events : typing.Dict = RemoteParameter(readonly=True, URL_path='/events', doc="returns a dictionary with two fields " ) # type: ignore httpserver_resources = RemoteParameter(readonly=True, URL_path='/resources/http', doc="""""" ) # type: ignore - proxy_resources = RemoteParameter(readonly=True, URL_path='/resources/object-proxy', + rpc_resources = RemoteParameter(readonly=True, URL_path='/resources/object-proxy', doc= """object's resources exposed to ProxyClient, similar to http_resources but differs in details.""") # type: ignore + gui_resources : typing.Dict = RemoteParameter(readonly=True, URL_path='/resources/gui', + doc= """object's data read by scadapy webdashboard GUI client, similar to http_resources but differs + in details.""") # type: ignore + GUI = RemoteClassSelector(class_=ReactApp, default=None, allow_None=True, + doc= """GUI applied here will become visible at GUI tab of dashboard tool""") - + def __new__(cls, **kwargs): """ custom 
defined __new__ method to assign some important attributes at instance creation time directly instead of @@ -350,33 +373,45 @@ def __new__(cls, **kwargs): # objects created by us that require no validation but cannot be modified are called _internal_fixed_attributes obj._internal_fixed_attributes = ['_internal_fixed_attributes', 'instance_resources', '_owner'] # objects given by user which we need to validate (mostly descriptors) - obj.instance_name = kwargs.get('instance_name', None) return obj - - def __post_init__(self): + + def __init__(self, instance_name : str, logger : typing.Optional[logging.Logger] = None, log_level : typing.Optional[int] = None, + log_file : typing.Optional[str] = None, logger_remote_access : bool = True, + rpc_serializer : typing.Optional[BaseSerializer] = None, json_serializer : typing.Optional[JSONSerializer] = None, + server_protocols : typing.Optional[typing.Union[typing.List[ZMQ_PROTOCOLS], typing.Tuple[ZMQ_PROTOCOLS], ZMQ_PROTOCOLS]] = None, + db_config_file : typing.Optional[str] = None) -> None: + super().__init__(instance_name=instance_name, logger=logger, + rpc_serializer=rpc_serializer, json_serializer=json_serializer) + + # missing type definitions self.instance_name : str - self.httpserver_resources : typing.Dict - self.proxy_resources : typing.Dict + self.logger : logging.Logger + self.db_engine : RemoteObjectDB + self.rpc_serializer : BaseSerializer + self.json_serializer : JSONSerializer + self.object_info : RemoteObjectDB.RemoteObjectInfo self.events : typing.Dict - self._owner : typing.Optional[typing.Union[RemoteSubobject, RemoteObject]] + self.httpserver_resources : typing.Dict + self.rpc_resources : typing.Dict + self._eventloop_name : str + self._owner : typing.Optional[RemoteObject] self._internal_fixed_attributes : typing.List[str] - - @property - def _event_publisher(self) -> EventPublisher: - try: - return self.event_publisher - except AttributeError: - top_owner = self._owner - while True: - if 
isinstance(top_owner, RemoteObject): - self.event_publisher = top_owner.event_publisher - return self.event_publisher - elif isinstance(top_owner, RemoteSubobject): - top_owner = top_owner._owner - else: - raise RuntimeError(wrap_text("""Error while finding owner of RemoteSubobject, - RemoteSubobject must be composed only within RemoteObject or RemoteSubobject, - otherwise there can be problems.""")) + + self._prepare_logger(log_file=log_file, log_level=log_level, remote_access=logger_remote_access) + self._prepare_message_brokers(server_protocols=server_protocols, rpc_serializer=rpc_serializer, + json_serializer=json_serializer) + self._prepare_state_machine() + self._prepare_DB(db_config_file) + + + def __post_init__(self): + # Never create events before _prepare_instance(), no checks in place + self._owner = None + self._prepare_resources() + self._write_parameters_from_DB() + self.logger.info("initialialised RemoteObject of class {} with instance name {}".format( + self.__class__.__name__, self.instance_name)) + def __setattr__(self, __name: str, __value: typing.Any) -> None: if __name == '_internal_fixed_attributes' or __name in self._internal_fixed_attributes: @@ -395,6 +430,35 @@ def __setattr__(self, __name: str, __value: typing.Any) -> None: super().__setattr__(__name, __value) + def _prepare_logger(self, log_level : int, log_file : str, remote_access : bool = True): + if self.logger is None: + self.logger = create_default_logger('{}/{}'.format(self.__class__.__name__, self.instance_name), + log_level, log_file) + if remote_access and not any(isinstance(handler, RemoteAccessHandler) + for handler in self.logger.handlers): + self._remote_access_loghandler = RemoteAccessHandler(instance_name='logger', maxlen=500, emit_interval=1) + self.logger.addHandler(self._remote_access_loghandler) + else: + for handler in self.logger.handlers: + if isinstance(handler, RemoteAccessHandler): + self._remote_access_loghandler = handler + + + def 
_prepare_message_brokers(self, protocols : typing.Optional[typing.Union[typing.List[ZMQ_PROTOCOLS], + typing.Tuple[ZMQ_PROTOCOLS], ZMQ_PROTOCOLS]]): + self.message_broker = AsyncPollingZMQServer( + instance_name=self.instance_name, + executor_thread_event=threading.Event(), + server_type=self.__server_type__, + protocols=self.server_protocols, json_serializer=self.json_serializer, + proxy_serializer=self.proxy_serializer + ) + self.json_serializer = self.message_broker.json_serializer + self.proxy_serializer = self.message_broker.proxy_serializer + self.event_publisher = EventPublisher(identity=self.instance_name, proxy_serializer=self.proxy_serializer, + json_serializer=self.json_serializer) + + def _prepare_resources(self): """ this function analyses the members of the class which have 'scadapy' variable declared @@ -410,7 +474,7 @@ def _prepare_resources(self): OPTIONS = dict() ) # The following dict will be given to the proxy client - proxy_resources = dict() + rpc_resources = dict() # The following dict will be used by the event loop instance_resources : typing.Dict[str, ScadaInfoData] = dict() # create URL prefix @@ -434,7 +498,7 @@ def _prepare_resources(self): instruction=fullpath, http_request_as_argument=scada_info.http_request_as_argument ) - proxy_resources[fullpath] = ProxyResourceData( + rpc_resources[fullpath] = RPCResourceData( what=CALLABLE, instruction=fullpath, module=getattr(resource, '__module__'), @@ -454,7 +518,7 @@ def _prepare_resources(self): resource._prepare_instance() for http_method, resources in resource.httpserver_resources.items(): httpserver_resources[http_method].update(resources) - proxy_resources.update(resource.proxy_resources) + rpc_resources.update(resource.rpc_resources) instance_resources.update(resource.instance_resources) # Events for name, resource in inspect.getmembers(self, lambda o : isinstance(o, Event)): @@ -504,7 +568,7 @@ def _prepare_resources(self): instruction=fullpath + '/' + WRITE ) - 
proxy_resources[fullpath] = ProxyResourceData( + rpc_resources[fullpath] = RPCResourceData( what=ATTRIBUTE, instruction=fullpath, module=__file__, @@ -527,146 +591,10 @@ def _prepare_resources(self): instance_resources[fullpath+'/'+WRITE] = scada_info # The above for-loops can be used only once, the division is only for readability # _internal_fixed_attributes - allowed to set only once - self._proxy_resources = proxy_resources + self._rpc_resources = rpc_resources self._httpserver_resources = httpserver_resources self.instance_resources = instance_resources - - def _prepare_instance(self): - """ - iterates through the members of the Remote Object to identify the information that requires to be supplied - to the HTTPServer, ProxyClient and EventLoop. Called by default in the __init__ of RemoteObject. - """ - self._prepare_resources() - - @httpserver_resources.getter - def _get_httpserver_resources(self) -> typing.Dict[str, typing.Dict[str, typing.Any]]: - return self._httpserver_resources - - @proxy_resources.getter - def _get_proxy_resources(self) -> typing.Dict[str, typing.Dict[str, typing.Any]]: - return self._proxy_resources - - - -class RemoteObject(RemoteSubobject): - """ - Expose your python classes for HTTP methods by subclassing from here. - """ - __server_type__ = ServerTypes.USER_REMOTE_OBJECT - state_machine : StateMachine - - # objects given by user which we need to validate: - eventloop_name = String(default=None, constant=True, - doc = """internally managed, this value is the instance name of the eventloop where the object - is running. 
Multiple objects can accept requests in a single event loop.""") # type: ignore - log_level = Selector(objects=[logging.DEBUG, logging.INFO, logging.ERROR, - logging.CRITICAL, logging.ERROR], default = logging.INFO, allow_None = False, - doc="""One can either supply a logger or simply set this parameter to to create an internal logger - with specified level.""") # type: ignore - logger = ClassSelector(class_=logging.Logger, default=None, allow_None=True, - doc = """Logger object to print log messages, should be instance of logging.Logger(). default - logger is created if none is supplied.""") # type: ignore - logfile = String (default=None, allow_None=True, - doc="""Logs can be also be stored in a file when a valid filename is passed.""") # type: ignore - logger_remote_access = Boolean(default=False, - doc="""Set it to true to add a default RemoteAccessHandler to the logger""" ) - db_config_file = String (default=None, allow_None=True, - doc="""logs can be also be stored in a file when a valid filename is passed.""") # type: ignore - server_protocols = TupleSelector(default=None, allow_None=True, accept_list=True, - objects=[ZMQ_PROTOCOLS.IPC, ZMQ_PROTOCOLS.TCP], constant=True, - doc="""Protocols to be supported by the ZMQ Server that accepts requests for the RemoteObject - instance. Options are TCP, IPC or both, represented by the Enum ZMQ_PROTOCOLS. - Either pass one or both as list or tuple""") # type: ignore - proxy_serializer = ClassSelector(class_=(SerpentSerializer, JSONSerializer, PickleSerializer, str), # DillSerializer, - default='json', - doc="""The serializer that will be used for passing messages in zmq. For custom data - types which have serialization problems, you can subclass the serializers and implement - your own serialization options. 
Recommended serializer for exchange messages between - Proxy clients and server is Serpent and for HTTP serializer and server is JSON.""") # type: ignore - json_serializer = ClassSelector(class_=JSONSerializer, default=None, allow_None=True, - doc = """Serializer used for sending messages between HTTP server and remote object, - subclass JSONSerializer to implement undealt serialization options.""") # type: ignore - - # remote paramaters - object_info = RemoteParameter(readonly=True, URL_path='/object-info', - doc="obtained information about this object like the class name, script location etc.") # type: ignore - events : typing.Dict = RemoteParameter(readonly=True, URL_path='/events', - doc="returns a dictionary with two fields " ) # type: ignore - gui_resources : typing.Dict = RemoteParameter(readonly=True, URL_path='/resources/gui', - doc= """object's data read by scadapy webdashboard GUI client, similar to http_resources but differs - in details.""") # type: ignore - GUI = RemoteClassSelector(class_=ReactApp, default=None, allow_None=True, - doc= """GUI applied here will become visible at GUI tab of dashboard tool""") - - - def __new__(cls, **kwargs): - """ - custom defined __new__ method to assign some important attributes at instance creation time directly instead of - super().__init__(instance_name = val1 , users_own_kw_argument1 = users_val1, ..., users_own_kw_argumentn = users_valn) - method. The lowest child's __init__ is always called first and then the code reaches the __init__ of RemoteObject. - Therefore, when the user passes arguments to his own RemoteObject descendent, they have to again pass some required - information (like instance_name) to the __init__ of super() a second time with proper keywords. - To avoid this hassle, we create this __new__. super().__init__() in a descendent is still not optional though. 
- """ - obj = super().__new__(cls, **kwargs) - # objects given by user which we need to validate (descriptors) - obj.logfile = kwargs.get('logfile', None) - obj.log_level = kwargs.get('log_level', logging.INFO) - obj.logger_remote_access = obj.__class__.logger_remote_access if isinstance(obj.__class__.logger_remote_access, bool) else kwargs.get('logger_remote_access', False) - obj.logger = kwargs.get('logger', None) - obj.db_config_file = kwargs.get('db_config_file', None) - obj.eventloop_name = kwargs.get('eventloop_name', None) - obj.server_protocols = kwargs.get('server_protocols', (ZMQ_PROTOCOLS.IPC, ZMQ_PROTOCOLS.TCP)) - obj.json_serializer = kwargs.get('json_serializer', None) - obj.proxy_serializer = kwargs.get('proxy_serializer', 'json') - return obj - - - def __init__(self, **params) -> None: - # Signature of __new__ and __init__ is generally the same, however one reaches this __init__ - # through the child class. Currently it is not expected to pass the instance_name, log_level etc. 
- # once through instantian and once again through child class __init__ - for attr in ['instance_name', 'logger', 'log_level', 'logfile', 'db_config_file', 'eventloop_name', - 'server_protocols', 'json_serializer', 'proxy_serializer']: - params.pop(attr, None) - super().__init__(**params) - - # missing type definitions - self.eventloop_name : str - self.logfile : str - self.log_level : int - self.logger : logging.Logger - self.db_engine : RemoteObjectDB - self.server_protocols : typing.Tuple[Enum] - self.json_serializer : JSONSerializer - self.proxy_serializer : BaseSerializer - self.object_info : RemoteObjectDB.RemoteObjectInfo - - self._prepare_message_brokers() - self._prepare_state_machine() - - def __post_init__(self): - super().__post_init__() - # Never create events before _prepare_instance(), no checks in place - self._owner = None - self._prepare_instance() - self._prepare_DB() - self.logger.info("initialialised RemoteObject of class {} with instance name {}".format( - self.__class__.__name__, self.instance_name)) - - def _prepare_message_brokers(self): - self.message_broker = AsyncPollingZMQServer( - instance_name=self.instance_name, - executor_thread_event=threading.Event(), - server_type=self.__server_type__, - protocols=self.server_protocols, json_serializer=self.json_serializer, - proxy_serializer=self.proxy_serializer - ) - self.json_serializer = self.message_broker.json_serializer - self.proxy_serializer = self.message_broker.proxy_serializer - self.event_publisher = EventPublisher(identity=self.instance_name, proxy_serializer=self.proxy_serializer, - json_serializer=self.json_serializer) def _create_object_info(self, script_path : typing.Optional[str] = None): if not script_path: @@ -686,13 +614,14 @@ def _create_object_info(self, script_path : typing.Optional[str] = None): level_type = ConfigInfo.USER_MANAGED.name, ) - def _prepare_DB(self): - if not self.db_config_file: + + def _prepare_DB(self, config_file : str = None): + if not config_file: 
self._object_info = self._create_object_info() return # 1. create engine - self.db_engine = RemoteObjectDB(instance_name = self.instance_name, serializer = self.proxy_serializer, - config_file = self.db_config_file) + self.db_engine = RemoteObjectDB(instance_name=self.instance_name, serializer=self.rpc_serializer, + config_file=config_file) # 2. create an object metadata to be used by different types of clients object_info = self.db_engine.fetch_own_info() if object_info is None: @@ -705,6 +634,8 @@ def _prepare_DB(self): You might be reusing an instance name of another subclass and did not remove the old data from database. Please clean the database using database tools to start fresh. """)) + + def _write_parameters_from_DB(self): self.db_engine.create_missing_db_parameters(self.__class__.parameters.db_init_objects) # 4. read db_init and db_persist objects for db_param in self.db_engine.read_all_parameters(): @@ -717,27 +648,25 @@ def _prepare_state_machine(self): if hasattr(self, 'state_machine'): self.state_machine._prepare(self) self.logger.debug("setup state machine") - - @logger.getter - def _get_logger(self) -> logging.Logger: - return self._logger - - @logger.setter # type: ignore - def _set_logger(self, value : logging.Logger): - if value is None: - self._logger = create_default_logger('{}|{}'.format(self.__class__.__name__, self.instance_name), - self.log_level, self.logfile) - if self.logger_remote_access and not any(isinstance(handler, RemoteAccessHandler) - for handler in self.logger.handlers): - self.remote_access_handler = RemoteAccessHandler(instance_name='logger', maxlen=500, emit_interval=1) - self.logger.addHandler(self.remote_access_handler) - else: - for handler in self.logger.handlers: - if isinstance(handler, RemoteAccessHandler): - self.remote_access_handler = handler - else: - self._logger = value + + @property + def _event_publisher(self) -> EventPublisher: + try: + return self.event_publisher + except AttributeError: + top_owner = 
self._owner + while True: + if isinstance(top_owner, RemoteObject): + self.event_publisher = top_owner.event_publisher + return self.event_publisher + elif isinstance(top_owner, RemoteSubobject): + top_owner = top_owner._owner + else: + raise RuntimeError(wrap_text("""Error while finding owner of RemoteSubobject, + RemoteSubobject must be composed only within RemoteObject or RemoteSubobject, + otherwise there can be problems.""")) + @object_info.getter def _get_object_info(self): try: @@ -756,6 +685,14 @@ def _get_events(self) -> typing.Dict[str, typing.Any]: address = self.event_publisher.socket_address ) for event in self.event_publisher.events } + + @httpserver_resources.getter + def _get_httpserver_resources(self) -> typing.Dict[str, typing.Dict[str, typing.Any]]: + return self._httpserver_resources + + @rpc_resources.getter + def _get_rpc_resources(self) -> typing.Dict[str, typing.Dict[str, typing.Any]]: + return self._rpc_resources @gui_resources.getter def _get_gui_resources(self): @@ -768,11 +705,11 @@ def _get_gui_resources(self): ) for instruction, scada_info in self.instance_resources.items(): if scada_info.iscallable: - gui_resources.methods[instruction] = self.proxy_resources[instruction].json() + gui_resources.methods[instruction] = self.rpc_resources[instruction].json() gui_resources.methods[instruction]["scada_info"] = scada_info.json() # to check - apparently the recursive json() calling does not reach inner depths of a dict, # therefore we call json ourselves - gui_resources.methods[instruction]["owner"] = self.proxy_resources[instruction].qualname.split('.')[0] + gui_resources.methods[instruction]["owner"] = self.rpc_resources[instruction].qualname.split('.')[0] gui_resources.methods[instruction]["owner_instance_name"] = scada_info.bound_obj.instance_name gui_resources.methods[instruction]["type"] = 'classmethod' if isinstance(scada_info.obj, classmethod) else '' gui_resources.methods[instruction]["signature"] = get_signature(scada_info.obj)[0] 
@@ -808,7 +745,7 @@ def _get_gui_resources(self): return gui_resources @get(URL_path='/resources/postman-collection') - def postman_collection(self, domain_prefix : str = 'https://localhost:8080') -> postman_collection: + def postman_collection(self, domain_prefix : str) -> postman_collection: try: return self._postman_collection except AttributeError: diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py index 644a6be..019b05b 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -37,7 +37,7 @@ class ServerTypes(Enum): UNKNOWN_TYPE = b'UNKNOWN_TYPE' EVENTLOOP = b'EVENTLOOP' - USER_REMOTE_OBJECT = b'USER_REMOTE_OBJECT' + REMOTE_OBJECT = b'REMOTE_OBJECT' POOL = b'POOL' From 2c6a6dd415a50df11cd078018e8f57b984370c69 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 22:31:38 +0100 Subject: [PATCH 007/167] tcp socket search end port - name fix in slots --- hololinked/server/config.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/hololinked/server/config.py b/hololinked/server/config.py index 4e4b5b7..d350124 100644 --- a/hololinked/server/config.py +++ b/hololinked/server/config.py @@ -1,5 +1,5 @@ # adapted from pyro - https://github.com/irmen/Pyro5 - see following license -# currently not used correctly because its not correctly integrated to the package +# currently not used correctly because its not integrated to the package """ MIT License @@ -33,7 +33,8 @@ class Configuration: __slots__ = [ - "APPDATA_DIR", "PRIMARY_HOST", "LOCALHOST_PORT", + "TEMP_DIR", "TCP_SOCKET_SEARCH_START_PORT", "TCP_SOCKET_SEARCH_END_PORT", + "PRIMARY_HOST", "LOCALHOST_PORT", "DB_CONFIG_FILE", "COOKIE_SECRET", "PWD_HASHER_TIME_COST", "PWD_HASHER_MEMORY_COST" ] @@ -48,7 +49,10 @@ def reset_variables(self, use_environment : bool = True): Reset to default config items. 
If use_environment is False, won't read environment variables settings (useful if you can't trust your env). """ - self.APPDATA_DIR = tempfile.gettempdir() + "\\hololinked" + self.TEMP_DIR = f"{tempfile.gettempdir()}{os.sep}hololinked" + self.TCP_SOCKET_SEARCH_START_PORT = 60000 + self.TCP_SOCKET_SEARCH_END_PORT = 65535 + return # qualname is not defined if use_environment: @@ -81,7 +85,7 @@ def reset_variables(self, use_environment : bool = True): def reset_actions(self): try: - os.mkdir(self.APPDATA_DIR) + os.mkdir(self.TEMP_DIR) except FileExistsError: pass From f7148dbc575cc8917400822e5d95216b769f62f4 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 22:32:06 +0100 Subject: [PATCH 008/167] renames of dataclass names --- hololinked/server/data_classes.py | 329 ++++++++++++++++++++---------- 1 file changed, 217 insertions(+), 112 deletions(-) diff --git a/hololinked/server/data_classes.py b/hololinked/server/data_classes.py index a27f46c..b598877 100644 --- a/hololinked/server/data_classes.py +++ b/hololinked/server/data_classes.py @@ -1,3 +1,8 @@ +""" +The following is a list of all dataclasses used to store information on the exposed resources on the network +""" + + import typing import platform from enum import Enum @@ -9,32 +14,40 @@ +class RemoteResourceInfoValidator: + """ + A validator class for saving remote access related information on a resource. Currently callables (functions, + methods and those with__call__ ) and class/instance parameter store this information as their own attribute under + the variable ``remote_info``. This class is generally not for consumption by the package-end-user. + The information (and the variable) may be deleted later (currently not done) from these objects under ``_prepare_instance()`` + in RemoteObject class. 
+ Attributes + ---------- -class ScadaInfoValidator: - """ - A validator class for saving remote access related information, this is not for - direct usage by the package-end-user. Both callables (functions, methods and those with - __call__ ) and class/instance attributes store this information as their own attribute - under the name `scada_info`. The information (and the variable) may be deleted later (currently not) - from these objects under _prepare_instance in RemoteObject class. - - Args: - URL_path (str): the path in the URL under which the object is accesible for remote-operations. - Must follow url-regex requirement. If not specified, the name of object - has to be extracted and used. - http_method (str): HTTP method under which the object is accessible. Must be any of specified in - decorator methods. - state (str): State machine state at which the callable will be executed or attribute can be - written (does not apply to read-only attributes). - obj_name (str): the name of the object which will be supplied to the ProxyClient class to populate - its own namespace. For HTTP clients, HTTP method and URL is important and for Proxy clients - (based on ZMQ), the obj_name is important. - iscoroutine (bool): whether the callable should be executed with async requirements - is_method (bool): True when the callable is a function or method and not an arbitrary object with - __call__ method. This is required to decide how the callable is bound/unbound. - is_dunder_callable (bool): Not a function or method, but a callable. Same use case as the previous attribute. - Standard definition of callable is not used in the above two attributes + URL_path : str, default extracted object name + the path in the URL under which the object is accesible. + Must follow url-regex ('[\-a-zA-Z0-9@:%._\/\+~#=]{1,256}') requirement. + If not specified, the name of object will be used. 
Underscores will be converted to dashes + for PEP 8 names and capitial letter converted to small letters with a leading dash(-) for camel case names. + http_method : str, default POST + HTTP method under which the object is accessible. Normally GET, POST, PUT, DELETE or PATCH. + state : str, default None + State machine state at which a callable will be executed or attribute/parameter can be + written. Does not apply to read-only attributes/parameters. + obj_name : str, default extracted object name + the name of the object which will be supplied to the ``ObjectProxy`` class to populate + its own namespace. For HTTP clients, HTTP method and URL is important and for object proxies clients, the + the obj_name is important. + iscoroutine : bool, default False + whether the callable should be executed as an async + iscallable : bool, default False + True for a method or function or callable + isparameter : bool, default False + True for a parameter + http_request_as_argument : bool, default False + if True, http request object will be passed as a argument to a callable. The user is warned to not use this + generally. 
""" URL_path = String(default=USE_OBJECT_NAME) #, regex=url_regex) http_method = TupleSelector(default=POST, objects=http_methods, accept_list=True) @@ -43,91 +56,141 @@ class ScadaInfoValidator: iscoroutine = Boolean(default=False) iscallable = Boolean(default=False) isparameter = Boolean(default=False) - http_request_as_argument = Boolean(default=False) + request_as_argument = Boolean(default=False) def __init__(self, **kwargs) -> None: + """ + No full-scale checks for unknown keyword arguments as the class + is used by the developer, so please try to be error-proof + """ for key, value in kwargs.items(): setattr(self, key, value) - # No full-scale checks for unknown keyword arguments as the class - # is used by the developer, so please try to be error-proof - def create_dataclass(self, obj : typing.Optional[typing.Any] = None, - bound_obj : typing.Optional[typing.Any] = None) -> "ScadaInfoData": + def to_dataclass(self, obj : typing.Optional[typing.Any] = None) -> "RemoteResource": """ - For a plain, faster and uncomplicated access, a dataclass in created + For a plain, faster and uncomplicated access, a dataclass in created & used by the + event loop. 
+ + Parameters + ---------- + obj : parameter or method + + Returns + ------- + RemoteResource + dataclass equivalent of this object """ - return ScadaInfoData(URL_path=self.URL_path, http_method=self.http_method, + return RemoteResource(URL_path=self.URL_path, http_method=self.http_method, state=tuple(self.state) if self.state is not None else None, obj_name=self.obj_name, iscallable=self.iscallable, iscoroutine=self.iscoroutine, isparameter=self.isparameter, http_request_as_argument=self.http_request_as_argument, - obj=obj, bound_obj=bound_obj) + obj=obj) # http method is manually always stored as a tuple -__use_slots_for_dataclass = False +__dataclass_kwargs = dict(frozen=True) if float('.'.join(platform.python_version().split('.')[0:2])) > 3.10: - __use_slots_for_dataclass = True + __dataclass_kwargs["slots"] = True -@dataclass# (frozen=True, slots=__use_slots_for_dataclass) -class ScadaInfoData: +@dataclass(**__dataclass_kwargs) +class RemoteResource: """ - This container class is created by the RemoteObject instance because descriptors (used by ScadaInfoValidator) - are generally slower. It is used by the eventloop methods while executing the remote object and is stored under - RemoteObject.instance_resources dictionary. + This container class is a mirror of ``RemoteResourceInfoValidator``. It is created by the RemoteObject instance and + used by the EventLoop methods (for example ``execute_once()``) to access resource metadata instead of directly using + ``RemoteResourceInfoValidator`` parameters/attributes. This is because descriptors (used by ``RemoteResourceInfoValidator``) + are generally slower. Instances of this dataclass is stored under ``RemoteObject.instance_resources`` dictionary + for each parameter & method. Events use similar dataclass with metadata but with much less information. + This class is generally not for consumption by the package-end-user. 
""" - URL_path : str - http_method : str state : typing.Optional[typing.Union[typing.Tuple, str]] obj_name : str iscallable : bool iscoroutine : bool isparameter : bool - http_request_as_argument : bool + request_as_argument : bool obj : typing.Any - bound_obj : typing.Any - + def json(self): """ - Serilization method to access the container for HTTP clients. Set use_json_method = True - in serializers.JSONSerializer instance and pass the object to the serializer directly. + Set use_json_method=True in ``serializers.JSONSerializer`` instance and pass the object to the + serializer directly to get the JSON. """ # try: # return self._json # accessing dynamic attr from frozen object # except AttributeError: # always causes attribute error when slots are True json_dict = {} for field in fields(self): - if field.name != 'obj' and field.name != 'bound_obj': + if field.name != 'obj': json_dict[field.name] = getattr(self, field.name) # object.__setattr__(self, '_json', json_dict) # because object is frozen return json_dict @dataclass -class HTTPServerResourceData: +class HTTPResource: """ - Used by HTTPServer instance to decide where to route which instruction + Representation of the resource used by HTTP server for routing and passing information on + what to do with which resource - read, write, execute etc. This class is generally not for + consumption by the package-end-user. + + Attributes + ---------- - 'what' can be an 'ATTRIBUTE' or 'CALLABLE' (based on isparameter or iscallable) and 'instruction' - stores the instructions to be sent to the eventloop. 'instance_name' maps the instruction to a particular - instance of RemoteObject + what : str + is it a parameter, method or event? + instance_name : str + The ``instance_name`` of the remote object which owns the resource. Used by HTTP server to inform + the message brokers to send the message to the correct recipient remote object. 
+ instruction : str + unique string that identifies the resource, generally made using the URL_path or identical to the URL_path ( + qualified URL path {instance name}/{URL path}). + path_format : str + see param converter doc + path_regex : str + see param converter doc + param_converters : str + path format, regex and converter are used by HTTP routers to extract path parameters + """ what : str instance_name : str instruction : str - http_request_as_argument : bool = field(default=False) + request_as_argument : bool = field(default=False) path_format : typing.Optional[str] = field(default=None) path_regex : typing.Optional[typing.Pattern] = field(default=None) - param_convertors : typing.Optional[typing.Dict] = field( default=None) + param_convertors : typing.Optional[typing.Dict] = field(default=None) + # below are all dunders, when something else is added, be careful to remember to edit ObjectProxy logic when necessary + # 'what' can be an 'ATTRIBUTE' or 'CALLABLE' (based on isparameter or iscallable) and 'instruction' + # stores the instructions to be sent to the eventloop. 
'instance_name' maps the instruction to a particular + # instance of RemoteObject + def __init__(self, *, what : str, instance_name : str, fullpath : str, instruction : str, - http_request_as_argument : bool = False) -> None: + request_as_argument : bool = False) -> None: self.what = what self.instance_name = instance_name self.fullpath = fullpath self.instruction = instruction - self.http_request_as_argument = http_request_as_argument - + self.request_as_argument = request_as_argument + + def __getstate__(self): + return self.json() + + def __setstate__(self, values : typing.Dict): + for key, value in values.items(): + setattr(self, key, value) + + def get_dunder_attr(self, __dunder_name : str): + return getattr(self, __dunder_name.strip('_')) + + def json(self): + """ + Set use_json_method=True in ``serializers.JSONSerializer`` instance and pass the object to the + serializer directly to get the JSON. + """ + return asdict(self) + def compile_path(self): path_regex, self.path_format, param_convertors = compile_path(self.fullpath) if self.path_format == self.fullpath and len(param_convertors) == 0: @@ -139,14 +202,76 @@ def compile_path(self): self.path_regex = path_regex self.param_convertors = param_convertors + +@dataclass +class RPCResource: + """ + Representation of resource used by RPC clients for mapping client method calls, parameter read/writes & events + to a server resource. This class is generally not for consumption by the package-end-user. + + Attributes + ---------- + + what : str + is it a parameter, method or event? + instance_name : str + The ``instance_name`` of the remote object which owns the resource. Used by RPC client to inform + message brokers to send the message to the correct recipient. + instruction : str + unique string that identifies the resource, generally made using the URL_path. Although URL path is a HTTP + concept, it is still used as a unique identifier. 
+ name : str + the name of the resource (__name__) + qualname : str + the qualified name of the resource (__qualname__) + doc : str + the docstring of the resource + """ + what : str + instance_name : str + instruction : str + name : str + qualname : str + doc : typing.Optional[str] + + def __init__(self, *, what : str, instance_name : str, fullpath : str, instruction : str, name : str, + qualname : str, doc : str) -> None: + self.what = what + self.instance_name = instance_name + self.fullpath = fullpath + self.instruction = instruction + self.name = name + self.qualname = qualname + self.doc = doc + def json(self): - return { - "what" : self.what, - "instance_name" : self.instance_name, - 'fullpath' : self.fullpath, - "instruction" : self.instruction, - "http_request_as_argument" : self.http_request_as_argument - } + """ + Set use_json_method=True in ``serializers.JSONSerializer`` instance and pass the object to the + serializer directly to get the JSON. + """ + return asdict(self) + + +@dataclass +class ServerSentEventInfo: + """ + event name and socket address of events to be consumed by clients. + This class is generally not for consumption by the package-end-user. + + Attributes + ---------- + event_name : str + name of the event, must be unique + socket_address : str + address of the socket + + """ + what : str + event_name : str + socket_address : str + + def json(self): + return asdict(self) @dataclass @@ -173,60 +298,40 @@ def compile_path(self): self.param_convertors = param_convertors -@dataclass -class HTTPServerEventData: - """ - Used by the HTTPServer instance to subscribe to events published by RemoteObject at a certain address. - The events are sent to the HTTP clients using server-sent-events. - """ - what : str - event_name : str - socket_address : str - - def json(self): - return asdict(self) - - - @dataclass -class RPCResourceData: - """ - Used by Proxy objects to fill attributes & methods in a proxy class. 
+class GUIResources: """ - what : str - instruction : str - # below are all dunders, when something else is added, be careful to remember to edit ObjectProxy logic when necessary - module : typing.Union[str, None] - name : str - qualname : str - doc : typing.Union[str, None] - kwdefaults : typing.Any - defaults : typing.Any - - def json(self): - return asdict(self) - - def get_dunder_attr(self, __dunder_name : str): - return getattr(self, __dunder_name.strip('_')) - - def __getstate__(self): - return self.json() - - def __setstate__(self, values): - for key, value in values.items(): - setattr(self, key, value) + Encapsulation of all information required to populate hololinked-portal GUI for a remote object. + This class is generally not for consumption by the package-end-user. -@dataclass -class GUIResources: + Attributes + ---------- + instance_name : str + instance name of the ``RemoteObject`` + inheritance : List[str] + inheritance tree of the ``RemoteObject`` + classdoc : str + class docstring + parameters : nested JSON (dictionary) + list of defined remote paramters and their metadata + methods : nested JSON (dictionary) + list of defined remote methods + events : nested JSON (dictionary) + list of defined events + documentation : Dict[str, str] + documentation files, name and path + GUI : nested JSON (dictionary) + generated from ``hololinked.webdashboard.ReactApp``, a GUI can be shown under 'default GUI' tab in the portal + """ instance_name : str - events : typing.Dict[str, typing.Any] + inheritance : typing.List[str] classdoc : typing.Optional[typing.List[str]] - inheritance : typing.List - GUI : typing.Optional[typing.Dict] = field( default=None ) - methods : typing.Dict[str, typing.Any] = field( default_factory=dict ) - parameters : typing.Dict[str, typing.Any] = field( default_factory=dict ) - documentation : typing.Optional[typing.Dict[str, typing.Any]] = field( default=None ) + parameters : typing.Dict[str, typing.Any] = field(default_factory=dict) + 
methods : typing.Dict[str, typing.Any] = field(default_factory=dict) + events : typing.Dict[str, typing.Any] = field(default_factory=dict) + documentation : typing.Optional[typing.Dict[str, typing.Any]] = field(default=None) + GUI : typing.Optional[typing.Dict] = field(default=None) def json(self): return asdict(self) \ No newline at end of file From f9463be930d406a85a513f8a87df1f4a785251bb Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 22:32:45 +0100 Subject: [PATCH 009/167] added inproc socket to enum --- hololinked/server/constants.py | 1 + 1 file changed, 1 insertion(+) diff --git a/hololinked/server/constants.py b/hololinked/server/constants.py index 4981f65..d117099 100644 --- a/hololinked/server/constants.py +++ b/hololinked/server/constants.py @@ -3,6 +3,7 @@ import typing from enum import Enum from types import MethodType, FunctionType +import zmq # decorator constants From 7109413e2f6cad1d89a55d61453c4e7b1bc8ea72 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 22:33:10 +0100 Subject: [PATCH 010/167] moved http method decorators to separate file --- hololinked/server/decorators.py | 96 ++++++++++----------------------- 1 file changed, 28 insertions(+), 68 deletions(-) diff --git a/hololinked/server/decorators.py b/hololinked/server/decorators.py index b2ec855..f63fcb0 100644 --- a/hololinked/server/decorators.py +++ b/hololinked/server/decorators.py @@ -1,13 +1,12 @@ +import typing from types import FunctionType from inspect import iscoroutinefunction, getfullargspec -from typing import Any, Optional, Union, Callable -import typing from enum import Enum from functools import wraps from dataclasses import dataclass, asdict, field, fields -from .data_classes import ScadaInfoValidator, ScadaInfoData +from .data_classes import RemoteResourceInfoValidator, RemoteResource from .constants import (USE_OBJECT_NAME, 
UNSPECIFIED, GET, POST, PUT, DELETE, PATCH, WRAPPER_ASSIGNMENTS) from .utils import wrap_text from .path_converter import compile_path @@ -27,7 +26,7 @@ def wrap_method(method : FunctionType): """ @wraps(method, WRAPPER_ASSIGNMENTS) - def wrapped_method(*args, **kwargs) -> Any: + def wrapped_method(*args, **kwargs) -> typing.Any: self = args[0] self.logger.debug("called {} of instance {}".format(method.__qualname__, self.instance_name)) return method(*args, **kwargs) @@ -49,7 +48,7 @@ def is_private_attribute(attr_name: str) -> bool: def remote_method(URL_path : str = USE_OBJECT_NAME, http_method : str = POST, - state : Optional[Union[str, Enum]] = None) -> Callable: + state : typing.Optional[typing.Union[str, Enum]] = None) -> typing.Callable: """Use this function to decorate your methods to be accessible remotely. Args: @@ -67,58 +66,55 @@ def inner(obj): if isinstance(obj, classmethod): obj = obj.__func__ if callable(obj): - if hasattr(obj, 'scada_info') and not isinstance(obj.scada_info, ScadaInfoValidator): + if hasattr(obj, '_remote_info') and not isinstance(obj._remote_info, RemoteResourceInfoValidator): raise NameError( wrap_text( """ - variable name 'scada_info' reserved for scadapy library. - Please do not assign this variable to any other object except scadapy.server.scada_info.ScadaInfoValidator. + variable name '_remote_info' reserved for scadapy library. + Please do not assign this variable to any other object except scadapy.server.data_classes.RemoteResourceInfoValidator. """ ) ) else: - obj.scada_info = ScadaInfoValidator() + obj._remote_info = RemoteResourceInfoValidator() obj_name = obj.__qualname__.split('.') if len(obj_name) > 1: # i.e. 
its a bound method, used by RemoteObject if URL_path == USE_OBJECT_NAME: - obj.scada_info.URL_path = f'/{obj_name[1]}' + obj._remote_info.URL_path = f'/{obj_name[1]}' else: assert URL_path.startswith('/'), f"URL_path should start with '/', please add '/' before '{URL_path}'" - obj.scada_info.URL_path = URL_path - obj.scada_info.obj_name = obj_name[1] + obj._remote_info.URL_path = URL_path + obj._remote_info.obj_name = obj_name[1] elif len(obj_name) == 1 and isinstance(obj, FunctionType): # normal unbound function - used by HTTPServer instance if URL_path is USE_OBJECT_NAME: - obj.scada_info.URL_path = '/{}'.format(obj_name[0]) + obj._remote_info.URL_path = '/{}'.format(obj_name[0]) else: assert URL_path.startswith('/'), f"URL_path should start with '/', please add '/' before '{URL_path}'" - obj.scada_info.URL_path = URL_path - obj.scada_info.obj_name = obj_name[0] + obj._remote_info.URL_path = URL_path + obj._remote_info.obj_name = obj_name[0] else: raise RuntimeError(f"Undealt option for decorating {obj} or decorators wrongly used") if http_method is not UNSPECIFIED: if isinstance(http_method, str): - obj.scada_info.http_method = (http_method,) + obj._remote_info.http_method = (http_method,) else: - obj.scada_info.http_method = http_method + obj._remote_info.http_method = http_method if state is not None: if isinstance(state, (Enum, str)): - obj.scada_info.state = (state,) + obj._remote_info.state = (state,) else: - obj.scada_info.state = state + obj._remote_info.state = state if 'request' in getfullargspec(obj).kwonlyargs: - obj.scada_info.http_request_as_argument = True - obj.scada_info.iscallable = True - obj.scada_info.iscoroutine = iscoroutinefunction(obj) + obj._remote_info.http_request_as_argument = True + obj._remote_info.iscallable = True + obj._remote_info.iscoroutine = iscoroutinefunction(obj) return original else: raise TypeError( - wrap_text( - f""" - target for get()/post()/remote_method() or http method decorator is not a function/method. 
- Given type {type(obj)} - """ - ) + "target for get()/post()/remote_method() or http method decorator is not a function/method.", + f"Given type {type(obj)}" ) + return inner @@ -127,51 +123,15 @@ def remote_parameter(**kwargs): return RemoteParameter(*kwargs) -def get(URL_path = USE_OBJECT_NAME): - """ - use it on RemoteObject subclass methods to be available with GET HTTP request. - method is also by default accessible to proxy clients. - """ - return remote_method(URL_path=URL_path, http_method=GET) - -def post(URL_path = USE_OBJECT_NAME): - """ - use it on RemoteObject subclass methods to be available with POST HTTP request. - method is also by default accessible to proxy clients. - """ - return remote_method(URL_path=URL_path, http_method=POST) - -def put(URL_path = USE_OBJECT_NAME): - """ - use it on RemoteObject subclass methods to be available with PUT HTTP request. - method is also by default accessible to proxy clients. - """ - return remote_method(URL_path=URL_path, http_method=PUT) - -def delete(URL_path = USE_OBJECT_NAME): - """ - use it on RemoteObject subclass methods to be available with DELETE HTTP request. - method is also by default accessible to proxy clients. - """ - return remote_method(URL_path=URL_path, http_method=DELETE) - -def patch(URL_path = USE_OBJECT_NAME): - """ - use it on RemoteObject subclass methods to be available with PATCH HTTP request. - method is also by default accessible to proxy clients. 
- """ - return remote_method(URL_path=URL_path, http_method=PATCH) - - @dataclass class FuncInfo: module : str name : str qualname : str doc : str - kwdefaults : Any - defaults : Any - scadapy : ScadaInfoData + kwdefaults : typing.Any + defaults : typing.Any + scadapy : RemoteResource def json(self): return asdict(self) @@ -218,6 +178,6 @@ def json(self): -__all__ = ['get', 'put', 'post', 'delete', 'patch', 'remote_method', 'remote_parameter'] +__all__ = ['remote_method', 'remote_parameter'] From db3f6e29bc6adb56e7f8dbcfe6e1e58c6da71f2b Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 22:34:24 +0100 Subject: [PATCH 011/167] minor changes and bug fixes --- hololinked/server/HTTPServer.py | 1 - hololinked/server/eventloop.py | 25 +++++++++++++++++++------ hololinked/server/handlers.py | 23 +++++++++++------------ hololinked/server/utils.py | 8 ++++---- hololinked/server/webserver_utils.py | 2 +- 5 files changed, 35 insertions(+), 24 deletions(-) diff --git a/hololinked/server/HTTPServer.py b/hololinked/server/HTTPServer.py index 122c558..64cc1d4 100644 --- a/hololinked/server/HTTPServer.py +++ b/hololinked/server/HTTPServer.py @@ -18,7 +18,6 @@ from .utils import create_default_logger from .decorators import get, put, post, delete, remote_method -from .data_classes import HTTPServerResourceData from .serializers import JSONSerializer from .constants import GET, PUT, POST, OPTIONS, DELETE, USE_OBJECT_NAME, CALLABLE from .webserver_utils import log_request, update_resources diff --git a/hololinked/server/eventloop.py b/hololinked/server/eventloop.py index 245bf7e..21bd5f3 100644 --- a/hololinked/server/eventloop.py +++ b/hololinked/server/eventloop.py @@ -3,7 +3,10 @@ import traceback import importlib import typing - +import zmq +import threading +import time +from collections import deque from .utils import unique_id, wrap_text from .constants import * @@ -11,7 +14,7 @@ from .exceptions import 
* from .decorators import post, get from .remote_object import * -from .zmq_message_brokers import AsyncPollingZMQServer, ZMQServerPool, ServerTypes +from .zmq_message_brokers import AsyncPollingZMQServer, ZMQServerPool, ServerTypes, AsyncZMQClient from .remote_parameter import RemoteParameter from ..param.parameters import Boolean, ClassSelector, TypedList, List as PlainList @@ -42,9 +45,7 @@ class EventLoop(RemoteObject): remote_objects = TypedList(item_type=(RemoteObject, Consumer), bounds=(0,100), allow_None=True, default=None, doc="""list of RemoteObjects which are being executed""") - threaded = Boolean(default=False, doc="""by default False, set to True to use thread pool executor instead of - of simple asyncio. Default executor is a single-threaded asyncio loop. Thread pool executor creates - each thread with its own asyncio loop.""" ) + # Remote Parameters uninstantiated_remote_objects = TypedDict(default=None, allow_None=True, key_type=str, item_type=(Consumer, str)) #, URL_path = '/uninstantiated-remote-objects') @@ -140,8 +141,20 @@ def register_new_consumer(self, instance : RemoteObject): async_loop.call_soon(lambda : asyncio.create_task(self.run_single_target(instance))) def run(self): + self._message_listener = threading.Thread(target=self._run_external_message_listener) + self._message_listener.start() + self._remote_object_executor = threading.Thread(target=self._run_remote_object_executor) + self._remote_object_executor.start() + + def _run_external_message_listener(self): + async_loop = asyncio.get_event_loop() + async_loop.run_until_complete( + asyncio.gather()) + self.logger.info("exiting event loop {}".format(self.instance_name)) + async_loop.close() + + def _run_remote_object_executor(self): async_loop = asyncio.get_event_loop() - # while True: async_loop.run_until_complete( asyncio.gather( *[self.run_single_target(instance) diff --git a/hololinked/server/handlers.py b/hololinked/server/handlers.py index 06304a5..6f7c8e1 100644 --- 
a/hololinked/server/handlers.py +++ b/hololinked/server/handlers.py @@ -16,15 +16,14 @@ from .remote_object import RemoteObject from .eventloop import EventLoop from .utils import current_datetime_ms_str -from .data_classes import (HTTPServerResourceData, HTTPServerEventData, - HTTPServerResourceData, FileServerData) +from .data_classes import FileServerData -UnknownHTTPServerData = HTTPServerResourceData( - what = 'unknown', - instance_name = 'unknown', - fullpath='unknown', - instruction = 'unknown' -) +# UnknownHTTPServerData = HTTPServerResourceData( +# what = 'unknown', +# instance_name = 'unknown', +# fullpath='unknown', +# instruction = 'unknown' +# ) @@ -54,8 +53,8 @@ class BaseRequestHandler(RequestHandler): """ zmq_client_pool : Union[MessageMappedZMQClientPool, None] = None json_serializer : JSONSerializer - resources : Dict[str, Dict[str, Union[HTTPServerResourceData, HTTPServerEventData, - FileServerData]]] + resources : Dict[str, Dict[str, Union[FileServerData, typing.Any]]] + # HTTPServerResourceData, HTTPServerEventData, own_resources : dict local_objects : Dict[str, RemoteObject] @@ -93,7 +92,7 @@ async def handle_func(self, instruction : Tuple[Callable, bool], arguments): "returnValue" : func(**arguments) }) - async def handle_bound_method(self, info : HTTPServerResourceData, arguments): + async def handle_bound_method(self, info, arguments): instance = self.local_objects[info.instance_name] return self.json_serializer.dumps({ "responseStatusCode" : 200, @@ -104,7 +103,7 @@ async def handle_bound_method(self, info : HTTPServerResourceData, arguments): } }) - async def handle_instruction(self, info : HTTPServerResourceData, path_arguments : typing.Optional[typing.Dict] = None) -> None: + async def handle_instruction(self, info, path_arguments : typing.Optional[typing.Dict] = None) -> None: self.set_status(200) self.add_header("Access-Control-Allow-Origin", self.client_address) self.set_header("Content-Type" , "application/json") diff --git 
a/hololinked/server/utils.py b/hololinked/server/utils.py index ac57cf9..6ee6e7b 100644 --- a/hololinked/server/utils.py +++ b/hololinked/server/utils.py @@ -154,10 +154,10 @@ def dashed_URL(word : str) -> str: return word.lower().replace('_', '-') -def create_default_logger(name : str, log_level : int = logging.INFO, logfile = None, +def create_default_logger(name : str, log_level : int = logging.INFO, log_file = None, format : str = '%(levelname)-8s - %(asctime)s:%(msecs)03d - %(name)s - %(message)s' ) -> logging.Logger: """ - the default logger used by most of scadapy package. StreamHandler is always created, pass logfile for a FileHandler + the default logger used by most of hololinked package. StreamHandler is always created, pass log_file for a FileHandler as well. """ logger = logging.getLogger(name) @@ -165,8 +165,8 @@ def create_default_logger(name : str, log_level : int = logging.INFO, logfile = default_handler = logging.StreamHandler(sys.stdout) default_handler.setFormatter(logging.Formatter(format, datefmt='%Y-%m-%dT%H:%M:%S')) logger.addHandler(default_handler) - if logfile: - file_handler = logging.FileHandler(logfile) + if log_file: + file_handler = logging.FileHandler(log_file) file_handler.setFormatter(logging.Formatter(format, datefmt='%Y-%m-%dT%H:%M:%S')) logger.addHandler(file_handler) return logger diff --git a/hololinked/server/webserver_utils.py b/hololinked/server/webserver_utils.py index 5d22ed0..8b58c2a 100644 --- a/hololinked/server/webserver_utils.py +++ b/hololinked/server/webserver_utils.py @@ -7,7 +7,7 @@ from tornado.httputil import HTTPServerRequest from .constants import CALLABLE, ATTRIBUTE, EVENT, FILE, IMAGE_STREAM -from .data_classes import HTTPServerEventData, HTTPServerResourceData, FileServerData +from .data_classes import FileServerData from .zmq_message_brokers import AsyncZMQClient, SyncZMQClient From 543baa295014d47cccb8058b741d6da85c69dc67 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" 
<62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 22:34:56 +0100 Subject: [PATCH 012/167] moved http method to http_methods file --- hololinked/server/http_methods.py | 46 +++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 hololinked/server/http_methods.py diff --git a/hololinked/server/http_methods.py b/hololinked/server/http_methods.py new file mode 100644 index 0000000..4c4a2d2 --- /dev/null +++ b/hololinked/server/http_methods.py @@ -0,0 +1,46 @@ + + + + + +from .constants import USE_OBJECT_NAME, GET, POST, PUT, DELETE, PATCH +from .decorators import remote_method + + +def get(URL_path = USE_OBJECT_NAME): + """ + use it on RemoteObject subclass methods to be available with GET HTTP request. + method is also by default accessible to proxy clients. + """ + return remote_method(URL_path=URL_path, http_method=GET) + +def post(URL_path = USE_OBJECT_NAME): + """ + use it on RemoteObject subclass methods to be available with POST HTTP request. + method is also by default accessible to proxy clients. + """ + return remote_method(URL_path=URL_path, http_method=POST) + +def put(URL_path = USE_OBJECT_NAME): + """ + use it on RemoteObject subclass methods to be available with PUT HTTP request. + method is also by default accessible to proxy clients. + """ + return remote_method(URL_path=URL_path, http_method=PUT) + +def delete(URL_path = USE_OBJECT_NAME): + """ + use it on RemoteObject subclass methods to be available with DELETE HTTP request. + method is also by default accessible to proxy clients. + """ + return remote_method(URL_path=URL_path, http_method=DELETE) + +def patch(URL_path = USE_OBJECT_NAME): + """ + use it on RemoteObject subclass methods to be available with PATCH HTTP request. + method is also by default accessible to proxy clients. 
+ """ + return remote_method(URL_path=URL_path, http_method=PATCH) + + +__all__ = ['get', 'put', 'post', 'delete', 'patch'] \ No newline at end of file From cae1c65d8e7360562c5ea642af0d1a3c67d95536 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 22:37:01 +0100 Subject: [PATCH 013/167] added proper threaded RPC server - untested and missing logics, got rid of RemoteSubObject, RemoteObject modified --- hololinked/server/proxy_client.py | 6 +- hololinked/server/remote_object.py | 386 ++++++++++------------- hololinked/server/remote_parameter.py | 134 ++++---- hololinked/server/zmq_message_brokers.py | 280 +++++++++++----- 4 files changed, 446 insertions(+), 360 deletions(-) diff --git a/hololinked/server/proxy_client.py b/hololinked/server/proxy_client.py index cabaf25..fe00b63 100644 --- a/hololinked/server/proxy_client.py +++ b/hololinked/server/proxy_client.py @@ -7,7 +7,7 @@ from .zmq_message_brokers import SyncZMQClient, EventConsumer, PROXY from .utils import current_datetime_ms_str, raise_local_exception from .constants import PARAMETER, SERIALIZABLE_WRAPPER_ASSIGNMENTS, FUNC, CALLABLE, ATTRIBUTE, EVENT -from .data_classes import ProxyResourceData + @@ -360,7 +360,7 @@ def close(self): __allowed_attribute_types__ = (_RemoteParameter, _RemoteMethod) -def _add_method(client_obj : ObjectProxy, method : _RemoteMethod, func_info : ProxyResourceData) -> None: +def _add_method(client_obj : ObjectProxy, method : _RemoteMethod, func_info) -> None: for dunder in SERIALIZABLE_WRAPPER_ASSIGNMENTS: if dunder == '__qualname__': info = '{}.{}'.format(client_obj.__class__.__name__, func_info.get_dunder_attr(dunder).split('.')[1]) @@ -369,7 +369,7 @@ def _add_method(client_obj : ObjectProxy, method : _RemoteMethod, func_info : Pr setattr(method, dunder, info) client_obj.__setattr__(method.__name__, method) -def _add_parameter(client_obj : ObjectProxy, parameter : _RemoteParameter, parameter_info : 
ProxyResourceData) -> None: +def _add_parameter(client_obj : ObjectProxy, parameter : _RemoteParameter, parameter_info) -> None: for attr in ['doc', 'name']: # just to imitate _add_method logic setattr(parameter, attr, getattr(parameter_info, attr)) diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index e36441f..4527d7c 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -1,5 +1,4 @@ import asyncio -from collections import deque import json import logging import inspect @@ -8,6 +7,7 @@ import time import typing import datetime +from collections import deque from enum import EnumMeta, Enum from dataclasses import asdict, dataclass @@ -15,25 +15,24 @@ from sqlalchemy import select from sqlalchemy.orm import Mapped, mapped_column, DeclarativeBase, MappedAsDataclass + from ..param.parameterized import Parameterized, ParameterizedMetaclass -from ..param.parameters import (String, ClassSelector, TupleSelector, TypedDict, Boolean, - Selector, TypedKeyMappingsConstrainedDict) -from .constants import (EVENT, GET, IMAGE_STREAM, JSONSerializable, instance_name_regex, CallableType, CALLABLE, +from .constants import (EVENT, GET, IMAGE_STREAM, JSONSerializable, CallableType, CALLABLE, ATTRIBUTE, READ, WRITE, log_levels, POST, ZMQ_PROTOCOLS, FILE) from .serializers import * from .exceptions import BreakInnerLoop -from .decorators import get, post, remote_method -from .data_classes import (GUIResources, HTTPServerEventData, HTTPServerResourceData, RPCResourceData, - HTTPServerResourceData, FileServerData, ScadaInfoData, - ScadaInfoValidator) +from .decorators import remote_method +from .http_methods import get, post +from .data_classes import (GUIResources, RemoteResource, HTTPResource, RPCResource, RemoteResourceInfoValidator, + ServerSentEventInfo) from .api_platform_utils import postman_item, postman_itemgroup from .database import BaseAsyncDB, BaseSyncDB from .utils import create_default_logger, 
get_signature, wrap_text from .api_platform_utils import * from .remote_parameter import FileServer, PlotlyFigure, ReactApp, RemoteParameter, RemoteClassParameters, Image -from .remote_parameters import (Boolean as RemoteBoolean, ClassSelector as RemoteClassSelector, - Integer as RemoteInteger ) +from .remote_parameters import (Integer, String, ClassSelector, TupleSelector, TypedDict, Boolean, + Selector, TypedKeyMappingsConstrainedDict ) from .zmq_message_brokers import ServerTypes, EventPublisher, AsyncPollingZMQServer, Event @@ -54,9 +53,9 @@ class StateMachine: Attributes: exists (bool): internally computed, True if states and initial_states are valid """ - initial_state = RemoteClassSelector(default=None, allow_None=True, constant=True, class_=(Enum, str)) - exists = RemoteBoolean(default=False) - states = RemoteClassSelector(default=None, allow_None=True, constant=True, class_=(EnumMeta, tuple, list)) + initial_state = ClassSelector(default=None, allow_None=True, constant=True, class_=(Enum, str)) + exists = Boolean(default=False) + states = ClassSelector(default=None, allow_None=True, constant=True, class_=(EnumMeta, tuple, list)) on_enter = TypedDict(default=None, allow_None=True, key_type=str) on_exit = TypedDict(default=None, allow_None=True, key_type=str) machine = TypedDict(default=None, allow_None=True, key_type=str, item_type=(list, tuple)) @@ -101,17 +100,17 @@ def _prepare(self, owner : 'RemoteObject') -> None: if state in self: for resource in objects: if hasattr(resource, 'scada_info'): - assert isinstance(resource.scada_info, ScadaInfoValidator) # type: ignore - if resource.scada_info.iscallable and resource.scada_info.obj_name not in owner_methods: # type: ignore + assert isinstance(resource._remote_info, RemoteResourceInfoValidator) # type: ignore + if resource._remote_info.iscallable and resource._remote_info.obj_name not in owner_methods: # type: ignore raise AttributeError("Given object {} for state machine does not belong to class 
{}".format( resource, owner)) - if resource.scada_info.isparameter and resource not in owner_parameters: # type: ignore + if resource._remote_info.isparameter and resource not in owner_parameters: # type: ignore raise AttributeError("Given object {} - {} for state machine does not belong to class {}".format( resource.name, resource, owner)) - if resource.scada_info.state is None: # type: ignore - resource.scada_info.state = self._machine_compliant_state(state) # type: ignore + if resource._remote_info.state is None: # type: ignore + resource._remote_info.state = self._machine_compliant_state(state) # type: ignore else: - resource.scada_info.state = resource.scada_info.state + (self._machine_compliant_state(state), ) # type: ignore + resource._remote_info.state = resource._remote_info.state + (self._machine_compliant_state(state), ) # type: ignore else: raise AttributeError(wrap_text(f"""Object {resource} not made remotely accessible. Use state machine with remote parameters and remote methods only""")) @@ -317,47 +316,47 @@ def parameters(mcs) -> RemoteClassParameters: class RemoteObject(Parameterized, metaclass=RemoteObjectMetaclass): """ - Expose your python classes for HTTP methods by subclassing from here. + Expose your python classes for HTTP methods & RPC clients by subclassing from here. """ __server_type__ = ServerTypes.REMOTE_OBJECT state_machine : StateMachine # objects given by user which we need to validate: - instance_name = String(default=None, regex=r'[A-Za-z]+[A-Za-z_0-9\-\/]*', constant=True, + instance_name = String(default=None, regex=r'[A-Za-z]+[A-Za-z_0-9\-\/]*', constant=True, remote=False, doc="""Unique string identifier of the instance. This value is used for many operations, for example - creating zmq socket address, tables in databases, and to identify the instance in the HTTP Server & webdashboard clients - (http(s)://{domain and sub domain}/{instance name}). 
It is suggested to use the class name along with a unique name {class name}/{some unique name}. Instance names must be unique - in your entire system.""") # type: ignore - logger = ClassSelector(class_=logging.Logger, default=None, allow_None=True, + in your entire system.""") # type: str + logger = ClassSelector(class_=logging.Logger, default=None, allow_None=True, remote=False, doc = """Logger object to print log messages, should be instance of logging.Logger(). default - logger is created if none is supplied.""") # type: ignore + logger is created if none is supplied.""") # type: logging.Logger rpc_serializer = ClassSelector(class_=(SerpentSerializer, JSONSerializer, PickleSerializer, str), # DillSerializer, - default='json', + default='json', remote=False, doc="""The serializer that will be used for passing messages in zmq. For custom data types which have serialization problems, you can subclass the serializers and implement your own serialization options. Recommended serializer for exchange messages between - Proxy clients and server is Serpent and for HTTP serializer and server is JSON.""") # type: ignore - json_serializer = ClassSelector(class_=JSONSerializer, default=None, allow_None=True, + Proxy clients and server is Serpent and for HTTP serializer and server is JSON.""") # type: BaseSerializer + json_serializer = ClassSelector(class_=JSONSerializer, default=None, allow_None=True, remote=False, doc = """Serializer used for sending messages between HTTP server and remote object, - subclass JSONSerializer to implement undealt serialization options.""") # type: ignore + subclass JSONSerializer to implement undealt serialization options.""") # type: JSONSerializer # remote paramaters - object_info = RemoteParameter(readonly=True, URL_path='/object-info', - doc="obtained information about this object like the class name, script location etc.") # type: ignore - events : typing.Dict = RemoteParameter(readonly=True, URL_path='/events', - doc="returns a 
dictionary with two fields " ) # type: ignore + object_info = RemoteParameter(doc="contains information about this object like the class name, script location etc.", + readonly=True, URL_path='/info', fget = lambda self: self._object_info) # type: RemoteObjectDB.RemoteObjectInfo + events = RemoteParameter(readonly=True, URL_path='/events', + doc="returns a dictionary with two fields containing event name and event information") # type: typing.Dict[str, typing.Any] httpserver_resources = RemoteParameter(readonly=True, URL_path='/resources/http', - doc="""""" ) # type: ignore + doc="""object's resources exposed to HTTP server""", fget=lambda self: self._httpserver_resources ) # type: typing.Dict[str, typing.Dict[str, HTTPResource]] rpc_resources = RemoteParameter(readonly=True, URL_path='/resources/object-proxy', - doc= """object's resources exposed to ProxyClient, similar to http_resources but differs - in details.""") # type: ignore + doc= """object's resources exposed to RPC client, similar to HTTP resources but differs + in details.""", fget=lambda self: self._rpc_resources) # type: typing.Dict[str, typing.Any] gui_resources : typing.Dict = RemoteParameter(readonly=True, URL_path='/resources/gui', doc= """object's data read by scadapy webdashboard GUI client, similar to http_resources but differs - in details.""") # type: ignore - GUI = RemoteClassSelector(class_=ReactApp, default=None, allow_None=True, - doc= """GUI applied here will become visible at GUI tab of dashboard tool""") + in details.""") # type: typing.Dict[str, typing.Any] + GUI = ClassSelector(class_=ReactApp, default=None, allow_None=True, + doc= """GUI applied here will become visible at GUI tab of dashboard tool""") # type: typing.Optional[ReactApp] def __new__(cls, **kwargs): @@ -379,23 +378,13 @@ def __init__(self, instance_name : str, logger : typing.Optional[logging.Logger] log_file : typing.Optional[str] = None, logger_remote_access : bool = True, rpc_serializer : 
typing.Optional[BaseSerializer] = None, json_serializer : typing.Optional[JSONSerializer] = None, server_protocols : typing.Optional[typing.Union[typing.List[ZMQ_PROTOCOLS], typing.Tuple[ZMQ_PROTOCOLS], ZMQ_PROTOCOLS]] = None, - db_config_file : typing.Optional[str] = None) -> None: - super().__init__(instance_name=instance_name, logger=logger, - rpc_serializer=rpc_serializer, json_serializer=json_serializer) - - # missing type definitions - self.instance_name : str - self.logger : logging.Logger - self.db_engine : RemoteObjectDB - self.rpc_serializer : BaseSerializer - self.json_serializer : JSONSerializer - self.object_info : RemoteObjectDB.RemoteObjectInfo - self.events : typing.Dict - self.httpserver_resources : typing.Dict - self.rpc_resources : typing.Dict - self._eventloop_name : str - self._owner : typing.Optional[RemoteObject] + db_config_file : typing.Optional[str] = None, **params) -> None: + self._internal_fixed_attributes : typing.List[str] + self._owner : typing.Optional[RemoteObject] + + super().__init__(instance_name=instance_name, logger=logger, rpc_serializer=rpc_serializer, + json_serializer=json_serializer, **params) self._prepare_logger(log_file=log_file, log_level=log_level, remote_access=logger_remote_access) self._prepare_message_brokers(server_protocols=server_protocols, rpc_serializer=rpc_serializer, @@ -406,10 +395,9 @@ def __init__(self, instance_name : str, logger : typing.Optional[logging.Logger] def __post_init__(self): # Never create events before _prepare_instance(), no checks in place - self._owner = None self._prepare_resources() self._write_parameters_from_DB() - self.logger.info("initialialised RemoteObject of class {} with instance name {}".format( + self.logger.info("initialialised RemoteObject class {} with instance name {}".format( self.__class__.__name__, self.instance_name)) @@ -421,11 +409,7 @@ def __setattr__(self, __name: str, __value: typing.Any) -> None: super().__setattr__(__name, __value) else: raise AttributeError( 
- wrap_text( - f""" - Attempted to set {__name} more than once. cannot assign a value to this variable after creation. - """ - )) + f"Attempted to set {__name} more than once. Cannot assign a value to this variable after creation.") else: super().__setattr__(__name, __value) @@ -433,29 +417,30 @@ def __setattr__(self, __name: str, __value: typing.Any) -> None: def _prepare_logger(self, log_level : int, log_file : str, remote_access : bool = True): if self.logger is None: self.logger = create_default_logger('{}/{}'.format(self.__class__.__name__, self.instance_name), - log_level, log_file) - if remote_access and not any(isinstance(handler, RemoteAccessHandler) + logging.INFO if not log_level else log_level, + None if not log_file else log_file) + if remote_access: + if not any(isinstance(handler, RemoteAccessHandler) for handler in self.logger.handlers): - self._remote_access_loghandler = RemoteAccessHandler(instance_name='logger', maxlen=500, emit_interval=1) - self.logger.addHandler(self._remote_access_loghandler) - else: - for handler in self.logger.handlers: - if isinstance(handler, RemoteAccessHandler): - self._remote_access_loghandler = handler + self._remote_access_loghandler = RemoteAccessHandler(instance_name='logger', maxlen=500, emit_interval=1) + self.logger.addHandler(self._remote_access_loghandler) + else: + for handler in self.logger.handlers: + if isinstance(handler, RemoteAccessHandler): + self._remote_access_loghandler = handler - def _prepare_message_brokers(self, protocols : typing.Optional[typing.Union[typing.List[ZMQ_PROTOCOLS], - typing.Tuple[ZMQ_PROTOCOLS], ZMQ_PROTOCOLS]]): + def _prepare_message_brokers(self, protocols : typing.Optional[typing.Union[typing.Iterable[ZMQ_PROTOCOLS], ZMQ_PROTOCOLS]]): self.message_broker = AsyncPollingZMQServer( instance_name=self.instance_name, - executor_thread_event=threading.Event(), server_type=self.__server_type__, - protocols=self.server_protocols, json_serializer=self.json_serializer, - 
proxy_serializer=self.proxy_serializer + protocols=ZMQ_PROTOCOLS.INPROC, + json_serializer=self.json_serializer, + rpc_serializer=self.rpc_serializer ) self.json_serializer = self.message_broker.json_serializer - self.proxy_serializer = self.message_broker.proxy_serializer - self.event_publisher = EventPublisher(identity=self.instance_name, proxy_serializer=self.proxy_serializer, + self.rpc_serializer = self.message_broker.rpc_serializer + self.event_publisher = EventPublisher(identity=self.instance_name, rpc_serializer=self.rpc_serializer, json_serializer=self.json_serializer) @@ -472,62 +457,67 @@ def _prepare_resources(self): DELETE = dict(), PATCH = dict(), OPTIONS = dict() - ) - # The following dict will be given to the proxy client - rpc_resources = dict() + ) # type: typing.Dict[str, typing.Dict[str, HTTPResource]] + # The following dict will be given to the object proxy client + rpc_resources = dict() # type: typing.Dict[str, RPCResource] # The following dict will be used by the event loop - instance_resources : typing.Dict[str, ScadaInfoData] = dict() + instance_resources = dict() # type: typing.Dict[str, RemoteResource] # create URL prefix - self.full_URL_path_prefix = f'{self._owner.full_URL_path_prefix}/{self.instance_name}' if self._owner is not None else f'/{self.instance_name}' + self._full_URL_path_prefix = f'{self._owner._full_URL_path_prefix}/{self.instance_name}' if self._owner is not None else f'/{self.instance_name}' # First add methods and callables for name, resource in inspect.getmembers(self, inspect.ismethod): - if hasattr(resource, 'scada_info'): - if not isinstance(resource.scada_info, ScadaInfoValidator): # type: ignore - raise TypeError("instance member {} has unknown sub-member 'scada_info' of type {}.".format( - resource, type(resource.scada_info))) # type: ignore - scada_info = resource.scada_info.create_dataclass(obj=resource, bound_obj=self) # type: ignore - # methods are already bound though - fullpath = 
"{}{}".format(self.full_URL_path_prefix, scada_info.URL_path) - if scada_info.iscallable: - for http_method in scada_info.http_method: - httpserver_resources[http_method][fullpath] = HTTPServerResourceData( - what=CALLABLE, - instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, - fullpath=fullpath, - instruction=fullpath, - http_request_as_argument=scada_info.http_request_as_argument - ) - rpc_resources[fullpath] = RPCResourceData( - what=CALLABLE, - instruction=fullpath, - module=getattr(resource, '__module__'), - name=getattr(resource, '__name__'), - qualname=getattr(resource, '__qualname__'), - doc=getattr(resource, '__doc__'), - kwdefaults=getattr(resource, '__kwdefaults__'), - defaults=getattr(resource, '__defaults__'), - ) - instance_resources[fullpath] = scada_info + if hasattr(resource, '_remote_info'): + if not isinstance(resource._remote_info, RemoteResourceInfoValidator): + raise TypeError("instance member {} has unknown sub-member '_remote_info' of type {}.".format( + resource, type(resource._remote_info))) + remote_info = resource._remote_info + # methods are already bound + fullpath = "{}{}".format(self._full_URL_path_prefix, remote_info.URL_path) + assert remote_info.iscallable, ("remote info from inspect.ismethod is not a callable", + "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report") + for http_method in remote_info.http_method: + httpserver_resources[http_method][fullpath] = HTTPResource( + what=CALLABLE, + instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, + fullpath=fullpath, + instruction=fullpath, + request_as_argument=remote_info.request_as_argument + ) + rpc_resources[fullpath] = RPCResource( + what=CALLABLE, + instruction=fullpath, + module=getattr(resource, '__module__'), + name=getattr(resource, '__name__'), + qualname=getattr(resource, '__qualname__'), + doc=getattr(resource, '__doc__'), + kwdefaults=getattr(resource, 
'__kwdefaults__'), + defaults=getattr(resource, '__defaults__'), + ) + instance_resources[fullpath] = remote_info.to_dataclass(obj=resource) # Other remote objects - for name, resource in inspect.getmembers(self, lambda o : isinstance(o, RemoteSubobject)): + for name, resource in inspect.getmembers(self, lambda o : isinstance(o, RemoteObject)): if name == '_owner': continue - elif isinstance(resource, RemoteSubobject): - resource._owner = self - resource._prepare_instance() - for http_method, resources in resource.httpserver_resources.items(): - httpserver_resources[http_method].update(resources) - rpc_resources.update(resource.rpc_resources) - instance_resources.update(resource.instance_resources) + assert isinstance(resource, RemoteObject), ("remote object children query from inspect.ismethod is not a RemoteObject", + "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report") + # above assertion is only a typing convenience + resource._owner = self + resource._prepare_instance() + for http_method, resources in resource.httpserver_resources.items(): + httpserver_resources[http_method].update(resources) + rpc_resources.update(resource.rpc_resources) + instance_resources.update(resource.instance_resources) # Events for name, resource in inspect.getmembers(self, lambda o : isinstance(o, Event)): - assert isinstance(resource, Event) + assert isinstance(resource, Event), ("remote object event query from inspect.ismethod is not an Event", + "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report") + # above assertion is only a typing convenience resource._owner = self - resource.full_URL_path_prefix = self.full_URL_path_prefix + resource._unique_event_name = bytes(f"{self._full_URL_path_prefix}{resource.URL_path}", encoding='utf-8') resource.publisher = self._event_publisher httpserver_resources[GET]['{}{}'.format( - self.full_URL_path_prefix, resource.URL_path)] = HTTPServerEventData( + self._full_URL_path_prefix, 
resource.URL_path)] = ServerSentEventInfo( # event URL_path has '/' prefix what=EVENT, event_name=resource.name, @@ -535,62 +525,47 @@ def _prepare_resources(self): ) # Parameters for parameter in self.parameters.descriptors.values(): - if hasattr(parameter, 'scada_info'): - if not isinstance(parameter.scada_info, ScadaInfoValidator): # type: ignore + if hasattr(parameter, '_remote_info'): + if not isinstance(parameter._remote_info, RemoteResourceInfoValidator): # type: ignore raise TypeError("instance member {} has unknown sub-member 'scada_info' of type {}.".format( - parameter, type(parameter.scada_info))) # type: ignore + parameter, type(parameter._remote_info))) # type: ignore # above condition is just a gaurd in case user does some unpredictable patching activities - scada_info = parameter.scada_info.create_dataclass(obj=parameter, bound_obj=self) # type: ignore - fullpath = "{}{}".format(self.full_URL_path_prefix, scada_info.URL_path) - if scada_info.isparameter: - read_http_method, write_http_method = scada_info.http_method - - httpserver_resources[read_http_method][fullpath] = HTTPServerResourceData( - what=ATTRIBUTE, - instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, - fullpath=fullpath, - instruction=fullpath + '/' + READ - ) - if isinstance(parameter, Image) and parameter.streamable: - parameter.event._owner = self - parameter.event.full_URL_path_prefix = self.full_URL_path_prefix - parameter.event.publisher = self._event_publisher - httpserver_resources[GET]['{}{}'.format( - self.full_URL_path_prefix, parameter.event.URL_path)] = HTTPServerEventData( - what=EVENT, - event_name=parameter.event.name, - socket_address=self._event_publisher.socket_address, - ) - httpserver_resources[write_http_method][fullpath] = HTTPServerResourceData( - what=ATTRIBUTE, - instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, - fullpath=fullpath, - instruction=fullpath + '/' + WRITE - ) + 
remote_info = parameter._remote_info + fullpath = "{}{}".format(self._full_URL_path_prefix, remote_info.URL_path) + assert remote_info.isparameter, ("remote object parameter query from inspect.ismethod is not a Parameter", + "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report") + read_http_method, write_http_method = remote_info.http_method + + httpserver_resources[read_http_method][fullpath] = HTTPResource( + what=ATTRIBUTE, + instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, + fullpath=fullpath, + instruction=fullpath + '/' + READ + ) + + httpserver_resources[write_http_method][fullpath] = HTTPResource( + what=ATTRIBUTE, + instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, + fullpath=fullpath, + instruction=fullpath + '/' + WRITE + ) - rpc_resources[fullpath] = RPCResourceData( + rpc_resources[fullpath] = RPCResource( what=ATTRIBUTE, instruction=fullpath, module=__file__, doc=parameter.__doc__, - name=scada_info.obj_name, - qualname=self.__class__.__name__ + '.' + scada_info.obj_name, + name=remote_info.obj_name, + qualname=self.__class__.__name__ + '.' 
+ remote_info.obj_name, # qualname is not correct probably, does not respect inheritance kwdefaults=None, defaults=None, ) - if isinstance(parameter, FileServer): - read_http_method, _ = scada_info.http_method - fileserverpath = "{}/files{}".format(self.full_URL_path_prefix, scada_info.URL_path) - httpserver_resources[read_http_method][fileserverpath] = FileServerData( - what=FILE, - directory=parameter.directory, - fullpath=fileserverpath - ) - instance_resources[fullpath+'/'+READ] = scada_info - instance_resources[fullpath+'/'+WRITE] = scada_info + dclass = remote_info.to_dataclass(obj=parameter) + instance_resources[fullpath+'/'+READ] = dclass + instance_resources[fullpath+'/'+WRITE] = dclass # The above for-loops can be used only once, the division is only for readability - # _internal_fixed_attributes - allowed to set only once + # following are in _internal_fixed_attributes - allowed to set only once self._rpc_resources = rpc_resources self._httpserver_resources = httpserver_resources self.instance_resources = instance_resources @@ -620,7 +595,7 @@ def _prepare_DB(self, config_file : str = None): self._object_info = self._create_object_info() return # 1. create engine - self.db_engine = RemoteObjectDB(instance_name=self.instance_name, serializer=self.rpc_serializer, + self.db_engine : RemoteObjectDB = RemoteObjectDB(instance_name=self.instance_name, serializer=self.rpc_serializer, config_file=config_file) # 2. create an object metadata to be used by different types of clients object_info = self.db_engine.fetch_own_info() @@ -634,16 +609,18 @@ def _prepare_DB(self, config_file : str = None): You might be reusing an instance name of another subclass and did not remove the old data from database. Please clean the database using database tools to start fresh. """)) - + + def _write_parameters_from_DB(self): self.db_engine.create_missing_db_parameters(self.__class__.parameters.db_init_objects) # 4. 
read db_init and db_persist objects for db_param in self.db_engine.read_all_parameters(): try: - setattr(self, db_param.name, self.proxy_serializer.loads(db_param.value)) # type: ignore + setattr(self, db_param.name, self.rpc_serializer.loads(db_param.value)) # type: ignore except Exception as E: self.logger.error(f"could not set attribute {db_param.name} due to error {E}") + def _prepare_state_machine(self): if hasattr(self, 'state_machine'): self.state_machine._prepare(self) @@ -655,25 +632,16 @@ def _event_publisher(self) -> EventPublisher: try: return self.event_publisher except AttributeError: - top_owner = self._owner + top_owner = self._owner # type: RemoteObject while True: if isinstance(top_owner, RemoteObject): - self.event_publisher = top_owner.event_publisher - return self.event_publisher - elif isinstance(top_owner, RemoteSubobject): top_owner = top_owner._owner else: - raise RuntimeError(wrap_text("""Error while finding owner of RemoteSubobject, - RemoteSubobject must be composed only within RemoteObject or RemoteSubobject, - otherwise there can be problems.""")) - - @object_info.getter - def _get_object_info(self): - try: - return self._object_info - except AttributeError: - return None - + break; + self.event_publisher = top_owner._event_publisher + return self.event_publisher + + @events.getter def _get_events(self) -> typing.Dict[str, typing.Any]: return { @@ -686,13 +654,6 @@ def _get_events(self) -> typing.Dict[str, typing.Any]: ) for event in self.event_publisher.events } - @httpserver_resources.getter - def _get_httpserver_resources(self) -> typing.Dict[str, typing.Dict[str, typing.Any]]: - return self._httpserver_resources - - @rpc_resources.getter - def _get_rpc_resources(self) -> typing.Dict[str, typing.Dict[str, typing.Any]]: - return self._rpc_resources @gui_resources.getter def _get_gui_resources(self): @@ -703,43 +664,43 @@ def _get_gui_resources(self): inheritance = [class_.__name__ for class_ in self.__class__.mro()], GUI = self.GUI, 
) - for instruction, scada_info in self.instance_resources.items(): - if scada_info.iscallable: + for instruction, remote_info in self.instance_resources.items(): + if remote_info.iscallable: gui_resources.methods[instruction] = self.rpc_resources[instruction].json() - gui_resources.methods[instruction]["scada_info"] = scada_info.json() + gui_resources.methods[instruction]["remote_info"] = remote_info.json() # to check - apparently the recursive json() calling does not reach inner depths of a dict, # therefore we call json ourselves gui_resources.methods[instruction]["owner"] = self.rpc_resources[instruction].qualname.split('.')[0] - gui_resources.methods[instruction]["owner_instance_name"] = scada_info.bound_obj.instance_name - gui_resources.methods[instruction]["type"] = 'classmethod' if isinstance(scada_info.obj, classmethod) else '' - gui_resources.methods[instruction]["signature"] = get_signature(scada_info.obj)[0] - elif scada_info.isparameter: + gui_resources.methods[instruction]["owner_instance_name"] = remote_info.bound_obj.instance_name + gui_resources.methods[instruction]["type"] = 'classmethod' if isinstance(remote_info.obj, classmethod) else '' + gui_resources.methods[instruction]["signature"] = get_signature(remote_info.obj)[0] + elif remote_info.isparameter: path_without_RW = instruction.rsplit('/', 1)[0] if path_without_RW not in gui_resources.parameters: - gui_resources.parameters[path_without_RW] = self.__class__.parameters.webgui_info(scada_info.obj)[scada_info.obj.name] + gui_resources.parameters[path_without_RW] = self.__class__.parameters.webgui_info(remote_info.obj)[remote_info.obj.name] gui_resources.parameters[path_without_RW]["instruction"] = path_without_RW """ The instruction part has to be cleaned up to be called as fullpath. Setting the full path back into - scada_info is not correct because the unbound method is used by multiple instances. + remote_info is not correct because the unbound method is used by multiple instances. 
""" - gui_resources.parameters[path_without_RW]["owner_instance_name"] = scada_info.bound_obj.instance_name - if isinstance(scada_info.obj, PlotlyFigure): + gui_resources.parameters[path_without_RW]["owner_instance_name"] = remote_info.bound_obj.instance_name + if isinstance(remote_info.obj, PlotlyFigure): gui_resources.parameters[path_without_RW]['default'] = None gui_resources.parameters[path_without_RW]['visualization'] = { 'type' : 'plotly', - 'plot' : scada_info.obj.__get__(self, type(self)), - 'sources' : scada_info.obj.data_sources, + 'plot' : remote_info.obj.__get__(self, type(self)), + 'sources' : remote_info.obj.data_sources, 'actions' : { - scada_info.obj._action_stub.id : scada_info.obj._action_stub + remote_info.obj._action_stub.id : remote_info.obj._action_stub }, } - elif isinstance(scada_info.obj, Image): + elif isinstance(remote_info.obj, Image): gui_resources.parameters[path_without_RW]['default'] = None gui_resources.parameters[path_without_RW]['visualization'] = { 'type' : 'sse-video', - 'sources' : scada_info.obj.data_sources, + 'sources' : remote_info.obj.data_sources, 'actions' : { - scada_info.obj._action_stub.id : scada_info.obj._action_stub + remote_info.obj._action_stub.id : remote_info.obj._action_stub }, } return gui_resources @@ -768,8 +729,8 @@ def postman_collection(self, domain_prefix : str) -> postman_collection: for http_method, resource in self.httpserver_resources.items(): # i.e. this information is generated only on the httpserver accessible resrouces... 
for URL_path, httpserver_data in resource.items(): - if isinstance(httpserver_data, HTTPServerResourceData): - scada_info : ScadaInfoData + if isinstance(httpserver_data, HTTPResource): + scada_info : RemoteResource try: scada_info = self.instance_resources[httpserver_data.instruction] except KeyError: @@ -791,13 +752,6 @@ def postman_collection(self, domain_prefix : str) -> postman_collection: self._postman_collection = collection return collection - @post(URL_path='/resources/postman-collection/save') - def save_postman_collection(self, filename : typing.Optional[str] = None) -> None: - if filename is None: - filename = f'{self.__class__.__name__}_postman_collection.json' - with open(filename, 'w') as file: - json.dump(self.postman_collection().json(), file, indent = 4) - @get('/parameters/names') def _parameters(self): return self.parameters.descriptors.keys() @@ -885,7 +839,7 @@ def emit(self, record : logging.LogRecord): -class RemoteAccessHandler(logging.Handler, RemoteSubobject): +class RemoteAccessHandler(logging.Handler, RemoteObject): def __init__(self, maxlen : int = 100, emit_interval : float = 1.0, **kwargs) -> None: logging.Handler.__init__(self) @@ -1015,4 +969,4 @@ def get_execution_logs(self): -__all__ = ['RemoteObject', 'StateMachine', 'RemoteObjectDB', 'RemoteSubobject', 'ListHandler', 'RemoteAccessHandler'] +__all__ = ['RemoteObject', 'StateMachine', 'RemoteObjectDB', 'ListHandler', 'RemoteAccessHandler'] diff --git a/hololinked/server/remote_parameter.py b/hololinked/server/remote_parameter.py index 47781cd..6f0f56c 100644 --- a/hololinked/server/remote_parameter.py +++ b/hololinked/server/remote_parameter.py @@ -2,9 +2,8 @@ import os from enum import Enum -from ..param.parameterized import Parameter, Parameterized, ClassParameters, raise_TypeError -from ..param.exceptions import raise_ValueError -from .decorators import ScadaInfoValidator +from ..param.parameterized import Parameter, Parameterized, ClassParameters +from .decorators import 
RemoteResourceInfoValidator from .constants import GET, PUT, USE_OBJECT_NAME from .zmq_message_brokers import Event @@ -16,8 +15,8 @@ __default_parameter_write_method__ = PUT __parameter_info__ = [ - 'allow_None' , 'class_member', 'constant', 'db_commit', - 'db_first_load', 'db_memorized', 'deepcopy_default', 'per_instance_descriptor', + 'allow_None' , 'class_member', 'constant', 'db_init', 'db_persist', + 'db_commit', 'deepcopy_default', 'per_instance_descriptor', 'default', 'doc', 'metadata', 'name', 'readonly' # 'scada_info', 'parameter_type' # descriptor related info is also necessary ] @@ -25,33 +24,28 @@ class RemoteParameter(Parameter): + """ + Initialize a new Parameter object and store the supplied attributes: - __slots__ = ['db_persist', 'db_init', 'db_commit', 'scada_info'] + Parameters + ---------- - def __init__(self, default: typing.Any = None, *, doc : typing.Optional[str] = None, constant : bool = False, - readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, - http_method : typing.Tuple[typing.Optional[str], typing.Optional[str]] = (GET, PUT), - state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, - db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - class_member : bool = False, fget : typing.Optional[typing.Callable] = None, - fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, - deepcopy_default : bool = False, per_instance_descriptor : bool = False, - precedence : typing.Optional[float] = None, - ) -> None: - """Initialize a new Parameter object and store the supplied attributes: + default: None or corresponding to parameter type + The default value of the parameter. This is owned by class for the attribute + represented by the Parameter, which is overridden in an instance after + setting the parameter. 
- default: the owning class's value for the attribute represented - by this Parameter, which can be overridden in an instance. + doc: str, default empty + docstring explaining what this parameter represents. - doc: docstring explaining what this parameter represents. - - constant: if true, the Parameter value can be changed only at + constant: bool, default False + if true, the Parameter value can be changed only at the class level or in a Parameterized constructor call. The value is otherwise constant on the Parameterized instance, once it has been constructed. - readonly: if true, the Parameter value cannot ordinarily be + readonly: bool, default False + if true, the Parameter value cannot ordinarily be changed by setting the attribute at the class or instance levels at all. The value can still be changed in code by temporarily overriding the value of this slot and then @@ -59,41 +53,41 @@ def __init__(self, default: typing.Any = None, *, doc : typing.Optional[str] = N _user_ should never change but which do change during code execution. - allow_None: if True, None is accepted as a valid value for + allow_None: bool, default False + if True, None is accepted as a valid value for this Parameter, in addition to any other values that are allowed. If the default value is defined as None, allow_None is set to True automatically. - db_memorized: if True, every read and write is stored in database + db_memorized: bool, default False + if True, every read and write is stored in database and persists instance destruction and creation. - - db_firstload: if True, only the first read is loaded from database. + + db_firstload: bool, default False + if True, only the first read is loaded from database. further reads and writes not written to database. if db_memorized is True, this value is ignored. 
- optional: some doc - - remote: set False to avoid exposing the variable for remote read + remote: bool, default True + set False to avoid exposing the variable for remote read and write - URL: resource locator under which the attribute is accessible through + URL_path: str, uses object name by default + resource locator under which the attribute is accessible through HTTP. when remote is True and no value is supplied, the variable name is used and underscores and replaced with dash - read_method: HTTP method for attribute read, default is GET - - write_method: HTTP method for attribute read, default is PUT - - read_time : some doc - - metadata: store your own JSON compatible metadata for the parameter + metadata: dict, default None + store your own JSON compatible metadata for the parameter which gives useful (and modifiable) information about the parameter. - label: optional text label to be used when this Parameter is + label: str, default extracted from object name + optional text label to be used when this Parameter is shown in a listing. If no label is supplied, the attribute name for this parameter in the owning Parameterized object is used. - per_instance: whether a separate Parameter instance will be + per_instance_descriptor: bool, default False + whether a separate Parameter instance will be created for every Parameterized instance. True by default. If False, all instances of a Parameterized class will share the same Parameter object, including all validation @@ -101,7 +95,8 @@ def __init__(self, default: typing.Any = None, *, doc : typing.Optional[str] = N conceptually similar but affects the Parameter value rather than the Parameter object. - deep_copy: controls whether the value of this Parameter will + deepcopy_default: bool, default False + controls whether the value of this Parameter will be deepcopied when a Parameterized object is instantiated (if True), or if the single default value will be shared by all Parameterized instances (if False). 
For an immutable Parameter @@ -120,43 +115,56 @@ def __init__(self, default: typing.Any = None, *, doc : typing.Optional[str] = N because each instance, once created, will then have an independently deepcopied value. - class_member : To make a ... + class_member : bool, default False - pickle_default_value: whether the default value should be - pickled. Usually, you would want the default value to be pickled, - but there are rare cases where that would not be the case (e.g. - for file search paths that are specific to a certain system). - - precedence: a numeric value, usually in the range 0.0 to 1.0, + precedence: float, default None + a numeric value, usually in the range 0.0 to 1.0, which allows the order of Parameters in a class to be defined in a listing or e.g. in GUI menus. A negative precedence indicates a parameter that should be hidden in such listings. - default, doc, and precedence all default to None, which allows - inheritance of Parameter slots (attributes) from the owning-class' - class hierarchy (see ParameterizedMetaclass). - """ + default, doc, and precedence all default to None, which allows + inheritance of Parameter slots (attributes) from the owning-class' + class hierarchy (see ParameterizedMetaclass). 
+ """ + + __slots__ = ['db_persist', 'db_init', 'db_commit', 'metadata', '_remote_info'] + def __init__(self, default: typing.Any = None, *, doc : typing.Optional[str] = None, constant : bool = False, + readonly : bool = False, allow_None : bool = False, + URL_path : str = USE_OBJECT_NAME, remote : bool = True, + http_method : typing.Tuple[typing.Optional[str], typing.Optional[str]] = (GET, PUT), + state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, + db_persist : bool = False, db_init : bool = False, db_commit : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, + fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, + deepcopy_default : bool = False, per_instance_descriptor : bool = False, + precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None + ) -> None: + super().__init__(default=default, doc=doc, constant=constant, readonly=readonly, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, - class_member=class_member, fget=fget, fset=fset, precedence=precedence) + class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence) self.db_persist = db_persist self.db_init = db_init self.db_commit = db_commit if URL_path is not USE_OBJECT_NAME: assert URL_path.startswith('/'), "URL path should start with a leading '/'" - self.scada_info = ScadaInfoValidator( - http_method = http_method, - URL_path = URL_path, - state = state, - isparameter = True - ) + self._remote_info = None + if remote: + self._remote_info = RemoteResourceInfoValidator( + http_method = http_method, + URL_path = URL_path, + state = state, + isparameter = True + ) + self.metadata = metadata def _post_slot_set(self, slot : str, old : typing.Any, value : typing.Any) -> None: if slot == 'owner' and self.owner is not None: - if self.scada_info.URL_path == USE_OBJECT_NAME: - 
self.scada_info.URL_path = '/' + self.name - self.scada_info.obj_name = self.name + if self._remote_info.URL_path == USE_OBJECT_NAME: + self._remote_info.URL_path = '/' + self.name + self._remote_info.obj_name = self.name # In principle the above could be done when setting name itself however to simplify # we do it with owner. So we should always remember order of __set_name__ -> 1) attrib_name, # 2) name and then 3) owner diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py index 019b05b..ac3f62b 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -4,7 +4,7 @@ import asyncio import logging import typing -import threading +from collections import deque from enum import Enum from typing import Union, List, Any, Dict, Sequence, Iterator, Set @@ -12,6 +12,7 @@ from .utils import (current_datetime_ms_str, create_default_logger, run_coro_somehow, run_coro_sync, wrap_text, raise_local_exception) from .config import global_config +from .constants import ZMQ_PROTOCOLS from .serializers import (JSONSerializer, PickleSerializer, BaseSerializer, SerpentSerializer, # DillSerializer, serializers) from ..param.parameterized import Parameterized @@ -43,48 +44,96 @@ class ServerTypes(Enum): class BaseZMQ: + """ + Base class for all ZMQ message brokers. `hololinked` uses ZMQ under the hood to implement a + RPC server. All requests, either coming through a HTTP Server or an RPC client are routed via the RPC + Server to queue them before execution. See documentation of `RPCServer` for details. + This class implements `create_socket()` method & logger which is common to all server and client implementations. + """ def __init__(self) -> None: # only type definition for logger self.logger : logging.Logger def exit(self) -> None: + """ + Cleanup method to terminate ZMQ sockets and contexts before quitting. Called by `__del__()` + automatically. 
Each subclass server/client should implement their version of exiting if necessary. + """ raise NotImplementedError("implement exit() to gracefully exit ZMQ in {}.".format(self.__class__)) def __del__(self) -> None: self.exit() def create_socket(self, context : Union[zmq.asyncio.Context, zmq.Context], instance_name : str, identity : str, - bind : bool = False, protocol : str = "IPC", socket_address : Union[str, None] = None) -> None: + bind : bool = False, protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC, socket_type : zmq.SocketType = zmq.ROUTER, + socket_address : Union[str, None] = None) -> None: + """ + Create a socket with certain specifications + + Parameters + ---------- + context: zmq.Context or zmq.asyncio.Context + ZeroMQ Context object that creates the socket + instance_name: str + ``instance_name`` of the ``RemoteObject``. For servers, this serves as a name for the created + ROUTER socket. For clients, for IPC or INPROC, this allows to connect to the socket with the correct name. + must be unique. + identity: str + especially useful for clients to have a different name than the ``instance_name`` of the ``RemoteObject``. + For servers, supply the ``instance_name`` is sufficient. + bind: bool + whether to bind (server) or connect (client) + protocol: Enum + TCP, IPC or INPROC. Message crafting/passing/routing is protocol invariant as suggested by ZeroMQ docs. + socket_type: zmq.SocketType, default zmq.ROUTER + Usually a ROUTER socket is implemented for both client-server and peer-to-peer communication + socket_address: str + applicable only for TCP socket to find the correct socket to connect. 
+ + Returns + ------- + None + """ self.context = context self.identity = identity - self.socket = self.context.socket(zmq.ROUTER) + self.socket = self.context.socket(socket_type) self.socket.setsockopt_string(zmq.IDENTITY, identity) - if protocol == "IPC": + if protocol == ZMQ_PROTOCOLS.IPC or protocol == "IPC": split_instance_name = instance_name.split('/') - socket_dir = '\\' + '\\'.join(split_instance_name[:-1]) if len(split_instance_name) > 1 else '' - directory = global_config.APPDATA_DIR + socket_dir + socket_dir = os.sep + os.sep.join(split_instance_name[:-1]) if len(split_instance_name) > 1 else '' + directory = global_config.TEMP_DIR + socket_dir if not os.path.exists(directory): os.makedirs(directory) # re-compute for IPC - socket_address = "ipc://{}\\{}.ipc".format(directory, split_instance_name[-1]) + socket_address = "ipc://{}{}{}.ipc".format(directory, os.sep, split_instance_name[-1]) if bind: self.socket.bind(socket_address) else: self.socket.connect(socket_address) - elif protocol == "TCP": + elif protocol == ZMQ_PROTOCOLS.TCP or protocol == "TCP": if bind: - for i in range(1000, 65535): + for i in range(global_config.TCP_SOCKET_SEARCH_START_PORT, global_config.TCP_SOCKET_SEARCH_END_PORT): socket_address = "tcp://*:{}".format(i) try: self.socket.bind(socket_address) break - except: - pass + except zmq.error.ZMQError as ex: + if not ex.strerror.startswith('Address in use'): + raise ex from None elif socket_address: self.socket.connect(socket_address) + else: + raise RuntimeError(f"Socket must be either bound or connected. 
No operation is being carried out for this socket {identity}") + elif protocol == ZMQ_PROTOCOLS.INPROC or protocol == "INPROC": + inproc_instance_name = instance_name.replace('/', '_').replace('-', '_') + socket_address = f'inproc://{inproc_instance_name}' + if bind: + self.socket.bind(socket_address) + else: + self.socket.connect(socket_address) else: - raise NotImplementedError("protocols other than IPC & TCP are not implemented now for {}".format(self.__class__)) + raise NotImplementedError("protocols other than IPC, TCP & INPROC are not implemented now for {}".format(self.__class__)) self.logger = self.get_logger(self.identity, socket_address, class_ = self.__class__.__name__) # type: ignore self.logger.info("created socket with address {} and {}".format(socket_address, "bound" if bind else "connected")) @@ -104,39 +153,39 @@ class BaseAsyncZMQ(BaseZMQ): For IPC sockets, a file is created under TEMP_DIR of global configuration. """ - def create_socket(self, context : Union[zmq.asyncio.Context, None], instance_name : str, identity : str, - bind : bool = False, protocol : str = "IPC", socket_address : Union[str, None] = None) -> None: + def create_socket(self, instance_name : str, context : Union[zmq.asyncio.Context, None], *, identity : str, bind : bool = False, + protocol : str = "IPC", socket_type : zmq.SocketType = zmq.ROUTER, socket_address : Union[str, None] = None) -> None: context = context or zmq.asyncio.Context() - super().create_socket(context, instance_name, identity, bind, protocol, socket_address) + super().create_socket(context, instance_name, identity, bind, protocol, socket_type, socket_address) class BaseSyncZMQ(BaseZMQ): - def create_socket(self, context : Union[zmq.Context, None], instance_name : str, identity : str, - bind : bool = False, protocol : str = "IPC", socket_address : Union[str, None] = None) -> None: + def create_socket(self, instance_name : str, context : Union[zmq.Context, None], *, identity : str, bind : bool = False, + 
protocol : str = "IPC", socket_type : zmq.SocketType = zmq.ROUTER, socket_address : Union[str, None] = None) -> None: context = context or zmq.Context() - super().create_socket(context, instance_name, identity, bind, protocol, socket_address) + super().create_socket(context, instance_name, identity, bind, protocol, socket_type, socket_address) class BaseZMQServer(BaseZMQ): """ - This class implements serializer instantiation and message handling for ZMQ servers and can be subclassed by all server instances - irrespective of sync or async. The messaging contract does not depend on sync or async implementation. + This class implements serializer instantiation and message handling for ZMQ servers and can be subclassed by all + server instances irrespective of sync or async. The messaging contract does not depend on sync or async implementation. For HTTP clients, json_serializer is necessary and for other types of clients, any of the allowed serializer is possible. """ def __init__(self, server_type : Enum, json_serializer : Union[None, JSONSerializer] = None, - proxy_serializer : Union[str, BaseSerializer, None] = None) -> None: + rpc_serializer : Union[str, BaseSerializer, None] = None) -> None: if json_serializer is None or isinstance(json_serializer, JSONSerializer): self.json_serializer = json_serializer or JSONSerializer() else: raise ValueError("invalid JSON serializer option for {}. 
Given option : {}".format(self.__class__, json_serializer)) - if isinstance(proxy_serializer, (PickleSerializer, SerpentSerializer, JSONSerializer)): # , DillSerializer)): - self.proxy_serializer = proxy_serializer - elif isinstance(proxy_serializer, str) or proxy_serializer is None: - self.proxy_serializer = serializers.get(proxy_serializer, SerpentSerializer)() + if isinstance(rpc_serializer, (PickleSerializer, SerpentSerializer, JSONSerializer)): # , DillSerializer)): + self.rpc_serializer = rpc_serializer + elif isinstance(rpc_serializer, str) or rpc_serializer is None: + self.rpc_serializer = serializers.get(rpc_serializer, SerpentSerializer)() else: - raise ValueError("invalid proxy serializer option for {}. Given option : {}".format(self.__class__, proxy_serializer)) + raise ValueError("invalid proxy serializer option for {}. Given option : {}".format(self.__class__, rpc_serializer)) self.server_type : Enum = server_type super().__init__() @@ -151,9 +200,9 @@ def parse_client_message(self, message : List[bytes]) -> Any: if message_type == INSTRUCTION: client_type = message[2] if client_type == PROXY: - message[5] = self.proxy_serializer.loads(message[5]) # type: ignore - message[6] = self.proxy_serializer.loads(message[6]) # type: ignore - message[7] = self.proxy_serializer.loads(message[7]) # type: ignore + message[5] = self.rpc_serializer.loads(message[5]) # type: ignore + message[6] = self.rpc_serializer.loads(message[6]) # type: ignore + message[7] = self.rpc_serializer.loads(message[7]) # type: ignore elif client_type == HTTP_SERVER: message[5] = self.json_serializer.loads(message[5]) # type: ignore message[6] = self.json_serializer.loads(message[6]) # type: ignore @@ -185,7 +234,7 @@ def craft_reply_from_client_message(self, original_client_message : List[bytes], if client_type == HTTP_SERVER: data = self.json_serializer.dumps(data) elif client_type == PROXY: - data = self.proxy_serializer.dumps(data) + data = self.rpc_serializer.dumps(data) else: 
raise ValueError("invalid client type given '{}' for preparing message to send from '{}' of type {}".format( client_type, self.identity, self.__class__)) @@ -234,12 +283,13 @@ class AsyncZMQServer(BaseZMQServer, BaseAsyncZMQ): """ def __init__(self, instance_name : str, server_type : Enum, context : Union[zmq.asyncio.Context, None] = None, - **kwargs) -> None: - BaseZMQServer.__init__(self, server_type, json_serializer = kwargs.get('json_serializer'), - proxy_serializer = kwargs.get('proxy_serializer', None)) + protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC, socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None: + BaseZMQServer.__init__(self, server_type, json_serializer=kwargs.get('json_serializer', None), + rpc_serializer=kwargs.get('rpc_serializer', None)) BaseAsyncZMQ.__init__(self) self.instance_name = instance_name - self.create_socket(context, instance_name, instance_name, bind = True) + self.create_socket(instance_name, context, identity=instance_name, bind=True, protocol=protocol, socket_type=socket_type, + socket_address=kwargs.get("socket_address", None)) self._terminate_context = context == None # terminate if it was created by instance async def _handshake(self, address : bytes) -> None: @@ -248,7 +298,7 @@ async def _handshake(self, address : bytes) -> None: async def _handle_invalid_message(self, original_client_message : List[bytes], exception : Exception) -> None: await self.socket.send_multipart(self.craft_reply_from_client_message(original_client_message, exception, - message_type = INVALID_MESSAGE)) + message_type=INVALID_MESSAGE)) self.logger.info("sent exception message to client '{}' : '{}'".format(original_client_message[0], str(exception))) async def async_recv_instruction(self) -> Any: @@ -299,20 +349,15 @@ def exit(self) -> None: class AsyncPollingZMQServer(AsyncZMQServer): """ - The purpose of this server to be identical to AsyncZMQServer except that it can be stopped from server side. 
+ Identical to AsyncZMQServer, except that it can be stopped from server side. This is achieved by polling the socket instead of waiting indefinitely on the socket. - """ - def __init__(self, instance_name : str, executor_thread_event : threading.Event, - server_type : Enum, context : Union[zmq.asyncio.Context, None] = None, - poll_timeout = 25, **kwargs) -> None: - super().__init__(instance_name, server_type, context, **kwargs) + def __init__(self, instance_name : str, *, server_type : Enum, context : Union[zmq.asyncio.Context, None] = None, + socket_type : zmq.SocketType = zmq.ROUTER, protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC, + poll_timeout = 25, **kwargs) -> None: + super().__init__(instance_name, server_type, context, socket_type, protocol=protocol, **kwargs) self.poller = zmq.asyncio.Poller() - self._inproc_socket = None # definitions to be used later - self._ipc_socket = None - self._tcp_socket = None - self._executor_thread_event = executor_thread_event self._instructions = [] self.poller.register(self.socket, zmq.POLLIN) self.poll_timeout = poll_timeout @@ -343,9 +388,6 @@ async def poll_instructions(self) -> List[List[bytes]]: self.logger.debug("received instruction from client '{}' with msg-ID '{}'".format(instruction[0], instruction[4])) instructions.append(instruction) - if len(instructions) > 0: - self._instructions.extend(instructions) - self._executor_thread_event.set() return instructions def stop_polling(self) -> None: @@ -358,6 +400,9 @@ def exit(self) -> None: class ZMQServerPool(BaseZMQServer): + """ + Implements pool of sockets + """ def __init__(self, instance_names : Union[List[str], None] = None, **kwargs) -> None: self.context = zmq.asyncio.Context() @@ -370,7 +415,7 @@ def __init__(self, instance_names : Union[List[str], None] = None, **kwargs) -> for server in self.pool.values(): self.poller.register(server.socket, zmq.POLLIN) super().__init__(server_type = ServerTypes.POOL, json_serializer = kwargs.get('json_serializer'), - 
proxy_serializer = kwargs.get('proxy_serializer', None)) + rpc_serializer = kwargs.get('rpc_serializer', None)) def register_server(self, server : Union[AsyncZMQServer, AsyncPollingZMQServer]) -> None: self.pool[server.instance_name] = server @@ -403,7 +448,7 @@ async def poll(self, strip_delimiter : bool = False) -> List[Any]: self.stop_poll = False instructions = [] while not self.stop_poll: - sockets = await self.poller.poll(1) # type hints dont work in this line + sockets = await self.poller.poll(self.poll_timeout) for socket, _ in sockets: while True: try: @@ -437,9 +482,95 @@ def __iter__(self) -> Iterator[str]: def __contains__(self, name : str) -> bool: return name in self.pool.keys() + +class RPCServer: + + def __init__(self, instance_name : str, *, server_type : Enum, context : Union[zmq.asyncio.Context, None] = None, + protocols : typing.List[ZMQ_PROTOCOLS] = ZMQ_PROTOCOLS.IPC, poll_timeout = 25, **kwargs) -> None: + context = zmq.asyncio.Context() + if ZMQ_PROTOCOLS.TCP in protocols: + self.tcp_server = AsyncPollingZMQServer(instance_name=instance_name, context=context, + protocol=ZMQ_PROTOCOLS.TCP, **kwargs) + if ZMQ_PROTOCOLS.IPC in protocols: + self.ipc_server = AsyncPollingZMQServer(instance_name=instance_name, context=context, + protocol=ZMQ_PROTOCOLS.IPC, **kwargs) + if ZMQ_PROTOCOLS.INPROC in protocols: + self.inproc_server = AsyncPollingZMQServer(instance_name=instance_name, context=context, + protocol=ZMQ_PROTOCOLS.INPROC, **kwargs) + self.inproc_client = AsyncZMQClient(server_instance_name=instance_name, identity='', client_type='', + context=context, protocol=ZMQ_PROTOCOLS.INPROC) + self._instructions = deque() # type: typing.Iterable[typing.Tuple[typing.Any, asyncio.Event, asyncio.Future]] + self._replies = deque() + self.poll_timeout = poll_timeout + self.poller = zmq.Poller() + self.poller.register(self.ipc_server) + self.poller.register(self.tcp_server) + self.poller.register(self.inproc_server) + self._socket_to_server_map = { + 
self.tcp_server.socket : self.tcp_server, + self.ipc_server.socket : self.ipc_server, + self.inproc_server.socket : self.inproc_server + } + + def prepare(self): + """ + registers socket polling method and message tunnelling methods to the running + asyncio event loop + """ + eventloop = asyncio.get_running_loop() + eventloop.call_soon(self.poll) + eventloop.call_soon(self.tunnel_message_to_remote_objects) + + async def poll(self): + self.stop_poll = False + while not self.stop_poll: + sockets = await self.poller.poll(self.poll_timeout) + for socket, _ in sockets: + while True: + try: + instruction = self.parse_client_message(await socket.recv_multipart(zmq.NOBLOCK)) + except zmq.Again: + break + else: + timeout = instruction[7].get("timeout", None) + ready_to_process_event = asyncio.Event() + self._instructions.append((instruction, ready_to_process_event, + asyncio.create_task(self.process_timeouts(ready_to_process_event, timeout)), + socket + )) + + + async def tunnel_message_to_remote_objects(self, origin_socket : zmq.Context.socket): + """ + client's message to server looks as follows: + [address, bytes(), client type, message type, msg id, instruction, arguments, execution_context] + [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 ] + """ + while not self.stop_poll: + message, timeout_event, timeout_future = self._instructions.popleft() + await timeout_event.set() + if not timeout_future.done(): + await asyncio.wait(timeout_future) + if timeout_future.result(): + await self.inproc_client.socket.send_multipart(message) + reply = await self.inproc_client.async_recv_reply() + await origin_socket.send_multipart(reply) + + + async def process_timeouts(self, ready_to_process_event : asyncio.Event, origin_socket : zmq.Socket, + original_message : typing.List, timeout : typing.Optional[float] = None) -> bool: + try: + await asyncio.wait_for(ready_to_process_event.wait(), timeout) + return True + except TimeoutError: + await 
self._socket_to_server_map[origin_socket].async_send_reply(original_message, 'TIMEOUT') + return False + + + class BaseZMQClient(BaseZMQ): """ Server to client: @@ -459,7 +590,7 @@ def __init__(self, server_address : Union[bytes, None], server_instance_name : U self.client_type = client_type else: raise ValueError("invalid client type for {}. Given option {}".format(self.__class__, client_type)) - self.proxy_serializer = None + self.rpc_serializer = None self.json_serializer = None if client_type == HTTP_SERVER: json_serializer = kwargs.get("json_serializer", None) @@ -468,14 +599,14 @@ def __init__(self, server_address : Union[bytes, None], server_instance_name : U else: raise ValueError("invalid JSON serializer option for {}. Given option {}".format(self.__class__, json_serializer)) else: - proxy_serializer = kwargs.get("proxy_serializer", None) - if proxy_serializer is None or isinstance(proxy_serializer, (PickleSerializer, SerpentSerializer, + rpc_serializer = kwargs.get("rpc_serializer", None) + if rpc_serializer is None or isinstance(rpc_serializer, (PickleSerializer, SerpentSerializer, JSONSerializer)):#, DillSerializer)): - self.proxy_serializer = proxy_serializer or SerpentSerializer() - elif isinstance(proxy_serializer, str) and proxy_serializer in serializers.keys(): - self.proxy_serializer = serializers[proxy_serializer]() + self.rpc_serializer = rpc_serializer or SerpentSerializer() + elif isinstance(rpc_serializer, str) and rpc_serializer in serializers.keys(): + self.rpc_serializer = serializers[rpc_serializer]() else: - raise ValueError("invalid proxy serializer option for {}. Given option {}".format(self.__class__, proxy_serializer)) + raise ValueError("invalid proxy serializer option for {}. 
Given option {}".format(self.__class__, rpc_serializer)) self.server_address = server_address self.server_instance_name = server_instance_name self.server_type = ServerTypes.UNKNOWN_TYPE @@ -494,7 +625,7 @@ def parse_server_message(self, message : List[bytes]) -> Any: if self.client_type == HTTP_SERVER: message[5] = self.json_serializer.loads(message[5]) # type: ignore elif self.client_type == PROXY: - message[5] = self.proxy_serializer.loads(message[5]) # type: ignore + message[5] = self.rpc_serializer.loads(message[5]) # type: ignore return message elif message_type == HANDSHAKE: self.logger.debug("""handshake messages arriving out of order are silently dropped as receiving this message @@ -525,9 +656,9 @@ def craft_instruction_from_arguments(self, instruction : str, arguments : Dict[s elif not isinstance(arguments, bytes): arguments : bytes = self.json_serializer.dumps(arguments) else: - instruction : bytes = self.proxy_serializer.dumps(instruction) - context : bytes = self.proxy_serializer.dumps(context) - arguments : bytes = self.proxy_serializer.dumps(arguments) + instruction : bytes = self.rpc_serializer.dumps(instruction) + context : bytes = self.rpc_serializer.dumps(context) + arguments : bytes = self.rpc_serializer.dumps(arguments) return [ self.server_address, EMPTY_BYTE, @@ -757,7 +888,7 @@ def register_client(self, instance_name : str, protocol : str = 'IPC'): if instance_name not in self.pool.keys(): self.pool[instance_name] = AsyncZMQClient(server_instance_name = instance_name, identity = self.identity, client_type = self.client_type, handshake = True, protocol = protocol, - context = self.context, proxy_serializer = self.proxy_serializer, json_serializer = self.json_serializer) + context = self.context, rpc_serializer = self.rpc_serializer, json_serializer = self.json_serializer) else: raise ValueError("client already present in pool") @@ -977,21 +1108,13 @@ def __init__(self, name : str, URL_path : typing.Optional[str] = None) -> None: self.name = 
name # self.name_bytes = bytes(name, encoding = 'utf-8') self.URL_path = URL_path or '/' + name - self._full_URL_path_prefix = None - self._owner : typing.Optional[Parameterized] = None + self._unique_event_name = None # type: typing.Optional[str] + self._owner = None # type: typing.Optional[Parameterized] @property def owner(self): return self._owner - - @property - def full_URL_path_prefix(self): - return self._full_URL_path_prefix - - @full_URL_path_prefix.setter - def full_URL_path_prefix(self, value : str): - self._full_URL_path_prefix = value - + @property def publisher(self) -> "EventPublisher": return self._publisher @@ -1000,13 +1123,12 @@ def publisher(self) -> "EventPublisher": def publisher(self, value : "EventPublisher") -> None: if not hasattr(self, '_publisher'): self._publisher = value - self._event_unique_str = bytes(f"{self.full_URL_path_prefix}{self.URL_path}", encoding='utf-8') self._publisher.register_event(self) else: raise AttributeError("cannot reassign publisher attribute of event {}".format(self.name)) def push(self, data : typing.Any = None, serialize : bool = True): - self.publisher.publish_event(self._event_unique_str, data, serialize) + self.publisher.publish_event(self._unique_event_name, data, serialize) @@ -1035,12 +1157,12 @@ def __init__(self, identity : str, context : Union[zmq.Context, None] = None, * try: self.socket_address = "tcp://127.0.0.1:{}".format(i) self.socket.bind(self.socket_address) - except zmq.error.ZMQError as E: - if E.strerror.startswith('Address in use'): + except zmq.error.ZMQError as ex: + if ex.strerror.startswith('Address in use'): pass else: print("Following error while atttempting to bind to socket address : {}".format(self.socket_address)) - raise + raise ex from None else: self.logger = self.get_logger(identity, self.socket_address, logging.DEBUG, self.__class__.__name__) self.logger.info("created event publishing socket at {}".format(self.socket_address)) @@ -1114,6 +1236,8 @@ def exit(self): except 
Exception as E: self.logger.warn("could not properly terminate socket or attempted to terminate an already terminated socket of event consuming socket at address '{}'. Exception message : {}".format( self.socket_address, str(E))) + + __all__ = ['ServerTypes', 'AsyncZMQServer', 'AsyncPollingZMQServer', 'ZMQServerPool', 'SyncZMQClient', 'AsyncZMQClient', 'AsyncZMQClientPool', 'MessageMappedZMQClientPool', 'Event', 'CriticalEvent'] \ No newline at end of file From 7eb0813005cf635f63d598194291f358ef544be5 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 22:37:28 +0100 Subject: [PATCH 014/167] added remote parameter doc --- doc/source/autodoc/index.rst | 11 +++-- doc/source/autodoc/server/data_classes.rst | 6 --- .../autodoc/server/data_classes/index.rst | 48 +++++++++++++++++++ doc/source/autodoc/server/decorators.rst | 6 --- doc/source/autodoc/server/remote_object.rst | 6 --- .../autodoc/server/remote_parameter/index.rst | 12 +++++ doc/source/autodoc/server/server.rst | 8 ---- .../server/zmq_message_brokers/base_zmq.rst | 6 +++ .../server/zmq_message_brokers/index.rst | 10 ++++ doc/source/conf.py | 4 +- .../{note.rst => development_notes.rst} | 4 +- doc/source/examples/index.rst | 8 ++-- doc/source/index.rst | 37 +++++++------- doc/source/installation.rst | 4 +- doc/source/requirements.txt | 3 +- 15 files changed, 119 insertions(+), 54 deletions(-) delete mode 100644 doc/source/autodoc/server/data_classes.rst create mode 100644 doc/source/autodoc/server/data_classes/index.rst delete mode 100644 doc/source/autodoc/server/decorators.rst delete mode 100644 doc/source/autodoc/server/remote_object.rst create mode 100644 doc/source/autodoc/server/remote_parameter/index.rst delete mode 100644 doc/source/autodoc/server/server.rst create mode 100644 doc/source/autodoc/server/zmq_message_brokers/base_zmq.rst create mode 100644 doc/source/autodoc/server/zmq_message_brokers/index.rst rename 
doc/source/{note.rst => development_notes.rst} (98%) diff --git a/doc/source/autodoc/index.rst b/doc/source/autodoc/index.rst index 7b97343..bf315f2 100644 --- a/doc/source/autodoc/index.rst +++ b/doc/source/autodoc/index.rst @@ -3,8 +3,13 @@ API Reference ============= +hololinked.server +----------------- + .. toctree:: - :maxdepth: 2 + :maxdepth: 1 + + server/remote_parameter/index + server/zmq_message_brokers/index + server/data_classes/index - server/server - diff --git a/doc/source/autodoc/server/data_classes.rst b/doc/source/autodoc/server/data_classes.rst deleted file mode 100644 index 602cf0a..0000000 --- a/doc/source/autodoc/server/data_classes.rst +++ /dev/null @@ -1,6 +0,0 @@ -hololinked.server.data_classes --------------------------- - -.. automodule:: hololinked.server.data_classes - :members: - :show-inheritance: diff --git a/doc/source/autodoc/server/data_classes/index.rst b/doc/source/autodoc/server/data_classes/index.rst new file mode 100644 index 0000000..3323339 --- /dev/null +++ b/doc/source/autodoc/server/data_classes/index.rst @@ -0,0 +1,48 @@ +data classes +============ + +.. toctree:: + :maxdepth: 1 + + +.. collapse:: RemoteResourceInfoValidator + + .. autoclass:: hololinked.server.data_classes.RemoteResourceInfoValidator + :members: + :show-inheritance: + + +.. collapse:: RemoteResource + + .. autoclass:: hololinked.server.data_classes.RemoteResource + :members: + :show-inheritance: + + +.. collapse:: HTTPResourceInfo + + .. autoclass:: hololinked.server.data_classes.HTTPResourceInfo + :members: + :show-inheritance: + + +.. collapse:: RPCResourceInfo + + .. autoclass:: hololinked.server.data_classes.RPCResourceInfo + :members: + :show-inheritance: + + +.. collapse:: ServerSentEventInfo + + .. autoclass:: hololinked.server.data_classes.ServerSentEventInfo + :members: + :show-inheritance: + + +.. collapse:: GUIResources + + .. 
autoclass:: hololinked.server.data_classes.GUIResources + :members: + :show-inheritance: + diff --git a/doc/source/autodoc/server/decorators.rst b/doc/source/autodoc/server/decorators.rst deleted file mode 100644 index 0b6d33c..0000000 --- a/doc/source/autodoc/server/decorators.rst +++ /dev/null @@ -1,6 +0,0 @@ -hololinked.server.decorators ------------------------ - -.. automodule:: hololinked.server.decorators - :members: - :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/remote_object.rst b/doc/source/autodoc/server/remote_object.rst deleted file mode 100644 index 3c43d80..0000000 --- a/doc/source/autodoc/server/remote_object.rst +++ /dev/null @@ -1,6 +0,0 @@ -hololinked.server.remote_object --------------------------- - -.. automodule:: hololinked.server.remote_object - :members: - :show-inheritance: diff --git a/doc/source/autodoc/server/remote_parameter/index.rst b/doc/source/autodoc/server/remote_parameter/index.rst new file mode 100644 index 0000000..4f05673 --- /dev/null +++ b/doc/source/autodoc/server/remote_parameter/index.rst @@ -0,0 +1,12 @@ +Remote Parameters +================ + +.. toctree:: + :maxdepth: 1 + + +RemoteParameter +--------------- + +.. autoclass:: hololinked.server.remote_parameter.RemoteParameter + :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/server.rst b/doc/source/autodoc/server/server.rst deleted file mode 100644 index 785a6b7..0000000 --- a/doc/source/autodoc/server/server.rst +++ /dev/null @@ -1,8 +0,0 @@ -hololinked.server -============ - -.. toctree:: - - remote_object - decorators - data_classes diff --git a/doc/source/autodoc/server/zmq_message_brokers/base_zmq.rst b/doc/source/autodoc/server/zmq_message_brokers/base_zmq.rst new file mode 100644 index 0000000..9ae1f1b --- /dev/null +++ b/doc/source/autodoc/server/zmq_message_brokers/base_zmq.rst @@ -0,0 +1,6 @@ +BaseZMQ +======= + +.. 
autoclass:: hololinked.server.zmq_message_brokers.BaseZMQ + :members: + :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/zmq_message_brokers/index.rst b/doc/source/autodoc/server/zmq_message_brokers/index.rst new file mode 100644 index 0000000..1bdcaaa --- /dev/null +++ b/doc/source/autodoc/server/zmq_message_brokers/index.rst @@ -0,0 +1,10 @@ +ZMQ Message Brokers +=================== + +.. toctree:: + :maxdepth: 1 + + base_zmq + + + diff --git a/doc/source/conf.py b/doc/source/conf.py index ac3a32b..dbfd6ef 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -33,7 +33,9 @@ extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.duration', - 'sphinx_copybutton' + 'sphinx_copybutton', + 'sphinx_toolbox.collapse', + 'numpydoc' ] # Add any paths that contain templates here, relative to this directory. diff --git a/doc/source/note.rst b/doc/source/development_notes.rst similarity index 98% rename from doc/source/note.rst rename to doc/source/development_notes.rst index 7727011..16ffb97 100644 --- a/doc/source/note.rst +++ b/doc/source/development_notes.rst @@ -2,8 +2,8 @@ .. _note: -design note -=========== +development notes +================= In the interest of information to software engineers and web developers, the main difference of the above to a conventional RPC or REST(-like) paradigm in HTTP is that, |module-highlighted| attempts to be a hybrid of both. For instrument control & data-acquisition, it is difficult to move away completely from RPC to REST. Besides, most instrument drivers/hardware diff --git a/doc/source/examples/index.rst b/doc/source/examples/index.rst index dc57c5f..9220c0e 100644 --- a/doc/source/examples/index.rst +++ b/doc/source/examples/index.rst @@ -7,14 +7,16 @@ Remote Objects .. toctree:: server/spectrometer/index -The code is hosted at the repository `hololinked-examples `_. Consider also installing +The code is hosted at the repository `hololinked-examples `_. 
+Consider also installing * a JSON preview tool for your browser like `Chrome JSON Viewer `_. * `hololinked-portal `_ to have an web-interface to interact with RemoteObjects (after you can run your example object) -* hoppscotch or postman +* `hoppscotch `_ or `postman `_ GUI --- -Some browser based client examples based on ReactJS & `react material UI `_ are hosted at `hololinked.dev `_ +Some browser based client examples based on ReactJS are hosted at +`hololinked.dev `_ diff --git a/doc/source/index.rst b/doc/source/index.rst index b2484ab..7d6462a 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -10,10 +10,12 @@ Welcome to |module|'s documentation! ==================================== -|module-highlighted| is (supposed to be) a versatile and pythonic tool for building custom control and data acquisition software systems. If you have a requirement -to capture data from your hardware/instruments remotely in your local network, control them, show the data in a browser/dashboard, provide a Qt-GUI or run automated scripts, |module-highlighted| can help. Even if you wish to do -data-acquisition/control locally in a single computer, one can still separate the concerns of GUI & device or integrate with web-browser for a modern interface. -|module-highlighted| was created & is being designed with the following features in mind: +|module-highlighted| is (supposed to be) a versatile and pythonic tool for building custom control and data acquisition +software systems. If you have a requirement to capture data from your hardware/instrumentation remotely through your +domain network, control them, show the data in a browser/dashboard, provide a Qt-GUI or run automated scripts, +|module-highlighted| can help. Even if you wish to do data-acquisition/control locally in a single computer, one can still +separate the concerns of GUI & device or integrate with web-browser for a modern interface or use modern web development +based tools. 
|module-highlighted| is being developed with the following features in mind: * being truly pythonic - all code in python & all features of python * easy to understand & setup @@ -21,14 +23,19 @@ data-acquisition/control locally in a single computer, one can still separate th * agnostic to system size & flexibility in topology * 30FPS 1280*1080*3 image streaming over HTTP -In short - to use it in your home/hobby, in a lab or in a big research facility & industry. +In short - to use it in your home/hobby, in a lab or in a research facility & industry. -|module-highlighted| is primarily object oriented & the building block is the ``RemoteObject`` class. Your instrument class (i.e. the python object that controls the hardware) should inherit this class. Each such -class provides remote methods, remote attributes (also called remote parameters) & events which become accessible on the network through HTTP and/or TCP -after implementation by the |module-highlighted| developer. Interprocess communication (ZMQ's IPC) is available for restriction to single-computer applications. -Remote methods can be used to run control and measurement operations on your instruments or arbitrary python logic. -Remote parameters are type-checked object attributes with getter-setter options (identical to python ``property`` with added network access). -Events allow to asynchronously push arbitrary data to clients. Once such a ``RemoteObject`` is instantiated, it can be connected with the server of choice. +|module-highlighted| is primarily object oriented & the building block is the ``RemoteObject`` class. Your instrument +class (i.e. the python object that controls the hardware) should inherit this class. Each such class provides remote +methods, remote attributes (also called remote parameters) & events which become accessible on the network through HTTP +and/or TCP after implementation by the |module-highlighted| developer. 
Interprocess communication (ZMQ's IPC) is +available for restriction to single-computer applications. Remote methods can be used to run control and measurement +operations on your instruments or arbitrary python logic. Remote parameters are type-checked object attributes with +getter-setter options (identical to python ``property`` with added network access). Events allow to asynchronously push +arbitrary data to clients. Once such a ``RemoteObject`` is instantiated, it can be connected with the server of choice. + +.. warning:: + This project is under development and is an idealogical state. Please use it only for playtesting or exploring. .. note:: web developers & software engineers, consider reading the :ref:`note ` section @@ -41,9 +48,8 @@ Please follow the documentation for examples & tutorials, how-to's and API refer installation examples/index - benchmark/index autodoc/index - note + development_notes Indices and tables @@ -53,7 +59,6 @@ Indices and tables * :ref:`modindex` * :ref:`search` -.. note:: - This project is under development and is an idealogical state. Please use it only for playtesting or exploring. -Documentation last build : |today| \ No newline at end of file + +last build : |today| \ No newline at end of file diff --git a/doc/source/installation.rst b/doc/source/installation.rst index c24c01f..5617875 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -9,7 +9,7 @@ As |module-highlighted| is still in idealogical & development state, it is recom git clone https://github.com/VigneshVSV/hololinked.git -One could setup a conda environment from the included ``hololinked.yml`` file +Since there is no requirements files yet, one could setup a conda environment from the included ``hololinked.yml`` file .. 
code:: shell @@ -30,7 +30,7 @@ Also check out: - repository containing example code discussed in this documentation * - hololinked-portal - https://github.com/VigneshVSV/hololinked-portal.git - - GUI to access RemoteObjects & Data Visualization helper + - GUI to access RemoteObjects & Data Visualization helper. diff --git a/doc/source/requirements.txt b/doc/source/requirements.txt index 6f60f02..d4f8e5d 100644 --- a/doc/source/requirements.txt +++ b/doc/source/requirements.txt @@ -6,4 +6,5 @@ sphinxcontrib-htmlhelp==2.0.4 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.6 sphinxcontrib-serializinghtml==1.1.9 -pydata-sphinx-theme==0.14.3 \ No newline at end of file +pydata-sphinx-theme==0.14.3 +numpydoc==1.6.0 \ No newline at end of file From 6322fa88cda43fd56a13cb4927e43b9adac29f3d Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 22:38:31 +0100 Subject: [PATCH 015/167] added __call__ method for Parameter --- hololinked/param/parameterized.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/hololinked/param/parameterized.py b/hololinked/param/parameterized.py index 974a8ed..06d3b04 100644 --- a/hololinked/param/parameterized.py +++ b/hololinked/param/parameterized.py @@ -503,6 +503,10 @@ def deleter(self, func : typing.Callable) -> typing.Callable: self.overloads['fdel'] = func return func + def __call__(self, func: typing.Callable) -> "Parameter": + self.getter(func) + return self + @classmethod def serialize(cls, value : typing.Any) -> typing.Any: "Given the parameter value, return a Python value suitable for serialization" From 33695bfed21c2734b3d4957262d671bb516e88fa Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 22:43:22 +0100 Subject: [PATCH 016/167] added sphinx toolbox to requirements for doc building --- doc/source/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/doc/source/requirements.txt b/doc/source/requirements.txt index d4f8e5d..dc00070 100644 --- a/doc/source/requirements.txt +++ b/doc/source/requirements.txt @@ -7,4 +7,5 @@ sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.6 sphinxcontrib-serializinghtml==1.1.9 pydata-sphinx-theme==0.14.3 -numpydoc==1.6.0 \ No newline at end of file +numpydoc==1.6.0 +sphinx-toolbox=3.5.0 \ No newline at end of file From 19abd383086cd0f0b07ba1887b19aa8c5a5856c3 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 22:45:55 +0100 Subject: [PATCH 017/167] changed equalto to double equal to --- doc/source/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/requirements.txt b/doc/source/requirements.txt index dc00070..360f6a3 100644 --- a/doc/source/requirements.txt +++ b/doc/source/requirements.txt @@ -8,4 +8,4 @@ sphinxcontrib-qthelp==1.0.6 sphinxcontrib-serializinghtml==1.1.9 pydata-sphinx-theme==0.14.3 numpydoc==1.6.0 -sphinx-toolbox=3.5.0 \ No newline at end of file +sphinx-toolbox==3.5.0 \ No newline at end of file From 7fd0b2eac1bc50b128929fedf54148d3d65bbd31 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 23:06:30 +0100 Subject: [PATCH 018/167] appended local package directory to conf through relative paths --- doc/source/conf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index dbfd6ef..14fe1c2 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -10,9 +10,9 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
# -# import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) +import os +import sys +sys.path.insert(0, os.path.abspath(f'..{os.sep}..')) # -- Project information ----------------------------------------------------- From 8118f405f8bb62cb52826ee588eb14391f381755 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 23:07:12 +0100 Subject: [PATCH 019/167] appended local package directory to conf through relative paths --- doc/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 14fe1c2..19ee27b 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -12,7 +12,7 @@ # import os import sys -sys.path.insert(0, os.path.abspath(f'..{os.sep}..')) +sys.path.insert(0, os.path.abspath(f'..{os.sep}..{os.sep}..')) # -- Project information ----------------------------------------------------- From 566b19d1f100b61720bd0a6efeede0e21dad6e8b Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 23:09:05 +0100 Subject: [PATCH 020/167] appended local package directory to conf through relative paths --- doc/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 19ee27b..14fe1c2 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -12,7 +12,7 @@ # import os import sys -sys.path.insert(0, os.path.abspath(f'..{os.sep}..{os.sep}..')) +sys.path.insert(0, os.path.abspath(f'..{os.sep}..')) # -- Project information ----------------------------------------------------- From 4832c72e9e4819fe7b7313eff97a5b4ffd7d5d97 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 23:14:57 +0100 Subject: [PATCH 021/167] generated requirements.txt along with updated requirements for sphinx --- requirements.txt | 12 ++++++++++++ 1 file 
changed, 12 insertions(+) create mode 100644 requirements.txt diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..af2d9ee --- /dev/null +++ b/requirements.txt @@ -0,0 +1,12 @@ +argon2==0.1.10 +ConfigParser==6.0.0 +ifaddr==0.2.0 +ipython==8.21.0 +numpy==1.26.4 +pandas==2.2.0 +pyzmq==25.1.0 +serpent==1.41 +setuptools==68.0.0 +SQLAlchemy==2.0.21 +SQLAlchemy_Utils==0.41.1 +tornado==6.3.3 \ No newline at end of file From 846d5b2df1ed20f90d6afe9a1cb42e5bf98257a0 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 23:16:39 +0100 Subject: [PATCH 022/167] updated requirements to doc (missing from last commit) --- doc/source/requirements.txt | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/doc/source/requirements.txt b/doc/source/requirements.txt index 360f6a3..60eb455 100644 --- a/doc/source/requirements.txt +++ b/doc/source/requirements.txt @@ -8,4 +8,16 @@ sphinxcontrib-qthelp==1.0.6 sphinxcontrib-serializinghtml==1.1.9 pydata-sphinx-theme==0.14.3 numpydoc==1.6.0 -sphinx-toolbox==3.5.0 \ No newline at end of file +sphinx-toolbox==3.5.0 +argon2==0.1.10 +ConfigParser==6.0.0 +ifaddr==0.2.0 +ipython==8.21.0 +numpy==1.26.4 +pandas==2.2.0 +pyzmq==25.1.0 +serpent==1.41 +setuptools==68.0.0 +SQLAlchemy==2.0.21 +SQLAlchemy_Utils==0.41.1 +tornado==6.3.3 \ No newline at end of file From c3099703823e58db0bc0227d856216323ef4f90c Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 10 Feb 2024 23:38:16 +0100 Subject: [PATCH 023/167] refactored imports and removed many sphinx warnings --- doc/source/autodoc/server/data_classes/index.rst | 12 ++++++------ doc/source/autodoc/server/remote_parameter/index.rst | 2 +- doc/source/conf.py | 4 +++- doc/source/examples/server/spectrometer/index.rst | 1 + 4 files changed, 11 insertions(+), 8 deletions(-) diff --git 
a/doc/source/autodoc/server/data_classes/index.rst b/doc/source/autodoc/server/data_classes/index.rst index 3323339..e7cbd30 100644 --- a/doc/source/autodoc/server/data_classes/index.rst +++ b/doc/source/autodoc/server/data_classes/index.rst @@ -19,23 +19,23 @@ data classes :show-inheritance: -.. collapse:: HTTPResourceInfo +.. collapse:: HTTPResource - .. autoclass:: hololinked.server.data_classes.HTTPResourceInfo + .. autoclass:: hololinked.server.data_classes.HTTPResource :members: :show-inheritance: -.. collapse:: RPCResourceInfo +.. collapse:: RPCResource - .. autoclass:: hololinked.server.data_classes.RPCResourceInfo + .. autoclass:: hololinked.server.data_classes.RPCResource :members: :show-inheritance: -.. collapse:: ServerSentEventInfo +.. collapse:: ServerSentEvent - .. autoclass:: hololinked.server.data_classes.ServerSentEventInfo + .. autoclass:: hololinked.server.data_classes.ServerSentEvent :members: :show-inheritance: diff --git a/doc/source/autodoc/server/remote_parameter/index.rst b/doc/source/autodoc/server/remote_parameter/index.rst index 4f05673..780d239 100644 --- a/doc/source/autodoc/server/remote_parameter/index.rst +++ b/doc/source/autodoc/server/remote_parameter/index.rst @@ -1,5 +1,5 @@ Remote Parameters -================ +================= .. toctree:: :maxdepth: 1 diff --git a/doc/source/conf.py b/doc/source/conf.py index 14fe1c2..08cf640 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -62,4 +62,6 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
-html_static_path = ['_static'] \ No newline at end of file +html_static_path = ['_static'] + +numpydoc_show_class_members = False \ No newline at end of file diff --git a/doc/source/examples/server/spectrometer/index.rst b/doc/source/examples/server/spectrometer/index.rst index 3871373..2533cd6 100644 --- a/doc/source/examples/server/spectrometer/index.rst +++ b/doc/source/examples/server/spectrometer/index.rst @@ -103,6 +103,7 @@ with the prefix of the HTTP Server domain name and object instance name. log_level=logging.DEBUG, ) O.run() + To construct the full `URL_path`, the format is |br| `https://{domain name}/{instance name}/{parameter URL path}`, which gives |br| `https://localhost:8083/spectrometer/ocean-optics/USB2000-plus/serial-number` |br| for the `serial_number`. From 0e3ccb4025e9d64c854aa370289b70d4330671e4 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 11 Feb 2024 20:22:53 +0100 Subject: [PATCH 024/167] threaded RPC server which seems to be working - untidy v1 --- hololinked/server/zmq_message_brokers.py | 593 +++++++++++++++++------ 1 file changed, 436 insertions(+), 157 deletions(-) diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py index ac3f62b..b8907c7 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -20,9 +20,11 @@ HANDSHAKE = b'HANDSHAKE' +INVALID_MESSAGE = b'INVALID_MESSAGE' +TIMEOUT = b'TIMEOUT' + INSTRUCTION = b'INSTRUCTION' REPLY = b'REPLY' -INVALID_MESSAGE = b'INVALID_MESSAGE' EVENT = b'EVENT' EVENT_SUBSCRIPTION = b'EVENT_SUBSCRIPTION' SUCCESS = b'SUCCESS' @@ -30,8 +32,10 @@ EMPTY_BYTE = b'' EMPTY_DICT = {} + HTTP_SERVER = b'HTTP_SERVER' -PROXY = b'PROXY' +PROXY = b'PROXY' +TUNNELER = b'TUNNEL' @@ -45,11 +49,8 @@ class ServerTypes(Enum): class BaseZMQ: """ - Base class for all ZMQ message brokers. `hololinked` uses ZMQ under the hood to implement a - RPC server. 
All requests, either coming through a HTTP Server or an RPC client are routed via the RPC - Server to queue them before execution. See documentation of `RPCServer` for details. - - This class implements `create_socket()` method & logger which is common to all server and client implementations. + Base class for all ZMQ message brokers. Implements `create_socket()` method & logger which is common to + all server and client implementations. """ def __init__(self) -> None: # only type definition for logger @@ -62,14 +63,12 @@ def exit(self) -> None: """ raise NotImplementedError("implement exit() to gracefully exit ZMQ in {}.".format(self.__class__)) - def __del__(self) -> None: - self.exit() - def create_socket(self, context : Union[zmq.asyncio.Context, zmq.Context], instance_name : str, identity : str, bind : bool = False, protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC, socket_type : zmq.SocketType = zmq.ROUTER, - socket_address : Union[str, None] = None) -> None: + **kwargs) -> None: """ - Create a socket with certain specifications + Create a socket with certain specifications. When successful, a logger is also created. Supported ZeroMQ protocols + are TCP, IPC & INPROC. For IPC sockets, a file is created under TEMP_DIR of global configuration. 
Parameters ---------- @@ -94,6 +93,13 @@ def create_socket(self, context : Union[zmq.asyncio.Context, zmq.Context], insta Returns ------- None + + Raises + ------ + NotImplementedError + if protocol other than TCP, IPC or INPROC is used + RuntimeError + if protocol is TCP, a connection from client side is desired but a socket address is not supplied """ self.context = context self.identity = identity @@ -105,7 +111,7 @@ def create_socket(self, context : Union[zmq.asyncio.Context, zmq.Context], insta directory = global_config.TEMP_DIR + socket_dir if not os.path.exists(directory): os.makedirs(directory) - # re-compute for IPC + # re-compute for IPC because it looks for a file in a directory socket_address = "ipc://{}{}{}.ipc".format(directory, os.sep, split_instance_name[-1]) if bind: self.socket.bind(socket_address) @@ -121,8 +127,8 @@ def create_socket(self, context : Union[zmq.asyncio.Context, zmq.Context], insta except zmq.error.ZMQError as ex: if not ex.strerror.startswith('Address in use'): raise ex from None - elif socket_address: - self.socket.connect(socket_address) + elif kwargs.get('socket_address', None): + self.socket.connect(kwargs["socket_address"]) else: raise RuntimeError(f"Socket must be either bound or connected. 
No operation is being carried out for this socket {identity}") elif protocol == ZMQ_PROTOCOLS.INPROC or protocol == "INPROC": @@ -134,46 +140,78 @@ def create_socket(self, context : Union[zmq.asyncio.Context, zmq.Context], insta self.socket.connect(socket_address) else: raise NotImplementedError("protocols other than IPC, TCP & INPROC are not implemented now for {}".format(self.__class__)) - self.logger = self.get_logger(self.identity, socket_address, class_ = self.__class__.__name__) # type: ignore + self.logger = self.get_logger(self.identity, socket_type.name, protocol.name if isinstance(protocol, Enum) else protocol) self.logger.info("created socket with address {} and {}".format(socket_address, "bound" if bind else "connected")) @classmethod - def get_logger(cls, identity : str, socket_address : str, level = logging.DEBUG, class_ = 'BaseZMQ') -> logging.Logger: - if socket_address.endswith('ipc'): - socket_address = socket_address.split('\\')[-1] - socket_address.strip('.ipc') - class_ = class_.split('.')[-1] - name = '{}|{}|{}'.format(class_, identity, socket_address) - return create_default_logger(name, level) + def get_logger(cls, identity : str, socket_type : str, protocol : str, level = logging.DEBUG) -> logging.Logger: + """ + creates a logger with name {class name} | {socket type} | {protocol} | {identity}, + default logging level is ``logging.INFO`` + """ + return create_default_logger('{}|{}|{}|{}'.format(cls.__name__, socket_type, protocol, identity) , level) + + def __del__(self) -> None: + self.exit() class BaseAsyncZMQ(BaseZMQ): """ - Creates, binds/connects am async router socket with either TCP or ICP protocol. A context is create if none is supplied. - For IPC sockets, a file is created under TEMP_DIR of global configuration. + Base class for all async ZMQ servers and clients. 
""" def create_socket(self, instance_name : str, context : Union[zmq.asyncio.Context, None], *, identity : str, bind : bool = False, - protocol : str = "IPC", socket_type : zmq.SocketType = zmq.ROUTER, socket_address : Union[str, None] = None) -> None: + protocol : str = "IPC", socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None: + """ + Overloads ``create_socket()`` to create, bind/connect an async socket. A async context is create if none is supplied. + """ + if context and not isinstance(context, zmq.asyncio.Context): + raise TypeError("async ZMQ message broker accepts only async ZMQ context. supplied type {}".format(type(context))) context = context or zmq.asyncio.Context() - super().create_socket(context, instance_name, identity, bind, protocol, socket_type, socket_address) + super().create_socket(context, instance_name, identity, bind, protocol, socket_type, **kwargs) class BaseSyncZMQ(BaseZMQ): + """ + Base class for all sync ZMQ servers and clients. + """ def create_socket(self, instance_name : str, context : Union[zmq.Context, None], *, identity : str, bind : bool = False, - protocol : str = "IPC", socket_type : zmq.SocketType = zmq.ROUTER, socket_address : Union[str, None] = None) -> None: + protocol : str = "IPC", socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None: + """ + Overloads ``create_socket()`` to create, bind/connect an synchronous socket. A (synchronous) context is create if none is supplied. + """ + if context and not isinstance(context, zmq.Context): + raise TypeError("sync ZMQ message broker accepts only sync ZMQ context. 
supplied type {}".format(type(context))) context = context or zmq.Context() - super().create_socket(context, instance_name, identity, bind, protocol, socket_type, socket_address) + super().create_socket(context, instance_name, identity, bind, protocol, socket_type, **kwargs) class BaseZMQServer(BaseZMQ): """ - This class implements serializer instantiation and message handling for ZMQ servers and can be subclassed by all - server instances irrespective of sync or async. The messaging contract does not depend on sync or async implementation. - For HTTP clients, json_serializer is necessary and for other types of clients, any of the allowed serializer is possible. - """ + Implements serializer instantiation and message handling for ZMQ servers and can be subclassed by all + server instances irrespective of sync or async. For HTTP clients, json_serializer is necessary and for other types + of clients, any of the allowed serializer is possible. The messaging contract does not depend on sync or async + implementation. + + The message contract is as follows: + + client's message to server: |br| + [address, bytes(), client type, message type, msg id, instruction, arguments, execution_context] |br| + [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 ] |br| + + server's message to client: + + Parameters + ---------- + server_type: Enum + metadata about the nature of the server - currently not important. 
+ json_serializer: hololinked.server.serializers.JSONSerializer + serializer used to send message to HTTP Server + rpc_serializer: any of hololinked.server.serializers.serializer, default serpent + serializer used to send message to RPC clients + """ def __init__(self, server_type : Enum, json_serializer : Union[None, JSONSerializer] = None, rpc_serializer : Union[str, BaseSerializer, None] = None) -> None: if json_serializer is None or isinstance(json_serializer, JSONSerializer): @@ -189,46 +227,92 @@ def __init__(self, server_type : Enum, json_serializer : Union[None, JSONSeriali self.server_type : Enum = server_type super().__init__() - def parse_client_message(self, message : List[bytes]) -> Any: + + def parse_client_message(self, message : List[bytes], socket : zmq.Socket, deserialize = True) -> Any: """ - client's message to server looks as follows: - [address, bytes(), client type, message type, msg id, instruction, arguments, execution_context] - [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 ] + deserializes important parts of the client's message, namely instruction, arguments, execution context + based on the client type. For handshake, automatically handles handshake. In case of exceptions while + deserializing, automatically sends and invalid message to client informing the nature of exception with the + exception metadata. 
+ + client's message to server looks as follows: |br| + [address, bytes(), client type, message type, msg id, instruction, arguments, execution_context] |br| + [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 ] |br| """ try: message_type = message[3] if message_type == INSTRUCTION: - client_type = message[2] - if client_type == PROXY: - message[5] = self.rpc_serializer.loads(message[5]) # type: ignore - message[6] = self.rpc_serializer.loads(message[6]) # type: ignore - message[7] = self.rpc_serializer.loads(message[7]) # type: ignore - elif client_type == HTTP_SERVER: - message[5] = self.json_serializer.loads(message[5]) # type: ignore - message[6] = self.json_serializer.loads(message[6]) # type: ignore - message[7] = self.json_serializer.loads(message[7]) # type: ignore + if deserialize: + client_type = message[2] + if client_type == PROXY: + message[5] = self.rpc_serializer.loads(message[5]) # type: ignore + message[6] = self.rpc_serializer.loads(message[6]) # type: ignore + message[7] = self.rpc_serializer.loads(message[7]) # type: ignore + elif client_type == HTTP_SERVER: + message[5] = self.json_serializer.loads(message[5]) # type: ignore + message[6] = self.json_serializer.loads(message[6]) # type: ignore + message[7] = self.json_serializer.loads(message[7]) # type: ignore return message elif message_type == HANDSHAKE: - self.handshake(message[0]) + self.handshake(message[0], socket) except Exception as E: - self.handle_invalid_message(message, E) - - def craft_reply_from_arguments(self, address : bytes, message_type : bytes, message_id : bytes = b'', - data : Any = None) -> List[bytes]: + self.handle_invalid_message(message, E, socket) + + def craft_reply_from_arguments(self, address : bytes, client_type: bytes, message_type : bytes, message_id : bytes = b'', + data : Any = None) -> List[bytes]: + """ + call this method to craft an arbitrary reply or message to the client using the method arguments. 
+ + Parameters + ---------- + address: bytes + the ROUTER address of the client + message_type: bytes + type of the message, possible values are b'REPLY', b'HANDSHAKE' and b'TIMEOUT' + message_id: bytes + message id of the original client message for which the reply is being crafted + data: Any + serializable data + + Returns + ------- + message: List[bytes] + the crafted reply with information in the correct positions + """ + if client_type == HTTP_SERVER: + data = self.json_serializer.dumps(data) + elif client_type == PROXY: + data = self.rpc_serializer.dumps(data) + return [ address, bytes(), self.server_type.value, message_type, message_id, - self.json_serializer.dumps(data) + data ] def craft_reply_from_client_message(self, original_client_message : List[bytes], data : Any = None, **overrides) -> List[bytes]: """ - client's message to server looks as follows: - [address, bytes(), client type, message type, msg id, instruction, arguments, execution_context] - [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 ] + craft a reply with certain data automatically from an originating client message. The client's address, type required + for serialization requirements, message id etc. are automatically created from the original message. 
+ + client's message to server looks as follows: |br| + [address, bytes(), client type, message type, msg id, instruction, arguments, execution_context] |br| + [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 ] |br| + + Parameters + ---------- + original_client_message: List[bytes] + The message originated by the clieht for which the reply is being crafted + data: Any + serializable data + + Returns + ------- + message: List[bytes] + """ client_type = original_client_message[2] if client_type == HTTP_SERVER: @@ -253,33 +337,83 @@ def craft_reply_from_client_message(self, original_client_message : List[bytes], reply[3] = value return reply - def handshake(self, address : bytes) -> None: - run_coro_somehow(self._handshake(address)) + def handshake(self, address : bytes, socket : zmq.Socket) -> None: + """ + pass a handshake message to client. Absolutely mandatory to ensure initial messages do not get lost + because of ZMQ's very tiny but significant initial delay after creating socket. + + Parameters + ---------- + address: bytes + the address of the client to send the handshake + + Returns + ------- + None + """ + run_coro_somehow(self._handshake(address, socket)) + + def handle_invalid_message(self, original_client_message : List[bytes], exception : Exception, socket : zmq.Socket) -> None: + """ + pass an invalid message to the client when an exception occurred while parsing the message from the client + (``parse_client_message()``) + + Parameters + ---------- + original_client_message: List[bytes] + the client message parsing which the exception occurred + exception: Exception + exception object raised + + Returns + ------- + None + """ + run_coro_somehow(self._handle_invalid_message(original_client_message, exception, socket)) - def handle_invalid_message(self, message : List[bytes], exception : Exception) -> None: - run_coro_somehow(self._handle_invalid_message(message, exception)) + def handle_timeout(self, original_client_message : typing.Any, socket : zmq.Socket) -> None: + 
""" + pass timeout message to the client when the instruction could not be executed within specified timeout - async def _handshake(self, address : bytes) -> None: - raise NotImplementedError("handshake cannot be completed - implement _handshake in {} to complete handshake.".format(self.__class__)) + Parameters + ---------- + original_client_message: List[bytes] + the client message which could not executed within the specified timeout. timeout value is + generally specified within the execution context values. + + Returns + ------- + None + """ + run_coro_somehow(self._handle_timeout(original_client_message[0], original_client_message[4], socket)) + + async def _handshake(self, address : bytes, socket : zmq.Socket) -> None: + """ + Inner method that handles handshake. scheduled by ``handshake()``, signature same as ``handshake()``. + """ + await socket.send_multipart(self.craft_reply_from_arguments(address, b'PROXY', HANDSHAKE)) + self.logger.info("sent handshake to client '{}'".format(address)) + + async def _handle_timeout(self, address: bytes, message_id : bytes, socket : zmq.Socket) -> None: + """ + Inner method that handles timeout. scheduled by ``handle_timeout()``, signature same as ``handle_timeout``. + """ + await socket.send_multipart(self.craft_reply_from_arguments(address, b'PROXY', TIMEOUT, message_id)) - async def _handle_invalid_message(self, message : List[bytes], exception : Exception) -> None: - raise NotImplementedError( - wrap_text("invalid message cannot be handled - implement _handle_invalid_message in {} to handle invalid messages.".format( - self.__class__))) + async def _handle_invalid_message(self, original_client_message : List[bytes], exception : Exception, socket : zmq.Socket) -> None: + """ + Inner method that handles invalid messages. scheduled by ``handle_invalid_message()``, + signature same as ``handle_invalid_message()``. 
+ """ + await socket.send_multipart(self.craft_reply_from_client_message(original_client_message, exception, + message_type=INVALID_MESSAGE)) + self.logger.info("sent exception message to client '{}' : '{}'".format(original_client_message[0], str(exception))) class AsyncZMQServer(BaseZMQServer, BaseAsyncZMQ): """ - ZMQ Server to be used by remote objects, this server will handle handshakes, event subscription notifications, - & instructions. - - message from client to server : - - [address, bytes(), client type, message type, msg id, instruction, arguments] - [ 0 , 1 , 2 , 3 , 4 , 5 , 6 ] - - Handshake - [client address, bytes, client_type, HANDSHAKE] + Implements blocking (non-polled) async receive instructions and send replies. """ def __init__(self, instance_name : str, server_type : Enum, context : Union[zmq.asyncio.Context, None] = None, @@ -288,32 +422,41 @@ def __init__(self, instance_name : str, server_type : Enum, context : Union[zmq. rpc_serializer=kwargs.get('rpc_serializer', None)) BaseAsyncZMQ.__init__(self) self.instance_name = instance_name - self.create_socket(instance_name, context, identity=instance_name, bind=True, protocol=protocol, socket_type=socket_type, - socket_address=kwargs.get("socket_address", None)) + self.create_socket(instance_name, context, identity=instance_name, bind=True, protocol=protocol, + socket_type=socket_type, socket_address=kwargs.get("socket_address", None)) self._terminate_context = context == None # terminate if it was created by instance - async def _handshake(self, address : bytes) -> None: - await self.socket.send_multipart(self.craft_reply_from_arguments(address, HANDSHAKE)) - self.logger.info("sent handshake to client '{}'".format(address)) - - async def _handle_invalid_message(self, original_client_message : List[bytes], exception : Exception) -> None: - await self.socket.send_multipart(self.craft_reply_from_client_message(original_client_message, exception, - message_type=INVALID_MESSAGE)) - 
self.logger.info("sent exception message to client '{}' : '{}'".format(original_client_message[0], str(exception))) - async def async_recv_instruction(self) -> Any: + """ + Receive one instruction in a blocking form. Async for multi-server paradigm, each server should schedule + this method in the event loop explicitly. This is taken care by the ``Eventloop`` & ``RPCServer``. + + Returns + ------- + instruction: List[bytes | Any] + received instruction with important content (instruction, arguments, execution context) deserialized. + """ while True: - instruction = self.parse_client_message(await self.socket.recv_multipart()) + instruction = self.parse_client_message(await self.socket.recv_multipart(), self.socket) if instruction: self.logger.debug("received instruction from client '{}' with msg-ID '{}'".format(instruction[0], instruction[4])) return instruction - async def async_recv_instructions(self, strip_delimiter = False) -> List[Any]: + async def async_recv_instructions(self) -> List[Any]: + """ + Receive all currently available instructions in blocking form. Async for multi-server paradigm, each server should schedule + this method in the event loop explicitly. This is taken care by the ``Eventloop`` & ``RPCServer``. + + Returns + ------- + instructions: List[List[bytes | Any]] + list of received instructions with important content (instruction, arguments, execution context) deserialized. 
+ """ instructions = [await self.async_recv_instruction()] while True: try: - instruction = self.parse_client_message(await self.socket.recv_multipart(zmq.NOBLOCK)) + instruction = self.parse_client_message(await self.socket.recv_multipart(zmq.NOBLOCK), self.socket) if instruction: self.logger.debug("received instruction from client '{}' with msg-ID '{}'".format(instruction[0], instruction[4])) @@ -323,6 +466,20 @@ async def async_recv_instructions(self, strip_delimiter = False) -> List[Any]: return instructions async def async_send_reply(self, original_client_message : List[bytes], data : Any) -> None: + """ + Send reply for an instruction. + + Parameter + --------- + original_client_message: List[bytes] + original message so that the reply can be properly crafted and routed + data: Any + serializable data to be sent as reply + + Returns + ------- + None + """ reply = self.craft_reply_from_client_message(original_client_message, data) await self.socket.send_multipart(reply) self.logger.debug("sent reply to client '{}' with msg-ID '{}'".format(reply[0], reply[4])) @@ -331,6 +488,9 @@ async def async_send_event_subscription(self, consumer : str) -> None: await self.socket.send_multipart([consumer, bytes(), EVENT_SUBSCRIPTION]) def exit(self) -> None: + """ + closes socket and context, warns if any error occurs. + """ try: self.socket.close(0) self.logger.info("terminated socket of server '{}' of type '{}'".format(self.identity, self.__class__)) @@ -349,14 +509,36 @@ def exit(self) -> None: class AsyncPollingZMQServer(AsyncZMQServer): """ - Identical to AsyncZMQServer, except that it can be stopped from server side. - This is achieved by polling the socket instead of waiting indefinitely on the socket. + Identical to AsyncZMQServer, except that instructions are received in non-blocking/polling form. + This server can be stopped from server side by calling ``stop_polling()`` unlike ``AsyncZMQServer`` which + cannot be stopped manually unless an instruction arrives. 
+ + Parameters + ---------- + instance_name: str + ``instance_name`` of the RemoteObject which the server serves + server_type: str + server type metadata - currently not useful/important + context: Optional, zmq.asyncio.Context + ZeroMQ Context object to use. All sockets share this context. Automatically created when None is supplied. + protocol: Enum, default ZMQ_PROTOCOLS.IPC + Use TCP for network access, IPC for multi-process applications, and INPROC for multi-threaded applications. + poll_timeout: int, default 25 + time in milliseconds to poll the sockets specified under ``procotols``. Useful for calling ``stop_polling()`` + where the max delay to stop polling will be ``poll_timeout`` + server_type: Enum + metadata about the nature of the server - currently not important. + json_serializer: hololinked.server.serializers.JSONSerializer + serializer used to send message to HTTP Server + rpc_serializer: any of hololinked.server.serializers.serializer, default serpent + serializer used to send message to RPC clients """ def __init__(self, instance_name : str, *, server_type : Enum, context : Union[zmq.asyncio.Context, None] = None, socket_type : zmq.SocketType = zmq.ROUTER, protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC, poll_timeout = 25, **kwargs) -> None: - super().__init__(instance_name, server_type, context, socket_type, protocol=protocol, **kwargs) + super().__init__(instance_name=instance_name, server_type=server_type, context=context, protocol=protocol, + socket_type=socket_type, **kwargs) self.poller = zmq.asyncio.Poller() self._instructions = [] self.poller.register(self.socket, zmq.POLLIN) @@ -364,6 +546,9 @@ def __init__(self, instance_name : str, *, server_type : Enum, context : Union[z @property def poll_timeout(self) -> int: + """ + socket polling timeout in milliseconds greater than 0. 
+ """ return self._poll_timeout @poll_timeout.setter @@ -373,14 +558,23 @@ def poll_timeout(self, value) -> None: self._poll_timeout = value async def poll_instructions(self) -> List[List[bytes]]: + """ + poll for instructions with specified timeout (``poll_timeout``) and return if any instructions are available. + This method blocks, so make sure other methods are scheduled which can stop polling. + + Returns + ------- + instructions: List[List[bytes]] + list of received instructions with important content (instruction, arguments, execution context) deserialized. + """ self.stop_poll = False instructions = [] while not self.stop_poll: - sockets = await self.poller.poll(self.poll_timeout) # type hints dont work in this line + sockets = await self.poller.poll(self._poll_timeout) # type hints dont work in this line for socket, _ in sockets: while True: try: - instruction = self.parse_client_message(await socket.recv_multipart(zmq.NOBLOCK)) + instruction = self.parse_client_message(await socket.recv_multipart(zmq.NOBLOCK), socket) except zmq.Again: break else: @@ -388,12 +582,20 @@ async def poll_instructions(self) -> List[List[bytes]]: self.logger.debug("received instruction from client '{}' with msg-ID '{}'".format(instruction[0], instruction[4])) instructions.append(instruction) + if len(instructions) > 0: + break return instructions def stop_polling(self) -> None: + """ + stop polling and unblock ``poll_instructions()`` method + """ self.stop_poll = True def exit(self) -> None: + """ + unregister socket from poller and terminate socket and context. 
+ """ self.poller.unregister(self.socket) return super().exit() @@ -448,11 +650,11 @@ async def poll(self, strip_delimiter : bool = False) -> List[Any]: self.stop_poll = False instructions = [] while not self.stop_poll: - sockets = await self.poller.poll(self.poll_timeout) + sockets = await self.poller.poll(self._poll_timeout) for socket, _ in sockets: while True: try: - instruction = self.parse_client_message(await socket.recv_multipart(zmq.NOBLOCK)) + instruction = self.parse_client_message(await socket.recv_multipart(zmq.NOBLOCK), socket) except zmq.Again: break else: @@ -485,89 +687,164 @@ def __contains__(self, name : str) -> bool: -class RPCServer: +class RPCServer(BaseZMQServer): + """ + Top level ZMQ RPC server used by ``RemoteObject`` and ``Eventloop``. + + Parameters + ---------- + instance_name: str + ``instance_name`` of the RemoteObject which the server serves + server_type: str + server type metadata - currently not useful/important + context: Optional, zmq.asyncio.Context + ZeroMQ Context object to use. All sockets share this context. Automatically created when None is supplied. + protocols: List[str, Enum], default [ZMQ_PROTOCOLS.TCP, ZMQ_PROTOCOLS.IPC, ZMQ_PROTOCOLS.INPROC] + all ZeroMQ sockets where instructions can be passed to the RPC server. Use TCP for network access, + IPC for multi-process applications, and INPROC for multi-threaded applications. Use all for complete access. + poll_timeout: int, default 25 + time in milliseconds to poll the sockets specified under ``procotols``. 
Useful for calling ``stop_polling()`` + where the max delay to stop polling will be ``poll_timeout`` + """ def __init__(self, instance_name : str, *, server_type : Enum, context : Union[zmq.asyncio.Context, None] = None, - protocols : typing.List[ZMQ_PROTOCOLS] = ZMQ_PROTOCOLS.IPC, poll_timeout = 25, **kwargs) -> None: - context = zmq.asyncio.Context() + protocols : typing.List[ZMQ_PROTOCOLS] = [ZMQ_PROTOCOLS.TCP, ZMQ_PROTOCOLS.IPC, ZMQ_PROTOCOLS.INPROC], + poll_timeout = 25, **kwargs) -> None: + super().__init__(server_type, kwargs.get('json_serializer', None), kwargs.get('rpc_serializer', None)) + kwargs["json_serializer"] = self.json_serializer + kwargs["rpc_serializer"] = self.rpc_serializer + self.context = zmq.asyncio.Context() + self.poller = zmq.asyncio.Poller() if ZMQ_PROTOCOLS.TCP in protocols: - self.tcp_server = AsyncPollingZMQServer(instance_name=instance_name, context=context, - protocol=ZMQ_PROTOCOLS.TCP, **kwargs) + self.tcp_server = AsyncPollingZMQServer(instance_name=instance_name, server_type=server_type, context=context, + protocol=ZMQ_PROTOCOLS.TCP, poll_timeout=poll_timeout, **kwargs) + self.poller.register(self.tcp_server.socket) if ZMQ_PROTOCOLS.IPC in protocols: - self.ipc_server = AsyncPollingZMQServer(instance_name=instance_name, context=context, - protocol=ZMQ_PROTOCOLS.IPC, **kwargs) + self.ipc_server = AsyncPollingZMQServer(instance_name=instance_name, server_type=server_type, context=context, + protocol=ZMQ_PROTOCOLS.IPC, poll_timeout=poll_timeout, **kwargs) + self.poller.register(self.ipc_server.socket) if ZMQ_PROTOCOLS.INPROC in protocols: - self.inproc_server = AsyncPollingZMQServer(instance_name=instance_name, context=context, - protocol=ZMQ_PROTOCOLS.INPROC, **kwargs) - self.inproc_client = AsyncZMQClient(server_instance_name=instance_name, identity='', client_type='', - context=context, protocol=ZMQ_PROTOCOLS.INPROC) - self._instructions = deque() # type: typing.Iterable[typing.Tuple[typing.Any, asyncio.Event, asyncio.Future]] - 
self._replies = deque() + self.inproc_server = AsyncPollingZMQServer(instance_name=instance_name, server_type=server_type, context=context, + protocol=ZMQ_PROTOCOLS.INPROC, poll_timeout=poll_timeout, **kwargs) + self.poller.register(self.inproc_server.socket) self.poll_timeout = poll_timeout - self.poller = zmq.Poller() - self.poller.register(self.ipc_server) - self.poller.register(self.tcp_server) - self.poller.register(self.inproc_server) + self.inproc_client = AsyncZMQClient(server_instance_name=f'{instance_name}/real', identity=f'{instance_name}/tunneler', + client_type=TUNNELER, context=context, protocol=ZMQ_PROTOCOLS.INPROC) + self._instructions = deque() # type: typing.Iterable[typing.Tuple[typing.Any, asyncio.Event, asyncio.Future, zmq.Socket]] + self._instructions_event = asyncio.Event() self._socket_to_server_map = { self.tcp_server.socket : self.tcp_server, self.ipc_server.socket : self.ipc_server, self.inproc_server.socket : self.inproc_server } + self.identity = f"{instance_name}/rpc-server" + self.logger = self.get_logger(instance_name, 'RPC', 'MIXED') + + async def handshake_complete(self): + await self.inproc_client.handshake_complete() def prepare(self): """ registers socket polling method and message tunnelling methods to the running asyncio event loop """ - eventloop = asyncio.get_running_loop() - eventloop.call_soon(self.poll) - eventloop.call_soon(self.tunnel_message_to_remote_objects) - + eventloop = asyncio.get_event_loop() + eventloop.call_soon(lambda : asyncio.create_task(self.poll())) + eventloop.call_soon(lambda : asyncio.create_task(self.tunnel_message_to_remote_objects())) + + @property + def poll_timeout(self) -> int: + return self._poll_timeout + @poll_timeout.setter + def poll_timeout(self, value) -> None: + if not isinstance(value, int) or value < 0: + raise ValueError("polling period must be an integer greater than 0, not {}. 
Value is considered in milliseconds.".format(value)) + self._poll_timeout = value + async def poll(self): + """ + poll for instructions and append them to instructions list to pass them to eventloop inproc server using an + inner inproc client. Register the messages for timeout calculation. + """ self.stop_poll = False + eventloop = asyncio.get_event_loop() while not self.stop_poll: - sockets = await self.poller.poll(self.poll_timeout) + sockets = await self.poller.poll(self._poll_timeout) for socket, _ in sockets: while True: try: - instruction = self.parse_client_message(await socket.recv_multipart(zmq.NOBLOCK)) + original_instruction = await socket.recv_multipart(zmq.NOBLOCK) + parsed_instruction = self.parse_client_message(original_instruction, socket, deserialize=False) except zmq.Again: break else: - timeout = instruction[7].get("timeout", None) - ready_to_process_event = asyncio.Event() - self._instructions.append((instruction, ready_to_process_event, - asyncio.create_task(self.process_timeouts(ready_to_process_event, timeout)), - socket - )) + if parsed_instruction: + timeout = 3 # parsed_instruction[7].get("timeout", None) + ready_to_process_event = None + timeout_task = None + if timeout is not None: + ready_to_process_event = asyncio.Event() + timeout_task = asyncio.create_task(self.process_timeouts(ready_to_process_event, socket, original_instruction, timeout)) + eventloop.call_soon(lambda : timeout_task) + self._instructions.append((original_instruction, ready_to_process_event, timeout_task, socket)) + print("instruction in RPC", original_instruction) + self._instructions_event.set() - async def tunnel_message_to_remote_objects(self, origin_socket : zmq.Context.socket): + async def tunnel_message_to_remote_objects(self): """ client's message to server looks as follows: [address, bytes(), client type, message type, msg id, instruction, arguments, execution_context] [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 ] """ while not self.stop_poll: - message, timeout_event, 
timeout_future = self._instructions.popleft() - await timeout_event.set() - if not timeout_future.done(): - await asyncio.wait(timeout_future) - if timeout_future.result(): - await self.inproc_client.socket.send_multipart(message) - reply = await self.inproc_client.async_recv_reply() - await origin_socket.send_multipart(reply) + if len(self._instructions) > 0: + message, ready_to_process_event, timeout_task, origin_socket = self._instructions.popleft() + timeout = True + if ready_to_process_event is not None: + ready_to_process_event.set() + timeout = await timeout_task + print("timeout result - ", timeout) + if ready_to_process_event is None or not timeout: + original_address = message[0] + message[0] = self.inproc_client.server_address # replace address + print("original address", original_address, "inproc address", message[0]) + await self.inproc_client.socket.send_multipart(message) + print("*********sent message to inproc") + reply = await self.inproc_client.socket.recv_multipart() + print("--------received message from inproc") + reply[0] = original_address + await origin_socket.send_multipart(reply) + print("###### sent message to client") + else: + await self._instructions_event.wait() + self._instructions_event.clear() async def process_timeouts(self, ready_to_process_event : asyncio.Event, origin_socket : zmq.Socket, original_message : typing.List, timeout : typing.Optional[float] = None) -> bool: try: await asyncio.wait_for(ready_to_process_event.wait(), timeout) - return True + return False except TimeoutError: - await self._socket_to_server_map[origin_socket].async_send_reply(original_message, 'TIMEOUT') - return False + self.handle_timeout(original_message, origin_socket) + return True + + def exit(self): + self.stop_poll = True + sockets = list(self.poller._map.keys()) + for i in range(len(sockets)): # iterating over keys will cause dictionary size change during iteration + self.poller.unregister(sockets[i]) + try: + self.inproc_client.exit() + 
self.inproc_server.exit() + self.ipc_server.exit() + self.tcp_server.exit() + except: + pass + self.context.term() + self.logger.info("terminated context of socket '{}' of type '{}'".format(self.identity, self.__class__)) @@ -586,7 +863,7 @@ class BaseZMQClient(BaseZMQ): def __init__(self, server_address : Union[bytes, None], server_instance_name : Union[str, None], client_type : bytes, **kwargs) -> None: - if client_type in [PROXY, HTTP_SERVER]: + if client_type in [PROXY, HTTP_SERVER, TUNNELER]: self.client_type = client_type else: raise ValueError("invalid client type for {}. Given option {}".format(self.__class__, client_type)) @@ -598,7 +875,7 @@ def __init__(self, server_address : Union[bytes, None], server_instance_name : U self.json_serializer = json_serializer or JSONSerializer() else: raise ValueError("invalid JSON serializer option for {}. Given option {}".format(self.__class__, json_serializer)) - else: + elif client_type == PROXY: rpc_serializer = kwargs.get("rpc_serializer", None) if rpc_serializer is None or isinstance(rpc_serializer, (PickleSerializer, SerpentSerializer, JSONSerializer)):#, DillSerializer)): @@ -775,10 +1052,10 @@ class AsyncZMQClient(BaseZMQClient, BaseAsyncZMQ): def __init__(self, server_instance_name : str, identity : str, client_type = HTTP_SERVER, handshake : bool = True, protocol : str = "IPC", context : Union[zmq.asyncio.Context, None] = None, **serializer) -> None: - BaseZMQClient.__init__(self, server_address = bytes(server_instance_name, encoding='utf-8'), - server_instance_name = server_instance_name, client_type = client_type, **serializer) + BaseZMQClient.__init__(self, server_address=bytes(server_instance_name, encoding='utf-8'), + server_instance_name=server_instance_name, client_type=client_type, **serializer) BaseAsyncZMQ.__init__(self) - self.create_socket(context, server_instance_name, identity) + self.create_socket(instance_name=server_instance_name, context=context, identity=identity, protocol=protocol) 
self._terminate_context = context == None self.handshake_event = asyncio.Event() if handshake: @@ -954,20 +1231,21 @@ async def poll(self) -> None: """ errors in handle_message should reach the client. """ - if reply: - address, _, server_type, message_type, message_id, response = reply - self.logger.debug("received reply from server '{}' with message ID {}".format(address, message_id)) - if message_id in self.cancelled_messages: - self.cancelled_messages.remove(message_id) - self.logger.debug(f'message_id {message_id} cancelled') - continue - try: - event = self.message_to_event_map[message_id] # type: ignore - except KeyError: - event_loop.call_soon(lambda: asyncio.create_task(self.resolve_reply(message_id, response))) - else: - self.shared_message_map[message_id] = response - event.set() + else: + if reply: + address, _, server_type, message_type, message_id, response = reply + self.logger.debug("received reply from server '{}' with message ID {}".format(address, message_id)) + if message_id in self.cancelled_messages: + self.cancelled_messages.remove(message_id) + self.logger.debug(f'message_id {message_id} cancelled') + continue + try: + event = self.message_to_event_map[message_id] # type: ignore + except KeyError: + event_loop.call_soon(lambda: asyncio.create_task(self.resolve_reply(message_id, response))) + else: + self.shared_message_map[message_id] = response + event.set() async def resolve_reply(self, message_id, return_value): max_number_of_retries = 100 @@ -1239,5 +1517,6 @@ def exit(self): -__all__ = ['ServerTypes', 'AsyncZMQServer', 'AsyncPollingZMQServer', 'ZMQServerPool', 'SyncZMQClient', - 'AsyncZMQClient', 'AsyncZMQClientPool', 'MessageMappedZMQClientPool', 'Event', 'CriticalEvent'] \ No newline at end of file +__all__ = ['ServerTypes', 'AsyncZMQServer', 'AsyncPollingZMQServer', 'ZMQServerPool', 'RPCServer', + 'SyncZMQClient', 'AsyncZMQClient', 'AsyncZMQClientPool', 'MessageMappedZMQClientPool', + 'Event', 'CriticalEvent'] \ No newline at end of 
file From 7e8e9691565fa51edd9849ff5a1383e20f9e5c4f Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 11 Feb 2024 20:23:09 +0100 Subject: [PATCH 025/167] minor updates --- .../_static/architecture.drawio.dark.svg | 4 ++ .../_static/architecture.drawio.light.svg | 4 ++ doc/source/autodoc/index.rst | 4 ++ .../server/zmq_message_brokers/base_zmq.rst | 13 +++++ .../zmq_message_brokers/base_zmq_server.rst | 16 ++++++ .../server/zmq_message_brokers/index.rst | 21 ++++++++ .../server/zmq_message_brokers/rpc_server.rst | 13 +++++ hololinked/server/HTTPServer.py | 3 +- hololinked/server/data_classes.py | 2 +- hololinked/server/eventloop.py | 2 +- hololinked/server/host_utilities.py | 2 +- hololinked/server/remote_object.py | 6 +-- hololinked/server/remote_parameters.py | 50 +++++++++---------- 13 files changed, 107 insertions(+), 33 deletions(-) create mode 100644 doc/source/_static/architecture.drawio.dark.svg create mode 100644 doc/source/_static/architecture.drawio.light.svg create mode 100644 doc/source/autodoc/server/zmq_message_brokers/base_zmq_server.rst create mode 100644 doc/source/autodoc/server/zmq_message_brokers/rpc_server.rst diff --git a/doc/source/_static/architecture.drawio.dark.svg b/doc/source/_static/architecture.drawio.dark.svg new file mode 100644 index 0000000..f198eb8 --- /dev/null +++ b/doc/source/_static/architecture.drawio.dark.svg @@ -0,0 +1,4 @@ + + + +

HTTP Clients
(Web browsers)

HTTP Clients...

Local Desktop

Clients & Scripts

(RPC Clients)

Local Desktop...

Clients & Scripts 

on the network

(RPC Clients)

Clients & Scripts...
HTTP Server
HTTP Server
INPROC Server
INPROC Server
IPC Server
IPC Server

TCP Server

TCP Server
RPC Server
RPC Server
INPROC Server
Eventloop and remote object executor
INPROC Server...
Common INPROC Client
Common INPR...
Recommended Communication
Recommende...
Not Recommended Communication
Not Recommended Com...
Developer should decide
Developer should...
\ No newline at end of file diff --git a/doc/source/_static/architecture.drawio.light.svg b/doc/source/_static/architecture.drawio.light.svg new file mode 100644 index 0000000..4b0b2ed --- /dev/null +++ b/doc/source/_static/architecture.drawio.light.svg @@ -0,0 +1,4 @@ + + + +

HTTP Clients
(Web browsers)

HTTP Clients...

Local Desktop

Clients & Scripts

(RPC Clients)

Local Desktop...

Clients & Scripts 

on the network

(RPC Clients)

Clients & Scripts...
HTTP Server
HTTP Server
INPROC Server
INPROC Server
IPC Server
IPC Server

TCP Server

TCP Server
RPC Server
RPC Server
INPROC Server
Eventloop and remote object executor
INPROC Server...
Common INPROC Client
Common INPR...
Recommended Communication
Recommende...
Not Recommended Communication
Not Recommended Com...
Developer should decide
Developer should...
\ No newline at end of file diff --git a/doc/source/autodoc/index.rst b/doc/source/autodoc/index.rst index bf315f2..9c829c7 100644 --- a/doc/source/autodoc/index.rst +++ b/doc/source/autodoc/index.rst @@ -1,5 +1,9 @@ .. _apiref: +.. |br| raw:: html + +
+ API Reference ============= diff --git a/doc/source/autodoc/server/zmq_message_brokers/base_zmq.rst b/doc/source/autodoc/server/zmq_message_brokers/base_zmq.rst index 9ae1f1b..70121e4 100644 --- a/doc/source/autodoc/server/zmq_message_brokers/base_zmq.rst +++ b/doc/source/autodoc/server/zmq_message_brokers/base_zmq.rst @@ -1,6 +1,19 @@ +.. |br| raw:: html + +
+ + BaseZMQ ======= .. autoclass:: hololinked.server.zmq_message_brokers.BaseZMQ :members: + :show-inheritance: + +.. autoclass:: hololinked.server.zmq_message_brokers.BaseSyncZMQ + :members: + :show-inheritance: + +.. autoclass:: hololinked.server.zmq_message_brokers.BaseAsyncZMQ + :members: :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/zmq_message_brokers/base_zmq_server.rst b/doc/source/autodoc/server/zmq_message_brokers/base_zmq_server.rst new file mode 100644 index 0000000..43527a4 --- /dev/null +++ b/doc/source/autodoc/server/zmq_message_brokers/base_zmq_server.rst @@ -0,0 +1,16 @@ +.. |br| raw:: html + +
+ + +ZMQ Servers +=========== + + +.. autoclass:: hololinked.server.zmq_message_brokers.BaseZMQServer + :members: + :show-inheritance: + +.. autoclass:: hololinked.server.zmq_message_brokers.AsyncZMQServer + :members: + :show-inheritance: diff --git a/doc/source/autodoc/server/zmq_message_brokers/index.rst b/doc/source/autodoc/server/zmq_message_brokers/index.rst index 1bdcaaa..6539580 100644 --- a/doc/source/autodoc/server/zmq_message_brokers/index.rst +++ b/doc/source/autodoc/server/zmq_message_brokers/index.rst @@ -1,10 +1,31 @@ ZMQ Message Brokers =================== +``hololinked`` uses ZMQ under the hood to implement a RPC server. All requests, either coming through a HTTP +Server (from a HTTP client/web browser) or an RPC client are routed via the RPC Server to queue them before execution. + +Since a RPC client is available in ``hololinked`` (or will be made available), it is suggested to use the HTTP +server for web development practices (like REST-similar endpoints) and not for RPC purposes. The following picture +summarizes how messages are routed to the ``RemoteObject``. + +.. image:: ../../../_static/architecture.drawio.light.svg + :class: only-light + +.. image:: ../../../_static/architecture.drawio.dark.svg + :class: only-dark + + +The message brokers are divided to client and server types. Servers recieve a message before replying & clients +initiate message requests. + +See documentation of ``RPCServer`` for details. + .. toctree:: :maxdepth: 1 base_zmq + base_zmq_server + rpc_server diff --git a/doc/source/autodoc/server/zmq_message_brokers/rpc_server.rst b/doc/source/autodoc/server/zmq_message_brokers/rpc_server.rst new file mode 100644 index 0000000..23fda29 --- /dev/null +++ b/doc/source/autodoc/server/zmq_message_brokers/rpc_server.rst @@ -0,0 +1,13 @@ +.. |br| raw:: html + +
+ + +RPC Server +========== + + +.. autoclass:: hololinked.server.zmq_message_brokers.RPCServer + :members: + :show-inheritance: + diff --git a/hololinked/server/HTTPServer.py b/hololinked/server/HTTPServer.py index 64cc1d4..78b00b9 100644 --- a/hololinked/server/HTTPServer.py +++ b/hololinked/server/HTTPServer.py @@ -17,7 +17,8 @@ from .utils import create_default_logger -from .decorators import get, put, post, delete, remote_method +from .decorators import remote_method +from .http_methods import get, put, post, delete from .serializers import JSONSerializer from .constants import GET, PUT, POST, OPTIONS, DELETE, USE_OBJECT_NAME, CALLABLE from .webserver_utils import log_request, update_resources diff --git a/hololinked/server/data_classes.py b/hololinked/server/data_classes.py index b598877..7f0f912 100644 --- a/hololinked/server/data_classes.py +++ b/hololinked/server/data_classes.py @@ -253,7 +253,7 @@ def json(self): @dataclass -class ServerSentEventInfo: +class ServerSentEvent: """ event name and socket address of events to be consumed by clients. This class is generally not for consumption by the package-end-user. 
diff --git a/hololinked/server/eventloop.py b/hololinked/server/eventloop.py index 21bd5f3..7809aaa 100644 --- a/hololinked/server/eventloop.py +++ b/hololinked/server/eventloop.py @@ -12,7 +12,7 @@ from .constants import * from .remote_parameters import TypedDict from .exceptions import * -from .decorators import post, get +from .http_methods import post, get from .remote_object import * from .zmq_message_brokers import AsyncPollingZMQServer, ZMQServerPool, ServerTypes, AsyncZMQClient from .remote_parameter import RemoteParameter diff --git a/hololinked/server/host_utilities.py b/hololinked/server/host_utilities.py index 5a6c177..0044b92 100644 --- a/hololinked/server/host_utilities.py +++ b/hololinked/server/host_utilities.py @@ -26,7 +26,7 @@ from .zmq_message_brokers import MessageMappedZMQClientPool from .webserver_utils import get_IP_from_interface, update_resources_using_client from .utils import unique_id -from .decorators import post, get, put, delete +from .http_methods import post, get, put, delete from .eventloop import Consumer, EventLoop, fork_empty_eventloop from .remote_object import RemoteObject, RemoteObjectDB, RemoteObjectMetaclass from .database import BaseAsyncDB, create_DB_URL diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index 4527d7c..b5382d2 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -25,7 +25,7 @@ from .decorators import remote_method from .http_methods import get, post from .data_classes import (GUIResources, RemoteResource, HTTPResource, RPCResource, RemoteResourceInfoValidator, - ServerSentEventInfo) + ServerSentEvent) from .api_platform_utils import postman_item, postman_itemgroup from .database import BaseAsyncDB, BaseSyncDB from .utils import create_default_logger, get_signature, wrap_text @@ -517,7 +517,7 @@ def _prepare_resources(self): resource._unique_event_name = bytes(f"{self._full_URL_path_prefix}{resource.URL_path}", encoding='utf-8') 
resource.publisher = self._event_publisher httpserver_resources[GET]['{}{}'.format( - self._full_URL_path_prefix, resource.URL_path)] = ServerSentEventInfo( + self._full_URL_path_prefix, resource.URL_path)] = ServerSentEvent( # event URL_path has '/' prefix what=EVENT, event_name=resource.name, @@ -865,7 +865,7 @@ def set_maxlen(self, value): self._critical_logs = deque(maxlen=value) self._execution_logs = deque(maxlen=value) - maxlen = RemoteInteger(default=100, bounds=(1, None), crop_to_bounds=True, URL_path='/maxlen', + maxlen = Integer(default=100, bounds=(1, None), crop_to_bounds=True, URL_path='/maxlen', fget=get_maxlen, fset=set_maxlen ) diff --git a/hololinked/server/remote_parameters.py b/hololinked/server/remote_parameters.py index 3bc316c..f7112a7 100644 --- a/hololinked/server/remote_parameters.py +++ b/hololinked/server/remote_parameters.py @@ -38,7 +38,7 @@ def __init__(self, default="0.0.0.0", allow_None=False, **kwargs): def __init__(self, default : typing.Optional[str] = "", *, regex : typing.Optional[str] = None, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, @@ -140,7 +140,7 @@ class IPAddress(RemoteParameter): def __init__(self, default : typing.Optional[str] = "0.0.0.0", *, allow_ipv4 : bool = True, allow_ipv6 : bool = True, allow_localhost : bool = True, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + 
URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, allow_None : bool = False, per_instance_descriptor : bool = False, deepcopy_default : bool = False, @@ -369,7 +369,7 @@ class Number(RemoteParameter): def __init__(self, default : typing.Optional[typing.Union[float, int]] = 0.0, *, bounds : typing.Optional[typing.Tuple] = None, crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, per_instance_descriptor : bool = False, deepcopy_default : bool = False, @@ -504,7 +504,7 @@ class Integer(Number): def __init__(self, default : typing.Optional[int] = 0, *, bounds : typing.Optional[typing.Tuple] = None, crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -530,7 +530,7 @@ class Boolean(RemoteParameter): def __init__(self, default : 
typing.Optional[bool] = False, *, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -558,7 +558,7 @@ class Iterable(RemoteParameter): def __init__(self, default : typing.Any, *, bounds : typing.Optional[typing.Tuple[int, int]] = None, length : typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None, deepcopy_default : bool = False, allow_None : bool = False, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -626,7 +626,7 @@ def __init__(self, default : typing.Any, *, bounds : typing.Optional[typing.Tupl length: typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None, accept_list : bool = False, deepcopy_default : bool = False, allow_None : bool = False, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -681,7 +681,7 @@ def 
__init__(self, default: typing.Any, *, bounds : typing.Optional[typing.Tuple length : typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None, accept_tuple : bool = False, deepcopy_default : bool = False, allow_None : bool = False, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -742,7 +742,7 @@ class Composite(RemoteParameter): def __init__(self, attribs : typing.List[typing.Union[str, Parameter]], *, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -828,7 +828,7 @@ class Selector(SelectorBase): # existing objects, therefore instantiate is False by default. 
def __init__(self, *, objects : typing.List[typing.Any], default : typing.Any, empty_default : bool = False, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -891,7 +891,7 @@ class ClassSelector(SelectorBase): def __init__(self, *, class_ , default : typing.Any, isinstance : bool = True, deepcopy_default : bool = False, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -960,7 +960,7 @@ class TupleSelector(Selector): def __init__(self, *, objects : typing.List, default : typing.Any, accept_list : bool = True, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -1024,7 +1024,7 @@ class Path(RemoteParameter): def __init__(self, default : typing.Any = '', *, search_paths : typing.Optional[str] = None, doc : typing.Optional[str] 
= None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -1133,7 +1133,7 @@ class FileSelector(Selector): def __init__(self, default : typing.Any, *, objects : typing.List, path : str = "", doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -1173,7 +1173,7 @@ class MultiFileSelector(FileSelector): def __init__(self, default : typing.Any, *, path : str = "", doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -1203,7 +1203,7 @@ class Date(Number): def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None, crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = 
USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -1247,7 +1247,7 @@ class CalendarDate(Number): def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None, crop_to_bounds : bool = False, inclusive_bounds : typing.Tuple = (True,True), step : typing.Any = None, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -1327,7 +1327,7 @@ class CSS3Color(RemoteParameter): __slots__ = ['allow_named'] def __init__(self, default, *, allow_named : bool = True, doc : typing.Optional[str] = None, constant : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -1367,12 +1367,11 @@ class Range(Tuple): def __init__(self, default : typing.Optional[typing.Tuple] = None, *, bounds: typing.Optional[typing.Tuple[int, int]] = None, length : typing.Optional[int] = None, item_type : typing.Optional[typing.Tuple] = None, - softbounds=None, inclusive_bounds=(True,True), step=None, + softbounds=None, inclusive_bounds=(True,True), step=None, doc : 
typing.Optional[str] = None, constant : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - readonly : bool = False, allow_None : bool = False, label : typing.Optional[str] = None, per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, @@ -1519,10 +1518,9 @@ class TypedList(ClassSelector): def __init__(self, default : typing.Optional[typing.List[typing.Any]] = None, *, item_type : typing.Any = None, deepcopy_default : bool = True, allow_None : bool = True, bounds : tuple = (0,None), doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, - db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - + db_persist : bool = False, db_init : bool = False, db_commit : bool = False, per_instance_descriptor : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -1563,7 +1561,7 @@ class TypedDict(ClassSelector): def __init__(self, default : typing.Optional[typing.Dict] = None, *, key_type : typing.Any = None, item_type : typing.Any = None, deepcopy_default : bool = True, allow_None : bool = True, bounds : tuple 
= (0, None), doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, @@ -1607,7 +1605,7 @@ def __init__(self, default : typing.Optional[typing.Dict[typing.Any, typing.Any] allow_unspecified_keys : bool = True, bounds : tuple = (0, None), deepcopy_default : bool = True, allow_None : bool = True, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, - URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), + URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, From 45572c0522d6fe1d4880032801be6fb299825e7b Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 17 Feb 2024 19:11:04 +0100 Subject: [PATCH 026/167] added docs and cleaned up RPC server, including handshaking - basic testing done --- hololinked/server/zmq_message_brokers.py | 1340 +++++++++++++++------- 1 file changed, 919 insertions(+), 421 deletions(-) diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py index b8907c7..f487957 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -1,44 +1,76 @@ +import builtins import os +from typing import List import zmq import zmq.asyncio import asyncio import logging import typing +from uuid import uuid4 from collections import deque from enum import Enum -from typing 
import Union, List, Any, Dict, Sequence, Iterator, Set -from .utils import (current_datetime_ms_str, create_default_logger, run_coro_somehow, run_coro_sync, wrap_text, - raise_local_exception) +from .utils import create_default_logger, run_method_somehow, wrap_text from .config import global_config from .constants import ZMQ_PROTOCOLS from .serializers import (JSONSerializer, PickleSerializer, BaseSerializer, SerpentSerializer, # DillSerializer, - serializers) + serializers) from ..param.parameterized import Parameterized +# message types HANDSHAKE = b'HANDSHAKE' INVALID_MESSAGE = b'INVALID_MESSAGE' TIMEOUT = b'TIMEOUT' - INSTRUCTION = b'INSTRUCTION' REPLY = b'REPLY' + EVENT = b'EVENT' EVENT_SUBSCRIPTION = b'EVENT_SUBSCRIPTION' SUCCESS = b'SUCCESS' -FAILURE = b'FAILURE' + +# empty data EMPTY_BYTE = b'' EMPTY_DICT = {} +FAILURE = b'FAILURE' +# client types HTTP_SERVER = b'HTTP_SERVER' PROXY = b'PROXY' -TUNNELER = b'TUNNEL' - - - +TUNNELER = b'TUNNELER' # message passer from inproc client to inrproc server within RPC + + +""" +Message indices + +client's message to server: |br| +[address, bytes(), client type, message type, message id, timeout, instruction, arguments, execution_context] |br| +[ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 ] |br| + +[address, bytes(), server_type, message_type, message id, data]|br| +[ 0 , 1 , 2 , 3 , 4 , 5 ]|br| +""" +# CM = Client Message +CM_INDEX_ADDRESS = 0 +CM_INDEX_CLIENT_TYPE = 2 +CM_INDEX_MESSAGE_TYPE = 3 +CM_INDEX_MESSAGE_ID = 4 +CM_INDEX_TIMEOUT = 5 +CM_INDEX_INSTRUCTION = 6 +CM_INDEX_ARGUMENTS = 7 +CM_INDEX_EXECUTION_CONTEXT = 8 + +# SM = Server Message +SM_INDEX_ADDRESS = 0 +SM_INDEX_SERVER_TYPE = 2 +SM_INDEX_MESSAGE_TYPE = 3 +SM_INDEX_MESSAGE_ID = 4 +SM_INDEX_DATA = 5 + +# Server types - currently useless metadata class ServerTypes(Enum): UNKNOWN_TYPE = b'UNKNOWN_TYPE' EVENTLOOP = b'EVENTLOOP' @@ -61,11 +93,11 @@ def exit(self) -> None: Cleanup method to terminate ZMQ sockets and contexts before quitting. 
Called by `__del__()` automatically. Each subclass server/client should implement their version of exiting if necessary. """ - raise NotImplementedError("implement exit() to gracefully exit ZMQ in {}.".format(self.__class__)) + raise NotImplementedError("Implement exit() to gracefully exit ZMQ in {}.".format(self.__class__)) - def create_socket(self, context : Union[zmq.asyncio.Context, zmq.Context], instance_name : str, identity : str, - bind : bool = False, protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC, socket_type : zmq.SocketType = zmq.ROUTER, - **kwargs) -> None: + def create_socket(self, context : typing.Union[zmq.asyncio.Context, zmq.Context], instance_name : str, + identity : str, bind : bool = False, protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC, + socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None: """ Create a socket with certain specifications. When successful, a logger is also created. Supported ZeroMQ protocols are TCP, IPC & INPROC. For IPC sockets, a file is created under TEMP_DIR of global configuration. @@ -87,8 +119,9 @@ def create_socket(self, context : Union[zmq.asyncio.Context, zmq.Context], insta TCP, IPC or INPROC. Message crafting/passing/routing is protocol invariant as suggested by ZeroMQ docs. socket_type: zmq.SocketType, default zmq.ROUTER Usually a ROUTER socket is implemented for both client-server and peer-to-peer communication - socket_address: str - applicable only for TCP socket to find the correct socket to connect. + **kwargs: dict + socket_address: str + applicable only for TCP socket to find the correct socket to connect. Returns ------- @@ -160,10 +193,10 @@ class BaseAsyncZMQ(BaseZMQ): Base class for all async ZMQ servers and clients. 
""" - def create_socket(self, instance_name : str, context : Union[zmq.asyncio.Context, None], *, identity : str, bind : bool = False, - protocol : str = "IPC", socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None: + def create_socket(self, instance_name : str, context : typing.Union[zmq.asyncio.Context, None], *, identity : str, + bind : bool = False, protocol : str = "IPC", socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None: """ - Overloads ``create_socket()`` to create, bind/connect an async socket. A async context is create if none is supplied. + Overloads ``create_socket()`` to create, bind/connect an async socket. A async context is created if none is supplied. """ if context and not isinstance(context, zmq.asyncio.Context): raise TypeError("async ZMQ message broker accepts only async ZMQ context. supplied type {}".format(type(context))) @@ -176,10 +209,11 @@ class BaseSyncZMQ(BaseZMQ): Base class for all sync ZMQ servers and clients. """ - def create_socket(self, instance_name : str, context : Union[zmq.Context, None], *, identity : str, bind : bool = False, - protocol : str = "IPC", socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None: + def create_socket(self, instance_name : str, context : typing.Union[zmq.Context, None], *, identity : str, + bind : bool = False, protocol : str = "IPC", socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None: """ - Overloads ``create_socket()`` to create, bind/connect an synchronous socket. A (synchronous) context is create if none is supplied. + Overloads ``create_socket()`` to create, bind/connect a synchronous socket. A (synchronous) context is created + if none is supplied. """ if context and not isinstance(context, zmq.Context): raise TypeError("sync ZMQ message broker accepts only sync ZMQ context. 
supplied type {}".format(type(context))) @@ -189,19 +223,25 @@ def create_socket(self, instance_name : str, context : Union[zmq.Context, None], class BaseZMQServer(BaseZMQ): """ - Implements serializer instantiation and message handling for ZMQ servers and can be subclassed by all - server instances irrespective of sync or async. For HTTP clients, json_serializer is necessary and for other types - of clients, any of the allowed serializer is possible. The messaging contract does not depend on sync or async - implementation. - - The message contract is as follows: - - client's message to server: |br| - [address, bytes(), client type, message type, msg id, instruction, arguments, execution_context] |br| - [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 ] |br| - - server's message to client: + Implements serializer instantiation and message parsing for ZMQ servers and can be subclassed by all + server instances irrespective of sync or async. For HTTP clients, json_serializer is necessary and for RPC clients, + any of the allowed serializer is possible. As suggested by ZMQ, a messaging contract is defined which is defined + as follows: + + client's message to server: + :: + [address, bytes(), client type, message type, messsage id, + [ 0 , 1 , 2 , 3 , 4 , + + timeout, instruction, arguments, execution context] + 5 , 6 , 7 , 8 ] + + server's message to client: + :: + [address, bytes(), server_type, message_type, message id, data] + [ 0 , 1 , 2 , 3 , 4 , 5 ] + The messaging contract does not depend on sync or async implementation. 
Parameters ---------- @@ -212,8 +252,8 @@ class BaseZMQServer(BaseZMQ): rpc_serializer: any of hololinked.server.serializers.serializer, default serpent serializer used to send message to RPC clients """ - def __init__(self, server_type : Enum, json_serializer : Union[None, JSONSerializer] = None, - rpc_serializer : Union[str, BaseSerializer, None] = None) -> None: + def __init__(self, server_type : Enum, json_serializer : typing.Union[None, JSONSerializer] = None, + rpc_serializer : typing.Union[str, BaseSerializer, None] = None) -> None: if json_serializer is None or isinstance(json_serializer, JSONSerializer): self.json_serializer = json_serializer or JSONSerializer() else: @@ -224,45 +264,69 @@ def __init__(self, server_type : Enum, json_serializer : Union[None, JSONSeriali self.rpc_serializer = serializers.get(rpc_serializer, SerpentSerializer)() else: raise ValueError("invalid proxy serializer option for {}. Given option : {}".format(self.__class__, rpc_serializer)) - self.server_type : Enum = server_type + self.server_type = server_type # type: bytes super().__init__() - def parse_client_message(self, message : List[bytes], socket : zmq.Socket, deserialize = True) -> Any: + def parse_client_message(self, message : typing.List[bytes]) -> typing.List[typing.Union[bytes, typing.Any]]: """ deserializes important parts of the client's message, namely instruction, arguments, execution context - based on the client type. For handshake, automatically handles handshake. In case of exceptions while - deserializing, automatically sends and invalid message to client informing the nature of exception with the + based on the client type. For handshake messages, automatically handles handshake. In case of exceptions while + deserializing, automatically sends an invalid message to client informing the nature of exception with the exception metadata. 
- client's message to server looks as follows: |br| - [address, bytes(), client type, message type, msg id, instruction, arguments, execution_context] |br| - [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 ] |br| + client's message to server: + :: + [address, bytes(), client type, message type, messsage id, + [ 0 , 1 , 2 , 3 , 4 , + + timeout, instruction, arguments, execution context] + 5 , 6 , 7 , 8 ] + + Execution Context Definitions (typing.Dict[str, typing.Any] or JSON): + - "plain_reply" - does not return state + - "fetch_execution_logs" - fetches logs that were accumulated while execution + + Parameters + ---------- + message: List[bytes] + message received from client + + Returns + ------- + message: List[bytes | Any] + message with instruction, arguments and execution context deserialized + """ try: - message_type = message[3] + message_type = message[CM_INDEX_MESSAGE_TYPE] if message_type == INSTRUCTION: - if deserialize: - client_type = message[2] - if client_type == PROXY: - message[5] = self.rpc_serializer.loads(message[5]) # type: ignore - message[6] = self.rpc_serializer.loads(message[6]) # type: ignore - message[7] = self.rpc_serializer.loads(message[7]) # type: ignore - elif client_type == HTTP_SERVER: - message[5] = self.json_serializer.loads(message[5]) # type: ignore - message[6] = self.json_serializer.loads(message[6]) # type: ignore - message[7] = self.json_serializer.loads(message[7]) # type: ignore + client_type = message[CM_INDEX_CLIENT_TYPE] + if client_type == PROXY: + message[CM_INDEX_INSTRUCTION] = self.rpc_serializer.loads(message[CM_INDEX_INSTRUCTION]) # type: ignore + message[CM_INDEX_ARGUMENTS] = self.rpc_serializer.loads(message[CM_INDEX_ARGUMENTS]) # type: ignore + message[CM_INDEX_EXECUTION_CONTEXT] = self.rpc_serializer.loads(message[CM_INDEX_EXECUTION_CONTEXT]) # type: ignore + elif client_type == HTTP_SERVER: + message[CM_INDEX_INSTRUCTION] = self.json_serializer.loads(message[CM_INDEX_INSTRUCTION]) # type: ignore + 
message[CM_INDEX_ARGUMENTS] = self.json_serializer.loads(message[CM_INDEX_ARGUMENTS]) # type: ignore + message[CM_INDEX_EXECUTION_CONTEXT] = self.json_serializer.loads(message[CM_INDEX_EXECUTION_CONTEXT]) # type: ignore return message elif message_type == HANDSHAKE: - self.handshake(message[0], socket) - except Exception as E: - self.handle_invalid_message(message, E, socket) + self.handshake(message) + except Exception as ex: + self.handle_invalid_message(message, ex) - def craft_reply_from_arguments(self, address : bytes, client_type: bytes, message_type : bytes, message_id : bytes = b'', - data : Any = None) -> List[bytes]: + + def craft_reply_from_arguments(self, address : bytes, client_type: bytes, message_type : bytes, + message_id : bytes = b'', data : typing.Any = None) -> typing.List[bytes]: """ call this method to craft an arbitrary reply or message to the client using the method arguments. + server's message to client: + :: + [address, bytes(), server_type, message_type, message id, data] + [ 0 , 1 , 2 , 3 , 4 , 5 ] + Parameters ---------- address: bytes @@ -277,7 +341,7 @@ def craft_reply_from_arguments(self, address : bytes, client_type: bytes, messag Returns ------- message: List[bytes] - the crafted reply with information in the correct positions + the crafted reply with information in the correct positions within the list """ if client_type == HTTP_SERVER: data = self.json_serializer.dumps(data) @@ -286,21 +350,24 @@ def craft_reply_from_arguments(self, address : bytes, client_type: bytes, messag return [ address, - bytes(), - self.server_type.value, + EMPTY_BYTE, + self.server_type, message_type, message_id, data ] - def craft_reply_from_client_message(self, original_client_message : List[bytes], data : Any = None, **overrides) -> List[bytes]: + + def craft_reply_from_client_message(self, original_client_message : typing.List[bytes], + data : typing.Any = None) -> typing.List[bytes]: """ craft a reply with certain data automatically from an 
originating client message. The client's address, type required for serialization requirements, message id etc. are automatically created from the original message. - client's message to server looks as follows: |br| - [address, bytes(), client type, message type, msg id, instruction, arguments, execution_context] |br| - [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 ] |br| + server's message to client: + :: + [address, bytes(), server_type, message_type, message id, data] + [ 0 , 1 , 2 , 3 , 4 , 5 ] Parameters ---------- @@ -312,9 +379,9 @@ def craft_reply_from_client_message(self, original_client_message : List[bytes], Returns ------- message: List[bytes] - + the crafted reply with information in the correct positions within the list """ - client_type = original_client_message[2] + client_type = original_client_message[CM_INDEX_CLIENT_TYPE] if client_type == HTTP_SERVER: data = self.json_serializer.dumps(data) elif client_type == PROXY: @@ -322,22 +389,17 @@ def craft_reply_from_client_message(self, original_client_message : List[bytes], else: raise ValueError("invalid client type given '{}' for preparing message to send from '{}' of type {}".format( client_type, self.identity, self.__class__)) - reply = [ - original_client_message[0], - bytes(), - self.server_type.value, - REPLY, - original_client_message[4], - data - ] - if len(overrides) == 0: - return reply - for key, value in overrides.items(): - if key == 'message_type': - reply[3] = value - return reply - - def handshake(self, address : bytes, socket : zmq.Socket) -> None: + return [ + original_client_message[CM_INDEX_ADDRESS], + EMPTY_BYTE, + self.server_type, + REPLY, + original_client_message[CM_INDEX_MESSAGE_ID], + data + ] + + + def handshake(self, original_client_message : typing.List[bytes]) -> None: """ pass a handshake message to client. Absolutely mandatory to ensure initial messages do not get lost because of ZMQ's very tiny but significant initial delay after creating socket. 
@@ -351,9 +413,15 @@ def handshake(self, address : bytes, socket : zmq.Socket) -> None: ------- None """ - run_coro_somehow(self._handshake(address, socket)) + run_method_somehow(self._handshake(original_client_message)) + + def _handshake(self, original_client_message : typing.List[bytes]) -> None: + raise NotImplementedError( + wrap_text("handshake cannot be handled - implement _handshake in {} to handshake.".format( + self.__class__))) + - def handle_invalid_message(self, original_client_message : List[bytes], exception : Exception, socket : zmq.Socket) -> None: + def handle_invalid_message(self, original_client_message : typing.List[bytes], exception : Exception) -> None: """ pass an invalid message to the client when an exception occurred while parsing the message from the client (``parse_client_message()``) @@ -369,9 +437,15 @@ def handle_invalid_message(self, original_client_message : List[bytes], exceptio ------- None """ - run_coro_somehow(self._handle_invalid_message(original_client_message, exception, socket)) + run_method_somehow(self._handle_invalid_message(original_client_message, exception)) - def handle_timeout(self, original_client_message : typing.Any, socket : zmq.Socket) -> None: + def _handle_invalid_message(self, message : typing.List[bytes], exception : Exception) -> None: + raise NotImplementedError( + wrap_text("invalid message cannot be handled - implement _handle_invalid_message in {} to handle invalid messages.".format( + self.__class__))) + + + def handle_timeout(self, original_client_message : typing.List[bytes]) -> None: """ pass timeout message to the client when the instruction could not be executed within specified timeout @@ -385,40 +459,59 @@ def handle_timeout(self, original_client_message : typing.Any, socket : zmq.Sock ------- None """ - run_coro_somehow(self._handle_timeout(original_client_message[0], original_client_message[4], socket)) + run_method_somehow(self._handle_timeout(original_client_message)) + + def 
_handle_timeout(self, original_client_message : typing.List[bytes]) -> None: + raise NotImplementedError( + wrap_text("timeouts cannot be handled - implement _handle_timeout in {} to handle timeout.".format( + self.__class__))) + + - async def _handshake(self, address : bytes, socket : zmq.Socket) -> None: +class BaseAsyncZMQServer(BaseZMQServer): + """ + Common to all async ZMQ servers + """ + + async def _handshake(self, original_client_message : typing.List[bytes]) -> None: """ - Inner method that handles handshake. scheduled by ``handshake()``, signature same as ``handshake()``. + Inner method that handles handshake. scheduled by ``handshake()`` method, signature same as ``handshake()``. """ - await socket.send_multipart(self.craft_reply_from_arguments(address, b'PROXY', HANDSHAKE)) - self.logger.info("sent handshake to client '{}'".format(address)) - - async def _handle_timeout(self, address: bytes, message_id : bytes, socket : zmq.Socket) -> None: + await self.socket.send_multipart(self.craft_reply_from_arguments(original_client_message[CM_INDEX_ADDRESS], + original_client_message[CM_INDEX_CLIENT_TYPE], HANDSHAKE, original_client_message[CM_INDEX_MESSAGE_ID], + EMPTY_BYTE)) + self.logger.info("sent handshake to client '{}'".format(original_client_message[CM_INDEX_ADDRESS])) + + + async def _handle_timeout(self, original_client_message : typing.List[bytes]) -> None: """ Inner method that handles timeout. scheduled by ``handle_timeout()``, signature same as ``handle_timeout``. 
""" - await socket.send_multipart(self.craft_reply_from_arguments(address, b'PROXY', TIMEOUT, message_id)) + await self.socket.send_multipart(self.craft_reply_from_arguments(original_client_message[CM_INDEX_ADDRESS], + original_client_message[CM_INDEX_CLIENT_TYPE], TIMEOUT, original_client_message[CM_INDEX_MESSAGE_ID])) + - async def _handle_invalid_message(self, original_client_message : List[bytes], exception : Exception, socket : zmq.Socket) -> None: + async def _handle_invalid_message(self, original_client_message : typing.List[bytes], exception : Exception) -> None: """ Inner method that handles invalid messages. scheduled by ``handle_invalid_message()``, signature same as ``handle_invalid_message()``. """ - await socket.send_multipart(self.craft_reply_from_client_message(original_client_message, exception, - message_type=INVALID_MESSAGE)) - self.logger.info("sent exception message to client '{}' : '{}'".format(original_client_message[0], str(exception))) + await self.socket.send_multipart(self.craft_reply_from_arguments(original_client_message[CM_INDEX_ADDRESS], + original_client_message[CM_INDEX_CLIENT_TYPE], INVALID_MESSAGE, + original_client_message[CM_INDEX_MESSAGE_ID]), exception) + self.logger.info("sent exception message to client '{}' : '{}'".format(original_client_message[CM_INDEX_ADDRESS], + str(exception))) -class AsyncZMQServer(BaseZMQServer, BaseAsyncZMQ): +class AsyncZMQServer(BaseAsyncZMQServer, BaseAsyncZMQ): """ Implements blocking (non-polled) async receive instructions and send replies. 
""" - def __init__(self, instance_name : str, server_type : Enum, context : Union[zmq.asyncio.Context, None] = None, + def __init__(self, instance_name : str, server_type : Enum, context : typing.Union[zmq.asyncio.Context, None] = None, protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC, socket_type : zmq.SocketType = zmq.ROUTER, **kwargs) -> None: - BaseZMQServer.__init__(self, server_type, json_serializer=kwargs.get('json_serializer', None), + BaseAsyncZMQServer.__init__(self, server_type, json_serializer=kwargs.get('json_serializer', None), rpc_serializer=kwargs.get('rpc_serializer', None)) BaseAsyncZMQ.__init__(self) self.instance_name = instance_name @@ -426,7 +519,8 @@ def __init__(self, instance_name : str, server_type : Enum, context : Union[zmq. socket_type=socket_type, socket_address=kwargs.get("socket_address", None)) self._terminate_context = context == None # terminate if it was created by instance - async def async_recv_instruction(self) -> Any: + + async def async_recv_instruction(self) -> typing.Any: """ Receive one instruction in a blocking form. Async for multi-server paradigm, each server should schedule this method in the event loop explicitly. This is taken care by the ``Eventloop`` & ``RPCServer``. @@ -437,13 +531,14 @@ async def async_recv_instruction(self) -> Any: received instruction with important content (instruction, arguments, execution context) deserialized. 
""" while True: - instruction = self.parse_client_message(await self.socket.recv_multipart(), self.socket) + instruction = self.parse_client_message(await self.socket.recv_multipart()) if instruction: - self.logger.debug("received instruction from client '{}' with msg-ID '{}'".format(instruction[0], - instruction[4])) + self.logger.debug("received instruction from client '{}' with msg-ID '{}'".format( + instruction[CM_INDEX_ADDRESS], instruction[CM_INDEX_MESSAGE_ID])) return instruction - async def async_recv_instructions(self) -> List[Any]: + + async def async_recv_instructions(self) -> typing.List[typing.Any]: """ Receive all currently available instructions in blocking form. Async for multi-server paradigm, each server should schedule this method in the event loop explicitly. This is taken care by the ``Eventloop`` & ``RPCServer``. @@ -456,21 +551,22 @@ async def async_recv_instructions(self) -> List[Any]: instructions = [await self.async_recv_instruction()] while True: try: - instruction = self.parse_client_message(await self.socket.recv_multipart(zmq.NOBLOCK), self.socket) + instruction = self.parse_client_message(await self.socket.recv_multipart(zmq.NOBLOCK)) if instruction: - self.logger.debug("received instruction from client '{}' with msg-ID '{}'".format(instruction[0], - instruction[4])) + self.logger.debug("received instruction from client '{}' with msg-ID '{}'".format( + instruction[CM_INDEX_ADDRESS], instruction[CM_INDEX_MESSAGE_ID])) instructions.append(instruction) except zmq.Again: break return instructions - async def async_send_reply(self, original_client_message : List[bytes], data : Any) -> None: + + async def async_send_reply(self, original_client_message : typing.List[bytes], data : typing.Any) -> None: """ Send reply for an instruction. 
- Parameter - --------- + Parameters + ---------- original_client_message: List[bytes] original message so that the reply can be properly crafted and routed data: Any @@ -480,12 +576,10 @@ async def async_send_reply(self, original_client_message : List[bytes], data : A ------- None """ - reply = self.craft_reply_from_client_message(original_client_message, data) - await self.socket.send_multipart(reply) - self.logger.debug("sent reply to client '{}' with msg-ID '{}'".format(reply[0], reply[4])) - - async def async_send_event_subscription(self, consumer : str) -> None: - await self.socket.send_multipart([consumer, bytes(), EVENT_SUBSCRIPTION]) + await self.socket.send_multipart(self.craft_reply_from_client_message(original_client_message, data)) + self.logger.debug("sent reply to client '{}' with msg-ID '{}'".format(original_client_message[CM_INDEX_ADDRESS], + original_client_message[CM_INDEX_MESSAGE_ID])) + def exit(self) -> None: """ @@ -521,26 +615,28 @@ class AsyncPollingZMQServer(AsyncZMQServer): server type metadata - currently not useful/important context: Optional, zmq.asyncio.Context ZeroMQ Context object to use. All sockets share this context. Automatically created when None is supplied. + socket_type : zmq.SocketType, default zmq.ROUTER + socket type of ZMQ socket, default is ROUTER (enables address based routing of messages) protocol: Enum, default ZMQ_PROTOCOLS.IPC Use TCP for network access, IPC for multi-process applications, and INPROC for multi-threaded applications. poll_timeout: int, default 25 time in milliseconds to poll the sockets specified under ``procotols``. Useful for calling ``stop_polling()`` where the max delay to stop polling will be ``poll_timeout`` - server_type: Enum - metadata about the nature of the server - currently not important. 
- json_serializer: hololinked.server.serializers.JSONSerializer - serializer used to send message to HTTP Server - rpc_serializer: any of hololinked.server.serializers.serializer, default serpent - serializer used to send message to RPC clients + + **kwargs: + json_serializer: hololinked.server.serializers.JSONSerializer + serializer used to send message to HTTP Server + rpc_serializer: any of hololinked.server.serializers.serializer, default serpent + serializer used to send message to RPC clients """ - def __init__(self, instance_name : str, *, server_type : Enum, context : Union[zmq.asyncio.Context, None] = None, + def __init__(self, instance_name : str, *, server_type : Enum, context : typing.Union[zmq.asyncio.Context, None] = None, socket_type : zmq.SocketType = zmq.ROUTER, protocol : ZMQ_PROTOCOLS = ZMQ_PROTOCOLS.IPC, poll_timeout = 25, **kwargs) -> None: super().__init__(instance_name=instance_name, server_type=server_type, context=context, protocol=protocol, socket_type=socket_type, **kwargs) - self.poller = zmq.asyncio.Poller() self._instructions = [] + self.poller = zmq.asyncio.Poller() self.poller.register(self.socket, zmq.POLLIN) self.poll_timeout = poll_timeout @@ -557,7 +653,7 @@ def poll_timeout(self, value) -> None: raise ValueError("polling period must be an integer greater than 0, not {}. Value is considered in milliseconds.".format(value)) self._poll_timeout = value - async def poll_instructions(self) -> List[List[bytes]]: + async def poll_instructions(self) -> typing.List[typing.List[bytes]]: """ poll for instructions with specified timeout (``poll_timeout``) and return if any instructions are available. This method blocks, so make sure other methods are scheduled which can stop polling. @@ -596,39 +692,45 @@ def exit(self) -> None: """ unregister socket from poller and terminate socket and context. 
""" - self.poller.unregister(self.socket) + try: + self.poller.unregister(self.socket) + except Exception as ex: + self.logger.warn(f"suppressing exception while closing socket {self.identity} - {str(ex)}") return super().exit() class ZMQServerPool(BaseZMQServer): """ - Implements pool of sockets + Implements pool of async ZMQ servers (& their sockets) """ - def __init__(self, instance_names : Union[List[str], None] = None, **kwargs) -> None: + def __init__(self, instance_names : typing.Union[typing.List[str], None] = None, **kwargs) -> None: self.context = zmq.asyncio.Context() - self.pool : Dict[str, Union[AsyncZMQServer, AsyncPollingZMQServer]] = dict() + self.pool = dict() # type: typing.Dict[str, typing.Union[AsyncZMQServer, AsyncPollingZMQServer]] self.poller = zmq.asyncio.Poller() if instance_names: for instance_name in instance_names: - self.pool[instance_name] = AsyncZMQServer(instance_name, ServerTypes.UNKNOWN_TYPE, self.context, + self.pool[instance_name] = AsyncZMQServer(instance_name, ServerTypes.UNKNOWN_TYPE.value, self.context, **kwargs) for server in self.pool.values(): self.poller.register(server.socket, zmq.POLLIN) - super().__init__(server_type = ServerTypes.POOL, json_serializer = kwargs.get('json_serializer'), + super().__init__(server_type = ServerTypes.POOL.value, json_serializer = kwargs.get('json_serializer'), rpc_serializer = kwargs.get('rpc_serializer', None)) - def register_server(self, server : Union[AsyncZMQServer, AsyncPollingZMQServer]) -> None: + def register_server(self, server : typing.Union[AsyncZMQServer, AsyncPollingZMQServer]) -> None: self.pool[server.instance_name] = server self.poller.register(server.socket, zmq.POLLIN) - def deregister_server(self, server : Union[AsyncZMQServer, AsyncPollingZMQServer]) -> None: + def deregister_server(self, server : typing.Union[AsyncZMQServer, AsyncPollingZMQServer]) -> None: self.poller.unregister(server.socket) self.pool.pop(server.instance_name) @property def poll_timeout(self) -> int: 
+ """ + socket polling timeout in milliseconds greater than 0. + """ return self._poll_timeout @poll_timeout.setter @@ -637,16 +739,49 @@ def poll_timeout(self, value) -> None: raise ValueError("polling period must be an integer greater than 0, not {}. Value is considered in milliseconds.".format(value)) self._poll_timeout = value - async def async_recv_instruction(self, instance_name : str) -> Any: + async def async_recv_instruction(self, instance_name : str) -> typing.List: + """ + receive instruction for instance name + + Parameters + ---------- + instance_name : str + instance name of the ``RemoteObject`` or in this case, the ZMQ server. + """ return await self.pool[instance_name].async_recv_instruction() - async def async_recv_instructions(self, instance_name : str) -> Any: + async def async_recv_instructions(self, instance_name : str) -> typing.List[typing.List]: + """ + receive all available instructions for instance name + + Parameters + ---------- + instance_name : str + instance name of the ``RemoteObject`` or in this case, the ZMQ server. + """ return await self.pool[instance_name].async_recv_instructions() - async def async_send_reply(self, instance_name : str, original_client_message : List[bytes], data : Any) -> None: + async def async_send_reply(self, instance_name : str, original_client_message : typing.List[bytes], + data : typing.Any) -> None: + """ + send reply for instance name + + Parameters + ---------- + instance_name : str + instance name of the ``RemoteObject`` or in this case, the ZMQ server. + original_client_message: List[bytes] + instruction for which reply is being given + data: Any + data to be given as reply + """ await self.pool[instance_name].async_send_reply(original_client_message, data) - async def poll(self, strip_delimiter : bool = False) -> List[Any]: + async def poll(self) -> typing.List[typing.List[typing.Any]]: + """ + Pool for instruction in the entire server pool. 
Map the instruction to the correct instance + using the 0th index of the instruction. + """ self.stop_poll = False instructions = [] while not self.stop_poll: @@ -654,7 +789,7 @@ async def poll(self, strip_delimiter : bool = False) -> List[Any]: for socket, _ in sockets: while True: try: - instruction = self.parse_client_message(await socket.recv_multipart(zmq.NOBLOCK), socket) + instruction = self.parse_client_message(await socket.recv_multipart(zmq.NOBLOCK)) except zmq.Again: break else: @@ -663,6 +798,9 @@ async def poll(self, strip_delimiter : bool = False) -> List[Any]: return instructions def stop_polling(self) -> None: + """ + stop polling method ``poll()`` + """ self.stop_poll = True def exit(self) -> None: @@ -676,10 +814,10 @@ def exit(self) -> None: self.logger.warn("could not properly terminate context or attempted to terminate an already terminated context '{}'. Exception message : {}".format( self.identity, str(E))) - def __getitem__(self, key) -> Union[AsyncZMQServer, AsyncPollingZMQServer]: + def __getitem__(self, key) -> typing.Union[AsyncZMQServer, AsyncPollingZMQServer]: return self.pool[key] - def __iter__(self) -> Iterator[str]: + def __iter__(self) -> typing.Iterator[str]: return self.pool.__iter__() def __contains__(self, name : str) -> bool: @@ -707,7 +845,7 @@ class RPCServer(BaseZMQServer): where the max delay to stop polling will be ``poll_timeout`` """ - def __init__(self, instance_name : str, *, server_type : Enum, context : Union[zmq.asyncio.Context, None] = None, + def __init__(self, instance_name : str, *, server_type : Enum, context : typing.Union[zmq.asyncio.Context, None] = None, protocols : typing.List[ZMQ_PROTOCOLS] = [ZMQ_PROTOCOLS.TCP, ZMQ_PROTOCOLS.IPC, ZMQ_PROTOCOLS.INPROC], poll_timeout = 25, **kwargs) -> None: super().__init__(server_type, kwargs.get('json_serializer', None), kwargs.get('rpc_serializer', None)) @@ -730,19 +868,19 @@ def __init__(self, instance_name : str, *, server_type : Enum, context : Union[z 
self.poll_timeout = poll_timeout self.inproc_client = AsyncZMQClient(server_instance_name=f'{instance_name}/real', identity=f'{instance_name}/tunneler', client_type=TUNNELER, context=context, protocol=ZMQ_PROTOCOLS.INPROC) - self._instructions = deque() # type: typing.Iterable[typing.Tuple[typing.Any, asyncio.Event, asyncio.Future, zmq.Socket]] + self._instructions = deque() # type: typing.Iterable[typing.Tuple[typing.List[bytes], asyncio.Event, asyncio.Future, zmq.Socket]] self._instructions_event = asyncio.Event() - self._socket_to_server_map = { - self.tcp_server.socket : self.tcp_server, - self.ipc_server.socket : self.ipc_server, - self.inproc_server.socket : self.inproc_server - } self.identity = f"{instance_name}/rpc-server" self.logger = self.get_logger(instance_name, 'RPC', 'MIXED') + async def handshake_complete(self): + """ + handles inproc client's handshake with ``RemoteObject``'s inproc server + """ await self.inproc_client.handshake_complete() + def prepare(self): """ registers socket polling method and message tunnelling methods to the running @@ -752,51 +890,76 @@ def prepare(self): eventloop.call_soon(lambda : asyncio.create_task(self.poll())) eventloop.call_soon(lambda : asyncio.create_task(self.tunnel_message_to_remote_objects())) + @property def poll_timeout(self) -> int: + """ + socket polling timeout in milliseconds greater than 0. + """ return self._poll_timeout @poll_timeout.setter def poll_timeout(self, value) -> None: if not isinstance(value, int) or value < 0: - raise ValueError("polling period must be an integer greater than 0, not {}. 
Value is considered in milliseconds.".format(value))
+            raise ValueError(("polling period must be an integer greater than 0, not {}. "
+                "Value is considered in milliseconds.").format(value))
         self._poll_timeout = value
-    
+
+
+    def _get_timeout_from_instruction(self, message : typing.Tuple[bytes]) -> float:
+        """
+        Unlike ``parse_client_message()``, this method only retrieves the timeout parameter
+        """
+        client_type = message[CM_INDEX_CLIENT_TYPE]
+        if client_type == PROXY:
+            return self.rpc_serializer.loads(message[CM_INDEX_TIMEOUT])
+        elif client_type == HTTP_SERVER:
+            return self.json_serializer.loads(message[CM_INDEX_TIMEOUT])
+
+
     async def poll(self):
         """
-        poll for instructions and append them to instructions list to pass them to eventloop inproc server using an
-        inner inproc client. Register the messages for timeout calculation.
+        poll for instructions and append them to instructions list to pass them to ``Eventloop``/``RemoteObject``'s inproc
+        server using an inner inproc client. Registers the messages for timeout calculation.
""" self.stop_poll = False eventloop = asyncio.get_event_loop() while not self.stop_poll: - sockets = await self.poller.poll(self._poll_timeout) + sockets : typing.Tuple[zmq.Socket, int] = await self.poller.poll(self._poll_timeout) # type for socket, _ in sockets: while True: try: original_instruction = await socket.recv_multipart(zmq.NOBLOCK) - parsed_instruction = self.parse_client_message(original_instruction, socket, deserialize=False) + if original_instruction[CM_INDEX_MESSAGE_TYPE] == HANDSHAKE: + handshake_task = asyncio.create_task(self._handshake(original_instruction, socket)) + eventloop.call_soon(lambda : handshake_task) + timeout = self._get_timeout_from_instruction(original_instruction) + ready_to_process_event = None + timeout_task = None + if timeout is not None: + ready_to_process_event = asyncio.Event() + timeout_task = asyncio.create_task(self.process_timeouts(original_instruction, + ready_to_process_event, timeout, socket)) + eventloop.call_soon(lambda : timeout_task) except zmq.Again: break + except Exception as ex: + # handle invalid message + self.logger.error(f"exception occurred for message id {original_instruction[CM_INDEX_MESSAGE_ID]} - {str(ex)}") + invalid_message_task = asyncio.create_task(self._handle_invalid_message(original_instruction, + ex, socket)) + eventloop.call_soon(lambda: invalid_message_task) else: - if parsed_instruction: - timeout = 3 # parsed_instruction[7].get("timeout", None) - ready_to_process_event = None - timeout_task = None - if timeout is not None: - ready_to_process_event = asyncio.Event() - timeout_task = asyncio.create_task(self.process_timeouts(ready_to_process_event, socket, original_instruction, timeout)) - eventloop.call_soon(lambda : timeout_task) - self._instructions.append((original_instruction, ready_to_process_event, timeout_task, socket)) - print("instruction in RPC", original_instruction) - self._instructions_event.set() + self._instructions.append((original_instruction, ready_to_process_event, + 
timeout_task, socket)) + + # print("instruction in RPC", original_instruction) + self._instructions_event.set() async def tunnel_message_to_remote_objects(self): """ - client's message to server looks as follows: - [address, bytes(), client type, message type, msg id, instruction, arguments, execution_context] - [ 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 ] + message tunneler between external sockets and interal inproc client """ while not self.stop_poll: if len(self._instructions) > 0: @@ -805,42 +968,59 @@ async def tunnel_message_to_remote_objects(self): if ready_to_process_event is not None: ready_to_process_event.set() timeout = await timeout_task - print("timeout result - ", timeout) + # print("timeout result - ", timeout) if ready_to_process_event is None or not timeout: - original_address = message[0] - message[0] = self.inproc_client.server_address # replace address - print("original address", original_address, "inproc address", message[0]) + original_address = message[CM_INDEX_ADDRESS] + message[CM_INDEX_ADDRESS] = self.inproc_client.server_address # replace address + # print("original address", original_address, "inproc address", message[0]) await self.inproc_client.socket.send_multipart(message) - print("*********sent message to inproc") + # print("*********sent message to inproc") reply = await self.inproc_client.socket.recv_multipart() - print("--------received message from inproc") - reply[0] = original_address + # print("--------received message from inproc") + reply[SM_INDEX_ADDRESS] = original_address await origin_socket.send_multipart(reply) - print("###### sent message to client") + # print("###### sent message to client") else: await self._instructions_event.wait() self._instructions_event.clear() - async def process_timeouts(self, ready_to_process_event : asyncio.Event, origin_socket : zmq.Socket, - original_message : typing.List, timeout : typing.Optional[float] = None) -> bool: + async def process_timeouts(self, original_client_message : typing.List, 
ready_to_process_event : asyncio.Event, + timeout : typing.Optional[float], origin_socket : zmq.Socket) -> bool: + """ + replies timeout to client if timeout occured and prevents the instruction from being executed. + """ try: await asyncio.wait_for(ready_to_process_event.wait(), timeout) return False except TimeoutError: - self.handle_timeout(original_message, origin_socket) + await origin_socket.send_multipart(self.craft_reply_from_arguments(original_client_message[CM_INDEX_ADDRESS], + original_client_message[CM_INDEX_CLIENT_TYPE], TIMEOUT, original_client_message[CM_INDEX_MESSAGE_ID])) return True - + + async def _handle_invalid_message(self, original_client_message: builtins.list[builtins.bytes], exception: builtins.Exception, + originating_socket : zmq.Socket) -> None: + await originating_socket.send_multipart(self.craft_reply_from_arguments( + original_client_message[CM_INDEX_ADDRESS], original_client_message[CM_INDEX_CLIENT_TYPE], + INVALID_MESSAGE, original_client_message[CM_INDEX_MESSAGE_ID], exception)) + + async def _handshake(self, original_client_message: builtins.list[builtins.bytes], + originating_socket : zmq.Socket) -> None: + await originating_socket.send_multipart(self.craft_reply_from_arguments( + original_client_message[CM_INDEX_ADDRESS], + original_client_message[CM_INDEX_CLIENT_TYPE], HANDSHAKE, original_client_message[CM_INDEX_MESSAGE_ID], + EMPTY_BYTE)) + def exit(self): self.stop_poll = True sockets = list(self.poller._map.keys()) for i in range(len(sockets)): # iterating over keys will cause dictionary size change during iteration self.poller.unregister(sockets[i]) try: - self.inproc_client.exit() self.inproc_server.exit() self.ipc_server.exit() self.tcp_server.exit() + self.inproc_client.exit() except: pass self.context.term() @@ -850,23 +1030,32 @@ def exit(self): class BaseZMQClient(BaseZMQ): """ - Server to client: + Base class for all ZMQ clients irrespective of sync and async. 
- [address, bytes(), message_type, msg id, content] + server's reply to client + :: + [address, bytes(), server type , message_type, message id, content or response or reply] + [ 0 , 1 , 2 , 3 , 4 , 5 ] - Reply - [client address, bytes, REPLY, msg id, content] + Parameters + ---------- + server_instance_name: str + The instance name of the server (or ``RemoteObject``) + client_type: str + RPC or HTTP Server + **kwargs: + rpc_serializer: + custom implementation of RPC serializer if necessary + json_serializer: + custom implementation of JSON serializer if necessary - Execution Context Definitions: - "plain_reply" - "fetch_execution_logs" """ - def __init__(self, server_address : Union[bytes, None], server_instance_name : Union[str, None], - client_type : bytes, **kwargs) -> None: + def __init__(self, server_instance_name : str, client_type : bytes, **kwargs) -> None: if client_type in [PROXY, HTTP_SERVER, TUNNELER]: self.client_type = client_type else: - raise ValueError("invalid client type for {}. Given option {}".format(self.__class__, client_type)) + raise ValueError("Invalid client type for {}. Given option {}.".format(self.__class__, client_type)) self.rpc_serializer = None self.json_serializer = None if client_type == HTTP_SERVER: @@ -874,139 +1063,271 @@ def __init__(self, server_address : Union[bytes, None], server_instance_name : U if json_serializer is None or isinstance(json_serializer, JSONSerializer): self.json_serializer = json_serializer or JSONSerializer() else: - raise ValueError("invalid JSON serializer option for {}. Given option {}".format(self.__class__, json_serializer)) + raise ValueError("Invalid JSON serializer option for {}. 
Given option {}.".format(self.__class__, + json_serializer)) elif client_type == PROXY: rpc_serializer = kwargs.get("rpc_serializer", None) if rpc_serializer is None or isinstance(rpc_serializer, (PickleSerializer, SerpentSerializer, - JSONSerializer)):#, DillSerializer)): + JSONSerializer)): #, DillSerializer)): self.rpc_serializer = rpc_serializer or SerpentSerializer() elif isinstance(rpc_serializer, str) and rpc_serializer in serializers.keys(): self.rpc_serializer = serializers[rpc_serializer]() else: - raise ValueError("invalid proxy serializer option for {}. Given option {}".format(self.__class__, rpc_serializer)) - self.server_address = server_address + raise ValueError("invalid proxy serializer option for {}. Given option {}.".format(self.__class__, + rpc_serializer)) + if server_instance_name: + self.server_address = bytes(server_instance_name, encoding='utf-8') self.server_instance_name = server_instance_name - self.server_type = ServerTypes.UNKNOWN_TYPE + self.server_type = ServerTypes.UNKNOWN_TYPE.value super().__init__() - def parse_server_message(self, message : List[bytes]) -> Any: + def raise_local_exception(exception : typing.Dict[str, typing.Any]) -> None: + """ + raises an exception on client side using an exception from server by mapping it to the correct one based on type. + + Parameters + ---------- + exception: Dict[str, Any] + exception dictionary made by server with following keys - type, message, traceback, notes + + Raises + ------ + python exception based on type. 
If not found in builtins + """ + exc = getattr(builtins, exception["type"], None) + message = f"server raised exception, check following for server side traceback & above for client side traceback : " + if exc is None: + ex = Exception(message) + else: + ex = exc(message) + ex.__notes__ = exception["traceback"] + raise ex from None + + + def parse_server_message(self, message : typing.List[bytes], raise_client_side_exception : bool = False) -> typing.Any: """ - Server to client: + server's message to client: - [address, bytes(), message_type, msg id, content or response or reply] - [0 1 , 2 , 3 , 4 ] + :: + [address, bytes(), server type , message_type, message id, content or response or reply] + [ 0 , 1 , 2 , 3 , 4 , 5 ] + + Parameters + ---------- + message: List[bytes] + message sent be server + raise_client_side_exception: bool + raises exception from server on client + + Raises + ------ """ - message_type = message[3] + + message_type = message[SM_INDEX_MESSAGE_TYPE] if message_type == REPLY: if self.client_type == HTTP_SERVER: - message[5] = self.json_serializer.loads(message[5]) # type: ignore + message[SM_INDEX_DATA] = self.json_serializer.loads(message[SM_INDEX_DATA]) # type: ignore elif self.client_type == PROXY: - message[5] = self.rpc_serializer.loads(message[5]) # type: ignore + message[SM_INDEX_DATA] = self.rpc_serializer.loads(message[SM_INDEX_DATA]) # type: ignore return message elif message_type == HANDSHAKE: self.logger.debug("""handshake messages arriving out of order are silently dropped as receiving this message - means handshake was successful before. Received hanshake from {}""".format(message[0])) + means handshake was successful before. 
Received hanshake from {}""".format(message[0])) elif message_type == INVALID_MESSAGE: - raise Exception("Invalid message sent") - elif message_type == EVENT_SUBSCRIPTION: - return message[0], message[3], message[4] + if self.client_type == HTTP_SERVER: + message[SM_INDEX_DATA] = self.json_serializer.loads(message[SM_INDEX_DATA]) # type: ignore + elif self.client_type == PROXY: + message[SM_INDEX_DATA] = self.rpc_serializer.loads(message[SM_INDEX_DATA]) # type: ignore + self.raise_local_exception(message) + # if message[SM_INDEX_DATA].get('exception', None) is not None and raise_client_side_exception: + # self.raise_local_exception(message[SM_INDEX_DATA]['exception']) else: raise NotImplementedError("Unknown message type {} received. This message cannot be dealt.".format(message_type)) - def craft_instruction_from_arguments(self, instruction : str, arguments : Dict[str, Any], # type: ignore - context : Dict[str, Any]) -> List[bytes]: # type: ignore + + def craft_instruction_from_arguments(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT, + timeout : typing.Optional[float] = None, context : typing.Dict[str, typing.Any] = EMPTY_DICT) -> typing.List[bytes]: """ - message from client to server : + message from client to server: - [address, bytes(), client type, message type, msg id, instruction, arguments] - [ 0 , 1 , 2 , 3 , 4 , 5 , 6 ] + :: + [address, bytes(), client type, message type, message id, instruction, arguments] + [ 0 , 1 , 2 , 3 , 4 , 5 , 6 ] - Handshake - [client address, bytes, client_type, HANDSHAKE] """ - message_id = bytes(str(hash(instruction + current_datetime_ms_str())), encoding='utf-8') + message_id = bytes(str(uuid4()), encoding='utf-8') if self.client_type == HTTP_SERVER: + timeout : bytes = self.json_serializer.dumps(timeout) instruction : bytes = self.json_serializer.dumps(instruction) + # if arguments == b'': + # arguments : bytes = self.json_serializer.dumps({}) + # elif not isinstance(arguments, bytes): + 
arguments : bytes = self.json_serializer.dumps(arguments) context : bytes = self.json_serializer.dumps(context) - if arguments == b'': - arguments : bytes = self.json_serializer.dumps({}) - elif not isinstance(arguments, bytes): - arguments : bytes = self.json_serializer.dumps(arguments) - else: + elif self.client_type == PROXY: + timeout : bytes = self.rpc_serializer.dumps(timeout) instruction : bytes = self.rpc_serializer.dumps(instruction) - context : bytes = self.rpc_serializer.dumps(context) arguments : bytes = self.rpc_serializer.dumps(arguments) + context : bytes = self.rpc_serializer.dumps(context) + return [ self.server_address, EMPTY_BYTE, self.client_type, INSTRUCTION, message_id, + timeout, instruction, arguments, context ] - def craft_message(self, message_type : bytes): + + def craft_empty_message_with_type(self, message_type : bytes = HANDSHAKE): + """ + create handshake message, ignores + """ return [ - self.server_address, + self.server_address, EMPTY_BYTE, self.client_type, message_type, EMPTY_BYTE, EMPTY_BYTE, EMPTY_BYTE, + EMPTY_BYTE, EMPTY_BYTE ] class SyncZMQClient(BaseZMQClient, BaseSyncZMQ): + """ + Synchronous ZMQ client that connect with sync or async server based on ZMQ protocol. Works like REQ-REP socket. + Each request is blocking until response is received. Suitable for most purposes. + + Parameters + ---------- + server_instance_name: str + The instance name of the server (or ``RemoteObject``) + identity: str + Unique identity of the client to receive messages from the server. Each client connecting to same server must + still have unique ID. + client_type: str + RPC or HTTP Server + handshake: bool + when true, handshake with the server first before allowing first message and block until that handshake was + accomplished. 
+ protocol: str | Enum, TCP, IPC or INPROC, default IPC + protocol implemented by the server + **serializer: + rpc_serializer: + custom implementation of RPC serializer if necessary + json_serializer: + custom implementation of JSON serializer if necessary + """ def __init__(self, server_instance_name : str, identity : str, client_type = HTTP_SERVER, - handshake : bool = True, protocol : str = "IPC", context : Union[zmq.asyncio.Context, None] = None, + handshake : bool = True, protocol : str = "IPC", context : typing.Union[zmq.asyncio.Context, None] = None, **serializer) -> None: - BaseZMQClient.__init__(self, server_address = bytes(server_instance_name, encoding='utf-8'), - server_instance_name=server_instance_name, client_type=client_type, **serializer) + BaseZMQClient.__init__(self, server_instance_name=server_instance_name, + client_type=client_type, **serializer) BaseSyncZMQ.__init__(self) - self.create_socket(context or zmq.Context(), server_instance_name, identity) + self.create_socket(server_instance_name, context, identity=identity, protocol=protocol) self._terminate_context = context == None if handshake: self.handshake() - def send_instruction(self, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, context : Dict[str, Any] = EMPTY_DICT) -> bytes: - message = self.craft_instruction_from_arguments(instruction, arguments, context) + def send_instruction(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT, + timeout : typing.Optional[float] = None, context : typing.Dict[str, typing.Any] = EMPTY_DICT) -> bytes: + """ + send message to server. 
+ + client's message to server: + :: + [address, bytes(), client type, message type, messsage id, + [ 0 , 1 , 2 , 3 , 4 , + + timeout, instruction, arguments, execution context] + 5 , 6 , 7 , 8 ] + + Execution Context Definitions (typing.Dict[str, typing.Any] or JSON): + - "plain_reply" - does not return state + - "fetch_execution_logs" - fetches logs that were accumulated while execution + + Parameters + ---------- + instruction: str + unique str identifying a server side or ``RemoteObject`` resource. These values corresponding + to automatically extracted name from the object name or the URL_path prepended with the instance name. + arguments: Dict[str, Any] + if the instruction invokes a method, arguments of that method. + context: Dict[str, Any] + see execution context definitions + + Returns + ------- + message id : bytes + a byte representation of message id + """ + message = self.craft_instruction_from_arguments(instruction, arguments, timeout, context) self.socket.send_multipart(message) - self.logger.debug("sent instruction '{}' to server '{}' with msg-id {}".format(instruction, self.server_instance_name, - message[4])) - return message[4] + self.logger.debug("sent instruction '{}' to server '{}' with msg-id {}".format(instruction, + self.server_instance_name, message[SM_INDEX_MESSAGE_ID])) + return message[SM_INDEX_MESSAGE_ID] - def recv_reply(self, raise_client_side_exception : bool = False) -> Sequence[Union[bytes, Dict[str, Any]]]: + def recv_reply(self, raise_client_side_exception : bool = False) -> typing.List[typing.Union[ + bytes, typing.Dict[str, typing.Any]]]: + """ + Receives reply from server. Messages are identified by message id, so call this method immediately after + calling ``send_instruction()`` to avoid receiving messages out of order. Or, use other methods like + ``execute()``, ``read_attribute()`` or ``write_attribute()``. 
+ + Parameters + ---------- + raise_client_side_exception: bool, default False + if True, any exceptions raised during execution inside ``RemoteObject`` instance will be raised on the client. + See docs of ``raise_local_exception()`` for info on exception + """ while True: - reply = self.parse_server_message(self.socket.recv_multipart()) # type: ignore + reply = self.parse_server_message(self.socket.recv_multipart(), raise_client_side_exception) # type: ignore if reply: - self.logger.debug("received reply with msg-id {}".format(reply[3])) - if reply[5].get('exception', None) is not None and raise_client_side_exception: - raise_local_exception(reply[5]['exception']) + self.logger.debug("received reply with msg-id {}".format(reply[SM_INDEX_MESSAGE_ID])) return reply - def execute(self, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, context : Dict[str, Any] = EMPTY_DICT, - raise_client_side_exception : bool = False) -> Any: + def execute(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT, + context : typing.Dict[str, typing.Any] = EMPTY_DICT, raise_client_side_exception : bool = False + ) -> typing.List[typing.Union[bytes, typing.Dict[str, typing.Any]]]: + """ + send an instruction and receive the reply for it. + + Parameters + ---------- + instruction: str + unique str identifying a server side or ``RemoteObject`` resource. These values corresponding + to automatically extracted name from the object name or the URL_path prepended with the instance name. + arguments: Dict[str, Any] + if the instruction invokes a method, arguments of that method. 
+        context: Dict[str, Any]
+            see execution context definitions
+
+        Returns
+        -------
+        message id : bytes
+            a byte representation of message id
+        """
        self.send_instruction(instruction, arguments, context)
        return self.recv_reply(raise_client_side_exception)
 
-    def read_attribute(self, attribute_url : str, context : Dict[str, Any] = EMPTY_DICT,
-            raise_client_side_exception : bool = False) -> Any:
-        return self.execute(attribute_url+'/read', EMPTY_DICT, context, raise_client_side_exception)
-    
-    def write_attribute(self, attribute_url : str, value : Any, context : Dict[str, Any] = EMPTY_DICT,
-            raise_client_side_exception : bool = False) -> Any:
-        return self.execute(attribute_url+'/read', {"value" : value}, context, raise_client_side_exception)
 
     def handshake(self) -> None:
+        """
+        hanshake with server
+        """
         poller = zmq.Poller()
         poller.register(self.socket, zmq.POLLIN)
         while True:
-            self.socket.send_multipart(self.craft_message(HANDSHAKE))
+            self.socket.send_multipart(self.craft_empty_message_with_type(HANDSHAKE))
             self.logger.debug("sent Handshake to server '{}'".format(self.server_instance_name))
             if poller.poll(500):
                 try:
@@ -1017,7 +1338,7 @@ def handshake(self) -> None:
             if message[3] == HANDSHAKE:  # type: ignore
                 self.logger.info("client '{}' handshook with server '{}'".format(self.identity,
                     self.server_instance_name))
-                self.server_type = ServerTypes._value2member_map_[message[2]] # type: ignore
+                self.server_type = message[SM_INDEX_SERVER_TYPE]
                 break
             else:
                 raise ValueError('Handshake cannot be done. Another message arrived before handshake complete.')
@@ -1042,7 +1363,6 @@ def exit(self) -> None:
 
 
 class AsyncZMQClient(BaseZMQClient, BaseAsyncZMQ):
-
     """
     Asynchronous client to talk to a ZMQ server where the server is identified by the instance name. The identity of the
     client needs to be different from the server, unlike the ZMQ Server.
The client will also perform handshakes @@ -1050,78 +1370,136 @@ class AsyncZMQClient(BaseZMQClient, BaseAsyncZMQ): """ def __init__(self, server_instance_name : str, identity : str, client_type = HTTP_SERVER, - handshake : bool = True, protocol : str = "IPC", context : Union[zmq.asyncio.Context, None] = None, + handshake : bool = True, protocol : str = "IPC", context : typing.Union[zmq.asyncio.Context, None] = None, **serializer) -> None: - BaseZMQClient.__init__(self, server_address=bytes(server_instance_name, encoding='utf-8'), - server_instance_name=server_instance_name, client_type=client_type, **serializer) + BaseZMQClient.__init__(self, server_instance_name=server_instance_name, client_type=client_type, **serializer) BaseAsyncZMQ.__init__(self) self.create_socket(instance_name=server_instance_name, context=context, identity=identity, protocol=protocol) self._terminate_context = context == None - self.handshake_event = asyncio.Event() + self._handshake_event = asyncio.Event() if handshake: self.handshake() def handshake(self) -> None: - run_coro_somehow(self._handshake()) + """ + automatically called when handshake argument at init is True. When not automatically called, it is necessary + to call this method before awaiting ``handshake_complete()``. 
+ """ + run_method_somehow(self._handshake()) async def _handshake(self) -> None: - self.handshake_event.clear() + """ + inner method that performs handshake with server + """ + self._handshake_event.clear() poller = zmq.asyncio.Poller() poller.register(self.socket, zmq.POLLIN) while True: - await self.socket.send_multipart(self.craft_message(HANDSHAKE)) - self.logger.debug("sent Handshake to server '{}'".format(self.server_instance_name)) + await self.socket.send_multipart(self.craft_empty_message_with_type(HANDSHAKE)) + self.logger.debug("sent handshake to server - '{}'".format(self.server_instance_name)) if await poller.poll(500): try: msg = await self.socket.recv_multipart(zmq.NOBLOCK) except zmq.Again: pass else: - if msg[3] == HANDSHAKE: - self.logger.info("client '{}' handshook with server '{}'".format(self.identity,self.server_instance_name)) - self.server_type = ServerTypes._value2member_map_[msg[2]] + if msg[SM_INDEX_MESSAGE_TYPE] == HANDSHAKE: + self.logger.info("client '{}' handshook with server '{}'".format(self.identity, self.server_instance_name)) + self.server_type = msg[SM_INDEX_SERVER_TYPE] break else: - self.logger.info("handshake cannot be done with server '{}'. another message arrived before handshake complete.".format(self.server_instance_name)) - self.handshake_event.set() + self.logger.info("handshake cannot be done with server '{}'. 
another message arrived before handshake complete.".format( + self.server_instance_name)) poller.unregister(self.socket) + self._handshake_event.set() del poller async def handshake_complete(self): - await self.handshake_event.wait() + """ + wait for handshake to complete + """ + await self._handshake_event.wait() - async def async_send_instruction(self, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, - context : Dict[str, Any] = EMPTY_DICT) -> bytes: - message = self.craft_instruction_from_arguments(instruction, arguments, context) + async def async_send_instruction(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT, + timeout : typing.Optional[float] = None, context : typing.Dict[str, typing.Any] = EMPTY_DICT) -> bytes: + """ + send message to server. + + client's message to server: + :: + [address, bytes(), client type, message type, messsage id, + [ 0 , 1 , 2 , 3 , 4 , + + timeout, instruction, arguments, execution context] + 5 , 6 , 7 , 8 ] + + Execution Context Definitions (typing.Dict[str, typing.Any] or JSON): + - "plain_reply" - does not return state + - "fetch_execution_logs" - fetches logs that were accumulated while execution + + Parameters + ---------- + instruction: str + unique str identifying a server side or ``RemoteObject`` resource. These values corresponding + to automatically extracted name from the object name or the URL_path prepended with the instance name. + arguments: Dict[str, Any] + if the instruction invokes a method, arguments of that method. 
+ context: Dict[str, Any] + see execution context definitions + + Returns + ------- + message id : bytes + a byte representation of message id + """ + message = self.craft_instruction_from_arguments(instruction, arguments, timeout, context) await self.socket.send_multipart(message) - self.logger.debug("sent instruction '{}' to server '{}' with msg-id {}".format(instruction, self.server_instance_name, - message[3])) - return message[4] + self.logger.debug("sent instruction '{}' to server '{}' with msg-id {}".format(instruction, + self.server_instance_name, message[SM_INDEX_MESSAGE_ID])) + return message[SM_INDEX_MESSAGE_ID] - async def async_recv_reply(self, raise_client_side_exception) -> Sequence[Union[bytes, Dict[str, Any]]]: + async def async_recv_reply(self, raise_client_side_exception : bool) -> typing.List[typing.Union[bytes, + typing.Dict[str, typing.Any]]]: + """ + Receives reply from server. Messages are identified by message id, so call this method immediately after + calling ``send_instruction()`` to avoid receiving messages out of order. Or, use other methods like + ``execute()``, ``read_attribute()`` or ``write_attribute()``. + + Parameters + ---------- + raise_client_side_exception: bool, default False + if True, any exceptions raised during execution inside ``RemoteObject`` instance will be raised on the client. 
+ See docs of ``raise_local_exception()`` for info on exception + """ while True: - reply = self.parse_server_message(await self.socket.recv_multipart())# [2] # type: ignore + reply = self.parse_server_message(await self.socket.recv_multipart(), raise_client_side_exception)# [2] # type: ignore if reply: - self.logger.debug("received reply with message-id {}".format(reply[3])) - if reply[5].get('exception', None) is not None and raise_client_side_exception: - exc_info = reply[5]['exception'] - raise Exception("traceback : {},\nmessage : {},\ntype : {}".format('\n'.join(exc_info["traceback"]), - exc_info['message'], exc_info["type"])) + self.logger.debug("received reply with message-id {}".format(reply[SM_INDEX_MESSAGE_ID])) return reply - async def async_execute(self, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, context : Dict[str, Any] = EMPTY_DICT, - raise_client_side_exception = False): + async def async_execute(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT, + context : typing.Dict[str, typing.Any] = EMPTY_DICT, raise_client_side_exception = False): + """ + send an instruction and receive the reply for it. + + Parameters + ---------- + instruction: str + unique str identifying a server side or ``RemoteObject`` resource. These values corresponding + to automatically extracted name from the object name or the URL_path prepended with the instance name. + arguments: Dict[str, Any] + if the instruction invokes a method, arguments of that method. 
+ context: Dict[str, Any] + see execution context definitions + + Returns + ------- + message id : bytes + a byte representation of message id + """ await self.async_send_instruction(instruction, arguments, context) return await self.async_recv_reply(raise_client_side_exception) - async def read_attribute(self, attribute_url : str, context : Dict[str, Any] = EMPTY_DICT, - raise_client_side_exception : bool = False) -> Any: - return await self.async_execute(attribute_url+'/read', EMPTY_DICT, context, raise_client_side_exception) - - async def write_attribute(self, attribute_url : str, value : Any, context : Dict[str, Any] = EMPTY_DICT, - raise_client_side_exception : bool = False) -> Any: - return self.async_execute(attribute_url+'/write', {"value" : value}, context, raise_client_side_exception) - def exit(self) -> None: try: self.socket.close(0) @@ -1139,75 +1517,60 @@ def exit(self) -> None: -class AsyncZMQClientPool(BaseZMQClient): +class MessageMappedZMQClientPool(BaseZMQClient): + """ + Pool of clients where message ID can track the replies irrespective of order of arrival. 
+ """ - def __init__(self, server_instance_names : List[str], identity : str, client_type = HTTP_SERVER, - protocol : str = 'IPC', **serializer) -> None: - self.identity = identity + def __init__(self, server_instance_names: typing.List[str], identity: str, poll_timeout = 25, + protocol : str = 'IPC', client_type = HTTP_SERVER, **serializer) -> None: + self.identity = identity + self.logger = self.get_logger(identity, 'pooled', logging.DEBUG) + # this class does not call create_socket method + super().__init__(server_instance_name=None, client_type=client_type, **serializer) self.context = zmq.asyncio.Context() - self.pool : Dict[str, AsyncZMQClient] = dict() + self.pool = dict() # type: typing.Dict[str, AsyncZMQClient] for instance_name in server_instance_names: - self.pool[instance_name] = AsyncZMQClient(server_instance_name = instance_name, - identity = identity, client_type = client_type, handshake = True, protocol = protocol, - context = self.context, **serializer) + self.pool[instance_name] = AsyncZMQClient(server_instance_name=instance_name, + identity=identity, client_type=client_type, handshake=True, protocol=protocol, + context=self.context, rpc_serializer=self.rpc_serializer, json_serializer=self.json_serializer) self.poller = zmq.asyncio.Poller() for client in self.pool.values(): self.poller.register(client.socket, zmq.POLLIN) - self.logger = self.get_logger(identity, 'pooled', logging.DEBUG) # Both the client pool as well as the individual client get their serializers and client_types # This is required to implement pool level sending and receiving messages like polling of pool of sockets - super().__init__(server_address = None, server_instance_name = None, client_type = client_type, **serializer) - - async def poll(self) -> None : - raise NotImplementedError("implement poll function for AsyncZMQClientPool subclass {}".format(self.__class__)) + self.event_pool = AsyncioEventPool(len(server_instance_names)) + self.events_map = dict() # type: 
typing.Dict[bytes, asyncio.Event] + self.message_map = dict() + self.cancelled_messages = [] + self.poll_timeout = poll_timeout + self.stop_poll = False + - def register_client(self, instance_name : str, protocol : str = 'IPC'): - if instance_name not in self.pool.keys(): - self.pool[instance_name] = AsyncZMQClient(server_instance_name = instance_name, + def create_new(self, server_instance_name : str, protocol : str = 'IPC') -> None: + """ + Create new server with specified protocol. other arguments are taken from pool specifications. + + Parameters + ---------- + instance_name: str + instance name of server + protocol: str + protocol implemented by ZMQ server + """ + if server_instance_name not in self.pool.keys(): + self.pool[server_instance_name] = AsyncZMQClient(server_instance_name=server_instance_name, identity = self.identity, client_type = self.client_type, handshake = True, protocol = protocol, context = self.context, rpc_serializer = self.rpc_serializer, json_serializer = self.json_serializer) else: - raise ValueError("client already present in pool") - - def __contains__(self, name : str) -> bool: - return name in self.pool + raise ValueError(f"client for instance name {server_instance_name} already present in pool") - def __getitem__(self, key) ->AsyncZMQClient: - return self.pool[key] - - def __iter__(self) -> Iterator[AsyncZMQClient]: - return iter(self.pool.values()) - - def exit(self) -> None: - for client in self.pool.values(): - self.poller.unregister(client.socket) - client.exit() - self.logger.info("all client socket unregistered from pool for '{}'".format(self.__class__)) - try: - self.context.term() - self.logger.info("context terminated for '{}'".format(self.__class__)) - except: - pass - - - -class MessageMappedZMQClientPool(AsyncZMQClientPool): - """ - Pool of clients where message ID can track the replies irrespective of order of arrival. 
- """ - - def __init__(self, instance_names: List[str], identity: str, poll_timeout = 25, protocol : str = 'IPC', - client_type = HTTP_SERVER, **serializer) -> None: - super().__init__(instance_names, identity, client_type = client_type, protocol = protocol, **serializer) - self.event_pool = EventPool(len(instance_names)) - self.message_to_event_map : Dict[bytes, asyncio.Event] = dict() - self.shared_message_map = dict() - self.poll_timeout = poll_timeout - self.stop_poll = False - self.cancelled_messages = [] @property def poll_timeout(self) -> int: + """ + socket polling timeout in milliseconds greater than 0. + """ return self._poll_timeout @poll_timeout.setter @@ -1216,7 +1579,12 @@ def poll_timeout(self, value) -> None: raise ValueError("polling period must be an integer greater than 0, not {}. Value is considered in milliseconds".format(value)) self._poll_timeout = value + async def poll(self) -> None: + """ + Poll for replies from server. Since the client is message mapped, this method should be independently started + in the event loop. Sending message and retrieving a message mapped is still carried out by other methods. + """ self.logger.info("client polling started for sockets for {}".format(list(self.pool.keys()))) self.stop_poll = False event_loop = asyncio.get_event_loop() @@ -1227,32 +1595,43 @@ async def poll(self) -> None: try: reply = self.parse_server_message(await socket.recv_multipart(zmq.NOBLOCK)) except zmq.Again: + # errors in handle_message should reach the client. break - """ - errors in handle_message should reach the client. 
- """ else: if reply: - address, _, server_type, message_type, message_id, response = reply + address, _, server_type, message_type, message_id, data = reply self.logger.debug("received reply from server '{}' with message ID {}".format(address, message_id)) if message_id in self.cancelled_messages: self.cancelled_messages.remove(message_id) self.logger.debug(f'message_id {message_id} cancelled') continue try: - event = self.message_to_event_map[message_id] # type: ignore + event = self.events_map[message_id] except KeyError: - event_loop.call_soon(lambda: asyncio.create_task(self.resolve_reply(message_id, response))) + invalid_event_task = asyncio.create_task(self._resolve_reply(message_id, data)) + event_loop.call_soon(lambda: invalid_event_task) else: - self.shared_message_map[message_id] = response + self.message_map[message_id] = data event.set() - async def resolve_reply(self, message_id, return_value): + + async def _resolve_reply(self, message_id : bytes, data : typing.Any) -> None: + """ + This method is called when there is an asyncio Event not available for a message ID. This can happen only + when the server replied before the client created a asyncio.Event object. check ``async_execute()`` for details. 
+ + Parameters + ---------- + message_id: bytes + the message for which the event was not created + data: bytes + the data given by the server which needs to mapped to the message + """ max_number_of_retries = 100 for i in range(max_number_of_retries): - await asyncio.sleep(0.1) + await asyncio.sleep(0.025) try: - event = self.message_to_event_map[message_id] # type: ignore + event = self.events_map[message_id] except KeyError: if message_id in self.cancelled_messages: # Only for safety, likely should never reach here @@ -1261,105 +1640,219 @@ async def resolve_reply(self, message_id, return_value): return if i >= max_number_of_retries - 1: self.logger.error("unknown message id {} without corresponding event object".format(message_id)) - print(return_value) + # print(return_value) return else: - self.shared_message_map[message_id] = return_value + self.message_map[message_id] = data event.set() break - async def async_send_instruction(self, instance_name : str, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, - context : Dict[str, Any] = EMPTY_DICT) -> bytes: - message_id = await self.pool[instance_name].async_send_instruction(instruction, arguments, context) + + async def async_send_instruction(self, instance_name : str, instruction : str, + arguments : typing.Dict[str, typing.Any] = EMPTY_DICT, timeout : typing.Optional[float] = 3, + context : typing.Dict[str, typing.Any] = EMPTY_DICT) -> bytes: + """ + Send instruction to server with instance name. Replies are automatically polled & to be retrieved using + ``async_recv_reply()`` + + Parameters + ---------- + instance_name: str + instance name of the server + instruction: str + unique str identifying a server side or ``RemoteObject`` resource. These values corresponding + to automatically extracted name from the object name or the URL_path prepended with the instance name. + arguments: Dict[str, Any] + if the instruction invokes a method, arguments of that method. 
+ context: Dict[str, Any] + see execution context definitions + + Returns + ------- + message_id: bytes + created message ID + """ + message_id = await self.pool[instance_name].async_send_instruction(instruction, arguments, timeout, context) event = self.event_pool.pop() - self.message_to_event_map[message_id] = event + self.events_map[message_id] = event return message_id async def async_recv_reply(self, message_id : bytes, plain_reply : bool = False, raise_client_side_exception = False, - timeout : typing.Optional[int] = 3) -> Dict[str, Any]: - event = self.message_to_event_map[message_id] + timeout : typing.Optional[float] = None) -> typing.Dict[str, typing.Any]: + """ + Receive reply for specified message ID. + + Parameters + ---------- + message_id: bytes + the message id for which reply needs to eb fetched + plain_reply: bool, default False + strip reply of any other contents like state machine state + raise_client_side_exceptions: bool, default False + raise exceptions from server on client side + timeout: float, + client side timeout, not the same as timeout passed to server, recommended to be None in general cases. + Server side timeouts ensure start of execution of instructions within specified timeouts and + drops execution altogether if timeout occured. Client side timeouts only wait for message to come within + the timeout, but do not gaurantee non-execution. + + Returns + ------- + reply: dict, Any + dictionary when plain reply is False, any value returned from execution on the server side if plain reply is + True. 
+ + Raises + ------ + ValueError: + if supplied message id is not valid + TimeoutError: + if timeout is not None and reply did not arrive + """ + try: + event = self.events_map[message_id] + except KeyError: + raise ValueError(f"message id {message_id} unknown.") from None try: await asyncio.wait_for(event.wait(), timeout if (timeout and timeout > 0) else None) except TimeoutError: self.cancelled_messages.append(message_id) self.logger.debug(f'message_id {message_id} added to list of cancelled messages') + raise TimeoutError(f"Execution not completed within {timeout} seconds") from None else: - self.message_to_event_map.pop(message_id) - reply = self.shared_message_map.pop(message_id) + self.events_map.pop(message_id) + reply = self.message_map.pop(message_id) self.event_pool.completed(event) if not plain_reply and reply.get('exception', None) is not None and raise_client_side_exception: exc_info = reply['exception'] raise Exception("traceback : {},\nmessage : {},\ntype : {}".format ( '\n'.join(exc_info["traceback"]), exc_info['message'], exc_info["type"])) return reply - raise TimeoutError(f"Execution not completed within {timeout} seconds") - async def async_execute(self, instance_name : str, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, - context : Dict[str, Any] = EMPTY_DICT, raise_client_side_exception = False, - timeout : typing.Optional[int] = 3) -> Dict[str, Any]: - message_id = await self.async_send_instruction(instance_name, instruction, arguments, context) - return await self.async_recv_reply(message_id, context.get('plain_reply', False), raise_client_side_exception, - timeout) + async def async_execute(self, instance_name : str, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT, + context : typing.Dict[str, typing.Any] = EMPTY_DICT, raise_client_side_exception = False, + server_timeout : typing.Optional[float] = 3, client_timeout : typing.Optional[float] = None) -> typing.Dict[str, typing.Any]: + """ + sends message 
and receives reply. + + Parameters + ---------- + instance_name: str + instance name of the server + instruction: str + unique str identifying a server side or ``RemoteObject`` resource. These values corresponding + to automatically extracted name from the object name or the URL_path prepended with the instance name. + arguments: Dict[str, Any] + if the instruction invokes a method, arguments of that method. + context: Dict[str, Any] + see execution context definitions + raise_client_side_exceptions: bool, default False + raise exceptions from server on client side + server_timeout: float, default 3 + server side timeout + client_timeout: float, default None + client side timeout, not the same as timeout passed to server, recommended to be None in general cases. + Server side timeouts ensure start of execution of instructions within specified timeouts and + drops execution altogether if timeout occured. Client side timeouts only wait for message to come within + the timeout, but do not gaurantee non-execution. + + """ + message_id = await self.async_send_instruction(instance_name, instruction, arguments, server_timeout, context) + return await self.async_recv_reply(message_id, False, raise_client_side_exception, client_timeout) def start_polling(self) -> None: + """ + register the server message polling loop in the asyncio event loop. 
+ """ event_loop = asyncio.get_event_loop() event_loop.call_soon(lambda: asyncio.create_task(self.poll())) async def stop_polling(self): + """ + stop polling for replies from server + """ self.stop_poll = True - async def ping_all_servers(self): - replies : List[Dict[str, Any]] = await asyncio.gather(*[ - self.async_execute(instance_name, '/ping') for instance_name in self.pool.keys()]) - sorted_reply = dict() - for reply, instance_name in zip(replies, self.pool.keys()): - sorted_reply[instance_name] = reply.get("returnValue", False if reply.get("exception", None) is None else True) # type: ignore - return sorted_reply + # async def ping_all_servers(self): + # """ + # ping all servers connected to the client pool + # """ + # replies = await asyncio.gather(*[self.async_execute( + # instance_name, '/ping') for instance_name in self.pool.keys()]) # type: typing.List[typing.Dict[str, typing.Any]] + # sorted_reply = dict() + # for reply, instance_name in zip(replies, self.pool.keys()): + # sorted_reply[instance_name] = reply.get("returnValue", False if reply.get("exception", None) is None else True) # type: ignore + # return sorted_reply + + # def organised_gathered_replies(self, instance_names : List[str], gathered_replies : List[Any], context : Dict[str, Any] = EMPTY_DICT): + # """ + # First thing tomorrow + # """ + # plain_reply = context.pop('plain_reply', False) + # if not plain_reply: + # replies = dict( + # returnValue = dict(), + # state = dict() + # ) + # for instance_name, reply in zip(instance_names, gathered_replies): + # replies["state"].update(reply["state"]) + # replies["returnValue"][instance_name] = reply.get("returnValue", reply.get("exception", None)) + # else: + # replies = {} + # for instance_name, reply in zip(instance_names, gathered_replies): + # replies[instance_name] = reply + # return replies + + # async def async_execute_in_all(self, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, + # context : Dict[str, Any] = EMPTY_DICT, 
raise_client_side_exception = False) -> Dict[str, Any]: + # instance_names = self.pool.keys() + # gathered_replies = await asyncio.gather(*[ + # self.async_execute(instance_name, instruction, arguments, context, raise_client_side_exception) for instance_name in instance_names]) + # return self.organised_gathered_replies(instance_names, gathered_replies, context) + + # async def async_execute_in_all_remote_objects(self, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, + # context : Dict[str, Any] = EMPTY_DICT, raise_client_side_exception = False) -> Dict[str, Any]: + # instance_names = [instance_name for instance_name, client in self.pool.items() if client.server_type == ServerTypes.USER_REMOTE_OBJECT] + # gathered_replies = await asyncio.gather(*[ + # self.async_execute(instance_name, instruction, arguments, context, raise_client_side_exception) for instance_name in instance_names]) + # return self.organised_gathered_replies(instance_names, gathered_replies, context) + + # async def async_execute_in_all_eventloops(self, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, + # context : Dict[str, Any] = EMPTY_DICT, raise_client_side_exception = False) -> Dict[str, Any]: + # instance_names = [instance_name for instance_name, client in self.pool.items() if client.server_type == ServerTypes.EVENTLOOP] + # gathered_replies = await asyncio.gather(*[ + # self.async_execute(instance_name, instruction, arguments, context, raise_client_side_exception) for instance_name in instance_names]) + # return self.organised_gathered_replies(instance_names, gathered_replies, context) + + def __contains__(self, name : str) -> bool: + return name in self.pool - def organised_gathered_replies(self, instance_names : List[str], gathered_replies : List[Any], context : Dict[str, Any] = EMPTY_DICT): - """ - First thing tomorrow - """ - plain_reply = context.pop('plain_reply', False) - if not plain_reply: - replies = dict( - returnValue = dict(), - state = dict() - ) - for 
instance_name, reply in zip(instance_names, gathered_replies): - replies["state"].update(reply["state"]) - replies["returnValue"][instance_name] = reply.get("returnValue", reply.get("exception", None)) - else: - replies = {} - for instance_name, reply in zip(instance_names, gathered_replies): - replies[instance_name] = reply - return replies - - async def async_execute_in_all(self, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, - context : Dict[str, Any] = EMPTY_DICT, raise_client_side_exception = False) -> Dict[str, Any]: - instance_names = self.pool.keys() - gathered_replies = await asyncio.gather(*[ - self.async_execute(instance_name, instruction, arguments, context, raise_client_side_exception) for instance_name in instance_names]) - return self.organised_gathered_replies(instance_names, gathered_replies, context) + def __getitem__(self, key) ->AsyncZMQClient: + return self.pool[key] - async def async_execute_in_all_remote_objects(self, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, - context : Dict[str, Any] = EMPTY_DICT, raise_client_side_exception = False) -> Dict[str, Any]: - instance_names = [instance_name for instance_name, client in self.pool.items() if client.server_type == ServerTypes.USER_REMOTE_OBJECT] - gathered_replies = await asyncio.gather(*[ - self.async_execute(instance_name, instruction, arguments, context, raise_client_side_exception) for instance_name in instance_names]) - return self.organised_gathered_replies(instance_names, gathered_replies, context) + def __iter__(self) -> typing.Iterator[AsyncZMQClient]: + return iter(self.pool.values()) - async def async_execute_in_all_eventloops(self, instruction : str, arguments : Dict[str, Any] = EMPTY_DICT, - context : Dict[str, Any] = EMPTY_DICT, raise_client_side_exception = False) -> Dict[str, Any]: - instance_names = [instance_name for instance_name, client in self.pool.items() if client.server_type == ServerTypes.EVENTLOOP] - gathered_replies = await asyncio.gather(*[ 
- self.async_execute(instance_name, instruction, arguments, context, raise_client_side_exception) for instance_name in instance_names]) - return self.organised_gathered_replies(instance_names, gathered_replies, context) + def exit(self) -> None: + for client in self.pool.values(): + self.poller.unregister(client.socket) + client.exit() + self.logger.info("all client socket unregistered from pool for '{}'".format(self.__class__)) + try: + self.context.term() + self.logger.info("context terminated for '{}'".format(self.__class__)) + except: + pass -class EventPool: +class AsyncioEventPool: """ creates a pool of asyncio Events to be used as a synchronisation object for MessageMappedClientPool + + Parameters + ---------- + initial_number_of_events: int + initial pool size of events """ def __init__(self, initial_number_of_events : int) -> None: @@ -1367,6 +1860,9 @@ def __init__(self, initial_number_of_events : int) -> None: self.size = initial_number_of_events def pop(self) -> asyncio.Event: + """ + pop an event, new one is created if nothing left in pool + """ try: event = self.pool.pop(0) except IndexError: @@ -1376,6 +1872,9 @@ def pop(self) -> asyncio.Event: return event def completed(self, event : asyncio.Event) -> None: + """ + put an event back into the pool + """ self.pool.append(event) @@ -1426,8 +1925,8 @@ def push(self, data : typing.Any = None): class EventPublisher(BaseZMQServer): - def __init__(self, identity : str, context : Union[zmq.Context, None] = None, **serializer) -> None: - super().__init__(server_type = ServerTypes.UNKNOWN_TYPE, **serializer) + def __init__(self, identity : str, context : typing.Union[zmq.Context, None] = None, **serializer) -> None: + super().__init__(server_type = ServerTypes.UNKNOWN_TYPE.value, **serializer) self.context = context or zmq.Context() self.identity = identity self.socket = self.context.socket(zmq.PUB) @@ -1445,8 +1944,8 @@ def __init__(self, identity : str, context : Union[zmq.Context, None] = None, * 
self.logger = self.get_logger(identity, self.socket_address, logging.DEBUG, self.__class__.__name__) self.logger.info("created event publishing socket at {}".format(self.socket_address)) break - self.events : Set[Event] = set() - self.event_ids : Set[bytes] = set() + self.events = set() # type: typing.Set[Event] + self.event_ids = set() # type: typing.Set[bytes] def register_event(self, event : Event) -> None: # unique_str_bytes = bytes(unique_str, encoding = 'utf-8') @@ -1459,7 +1958,7 @@ def register_event(self, event : Event) -> None: self.events.add(event) self.logger.info("registered event '{}' serving at PUB socket with address : {}".format(event.name, self.socket_address)) - def publish_event(self, unique_str : bytes, data : Any, serialize : bool = True) -> None: + def publish_event(self, unique_str : bytes, data : typing.Any, serialize : bool = True) -> None: if unique_str in self.event_ids: self.socket.send_multipart([unique_str, self.json_serializer.dumps(data) if serialize else data]) else: @@ -1467,7 +1966,7 @@ def publish_event(self, unique_str : bytes, data : Any, serialize : bool = True) def exit(self): try: - self.socket.close() + self.socket.close(0) self.logger.info("terminated event publishing socket with address '{}'".format(self.socket_address)) except Exception as E: self.logger.warn("could not properly terminate context or attempted to terminate an already terminated context at address '{}'. Exception message : {}".format( @@ -1503,7 +2002,7 @@ async def receive_event(self, deserialize = False): def exit(self): try: - self.socket.close() + self.socket.close(0) self.logger.info("terminated event consuming socket with address '{}'".format(self.socket_address)) except Exception as E: self.logger.warn("could not properly terminate context or attempted to terminate an already terminated context at address '{}'. 
Exception message : {}".format( @@ -1518,5 +2017,4 @@ def exit(self): __all__ = ['ServerTypes', 'AsyncZMQServer', 'AsyncPollingZMQServer', 'ZMQServerPool', 'RPCServer', - 'SyncZMQClient', 'AsyncZMQClient', 'AsyncZMQClientPool', 'MessageMappedZMQClientPool', - 'Event', 'CriticalEvent'] \ No newline at end of file + 'SyncZMQClient', 'AsyncZMQClient', 'MessageMappedZMQClientPool', 'Event', 'CriticalEvent'] \ No newline at end of file From c3a82f6fdae5fe63907555785244982aaba2e843 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 17 Feb 2024 19:12:27 +0100 Subject: [PATCH 027/167] ZMQ docs for all server and client classes --- .../server/zmq_message_brokers/base_zmq.rst | 8 ++++++++ .../autodoc/server/zmq_message_brokers/index.rst | 3 ++- .../server/zmq_message_brokers/zmq_client.rst | 16 ++++++++++++++++ .../{base_zmq_server.rst => zmq_server.rst} | 7 +++++-- 4 files changed, 31 insertions(+), 3 deletions(-) create mode 100644 doc/source/autodoc/server/zmq_message_brokers/zmq_client.rst rename doc/source/autodoc/server/zmq_message_brokers/{base_zmq_server.rst => zmq_server.rst} (53%) diff --git a/doc/source/autodoc/server/zmq_message_brokers/base_zmq.rst b/doc/source/autodoc/server/zmq_message_brokers/base_zmq.rst index 70121e4..a930ab7 100644 --- a/doc/source/autodoc/server/zmq_message_brokers/base_zmq.rst +++ b/doc/source/autodoc/server/zmq_message_brokers/base_zmq.rst @@ -15,5 +15,13 @@ BaseZMQ :show-inheritance: .. autoclass:: hololinked.server.zmq_message_brokers.BaseAsyncZMQ + :members: + :show-inheritance: + +.. autoclass:: hololinked.server.zmq_message_brokers.BaseAsyncZMQServer + :members: + :show-inheritance: + +.. 
autoclass:: hololinked.server.zmq_message_brokers.BaseZMQClient :members: :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/zmq_message_brokers/index.rst b/doc/source/autodoc/server/zmq_message_brokers/index.rst index 6539580..abbb89a 100644 --- a/doc/source/autodoc/server/zmq_message_brokers/index.rst +++ b/doc/source/autodoc/server/zmq_message_brokers/index.rst @@ -24,8 +24,9 @@ See documentation of ``RPCServer`` for details. :maxdepth: 1 base_zmq - base_zmq_server + zmq_server rpc_server + zmq_client diff --git a/doc/source/autodoc/server/zmq_message_brokers/zmq_client.rst b/doc/source/autodoc/server/zmq_message_brokers/zmq_client.rst new file mode 100644 index 0000000..0cf83b2 --- /dev/null +++ b/doc/source/autodoc/server/zmq_message_brokers/zmq_client.rst @@ -0,0 +1,16 @@ +.. |br| raw:: html + +
+ + +ZMQ Clients +=========== + +.. autoclass:: hololinked.server.zmq_message_brokers.SyncZMQClient + :members: + :show-inheritance: + +.. autoclass:: hololinked.server.zmq_message_brokers.MessageMappedZMQClientPool + :members: + :show-inheritance: + diff --git a/doc/source/autodoc/server/zmq_message_brokers/base_zmq_server.rst b/doc/source/autodoc/server/zmq_message_brokers/zmq_server.rst similarity index 53% rename from doc/source/autodoc/server/zmq_message_brokers/base_zmq_server.rst rename to doc/source/autodoc/server/zmq_message_brokers/zmq_server.rst index 43527a4..3390224 100644 --- a/doc/source/autodoc/server/zmq_message_brokers/base_zmq_server.rst +++ b/doc/source/autodoc/server/zmq_message_brokers/zmq_server.rst @@ -6,11 +6,14 @@ ZMQ Servers =========== +.. autoclass:: hololinked.server.zmq_message_brokers.AsyncZMQServer + :members: + :show-inheritance: -.. autoclass:: hololinked.server.zmq_message_brokers.BaseZMQServer +.. autoclass:: hololinked.server.zmq_message_brokers.AsyncPollingZMQServer :members: :show-inheritance: -.. autoclass:: hololinked.server.zmq_message_brokers.AsyncZMQServer +.. 
autoclass:: hololinked.server.zmq_message_brokers.ZMQServerPool :members: :show-inheritance: From 597fa4ef84b4576da9951cc016c4d07f2e52dc06 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 17 Feb 2024 23:15:41 +0100 Subject: [PATCH 028/167] integrated RPC server to event loop, handshake to inner socket from external message listener works --- hololinked/server/data_classes.py | 13 +- hololinked/server/eventloop.py | 149 ++++++----- hololinked/server/remote_object.py | 308 ++++++++++++----------- hololinked/server/zmq_message_brokers.py | 40 +-- 4 files changed, 275 insertions(+), 235 deletions(-) diff --git a/hololinked/server/data_classes.py b/hololinked/server/data_classes.py index 7f0f912..cbfe045 100644 --- a/hololinked/server/data_classes.py +++ b/hololinked/server/data_classes.py @@ -66,7 +66,7 @@ def __init__(self, **kwargs) -> None: for key, value in kwargs.items(): setattr(self, key, value) - def to_dataclass(self, obj : typing.Optional[typing.Any] = None) -> "RemoteResource": + def to_dataclass(self, obj : typing.Any = None, bound_obj : typing.Any = None) -> "RemoteResource": """ For a plain, faster and uncomplicated access, a dataclass in created & used by the event loop. 
@@ -80,11 +80,10 @@ def to_dataclass(self, obj : typing.Optional[typing.Any] = None) -> "RemoteResou RemoteResource dataclass equivalent of this object """ - return RemoteResource(URL_path=self.URL_path, http_method=self.http_method, + return RemoteResource( state=tuple(self.state) if self.state is not None else None, obj_name=self.obj_name, iscallable=self.iscallable, iscoroutine=self.iscoroutine, - isparameter=self.isparameter, http_request_as_argument=self.http_request_as_argument, - obj=obj) + isparameter=self.isparameter, obj=obj, bound_obj=bound_obj) # http method is manually always stored as a tuple @@ -108,8 +107,8 @@ class RemoteResource: iscallable : bool iscoroutine : bool isparameter : bool - request_as_argument : bool obj : typing.Any + bound_obj : typing.Any def json(self): """ @@ -156,6 +155,7 @@ class HTTPResource: what : str instance_name : str instruction : str + fullpath : str request_as_argument : bool = field(default=False) path_format : typing.Optional[str] = field(default=None) path_regex : typing.Optional[typing.Pattern] = field(default=None) @@ -234,11 +234,10 @@ class RPCResource: qualname : str doc : typing.Optional[str] - def __init__(self, *, what : str, instance_name : str, fullpath : str, instruction : str, name : str, + def __init__(self, *, what : str, instance_name : str, instruction : str, name : str, qualname : str, doc : str) -> None: self.what = what self.instance_name = instance_name - self.fullpath = fullpath self.instruction = instruction self.name = name self.qualname = qualname diff --git a/hololinked/server/eventloop.py b/hololinked/server/eventloop.py index 7809aaa..671ec4d 100644 --- a/hololinked/server/eventloop.py +++ b/hololinked/server/eventloop.py @@ -1,3 +1,4 @@ +import os import subprocess import asyncio import traceback @@ -44,7 +45,7 @@ class EventLoop(RemoteObject): server_type = ServerTypes.EVENTLOOP remote_objects = TypedList(item_type=(RemoteObject, Consumer), bounds=(0,100), allow_None=True, default=None, 
- doc="""list of RemoteObjects which are being executed""") + doc="""list of RemoteObjects which are being executed""") #type: typing.List[RemoteObject] # Remote Parameters uninstantiated_remote_objects = TypedDict(default=None, allow_None=True, key_type=str, @@ -59,20 +60,20 @@ def __init__(self, *, instance_name : str, remote_objects : typing.Union[RemoteObject, Consumer, typing.List[typing.Union[RemoteObject, Consumer]]] = list(), # type: ignore - requires covariant types log_level : int = logging.INFO, **kwargs) -> None: super().__init__(instance_name=instance_name, remote_objects=remote_objects, log_level=log_level, **kwargs) - self._message_broker_pool : ZMQServerPool = ZMQServerPool(instance_names=None, - # create empty pool as message brokers are already created - proxy_serializer=self.proxy_serializer, json_serializer=self.json_serializer) + # self._message_broker_pool : ZMQServerPool = ZMQServerPool(instance_names=None, + # # create empty pool as message brokers are already created + # proxy_serializer=self.proxy_serializer, json_serializer=self.json_serializer) remote_objects : typing.List[RemoteObject] = [self] if self.remote_objects is not None: for consumer in self.remote_objects: if isinstance(consumer, RemoteObject): remote_objects.append(consumer) consumer.object_info.eventloop_name = self.instance_name - self._message_broker_pool.register_server(consumer.message_broker) + # self._message_broker_pool.register_server(consumer.message_broker) elif isinstance(consumer, Consumer): instance = consumer.consumer(*consumer.args, **consumer.kwargs, eventloop_name = self.instance_name) - self._message_broker_pool.register_server(instance.message_broker) + # self._message_broker_pool.register_server(instance.message_broker) remote_objects.append(instance) self.remote_objects = remote_objects # re-assign the instantiated objects as well self.uninstantiated_remote_objects = {} @@ -84,28 +85,41 @@ def __post_init__(self): @property def message_broker_pool(self): 
+ # raise NotImplementedError("message broker pool currently not created and unavailable for access.") return self._message_broker_pool + # example of overloading + @post('/exit') + def exit(self): + raise BreakAllLoops + + @get('/remote-objects') def servers(self): return { instance.__class__.__name__ : instance.instance_name for instance in self.remote_objects } - # example of overloading - @post('/exit') - def exit(self): - raise BreakAllLoops - + + @post('/remote-objects') + def import_remote_object(self, file_name : str, object_name : str): + consumer = self._import_remote_object_module(file_name, object_name) + id = unique_id() + self.uninstantiated_remote_objects[id] = consumer + return dict( + id = id, + db_params = consumer.parameters.remote_objects_webgui_info(consumer.parameters.load_at_init_objects()) + ) + @classmethod - def import_remote_object(cls, file_name : str, object_name : str): - module_name = file_name.split('\\')[-1] - spec = importlib.util.spec_from_file_location(module_name, file_name) # type: ignore + def _import_remote_object_module(cls, file_name : str, object_name : str): + module_name = file_name.split(os.sep)[-1] + spec = importlib.util.spec_from_file_location(module_name, file_name) if spec is not None: - module = importlib.util.module_from_spec(spec) # type: ignore + module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) else: - module = importlib.import_module(module_name, file_name.split('\\')[0]) + module = importlib.import_module(module_name, file_name.split(os.sep)[0]) consumer = getattr(module, object_name) if issubclass(consumer, RemoteObject): @@ -114,21 +128,12 @@ def import_remote_object(cls, file_name : str, object_name : str): raise ValueError(wrap_text(f"""object name {object_name} in {file_name} not a subclass of RemoteObject. Only subclasses are accepted (not even instances). 
Given object : {consumer}""")) - @post('/remote-object/import') - def _import_remote_object(self, file_name : str, object_name : str): - consumer = self.import_remote_object(file_name, object_name) - id = unique_id() - self.uninstantiated_remote_objects[id] = consumer - return dict( - id = id, - db_params = consumer.parameters.remote_objects_webgui_info(consumer.parameters.load_at_init_objects()) - ) - - @post('/remote-object/instantiate') + @post('/remote-objects/instantiate') def instantiate(self, file_name : str, object_name : str, kwargs : typing.Dict = {}): - consumer = self.import_remote_object(file_name, object_name) - instance = consumer(**kwargs, eventloop_name = self.instance_name) - self.register_new_consumer(instance) + # consumer = self.import_remote_object(file_name, object_name) + # instance = consumer(**kwargs, eventloop_name=self.instance_name) + # self.register_new_consumer(instance) + raise NotImplementedError("Instantiation is not yet possible") def register_new_consumer(self, instance : RemoteObject): zmq_server = AsyncPollingZMQServer(instance_name=instance.instance_name, server_type=ServerTypes.USER_REMOTE_OBJECT, @@ -141,20 +146,37 @@ def register_new_consumer(self, instance : RemoteObject): async_loop.call_soon(lambda : asyncio.create_task(self.run_single_target(instance))) def run(self): - self._message_listener = threading.Thread(target=self._run_external_message_listener) - self._message_listener.start() self._remote_object_executor = threading.Thread(target=self._run_remote_object_executor) self._remote_object_executor.start() + self._run_external_message_listener() + self._remote_object_executor.join() def _run_external_message_listener(self): - async_loop = asyncio.get_event_loop() - async_loop.run_until_complete( - asyncio.gather()) - self.logger.info("exiting event loop {}".format(self.instance_name)) + """ + Runs ZMQ's sockets which are visible to clients + """ + if threading.current_thread() != threading.main_thread(): + 
async_loop = asyncio.new_event_loop() + asyncio.set_event_loop(async_loop) + else: + async_loop = asyncio.get_event_loop() + rpc_servers = [remote_object._rpc_server for remote_object in self.remote_objects] + methods = [] #type: typing.List[asyncio.Future] + for rpc_server in rpc_servers: + methods.append(rpc_server.poll()) + methods.append(rpc_server.tunnel_message_to_remote_objects()) + self.logger.info("starting external message listener thread") + async_loop.run_until_complete(asyncio.gather(*methods)) + self.logger.info("exiting external listener event loop {}".format(self.instance_name)) async_loop.close() def _run_remote_object_executor(self): - async_loop = asyncio.get_event_loop() + if threading.current_thread() != threading.main_thread(): + async_loop = asyncio.new_event_loop() + asyncio.set_event_loop(async_loop) + else: + async_loop = asyncio.get_event_loop() + self.logger.info("starting remote object executor thread") async_loop.run_until_complete( asyncio.gather( *[self.run_single_target(instance) @@ -169,7 +191,7 @@ async def run_single_target(cls, instance : RemoteObject) -> None: while True: instructions = await instance.message_broker.async_recv_instructions() for instruction in instructions: - client, _, client_type, _, msg_id, instruction_str, arguments, context = instruction + client, _, client_type, _, msg_id, _, instruction_str, arguments, context = instruction plain_reply = context.pop("plain_reply", False) fetch_execution_logs = context.pop("fetch_execution_logs", False) if not plain_reply and fetch_execution_logs: @@ -183,7 +205,6 @@ async def run_single_target(cls, instance : RemoteObject) -> None: return_value = await cls.execute_once(instance_name, instance, instruction_str, arguments) #type: ignore if not plain_reply: return_value = { - "responseStatusCode" : 200, "returnValue" : return_value, "state" : { instance_name : instance.state() @@ -199,7 +220,6 @@ async def run_single_target(cls, instance : RemoteObject) -> None: 
return_value = None if not plain_reply: return_value = { - "responseStatusCode" : 200, "returnValue" : None, "state" : { instance_name : instance.state() @@ -209,19 +229,17 @@ async def run_single_target(cls, instance : RemoteObject) -> None: return_value["logs"] = list_handler.log_list await instance.message_broker.async_send_reply(instruction, return_value) - return - except Exception as E: + except Exception as ex: instance.logger.error("RemoteObject {} with instance name {} produced error : {}.".format( - instance.__class__.__name__, instance_name, E)) + instance.__class__.__name__, instance_name, ex)) return_value = { - "message" : str(E), - "type" : repr(E).split('(', 1)[0], + "message" : str(ex), + "type" : repr(ex).split('(', 1)[0], "traceback" : traceback.format_exc().splitlines(), - "notes" : E.__notes__ if hasattr(E, "__notes__") else None + "notes" : ex.__notes__ if hasattr(ex, "__notes__") else None } if not plain_reply: return_value = { - "responseStatusCode" : 500, "exception" : return_value, "state" : { instance_name : instance.state() @@ -230,47 +248,46 @@ async def run_single_target(cls, instance : RemoteObject) -> None: if fetch_execution_logs: return_value["logs"] = list_handler.log_list await instance.message_broker.async_send_reply(instruction, return_value) - if fetch_execution_logs: + if not plain_reply and fetch_execution_logs: instance.logger.removeHandler(list_handler) @classmethod async def execute_once(cls, instance_name : str, instance : RemoteObject, instruction_str : str, arguments : typing.Dict[str, typing.Any]) -> typing.Dict[str, typing.Any]: - scadapy = instance.instance_resources[instruction_str] - if scadapy.iscallable: - if scadapy.state is None or (hasattr(instance, 'state_machine') and - instance.state_machine.current_state in scadapy.state): + resource = instance.instance_resources[instruction_str] + if resource.iscallable: + if resource.state is None or (hasattr(instance, 'state_machine') and + 
instance.state_machine.current_state in resource.state): # Note that because we actually find the resource within __prepare_instance__, its already bound # and we dont have to separately bind it. - func = scadapy.obj - if not scadapy.http_request_as_argument: - arguments.pop('request', None) - if scadapy.iscoroutine: + func = resource.obj + if resource.iscoroutine: return await func(**arguments) else: return func(**arguments) else: raise StateMachineError("RemoteObject '{}' is in '{}' state, however command can be executed only in '{}' state".format( - instance_name, instance.state(), scadapy.state)) + instance_name, instance.state(), resource.state)) - elif scadapy.isparameter: + elif resource.isparameter: action = instruction_str.split('/')[-1] - parameter : RemoteParameter = scadapy.obj - owner_inst : RemoteSubobject = scadapy.bound_obj + parameter : RemoteParameter = resource.obj + owner_inst : RemoteObject = resource.bound_obj if action == WRITE: - if scadapy.state is None or (hasattr(instance, 'state_machine') and - instance.state_machine.current_state in scadapy.state): - parameter.__set__(owner_inst, arguments["value"]) + if resource.state is None or (hasattr(instance, 'state_machine') and + instance.state_machine.current_state in resource.state): + return parameter.__set__(owner_inst, arguments["value"]) else: raise StateMachineError("RemoteObject {} is in `{}` state, however attribute can be written only in `{}` state".format( - instance_name, instance.state_machine.current_state, scadapy.state)) - return parameter.__get__(owner_inst, type(owner_inst)) + instance_name, instance.state_machine.current_state, resource.state)) + else: + return parameter.__get__(owner_inst, type(owner_inst)) raise NotImplementedError("Unimplemented execution path for RemoteObject {} for instruction {}".format(instance_name, instruction_str)) def fork_empty_eventloop(instance_name : str, logfile : typing.Union[str, None] = None, python_command : str = 'python', condaenv : 
typing.Union[str, None] = None, prefix_command : typing.Union[str, None] = None): - command_str = '{}{}{}-c "from scadapy.server import EventLoop; E = EventLoop({}); E.run();"'.format( + command_str = '{}{}{}-c "from hololinked.server import EventLoop; E = EventLoop({}); E.run();"'.format( f'{prefix_command} ' if prefix_command is not None else '', f'call conda activate {condaenv} && ' if condaenv is not None else '', f'{python_command} ', diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index b5382d2..98593de 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -14,6 +14,7 @@ from sqlalchemy import (Integer as DBInteger, String as DBString, JSON as DB_JSON, LargeBinary as DBBinary) from sqlalchemy import select from sqlalchemy.orm import Mapped, mapped_column, DeclarativeBase, MappedAsDataclass +import zmq from ..param.parameterized import Parameterized, ParameterizedMetaclass @@ -33,7 +34,7 @@ from .remote_parameter import FileServer, PlotlyFigure, ReactApp, RemoteParameter, RemoteClassParameters, Image from .remote_parameters import (Integer, String, ClassSelector, TupleSelector, TypedDict, Boolean, Selector, TypedKeyMappingsConstrainedDict ) -from .zmq_message_brokers import ServerTypes, EventPublisher, AsyncPollingZMQServer, Event +from .zmq_message_brokers import RPCServer, ServerTypes, EventPublisher, AsyncPollingZMQServer, Event @@ -313,15 +314,9 @@ def parameters(mcs) -> RemoteClassParameters: return mcs._param_container - -class RemoteObject(Parameterized, metaclass=RemoteObjectMetaclass): - """ - Expose your python classes for HTTP methods & RPC clients by subclassing from here. 
- """ - __server_type__ = ServerTypes.REMOTE_OBJECT - state_machine : StateMachine +class RemoteSubobject(Parameterized, metaclass=RemoteObjectMetaclass): - # objects given by user which we need to validate: + # local parameters instance_name = String(default=None, regex=r'[A-Za-z]+[A-Za-z_0-9\-\/]*', constant=True, remote=False, doc="""Unique string identifier of the instance. This value is used for many operations, for example - creating zmq socket address, tables in databases, and to identify the instance @@ -329,32 +324,19 @@ class RemoteObject(Parameterized, metaclass=RemoteObjectMetaclass): (http(s)://{domain and sub domain}/{instance name}). It is suggested to use the class name along with a unique name {class name}/{some unique name}. Instance names must be unique in your entire system.""") # type: str - logger = ClassSelector(class_=logging.Logger, default=None, allow_None=True, remote=False, - doc = """Logger object to print log messages, should be instance of logging.Logger(). default - logger is created if none is supplied.""") # type: logging.Logger - rpc_serializer = ClassSelector(class_=(SerpentSerializer, JSONSerializer, PickleSerializer, str), # DillSerializer, - default='json', remote=False, - doc="""The serializer that will be used for passing messages in zmq. For custom data - types which have serialization problems, you can subclass the serializers and implement - your own serialization options. 
Recommended serializer for exchange messages between - Proxy clients and server is Serpent and for HTTP serializer and server is JSON.""") # type: BaseSerializer - json_serializer = ClassSelector(class_=JSONSerializer, default=None, allow_None=True, remote=False, - doc = """Serializer used for sending messages between HTTP server and remote object, - subclass JSONSerializer to implement undealt serialization options.""") # type: JSONSerializer - - # remote paramaters - object_info = RemoteParameter(doc="contains information about this object like the class name, script location etc.", - readonly=True, URL_path='/info', fget = lambda self: self._object_info) # type: RemoteObjectDB.RemoteObjectInfo - events = RemoteParameter(readonly=True, URL_path='/events', - doc="returns a dictionary with two fields containing event name and event information") # type: typing.Dict[str, typing.Any] httpserver_resources = RemoteParameter(readonly=True, URL_path='/resources/http', doc="""object's resources exposed to HTTP server""", fget=lambda self: self._httpserver_resources ) # type: typing.Dict[str, typing.Dict[str, HTTPResource]] rpc_resources = RemoteParameter(readonly=True, URL_path='/resources/object-proxy', doc= """object's resources exposed to RPC client, similar to HTTP resources but differs in details.""", fget=lambda self: self._rpc_resources) # type: typing.Dict[str, typing.Any] + # remote paramerters + events = RemoteParameter(readonly=True, URL_path='/events', + doc="returns a dictionary with two fields containing event name and event information") # type: typing.Dict[str, typing.Any] gui_resources : typing.Dict = RemoteParameter(readonly=True, URL_path='/resources/gui', doc= """object's data read by scadapy webdashboard GUI client, similar to http_resources but differs in details.""") # type: typing.Dict[str, typing.Any] + object_info = RemoteParameter(doc="contains information about this object like the class name, script location etc.", + readonly=True, 
URL_path='/info', fget = lambda self: self._object_info) # type: RemoteObjectDB.RemoteObjectInfo GUI = ClassSelector(class_=ReactApp, default=None, allow_None=True, doc= """GUI applied here will become visible at GUI tab of dashboard tool""") # type: typing.Optional[ReactApp] @@ -373,33 +355,16 @@ def __new__(cls, **kwargs): obj._internal_fixed_attributes = ['_internal_fixed_attributes', 'instance_resources', '_owner'] # objects given by user which we need to validate (mostly descriptors) return obj + - def __init__(self, instance_name : str, logger : typing.Optional[logging.Logger] = None, log_level : typing.Optional[int] = None, - log_file : typing.Optional[str] = None, logger_remote_access : bool = True, - rpc_serializer : typing.Optional[BaseSerializer] = None, json_serializer : typing.Optional[JSONSerializer] = None, - server_protocols : typing.Optional[typing.Union[typing.List[ZMQ_PROTOCOLS], typing.Tuple[ZMQ_PROTOCOLS], ZMQ_PROTOCOLS]] = None, - db_config_file : typing.Optional[str] = None, **params) -> None: + def __init__(self, instance_name : str, **params): + super().__init__(instance_name=instance_name, **params) + + def __post_init__(self): self._internal_fixed_attributes : typing.List[str] self._owner : typing.Optional[RemoteObject] - - super().__init__(instance_name=instance_name, logger=logger, rpc_serializer=rpc_serializer, - json_serializer=json_serializer, **params) - self._prepare_logger(log_file=log_file, log_level=log_level, remote_access=logger_remote_access) - self._prepare_message_brokers(server_protocols=server_protocols, rpc_serializer=rpc_serializer, - json_serializer=json_serializer) - self._prepare_state_machine() - self._prepare_DB(db_config_file) - - - def __post_init__(self): - # Never create events before _prepare_instance(), no checks in place - self._prepare_resources() - self._write_parameters_from_DB() - self.logger.info("initialialised RemoteObject class {} with instance name {}".format( - self.__class__.__name__, 
self.instance_name)) - def __setattr__(self, __name: str, __value: typing.Any) -> None: if __name == '_internal_fixed_attributes' or __name in self._internal_fixed_attributes: @@ -414,36 +379,6 @@ def __setattr__(self, __name: str, __value: typing.Any) -> None: super().__setattr__(__name, __value) - def _prepare_logger(self, log_level : int, log_file : str, remote_access : bool = True): - if self.logger is None: - self.logger = create_default_logger('{}/{}'.format(self.__class__.__name__, self.instance_name), - logging.INFO if not log_level else log_level, - None if not log_file else log_file) - if remote_access: - if not any(isinstance(handler, RemoteAccessHandler) - for handler in self.logger.handlers): - self._remote_access_loghandler = RemoteAccessHandler(instance_name='logger', maxlen=500, emit_interval=1) - self.logger.addHandler(self._remote_access_loghandler) - else: - for handler in self.logger.handlers: - if isinstance(handler, RemoteAccessHandler): - self._remote_access_loghandler = handler - - - def _prepare_message_brokers(self, protocols : typing.Optional[typing.Union[typing.Iterable[ZMQ_PROTOCOLS], ZMQ_PROTOCOLS]]): - self.message_broker = AsyncPollingZMQServer( - instance_name=self.instance_name, - server_type=self.__server_type__, - protocols=ZMQ_PROTOCOLS.INPROC, - json_serializer=self.json_serializer, - rpc_serializer=self.rpc_serializer - ) - self.json_serializer = self.message_broker.json_serializer - self.rpc_serializer = self.message_broker.rpc_serializer - self.event_publisher = EventPublisher(identity=self.instance_name, rpc_serializer=self.rpc_serializer, - json_serializer=self.json_serializer) - - def _prepare_resources(self): """ this function analyses the members of the class which have 'scadapy' variable declared @@ -486,24 +421,22 @@ def _prepare_resources(self): ) rpc_resources[fullpath] = RPCResource( what=CALLABLE, + instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, instruction=fullpath, - 
module=getattr(resource, '__module__'), name=getattr(resource, '__name__'), qualname=getattr(resource, '__qualname__'), doc=getattr(resource, '__doc__'), - kwdefaults=getattr(resource, '__kwdefaults__'), - defaults=getattr(resource, '__defaults__'), ) - instance_resources[fullpath] = remote_info.to_dataclass(obj=resource) + instance_resources[fullpath] = remote_info.to_dataclass(obj=resource, bound_obj=self) # Other remote objects - for name, resource in inspect.getmembers(self, lambda o : isinstance(o, RemoteObject)): + for name, resource in inspect.getmembers(self, lambda o : isinstance(o, RemoteSubobject)): if name == '_owner': continue - assert isinstance(resource, RemoteObject), ("remote object children query from inspect.ismethod is not a RemoteObject", + assert isinstance(resource, RemoteSubobject), ("remote object children query from inspect.ismethod is not a RemoteObject", "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report") # above assertion is only a typing convenience resource._owner = self - resource._prepare_instance() + resource._prepare_resources() for http_method, resources in resource.httpserver_resources.items(): httpserver_resources[http_method].update(resources) rpc_resources.update(resource.rpc_resources) @@ -552,25 +485,23 @@ def _prepare_resources(self): rpc_resources[fullpath] = RPCResource( what=ATTRIBUTE, + instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, instruction=fullpath, - module=__file__, doc=parameter.__doc__, name=remote_info.obj_name, qualname=self.__class__.__name__ + '.' 
+ remote_info.obj_name, # qualname is not correct probably, does not respect inheritance - kwdefaults=None, - defaults=None, ) - dclass = remote_info.to_dataclass(obj=parameter) + dclass = remote_info.to_dataclass(obj=parameter, bound_obj=self) instance_resources[fullpath+'/'+READ] = dclass instance_resources[fullpath+'/'+WRITE] = dclass # The above for-loops can be used only once, the division is only for readability # following are in _internal_fixed_attributes - allowed to set only once self._rpc_resources = rpc_resources self._httpserver_resources = httpserver_resources - self.instance_resources = instance_resources + self.instance_resources = instance_resources - + def _create_object_info(self, script_path : typing.Optional[str] = None): if not script_path: try: @@ -584,62 +515,28 @@ def _create_object_info(self, script_path : typing.Optional[str] = None): http_server = ConfigInfo.USER_MANAGED.name, args = ConfigInfo.USER_MANAGED.name, kwargs = ConfigInfo.USER_MANAGED.name, - eventloop_name = self.eventloop_name, - level = 0, + eventloop_name = ConfigInfo.USER_MANAGED.name, + level = ConfigInfo.USER_MANAGED.name, level_type = ConfigInfo.USER_MANAGED.name, ) - - - def _prepare_DB(self, config_file : str = None): - if not config_file: - self._object_info = self._create_object_info() - return - # 1. create engine - self.db_engine : RemoteObjectDB = RemoteObjectDB(instance_name=self.instance_name, serializer=self.rpc_serializer, - config_file=config_file) - # 2. create an object metadata to be used by different types of clients - object_info = self.db_engine.fetch_own_info() - if object_info is None: - object_info = self._create_object_info() - self._object_info = object_info - # 3. enter parameters to DB if not already present - if self.object_info.class_name != self.__class__.__name__: - raise ValueError(wrap_text(f""" - Fetched instance name and class name from database not matching with the current RemoteObject class/subclass. 
- You might be reusing an instance name of another subclass and did not remove the old data from database. - Please clean the database using database tools to start fresh. - """)) - - - def _write_parameters_from_DB(self): - self.db_engine.create_missing_db_parameters(self.__class__.parameters.db_init_objects) - # 4. read db_init and db_persist objects - for db_param in self.db_engine.read_all_parameters(): - try: - setattr(self, db_param.name, self.rpc_serializer.loads(db_param.value)) # type: ignore - except Exception as E: - self.logger.error(f"could not set attribute {db_param.name} due to error {E}") - - - def _prepare_state_machine(self): - if hasattr(self, 'state_machine'): - self.state_machine._prepare(self) - self.logger.debug("setup state machine") - + @property def _event_publisher(self) -> EventPublisher: try: return self.event_publisher except AttributeError: - top_owner = self._owner # type: RemoteObject + top_owner = self._owner while True: if isinstance(top_owner, RemoteObject): + self.event_publisher = top_owner.event_publisher + return self.event_publisher + elif isinstance(top_owner, RemoteSubobject): top_owner = top_owner._owner else: - break; - self.event_publisher = top_owner._event_publisher - return self.event_publisher + raise RuntimeError("Error while finding owner of RemoteSubobject.", + "RemoteSubobject must be composed only within RemoteObject or RemoteSubobject, ", + "otherwise there can be problems.") @events.getter @@ -654,7 +551,6 @@ def _get_events(self) -> typing.Dict[str, typing.Any]: ) for event in self.event_publisher.events } - @gui_resources.getter def _get_gui_resources(self): gui_resources = GUIResources( @@ -704,7 +600,130 @@ def _get_gui_resources(self): }, } return gui_resources - + + +class RemoteObject(RemoteSubobject): + """ + Expose your python classes for HTTP methods & RPC clients by subclassing from here. 
+ """ + __server_type__ = ServerTypes.REMOTE_OBJECT + state_machine : StateMachine + + # local parameters + logger = ClassSelector(class_=logging.Logger, default=None, allow_None=True, remote=False, + doc = """Logger object to print log messages, should be instance of logging.Logger(). default + logger is created if none is supplied.""") # type: logging.Logger + rpc_serializer = ClassSelector(class_=(SerpentSerializer, JSONSerializer, PickleSerializer, str), # DillSerializer, + allow_None=True, default='serpent', remote=False, + doc="""The serializer that will be used for passing messages in zmq. For custom data + types which have serialization problems, you can subclass the serializers and implement + your own serialization options. Recommended serializer for exchange messages between + Proxy clients and server is Serpent and for HTTP serializer and server is JSON.""") # type: BaseSerializer + json_serializer = ClassSelector(class_=JSONSerializer, default=None, allow_None=True, remote=False, + doc = """Serializer used for sending messages between HTTP server and remote object, + subclass JSONSerializer to implement undealt serialization options.""") # type: JSONSerializer + + + def __init__(self, instance_name : str, logger : typing.Optional[logging.Logger] = None, log_level : typing.Optional[int] = None, + log_file : typing.Optional[str] = None, logger_remote_access : bool = True, + rpc_serializer : typing.Optional[BaseSerializer] = 'serpent', json_serializer : typing.Optional[JSONSerializer] = None, + server_protocols : typing.Optional[typing.Union[typing.List[ZMQ_PROTOCOLS], + typing.Tuple[ZMQ_PROTOCOLS], ZMQ_PROTOCOLS]] = [ZMQ_PROTOCOLS.IPC, ZMQ_PROTOCOLS.TCP, ZMQ_PROTOCOLS.INPROC], + db_config_file : typing.Optional[str] = None, **params) -> None: + + super().__init__(instance_name=instance_name, logger=logger, rpc_serializer=rpc_serializer, + json_serializer=json_serializer, **params) + + self._prepare_logger(log_file=log_file, log_level=log_level, 
remote_access=logger_remote_access) + self._prepare_message_brokers(protocols=server_protocols) + self._prepare_state_machine() + self._prepare_DB(db_config_file) + + + def __post_init__(self): + # Never create events before _prepare_instance(), no checks in place + super().__post_init__() + self._owner = None + self._prepare_resources() + self._write_parameters_from_DB() + self.logger.info("initialialised RemoteObject class {} with instance name {}".format( + self.__class__.__name__, self.instance_name)) + + + def _prepare_logger(self, log_level : int, log_file : str, remote_access : bool = True): + if self.logger is None: + self.logger = create_default_logger(self.instance_name, + logging.INFO if not log_level else log_level, + None if not log_file else log_file) + if remote_access: + if not any(isinstance(handler, RemoteAccessHandler) + for handler in self.logger.handlers): + self._remote_access_loghandler = RemoteAccessHandler(instance_name='logger', maxlen=500, emit_interval=1) + self.logger.addHandler(self._remote_access_loghandler) + else: + for handler in self.logger.handlers: + if isinstance(handler, RemoteAccessHandler): + self._remote_access_loghandler = handler + + + def _prepare_message_brokers(self, protocols : typing.Optional[typing.Union[typing.Iterable[ZMQ_PROTOCOLS], ZMQ_PROTOCOLS]]): + context = zmq.asyncio.Context() + self.message_broker = AsyncPollingZMQServer( + instance_name=f'{self.instance_name}/inner', # hardcoded be very careful + server_type=self.__server_type__.value, + context=context, + protocol=ZMQ_PROTOCOLS.INPROC, + json_serializer=self.json_serializer, + rpc_serializer=self.rpc_serializer + ) + self.json_serializer = self.message_broker.json_serializer + self.rpc_serializer = self.message_broker.rpc_serializer + self._rpc_server = RPCServer(instance_name=self.instance_name, server_type=self.__server_type__.value, + context=context, protocols=protocols, json_serializer=self.json_serializer, + rpc_serializer=self.rpc_serializer) + 
self.event_publisher = EventPublisher(identity=self.instance_name, rpc_serializer=self.rpc_serializer, + json_serializer=self.json_serializer) + + + def _prepare_DB(self, config_file : str = None): + if not config_file: + self._object_info = self._create_object_info() + return + # 1. create engine + self.db_engine : RemoteObjectDB = RemoteObjectDB(instance_name=self.instance_name, serializer=self.rpc_serializer, + config_file=config_file) + # 2. create an object metadata to be used by different types of clients + object_info = self.db_engine.fetch_own_info() + if object_info is None: + object_info = self._create_object_info() + self._object_info = object_info + # 3. enter parameters to DB if not already present + if self.object_info.class_name != self.__class__.__name__: + raise ValueError(wrap_text(f""" + Fetched instance name and class name from database not matching with the current RemoteObject class/subclass. + You might be reusing an instance name of another subclass and did not remove the old data from database. + Please clean the database using database tools to start fresh. + """)) + + + def _write_parameters_from_DB(self): + if not hasattr(self, 'db_engine'): + return + self.db_engine.create_missing_db_parameters(self.__class__.parameters.db_init_objects) + # 4. 
read db_init and db_persist objects + for db_param in self.db_engine.read_all_parameters(): + try: + setattr(self, db_param.name, self.rpc_serializer.loads(db_param.value)) # type: ignore + except Exception as E: + self.logger.error(f"could not set attribute {db_param.name} due to error {E}") + + + def _prepare_state_machine(self): + if hasattr(self, 'state_machine'): + self.state_machine._prepare(self) + self.logger.debug("setup state machine") + + @get(URL_path='/resources/postman-collection') def postman_collection(self, domain_prefix : str) -> postman_collection: try: @@ -816,10 +835,13 @@ def log_to_console(self, data : typing.Any = None, level : typing.Any = 'DEBUG') def query(self, info : typing.Union[str, typing.List[str]]) -> typing.Any: raise NotImplementedError("arbitrary quering of {} currently not possible".format(self.__class__.__name__)) - def run(self): + def run(self, expose_eventloop : bool = False): from .eventloop import EventLoop - _eventloop = asyncio.get_event_loop() - _eventloop.run_until_complete(EventLoop.run_single_target(self)) + e = EventLoop(instance_name=f'{self.instance_name}/eventloop', remote_objects=[self], log_level=self.logger.level, + rpc_serializer=self.rpc_serializer, json_serializer=self.json_serializer) + if not expose_eventloop: + e.remote_objects = [self] # remote event loop from list of remote objects + e.run() @@ -838,11 +860,11 @@ def emit(self, record : logging.LogRecord): }) - -class RemoteAccessHandler(logging.Handler, RemoteObject): +class RemoteAccessHandler(logging.Handler, RemoteSubobject): def __init__(self, maxlen : int = 100, emit_interval : float = 1.0, **kwargs) -> None: logging.Handler.__init__(self) + RemoteSubobject.__init__(self, **kwargs) # self._last_time = datetime.datetime.now() if not isinstance(emit_interval, (float, int)) or emit_interval < 1.0: raise TypeError("Specify log emit interval as number greater than 1.0") diff --git a/hololinked/server/zmq_message_brokers.py 
b/hololinked/server/zmq_message_brokers.py index f487957..a7f3c1b 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -165,8 +165,8 @@ def create_socket(self, context : typing.Union[zmq.asyncio.Context, zmq.Context] else: raise RuntimeError(f"Socket must be either bound or connected. No operation is being carried out for this socket {identity}") elif protocol == ZMQ_PROTOCOLS.INPROC or protocol == "INPROC": - inproc_instance_name = instance_name.replace('/', '_').replace('-', '_') - socket_address = f'inproc://{inproc_instance_name}' + # inproc_instance_name = instance_name.replace('/', '_').replace('-', '_') + socket_address = f'inproc://{instance_name}' if bind: self.socket.bind(socket_address) else: @@ -635,7 +635,7 @@ def __init__(self, instance_name : str, *, server_type : Enum, context : typing. poll_timeout = 25, **kwargs) -> None: super().__init__(instance_name=instance_name, server_type=server_type, context=context, protocol=protocol, socket_type=socket_type, **kwargs) - self._instructions = [] + self.poller = zmq.asyncio.Poller() self.poller.register(self.socket, zmq.POLLIN) self.poll_timeout = poll_timeout @@ -851,23 +851,23 @@ def __init__(self, instance_name : str, *, server_type : Enum, context : typing. 
super().__init__(server_type, kwargs.get('json_serializer', None), kwargs.get('rpc_serializer', None)) kwargs["json_serializer"] = self.json_serializer kwargs["rpc_serializer"] = self.rpc_serializer - self.context = zmq.asyncio.Context() + self.context = context or zmq.asyncio.Context() self.poller = zmq.asyncio.Poller() - if ZMQ_PROTOCOLS.TCP in protocols: - self.tcp_server = AsyncPollingZMQServer(instance_name=instance_name, server_type=server_type, context=context, - protocol=ZMQ_PROTOCOLS.TCP, poll_timeout=poll_timeout, **kwargs) + if ZMQ_PROTOCOLS.TCP in protocols or "TCP" in protocols: + self.tcp_server = AsyncPollingZMQServer(instance_name=instance_name, server_type=server_type, + context=self.context, protocol=ZMQ_PROTOCOLS.TCP, poll_timeout=poll_timeout, **kwargs) self.poller.register(self.tcp_server.socket) - if ZMQ_PROTOCOLS.IPC in protocols: - self.ipc_server = AsyncPollingZMQServer(instance_name=instance_name, server_type=server_type, context=context, - protocol=ZMQ_PROTOCOLS.IPC, poll_timeout=poll_timeout, **kwargs) + if ZMQ_PROTOCOLS.IPC in protocols or "IPC" in protocols: + self.ipc_server = AsyncPollingZMQServer(instance_name=instance_name, server_type=server_type, + context=self.context, protocol=ZMQ_PROTOCOLS.IPC, poll_timeout=poll_timeout, **kwargs) self.poller.register(self.ipc_server.socket) - if ZMQ_PROTOCOLS.INPROC in protocols: - self.inproc_server = AsyncPollingZMQServer(instance_name=instance_name, server_type=server_type, context=context, - protocol=ZMQ_PROTOCOLS.INPROC, poll_timeout=poll_timeout, **kwargs) + if ZMQ_PROTOCOLS.INPROC in protocols or "INPROC" in protocols: + self.inproc_server = AsyncPollingZMQServer(instance_name=instance_name, server_type=server_type, + context=self.context, protocol=ZMQ_PROTOCOLS.INPROC, poll_timeout=poll_timeout, **kwargs) self.poller.register(self.inproc_server.socket) self.poll_timeout = poll_timeout - self.inproc_client = AsyncZMQClient(server_instance_name=f'{instance_name}/real', 
identity=f'{instance_name}/tunneler', - client_type=TUNNELER, context=context, protocol=ZMQ_PROTOCOLS.INPROC) + self.inproc_client = AsyncZMQClient(server_instance_name=f'{instance_name}/inner', identity=f'{instance_name}/tunneler', + client_type=TUNNELER, context=self.context, protocol=ZMQ_PROTOCOLS.INPROC, handshake=False) self._instructions = deque() # type: typing.Iterable[typing.Tuple[typing.List[bytes], asyncio.Event, asyncio.Future, zmq.Socket]] self._instructions_event = asyncio.Event() self.identity = f"{instance_name}/rpc-server" @@ -924,6 +924,8 @@ async def poll(self): """ self.stop_poll = False eventloop = asyncio.get_event_loop() + self.inproc_client.handshake() + await self.inproc_client.handshake_complete() while not self.stop_poll: sockets : typing.Tuple[zmq.Socket, int] = await self.poller.poll(self._poll_timeout) # type for socket, _ in sockets: @@ -1926,7 +1928,7 @@ def push(self, data : typing.Any = None): class EventPublisher(BaseZMQServer): def __init__(self, identity : str, context : typing.Union[zmq.Context, None] = None, **serializer) -> None: - super().__init__(server_type = ServerTypes.UNKNOWN_TYPE.value, **serializer) + super().__init__(server_type=ServerTypes.UNKNOWN_TYPE.value, **serializer) self.context = context or zmq.Context() self.identity = identity self.socket = self.context.socket(zmq.PUB) @@ -1941,7 +1943,7 @@ def __init__(self, identity : str, context : typing.Union[zmq.Context, None] = print("Following error while atttempting to bind to socket address : {}".format(self.socket_address)) raise ex from None else: - self.logger = self.get_logger(identity, self.socket_address, logging.DEBUG, self.__class__.__name__) + self.logger = self.get_logger(identity, "PUB", "TCP", logging.DEBUG) self.logger.info("created event publishing socket at {}".format(self.socket_address)) break self.events = set() # type: typing.Set[Event] @@ -1949,12 +1951,12 @@ def __init__(self, identity : str, context : typing.Union[zmq.Context, None] = def 
register_event(self, event : Event) -> None: # unique_str_bytes = bytes(unique_str, encoding = 'utf-8') - if event._event_unique_str in self.events: + if event._unique_event_name in self.events: raise AttributeError(wrap_text( """event {} already found in list of events, please use another name. Also, Remotesubobject and RemoteObject cannot share event names.""".format(event.name)) ) - self.event_ids.add(event._event_unique_str) + self.event_ids.add(event._unique_event_name) self.events.add(event) self.logger.info("registered event '{}' serving at PUB socket with address : {}".format(event.name, self.socket_address)) From 0173142db9268b6dd1a536f93abecd3a6ac6d8c4 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sat, 17 Feb 2024 23:16:34 +0100 Subject: [PATCH 029/167] removed local exception method --- hololinked/server/utils.py | 18 +----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git a/hololinked/server/utils.py b/hololinked/server/utils.py index 6ee6e7b..c54c930 100644 --- a/hololinked/server/utils.py +++ b/hololinked/server/utils.py @@ -6,9 +6,6 @@ import asyncio import inspect import typing -import builtins -import types -from typing import List from ..param.exceptions import wrap_error_text as wrap_text @@ -184,7 +181,7 @@ def run_coro_sync(coro): eventloop.run_until_complete(coro) -def run_coro_somehow(coro): +def run_method_somehow(coro): """ either schedule the coroutine or run it until its complete """ @@ -211,19 +208,6 @@ def get_signature(function : typing.Callable): -def raise_local_exception(exception : typing.Dict[str, typing.Any]): - exc = getattr(builtins, exception["type"], None) - message = f"server raised exception, check following for server side traceback & above for client side traceback : " - # tb = types.TracebackType() - if exc is None: - E = Exception(message) - else: - E = exc(message) - # E.with_traceback() - E.__notes__ = exception["traceback"] - raise E - 
- __all__ = ['current_datetime_ms_str', 'wrap_text', 'copy_parameters', 'dashed_URL'] From 5868525aa4099a06f9680e517a5c49d8df4ee2d1 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 18 Feb 2024 14:02:25 +0100 Subject: [PATCH 030/167] snapshot before moving from ZMQ polling to custom polling --- hololinked/server/constants.py | 52 ++++++++++- hololinked/server/data_classes.py | 12 +-- hololinked/server/eventloop.py | 5 +- hololinked/server/proxy_client.py | 96 +++++++++++--------- hololinked/server/remote_object.py | 20 ++--- hololinked/server/remote_parameter.py | 49 +++++++--- hololinked/server/remote_parameters.py | 79 +++++++---------- hololinked/server/zmq_message_brokers.py | 108 ++++++++++++----------- 8 files changed, 240 insertions(+), 181 deletions(-) diff --git a/hololinked/server/constants.py b/hololinked/server/constants.py index d117099..cb407d4 100644 --- a/hololinked/server/constants.py +++ b/hololinked/server/constants.py @@ -1,9 +1,8 @@ import logging import functools import typing -from enum import Enum +from enum import Enum, StrEnum, IntEnum from types import MethodType, FunctionType -import zmq # decorator constants @@ -13,6 +12,16 @@ ANY_STATE : str = "ANY_STATE" UNSPECIFIED : str = "UNSPECIFIED" # types +class ResourceType(StrEnum): + FUNC = "FUNC" + ATTRIBUTE = "ATTRIBUTE" + PARAMETER = "PARAMETER" + IMAGE_STREAM = "IMAGE_STREAM" + CALLABLE = "CALLABLE" + FILE = "FILE" + EVENT = "EVENT" + + FUNC = "FUNC" ATTRIBUTE = "ATTRIBUTE" PARAMETER = "PARAMETER" @@ -25,7 +34,7 @@ # logic WRAPPER_ASSIGNMENTS = functools.WRAPPER_ASSIGNMENTS + ('__kwdefaults__', '__defaults__', ) -SERIALIZABLE_WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__qualname__', '__doc__', '__kwdefaults__', '__defaults__', ) +SERIALIZABLE_WRAPPER_ASSIGNMENTS = ('__name__', '__qualname__', '__doc__' ) # regex logic states_regex : str = '[A-Za-z_]+[A-Za-z_ 0-9]*' url_regex : str = r'[\-a-zA-Z0-9@:%._\/\+~#=]{1,256}' 
@@ -58,4 +67,39 @@ JSONSerializable = typing.Union[typing.Dict[str, typing.Any], list, str, int, float, None] # ZMQ -ZMQ_PROTOCOLS = Enum('ZMQ_PROTOCOLS', 'TCP IPC INPROC') \ No newline at end of file +class ZMQ_PROTOCOLS(Enum): + TCP = "TCP" + IPC = "IPC" + INPROC = "INPROC" + +class Instructions(StrEnum): + RPC_RESOURCES = '/resources/object-proxy/read' + HTTP_RESOURCES = '/resources/http-server/read' + +class ClientMessage(IntEnum): + ADDRESS = 0 + CLIENT_TYPE = 2 + MESSAGE_TYPE = 3 + MESSAGE_ID = 4 + TIMEOUT = 5 + INSTRUCTION = 6 + ARGUMENTS = 7 + EXECUTION_CONTEXT = 8 + +class ServerMessage(IntEnum): + ADDRESS = 0 + SERVER_TYPE = 2 + MESSAGE_TYPE = 3 + MESSAGE_ID = 4 + DATA = 5 + +class ServerMessageData(StrEnum): + RETURN_VALUE = "returnValue" + +class ServerTypes(Enum): + UNKNOWN_TYPE = b'UNKNOWN_TYPE' + EVENTLOOP = b'EVENTLOOP' + REMOTE_OBJECT = b'REMOTE_OBJECT' + POOL = b'POOL' + + diff --git a/hololinked/server/data_classes.py b/hololinked/server/data_classes.py index cbfe045..a2b87b1 100644 --- a/hololinked/server/data_classes.py +++ b/hololinked/server/data_classes.py @@ -181,9 +181,6 @@ def __setstate__(self, values : typing.Dict): for key, value in values.items(): setattr(self, key, value) - def get_dunder_attr(self, __dunder_name : str): - return getattr(self, __dunder_name.strip('_')) - def json(self): """ Set use_json_method=True in ``serializers.JSONSerializer`` instance and pass the object to the @@ -233,22 +230,27 @@ class RPCResource: name : str qualname : str doc : typing.Optional[str] + top_owner : bool def __init__(self, *, what : str, instance_name : str, instruction : str, name : str, - qualname : str, doc : str) -> None: + qualname : str, doc : str, top_owner : bool) -> None: self.what = what self.instance_name = instance_name self.instruction = instruction self.name = name self.qualname = qualname self.doc = doc + self.top_owner = top_owner def json(self): """ Set use_json_method=True in ``serializers.JSONSerializer`` instance and pass 
the object to the serializer directly to get the JSON. """ - return asdict(self) + return asdict(self) + + def get_dunder_attr(self, __dunder_name : str): + return getattr(self, __dunder_name.strip('_')) @dataclass diff --git a/hololinked/server/eventloop.py b/hololinked/server/eventloop.py index 671ec4d..fc2b477 100644 --- a/hololinked/server/eventloop.py +++ b/hololinked/server/eventloop.py @@ -150,7 +150,7 @@ def run(self): self._remote_object_executor.start() self._run_external_message_listener() self._remote_object_executor.join() - + def _run_external_message_listener(self): """ Runs ZMQ's sockets which are visible to clients @@ -261,8 +261,9 @@ async def execute_once(cls, instance_name : str, instance : RemoteObject, instru # Note that because we actually find the resource within __prepare_instance__, its already bound # and we dont have to separately bind it. func = resource.obj + args = arguments.pop('__args__') if resource.iscoroutine: - return await func(**arguments) + return await func(*args, **arguments) else: return func(**arguments) else: diff --git a/hololinked/server/proxy_client.py b/hololinked/server/proxy_client.py index fe00b63..b9ca3d8 100644 --- a/hololinked/server/proxy_client.py +++ b/hololinked/server/proxy_client.py @@ -4,33 +4,32 @@ import logging from typing import Any +from .data_classes import RPCResource from .zmq_message_brokers import SyncZMQClient, EventConsumer, PROXY -from .utils import current_datetime_ms_str, raise_local_exception -from .constants import PARAMETER, SERIALIZABLE_WRAPPER_ASSIGNMENTS, FUNC, CALLABLE, ATTRIBUTE, EVENT - - - - -SingleLevelNestedJSON = typing.Dict[str, typing.Dict[str, typing.Any]] - +from .utils import current_datetime_ms_str +from .constants import (SERIALIZABLE_WRAPPER_ASSIGNMENTS, Instructions, ServerMessage, ServerMessageData, ResourceType) +from .zmq_message_brokers import (CM_INDEX_ADDRESS, CM_INDEX_ARGUMENTS, CM_INDEX_CLIENT_TYPE, CM_INDEX_EXECUTION_CONTEXT, + CM_INDEX_INSTRUCTION, 
CM_INDEX_MESSAGE_ID, CM_INDEX_MESSAGE_TYPE, CM_INDEX_TIMEOUT) +from .zmq_message_brokers import (SM_INDEX_ADDRESS, SM_INDEX_DATA, SM_INDEX_MESSAGE_ID, SM_INDEX_MESSAGE_TYPE, + SM_INDEX_SERVER_TYPE) class ObjectProxy: - __own_attrs__ = frozenset([ - '_client', '_client_ID', '__annotations__', + _own_attrs = frozenset([ + '_client', 'identity', '__annotations__', 'instance_name', 'logger', 'timeout', '_timeout', ]) - def __init__(self, instance_name : str, timeout : float = 5, load_remote_object = True, protocol : str = 'TCP', **serializer) -> None: + def __init__(self, instance_name : str, timeout : float = 5, load_remote_object = True, protocol : str = 'TCP', **kwargs) -> None: self.instance_name = instance_name - self._client_ID = instance_name + current_datetime_ms_str() - self.logger = logging.Logger(self._client_ID) self.timeout = timeout + self.identity = instance_name + current_datetime_ms_str() + self.logger = logging.Logger(self.identity) # compose ZMQ client in Proxy client so that all sending and receiving is # done by the ZMQ client and not by the Proxy client directly. 
Proxy client only # bothers mainly about __setattr__ and _getattr__ - self._client = SyncZMQClient(instance_name, self._client_ID, client_type=PROXY, **serializer) + self._client = SyncZMQClient(instance_name, self.identity, client_type=PROXY, protocol=protocol, **kwargs) if load_remote_object: self.load_remote_object() @@ -44,7 +43,7 @@ def __getattribute__(self, __name: str) -> Any: return obj def __setattr__(self, __name : str, __value : typing.Any): - if __name in ObjectProxy.__own_attrs__ or (__name not in self.__dict__ and isinstance(__value, __allowed_attribute_types__)): + if __name in ObjectProxy._own_attrs or (__name not in self.__dict__ and isinstance(__value, __allowed_attribute_types__)): print(f"setting {__name}") return super(ObjectProxy, self).__setattr__(__name, __value) elif __name in self.__dict__: @@ -59,10 +58,11 @@ def __repr__(self): return f'ObjectProxy {self.instance_name}' def __enter__(self): + raise NotImplementedError("with statement is not completely implemented yet. Avoid.") return self def __exit__(self, exc_type, exc_value, traceback): - raise NotImplementedError("with statement exit is not yet implemented. Avoid.") + raise NotImplementedError("with statement is not completely implemented yet. 
Avoid.") def __bool__(self): return True @@ -77,7 +77,7 @@ def __ne__(self, other): return True def __hash__(self): - return hash(self._client_ID) + return hash(self.identity) @property def timeout(self) -> typing.Union[float, int]: @@ -104,7 +104,7 @@ def invoke(self, method : str, oneway : bool = False, **kwargs) -> typing.Any: return method(**kwargs) async def async_invoke(self, method : str, **kwargs): - method : _RemoteMethod = getattr(self, method, None) + method = getattr(self, method, None) # type: _RemoteMethod if not method: raise AttributeError(f"No remote method named {method}") return await method.async_call(**kwargs) @@ -188,19 +188,20 @@ def load_remote_object(self): Usually this will already be known due to the default behavior of the connect handshake, where the connect response also includes the metadata. """ - fetch = _RemoteMethod(self._client, f'/{self.instance_name}/resources/object-proxy/read') - reply : SingleLevelNestedJSON = fetch()[5]["returnValue"] + fetch = _RemoteMethod(self._client, f'/{self.instance_name}{Instructions.RPC_RESOURCES}', + self._timeout) # type: _RemoteMethod + reply = fetch()[ServerMessage.DATA][ServerMessageData.RETURN_VALUE] # type: typing.Dict[str, typing.Dict[str, typing.Any]] for name, data in reply.items(): if isinstance(data, dict): - data = ProxyResourceData(**data) - elif not isinstance(data, ProxyResourceData): - raise RuntimeError("Logic error - unpickled info about server not instance of ProxyResourceData") - if data.what == CALLABLE: - _add_method(self, _RemoteMethod(self._client, data.instruction), data) - elif data.what == ATTRIBUTE: - _add_parameter(self, _RemoteParameter(self._client, data.instruction), data) - elif data.what == EVENT: + data = RPCResource(**data) + elif not isinstance(data, RPCResource): + raise RuntimeError("Logic error - desieralized info about server not instance of ProxyResourceData") + if data.what == ResourceType.CALLABLE: + _add_method(self, _RemoteMethod(self._client, 
data.instruction, self.timeout), data) + elif data.what == ResourceType.PARAMETER: + _add_parameter(self, _RemoteParameter(self._client, data.instruction, self.timeout), data) + elif data.what == ResourceType.EVENT: pass # def _pyroInvokeBatch(self, calls, oneway=False): @@ -214,37 +215,36 @@ def load_remote_object(self): class _RemoteMethod: """method call abstraction""" - def __init__(self, client : SyncZMQClient, instruction : str) -> None: + def __init__(self, client : SyncZMQClient, instruction : str, timeout : typing.Optional[float] = None) -> None: self._client = client self._instruction = instruction + self._timeout = timeout self._loop = asyncio.get_event_loop() - def __del__(self): - self._client = None # remove ref, as of now weakref is not used. - @property # i.e. cannot have setter def last_return_value(self): return self._last_return_value def oneway(self, *args, **kwargs) -> None: - self._client.execute(self._instruction, kwargs) + kwargs["__args__"] = args + self._client.send_instruction(self._instruction, kwargs, self._timeout) def __call__(self, *args, **kwargs) -> typing.Any: - self._last_return_value : typing.Dict = self._client.execute(self._instruction, kwargs, - raise_client_side_exception=True) + kwargs["__args__"] = args + self._last_return_value = self._client.execute(self._instruction, + kwargs, raise_client_side_exception=True) return self._last_return_value - async def async_call(self, *args, **kwargs) -> typing.Any: - self._last_return_value : typing.Dict = self._client.execute(self._instruction, kwargs, - raise_client_side_exception=True) - return self._last_return_value + class _RemoteParameter: """parameter set & get abstraction""" - def __init__(self, client : SyncZMQClient, instruction : str): + def __init__(self, client : SyncZMQClient, instruction : str, + timeout : typing.Optional[float] = None) -> None: self._client = client + self._timeout = timeout self._read_instruction = instruction + '/read' self._write_instruction = 
instruction + '/write' @@ -262,7 +262,7 @@ def set(self, value : typing.Any) -> typing.Any: def get(self): self._last_value : typing.Dict = self._client.execute(self._read_instruction, raise_client_side_exception=True) - return self._last_value + return self._last_value[SM_INDEX_DATA] async def async_set(self, value : typing.Any) -> typing.Any: self._last_value : typing.Dict = await self._client.execute(self._write_instruction, dict(value=value), @@ -360,16 +360,24 @@ def close(self): __allowed_attribute_types__ = (_RemoteParameter, _RemoteMethod) -def _add_method(client_obj : ObjectProxy, method : _RemoteMethod, func_info) -> None: +def _add_method(client_obj : ObjectProxy, method : _RemoteMethod, func_info : RPCResource) -> None: + if isinstance(func_info, list): + raise TypeError(f"got list instead of RPC resource for {func_info.name}") + if not func_info.top_owner: + return for dunder in SERIALIZABLE_WRAPPER_ASSIGNMENTS: if dunder == '__qualname__': info = '{}.{}'.format(client_obj.__class__.__name__, func_info.get_dunder_attr(dunder).split('.')[1]) else: info = func_info.get_dunder_attr(dunder) setattr(method, dunder, info) - client_obj.__setattr__(method.__name__, method) + client_obj.__setattr__(func_info.name, method) -def _add_parameter(client_obj : ObjectProxy, parameter : _RemoteParameter, parameter_info) -> None: +def _add_parameter(client_obj : ObjectProxy, parameter : _RemoteParameter, parameter_info : RPCResource) -> None: + if isinstance(parameter_info, list): + raise TypeError(f"got list instead of RPC resource for {parameter_info.name}") + if not parameter_info.top_owner: + return for attr in ['doc', 'name']: # just to imitate _add_method logic setattr(parameter, attr, getattr(parameter_info, attr)) diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index 98593de..45dfe18 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -20,7 +20,7 @@ from ..param.parameterized import 
Parameterized, ParameterizedMetaclass from .constants import (EVENT, GET, IMAGE_STREAM, JSONSerializable, CallableType, CALLABLE, - ATTRIBUTE, READ, WRITE, log_levels, POST, ZMQ_PROTOCOLS, FILE) + PARAMETER, READ, WRITE, log_levels, POST, ZMQ_PROTOCOLS, FILE) from .serializers import * from .exceptions import BreakInnerLoop from .decorators import remote_method @@ -426,6 +426,7 @@ def _prepare_resources(self): name=getattr(resource, '__name__'), qualname=getattr(resource, '__qualname__'), doc=getattr(resource, '__doc__'), + top_owner=self._owner is None ) instance_resources[fullpath] = remote_info.to_dataclass(obj=resource, bound_obj=self) # Other remote objects @@ -458,7 +459,7 @@ def _prepare_resources(self): ) # Parameters for parameter in self.parameters.descriptors.values(): - if hasattr(parameter, '_remote_info'): + if hasattr(parameter, '_remote_info') and parameter._remote_info is not None: if not isinstance(parameter._remote_info, RemoteResourceInfoValidator): # type: ignore raise TypeError("instance member {} has unknown sub-member 'scada_info' of type {}.".format( parameter, type(parameter._remote_info))) # type: ignore @@ -470,27 +471,28 @@ def _prepare_resources(self): read_http_method, write_http_method = remote_info.http_method httpserver_resources[read_http_method][fullpath] = HTTPResource( - what=ATTRIBUTE, + what=PARAMETER, instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, fullpath=fullpath, instruction=fullpath + '/' + READ ) httpserver_resources[write_http_method][fullpath] = HTTPResource( - what=ATTRIBUTE, + what=PARAMETER, instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, fullpath=fullpath, instruction=fullpath + '/' + WRITE ) rpc_resources[fullpath] = RPCResource( - what=ATTRIBUTE, + what=PARAMETER, instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, instruction=fullpath, doc=parameter.__doc__, name=remote_info.obj_name, 
qualname=self.__class__.__name__ + '.' + remote_info.obj_name, # qualname is not correct probably, does not respect inheritance + top_owner=self._owner is None ) dclass = remote_info.to_dataclass(obj=parameter, bound_obj=self) instance_resources[fullpath+'/'+READ] = dclass @@ -810,18 +812,14 @@ def ping(self) -> bool: return True @get('/test/speed') - def _test_speed(self, value : typing.Any): + def test_speed(self, value : typing.Any): """ This method returns whatever you give allowing speed test of different data types. The message sent is first serialized by the client, deserialized by the server and in return direction again serialized by server and deserialized by the client. oneway speed is twice the measured value. """ return value - - @get('/test/args/{int_arg:int}/{float_arg:float}/{str_arg:str}') - def _test_path_arguments(self, int_arg, float_arg, str_arg): - self.logger.info(f"passed arguments : int - {int_arg}, float - {float_arg}, str - {str_arg}") - + # example of remote_method decorator @remote_method(URL_path='/log/console', http_method = POST) def log_to_console(self, data : typing.Any = None, level : typing.Any = 'DEBUG') -> None: diff --git a/hololinked/server/remote_parameter.py b/hololinked/server/remote_parameter.py index 6f0f56c..74f98e7 100644 --- a/hololinked/server/remote_parameter.py +++ b/hololinked/server/remote_parameter.py @@ -59,24 +59,37 @@ class RemoteParameter(Parameter): allowed. If the default value is defined as None, allow_None is set to True automatically. - db_memorized: bool, default False + URL_path: str, uses object name by default + resource locator under which the attribute is accessible through + HTTP. 
when remote is True and no value is supplied, the variable name + is used and underscores and replaced with dash + + remote: bool, default True + set to false to make the parameter local + + http_method: tuple, default (GET, PUT) + http methods for read and write respectively + + state: str | Enum, default None + state of state machine where write can be executed + + db_persist: bool, default False if True, every read and write is stored in database and persists instance destruction and creation. - db_firstload: bool, default False + db_init: bool, default False if True, only the first read is loaded from database. - further reads and writes not written to database. if db_memorized + Further reads and writes not written to database. if db_persist + is True, this value is ignored. + + db_commit: bool, + if True, all write values are stored to database. if db_persist is True, this value is ignored. remote: bool, default True set False to avoid exposing the variable for remote read and write - URL_path: str, uses object name by default - resource locator under which the attribute is accessible through - HTTP. when remote is True and no value is supplied, the variable name - is used and underscores and replaced with dash - metadata: dict, default None store your own JSON compatible metadata for the parameter which gives useful (and modifiable) information about the parameter. @@ -86,6 +99,15 @@ class RemoteParameter(Parameter): shown in a listing. If no label is supplied, the attribute name for this parameter in the owning Parameterized object is used. + fget: Callable, default None + custom getter method, mandatory when setter method is also custom. + + fset: Callable, default None + custom setter method + + fdel: Callable, default None + custom deleter method + per_instance_descriptor: bool, default False whether a separate Parameter instance will be created for every Parameterized instance. True by default. 
@@ -116,6 +138,7 @@ class RemoteParameter(Parameter): independently deepcopied value. class_member : bool, default False + when True, parameter is set on class instead of instance. precedence: float, default None a numeric value, usually in the range 0.0 to 1.0, @@ -123,9 +146,6 @@ class RemoteParameter(Parameter): a listing or e.g. in GUI menus. A negative precedence indicates a parameter that should be hidden in such listings. - default, doc, and precedence all default to None, which allows - inheritance of Parameter slots (attributes) from the owning-class' - class hierarchy (see ParameterizedMetaclass). """ __slots__ = ['db_persist', 'db_init', 'db_commit', 'metadata', '_remote_info'] @@ -162,9 +182,10 @@ def __init__(self, default: typing.Any = None, *, doc : typing.Optional[str] = N def _post_slot_set(self, slot : str, old : typing.Any, value : typing.Any) -> None: if slot == 'owner' and self.owner is not None: - if self._remote_info.URL_path == USE_OBJECT_NAME: - self._remote_info.URL_path = '/' + self.name - self._remote_info.obj_name = self.name + if self._remote_info is not None: + if self._remote_info.URL_path == USE_OBJECT_NAME: + self._remote_info.URL_path = '/' + self.name + self._remote_info.obj_name = self.name # In principle the above could be done when setting name itself however to simplify # we do it with owner. 
So we should always remember order of __set_name__ -> 1) attrib_name, # 2) name and then 3) owner diff --git a/hololinked/server/remote_parameters.py b/hololinked/server/remote_parameters.py index f7112a7..a9a626a 100644 --- a/hololinked/server/remote_parameters.py +++ b/hololinked/server/remote_parameters.py @@ -4,8 +4,6 @@ import datetime as dt import typing import numbers -import sys -import functools import collections.abc from enum import Enum from collections import OrderedDict @@ -27,11 +25,11 @@ class String(RemoteParameter): Example of using a regex to implement IPv4 address matching:: - class IPAddress(String): + class IPAddress(String): '''IPv4 address as a string (dotted decimal notation)''' - def __init__(self, default="0.0.0.0", allow_None=False, **kwargs): - ip_regex = r'^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$' - super(IPAddress, self).__init__(default=default, regex=ip_regex, **kwargs) + def __init__(self, default="0.0.0.0", allow_None=False, **kwargs): + ip_regex = r'^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$' + super(IPAddress, self).__init__(default=default, regex=ip_regex, **kwargs) """ __slots__ = ['regex'] @@ -49,7 +47,7 @@ def __init__(self, default : typing.Optional[str] = "", *, regex : typing.Option deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.regex = regex def validate_and_adapt(self, value : typing.Any) -> str: @@ -151,7 +149,7 @@ def __init__(self, default : typing.Optional[str] = "0.0.0.0", *, allow_ipv4 : b deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, 
db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.allow_localhost = allow_localhost self.allow_ipv4 = allow_ipv4 self.allow_ipv6 = allow_ipv6 @@ -379,7 +377,7 @@ def __init__(self, default : typing.Optional[typing.Union[float, int]] = 0.0, *, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.bounds = bounds self.crop_to_bounds = crop_to_bounds self.inclusive_bounds = inclusive_bounds @@ -507,7 +505,6 @@ def __init__(self, default : typing.Optional[int] = 0, *, bounds : typing.Option URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -516,7 +513,7 @@ def __init__(self, default : typing.Optional[int] = 0, *, bounds : typing.Option deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.dtype = (int,) def _validate_step(self, step : int): @@ -533,7 +530,6 @@ def __init__(self, default : typing.Optional[bool] = False, *, URL_path : str 
= USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -541,7 +537,7 @@ def __init__(self, default : typing.Optional[bool] = False, *, allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) def validate_and_adapt(self, value : typing.Any) -> bool: if not isinstance(value, bool): @@ -561,7 +557,6 @@ def __init__(self, default : typing.Any, *, bounds : typing.Optional[typing.Tupl URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - per_instance_descriptor : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -575,7 +570,7 @@ def __init__(self, default : typing.Any, *, bounds : typing.Optional[typing.Tupl allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, 
precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.bounds = bounds self.length = length self.item_type = item_type @@ -629,7 +624,6 @@ def __init__(self, default : typing.Any, *, bounds : typing.Optional[typing.Tupl URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - per_instance_descriptor : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -638,7 +632,7 @@ def __init__(self, default : typing.Any, *, bounds : typing.Optional[typing.Tupl deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.accept_list = accept_list self.dtype = (tuple,) # re-assigned @@ -684,7 +678,6 @@ def __init__(self, default: typing.Any, *, bounds : typing.Optional[typing.Tuple URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - per_instance_descriptor : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, @@ -694,7 +687,7 @@ def __init__(self, default: 
typing.Any, *, bounds : typing.Optional[typing.Tuple per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.accept_tuple = accept_tuple self.dtype = list @@ -745,7 +738,6 @@ def __init__(self, attribs : typing.List[typing.Union[str, Parameter]], *, URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -753,7 +745,7 @@ def __init__(self, attribs : typing.List[typing.Union[str, Parameter]], *, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.attribs = [] if attribs is not None: for attrib in attribs: @@ -830,8 +822,7 @@ def __init__(self, *, objects : typing.List[typing.Any], default : typing.Any, e doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, 
str, Enum]] = None, - db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - + db_persist : bool = False, db_init : bool = False, db_commit : bool = False, per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, @@ -840,7 +831,7 @@ def __init__(self, *, objects : typing.List[typing.Any], default : typing.Any, e allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) if objects is None: objects = [] autodefault = None @@ -894,7 +885,6 @@ def __init__(self, *, class_ , default : typing.Any, isinstance : bool = True, d URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - per_instance_descriptor : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -902,7 +892,7 @@ def __init__(self, *, class_ , default : typing.Any, isinstance : bool = True, d allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + 
db_init=db_init, db_commit=db_commit, remote=remote) self.class_ = class_ self.isinstance = isinstance @@ -963,7 +953,6 @@ def __init__(self, *, objects : typing.List, default : typing.Any, accept_list : URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -972,7 +961,7 @@ def __init__(self, *, objects : typing.List, default : typing.Any, accept_list : deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.accept_list = accept_list def validate_and_adapt(self, value : typing.Any): @@ -1027,7 +1016,6 @@ def __init__(self, default : typing.Any = '', *, search_paths : typing.Optional[ URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -1036,7 +1024,7 @@ def __init__(self, default : typing.Any = '', *, 
search_paths : typing.Optional[ deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) if isinstance(search_paths, str): self.search_paths = [search_paths] elif isinstance(search_paths, list): @@ -1136,7 +1124,6 @@ def __init__(self, default : typing.Any, *, objects : typing.List, path : str = URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -1145,7 +1132,7 @@ def __init__(self, default : typing.Any, *, objects : typing.List, path : str = deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.path = path # update is automatically called def _post_slot_set(self, slot: str, old : typing.Any, value : typing.Any) -> None: @@ -1176,7 +1163,6 @@ def __init__(self, default : typing.Any, *, path : str = "", URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - label : 
typing.Optional[str] = None, per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -1185,7 +1171,7 @@ def __init__(self, default : typing.Any, *, path : str = "", deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) def update(self): self.objects = sorted(glob.glob(self.path)) @@ -1205,8 +1191,7 @@ def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, - db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - + db_persist : bool = False, db_init : bool = False, db_commit : bool = False, per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -1216,7 +1201,7 @@ def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None, deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, 
db_commit=db_commit, remote=remote) self.dtype = dt_types def _validate_step(self, val): @@ -1249,8 +1234,7 @@ def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, - db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - + db_persist : bool = False, db_init : bool = False, db_commit : bool = False, per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -1260,7 +1244,7 @@ def __init__(self, default, *, bounds : typing.Union[typing.Tuple, None] = None, deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.dtype = dt.date def _validate_step(self, step): @@ -1330,7 +1314,6 @@ def __init__(self, default, *, allow_named : bool = True, doc : typing.Optional URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - readonly : bool = False, allow_None : bool = False, per_instance_descriptor : bool = False, deepcopy_default : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : 
typing.Optional[typing.Callable] = None, @@ -1340,7 +1323,7 @@ def __init__(self, default, *, allow_named : bool = True, doc : typing.Optional deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.allow_named = allow_named def validate_and_adapt(self, value : typing.Any): @@ -1384,7 +1367,7 @@ def __init__(self, default : typing.Optional[typing.Tuple] = None, *, bounds: ty deepcopy_default=deepcopy_default, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) def validate_and_adapt(self, value : typing.Any) -> typing.Tuple: raise NotImplementedError("Range validation not implemented") @@ -1532,7 +1515,7 @@ def __init__(self, default : typing.Optional[typing.List[typing.Any]] = None, *, per_instance_descriptor=per_instance_descriptor, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) self.item_type = item_type self.bounds = bounds @@ -1564,7 +1547,6 @@ def __init__(self, default : typing.Optional[typing.Dict] = None, *, key_type : URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - per_instance_descriptor : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : 
typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, @@ -1579,7 +1561,7 @@ def __init__(self, default : typing.Optional[typing.Dict] = None, *, key_type : doc=doc, constant=constant, readonly=readonly, allow_None=allow_None, fget=fget, fset=fset, fdel=fdel, per_instance_descriptor=per_instance_descriptor, class_member=class_member, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) def __set__(self, obj, value): if value is not None: @@ -1607,8 +1589,7 @@ def __init__(self, default : typing.Optional[typing.Dict[typing.Any, typing.Any] doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, URL_path : str = USE_OBJECT_NAME, http_method : typing.Tuple[str, str] = (GET, PUT), remote : bool = True, state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, - db_persist : bool = False, db_init : bool = False, db_commit : bool = False, - + db_persist : bool = False, db_init : bool = False, db_commit : bool = False, per_instance_descriptor : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, precedence : typing.Optional[float] = None) -> None: @@ -1624,7 +1605,7 @@ def __init__(self, default : typing.Optional[typing.Dict[typing.Any, typing.Any] allow_None=allow_None, per_instance_descriptor=per_instance_descriptor, class_member=class_member, fget=fget, fset=fset, fdel=fdel, precedence=precedence, URL_path=URL_path, http_method=http_method, state=state, db_persist=db_persist, - db_init=db_init, db_commit=db_commit) + db_init=db_init, db_commit=db_commit, remote=remote) def __set__(self, obj, value): if value is not None: diff --git a/hololinked/server/zmq_message_brokers.py 
b/hololinked/server/zmq_message_brokers.py index a7f3c1b..04389bc 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -13,7 +13,7 @@ from .utils import create_default_logger, run_method_somehow, wrap_text from .config import global_config -from .constants import ZMQ_PROTOCOLS +from .constants import ZMQ_PROTOCOLS, ServerTypes from .serializers import (JSONSerializer, PickleSerializer, BaseSerializer, SerpentSerializer, # DillSerializer, serializers) from ..param.parameterized import Parameterized @@ -71,12 +71,7 @@ SM_INDEX_DATA = 5 # Server types - currently useless metadata -class ServerTypes(Enum): - UNKNOWN_TYPE = b'UNKNOWN_TYPE' - EVENTLOOP = b'EVENTLOOP' - REMOTE_OBJECT = b'REMOTE_OBJECT' - POOL = b'POOL' - + class BaseZMQ: @@ -161,9 +156,10 @@ def create_socket(self, context : typing.Union[zmq.asyncio.Context, zmq.Context] if not ex.strerror.startswith('Address in use'): raise ex from None elif kwargs.get('socket_address', None): + socket_address = kwargs.get('socket_address', None) self.socket.connect(kwargs["socket_address"]) else: - raise RuntimeError(f"Socket must be either bound or connected. 
No operation is being carried out for this socket {identity}") + raise RuntimeError(f"Socket must be either bound or connected & socket address not supplied for identity - {identity}") elif protocol == ZMQ_PROTOCOLS.INPROC or protocol == "INPROC": # inproc_instance_name = instance_name.replace('/', '_').replace('-', '_') socket_address = f'inproc://{instance_name}' @@ -177,7 +173,7 @@ def create_socket(self, context : typing.Union[zmq.asyncio.Context, zmq.Context] self.logger.info("created socket with address {} and {}".format(socket_address, "bound" if bind else "connected")) @classmethod - def get_logger(cls, identity : str, socket_type : str, protocol : str, level = logging.DEBUG) -> logging.Logger: + def get_logger(cls, identity : str, socket_type : str, protocol : str, level = logging.INFO) -> logging.Logger: """ creates a logger with name {class name} | {socket type} | {protocol} | {identity}, default logging level is ``logging.INFO`` @@ -917,7 +913,7 @@ def _get_timeout_from_instruction(self, message : typing.Tuple[bytes]) -> float: return self.json_serializer.loads(message[CM_INDEX_TIMEOUT]) - async def poll(self): + async def poll(self, server : AsyncZMQServer): """ poll for instructions and append them to instructions list to pass them to ``Eventloop``/``RemoteObject``'s inproc server using an inner inproc client. Registers the messages for timeout calculation. 
@@ -926,36 +922,39 @@ async def poll(self): eventloop = asyncio.get_event_loop() self.inproc_client.handshake() await self.inproc_client.handshake_complete() - while not self.stop_poll: - sockets : typing.Tuple[zmq.Socket, int] = await self.poller.poll(self._poll_timeout) # type - for socket, _ in sockets: - while True: - try: - original_instruction = await socket.recv_multipart(zmq.NOBLOCK) - if original_instruction[CM_INDEX_MESSAGE_TYPE] == HANDSHAKE: - handshake_task = asyncio.create_task(self._handshake(original_instruction, socket)) - eventloop.call_soon(lambda : handshake_task) - timeout = self._get_timeout_from_instruction(original_instruction) - ready_to_process_event = None - timeout_task = None - if timeout is not None: - ready_to_process_event = asyncio.Event() - timeout_task = asyncio.create_task(self.process_timeouts(original_instruction, - ready_to_process_event, timeout, socket)) - eventloop.call_soon(lambda : timeout_task) - except zmq.Again: - break - except Exception as ex: - # handle invalid message - self.logger.error(f"exception occurred for message id {original_instruction[CM_INDEX_MESSAGE_ID]} - {str(ex)}") - invalid_message_task = asyncio.create_task(self._handle_invalid_message(original_instruction, - ex, socket)) - eventloop.call_soon(lambda: invalid_message_task) - else: - self._instructions.append((original_instruction, ready_to_process_event, - timeout_task, socket)) - - # print("instruction in RPC", original_instruction) + # while not self.stop_poll: + # sockets : typing.Tuple[zmq.Socket, int] = await self.poller.poll(-1) # self._poll_timeout) # type + # for socket, _ in sockets: + # print("here") + socket = server.socket + while True: + try: + # print("polling remaining") + original_instruction = await socket.recv_multipart() + if original_instruction[CM_INDEX_MESSAGE_TYPE] == HANDSHAKE: + handshake_task = asyncio.create_task(self._handshake(original_instruction, socket)) + eventloop.call_soon(lambda : handshake_task) + continue + 
timeout = self._get_timeout_from_instruction(original_instruction) + ready_to_process_event = None + timeout_task = None + if timeout is not None: + ready_to_process_event = asyncio.Event() + timeout_task = asyncio.create_task(self.process_timeouts(original_instruction, + ready_to_process_event, timeout, socket)) + eventloop.call_soon(lambda : timeout_task) + except zmq.Again: + continue + except Exception as ex: + # handle invalid message + self.logger.error(f"exception occurred for message id {original_instruction[CM_INDEX_MESSAGE_ID]} - {str(ex)}") + invalid_message_task = asyncio.create_task(self._handle_invalid_message(original_instruction, + ex, socket)) + eventloop.call_soon(lambda: invalid_message_task) + else: + self._instructions.append((original_instruction, ready_to_process_event, + timeout_task, socket)) + self._instructions_event.set() @@ -1084,7 +1083,7 @@ def __init__(self, server_instance_name : str, client_type : bytes, **kwargs) -> super().__init__() - def raise_local_exception(exception : typing.Dict[str, typing.Any]) -> None: + def raise_local_exception(self, exception : typing.Dict[str, typing.Any]) -> None: """ raises an exception on client side using an exception from server by mapping it to the correct one based on type. @@ -1097,6 +1096,8 @@ def raise_local_exception(exception : typing.Dict[str, typing.Any]) -> None: ------ python exception based on type. 
If not found in builtins """ + if isinstance(exception, Exception): + raise exception from None exc = getattr(builtins, exception["type"], None) message = f"server raised exception, check following for server side traceback & above for client side traceback : " if exc is None: @@ -1141,7 +1142,7 @@ def parse_server_message(self, message : typing.List[bytes], raise_client_side_e message[SM_INDEX_DATA] = self.json_serializer.loads(message[SM_INDEX_DATA]) # type: ignore elif self.client_type == PROXY: message[SM_INDEX_DATA] = self.rpc_serializer.loads(message[SM_INDEX_DATA]) # type: ignore - self.raise_local_exception(message) + self.raise_local_exception(message[SM_INDEX_DATA]) # if message[SM_INDEX_DATA].get('exception', None) is not None and raise_client_side_exception: # self.raise_local_exception(message[SM_INDEX_DATA]['exception']) else: @@ -1222,7 +1223,9 @@ class SyncZMQClient(BaseZMQClient, BaseSyncZMQ): accomplished. protocol: str | Enum, TCP, IPC or INPROC, default IPC protocol implemented by the server - **serializer: + **kwargs: + socket_address: str + socket address for connecting to TCP server rpc_serializer: custom implementation of RPC serializer if necessary json_serializer: @@ -1231,11 +1234,11 @@ class SyncZMQClient(BaseZMQClient, BaseSyncZMQ): def __init__(self, server_instance_name : str, identity : str, client_type = HTTP_SERVER, handshake : bool = True, protocol : str = "IPC", context : typing.Union[zmq.asyncio.Context, None] = None, - **serializer) -> None: + **kwargs) -> None: BaseZMQClient.__init__(self, server_instance_name=server_instance_name, - client_type=client_type, **serializer) + client_type=client_type, **kwargs) BaseSyncZMQ.__init__(self) - self.create_socket(server_instance_name, context, identity=identity, protocol=protocol) + self.create_socket(server_instance_name, context, identity=identity, protocol=protocol, **kwargs) self._terminate_context = context == None if handshake: self.handshake() @@ -1298,8 +1301,8 @@ def 
recv_reply(self, raise_client_side_exception : bool = False) -> typing.List[ return reply def execute(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT, - context : typing.Dict[str, typing.Any] = EMPTY_DICT, raise_client_side_exception : bool = False - ) -> typing.List[typing.Union[bytes, typing.Dict[str, typing.Any]]]: + timeout : typing.Optional[float] = None, context : typing.Dict[str, typing.Any] = EMPTY_DICT, + raise_client_side_exception : bool = False) -> typing.List[typing.Union[bytes, typing.Dict[str, typing.Any]]]: """ send an instruction and receive the reply for it. @@ -1318,7 +1321,7 @@ def execute(self, instruction : str, arguments : typing.Dict[str, typing.Any] = message id : bytes a byte representation of message id """ - self.send_instruction(instruction, arguments, context) + self.send_instruction(instruction, arguments, timeout, context) return self.recv_reply(raise_client_side_exception) @@ -1329,7 +1332,7 @@ def handshake(self) -> None: poller = zmq.Poller() poller.register(self.socket, zmq.POLLIN) while True: - self.socket.send_multipart(self.create_empty_message_with_type(HANDSHAKE)) + self.socket.send_multipart(self.craft_empty_message_with_type(HANDSHAKE)) self.logger.debug("sent Handshake to server '{}'".format(self.server_instance_name)) if poller.poll(500): try: @@ -1480,7 +1483,8 @@ async def async_recv_reply(self, raise_client_side_exception : bool) -> typing.L return reply async def async_execute(self, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT, - context : typing.Dict[str, typing.Any] = EMPTY_DICT, raise_client_side_exception = False): + timeout : typing.Optional[float] = None, context : typing.Dict[str, typing.Any] = EMPTY_DICT, + raise_client_side_exception = False): """ send an instruction and receive the reply for it. 
@@ -1499,7 +1503,7 @@ async def async_execute(self, instruction : str, arguments : typing.Dict[str, ty message id : bytes a byte representation of message id """ - await self.async_send_instruction(instruction, arguments, context) + await self.async_send_instruction(instruction, arguments, timeout, context) return await self.async_recv_reply(raise_client_side_exception) def exit(self) -> None: From f96c131c651bcc42c41735ce148118ef60e7026c Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 18 Feb 2024 14:16:14 +0100 Subject: [PATCH 031/167] RPC server v1 without timeout checks --- hololinked/server/zmq_message_brokers.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py index 04389bc..f7d272c 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -913,7 +913,7 @@ def _get_timeout_from_instruction(self, message : typing.Tuple[bytes]) -> float: return self.json_serializer.loads(message[CM_INDEX_TIMEOUT]) - async def poll(self, server : AsyncZMQServer): + async def poll(self): """ poll for instructions and append them to instructions list to pass them to ``Eventloop``/``RemoteObject``'s inproc server using an inner inproc client. Registers the messages for timeout calculation. 
@@ -922,14 +922,19 @@ async def poll(self, server : AsyncZMQServer): eventloop = asyncio.get_event_loop() self.inproc_client.handshake() await self.inproc_client.handshake_complete() + eventloop.call_soon(lambda : asyncio.create_task(self.recv_instruction(self.inproc_server))) + eventloop.call_soon(lambda : asyncio.create_task(self.recv_instruction(self.tcp_server))) + eventloop.call_soon(lambda : asyncio.create_task(self.recv_instruction(self.ipc_server))) # while not self.stop_poll: # sockets : typing.Tuple[zmq.Socket, int] = await self.poller.poll(-1) # self._poll_timeout) # type # for socket, _ in sockets: # print("here") + + async def recv_instruction(self, server : AsyncZMQServer): + eventloop = asyncio.get_event_loop() socket = server.socket while True: try: - # print("polling remaining") original_instruction = await socket.recv_multipart() if original_instruction[CM_INDEX_MESSAGE_TYPE] == HANDSHAKE: handshake_task = asyncio.create_task(self._handshake(original_instruction, socket)) @@ -943,8 +948,6 @@ async def poll(self, server : AsyncZMQServer): timeout_task = asyncio.create_task(self.process_timeouts(original_instruction, ready_to_process_event, timeout, socket)) eventloop.call_soon(lambda : timeout_task) - except zmq.Again: - continue except Exception as ex: # handle invalid message self.logger.error(f"exception occurred for message id {original_instruction[CM_INDEX_MESSAGE_ID]} - {str(ex)}") From 5f2ccbfbdf0e66329ef39f9562e35562dbe9da75 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 18 Feb 2024 14:27:11 +0100 Subject: [PATCH 032/167] update --- README.md | 18 ++++++++++-------- doc/source/installation.rst | 10 +++++++++- hololinked/server/zmq_message_brokers.py | 10 +--------- 3 files changed, 20 insertions(+), 18 deletions(-) diff --git a/README.md b/README.md index 592daf3..16159cb 100644 --- a/README.md +++ b/README.md @@ -2,17 +2,18 @@ ### Description -For beginners - `hololinked` is 
a pattern suited for instrument control and data acquisition over network using python. +For beginners - `hololinked` is an instrument control and data acquisition framework over network, purely based on python.
-For those familiar with RPC & web development - `hololinked` is a ZMQ-based RPC toolkit with customizable HTTP end-points. The intended application -is, as stated before, instrument control/data acquisition or controlling generic python object(-instance)s. +For those familiar with RPC & web development - `hololinked` is a ZMQ-based RPC toolkit with customizable HTTP end-points. +The main goal is, as stated before, to develop a pythonic (& pure python) modern package for instrument control & data acquisition +through network (SCADA), along with native HTTP support for communication with browser clients for browser based UIs. +This package can also be used for general RPC/controlling general python object instances on network for other applications +like computational algorithms, running scripts etc..
-The main goal is to develop a pythonic (& pure python) modern package for instrument control through network (SCADA), -along with native HTTP support for communication with browser clients for browser based UIs. ##### NOTE - The package is rather incomplete and uploaded only for API showcase and active development. Even RPC logic is not complete.
-- tutorial webpage - (will be filled) +- documentation and tutorial webpage - https://hololinked.readthedocs.io/en/latest/ - example repository - https://github.com/VigneshVSV/hololinked-examples - helper GUI - https://github.com/VigneshVSV/hololinked-portal - custom GUI examples - (will be filled) @@ -28,6 +29,8 @@ Support is already present for the following: - use serializer of your choice (except for HTTP) - Serpent, JSON, pickle etc. & extend serialization to suit your requirement (HTTP Server will support only JSON serializer) - asyncio compatible - async RPC Server event-loop and async HTTP Server - have flexibility in process architecture - run HTTP Server & python object in separate processes or in the same process, combine multiple objects in same server etc. +- choose from one of multiple ZMQ Protocols - TCP for network access, and IPC for multi-process same-PC applications at improved speed. +Optionally INPROC for multi-threaded applications. Again, please check examples for how-to & explanations of the above. @@ -37,8 +40,7 @@ clone the repository and install in develop mode `pip install -e .` for convenie ### In Development -- Object Proxy (Plain TCP Client- i.e. the RPC part is not fully developed yet) -- Multiple ZMQ Protocols - currently only IPC protocol is supported +- Object Proxy - only a very rudimentary implementation exists now. - HTTP 2.0 - Database support for storing and loading parameters (based on SQLAlchemy) when object dies and restarts diff --git a/doc/source/installation.rst b/doc/source/installation.rst index 5617875..51425c7 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -3,7 +3,8 @@ Installation ============ -As |module-highlighted| is still in idealogical & development state, it is recommended to clone it from github & install in develop mode. +As |module-highlighted| is still in idealogical & development state, it is only possible to clone it from github & install +directly (in develop mode). .. 
code:: shell @@ -32,5 +33,12 @@ Also check out: - https://github.com/VigneshVSV/hololinked-portal.git - GUI to access RemoteObjects & Data Visualization helper. +To build & host docs locally, in top directory: +.. code:: shell + conda activate hololinked + cd doc + make clean + make html + python -m http.server --directory build\html diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py index f7d272c..a108310 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -925,10 +925,7 @@ async def poll(self): eventloop.call_soon(lambda : asyncio.create_task(self.recv_instruction(self.inproc_server))) eventloop.call_soon(lambda : asyncio.create_task(self.recv_instruction(self.tcp_server))) eventloop.call_soon(lambda : asyncio.create_task(self.recv_instruction(self.ipc_server))) - # while not self.stop_poll: - # sockets : typing.Tuple[zmq.Socket, int] = await self.poller.poll(-1) # self._poll_timeout) # type - # for socket, _ in sockets: - # print("here") + async def recv_instruction(self, server : AsyncZMQServer): eventloop = asyncio.get_event_loop() @@ -972,18 +969,13 @@ async def tunnel_message_to_remote_objects(self): if ready_to_process_event is not None: ready_to_process_event.set() timeout = await timeout_task - # print("timeout result - ", timeout) if ready_to_process_event is None or not timeout: original_address = message[CM_INDEX_ADDRESS] message[CM_INDEX_ADDRESS] = self.inproc_client.server_address # replace address - # print("original address", original_address, "inproc address", message[0]) await self.inproc_client.socket.send_multipart(message) - # print("*********sent message to inproc") reply = await self.inproc_client.socket.recv_multipart() - # print("--------received message from inproc") reply[SM_INDEX_ADDRESS] = original_address await origin_socket.send_multipart(reply) - # print("###### sent message to client") else: await self._instructions_event.wait() 
self._instructions_event.clear() From 0433a2d0d3d9786637ed29d3f1b15d718c07e9f2 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Tue, 20 Feb 2024 13:44:52 +0100 Subject: [PATCH 033/167] changing request handlers to RPCHandler instead of GET, POST...etc. handler --- hololinked/server/HTTPServer.py | 412 ++++++++++------------- hololinked/server/__init__.py | 2 + hololinked/server/config.py | 4 + hololinked/server/data_classes.py | 1 + hololinked/server/handlers.py | 43 ++- hololinked/server/host_utilities.py | 4 +- hololinked/server/zmq_message_brokers.py | 11 +- 7 files changed, 236 insertions(+), 241 deletions(-) diff --git a/hololinked/server/HTTPServer.py b/hololinked/server/HTTPServer.py index 78b00b9..181d671 100644 --- a/hololinked/server/HTTPServer.py +++ b/hololinked/server/HTTPServer.py @@ -1,280 +1,226 @@ import logging import ssl -import asyncio -from typing import Dict, List, Callable, Union, Any -from multiprocessing import Process +import typing +from tornado import ioloop from tornado.web import Application -from tornado.routing import Router +from tornado.routing import ReversibleRuleRouter from tornado.httpserver import HTTPServer as TornadoHTTP1Server -# from tornado_http2.server import Server as TornadoHTTP2Server -from tornado import ioloop from tornado.httputil import HTTPServerRequest -from time import perf_counter +# from tornado_http2.server import Server as TornadoHTTP2Server from ..param import Parameterized from ..param.parameters import (Integer, IPAddress, ClassSelector, Selector, - TypedList, Boolean, String) - - -from .utils import create_default_logger -from .decorators import remote_method -from .http_methods import get, put, post, delete + TypedList, String) +from .data_classes import HTTPResource +from .utils import create_default_logger, run_method_somehow from .serializers import JSONSerializer -from .constants import GET, PUT, POST, OPTIONS, DELETE, USE_OBJECT_NAME, CALLABLE +from .constants import Instructions from 
.webserver_utils import log_request, update_resources from .zmq_message_brokers import MessageMappedZMQClientPool from .handlers import (BaseRequestHandler, GetResource, PutResource, OptionsResource, - PostResource, DeleteResource, FileHandlerResource) + PostResource, DeleteResource) from .remote_object import RemoteObject, RemoteObjectDB -from .eventloop import Consumer -from .host_utilities import HTTPServerUtilities, SERVER_INSTANCE_NAME -asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) -class CustomRouter(Router): +class CustomRouter(ReversibleRuleRouter): + + remote_object_http_resources : typing.Dict[str, HTTPResource] - def __init__(self, app : Application, logger : logging.Logger, IP : str, - file_server_paths) -> None: + def __init__(self, app : Application) -> None: self.app = app - self.logger = logger - self.IP = IP - self.logger.info('started webserver at {}, ready to receive requests.'.format(self.IP)) - self.file_server_paths = file_server_paths - + def find_handler(self, request : HTTPServerRequest): - start_time = perf_counter() - log_request(request, self.logger) - - if (request.method == GET): - for path in self.file_server_paths["STATIC_ROUTES"].keys(): - if request.path.startswith(path): - print("static handler") - return self.app.get_handler_delegate(request, FileHandlerResource, - target_kwargs=dict( - path=self.file_server_paths["STATIC_ROUTES"][path].directory - ), - path_args=[bytes(request.path.split('/')[-1], encoding='utf-8')]) - handler = GetResource - elif (request.method == POST): - handler = PostResource - elif (request.method == PUT): - handler = PutResource - elif (request.method == DELETE): - handler = DeleteResource - elif (request.method == OPTIONS): - handler = OptionsResource - else: - handler = OptionsResource + handler = super().find_handler(request=request) - return self.app.get_handler_delegate(request, handler, - target_kwargs=dict( - client_address='*', - start_time=start_time - )) + if handler is 
not None: + return self.app.get_handler_delegate(request, handler, + target_kwargs=dict( + resource=self.remote_object_http_resources.get(request.path), + client_address='*', + )) + return None + # start_time = perf_counter() + # log_request(request, self.logger) + + # if (request.method == GET): + # for path in self.file_server_paths["STATIC_ROUTES"].keys(): + # if request.path.startswith(path): + # print("static handler") + # return self.app.get_handler_delegate(request, FileHandlerResource, + # target_kwargs=dict( + # path=self.file_server_paths["STATIC_ROUTES"][path].directory + # ), + # path_args=[bytes(request.path.split('/')[-1], encoding='utf-8')]) + # handler = GetResource + # elif (request.method == POST): + # handler = PostResource + # elif (request.method == PUT): + # handler = PutResource + # elif (request.method == DELETE): + # handler = DeleteResource + # elif (request.method == OPTIONS): + # handler = OptionsResource + # else: + # handler = OptionsResource -__http_methods__ = { - GET : get, - POST : post, - PUT : put, - DELETE : delete -} class HTTPServer(Parameterized): - address = IPAddress( default = '0.0.0.0', - doc = "set custom IP address, default is localhost (0.0.0.0)" ) - port = Integer ( default = 8080, bounds = (0, 65535), + address = IPAddress(default='0.0.0.0', + doc = "set custom IP address, default is localhost (0.0.0.0)") + port = Integer(default=8080, bounds=(1, 65535), doc = "the port at which the server should be run (unique)" ) - protocol_version = Selector ( objects = [1.1, 2], default = 2, - doc = """for HTTP 2, SSL is mandatory. HTTP2 is recommended. 
- When no SSL configurations are provided, defaults to 1.1""" ) - logger = ClassSelector ( class_ = logging.Logger, default = None, allow_None = True, - doc = "Supply a custom logger here or set log_level parameter to a valid value" ) - log_level = Selector ( objects = [logging.DEBUG, logging.INFO, logging.ERROR, logging.CRITICAL, logging.ERROR], - default = logging.INFO, - doc = "Alternative to logger, this creates an internal logger with the specified log level" ) - consumers = TypedList ( item_type = (RemoteObject, Consumer, str), default = None, allow_None = True, - doc = "Remote Objects to be served by the HTTP server" ) - subscription = String ( default = None, allow_None = True, - doc = "Host Server to subscribe to coordinate starting sequence of remote objects & web GUI" ) - json_serializer = ClassSelector ( class_ = JSONSerializer, default = None, allow_None = True, - doc = "optionally, supply your own JSON serializer for custom types" ) - ssl_context = ClassSelector ( class_ = ssl.SSLContext , default = None, allow_None = True, - doc = "use it for highly customized SSL context to provide encrypted communication" ) - certfile = String ( default = None, allow_None = True, - doc = """alternative to SSL context, provide certificate file & key file to allow the server - to create a SSL connection on its own""" ) - keyfile = String ( default = None, allow_None = True, - doc = """alternative to SSL context, provide certificate file & key file to allow the server - to create a SSL connection on its own""" ) - server_network_interface = String ( default = 'Ethernet', - doc = """Currently there is no logic to detect the IP addresss (as externally visible) correctly, therefore - please send the network interface name to retrieve the IP. 
If a DNS server is present or , you may leave - this field""" ) - - def __init__(self, consumers = None, port = 8080, address = '0.0.0.0', subscription = None, logger = None, - log_level = logging.INFO, certfile = None, keyfile = None, json_serializer = None, ssl_context = None, - protocol_version = 2 ) -> None: + protocol_version = Selector(objects=[1, 1.1, 2], default=2, + doc="for HTTP 2, SSL is mandatory. HTTP2 is recommended. \ + When no SSL configurations are provided, defaults to 1.1" ) + logger = ClassSelector(class_=logging.Logger, default=None, allow_None=True, + doc="Supply a custom logger here or set log_level parameter to a valid value" ) + log_level = Selector(objects=[logging.DEBUG, logging.INFO, logging.ERROR, logging.CRITICAL, logging.ERROR], + default=logging.INFO, + doc="Alternative to logger, this creates an internal logger with the specified log level" ) + remote_objects = TypedList(item_type=str, default=None, allow_None=True, + doc="Remote Objects to be served by the HTTP server" ) + host = String(default=None, allow_None=True, + doc="Host Server to subscribe to coordinate starting sequence of remote objects & web GUI" ) + serializer = ClassSelector(class_=JSONSerializer, default=None, allow_None=True, + doc="optionally, supply your own JSON serializer for custom types" ) + ssl_context = ClassSelector(class_=ssl.SSLContext, default=None, allow_None=True, + doc="use it for highly customized SSL context to provide encrypted communication" ) + certfile = String(default=None, allow_None=True, + doc="alternative to SSL context, provide certificate file & key file to allow the server \ + to create a SSL connection on its own") + keyfile = String(default=None, allow_None=True, + doc="alternative to SSL context, provide certificate file & key file to allow the server \ + to create a SSL connection on its own") + network_interface = String(default='Ethernet', + doc="Currently there is no logic to detect the IP addresss (as externally visible) 
correctly, \ + therefore please send the network interface name to retrieve the IP. If a DNS server is present, \ + you may leave this field" ) + + def __init__(self, remote_objects : typing.List[str], *, port : int = 8080, address : str = '0.0.0.0', + host : str = None, logger : typing.Optional[logging.Logger] = None, log_level : int = logging.INFO, + certfile : str = None, keyfile : str = None, serializer : JSONSerializer = None, + ssl_context : ssl.SSLContext = None, protocol_version : int = 1, + network_interface : str = 'Ethernet') -> None: super().__init__( - consumers = consumers, - port = port, - address = address, - subscription = subscription, - logger = logger, - log_level = log_level, - json_serializer = json_serializer, - protocol_version = protocol_version, - certfile = certfile, - keyfile = keyfile, - ssl_context = ssl_context - ) - # functions that the server directly serves - self.server_process = None - self.resources = dict( - FILE_SERVER = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), - GET = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), - POST = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), - PUT = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), - DELETE = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), - OPTIONS = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()) + remote_objects=remote_objects, + port=port, + address=address, + host=host, + logger=logger, + log_level=log_level, + serializer=serializer, + protocol_version=protocol_version, + certfile=certfile, + keyfile=keyfile, + ssl_context=ssl_context, + network_interface=network_interface ) + self.resources = {} + @property def all_ok(self) -> bool: - IP = "{}:{}".format(self.address, self.port) + self._IP = f"{self.address}:{self.port}" if self.logger is None: - self.logger = create_default_logger('{}|{}'.format(self.__class__.__name__, IP), self.log_level) - UtilitiesConsumer = Consumer(HTTPServerUtilities, logger = self.logger, db_config_file 
= None, - zmq_client_pool = None, instance_name = SERVER_INSTANCE_NAME, - remote_object_info = None) - if self.consumers is None: - self.consumers = [UtilitiesConsumer] - else: - self.consumers.append(UtilitiesConsumer) + self.logger = create_default_logger('{}|{}'.format(self.__class__.__name__, + f"{self.address}:{self.port}"), + self.log_level) return True - def start(self) -> None: - assert self.all_ok, 'HTTPServer all is not ok before starting' # Will always be True or cause some other exception - block : bool = True # currently block feature is not working with SSLContext due to pickling limitation of SSLContext - if block: - start_server(self.address, self.port, self.logger, self.subscription, self.consumers, self.resources, #type: ignore - self.ssl_context, self.json_serializer) + def listen(self) -> None: + assert self.all_ok, 'HTTPServer all is not ok before starting' + # Will always be True or cause some other exception + self._fetch_remote_object_resources() + self.event_loop = ioloop.IOLoop.current() + app = Application() + router=CustomRouter(app=app) + if self.protocol_version == 2: + raise NotImplementedError("Current HTTP2 is not implemented.") else: - self.server_process = Process(target = start_server, args = (self.address, self.port, self.logger, - self.subscription, self.consumers, self.resources, self.ssl_context, self.json_serializer)) - self.server_process.start() - + self.server = TornadoHTTP1Server(router, ssl_options=self.ssl_context) + self.server.listen(port=self.port, address=self.address) + self.logger.info(f'started webserver at {self.address}:{self.port}, ready to receive requests.') + self.event_loop.start() + + async def _fetch_remote_object_resources(self): + zmq_client_pool = MessageMappedZMQClientPool(self.remote_objects, + self._IP, json_serializer=self.serializer) + for client in zmq_client_pool: + await client.handshake_complete() + _, _, _, _, _, reply = await client.async_execute( + 
f'/{client.server_instance_name}/{Instructions.HTTP_RESOURCES}', + raise_client_side_exception=True) + update_resources(resources, reply["returnValue"]) # type: ignore + # _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/object-info', raise_client_side_exception = True) + # remote_object_info.append(RemoteObjectDB.RemoteObjectInfo(**reply["returnValue"])) # Should raise an exception if returnValue key is not found for some reason. + print("handshake complete") + def stop(self) -> None: - if self.server_process: - try: - self.server_process.close() - except ValueError: - self.server_process.kill() - self.server_process = None - - def http_method_decorator(self, http_method : str, URL_path = USE_OBJECT_NAME): - def decorator(given_func): - func = remote_method(URL_path=decorator.URL_path, - http_method=decorator.http_method)(given_func) - self.resources[http_method]["STATIC_ROUTES"][decorator.URL_path] = HTTPServerResourceData ( - what=CALLABLE, - instance_name='', - instruction=func, - fullpath=decorator.URL_path, - ) - - return func - decorator.http_method = http_method - decorator.URL_path = URL_path - return decorator - - def get(self, URL_path = USE_OBJECT_NAME): - return self.http_method_decorator(GET, URL_path) - - def post(self, URL_path = USE_OBJECT_NAME): - return self.http_method_decorator(POST, URL_path) + raise NotImplementedError("closing HTTP server currently not supported.") + self.server.close_all_connections() + self.event_loop.close() + - def put(self, URL_path = USE_OBJECT_NAME): - return self.http_method_decorator(PUT, URL_path) - - def delete(self, URL_path = USE_OBJECT_NAME): - return self.http_method_decorator(DELETE, URL_path) - - - -def start_server(address : str, port : int, logger : logging.Logger, subscription : str, - consumers : List[Union[Consumer, RemoteObject, str]], resources : Dict[str, Any], ssl_context : ssl.SSLContext, - json_serializer : JSONSerializer) -> None: - """ - A separate function 
exists to start the server to be able to fork from current process - """ - event_loop = ioloop.IOLoop.current() - event_loop.run_sync(lambda : _setup_server(address, port, logger, subscription, consumers, resources, ssl_context, - json_serializer)) - # written as async function because zmq client is async, therefore run_sync for current use - if BaseRequestHandler.zmq_client_pool is not None: - event_loop.add_callback(BaseRequestHandler.zmq_client_pool.poll) - event_loop.start() - -async def _setup_server(address : str, port : int, logger : logging.Logger, subscription : str, - consumers : List[Union[Consumer, RemoteObject, str]], resources : Dict[str, Dict[str, Any]], - ssl_context : ssl.SSLContext, json_serializer : JSONSerializer, version : float = 2) -> None: - IP = "{}:{}".format(address, port) - instance_names = [] - server_remote_objects = {} - remote_object_info = [] - if consumers is not None: - for consumer in consumers: - if isinstance(consumer, RemoteObject): - server_remote_objects[consumer.instance_name] = consumer - update_resources(resources, consumer.httpserver_resources) - remote_object_info.append(consumer.object_info) - elif isinstance(consumer, Consumer): - instance = consumer.consumer(*consumer.args, **consumer.kwargs) - server_remote_objects[instance.instance_name] = instance - update_resources(resources, instance.httpserver_resources) - remote_object_info.append(instance.object_info) - else: - instance_names.append(consumer) +# async def _setup_server(address : str, port : int, logger : logging.Logger, subscription : str, +# consumers : List[Union[Consumer, RemoteObject, str]], resources : Dict[str, Dict[str, Any]], +# ssl_context : ssl.SSLContext, json_serializer : JSONSerializer, version : float = 2) -> None: +# IP = "{}:{}".format(address, port) +# instance_names = [] +# server_remote_objects = {} +# remote_object_info = [] +# if consumers is not None: +# for consumer in consumers: +# if isinstance(consumer, RemoteObject): +# 
server_remote_objects[consumer.instance_name] = consumer +# update_resources(resources, consumer.httpserver_resources) +# remote_object_info.append(consumer.object_info) +# elif isinstance(consumer, Consumer): +# instance = consumer.consumer(*consumer.args, **consumer.kwargs) +# server_remote_objects[instance.instance_name] = instance +# update_resources(resources, instance.httpserver_resources) +# remote_object_info.append(instance.object_info) +# else: +# instance_names.append(consumer) - zmq_client_pool = MessageMappedZMQClientPool(instance_names, IP, json_serializer = json_serializer) - for client in zmq_client_pool: - await client.handshake_complete() - _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/resources/http', raise_client_side_exception = True) - update_resources(resources, reply["returnValue"]) # type: ignore - _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/object-info', raise_client_side_exception = True) - remote_object_info.append(RemoteObjectDB.RemoteObjectInfo(**reply["returnValue"])) # Should raise an exception if returnValue key is not found for some reason. +# zmq_client_pool = MessageMappedZMQClientPool(instance_names, IP, json_serializer = json_serializer) +# for client in zmq_client_pool: +# await client.handshake_complete() +# _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/resources/http', raise_client_side_exception = True) +# update_resources(resources, reply["returnValue"]) # type: ignore +# _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/object-info', raise_client_side_exception = True) +# remote_object_info.append(RemoteObjectDB.RemoteObjectInfo(**reply["returnValue"])) # Should raise an exception if returnValue key is not found for some reason. 
- for RO in server_remote_objects.values(): - if isinstance(RO, HTTPServerUtilities): - RO.zmq_client_pool = zmq_client_pool - RO.remote_object_info = remote_object_info - RO._httpserver_resources = resources - if subscription: - await RO.subscribe_to_host(subscription, port) - break +# for RO in server_remote_objects.values(): +# if isinstance(RO, HTTPServerUtilities): +# RO.zmq_client_pool = zmq_client_pool +# RO.remote_object_info = remote_object_info +# RO._httpserver_resources = resources +# if subscription: +# await RO.subscribe_to_host(subscription, port) +# break - BaseRequestHandler.zmq_client_pool = zmq_client_pool - BaseRequestHandler.json_serializer = zmq_client_pool.json_serializer - BaseRequestHandler.local_objects = server_remote_objects - GetResource.resources = resources.get(GET, dict()) - PostResource.resources = resources.get(POST, dict()) - PutResource.resources = resources.get(PUT, dict()) - DeleteResource.resources = resources.get(DELETE, dict()) - OptionsResource.resources = resources.get(OPTIONS, dict()) - # log_resources(logger, resources) - Router = CustomRouter(Application(), logger, IP, resources.get('FILE_SERVER')) - # if version == 2: - # S = TornadoHTTP2Server(Router, ssl_options=ssl_context) - # else: - S = TornadoHTTP1Server(Router, ssl_options=ssl_context) - S.listen(port=port, address=address) +# BaseRequestHandler.zmq_client_pool = zmq_client_pool +# BaseRequestHandler.json_serializer = zmq_client_pool.json_serializer +# BaseRequestHandler.local_objects = server_remote_objects +# GetResource.resources = resources.get(GET, dict()) +# PostResource.resources = resources.get(POST, dict()) +# PutResource.resources = resources.get(PUT, dict()) +# DeleteResource.resources = resources.get(DELETE, dict()) +# OptionsResource.resources = resources.get(OPTIONS, dict()) +# # log_resources(logger, resources) +# Router = CustomRouter(Application(), logger, IP, resources.get('FILE_SERVER')) +# # if version == 2: +# # S = 
TornadoHTTP2Server(Router, ssl_options=ssl_context) +# # else: +# S = TornadoHTTP1Server(Router, ssl_options=ssl_context) +# S.listen(port=port, address=address) __all__ = ['HTTPServer'] \ No newline at end of file diff --git a/hololinked/server/__init__.py b/hololinked/server/__init__.py index 414ba26..a7d17d2 100644 --- a/hololinked/server/__init__.py +++ b/hololinked/server/__init__.py @@ -13,3 +13,5 @@ from .HTTPServer import * from .host_utilities import * from .host_server import * + + diff --git a/hololinked/server/config.py b/hololinked/server/config.py index d350124..cbd7dad 100644 --- a/hololinked/server/config.py +++ b/hololinked/server/config.py @@ -24,12 +24,16 @@ SOFTWARE. """ +import asyncio import tempfile import os import platform from . import __version__ +asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + + class Configuration: __slots__ = [ diff --git a/hololinked/server/data_classes.py b/hololinked/server/data_classes.py index a2b87b1..755c6c7 100644 --- a/hololinked/server/data_classes.py +++ b/hololinked/server/data_classes.py @@ -160,6 +160,7 @@ class HTTPResource: path_format : typing.Optional[str] = field(default=None) path_regex : typing.Optional[typing.Pattern] = field(default=None) param_convertors : typing.Optional[typing.Dict] = field(default=None) + method : str = field(default="GET") # below are all dunders, when something else is added, be careful to remember to edit ObjectProxy logic when necessary # 'what' can be an 'ATTRIBUTE' or 'CALLABLE' (based on isparameter or iscallable) and 'instruction' diff --git a/hololinked/server/handlers.py b/hololinked/server/handlers.py index 6f7c8e1..de2e036 100644 --- a/hololinked/server/handlers.py +++ b/hololinked/server/handlers.py @@ -3,7 +3,7 @@ import typing from typing import List, Dict, Any, Union, Callable, Tuple from types import FunctionType -from tornado.web import RequestHandler, StaticFileHandler +from tornado.web import Application, RequestHandler, 
StaticFileHandler from tornado.iostream import StreamClosedError from tornado.httputil import HTTPServerRequest from time import perf_counter @@ -16,7 +16,7 @@ from .remote_object import RemoteObject from .eventloop import EventLoop from .utils import current_datetime_ms_str -from .data_classes import FileServerData +from .data_classes import FileServerData, HTTPResource # UnknownHTTPServerData = HTTPServerResourceData( # what = 'unknown', @@ -27,6 +27,45 @@ +class RPCHandler(RequestHandler): + + def initialize(self, resource : HTTPResource, client_address : str, + start_time : float) -> None: + self.resource = resource + self.client_address = client_address + self.start_time = start_time + + async def get(self): + if not self.resource.method == 'GET': + self.set_status(404, "not found") + + async def post(self): + if not self.resource.method == 'GET': + self.set_status(404, "not found") + + async def patch(self): + if not self.resource.method == 'GET': + self.set_status(404, "not found") + + async def put(self): + if not self.resource.method == 'GET': + self.set_status(404, "not found") + + async def delete(self): + if not self.resource.method == 'GET': + self.set_status(404, "not found") + + async def options(self): + self.set_status(204) + self.set_header("Access-Control-Allow-Origin", "*") + self.set_header("Access-Control-Allow-Headers", "*") + self.set_header("Access-Control-Allow-Methods", ', '.join(self.resource.method)) + self.finish() + + + + + class FileHandlerResource(StaticFileHandler): @classmethod diff --git a/hololinked/server/host_utilities.py b/hololinked/server/host_utilities.py index 0044b92..a159140 100644 --- a/hololinked/server/host_utilities.py +++ b/hololinked/server/host_utilities.py @@ -336,7 +336,9 @@ def create_primary_host(config_file : str, ssl_context : ssl.SSLContext, **serve (r"/settings", AppSettingsHandler), (r"/subscribers", SubscribersHandler), (r"/login", LoginHandler) - ], 
cookie_secret=base64.b64encode(os.urandom(32)).decode('utf-8') , **server_settings) + ], cookie_secret=base64.b64encode(os.urandom(32)).decode('utf-8') , + **server_settings) + return TornadoHTTP1Server(app, ssl_options=ssl_context) diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py index a108310..5a60645 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -1005,7 +1005,7 @@ async def _handshake(self, original_client_message: builtins.list[builtins.bytes await originating_socket.send_multipart(self.craft_reply_from_arguments( original_client_message[CM_INDEX_ADDRESS], original_client_message[CM_INDEX_CLIENT_TYPE], HANDSHAKE, original_client_message[CM_INDEX_MESSAGE_ID], - EMPTY_BYTE)) + EMPTY_DICT)) def exit(self): self.stop_poll = True @@ -1158,10 +1158,11 @@ def craft_instruction_from_arguments(self, instruction : str, arguments : typing if self.client_type == HTTP_SERVER: timeout : bytes = self.json_serializer.dumps(timeout) instruction : bytes = self.json_serializer.dumps(instruction) - # if arguments == b'': - # arguments : bytes = self.json_serializer.dumps({}) - # elif not isinstance(arguments, bytes): - arguments : bytes = self.json_serializer.dumps(arguments) + # TODO - following can be improved + if arguments == b'': + arguments : bytes = self.json_serializer.dumps({}) + elif not isinstance(arguments, bytes): + arguments : bytes = self.json_serializer.dumps(arguments) context : bytes = self.json_serializer.dumps(context) elif self.client_type == PROXY: timeout : bytes = self.rpc_serializer.dumps(timeout) From fc96435eaca6df5430019bc9d8fd9856c42e8cfd Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Tue, 20 Feb 2024 17:59:36 +0100 Subject: [PATCH 034/167] moved router to Rule object based matching directly within tornado - initialize kwargs passed directly @ init of Application --- hololinked/server/HTTPServer.py | 168 +++++++++++++++------------ 
hololinked/server/data_classes.py | 12 +- hololinked/server/eventloop.py | 21 ++-- hololinked/server/handlers.py | 16 ++- hololinked/server/remote_object.py | 2 +- hololinked/server/webserver_utils.py | 21 ++-- 6 files changed, 137 insertions(+), 103 deletions(-) diff --git a/hololinked/server/HTTPServer.py b/hololinked/server/HTTPServer.py index 181d671..b49deae 100644 --- a/hololinked/server/HTTPServer.py +++ b/hololinked/server/HTTPServer.py @@ -2,8 +2,7 @@ import ssl import typing from tornado import ioloop -from tornado.web import Application -from tornado.routing import ReversibleRuleRouter +from tornado.web import _HandlerDelegate, Application, RequestHandler from tornado.httpserver import HTTPServer as TornadoHTTP1Server from tornado.httputil import HTTPServerRequest # from tornado_http2.server import Server as TornadoHTTP2Server @@ -17,96 +16,77 @@ from .constants import Instructions from .webserver_utils import log_request, update_resources from .zmq_message_brokers import MessageMappedZMQClientPool -from .handlers import (BaseRequestHandler, GetResource, PutResource, OptionsResource, - PostResource, DeleteResource) +from .handlers import RPCHandler from .remote_object import RemoteObject, RemoteObjectDB -class CustomRouter(ReversibleRuleRouter): - remote_object_http_resources : typing.Dict[str, HTTPResource] - - def __init__(self, app : Application) -> None: - self.app = app - - def find_handler(self, request : HTTPServerRequest): - - handler = super().find_handler(request=request) - - if handler is not None: - return self.app.get_handler_delegate(request, handler, - target_kwargs=dict( - resource=self.remote_object_http_resources.get(request.path), - client_address='*', - )) - return None - - # start_time = perf_counter() - # log_request(request, self.logger) - - # if (request.method == GET): - # for path in self.file_server_paths["STATIC_ROUTES"].keys(): - # if request.path.startswith(path): - # print("static handler") - # return 
self.app.get_handler_delegate(request, FileHandlerResource, - # target_kwargs=dict( - # path=self.file_server_paths["STATIC_ROUTES"][path].directory - # ), - # path_args=[bytes(request.path.split('/')[-1], encoding='utf-8')]) - # handler = GetResource - # elif (request.method == POST): - # handler = PostResource - # elif (request.method == PUT): - # handler = PutResource - # elif (request.method == DELETE): - # handler = DeleteResource - # elif (request.method == OPTIONS): - # handler = OptionsResource - # else: - # handler = OptionsResource +# class CustomRouter(Application): +# remote_object_http_resources : typing.Dict[str, HTTPResource] +# def get_handler_delegate(self, +# request: HTTPServerRequest, +# target_class: type[RequestHandler], +# target_kwargs: typing.Dict[str, typing.Any] | None = None, +# path_args: typing.List[bytes] | None = None, +# path_kwargs: typing.Dict[str, bytes] | None = None +# ) -> _HandlerDelegate: +# try: +# if target_kwargs is None: +# target_kwargs = dict(resource=self.remote_object_http_resources.get(request.path)) +# else: +# target_kwargs.update('resource', self.remote_object_http_resources.get(request.path)) +# except Exception as ex: +# target_class = ExceptionHandler +# target_kwargs = None +# return super().get_handler_delegate(request, target_class, +# target_kwargs, path_args, path_kwargs) + + class HTTPServer(Parameterized): address = IPAddress(default='0.0.0.0', - doc = "set custom IP address, default is localhost (0.0.0.0)") + doc = "set custom IP address, default is localhost (0.0.0.0)") # type: str port = Integer(default=8080, bounds=(1, 65535), - doc = "the port at which the server should be run (unique)" ) + doc = "the port at which the server should be run (unique)" ) # ytype: int protocol_version = Selector(objects=[1, 1.1, 2], default=2, doc="for HTTP 2, SSL is mandatory. HTTP2 is recommended. 
\ - When no SSL configurations are provided, defaults to 1.1" ) + When no SSL configurations are provided, defaults to 1.1" ) # type: float logger = ClassSelector(class_=logging.Logger, default=None, allow_None=True, - doc="Supply a custom logger here or set log_level parameter to a valid value" ) + doc="Supply a custom logger here or set log_level parameter to a valid value" ) # type: logging.Logger log_level = Selector(objects=[logging.DEBUG, logging.INFO, logging.ERROR, logging.CRITICAL, logging.ERROR], default=logging.INFO, - doc="Alternative to logger, this creates an internal logger with the specified log level" ) + doc="Alternative to logger, this creates an internal logger with the specified log level" ) # type: int remote_objects = TypedList(item_type=str, default=None, allow_None=True, - doc="Remote Objects to be served by the HTTP server" ) + doc="Remote Objects to be served by the HTTP server" ) # type: typing.List[str] host = String(default=None, allow_None=True, - doc="Host Server to subscribe to coordinate starting sequence of remote objects & web GUI" ) + doc="Host Server to subscribe to coordinate starting sequence of remote objects & web GUI" ) # type: str serializer = ClassSelector(class_=JSONSerializer, default=None, allow_None=True, - doc="optionally, supply your own JSON serializer for custom types" ) + doc="optionally, supply your own JSON serializer for custom types" ) # type: JSONSerializer ssl_context = ClassSelector(class_=ssl.SSLContext, default=None, allow_None=True, - doc="use it for highly customized SSL context to provide encrypted communication" ) + doc="use it for highly customized SSL context to provide encrypted communication") # type: typing.Optional[ssl.SSLContext] certfile = String(default=None, allow_None=True, doc="alternative to SSL context, provide certificate file & key file to allow the server \ - to create a SSL connection on its own") + to create a SSL connection on its own") # type: str keyfile = String(default=None, 
allow_None=True, doc="alternative to SSL context, provide certificate file & key file to allow the server \ - to create a SSL connection on its own") + to create a SSL connection on its own") # type: str network_interface = String(default='Ethernet', doc="Currently there is no logic to detect the IP addresss (as externally visible) correctly, \ therefore please send the network interface name to retrieve the IP. If a DNS server is present, \ - you may leave this field" ) + you may leave this field" ) # type: str + request_handler = ClassSelector(default=RPCHandler, class_=RPCHandler, isinstance=False, + doc="custom web request handler of your choice" ) # type: RPCHandler def __init__(self, remote_objects : typing.List[str], *, port : int = 8080, address : str = '0.0.0.0', host : str = None, logger : typing.Optional[logging.Logger] = None, log_level : int = logging.INFO, certfile : str = None, keyfile : str = None, serializer : JSONSerializer = None, ssl_context : ssl.SSLContext = None, protocol_version : int = 1, - network_interface : str = 'Ethernet') -> None: + network_interface : str = 'Ethernet', request_handler : RPCHandler = RPCHandler) -> None: super().__init__( remote_objects=remote_objects, port=port, @@ -119,11 +99,10 @@ def __init__(self, remote_objects : typing.List[str], *, port : int = 8080, addr certfile=certfile, keyfile=keyfile, ssl_context=ssl_context, - network_interface=network_interface + network_interface=network_interface, + request_handler=request_handler ) - self.resources = {} - - + @property def all_ok(self) -> bool: self._IP = f"{self.address}:{self.port}" @@ -131,35 +110,80 @@ def all_ok(self) -> bool: self.logger = create_default_logger('{}|{}'.format(self.__class__.__name__, f"{self.address}:{self.port}"), self.log_level) + self.zmq_client_pool = MessageMappedZMQClientPool(self.remote_objects, + self._IP, json_serializer=self.serializer) + + self.resources = dict( + FILE_SERVER = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), 
+ GET = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), + POST = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), + PUT = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), + DELETE = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), + OPTIONS = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()) + ) return True + def listen(self) -> None: assert self.all_ok, 'HTTPServer all is not ok before starting' # Will always be True or cause some other exception - self._fetch_remote_object_resources() + run_method_somehow(self._fetch_remote_object_resources()) + self.request_handler.zmq_client_pool = self.zmq_client_pool self.event_loop = ioloop.IOLoop.current() - app = Application() - router=CustomRouter(app=app) + handlers = [] + for route, http_resource in self.resources["GET"]["STATIC_ROUTES"].items(): + handlers.append((route, self.request_handler, {'resource' : http_resource})) + """ + for handler based tornado rule matcher, the Rule object has following + signature + + def __init__( + self, + matcher: "Matcher", + target: Any, + target_kwargs: Optional[Dict[str, Any]] = None, + name: Optional[str] = None, + ) -> None: + + matcher - based on route + target - handler + target_kwargs - given to handler's initialize + name - ... 
+ + len == 2 tuple is route + handler + len == 3 tuple is route + handler + target kwargs + + so we give (path, RPCHandler, {'resource' : HTTPResource}) + + path is extracted from remote_method(URL_path='....') + RPCHandler is the base handler of this package + resource goes into target kwargs as the HTTPResource generated by + remote_method and RemoteParamater contains all the info given + to make RPCHandler work + """ + self.app = Application(handlers=handlers) + # self.app.remote_object_http_resources = self.resources["GET"]["STATIC_ROUTES"] + # self.router=(app=self.app) if self.protocol_version == 2: raise NotImplementedError("Current HTTP2 is not implemented.") + self.server = TornadoHTTP2Server(router, ssl_options=self.ssl_context) else: - self.server = TornadoHTTP1Server(router, ssl_options=self.ssl_context) + self.server = TornadoHTTP1Server(self.app, ssl_options=self.ssl_context) self.server.listen(port=self.port, address=self.address) self.logger.info(f'started webserver at {self.address}:{self.port}, ready to receive requests.') self.event_loop.start() + async def _fetch_remote_object_resources(self): - zmq_client_pool = MessageMappedZMQClientPool(self.remote_objects, - self._IP, json_serializer=self.serializer) - for client in zmq_client_pool: + for client in self.zmq_client_pool: await client.handshake_complete() _, _, _, _, _, reply = await client.async_execute( - f'/{client.server_instance_name}/{Instructions.HTTP_RESOURCES}', + f'/{client.server_instance_name}{Instructions.HTTP_RESOURCES}', raise_client_side_exception=True) - update_resources(resources, reply["returnValue"]) # type: ignore + update_resources(self.resources, reply["returnValue"]) # type: ignore # _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/object-info', raise_client_side_exception = True) # remote_object_info.append(RemoteObjectDB.RemoteObjectInfo(**reply["returnValue"])) # Should raise an exception if returnValue key is not found for some 
reason. - print("handshake complete") + def stop(self) -> None: raise NotImplementedError("closing HTTP server currently not supported.") diff --git a/hololinked/server/data_classes.py b/hololinked/server/data_classes.py index 755c6c7..a4e8f9b 100644 --- a/hololinked/server/data_classes.py +++ b/hololinked/server/data_classes.py @@ -184,10 +184,16 @@ def __setstate__(self, values : typing.Dict): def json(self): """ - Set use_json_method=True in ``serializers.JSONSerializer`` instance and pass the object to the - serializer directly to get the JSON. + Set use_json_method=True in ``serializers.JSONSerializer`` instance and pass the + object to the serializer directly to get the JSON. """ - return asdict(self) + return { + "what" : self.what, + "instance_name" : self.instance_name, + 'fullpath' : self.fullpath, + "instruction" : self.instruction, + "request_as_argument" : self.request_as_argument + } def compile_path(self): path_regex, self.path_format, param_convertors = compile_path(self.fullpath) diff --git a/hololinked/server/eventloop.py b/hololinked/server/eventloop.py index fc2b477..9e53635 100644 --- a/hololinked/server/eventloop.py +++ b/hololinked/server/eventloop.py @@ -4,10 +4,8 @@ import traceback import importlib import typing -import zmq import threading -import time -from collections import deque + from .utils import unique_id, wrap_text from .constants import * @@ -15,16 +13,17 @@ from .exceptions import * from .http_methods import post, get from .remote_object import * -from .zmq_message_brokers import AsyncPollingZMQServer, ZMQServerPool, ServerTypes, AsyncZMQClient +from .zmq_message_brokers import AsyncPollingZMQServer, ServerTypes from .remote_parameter import RemoteParameter -from ..param.parameters import Boolean, ClassSelector, TypedList, List as PlainList +from .remote_parameters import ClassSelector, TypedList, List class Consumer: - consumer = ClassSelector(default=None, allow_None=True, class_=RemoteObject, isinstance=False) - args = 
PlainList(default=None, allow_None=True, accept_tuple=True) - kwargs = TypedDict(default=None, allow_None=True, key_type=str) + consumer = ClassSelector(default=None, allow_None=True, class_=RemoteObject, isinstance=False, + remote=False) + args = List(default=None, allow_None=True, accept_tuple=True, remote=False) + kwargs = TypedDict(default=None, allow_None=True, key_type=str, remote=False) def __init__(self, consumer : typing.Type[RemoteObject], args : typing.Tuple = tuple(), **kwargs) -> None: if consumer is not None: @@ -45,7 +44,7 @@ class EventLoop(RemoteObject): server_type = ServerTypes.EVENTLOOP remote_objects = TypedList(item_type=(RemoteObject, Consumer), bounds=(0,100), allow_None=True, default=None, - doc="""list of RemoteObjects which are being executed""") #type: typing.List[RemoteObject] + doc="list of RemoteObjects which are being executed", remote=False) #type: typing.List[RemoteObject] # Remote Parameters uninstantiated_remote_objects = TypedDict(default=None, allow_None=True, key_type=str, @@ -200,8 +199,8 @@ async def run_single_target(cls, instance : RemoteObject) -> None: list_handler.setFormatter(instance.logger.handlers[0].formatter) instance.logger.addHandler(list_handler) try: - instance.logger.debug("""client {} of client type {} issued instruction {} with message id {}. - starting execution""".format(client, client_type, instruction_str, msg_id)) + instance.logger.debug("client {} of client type {} issued instruction {} with message id {}. 
\ + starting execution".format(client, client_type, instruction_str, msg_id)) return_value = await cls.execute_once(instance_name, instance, instruction_str, arguments) #type: ignore if not plain_reply: return_value = { diff --git a/hololinked/server/handlers.py b/hololinked/server/handlers.py index de2e036..faabf5d 100644 --- a/hololinked/server/handlers.py +++ b/hololinked/server/handlers.py @@ -29,16 +29,22 @@ class RPCHandler(RequestHandler): - def initialize(self, resource : HTTPResource, client_address : str, - start_time : float) -> None: + zmq_client_pool : MessageMappedZMQClientPool + + def initialize(self, resource : HTTPResource) -> None: self.resource = resource - self.client_address = client_address - self.start_time = start_time async def get(self): if not self.resource.method == 'GET': self.set_status(404, "not found") + self.set_status(200, "Exists") + self.write(""" +

I am alive


{} +

This is my resource - {} +
""".format(self.request, self.resource.json())) + self.finish() + async def post(self): if not self.resource.method == 'GET': self.set_status(404, "not found") @@ -64,8 +70,6 @@ async def options(self): - - class FileHandlerResource(StaticFileHandler): @classmethod diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index 45dfe18..5707a47 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -324,7 +324,7 @@ class RemoteSubobject(Parameterized, metaclass=RemoteObjectMetaclass): (http(s)://{domain and sub domain}/{instance name}). It is suggested to use the class name along with a unique name {class name}/{some unique name}. Instance names must be unique in your entire system.""") # type: str - httpserver_resources = RemoteParameter(readonly=True, URL_path='/resources/http', + httpserver_resources = RemoteParameter(readonly=True, URL_path='/resources/http-server', doc="""object's resources exposed to HTTP server""", fget=lambda self: self._httpserver_resources ) # type: typing.Dict[str, typing.Dict[str, HTTPResource]] rpc_resources = RemoteParameter(readonly=True, URL_path='/resources/object-proxy', doc= """object's resources exposed to RPC client, similar to HTTP resources but differs diff --git a/hololinked/server/webserver_utils.py b/hololinked/server/webserver_utils.py index 8b58c2a..2db8826 100644 --- a/hololinked/server/webserver_utils.py +++ b/hololinked/server/webserver_utils.py @@ -2,16 +2,16 @@ import textwrap import typing import ifaddr -from typing import Dict, Any, List # from tabulate import tabulate from tornado.httputil import HTTPServerRequest from .constants import CALLABLE, ATTRIBUTE, EVENT, FILE, IMAGE_STREAM -from .data_classes import FileServerData +from .data_classes import FileServerData, ServerSentEvent, HTTPResource from .zmq_message_brokers import AsyncZMQClient, SyncZMQClient -def update_resources(resources : Dict[str, Dict[str, Dict[str, Any]]], add : Dict[str, 
Dict[str, Any]]) -> None: +def update_resources(resources : typing.Dict[str, typing.Dict[str, typing.Dict[str, typing.Any]]], + add : typing.Dict[str, typing.Dict[str, typing.Any]]) -> None: file_server_routes = dict( STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict() @@ -20,25 +20,25 @@ def update_resources(resources : Dict[str, Dict[str, Dict[str, Any]]], add : Dic if http_method == 'FILE_SERVER': continue for URL_path, info in add[http_method].items(): - if isinstance(info, HTTPServerEventData): + if isinstance(info, ServerSentEvent): existing_map["STATIC_ROUTES"][URL_path] = info - elif isinstance(info, HTTPServerResourceData): + elif isinstance(info, HTTPResource): info.compile_path() if info.path_regex is None: existing_map["STATIC_ROUTES"][info.path_format] = info else: existing_map["DYNAMIC_ROUTES"][info.path_format] = info elif info["what"] == ATTRIBUTE or info["what"] == CALLABLE: - data = HTTPServerResourceData(**info) + data = HTTPResource(**info) data.compile_path() if data.path_regex is None: existing_map["STATIC_ROUTES"][data.path_format] = data else: existing_map["DYNAMIC_ROUTES"][data.path_format] = data elif info["what"] == EVENT: - existing_map["STATIC_ROUTES"][URL_path] = HTTPServerEventData(**info) + existing_map["STATIC_ROUTES"][URL_path] = ServerSentEvent(**info) elif info["what"] == IMAGE_STREAM: - existing_map["STATIC_ROUTES"][URL_path] = HTTPServerEventData(**info) + existing_map["STATIC_ROUTES"][URL_path] = ServerSentEvent(**info) elif info["what"] == FILE: data = FileServerData(**info) data.compile_path() @@ -51,7 +51,8 @@ def update_resources(resources : Dict[str, Dict[str, Dict[str, Any]]], add : Dic -async def update_resources_using_client(resources : Dict[str, Dict[str, Any]], remote_object_info : List, +async def update_resources_using_client(resources : typing.Dict[str, typing.Dict[str, typing.Any]], + remote_object_info : typing.List, client : typing.Union[AsyncZMQClient, SyncZMQClient]) -> None: from .remote_object import 
RemoteObjectDB _, _, _, _, _, reply = await client.async_execute('/resources/http', raise_client_side_exception = True) @@ -105,7 +106,7 @@ def log_request(request : HTTPServerRequest, logger : typing.Optional[logging.Lo resources_table_headers = ["URL", "method"] -def log_resources(logger : logging.Logger, resources : Dict[str, Dict[str, Any]] ) -> None: +def log_resources(logger : logging.Logger, resources : typing.Dict[str, typing.Dict[str, typing.Any]] ) -> None: if logger.level == logging.DEBUG: # check log level manually before cooking this long string text = """ From 1255eb39501a37d0c187346bbaaaee90c483891a Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Tue, 20 Feb 2024 21:04:30 +0100 Subject: [PATCH 035/167] RPCHandler and EventHandler added --- hololinked/server/HTTPServer.py | 29 +- hololinked/server/handlers.py | 498 +++++++++----------------------- 2 files changed, 130 insertions(+), 397 deletions(-) diff --git a/hololinked/server/HTTPServer.py b/hololinked/server/HTTPServer.py index b49deae..c3b8dd9 100644 --- a/hololinked/server/HTTPServer.py +++ b/hololinked/server/HTTPServer.py @@ -2,9 +2,8 @@ import ssl import typing from tornado import ioloop -from tornado.web import _HandlerDelegate, Application, RequestHandler +from tornado.web import Application from tornado.httpserver import HTTPServer as TornadoHTTP1Server -from tornado.httputil import HTTPServerRequest # from tornado_http2.server import Server as TornadoHTTP2Server from ..param import Parameterized @@ -21,32 +20,6 @@ - - -# class CustomRouter(Application): - -# remote_object_http_resources : typing.Dict[str, HTTPResource] - -# def get_handler_delegate(self, -# request: HTTPServerRequest, -# target_class: type[RequestHandler], -# target_kwargs: typing.Dict[str, typing.Any] | None = None, -# path_args: typing.List[bytes] | None = None, -# path_kwargs: typing.Dict[str, bytes] | None = None -# ) -> _HandlerDelegate: -# try: -# if target_kwargs is None: -# target_kwargs = 
dict(resource=self.remote_object_http_resources.get(request.path)) -# else: -# target_kwargs.update('resource', self.remote_object_http_resources.get(request.path)) -# except Exception as ex: -# target_class = ExceptionHandler -# target_kwargs = None -# return super().get_handler_delegate(request, target_class, -# target_kwargs, path_args, path_kwargs) - - - class HTTPServer(Parameterized): address = IPAddress(default='0.0.0.0', diff --git a/hololinked/server/handlers.py b/hololinked/server/handlers.py index faabf5d..e1f540b 100644 --- a/hololinked/server/handlers.py +++ b/hololinked/server/handlers.py @@ -1,452 +1,212 @@ # routing ideas from https://www.tornadoweb.org/en/branch6.3/routing.html -import traceback import typing -from typing import List, Dict, Any, Union, Callable, Tuple -from types import FunctionType -from tornado.web import Application, RequestHandler, StaticFileHandler +from json import JSONDecodeError +from tornado.web import RequestHandler, StaticFileHandler from tornado.iostream import StreamClosedError -from tornado.httputil import HTTPServerRequest from time import perf_counter -from .constants import (IMAGE_STREAM, EVENT, CALLABLE, ATTRIBUTE, FILE) from .serializers import JSONSerializer -from .path_converter import extract_path_parameters from .zmq_message_brokers import MessageMappedZMQClientPool, EventConsumer -from .webserver_utils import log_request -from .remote_object import RemoteObject -from .eventloop import EventLoop +from .webserver_utils import * from .utils import current_datetime_ms_str -from .data_classes import FileServerData, HTTPResource - -# UnknownHTTPServerData = HTTPServerResourceData( -# what = 'unknown', -# instance_name = 'unknown', -# fullpath='unknown', -# instruction = 'unknown' -# ) +from .data_classes import HTTPResource, ServerSentEvent class RPCHandler(RequestHandler): zmq_client_pool : MessageMappedZMQClientPool + json_serializer : JSONSerializer + clients : str - def initialize(self, resource : 
HTTPResource) -> None: + def initialize(self, resource : typing.Union[HTTPResource, ServerSentEvent]) -> None: self.resource = resource + def set_headers(self): + self.set_status(200) + self.set_header("Content-Type" , "application/json") + + async def get(self): if not self.resource.method == 'GET': - self.set_status(404, "not found") - self.set_status(200, "Exists") - self.write(""" -

I am alive


{} -

This is my resource - {} -
""".format(self.request, self.resource.json())) - + self.set_status(404) + else: + self.set_headers() + await self.handle_through_remote_object() self.finish() - + async def post(self): - if not self.resource.method == 'GET': + if not self.resource.method == 'POST': self.set_status(404, "not found") + else: + self.set_headers() + await self.handle_through_remote_object() + self.finish() async def patch(self): - if not self.resource.method == 'GET': + if not self.resource.method == 'PATCH': self.set_status(404, "not found") + else: + self.set_headers() + await self.handle_through_remote_object() + self.finish() async def put(self): - if not self.resource.method == 'GET': + if not self.resource.method == 'PUT': self.set_status(404, "not found") + else: + self.set_headers() + await self.handle_through_remote_object() + self.finish() async def delete(self): - if not self.resource.method == 'GET': + if not self.resource.method == 'DELETE': self.set_status(404, "not found") + else: + self.set_headers() + await self.handle_through_remote_object() + self.finish() async def options(self): self.set_status(204) - self.set_header("Access-Control-Allow-Origin", "*") + self.add_header("Access-Control-Allow-Origin", self.clients) self.set_header("Access-Control-Allow-Headers", "*") self.set_header("Access-Control-Allow-Methods", ', '.join(self.resource.method)) self.finish() - -class FileHandlerResource(StaticFileHandler): - - @classmethod - def get_absolute_path(cls, root: str, path: str) -> str: - """Returns the absolute location of ``path`` relative to ``root``. - - ``root`` is the path configured for this `StaticFileHandler` - (in most cases the ``static_path`` `Application` setting). - - This class method may be overridden in subclasses. By default - it returns a filesystem path, but other strings may be used - as long as they are unique and understood by the subclass's - overridden `get_content`. - - .. 
versionadded:: 3.1 + def prepare_arguments(self, + path_arguments : typing.Optional[typing.Dict] = None + ) -> typing.Dict[str, typing.Any]: """ - return root+path - - - -class BaseRequestHandler(RequestHandler): - """ - Defines functions common to Request Handlers - """ - zmq_client_pool : Union[MessageMappedZMQClientPool, None] = None - json_serializer : JSONSerializer - resources : Dict[str, Dict[str, Union[FileServerData, typing.Any]]] - # HTTPServerResourceData, HTTPServerEventData, - own_resources : dict - local_objects : Dict[str, RemoteObject] - - def initialize(self, client_address : str, start_time : float) -> None: - self.client_address = client_address - self.start_time = start_time - - async def handle_client(self) -> None: - pass - - def prepare_arguments(self, path_arguments : typing.Optional[typing.Dict] = None) -> Dict[str, Any]: - arguments = {} - for key, value in self.request.query_arguments.items(): - if len(value) == 1: - arguments[key] = self.json_serializer.loads(value[0]) - else: - arguments[key] = [self.json_serializer.loads(val) for val in value] + merges all arguments to a single JSON body (for example, to provide it to + method execution as parameters) + """ + try: + arguments = self.json_serializer.loads(self.request.arguments) + except JSONDecodeError: + arguments = {} + if len(self.request.query_arguments) >= 1: + for key, value in self.request.query_arguments.items(): + if len(value) == 1: + arguments[key] = self.json_serializer.loads(value[0]) + else: + arguments[key] = [self.json_serializer.loads(val) for val in value] if len(self.request.body) > 0: arguments.update(self.json_serializer.loads(self.request.body)) - if path_arguments is not None: - arguments.update(path_arguments) - print(arguments) return arguments - async def handle_func(self, instruction : Tuple[Callable, bool], arguments): - func, iscoroutine = instruction, instruction.scada_info.iscoroutine - if iscoroutine: - return self.json_serializer.dumps({ - 
"responseStatusCode" : 200, - "returnValue" : await func(**arguments) - }) - else: - return self.json_serializer.dumps({ - "responseStatusCode" : 200, - "returnValue" : func(**arguments) - }) - - async def handle_bound_method(self, info, arguments): - instance = self.local_objects[info.instance_name] - return self.json_serializer.dumps({ - "responseStatusCode" : 200, - "returnValue" : await EventLoop.execute_once(info.instance_name, instance, - info.instruction, arguments), - "state" : { - info.instance_name : instance.state_machine.current_state if instance.state_machine is not None else None - } - }) - - async def handle_instruction(self, info, path_arguments : typing.Optional[typing.Dict] = None) -> None: - self.set_status(200) - self.add_header("Access-Control-Allow-Origin", self.client_address) - self.set_header("Content-Type" , "application/json") + + async def handle_through_remote_object(self) -> None: try: - arguments = self.prepare_arguments(path_arguments) - context = dict(fetch_execution_logs = arguments.pop('fetch_execution_logs', False)) - timeout = arguments.pop('timeout', 3) - if info.http_request_as_argument: + arguments = self.prepare_arguments() + context = dict(fetch_execution_logs=arguments.pop('fetch_execution_logs', False)) + timeout = arguments.pop('timeout', None) + if self.resource.request_as_argument: arguments['request'] = self.request - if isinstance(info.instruction, FunctionType): - reply = await self.handle_func(info.instruction, arguments) # type: ignore - elif info.instance_name in self.local_objects: - reply = await self.handle_bound_method(info, arguments) - elif self.zmq_client_pool is None: - raise AttributeError("wrong resource finding logic - contact developer.") - else: - # let the body be decoded on the remote object side - reply = await self.zmq_client_pool.async_execute(info.instance_name, info.instruction, arguments, - context, False, timeout) # type: ignore - except Exception as E: - reply = self.json_serializer.dumps({ 
- "responseStatusCode" : 500, - "exception" : { - "message" : str(E), - "type" : repr(E).split('(', 1)[0], - "traceback" : traceback.format_exc().splitlines(), - "notes" : E.__notes__ if hasattr(E, "__notes__") else None # type: ignore - } - }) + reply = await self.zmq_client_pool.async_execute(self.resource.instance_name, + self.resource.instruction, arguments, + context=context, raise_client_side_exception=True, + server_timeout=timeout, client_timeout=None) # type: ignore + # message mapped client pool currently strips the data part from return message + # and provides that as reply directly + except Exception as ex: + reply = self.json_serializer.dumps(format_exception_as_json(ex)) if reply: self.write(reply) - self.add_header("Execution-Time", f"{((perf_counter()-self.start_time)*1000.0):.4f}") + + + +class EventHandler(RequestHandler): + + def initialize(self, resource : typing.Union[HTTPResource, ServerSentEvent]) -> None: + self.resource = resource + + def set_headers(self) -> None: + self.set_header("Content-Type", "text/event-stream") + self.set_header("Cache-Control", "no-cache") + self.set_header("Connection", "keep-alive") + + async def get(self): + self.set_headers() + await self.handle_datastream() + self.finish() + + def options(self): + self.set_status(204) + self.add_header("Access-Control-Allow-Origin", self.clients) + self.set_header("Access-Control-Allow-Methods", 'GET') self.finish() - async def handled_through_remote_object(self, info : HTTPServerRequest) -> bool: - data = self.resources["STATIC_ROUTES"].get(self.request.path, UnknownHTTPServerData) - if data.what == CALLABLE or data.what == ATTRIBUTE: - # Cannot use 'is' operator like 'if what is CALLABLE' because the remote object from which - # CALLABLE string was fetched is in another process - await self.handle_instruction(data) # type: ignore - return True - if data.what == EVENT: - return False - - for route in self.resources["DYNAMIC_ROUTES"].values(): - arguments = 
extract_path_parameters(self.request.path, route.path_regex, route.param_convertors) - if arguments and route.what != FILE: - await self.handle_instruction(route, arguments) - return True - return False - async def handled_as_datastream(self, request : HTTPServerRequest) -> bool: - event_info = self.resources["STATIC_ROUTES"].get(self.request.path, UnknownHTTPServerData) - if event_info.what == EVENT: - try: - event_consumer = EventConsumer(request.path, event_info.socket_address, - f"{request.path}_HTTPEvent@"+current_datetime_ms_str()) - except Exception as E: - reply = self.json_serializer.dumps({ - "responseStatusCode" : 500, - "exception" : { - "message" : str(E), - "type" : repr(E).split('(', 1)[0], - "traceback" : traceback.format_exc().splitlines(), - "notes" : E.__notes__ if hasattr(E, "__notes__") else None # type: ignore - } - }) - self.set_status(404) - self.add_header('Access-Control-Allow-Origin', self.client_address) - self.add_header('Content-Type' , 'application/json') - self.write(reply) - self.finish() - return True - - self.set_status(200) - self.set_header('Access-Control-Allow-Origin', self.client_address) - self.set_header("Content-Type", "text/event-stream") - self.set_header("Cache-Control", "no-cache") - self.set_header("Connection", "keep-alive") - # Need to check if this helps as events with HTTP alone is not reliable - # self.set_header("Content-Security-Policy", "connect-src 'self' http://localhost:8085;") + async def handle_datastream(self) -> None: + try: + event_consumer = EventConsumer(self.request.path, self.resource.socket_address, + f"{self.resource.event_name}|HTTPEvent|{current_datetime_ms_str()}") data_header = b'data: %s\n\n' while True: try: data = await event_consumer.receive_event() if data: # already JSON serialized - print(f"data sent") + # print(f"data sent") self.write(data_header % data) await self.flush() except StreamClosedError: break - except Exception as E: - print({ - "responseStatusCode" : 500, - "exception" : 
{ - "message" : str(E), - "type" : repr(E).split('(', 1)[0], - "traceback" : traceback.format_exc().splitlines(), - "notes" : E.__notes__ if hasattr(E, "__notes__") else None # type: ignore - }}) - self.finish() + except Exception as ex: + self.write(data_header % self.json_serializer.dumps( + format_exception_as_json(ex))) event_consumer.exit() - return True - return False - - async def handled_as_imagestream(self, request : HTTPServerRequest) -> bool: - event_info = self.resources["STATIC_ROUTES"].get(self.request.path, UnknownHTTPServerData) - if event_info.what == IMAGE_STREAM: - try: - event_consumer = EventConsumer(request.path, event_info.socket_address, - f"{request.path}_HTTPEvent@"+current_datetime_ms_str()) - except Exception as E: - reply = self.json_serializer.dumps({ - "responseStatusCode" : 500, - "exception" : { - "message" : str(E), - "type" : repr(E).split('(', 1)[0], - "traceback" : traceback.format_exc().splitlines(), - "notes" : E.__notes__ if hasattr(E, "__notes__") else None # type: ignore - } - }) - self.set_status(404) - self.add_header('Access-Control-Allow-Origin', self.client_address) - self.add_header('Content-Type' , 'application/json') - self.write(reply) - self.finish() - return True - - self.set_status(200) - self.set_header('Access-Control-Allow-Origin', self.client_address) + except Exception as ex: + self.write(data_header % self.json_serializer.dumps( + format_exception_as_json(ex))) + + + async def handled_imagestream(self) -> None: + try: self.set_header("Content-Type", "application/x-mpegURL") - self.set_header("Cache-Control", "no-cache") - self.set_header("Connection", "keep-alive") + event_consumer = EventConsumer(self.request.path, self.resource.socket_address, + f"{self.resource.event_name}|HTTPEvent|{current_datetime_ms_str()}") self.write("#EXTM3U\n") - # Need to check if this helps as events with HTTP alone is not reliable delimiter = "#EXTINF:{},\n" data_header = b'data:image/jpeg;base64,%s\n' while True: try: data = 
await event_consumer.receive_event() if data: - # already JSON serialized + # already serialized self.write(delimiter) self.write(data_header % data) - print(f"image data sent {data[0:100]}") + # print(f"image data sent {data[0:100]}") await self.flush() except StreamClosedError: break - except Exception as E: - print({ - "responseStatusCode" : 500, - "exception" : { - "message" : str(E), - "type" : repr(E).split('(', 1)[0], - "traceback" : traceback.format_exc().splitlines(), - "notes" : E.__notes__ if hasattr(E, "__notes__") else None # type: ignore - }}) - self.finish() + except Exception as ex: + self.write(data_header % self.json_serializer.dumps( + format_exception_as_json(ex))) event_consumer.exit() - return True - return False - - - - -class GetResource(BaseRequestHandler): - - async def handled_as_filestream(self, request : HTTPServerRequest) -> bool: - """this method is wrong and does not work""" - for route in self.resources["DYNAMIC_ROUTES"].values(): - arguments = extract_path_parameters(self.request.path, route.path_regex, route.param_convertors) - if arguments and route.what == FILE: - file_handler = FileHandler(self.application, request, path=route.directory) - # file_handler.initialize(data.directory) # type: ignore - await file_handler.get(arguments["filename"]) - return True - return False - - async def get(self): - # log_request(self.request) - if (await self.handled_through_remote_object(self.request)): - return - - elif (await self.handled_as_datastream(self.request)): - return - - elif (await self.handled_as_imagestream(self.request)): - return - - elif self.request.path in self.own_resources: - func = self.own_resources[self.request.path] - body = self.prepare_arguments() - func(self, body) - return - - self.set_status(404) - self.add_header("Access-Control-Allow-Origin", self.client_address) - self.finish() - - def paths(self, body): - self.set_status(200) - self.add_header("Access-Control-Allow-Origin", self.client_address) - 
self.add_header("Content-Type" , "application/json") - resources = dict( - GET = { - "STATIC_ROUTES" : GetResource.resources["STATIC_ROUTES"].keys(), - "DYNAMIC_ROUTES" : GetResource.resources["DYNAMIC_ROUTES"].keys() - }, - POST = { - "STATIC_ROUTES" : PostResource.resources["STATIC_ROUTES"].keys(), - "DYNAMIC_ROUTES" : PostResource.resources["DYNAMIC_ROUTES"].keys() - }, - PUT = { - "STATIC_ROUTES" : PutResource.resources["STATIC_ROUTES"].keys(), - "DYNAMIC_ROUTES" : PutResource.resources["DYNAMIC_ROUTES"].keys() - }, - DELETE = { - "STATIC_ROUTES" : DeleteResource.resources["STATIC_ROUTES"].keys(), - "DYNAMIC_ROUTES" : DeleteResource.resources["DYNAMIC_ROUTES"].keys() - }, - OPTIONS = { - "STATIC_ROUTES" : OptionsResource.resources["STATIC_ROUTES"].keys(), - "DYNAMIC_ROUTES" : OptionsResource.resources["DYNAMIC_ROUTES"].keys() - }, - ) - self.write(self.json_serializer.dumps(resources)) - self.finish() - - own_resources = { - '/paths' : paths, - } - - - -class PostResource(BaseRequestHandler): + except Exception as ex: + self.write(data_header % self.json_serializer.dumps( + format_exception_as_json(ex))) - async def post(self): - # log_request(self.request) - if (await self.handled_through_remote_object(self.request)): - return - - # elif self.request.path in self.own_resources: - # func = self.own_resources[self.request.path] - # body = self.decode_body(self.request.body) - # func(self, body) - # return - self.set_status(404) - self.add_header("Access-Control-Allow-Origin", self.client_address) - self.finish() - - -class PutResource(BaseRequestHandler): - - async def put(self): - - if (await self.handled_through_remote_object(self.request)): - return - - self.set_status(404) - self.add_header("Access-Control-Allow-Origin", self.client_address) - self.finish() - - -class PatchResource(BaseRequestHandler): - - async def patch(self): - - if (await self.handled_through_remote_object(self.request)): - return - - self.set_status(404) - 
self.add_header("Access-Control-Allow-Origin", self.client_address) - self.finish() - - - -class DeleteResource(BaseRequestHandler): - - async def delete(self): - - if (await self.handled_through_remote_object(self.request)): - return - - self.set_status(404) - self.add_header("Access-Control-Allow-Origin", self.client_address) - self.finish() +class FileHandler(StaticFileHandler): + @classmethod + def get_absolute_path(cls, root: str, path: str) -> str: + """ + Returns the absolute location of ``path`` relative to ``root``. + ``root`` is the path configured for this `StaticFileHandler` + (in most cases the ``static_path`` `Application` setting). -class OptionsResource(BaseRequestHandler): - """ - this is wrong philosophically - """ + This class method may be overridden in subclasses. By default + it returns a filesystem path, but other strings may be used + as long as they are unique and understood by the subclass's + overridden `get_content`. - async def options(self): - self.set_status(204) - self.set_header("Access-Control-Allow-Origin", "*") - self.set_header("Access-Control-Allow-Headers", "*") - self.set_header("Access-Control-Allow-Methods", 'GET, POST, PUT, DELETE, OPTIONS') - self.finish() + .. 
versionadded:: 3.1 + """ + return root+path \ No newline at end of file From e35b81f1d84b1d9b2e3e4476ed22e43c03af1be3 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Mon, 26 Feb 2024 07:53:29 +0100 Subject: [PATCH 036/167] new docs - unordered --- doc/source/autodoc/index.rst | 3 +++ doc/source/autodoc/server/decorators.rst | 6 ++++++ doc/source/autodoc/server/eventloop.rst | 6 ++++++ doc/source/autodoc/server/http_server.rst | 6 ++++++ doc/source/autodoc/server/remote_object.rst | 14 ++++++++++++++ .../autodoc/server/remote_parameter/boolean.rst | 10 ++++++++++ .../server/remote_parameter/class_selector.rst | 7 +++++++ .../autodoc/server/remote_parameter/index.rst | 16 +++++++++++++++- .../autodoc/server/remote_parameter/integer.rst | 7 +++++++ .../autodoc/server/remote_parameter/list.rst | 7 +++++++ .../autodoc/server/remote_parameter/number.rst | 7 +++++++ .../autodoc/server/remote_parameter/selector.rst | 7 +++++++ .../autodoc/server/remote_parameter/string.rst | 7 +++++++ .../autodoc/server/remote_parameter/tuple.rst | 7 +++++++ doc/source/conf.py | 4 ++++ 15 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 doc/source/autodoc/server/decorators.rst create mode 100644 doc/source/autodoc/server/eventloop.rst create mode 100644 doc/source/autodoc/server/http_server.rst create mode 100644 doc/source/autodoc/server/remote_object.rst create mode 100644 doc/source/autodoc/server/remote_parameter/boolean.rst create mode 100644 doc/source/autodoc/server/remote_parameter/class_selector.rst create mode 100644 doc/source/autodoc/server/remote_parameter/integer.rst create mode 100644 doc/source/autodoc/server/remote_parameter/list.rst create mode 100644 doc/source/autodoc/server/remote_parameter/number.rst create mode 100644 doc/source/autodoc/server/remote_parameter/selector.rst create mode 100644 doc/source/autodoc/server/remote_parameter/string.rst create mode 100644 doc/source/autodoc/server/remote_parameter/tuple.rst diff --git 
a/doc/source/autodoc/index.rst b/doc/source/autodoc/index.rst index 9c829c7..e169486 100644 --- a/doc/source/autodoc/index.rst +++ b/doc/source/autodoc/index.rst @@ -13,6 +13,9 @@ hololinked.server .. toctree:: :maxdepth: 1 + server/remote_object + server/eventloop + server/http_server server/remote_parameter/index server/zmq_message_brokers/index server/data_classes/index diff --git a/doc/source/autodoc/server/decorators.rst b/doc/source/autodoc/server/decorators.rst new file mode 100644 index 0000000..c4f65f5 --- /dev/null +++ b/doc/source/autodoc/server/decorators.rst @@ -0,0 +1,6 @@ +decorators +========== + +.. autoclass:: hololinked.server.decorators + :members: + :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/eventloop.rst b/doc/source/autodoc/server/eventloop.rst new file mode 100644 index 0000000..9085d14 --- /dev/null +++ b/doc/source/autodoc/server/eventloop.rst @@ -0,0 +1,6 @@ +EventLoop +========= + +.. autoclass:: hololinked.server.eventloop.EventLoop + :members: + :show-inheritance: diff --git a/doc/source/autodoc/server/http_server.rst b/doc/source/autodoc/server/http_server.rst new file mode 100644 index 0000000..59e4f57 --- /dev/null +++ b/doc/source/autodoc/server/http_server.rst @@ -0,0 +1,6 @@ +HTTPServer +========== + +.. autoclass:: hololinked.server.HTTPServer.HTTPServer + :members: + :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/remote_object.rst b/doc/source/autodoc/server/remote_object.rst new file mode 100644 index 0000000..6cea058 --- /dev/null +++ b/doc/source/autodoc/server/remote_object.rst @@ -0,0 +1,14 @@ +Remote Object +============= + +.. autoclass:: hololinked.server.remote_object.RemoteObject + :members: + :show-inheritance: + +.. autoclass:: hololinked.server.remote_object.RemoteSubobject + :members: + :show-inheritance: + +.. 
autoclass:: hololinked.server.remote_object.StateMachine + :members: + :show-inheritance: diff --git a/doc/source/autodoc/server/remote_parameter/boolean.rst b/doc/source/autodoc/server/remote_parameter/boolean.rst new file mode 100644 index 0000000..bdc4c4e --- /dev/null +++ b/doc/source/autodoc/server/remote_parameter/boolean.rst @@ -0,0 +1,10 @@ +Boolean +======= + +.. toctree:: + :hidden: + :maxdepth: 1 + +.. autoclass:: hololinked.server.remote_parameters.Boolean + :members: + :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/remote_parameter/class_selector.rst b/doc/source/autodoc/server/remote_parameter/class_selector.rst new file mode 100644 index 0000000..fbd1892 --- /dev/null +++ b/doc/source/autodoc/server/remote_parameter/class_selector.rst @@ -0,0 +1,7 @@ +.. toctree:: + :hidden: + :maxdepth: 1 + +.. autoclass:: hololinked.server.remote_parameters.ClassSelector + :members: + :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/remote_parameter/index.rst b/doc/source/autodoc/server/remote_parameter/index.rst index 780d239..5d019d6 100644 --- a/doc/source/autodoc/server/remote_parameter/index.rst +++ b/doc/source/autodoc/server/remote_parameter/index.rst @@ -2,11 +2,25 @@ Remote Parameters ================= .. toctree:: + :hidden: :maxdepth: 1 + + string + number + integer + selector + class_selector + boolean + tuple + list RemoteParameter --------------- .. autoclass:: hololinked.server.remote_parameter.RemoteParameter - :show-inheritance: \ No newline at end of file + :members: + :show-inheritance: + + + diff --git a/doc/source/autodoc/server/remote_parameter/integer.rst b/doc/source/autodoc/server/remote_parameter/integer.rst new file mode 100644 index 0000000..6e87334 --- /dev/null +++ b/doc/source/autodoc/server/remote_parameter/integer.rst @@ -0,0 +1,7 @@ +.. toctree:: + :hidden: + :maxdepth: 1 + +.. 
autoclass:: hololinked.server.remote_parameters.Integer + :members: + :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/remote_parameter/list.rst b/doc/source/autodoc/server/remote_parameter/list.rst new file mode 100644 index 0000000..d5f0266 --- /dev/null +++ b/doc/source/autodoc/server/remote_parameter/list.rst @@ -0,0 +1,7 @@ +.. toctree:: + :hidden: + :maxdepth: 1 + +.. autoclass:: hololinked.server.remote_parameters.List + :members: + :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/remote_parameter/number.rst b/doc/source/autodoc/server/remote_parameter/number.rst new file mode 100644 index 0000000..cac2a71 --- /dev/null +++ b/doc/source/autodoc/server/remote_parameter/number.rst @@ -0,0 +1,7 @@ +.. toctree:: + :hidden: + :maxdepth: 1 + +.. autoclass:: hololinked.server.remote_parameters.Number + :members: + :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/remote_parameter/selector.rst b/doc/source/autodoc/server/remote_parameter/selector.rst new file mode 100644 index 0000000..3f73ca8 --- /dev/null +++ b/doc/source/autodoc/server/remote_parameter/selector.rst @@ -0,0 +1,7 @@ +.. toctree:: + :hidden: + :maxdepth: 1 + +.. autoclass:: hololinked.server.remote_parameters.Selector + :members: + :show-inheritance: \ No newline at end of file diff --git a/doc/source/autodoc/server/remote_parameter/string.rst b/doc/source/autodoc/server/remote_parameter/string.rst new file mode 100644 index 0000000..6efe82b --- /dev/null +++ b/doc/source/autodoc/server/remote_parameter/string.rst @@ -0,0 +1,7 @@ +.. toctree:: + :hidden: + :maxdepth: 1 + +.. 
autoclass:: hololinked.server.remote_parameters.String + :members: + :show-inheritance: diff --git a/doc/source/autodoc/server/remote_parameter/tuple.rst b/doc/source/autodoc/server/remote_parameter/tuple.rst new file mode 100644 index 0000000..407d50b --- /dev/null +++ b/doc/source/autodoc/server/remote_parameter/tuple.rst @@ -0,0 +1,7 @@ +.. toctree:: + :hidden: + :maxdepth: 1 + +.. autoclass:: hololinked.server.remote_parameters.Tuple + :members: + :show-inheritance: \ No newline at end of file diff --git a/doc/source/conf.py b/doc/source/conf.py index 08cf640..f5c299c 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -57,6 +57,10 @@ "**": ["sidebar-nav-bs"] } +html_theme_options = { + "secondary_sidebar_items": ["page-toc", "sourcelink"] +} + pygments_style = 'vs' # Add any paths that contain custom static files (such as style sheets) here, From 6f088dbf9479914b7491293079ac21216a52fc95 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Mon, 26 Feb 2024 07:54:07 +0100 Subject: [PATCH 037/167] web request handler moved to tornado interal routing --- hololinked/server/HTTPServer.py | 126 +++----- hololinked/server/handlers.py | 66 ++-- hololinked/server/proxy_client.py | 391 ----------------------- hololinked/server/remote_object.py | 9 +- hololinked/server/utils.py | 15 +- hololinked/server/webserver_utils.py | 14 +- hololinked/server/zmq_message_brokers.py | 2 +- 7 files changed, 108 insertions(+), 515 deletions(-) diff --git a/hololinked/server/HTTPServer.py b/hololinked/server/HTTPServer.py index c3b8dd9..c038848 100644 --- a/hololinked/server/HTTPServer.py +++ b/hololinked/server/HTTPServer.py @@ -9,23 +9,25 @@ from ..param import Parameterized from ..param.parameters import (Integer, IPAddress, ClassSelector, Selector, TypedList, String) -from .data_classes import HTTPResource -from .utils import create_default_logger, run_method_somehow +from .utils import create_default_logger, run_coro_sync, run_method_somehow from .serializers import 
JSONSerializer from .constants import Instructions from .webserver_utils import log_request, update_resources from .zmq_message_brokers import MessageMappedZMQClientPool -from .handlers import RPCHandler +from .handlers import RPCHandler, BaseHandler, EventHandler from .remote_object import RemoteObject, RemoteObjectDB class HTTPServer(Parameterized): + """ + HTTP(s) server to route requests to ``RemoteObject`` + """ address = IPAddress(default='0.0.0.0', - doc = "set custom IP address, default is localhost (0.0.0.0)") # type: str + doc="set custom IP address, default is localhost (0.0.0.0)") # type: str port = Integer(default=8080, bounds=(1, 65535), - doc = "the port at which the server should be run (unique)" ) # ytype: int + doc="the port at which the server should be run (unique)" ) # ytype: int protocol_version = Selector(objects=[1, 1.1, 2], default=2, doc="for HTTP 2, SSL is mandatory. HTTP2 is recommended. \ When no SSL configurations are provided, defaults to 1.1" ) # type: float @@ -49,11 +51,15 @@ class HTTPServer(Parameterized): doc="alternative to SSL context, provide certificate file & key file to allow the server \ to create a SSL connection on its own") # type: str network_interface = String(default='Ethernet', - doc="Currently there is no logic to detect the IP addresss (as externally visible) correctly, \ - therefore please send the network interface name to retrieve the IP. If a DNS server is present, \ - you may leave this field" ) # type: str + doc="Currently there is no logic to detect the IP addresss (as externally visible) correctly, \ + therefore please send the network interface name to retrieve the IP. 
If a DNS server is present, \ + you may leave this field" ) # type: str request_handler = ClassSelector(default=RPCHandler, class_=RPCHandler, isinstance=False, - doc="custom web request handler of your choice" ) # type: RPCHandler + doc="custom web request handler of your choice" ) # type: RPCHandler + event_handler = ClassSelector(default=EventHandler, class_=(EventHandler, BaseHandler), isinstance=False, + doc="custom event handler of your choice for handling events") # type: typing.Union[BaseHandler, EventHandler] + allowed_clients = TypedList(item_type=str, + doc="short attribute for setting client in CORS, overload set_headers() to implement custom CORS") def __init__(self, remote_objects : typing.List[str], *, port : int = 8080, address : str = '0.0.0.0', host : str = None, logger : typing.Optional[logging.Logger] = None, log_level : int = logging.INFO, @@ -76,6 +82,7 @@ def __init__(self, remote_objects : typing.List[str], *, port : int = 8080, addr request_handler=request_handler ) + @property def all_ok(self) -> bool: self._IP = f"{self.address}:{self.port}" @@ -83,9 +90,14 @@ def all_ok(self) -> bool: self.logger = create_default_logger('{}|{}'.format(self.__class__.__name__, f"{self.address}:{self.port}"), self.log_level) + self.zmq_client_pool = MessageMappedZMQClientPool(self.remote_objects, self._IP, json_serializer=self.serializer) - + BaseHandler.zmq_client_pool = self.zmq_client_pool + BaseHandler.json_serializer = self.serializer + BaseHandler.logger = self.logger + BaseHandler.clients = ', '.join(self.allowed_clients) + self.resources = dict( FILE_SERVER = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), GET = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), @@ -94,18 +106,10 @@ def all_ok(self) -> bool: DELETE = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), OPTIONS = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()) ) - return True - - def listen(self) -> None: - assert self.all_ok, 'HTTPServer all is not ok before 
starting' - # Will always be True or cause some other exception - run_method_somehow(self._fetch_remote_object_resources()) - self.request_handler.zmq_client_pool = self.zmq_client_pool - self.event_loop = ioloop.IOLoop.current() - handlers = [] + self.handlers = [] for route, http_resource in self.resources["GET"]["STATIC_ROUTES"].items(): - handlers.append((route, self.request_handler, {'resource' : http_resource})) + self.handlers.append((route, self.request_handler, {'resource' : http_resource})) """ for handler based tornado rule matcher, the Rule object has following signature @@ -129,21 +133,30 @@ def __init__( so we give (path, RPCHandler, {'resource' : HTTPResource}) path is extracted from remote_method(URL_path='....') - RPCHandler is the base handler of this package + RPCHandler is the base handler of this package for RPC purposes resource goes into target kwargs as the HTTPResource generated by remote_method and RemoteParamater contains all the info given to make RPCHandler work """ - self.app = Application(handlers=handlers) - # self.app.remote_object_http_resources = self.resources["GET"]["STATIC_ROUTES"] - # self.router=(app=self.app) + return True + + + def listen(self) -> None: + assert self.all_ok, 'HTTPServer all is not ok before starting' + # Will always be True or cause some other exception + run_method_somehow(self._fetch_remote_object_resources()) + + self.event_loop = ioloop.IOLoop.current() + # self.event_loop.add_future(self._fetch_remote_object_resources()) + + self.app = Application(handlers=self.handlers) if self.protocol_version == 2: raise NotImplementedError("Current HTTP2 is not implemented.") self.server = TornadoHTTP2Server(router, ssl_options=self.ssl_context) else: self.server = TornadoHTTP1Server(self.app, ssl_options=self.ssl_context) self.server.listen(port=self.port, address=self.address) - self.logger.info(f'started webserver at {self.address}:{self.port}, ready to receive requests.') + self.logger.info(f'started webserver 
at {self._IP}, ready to receive requests.') self.event_loop.start() @@ -157,67 +170,12 @@ async def _fetch_remote_object_resources(self): # _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/object-info', raise_client_side_exception = True) # remote_object_info.append(RemoteObjectDB.RemoteObjectInfo(**reply["returnValue"])) # Should raise an exception if returnValue key is not found for some reason. - - def stop(self) -> None: - raise NotImplementedError("closing HTTP server currently not supported.") - self.server.close_all_connections() - self.event_loop.close() - + def stop(self) -> None: + self.server.stop() + run_coro_sync(self.server.close_all_connections()) + self.event_loop.close() -# async def _setup_server(address : str, port : int, logger : logging.Logger, subscription : str, -# consumers : List[Union[Consumer, RemoteObject, str]], resources : Dict[str, Dict[str, Any]], -# ssl_context : ssl.SSLContext, json_serializer : JSONSerializer, version : float = 2) -> None: -# IP = "{}:{}".format(address, port) -# instance_names = [] -# server_remote_objects = {} -# remote_object_info = [] -# if consumers is not None: -# for consumer in consumers: -# if isinstance(consumer, RemoteObject): -# server_remote_objects[consumer.instance_name] = consumer -# update_resources(resources, consumer.httpserver_resources) -# remote_object_info.append(consumer.object_info) -# elif isinstance(consumer, Consumer): -# instance = consumer.consumer(*consumer.args, **consumer.kwargs) -# server_remote_objects[instance.instance_name] = instance -# update_resources(resources, instance.httpserver_resources) -# remote_object_info.append(instance.object_info) -# else: -# instance_names.append(consumer) - -# zmq_client_pool = MessageMappedZMQClientPool(instance_names, IP, json_serializer = json_serializer) -# for client in zmq_client_pool: -# await client.handshake_complete() -# _, _, _, _, _, reply = await 
client.read_attribute('/'+client.server_instance_name + '/resources/http', raise_client_side_exception = True) -# update_resources(resources, reply["returnValue"]) # type: ignore -# _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/object-info', raise_client_side_exception = True) -# remote_object_info.append(RemoteObjectDB.RemoteObjectInfo(**reply["returnValue"])) # Should raise an exception if returnValue key is not found for some reason. - -# for RO in server_remote_objects.values(): -# if isinstance(RO, HTTPServerUtilities): -# RO.zmq_client_pool = zmq_client_pool -# RO.remote_object_info = remote_object_info -# RO._httpserver_resources = resources -# if subscription: -# await RO.subscribe_to_host(subscription, port) -# break - -# BaseRequestHandler.zmq_client_pool = zmq_client_pool -# BaseRequestHandler.json_serializer = zmq_client_pool.json_serializer -# BaseRequestHandler.local_objects = server_remote_objects -# GetResource.resources = resources.get(GET, dict()) -# PostResource.resources = resources.get(POST, dict()) -# PutResource.resources = resources.get(PUT, dict()) -# DeleteResource.resources = resources.get(DELETE, dict()) -# OptionsResource.resources = resources.get(OPTIONS, dict()) -# # log_resources(logger, resources) -# Router = CustomRouter(Application(), logger, IP, resources.get('FILE_SERVER')) -# # if version == 2: -# # S = TornadoHTTP2Server(Router, ssl_options=ssl_context) -# # else: -# S = TornadoHTTP1Server(Router, ssl_options=ssl_context) -# S.listen(port=port, address=address) __all__ = ['HTTPServer'] \ No newline at end of file diff --git a/hololinked/server/handlers.py b/hololinked/server/handlers.py index e1f540b..1b3d4fd 100644 --- a/hololinked/server/handlers.py +++ b/hololinked/server/handlers.py @@ -1,9 +1,9 @@ # routing ideas from https://www.tornadoweb.org/en/branch6.3/routing.html import typing +import logging from json import JSONDecodeError from tornado.web import RequestHandler, 
StaticFileHandler from tornado.iostream import StreamClosedError -from time import perf_counter from .serializers import JSONSerializer from .zmq_message_brokers import MessageMappedZMQClientPool, EventConsumer @@ -13,15 +13,43 @@ -class RPCHandler(RequestHandler): +class BaseHandler(RequestHandler): zmq_client_pool : MessageMappedZMQClientPool json_serializer : JSONSerializer clients : str + logger : logging.Logger def initialize(self, resource : typing.Union[HTTPResource, ServerSentEvent]) -> None: self.resource = resource + def set_headers(self): + raise NotImplementedError("implement set headers in child class to call it", + " before directing the request to RemoteObject") + + def prepare_arguments(self) -> typing.Dict[str, typing.Any]: + """ + merges all arguments to a single JSON body (for example, to provide it to + method execution as parameters) + """ + try: + arguments = self.json_serializer.loads(self.request.arguments) + except JSONDecodeError: + arguments = {} + if len(self.request.query_arguments) >= 1: + for key, value in self.request.query_arguments.items(): + if len(value) == 1: + arguments[key] = self.json_serializer.loads(value[0]) + else: + arguments[key] = [self.json_serializer.loads(val) for val in value] + if len(self.request.body) > 0: + arguments.update(self.json_serializer.loads(self.request.body)) + return arguments + + + +class RPCHandler(BaseHandler): + def set_headers(self): self.set_status(200) self.set_header("Content-Type" , "application/json") @@ -75,28 +103,6 @@ async def options(self): self.finish() - def prepare_arguments(self, - path_arguments : typing.Optional[typing.Dict] = None - ) -> typing.Dict[str, typing.Any]: - """ - merges all arguments to a single JSON body (for example, to provide it to - method execution as parameters) - """ - try: - arguments = self.json_serializer.loads(self.request.arguments) - except JSONDecodeError: - arguments = {} - if len(self.request.query_arguments) >= 1: - for key, value in 
self.request.query_arguments.items(): - if len(value) == 1: - arguments[key] = self.json_serializer.loads(value[0]) - else: - arguments[key] = [self.json_serializer.loads(val) for val in value] - if len(self.request.body) > 0: - arguments.update(self.json_serializer.loads(self.request.body)) - return arguments - - async def handle_through_remote_object(self) -> None: try: arguments = self.prepare_arguments() @@ -117,7 +123,7 @@ async def handle_through_remote_object(self) -> None: -class EventHandler(RequestHandler): +class EventHandler(BaseHandler): def initialize(self, resource : typing.Union[HTTPResource, ServerSentEvent]) -> None: self.resource = resource @@ -132,13 +138,13 @@ async def get(self): await self.handle_datastream() self.finish() - def options(self): + async def options(self): self.set_status(204) - self.add_header("Access-Control-Allow-Origin", self.clients) + self.set_header("Access-Control-Allow-Origin", self.clients) self.set_header("Access-Control-Allow-Methods", 'GET') self.finish() - + async def handle_datastream(self) -> None: try: event_consumer = EventConsumer(self.request.path, self.resource.socket_address, @@ -149,9 +155,9 @@ async def handle_datastream(self) -> None: data = await event_consumer.receive_event() if data: # already JSON serialized - # print(f"data sent") self.write(data_header % data) await self.flush() + self.logger.debug(f"new data sent - {self.resource.event_name}") except StreamClosedError: break except Exception as ex: @@ -178,8 +184,8 @@ async def handled_imagestream(self) -> None: # already serialized self.write(delimiter) self.write(data_header % data) - # print(f"image data sent {data[0:100]}") await self.flush() + self.logger.debug(f"new image sent - {self.resource.event_name}") except StreamClosedError: break except Exception as ex: diff --git a/hololinked/server/proxy_client.py b/hololinked/server/proxy_client.py index b9ca3d8..e69de29 100644 --- a/hololinked/server/proxy_client.py +++ 
b/hololinked/server/proxy_client.py @@ -1,391 +0,0 @@ -import threading -import asyncio -import typing -import logging -from typing import Any - -from .data_classes import RPCResource -from .zmq_message_brokers import SyncZMQClient, EventConsumer, PROXY -from .utils import current_datetime_ms_str -from .constants import (SERIALIZABLE_WRAPPER_ASSIGNMENTS, Instructions, ServerMessage, ServerMessageData, ResourceType) - -from .zmq_message_brokers import (CM_INDEX_ADDRESS, CM_INDEX_ARGUMENTS, CM_INDEX_CLIENT_TYPE, CM_INDEX_EXECUTION_CONTEXT, - CM_INDEX_INSTRUCTION, CM_INDEX_MESSAGE_ID, CM_INDEX_MESSAGE_TYPE, CM_INDEX_TIMEOUT) -from .zmq_message_brokers import (SM_INDEX_ADDRESS, SM_INDEX_DATA, SM_INDEX_MESSAGE_ID, SM_INDEX_MESSAGE_TYPE, - SM_INDEX_SERVER_TYPE) - -class ObjectProxy: - - _own_attrs = frozenset([ - '_client', 'identity', '__annotations__', - 'instance_name', 'logger', 'timeout', '_timeout', - ]) - - def __init__(self, instance_name : str, timeout : float = 5, load_remote_object = True, protocol : str = 'TCP', **kwargs) -> None: - self.instance_name = instance_name - self.timeout = timeout - self.identity = instance_name + current_datetime_ms_str() - self.logger = logging.Logger(self.identity) - # compose ZMQ client in Proxy client so that all sending and receiving is - # done by the ZMQ client and not by the Proxy client directly. 
Proxy client only - # bothers mainly about __setattr__ and _getattr__ - self._client = SyncZMQClient(instance_name, self.identity, client_type=PROXY, protocol=protocol, **kwargs) - if load_remote_object: - self.load_remote_object() - - def __del__(self): - self._client.exit() - - def __getattribute__(self, __name: str) -> Any: - obj = super().__getattribute__(__name) - if isinstance(obj, _RemoteParameter): - return obj.get() - return obj - - def __setattr__(self, __name : str, __value : typing.Any): - if __name in ObjectProxy._own_attrs or (__name not in self.__dict__ and isinstance(__value, __allowed_attribute_types__)): - print(f"setting {__name}") - return super(ObjectProxy, self).__setattr__(__name, __value) - elif __name in self.__dict__: - obj = self.__dict__[__name] - if isinstance(obj, _RemoteParameter): - obj.set(value=__value) - return - raise AttributeError(f"Cannot reset attribute {__name} again to ObjectProxy for {self.instance_name}.") - raise AttributeError(f"Cannot set foreign attribute {__name} to ObjectProxy for {self.instance_name}. Given attribute not found in RemoteObject.") - - def __repr__(self): - return f'ObjectProxy {self.instance_name}' - - def __enter__(self): - raise NotImplementedError("with statement is not completely implemented yet. Avoid.") - return self - - def __exit__(self, exc_type, exc_value, traceback): - raise NotImplementedError("with statement is not completely implemented yet. 
Avoid.") - - def __bool__(self): return True - - def __eq__(self, other): - if other is self: - return True - return isinstance(other, ObjectProxy) and other.instance_name == self.instance_name - - def __ne__(self, other): - if other and isinstance(other, ObjectProxy): - return other.instance_name != self.instance_name - return True - - def __hash__(self): - return hash(self.identity) - - @property - def timeout(self) -> typing.Union[float, int]: - return self._timeout - - @timeout.setter - def timeout(self, value : typing.Union[float, int]): - if not isinstance(value, (float, int, type(None))): - raise TypeError(f"Timeout can only be float or int greater than 0, or None. Given type {type(value)}.") - elif value is not None and value < 0: - raise ValueError("Timeout must be at least 0 or None, not negative.") - self._timeout = value - - timeout.__doc__ = """Timeout in seconds on server side for execution of method. Defaults to 5 seconds and - network times not considered.""" - - def invoke(self, method : str, oneway : bool = False, **kwargs) -> typing.Any: - method : _RemoteMethod = getattr(self, method, None) - if not method: - raise AttributeError(f"No remote method named {method}") - if oneway: - method.oneway(**kwargs) - else: - return method(**kwargs) - - async def async_invoke(self, method : str, **kwargs): - method = getattr(self, method, None) # type: _RemoteMethod - if not method: - raise AttributeError(f"No remote method named {method}") - return await method.async_call(**kwargs) - - def set_parameter(self, parameter : str, value : typing.Any, oneway : bool) -> None: - parameter : _RemoteParameter = getattr(self, parameter, None) - if not parameter: - raise AttributeError(f"No remote parameter named {parameter}") - if oneway: - parameter.oneway(value) - else: - parameter.set(value) - - async def async_set_parameters(self, oneway : bool = False, noblock : bool = False, **parameters): - pass - - def subscribe_event(self, event_name : str, callback : 
typing.Callable): - pass - - def unsubscribe_event(self, event_name : str): - pass - - # def __getstate__(self): - # # make sure a tuple of just primitive types are used to allow for proper serialization - # return str(self._pyroUri), tuple(self._pyroOneway), tuple(self._pyroMethods), \ - # tuple(self._pyroAttrs), self._pyroHandshake, self._pyroSerializer - - # def __setstate__(self, state): - # self._pyroUri = core.URI(state[0]) - # self._pyroOneway = set(state[1]) - # self._pyroMethods = set(state[2]) - # self._pyroAttrs = set(state[3]) - # self._pyroHandshake = state[4] - # self._pyroSerializer = state[5] - # self.__pyroTimeout = config.COMMTIMEOUT - # self._pyroMaxRetries = config.MAX_RETRIES - # self._pyroConnection = None - # self._pyroLocalSocket = None - # self._pyroSeq = 0 - # self._pyroRawWireResponse = False - # self.__pyroOwnerThread = get_ident() - - # def __copy__(self): - # p = object.__new__(type(self)) - # p.__setstate__(self.__getstate__()) - # p._pyroTimeout = self._pyroTimeout - # p._pyroRawWireResponse = self._pyroRawWireResponse - # p._pyroMaxRetries = self._pyroMaxRetries - # return p - - - # def __dir__(self): - # result = dir(self.__class__) + list(self.__dict__.keys()) - # return sorted(set(result) | self._pyroMethods | self._pyroAttrs) - - # # When special methods are invoked via special syntax (e.g. obj[index] calls - # # obj.__getitem__(index)), the special methods are not looked up via __getattr__ - # # for efficiency reasons; instead, their presence is checked directly. - # # Thus we need to define them here to force (remote) lookup through __getitem__. 
- - # def __len__(self): return self.__getattr__('__len__')() - # def __getitem__(self, index): return self.__getattr__('__getitem__')(index) - # def __setitem__(self, index, val): return self.__getattr__('__setitem__')(index, val) - # def __delitem__(self, index): return self.__getattr__('__delitem__')(index) - - # def __iter__(self): - # try: - # # use remote iterator if it exists - # yield from self.__getattr__('__iter__')() - # except AttributeError: - # # fallback to indexed based iteration - # try: - # yield from (self[index] for index in range(sys.maxsize)) - # except (StopIteration, IndexError): - # return - - - def load_remote_object(self): - """ - Get metadata from server (methods, parameters...) and remember them in some attributes of the proxy. - Usually this will already be known due to the default behavior of the connect handshake, where the - connect response also includes the metadata. - """ - fetch = _RemoteMethod(self._client, f'/{self.instance_name}{Instructions.RPC_RESOURCES}', - self._timeout) # type: _RemoteMethod - reply = fetch()[ServerMessage.DATA][ServerMessageData.RETURN_VALUE] # type: typing.Dict[str, typing.Dict[str, typing.Any]] - - for name, data in reply.items(): - if isinstance(data, dict): - data = RPCResource(**data) - elif not isinstance(data, RPCResource): - raise RuntimeError("Logic error - desieralized info about server not instance of ProxyResourceData") - if data.what == ResourceType.CALLABLE: - _add_method(self, _RemoteMethod(self._client, data.instruction, self.timeout), data) - elif data.what == ResourceType.PARAMETER: - _add_parameter(self, _RemoteParameter(self._client, data.instruction, self.timeout), data) - elif data.what == ResourceType.EVENT: - pass - - # def _pyroInvokeBatch(self, calls, oneway=False): - # flags = protocol.FLAGS_BATCH - # if oneway: - # flags |= protocol.FLAGS_ONEWAY - # return self._pyroInvoke("", calls, None, flags) - - - -class _RemoteMethod: - """method call abstraction""" - - def 
__init__(self, client : SyncZMQClient, instruction : str, timeout : typing.Optional[float] = None) -> None: - self._client = client - self._instruction = instruction - self._timeout = timeout - self._loop = asyncio.get_event_loop() - - @property # i.e. cannot have setter - def last_return_value(self): - return self._last_return_value - - def oneway(self, *args, **kwargs) -> None: - kwargs["__args__"] = args - self._client.send_instruction(self._instruction, kwargs, self._timeout) - - def __call__(self, *args, **kwargs) -> typing.Any: - kwargs["__args__"] = args - self._last_return_value = self._client.execute(self._instruction, - kwargs, raise_client_side_exception=True) - return self._last_return_value - - - - -class _RemoteParameter: - """parameter set & get abstraction""" - - def __init__(self, client : SyncZMQClient, instruction : str, - timeout : typing.Optional[float] = None) -> None: - self._client = client - self._timeout = timeout - self._read_instruction = instruction + '/read' - self._write_instruction = instruction + '/write' - - def __del__(self): - self._client = None - - @property # i.e. 
cannot have setter - def last_value(self): - return self._last_value - - def set(self, value : typing.Any) -> typing.Any: - self._last_value : typing.Dict = self._client.execute(self._write_instruction, dict(value=value), - raise_client_side_exception=True) - - def get(self): - self._last_value : typing.Dict = self._client.execute(self._read_instruction, - raise_client_side_exception=True) - return self._last_value[SM_INDEX_DATA] - - async def async_set(self, value : typing.Any) -> typing.Any: - self._last_value : typing.Dict = await self._client.execute(self._write_instruction, dict(value=value), - raise_client_side_exception=True) - - async def async_get(self): - self._last_value : typing.Dict = await self._client.execute(self._read_instruction, - raise_client_side_exception=True) - return self._last_value - - -class _Event: - """event streaming""" - - def __init__(self, client : SyncZMQClient, event_name : str, event_socket : str) -> None: - self._client = client - self._event_name = event_name - self._event_socket = event_socket - - def _subscribe(self, callback : typing.Callable): - self._event_consumer = EventConsumer(request.path, event_info.socket_address, - f"{request.path}_HTTPEvent@"+current_datetime_ms_str()) - self._cb = callback - self._subscribed = True - self._thread = threading.Thread(target=self.listen) - self._thread.start() - - def listen(self): - while self._subscribed: - try: - data = self._event_consumer.receive_event(deserialize=True) - self._cb(data) - except Exception as E: - print(E) - self._event_consumer.exit() - - def _unsubscribe(self): - self._subscribed = False - - - -class _StreamResultIterator(object): - """ - Pyro returns this as a result of a remote call which returns an iterator or generator. - It is a normal iterable and produces elements on demand from the remote iterator. - You can simply use it in for loops, list comprehensions etc. 
- """ - def __init__(self, streamId, proxy): - self.streamId = streamId - self.proxy = proxy - self.pyroseq = proxy._pyroSeq - - def __iter__(self): - return self - - def __next__(self): - if self.proxy is None: - raise StopIteration - if self.proxy._pyroConnection is None: - raise errors.ConnectionClosedError("the proxy for this stream result has been closed") - self.pyroseq += 1 - try: - return self.proxy._pyroInvoke("get_next_stream_item", [self.streamId], {}, objectId=core.DAEMON_NAME) - except (StopIteration, GeneratorExit): - # when the iterator is exhausted, the proxy is removed to avoid unneeded close_stream calls later - # (the server has closed its part of the stream by itself already) - self.proxy = None - raise - - def __del__(self): - try: - self.close() - except Exception: - pass - - def close(self): - if self.proxy and self.proxy._pyroConnection is not None: - if self.pyroseq == self.proxy._pyroSeq: - # we're still in sync, it's okay to use the same proxy to close this stream - self.proxy._pyroInvoke("close_stream", [self.streamId], {}, - flags=protocol.FLAGS_ONEWAY, objectId=core.DAEMON_NAME) - else: - # The proxy's sequence number has diverged. - # One of the reasons this can happen is because this call is being done from python's GC where - # it decides to gc old iterator objects *during a new call on the proxy*. - # If we use the same proxy and do a call in between, the other call on the proxy will get an out of sync seq and crash! - # We create a temporary second proxy to call close_stream on. This is inefficient, but avoids the problem. 
- with contextlib.suppress(errors.CommunicationError): - with self.proxy.__copy__() as closingProxy: - closingProxy._pyroInvoke("close_stream", [self.streamId], {}, - flags=protocol.FLAGS_ONEWAY, objectId=core.DAEMON_NAME) - self.proxy = None - - - -__allowed_attribute_types__ = (_RemoteParameter, _RemoteMethod) - -def _add_method(client_obj : ObjectProxy, method : _RemoteMethod, func_info : RPCResource) -> None: - if isinstance(func_info, list): - raise TypeError(f"got list instead of RPC resource for {func_info.name}") - if not func_info.top_owner: - return - for dunder in SERIALIZABLE_WRAPPER_ASSIGNMENTS: - if dunder == '__qualname__': - info = '{}.{}'.format(client_obj.__class__.__name__, func_info.get_dunder_attr(dunder).split('.')[1]) - else: - info = func_info.get_dunder_attr(dunder) - setattr(method, dunder, info) - client_obj.__setattr__(func_info.name, method) - -def _add_parameter(client_obj : ObjectProxy, parameter : _RemoteParameter, parameter_info : RPCResource) -> None: - if isinstance(parameter_info, list): - raise TypeError(f"got list instead of RPC resource for {parameter_info.name}") - if not parameter_info.top_owner: - return - for attr in ['doc', 'name']: - # just to imitate _add_method logic - setattr(parameter, attr, getattr(parameter_info, attr)) - client_obj.__setattr__(parameter_info.name, parameter) - -def _add_event(client_obj : ObjectProxy, event, event_info) -> None: - pass - - -__all__ = ['ObjectProxy'] - diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index 5707a47..2fb7dc0 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -151,9 +151,12 @@ def _machine_compliant_state(self, state) -> typing.Union[Enum, str]: return state def get_state(self) -> typing.Union[str, Enum, None]: - """return the current state. one can also access the property `current state`. - Returns: - str: current state + """ + return the current state. 
one can also access the property `current state`. + + Returns + ------- + current state: str """ return self._state diff --git a/hololinked/server/utils.py b/hololinked/server/utils.py index c54c930..019d912 100644 --- a/hololinked/server/utils.py +++ b/hololinked/server/utils.py @@ -6,6 +6,7 @@ import asyncio import inspect import typing +import asyncio from ..param.exceptions import wrap_error_text as wrap_text @@ -181,15 +182,21 @@ def run_coro_sync(coro): eventloop.run_until_complete(coro) -def run_method_somehow(coro): +def run_method_somehow(method : typing.Union[typing.Callable, typing.Coroutine]) -> typing.Any: """ either schedule the coroutine or run it until its complete """ + if not asyncio.iscoroutinefunction(method): + return method() + elif not asyncio.iscoroutine(method): + task = lambda : asyncio.create_task(method) #check later if lambda is necessary + else: + task = method eventloop = asyncio.get_event_loop() if eventloop.is_running(): - eventloop.call_soon(lambda : asyncio.create_task(coro)) + eventloop.call_soon(task) else: - eventloop.run_until_complete(coro) + eventloop.run_until_complete(task) def get_signature(function : typing.Callable): @@ -207,8 +214,6 @@ def get_signature(function : typing.Callable): return arg_names, arg_types - - __all__ = ['current_datetime_ms_str', 'wrap_text', 'copy_parameters', 'dashed_URL'] diff --git a/hololinked/server/webserver_utils.py b/hololinked/server/webserver_utils.py index 2db8826..f235f08 100644 --- a/hololinked/server/webserver_utils.py +++ b/hololinked/server/webserver_utils.py @@ -1,5 +1,6 @@ import logging import textwrap +import traceback import typing import ifaddr # from tabulate import tabulate @@ -141,4 +142,15 @@ def get_IP_from_interface(interface_name : str = 'Ethernet', adapter_name = None raise ValueError("interface name {} not found in system interfaces.".format(interface_name)) -__all__ = ['log_request', 'log_resources'] \ No newline at end of file +def format_exception_as_json(exc : 
Exception) -> typing.Dict[str, typing.Any]: + return { + "exception" : { + "message" : str(exc), + "type" : repr(exc).split('(', 1)[0], + "traceback" : traceback.format_exc().splitlines(), + "notes" : E.__notes__ if hasattr(exc, "__notes__") else None # type: ignore + } + } + + +__all__ = ['log_request', 'log_resources', 'format_exception_as_json'] \ No newline at end of file diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py index 5a60645..74c94bb 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -1732,7 +1732,7 @@ async def async_recv_reply(self, message_id : bytes, plain_reply : bool = False, return reply async def async_execute(self, instance_name : str, instruction : str, arguments : typing.Dict[str, typing.Any] = EMPTY_DICT, - context : typing.Dict[str, typing.Any] = EMPTY_DICT, raise_client_side_exception = False, + *, context : typing.Dict[str, typing.Any] = EMPTY_DICT, raise_client_side_exception = False, server_timeout : typing.Optional[float] = 3, client_timeout : typing.Optional[float] = None) -> typing.Dict[str, typing.Any]: """ sends message and receives reply. 
From 4e994e4dc2e43e07957d0f58a23f804cddfeb4e5 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Mon, 26 Feb 2024 07:54:30 +0100 Subject: [PATCH 038/167] object proxy client moved to separate folder --- hololinked/client/__init__.py | 0 hololinked/client/proxy.py | 392 ++++++++++++++++++++++++++++++++++ 2 files changed, 392 insertions(+) create mode 100644 hololinked/client/__init__.py create mode 100644 hololinked/client/proxy.py diff --git a/hololinked/client/__init__.py b/hololinked/client/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hololinked/client/proxy.py b/hololinked/client/proxy.py new file mode 100644 index 0000000..959f512 --- /dev/null +++ b/hololinked/client/proxy.py @@ -0,0 +1,392 @@ +import threading +import asyncio +import typing +import logging +from typing import Any + +from ..server.data_classes import RPCResource +from ..server.zmq_message_brokers import SyncZMQClient, EventConsumer, PROXY +from ..server.utils import current_datetime_ms_str +from ..server.constants import (SERIALIZABLE_WRAPPER_ASSIGNMENTS, Instructions, + ServerMessage, ServerMessageData, ResourceType) + +from ..server.zmq_message_brokers import (CM_INDEX_ADDRESS, CM_INDEX_ARGUMENTS, CM_INDEX_CLIENT_TYPE, CM_INDEX_EXECUTION_CONTEXT, + CM_INDEX_INSTRUCTION, CM_INDEX_MESSAGE_ID, CM_INDEX_MESSAGE_TYPE, CM_INDEX_TIMEOUT) +from ..server.zmq_message_brokers import (SM_INDEX_ADDRESS, SM_INDEX_DATA, SM_INDEX_MESSAGE_ID, SM_INDEX_MESSAGE_TYPE, + SM_INDEX_SERVER_TYPE) + +class ObjectProxy: + + _own_attrs = frozenset([ + '_zmq_client', 'identity', '__annotations__', + 'instance_name', 'logger', 'timeout', '_timeout', + ]) + + def __init__(self, instance_name : str, timeout : float = 5, load_remote_object = True, protocol : str = 'TCP', **kwargs) -> None: + self.instance_name = instance_name + self.timeout = timeout + self.identity = instance_name + current_datetime_ms_str() + self.logger = logging.Logger(self.identity) + # compose ZMQ client in Proxy 
client so that all sending and receiving is + # done by the ZMQ client and not by the Proxy client directly. Proxy client only + # bothers mainly about __setattr__ and _getattr__ + self._zmq_client = SyncZMQClient(instance_name, self.identity, client_type=PROXY, protocol=protocol, **kwargs) + if load_remote_object: + self.load_remote_object() + + def __del__(self): + self._zmq_client.exit() + + def __getattribute__(self, __name: str) -> Any: + obj = super().__getattribute__(__name) + if isinstance(obj, _RemoteParameter): + return obj.get() + return obj + + def __setattr__(self, __name : str, __value : typing.Any): + if __name in ObjectProxy._own_attrs or (__name not in self.__dict__ and isinstance(__value, __allowed_attribute_types__)): + print(f"setting {__name}") + return super(ObjectProxy, self).__setattr__(__name, __value) + elif __name in self.__dict__: + obj = self.__dict__[__name] + if isinstance(obj, _RemoteParameter): + obj.set(value=__value) + return + raise AttributeError(f"Cannot reset attribute {__name} again to ObjectProxy for {self.instance_name}.") + raise AttributeError(f"Cannot set foreign attribute {__name} to ObjectProxy for {self.instance_name}. Given attribute not found in RemoteObject.") + + def __repr__(self): + return f'ObjectProxy {self.instance_name}' + + def __enter__(self): + raise NotImplementedError("with statement is not completely implemented yet. Avoid.") + return self + + def __exit__(self, exc_type, exc_value, traceback): + raise NotImplementedError("with statement is not completely implemented yet. 
Avoid.") + + def __bool__(self): return True + + def __eq__(self, other): + if other is self: + return True + return isinstance(other, ObjectProxy) and other.instance_name == self.instance_name + + def __ne__(self, other): + if other and isinstance(other, ObjectProxy): + return other.instance_name != self.instance_name + return True + + def __hash__(self): + return hash(self.identity) + + @property + def timeout(self) -> typing.Union[float, int]: + return self._timeout + + @timeout.setter + def timeout(self, value : typing.Union[float, int]): + if not isinstance(value, (float, int, type(None))): + raise TypeError(f"Timeout can only be float or int greater than 0, or None. Given type {type(value)}.") + elif value is not None and value < 0: + raise ValueError("Timeout must be at least 0 or None, not negative.") + self._timeout = value + + timeout.__doc__ = """Timeout in seconds on server side for execution of method. Defaults to 5 seconds and + network times not considered.""" + + def invoke(self, method : str, oneway : bool = False, **kwargs) -> typing.Any: + method : _RemoteMethod = getattr(self, method, None) + if not method: + raise AttributeError(f"No remote method named {method}") + if oneway: + method.oneway(**kwargs) + else: + return method(**kwargs) + + async def async_invoke(self, method : str, **kwargs): + method = getattr(self, method, None) # type: _RemoteMethod + if not method: + raise AttributeError(f"No remote method named {method}") + return await method.async_call(**kwargs) + + def set_parameter(self, parameter : str, value : typing.Any, oneway : bool) -> None: + parameter : _RemoteParameter = getattr(self, parameter, None) + if not parameter: + raise AttributeError(f"No remote parameter named {parameter}") + if oneway: + parameter.oneway(value) + else: + parameter.set(value) + + async def async_set_parameters(self, oneway : bool = False, noblock : bool = False, **parameters): + pass + + def subscribe_event(self, event_name : str, callback : 
typing.Callable): + pass + + def unsubscribe_event(self, event_name : str): + pass + + # def __getstate__(self): + # # make sure a tuple of just primitive types are used to allow for proper serialization + # return str(self._pyroUri), tuple(self._pyroOneway), tuple(self._pyroMethods), \ + # tuple(self._pyroAttrs), self._pyroHandshake, self._pyroSerializer + + # def __setstate__(self, state): + # self._pyroUri = core.URI(state[0]) + # self._pyroOneway = set(state[1]) + # self._pyroMethods = set(state[2]) + # self._pyroAttrs = set(state[3]) + # self._pyroHandshake = state[4] + # self._pyroSerializer = state[5] + # self.__pyroTimeout = config.COMMTIMEOUT + # self._pyroMaxRetries = config.MAX_RETRIES + # self._pyroConnection = None + # self._pyroLocalSocket = None + # self._pyroSeq = 0 + # self._pyroRawWireResponse = False + # self.__pyroOwnerThread = get_ident() + + # def __copy__(self): + # p = object.__new__(type(self)) + # p.__setstate__(self.__getstate__()) + # p._pyroTimeout = self._pyroTimeout + # p._pyroRawWireResponse = self._pyroRawWireResponse + # p._pyroMaxRetries = self._pyroMaxRetries + # return p + + + # def __dir__(self): + # result = dir(self.__class__) + list(self.__dict__.keys()) + # return sorted(set(result) | self._pyroMethods | self._pyroAttrs) + + # # When special methods are invoked via special syntax (e.g. obj[index] calls + # # obj.__getitem__(index)), the special methods are not looked up via __getattr__ + # # for efficiency reasons; instead, their presence is checked directly. + # # Thus we need to define them here to force (remote) lookup through __getitem__. 
+ + # def __len__(self): return self.__getattr__('__len__')() + # def __getitem__(self, index): return self.__getattr__('__getitem__')(index) + # def __setitem__(self, index, val): return self.__getattr__('__setitem__')(index, val) + # def __delitem__(self, index): return self.__getattr__('__delitem__')(index) + + # def __iter__(self): + # try: + # # use remote iterator if it exists + # yield from self.__getattr__('__iter__')() + # except AttributeError: + # # fallback to indexed based iteration + # try: + # yield from (self[index] for index in range(sys.maxsize)) + # except (StopIteration, IndexError): + # return + + + def load_remote_object(self): + """ + Get metadata from server (methods, parameters...) and remember them in some attributes of the proxy. + Usually this will already be known due to the default behavior of the connect handshake, where the + connect response also includes the metadata. + """ + fetch = _RemoteMethod(self._zmq_client, f'/{self.instance_name}{Instructions.RPC_RESOURCES}', + self._timeout) # type: _RemoteMethod + reply = fetch()[ServerMessage.DATA][ServerMessageData.RETURN_VALUE] # type: typing.Dict[str, typing.Dict[str, typing.Any]] + + for name, data in reply.items(): + if isinstance(data, dict): + data = RPCResource(**data) + elif not isinstance(data, RPCResource): + raise RuntimeError("Logic error - desieralized info about server not instance of ProxyResourceData") + if data.what == ResourceType.CALLABLE: + _add_method(self, _RemoteMethod(self._zmq_client, data.instruction, self.timeout), data) + elif data.what == ResourceType.PARAMETER: + _add_parameter(self, _RemoteParameter(self._zmq_client, data.instruction, self.timeout), data) + elif data.what == ResourceType.EVENT: + pass + + # def _pyroInvokeBatch(self, calls, oneway=False): + # flags = protocol.FLAGS_BATCH + # if oneway: + # flags |= protocol.FLAGS_ONEWAY + # return self._pyroInvoke("", calls, None, flags) + + + +class _RemoteMethod: + """method call abstraction""" + + def 
__init__(self, client : SyncZMQClient, instruction : str, timeout : typing.Optional[float] = None) -> None: + self._zmq_client = client + self._instruction = instruction + self._timeout = timeout + self._loop = asyncio.get_event_loop() + + @property # i.e. cannot have setter + def last_return_value(self): + return self._last_return_value + + def oneway(self, *args, **kwargs) -> None: + kwargs["__args__"] = args + self._zmq_client.send_instruction(self._instruction, kwargs, self._timeout) + + def __call__(self, *args, **kwargs) -> typing.Any: + kwargs["__args__"] = args + self._last_return_value = self._zmq_client.execute(self._instruction, + kwargs, raise_client_side_exception=True) + return self._last_return_value + + + + +class _RemoteParameter: + """parameter set & get abstraction""" + + def __init__(self, client : SyncZMQClient, instruction : str, + timeout : typing.Optional[float] = None) -> None: + self._zmq_client = client + self._timeout = timeout + self._read_instruction = instruction + '/read' + self._write_instruction = instruction + '/write' + + def __del__(self): + self._zmq_client = None + + @property # i.e. 
cannot have setter + def last_value(self): + return self._last_value + + def set(self, value : typing.Any) -> typing.Any: + self._last_value : typing.Dict = self._zmq_client.execute(self._write_instruction, dict(value=value), + raise_client_side_exception=True) + + def get(self): + self._last_value : typing.Dict = self._zmq_client.execute(self._read_instruction, + raise_client_side_exception=True) + return self._last_value[SM_INDEX_DATA] + + async def async_set(self, value : typing.Any) -> typing.Any: + self._last_value : typing.Dict = await self._zmq_client.execute(self._write_instruction, dict(value=value), + raise_client_side_exception=True) + + async def async_get(self): + self._last_value : typing.Dict = await self._zmq_client.execute(self._read_instruction, + raise_client_side_exception=True) + return self._last_value + + +class _Event: + """event streaming""" + + def __init__(self, client : SyncZMQClient, event_name : str, event_socket : str) -> None: + self._zmq_client = client + self._event_name = event_name + self._event_socket = event_socket + + def _subscribe(self, callback : typing.Callable): + self._event_consumer = EventConsumer(request.path, event_info.socket_address, + f"{request.path}_HTTPEvent@"+current_datetime_ms_str()) + self._cb = callback + self._subscribed = True + self._thread = threading.Thread(target=self.listen) + self._thread.start() + + def listen(self): + while self._subscribed: + try: + data = self._event_consumer.receive_event(deserialize=True) + self._cb(data) + except Exception as E: + print(E) + self._event_consumer.exit() + + def _unsubscribe(self): + self._subscribed = False + + + +class _StreamResultIterator(object): + """ + Pyro returns this as a result of a remote call which returns an iterator or generator. + It is a normal iterable and produces elements on demand from the remote iterator. + You can simply use it in for loops, list comprehensions etc. 
+ """ + def __init__(self, streamId, proxy): + self.streamId = streamId + self.proxy = proxy + self.pyroseq = proxy._pyroSeq + + def __iter__(self): + return self + + def __next__(self): + if self.proxy is None: + raise StopIteration + if self.proxy._pyroConnection is None: + raise errors.ConnectionClosedError("the proxy for this stream result has been closed") + self.pyroseq += 1 + try: + return self.proxy._pyroInvoke("get_next_stream_item", [self.streamId], {}, objectId=core.DAEMON_NAME) + except (StopIteration, GeneratorExit): + # when the iterator is exhausted, the proxy is removed to avoid unneeded close_stream calls later + # (the server has closed its part of the stream by itself already) + self.proxy = None + raise + + def __del__(self): + try: + self.close() + except Exception: + pass + + def close(self): + if self.proxy and self.proxy._pyroConnection is not None: + if self.pyroseq == self.proxy._pyroSeq: + # we're still in sync, it's okay to use the same proxy to close this stream + self.proxy._pyroInvoke("close_stream", [self.streamId], {}, + flags=protocol.FLAGS_ONEWAY, objectId=core.DAEMON_NAME) + else: + # The proxy's sequence number has diverged. + # One of the reasons this can happen is because this call is being done from python's GC where + # it decides to gc old iterator objects *during a new call on the proxy*. + # If we use the same proxy and do a call in between, the other call on the proxy will get an out of sync seq and crash! + # We create a temporary second proxy to call close_stream on. This is inefficient, but avoids the problem. 
+ with contextlib.suppress(errors.CommunicationError): + with self.proxy.__copy__() as closingProxy: + closingProxy._pyroInvoke("close_stream", [self.streamId], {}, + flags=protocol.FLAGS_ONEWAY, objectId=core.DAEMON_NAME) + self.proxy = None + + + +__allowed_attribute_types__ = (_RemoteParameter, _RemoteMethod) + +def _add_method(client_obj : ObjectProxy, method : _RemoteMethod, func_info : RPCResource) -> None: + if isinstance(func_info, list): + raise TypeError(f"got list instead of RPC resource for {func_info.name}") + if not func_info.top_owner: + return + for dunder in SERIALIZABLE_WRAPPER_ASSIGNMENTS: + if dunder == '__qualname__': + info = '{}.{}'.format(client_obj.__class__.__name__, func_info.get_dunder_attr(dunder).split('.')[1]) + else: + info = func_info.get_dunder_attr(dunder) + setattr(method, dunder, info) + client_obj.__setattr__(func_info.name, method) + +def _add_parameter(client_obj : ObjectProxy, parameter : _RemoteParameter, parameter_info : RPCResource) -> None: + if isinstance(parameter_info, list): + raise TypeError(f"got list instead of RPC resource for {parameter_info.name}") + if not parameter_info.top_owner: + return + for attr in ['doc', 'name']: + # just to imitate _add_method logic + setattr(parameter, attr, getattr(parameter_info, attr)) + client_obj.__setattr__(parameter_info.name, parameter) + +def _add_event(client_obj : ObjectProxy, event, event_info) -> None: + pass + + +__all__ = ['ObjectProxy'] + From e88c7997ec12bf657842d28c909cc453fe9950e7 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Mon, 26 Feb 2024 18:50:22 +0100 Subject: [PATCH 039/167] reordering constants and RemoteObjectDatabase moved to database.py from remote_object.py --- hololinked/server/HTTPServer.py | 5 +- hololinked/server/__init__.py | 4 +- hololinked/server/api_platform_utils.py | 4 +- hololinked/server/config.py | 13 +- hololinked/server/constants.py | 87 +++++---- hololinked/server/data_classes.py | 9 +- hololinked/server/database.py | 238 
++++++++++++++++++++--- hololinked/server/decorators.py | 99 ++++------ hololinked/server/eventloop.py | 1 - hololinked/server/exceptions.py | 1 + hololinked/server/host_server.py | 70 ------- hololinked/server/host_utilities.py | 8 +- hololinked/server/http_methods.py | 27 ++- hololinked/server/proxy_client.py | 0 hololinked/server/remote_object.py | 103 +--------- hololinked/server/remote_parameter.py | 17 +- hololinked/server/remote_parameters.py | 5 +- hololinked/server/serializers.py | 12 +- hololinked/server/webserver_utils.py | 2 +- hololinked/server/zmq_message_brokers.py | 16 +- 20 files changed, 356 insertions(+), 365 deletions(-) delete mode 100644 hololinked/server/host_server.py delete mode 100644 hololinked/server/proxy_client.py diff --git a/hololinked/server/HTTPServer.py b/hololinked/server/HTTPServer.py index c038848..bf6bdb1 100644 --- a/hololinked/server/HTTPServer.py +++ b/hololinked/server/HTTPServer.py @@ -11,11 +11,10 @@ TypedList, String) from .utils import create_default_logger, run_coro_sync, run_method_somehow from .serializers import JSONSerializer -from .constants import Instructions +from .constants import CommonInstructions from .webserver_utils import log_request, update_resources from .zmq_message_brokers import MessageMappedZMQClientPool from .handlers import RPCHandler, BaseHandler, EventHandler -from .remote_object import RemoteObject, RemoteObjectDB @@ -164,7 +163,7 @@ async def _fetch_remote_object_resources(self): for client in self.zmq_client_pool: await client.handshake_complete() _, _, _, _, _, reply = await client.async_execute( - f'/{client.server_instance_name}{Instructions.HTTP_RESOURCES}', + f'/{client.server_instance_name}{CommonInstructions.HTTP_RESOURCES}', raise_client_side_exception=True) update_resources(self.resources, reply["returnValue"]) # type: ignore # _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/object-info', raise_client_side_exception = True) diff --git 
a/hololinked/server/__init__.py b/hololinked/server/__init__.py index a7d17d2..735bb9b 100644 --- a/hololinked/server/__init__.py +++ b/hololinked/server/__init__.py @@ -4,14 +4,12 @@ from .config import * from .serializers import * from .zmq_message_brokers import * -from .database import * from .decorators import * from .remote_parameter import * +from .database import * from .remote_object import * from .eventloop import * -from .proxy_client import * from .HTTPServer import * from .host_utilities import * -from .host_server import * diff --git a/hololinked/server/api_platform_utils.py b/hololinked/server/api_platform_utils.py index 707a692..816c748 100644 --- a/hololinked/server/api_platform_utils.py +++ b/hololinked/server/api_platform_utils.py @@ -1,7 +1,7 @@ from typing import Dict, List, Any, Union from dataclasses import dataclass, field, asdict -from .constants import POST +from .constants import HTTP_METHODS from .serializers import JSONSerializer @@ -58,7 +58,7 @@ class postman_http_request: url : str header : Union[List[Dict[str, Any]], None] = field(default=None) body : Union[Dict[str, Any], None] = field(default=None) - method : str = field(default=POST) + method : str = field(default=HTTP_METHODS.POST) description : Union[str, None] = field(default=None) def json(self): diff --git a/hololinked/server/config.py b/hololinked/server/config.py index cbd7dad..e49ff97 100644 --- a/hololinked/server/config.py +++ b/hololinked/server/config.py @@ -23,7 +23,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ - +import sys import asyncio import tempfile import os @@ -31,7 +31,9 @@ from . 
import __version__ -asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) +if sys.platform.startswith('win'): + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + class Configuration: @@ -48,7 +50,7 @@ def __init__(self): self.reset_actions() - def reset_variables(self, use_environment : bool = True): + def reset_variables(self, use_environment : bool = False): """ Reset to default config items. If use_environment is False, won't read environment variables settings (useful if you can't trust your env). @@ -57,12 +59,11 @@ def reset_variables(self, use_environment : bool = True): self.TCP_SOCKET_SEARCH_START_PORT = 60000 self.TCP_SOCKET_SEARCH_END_PORT = 65535 - return # qualname is not defined if use_environment: # environment variables overwrite config items - prefix = __qualname__.split('.')[0] - for item, envvalue in (e for e in os.environ.items() if e[0].startswith(prefix)): + prefix = 'hololinked' + for item, envvalue in (e for e in os.environ.items() if e[0].lower().startswith(prefix)): item = item[len(prefix):] if item not in self.__slots__: raise ValueError(f"invalid environment config variable: {prefix}{item}") diff --git a/hololinked/server/constants.py b/hololinked/server/constants.py index cb407d4..ba3a711 100644 --- a/hololinked/server/constants.py +++ b/hololinked/server/constants.py @@ -1,9 +1,11 @@ import logging -import functools import typing -from enum import Enum, StrEnum, IntEnum -from types import MethodType, FunctionType +from types import FunctionType, MethodType +from enum import StrEnum, IntEnum, Enum +# types +JSONSerializable = typing.Union[typing.Dict[str, typing.Any], list, str, int, float, None] +CallableType = (FunctionType, MethodType) # decorator constants # naming @@ -12,6 +14,7 @@ ANY_STATE : str = "ANY_STATE" UNSPECIFIED : str = "UNSPECIFIED" # types + class ResourceType(StrEnum): FUNC = "FUNC" ATTRIBUTE = "ATTRIBUTE" @@ -21,62 +24,53 @@ class ResourceType(StrEnum): FILE = "FILE" EVENT = 
"EVENT" - -FUNC = "FUNC" -ATTRIBUTE = "ATTRIBUTE" -PARAMETER = "PARAMETER" -IMAGE_STREAM = "IMAGE_STREAM" -CALLABLE = "CALLABLE" -FILE = "FILE" -# operation -READ = "read" -WRITE = "write" - -# logic -WRAPPER_ASSIGNMENTS = functools.WRAPPER_ASSIGNMENTS + ('__kwdefaults__', '__defaults__', ) -SERIALIZABLE_WRAPPER_ASSIGNMENTS = ('__name__', '__qualname__', '__doc__' ) # regex logic -states_regex : str = '[A-Za-z_]+[A-Za-z_ 0-9]*' -url_regex : str = r'[\-a-zA-Z0-9@:%._\/\+~#=]{1,256}' - +class REGEX(StrEnum): + states = '[A-Za-z_]+[A-Za-z_ 0-9]*' + url = r'[\-a-zA-Z0-9@:%._\/\+~#=]{1,256}' # HTTP request methods -GET : str = 'GET' -POST : str = 'POST' -PUT : str = 'PUT' -DELETE : str = 'DELETE' -PATCH : str = 'PATCH' -OPTIONS : str = 'OPTIONS' -http_methods = [GET, PUT, POST, DELETE, PATCH] -# HTTP Handler related -EVENT : str = 'event' -INSTRUCTION : str = 'INSTRUCTION' +class HTTP_METHODS(StrEnum): + GET = 'GET' + POST = 'POST' + PUT = 'PUT' + DELETE = 'DELETE' + PATCH = 'PATCH' + OPTIONS = 'OPTIONS' + +http_methods = [member for member in HTTP_METHODS._member_map_] # Logging -log_levels = dict( - DEBUG = logging.DEBUG, - INFO = logging.INFO, - CRITICAL = logging.CRITICAL, - ERROR = logging.ERROR, - WARN = logging.WARN, +class LOGLEVEL(IntEnum): + """ + ``logging.Logger`` log levels + """ + DEBUG = logging.DEBUG + INFO = logging.INFO + CRITICAL = logging.CRITICAL + ERROR = logging.ERROR + WARN = logging.WARN FATAL = logging.FATAL -) - -# types -CallableType = (FunctionType, MethodType) -JSONSerializable = typing.Union[typing.Dict[str, typing.Any], list, str, int, float, None] # ZMQ -class ZMQ_PROTOCOLS(Enum): +class ZMQ_PROTOCOLS(StrEnum): + """ + protocols of ZMQ supported by this package + """ TCP = "TCP" IPC = "IPC" INPROC = "INPROC" -class Instructions(StrEnum): +# Some common instructions +class CommonInstructions(StrEnum): RPC_RESOURCES = '/resources/object-proxy/read' HTTP_RESOURCES = '/resources/http-server/read' class ClientMessage(IntEnum): + """ + client 
sent message index for accessing message indices with names + instead of numbers + """ ADDRESS = 0 CLIENT_TYPE = 2 MESSAGE_TYPE = 3 @@ -87,6 +81,10 @@ class ClientMessage(IntEnum): EXECUTION_CONTEXT = 8 class ServerMessage(IntEnum): + """ + server sent message index for accessing message indices with names + instead of numbers + """ ADDRESS = 0 SERVER_TYPE = 2 MESSAGE_TYPE = 3 @@ -97,6 +95,9 @@ class ServerMessageData(StrEnum): RETURN_VALUE = "returnValue" class ServerTypes(Enum): + """ + type of ZMQ server + """ UNKNOWN_TYPE = b'UNKNOWN_TYPE' EVENTLOOP = b'EVENTLOOP' REMOTE_OBJECT = b'REMOTE_OBJECT' diff --git a/hololinked/server/data_classes.py b/hololinked/server/data_classes.py index a4e8f9b..f06522b 100644 --- a/hololinked/server/data_classes.py +++ b/hololinked/server/data_classes.py @@ -1,15 +1,14 @@ """ -The following is a list of all dataclasses used to store information on the exposed resources on the network +The following is a list of all dataclasses used to store information on the exposed +resources on the network """ - - import typing import platform from enum import Enum from dataclasses import dataclass, asdict, field, fields from ..param.parameters import String, Boolean, Tuple, TupleSelector -from .constants import (USE_OBJECT_NAME, POST, states_regex, url_regex, http_methods) +from .constants import (USE_OBJECT_NAME, HTTP_METHODS, REGEX, http_methods) from .path_converter import compile_path @@ -50,7 +49,7 @@ class RemoteResourceInfoValidator: generally. 
""" URL_path = String(default=USE_OBJECT_NAME) #, regex=url_regex) - http_method = TupleSelector(default=POST, objects=http_methods, accept_list=True) + http_method = TupleSelector(default=HTTP_METHODS.POST, objects=http_methods, accept_list=True) state = Tuple(default=None, item_type=(Enum, str), allow_None=True, accept_list=True, accept_item=True) obj_name = String(default=USE_OBJECT_NAME) iscoroutine = Boolean(default=False) diff --git a/hololinked/server/database.py b/hololinked/server/database.py index ceecd8f..6b6ff15 100644 --- a/hololinked/server/database.py +++ b/hololinked/server/database.py @@ -1,49 +1,237 @@ import typing +from sqlalchemy import create_engine, select from sqlalchemy.ext import asyncio as asyncio_ext from sqlalchemy.orm import sessionmaker -from sqlalchemy import create_engine +from sqlalchemy import Integer, String, JSON, LargeBinary +from sqlalchemy.orm import Mapped, mapped_column, DeclarativeBase, MappedAsDataclass +from dataclasses import dataclass, asdict from .serializers import JSONSerializer, BaseSerializer +from .constants import JSONSerializable +from .remote_parameter import RemoteParameter -def create_DB_URL(file_name : str, asynch : bool = False): - if file_name.endswith('.json'): - file = open(file_name, 'r') - conf = JSONSerializer.general_load(file) - else: - raise ValueError("config files of extension - {} expected, given file name {}".format(["json"], file_name)) - - host = conf.get("host", 'localhost') - port = conf.get("port", 5432) - user = conf.get('user', 'postgres') - password = conf.get('password', '') + +class RemoteObjectTableBase(DeclarativeBase): + pass + +class RemoteObjectInformation(MappedAsDataclass, RemoteObjectTableBase): + __tablename__ = "remote_objects" + + instance_name : Mapped[str] = mapped_column(String, primary_key = True) + class_name : Mapped[str] = mapped_column(String) + http_server : Mapped[str] = mapped_column(String) + script : Mapped[str] = mapped_column(String) + args : 
Mapped[JSONSerializable] = mapped_column(JSON) + kwargs : Mapped[JSONSerializable] = mapped_column(JSON) + eventloop_name : Mapped[str] = mapped_column(String) + level : Mapped[int] = mapped_column(Integer) + level_type : Mapped[str] = mapped_column(String) + + def json(self): + return asdict(self) - if asynch: - return f"postgresql+asyncpg://{user}:{password}@{host}:{port}" - else: - return f"postgresql://{user}:{password}@{host}:{port}" +class SerializedParameter(MappedAsDataclass, RemoteObjectTableBase): + __tablename__ = "parameters" + + id : Mapped[int] = mapped_column(Integer, primary_key = True, autoincrement = True) + instance_name : Mapped[str] = mapped_column(String) + name : Mapped[str] = mapped_column(String) + serialized_value : Mapped[bytes] = mapped_column(LargeBinary) + +@dataclass +class DeserializedParameter: # not part of database + name : str + value : typing.Any + + +class Database: + + @classmethod + def create_URL(file_name : str, asynch : bool = False): # async is a keyword + if file_name.endswith('.json'): + file = open(file_name, 'r') + conf = JSONSerializer.generic_load(file) + else: + raise ValueError("config files of extension - {} expected, given file name {}".format(["json"], file_name)) + + host = conf.get("host", 'localhost') + port = conf.get("port", 5432) + user = conf.get('user', 'postgres') + password = conf.get('password', '') + + if asynch: + return f"postgresql+asyncpg://{user}:{password}@{host}:{port}" + else: + return f"postgresql://{user}:{password}@{host}:{port}" +class BaseAsyncDB(Database): + """ + Base class for an async database engine, implements configuration file reader, + sqlalchemy engine & session creation. -class BaseAsyncDB: + Parameters + ---------- + database: str + The database to open in the database server specified in config_file (see below) + serializer: BaseSerializer + The serializer to use for serializing and deserializing data (for example + parameter serializing before writing to database). 
Will be the same as + serializer supplied to ``RemoteObject``. + config_file: str + absolute path to database server configuration file + """ - def __init__(self, database : str, serializer : BaseSerializer, config_file : typing.Union[str, None] = None) -> None: + def __init__(self, database : str, serializer : BaseSerializer, + config_file : typing.Union[str, None] = None) -> None: if config_file: - URL = f"{create_DB_URL(config_file, True)}/{database}" - self.engine = asyncio_ext.create_async_engine(URL, echo = True) - self.async_session = sessionmaker(self.engine, expire_on_commit=True, class_= asyncio_ext.AsyncSession) # type: ignore + URL = f"{self.create_URL(config_file, True)}/{database}" + self.engine = asyncio_ext.create_async_engine(URL, echo=True) + self.async_session = sessionmaker(self.engine, expire_on_commit=True, + class_= asyncio_ext.AsyncSession) # type: ignore self.serializer = serializer -class BaseSyncDB: +class BaseSyncDB(Database): + """ + Base class for an synchronous (blocking) database engine, implements + configuration file reader, sqlalchemy engine & session creation. - def __init__(self, database : str, serializer : BaseSerializer, config_file : typing.Union[str, None] = None) -> None: + Parameters + ---------- + database: str + The database to open in the database server specified in config_file (see below) + serializer: BaseSerializer + The serializer to use for serializing and deserializing data (for example + parameter serializing into database for storage). Will be the same as + serializer supplied to ``RemoteObject``. 
+    config_file: str +        absolute path to database server configuration file +    """ + +    def __init__(self, database : str, serializer : BaseSerializer, +                config_file : typing.Union[str, None] = None) -> None: if config_file: -            URL = f"{create_DB_URL(config_file, False)}/{database}" +            URL = f"{self.create_URL(config_file, False)}/{database}" self.engine = create_engine(URL, echo = True) self.sync_session = sessionmaker(self.engine, expire_on_commit=True) self.serializer = serializer + +class RemoteObjectDB(BaseSyncDB): +    """ +    Database engine composed within ``RemoteObject``, carries out database +    operations like storing object information, parameters etc. + +    Parameters +    ---------- +    instance_name: str +        ``instance_name`` of the ``RemoteObject`` +    serializer: BaseSerializer +        serializer used by the ``RemoteObject``. The serializer to use for +        serializing and deserializing data (for example parameter serializing +        into database for storage). +    config_file: str +        configuration file of the database server +    """ + +    def __init__(self, instance_name : str, serializer : BaseSerializer, +                config_file: typing.Optional[str] = None) -> None: +        super().__init__(database='scadapyserver', serializer=serializer, +                        config_file=config_file) +        self.instance_name = instance_name + +    def fetch_own_info(self) -> RemoteObjectInformation: +        """ +        fetch ``RemoteObject`` instance's own information, for schema see +        ``RemoteObjectInformation``. + +        Returns +        ------- +        info: RemoteObjectInformation +        """ +        with self.sync_session() as session: +            stmt = select(RemoteObjectInformation).filter_by(instance_name=self.instance_name) +            data = session.execute(stmt) +            data = data.scalars().all() +            if len(data) == 0: +                return None +            return data[0] + +    def read_all_parameters(self, deserialized : bool = True) -> typing.Sequence[ +                                typing.Union[SerializedParameter, DeserializedParameter]]: +        """ +        read all parameters of the ``RemoteObject`` instance.
+ +        Parameters +        ---------- +        deserialized: bool, default True +            deserialize the parameters if True +        """ +        with self.sync_session() as session: +            stmt = select(SerializedParameter).filter_by(instance_name=self.instance_name) +            data = session.execute(stmt) +            existing_params = data.scalars().all() #type: typing.Sequence[SerializedParameter] +            if not deserialized: +                return existing_params +            params_data = [] +            for param in existing_params: +                params_data.append(DeserializedParameter( +                    name = param.name, +                    value = self.serializer.loads(param.serialized_value) +                )) +            return params_data + +    def create_missing_db_parameters(self, +                    parameters : typing.Dict[str, RemoteParameter]) -> None: +        """ +        create any and all missing remote parameters of ``RemoteObject`` instance +        in database. + +        Parameters +        ---------- +        parameters: Dict[str, RemoteParameter] +            descriptors of the parameters +        """ +        with self.sync_session() as session: +            existing_params = self.read_all_parameters() +            existing_names = [p.name for p in existing_params] +            for name, new_param in parameters.items(): +                if name not in existing_names: +                    param = SerializedParameter( +                        instance_name=self.instance_name, +                        name=new_param.name, +                        serialized_value=self.serializer.dumps(new_param.default) +                    ) +                    session.add(param) +            session.commit() + +    def edit_parameter(self, parameter : RemoteParameter, +                    value : typing.Any) -> None: +        """ +        change the parameter value of an already existing parameter + +        Parameters +        ---------- +        parameter: RemoteParameter +            descriptor of the parameter +        value: Any +            value of the parameter +        """ +        with self.sync_session() as session: +            stmt = select(SerializedParameter).filter_by(instance_name=self.instance_name, +                                    name=parameter.name) +            data = session.execute(stmt) +            param = data.scalar() +            param.serialized_value = self.serializer.dumps(value) +            session.commit() + + + -__all__ = ['BaseAsyncDB'] \ No newline at end of file +__all__ = [ +    'BaseAsyncDB', +    'BaseSyncDB', +    'RemoteObjectDB' +] \ No
newline at end of file diff --git a/hololinked/server/decorators.py b/hololinked/server/decorators.py index f63fcb0..feb56a1 100644 --- a/hololinked/server/decorators.py +++ b/hololinked/server/decorators.py @@ -1,18 +1,17 @@ +import functools import typing +from enum import Enum from types import FunctionType from inspect import iscoroutinefunction, getfullargspec -from enum import Enum -from functools import wraps -from dataclasses import dataclass, asdict, field, fields - -from .data_classes import RemoteResourceInfoValidator, RemoteResource -from .constants import (USE_OBJECT_NAME, UNSPECIFIED, GET, POST, PUT, DELETE, PATCH, WRAPPER_ASSIGNMENTS) +from .data_classes import RemoteResourceInfoValidator +from .constants import (USE_OBJECT_NAME, UNSPECIFIED, HTTP_METHODS) from .utils import wrap_text -from .path_converter import compile_path +WRAPPER_ASSIGNMENTS = functools.WRAPPER_ASSIGNMENTS + ('__kwdefaults__', '__defaults__', ) + def wrap_method(method : FunctionType): """wraps a methods with useful operations before and after calling a method. Old : use case not decided. @@ -25,7 +24,7 @@ def wrap_method(method : FunctionType): as much as possible """ - @wraps(method, WRAPPER_ASSIGNMENTS) + @functools.wraps(method, WRAPPER_ASSIGNMENTS) def wrapped_method(*args, **kwargs) -> typing.Any: self = args[0] self.logger.debug("called {} of instance {}".format(method.__qualname__, self.instance_name)) @@ -47,7 +46,7 @@ def is_private_attribute(attr_name: str) -> bool: return False -def remote_method(URL_path : str = USE_OBJECT_NAME, http_method : str = POST, +def remote_method(URL_path : str = USE_OBJECT_NAME, http_method : str = HTTP_METHODS.POST, state : typing.Optional[typing.Union[str, Enum]] = None) -> typing.Callable: """Use this function to decorate your methods to be accessible remotely. 
@@ -118,65 +117,37 @@ def inner(obj): return inner -def remote_parameter(**kwargs): - from .remote_parameter import RemoteParameter - return RemoteParameter(*kwargs) - +def remote_parameter(default: typing.Any = None, *, doc : typing.Optional[str] = None, + constant : bool = False, readonly : bool = False, allow_None : bool = False, + URL_path : str = USE_OBJECT_NAME, remote : bool = True, + http_method : typing.Tuple[typing.Optional[str], typing.Optional[str]] = (HTTP_METHODS.GET, HTTP_METHODS.PUT), + state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, + db_persist : bool = False, db_init : bool = False, db_commit : bool = False, + class_member : bool = False, fget : typing.Optional[typing.Callable] = None, + fset : typing.Optional[typing.Callable] = None, fdel : typing.Optional[typing.Callable] = None, + deepcopy_default : bool = False, per_instance_descriptor : bool = False, + precedence : typing.Optional[float] = None, metadata : typing.Optional[typing.Dict] = None, + parameter_type : typing.Optional["RemoteParameter"] = None, **kwargs + ) -> "RemoteParameter": + """ + use like python ``property`` without declaring a remote parameter explicity. 
+ """ + if type is not None and not isinstance(type, RemoteParameter): + raise TypeError(f"type argument must be a RemoteParameter if supplied, given type {parameter_type(type)}") + else: + parameter_type = RemoteParameter + # will raise import error when specified in argument -@dataclass -class FuncInfo: - module : str - name : str - qualname : str - doc : str - kwdefaults : typing.Any - defaults : typing.Any - scadapy : RemoteResource + return parameter_type(default=default, constant=constant, readonly=readonly, + allow_None=allow_None, URL_path=URL_path, remote=remote, http_method=http_method, + state=state, db_persist=db_persist, db_init=db_init, db_commit=db_commit, + class_member=class_member, fget=fget, fset=fset, fdel=fdel, + deepcopy_default=deepcopy_default, per_instance_descriptor=per_instance_descriptor, + precedence=precedence, metadata=metadata, **kwargs) - def json(self): - return asdict(self) - - -# @dataclass -# class DB_registration_info: -# script : str -# instance_name : str -# http_server : str = field(default = '') -# args : Tuple[Any] = field(default = tuple()) -# kwargs : Dict[str, Any] = field(default = dict()) -# eventloop : str = field(default = '') -# level : int = field(default = 1) -# level_type : str = field(default = '') - - -# def parse_request_args(*args, method : str): -# """ -# This method is useful when linters figure out conditional returns on decorators -# """ -# arg_len = len(args) -# if arg_len > 2 or arg_len == 0: -# raise ValueError( -# """ -# method {}() accepts only two argument, URL and/or a function/method. -# Given length of arguments : {}. 
-# """.format(method.lower(), arg_len) -# ) -# if isinstance(args[0], FunctionType): -# target = args[0] -# elif len(args) > 1 and isinstance(args[1], FunctionType): -# target = args[1] -# else: -# target = None -# if isinstance(args[0], str): -# URL = args[0] -# elif len(args) > 1 and isinstance(args[1], str): -# URL = args[1] -# else: -# URL = USE_OBJECT_NAME -# return target, URL - +from .remote_parameter import RemoteParameter __all__ = ['remote_method', 'remote_parameter'] diff --git a/hololinked/server/eventloop.py b/hololinked/server/eventloop.py index 9e53635..ea255fe 100644 --- a/hololinked/server/eventloop.py +++ b/hololinked/server/eventloop.py @@ -6,7 +6,6 @@ import typing import threading - from .utils import unique_id, wrap_text from .constants import * from .remote_parameters import TypedDict diff --git a/hololinked/server/exceptions.py b/hololinked/server/exceptions.py index 7deb3db..ded6c4e 100644 --- a/hololinked/server/exceptions.py +++ b/hololinked/server/exceptions.py @@ -17,4 +17,5 @@ class StateMachineError(Exception): pass + __all__ = ['BreakInnerLoop', 'BreakAllLoops', 'StateMachineError'] \ No newline at end of file diff --git a/hololinked/server/host_server.py b/hololinked/server/host_server.py deleted file mode 100644 index 46d89c6..0000000 --- a/hololinked/server/host_server.py +++ /dev/null @@ -1,70 +0,0 @@ -from sqlalchemy_utils import create_database, database_exists -import logging - -from ..param.parameters import String, TypedList -from .HTTPServer import HTTPServer -from .eventloop import Consumer -from .host_utilities import (create_tables, create_server_tables, - SERVER_INSTANCE_NAME, CLIENT_HOST_INSTANCE_NAME) -from .database import create_DB_URL - - - -class PCHostServer(HTTPServer): - - consumers = TypedList(item_type=Consumer, default=None, allow_None=True, - doc="""Remote Object to be directly served within the HTTP server""") - - db_config_file = String(default='') - - def __init__(self, *, port = 8080, address = 
'0.0.0.0', log_level = logging.INFO, - db_config_file = 'host_db_config.json', json_serializer = None, protocol_version = 2, **kwargs): - super().__init__( - consumers = None, - port = port, - address = address, - logger = kwargs.get('logger', None), - log_level = log_level, - json_serializer = json_serializer, - protocol_version = protocol_version - ) - - - -class PrimaryHostServer(HTTPServer): - - consumers = TypedList(item_type=Consumer, default=None, allow_None=True, - doc="""Remote Object to be directly served within the HTTP server""") - - db_config_file = String(default='') - - def __init__(self, *, port = 8080, address = '0.0.0.0', log_level = logging.INFO, - db_config_file = 'host_db_config.json', json_serializer = None, protocol_version = 2, **kwargs): - super().__init__( - consumers = None, - port = port, - address = address, - logger = kwargs.get('logger', None), - log_level = log_level, - json_serializer = json_serializer, - protocol_version = protocol_version - ) - self.db_config_file = db_config_file - self.create_databases() - - @property - def all_ok(self, boolean=False): - super().all_ok - react_client_utilities = Consumer(ReactClientUtilities, db_config_file = self.db_config_file, - instance_name = CLIENT_HOST_INSTANCE_NAME, logger = self.logger) - server_side_utilities = Consumer(PrimaryHostUtilities, db_config_file = self.db_config_file, - instance_name = SERVER_INSTANCE_NAME, server_network_interface = 'Wi-Fi', - port = self.port, logger = self.logger) - self.consumers = [react_client_utilities, server_side_utilities] - return True - - - - - -__all__ = ['PCHostServer', 'PrimaryHostServer'] \ No newline at end of file diff --git a/hololinked/server/host_utilities.py b/hololinked/server/host_utilities.py index a159140..70fa575 100644 --- a/hololinked/server/host_utilities.py +++ b/hololinked/server/host_utilities.py @@ -29,7 +29,7 @@ from .http_methods import post, get, put, delete from .eventloop import Consumer, EventLoop, fork_empty_eventloop 
from .remote_object import RemoteObject, RemoteObjectDB, RemoteObjectMetaclass -from .database import BaseAsyncDB, create_DB_URL +from .database import BaseAsyncDB SERVER_INSTANCE_NAME = 'server-util' @@ -311,7 +311,7 @@ async def get(self): def create_primary_host(config_file : str, ssl_context : ssl.SSLContext, **server_settings) -> TornadoHTTP1Server: - URL = f"{create_DB_URL(config_file)}/hololinked-host" + URL = f"{DB.create_DB_URL(config_file)}/hololinked-host" if not database_exists(URL): try: create_database(URL) @@ -324,7 +324,7 @@ def create_primary_host(config_file : str, ssl_context : ssl.SSLContext, **serve raise ex from None global global_engine, global_session - URL = f"{create_DB_URL(config_file, True)}/hololinked-host" + URL = f"{DB.create_DB_URL(config_file, True)}/hololinked-host" global_engine = asyncio_ext.create_async_engine(URL, echo=True) global_session = sessionmaker(global_engine, expire_on_commit=True, class_=asyncio_ext.AsyncSession) # type: ignore @@ -450,7 +450,7 @@ class HTTPServerUtilities(BaseAsyncDB, RemoteObject): type : str = 'NORMAL_REMOTE_OBJECT_SERVER' - remote_object_info = TypedList(default=None, allow_None=True, item_type=(RemoteObjectDB.RemoteObjectInfo), + remote_object_info = TypedList(default=None, allow_None=True, URL_path='/remote-object-info') def __init__(self, db_config_file : typing.Union[str, None], zmq_client_pool : MessageMappedZMQClientPool, diff --git a/hololinked/server/http_methods.py b/hololinked/server/http_methods.py index 4c4a2d2..9ebff79 100644 --- a/hololinked/server/http_methods.py +++ b/hololinked/server/http_methods.py @@ -1,46 +1,41 @@ - - - - - -from .constants import USE_OBJECT_NAME, GET, POST, PUT, DELETE, PATCH +from .constants import USE_OBJECT_NAME, HTTP_METHODS from .decorators import remote_method -def get(URL_path = USE_OBJECT_NAME): +def get(URL_path : str = USE_OBJECT_NAME): """ use it on RemoteObject subclass methods to be available with GET HTTP request. 
method is also by default accessible to proxy clients. """ - return remote_method(URL_path=URL_path, http_method=GET) + return remote_method(URL_path=URL_path, http_method=HTTP_METHODS.GET) -def post(URL_path = USE_OBJECT_NAME): +def post(URL_path : str = USE_OBJECT_NAME): """ use it on RemoteObject subclass methods to be available with POST HTTP request. method is also by default accessible to proxy clients. """ - return remote_method(URL_path=URL_path, http_method=POST) + return remote_method(URL_path=URL_path, http_method=HTTP_METHODS.POST) -def put(URL_path = USE_OBJECT_NAME): +def put(URL_path : str = USE_OBJECT_NAME): """ use it on RemoteObject subclass methods to be available with PUT HTTP request. method is also by default accessible to proxy clients. """ - return remote_method(URL_path=URL_path, http_method=PUT) + return remote_method(URL_path=URL_path, http_method=HTTP_METHODS.PUT) -def delete(URL_path = USE_OBJECT_NAME): +def delete(URL_path : str = USE_OBJECT_NAME): """ use it on RemoteObject subclass methods to be available with DELETE HTTP request. method is also by default accessible to proxy clients. """ - return remote_method(URL_path=URL_path, http_method=DELETE) + return remote_method(URL_path=URL_path, http_method=HTTP_METHODS.DELETE) -def patch(URL_path = USE_OBJECT_NAME): +def patch(URL_path : str = USE_OBJECT_NAME): """ use it on RemoteObject subclass methods to be available with PATCH HTTP request. method is also by default accessible to proxy clients. 
""" - return remote_method(URL_path=URL_path, http_method=PATCH) + return remote_method(URL_path=URL_path, http_method=HTTP_METHODS.PATCH) __all__ = ['get', 'put', 'post', 'delete', 'patch'] \ No newline at end of file diff --git a/hololinked/server/proxy_client.py b/hololinked/server/proxy_client.py deleted file mode 100644 index e69de29..0000000 diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index 2fb7dc0..904520a 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -1,5 +1,4 @@ import asyncio -import json import logging import inspect import os @@ -7,20 +6,14 @@ import time import typing import datetime +import zmq from collections import deque from enum import EnumMeta, Enum -from dataclasses import asdict, dataclass - -from sqlalchemy import (Integer as DBInteger, String as DBString, JSON as DB_JSON, LargeBinary as DBBinary) -from sqlalchemy import select -from sqlalchemy.orm import Mapped, mapped_column, DeclarativeBase, MappedAsDataclass -import zmq from ..param.parameterized import Parameterized, ParameterizedMetaclass - -from .constants import (EVENT, GET, IMAGE_STREAM, JSONSerializable, CallableType, CALLABLE, - PARAMETER, READ, WRITE, log_levels, POST, ZMQ_PROTOCOLS, FILE) +from .database import RemoteObjectDB +from .constants import (JSONSerializable, CallableType, LOGLEVEL, ZMQ_PROTOCOLS, HTTP_METHODS) from .serializers import * from .exceptions import BreakInnerLoop from .decorators import remote_method @@ -28,7 +21,6 @@ from .data_classes import (GUIResources, RemoteResource, HTTPResource, RPCResource, RemoteResourceInfoValidator, ServerSentEvent) from .api_platform_utils import postman_item, postman_itemgroup -from .database import BaseAsyncDB, BaseSyncDB from .utils import create_default_logger, get_signature, wrap_text from .api_platform_utils import * from .remote_parameter import FileServer, PlotlyFigure, ReactApp, RemoteParameter, RemoteClassParameters, Image @@ 
-192,92 +184,7 @@ def query(self, info : typing.Union[str, typing.List[str]] ) -> typing.Any: -class RemoteObjectDB(BaseSyncDB): - - class TableBase(DeclarativeBase): - pass - - class RemoteObjectInfo(MappedAsDataclass, TableBase): - __tablename__ = "remote_objects" - instance_name : Mapped[str] = mapped_column(DBString, primary_key = True) - class_name : Mapped[str] = mapped_column(DBString) - http_server : Mapped[str] = mapped_column(DBString) - script : Mapped[str] = mapped_column(DBString) - args : Mapped[JSONSerializable] = mapped_column(DB_JSON) - kwargs : Mapped[JSONSerializable] = mapped_column(DB_JSON) - eventloop_name : Mapped[str] = mapped_column(DBString) - level : Mapped[int] = mapped_column(DBInteger) - level_type : Mapped[str] = mapped_column(DBString) - - def json(self): - return asdict(self) - - class Parameter(TableBase): - __tablename__ = "parameters" - - id : Mapped[int] = mapped_column(DBInteger, primary_key = True, autoincrement = True) - instance_name : Mapped[str] = mapped_column(DBString) - name : Mapped[str] = mapped_column(DBString) - serialized_value : Mapped[bytes] = mapped_column(DBBinary) - - @dataclass - class ParameterData: - name : str - value : typing.Any - - def __init__(self, instance_name : str, serializer : BaseSerializer, - config_file: typing.Union[str, None] = None ) -> None: - super().__init__(database = 'scadapyserver', serializer = serializer, config_file = config_file) - self.instance_name = instance_name - - def fetch_own_info(self) -> RemoteObjectInfo: - with self.sync_session() as session: - stmt = select(self.RemoteObjectInfo).filter_by(instance_name = self.instance_name) - data = session.execute(stmt) - data = data.scalars().all() - if len(data) == 0: - return None - return data[0] - - def read_all_parameters(self, deserialized : bool = True) -> typing.Sequence[typing.Union[Parameter, - ParameterData]]: - with self.sync_session() as session: - stmt = select(self.Parameter).filter_by(instance_name = 
self.instance_name) - data = session.execute(stmt) - existing_params = data.scalars().all() - if not deserialized: - return existing_params - else: - params_data = [] - for param in existing_params: - params_data.append(self.ParameterData( - name = param.name, - value = self.serializer.loads(param.serialized_value) - )) - return params_data - - def edit_parameter(self, parameter : RemoteParameter, value : typing.Any) -> None: - with self.sync_session() as session: - stmt = select(self.Parameter).filter_by(instance_name = self.instance_name, name = parameter.name) - data = session.execute(stmt) - param = data.scalar() - param.serialized_value = self.serializer.dumps(value) - session.commit() - - def create_missing_db_parameters(self, parameters : typing.Dict[str, RemoteParameter]) -> None: - with self.sync_session() as session: - existing_params = self.read_all_parameters() - existing_names = [p.name for p in existing_params] - for name, new_param in parameters.items(): - if name not in existing_names: - param = self.Parameter( - instance_name = self.instance_name, - name = new_param.name, - serialized_value = self.serializer.dumps(new_param.default) - ) - session.add(param) - session.commit() @@ -453,7 +360,7 @@ def _prepare_resources(self): resource._owner = self resource._unique_event_name = bytes(f"{self._full_URL_path_prefix}{resource.URL_path}", encoding='utf-8') resource.publisher = self._event_publisher - httpserver_resources[GET]['{}{}'.format( + httpserver_resources[HTTP_METHODS.GET]['{}{}'.format( self._full_URL_path_prefix, resource.URL_path)] = ServerSentEvent( # event URL_path has '/' prefix what=EVENT, @@ -824,7 +731,7 @@ def test_speed(self, value : typing.Any): return value # example of remote_method decorator - @remote_method(URL_path='/log/console', http_method = POST) + @remote_method(URL_path='/log/console', http_method = HTTP_METHODS.POST) def log_to_console(self, data : typing.Any = None, level : typing.Any = 'DEBUG') -> None: if level not in 
log_levels.keys(): self.logger.error("log level {} invalid. logging with level INFO.".format(level)) diff --git a/hololinked/server/remote_parameter.py b/hololinked/server/remote_parameter.py index 74f98e7..84f9b24 100644 --- a/hololinked/server/remote_parameter.py +++ b/hololinked/server/remote_parameter.py @@ -4,7 +4,7 @@ from ..param.parameterized import Parameter, Parameterized, ClassParameters from .decorators import RemoteResourceInfoValidator -from .constants import GET, PUT, USE_OBJECT_NAME +from .constants import USE_OBJECT_NAME, HTTP_METHODS from .zmq_message_brokers import Event try: @@ -12,7 +12,7 @@ except: go = None -__default_parameter_write_method__ = PUT +__default_parameter_write_method__ = HTTP_METHODS.PUT __parameter_info__ = [ 'allow_None' , 'class_member', 'constant', 'db_init', 'db_persist', @@ -153,7 +153,7 @@ class RemoteParameter(Parameter): def __init__(self, default: typing.Any = None, *, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, URL_path : str = USE_OBJECT_NAME, remote : bool = True, - http_method : typing.Tuple[typing.Optional[str], typing.Optional[str]] = (GET, PUT), + http_method : typing.Tuple[typing.Optional[str], typing.Optional[str]] = (HTTP_METHODS.GET, HTTP_METHODS.PUT), state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, @@ -240,8 +240,7 @@ def __init__(self, default_figure, *, polled : bool = False, refresh_interval : typing.Optional[int] = None, update_event_name : typing.Optional[str] = None, doc: typing.Union[str, None] = None, URL_path : str = USE_OBJECT_NAME) -> None: - super().__init__(default=default_figure, doc=doc, constant=True, readonly=True, URL_path=URL_path, - http_method=(GET, PUT)) + super().__init__(default=default_figure, doc=doc, constant=True, 
readonly=True, URL_path=URL_path) self.data_sources = data_sources self.refresh_interval = refresh_interval self.update_event_name = update_event_name @@ -290,7 +289,7 @@ def validate_and_adapt(self, value : typing.Any) -> typing.Any: if not go: raise ImportError("plotly was not found/imported, install plotly to suport PlotlyFigure paramater") if not isinstance(value, go.Figure): - raise_TypeError(f"figure arguments accepts only plotly.graph_objects.Figure, not type {type(value)}", + raise TypeError(f"figure arguments accepts only plotly.graph_objects.Figure, not type {type(value)}", self) return value @@ -307,7 +306,7 @@ class Image(VisualizationParameter): def __init__(self, default : typing.Any = None, *, streamable : bool = True, doc : typing.Optional[str] = None, constant : bool = False, readonly : bool = False, allow_None : bool = False, URL_path : str = USE_OBJECT_NAME, - http_method : typing.Tuple[typing.Optional[str], typing.Optional[str]] = (GET, PUT), + http_method : typing.Tuple[typing.Optional[str], typing.Optional[str]] = (HTTP_METHODS.GET, HTTP_METHODS.PUT), state : typing.Optional[typing.Union[typing.List, typing.Tuple, str, Enum]] = None, db_persist : bool = False, db_init : bool = False, db_commit : bool = False, class_member : bool = False, fget : typing.Optional[typing.Callable] = None, @@ -354,9 +353,9 @@ def __init__(self, directory : str, *, doc : typing.Optional[str] = None, URL_pa def validate_and_adapt_directory(self, value : str): if not isinstance(value, str): - raise_TypeError(f"FileServer parameter not a string, but type {type(value)}", self) + raise TypeError(f"FileServer parameter not a string, but type {type(value)}", self) if not os.path.isdir(value): - raise_ValueError(f"FileServer parameter directory '{value}' not a valid directory", self) + raise ValueError(f"FileServer parameter directory '{value}' not a valid directory", self) if not value.endswith('\\'): value += '\\' return value diff --git 
a/hololinked/server/remote_parameters.py b/hololinked/server/remote_parameters.py index a9a626a..bd40cb1 100644 --- a/hololinked/server/remote_parameters.py +++ b/hololinked/server/remote_parameters.py @@ -15,7 +15,10 @@ from ..param.parameters import (TypeConstrainedList, TypeConstrainedDict, abbreviate_paths, TypedKeyMappingsConstrainedDict, resolve_path, concrete_descendents, named_objs) from .remote_parameter import RemoteParameter -from .constants import USE_OBJECT_NAME, GET, PUT +from .constants import USE_OBJECT_NAME, HTTP_METHODS + +GET = HTTP_METHODS.GET +PUT = HTTP_METHODS.PUT diff --git a/hololinked/server/serializers.py b/hololinked/server/serializers.py index 8ace9ba..341cd69 100644 --- a/hololinked/server/serializers.py +++ b/hololinked/server/serializers.py @@ -142,21 +142,21 @@ def load(cls, file_desc) -> typing.Dict[str, typing.Any]: return json.load(file_desc) @classmethod - def general_dumps(cls, data) -> bytes: - data = json.dumps(data, ensure_ascii=False, allow_nan = True) + def generic_dumps(cls, data) -> bytes: + data = json.dumps(data, ensure_ascii=False, allow_nan=True) return data.encode("utf-8") @classmethod - def general_dump(cls, data : typing.Dict[str, typing.Any], file_desc) -> None: - json.dump(data, file_desc, ensure_ascii = False, allow_nan = True) + def generic_dump(cls, data : typing.Dict[str, typing.Any], file_desc) -> None: + json.dump(data, file_desc, ensure_ascii=False, allow_nan=True) @classmethod - def general_loads(cls, data : typing.Union[bytearray, memoryview, bytes]) -> typing.Dict[str, typing.Any]: + def generic_loads(cls, data : typing.Union[bytearray, memoryview, bytes]) -> typing.Dict[str, typing.Any]: data = cls._convertToBytes(data).decode("utf-8") # type: ignore return json.loads(data) @classmethod - def general_load(cls, file_desc) -> typing.Dict[str, typing.Any]: + def generic_load(cls, file_desc) -> typing.Dict[str, typing.Any]: return json.load(file_desc) def default(self, obj): diff --git 
a/hololinked/server/webserver_utils.py b/hololinked/server/webserver_utils.py index f235f08..11e544c 100644 --- a/hololinked/server/webserver_utils.py +++ b/hololinked/server/webserver_utils.py @@ -6,7 +6,7 @@ # from tabulate import tabulate from tornado.httputil import HTTPServerRequest -from .constants import CALLABLE, ATTRIBUTE, EVENT, FILE, IMAGE_STREAM +from .constants import ResourceType from .data_classes import FileServerData, ServerSentEvent, HTTPResource from .zmq_message_brokers import AsyncZMQClient, SyncZMQClient diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py index 74c94bb..0c6e1e0 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -1,6 +1,5 @@ import builtins import os -from typing import List import zmq import zmq.asyncio import asyncio @@ -14,8 +13,9 @@ from .utils import create_default_logger, run_method_somehow, wrap_text from .config import global_config from .constants import ZMQ_PROTOCOLS, ServerTypes -from .serializers import (JSONSerializer, PickleSerializer, BaseSerializer, SerpentSerializer, # DillSerializer, - serializers) +from .serializers import (JSONSerializer, PickleSerializer, BaseSerializer, + SerpentSerializer, serializers) + # DillSerializer, from ..param.parameterized import Parameterized @@ -707,11 +707,11 @@ def __init__(self, instance_names : typing.Union[typing.List[str], None] = None, self.poller = zmq.asyncio.Poller() if instance_names: for instance_name in instance_names: - self.pool[instance_name] = AsyncZMQServer(instance_name, ServerTypes.UNKNOWN_TYPE.value, self.context, + self.pool[instance_name] = AsyncZMQServer(instance_name, ServerTypes.UNKNOWN_TYPE, self.context, **kwargs) for server in self.pool.values(): self.poller.register(server.socket, zmq.POLLIN) - super().__init__(server_type = ServerTypes.POOL.value, json_serializer = kwargs.get('json_serializer'), + super().__init__(server_type = ServerTypes.POOL, 
json_serializer = kwargs.get('json_serializer'), rpc_serializer = kwargs.get('rpc_serializer', None)) def register_server(self, server : typing.Union[AsyncZMQServer, AsyncPollingZMQServer]) -> None: @@ -1074,7 +1074,7 @@ def __init__(self, server_instance_name : str, client_type : bytes, **kwargs) -> if server_instance_name: self.server_address = bytes(server_instance_name, encoding='utf-8') self.server_instance_name = server_instance_name - self.server_type = ServerTypes.UNKNOWN_TYPE.value + self.server_type = ServerTypes.UNKNOWN_TYPE super().__init__() @@ -1928,7 +1928,7 @@ def push(self, data : typing.Any = None): class EventPublisher(BaseZMQServer): def __init__(self, identity : str, context : typing.Union[zmq.Context, None] = None, **serializer) -> None: - super().__init__(server_type=ServerTypes.UNKNOWN_TYPE.value, **serializer) + super().__init__(server_type=ServerTypes.UNKNOWN_TYPE, **serializer) self.context = context or zmq.Context() self.identity = identity self.socket = self.context.socket(zmq.PUB) @@ -2018,5 +2018,5 @@ def exit(self): -__all__ = ['ServerTypes', 'AsyncZMQServer', 'AsyncPollingZMQServer', 'ZMQServerPool', 'RPCServer', +__all__ = ['AsyncZMQServer', 'AsyncPollingZMQServer', 'ZMQServerPool', 'RPCServer', 'SyncZMQClient', 'AsyncZMQClient', 'MessageMappedZMQClientPool', 'Event', 'CriticalEvent'] \ No newline at end of file From 9ea812e80d867ac1e7c0fc79bc55891a8d93791a Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Tue, 27 Feb 2024 14:08:18 +0100 Subject: [PATCH 040/167] update to docs --- doc/source/autodoc/index.rst | 2 + .../autodoc/server/database/base_DB.rst | 8 ++++ doc/source/autodoc/server/database/index.rst | 12 ++++++ doc/source/autodoc/server/decorators.rst | 6 +-- doc/source/development_notes.rst | 41 ++++++++++++------- doc/source/examples/index.rst | 9 +++- doc/source/index.rst | 10 ++--- 7 files changed, 63 insertions(+), 25 deletions(-) create mode 100644 doc/source/autodoc/server/database/base_DB.rst create mode 
100644 doc/source/autodoc/server/database/index.rst diff --git a/doc/source/autodoc/index.rst b/doc/source/autodoc/index.rst index e169486..a92fe39 100644 --- a/doc/source/autodoc/index.rst +++ b/doc/source/autodoc/index.rst @@ -17,6 +17,8 @@ hololinked.server server/eventloop server/http_server server/remote_parameter/index + server/decorators + server/database/index server/zmq_message_brokers/index server/data_classes/index diff --git a/doc/source/autodoc/server/database/base_DB.rst b/doc/source/autodoc/server/database/base_DB.rst new file mode 100644 index 0000000..0a1d725 --- /dev/null +++ b/doc/source/autodoc/server/database/base_DB.rst @@ -0,0 +1,8 @@ + +.. autoclass:: hololinked.server.database.BaseAsyncDB + :members: + :show-inheritance: + +.. autoclass:: hololinked.server.database.BaseSyncDB + :members: + :show-inheritance: diff --git a/doc/source/autodoc/server/database/index.rst b/doc/source/autodoc/server/database/index.rst new file mode 100644 index 0000000..31c650c --- /dev/null +++ b/doc/source/autodoc/server/database/index.rst @@ -0,0 +1,12 @@ +Database +======== + +.. autoclass:: hololinked.server.database.RemoteObjectDB + :members: + :show-inheritance: + +.. toctree:: + :hidden: + :maxdepth: 1 + + base_DB \ No newline at end of file diff --git a/doc/source/autodoc/server/decorators.rst b/doc/source/autodoc/server/decorators.rst index c4f65f5..fdf268f 100644 --- a/doc/source/autodoc/server/decorators.rst +++ b/doc/source/autodoc/server/decorators.rst @@ -1,6 +1,6 @@ decorators ========== -.. autoclass:: hololinked.server.decorators - :members: - :show-inheritance: \ No newline at end of file +.. autofunction:: hololinked.server.decorators.remote_method + +.. 
autofunction:: hololinked.server.decorators.remote_parameter \ No newline at end of file diff --git a/doc/source/development_notes.rst b/doc/source/development_notes.rst index 16ffb97..d0f01a6 100644 --- a/doc/source/development_notes.rst +++ b/doc/source/development_notes.rst @@ -1,20 +1,31 @@ .. |module-highlighted| replace:: ``hololinked`` +.. |br| raw:: html + +
+ .. _note: development notes ================= -In the interest of information to software engineers and web developers, the main difference of the above to a conventional RPC or REST(-like) paradigm in HTTP is that, -|module-highlighted| attempts to be a hybrid of both. For instrument control & data-acquisition, it is difficult to move away completely from RPC to REST. Besides, most instrument drivers/hardware -allow only a single persistent connection instead of multiple clients or computers. Further, when such a client process talks to an instrument, only one instruction can be sent at a time. -On the other hand, HTTP Servers are multi-threaded or asyncio oriented by design and REST(-like) API honestly does not seem to care how many simultaneous operations are run. -To reconcile both, the following is proposed: +In the interest of information to software engineers and web developers, the main difference of |module-highlighted| to a conventional +RPC or REST(-like) paradigm in HTTP is that, |module-highlighted| attempts to be a hybrid of both. For instrument control +& data-acquisition, it is difficult to move away completely from RPC to REST. Besides, most instrument drivers/hardware +allow only a single persistent connection with a single process instead of multiple clients or processes. Further, when +such a process talks to an instrument, only one instruction can be sent at a time, which needs to be completed before +the next instruction. On the other hand, HTTP Servers are multi-threaded or asyncio oriented by design and REST(-like) API +does not care how many simultaneous operations are run. 
To reconcile both, the following is proposed: * |module-highlighted| gives the freedom to choose the HTTP request method & end-point URL desirable for each method, parameter and event -* All HTTP requests will be queued and executed serially unless threaded or made async manually by the programmer -* Verb like URLs may be used for methods & noun-like URLs are suggested to be used for parameters and events -* HTTP request method may be mapped as follows: +* All HTTP requests will be queued and executed serially unless threaded or made async manually by the programmer +* parameters can be used to model settings of instrumentation (both hardware and software-only), general class/instance attributes, hold captured & computed data, query said data, for example; basically all operations data and settings oriented. +* events can be used to push measured data, create alerts/alarms, inform availability of certain type of data etc. +* methods can be used to issue commands to instruments like start and stop acquisition, connect/disconnect etc. +* Verb like URLs may be used for methods & noun-like URLs are suggested to be used for parameters and events. Using a state machine or similar, it may be possible to cast all resources to REST-like. +* Modify web request handler to change headers, authentication etc. while leaving the remote object execution details to the package + +HTTP request methods may be mapped as follows: .. 
list-table:: :header-rows: 1 @@ -24,23 +35,23 @@ To reconcile both, the following is proposed: - remote method - event * - GET - - read parameter value - - run method which gives a return value with useful data (which may be difficult or illogical as a `parameter`) - - stream data (for example - measured physical quantities) + - read parameter value |br| (read a setting's value, fetch measured data - for example, measured physical quantities) + - run method which gives a return value with useful data |br| (which may be difficult or illogical as a ``parameter``) + - stream measured data immediately when available instead of fetching every time * - POST - - add dynamic parameters with certain settings + - add dynamic parameters with certain settings |br| (add a dynamic setting or data type etc. for which the logic is already factored in code) - run python logic, methods that connect/disconnect or issue commands to instruments (RPC) - not applicable * - PUT - - write parameter value + - write parameter value |br| (modify a setting and apply it onto the device) - change value of a resource which is difficult to factor into a parameter - not applicable * - DELETE - - remove a dynamic parameter + - remove a dynamic parameter |br| (remove a setting or data type for which the logic is already factored into the code) - developer's interpretation - not applicable * - PATCH - - change settings of a parameter + - change settings of a parameter |br| (change the rules of how a setting can be modified and applied, how a measured data can be stored etc.) 
- change partial value of a resource which is difficult to factor into a parameter or change settings of a parameter with custom logic - not applicable diff --git a/doc/source/examples/index.rst b/doc/source/examples/index.rst index 9220c0e..7f75bad 100644 --- a/doc/source/examples/index.rst +++ b/doc/source/examples/index.rst @@ -4,6 +4,11 @@ Examples Remote Objects -------------- +Beginner's example +__________________ + +These examples are for absolute beginners into the world of data-acquisition + .. toctree:: server/spectrometer/index @@ -14,8 +19,8 @@ Consider also installing * `hololinked-portal `_ to have an web-interface to interact with RemoteObjects (after you can run your example object) * `hoppscotch `_ or `postman `_ -GUI ---- +Web Development +--------------- Some browser based client examples based on ReactJS are hosted at `hololinked.dev `_ diff --git a/doc/source/index.rst b/doc/source/index.rst index 7d6462a..bf8595b 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -19,9 +19,9 @@ based tools. |module-highlighted| is being developed with the following features * being truly pythonic - all code in python & all features of python * easy to understand & setup -* reasonable integration with HTTP to take advantage of browser based GUI frameworks (like ReactJS) +* reasonable integration with HTTP to take advantage of modern web practices and Javascript GUI frameworks like React * agnostic to system size & flexibility in topology -* 30FPS 1280*1080*3 image streaming over HTTP +* 30FPS 1280*1080*3 (8 bit) image streaming over HTTP In short - to use it in your home/hobby, in a lab or in a research facility & industry. @@ -32,7 +32,9 @@ and/or TCP after implementation by the |module-highlighted| developer. Interproc available for restriction to single-computer applications. Remote methods can be used to run control and measurement operations on your instruments or arbitrary python logic. 
Remote parameters are type-checked object attributes with getter-setter options (identical to python ``property`` with added network access). Events allow to asynchronously push -arbitrary data to clients. Once such a ``RemoteObject`` is instantiated, it can be connected with the server of choice. +arbitrary data to clients. Once such a ``RemoteObject`` is instantiated, it can be connected with a server of choice (one or many). + +Please follow the documentation for examples & tutorials, how-to's and API reference. .. warning:: This project is under development and is an idealogical state. Please use it only for playtesting or exploring. @@ -40,8 +42,6 @@ arbitrary data to clients. Once such a ``RemoteObject`` is instantiated, it can .. note:: web developers & software engineers, consider reading the :ref:`note ` section -Please follow the documentation for examples & tutorials, how-to's and API reference. - .. toctree:: :maxdepth: 1 :caption: Contents: From 868c76e78e0d0d5b6783e4d97209e29a07e4e9c4 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Tue, 27 Feb 2024 14:19:54 +0100 Subject: [PATCH 041/167] removed docstring warnings --- doc/source/development_notes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/development_notes.rst b/doc/source/development_notes.rst index d0f01a6..e590f8a 100644 --- a/doc/source/development_notes.rst +++ b/doc/source/development_notes.rst @@ -6,7 +6,7 @@ .. 
_note: -development notes +Development Notes ================= In the interest of information to software engineers and web developers, the main difference of |module-highlighted| to a conventional From 4485bcf78c7ff0cc1ef5fad92fcdedc9c46da4fb Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Tue, 27 Feb 2024 14:20:10 +0100 Subject: [PATCH 042/167] removed docstring warnings --- hololinked/server/database.py | 8 ++++---- hololinked/server/decorators.py | 22 +++++++++++++-------- hololinked/server/remote_object.py | 25 +++++++++++++++--------- hololinked/server/remote_parameters.py | 8 -------- hololinked/server/zmq_message_brokers.py | 7 ++++--- hololinked/wot/actions.py | 0 hololinked/wot/events.py | 0 hololinked/wot/properties.py | 4 ++++ hololinked/wot/thing.py | 7 +++++++ 9 files changed, 49 insertions(+), 32 deletions(-) create mode 100644 hololinked/wot/actions.py create mode 100644 hololinked/wot/events.py create mode 100644 hololinked/wot/properties.py create mode 100644 hololinked/wot/thing.py diff --git a/hololinked/server/database.py b/hololinked/server/database.py index 6b6ff15..c5bf666 100644 --- a/hololinked/server/database.py +++ b/hololinked/server/database.py @@ -190,8 +190,8 @@ def create_missing_db_parameters(self, create any and all missing remote parameters of ``RemoteObject`` instance in database. 
- Parameter - --------- + Parameters + ---------- parameters: Dict[str, RemoteParamater] descriptors of the parameters """ @@ -213,8 +213,8 @@ def edit_parameter(self, parameter : RemoteParameter, """ change the parameter value of an already existing parameter - Parameter - --------- + Parameters + ---------- parameter: RemoteParameter descriptor of the parameter value: Any diff --git a/hololinked/server/decorators.py b/hololinked/server/decorators.py index feb56a1..5f66c4b 100644 --- a/hololinked/server/decorators.py +++ b/hololinked/server/decorators.py @@ -50,14 +50,20 @@ def remote_method(URL_path : str = USE_OBJECT_NAME, http_method : str = HTTP_MET state : typing.Optional[typing.Union[str, Enum]] = None) -> typing.Callable: """Use this function to decorate your methods to be accessible remotely. - Args: - URL_path (str, optional): The path of URL under which the object is accessible. defaults to name of the object. - http_method (str, optional) : HTTP method (GET, POST, PUT etc.). defaults to POST. - state (Union[str, Tuple[str]], optional): state under which the object can executed or written. When not provided, - its accessible or can be executed under any state. - - Returns: - Callable: returns the callable object as it is or wrapped within loggers + Parameters + ---------- + URL_path: str, optional + The path of URL under which the object is accessible. defaults to name of the object. + http_method: str, optional + HTTP method (GET, POST, PUT etc.). defaults to POST. + state: str | Tuple[str], optional + state under which the object can be executed or written. When not provided, + it's accessible or can be executed under any state. 
+ + Returns + ------- + callable: Callable + returns the callable object as it is or wrapped within loggers """ def inner(obj): diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index 904520a..8153220 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -35,16 +35,23 @@ class StateMachine: A container class for state machine related logic, this is intended to be used by the RemoteObject and its descendents. - Args: - initial_state (str): initial state of machine - states (Enum): Enum type holding enumeration of states - on_enter (Dict[str, Union[Callable, RemoteParameter]]): callbacks to be invoked when a certain state is entered. - It is to be specified as a dictionary with the states being the keys - on_exit (Dict[str, Union[Callable, RemoteParameter]]): callbacks to be invoked when a certain state is exited. - It is to be specified as a dictionary with the states being the keys + Parameters + ---------- + initial_state: str + initial state of machine + states: Enum + enumeration of states + on_enter: Dict[str, Callable | RemoteParameter] + callbacks to be invoked when a certain state is entered. It is to be specified + as a dictionary with the states being the keys + on_exit: Dict[str, Callable | RemoteParameter] + callbacks to be invoked when a certain state is exited. 
+ It is to be specified as a dictionary with the states being the keys - Attributes: - exists (bool): internally computed, True if states and initial_states are valid + Attributes + ---------- + exists: bool + internally computed, True if states and initial_states are valid """ initial_state = ClassSelector(default=None, allow_None=True, constant=True, class_=(Enum, str)) exists = Boolean(default=False) diff --git a/hololinked/server/remote_parameters.py b/hololinked/server/remote_parameters.py index bd40cb1..32521b5 100644 --- a/hololinked/server/remote_parameters.py +++ b/hololinked/server/remote_parameters.py @@ -25,14 +25,6 @@ class String(RemoteParameter): """ A string parameter with a default value and optional regular expression (regex) matching. - - Example of using a regex to implement IPv4 address matching:: - - class IPAddress(String): - '''IPv4 address as a string (dotted decimal notation)''' - def __init__(self, default="0.0.0.0", allow_None=False, **kwargs): - ip_regex = r'^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$' - super(IPAddress, self).__init__(default=default, regex=ip_regex, **kwargs) """ __slots__ = ['regex'] diff --git a/hololinked/server/zmq_message_brokers.py b/hololinked/server/zmq_message_brokers.py index 0c6e1e0..1780963 100644 --- a/hololinked/server/zmq_message_brokers.py +++ b/hololinked/server/zmq_message_brokers.py @@ -1030,6 +1030,7 @@ class BaseZMQClient(BaseZMQ): server's reply to client :: + [address, bytes(), server type , message_type, message id, content or response or reply] [ 0 , 1 , 2 , 3 , 4 , 5 ] @@ -1040,11 +1041,10 @@ class BaseZMQClient(BaseZMQ): client_type: str RPC or HTTP Server **kwargs: - rpc_serializer: + rpc_serializer: BaseSerializer custom implementation of RPC serializer if necessary - json_serializer: + json_serializer: JSONSerializer custom implementation of JSON serializer if necessary - """ def __init__(self, server_instance_name : str, client_type : bytes, 
**kwargs) -> None: @@ -1246,6 +1246,7 @@ def send_instruction(self, instruction : str, arguments : typing.Dict[str, typin client's message to server: :: + [address, bytes(), client type, message type, messsage id, [ 0 , 1 , 2 , 3 , 4 , diff --git a/hololinked/wot/actions.py b/hololinked/wot/actions.py new file mode 100644 index 0000000..e69de29 diff --git a/hololinked/wot/events.py b/hololinked/wot/events.py new file mode 100644 index 0000000..e69de29 diff --git a/hololinked/wot/properties.py b/hololinked/wot/properties.py new file mode 100644 index 0000000..002c816 --- /dev/null +++ b/hololinked/wot/properties.py @@ -0,0 +1,4 @@ +from ..server.remote_parameter import RemoteParameter as property + + +__all__ = ['property'] \ No newline at end of file diff --git a/hololinked/wot/thing.py b/hololinked/wot/thing.py new file mode 100644 index 0000000..78a8e46 --- /dev/null +++ b/hololinked/wot/thing.py @@ -0,0 +1,7 @@ +from ..server.remote_object import RemoteObject + +class Thing(RemoteObject): + pass + + +__all__ = ['Thing'] \ No newline at end of file From 6c93276861700197cbfd08bed39670ce6fb30525 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Wed, 28 Feb 2024 09:04:39 +0100 Subject: [PATCH 043/167] database file reorganisations for sqlite & associated docs update --- doc/source/autodoc/server/remote_object.rst | 8 +- doc/source/conf.py | 4 +- doc/source/development_notes.rst | 1 + doc/source/index.rst | 4 +- hololinked/client/proxy.py | 129 +++++++------------- hololinked/server/constants.py | 1 + hololinked/server/database.py | 66 +++++----- hololinked/server/remote_object.py | 24 ++-- hololinked/server/remote_parameter.py | 2 +- 9 files changed, 102 insertions(+), 137 deletions(-) diff --git a/doc/source/autodoc/server/remote_object.rst b/doc/source/autodoc/server/remote_object.rst index 6cea058..1aebd26 100644 --- a/doc/source/autodoc/server/remote_object.rst +++ b/doc/source/autodoc/server/remote_object.rst @@ -1,12 +1,12 @@ -Remote Object 
-============= +RemoteObject +============ .. autoclass:: hololinked.server.remote_object.RemoteObject - :members: + :members: logger, rpc_serializer, json_serializer :show-inheritance: .. autoclass:: hololinked.server.remote_object.RemoteSubobject - :members: + :members: instance_name, object_info, GUI :show-inheritance: .. autoclass:: hololinked.server.remote_object.StateMachine diff --git a/doc/source/conf.py b/doc/source/conf.py index f5c299c..3592295 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -68,4 +68,6 @@ # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] -numpydoc_show_class_members = False \ No newline at end of file +numpydoc_show_class_members = False + +autodoc_member_order = 'bysource' \ No newline at end of file diff --git a/doc/source/development_notes.rst b/doc/source/development_notes.rst index e590f8a..db97303 100644 --- a/doc/source/development_notes.rst +++ b/doc/source/development_notes.rst @@ -4,6 +4,7 @@
+ .. _note: Development Notes diff --git a/doc/source/index.rst b/doc/source/index.rst index bf8595b..2e16cbc 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -7,8 +7,8 @@ .. |module-highlighted| replace:: ``hololinked`` -Welcome to |module|'s documentation! -==================================== +|module| - pythonic supervisory control & data acquisition +========================================================== |module-highlighted| is (supposed to be) a versatile and pythonic tool for building custom control and data acquisition software systems. If you have a requirement to capture data from your hardware/instrumentation remotely through your diff --git a/hololinked/client/proxy.py b/hololinked/client/proxy.py index 959f512..4265832 100644 --- a/hololinked/client/proxy.py +++ b/hololinked/client/proxy.py @@ -2,6 +2,7 @@ import asyncio import typing import logging +import uuid from typing import Any from ..server.data_classes import RPCResource @@ -20,12 +21,13 @@ class ObjectProxy: _own_attrs = frozenset([ '_zmq_client', 'identity', '__annotations__', 'instance_name', 'logger', 'timeout', '_timeout', + '_events' ]) def __init__(self, instance_name : str, timeout : float = 5, load_remote_object = True, protocol : str = 'TCP', **kwargs) -> None: self.instance_name = instance_name self.timeout = timeout - self.identity = instance_name + current_datetime_ms_str() + self.identity = f"{instance_name}|{uuid.uuid4()}" self.logger = logging.Logger(self.identity) # compose ZMQ client in Proxy client so that all sending and receiving is # done by the ZMQ client and not by the Proxy client directly. 
Proxy client only @@ -96,8 +98,8 @@ def timeout(self, value : typing.Union[float, int]): network times not considered.""" def invoke(self, method : str, oneway : bool = False, **kwargs) -> typing.Any: - method : _RemoteMethod = getattr(self, method, None) - if not method: + method = getattr(self, method, None) # type: _RemoteMethod + if not isinstance(method, _RemoteMethod): raise AttributeError(f"No remote method named {method}") if oneway: method.oneway(**kwargs) @@ -106,13 +108,13 @@ def invoke(self, method : str, oneway : bool = False, **kwargs) -> typing.Any: async def async_invoke(self, method : str, **kwargs): method = getattr(self, method, None) # type: _RemoteMethod - if not method: + if not isinstance(method, _RemoteMethod): raise AttributeError(f"No remote method named {method}") return await method.async_call(**kwargs) def set_parameter(self, parameter : str, value : typing.Any, oneway : bool) -> None: - parameter : _RemoteParameter = getattr(self, parameter, None) - if not parameter: + parameter = getattr(self, parameter, None) # type: _RemoteParameter + if not isinstance(parameter, _RemoteParameter): raise AttributeError(f"No remote parameter named {parameter}") if oneway: parameter.oneway(value) @@ -123,64 +125,21 @@ async def async_set_parameters(self, oneway : bool = False, noblock : bool = Fal pass def subscribe_event(self, event_name : str, callback : typing.Callable): - pass + event = getattr(self, event_name, None) # type: _Event + if not isinstance(event, _Event): + raise AttributeError(f"No event named {event_name}") + if event._subscribed: + event._cbs.append(callback) + return self._events. 
+ else: + event._subscribe([callback]) + self._events[uuid.uuid4()] = event def unsubscribe_event(self, event_name : str): - pass - - # def __getstate__(self): - # # make sure a tuple of just primitive types are used to allow for proper serialization - # return str(self._pyroUri), tuple(self._pyroOneway), tuple(self._pyroMethods), \ - # tuple(self._pyroAttrs), self._pyroHandshake, self._pyroSerializer - - # def __setstate__(self, state): - # self._pyroUri = core.URI(state[0]) - # self._pyroOneway = set(state[1]) - # self._pyroMethods = set(state[2]) - # self._pyroAttrs = set(state[3]) - # self._pyroHandshake = state[4] - # self._pyroSerializer = state[5] - # self.__pyroTimeout = config.COMMTIMEOUT - # self._pyroMaxRetries = config.MAX_RETRIES - # self._pyroConnection = None - # self._pyroLocalSocket = None - # self._pyroSeq = 0 - # self._pyroRawWireResponse = False - # self.__pyroOwnerThread = get_ident() - - # def __copy__(self): - # p = object.__new__(type(self)) - # p.__setstate__(self.__getstate__()) - # p._pyroTimeout = self._pyroTimeout - # p._pyroRawWireResponse = self._pyroRawWireResponse - # p._pyroMaxRetries = self._pyroMaxRetries - # return p - - - # def __dir__(self): - # result = dir(self.__class__) + list(self.__dict__.keys()) - # return sorted(set(result) | self._pyroMethods | self._pyroAttrs) - - # # When special methods are invoked via special syntax (e.g. obj[index] calls - # # obj.__getitem__(index)), the special methods are not looked up via __getattr__ - # # for efficiency reasons; instead, their presence is checked directly. - # # Thus we need to define them here to force (remote) lookup through __getitem__. 
- - # def __len__(self): return self.__getattr__('__len__')() - # def __getitem__(self, index): return self.__getattr__('__getitem__')(index) - # def __setitem__(self, index, val): return self.__getattr__('__setitem__')(index, val) - # def __delitem__(self, index): return self.__getattr__('__delitem__')(index) - - # def __iter__(self): - # try: - # # use remote iterator if it exists - # yield from self.__getattr__('__iter__')() - # except AttributeError: - # # fallback to indexed based iteration - # try: - # yield from (self[index] for index in range(sys.maxsize)) - # except (StopIteration, IndexError): - # return + event = getattr(self, event_name, None) # type: _Event + if not isinstance(event, _Event): + raise AttributeError(f"No event named {event_name}") + event._unsubscribe() def load_remote_object(self): @@ -193,6 +152,7 @@ def load_remote_object(self): self._timeout) # type: _RemoteMethod reply = fetch()[ServerMessage.DATA][ServerMessageData.RETURN_VALUE] # type: typing.Dict[str, typing.Dict[str, typing.Any]] + allowed_events = [] for name, data in reply.items(): if isinstance(data, dict): data = RPCResource(**data) @@ -203,15 +163,10 @@ def load_remote_object(self): elif data.what == ResourceType.PARAMETER: _add_parameter(self, _RemoteParameter(self._zmq_client, data.instruction, self.timeout), data) elif data.what == ResourceType.EVENT: - pass - - # def _pyroInvokeBatch(self, calls, oneway=False): - # flags = protocol.FLAGS_BATCH - # if oneway: - # flags |= protocol.FLAGS_ONEWAY - # return self._pyroInvoke("", calls, None, flags) + _add_event(self, _Event(self._zmq_client, data.event_name, data.event_socket), data) + self._events = {} - + class _RemoteMethod: """method call abstraction""" @@ -235,7 +190,9 @@ def __call__(self, *args, **kwargs) -> typing.Any: self._last_return_value = self._zmq_client.execute(self._instruction, kwargs, raise_client_side_exception=True) return self._last_return_value - + + async def async_call(self, *args, **kwargs): + pass 
@@ -273,20 +230,25 @@ async def async_get(self): self._last_value : typing.Dict = await self._zmq_client.execute(self._read_instruction, raise_client_side_exception=True) return self._last_value + + def oneway(self): + pass + class _Event: """event streaming""" def __init__(self, client : SyncZMQClient, event_name : str, event_socket : str) -> None: self._zmq_client = client - self._event_name = event_name - self._event_socket = event_socket - - def _subscribe(self, callback : typing.Callable): - self._event_consumer = EventConsumer(request.path, event_info.socket_address, - f"{request.path}_HTTPEvent@"+current_datetime_ms_str()) - self._cb = callback + self._name = event_name + self._URL = event_name + self._socket_address = event_socket + + def _subscribe(self, callbacks : typing.List[typing.Callable]): + self._event_consumer = EventConsumer(self._URL, self._socket_address, + f"{self._socket_address}_HTTPEvent@"+current_datetime_ms_str()) + self._cbs = callbacks self._subscribed = True self._thread = threading.Thread(target=self.listen) self._thread.start() @@ -295,7 +257,8 @@ def listen(self): while self._subscribed: try: data = self._event_consumer.receive_event(deserialize=True) - self._cb(data) + for cb in self._cbs: + cb(data) except Exception as E: print(E) self._event_consumer.exit() @@ -362,8 +325,6 @@ def close(self): __allowed_attribute_types__ = (_RemoteParameter, _RemoteMethod) def _add_method(client_obj : ObjectProxy, method : _RemoteMethod, func_info : RPCResource) -> None: - if isinstance(func_info, list): - raise TypeError(f"got list instead of RPC resource for {func_info.name}") if not func_info.top_owner: return for dunder in SERIALIZABLE_WRAPPER_ASSIGNMENTS: @@ -375,8 +336,6 @@ def _add_method(client_obj : ObjectProxy, method : _RemoteMethod, func_info : RP client_obj.__setattr__(func_info.name, method) def _add_parameter(client_obj : ObjectProxy, parameter : _RemoteParameter, parameter_info : RPCResource) -> None: - if 
isinstance(parameter_info, list):
-        raise TypeError(f"got list instead of RPC resource for {parameter_info.name}")
     if not parameter_info.top_owner:
         return
     for attr in ['doc', 'name']:
@@ -384,8 +343,8 @@ def _add_parameter(client_obj : ObjectProxy, parameter : _RemoteParameter, param
         setattr(parameter, attr, getattr(parameter_info, attr))
     client_obj.__setattr__(parameter_info.name, parameter)
 
-def _add_event(client_obj : ObjectProxy, event, event_info) -> None:
-    pass
+def _add_event(client_obj : ObjectProxy, event : _Event, event_info) -> None:
+    client_obj.__setattr__(event_info.name, event)
 
 
 __all__ = ['ObjectProxy']
diff --git a/hololinked/server/constants.py b/hololinked/server/constants.py
index ba3a711..df9da73 100644
--- a/hololinked/server/constants.py
+++ b/hololinked/server/constants.py
@@ -104,3 +104,4 @@ class ServerTypes(Enum):
     POOL = b'POOL'
 
 
+
diff --git a/hololinked/server/database.py b/hololinked/server/database.py
index c5bf666..2caeed8 100644
--- a/hololinked/server/database.py
+++ b/hololinked/server/database.py
@@ -6,6 +6,7 @@
 from sqlalchemy.orm import Mapped, mapped_column, DeclarativeBase, MappedAsDataclass
 from dataclasses import dataclass, asdict
 
+from ..param import Parameterized
 from .serializers import JSONSerializer, BaseSerializer
 from .constants import JSONSerializable
 from .remote_parameter import RemoteParameter
@@ -47,23 +48,35 @@ class DeserializedParameter: # not part of database
 
 
 class Database:
+
+    def __init__(self, instance : Parameterized, serializer : typing.Optional[BaseSerializer] = None,
+                 config_file : typing.Union[str, None] = None) -> None:
+        self.remote_object_instance = instance
+        self.instance_name = instance.instance_name
+        self.serializer = serializer
+        self.URL = self.create_URL(config_file)
+
     @classmethod
-    def create_URL(file_name : str, asynch : bool = False): # async is a keyword
-        if file_name.endswith('.json'):
+    def create_URL(cls, file_name : str = None):
+        if not file_name:
+            conf = {}
+        elif 
file_name.endswith('.json'): file = open(file_name, 'r') conf = JSONSerializer.generic_load(file) else: raise ValueError("config files of extension - {} expected, given file name {}".format(["json"], file_name)) + dialect = conf.get('dialect', None) + server = conf.get('server', None) + database = conf.get('database', 'hololinked') + if not server: + file = conf.get('file', 'hololinked.db') + return f"sqlite+pysqlite://{file}/{database}" host = conf.get("host", 'localhost') port = conf.get("port", 5432) - user = conf.get('user', 'postgres') + user = conf.get('user', 'postgres') password = conf.get('password', '') - - if asynch: - return f"postgresql+asyncpg://{user}:{password}@{host}:{port}" - else: - return f"postgresql://{user}:{password}@{host}:{port}" + return f"{server}+{dialect}://{user}:{password}@{host}:{port}/{database}" + class BaseAsyncDB(Database): @@ -83,14 +96,13 @@ class BaseAsyncDB(Database): absolute path to database server configuration file """ - def __init__(self, database : str, serializer : BaseSerializer, + def __init__(self, instance : Parameterized, + serializer : typing.Optional[BaseSerializer] = None, config_file : typing.Union[str, None] = None) -> None: - if config_file: - URL = f"{self.create_URL(config_file, True)}/{database}" - self.engine = asyncio_ext.create_async_engine(URL, echo=True) - self.async_session = sessionmaker(self.engine, expire_on_commit=True, - class_= asyncio_ext.AsyncSession) # type: ignore - self.serializer = serializer + super().__init__(instance=instance, serializer=serializer, config_file=config_file) + self.engine = asyncio_ext.create_async_engine(self.URL, echo=True) + self.async_session = sessionmaker(self.engine, expire_on_commit=True, + class_=asyncio_ext.AsyncSession) class BaseSyncDB(Database): @@ -110,15 +122,13 @@ class BaseSyncDB(Database): absolute path to database server configuration file """ - def __init__(self, database : str, serializer : BaseSerializer, + def __init__(self, instance : 
Parameterized, + serializer : typing.Optional[BaseSerializer] = None, config_file : typing.Union[str, None] = None) -> None: - if config_file: - URL = f"{self.create_URL(config_file, False)}/{database}" - self.engine = create_engine(URL, echo = True) - self.sync_session = sessionmaker(self.engine, expire_on_commit=True) - self.serializer = serializer - - + super().__init__(instance=instance, serializer=serializer, config_file=config_file) + self.engine = create_engine(self.URL, echo = True) + self.sync_session = sessionmaker(self.engine, expire_on_commit=True) + class RemoteObjectDB(BaseSyncDB): """ @@ -137,12 +147,7 @@ class RemoteObjectDB(BaseSyncDB): configuration file of the database server """ - def __init__(self, instance_name : str, serializer : BaseSerializer, - config_file: typing.Optional[str] = None) -> None: - super().__init__(database='scadapyserver', serializer=serializer, - config_file=config_file) - self.instance_name = instance_name - + def fetch_own_info(self) -> RemoteObjectInformation: """ fetch ``RemoteObject`` instance's own information, for schema see @@ -203,7 +208,8 @@ def create_missing_db_parameters(self, param = SerializedParameter( instance_name=self.instance_name, name=new_param.name, - serialized_value=self.serializer.dumps(new_param.default) + serialized_value=self.serializer.dumps(getattr(self.remote_object_instance, + new_param.name)) ) session.add(param) session.commit() diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index 8153220..02e11fb 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -191,10 +191,6 @@ def query(self, info : typing.Union[str, typing.List[str]] ) -> typing.Any: - - - - ConfigInfo = Enum('LevelTypes','USER_MANAGED PRIMARY_HOST_WIDE PC_HOST_WIDE') @@ -241,24 +237,24 @@ class RemoteSubobject(Parameterized, metaclass=RemoteObjectMetaclass): (http(s)://{domain and sub domain}/{instance name}). 
It is suggested to use the class name along with a unique name {class name}/{some unique name}. Instance names must be unique in your entire system.""") # type: str + # remote paramerters httpserver_resources = RemoteParameter(readonly=True, URL_path='/resources/http-server', doc="""object's resources exposed to HTTP server""", fget=lambda self: self._httpserver_resources ) # type: typing.Dict[str, typing.Dict[str, HTTPResource]] rpc_resources = RemoteParameter(readonly=True, URL_path='/resources/object-proxy', doc= """object's resources exposed to RPC client, similar to HTTP resources but differs in details.""", fget=lambda self: self._rpc_resources) # type: typing.Dict[str, typing.Any] - # remote paramerters - events = RemoteParameter(readonly=True, URL_path='/events', - doc="returns a dictionary with two fields containing event name and event information") # type: typing.Dict[str, typing.Any] gui_resources : typing.Dict = RemoteParameter(readonly=True, URL_path='/resources/gui', doc= """object's data read by scadapy webdashboard GUI client, similar to http_resources but differs in details.""") # type: typing.Dict[str, typing.Any] + events = RemoteParameter(readonly=True, URL_path='/events', + doc="returns a dictionary with two fields containing event name and event information") # type: typing.Dict[str, typing.Any] object_info = RemoteParameter(doc="contains information about this object like the class name, script location etc.", readonly=True, URL_path='/info', fget = lambda self: self._object_info) # type: RemoteObjectDB.RemoteObjectInfo GUI = ClassSelector(class_=ReactApp, default=None, allow_None=True, - doc= """GUI applied here will become visible at GUI tab of dashboard tool""") # type: typing.Optional[ReactApp] + doc="GUI specified here will become visible at GUI tab of hololinked-portal dashboard tool") # type: typing.Optional[ReactApp] - def __new__(cls, **kwargs): + def __new__(cls): """ custom defined __new__ method to assign some important 
attributes at instance creation time directly instead of super().__init__(instance_name = val1 , users_own_kw_argument1 = users_val1, ..., users_own_kw_argumentn = users_valn) @@ -609,7 +605,7 @@ def _prepare_DB(self, config_file : str = None): self._object_info = self._create_object_info() return # 1. create engine - self.db_engine : RemoteObjectDB = RemoteObjectDB(instance_name=self.instance_name, serializer=self.rpc_serializer, + self.db_engine : RemoteObjectDB = RemoteObjectDB(instance=self, config_file=config_file) # 2. create an object metadata to be used by different types of clients object_info = self.db_engine.fetch_own_info() @@ -630,9 +626,9 @@ def _write_parameters_from_DB(self): return self.db_engine.create_missing_db_parameters(self.__class__.parameters.db_init_objects) # 4. read db_init and db_persist objects - for db_param in self.db_engine.read_all_parameters(): + for db_param in self.db_engine.read_all_parameters(): try: - setattr(self, db_param.name, self.rpc_serializer.loads(db_param.value)) # type: ignore + setattr(self, db_param.name, db_param.value) # type: ignore except Exception as E: self.logger.error(f"could not set attribute {db_param.name} due to error {E}") @@ -695,7 +691,7 @@ def _parameters(self): return self.parameters.descriptors.keys() @get('/parameters/values') - def parameter_values(self, **kwargs) -> typing.Dict[str, typing.Any]: + def _parameter_values(self, **kwargs) -> typing.Dict[str, typing.Any]: """ returns requested parameter values in a dict """ @@ -906,4 +902,4 @@ def get_execution_logs(self): -__all__ = ['RemoteObject', 'StateMachine', 'RemoteObjectDB', 'ListHandler', 'RemoteAccessHandler'] +__all__ = ['RemoteObject', 'StateMachine', 'ListHandler', 'RemoteAccessHandler'] diff --git a/hololinked/server/remote_parameter.py b/hololinked/server/remote_parameter.py index 84f9b24..99d4a07 100644 --- a/hololinked/server/remote_parameter.py +++ b/hololinked/server/remote_parameter.py @@ -192,7 +192,7 @@ def 
_post_slot_set(self, slot : str, old : typing.Any, value : typing.Any) -> No super()._post_slot_set(slot, old, value) def _post_value_set(self, obj : Parameterized, value : typing.Any) -> None: - if (self.db_persist or self.db_commit) and hasattr(obj, 'db_engine') and hasattr(obj.db_engine, 'edit_parameter'): + if (self.db_persist or self.db_commit) and hasattr(obj, 'db_engine'): obj.db_engine.edit_parameter(self, value) return super()._post_value_set(obj, value) From 5b89d9efb8feb66d094b6ed30dd2e0e91ece8eae Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Fri, 1 Mar 2024 09:59:51 +0100 Subject: [PATCH 044/167] RemoteObjectHandler for creating new remote objects --- doc/source/index.rst | 4 +- hololinked/server/HTTPServer.py | 63 +-------- hololinked/server/database.py | 33 ++--- hololinked/server/handlers.py | 67 +++++++++- hololinked/server/host_utilities.py | 201 +++++++++++++--------------- 5 files changed, 172 insertions(+), 196 deletions(-) diff --git a/doc/source/index.rst b/doc/source/index.rst index 2e16cbc..4e1bc57 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -7,8 +7,8 @@ .. |module-highlighted| replace:: ``hololinked`` -|module| - pythonic supervisory control & data acquisition -========================================================== +|module| - Pythonic Supervisory Control & Data Acquisition / Internet of Things +=============================================================================== |module-highlighted| is (supposed to be) a versatile and pythonic tool for building custom control and data acquisition software systems. 
If you have a requirement to capture data from your hardware/instrumentation remotely through your diff --git a/hololinked/server/HTTPServer.py b/hololinked/server/HTTPServer.py index bf6bdb1..8f6e8e6 100644 --- a/hololinked/server/HTTPServer.py +++ b/hololinked/server/HTTPServer.py @@ -14,7 +14,7 @@ from .constants import CommonInstructions from .webserver_utils import log_request, update_resources from .zmq_message_brokers import MessageMappedZMQClientPool -from .handlers import RPCHandler, BaseHandler, EventHandler +from .handlers import RPCHandler, BaseHandler, EventHandler, RemoteObjectsHandler @@ -97,56 +97,19 @@ def all_ok(self) -> bool: BaseHandler.logger = self.logger BaseHandler.clients = ', '.join(self.allowed_clients) - self.resources = dict( - FILE_SERVER = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), - GET = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), - POST = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), - PUT = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), - DELETE = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()), - OPTIONS = dict(STATIC_ROUTES = dict(), DYNAMIC_ROUTES = dict()) - ) - - self.handlers = [] - for route, http_resource in self.resources["GET"]["STATIC_ROUTES"].items(): - self.handlers.append((route, self.request_handler, {'resource' : http_resource})) - """ - for handler based tornado rule matcher, the Rule object has following - signature - - def __init__( - self, - matcher: "Matcher", - target: Any, - target_kwargs: Optional[Dict[str, Any]] = None, - name: Optional[str] = None, - ) -> None: + self.handlers = [ + (r'/remote-objects', RemoteObjectsHandler) + ] - matcher - based on route - target - handler - target_kwargs - given to handler's initialize - name - ... 
- - len == 2 tuple is route + handler - len == 3 tuple is route + handler + target kwargs - - so we give (path, RPCHandler, {'resource' : HTTPResource}) - - path is extracted from remote_method(URL_path='....') - RPCHandler is the base handler of this package for RPC purposes - resource goes into target kwargs as the HTTPResource generated by - remote_method and RemoteParamater contains all the info given - to make RPCHandler work - """ return True def listen(self) -> None: assert self.all_ok, 'HTTPServer all is not ok before starting' - # Will always be True or cause some other exception - run_method_somehow(self._fetch_remote_object_resources()) - + # Will always be True or cause some other exception self.event_loop = ioloop.IOLoop.current() - # self.event_loop.add_future(self._fetch_remote_object_resources()) + self.event_loop.add_future(RemoteObjectsHandler.connect_to_remote_object( + [client for client in self.zmq_client_pool])) self.app = Application(handlers=self.handlers) if self.protocol_version == 2: @@ -158,18 +121,6 @@ def listen(self) -> None: self.logger.info(f'started webserver at {self._IP}, ready to receive requests.') self.event_loop.start() - - async def _fetch_remote_object_resources(self): - for client in self.zmq_client_pool: - await client.handshake_complete() - _, _, _, _, _, reply = await client.async_execute( - f'/{client.server_instance_name}{CommonInstructions.HTTP_RESOURCES}', - raise_client_side_exception=True) - update_resources(self.resources, reply["returnValue"]) # type: ignore - # _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/object-info', raise_client_side_exception = True) - # remote_object_info.append(RemoteObjectDB.RemoteObjectInfo(**reply["returnValue"])) # Should raise an exception if returnValue key is not found for some reason. 
- - def stop(self) -> None: self.server.stop() run_coro_sync(self.server.close_all_connections()) diff --git a/hololinked/server/database.py b/hololinked/server/database.py index 2caeed8..5b3d533 100644 --- a/hololinked/server/database.py +++ b/hololinked/server/database.py @@ -15,27 +15,11 @@ class RemoteObjectTableBase(DeclarativeBase): pass - -class RemoteObjectInformation(MappedAsDataclass, RemoteObjectTableBase): - __tablename__ = "remote_objects" - - instance_name : Mapped[str] = mapped_column(String, primary_key = True) - class_name : Mapped[str] = mapped_column(String) - http_server : Mapped[str] = mapped_column(String) - script : Mapped[str] = mapped_column(String) - args : Mapped[JSONSerializable] = mapped_column(JSON) - kwargs : Mapped[JSONSerializable] = mapped_column(JSON) - eventloop_name : Mapped[str] = mapped_column(String) - level : Mapped[int] = mapped_column(Integer) - level_type : Mapped[str] = mapped_column(String) - - def json(self): - return asdict(self) class SerializedParameter(MappedAsDataclass, RemoteObjectTableBase): __tablename__ = "parameters" - id : Mapped[int] = mapped_column(Integer, primary_key = True, autoincrement = True) + id : Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) instance_name : Mapped[str] = mapped_column(String) name : Mapped[str] = mapped_column(String) serialized_value : Mapped[bytes] = mapped_column(LargeBinary) @@ -46,7 +30,8 @@ class DeserializedParameter: # not part of database value : typing.Any -class Database: + +class BaseDB: def __init__(self, instance : Parameterized, serializer : typing.Optional[BaseSerializer] = None, config_file : typing.Union[str, None] = None) -> None: @@ -56,7 +41,7 @@ def __init__(self, instance : Parameterized, serializer : typing.Optional[BaseSe self.URL = self.create_URL(config_file) @classmethod - def create_URL(cls, file_name : str = None): + def create_URL(cls, file_name : str = None, database : typing.Optional[str] = None) -> str: if not 
file_name: conf = {} elif file_name.endswith('.json'): @@ -67,7 +52,8 @@ def create_URL(cls, file_name : str = None): dialect = conf.get('dialect', None) server = conf.get('server', None) - database = conf.get('database', 'hololinked') + if not database: + database = conf.get('database', 'hololinked') if not server: file = conf.get('file', 'hololinked.db') return f"sqlite+pysqlite://{file}/{database}" @@ -77,9 +63,8 @@ def create_URL(cls, file_name : str = None): password = conf.get('password', '') return f"{server}+{dialect}://{user}:{password}@{host}:{port}/{database}" - -class BaseAsyncDB(Database): +class BaseAsyncDB(BaseDB): """ Base class for an async database engine, implements configuration file reader, sqlalchemy engine & session creation. @@ -105,7 +90,7 @@ def __init__(self, instance : Parameterized, class_=asyncio_ext.AsyncSession) -class BaseSyncDB(Database): +class BaseSyncDB(BaseDB): """ Base class for an synchronous (blocking) database engine, implements configuration file reader, sqlalchemy engine & session creation. 
@@ -146,8 +131,6 @@ class RemoteObjectDB(BaseSyncDB): config_file: str configuration file of the database server """ - - def fetch_own_info(self) -> RemoteObjectInformation: """ fetch ``RemoteObject`` instance's own information, for schema see diff --git a/hololinked/server/handlers.py b/hololinked/server/handlers.py index 1b3d4fd..524c787 100644 --- a/hololinked/server/handlers.py +++ b/hololinked/server/handlers.py @@ -5,8 +5,9 @@ from tornado.web import RequestHandler, StaticFileHandler from tornado.iostream import StreamClosedError +from .constants import CommonInstructions from .serializers import JSONSerializer -from .zmq_message_brokers import MessageMappedZMQClientPool, EventConsumer +from .zmq_message_brokers import AsyncZMQClient, MessageMappedZMQClientPool, EventConsumer from .webserver_utils import * from .utils import current_datetime_ms_str from .data_classes import HTTPResource, ServerSentEvent @@ -215,4 +216,66 @@ def get_absolute_path(cls, root: str, path: str) -> str: .. 
versionadded:: 3.1 """ - return root+path \ No newline at end of file + return root+path + + + +class RemoteObjectsHandler(BaseHandler): + + def initialize(self, resource: HTTPResource | ServerSentEvent, request_handler) -> None: + self.request_handler = request_handler + return super().initialize(resource) + + async def get(self): + with self.async_session() as session: + pass + + async def post(self): + arguments = self.prepare_arguments() + self.set_status(200) + await self.connect_to_remote_object() + self.finish() + + + async def connect_to_remote_object(self, clients : typing.List[AsyncZMQClient]): + resources = dict() + for client in clients: + await client.handshake_complete() + _, _, _, _, _, reply = await client.async_execute( + f'/{client.server_instance_name}{CommonInstructions.HTTP_RESOURCES}', + raise_client_side_exception=True) + update_resources(resources, reply["returnValue"]) # type: ignore + # _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/object-info', raise_client_side_exception = True) + # remote_object_info.append(RemoteObjectDB.RemoteObjectInfo(**reply["returnValue"])) # Should raise an exception if returnValue key is not found for some reason. + + handlers = [] + for route, http_resource in resources.items(): + handlers.append((route, self.request_handler, {'resource' : http_resource})) + """ + for handler based tornado rule matcher, the Rule object has following + signature + + def __init__( + self, + matcher: "Matcher", + target: Any, + target_kwargs: Optional[Dict[str, Any]] = None, + name: Optional[str] = None, + ) -> None: + + matcher - based on route + target - handler + target_kwargs - given to handler's initialize + name - ... 
+ + len == 2 tuple is route + handler + len == 3 tuple is route + handler + target kwargs + + so we give (path, RPCHandler, {'resource' : HTTPResource}) + + path is extracted from remote_method(URL_path='....') + RPCHandler is the base handler of this package for RPC purposes + resource goes into target kwargs as the HTTPResource generated by + remote_method and RemoteParamater contains all the info given + to make RPCHandler work + """ diff --git a/hololinked/server/host_utilities.py b/hololinked/server/host_utilities.py index 70fa575..5308863 100644 --- a/hololinked/server/host_utilities.py +++ b/hololinked/server/host_utilities.py @@ -6,22 +6,22 @@ import asyncio import ssl import typing +import getpass from dataclasses import dataclass, asdict, field -from typing import Any +from argon2 import PasswordHasher from sqlalchemy import Engine, Integer, String, JSON, ARRAY, Boolean from sqlalchemy import select, create_engine -from sqlalchemy.orm import Session, sessionmaker, Mapped, mapped_column, DeclarativeBase -from sqlalchemy_utils import database_exists, create_database, drop_database +from sqlalchemy.orm import Session, sessionmaker, Mapped, mapped_column, DeclarativeBase, MappedAsDataclass from sqlalchemy.ext import asyncio as asyncio_ext -from argon2 import PasswordHasher +from sqlalchemy_utils import database_exists, create_database, drop_database from tornado.httpclient import AsyncHTTPClient, HTTPRequest from tornado.web import RequestHandler, Application, authenticated -from tornado.escape import json_decode, json_encode from tornado.httpserver import HTTPServer as TornadoHTTP1Server - +from .constants import JSONSerializable from .serializers import JSONSerializer +from .database import BaseDB from .remote_parameters import TypedList from .zmq_message_brokers import MessageMappedZMQClientPool from .webserver_utils import get_IP_from_interface, update_resources_using_client @@ -60,10 +60,10 @@ -class TableBase(DeclarativeBase): +class 
HololinkedHostTableBase(DeclarativeBase): pass -class Dashboards(TableBase): +class Dashboards(HololinkedHostTableBase, MappedAsDataclass): __tablename__ = "dashboards" name : Mapped[str] = mapped_column(String(1024), primary_key=True) @@ -72,32 +72,24 @@ class Dashboards(TableBase): json_specfication : Mapped[typing.Dict[str, typing.Any]] = mapped_column(JSON) def json(self): - return { - "name" : self.name, - "URL" : self.URL, - "description" : self.description, - "json" : self.json_specfication - } + return asdict(self) -class AppSettings(TableBase): +class AppSettings(HololinkedHostTableBase, MappedAsDataclass): __tablename__ = "appsettings" field : Mapped[str] = mapped_column(String(8192), primary_key=True) value : Mapped[typing.Dict[str, typing.Any]] = mapped_column(JSON) def json(self): - return { - "field" : self.field, - "value" : self.value - } + return asdict(self) -class LoginCredentials(TableBase): +class LoginCredentials(HololinkedHostTableBase, MappedAsDataclass): __tablename__ = "login_credentials" email : Mapped[str] = mapped_column(String(1024), primary_key=True) password : Mapped[str] = mapped_column(String(1024), unique=True) -class Server(TableBase): +class Server(HololinkedHostTableBase, MappedAsDataclass): __tablename__ = "http_servers" hostname : Mapped[str] = mapped_column(String, primary_key=True) @@ -105,6 +97,30 @@ class Server(TableBase): port : Mapped[int] = mapped_column(Integer) IPAddress : Mapped[str] = mapped_column(String) remote_objects : Mapped[typing.List[str]] = mapped_column(ARRAY(String)) + https : Mapped[bool] = mapped_column(Boolean) + qualifiedIP : Mapped[str] = field(init = False) + + def __post_init__(self): + self.qualifiedIP = '{}:{}'.format(self.hostname, self.port) + + def json(self): + return asdict(self) + +class RemoteObjectInformation(HololinkedHostTableBase, MappedAsDataclass): + __tablename__ = "remote_objects" + + instance_name : Mapped[str] = mapped_column(String, primary_key=True) + class_name : Mapped[str] 
= mapped_column(String) + script : Mapped[str] = mapped_column(String) + kwargs : Mapped[JSONSerializable] = mapped_column(JSON) + eventloop_instance_name : Mapped[str] = mapped_column(String) + http_server : Mapped[str] = mapped_column(String) + level : Mapped[int] = mapped_column(Integer) + level_type : Mapped[str] = mapped_column(String) + + def json(self): + return asdict(self) + def for_authenticated_user(method): @@ -120,7 +136,7 @@ def authenticated_method(self : RequestHandler): return authenticated_method -class PrimaryHostHandler(RequestHandler): +class SystemHostHandler(RequestHandler): def check_headers(self): content_type = self.request.headers.get("Content-Type", None) @@ -129,7 +145,7 @@ def check_headers(self): self.write({ "error" : "request body is not JSON." }) self.finish() - def get_current_user(self) -> Any: + def get_current_user(self) -> typing.Any: return self.get_signed_cookie('user') def set_default_headers(self) -> None: @@ -143,7 +159,7 @@ async def options(self): self.finish() -class UsersHandler(PrimaryHostHandler): +class UsersHandler(SystemHostHandler): async def post(self): self.set_status(200) @@ -154,12 +170,12 @@ async def get(self): self.finish() -class LoginHandler(PrimaryHostHandler): +class LoginHandler(SystemHostHandler): async def post(self): self.check_headers() try: - body = json_decode(self.request.body) + body = JSONSerializer.generic_loads(self.request.body) email = body["email"] password = body["password"] async with global_session() as session: @@ -191,13 +207,13 @@ async def options(self): self.finish() -class AppSettingsHandler(PrimaryHostHandler): +class AppSettingsHandler(SystemHostHandler): @for_authenticated_user async def post(self): self.check_headers() try: - value = json_decode(self.request.body["value"]) + value = JSONSerializer.generic_loads(self.request.body["value"]) async with global_session() as session, session.begin(): session.add(AppSettings( field = field, @@ -214,7 +230,7 @@ async def 
post(self): async def patch(self): self.check_headers() try: - value = json_decode(self.request.body) + value = JSONSerializer.generic_loads(self.request.body) field = value["field"] value = value["value"] async with global_session() as session, session.begin(): @@ -235,7 +251,7 @@ async def get(self): async with global_session() as session: stmt = select(AppSettings) data = await session.execute(stmt) - serialized_data = json_encode({result[AppSettings.__name__].field : result[AppSettings.__name__].value["value"] + serialized_data = JSONSerializer.generic_dumps({result[AppSettings.__name__].field : result[AppSettings.__name__].value["value"] for result in data.mappings().all()}) self.set_status(200) self.set_header("Content-Type", "application/json") @@ -245,13 +261,13 @@ async def get(self): self.finish() -class DashboardsHandler(PrimaryHostHandler): +class DashboardsHandler(SystemHostHandler): @for_authenticated_user async def post(self): self.check_headers() try: - data = json_decode(self.request.body) + data = JSONSerializer.generic_loads(self.request.body) async with global_session() as session, session.begin(): session.add(Dashboards(**data)) await session.commit() @@ -268,7 +284,7 @@ async def get(self): async with global_session() as session: stmt = select(Dashboards) data = await session.execute(stmt) - serialized_data = json_encode([result[Dashboards.__name__]._json() for result + serialized_data = JSONSerializer.generic_dumps([result[Dashboards.__name__]._json() for result in data.mappings().all()]) self.set_status(200) self.set_header("Content-Type", "application/json") @@ -279,13 +295,13 @@ async def get(self): self.finish() -class SubscribersHandler(PrimaryHostHandler): +class SubscribersHandler(SystemHostHandler): @for_authenticated_user async def post(self): if self.request.headers["Content-Type"] == "application/json": self.set_status(200) - server = SubscribedHTTPServers(**json_decode(self.request.body)) + server = 
SubscribedHTTPServers(**JSONSerializer.generic_loads(self.request.body)) async with global_session() as session, session.begin(): session.add(server) await session.commit() @@ -297,10 +313,17 @@ async def get(self): self.set_header("Content-Type", "application/json") async with global_session() as session: result = select(Server) - self.write(json_encode(result.scalars().all())) + self.write(JSONSerializer.generic_dumps(result.scalars().all())) -class MainHandler(PrimaryHostHandler): +class SubscriberHandler(SystemHostHandler): + + async def get(self): + pass + + + +class MainHandler(SystemHostHandler): async def get(self): self.check_headers() @@ -311,12 +334,12 @@ async def get(self): def create_primary_host(config_file : str, ssl_context : ssl.SSLContext, **server_settings) -> TornadoHTTP1Server: - URL = f"{DB.create_DB_URL(config_file)}/hololinked-host" + URL = BaseDB.create_URL(config_file, database="hololinked-host") if not database_exists(URL): try: create_database(URL) sync_engine = create_engine(URL) - TableBase.metadata.create_all(sync_engine) + HololinkedHostTableBase.metadata.create_all(sync_engine) create_tables(sync_engine) create_credentials(sync_engine) except Exception as ex: @@ -324,7 +347,6 @@ def create_primary_host(config_file : str, ssl_context : ssl.SSLContext, **serve raise ex from None global global_engine, global_session - URL = f"{DB.create_DB_URL(config_file, True)}/hololinked-host" global_engine = asyncio_ext.create_async_engine(URL, echo=True) global_session = sessionmaker(global_engine, expire_on_commit=True, class_=asyncio_ext.AsyncSession) # type: ignore @@ -335,8 +357,9 @@ def create_primary_host(config_file : str, ssl_context : ssl.SSLContext, **serve (r"/dashboards", DashboardsHandler), (r"/settings", AppSettingsHandler), (r"/subscribers", SubscribersHandler), + # (r"/remote-objects", RemoteObjectsHandler), (r"/login", LoginHandler) - ], cookie_secret=base64.b64encode(os.urandom(32)).decode('utf-8') , + ], 
cookie_secret=base64.b64encode(os.urandom(32)).decode('utf-8'), **server_settings) return TornadoHTTP1Server(app, ssl_options=ssl_context) @@ -345,57 +368,35 @@ def create_primary_host(config_file : str, ssl_context : ssl.SSLContext, **serve def create_tables(engine): with Session(engine) as session, session.begin(): - # Pages - session.add(AppSettings( - field = 'dashboards', - value = { - 'deleteWithoutAsking' : True, - 'showRecentlyUsed' : True} - )) - - # login page - session.add(AppSettings( - field = 'login', - value = { - 'footer' : '', - 'footerLink' : '', - 'displayFooter' : True - } - )) - - # server - session.add(AppSettings( - field = 'servers', - value = { - 'allowHTTP' : False - } - )) - - # remote object wizard - session.add(AppSettings( - field = 'remoteObjectViewer' , - value = { - 'stringifyConsoleOutput' : False, - 'consoleDefaultMaxEntries' : 15, - 'consoleDefaultWindowSize' : 500, - 'consoleDefaultFontSize' : 16, - 'stringifyLogViewerOutput' : False, - 'logViewerDefaultMaxEntries' : 10, - 'logViewerDefaultOutputWindowSize' : 1000, - 'logViewerDefaultFontSize' : 16 - } - )) + file = open("default_host_settings.json", 'r') + default_settings = JSONSerializer.generic_load(file) + for name, settings in default_settings.items(): + session.add(AppSettings( + field = name, + value = settings + )) session.commit() + def create_credentials(sync_engine): + """ + create name and password for a new user in a database + """ + print("Requested primary host seems to use a new database. 
Give username and password (not for database server, but for client logins from hololinked-portal) : ") email = input("email-id (not collected anywhere else excepted your own database) : ") - password = input("password : ") - - with Session(sync_engine) as session, session.begin(): - ph = PasswordHasher(time_cost=500) - session.add(LoginCredentials(email=email, password=ph.hash(password))) - session.commit() + while True: + password = getpass.getpass("password : ") + password_confirm = getpass.getpass("repeat-password : ") + if password != password_confirm: + print("password & repeat password not the same. Try again.") + continue + with Session(sync_engine) as session, session.begin(): + ph = PasswordHasher(time_cost=500) + session.add(LoginCredentials(email=email, password=ph.hash(password))) + session.commit() + return + raise RuntimeError("password not created, aborting database creation.") @dataclass @@ -408,20 +409,6 @@ def json(self): return asdict(self) -@dataclass -class SubscribedHTTPServers: - hostname : str - IPAddress : typing.Any - port : int - type : str - https : bool - qualifiedIP : str = field(init = False) - - def __post_init__(self): - self.qualifiedIP = '{}:{}'.format(self.hostname, self.port) - - def json(self): - return asdict(self) @dataclass @@ -441,7 +428,7 @@ def json(self): ) -class HTTPServerUtilities(BaseAsyncDB, RemoteObject): +class SystemHost(HTTPServer): """ HTTPServerUtilities provide functionality to instantiate, kill or get current status of existing remote-objects attached to this server, ability to subscribe to a Primary Host Server @@ -580,8 +567,7 @@ async def info(self): class PCHostUtilities(HTTPServerUtilities): - - type : str = 'PC_HOST' + def __init__(self, db_config_file : str, server_network_interface : str, port : int, **kwargs) -> None: super().__init__(db_config_file = db_config_file, zmq_client_pool = None, remote_object_info = None, **kwargs) @@ -595,12 +581,5 @@ def __init__(self, db_config_file : str, 
server_network_interface : str, port : ) -def create_server_tables(serverDB): - engine = create_engine(serverDB) - PrimaryHostUtilities.TableBase.metadata.create_all(engine) - RemoteObjectDB.TableBase.metadata.create_all(engine) - engine.dispose() - - __all__ = ['create_primary_host'] \ No newline at end of file From c01dbacca2d740d247994e3c59d67fe081d33661 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Fri, 1 Mar 2024 21:32:36 +0100 Subject: [PATCH 045/167] updates to system host handlers - tried sqlite, does not work & requires custom table ORMs --- doc/source/autodoc/server/remote_object.rst | 6 +- hololinked/server/HTTPServer.py | 17 +- hololinked/server/assets/system_host_api.yml | 12 + hololinked/server/constants.py | 16 +- hololinked/server/data_classes.py | 38 ++- hololinked/server/database.py | 25 +- hololinked/server/handlers.py | 10 +- hololinked/server/host_utilities.py | 232 ++----------------- hololinked/server/remote_object.py | 120 +++++----- hololinked/server/serializers.py | 7 +- hololinked/server/webserver_utils.py | 76 +----- 11 files changed, 168 insertions(+), 391 deletions(-) create mode 100644 hololinked/server/assets/system_host_api.yml diff --git a/doc/source/autodoc/server/remote_object.rst b/doc/source/autodoc/server/remote_object.rst index 1aebd26..0a9cadb 100644 --- a/doc/source/autodoc/server/remote_object.rst +++ b/doc/source/autodoc/server/remote_object.rst @@ -6,7 +6,11 @@ RemoteObject :show-inheritance: .. autoclass:: hololinked.server.remote_object.RemoteSubobject - :members: instance_name, object_info, GUI + :members: instance_name, object_info, GUI, httpserver_resources, rpc_resources, gui_resources, events + :show-inheritance: + +.. autoclass:: hololinked.server.remote_object.RemoteObjectMeta + :members: parameters, _create_param_container :show-inheritance: .. 
autoclass:: hololinked.server.remote_object.StateMachine diff --git a/hololinked/server/HTTPServer.py b/hololinked/server/HTTPServer.py index 8f6e8e6..caaa64d 100644 --- a/hololinked/server/HTTPServer.py +++ b/hololinked/server/HTTPServer.py @@ -9,10 +9,8 @@ from ..param import Parameterized from ..param.parameters import (Integer, IPAddress, ClassSelector, Selector, TypedList, String) -from .utils import create_default_logger, run_coro_sync, run_method_somehow +from .utils import create_default_logger, run_coro_sync from .serializers import JSONSerializer -from .constants import CommonInstructions -from .webserver_utils import log_request, update_resources from .zmq_message_brokers import MessageMappedZMQClientPool from .handlers import RPCHandler, BaseHandler, EventHandler, RemoteObjectsHandler @@ -20,7 +18,7 @@ class HTTPServer(Parameterized): """ - HTTP(s) server to route requests to ``RemoteObject`` + HTTP(s) server to route requests to ``RemoteObject``. Only one HTTPServer per process supported. 
""" address = IPAddress(default='0.0.0.0', @@ -90,16 +88,18 @@ def all_ok(self) -> bool: f"{self.address}:{self.port}"), self.log_level) + self.handlers = [ + (r'/remote-objects', RemoteObjectsHandler, {'request_handler' : self.request_handler}) + ] + self.app = Application(handlers=self.handlers) + self.zmq_client_pool = MessageMappedZMQClientPool(self.remote_objects, self._IP, json_serializer=self.serializer) BaseHandler.zmq_client_pool = self.zmq_client_pool BaseHandler.json_serializer = self.serializer BaseHandler.logger = self.logger BaseHandler.clients = ', '.join(self.allowed_clients) - - self.handlers = [ - (r'/remote-objects', RemoteObjectsHandler) - ] + BaseHandler.application = self.app return True @@ -111,7 +111,6 @@ def listen(self) -> None: self.event_loop.add_future(RemoteObjectsHandler.connect_to_remote_object( [client for client in self.zmq_client_pool])) - self.app = Application(handlers=self.handlers) if self.protocol_version == 2: raise NotImplementedError("Current HTTP2 is not implemented.") self.server = TornadoHTTP2Server(router, ssl_options=self.ssl_context) diff --git a/hololinked/server/assets/system_host_api.yml b/hololinked/server/assets/system_host_api.yml new file mode 100644 index 0000000..b3e30a5 --- /dev/null +++ b/hololinked/server/assets/system_host_api.yml @@ -0,0 +1,12 @@ +openapi: '3.0.2' +info: + title: API Title + version: '1.0' +servers: + - url: https://api.server.test/v1 +paths: + /test: + get: + responses: + '200': + description: OK diff --git a/hololinked/server/constants.py b/hololinked/server/constants.py index df9da73..9419d28 100644 --- a/hololinked/server/constants.py +++ b/hololinked/server/constants.py @@ -15,7 +15,7 @@ UNSPECIFIED : str = "UNSPECIFIED" # types -class ResourceType(StrEnum): +class ResourceTypes(StrEnum): FUNC = "FUNC" ATTRIBUTE = "ATTRIBUTE" PARAMETER = "PARAMETER" @@ -24,6 +24,11 @@ class ResourceType(StrEnum): FILE = "FILE" EVENT = "EVENT" +class ResourceOperations(StrEnum): + PARAMETER_READ = 
"/read" + PARAMETER_WRITE = "/write" + + # regex logic class REGEX(StrEnum): states = '[A-Za-z_]+[A-Za-z_ 0-9]*' @@ -66,6 +71,15 @@ class CommonInstructions(StrEnum): RPC_RESOURCES = '/resources/object-proxy/read' HTTP_RESOURCES = '/resources/http-server/read' + @classmethod + def rpc_resource_read(cls, instance_name : str) -> str: + return f"/{instance_name}{cls.RPC_RESOURCES}" + + @classmethod + def http_resource_read(cls, instance_name : str) -> str: + return f"/{instance_name}{cls.HTTP_RESOURCES}" + + class ClientMessage(IntEnum): """ client sent message index for accessing message indices with names diff --git a/hololinked/server/data_classes.py b/hololinked/server/data_classes.py index f06522b..35bcd09 100644 --- a/hololinked/server/data_classes.py +++ b/hololinked/server/data_classes.py @@ -125,6 +125,15 @@ def json(self): return json_dict + +class HTTPMethodInstructions: + GET : typing.Optional[str] = field(default=None) + POST : typing.Optional[str] = field(default=None) + PUT : typing.Optional[str] = field(default=None) + DELETE : typing.Optional[str] = field(default=None) + PATCH : typing.Optional[str] = field(default=None) + + @dataclass class HTTPResource: """ @@ -143,36 +152,27 @@ class HTTPResource: instruction : str unique string that identifies the resource, generally made using the URL_path or identical to the URL_path ( qualified URL path {instance name}/{URL path}). 
- path_format : str - see param converter doc - path_regex : str - see param converter doc - param_converters : str - path format, regex and converter are used by HTTP routers to extract path parameters """ what : str instance_name : str - instruction : str fullpath : str request_as_argument : bool = field(default=False) - path_format : typing.Optional[str] = field(default=None) - path_regex : typing.Optional[typing.Pattern] = field(default=None) - param_convertors : typing.Optional[typing.Dict] = field(default=None) - method : str = field(default="GET") + instructions : typing.Dict[str, str] = field(default_factory=dict) + # below are all dunders, when something else is added, be careful to remember to edit ObjectProxy logic when necessary # 'what' can be an 'ATTRIBUTE' or 'CALLABLE' (based on isparameter or iscallable) and 'instruction' # stores the instructions to be sent to the eventloop. 'instance_name' maps the instruction to a particular # instance of RemoteObject - def __init__(self, *, what : str, instance_name : str, fullpath : str, instruction : str, - request_as_argument : bool = False) -> None: + def __init__(self, *, what : str, instance_name : str, fullpath : str, request_as_argument : bool = False, + **instructions) -> None: self.what = what self.instance_name = instance_name self.fullpath = fullpath - self.instruction = instruction self.request_as_argument = request_as_argument + self.instruction = HTTPMethodInstructions(**instructions) def __getstate__(self): return self.json() @@ -194,16 +194,6 @@ def json(self): "request_as_argument" : self.request_as_argument } - def compile_path(self): - path_regex, self.path_format, param_convertors = compile_path(self.fullpath) - if self.path_format == self.fullpath and len(param_convertors) == 0: - self.path_regex = None - self.param_convertors = None - elif self.path_format != self.fullpath and len(param_convertors) == 0: - raise RuntimeError(f"Unknown path format found '{self.path_format}' for path 
'{self.fullpath}', no path converters were created.") - else: - self.path_regex = path_regex - self.param_convertors = param_convertors @dataclass diff --git a/hololinked/server/database.py b/hololinked/server/database.py index 5b3d533..893432d 100644 --- a/hololinked/server/database.py +++ b/hololinked/server/database.py @@ -41,7 +41,8 @@ def __init__(self, instance : Parameterized, serializer : typing.Optional[BaseSe self.URL = self.create_URL(config_file) @classmethod - def create_URL(cls, file_name : str = None, database : typing.Optional[str] = None) -> str: + def create_URL(cls, file_name : str = None, database : typing.Optional[str] = None, + use_dialect : typing.Optional[bool] = False) -> str: if not file_name: conf = {} elif file_name.endswith('.json'): @@ -50,20 +51,24 @@ def create_URL(cls, file_name : str = None, database : typing.Optional[str] = No else: raise ValueError("config files of extension - {} expected, given file name {}".format(["json"], file_name)) - dialect = conf.get('dialect', None) server = conf.get('server', None) - if not database: - database = conf.get('database', 'hololinked') if not server: - file = conf.get('file', 'hololinked.db') - return f"sqlite+pysqlite://{file}/{database}" + file = conf.get('file', f"{database}.db" if not database.endswith('.db') else database) + return f"sqlite+pysqlite:///{file}" + if use_dialect: + dialect = conf.get('dialect', None) + else: + dialect = None + database = conf.get('database', 'hololinked') host = conf.get("host", 'localhost') port = conf.get("port", 5432) user = conf.get('user', 'postgres') password = conf.get('password', '') - return f"{server}+{dialect}://{user}:{password}@{host}:{port}/{database}" - - + if dialect: + return f"{server}+{dialect}://{user}:{password}@{host}:{port}/{database}" + else: + return f"{server}://{user}:{password}@{host}:{port}/{database}" + class BaseAsyncDB(BaseDB): """ Base class for an async database engine, implements configuration file reader, @@ -131,7 
+136,7 @@ class RemoteObjectDB(BaseSyncDB): config_file: str configuration file of the database server """ - def fetch_own_info(self) -> RemoteObjectInformation: + def fetch_own_info(self): # -> RemoteObjectInformation: """ fetch ``RemoteObject`` instance's own information, for schema see ``RemoteObjectInformation``. diff --git a/hololinked/server/handlers.py b/hololinked/server/handlers.py index 524c787..883c3ec 100644 --- a/hololinked/server/handlers.py +++ b/hololinked/server/handlers.py @@ -2,13 +2,12 @@ import typing import logging from json import JSONDecodeError -from tornado.web import RequestHandler, StaticFileHandler +from tornado.web import RequestHandler, StaticFileHandler, Application from tornado.iostream import StreamClosedError from .constants import CommonInstructions from .serializers import JSONSerializer from .zmq_message_brokers import AsyncZMQClient, MessageMappedZMQClientPool, EventConsumer -from .webserver_utils import * from .utils import current_datetime_ms_str from .data_classes import HTTPResource, ServerSentEvent @@ -20,6 +19,7 @@ class BaseHandler(RequestHandler): json_serializer : JSONSerializer clients : str logger : logging.Logger + application : Application def initialize(self, resource : typing.Union[HTTPResource, ServerSentEvent]) -> None: self.resource = resource @@ -222,9 +222,8 @@ def get_absolute_path(cls, root: str, path: str) -> str: class RemoteObjectsHandler(BaseHandler): - def initialize(self, resource: HTTPResource | ServerSentEvent, request_handler) -> None: + def initialize(self, request_handler : RequestHandler) -> None: self.request_handler = request_handler - return super().initialize(resource) async def get(self): with self.async_session() as session: @@ -242,7 +241,7 @@ async def connect_to_remote_object(self, clients : typing.List[AsyncZMQClient]): for client in clients: await client.handshake_complete() _, _, _, _, _, reply = await client.async_execute( - 
f'/{client.server_instance_name}{CommonInstructions.HTTP_RESOURCES}', + CommonInstructions.http_resource_read(client.server_instance_name), raise_client_side_exception=True) update_resources(resources, reply["returnValue"]) # type: ignore # _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/object-info', raise_client_side_exception = True) @@ -279,3 +278,4 @@ def __init__( remote_method and RemoteParamater contains all the info given to make RPCHandler work """ + self.application.wildcard_router.add_rules(handlers) \ No newline at end of file diff --git a/hololinked/server/host_utilities.py b/hololinked/server/host_utilities.py index 5308863..2a3b605 100644 --- a/hololinked/server/host_utilities.py +++ b/hololinked/server/host_utilities.py @@ -7,30 +7,20 @@ import ssl import typing import getpass -from dataclasses import dataclass, asdict, field +from dataclasses import asdict, field from argon2 import PasswordHasher -from sqlalchemy import Engine, Integer, String, JSON, ARRAY, Boolean +from sqlalchemy import Engine, Integer, String, JSON, ARRAY, Boolean, BLOB from sqlalchemy import select, create_engine from sqlalchemy.orm import Session, sessionmaker, Mapped, mapped_column, DeclarativeBase, MappedAsDataclass from sqlalchemy.ext import asyncio as asyncio_ext from sqlalchemy_utils import database_exists, create_database, drop_database -from tornado.httpclient import AsyncHTTPClient, HTTPRequest from tornado.web import RequestHandler, Application, authenticated from tornado.httpserver import HTTPServer as TornadoHTTP1Server from .constants import JSONSerializable from .serializers import JSONSerializer from .database import BaseDB -from .remote_parameters import TypedList -from .zmq_message_brokers import MessageMappedZMQClientPool -from .webserver_utils import get_IP_from_interface, update_resources_using_client -from .utils import unique_id -from .http_methods import post, get, put, delete -from .eventloop import Consumer, 
EventLoop, fork_empty_eventloop -from .remote_object import RemoteObject, RemoteObjectDB, RemoteObjectMetaclass -from .database import BaseAsyncDB - SERVER_INSTANCE_NAME = 'server-util' CLIENT_HOST_INSTANCE_NAME = 'dashboard-util' @@ -120,14 +110,13 @@ class RemoteObjectInformation(HololinkedHostTableBase, MappedAsDataclass): def json(self): return asdict(self) - - + def for_authenticated_user(method): def authenticated_method(self : RequestHandler): if not self.current_user: self.set_status(403) - self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") self.finish() return else: @@ -146,6 +135,7 @@ def check_headers(self): self.finish() def get_current_user(self) -> typing.Any: + return True return self.get_signed_cookie('user') def set_default_headers(self) -> None: @@ -153,7 +143,7 @@ def set_default_headers(self) -> None: async def options(self): self.set_status(200) - self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") self.set_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS") self.set_header("Access-Control-Allow-Headers", "*") self.finish() @@ -195,12 +185,12 @@ async def post(self): self.write({"reason" : ""}) except Exception as ex: self.set_status(500, str(ex)) - self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") self.finish() async def options(self): self.set_status(200) - self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") self.set_header("Access-Control-Allow-Methods", "POST, OPTIONS") self.set_header("Access-Control-Allow-Headers", "*") self.set_header("Access-Control-Allow-Credentials", True) @@ -272,7 +262,7 @@ async def post(self): session.add(Dashboards(**data)) 
await session.commit() self.set_status(200) - self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") except Exception as ex: self.set_status(500, str(ex)) self.finish() @@ -288,7 +278,7 @@ async def get(self): in data.mappings().all()]) self.set_status(200) self.set_header("Content-Type", "application/json") - self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") self.write(serialized_data) except Exception as ex: self.set_status(500, str(ex)) @@ -328,13 +318,14 @@ class MainHandler(SystemHostHandler): async def get(self): self.check_headers() self.set_status(200) - self.set_header("Access-Control-Allow-Origin", "https://127.0.0.1:5173") + self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") self.write("

I am alive!!!

") self.finish() -def create_primary_host(config_file : str, ssl_context : ssl.SSLContext, **server_settings) -> TornadoHTTP1Server: - URL = BaseDB.create_URL(config_file, database="hololinked-host") +def create_system_host(config_file : typing.Optional[str] = None, ssl_context : typing.Optional[ssl.SSLContext] = None, + **server_settings) -> TornadoHTTP1Server: + URL = BaseDB.create_URL(config_file, database='hololinked-host', use_dialect=False) if not database_exists(URL): try: create_database(URL) @@ -342,10 +333,15 @@ def create_primary_host(config_file : str, ssl_context : ssl.SSLContext, **serve HololinkedHostTableBase.metadata.create_all(sync_engine) create_tables(sync_engine) create_credentials(sync_engine) + sync_engine.dispose() except Exception as ex: - drop_database(URL) + sync_engine.dispose() + if URL.startswith("sqlite"): + os.remove(URL.split('/')[-1]) + else: + drop_database(URL) raise ex from None - + URL = BaseDB.create_URL(config_file, database='hololinked-host', use_dialect=True) global global_engine, global_session global_engine = asyncio_ext.create_async_engine(URL, echo=True) global_session = sessionmaker(global_engine, expire_on_commit=True, @@ -368,7 +364,7 @@ def create_primary_host(config_file : str, ssl_context : ssl.SSLContext, **serve def create_tables(engine): with Session(engine) as session, session.begin(): - file = open("default_host_settings.json", 'r') + file = open(f"{os.path.dirname(os.path.abspath(__file__))}{os.sep}assets{os.sep}default_host_settings.json", 'r') default_settings = JSONSerializer.generic_load(file) for name, settings in default_settings.items(): session.add(AppSettings( @@ -399,187 +395,5 @@ def create_credentials(sync_engine): raise RuntimeError("password not created, aborting database creation.") -@dataclass -class NonDBRemoteObjectInfo: - instance_name : str - classname : str - script : str - - def json(self): - return asdict(self) - - - - -@dataclass -class UninstantiatedRemoteObject: - consumer : 
RemoteObjectMetaclass - file_name : str - object_name : str - eventloop_name : str - id : str - - def json(self): - return dict ( - id = self.id, - file_name = self.file_name, - object_name = self.object_name, - eventloop_name = self.eventloop_name - ) - - -class SystemHost(HTTPServer): - """ - HTTPServerUtilities provide functionality to instantiate, kill or get current status of - existing remote-objects attached to this server, ability to subscribe to a Primary Host Server - which brings remote-objects to the web UI & spawn new event loops - """ - - type : str = 'NORMAL_REMOTE_OBJECT_SERVER' - - remote_object_info = TypedList(default=None, allow_None=True, - URL_path='/remote-object-info') - - def __init__(self, db_config_file : typing.Union[str, None], zmq_client_pool : MessageMappedZMQClientPool, - remote_object_info , **kwargs) -> None: - RemoteObject.__init__(self, **kwargs) - BaseAsyncDB.__init__(self, database='scadapyserver', serializer=self.json_serializer, config_file=db_config_file) - self.zmq_client_pool = zmq_client_pool - self.server_resources = None - self.remote_object_info = remote_object_info - self._uninstantiated_remote_objects : typing.Dict[str, UninstantiatedRemoteObject] = {} - - @post('/subscribe') - async def subscribe_to_host(self, host : str, port : int): - client = AsyncHTTPClient() - try: - R = await client.fetch(HTTPRequest( - url = "{}/{}/{}".format(host, SERVER_INSTANCE_NAME, 'subscription'), - method = 'POST', - body = JSONSerializer.general_dumps(dict( - hostname=socket.gethostname(), - port=port, - type=self.type, - https=True - )) - )) - except Exception as E: - self.logger.error(f"Could not subscribe to host {host}. error : {str(E)}, error type : {type(E)}.") - raise - if R.code == 200 or R.code == 202: - self.logger.info(f"subsribed successfully to host {host}") - else: - raise RuntimeError(f"could not subsribe to host {host}. response {json.loads(R.body)}") - # we lose the client anyway so we close it. 
if we decide to reuse the client, changes needed - client.close() - - @post('/eventloop/new') - def new_eventloop(self, instance_name, proxy_serializer, json_serializer): - fork_empty_eventloop(instance_name = instance_name) - self.zmq_client_pool.register_client(instance_name) - self.zmq_client_pool[instance_name].handshake() - - @post('/remote-object/import') - async def import_remote_object(self, file_name : str, object_name : str, eventloop_name : str): - consumer = EventLoop.import_remote_object(file_name, object_name) - id = unique_id().decode() - db_params = consumer.parameters.remote_objects_webgui_info(consumer.parameters.load_at_init_objects()) - self._uninstantiated_remote_objects[id] = UninstantiatedRemoteObject( - consumer = consumer, - file_name = file_name, - object_name = object_name, - eventloop_name = eventloop_name, - id = id - ) - return { - "id" : id, - "db_params" : db_params - } - - @delete('/remote-object/import') - async def del_imported_remote_object(self, id : str): - obj = self._uninstantiated_remote_objects.get(id, None) - if obj is None: - return False - elif isinstance(obj, str): - return await self.zmq_client_pool.async_execute(instance_name = obj, instruction = '/remote-object/import', - arguments = dict(id = obj) ) - else: - self.uninstantiated_remote_objects.pop(id) - return True - - @post('/remote-object/instantiate') - async def new_remote_object(self, id : str, kwargs : typing.Dict[str, typing.Any], db_params : typing.Dict[str, typing.Any]): - uninstantiated_remote_object = self._uninstantiated_remote_objects[id] - consumer = uninstantiated_remote_object.consumer - init_params = consumer.param_descriptors.load_at_init_objects() - for name, value in db_params.items(): - init_params[name].validate_and_adapt(value) - if uninstantiated_remote_object.eventloop_name not in self.zmq_client_pool: - fork_empty_eventloop(instance_name = uninstantiated_remote_object.eventloop_name) - 
self.zmq_client_pool.register_client(uninstantiated_remote_object.eventloop_name) - await self.zmq_client_pool[uninstantiated_remote_object.eventloop_name].handshake_complete() - await self.zmq_client_pool[uninstantiated_remote_object.eventloop_name].async_execute( - '/remote-object/instantiate', arguments = dict( - file_name = uninstantiated_remote_object.file_name, - object_name = uninstantiated_remote_object.object_name, - kwargs = kwargs), raise_client_side_exception = True - ) - if not kwargs.get('instance_name') in self.zmq_client_pool: - self.zmq_client_pool.register_client(kwargs.get('instance_name')) - await self.zmq_client_pool[kwargs.get('instance_name')].handshake_complete() - await update_resources_using_client(self.server_resources, - self.remote_object_info, self.zmq_client_pool[kwargs.get('instance_name')]) - await update_resources_using_client(self.server_resources, - self.remote_object_info, self.zmq_client_pool[uninstantiated_remote_object.eventloop_name]) - self._uninstantiated_remote_objects.pop(id, None) - - @get('/remote_objects/ping') - async def ping_consumers(self): - return await self.zmq_client_pool.ping_all_servers() - - @get('/remote_objects/state') - async def consumers_state(self): - return await self.zmq_client_pool.async_execute_in_all_remote_objects('/state', context = { - "plain_reply" : True - }) - - @get('/uninstantiated-remote-objects') - async def uninstantiated_remote_objects(self): - organised_reply = await self.zmq_client_pool.async_execute_in_all_eventloops('/uninstantiated-remote-objects/read', - context = { - "plain_reply" : True - }) - organised_reply[self.instance_name] = self._uninstantiated_remote_objects - return organised_reply - - @get('/info/all') - async def info(self): - consumers_state, uninstantiated_remote_objects = await asyncio.gather(self.consumers_state(), - self.uninstantiated_remote_objects()) - return dict( - remoteObjectState = consumers_state, - remoteObjectInfo = self.remote_object_info, - 
uninstantiatedRemoteObjects = uninstantiated_remote_objects - ) - - - - -class PCHostUtilities(HTTPServerUtilities): - - - def __init__(self, db_config_file : str, server_network_interface : str, port : int, **kwargs) -> None: - super().__init__(db_config_file = db_config_file, zmq_client_pool = None, remote_object_info = None, **kwargs) - self.subscribers : typing.List[SubscribedHTTPServers] = [] - self.own_info = SubscribedHTTPServers( - hostname=socket.gethostname(), - IPAddress=get_IP_from_interface(server_network_interface), - port= port, - type=self.type, - https=False - ) - - -__all__ = ['create_primary_host'] \ No newline at end of file +__all__ = ['create_system_host'] \ No newline at end of file diff --git a/hololinked/server/remote_object.py b/hololinked/server/remote_object.py index 02e11fb..67088fa 100644 --- a/hololinked/server/remote_object.py +++ b/hololinked/server/remote_object.py @@ -8,12 +8,12 @@ import datetime import zmq from collections import deque -from enum import EnumMeta, Enum +from enum import EnumMeta, StrEnum from ..param.parameterized import Parameterized, ParameterizedMetaclass +from .constants import (CallableType, LOGLEVEL, ZMQ_PROTOCOLS, HTTP_METHODS, ResourceOperations, ResourceTypes) from .database import RemoteObjectDB -from .constants import (JSONSerializable, CallableType, LOGLEVEL, ZMQ_PROTOCOLS, HTTP_METHODS) from .serializers import * from .exceptions import BreakInnerLoop from .decorators import remote_method @@ -47,13 +47,17 @@ class StateMachine: on_exit: Dict[str, Callable | RemoteParameter] callbacks to be invoked when a certain state is exited. 
It is to be specified as a dictionary with the states being the keys - + **machine: + state name: List[Callable, RemoteParamater] + directly pass the state name as an argument along with the methods/parameters which are allowed to execute + in that state + Attributes ---------- exists: bool internally computed, True if states and initial_states are valid """ - initial_state = ClassSelector(default=None, allow_None=True, constant=True, class_=(Enum, str)) + initial_state = ClassSelector(default=None, allow_None=True, constant=True, class_=(StrEnum, str)) exists = Boolean(default=False) states = ClassSelector(default=None, allow_None=True, constant=True, class_=(EnumMeta, tuple, list)) on_enter = TypedDict(default=None, allow_None=True, key_type=str) @@ -61,7 +65,7 @@ class StateMachine: machine = TypedDict(default=None, allow_None=True, key_type=str, item_type=(list, tuple)) def __init__(self, states : typing.Union[EnumMeta, typing.List[str], typing.Tuple[str]], *, - initial_state : typing.Union[Enum, str], + initial_state : typing.Union[StrEnum, str], on_enter : typing.Dict[str, typing.Union[typing.List[typing.Callable], typing.Callable]] = {}, on_exit : typing.Dict[str, typing.Union[typing.List[typing.Callable], typing.Callable]] = {}, push_state_change_event : bool = False, @@ -74,7 +78,8 @@ def __init__(self, states : typing.Union[EnumMeta, typing.List[str], typing.Tupl self.machine = machine self.push_state_change_event = push_state_change_event if push_state_change_event: - self.state_change_event = Event('state-change') + pass + # self.state_change_event = Event('state-change') def _prepare(self, owner : 'RemoteObject') -> None: if self.states is None and self.initial_state is None: @@ -100,7 +105,7 @@ def _prepare(self, owner : 'RemoteObject') -> None: if state in self: for resource in objects: if hasattr(resource, 'scada_info'): - assert isinstance(resource._remote_info, RemoteResourceInfoValidator) # type: ignore + assert isinstance(resource._remote_info, 
RemoteResourceInfoValidator) if resource._remote_info.iscallable and resource._remote_info.obj_name not in owner_methods: # type: ignore raise AttributeError("Given object {} for state machine does not belong to class {}".format( resource, owner)) @@ -137,19 +142,20 @@ def _prepare(self, owner : 'RemoteObject') -> None: raise TypeError(f"on_enter accept only methods. Given type {type(obj)}.") self.exists = True - def __contains__(self, state : typing.Union[str, Enum]): + def __contains__(self, state : typing.Union[str, StrEnum]): if isinstance(self.states, EnumMeta) and state not in self.states.__members__ and state not in self.states: # type: ignore return False - elif isinstance(self.states, (tuple, list)) and state not in self.states: + elif isinstance(self.states, tuple) and state not in self.states: return False return True + # TODO It might be better to return True's instead of False's and return False at the last, may take care of edge-cases better - def _machine_compliant_state(self, state) -> typing.Union[Enum, str]: + def _machine_compliant_state(self, state) -> typing.Union[StrEnum, str]: if isinstance(self.states, EnumMeta): return self.states.__members__[state] # type: ignore return state - def get_state(self) -> typing.Union[str, Enum, None]: + def get_state(self) -> typing.Union[str, StrEnum, None]: """ return the current state. one can also access the property `current state`. 
@@ -168,12 +174,12 @@ def set_state(self, value, push_event : bool = True, skip_callbacks : bool = Fal self._state = value if push_event and self.push_state_change_event: self.state_change_event.push({self.owner.instance_name : value}) - if isinstance(previous_state, Enum): + if isinstance(previous_state, StrEnum): previous_state = previous_state.name if previous_state in self.on_exit: for func in self.on_exit[previous_state]: # type: ignore func(self.owner) - if isinstance(value, Enum): + if isinstance(value, StrEnum): value = value.name if value in self.on_enter: for func in self.on_enter[value]: # type: ignore @@ -191,10 +197,16 @@ def query(self, info : typing.Union[str, typing.List[str]] ) -> typing.Any: -ConfigInfo = Enum('LevelTypes','USER_MANAGED PRIMARY_HOST_WIDE PC_HOST_WIDE') - -class RemoteObjectMetaclass(ParameterizedMetaclass): +class RemoteObjectMeta(ParameterizedMetaclass): + """ + Metaclass for remote object, implements a ``__post_init__()`` call & ``RemoteClassParameters`` instantiation for + ``RemoteObject``. During instantiation of ``RemoteObject``, first the message brokers are created (``_prepare_message_brokers()``), + then ``_prepare_logger()``, then ``_prepare_DB()`` & ``_prepare_state_machine()`` in the ``__init__()``. In the + ``__post_init__()``, the resources of the RemoteObject are segregated and database operations like writing parameter + values are carried out. Between ``__post_init__()`` and ``__init__()``, package user's ``__init__()`` will run where user can run + custom logic after preparation of message brokers and database engine and before using database operations. + """ @classmethod def __prepare__(cls, name, bases): @@ -220,14 +232,23 @@ def __call__(mcls, *args, **kwargs): return instance def _create_param_container(mcs, mcs_members : dict) -> None: + """ + creates ``RemoteClassParameters`` instead of ``param``'s own ``Parameters`` + as the default container for descriptors. See code of ``param``. 
+ """ mcs._param_container = RemoteClassParameters(mcs, mcs_members) @property def parameters(mcs) -> RemoteClassParameters: + """ + returns ``RemoteClassParameters`` instance instead of ``param``'s own + ``Parameters`` instance. See code of ``param``. + """ return mcs._param_container -class RemoteSubobject(Parameterized, metaclass=RemoteObjectMetaclass): + +class RemoteSubobject(Parameterized, metaclass=RemoteObjectMeta): # local parameters instance_name = String(default=None, regex=r'[A-Za-z]+[A-Za-z_0-9\-\/]*', constant=True, remote=False, @@ -239,12 +260,13 @@ class RemoteSubobject(Parameterized, metaclass=RemoteObjectMetaclass): in your entire system.""") # type: str # remote paramerters httpserver_resources = RemoteParameter(readonly=True, URL_path='/resources/http-server', - doc="""object's resources exposed to HTTP server""", fget=lambda self: self._httpserver_resources ) # type: typing.Dict[str, typing.Dict[str, HTTPResource]] + doc="object's resources exposed to HTTP client (through hololinked.server.HTTPServer)", + fget=lambda self: self._httpserver_resources ) # type: typing.Dict[str, typing.Dict[str, HTTPResource]] rpc_resources = RemoteParameter(readonly=True, URL_path='/resources/object-proxy', - doc= """object's resources exposed to RPC client, similar to HTTP resources but differs - in details.""", fget=lambda self: self._rpc_resources) # type: typing.Dict[str, typing.Any] + doc="object's resources exposed to RPC client, similar to HTTP resources but differs in details.", + fget=lambda self: self._rpc_resources) # type: typing.Dict[str, typing.Any] gui_resources : typing.Dict = RemoteParameter(readonly=True, URL_path='/resources/gui', - doc= """object's data read by scadapy webdashboard GUI client, similar to http_resources but differs + doc="""object's data read by scadapy webdashboard GUI client, similar to http_resources but differs in details.""") # type: typing.Dict[str, typing.Any] events = RemoteParameter(readonly=True, URL_path='/events', 
doc="returns a dictionary with two fields containing event name and event information") # type: typing.Dict[str, typing.Any] @@ -298,14 +320,7 @@ def _prepare_resources(self): and extracts information """ # The following dict is to be given to the HTTP server - httpserver_resources = dict( - GET = dict(), - POST = dict(), - PUT = dict(), - DELETE = dict(), - PATCH = dict(), - OPTIONS = dict() - ) # type: typing.Dict[str, typing.Dict[str, HTTPResource]] + httpserver_resources = dict() # type: typing.Dict[str, HTTPResource] # The following dict will be given to the object proxy client rpc_resources = dict() # type: typing.Dict[str, RPCResource] # The following dict will be used by the event loop @@ -324,16 +339,17 @@ def _prepare_resources(self): fullpath = "{}{}".format(self._full_URL_path_prefix, remote_info.URL_path) assert remote_info.iscallable, ("remote info from inspect.ismethod is not a callable", "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report") - for http_method in remote_info.http_method: - httpserver_resources[http_method][fullpath] = HTTPResource( - what=CALLABLE, - instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, - fullpath=fullpath, - instruction=fullpath, - request_as_argument=remote_info.request_as_argument - ) + if len(remote_info.http_method) > 1: + raise ValueError(f"methods support only one HTTP method at the moment. 
Given number of methods : {len(remote_info.http_method)}.") + httpserver_resources[fullpath] = HTTPResource( + what=ResourceTypes.CALLABLE, + instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, + fullpath=fullpath, + request_as_argument=remote_info.request_as_argument + **{http_method : fullpath}, + ) rpc_resources[fullpath] = RPCResource( - what=CALLABLE, + what=ResourceTypes.CALLABLE, instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, instruction=fullpath, name=getattr(resource, '__name__'), @@ -366,7 +382,7 @@ def _prepare_resources(self): httpserver_resources[HTTP_METHODS.GET]['{}{}'.format( self._full_URL_path_prefix, resource.URL_path)] = ServerSentEvent( # event URL_path has '/' prefix - what=EVENT, + what=ResourceTypes.EVENT, event_name=resource.name, socket_address=self._event_publisher.socket_address ) @@ -383,33 +399,29 @@ def _prepare_resources(self): "logic error - visit https://github.com/VigneshVSV/hololinked/issues to report") read_http_method, write_http_method = remote_info.http_method - httpserver_resources[read_http_method][fullpath] = HTTPResource( - what=PARAMETER, - instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, - fullpath=fullpath, - instruction=fullpath + '/' + READ - ) - - httpserver_resources[write_http_method][fullpath] = HTTPResource( - what=PARAMETER, + httpserver_resources[fullpath] = HTTPResource( + what=ResourceTypes.PARAMETER, instance_name=self._owner.instance_name if self._owner is not None else self.instance_name, fullpath=fullpath, - instruction=fullpath + '/' + WRITE + request_as_argument=False, + **{ + read_http_method : fullpath+ResourceOperations.PARAMETER_READ, + write_http_method : fullpath+ResourceOperations.PARAMETER_WRITE + } ) - rpc_resources[fullpath] = RPCResource( - what=PARAMETER, + what=ResourceTypes.PARAMETER, instance_name=self._owner.instance_name if self._owner is not None else 
self.instance_name, instruction=fullpath, doc=parameter.__doc__, name=remote_info.obj_name, qualname=self.__class__.__name__ + '.' + remote_info.obj_name, # qualname is not correct probably, does not respect inheritance - top_owner=self._owner is None + top_owner=self._owner is None ) dclass = remote_info.to_dataclass(obj=parameter, bound_obj=self) - instance_resources[fullpath+'/'+READ] = dclass - instance_resources[fullpath+'/'+WRITE] = dclass + instance_resources[fullpath+ResourceOperations.PARAMETER_READ] = dclass + instance_resources[fullpath+ResourceOperations.PARAMETER_WRITE] = dclass # The above for-loops can be used only once, the division is only for readability # following are in _internal_fixed_attributes - allowed to set only once self._rpc_resources = rpc_resources diff --git a/hololinked/server/serializers.py b/hololinked/server/serializers.py index 341cd69..b08d979 100644 --- a/hololinked/server/serializers.py +++ b/hololinked/server/serializers.py @@ -54,7 +54,8 @@ def loads(self, data): def dumps(self, data): raise NotImplementedError("implement in subclass") - def _convertToBytes(self, data): + @classmethod + def convert_to_bytes(self, data): if type(data) is bytearray: return bytes(data) if type(data) is memoryview: @@ -127,7 +128,7 @@ def dump(self, data : typing.Dict[str, typing.Any], file_desc) -> None: json.dump(data, file_desc, ensure_ascii=False, allow_nan=True, default=self.default) def loads(self, data : typing.Union[bytearray, memoryview, bytes]) -> typing.Any: - data : str = self._convertToBytes(data).decode("utf-8") + data : str = self.convert_to_bytes(data).decode("utf-8") try: return json.loads(data) except: @@ -152,7 +153,7 @@ def generic_dump(cls, data : typing.Dict[str, typing.Any], file_desc) -> None: @classmethod def generic_loads(cls, data : typing.Union[bytearray, memoryview, bytes]) -> typing.Dict[str, typing.Any]: - data = cls._convertToBytes(data).decode("utf-8") # type: ignore + data = 
cls.convert_to_bytes(data).decode("utf-8") return json.loads(data) @classmethod diff --git a/hololinked/server/webserver_utils.py b/hololinked/server/webserver_utils.py index 11e544c..0868317 100644 --- a/hololinked/server/webserver_utils.py +++ b/hololinked/server/webserver_utils.py @@ -3,63 +3,8 @@ import traceback import typing import ifaddr -# from tabulate import tabulate from tornado.httputil import HTTPServerRequest -from .constants import ResourceType -from .data_classes import FileServerData, ServerSentEvent, HTTPResource -from .zmq_message_brokers import AsyncZMQClient, SyncZMQClient - - -def update_resources(resources : typing.Dict[str, typing.Dict[str, typing.Dict[str, typing.Any]]], - add : typing.Dict[str, typing.Dict[str, typing.Any]]) -> None: - file_server_routes = dict( - STATIC_ROUTES = dict(), - DYNAMIC_ROUTES = dict() - ) - for http_method, existing_map in resources.items(): - if http_method == 'FILE_SERVER': - continue - for URL_path, info in add[http_method].items(): - if isinstance(info, ServerSentEvent): - existing_map["STATIC_ROUTES"][URL_path] = info - elif isinstance(info, HTTPResource): - info.compile_path() - if info.path_regex is None: - existing_map["STATIC_ROUTES"][info.path_format] = info - else: - existing_map["DYNAMIC_ROUTES"][info.path_format] = info - elif info["what"] == ATTRIBUTE or info["what"] == CALLABLE: - data = HTTPResource(**info) - data.compile_path() - if data.path_regex is None: - existing_map["STATIC_ROUTES"][data.path_format] = data - else: - existing_map["DYNAMIC_ROUTES"][data.path_format] = data - elif info["what"] == EVENT: - existing_map["STATIC_ROUTES"][URL_path] = ServerSentEvent(**info) - elif info["what"] == IMAGE_STREAM: - existing_map["STATIC_ROUTES"][URL_path] = ServerSentEvent(**info) - elif info["what"] == FILE: - data = FileServerData(**info) - data.compile_path() - if data.path_regex is None: - file_server_routes["STATIC_ROUTES"][data.path_format] = data - else: - 
file_server_routes["DYNAMIC_ROUTES"][data.path_format] = data - resources["FILE_SERVER"]["STATIC_ROUTES"].update(file_server_routes["STATIC_ROUTES"]) - resources["FILE_SERVER"]["STATIC_ROUTES"].update(file_server_routes["DYNAMIC_ROUTES"]) - - - -async def update_resources_using_client(resources : typing.Dict[str, typing.Dict[str, typing.Any]], - remote_object_info : typing.List, - client : typing.Union[AsyncZMQClient, SyncZMQClient]) -> None: - from .remote_object import RemoteObjectDB - _, _, _, _, _, reply = await client.async_execute('/resources/http', raise_client_side_exception = True) - update_resources(resources, reply["returnValue"]) # type: ignore - _, _, _, _, _, reply = await client.read_attribute('/object-info', raise_client_side_exception = True) - remote_object_info.append(RemoteObjectDB.RemoteObjectInfo(**reply["returnValue"])) def log_request(request : HTTPServerRequest, logger : typing.Optional[logging.Logger] = None) -> None: @@ -106,25 +51,6 @@ def log_request(request : HTTPServerRequest, logger : typing.Optional[logging.Lo print(textwrap.dedent(text).lstrip()) -resources_table_headers = ["URL", "method"] -def log_resources(logger : logging.Logger, resources : typing.Dict[str, typing.Dict[str, typing.Any]] ) -> None: - if logger.level == logging.DEBUG: - # check log level manually before cooking this long string - text = """ - GET resources : - {} - POST resources : - {} - PUT resources : - {} - """.format( - tabulate([[key] + [values[1]] for key, values in resources["GET"].items()] , headers=resources_table_headers, tablefmt="presto"), - tabulate([[key] + [values[1]] for key, values in resources["POST"].items()], headers=resources_table_headers, tablefmt="presto"), - tabulate([[key] + [values[1]] for key, values in resources["PUT"].items()] , headers=resources_table_headers, tablefmt="presto") - ) - logger.debug(textwrap.dedent(text).lstrip()) - - def get_IP_from_interface(interface_name : str = 'Ethernet', adapter_name = None): adapters = 
ifaddr.get_adapters(include_unconfigured=True) for adapter in adapters: @@ -153,4 +79,4 @@ def format_exception_as_json(exc : Exception) -> typing.Dict[str, typing.Any]: } -__all__ = ['log_request', 'log_resources', 'format_exception_as_json'] \ No newline at end of file +__all__ = ['log_request', 'format_exception_as_json'] \ No newline at end of file From 1e0a164d1fadfadc6632b83e9f358bb7d5e7722e Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 3 Mar 2024 11:35:34 +0100 Subject: [PATCH 046/167] system host handler docs and new targets in make file to automatically host docs --- doc/Makefile | 3 --- doc/make.bat | 23 +++++++++++++++++++ doc/source/autodoc/index.rst | 1 + .../autodoc/server/system_host/index.rst | 9 ++++++++ 4 files changed, 33 insertions(+), 3 deletions(-) create mode 100644 doc/source/autodoc/server/system_host/index.rst diff --git a/doc/Makefile b/doc/Makefile index 722ff77..54c86eb 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -20,9 +20,6 @@ clean: del * /s /f /q cd .. -cleanbuild: - cd .. 
- .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new diff --git a/doc/make.bat b/doc/make.bat index dc1312a..2949e3b 100644 --- a/doc/make.bat +++ b/doc/make.bat @@ -9,6 +9,7 @@ if "%SPHINXBUILD%" == "" ( ) set SOURCEDIR=source set BUILDDIR=build +set DOC_ADDRESS=http://localhost:8000 %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( @@ -25,11 +26,33 @@ if errorlevel 9009 ( if "%1" == "" goto help +if "%1" == "server" goto server + +if "%1" == "open-in-chrome" goto open-in-chrome + +if "%1" == "host-doc" goto host-doc + %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:server +echo server is hosted at %DOC_ADDRESS%, change port directory in make file if necessary +python -m http.server --directory build\html +goto end + +:open-doc-in-browser +start explorer %DOC_ADDRESS% +goto end + +:host-doc +echo server is hosted at %DOC_ADDRESS%, change port directory in make file if necessary +start explorer %DOC_ADDRESS% +python -m http.server --directory build\html +goto end :end popd diff --git a/doc/source/autodoc/index.rst b/doc/source/autodoc/index.rst index a92fe39..9f0ca30 100644 --- a/doc/source/autodoc/index.rst +++ b/doc/source/autodoc/index.rst @@ -17,6 +17,7 @@ hololinked.server server/eventloop server/http_server server/remote_parameter/index + server/system_host/index server/decorators server/database/index server/zmq_message_brokers/index diff --git a/doc/source/autodoc/server/system_host/index.rst b/doc/source/autodoc/server/system_host/index.rst new file mode 100644 index 0000000..cf5e156 --- /dev/null +++ b/doc/source/autodoc/server/system_host/index.rst @@ -0,0 +1,9 @@ +SystemHost +========== + +.. autofunction:: hololinked.system_host.create_system_host + +.. 
autoclass:: hololinked.system_host.server.SystemHostHandler + :members: + :show-inheritance: + From 04f4d858d3686358694b7848d70773e64004a645 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 3 Mar 2024 11:35:53 +0100 Subject: [PATCH 047/167] system host moved to new namespace --- hololinked/system_host/__init__.py | 1 + hololinked/system_host/models.py | 78 ++++++ hololinked/system_host/server.py | 395 +++++++++++++++++++++++++++++ 3 files changed, 474 insertions(+) create mode 100644 hololinked/system_host/__init__.py create mode 100644 hololinked/system_host/models.py create mode 100644 hololinked/system_host/server.py diff --git a/hololinked/system_host/__init__.py b/hololinked/system_host/__init__.py new file mode 100644 index 0000000..134ebb0 --- /dev/null +++ b/hololinked/system_host/__init__.py @@ -0,0 +1 @@ +from .server import create_system_host \ No newline at end of file diff --git a/hololinked/system_host/models.py b/hololinked/system_host/models.py new file mode 100644 index 0000000..e804606 --- /dev/null +++ b/hololinked/system_host/models.py @@ -0,0 +1,78 @@ +import typing +from dataclasses import asdict, field + +from sqlalchemy import Integer, String, JSON, ARRAY, Boolean, BLOB +from sqlalchemy.orm import Mapped, mapped_column, DeclarativeBase, MappedAsDataclass + +from ..server.constants import JSONSerializable + +class HololinkedHostTableBase(DeclarativeBase): + pass + +class Dashboards(HololinkedHostTableBase, MappedAsDataclass): + __tablename__ = "dashboards" + + name : Mapped[str] = mapped_column(String(1024), primary_key=True) + URL : Mapped[str] = mapped_column(String(1024), unique=True) + description : Mapped[str] = mapped_column(String(16384)) + json_specfication : Mapped[typing.Dict[str, typing.Any]] = mapped_column(JSON) + + def json(self): + return asdict(self) + +class AppSettings(HololinkedHostTableBase, MappedAsDataclass): + __tablename__ = "appsettings" + + field : 
Mapped[str] = mapped_column(String(8192), primary_key=True) + value : Mapped[typing.Dict[str, typing.Any]] = mapped_column(JSON) + + def json(self): + return asdict(self) + +class LoginCredentials(HololinkedHostTableBase, MappedAsDataclass): + __tablename__ = "login_credentials" + + email : Mapped[str] = mapped_column(String(1024), primary_key=True) + password : Mapped[str] = mapped_column(String(1024), unique=True) + +class Server(HololinkedHostTableBase, MappedAsDataclass): + __tablename__ = "http_servers" + + hostname : Mapped[str] = mapped_column(String, primary_key=True) + type : Mapped[str] = mapped_column(String) + port : Mapped[int] = mapped_column(Integer) + IPAddress : Mapped[str] = mapped_column(String) + remote_objects : Mapped[typing.List[str]] = mapped_column(ARRAY(String)) + https : Mapped[bool] = mapped_column(Boolean) + qualifiedIP : Mapped[str] = field(init = False) + + def __post_init__(self): + self.qualifiedIP = '{}:{}'.format(self.hostname, self.port) + + def json(self): + return asdict(self) + +class RemoteObjectInformation(HololinkedHostTableBase, MappedAsDataclass): + __tablename__ = "remote_objects" + + instance_name : Mapped[str] = mapped_column(String, primary_key=True) + class_name : Mapped[str] = mapped_column(String) + script : Mapped[str] = mapped_column(String) + kwargs : Mapped[JSONSerializable] = mapped_column(JSON) + eventloop_instance_name : Mapped[str] = mapped_column(String) + http_server : Mapped[str] = mapped_column(String) + level : Mapped[int] = mapped_column(Integer) + level_type : Mapped[str] = mapped_column(String) + + def json(self): + return asdict(self) + + +__all__ = [ + HololinkedHostTableBase.__name__, + Dashboards.__name__, + AppSettings.__name__, + LoginCredentials.__name__, + Server.__name__, + RemoteObjectInformation.__name__, +] \ No newline at end of file diff --git a/hololinked/system_host/server.py b/hololinked/system_host/server.py new file mode 100644 index 0000000..5d9529e --- /dev/null +++ 
b/hololinked/system_host/server.py @@ -0,0 +1,395 @@ +import secrets +import os +import base64 +import socket +import json +import asyncio +import ssl +import typing +import getpass +from dataclasses import asdict, field +import uuid +from argon2 import PasswordHasher + +from sqlalchemy import Engine +from sqlalchemy import select, create_engine +from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.ext import asyncio as asyncio_ext +from sqlalchemy_utils import database_exists, create_database, drop_database +from tornado.web import RequestHandler, Application +from tornado.httpserver import HTTPServer as TornadoHTTP1Server + +from ..server.serializers import JSONSerializer +from ..server.database import BaseDB +from .models import * + +# /* +# We want to be able to do the following + +# 1) Add and remove server + +# 2) Each server +# - can be designated as host +# - allows DB operations specific to the client +# - only one such server can exist +# - or as normal instrument server +# - create new eventloop +# - create new device +# - create new HTTP servers +# - raw input output +# - have GUI JSON +# */ + + + +global_engine : typing.Optional[Engine] = None +global_session : typing.Optional[Session] = None + + +def for_authenticated_user(method): + def authenticated_method(self : "SystemHostHandler"): + if not self.current_user_valid: + self.set_status(403) + self.set_header("Access-Control-Allow-Origin", self.CORS) + self.finish() + return + else: + print("current user is : ", self.current_user) + return method(self) + return authenticated_method + + +class SystemHostHandler(RequestHandler): + """ + Base Request Handler for all requests directed to system host server. Implements + CORS & credential checks. 
+ """ + + CORS : typing.List[str] + + def check_headers(self): + """ + check suitable values for headers before processing the request + """ + content_type = self.request.headers.get("Content-Type", None) + if content_type and content_type != "application/json": + self.set_status(400, "request body is not JSON.") + self.finish() + + @property + def current_user_valid(self) -> bool: + """ + check if current user is a valid user for accessing authenticated resources + """ + if self.get_signed_cookie('user', None): + return True + + def get_current_user(self) -> typing.Any: + return self.get_signed_cookie('user', None) + + def set_access_control_allow_origin(self) -> None: + """ + For credential login, access control allow origin cannot be *, + See: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#examples_of_access_control_scenarios + """ + origin = self.request.headers.get("Origin") + if origin is not None and (origin in self.CORS or origin + '/' in self.CORS): + self.set_header("Access-Control-Allow-Origin", self.CORS) + + def set_access_control_allow_headers(self) -> None: + """ + For credential login, access control allow headers cannot be *. 
+ See: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#examples_of_access_control_scenarios + """ + headers = ", ".join(self.request.headers.keys()) + if self.request.headers.get("Access-Control-Request-Headers", None): + headers += ", " + self.request.headers["Access-Control-Request-Headers"] + self.set_header("Access-Control-Allow-Headers", headers) + + def set_default_headers(self) -> None: + self.set_access_control_allow_origin() + self.set_access_control_allow_headers() + self.set_header("Access-Control-Allow-Credentials", "true") + return super().set_default_headers() + + async def options(self): + self.set_status(200) + # self.set_access_control_allow_origin() + # self.set_access_control_allow_headers() + # self.set_header("Access-Control-Allow-Headers", "*") + # self.set_header("Access-Control-Allow-Origin", self.CORS) + self.set_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS") + self.finish() + + +class UsersHandler(SystemHostHandler): + + async def post(self): + self.set_status(200) + self.finish() + + async def get(self): + self.set_status(200) + self.finish() + + +class LoginHandler(SystemHostHandler): + + async def post(self): + self.check_headers() + try: + body = JSONSerializer.generic_loads(self.request.body) + email = body["email"] + password = body["password"] + async with global_session() as session: + stmt = select(LoginCredentials).filter_by(email=email) + data = await session.execute(stmt) + data = data.scalars().all() # type: typing.List[LoginCredentials] + if len(data) == 0: + self.set_status(403, "authentication failed - no username found") + else: + data = data[0] # type: LoginCredentials + ph = PasswordHasher(time_cost=500) + if ph.verify(data.password, password): + self.set_status(200) + self.set_signed_cookie("user", str(uuid.uuid4()), + secure=True, samesite="strict", domain="localhost") # note - CSF can occur + # domain=".app.localhost") + except Exception as ex: + self.set_status(500, f"authentication failed - 
{str(ex)}") + # self.set_header("Access-Control-Allow-Origin", self.CORS) + # self.set_header("Access-Control-Allow-Credentials", "true") + self.finish() + + async def options(self): + self.set_status(200) + # self.set_header("Access-Control-Allow-Headers", "*") + # self.set_header("Access-Control-Allow-Origin", self.CORS) + # self.set_header("Access-Control-Allow-Credentials", "true") + self.set_header("Access-Control-Allow-Methods", "POST, OPTIONS") + self.finish() + + +class AppSettingsHandler(SystemHostHandler): + + @for_authenticated_user + async def post(self): + self.check_headers() + try: + value = JSONSerializer.generic_loads(self.request.body["value"]) + async with global_session() as session, session.begin(): + session.add(AppSettings( + field = field, + value = {"value" : value} + ) + ) + await session.commit() + self.set_status(200) + except Exception as ex: + self.set_status(500, str(ex)) + # self.set_header("Access-Control-Allow-Origin", self.CORS) + self.finish() + + @for_authenticated_user + async def patch(self): + self.check_headers() + try: + value = JSONSerializer.generic_loads(self.request.body) + field = value["field"] + value = value["value"] + async with global_session() as session, session.begin(): + stmt = select(AppSettings).filter_by(field = field) + data = await session.execute(stmt) + setting : AppSettings = data.scalar() + setting.value = {"value" : value} + await session.commit() + self.set_status(200) + except Exception as ex: + self.set_status(500, str(ex)) + # self.set_header("Access-Control-Allow-Origin", self.CORS) + self.finish() + + @for_authenticated_user + async def get(self): + self.check_headers() + try: + async with global_session() as session: + stmt = select(AppSettings) + data = await session.execute(stmt) + serialized_data = JSONSerializer.generic_dumps({result[AppSettings.__name__].field : result[AppSettings.__name__].value + for result in data.mappings().all()}) + self.set_status(200) + 
self.set_header("Content-Type", "application/json") + self.write(serialized_data) + except Exception as ex: + self.set_status(500, str(ex)) + # self.set_header("Access-Control-Allow-Origin", self.CORS) + self.finish() + + +class DashboardsHandler(SystemHostHandler): + + @for_authenticated_user + async def post(self): + self.check_headers() + try: + data = JSONSerializer.generic_loads(self.request.body) + async with global_session() as session, session.begin(): + session.add(Dashboards(**data)) + await session.commit() + self.set_status(200) + except Exception as ex: + self.set_status(500, str(ex)) + # self.set_header("Access-Control-Allow-Origin", self.CORS) + # self.set_header("Access-Control-Allow-Credentials", "true") + self.finish() + + @for_authenticated_user + async def get(self): + self.check_headers() + try: + async with global_session() as session: + stmt = select(Dashboards) + data = await session.execute(stmt) + serialized_data = JSONSerializer.generic_dumps([result[Dashboards.__name__]._json() for result + in data.mappings().all()]) + self.set_status(200) + self.set_header("Content-Type", "application/json") + self.write(serialized_data) + except Exception as ex: + self.set_status(500, str(ex)) + # self.set_header("Access-Control-Allow-Origin", self.CORS) + # self.set_header("Access-Control-Allow-Credentials", "true") + self.finish() + + +class SubscribersHandler(SystemHostHandler): + + @for_authenticated_user + async def post(self): + if self.request.headers["Content-Type"] == "application/json": + self.set_status(200) + server = SubscribedHTTPServers(**JSONSerializer.generic_loads(self.request.body)) + async with global_session() as session, session.begin(): + session.add(server) + await session.commit() + self.finish() + + @for_authenticated_user + async def get(self): + self.set_status(200) + self.set_header("Content-Type", "application/json") + async with global_session() as session: + result = select(Server) + 
self.write(JSONSerializer.generic_dumps(result.scalars().all())) + + +class SubscriberHandler(SystemHostHandler): + + async def get(self): + pass + + + +class MainHandler(SystemHostHandler): + + async def get(self): + self.check_headers() + self.set_status(200) + # self.set_header("Access-Control-Allow-Origin", self.CORS) + # self.set_header("Access-Control-Allow-Credentials", "true") + self.write("

I am alive!!!

") + self.finish() + + +def create_system_host(db_config_file : typing.Optional[str] = None, ssl_context : typing.Optional[ssl.SSLContext] = None, + **server_settings) -> TornadoHTTP1Server: + """ + global function for creating system hosting server using a database configuration file, SSL context & certain + server settings. Currently supports only one server per process due to usage of some global variables. + + Parameters + ---------- + + """ + URL = BaseDB.create_URL(db_config_file, database='hololinked-host', use_dialect=False) + if not database_exists(URL): + try: + create_database(URL) + sync_engine = create_engine(URL) + HololinkedHostTableBase.metadata.create_all(sync_engine) + create_tables(sync_engine) + create_credentials(sync_engine) + sync_engine.dispose() + except Exception as ex: + sync_engine.dispose() + if URL.startswith("sqlite"): + os.remove(URL.split('/')[-1]) + else: + drop_database(URL) + raise ex from None + + global global_engine, global_session + URL = BaseDB.create_URL(db_config_file, database='hololinked-host', use_dialect=True) + global_engine = asyncio_ext.create_async_engine(URL, echo=True) + global_session = sessionmaker(global_engine, expire_on_commit=True, + class_=asyncio_ext.AsyncSession) # type: ignore + + CORS = server_settings.pop("CORS", []) + if not isinstance(CORS, (str, list)): + raise TypeError("CORS should be a list of strings or a string") + if isinstance(CORS, list): + CORS = ', '.join(CORS) + SystemHostHandler.CORS = CORS + + app = Application([ + (r"/", MainHandler), + (r"/users", UsersHandler), + (r"/dashboards", DashboardsHandler), + (r"/app-settings", AppSettingsHandler), + (r"/subscribers", SubscribersHandler), + # (r"/remote-objects", RemoteObjectsHandler), + (r"/login", LoginHandler) + ], + cookie_secret=base64.b64encode(os.urandom(32)).decode('utf-8'), + **server_settings) + + return TornadoHTTP1Server(app, ssl_options=ssl_context) + + + +def create_tables(engine): + with Session(engine) as session, 
session.begin(): + file = open(f"{os.path.dirname(os.path.abspath(__file__))}{os.sep}assets{os.sep}default_host_settings.json", 'r') + default_settings = JSONSerializer.generic_load(file) + for name, settings in default_settings.items(): + session.add(AppSettings( + field = name, + value = settings + )) + session.commit() + + +def create_credentials(sync_engine): + """ + create name and password for a new user in a database + """ + + print("Requested primary host seems to use a new database. Give username and password (not for database server, but for client logins from hololinked-portal) : ") + email = input("email-id (not collected anywhere else excepted your own database) : ") + while True: + password = getpass.getpass("password : ") + password_confirm = getpass.getpass("repeat-password : ") + if password != password_confirm: + print("password & repeat password not the same. Try again.") + continue + with Session(sync_engine) as session, session.begin(): + ph = PasswordHasher(time_cost=500) + session.add(LoginCredentials(email=email, password=ph.hash(password))) + session.commit() + return + raise RuntimeError("password not created, aborting database creation.") + + + +__all__ = ['create_system_host'] \ No newline at end of file From 530ae9dadf160b0e27c20e1dea96f3d9da3ba78a Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 3 Mar 2024 11:36:26 +0100 Subject: [PATCH 048/167] bug fixes & updates --- hololinked/server/__init__.py | 1 - hololinked/server/api_platform_utils.py | 2 +- hololinked/server/handlers.py | 5 +- hololinked/server/host_utilities.py | 399 ------------------------ hololinked/server/utils.py | 2 +- 5 files changed, 5 insertions(+), 404 deletions(-) delete mode 100644 hololinked/server/host_utilities.py diff --git a/hololinked/server/__init__.py b/hololinked/server/__init__.py index 735bb9b..9cb4887 100644 --- a/hololinked/server/__init__.py +++ b/hololinked/server/__init__.py @@ -10,6 
+10,5 @@ from .remote_object import * from .eventloop import * from .HTTPServer import * -from .host_utilities import * diff --git a/hololinked/server/api_platform_utils.py b/hololinked/server/api_platform_utils.py index 816c748..da3446b 100644 --- a/hololinked/server/api_platform_utils.py +++ b/hololinked/server/api_platform_utils.py @@ -23,7 +23,7 @@ def json(self): def json_file(self, filename = 'collection.json'): with open(filename, 'w') as file: - JSONSerializer.general_dump(self.json(), file) + JSONSerializer.generic_dump(self.json(), file) @dataclass class postman_collection_info: diff --git a/hololinked/server/handlers.py b/hololinked/server/handlers.py index 883c3ec..996f606 100644 --- a/hololinked/server/handlers.py +++ b/hololinked/server/handlers.py @@ -5,9 +5,10 @@ from tornado.web import RequestHandler, StaticFileHandler, Application from tornado.iostream import StreamClosedError -from .constants import CommonInstructions +from .constants import CommonInstructions, ServerMessageData from .serializers import JSONSerializer from .zmq_message_brokers import AsyncZMQClient, MessageMappedZMQClientPool, EventConsumer +from .webserver_utils import * from .utils import current_datetime_ms_str from .data_classes import HTTPResource, ServerSentEvent @@ -243,7 +244,7 @@ async def connect_to_remote_object(self, clients : typing.List[AsyncZMQClient]): _, _, _, _, _, reply = await client.async_execute( CommonInstructions.http_resource_read(client.server_instance_name), raise_client_side_exception=True) - update_resources(resources, reply["returnValue"]) # type: ignore + resources.update(reply[ServerMessageData.RETURN_VALUE]) # _, _, _, _, _, reply = await client.read_attribute('/'+client.server_instance_name + '/object-info', raise_client_side_exception = True) # remote_object_info.append(RemoteObjectDB.RemoteObjectInfo(**reply["returnValue"])) # Should raise an exception if returnValue key is not found for some reason. 
diff --git a/hololinked/server/host_utilities.py b/hololinked/server/host_utilities.py deleted file mode 100644 index 2a3b605..0000000 --- a/hololinked/server/host_utilities.py +++ /dev/null @@ -1,399 +0,0 @@ -import secrets -import os -import base64 -import socket -import json -import asyncio -import ssl -import typing -import getpass -from dataclasses import asdict, field -from argon2 import PasswordHasher - -from sqlalchemy import Engine, Integer, String, JSON, ARRAY, Boolean, BLOB -from sqlalchemy import select, create_engine -from sqlalchemy.orm import Session, sessionmaker, Mapped, mapped_column, DeclarativeBase, MappedAsDataclass -from sqlalchemy.ext import asyncio as asyncio_ext -from sqlalchemy_utils import database_exists, create_database, drop_database -from tornado.web import RequestHandler, Application, authenticated -from tornado.httpserver import HTTPServer as TornadoHTTP1Server - -from .constants import JSONSerializable -from .serializers import JSONSerializer -from .database import BaseDB - -SERVER_INSTANCE_NAME = 'server-util' -CLIENT_HOST_INSTANCE_NAME = 'dashboard-util' - - -# /* -# We want to be able to do the following - -# 1) Add and remove server - -# 2) Each server -# - can be designated as host -# - allows DB operations specific to the client -# - only one such server can exist -# - or as normal instrument server -# - create new eventloop -# - create new device -# - create new HTTP servers -# - raw input output -# - have GUI JSON -# */ - - - -global_engine : typing.Optional[Engine] = None -global_session : typing.Optional[Session] = None - - - -class HololinkedHostTableBase(DeclarativeBase): - pass - -class Dashboards(HololinkedHostTableBase, MappedAsDataclass): - __tablename__ = "dashboards" - - name : Mapped[str] = mapped_column(String(1024), primary_key=True) - URL : Mapped[str] = mapped_column(String(1024), unique=True) - description : Mapped[str] = mapped_column(String(16384)) - json_specfication : Mapped[typing.Dict[str, typing.Any]] 
= mapped_column(JSON) - - def json(self): - return asdict(self) - -class AppSettings(HololinkedHostTableBase, MappedAsDataclass): - __tablename__ = "appsettings" - - field : Mapped[str] = mapped_column(String(8192), primary_key=True) - value : Mapped[typing.Dict[str, typing.Any]] = mapped_column(JSON) - - def json(self): - return asdict(self) - -class LoginCredentials(HololinkedHostTableBase, MappedAsDataclass): - __tablename__ = "login_credentials" - - email : Mapped[str] = mapped_column(String(1024), primary_key=True) - password : Mapped[str] = mapped_column(String(1024), unique=True) - -class Server(HololinkedHostTableBase, MappedAsDataclass): - __tablename__ = "http_servers" - - hostname : Mapped[str] = mapped_column(String, primary_key=True) - type : Mapped[str] = mapped_column(String) - port : Mapped[int] = mapped_column(Integer) - IPAddress : Mapped[str] = mapped_column(String) - remote_objects : Mapped[typing.List[str]] = mapped_column(ARRAY(String)) - https : Mapped[bool] = mapped_column(Boolean) - qualifiedIP : Mapped[str] = field(init = False) - - def __post_init__(self): - self.qualifiedIP = '{}:{}'.format(self.hostname, self.port) - - def json(self): - return asdict(self) - -class RemoteObjectInformation(HololinkedHostTableBase, MappedAsDataclass): - __tablename__ = "remote_objects" - - instance_name : Mapped[str] = mapped_column(String, primary_key=True) - class_name : Mapped[str] = mapped_column(String) - script : Mapped[str] = mapped_column(String) - kwargs : Mapped[JSONSerializable] = mapped_column(JSON) - eventloop_instance_name : Mapped[str] = mapped_column(String) - http_server : Mapped[str] = mapped_column(String) - level : Mapped[int] = mapped_column(Integer) - level_type : Mapped[str] = mapped_column(String) - - def json(self): - return asdict(self) - - -def for_authenticated_user(method): - def authenticated_method(self : RequestHandler): - if not self.current_user: - self.set_status(403) - self.set_header("Access-Control-Allow-Origin", 
"https://localhost:5173") - self.finish() - return - else: - print("current user is : ", self.current_user) - return method(self) - return authenticated_method - - -class SystemHostHandler(RequestHandler): - - def check_headers(self): - content_type = self.request.headers.get("Content-Type", None) - if content_type and content_type != "application/json": - self.set_status(500) - self.write({ "error" : "request body is not JSON." }) - self.finish() - - def get_current_user(self) -> typing.Any: - return True - return self.get_signed_cookie('user') - - def set_default_headers(self) -> None: - return super().set_default_headers() - - async def options(self): - self.set_status(200) - self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") - self.set_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS") - self.set_header("Access-Control-Allow-Headers", "*") - self.finish() - - -class UsersHandler(SystemHostHandler): - - async def post(self): - self.set_status(200) - self.finish() - - async def get(self): - self.set_status(200) - self.finish() - - -class LoginHandler(SystemHostHandler): - - async def post(self): - self.check_headers() - try: - body = JSONSerializer.generic_loads(self.request.body) - email = body["email"] - password = body["password"] - async with global_session() as session: - stmt = select(LoginCredentials).filter_by(email=email) - data = await session.execute(stmt) - data : LoginCredentials = data.scalars().all() - if len(data) == 0: - self.set_status(403, "authentication failed") - self.write({"reason" : "no username found"}) - else: - ph = PasswordHasher(time_cost=500) - if ph.verify(data[0].password, password): - self.set_status(200) - self.set_signed_cookie("user", email) - else: - self.set_status(403, "authentication failed") - self.write({"reason" : ""}) - except Exception as ex: - self.set_status(500, str(ex)) - self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") - self.finish() - - async def 
options(self): - self.set_status(200) - self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") - self.set_header("Access-Control-Allow-Methods", "POST, OPTIONS") - self.set_header("Access-Control-Allow-Headers", "*") - self.set_header("Access-Control-Allow-Credentials", True) - self.finish() - - -class AppSettingsHandler(SystemHostHandler): - - @for_authenticated_user - async def post(self): - self.check_headers() - try: - value = JSONSerializer.generic_loads(self.request.body["value"]) - async with global_session() as session, session.begin(): - session.add(AppSettings( - field = field, - value = {"value" : value} - ) - ) - await session.commit() - self.set_status(200) - except Exception as ex: - self.set_status(500) - self.finish() - - @for_authenticated_user - async def patch(self): - self.check_headers() - try: - value = JSONSerializer.generic_loads(self.request.body) - field = value["field"] - value = value["value"] - async with global_session() as session, session.begin(): - stmt = select(AppSettings).filter_by(field = field) - data = await session.execute(stmt) - setting : AppSettings = data.scalar() - setting.value = {"value" : value} - await session.commit() - self.set_status(200) - except Exception as ex: - self.set_status(500) - self.finish() - - @for_authenticated_user - async def get(self): - self.check_headers() - try: - async with global_session() as session: - stmt = select(AppSettings) - data = await session.execute(stmt) - serialized_data = JSONSerializer.generic_dumps({result[AppSettings.__name__].field : result[AppSettings.__name__].value["value"] - for result in data.mappings().all()}) - self.set_status(200) - self.set_header("Content-Type", "application/json") - self.write(serialized_data) - except Exception as ex: - self.set_status(500, str(ex)) - self.finish() - - -class DashboardsHandler(SystemHostHandler): - - @for_authenticated_user - async def post(self): - self.check_headers() - try: - data = 
JSONSerializer.generic_loads(self.request.body) - async with global_session() as session, session.begin(): - session.add(Dashboards(**data)) - await session.commit() - self.set_status(200) - self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") - except Exception as ex: - self.set_status(500, str(ex)) - self.finish() - - @for_authenticated_user - async def get(self): - self.check_headers() - try: - async with global_session() as session: - stmt = select(Dashboards) - data = await session.execute(stmt) - serialized_data = JSONSerializer.generic_dumps([result[Dashboards.__name__]._json() for result - in data.mappings().all()]) - self.set_status(200) - self.set_header("Content-Type", "application/json") - self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") - self.write(serialized_data) - except Exception as ex: - self.set_status(500, str(ex)) - self.finish() - - -class SubscribersHandler(SystemHostHandler): - - @for_authenticated_user - async def post(self): - if self.request.headers["Content-Type"] == "application/json": - self.set_status(200) - server = SubscribedHTTPServers(**JSONSerializer.generic_loads(self.request.body)) - async with global_session() as session, session.begin(): - session.add(server) - await session.commit() - self.finish() - - @for_authenticated_user - async def get(self): - self.set_status(200) - self.set_header("Content-Type", "application/json") - async with global_session() as session: - result = select(Server) - self.write(JSONSerializer.generic_dumps(result.scalars().all())) - - -class SubscriberHandler(SystemHostHandler): - - async def get(self): - pass - - - -class MainHandler(SystemHostHandler): - - async def get(self): - self.check_headers() - self.set_status(200) - self.set_header("Access-Control-Allow-Origin", "https://localhost:5173") - self.write("

I am alive!!!

") - self.finish() - - -def create_system_host(config_file : typing.Optional[str] = None, ssl_context : typing.Optional[ssl.SSLContext] = None, - **server_settings) -> TornadoHTTP1Server: - URL = BaseDB.create_URL(config_file, database='hololinked-host', use_dialect=False) - if not database_exists(URL): - try: - create_database(URL) - sync_engine = create_engine(URL) - HololinkedHostTableBase.metadata.create_all(sync_engine) - create_tables(sync_engine) - create_credentials(sync_engine) - sync_engine.dispose() - except Exception as ex: - sync_engine.dispose() - if URL.startswith("sqlite"): - os.remove(URL.split('/')[-1]) - else: - drop_database(URL) - raise ex from None - URL = BaseDB.create_URL(config_file, database='hololinked-host', use_dialect=True) - global global_engine, global_session - global_engine = asyncio_ext.create_async_engine(URL, echo=True) - global_session = sessionmaker(global_engine, expire_on_commit=True, - class_=asyncio_ext.AsyncSession) # type: ignore - - app = Application([ - (r"/", MainHandler), - (r"/users", UsersHandler), - (r"/dashboards", DashboardsHandler), - (r"/settings", AppSettingsHandler), - (r"/subscribers", SubscribersHandler), - # (r"/remote-objects", RemoteObjectsHandler), - (r"/login", LoginHandler) - ], cookie_secret=base64.b64encode(os.urandom(32)).decode('utf-8'), - **server_settings) - - return TornadoHTTP1Server(app, ssl_options=ssl_context) - - - -def create_tables(engine): - with Session(engine) as session, session.begin(): - file = open(f"{os.path.dirname(os.path.abspath(__file__))}{os.sep}assets{os.sep}default_host_settings.json", 'r') - default_settings = JSONSerializer.generic_load(file) - for name, settings in default_settings.items(): - session.add(AppSettings( - field = name, - value = settings - )) - session.commit() - - -def create_credentials(sync_engine): - """ - create name and password for a new user in a database - """ - - print("Requested primary host seems to use a new database. 
Give username and password (not for database server, but for client logins from hololinked-portal) : ") - email = input("email-id (not collected anywhere else excepted your own database) : ") - while True: - password = getpass.getpass("password : ") - password_confirm = getpass.getpass("repeat-password : ") - if password != password_confirm: - print("password & repeat password not the same. Try again.") - continue - with Session(sync_engine) as session, session.begin(): - ph = PasswordHasher(time_cost=500) - session.add(LoginCredentials(email=email, password=ph.hash(password))) - session.commit() - return - raise RuntimeError("password not created, aborting database creation.") - - - -__all__ = ['create_system_host'] \ No newline at end of file diff --git a/hololinked/server/utils.py b/hololinked/server/utils.py index 019d912..3886a41 100644 --- a/hololinked/server/utils.py +++ b/hololinked/server/utils.py @@ -22,7 +22,7 @@ def copy_parameters(src : str = 'D:/onedrive/desktop/dashboard/scada/scadapy/sca 'from .remote_parameter import RemoteParameter\n', 'from .constants import HTTP, PROXY, USE_OBJECT_NAME, GET, PUT'] - def fetch_line() -> str: + def fetch_line() -> typing.Generator[str]: with open(src, 'r') as file: oldlines = file.readlines() for line in oldlines: From b02e74a7e9211128032d62a74a1b2f45b5ce4d20 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 3 Mar 2024 19:02:32 +0100 Subject: [PATCH 049/167] moved system handlers also to separate file --- hololinked/param/__init__.py | 2 +- hololinked/server/config.py | 1 + hololinked/server/database.py | 56 ++- hololinked/server/eventloop.py | 4 +- hololinked/server/utils.py | 2 +- .../assets/system_host_api.yml | 0 hololinked/system_host/handlers.py | 342 +++++++++++++++++ hololinked/system_host/models.py | 18 + hololinked/system_host/server.py | 359 +++--------------- 9 files changed, 450 insertions(+), 334 deletions(-) rename hololinked/{server => 
system_host}/assets/system_host_api.yml (100%) create mode 100644 hololinked/system_host/handlers.py diff --git a/hololinked/param/__init__.py b/hololinked/param/__init__.py index 1fff831..ea43c62 100644 --- a/hololinked/param/__init__.py +++ b/hololinked/param/__init__.py @@ -53,7 +53,7 @@ """ from . import exceptions from .parameterized import (Parameterized, ParameterizedFunction, ParamOverrides, Parameter, - depends_on, instance_descriptor, discard_events, edit_constant, ) + depends_on, instance_descriptor, discard_events, edit_constant) from .logger import get_logger, logging_level, VERBOSE diff --git a/hololinked/server/config.py b/hololinked/server/config.py index e49ff97..8441863 100644 --- a/hololinked/server/config.py +++ b/hololinked/server/config.py @@ -58,6 +58,7 @@ def reset_variables(self, use_environment : bool = False): self.TEMP_DIR = f"{tempfile.gettempdir()}{os.sep}hololinked" self.TCP_SOCKET_SEARCH_START_PORT = 60000 self.TCP_SOCKET_SEARCH_END_PORT = 65535 + self.PWD_HASHER_TIME_COST = 15 # qualname is not defined if use_environment: diff --git a/hololinked/server/database.py b/hololinked/server/database.py index 893432d..323cd14 100644 --- a/hololinked/server/database.py +++ b/hololinked/server/database.py @@ -39,36 +39,54 @@ def __init__(self, instance : Parameterized, serializer : typing.Optional[BaseSe self.instance_name = instance.instance_name self.serializer = serializer self.URL = self.create_URL(config_file) - + @classmethod - def create_URL(cls, file_name : str = None, database : typing.Optional[str] = None, - use_dialect : typing.Optional[bool] = False) -> str: - if not file_name: + def load_conf(cls, config_file : str) -> typing.Dict[str, typing.Any]: + if not config_file: conf = {} - elif file_name.endswith('.json'): - file = open(file_name, 'r') + elif config_file.endswith('.json'): + file = open(config_file, 'r') conf = JSONSerializer.generic_load(file) else: - raise ValueError("config files of extension - {} expected, given file 
name {}".format(["json"], file_name)) - + raise ValueError("config files of extension - {} expected, given file name {}".format(["json"], config_file)) + return conf + + @classmethod + def create_postgres_URL(cls, config_file : str = None, database : typing.Optional[str] = None, + use_dialect : typing.Optional[bool] = False) -> str: + conf = BaseDB.load_conf(config_file) server = conf.get('server', None) - if not server: - file = conf.get('file', f"{database}.db" if not database.endswith('.db') else database) - return f"sqlite+pysqlite:///{file}" - if use_dialect: - dialect = conf.get('dialect', None) - else: - dialect = None database = conf.get('database', 'hololinked') host = conf.get("host", 'localhost') port = conf.get("port", 5432) user = conf.get('user', 'postgres') password = conf.get('password', '') - if dialect: - return f"{server}+{dialect}://{user}:{password}@{host}:{port}/{database}" - else: - return f"{server}://{user}:{password}@{host}:{port}/{database}" + if use_dialect: + dialect = conf.get('dialect', None) + if dialect: + return f"{server}+{dialect}://{user}:{password}@{host}:{port}/{database}" + return f"{server}://{user}:{password}@{host}:{port}/{database}" + @classmethod + def create_sqlite_URL(cls, database : typing.Optional[str] = None, in_memory : bool = False, + config_file : typing.Optional[str] = None) -> str: + if config_file: + conf = BaseDB.load_conf(config_file) + elif not database and not in_memory: + raise ValueError("either a database name or a configuration file must be specified for sqlite") + else: + conf = {} + in_memory = conf.get('inmemory', in_memory) + dialect = conf.get('dialect', 'pysqlite') + if not in_memory: + database = conf.get('database', database) + file = conf.get('file', f"{database}.db" if not database.endswith('.db') else database) + return f"sqlite+{dialect}:///{file}" + else: + return f"sqlite+{dialect}:///:memory:" + + + class BaseAsyncDB(BaseDB): """ Base class for an async database engine, implements 
configuration file reader, diff --git a/hololinked/server/eventloop.py b/hololinked/server/eventloop.py index ea255fe..4aacd2b 100644 --- a/hololinked/server/eventloop.py +++ b/hololinked/server/eventloop.py @@ -6,7 +6,7 @@ import typing import threading -from .utils import unique_id, wrap_text +from .utils import uuid4_in_bytes, wrap_text from .constants import * from .remote_parameters import TypedDict from .exceptions import * @@ -102,7 +102,7 @@ def servers(self): @post('/remote-objects') def import_remote_object(self, file_name : str, object_name : str): consumer = self._import_remote_object_module(file_name, object_name) - id = unique_id() + id = uuid4_in_bytes() self.uninstantiated_remote_objects[id] = consumer return dict( id = id, diff --git a/hololinked/server/utils.py b/hololinked/server/utils.py index 3886a41..7abd09e 100644 --- a/hololinked/server/utils.py +++ b/hololinked/server/utils.py @@ -120,7 +120,7 @@ def process_current_line(line : str): file.writelines(newlines) -def unique_id() -> bytes: +def uuid4_in_bytes() -> bytes: """ uuid.uuid4() in bytes """ diff --git a/hololinked/server/assets/system_host_api.yml b/hololinked/system_host/assets/system_host_api.yml similarity index 100% rename from hololinked/server/assets/system_host_api.yml rename to hololinked/system_host/assets/system_host_api.yml diff --git a/hololinked/system_host/handlers.py b/hololinked/system_host/handlers.py new file mode 100644 index 0000000..778956a --- /dev/null +++ b/hololinked/system_host/handlers.py @@ -0,0 +1,342 @@ +import typing +import inspect +from argon2 import PasswordHasher + +from sqlalchemy import select, delete, update +from sqlalchemy.orm import Session +from sqlalchemy.ext import asyncio as asyncio_ext +from tornado.web import RequestHandler, HTTPError, authenticated + +from .models import * +from ..server.serializers import JSONSerializer +from ..server.config import global_config +from ..server.utils import uuid4_in_bytes + + + +def 
for_authenticated_user(method): + async def authenticated_method(self : "SystemHostHandler") -> None: + if self.current_user_valid: + return await method(self) + self.set_status(403) + self.set_custom_default_headers() + self.finish() + return + return authenticated_method + + +class SystemHostHandler(RequestHandler): + """ + Base Request Handler for all requests directed to system host server. Implements CORS & credential checks. + """ + + def initialize(self, CORS : typing.List[str], disk_session : Session, mem_session : asyncio_ext.AsyncSession) -> None: + self.CORS = CORS + self.disk_session = disk_session + self.mem_session = mem_session + + def check_headers(self): + """ + check suitable values for headers before processing the request + """ + content_type = self.request.headers.get("Content-Type", None) + if content_type and content_type != "application/json": + self.set_status(400, "request body is not JSON.") + self.finish() + + @property + def current_user_valid(self) -> bool: + """ + check if current user is a valid user for accessing authenticated resources + """ + user = self.get_signed_cookie('user', None) + if user is None: + return False + with self.mem_session() as session: + session : Session + stmt = select(UserSession).filter_by(session_key=user) + data = session.execute(stmt) + data = data.scalars().all() + if len(data) == 0: + return False + if len(data) > 1: + raise HTTPError("session ID not unique, internal logic error - contact developers (https://github.com/VigneshVSV/hololinked/issues)") + data = data[0] + if (data.session_key == user and data.origin == self.request.headers.get("Origin") and + data.user_agent == self.request.headers.get("User-Agent") and data.remote_IP == self.request.remote_ip): + return True + + def get_current_user(self) -> typing.Any: + """ + gets the current logged in user - call after ``current_user_valid`` + """ + return self.get_signed_cookie('user', None) + + def set_access_control_allow_origin(self) -> None: + 
""" + For credential login, access control allow origin cannot be *, + See: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#examples_of_access_control_scenarios + """ + origin = self.request.headers.get("Origin") + if origin is not None and (origin in self.CORS or origin + '/' in self.CORS): + self.set_header("Access-Control-Allow-Origin", origin) + + def set_access_control_allow_headers(self) -> None: + """ + For credential login, access control allow headers cannot be *. + See: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#examples_of_access_control_scenarios + """ + headers = ", ".join(self.request.headers.keys()) + if self.request.headers.get("Access-Control-Request-Headers", None): + headers += ", " + self.request.headers["Access-Control-Request-Headers"] + self.set_header("Access-Control-Allow-Headers", headers) + + def set_custom_default_headers(self) -> None: + """ + sets access control allow origin, allow headers and allow credentials + """ + self.set_access_control_allow_origin() + self.set_access_control_allow_headers() + self.set_header("Access-Control-Allow-Credentials", "true") + + async def options(self): + self.set_status(200) + self.set_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS") + self.set_custom_default_headers() + self.finish() + + +class UsersHandler(SystemHostHandler): + + async def post(self): + self.set_status(200) + self.finish() + + async def get(self): + self.set_status(200) + self.finish() + + +class LoginHandler(SystemHostHandler): + """ + Performs login and supplies a signed cookie for session + """ + async def post(self): + self.check_headers() + try: + body = JSONSerializer.generic_loads(self.request.body) + email = body["email"] + password = body["password"] + async with self.disk_session() as session: + session : asyncio_ext.AsyncSession + stmt = select(LoginCredentials).filter_by(email=email) + data = await session.execute(stmt) + data = data.scalars().all() # type: typing.List[LoginCredentials] + 
if len(data) == 0: + self.set_status(403, "authentication failed - no username found") + else: + data = data[0] # type: LoginCredentials + ph = PasswordHasher(time_cost=global_config.PWD_HASHER_TIME_COST) + if ph.verify(data.password, password): + self.set_status(200) + cookie_value = uuid4_in_bytes() + self.set_signed_cookie("user", cookie_value, httponly=True, + secure=True, samesite="strict", domain="localhost", + expires_days=None) + with self.mem_session() as session: + session : Session + session.add(UserSession(email=email, session_key=cookie_value, + origin=self.request.headers.get("Origin"), + user_agent=self.request.headers.get("User-Agent"), + remote_IP=self.request.remote_ip + ) + ) + session.commit() + except Exception as ex: + self.set_status(500, f"authentication failed - {str(ex)}") + self.set_custom_default_headers() + self.finish() + + async def options(self): + self.set_status(200) + self.set_header("Access-Control-Allow-Methods", "POST, OPTIONS") + self.set_custom_default_headers() + self.finish() + + +class LogoutHandler(SystemHostHandler): + """ + Performs login and supplies a signed cookie for session + """ + async def post(self): + self.check_headers() + try: + if not self.current_user_valid: + self.set_status(409, "not a valid user to logout") + else: + user = self.get_current_user() + with self.mem_session() as session: + session : Session + stmt = delete(UserSession).filter_by(session_key=user) + result = session.execute(stmt) + if result.rowcount != 1: + self.set_status(500, "found user but could not logout") # never comes here + session.commit() + self.set_status(200, "logged out") + self.clear_cookie("user") + except Exception as ex: + self.set_status(500, f"logout failed - {str(ex)}") + self.set_custom_default_headers() + self.finish() + + async def options(self): + self.set_status(200) + self.set_header("Access-Control-Allow-Methods", "POST, OPTIONS") + self.set_custom_default_headers() + self.finish() + + + + +class 
AppSettingsHandler(SystemHostHandler): + + @for_authenticated_user + async def post(self): + self.check_headers() + try: + value = JSONSerializer.generic_loads(self.request.body["value"]) + async with self.disk_session() as session: + session : asyncio_ext.AsyncSession + session.add(AppSettings( + field = field, + value = {"value" : value} + )) + await session.commit() + self.set_status(200) + except Exception as ex: + self.set_status(500, str(ex)) + self.set_custom_default_headers() + self.finish() + + @for_authenticated_user + async def patch(self): + self.check_headers() + try: + value = JSONSerializer.generic_loads(self.request.body) + field = value["field"] + value = value["value"] + async with self.disk_session() as session, session.begin(): + stmt = select(AppSettings).filter_by(field = field) + data = await session.execute(stmt) + setting : AppSettings = data.scalar() + setting.value = {"value" : value} + await session.commit() + self.set_status(200) + except Exception as ex: + self.set_status(500, str(ex)) + self.set_custom_default_headers() + self.finish() + + @for_authenticated_user + async def get(self): + self.check_headers() + try: + async with self.disk_session() as session: + session : asyncio_ext.AsyncSession + stmt = select(AppSettings) + data = await session.execute(stmt) + serialized_data = JSONSerializer.generic_dumps({ + result[AppSettings.__name__].field : result[AppSettings.__name__].value + for result in data.mappings().all()}) + self.set_status(200) + self.set_header("Content-Type", "application/json") + self.write(serialized_data) + except Exception as ex: + self.set_status(500, str(ex)) + self.set_custom_default_headers() + self.finish() + + +class DashboardsHandler(SystemHostHandler): + + @for_authenticated_user + async def post(self): + self.check_headers() + try: + data = JSONSerializer.generic_loads(self.request.body) + async with self.disk_session() as session, session.begin(): + session.add(Dashboards(**data)) + await 
session.commit() + self.set_status(200) + except Exception as ex: + self.set_status(500, str(ex)) + self.set_custom_default_headers() + self.finish() + + @for_authenticated_user + async def get(self): + self.check_headers() + try: + async with self.disk_session() as session: + stmt = select(Dashboards) + data = await session.execute(stmt) + serialized_data = JSONSerializer.generic_dumps([result[Dashboards.__name__]._json() for result + in data.mappings().all()]) + self.set_status(200) + self.set_header("Content-Type", "application/json") + self.write(serialized_data) + except Exception as ex: + self.set_status(500, str(ex)) + self.set_custom_default_headers() + self.finish() + + +class SubscribersHandler(SystemHostHandler): + + @for_authenticated_user + async def post(self): + if self.request.headers["Content-Type"] == "application/json": + self.set_status(200) + server = SubscribedHTTPServers(**JSONSerializer.generic_loads(self.request.body)) + async with self.disk_session() as session, session.begin(): + session.add(server) + await session.commit() + self.finish() + + @for_authenticated_user + async def get(self): + self.set_status(200) + self.set_header("Content-Type", "application/json") + async with self.disk_session() as session: + result = select(Server) + self.write(JSONSerializer.generic_dumps(result.scalars().all())) + + +class SubscriberHandler(SystemHostHandler): + + async def get(self): + pass + + + +class MainHandler(SystemHostHandler): + + async def get(self): + self.check_headers() + self.set_status(200) + self.set_custom_default_headers() + self.write("

I am alive!!!

") + self.finish() + + + +__all__ = [ + SystemHostHandler.__name__, + UsersHandler.__name__, + AppSettingsHandler.__name__, + LoginHandler.__name__, + DashboardsHandler.__name__, + SubscribersHandler.__name__, + MainHandler.__name__, + LogoutHandler.__name__ +] \ No newline at end of file diff --git a/hololinked/system_host/models.py b/hololinked/system_host/models.py index e804606..07438c0 100644 --- a/hololinked/system_host/models.py +++ b/hololinked/system_host/models.py @@ -6,6 +6,7 @@ from ..server.constants import JSONSerializable + class HololinkedHostTableBase(DeclarativeBase): pass @@ -68,11 +69,28 @@ def json(self): return asdict(self) + +class HololinkedHostInMemoryTableBase(DeclarativeBase): + pass + +class UserSession(HololinkedHostInMemoryTableBase, MappedAsDataclass): + __tablename__ = "user_sessions" + + email : Mapped[str] = mapped_column(String) + session_key : Mapped[BLOB] = mapped_column(BLOB, primary_key=True) + origin : Mapped[str] = mapped_column(String) + user_agent : Mapped[str] = mapped_column(String) + remote_IP : Mapped[str] = mapped_column(String) + + + __all__ = [ HololinkedHostTableBase.__name__, + HololinkedHostInMemoryTableBase.__name__, Dashboards.__name__, AppSettings.__name__, LoginCredentials.__name__, Server.__name__, RemoteObjectInformation.__name__, + UserSession.__name__ ] \ No newline at end of file diff --git a/hololinked/system_host/server.py b/hololinked/system_host/server.py index 5d9529e..35cb42d 100644 --- a/hololinked/system_host/server.py +++ b/hololinked/system_host/server.py @@ -7,299 +7,20 @@ import ssl import typing import getpass -from dataclasses import asdict, field -import uuid from argon2 import PasswordHasher -from sqlalchemy import Engine -from sqlalchemy import select, create_engine +from sqlalchemy import create_engine from sqlalchemy.orm import Session, sessionmaker from sqlalchemy.ext import asyncio as asyncio_ext from sqlalchemy_utils import database_exists, create_database, drop_database -from 
tornado.web import RequestHandler, Application +from tornado.web import Application from tornado.httpserver import HTTPServer as TornadoHTTP1Server from ..server.serializers import JSONSerializer from ..server.database import BaseDB +from ..server.config import global_config from .models import * - -# /* -# We want to be able to do the following - -# 1) Add and remove server - -# 2) Each server -# - can be designated as host -# - allows DB operations specific to the client -# - only one such server can exist -# - or as normal instrument server -# - create new eventloop -# - create new device -# - create new HTTP servers -# - raw input output -# - have GUI JSON -# */ - - - -global_engine : typing.Optional[Engine] = None -global_session : typing.Optional[Session] = None - - -def for_authenticated_user(method): - def authenticated_method(self : "SystemHostHandler"): - if not self.current_user_valid: - self.set_status(403) - self.set_header("Access-Control-Allow-Origin", self.CORS) - self.finish() - return - else: - print("current user is : ", self.current_user) - return method(self) - return authenticated_method - - -class SystemHostHandler(RequestHandler): - """ - Base Request Handler for all requests directed to system host server. Implements - CORS & credential checks. 
- """ - - CORS : typing.List[str] - - def check_headers(self): - """ - check suitable values for headers before processing the request - """ - content_type = self.request.headers.get("Content-Type", None) - if content_type and content_type != "application/json": - self.set_status(400, "request body is not JSON.") - self.finish() - - @property - def current_user_valid(self) -> bool: - """ - check if current user is a valid user for accessing authenticated resources - """ - if self.get_signed_cookie('user', None): - return True - - def get_current_user(self) -> typing.Any: - return self.get_signed_cookie('user', None) - - def set_access_control_allow_origin(self) -> None: - """ - For credential login, access control allow origin cannot be *, - See: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#examples_of_access_control_scenarios - """ - origin = self.request.headers.get("Origin") - if origin is not None and (origin in self.CORS or origin + '/' in self.CORS): - self.set_header("Access-Control-Allow-Origin", self.CORS) - - def set_access_control_allow_headers(self) -> None: - """ - For credential login, access control allow headers cannot be *. 
- See: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#examples_of_access_control_scenarios - """ - headers = ", ".join(self.request.headers.keys()) - if self.request.headers.get("Access-Control-Request-Headers", None): - headers += ", " + self.request.headers["Access-Control-Request-Headers"] - self.set_header("Access-Control-Allow-Headers", headers) - - def set_default_headers(self) -> None: - self.set_access_control_allow_origin() - self.set_access_control_allow_headers() - self.set_header("Access-Control-Allow-Credentials", "true") - return super().set_default_headers() - - async def options(self): - self.set_status(200) - # self.set_access_control_allow_origin() - # self.set_access_control_allow_headers() - # self.set_header("Access-Control-Allow-Headers", "*") - # self.set_header("Access-Control-Allow-Origin", self.CORS) - self.set_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS") - self.finish() - - -class UsersHandler(SystemHostHandler): - - async def post(self): - self.set_status(200) - self.finish() - - async def get(self): - self.set_status(200) - self.finish() - - -class LoginHandler(SystemHostHandler): - - async def post(self): - self.check_headers() - try: - body = JSONSerializer.generic_loads(self.request.body) - email = body["email"] - password = body["password"] - async with global_session() as session: - stmt = select(LoginCredentials).filter_by(email=email) - data = await session.execute(stmt) - data = data.scalars().all() # type: typing.List[LoginCredentials] - if len(data) == 0: - self.set_status(403, "authentication failed - no username found") - else: - data = data[0] # type: LoginCredentials - ph = PasswordHasher(time_cost=500) - if ph.verify(data.password, password): - self.set_status(200) - self.set_signed_cookie("user", str(uuid.uuid4()), - secure=True, samesite="strict", domain="localhost") # note - CSF can occur - # domain=".app.localhost") - except Exception as ex: - self.set_status(500, f"authentication failed - 
{str(ex)}") - # self.set_header("Access-Control-Allow-Origin", self.CORS) - # self.set_header("Access-Control-Allow-Credentials", "true") - self.finish() - - async def options(self): - self.set_status(200) - # self.set_header("Access-Control-Allow-Headers", "*") - # self.set_header("Access-Control-Allow-Origin", self.CORS) - # self.set_header("Access-Control-Allow-Credentials", "true") - self.set_header("Access-Control-Allow-Methods", "POST, OPTIONS") - self.finish() - - -class AppSettingsHandler(SystemHostHandler): - - @for_authenticated_user - async def post(self): - self.check_headers() - try: - value = JSONSerializer.generic_loads(self.request.body["value"]) - async with global_session() as session, session.begin(): - session.add(AppSettings( - field = field, - value = {"value" : value} - ) - ) - await session.commit() - self.set_status(200) - except Exception as ex: - self.set_status(500, str(ex)) - # self.set_header("Access-Control-Allow-Origin", self.CORS) - self.finish() - - @for_authenticated_user - async def patch(self): - self.check_headers() - try: - value = JSONSerializer.generic_loads(self.request.body) - field = value["field"] - value = value["value"] - async with global_session() as session, session.begin(): - stmt = select(AppSettings).filter_by(field = field) - data = await session.execute(stmt) - setting : AppSettings = data.scalar() - setting.value = {"value" : value} - await session.commit() - self.set_status(200) - except Exception as ex: - self.set_status(500, str(ex)) - # self.set_header("Access-Control-Allow-Origin", self.CORS) - self.finish() - - @for_authenticated_user - async def get(self): - self.check_headers() - try: - async with global_session() as session: - stmt = select(AppSettings) - data = await session.execute(stmt) - serialized_data = JSONSerializer.generic_dumps({result[AppSettings.__name__].field : result[AppSettings.__name__].value - for result in data.mappings().all()}) - self.set_status(200) - 
self.set_header("Content-Type", "application/json") - self.write(serialized_data) - except Exception as ex: - self.set_status(500, str(ex)) - # self.set_header("Access-Control-Allow-Origin", self.CORS) - self.finish() - - -class DashboardsHandler(SystemHostHandler): - - @for_authenticated_user - async def post(self): - self.check_headers() - try: - data = JSONSerializer.generic_loads(self.request.body) - async with global_session() as session, session.begin(): - session.add(Dashboards(**data)) - await session.commit() - self.set_status(200) - except Exception as ex: - self.set_status(500, str(ex)) - # self.set_header("Access-Control-Allow-Origin", self.CORS) - # self.set_header("Access-Control-Allow-Credentials", "true") - self.finish() - - @for_authenticated_user - async def get(self): - self.check_headers() - try: - async with global_session() as session: - stmt = select(Dashboards) - data = await session.execute(stmt) - serialized_data = JSONSerializer.generic_dumps([result[Dashboards.__name__]._json() for result - in data.mappings().all()]) - self.set_status(200) - self.set_header("Content-Type", "application/json") - self.write(serialized_data) - except Exception as ex: - self.set_status(500, str(ex)) - # self.set_header("Access-Control-Allow-Origin", self.CORS) - # self.set_header("Access-Control-Allow-Credentials", "true") - self.finish() - - -class SubscribersHandler(SystemHostHandler): - - @for_authenticated_user - async def post(self): - if self.request.headers["Content-Type"] == "application/json": - self.set_status(200) - server = SubscribedHTTPServers(**JSONSerializer.generic_loads(self.request.body)) - async with global_session() as session, session.begin(): - session.add(server) - await session.commit() - self.finish() - - @for_authenticated_user - async def get(self): - self.set_status(200) - self.set_header("Content-Type", "application/json") - async with global_session() as session: - result = select(Server) - 
self.write(JSONSerializer.generic_dumps(result.scalars().all())) - - -class SubscriberHandler(SystemHostHandler): - - async def get(self): - pass - - - -class MainHandler(SystemHostHandler): - - async def get(self): - self.check_headers() - self.set_status(200) - # self.set_header("Access-Control-Allow-Origin", self.CORS) - # self.set_header("Access-Control-Allow-Credentials", "true") - self.write("

I am alive!!!

") - self.finish() +from .handlers import * def create_system_host(db_config_file : typing.Optional[str] = None, ssl_context : typing.Optional[ssl.SSLContext] = None, @@ -312,44 +33,54 @@ def create_system_host(db_config_file : typing.Optional[str] = None, ssl_context ---------- """ - URL = BaseDB.create_URL(db_config_file, database='hololinked-host', use_dialect=False) - if not database_exists(URL): + disk_DB_URL = BaseDB.create_postgres_URL(db_config_file, database='hololinked-host', use_dialect=False) + if not database_exists(disk_DB_URL): try: - create_database(URL) - sync_engine = create_engine(URL) - HololinkedHostTableBase.metadata.create_all(sync_engine) - create_tables(sync_engine) - create_credentials(sync_engine) - sync_engine.dispose() + create_database(disk_DB_URL) + sync_disk_db_engine = create_engine(disk_DB_URL) + HololinkedHostTableBase.metadata.create_all(sync_disk_db_engine) + create_tables(sync_disk_db_engine) + create_credentials(sync_disk_db_engine) except Exception as ex: - sync_engine.dispose() - if URL.startswith("sqlite"): - os.remove(URL.split('/')[-1]) + if disk_DB_URL.startswith("sqlite"): + os.remove(disk_DB_URL.split('/')[-1]) else: - drop_database(URL) + drop_database(disk_DB_URL) raise ex from None + finally: + sync_disk_db_engine.dispose() + + disk_DB_URL = BaseDB.create_postgres_URL(db_config_file, database='hololinked-host', use_dialect=True) + disk_engine = asyncio_ext.create_async_engine(disk_DB_URL, echo=True) + disk_session = sessionmaker(disk_engine, expire_on_commit=True, + class_=asyncio_ext.AsyncSession) # type: asyncio_ext.AsyncSession - global global_engine, global_session - URL = BaseDB.create_URL(db_config_file, database='hololinked-host', use_dialect=True) - global_engine = asyncio_ext.create_async_engine(URL, echo=True) - global_session = sessionmaker(global_engine, expire_on_commit=True, - class_=asyncio_ext.AsyncSession) # type: ignore + mem_DB_URL = BaseDB.create_sqlite_URL(in_memory=True) + mem_engine = 
create_engine(mem_DB_URL, echo=True) + mem_session = sessionmaker(mem_engine, expire_on_commit=True, + class_=Session) # type: Session + HololinkedHostInMemoryTableBase.metadata.create_all(mem_engine) CORS = server_settings.pop("CORS", []) if not isinstance(CORS, (str, list)): raise TypeError("CORS should be a list of strings or a string") - if isinstance(CORS, list): - CORS = ', '.join(CORS) - SystemHostHandler.CORS = CORS + if isinstance(CORS, str): + CORS = [CORS] + kwargs = dict( + CORS=CORS, + disk_session=disk_session, + mem_session=mem_session + ) app = Application([ - (r"/", MainHandler), - (r"/users", UsersHandler), - (r"/dashboards", DashboardsHandler), - (r"/app-settings", AppSettingsHandler), - (r"/subscribers", SubscribersHandler), + (r"/", MainHandler, kwargs), + (r"/users", UsersHandler, kwargs), + (r"/dashboards", DashboardsHandler, kwargs), + (r"/app-settings", AppSettingsHandler, kwargs), + (r"/subscribers", SubscribersHandler, kwargs), # (r"/remote-objects", RemoteObjectsHandler), - (r"/login", LoginHandler) + (r"/login", LoginHandler, kwargs), + (r"/logout", LogoutHandler, kwargs) ], cookie_secret=base64.b64encode(os.urandom(32)).decode('utf-8'), **server_settings) @@ -367,7 +98,7 @@ def create_tables(engine): field = name, value = settings )) - session.commit() + session.commit() def create_credentials(sync_engine): @@ -384,12 +115,18 @@ def create_credentials(sync_engine): print("password & repeat password not the same. 
Try again.") continue with Session(sync_engine) as session, session.begin(): - ph = PasswordHasher(time_cost=500) + ph = PasswordHasher(time_cost=global_config.PWD_HASHER_TIME_COST) session.add(LoginCredentials(email=email, password=ph.hash(password))) session.commit() return raise RuntimeError("password not created, aborting database creation.") +def delete_database(db_config_file): + # config_file = str(Path(os.path.dirname(__file__)).parent) + "\\assets\\db_config.json" + URL = BaseDB.create_URL(db_config_file, database="hololinked-host", use_dialect=False) + drop_database(URL) + + __all__ = ['create_system_host'] \ No newline at end of file From 9c7431a373887417d68e1b471893b3d3b1a70ff7 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 3 Mar 2024 19:03:01 +0100 Subject: [PATCH 050/167] updated argon2-cffi in requirements.txt --- doc/source/autodoc/server/system_host/index.rst | 2 +- doc/source/index.rst | 2 +- doc/source/installation.rst | 6 ++++++ doc/source/requirements.txt | 2 +- requirements.txt | 2 +- 5 files changed, 10 insertions(+), 4 deletions(-) diff --git a/doc/source/autodoc/server/system_host/index.rst b/doc/source/autodoc/server/system_host/index.rst index cf5e156..e651655 100644 --- a/doc/source/autodoc/server/system_host/index.rst +++ b/doc/source/autodoc/server/system_host/index.rst @@ -3,7 +3,7 @@ SystemHost .. autofunction:: hololinked.system_host.create_system_host -.. autoclass:: hololinked.system_host.server.SystemHostHandler +.. autoclass:: hololinked.system_host.handlers.SystemHostHandler :members: :show-inheritance: diff --git a/doc/source/index.rst b/doc/source/index.rst index 4e1bc57..750af12 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -18,8 +18,8 @@ separate the concerns of GUI & device or integrate with web-browser for a modern based tools. 
|module-highlighted| is being developed with the following features in mind: * being truly pythonic - all code in python & all features of python -* easy to understand & setup * reasonable integration with HTTP to take advantage of modern web practices and Javascript GUI frameworks like React +* easy to understand & setup * agnostic to system size & flexibility in topology * 30FPS 1280*1080*3 (8 bit) image streaming over HTTP diff --git a/doc/source/installation.rst b/doc/source/installation.rst index 51425c7..0ef4365 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -42,3 +42,9 @@ To build & host docs locally, in top directory: make clean make html python -m http.server --directory build\html + +To open the docs in the default browser, one can also issue the following instead of starting a python server + +.. code:: shell + + make host-doc \ No newline at end of file diff --git a/doc/source/requirements.txt b/doc/source/requirements.txt index 60eb455..84ef915 100644 --- a/doc/source/requirements.txt +++ b/doc/source/requirements.txt @@ -9,7 +9,7 @@ sphinxcontrib-serializinghtml==1.1.9 pydata-sphinx-theme==0.14.3 numpydoc==1.6.0 sphinx-toolbox==3.5.0 -argon2==0.1.10 +argon2-cffi==0.1.10 ConfigParser==6.0.0 ifaddr==0.2.0 ipython==8.21.0 diff --git a/requirements.txt b/requirements.txt index af2d9ee..7178cf3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -argon2==0.1.10 +argon2-cffi==0.1.10 ConfigParser==6.0.0 ifaddr==0.2.0 ipython==8.21.0 From 49a96a7c7238ab2a22cb8dd7296df66410a77dc2 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 3 Mar 2024 19:09:55 +0100 Subject: [PATCH 051/167] added today_fmt in conf of RST --- doc/source/conf.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 3592295..25b6665 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -70,4 +70,6 @@ 
numpydoc_show_class_members = False -autodoc_member_order = 'bysource' \ No newline at end of file +autodoc_member_order = 'bysource' + +today_fmt = '%d.%m.%Y %H:%M' \ No newline at end of file From 8ef05840a36505322b1b7354d6f42065c7f1d064 Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Sun, 3 Mar 2024 19:12:15 +0100 Subject: [PATCH 052/167] changed version number argon2-cffi --- doc/source/index.rst | 2 +- doc/source/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/index.rst b/doc/source/index.rst index 750af12..fb47cab 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -61,4 +61,4 @@ Indices and tables -last build : |today| \ No newline at end of file +last build : |today| CET \ No newline at end of file diff --git a/doc/source/requirements.txt b/doc/source/requirements.txt index 84ef915..6143016 100644 --- a/doc/source/requirements.txt +++ b/doc/source/requirements.txt @@ -9,7 +9,7 @@ sphinxcontrib-serializinghtml==1.1.9 pydata-sphinx-theme==0.14.3 numpydoc==1.6.0 sphinx-toolbox==3.5.0 -argon2-cffi==0.1.10 +argon2-cffi==23.1.0 ConfigParser==6.0.0 ifaddr==0.2.0 ipython==8.21.0 From 5d9b48936246fe306c28c57b33233d89263b097e Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" <62492557+VigneshVSV@users.noreply.github.com> Date: Mon, 4 Mar 2024 08:07:56 +0100 Subject: [PATCH 053/167] moved swagger api docs here - sync commmit - will be later moved as git submodule --- README.md | 3 + .../assets/swagger_ui_template.html | 22 +++++ .../assets/system-host-api/index.yml | 32 +++++++ .../system-host-api/routes/app-settings.yml | 94 +++++++++++++++++++ .../assets/system-host-api/routes/login.yml | 43 +++++++++ .../assets/system-host-api/routes/users.yml | 35 +++++++ .../system-host-api/schemas/app-settings.yml | 77 +++++++++++++++ .../assets/system-host-api/schemas/users.yml | 40 ++++++++ hololinked/system_host/handlers.py | 40 ++++++-- 
hololinked/system_host/server.py | 4 +- requirements.txt | 2 +- setup.py | 4 +- 12 files changed, 383 insertions(+), 13 deletions(-) create mode 100644 hololinked/system_host/assets/swagger_ui_template.html create mode 100644 hololinked/system_host/assets/system-host-api/index.yml create mode 100644 hololinked/system_host/assets/system-host-api/routes/app-settings.yml create mode 100644 hololinked/system_host/assets/system-host-api/routes/login.yml create mode 100644 hololinked/system_host/assets/system-host-api/routes/users.yml create mode 100644 hololinked/system_host/assets/system-host-api/schemas/app-settings.yml create mode 100644 hololinked/system_host/assets/system-host-api/schemas/users.yml diff --git a/README.md b/README.md index 16159cb..02bb84d 100644 --- a/README.md +++ b/README.md @@ -45,5 +45,8 @@ clone the repository and install in develop mode `pip install -e .` for convenie - Database support for storing and loading parameters (based on SQLAlchemy) when object dies and restarts +[![Documentation Status](https://readthedocs.org/projects/hololinked/badge/?version=latest)](https://hololinked.readthedocs.io/en/latest/?badge=latest) + + diff --git a/hololinked/system_host/assets/swagger_ui_template.html b/hololinked/system_host/assets/swagger_ui_template.html new file mode 100644 index 0000000..8fb31ea --- /dev/null +++ b/hololinked/system_host/assets/swagger_ui_template.html @@ -0,0 +1,22 @@ + + + + Swagger UI + + + +

+ + + + diff --git a/hololinked/system_host/assets/system-host-api/index.yml b/hololinked/system_host/assets/system-host-api/index.yml new file mode 100644 index 0000000..2073281 --- /dev/null +++ b/hololinked/system_host/assets/system-host-api/index.yml @@ -0,0 +1,32 @@ +openapi: '3.0.2' +info: + title: hololinked system host server + description: hololinked system host server stores system level data using this API. System level data allow interaction with one's system as a whole - where, what RemoteObject's are running and how? This API is also consumed by the hololinked-portal app, therefore containing services related to hololinked-portal. + version: '1.0.0' +servers: + - url: https://localhost:8080 + description: recommended + - url: http://localhost:8080 +paths: + /app-settings: + $ref: "./routes/app-settings.yml#/app-settings" + /app-settings/{name}: + $ref: "./routes/app-settings.yml#/app-settings-with-name" + /users: + $ref: "./routes/users.yml#/users" + /login: + $ref: "./routes/login.yml#/login" + /logout: + $ref: "./routes/login.yml#/logout" + +components: + schemas: + app-settings: + $ref: "./schemas/app-settings.yml#/components/schemas/app-settings" + users: + $ref: "./schemas/users.yml#/components/schemas/users" + login-data: + $ref: "./schemas/users.yml#/components/schemas/login-data" + securitySchemes: + cookieAuth: + $ref: "./schemas/users.yml#/components/securitySchemes/cookieAuth" \ No newline at end of file diff --git a/hololinked/system_host/assets/system-host-api/routes/app-settings.yml b/hololinked/system_host/assets/system-host-api/routes/app-settings.yml new file mode 100644 index 0000000..285a2e3 --- /dev/null +++ b/hololinked/system_host/assets/system-host-api/routes/app-settings.yml @@ -0,0 +1,94 @@ +app-settings: + get: + summary: get all user interface related settings of hololinked-portal + tags: + - hololinked portal settings + responses: + 200: + description: JSON of settings categorized by a certain field. 
+ content: + application/json: + schema: + $ref: "../schemas/app-settings.yml#/components/schemas/app-settings" + 401: + description: user did not login therefore cannot access this resource + patch: + summary: edit one or multiple app-settings. + description: Its possible to edit both the entire JSON or one of the subfields + tags: + - hololinked portal settings + requestBody: + required: true + content: + application/json: + schema: + $ref: "../schemas/app-settings.yml#/components/schemas/app-settings" + responses: + 204: + description: settings was updated + +app-settings-with-name: + + get: + summary: get all user interface settings of hololinked-portal by name + tags: + - hololinked portal settings + parameters: + - name: + $ref: "#/components/parameters/name" + responses: + 200: + description: JSON of settings categorized by a certain field. + content: + application/json: + schema: + oneOf: [ + $ref: "../schemas/app-settings.yml#/components/schemas/dashboards", + $ref: "../schemas/app-settings.yml#/components/schemas/login", + $ref: "../schemas/app-settings.yml#/components/schemas/servers", + $ref: "../schemas/app-settings.yml#/components/schemas/remoteObjectViewer" + ] + + put: + summary: edit all settings specified by name + tags: + - hololinked portal settings + requestBody: + required: true + content: + application/json: + schema: + oneOf: [ + $ref: "../schemas/app-settings.yml#/components/schemas/dashboards", + $ref: "../schemas/app-settings.yml#/components/schemas/login", + $ref: "../schemas/app-settings.yml#/components/schemas/servers", + $ref: "../schemas/app-settings.yml#/components/schemas/remoteObjectViewer" + ] + parameters: + - name: + $ref: "#/components/parameters/name" + responses: + 204: + description: settings was updated + + patch: + summary: edit part of settings specified by name + tags: + - hololinked portal settings + parameters: + - name: + $ref: "#/components/parameters/name" + responses: + 204: + description: settings was updated + 
+components: + parameters: + name: + name: name + in: path + required: true + description: The name of the setting, one of the field of app-settings schema + schema: + type: string + enum: ["dashboards", "remote-object-viewer", "login", "servers"] \ No newline at end of file diff --git a/hololinked/system_host/assets/system-host-api/routes/login.yml b/hololinked/system_host/assets/system-host-api/routes/login.yml new file mode 100644 index 0000000..34a3176 --- /dev/null +++ b/hololinked/system_host/assets/system-host-api/routes/login.yml @@ -0,0 +1,43 @@ +login: + post: + tags: + - hololinked portal users + summary: login + requestBody: + required: true + content: + application/json: + schema: + $ref: "../schemas/users.yml#/components/schemas/login-data" + responses: + 204: + description: logged in + headers: + Set-Cookie: + description: session cookie + schema: + type: string + example: user=, Domain=localhost; HttpOnly; Path=/; SameSite=strict; Secure + 404: + description: user name not found + 500: + description: error occurred during authentication along with python exception string in status text + +logout: + post: + tags: + - hololinked portal users + summary: logout + responses: + 204: + description: logged out + headers: + Set-Cookie: + description: clears the session cookie (expiration time in example is immediate) + schema: + type: string + example: user=""; expires=Sat, 04 Mar 2023 19:03:01 GMT; Path=/ + 409: + description: not a valid user to logout + 500: + description: logout failed with python exception string in status text \ No newline at end of file diff --git a/hololinked/system_host/assets/system-host-api/routes/users.yml b/hololinked/system_host/assets/system-host-api/routes/users.yml new file mode 100644 index 0000000..bb84741 --- /dev/null +++ b/hololinked/system_host/assets/system-host-api/routes/users.yml @@ -0,0 +1,35 @@ +users: + post: + tags: + - hololinked portal users + summary: add new user + requestBody: + required: true + content: + 
application/json: + schema: + $ref: "../schemas/users.yml#/components/schemas/users" + responses: + 204: + description: new user was added + 401: + description: user did not login therefore cannot add new user + 403: + description: user is not authorised to add new user + patch: + tags: + - hololinked portal users + summary: change password + requestBody: + required: true + content: + application/json: + schema: + $ref: "../schemas/users.yml#/components/schemas/login-data" + responses: + 204: + description: password was changed + 401: + description: user did not login therefore cannot change password + 403: + description: user is not authorised to change password \ No newline at end of file diff --git a/hololinked/system_host/assets/system-host-api/schemas/app-settings.yml b/hololinked/system_host/assets/system-host-api/schemas/app-settings.yml new file mode 100644 index 0000000..7cd647b --- /dev/null +++ b/hololinked/system_host/assets/system-host-api/schemas/app-settings.yml @@ -0,0 +1,77 @@ +openapi: '3.0.2' +info: + title: hololinked system host server + description: hololinked system host server stores system level data using this API. System level data allow interaction with one's system as a whole - where, what RemoteObject's are running and how? This API is also consumed by the hololinked-portal app, therefore containing services related to hololinked-portal. 
+ version: '1.0.0' +paths: {} +components: + schemas: + app-settings: + type: object + properties: + dashboards: + $ref: "#/components/schemas/dashboards" + login: + $ref: "#/components/schemas/login" + servers: + $ref: "#/components/schemas/servers" + remoteObjectViewer: + $ref: "#/components/schemas/remoteObjectViewer" + dashboards: + type: object + properties: + deleteWithoutAsking: + type: boolean + showRecentlyUsed: + type: boolean + login: + type: object + properties: + footer: + type: string + default: "" + footerLink: + type: string + default: "" + displayFooter: + type: boolean + servers: + type: object + properties: + allowHTTP: + type: boolean + default: false + remoteObjectViewer: + type: object + properties: + console: + type: object + properties: + stringifyOutput: + type: boolean + default: false + defaultMaxEntries: + type: integer + default: 15 + defaultWindowSize: + type: integer + default: 500 + defaultFontSize: + type: integer + default : 16 + logViewer: + type: object + properties: + stringifyOutput: + type: boolean + default: false + defaultMaxEntries: + type: integer + default: 10 + defaultWindowSize: + type: integer + default: 1000 + defaultFontSize: + type: integer + default: 16 + \ No newline at end of file diff --git a/hololinked/system_host/assets/system-host-api/schemas/users.yml b/hololinked/system_host/assets/system-host-api/schemas/users.yml new file mode 100644 index 0000000..e891727 --- /dev/null +++ b/hololinked/system_host/assets/system-host-api/schemas/users.yml @@ -0,0 +1,40 @@ +openapi: '3.0.2' +info: + title: hololinked system host server + description: hololinked system host server stores system level data using this API. System level data allow interaction with one's system as a whole - where, what RemoteObject's are running and how? This API is also consumed by the hololinked-portal app, therefore containing services related to hololinked-portal. 
+ version: '1.0.0' +paths: {} +components: + schemas: + users: + type: object + properties: + email: + $ref: "#/components/schemas/email" + password: + $ref: "#/components/schemas/password" + type: + $ref: "#/components/schemas/type" + login-data: + type: object + properties: + email: + $ref: "#/components/schemas/email" + password: + $ref: "#/components/schemas/password" + email: + type: string + default: "" + password: + type: string + default: "" + type: + type: string + enum: ["admin", "user"] + default: "admin" + + securitySchemes: + cookieAuth: + type: apiKey + in: cookie + name: user \ No newline at end of file diff --git a/hololinked/system_host/handlers.py b/hololinked/system_host/handlers.py index 778956a..f7217e0 100644 --- a/hololinked/system_host/handlers.py +++ b/hololinked/system_host/handlers.py @@ -1,4 +1,6 @@ +import os import typing +import yaml import inspect from argon2 import PasswordHasher @@ -28,6 +30,7 @@ async def authenticated_method(self : "SystemHostHandler") -> None: class SystemHostHandler(RequestHandler): """ Base Request Handler for all requests directed to system host server. Implements CORS & credential checks. + Use built in swagger-ui for request handler documentation for other paths. 
""" def initialize(self, CORS : typing.List[str], disk_session : Session, mem_session : asyncio_ext.AsyncSession) -> None: @@ -100,7 +103,7 @@ def set_custom_default_headers(self) -> None: self.set_header("Access-Control-Allow-Credentials", "true") async def options(self): - self.set_status(200) + self.set_status(204) self.set_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS") self.set_custom_default_headers() self.finish() @@ -119,7 +122,7 @@ async def get(self): class LoginHandler(SystemHostHandler): """ - Performs login and supplies a signed cookie for session + performs login and supplies a signed cookie for session """ async def post(self): self.check_headers() @@ -133,12 +136,12 @@ async def post(self): data = await session.execute(stmt) data = data.scalars().all() # type: typing.List[LoginCredentials] if len(data) == 0: - self.set_status(403, "authentication failed - no username found") + self.set_status(404, "authentication failed - no username found") else: data = data[0] # type: LoginCredentials ph = PasswordHasher(time_cost=global_config.PWD_HASHER_TIME_COST) if ph.verify(data.password, password): - self.set_status(200) + self.set_status(204, "logged in") cookie_value = uuid4_in_bytes() self.set_signed_cookie("user", cookie_value, httponly=True, secure=True, samesite="strict", domain="localhost", @@ -158,7 +161,7 @@ async def post(self): self.finish() async def options(self): - self.set_status(200) + self.set_status(204) self.set_header("Access-Control-Allow-Methods", "POST, OPTIONS") self.set_custom_default_headers() self.finish() @@ -166,7 +169,7 @@ async def options(self): class LogoutHandler(SystemHostHandler): """ - Performs login and supplies a signed cookie for session + Performs logout and clears the signed cookie of session """ async def post(self): self.check_headers() @@ -182,7 +185,7 @@ async def post(self): if result.rowcount != 1: self.set_status(500, "found user but could not logout") # never comes here session.commit() - 
self.set_status(200, "logged out") + self.set_status(204, "logged out") self.clear_cookie("user") except Exception as ex: self.set_status(500, f"logout failed - {str(ex)}") @@ -190,7 +193,7 @@ async def post(self): self.finish() async def options(self): - self.set_status(200) + self.set_status(204) self.set_header("Access-Control-Allow-Methods", "POST, OPTIONS") self.set_custom_default_headers() self.finish() @@ -318,6 +321,23 @@ async def get(self): pass +class SwaggerHandler(SystemHostHandler): + + async def get(self): + with open(f'{os.path.dirname(os.path.abspath(__file__))}{os.sep}assets{os.sep}system-host-api{os.sep}index.yml', 'r') as file: + swagger_spec = yaml.safe_load(file) + self.set_header('Content-Type', 'application/yaml') + self.write(swagger_spec) + self.finish() + + +class SwaggerUIHandler(SystemHostHandler): + + async def get(self): + await self.render(f"{os.path.dirname(os.path.abspath(__file__))}{os.sep}assets{os.sep}swagger_ui_template.html", + swagger_spec_url="/doc") + + class MainHandler(SystemHostHandler): @@ -338,5 +358,7 @@ async def get(self): DashboardsHandler.__name__, SubscribersHandler.__name__, MainHandler.__name__, - LogoutHandler.__name__ + LogoutHandler.__name__, + SwaggerHandler.__name__, + SwaggerUIHandler.__name__ ] \ No newline at end of file diff --git a/hololinked/system_host/server.py b/hololinked/system_host/server.py index 35cb42d..5e82a9e 100644 --- a/hololinked/system_host/server.py +++ b/hololinked/system_host/server.py @@ -80,7 +80,9 @@ def create_system_host(db_config_file : typing.Optional[str] = None, ssl_context (r"/subscribers", SubscribersHandler, kwargs), # (r"/remote-objects", RemoteObjectsHandler), (r"/login", LoginHandler, kwargs), - (r"/logout", LogoutHandler, kwargs) + (r"/logout", LogoutHandler, kwargs), + (r"/doc", SwaggerHandler, kwargs), + (r"/swagger-ui", SwaggerUIHandler, kwargs) ], cookie_secret=base64.b64encode(os.urandom(32)).decode('utf-8'), **server_settings) diff --git a/requirements.txt 
b/requirements.txt index 7178cf3..407faad 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -argon2-cffi==0.1.10 +argon2-cffi==23.1.0 ConfigParser==6.0.0 ifaddr==0.2.0 ipython==8.21.0 diff --git a/setup.py b/setup.py index 4dbdbad..0108506 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ import setuptools long_description=""" -A zmq-based RPC tool-kit with built-in HTTP support for instrument control/data acquisition +A ZMQ-based RPC tool-kit with built-in HTTP support for instrument control/data acquisition or controlling generic python objects. """ @@ -10,7 +10,7 @@ version="0.1.0", author="Vignesh Vaidyanathan", author_email="vignesh.vaidyanathan@physik.uni-muenchen.de", - description="A zmq-based RPC tool-kit with built-in HTTP support for instrument control/data acquisition or controlling generic python objects.", + description="A ZMQ-based RPC tool-kit with built-in HTTP support for instrument control/data acquisition or controlling generic python objects.", long_description=long_description, long_description_content_type="text/markdown", url="", From 64c9395477050922088ea32dec3232a30b99590d Mon Sep 17 00:00:00 2001 From: "Vignesh.Vaidyanathan" Date: Wed, 6 Mar 2024 20:45:00 +0100 Subject: [PATCH 054/167] hostable local swagger ui within system host --- hololinked/server/database.py | 2 +- .../assets/swagger_ui_template.html | 2 +- hololinked/system_host/handlers.py | 34 ++++++++----------- hololinked/system_host/models.py | 9 +++-- hololinked/system_host/server.py | 12 ++++--- 5 files changed, 31 insertions(+), 28 deletions(-) diff --git a/hololinked/server/database.py b/hololinked/server/database.py index 323cd14..8b23655 100644 --- a/hololinked/server/database.py +++ b/hololinked/server/database.py @@ -56,7 +56,7 @@ def create_postgres_URL(cls, config_file : str = None, database : typing.Optiona use_dialect : typing.Optional[bool] = False) -> str: conf = BaseDB.load_conf(config_file) server = conf.get('server', None) - database = 
conf.get('database', 'hololinked') + database = conf.get('database', database) host = conf.get("host", 'localhost') port = conf.get("port", 5432) user = conf.get('user', 'postgres') diff --git a/hololinked/system_host/assets/swagger_ui_template.html b/hololinked/system_host/assets/swagger_ui_template.html index 8fb31ea..fb7dc11 100644 --- a/hololinked/system_host/assets/swagger_ui_template.html +++ b/hololinked/system_host/assets/swagger_ui_template.html @@ -9,7 +9,7 @@