diff --git a/pyproject.toml b/pyproject.toml index 03c5690080f..4e71b4de436 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,6 +33,8 @@ dependencies = [ "urllib3<2", "numpy>=1.21.0,<2", "Pillow>=9.3.0", + "pypng>=0.0.20", + "psutil>=5.9.2" ] [project.optional-dependencies] @@ -82,7 +84,8 @@ name = "ansys.pyensight.core" branch = true omit = [ "*/locallauncher.py", - "*/adr.py" + "*/adr.py", + "*/omniverse*.py" ] [tool.coverage.report] diff --git a/src/ansys/pyensight/core/ensight_grpc.py b/src/ansys/pyensight/core/ensight_grpc.py index 4eeb9384408..e270615db06 100644 --- a/src/ansys/pyensight/core/ensight_grpc.py +++ b/src/ansys/pyensight/core/ensight_grpc.py @@ -1,411 +1,430 @@ -"""ensight_grpc module - -This package defines the EnSightGRPC class which provides a simpler -interface to the EnSight gRPC interface, including event streams. - -""" -import threading -from typing import Any, Callable, List, Optional, Tuple, Union -import uuid - -from ansys.api.pyensight.v0 import ensight_pb2, ensight_pb2_grpc -import grpc - - -class EnSightGRPC(object): - """Wrapper around a gRPC connection to an EnSight instance - - This class provides an asynchronous interface to the EnSight - core gRPC interface. It can handle remote event - streams, providing a much simpler interface to the EnSight - application. The default is to make a connection to an EnSight - gRPC server on port 12345 on the loopback host. - - Parameters - ---------- - host: str, optional - Hostname where there EnSight gRPC server is running. - port: int, optional - Port to make the gRPC connection to - secret_key: str, optional - Connection secret key - """ - - def __init__(self, host: str = "127.0.0.1", port: int = 12345, secret_key: str = ""): - self._host = host - self._port = port - self._channel = None - self._stub = None - self._dsg_stub = None - self._security_token = secret_key - self._session_name: str = "" - # Streaming APIs - # Event (strings) - self._event_stream = None - self._event_thread: Optional[threading.Thread] = None - self._events: List[Any] = list() - # Callback for events (self._events not used) - self._event_callback: Optional[Callable] = None - self._prefix: Optional[str] = None - - @property - def host(self) -> str: - """The gRPC server (EnSight) hostname""" - return self._host - - def port(self) -> int: - """The gRPC server (EnSight) port number""" - return self._port - - @property - def security_token(self) -> str: - """The gRPC server (EnSight) secret key - - EnSight supports a security token in either numeric (-security {int}) or - string (ENSIGHT_SECURITY_TOKEN environmental variable) form. If EnSight - is using a security token, all gRPC calls must include this token. This - call sets the token for all grPC calls made by this class. - """ - return self._security_token - - @security_token.setter - def security_token(self, name: str) -> None: - self._security_token = name - - @property - def session_name(self) -> str: - """The gRPC server session name - - EnSight gRPC calls can include the session name via 'session_name' metadata. - A client session may provide a session name via this property. - """ - return self._session_name - - @session_name.setter - def session_name(self, name: str) -> None: - self._session_name = name - - def shutdown(self, stop_ensight: bool = False, force: bool = False) -> None: - """Close down the gRPC connection - - Disconnect all connections to the gRPC server. If stop_ensight is True, send the - 'Exit' command to the EnSight gRPC server. 
- - Parameters - ---------- - stop_ensight: bool, optional - if True, send an 'Exit' command to the gRPC server. - force: bool, optional - if stop_ensight and force are true, stop EnSight aggressively - """ - if self.is_connected(): - # if requested, send 'Exit' - if stop_ensight: - # the gRPC ExitRequest is exactly that, a request in some - # cases the operation needs to be forced - if force: - try: - self.command("ensight.exit(0)", do_eval=False) - except IOError: - # we expect this as the exit can result in the gRPC call failing - pass - else: - if self._stub: - _ = self._stub.Exit(ensight_pb2.ExitRequest(), metadata=self._metadata()) - # clean up control objects - self._stub = None - self._dsg_stub = None - if self._channel: - self._channel.close() - self._channel = None - - def is_connected(self) -> bool: - """Check to see if the gRPC connection is live - - Returns - ------- - True if the connection is active. - """ - return self._channel is not None - - def connect(self, timeout: float = 15.0) -> None: - """Establish the gRPC connection to EnSight - - Attempt to connect to an EnSight gRPC server using the host and port - established by the constructor. Note on failure, this function just - returns, but is_connected() will return False. - - Parameters - ---------- - timeout: float - how long to wait for the connection to timeout - """ - if self.is_connected(): - return - # set up the channel - self._channel = grpc.insecure_channel( - "{}:{}".format(self._host, self._port), - options=[ - ("grpc.max_receive_message_length", -1), - ("grpc.max_send_message_length", -1), - ("grpc.testing.fixed_reconnect_backoff_ms", 1100), - ], - ) - try: - grpc.channel_ready_future(self._channel).result(timeout=timeout) - except grpc.FutureTimeoutError: - self._channel = None - return - # hook up the stub interface - self._stub = ensight_pb2_grpc.EnSightServiceStub(self._channel) - - def _metadata(self) -> List[Tuple[bytes, Union[str, bytes]]]: - """Compute the gRPC stream metadata - - Compute the list to be passed to the gRPC calls for things like security - and the session name. - - """ - ret: List[Tuple[bytes, Union[str, bytes]]] = list() - s: Union[str, bytes] - if self._security_token: - s = self._security_token - if type(s) == str: - s = s.encode("utf-8") - ret.append((b"shared_secret", s)) - if self.session_name: - s = self.session_name.encode("utf-8") - ret.append((b"session_name", s)) - return ret - - def render( - self, - width: int = 640, - height: int = 480, - aa: int = 1, - png: bool = True, - highlighting: bool = False, - ) -> bytes: - """Generate a rendering of the current EnSight scene - - Render the current scene at a specific size and using a specific number of anti-aliasing - passes. The return value can be a byte array (width*height*3) bytes or a PNG image. - - Parameters - ---------- - width: int, optional - width of the image to render - height: int, optional - height of the image to render - aa: int, optional - number of antialiasing passes to use in generating the image - png: bool, optional - if True, the return value is a PNG image bytestream. Otherwise, it is a simple - bytes object with width*height*3 values. - highlighting: bool, optional - if True, selection highlighting will be included in the image. 
- - Returns - ------- - bytes - bytes object representation of the rendered image - - Raises - ------ - IOError if the operation fails - """ - self.connect() - ret_type = ensight_pb2.RenderRequest.IMAGE_RAW - if png: - ret_type = ensight_pb2.RenderRequest.IMAGE_PNG - response: Any - try: - if self._stub: - response = self._stub.RenderImage( - ensight_pb2.RenderRequest( - type=ret_type, - image_width=width, - image_height=height, - image_aa_passes=aa, - include_highlighting=highlighting, - ), - metadata=self._metadata(), - ) - except Exception: - raise IOError("gRPC connection dropped") - return response.value - - def geometry(self) -> bytes: - """Return the current scene geometry in glTF format - - Package up the geometry currently being viewed in the EnSight session as - a glTF stream. Return this stream as an array of byte. Note: no - intermediate files are utilized. - - Note: currently there is a limitation of glTF files to 2GB - - Returns - ------- - bytes object representation of the glTF file - - Raises - ------ - IOError if the operation fails - """ - self.connect() - response: Any - try: - if self._stub: - response = self._stub.GetGeometry( - ensight_pb2.GeometryRequest(type=ensight_pb2.GeometryRequest.GEOMETRY_GLB), - metadata=self._metadata(), - ) - except Exception: - raise IOError("gRPC connection dropped") - return response.value - - def command(self, command_string: str, do_eval: bool = True, json: bool = False) -> Any: - """Send a Python command string to be executed in EnSight - - The string will be run or evaluated in the EnSight Python interpreter via the - EnSightService::RunPython() gRPC all. If an exception or other error occurs, this - function will throw a RuntimeError. If do_eval is False, the return value will be None, - otherwise it will be the returned string (eval() will not be performed). If json is True, - the return value will be a JSON representation of the report execution result. - - Parameters - ---------- - command_string: str - The string to execute - do_eval: bool, optional - If True, a return value will be computed and returned - json: bool, optional - If True and do_eval is True, the return value will be a JSON representation of - the evaluated value. - - Returns - ------- - Any - None, a string ready for Python eval() or a JSON string. - - Raises - ------ - RuntimeError if the operation fails. - IOError if the communication fails. - """ - self.connect() - flags = ensight_pb2.PythonRequest.EXEC_RETURN_PYTHON - response: Any - if json: - flags = ensight_pb2.PythonRequest.EXEC_RETURN_JSON - if not do_eval: - flags = ensight_pb2.PythonRequest.EXEC_NO_RESULT - try: - if self._stub: - response = self._stub.RunPython( - ensight_pb2.PythonRequest(type=flags, command=command_string), - metadata=self._metadata(), - ) - except Exception: - raise IOError("gRPC connection dropped") - if response.error < 0: - raise RuntimeError(response.value) - if flags == ensight_pb2.PythonRequest.EXEC_NO_RESULT: - return None - # This was moved externally so pre-processing could be performed - # elif flags == ensight_pb2.PythonRequest.EXEC_RETURN_PYTHON: - # return eval(response.value) - return response.value - - def prefix(self) -> str: - """Return the unique prefix for this instance. - - Some EnSight gRPC APIs require a unique prefix so that EnSight can handle - multiple, simultaneous remote connections. This method will generate a GUID-based - prefix. 
- - Returns - ------- - str - A unique (for this session) prefix string of the form: grpc://{uuid}/ - """ - # prefix URIs will have the format: "grpc://{uuid}/{callbackname}?enum={}&uid={}" - if self._prefix is None: - self._prefix = "grpc://" + str(uuid.uuid1()) + "/" - return self._prefix - - def event_stream_enable(self, callback: Optional[Callable] = None) -> None: - """Enable a simple gRPC-based event stream from EnSight - - This method makes a EnSightService::GetEventStream() gRPC call into EnSight, returning - an ensightservice::EventReply stream. The method creates a thread to hold this - stream open and read new events from it. The thread adds the event strings to - a list of events stored on this instance. If callback is not None, the object - will be called with the event string, otherwise they can be retrieved using get_event(). - """ - if self._event_stream is not None: - return - self._event_callback = callback - self.connect() - if self._stub: - self._event_stream = self._stub.GetEventStream( - ensight_pb2.EventStreamRequest(prefix=self.prefix()), - metadata=self._metadata(), - ) - self._event_thread = threading.Thread(target=self._poll_events) - self._event_thread.daemon = True - self._event_thread.start() - - def event_stream_is_enabled(self) -> bool: - """Check to see if the event stream is enabled - - If an event stream has been successfully established via - event_stream_enable(), then this function returns True. - - Returns - ------- - True if a ensightservice::EventReply steam is active - """ - return self._event_stream is not None - - def get_event(self) -> Optional[str]: - """Retrieve and remove the oldest ensightservice::EventReply string - - When any of the event streaming systems is enabled, Python threads will receive the - event records and store them in this instance in an ordered fashion. This method - retrieves the oldest ensightservice::EventReply string in the queue. - - Returns - ------- - None or the oldest event string in the queue. - """ - try: - return self._events.pop(0) - except IndexError: - return None - - def _put_event(self, evt: "ensight_pb2.EventReply") -> None: - """Add an event record to the event queue on this instance - - This method is used by threads to make the events they receive available to - calling applications via get_event(). - """ - if self._event_callback: - self._event_callback(evt.tag) - return - self._events.append(evt.tag) - - def _poll_events(self) -> None: - """Internal method to handle event streams - - This method is called by a Python thread to read events via the established - ensightservice::EventReply stream. - """ - try: - while self._stub is not None: - evt = self._event_stream.next() - self._put_event(evt) - except Exception: - # signal that the gRPC connection has broken - self._event_stream = None - self._event_thread = None +"""ensight_grpc module + +This package defines the EnSightGRPC class which provides a simpler +interface to the EnSight gRPC interface, including event streams. + +""" +import threading +from typing import Any, Callable, List, Optional, Tuple, Union +import uuid + +from ansys.api.pyensight.v0 import dynamic_scene_graph_pb2_grpc, ensight_pb2, ensight_pb2_grpc +import grpc + + +class EnSightGRPC(object): + """Wrapper around a gRPC connection to an EnSight instance + + This class provides an asynchronous interface to the EnSight + core gRPC interface. It can handle remote event + streams, providing a much simpler interface to the EnSight + application. 
The default is to make a connection to an EnSight + gRPC server on port 12345 on the loopback host. + + Parameters + ---------- + host: str, optional + Hostname where there EnSight gRPC server is running. + port: int, optional + Port to make the gRPC connection to + secret_key: str, optional + Connection secret key + """ + + def __init__(self, host: str = "127.0.0.1", port: int = 12345, secret_key: str = ""): + self._host = host + self._port = port + self._channel = None + self._stub = None + self._dsg_stub = None + self._security_token = secret_key + self._session_name: str = "" + # Streaming APIs + # Event (strings) + self._event_stream = None + self._event_thread: Optional[threading.Thread] = None + self._events: List[Any] = list() + # Callback for events (self._events not used) + self._event_callback: Optional[Callable] = None + self._prefix: Optional[str] = None + + @property + def host(self) -> str: + """The gRPC server (EnSight) hostname""" + return self._host + + def port(self) -> int: + """The gRPC server (EnSight) port number""" + return self._port + + @property + def security_token(self) -> str: + """The gRPC server (EnSight) secret key + + EnSight supports a security token in either numeric (-security {int}) or + string (ENSIGHT_SECURITY_TOKEN environmental variable) form. If EnSight + is using a security token, all gRPC calls must include this token. This + call sets the token for all grPC calls made by this class. + """ + return self._security_token + + @security_token.setter + def security_token(self, name: str) -> None: + self._security_token = name + + @property + def session_name(self) -> str: + """The gRPC server session name + + EnSight gRPC calls can include the session name via 'session_name' metadata. + A client session may provide a session name via this property. + """ + return self._session_name + + @session_name.setter + def session_name(self, name: str) -> None: + self._session_name = name + + def shutdown(self, stop_ensight: bool = False, force: bool = False) -> None: + """Close down the gRPC connection + + Disconnect all connections to the gRPC server. If stop_ensight is True, send the + 'Exit' command to the EnSight gRPC server. + + Parameters + ---------- + stop_ensight: bool, optional + if True, send an 'Exit' command to the gRPC server. + force: bool, optional + if stop_ensight and force are true, stop EnSight aggressively + """ + if self.is_connected(): + # if requested, send 'Exit' + if stop_ensight: + # the gRPC ExitRequest is exactly that, a request in some + # cases the operation needs to be forced + if force: + try: + self.command("ensight.exit(0)", do_eval=False) + except IOError: + # we expect this as the exit can result in the gRPC call failing + pass + else: + if self._stub: + _ = self._stub.Exit(ensight_pb2.ExitRequest(), metadata=self._metadata()) + # clean up control objects + self._stub = None + self._dsg_stub = None + if self._channel: + self._channel.close() + self._channel = None + + def is_connected(self) -> bool: + """Check to see if the gRPC connection is live + + Returns + ------- + True if the connection is active. + """ + return self._channel is not None + + def connect(self, timeout: float = 15.0) -> None: + """Establish the gRPC connection to EnSight + + Attempt to connect to an EnSight gRPC server using the host and port + established by the constructor. Note on failure, this function just + returns, but is_connected() will return False. 
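+
+        A minimal usage sketch, assuming an EnSight gRPC server is already
+        listening on the default port on the local host::
+
+            from ansys.pyensight.core.ensight_grpc import EnSightGRPC
+
+            grpc = EnSightGRPC(host="127.0.0.1", port=12345)
+            grpc.connect(timeout=30.0)
+            if not grpc.is_connected():
+                raise RuntimeError("Unable to connect to the EnSight gRPC server")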
+ + Parameters + ---------- + timeout: float + how long to wait for the connection to timeout + """ + if self.is_connected(): + return + # set up the channel + self._channel = grpc.insecure_channel( + "{}:{}".format(self._host, self._port), + options=[ + ("grpc.max_receive_message_length", -1), + ("grpc.max_send_message_length", -1), + ("grpc.testing.fixed_reconnect_backoff_ms", 1100), + ], + ) + try: + grpc.channel_ready_future(self._channel).result(timeout=timeout) + except grpc.FutureTimeoutError: + self._channel = None + return + # hook up the stub interface + self._stub = ensight_pb2_grpc.EnSightServiceStub(self._channel) + self._dsg_stub = dynamic_scene_graph_pb2_grpc.DynamicSceneGraphServiceStub(self._channel) + + def _metadata(self) -> List[Tuple[bytes, Union[str, bytes]]]: + """Compute the gRPC stream metadata + + Compute the list to be passed to the gRPC calls for things like security + and the session name. + + """ + ret: List[Tuple[bytes, Union[str, bytes]]] = list() + s: Union[str, bytes] + if self._security_token: + s = self._security_token + if type(s) == str: + s = s.encode("utf-8") + ret.append((b"shared_secret", s)) + if self.session_name: + s = self.session_name.encode("utf-8") + ret.append((b"session_name", s)) + return ret + + def render( + self, + width: int = 640, + height: int = 480, + aa: int = 1, + png: bool = True, + highlighting: bool = False, + ) -> bytes: + """Generate a rendering of the current EnSight scene + + Render the current scene at a specific size and using a specific number of anti-aliasing + passes. The return value can be a byte array (width*height*3) bytes or a PNG image. + + Parameters + ---------- + width: int, optional + width of the image to render + height: int, optional + height of the image to render + aa: int, optional + number of antialiasing passes to use in generating the image + png: bool, optional + if True, the return value is a PNG image bytestream. Otherwise, it is a simple + bytes object with width*height*3 values. + highlighting: bool, optional + if True, selection highlighting will be included in the image. + + Returns + ------- + bytes + bytes object representation of the rendered image + + Raises + ------ + IOError if the operation fails + """ + self.connect() + ret_type = ensight_pb2.RenderRequest.IMAGE_RAW + if png: + ret_type = ensight_pb2.RenderRequest.IMAGE_PNG + response: Any + try: + if self._stub: + response = self._stub.RenderImage( + ensight_pb2.RenderRequest( + type=ret_type, + image_width=width, + image_height=height, + image_aa_passes=aa, + include_highlighting=highlighting, + ), + metadata=self._metadata(), + ) + except Exception: + raise IOError("gRPC connection dropped") + return response.value + + def geometry(self) -> bytes: + """Return the current scene geometry in glTF format + + Package up the geometry currently being viewed in the EnSight session as + a glTF stream. Return this stream as an array of byte. Note: no + intermediate files are utilized. 
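+
+        A minimal sketch of saving the current scene to disk, assuming a reachable
+        EnSight gRPC server (the output file name is arbitrary)::
+
+            grpc = EnSightGRPC(port=12345)
+            with open("scene.glb", "wb") as fp:
+                fp.write(grpc.geometry())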
+ + Note: currently there is a limitation of glTF files to 2GB + + Returns + ------- + bytes object representation of the glTF file + + Raises + ------ + IOError if the operation fails + """ + self.connect() + response: Any + try: + if self._stub: + response = self._stub.GetGeometry( + ensight_pb2.GeometryRequest(type=ensight_pb2.GeometryRequest.GEOMETRY_GLB), + metadata=self._metadata(), + ) + except Exception: + raise IOError("gRPC connection dropped") + return response.value + + def command(self, command_string: str, do_eval: bool = True, json: bool = False) -> Any: + """Send a Python command string to be executed in EnSight + + The string will be run or evaluated in the EnSight Python interpreter via the + EnSightService::RunPython() gRPC all. If an exception or other error occurs, this + function will throw a RuntimeError. If do_eval is False, the return value will be None, + otherwise it will be the returned string (eval() will not be performed). If json is True, + the return value will be a JSON representation of the report execution result. + + Parameters + ---------- + command_string: str + The string to execute + do_eval: bool, optional + If True, a return value will be computed and returned + json: bool, optional + If True and do_eval is True, the return value will be a JSON representation of + the evaluated value. + + Returns + ------- + Any + None, a string ready for Python eval() or a JSON string. + + Raises + ------ + RuntimeError if the operation fails. + IOError if the communication fails. + """ + self.connect() + flags = ensight_pb2.PythonRequest.EXEC_RETURN_PYTHON + response: Any + if json: + flags = ensight_pb2.PythonRequest.EXEC_RETURN_JSON + if not do_eval: + flags = ensight_pb2.PythonRequest.EXEC_NO_RESULT + try: + if self._stub: + response = self._stub.RunPython( + ensight_pb2.PythonRequest(type=flags, command=command_string), + metadata=self._metadata(), + ) + except Exception: + raise IOError("gRPC connection dropped") + if response.error < 0: + raise RuntimeError(response.value) + if flags == ensight_pb2.PythonRequest.EXEC_NO_RESULT: + return None + # This was moved externally so pre-processing could be performed + # elif flags == ensight_pb2.PythonRequest.EXEC_RETURN_PYTHON: + # return eval(response.value) + return response.value + + def prefix(self) -> str: + """Return the unique prefix for this instance. + + Some EnSight gRPC APIs require a unique prefix so that EnSight can handle + multiple, simultaneous remote connections. This method will generate a GUID-based + prefix. + + Returns + ------- + str + A unique (for this session) prefix string of the form: grpc://{uuid}/ + """ + # prefix URIs will have the format: "grpc://{uuid}/{callbackname}?enum={}&uid={}" + if self._prefix is None: + self._prefix = "grpc://" + str(uuid.uuid1()) + "/" + return self._prefix + + def event_stream_enable(self, callback: Optional[Callable] = None) -> None: + """Enable a simple gRPC-based event stream from EnSight + + This method makes a EnSightService::GetEventStream() gRPC call into EnSight, returning + an ensightservice::EventReply stream. The method creates a thread to hold this + stream open and read new events from it. The thread adds the event strings to + a list of events stored on this instance. If callback is not None, the object + will be called with the event string, otherwise they can be retrieved using get_event(). 
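+
+        A minimal usage sketch, assuming a reachable EnSight gRPC server and a
+        callback that simply prints each event string as it arrives::
+
+            grpc = EnSightGRPC(port=12345)
+            grpc.event_stream_enable(callback=lambda tag: print(tag))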
+ """ + if self._event_stream is not None: + return + self._event_callback = callback + self.connect() + if self._stub: + self._event_stream = self._stub.GetEventStream( + ensight_pb2.EventStreamRequest(prefix=self.prefix()), + metadata=self._metadata(), + ) + self._event_thread = threading.Thread(target=self._poll_events) + self._event_thread.daemon = True + self._event_thread.start() + + def event_stream_is_enabled(self) -> bool: + """Check to see if the event stream is enabled + + If an event stream has been successfully established via + event_stream_enable(), then this function returns True. + + Returns + ------- + True if a ensightservice::EventReply steam is active + """ + return self._event_stream is not None + + def dynamic_scene_graph_stream(self, client_cmds): + """Open up a dynamic scene graph stream + + Make a DynamicSceneGraphService::GetSceneStream() rpc call and return + a ensightservice::SceneUpdateCommand stream instance. + + Parameters + ---------- + client_cmds + iterator that produces ensightservice::SceneClientCommand objects + + Returns + ------- + ensightservice::SceneUpdateCommand stream instance + """ + self.connect() + return self._dsg_stub.GetSceneStream(client_cmds, metadata=self._metadata()) + + def get_event(self) -> Optional[str]: + """Retrieve and remove the oldest ensightservice::EventReply string + + When any of the event streaming systems is enabled, Python threads will receive the + event records and store them in this instance in an ordered fashion. This method + retrieves the oldest ensightservice::EventReply string in the queue. + + Returns + ------- + None or the oldest event string in the queue. + """ + try: + return self._events.pop(0) + except IndexError: + return None + + def _put_event(self, evt: "ensight_pb2.EventReply") -> None: + """Add an event record to the event queue on this instance + + This method is used by threads to make the events they receive available to + calling applications via get_event(). + """ + if self._event_callback: + self._event_callback(evt.tag) + return + self._events.append(evt.tag) + + def _poll_events(self) -> None: + """Internal method to handle event streams + + This method is called by a Python thread to read events via the established + ensightservice::EventReply stream. 
+ """ + try: + while self._stub is not None: + evt = self._event_stream.next() + self._put_event(evt) + except Exception: + # signal that the gRPC connection has broken + self._event_stream = None + self._event_thread = None diff --git a/src/ansys/pyensight/core/session.py b/src/ansys/pyensight/core/session.py index 130d948972b..654489156ad 100644 --- a/src/ansys/pyensight/core/session.py +++ b/src/ansys/pyensight/core/session.py @@ -1058,8 +1058,11 @@ def _build_utils_interface(self) -> None: if _utils_dir not in sys.path: sys.path.insert(0, _utils_dir) onlyfiles = [f for f in listdir(_utils_dir) if os.path.isfile(os.path.join(_utils_dir, f))] - for _filename in onlyfiles: - _filename = os.path.join(_utils_dir, _filename) + for _basename in onlyfiles: + # skip over any files with the "_server" in their names + if "_server" in _basename: + continue + _filename = os.path.join(_utils_dir, _basename) try: # get the module and class names _name = os.path.splitext(os.path.basename(_filename))[0] diff --git a/src/ansys/pyensight/core/utils/omniverse.py b/src/ansys/pyensight/core/utils/omniverse.py new file mode 100644 index 00000000000..5cec1a22365 --- /dev/null +++ b/src/ansys/pyensight/core/utils/omniverse.py @@ -0,0 +1,199 @@ +import os +import subprocess +import sys +from typing import TYPE_CHECKING, Optional, Union + +import psutil + +if TYPE_CHECKING: + try: + import ensight + except ImportError: + from ansys.api.pyensight import ensight_api + + +class Omniverse: + """Provides the ``ensight.utils.omniverse`` interface. + + The omniverse class methods provide an interface between an EnSight session + and an Omniverse instance. + + Note + ---- + This interface is only available when using pyensight (they do not work with + the ensight Python interpreter) and the module must be used in an interpreter + that includes the Omniverse Python modules (e.g. omni and pxr). Only a single + Omniverse connection can be established within a single pyensight session. + + Parameters + ---------- + interface: + Entity that provides the ``ensight`` namespace. In the case of + EnSight Python, the ``ensight`` module is passed. In the case + of PyEnSight, ``Session.ensight`` is passed. + + Example + ------- + :: + from ansys.pyensight.core import LocalLauncher + session = LocalLauncher().start() + ov = session.ensight.utils.omniverse + ov.create_connection() + ov.update() + ov.close_connection() + + """ + + def __init__(self, interface: Union["ensight_api.ensight", "ensight"]): + self._ensight = interface + self._server_pid: Optional[int] = None + + @staticmethod + def _check_modules() -> None: + """Verify that the Python interpreter is correct + + Check for omni and pxr modules. If not present, raise an exception. + + Raises + ------ + RuntimeError if the necessary modules are missing. + + """ + try: + # Note: the EnSight embedded interpreter will not have these + import omni # noqa: F401 + import pxr # noqa: F401 + except ImportError: + raise RuntimeError( + "The module requires the omni and pxr modules to be installed." + ) from None + + def _is_running_omniverse(self) -> bool: + """Check that an Omniverse connection is active + Returns + ------- + True if the connection is active, False otherwise. 
+ """ + if self._server_pid is None: + return False + if psutil.pid_exists(self._server_pid): + return True + self._server_pid = None + return False + + def create_connection( + self, + omniverse_path: str, + include_camera: bool = False, + normalize_geometry: bool = False, + live: bool = True, + temporal: bool = False, + debug_filename: str = "", + ) -> None: + """Ensure that an EnSight dsg -> omniverse server is running + + Connect the current EnSight session to an Omniverse server. + This is done by launching a new service that makes a dynamic scene graph + connection to the EnSight session and pushes updates to the Omniverse server. + The initial EnSight scene will be pushed after the connection is established. + + Parameters + ---------- + omniverse_path : str + The URI to the Omniverse server. It will look like this: + "omniverse://localhost/Users/test" + include_camera: bool + If True, apply the EnSight camera to the Omniverse scene. This option + should be used if the target viewer is in AR/VR mode. Defaults to False. + normalize_geometry: bool + Omniverse units are in meters. If the source dataset is not in the correct + unit system or is just too large/small, this option will remap the geometry + to a unit cube. Defaults to False. + live: bool + If True, one can call 'update()' to send updated geometry to Omniverse. + If False, the Omniverse connection will push a single update and then + disconnect. Defaults to True. + temporal: bool + If True, all EnSight timesteps will be pushed to Omniverse. Defaults to False, only + the current timestep is pushed. + debug_filename: str + If the name of a file is provided, it will be used to save logging information on + the connection between EnSight and Omniverse. + + """ + self._check_modules() + if self._is_running_omniverse(): + raise RuntimeError("An Omniverse server connection is already active.") + # Make sure the internal ui module is loaded + self._ensight._session.cmd("import enspyqtgui_int", do_eval=False) + # Get the gRPC connection details and use them to launch the service + port = self._ensight._session.grpc.port() + hostname = self._ensight._session.grpc.host + token = self._ensight._session.grpc.security_token + script_name = "omniverse_dsg_server.py" + working_dir = os.path.dirname(__file__) + cmd = [ + sys.executable, + script_name, + "--host", + hostname, + "--port", + str(port), + "--path", + omniverse_path, + ] + if live: + cmd.extend(["--live"]) + if include_camera: + cmd.extend(["--vrmode"]) + if token: + cmd.extend(["--security", token]) + if temporal: + cmd.extend(["--animation"]) + else: + cmd.extend(["--no-animation"]) + if debug_filename: + cmd.extend(["--log_file", debug_filename]) + cmd.extend(["--verbose", "1"]) + if normalize_geometry: + cmd.extend(["--normalize_geometry"]) + env_vars = os.environ.copy() + process = subprocess.Popen(cmd, close_fds=True, env=env_vars, cwd=working_dir) + self._server_pid = process.pid + + def close_connection(self) -> None: + """Shut down the open EnSight dsg -> omniverse server + + Break the connection between the EnSight instance and Omniverse. + + """ + self._check_modules() + if not self._is_running_omniverse(): + return + proc = psutil.Process(self._server_pid) + for child in proc.children(recursive=True): + if psutil.pid_exists(child.pid): + # This can be a race condition, so it is ok if the child is dead already + try: + child.kill() + except psutil.NoSuchProcess: + pass + # Same issue, this process might already be shutting down, so NoSuchProcess is ok. 
+ try: + proc.kill() + except psutil.NoSuchProcess: + pass + self._server_pid = None + + def update(self) -> None: + """Update the geometry in Omniverse + + Push the current EnSight scene to the current Omniverse connection. + + """ + self._check_modules() + if not self._is_running_omniverse(): + raise RuntimeError("No Omniverse server connection is currently active.") + update_cmd = "dynamicscenegraph://localhost/client/update" + cmd = f'enspyqtgui_int.dynamic_scene_graph_command("{update_cmd}")' + self._ensight._session.cmd(cmd, do_eval=False) diff --git a/src/ansys/pyensight/core/utils/omniverse_dsg_server.py b/src/ansys/pyensight/core/utils/omniverse_dsg_server.py new file mode 100644 index 00000000000..903eb8e46e2 --- /dev/null +++ b/src/ansys/pyensight/core/utils/omniverse_dsg_server.py @@ -0,0 +1,1315 @@ +# +# This file borrows heavily from the Omniverse Example Connector which +# contains the following notice: +# +############################################################################### +# Copyright 2020 NVIDIA Corporation +# +# Permission is hereby granted, free of charge, to any person obtaining a copy of +# this software and associated documentation files (the "Software"), to deal in +# the Software without restriction, including without limitation the rights to +# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +# the Software, and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# +############################################################################### + +import argparse +import logging +import math +import os +import queue +import shutil +import sys +import threading +from typing import Any, List, Optional + +from ansys.api.pyensight.v0 import dynamic_scene_graph_pb2 +from ansys.pyensight.core import ensight_grpc +import numpy +import omni.client +import png +from pxr import Gf, Sdf, Usd, UsdGeom, UsdLux, UsdShade + + +class OmniverseWrapper: + verbose = 0 + + @staticmethod + def logCallback(threadName: None, component: Any, level: Any, message: str) -> None: + if OmniverseWrapper.verbose: + logging.info(message) + + @staticmethod + def connectionStatusCallback( + url: Any, connectionStatus: "omni.client.ConnectionStatus" + ) -> None: + if connectionStatus is omni.client.ConnectionStatus.CONNECT_ERROR: + sys.exit("[ERROR] Failed connection, exiting.") + + def __init__( + self, + live_edit: bool = False, + path: str = "omniverse://localhost/Users/test", + verbose: int = 0, + ): + self._connectionStatusSubscription = None + self._stage = None + self._destinationPath = path + self._old_stages: list = [] + self._stagename = "dsg_scene.usd" + self._live_edit = live_edit + if self._live_edit: + self._stagename = "dsg_scene.live" + OmniverseWrapper.verbose = verbose + + omni.client.set_log_callback(OmniverseWrapper.logCallback) + if verbose > 1: + omni.client.set_log_level(omni.client.LogLevel.DEBUG) + + if not omni.client.initialize(): + sys.exit("[ERROR] Unable to initialize Omniverse client, exiting.") + + self._connectionStatusSubscription = omni.client.register_connection_status_callback( + OmniverseWrapper.connectionStatusCallback + ) + + if not self.isValidOmniUrl(self._destinationPath): + self.log("Note technically the Omniverse URL {self._destinationPath} is not valid") + + def log(self, msg: str) -> None: + if OmniverseWrapper.verbose: + logging.info(msg) + + def shutdown(self) -> None: + omni.client.live_wait_for_pending_updates() + self._connectionStatusSubscription = None + omni.client.shutdown() + + @staticmethod + def isValidOmniUrl(url: str) -> bool: + omniURL = omni.client.break_url(url) + if omniURL.scheme == "omniverse" or omniURL.scheme == "omni": + return True + return False + + def stage_url(self, name: Optional[str] = None) -> str: + if name is None: + name = self._stagename + return self._destinationPath + "/" + name + + def delete_old_stages(self) -> None: + while self._old_stages: + stage = self._old_stages.pop() + omni.client.delete(stage) + + def create_new_stage(self) -> None: + self.log(f"Creating Omniverse stage: {self.stage_url()}") + if self._stage: + self._stage.Unload() + self._stage = None + self.delete_old_stages() + self._stage = Usd.Stage.CreateNew(self.stage_url()) + self._old_stages.append(self.stage_url()) + UsdGeom.SetStageUpAxis(self._stage, UsdGeom.Tokens.y) + # in M + UsdGeom.SetStageMetersPerUnit(self._stage, 1.0) + self.log(f"Created stage: {self.stage_url()}") + + def save_stage(self) -> None: + self._stage.GetRootLayer().Save() # type:ignore + omni.client.live_process() + + # This function will add a commented checkpoint to a file on Nucleus if: + # Live mode is disabled (live checkpoints are ill-supported) + # The Nucleus server supports checkpoints + def checkpoint(self, comment: str = "") -> None: + if self._live_edit: + return + result, serverInfo = omni.client.get_server_info(self.stage_url()) + if result and serverInfo and serverInfo.checkpoints_enabled: + bForceCheckpoint = True + self.log(f"Adding 
checkpoint comment <{comment}> to stage <{self.stage_url()}>") + omni.client.create_checkpoint(self.stage_url(), comment, bForceCheckpoint) + + def username(self, display: bool = True) -> Optional[str]: + result, serverInfo = omni.client.get_server_info(self.stage_url()) + if serverInfo: + if display: + self.log(f"Connected username:{serverInfo.username}") + return serverInfo.username + return None + + h = 50.0 + boxVertexIndices = [ + 0, + 1, + 2, + 1, + 3, + 2, + 4, + 5, + 6, + 4, + 6, + 7, + 8, + 9, + 10, + 8, + 10, + 11, + 12, + 13, + 14, + 12, + 14, + 15, + 16, + 17, + 18, + 16, + 18, + 19, + 20, + 21, + 22, + 20, + 22, + 23, + ] + boxVertexCounts = [3] * 12 + boxNormals = [ + (0, 0, -1), + (0, 0, -1), + (0, 0, -1), + (0, 0, -1), + (0, 0, 1), + (0, 0, 1), + (0, 0, 1), + (0, 0, 1), + (0, -1, 0), + (0, -1, 0), + (0, -1, 0), + (0, -1, 0), + (1, 0, 0), + (1, 0, 0), + (1, 0, 0), + (1, 0, 0), + (0, 1, 0), + (0, 1, 0), + (0, 1, 0), + (0, 1, 0), + (-1, 0, 0), + (-1, 0, 0), + (-1, 0, 0), + (-1, 0, 0), + ] + boxPoints = [ + (h, -h, -h), + (-h, -h, -h), + (h, h, -h), + (-h, h, -h), + (h, h, h), + (-h, h, h), + (-h, -h, h), + (h, -h, h), + (h, -h, h), + (-h, -h, h), + (-h, -h, -h), + (h, -h, -h), + (h, h, h), + (h, -h, h), + (h, -h, -h), + (h, h, -h), + (-h, h, h), + (h, h, h), + (h, h, -h), + (-h, h, -h), + (-h, -h, h), + (-h, h, h), + (-h, h, -h), + (-h, -h, -h), + ] + boxUVs = [ + (0, 0), + (0, 1), + (1, 1), + (1, 0), + (0, 0), + (0, 1), + (1, 1), + (1, 0), + (0, 0), + (0, 1), + (1, 1), + (1, 0), + (0, 0), + (0, 1), + (1, 1), + (1, 0), + (0, 0), + (0, 1), + (1, 1), + (1, 0), + (0, 0), + (0, 1), + (1, 1), + (1, 0), + ] + + def createBox(self, box_number: int = 0) -> "UsdGeom.Mesh": + rootUrl = "/Root" + boxUrl = rootUrl + "/Boxes/box_%d" % box_number + xformPrim = UsdGeom.Xform.Define(self._stage, rootUrl) # noqa: F841 + # Define the defaultPrim as the /Root prim + rootPrim = self._stage.GetPrimAtPath(rootUrl) # type:ignore + self._stage.SetDefaultPrim(rootPrim) # type:ignore + boxPrim = UsdGeom.Mesh.Define(self._stage, boxUrl) + boxPrim.CreateDisplayColorAttr([(0.463, 0.725, 0.0)]) + boxPrim.CreatePointsAttr(OmniverseWrapper.boxPoints) + boxPrim.CreateNormalsAttr(OmniverseWrapper.boxNormals) + boxPrim.CreateFaceVertexCountsAttr(OmniverseWrapper.boxVertexCounts) + boxPrim.CreateFaceVertexIndicesAttr(OmniverseWrapper.boxVertexIndices) + # USD 22.08 changed the primvar API + if hasattr(boxPrim, "CreatePrimvar"): + texCoords = boxPrim.CreatePrimvar( + "st", Sdf.ValueTypeNames.TexCoord2fArray, UsdGeom.Tokens.varying + ) + else: + primvarsAPI = UsdGeom.PrimvarsAPI(boxPrim) + texCoords = primvarsAPI.CreatePrimvar( + "st", Sdf.ValueTypeNames.TexCoord2fArray, UsdGeom.Tokens.varying + ) + texCoords.Set(OmniverseWrapper.boxUVs) + texCoords.SetInterpolation("vertex") + if not boxPrim: + sys.exit("[ERROR] Failure to create box") + self.save_stage() + return boxPrim + + @staticmethod + def clean_name(name: str, id_name: Any = None) -> str: + name = name.replace(" ", "_").replace("-", "_") + name = name.replace(".", "_").replace(":", "_") + name = name.replace("[", "_").replace("]", "_") + name = name.replace("(", "_").replace(")", "_") + name = name.replace("<", "_").replace(">", "_") + if id is not None: + name = name + "_" + str(id_name) + return name + + @staticmethod + def decompose_matrix(values: Any) -> Any: + # ang_convert = 180.0/math.pi + ang_convert = 1.0 + trans_convert = 1.0 + m = Gf.Matrix4f(*values) + m = m.GetTranspose() + + s = math.sqrt(m[0][0] * m[0][0] + m[0][1] * m[0][1] + m[0][2] * 
m[0][2]) + # cleanup scale + m = m.RemoveScaleShear() + # r = m.ExtractRotation() + R = m.ExtractRotationMatrix() + r = [ + math.atan2(R[2][1], R[2][2]) * ang_convert, + math.atan2(-R[2][0], 1.0) * ang_convert, + math.atan2(R[1][0], R[0][0]) * ang_convert, + ] + t = m.ExtractTranslation() + t = [t[0] * trans_convert, t[1] * trans_convert, t[2] * trans_convert] + return s, r, t + + def create_dsg_mesh_block( + self, + name, + id, + parent_prim, + verts, + conn, + normals, + tcoords, + matrix=[1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0], + diffuse=[1.0, 1.0, 1.0, 1.0], + variable=None, + ): + # 1D texture map for variables https://graphics.pixar.com/usd/release/tut_simple_shading.html + # create the part usd object + partname = self.clean_name(name, id) + stage_name = "/Parts/" + partname + ".usd" + part_stage_url = self.stage_url(stage_name) + omni.client.delete(part_stage_url) + part_stage = Usd.Stage.CreateNew(part_stage_url) + self._old_stages.append(part_stage_url) + xform = UsdGeom.Xform.Define(part_stage, "/" + partname) + mesh = UsdGeom.Mesh.Define(part_stage, "/" + partname + "/Mesh") + # mesh.CreateDisplayColorAttr() + mesh.CreateDoubleSidedAttr().Set(True) + mesh.CreatePointsAttr(verts) + mesh.CreateNormalsAttr(normals) + mesh.CreateFaceVertexCountsAttr([3] * int(conn.size / 3)) + mesh.CreateFaceVertexIndicesAttr(conn) + if (tcoords is not None) and variable: + # USD 22.08 changed the primvar API + if hasattr(mesh, "CreatePrimvar"): + texCoords = mesh.CreatePrimvar( + "st", Sdf.ValueTypeNames.TexCoord2fArray, UsdGeom.Tokens.varying + ) + else: + primvarsAPI = UsdGeom.PrimvarsAPI(mesh) + texCoords = primvarsAPI.CreatePrimvar( + "st", Sdf.ValueTypeNames.TexCoord2fArray, UsdGeom.Tokens.varying + ) + texCoords.Set(tcoords) + texCoords.SetInterpolation("vertex") + # sphere = part_stage.DefinePrim('/' + partname + '/sphere', 'Sphere') + part_prim = part_stage.GetPrimAtPath("/" + partname) + part_stage.SetDefaultPrim(part_prim) + + # Currently, this will never happen, but it is a setup for rigid body transforms + # At present, the group transforms have been cooked into the vertices so this is not needed + matrixOp = xform.AddXformOp(UsdGeom.XformOp.TypeTransform, UsdGeom.XformOp.PrecisionDouble) + matrixOp.Set(Gf.Matrix4d(*matrix).GetTranspose()) + + self.create_dsg_material( + part_stage, mesh, "/" + partname, diffuse=diffuse, variable=variable + ) + part_stage.GetRootLayer().Save() + + # glue it into our stage + path = parent_prim.GetPath().AppendChild("part_ref_" + partname) + part_ref = self._stage.OverridePrim(path) + part_ref.GetReferences().AddReference("." 
+ stage_name) + + return part_stage_url + + def create_dsg_material( + self, stage, mesh, root_name, diffuse=[1.0, 1.0, 1.0, 1.0], variable=None + ): + # https://graphics.pixar.com/usd/release/spec_usdpreviewsurface.html + material = UsdShade.Material.Define(stage, root_name + "/Material") + pbrShader = UsdShade.Shader.Define(stage, root_name + "/Material/PBRShader") + pbrShader.CreateIdAttr("UsdPreviewSurface") + pbrShader.CreateInput("roughness", Sdf.ValueTypeNames.Float).Set(1.0) + pbrShader.CreateInput("metallic", Sdf.ValueTypeNames.Float).Set(0.0) + pbrShader.CreateInput("opacity", Sdf.ValueTypeNames.Float).Set(diffuse[3]) + pbrShader.CreateInput("useSpecularWorkflow", Sdf.ValueTypeNames.Int).Set(1) + if variable: + stReader = UsdShade.Shader.Define(stage, root_name + "/Material/stReader") + stReader.CreateIdAttr("UsdPrimvarReader_float2") + diffuseTextureSampler = UsdShade.Shader.Define( + stage, root_name + "/Material/diffuseTexture" + ) + diffuseTextureSampler.CreateIdAttr("UsdUVTexture") + name = self.clean_name(variable.name) + filename = self._destinationPath + f"/Parts/Textures/palette_{name}.png" + diffuseTextureSampler.CreateInput("file", Sdf.ValueTypeNames.Asset).Set(filename) + diffuseTextureSampler.CreateInput("st", Sdf.ValueTypeNames.Float2).ConnectToSource( + stReader.ConnectableAPI(), "result" + ) + diffuseTextureSampler.CreateOutput("rgb", Sdf.ValueTypeNames.Float3) + pbrShader.CreateInput("diffuseColor", Sdf.ValueTypeNames.Color3f).ConnectToSource( + diffuseTextureSampler.ConnectableAPI(), "rgb" + ) + stInput = material.CreateInput("frame:stPrimvarName", Sdf.ValueTypeNames.Token) + stInput.Set("st") + stReader.CreateInput("varname", Sdf.ValueTypeNames.Token).ConnectToSource(stInput) + else: + scale = 1.0 + color = Gf.Vec3f(diffuse[0] * scale, diffuse[1] * scale, diffuse[2] * scale) + pbrShader.CreateInput("diffuseColor", Sdf.ValueTypeNames.Color3f).Set(color) + + material.CreateSurfaceOutput().ConnectToSource(pbrShader.ConnectableAPI(), "surface") + UsdShade.MaterialBindingAPI(mesh).Bind(material) + + return material + + def create_dsg_variable_textures(self, variables): + # make folder: scratch/Textures/{palette_*.png} + shutil.rmtree("scratch", ignore_errors=True, onerror=None) + os.makedirs("scratch/Textures", exist_ok=True) + for var in variables.values(): + data = bytearray(var.texture) + n_pixels = int(len(data) / 4) + row = [] + for i in range(n_pixels): + row.append(data[i * 4 + 0]) + row.append(data[i * 4 + 1]) + row.append(data[i * 4 + 2]) + io = png.Writer(width=n_pixels, height=2, bitdepth=8, greyscale=False) + rows = [row, row] + name = self.clean_name(var.name) + with open(f"scratch/Textures/palette_{name}.png", "wb") as fp: + io.write(fp, rows) + uriPath = self._destinationPath + "/Parts/Textures" + omni.client.delete(uriPath) + omni.client.copy("scratch/Textures", uriPath) + + def create_dsg_root(self, camera=None): + root_name = "/Root" + root_prim = UsdGeom.Xform.Define(self._stage, root_name) + # Define the defaultPrim as the /Root prim + root_prim = self._stage.GetPrimAtPath(root_name) + self._stage.SetDefaultPrim(root_prim) + + if camera is not None: + cam_name = "/Root/Cam" + cam_prim = UsdGeom.Xform.Define(self._stage, cam_name) + cam_pos = Gf.Vec3d(camera.lookfrom[0], camera.lookfrom[1], camera.lookfrom[2]) + target_pos = Gf.Vec3d(camera.lookat[0], camera.lookat[1], camera.lookat[2]) + up_vec = Gf.Vec3d(camera.upvector[0], camera.upvector[1], camera.upvector[2]) + cam_prim = self._stage.GetPrimAtPath(cam_name) + geom_cam = 
UsdGeom.Camera(cam_prim) + if not geom_cam: + geom_cam = UsdGeom.Camera.Define(self._stage, cam_name) + # Set camera values + # center of interest attribute unique for Kit defines the pivot for tumbling the camera + # Set as an attribute on the prim + coi_attr = cam_prim.GetAttribute("omni:kit:centerOfInterest") + if not coi_attr.IsValid(): + coi_attr = cam_prim.CreateAttribute( + "omni:kit:centerOfInterest", Sdf.ValueTypeNames.Vector3d + ) + coi_attr.Set(target_pos) + # get the camera + cam = geom_cam.GetCamera() + # LOL, not sure why is might be correct, but so far it seems to work??? + cam.focalLength = camera.fieldofview + cam.clippingRange = Gf.Range1f(0.1, 10) + look_at = Gf.Matrix4d() + look_at.SetLookAt(cam_pos, target_pos, up_vec) + trans_row = look_at.GetRow(3) + trans_row = Gf.Vec4d(-trans_row[0], -trans_row[1], -trans_row[2], trans_row[3]) + look_at.SetRow(3, trans_row) + # print(look_at) + cam.transform = look_at + + # set the updated camera + geom_cam.SetFromCamera(cam) + return root_prim + + def create_dsg_group( + self, + name: str, + parent_prim, + obj_type: Any = None, + matrix: List[float] = [ + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + ], + ): + path = parent_prim.GetPath().AppendChild(self.clean_name(name)) + group_prim = UsdGeom.Xform.Define(self._stage, path) + # At present, the group transforms have been cooked into the vertices so this is not needed + matrixOp = group_prim.AddXformOp( + UsdGeom.XformOp.TypeTransform, UsdGeom.XformOp.PrecisionDouble + ) + matrixOp.Set(Gf.Matrix4d(*matrix).GetTranspose()) + self.log(f"Created group:'{name}' {str(obj_type)}") + return group_prim + + def uploadMaterial(self): + uriPath = self._destinationPath + "/Materials" + omni.client.delete(uriPath) + omni.client.copy("resources/Materials", uriPath) + + def createMaterial(self, mesh): + # Create a material instance for this in USD + materialName = "Fieldstone" + newMat = UsdShade.Material.Define(self._stage, "/Root/Looks/Fieldstone") + + matPath = "/Root/Looks/Fieldstone" + + # MDL Shader + # Create the MDL shader + mdlShader = UsdShade.Shader.Define(self._stage, matPath + "/Fieldstone") + mdlShader.CreateIdAttr("mdlMaterial") + + mdlShaderModule = "./Materials/Fieldstone.mdl" + mdlShader.SetSourceAsset(mdlShaderModule, "mdl") + # mdlShader.GetPrim().CreateAttribute("info:mdl:sourceAsset:subIdentifier", + # Sdf.ValueTypeNames.Token, True).Set(materialName) + # mdlOutput = newMat.CreateSurfaceOutput("mdl") + # mdlOutput.ConnectToSource(mdlShader, "out") + mdlShader.SetSourceAssetSubIdentifier(materialName, "mdl") + shaderOutput = mdlShader.CreateOutput("out", Sdf.ValueTypeNames.Token) + shaderOutput.SetRenderType("material") + newMat.CreateSurfaceOutput("mdl").ConnectToSource(shaderOutput) + newMat.CreateDisplacementOutput("mdl").ConnectToSource(shaderOutput) + newMat.CreateVolumeOutput("mdl").ConnectToSource(shaderOutput) + + # USD Preview Surface Shaders + + # Create the "USD Primvar reader for float2" shader + primStShader = UsdShade.Shader.Define(self._stage, matPath + "/PrimST") + primStShader.CreateIdAttr("UsdPrimvarReader_float2") + primStShader.CreateOutput("result", Sdf.ValueTypeNames.Float2) + primStShader.CreateInput("varname", Sdf.ValueTypeNames.Token).Set("st") + + # Create the "Diffuse Color Tex" shader + diffuseColorShader = UsdShade.Shader.Define(self._stage, matPath + "/DiffuseColorTex") + diffuseColorShader.CreateIdAttr("UsdUVTexture") + texInput = diffuseColorShader.CreateInput("file", 
Sdf.ValueTypeNames.Asset) + texInput.Set("./Materials/Fieldstone/Fieldstone_BaseColor.png") + texInput.GetAttr().SetColorSpace("RGB") + diffuseColorShader.CreateInput("st", Sdf.ValueTypeNames.Float2).ConnectToSource( + primStShader.CreateOutput("result", Sdf.ValueTypeNames.Float2) + ) + diffuseColorShaderOutput = diffuseColorShader.CreateOutput("rgb", Sdf.ValueTypeNames.Float3) + + # Create the "Normal Tex" shader + normalShader = UsdShade.Shader.Define(self._stage, matPath + "/NormalTex") + normalShader.CreateIdAttr("UsdUVTexture") + normalTexInput = normalShader.CreateInput("file", Sdf.ValueTypeNames.Asset) + normalTexInput.Set("./Materials/Fieldstone/Fieldstone_N.png") + normalTexInput.GetAttr().SetColorSpace("RAW") + normalShader.CreateInput("st", Sdf.ValueTypeNames.Float2).ConnectToSource( + primStShader.CreateOutput("result", Sdf.ValueTypeNames.Float2) + ) + normalShaderOutput = normalShader.CreateOutput("rgb", Sdf.ValueTypeNames.Float3) + + # Create the USD Preview Surface shader + usdPreviewSurfaceShader = UsdShade.Shader.Define(self._stage, matPath + "/PreviewSurface") + usdPreviewSurfaceShader.CreateIdAttr("UsdPreviewSurface") + diffuseColorInput = usdPreviewSurfaceShader.CreateInput( + "diffuseColor", Sdf.ValueTypeNames.Color3f + ) + diffuseColorInput.ConnectToSource(diffuseColorShaderOutput) + normalInput = usdPreviewSurfaceShader.CreateInput("normal", Sdf.ValueTypeNames.Normal3f) + normalInput.ConnectToSource(normalShaderOutput) + + # Set the linkage between material and USD Preview surface shader + # usdPreviewSurfaceOutput = newMat.CreateSurfaceOutput() + # usdPreviewSurfaceOutput.ConnectToSource(usdPreviewSurfaceShader, "surface") + # UsdShade.MaterialBindingAPI(mesh).Bind(newMat) + + usdPreviewSurfaceShaderOutput = usdPreviewSurfaceShader.CreateOutput( + "surface", Sdf.ValueTypeNames.Token + ) + usdPreviewSurfaceShaderOutput.SetRenderType("material") + newMat.CreateSurfaceOutput().ConnectToSource(usdPreviewSurfaceShaderOutput) + + UsdShade.MaterialBindingAPI.Apply(mesh.GetPrim()).Bind(newMat) + + # self.save_stage() + + # Create a distant light in the scene. + def createDistantLight(self): + newLight = UsdLux.DistantLight.Define(self._stage, "/Root/DistantLight") + newLight.CreateAngleAttr(0.53) + newLight.CreateColorAttr(Gf.Vec3f(1.0, 1.0, 0.745)) + newLight.CreateIntensityAttr(500.0) + + # self.save_stage() + + # Create a dome light in the scene. 
+ def createDomeLight(self, texturePath): + newLight = UsdLux.DomeLight.Define(self._stage, "/Root/DomeLight") + newLight.CreateIntensityAttr(2200.0) + newLight.CreateTextureFileAttr(texturePath) + newLight.CreateTextureFormatAttr("latlong") + + # Set rotation on domelight + xForm = newLight + rotateOp = xForm.AddXformOp(UsdGeom.XformOp.TypeRotateZYX, UsdGeom.XformOp.PrecisionFloat) + rotateOp.Set(Gf.Vec3f(270, 0, 0)) + + # self.save_stage() + + def createEmptyFolder(self, emptyFolderPath): + folder = self._destinationPath + emptyFolderPath + self.log(f"Creating new folder: {folder}") + result = omni.client.create_folder(folder) + self.log(f"Finished creating: {result.name}") + return result.name + + +class Part(object): + def __init__(self, link: "DSGOmniverseLink"): + self._link = link + self.cmd: Optional[Any] = None + self.reset() + + def reset(self, cmd: Any = None) -> None: + self.conn_tris = numpy.array([], dtype="int32") + self.conn_lines = numpy.array([], dtype="int32") + self.coords = numpy.array([], dtype="float32") + self.normals = numpy.array([], dtype="float32") + self.normals_elem = False + self.tcoords = numpy.array([], dtype="float32") + self.tcoords_var = None + self.tcoords_elem = False + self.cmd = cmd + + def update_geom(self, cmd: dynamic_scene_graph_pb2.UpdateGeom) -> None: + if cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.COORDINATES: + if self.coords.size != cmd.total_array_size: + self.coords = numpy.resize(self.coords, cmd.total_array_size) + self.coords[cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)] = cmd.flt_array + elif cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.TRIANGLES: + if self.conn_tris.size != cmd.total_array_size: + self.conn_tris = numpy.resize(self.conn_tris, cmd.total_array_size) + self.conn_tris[cmd.chunk_offset : cmd.chunk_offset + len(cmd.int_array)] = cmd.int_array + elif cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.LINES: + if self.conn_lines.size != cmd.total_array_size: + self.conn_lines = numpy.resize(self.conn_lines, cmd.total_array_size) + self.conn_lines[ + cmd.chunk_offset : cmd.chunk_offset + len(cmd.int_array) + ] = cmd.int_array + elif (cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_NORMALS) or ( + cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.NODE_NORMALS + ): + self.normals_elem = cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_NORMALS + if self.normals.size != cmd.total_array_size: + self.normals = numpy.resize(self.normals, cmd.total_array_size) + self.normals[cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)] = cmd.flt_array + elif (cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_VARIABLE) or ( + cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.NODE_VARIABLE + ): + # Get the variable definition + if cmd.variable_id in self._link._variables: + self.tcoords_var = cmd.variable_id + self.tcoords_elem = ( + cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_VARIABLE + ) + if self.tcoords.size != cmd.total_array_size: + self.tcoords = numpy.resize(self.tcoords, cmd.total_array_size) + self.tcoords[ + cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array) + ] = cmd.flt_array + else: + self.tcoords_var = None + + def build(self): + if self.cmd is None: + return + if self.conn_lines.size: + self._link.log( + f"Note, part '{self.cmd.name}' has lines which are not currently supported." 
+ ) + self.cmd = None + return + verts = self.coords + if self._link._normalize_geometry and self._link._scene_bounds is not None: + midx = (self._link._scene_bounds[3] + self._link._scene_bounds[0]) * 0.5 + midy = (self._link._scene_bounds[4] + self._link._scene_bounds[1]) * 0.5 + midz = (self._link._scene_bounds[5] + self._link._scene_bounds[2]) * 0.5 + dx = self._link._scene_bounds[3] - self._link._scene_bounds[0] + dy = self._link._scene_bounds[4] - self._link._scene_bounds[1] + dz = self._link._scene_bounds[5] - self._link._scene_bounds[2] + s = dx + if dy > s: + s = dy + if dz > s: + s = dz + if s == 0: + s = 1.0 + num_verts = int(verts.size / 3) + for i in range(num_verts): + j = i * 3 + verts[j + 0] = (verts[j + 0] - midx) / s + verts[j + 1] = (verts[j + 1] - midy) / s + verts[j + 2] = (verts[j + 2] - midz) / s + + conn = self.conn_tris + normals = self.normals + tcoords = None + if self.tcoords.size: + tcoords = self.tcoords + if self.tcoords_elem or self.normals_elem: + verts_per_prim = 3 + num_prims = int(conn.size / verts_per_prim) + # "flatten" the triangles to move values from elements to nodes + new_verts = numpy.ndarray((num_prims * verts_per_prim * 3,), dtype="float32") + new_conn = numpy.ndarray((num_prims * verts_per_prim,), dtype="int32") + new_tcoords = None + if tcoords is not None: + # remember that the input values are 1D at this point, we will expand to 2D later + new_tcoords = numpy.ndarray((num_prims * verts_per_prim,), dtype="float32") + new_normals = None + if normals is not None: + if normals.size == 0: + print("Warning: zero length normals!") + else: + new_normals = numpy.ndarray((num_prims * verts_per_prim * 3,), dtype="float32") + j = 0 + for i0 in range(num_prims): + for i1 in range(verts_per_prim): + idx = conn[i0 * verts_per_prim + i1] + # new connectivity (identity) + new_conn[j] = j + # copy the vertex + new_verts[j * 3 + 0] = verts[idx * 3 + 0] + new_verts[j * 3 + 1] = verts[idx * 3 + 1] + new_verts[j * 3 + 2] = verts[idx * 3 + 2] + if new_normals is not None: + if self.normals_elem: + # copy the normal associated with the face + new_normals[j * 3 + 0] = normals[i0 * 3 + 0] + new_normals[j * 3 + 1] = normals[i0 * 3 + 1] + new_normals[j * 3 + 2] = normals[i0 * 3 + 2] + else: + # copy the same normal as the vertex + new_normals[j * 3 + 0] = normals[idx * 3 + 0] + new_normals[j * 3 + 1] = normals[idx * 3 + 1] + new_normals[j * 3 + 2] = normals[idx * 3 + 2] + if new_tcoords is not None: + # remember, 1D texture coords at this point + if self.tcoords_elem: + # copy the texture coord associated with the face + new_tcoords[j] = tcoords[i0] + else: + # copy the same texture coord as the vertex + new_tcoords[j] = tcoords[idx] + j += 1 + # new arrays. + verts = new_verts + conn = new_conn + normals = new_normals + if tcoords is not None: + tcoords = new_tcoords + + var = None + # texture coords need transformation from variable value to [ST] + if tcoords is not None: + var_id = self.cmd.color_variableid + var = self._link._variables[var_id] + v_min = None + v_max = None + for lvl in var.levels: + if (v_min is None) or (v_min > lvl.value): + v_min = lvl.value + if (v_max is None) or (v_max < lvl.value): + v_max = lvl.value + var_minmax = [v_min, v_max] + # build a power of two x 1 texture + num_texels = int(len(var.texture) / 4) + half_texel = 1 / (num_texels * 2.0) + num_verts = int(verts.size / 3) + tmp = numpy.ndarray((num_verts * 2,), dtype="float32") + tmp.fill(0.5) # fill in the T coordinate... 
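+            # Map the scalar variable values onto the S axis of the 1D palette texture:
+            # values are normalized against the variable min/max and then scaled into
+            # [half_texel, 1 - half_texel] so samples land on texel centers rather than
+            # the texture borders.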
+            tex_width = half_texel * 2 * (num_texels - 1)  # center to center of num_texels
+            # if the range is 0, adjust the min by -1. The result is that the texture
+            # coords will get mapped to S=1.0 which is what EnSight does in this situation
+            if (var_minmax[1] - var_minmax[0]) == 0.0:
+                var_minmax[0] = var_minmax[0] - 1.0
+            var_width = var_minmax[1] - var_minmax[0]
+            for idx in range(num_verts):
+                # normalized S coord value (clamp)
+                s = (tcoords[idx] - var_minmax[0]) / var_width
+                if s < 0.0:
+                    s = 0.0
+                if s > 1.0:
+                    s = 1.0
+                # map to the texture range and set the S value
+                tmp[idx * 2] = s * tex_width + half_texel
+            tcoords = tmp
+
+        parent = self._link._groups[self.cmd.parent_id]
+        color = [
+            self.cmd.fill_color[0] * self.cmd.diffuse,
+            self.cmd.fill_color[1] * self.cmd.diffuse,
+            self.cmd.fill_color[2] * self.cmd.diffuse,
+            self.cmd.fill_color[3],
+        ]
+        obj_id = self._link._mesh_block_count
+        # prim =
+        _ = self._link._omni.create_dsg_mesh_block(
+            self.cmd.name,
+            obj_id,
+            parent[1],
+            verts,
+            conn,
+            normals,
+            tcoords,
+            matrix=self.cmd.matrix4x4,
+            diffuse=color,
+            variable=var,
+        )
+        self._link.log(
+            f"Part '{self.cmd.name}' defined: {self.coords.size/3} verts, {self.conn_tris.size/3} tris, {self.conn_lines.size/2} lines."
+        )
+        self.cmd = None
+
+
+class DSGOmniverseLink(object):
+    def __init__(
+        self,
+        omni: OmniverseWrapper,
+        port: int = 12345,
+        host: str = "127.0.0.1",
+        security_code: str = "",
+        verbose: int = 0,
+        normalize_geometry: bool = False,
+        vrmode: bool = False,
+    ):
+        super().__init__()
+        self._grpc = ensight_grpc.EnSightGRPC(port=port, host=host, secret_key=security_code)
+        self._verbose = verbose
+        self._thread: Optional[threading.Thread] = None
+        self._message_queue: queue.Queue = queue.Queue()  # Messages coming from EnSight
+        self._dsg_queue: Optional[queue.SimpleQueue] = None  # Outgoing messages to EnSight
+        self._shutdown = False
+        self._dsg = None
+        self._omni = omni
+        self._normalize_geometry = normalize_geometry
+        self._vrmode = vrmode
+        self._mesh_block_count = 0
+        self._variables: dict = {}
+        self._groups: dict = {}
+        self._part: Part = Part(self)
+        self._scene_bounds: Optional[List] = None
+
+    def log(self, s: str) -> None:
+        """Log a string to the logging system
+
+        If verbosity is set, log the string.
+        """
+        if self._verbose > 0:
+            logging.info(s)
+
+    def start(self) -> int:
+        """Start a gRPC connection to an EnSight instance
+
+        Make a gRPC connection and start a DSG stream handler.
+
+        Returns
+        -------
+        0 on success, -1 on an error.
+        """
+        # Start by setting up and verifying the connection
+        self._grpc.connect()
+        if not self._grpc.is_connected():
+            logging.info(
+                f"Unable to establish gRPC connection to: {self._grpc.host}:{self._grpc.port()}"
+            )
+            return -1
+        # Streaming API requires an iterator, so we make one from a queue
+        # it also returns an iterator. self._dsg_queue is the input stream interface
+        # self._dsg is the returned stream iterator.
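+        # Note: iter(self._dsg_queue.get, None) turns the queue into a blocking iterator:
+        # it keeps yielding whatever get() returns until a None sentinel is pushed, which
+        # is the form the request-streaming gRPC call consumes as its input stream.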
+ if self._dsg is not None: + return 0 + self._dsg_queue = queue.SimpleQueue() + self._dsg = self._grpc.dynamic_scene_graph_stream( + iter(self._dsg_queue.get, None) # type:ignore + ) + self._thread = threading.Thread(target=self.poll_messages) + if self._thread is not None: + self._thread.start() + return 0 + + def end(self): + """Stop a gRPC connection to the EnSight instance""" + self._grpc.stop_server() + self._shutdown = True + self._thread.join() + self._grpc.shutdown() + self._dsg = None + self._thread = None + self._dsg_queue = None + + def is_shutdown(self): + """Check the service shutdown request status""" + return self._shutdown + + def request_an_update(self, animation: bool = False) -> None: + """Start a DSG update + Send a command to the DSG protocol to "init" an update. + + Parameters + ---------- + animation: + if True, export all EnSight timesteps. + """ + # Send an INIT command to trigger a stream of update packets + cmd = dynamic_scene_graph_pb2.SceneClientCommand() + cmd.command_type = dynamic_scene_graph_pb2.SceneClientCommand.INIT + # Allow EnSight push commands, but full scene only for now... + cmd.init.allow_spontaneous = True + cmd.init.include_temporal_geometry = animation + cmd.init.allow_incremental_updates = False + cmd.init.maximum_chunk_size = 1024 * 1024 + self._dsg_queue.put(cmd) # type:ignore + # Handle the update messages + self.handle_one_update() + + def poll_messages(self) -> None: + """Core interface to grab DSG events from gRPC and queue them for processing + + This is run by a thread that is monitoring the dsg RPC call for update messages + it places them in _message_queue as it finds them. They are picked up by the + main thread via get_next_message() + """ + while not self._shutdown: + try: + self._message_queue.put(next(self._dsg)) # type:ignore + except Exception: + self._shutdown = True + logging.info("DSG connection broken, calling exit") + os._exit(0) + + def get_next_message(self, wait: bool = True) -> Any: + """Get the next queued up protobuffer message + + Called by the main thread to get any messages that were pulled in from the + dsg stream and placed here by poll_messages() + """ + try: + return self._message_queue.get(block=wait) + except queue.Empty: + return None + + def handle_one_update(self) -> None: + """Monitor the DSG stream and handle a single update operation + + Wait until we get the scene update begin message. From there, reset the current + scene buckets and then parse all the incoming commands until we get the scene + update end command. At which point, save the generated stage (started in the + view command handler). Note: Parts are handled with an available bucket at all times. + When a new part update comes in or the scene update end happens, the part is "finished". 
+        """
+        # An update starts with a UPDATE_SCENE_BEGIN command
+        cmd = self.get_next_message()
+        while (cmd is not None) and (
+            cmd.command_type != dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_SCENE_BEGIN
+        ):
+            # Look for a begin command
+            cmd = self.get_next_message()
+        self.log("Begin update ------------------------")
+
+        # Start anew
+        self._variables = {}
+        self._groups = {}
+        self._part = Part(self)
+        self._scene_bounds = None
+        self._mesh_block_count = 0  # reset when a new group shows up
+
+        # handle the various commands until UPDATE_SCENE_END
+        cmd = self.get_next_message()
+        while (cmd is not None) and (
+            cmd.command_type != dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_SCENE_END
+        ):
+            self.handle_update_command(cmd)
+            cmd = self.get_next_message()
+
+        # Flush the last part
+        self.finish_part()
+
+        # Stage update complete
+        self._omni.save_stage()
+
+        self.log("End update --------------------------")
+
+    # handle an incoming gRPC update command
+    def handle_update_command(self, cmd: dynamic_scene_graph_pb2.SceneUpdateCommand) -> None:
+        """Dispatch out a scene update command to the proper handler
+
+        Given a command object, pull out the correct portion of the protobuffer union and
+        pass it to the appropriate handler.
+
+        Parameters
+        ----------
+        cmd:
+            The command to be dispatched.
+        """
+        name = "Unknown"
+        if cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.DELETE_ID:
+            name = "Delete IDs"
+        elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_PART:
+            name = "Part update"
+            tmp = cmd.update_part
+            self.handle_part(tmp)
+        elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GROUP:
+            name = "Group update"
+            tmp = cmd.update_group
+            self.handle_group(tmp)
+        elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GEOM:
+            name = "Geom update"
+            tmp = cmd.update_geom
+            self._part.update_geom(tmp)
+        elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VARIABLE:
+            name = "Variable update"
+            tmp = cmd.update_variable
+            self.handle_variable(tmp)
+        elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VIEW:
+            name = "View update"
+            tmp = cmd.update_view
+            self.handle_view(tmp)
+        elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_TEXTURE:
+            name = "Texture update"
+        self.log(f"{name} --------------------------")
+
+    def finish_part(self) -> None:
+        """Complete the current part
+
+        There is always a part being modified. This method completes the current part, commits
+        it to the Omniverse USD, and sets up the next part.
+        """
+        self._part.build()
+        self._mesh_block_count += 1
+
+    def handle_part(self, part: Any) -> None:
+        """Handle a DSG UPDATE_PART command
+
+        Parameters
+        ----------
+        part:
+            The command coming from the EnSight stream.
+        """
+        self.finish_part()
+        self._part.reset(part)
+
+    def handle_group(self, group: Any) -> None:
+        """Handle a DSG UPDATE_GROUP command
+
+        Parameters
+        ----------
+        group:
+            The command coming from the EnSight stream.
+        """
+        # reset current mesh (part) count for unique "part" naming in USD
+        self._mesh_block_count = 0
+        # get the parent group or view
+        parent = self._groups[group.parent_id]
+        obj_type = group.attributes.get("ENS_OBJ_TYPE", None)
+        matrix = group.matrix4x4
+        # The Case matrix is basically the camera transform. In vrmode, we only want
+        # the raw geometry, so use the identity matrix.
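+        # (the sixteen literals below are a flattened 4x4 identity matrix)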
+ if (obj_type == "ENS_CASE") and self._vrmode: + matrix = [ + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + ] + prim = self._omni.create_dsg_group(group.name, parent[1], matrix=matrix, obj_type=obj_type) + # record the scene bounds in case they are needed later + self._groups[group.id] = [group, prim] + bounds = group.attributes.get("ENS_SCENE_BOUNDS", None) + if bounds: + minmax = [] + for v in bounds.split(","): + try: + minmax.append(float(v)) + except Exception: + pass + if len(minmax) == 6: + self._scene_bounds = minmax + + def handle_variable(self, var: Any) -> None: + """Handle a DSG UPDATE_VARIABLE command + + Save off the EnSight variable DSG command object. + + Parameters + ---------- + var: + The command coming from the EnSight stream. + """ + self._variables[var.id] = var + + def handle_view(self, view: Any) -> None: + """Handle a DSG UPDATE_VIEW command + + Map a view command into a new Omniverse stage and populate it with materials/lights. + + Parameters + ---------- + view: + The command coming from the EnSight stream. + """ + self._scene_bounds = None + # Create a new root stage in Omniverse + self._omni.create_new_stage() + # Create the root group/camera + camera_info = view + if self._vrmode: + camera_info = None + root = self._omni.create_dsg_root(camera=camera_info) + self._omni.checkpoint("Created base scene") + # Create a distance and dome light in the scene + # self._omni.createDistantLight() + # self._omni.createDomeLight("./Materials/kloofendal_48d_partly_cloudy.hdr") + self._omni.createDomeLight("./Materials/000_sky.exr") + self._omni.checkpoint("Added lights to stage") + # Upload a material and textures to the Omniverse server + self._omni.uploadMaterial() + self._omni.create_dsg_variable_textures(self._variables) + # record + self._groups[view.id] = [view, root] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Python Omniverse EnSight Dynamic Scene Graph Client", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + parser.add_argument( + "--path", + action="store", + default="omniverse://localhost/Users/test", + help="Omniverse pathname. 
Default=omniverse://localhost/Users/test", + ) + parser.add_argument( + "--port", + metavar="ensight_grpc_port", + nargs="?", + default=12345, + type=int, + help="EnSight gRPC port number", + ) + parser.add_argument( + "--host", + metavar="ensight_grpc_host", + nargs="?", + default="127.0.0.1", + type=str, + help="EnSight gRPC hostname", + ) + parser.add_argument( + "--security", + metavar="ensight_grpc_security_code", + nargs="?", + default="", + type=str, + help="EnSight gRPC security code", + ) + parser.add_argument( + "--verbose", + metavar="verbose_level", + default=0, + type=int, + help="Enable debugging information", + ) + parser.add_argument( + "--animation", dest="animation", action="store_true", help="Save all timesteps (default)" + ) + parser.add_argument( + "--no-animation", + dest="animation", + action="store_false", + help="Save only the current timestep", + ) + parser.set_defaults(animation=False) + parser.add_argument( + "--log_file", + metavar="log_filename", + default="", + type=str, + help="Save program output to the named log file instead of stdout", + ) + parser.add_argument( + "--live", + dest="live", + action="store_true", + default=False, + help="Enable continuous operation", + ) + parser.add_argument( + "--normalize_geometry", + dest="normalize", + action="store_true", + default=False, + help="Spatially normalize incoming geometry", + ) + parser.add_argument( + "--vrmode", + dest="vrmode", + action="store_true", + default=False, + help="In this mode do not include a camera or the case level matrix. Geometry only.", + ) + args = parser.parse_args() + + log_args = dict(format="DSG/Omniverse: %(message)s", level=logging.INFO) + if args.log_file: + log_args["filename"] = args.log_file + logging.basicConfig(**log_args) # type: ignore + + destinationPath = args.path + loggingEnabled = args.verbose + + # Make the OmniVerse connection + target = OmniverseWrapper(path=destinationPath, verbose=loggingEnabled) + + # Print the username for the server + target.username() + + if loggingEnabled: + logging.info("OmniVerse connection established.") + + dsg_link = DSGOmniverseLink( + omni=target, + port=args.port, + host=args.host, + vrmode=args.vrmode, + security_code=args.security, + verbose=loggingEnabled, + normalize_geometry=args.normalize, + ) + if loggingEnabled: + logging.info(f"Make DSG connection to: {args.host}:{args.port}") + + # Start the DSG link + err = dsg_link.start() + if err < 0: + sys.exit(err) + + # Simple pull request + dsg_link.request_an_update(animation=args.animation) + + # Live operation + if args.live: + if loggingEnabled: + logging.info("Waiting for remote push operations") + while not dsg_link.is_shutdown(): + dsg_link.handle_one_update() + + # Done... 
+ if loggingEnabled: + logging.info("Shutting down DSG connection") + dsg_link.end() + + # Add a material to the box + # target.createMaterial(boxMesh) + + # Add a Nucleus Checkpoint to the stage + # target.checkpoint("Add material to the box") + + target.shutdown() diff --git a/src/ansys/pyensight/core/utils/resources/Materials/000_sky.exr b/src/ansys/pyensight/core/utils/resources/Materials/000_sky.exr new file mode 100644 index 00000000000..ad17c96203e Binary files /dev/null and b/src/ansys/pyensight/core/utils/resources/Materials/000_sky.exr differ diff --git a/src/ansys/pyensight/core/utils/resources/Materials/Fieldstone.mdl b/src/ansys/pyensight/core/utils/resources/Materials/Fieldstone.mdl new file mode 100644 index 00000000000..a4bbb5c2338 --- /dev/null +++ b/src/ansys/pyensight/core/utils/resources/Materials/Fieldstone.mdl @@ -0,0 +1,54 @@ +mdl 1.4; + +import ::OmniPBR::OmniPBR; +import ::anno::author; +import ::anno::description; +import ::anno::display_name; +import ::anno::key_words; +import ::anno::version; +import ::tex::gamma_mode; +import ::state::normal; + +export material Fieldstone(*) +[[ + ::anno::display_name("Omni PBR "), + ::anno::description("Omni PBR, supports ORM textures"), + ::anno::version(1, 0, 0, ""), + ::anno::author("NVIDIA CORPORATION"), + ::anno::key_words(string[]("omni", "PBR", "omniverse", "generic")) +]] + = ::OmniPBR::OmniPBR( + diffuse_color_constant: color(0.200000003f, 0.200000003f, 0.200000003f), + diffuse_texture: texture_2d("./Fieldstone/Fieldstone_BaseColor.png" /* tag 2828, version 6332211 */, ::tex::gamma_srgb), + albedo_desaturation: 0.f, + albedo_add: 0.f, + albedo_brightness: 1.f, + diffuse_tint: color(1.f, 1.f, 1.f), + reflection_roughness_constant: 0.5f, + reflection_roughness_texture_influence: 1.f, + reflectionroughness_texture: texture_2d(), + metallic_constant: 0.f, + metallic_texture_influence: 1.f, + metallic_texture: texture_2d(), + specular_level: 0.5f, + enable_ORM_texture: true, + ORM_texture: texture_2d("./Fieldstone/Fieldstone_ORM.png" /* tag 2830, version 596885211 */, ::tex::gamma_linear), + ao_to_diffuse: 0.f, + ao_texture: texture_2d(), + enable_emission: false, + emissive_color: color(1.f, 0.100000001f, 0.100000001f), + emissive_mask_texture: texture_2d(), + emissive_intensity: 40.f, + bump_factor: 1.f, + normalmap_texture: texture_2d("./Fieldstone/Fieldstone_N.png" /* tag 2832, version 3494456508 */, ::tex::gamma_linear), + detail_bump_factor: 0.300000012f, + detail_normalmap_texture: texture_2d(), + project_uvw: false, + world_or_object: false, + uv_space_index: 0, + texture_translate: float2(0.f), + texture_rotate: 0.f, + texture_scale: float2(1.f), + detail_texture_translate: float2(0.f), + detail_texture_rotate: 0.f, + detail_texture_scale: float2(1.f)); diff --git a/src/ansys/pyensight/core/utils/resources/Materials/Fieldstone/Fieldstone_BaseColor.png b/src/ansys/pyensight/core/utils/resources/Materials/Fieldstone/Fieldstone_BaseColor.png new file mode 100644 index 00000000000..29e83388335 Binary files /dev/null and b/src/ansys/pyensight/core/utils/resources/Materials/Fieldstone/Fieldstone_BaseColor.png differ diff --git a/src/ansys/pyensight/core/utils/resources/Materials/Fieldstone/Fieldstone_N.png b/src/ansys/pyensight/core/utils/resources/Materials/Fieldstone/Fieldstone_N.png new file mode 100644 index 00000000000..2f08380c44a Binary files /dev/null and b/src/ansys/pyensight/core/utils/resources/Materials/Fieldstone/Fieldstone_N.png differ diff --git 
a/src/ansys/pyensight/core/utils/resources/Materials/Fieldstone/Fieldstone_ORM.png b/src/ansys/pyensight/core/utils/resources/Materials/Fieldstone/Fieldstone_ORM.png new file mode 100644 index 00000000000..fd73d51f50e Binary files /dev/null and b/src/ansys/pyensight/core/utils/resources/Materials/Fieldstone/Fieldstone_ORM.png differ
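For readers unfamiliar with the UsdPreviewSurface wiring used by the material code above, the following is a minimal standalone sketch of the same pattern (UsdUVTexture feeding a UsdPreviewSurface that is bound to a mesh). It assumes only the open-source pxr USD Python bindings; the stage, prim, and texture paths are illustrative, and exact input types (for example the varname input on the primvar reader) can vary slightly between USD releases.

# Minimal sketch (illustrative paths): UsdUVTexture -> UsdPreviewSurface -> Material
from pxr import Sdf, Usd, UsdGeom, UsdShade

stage = Usd.Stage.CreateNew("preview_surface_demo.usda")
mesh = UsdGeom.Mesh.Define(stage, "/Root/Demo")
material = UsdShade.Material.Define(stage, "/Root/Looks/Demo")

# Primvar reader supplies the "st" texture coordinates to the texture shader
st_reader = UsdShade.Shader.Define(stage, "/Root/Looks/Demo/stReader")
st_reader.CreateIdAttr("UsdPrimvarReader_float2")
st_reader.CreateInput("varname", Sdf.ValueTypeNames.Token).Set("st")

# Diffuse texture lookup
tex = UsdShade.Shader.Define(stage, "/Root/Looks/Demo/diffuseTex")
tex.CreateIdAttr("UsdUVTexture")
tex.CreateInput("file", Sdf.ValueTypeNames.Asset).Set("./Materials/Fieldstone/Fieldstone_BaseColor.png")
tex.CreateInput("st", Sdf.ValueTypeNames.Float2).ConnectToSource(
    st_reader.CreateOutput("result", Sdf.ValueTypeNames.Float2)
)

# Preview surface consumes the texture rgb as its diffuse color
surface = UsdShade.Shader.Define(stage, "/Root/Looks/Demo/PreviewSurface")
surface.CreateIdAttr("UsdPreviewSurface")
surface.CreateInput("diffuseColor", Sdf.ValueTypeNames.Color3f).ConnectToSource(
    tex.CreateOutput("rgb", Sdf.ValueTypeNames.Float3)
)

# Expose the surface on the material and bind it to the mesh
material.CreateSurfaceOutput().ConnectToSource(
    surface.CreateOutput("surface", Sdf.ValueTypeNames.Token)
)
UsdShade.MaterialBindingAPI.Apply(mesh.GetPrim()).Bind(material)
stage.GetRootLayer().Save()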