From 9f412c7511b4af074386782725c8e13de55171e3 Mon Sep 17 00:00:00 2001
From: Mario Ostieri <107915956+mariostieriansys@users.noreply.github.com>
Date: Fri, 1 Nov 2024 15:20:40 +0100
Subject: [PATCH] Mostieri/fix contributors (#472)
---
.gitattributes | 1 +
.github/workflows/nightly-docs.yml | 104 +-
AUTHORS | 22 +-
CODE_OF_CONDUCT.md | 128 +-
CONTRIBUTORS.md | 26 +-
doc/make.bat | 57 -
doc/make.py | 340 +-
doc/source/_static/coverage.svg | 42 +-
doc/source/calc_functions.rst | 13468 ++++++++--------
.../examples_source/00-basic/03-ptrace.py | 302 +-
.../25-intermediate/02-utils.py | 386 +-
doc/source/libuserd_documentation.rst | 80 +-
doc/source/native_documentation.rst | 220 +-
doc/source/object_documentation.rst | 176 +-
doc/source/rest_api/ensight_rest_v1.yaml | 1190 +-
doc/source/rest_api/rest_api.rst | 298 +-
doc/source/user_guide/api_differences.rst | 62 +-
doc/source/user_guide/cmdlang_native.rst | 528 +-
doc/source/user_guide/ensight_scripts.rst | 232 +-
doc/source/user_guide/object_api.rst | 278 +-
doc/source/user_guide/omniverse_info.rst | 670 +-
.../ansys/tools/omniverse/core/__init__.py | 2 +-
.../ansys/tools/omniverse/core/extension.py | 796 +-
.../config/extension.toml | 122 +-
.../docs/CHANGELOG.md | 26 +-
.../ansys.tools.omniverse.core/docs/README.md | 22 +-
.../ansys.tools.omniverse.core/docs/index.rst | 36 +-
.../ansys/tools/omniverse/dsgui/__init__.py | 2 +-
.../ansys/tools/omniverse/dsgui/extension.py | 424 +-
.../config/extension.toml | 98 +-
.../docs/CHANGELOG.md | 24 +-
.../docs/README.md | 26 +-
.../docs/index.rst | 36 +-
pyproject.toml | 382 +-
rename_nightly_wheel.py | 32 +-
src/ansys/pyensight/core/common.py | 432 +-
src/ansys/pyensight/core/ensight_grpc.py | 864 +-
src/ansys/pyensight/core/libuserd.py | 3906 ++---
src/ansys/pyensight/core/renderable.py | 1706 +-
src/ansys/pyensight/core/session.py | 3640 ++---
src/ansys/pyensight/core/utils/dsg_server.py | 2170 +--
src/ansys/pyensight/core/utils/export.py | 1168 +-
src/ansys/pyensight/core/utils/omniverse.py | 724 +-
.../pyensight/core/utils/omniverse_cli.py | 1040 +-
.../core/utils/omniverse_dsg_server.py | 1764 +-
.../core/utils/omniverse_glb_server.py | 1262 +-
src/ansys/pyensight/core/utils/parts.py | 2398 +--
tests/example_tests/test_designpoints.py | 176 +-
tests/example_tests/test_glb_usd.py | 118 +-
tests/example_tests/test_libuserd.py | 156 +-
tests/example_tests/test_queries.py | 292 +-
tests/example_tests/test_remote_execution.py | 110 +-
tests/example_tests/test_remote_objects.py | 62 +-
tests/example_tests/test_rest_api.py | 146 +-
tests/example_tests/test_usd_export.py | 166 +-
tests/example_tests/test_utils.py | 152 +-
tests/unit_tests/test_session.py | 604 +-
57 files changed, 21819 insertions(+), 21875 deletions(-)
create mode 100644 .gitattributes
delete mode 100644 doc/make.bat
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000000..ffa2ec59250
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+* text=auto !eol
\ No newline at end of file
diff --git a/.github/workflows/nightly-docs.yml b/.github/workflows/nightly-docs.yml
index dab19cc2a38..4b52ffe637f 100644
--- a/.github/workflows/nightly-docs.yml
+++ b/.github/workflows/nightly-docs.yml
@@ -1,52 +1,52 @@
-name: Nightly Documentation Build
-
-on:
- schedule: # UTC at 0400 - 12am EDT
- - cron: '0 4 * * *'
- workflow_dispatch:
-
-env:
- DOCUMENTATION_CNAME: 'ensight.docs.pyansys.com'
- ENSIGHT_IMAGE: 'ghcr.io/ansys-internal/ensight_dev'
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.ref }}
- cancel-in-progress: true
-
-jobs:
- docs_build:
- runs-on: ubuntu-20.04
-
- steps:
- - uses: actions/checkout@v4
-
- - name: Login in Github Container registry
- uses: docker/login-action@v3
- with:
- registry: ghcr.io
- username: ansys-bot
- password: ${{ secrets.GITHUB_TOKEN }}
-
- - name: Docker pull
- run: docker pull ${{ env.ENSIGHT_IMAGE }}
-
- - name: Run Ansys documentation building action
- uses: ansys/actions/doc-build@v8
- env:
- ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }}
- with:
- sphinxopts: '-j auto'
-
- docs_upload:
- needs: docs_build
- runs-on: ubuntu-latest
- steps:
-
- - name: Deploy development documentation
- uses: ansys/actions/doc-deploy-dev@v8
- with:
- cname: ${{ env.DOCUMENTATION_CNAME }}
- token: ${{ secrets.GITHUB_TOKEN }}
- bot-user: ${{ secrets.PYANSYS_CI_BOT_USERNAME }}
- bot-email: ${{ secrets.PYANSYS_CI_BOT_EMAIL }}
-
+name: Nightly Documentation Build
+
+on:
+ schedule: # UTC at 0400 - 12am EDT
+ - cron: '0 4 * * *'
+ workflow_dispatch:
+
+env:
+ DOCUMENTATION_CNAME: 'ensight.docs.pyansys.com'
+ ENSIGHT_IMAGE: 'ghcr.io/ansys-internal/ensight_dev'
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ docs_build:
+ runs-on: ubuntu-20.04
+
+ steps:
+ - uses: actions/checkout@v4
+
+      - name: Log in to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ansys-bot
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Docker pull
+ run: docker pull ${{ env.ENSIGHT_IMAGE }}
+
+ - name: Run Ansys documentation building action
+ uses: ansys/actions/doc-build@v8
+ env:
+ ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }}
+ with:
+ sphinxopts: '-j auto'
+
+ docs_upload:
+ needs: docs_build
+ runs-on: ubuntu-latest
+ steps:
+
+ - name: Deploy development documentation
+ uses: ansys/actions/doc-deploy-dev@v8
+ with:
+ cname: ${{ env.DOCUMENTATION_CNAME }}
+ token: ${{ secrets.GITHUB_TOKEN }}
+ bot-user: ${{ secrets.PYANSYS_CI_BOT_USERNAME }}
+ bot-email: ${{ secrets.PYANSYS_CI_BOT_EMAIL }}
+
diff --git a/AUTHORS b/AUTHORS
index 0a51cf45460..69c72e251ce 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -1,12 +1,12 @@
-# This is the list of significant contributors to this repository.
-#
-# This file does not necessarily list everyone who has contributed code.
-#
-# For contributions made under a Corporate CLA, the organization is
-# added to this file.
-#
-# If you have contributed to the repository and want to be added to this file,
-# submit a request.
-#
-#
+# This is the list of PyEnSight's significant contributors.
+#
+# This file does not necessarily list everyone who has contributed code.
+#
+# For contributions made under a Corporate CLA, the organization is
+# added to this file.
+#
+# If you have contributed to the repository and want to be added to this file,
+# submit a request.
+#
+#
ANSYS, Inc.
\ No newline at end of file
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index e4a9081d99a..808012f3a06 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,64 +1,64 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, sex characteristics, gender identity and
-expression, level of experience, education, socio-economic status, nationality,
-personal appearance, race, religion, or sexual identity and orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
- advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions that are
-not aligned to this Code of Conduct, or to ban temporarily or permanently any
-contributor for other behaviors that they deem inappropriate, threatening,
-offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage],
-version 1.4, available at
-https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
-
-[homepage]: https://www.contributor-covenant.org
-
-For answers to common questions about this code of conduct, see
-https://www.contributor-covenant.org/faq
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, sex characteristics, gender identity and
+expression, level of experience, education, socio-economic status, nationality,
+personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+ advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, or to ban temporarily or permanently any
+contributor for other behaviors that they deem inappropriate, threatening,
+offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 1.4, available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see
+https://www.contributor-covenant.org/faq
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
index de0884b3af9..1743fd12331 100644
--- a/CONTRIBUTORS.md
+++ b/CONTRIBUTORS.md
@@ -1,13 +1,13 @@
-# Contributors
-
-## Project Lead
-
-* [Mario Ostieri](https://github.com/mariostieriansys)
-
-## Individual Contributors
-
-* [Mario Ostieri](https://github.com/mariostieriansys)
-* [Randy Frank](https://github.com/randallfrank)
-* [Kevin Colburn](https://github.com/kecolburn)
-* [David Bremer](https://github.com/david-bremer)
-* [Mike Krogh](https://github.com/mfkrogh)
+# Contributors
+
+## Project Lead
+
+* [Mario Ostieri](https://github.com/mariostieriansys)
+
+## Individual Contributors
+
+* [Mario Ostieri](https://github.com/mariostieriansys)
+* [Randy Frank](https://github.com/randallfrank)
+* [Kevin Colburn](https://github.com/kecolburn)
+* [David Bremer](https://github.com/david-bremer)
+* [Mike Krogh](https://github.com/mfkrogh)
diff --git a/doc/make.bat b/doc/make.bat
deleted file mode 100644
index 10f040b37e7..00000000000
--- a/doc/make.bat
+++ /dev/null
@@ -1,57 +0,0 @@
-@ECHO OFF
-
-pushd %~dp0
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=sphinx-build
-)
-set SOURCEDIR=source
-set BUILDDIR=_build
-
-if "%1" == "" goto help
-if "%1" == "pdf" goto pdf
-if "%1" == "clean" goto clean
-
-%SPHINXBUILD% >NUL 2>NUL
-if errorlevel 9009 (
- echo.
- echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
- echo.installed, then set the SPHINXBUILD environment variable to point
- echo.to the full path of the 'sphinx-build' executable. Alternatively you
- echo.may add the Sphinx directory to PATH.
- echo.
- echo.If you don't have Sphinx installed, grab it from
- echo.http://sphinx-doc.org/
- exit /b 1
-)
-
-%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-goto end
-
-:help
-%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-
-:html
-%SPHINXBUILD% -M linkcheck %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-%SPHINXBUILD% -M html %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-
-:clean
-rmdir /s /q %BUILDDIR% > /NUL 2>&1
-for /d /r %SOURCEDIR% %%d in (_autosummary) do @if exist "%%d" rmdir /s /q "%%d"
-goto end
-
-:pdf
- %SPHINXBUILD% -M latex %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
- cd "%BUILDDIR%\latex"
- for %%f in (*.tex) do (
- pdflatex "%%f" --interaction=nonstopmode)
- if NOT EXIST ansyspyensight.pdf (
-
- Echo "no pdf generated!"
- exit /b 1)
- Echo "pdf generated!"
-
-:end
-popd
diff --git a/doc/make.py b/doc/make.py
index 95fcf206c04..cb724174c9b 100644
--- a/doc/make.py
+++ b/doc/make.py
@@ -1,170 +1,170 @@
-import argparse
-import glob
-import os
-import platform
-import shutil
-import subprocess
-import sys
-import textwrap
-
-
-def find_exe(name: str) -> str:
- """Find an actual executable target
- Given the name of an executable, find the actual path to use.
-
- Parameters
- ----------
- name : str
- The undecorated name (e.g. pre-commit, sphinx-build, etc) to search for.
-
- """
- # exe files under Windows
- if platform.system().lower().startswith("win"):
- name += ".exe"
- pydir = os.path.dirname(sys.executable)
- pathname = os.path.join(pydir, "Scripts", name)
- if os.path.exists(pathname):
- return pathname
- pathname = os.path.join(pydir, "bin", name)
- if os.path.exists(pathname):
- return pathname
- pathname = os.path.join(pydir, name)
- if os.path.exists(pathname):
- return pathname
- raise RuntimeError(f"Unable to find script {name}. Is it installed?")
-
-
-"""
-# install dependencies
-python.exe -m pip install .[dev]
-python.exe -m pip install .[doc]
-
-# build - install
-python.exe -m build
-python.exe -m pip install .\dist\ansys_pyensight_core-0.1.dev0-py3-none-any.whl
-
-# docs
-cd doc
-# FASTDOCS=1?
-sphinx-build.exe -M html source _build -j auto
-"""
-
-
-def clean() -> None:
- """Clean up the documentation directories"""
- print("-" * 10, "Cleaning up old docs builds")
- paths = [
- "dist",
- os.path.join("doc", "_build"),
- os.path.join("doc", "source", "_autosummary"),
- os.path.join("doc", "source", "_examples"),
- ]
- for path in paths:
- shutil.rmtree(path, ignore_errors=True)
-
-
-def install():
- """Install the current wheel"""
- print("-" * 10, "Installing wheel")
- wheel_file = glob.glob("dist/*.whl")[0]
- # Uninstall existing wheel
- cmd = [sys.executable, "-m", "pip", "uninstall", "ansys-pyensight-core", "-y"]
- subprocess.run(cmd)
- # Install new wheel
- cmd = [sys.executable, "-m", "pip", "install", wheel_file]
- subprocess.run(cmd)
-
-
-def wheel():
- """Build the wheel"""
- print("-" * 10, "Building wheel")
- # Clean up the dist directory
- for name in glob.glob("dist/*.whl"):
- os.unlink(name)
- # Build the wheel
- cmd = [sys.executable, "-m", "build", "--wheel"]
- subprocess.run(cmd)
-
-
-def precommit():
- """Execute the pre-commit action"""
- print("-" * 10, "Pre-commit checks")
- executable = find_exe("pre-commit")
- cmd = [executable, "run", "--all-files"]
- subprocess.run(cmd)
-
-
-def docs(target: str = "html", skip_api: bool = False):
- """Run sphinx to build the docs
-
- Parameters
- ----------
- target : str, optional
- What specific build target (e.g. "html").
- skip_api : bool, optional
- Should we skip the (expensive) autosummary targets.
-
- """
- # Build the actual docs
- print("-" * 10, "Build sphinx docs")
- # chdir to docs
- os.chdir("doc")
- # build it
- executable = find_exe("sphinx-build")
- cmd = [executable, "-M", target, "source", "_build", "-j", "auto"]
- env = os.environ.copy()
- if skip_api:
- env["FASTDOCS"] = "1"
- subprocess.run(cmd, env=env)
-
-
-if __name__ == "__main__":
- operation_help = textwrap.dedent(
- """\
-'clean' : Clean build directories.
-'install' : Install the wheel.
-'precommit' : Run linting tools.
-'build' : Build the wheel.
-'docs' : Generate documentation.
-"""
- )
-
- parser = argparse.ArgumentParser(
- description="Build pyensight docs",
- formatter_class=argparse.RawTextHelpFormatter,
- )
- parser.add_argument(
- "operation",
- metavar="operation",
- choices=[
- "clean",
- "install",
- "precommit",
- "build",
- "docs",
- ],
- help=operation_help,
- )
- parser.add_argument(
- "--fastdocs",
- default=False,
- action="store_true",
- help="Skip generation of API (autosummary) docs",
- )
-
- # parse the command line
- args = parser.parse_args()
-
- if args.operation == "clean":
- clean()
- elif args.operation == "precommit":
- precommit()
- elif args.operation == "install":
- install()
- elif args.operation == "build":
- wheel()
- elif args.operation == "docs":
- docs(target="html", skip_api=args.fastdocs)
- elif args.operations == "":
- print()
- print("Complete.")
+import argparse
+import glob
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import textwrap
+
+
+def find_exe(name: str) -> str:
+ """Find an actual executable target
+ Given the name of an executable, find the actual path to use.
+
+ Parameters
+ ----------
+ name : str
+ The undecorated name (e.g. pre-commit, sphinx-build, etc) to search for.
+
+ """
+ # exe files under Windows
+ if platform.system().lower().startswith("win"):
+ name += ".exe"
+ pydir = os.path.dirname(sys.executable)
+ pathname = os.path.join(pydir, "Scripts", name)
+ if os.path.exists(pathname):
+ return pathname
+ pathname = os.path.join(pydir, "bin", name)
+ if os.path.exists(pathname):
+ return pathname
+ pathname = os.path.join(pydir, name)
+ if os.path.exists(pathname):
+ return pathname
+ raise RuntimeError(f"Unable to find script {name}. Is it installed?")
+
+
+"""
+# install dependencies
+python.exe -m pip install .[dev]
+python.exe -m pip install .[doc]
+
+# build - install
+python.exe -m build
+python.exe -m pip install .\dist\ansys_pyensight_core-0.1.dev0-py3-none-any.whl
+
+# docs
+cd doc
+# FASTDOCS=1?
+sphinx-build.exe -M html source _build -j auto
+"""
+
+
+def clean() -> None:
+ """Clean up the documentation directories"""
+ print("-" * 10, "Cleaning up old docs builds")
+ paths = [
+ "dist",
+ os.path.join("doc", "_build"),
+ os.path.join("doc", "source", "_autosummary"),
+ os.path.join("doc", "source", "_examples"),
+ ]
+ for path in paths:
+ shutil.rmtree(path, ignore_errors=True)
+
+
+def install():
+ """Install the current wheel"""
+ print("-" * 10, "Installing wheel")
+ wheel_file = glob.glob("dist/*.whl")[0]
+ # Uninstall existing wheel
+ cmd = [sys.executable, "-m", "pip", "uninstall", "ansys-pyensight-core", "-y"]
+ subprocess.run(cmd)
+ # Install new wheel
+ cmd = [sys.executable, "-m", "pip", "install", wheel_file]
+ subprocess.run(cmd)
+
+
+def wheel():
+ """Build the wheel"""
+ print("-" * 10, "Building wheel")
+ # Clean up the dist directory
+ for name in glob.glob("dist/*.whl"):
+ os.unlink(name)
+ # Build the wheel
+ cmd = [sys.executable, "-m", "build", "--wheel"]
+ subprocess.run(cmd)
+
+
+def precommit():
+ """Execute the pre-commit action"""
+ print("-" * 10, "Pre-commit checks")
+ executable = find_exe("pre-commit")
+ cmd = [executable, "run", "--all-files"]
+ subprocess.run(cmd)
+
+
+def docs(target: str = "html", skip_api: bool = False):
+ """Run sphinx to build the docs
+
+ Parameters
+ ----------
+ target : str, optional
+ What specific build target (e.g. "html").
+ skip_api : bool, optional
+ Should we skip the (expensive) autosummary targets.
+
+ """
+ # Build the actual docs
+ print("-" * 10, "Build sphinx docs")
+ # chdir to docs
+ os.chdir("doc")
+ # build it
+ executable = find_exe("sphinx-build")
+ cmd = [executable, "-M", target, "source", "_build", "-j", "auto"]
+ env = os.environ.copy()
+ if skip_api:
+ env["FASTDOCS"] = "1"
+ subprocess.run(cmd, env=env)
+
+
+if __name__ == "__main__":
+ operation_help = textwrap.dedent(
+ """\
+'clean' : Clean build directories.
+'install' : Install the wheel.
+'precommit' : Run linting tools.
+'build' : Build the wheel.
+'docs' : Generate documentation.
+"""
+ )
+
+ parser = argparse.ArgumentParser(
+ description="Build pyensight docs",
+ formatter_class=argparse.RawTextHelpFormatter,
+ )
+ parser.add_argument(
+ "operation",
+ metavar="operation",
+ choices=[
+ "clean",
+ "install",
+ "precommit",
+ "build",
+ "docs",
+ ],
+ help=operation_help,
+ )
+ parser.add_argument(
+ "--fastdocs",
+ default=False,
+ action="store_true",
+ help="Skip generation of API (autosummary) docs",
+ )
+
+ # parse the command line
+ args = parser.parse_args()
+
+ if args.operation == "clean":
+ clean()
+ elif args.operation == "precommit":
+ precommit()
+ elif args.operation == "install":
+ install()
+ elif args.operation == "build":
+ wheel()
+ elif args.operation == "docs":
+ docs(target="html", skip_api=args.fastdocs)
+    elif args.operation == "":
+ print()
+ print("Complete.")
diff --git a/doc/source/_static/coverage.svg b/doc/source/_static/coverage.svg
index 49f2582cf68..078cfac9fd6 100644
--- a/doc/source/_static/coverage.svg
+++ b/doc/source/_static/coverage.svg
@@ -1,21 +1,21 @@
-
-
+
+
diff --git a/doc/source/calc_functions.rst b/doc/source/calc_functions.rst
index 1ecd673f3b6..843763ed249 100644
--- a/doc/source/calc_functions.rst
+++ b/doc/source/calc_functions.rst
@@ -1,6735 +1,6735 @@
-.. vale off
-
-.. _calculator_functions:
-
-====================
-Calculator functions
-====================
-
-EnSight supports a large number of calculator functions.
-An :class:`ENS_VAR` object can represent
-either fields read from disk or the use of a calculator function
-to compute a new variable from an input collection of parts, variables, and
-user-specified parameters.
-
-You can use the native API function :func:`pyensight.ensight_api.variables.evaluate`
-and the object API function :func:`pyensight.ens_globals.ENS_GLOBALS.create_variable`
-to create new EnSight variables that leverage these calculator functions.
-These functions use a string representation of the function to create the
-new variable. Most calculator functions take a partlist (``plist``) as
-a parameter. In the native API, you use the ``plist`` string to refer to the currently
-selected parts. In the object API, you use the same ``plist`` text string, but you
-can specify the selection of parts to be used directly via the ``sources`` keyword.
-
-This code provides some examples::
-
-    # Create a variable named 'newvar1' using the :ref:`CmplxTransResp <CmplxTransResp>`
- # calculator function and the input variable 'c_scalar'. The variable is to be defined on
- # all of the current parts.
- session.ensight.part.select_all()
- session.ensight.variables.evaluate("newvar1 = CmplxTransResp(plist,c_scalar,90.0)")
-    # Create a variable named 'newvar2' using the :ref:`EleSize <EleSize>`
- # calculator function. The variable is to be defined on all of the current parts.
- varobj = session.ensight.objs.core.create_variable("newvar2", "EleSize(plist)",
- sources=session.ensight.objs.core.PARTS)
-
-
-.. admonition:: Per-part constants
-
-   Some calculator functions (such as :ref:`Area() <Area>`) return constant values. EnSight
-   supports constant values that are per-case and per-part. For example, if the :ref:`Area() <Area>`
-   function is computed as a per-case constant, the value is the sum of the area values computed
-   part by part. If the :ref:`Area() <Area>` function is computed as a per-part constant, the individual
-   values for each part are stored on each part.
-
-   All constant values are computed as per-case by default. For per-part computation, you
-   must add an optional additional argument to the function. For example, ``Area(plist)``
-   results in a per-case constant by default. ``Area(plist,Compute_Per_case)`` is also computed
-   as per-case explicitly. ``Area(plist,Compute_Per_part)`` results in the variable being
-   computed as per-part.
-
-   Not all calculator functions support this. For those that do, the notation ``[,Compute_Per_part]``
-   appears in the documentation. For an example, see :ref:`Area() <Area>`.
-
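-For instance, here is a minimal sketch of both forms, assuming an active
-PyEnSight ``session`` with parts already loaded (the variable names are
-illustrative)::
-
-    # Per-case constant: a single value summed over the selected parts
-    session.ensight.objs.core.create_variable(
-        "area_case", "Area(plist)", sources=session.ensight.objs.core.PARTS
-    )
-    # Per-part constant: an individual value stored on each part
-    session.ensight.objs.core.create_variable(
-        "area_part", "Area(plist,Compute_Per_part)",
-        sources=session.ensight.objs.core.PARTS
-    )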
-
-.. _Area:
-
-------
-Area()
-------
-
-**Area**
-
-``Area(any part or parts [, Compute_Per_part])``
-
-Computes a constant or constant per part variable whose
-value is the area of the selected parts. If a part is composed of 3D elements,
-the area is of the border representation of the part. The area of 1D elements is
-zero.
-
-
-.. _BL_aGradOfVelMag:
-
-------------------
-BL_aGradOfVelMag()
-------------------
-
-**Boundary Layer: A Gradient of Velocity Magnitude**
-
-``BL_aGradOfVelMag(boundary part or parts, velocity)``
-
-Computes a vector variable that is the gradient of the
-magnitude of the specified velocity variable on the selected boundary part
-or parts. The vector variable is defined as:
-
-:math:`GRA{D}_{BP}\left|V\right|={\nabla }_{BP}\left|V\right|=\frac{\partial V}{\partial x}\widehat{i}+\frac{\partial V}{\partial y}\widehat{j}+\frac{\partial V}{\partial z}\widehat{k}`
-
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`BP`
- - on boundary part
- * - :math:`V=V\left(x,y,z\right)`
- - velocity vector
- * - :math:`\left|V\right|`
- - magnitude of velocity vector = :math:`\sqrt{V·V}`
- * - x, y, z
- - coordinate directions
- * - i, j, k
- - unit vectors in coordinate directions
-
-
-.. note::
- For each boundary part, this function finds its corresponding field part
- (``pfield``), computes the gradient of the velocity
-   magnitude on the field part (``Grad(pfield,velocity)``), and
- then maps these computed values onto the boundary part.
-
- Node or element IDs are used if they exist. Otherwise, the coordinate
- values between the field part and boundary part are mapped and resolved via
- a floating-point hashing scheme.
-
- This velocity-magnitude gradient variable can be used as an argument for
- the following boundary-layer functions that require this variable.
-
- Boundary layer (``BL_*``) functions are not supported for
-   Server of Servers (SoS) decomposition.
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - *Boundary part*
- - 2D part
- * - *Velocity*
- - vector variable
-
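-A minimal sketch, assuming an active PyEnSight ``session`` and a vector
-variable named ``Velocity`` (the variable names are illustrative)::
-
-    session.ensight.part.select_all()
-    session.ensight.variables.evaluate(
-        "velmaggrad = BL_aGradOfVelMag(plist,Velocity)"
-    )
-
-The resulting ``velmaggrad`` variable can then be supplied as the ``grad``
-argument of the other boundary-layer functions that accept one.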
-
-.. _BL_CfEdge:
-
------------
-BL_CfEdge()
------------
-
-
-**Boundary Layer: Edge Skin-Friction Coefficient**
-
-``BL_CfEdge(boundary part or parts, velocity, density, viscosity, ymax, flow comp(0,1,or2), grad)``
-
-
-Computes a scalar variable that is the edge skin-friction
-coefficient :math:`{C}_{f\left(e\right)}` (that is, using the density :math:`{\rho }_{e}` and velocity :math:`{U}_{e}` values at the edge of the boundary layer, not
-the free-stream density :math:`{\rho }_{\infty }` and velocity :math:`{U}_{\infty }` values).
-This scalar variable is defined as:
-
-Component: 0 = Total tangential-flow (parallel) to wall:
-
-:math:`{C}_{f\left(e\right)}=2{\tau }_{w}/\left({\rho }_{e}{U}_{e}^{2}\right)`
-
-Component: 1 = Stream-wise (flow) component tangent (parallel) to wall:
-
-:math:`{C}_{fs\left(e\right)}=2{\tau }_{ws}/\left({\rho }_{e}{U}_{e}^{2}\right)`
-
-Component: 2 = Cross-flow component tangent (parallel) to wall:
-
-:math:`{C}_{fc\left(e\right)}=2{\tau }_{wc}/\left({\rho }_{e}{U}_{e}^{2}\right)`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`{\tau }_{w}`
- - fluid shear stress magnitude at the boundary :math:`=\mu {\left(\partial u/\partial n\right)}_{n=0}=\sqrt{\left({\tau }_{ws}^{2}+{\tau }_{wc}^{2}\right)}`
- * - :math:`{\tau }_{ws}=\mu {\left(\partial {u}_{s}/\partial n\right)}_{n=0}`
- - stream-wise component of :math:`{\tau }_{w}`
- * - :math:`{\tau }_{wc}=\mu {\left(\partial {u}_{c}/\partial n\right)}_{n=0}`
- - cross-flow component of :math:`{\tau }_{w}`
- * - :math:`\mu`
- - dynamic viscosity of the fluid at the wall
- * - :math:`{\left(\partial u/\partial n\right)}_{n=0}`
- - magnitude of the velocity-magnitude gradient in the normal
- direction at the wall
- * - :math:`{\left(\partial {u}_{s}/\partial n\right)}_{n=0}`
- - stream-wise component of the velocity-magnitude gradient in
- the normal direction at the wall
- * - :math:`{\left(\partial {u}_{c}/\partial n\right)}_{n=0}`
- - cross-flow component of the velocity-magnitude gradient in
- the normal direction at the wall
- * - :math:`{\rho }_{e}`
- - density at the edge of the boundary layer
- * - :math:`{U}_{e}`
- - velocity at the edge of the boundary layer
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 2D part
- * - velocity
- - vector variable
- * - density
- - scalar variable (compressible flow), constant number (incompressible flow)
- * - viscosity
- - scalar variable, constant variable, or constant number
- * - ymax
- - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
-
-     See the algorithm note under :ref:`Boundary Layer Thickness <BL_Thick>`.
-
- * - flow comp
- - constant number (0 = tangent flow parallel to surface, 1 = stream-wise component
-       tangent (parallel) to wall, 2 = cross-flow component tangent (parallel) to wall)
- * - grad
- - -1 = flags the computing of the velocity-magnitude gradient via three-point interpolation
-
- vector variable = Grad(velocity magnitude)
-
-
-This scalar variable provides a measure of the skin-friction coefficient in the
-tangent (parallel-to-surface) direction and in its tangent's respective
-stream-wise and cross-flow directions, respective to the decomposed velocity
-parallel to the surface at the edge of the boundary layer.
-
-This is a non-dimensional measure of the fluid shear
-stress at the surface based on the local density and velocity at the edge of the
-boundary layer. The following figure illustrates the derivations of the computed
-*edge*-related velocity values: :math:`{U}_{e}`, :math:`{u}_{s}`, and :math:`{u}_{c}`.
-
-.. image:: /_static/UM-C7-12.png
-
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
-   Server of Servers (SoS) decomposition.
-
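-For example, a sketch computing the stream-wise component with the convergence
-algorithm (``ymax = 0``), assuming variables named ``Velocity`` and ``Density``
-and an illustrative constant viscosity::
-
-    session.ensight.part.select_all()
-    session.ensight.variables.evaluate(
-        "cf_edge_s = BL_CfEdge(plist,Velocity,Density,1.8e-5,0.0,1,-1)"
-    )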
-
-.. _BL_CfWall:
-
------------
-BL_CfWall()
------------
-
-
-**Boundary Layer: Wall Skin-Friction Coefficient**
-
-``BL_CfWall(boundary parts, velocity, viscosity, free density, free velocity, grad)``
-
-
-Computes a scalar variable that is the skin-friction
-coefficient :math:`{C}_{f\left(\infty \right)}`. This scalar variable
-is defined as:
-
-:math:`{C}_{f\left(\infty \right)}=\frac{{\tau }_{w}}{0.5{\rho }_{\infty }{\left({U}_{\infty }\right)}^{2}}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`{\tau }_{w}={\mu }_{w}{\left(\frac{\partial u}{\partial n}\right)}_{n=0}`
- - fluid shear stress at the wall
- * - :math:`{\mu }_{w}`
- - dynamic viscosity of the fluid at the wall
-
-     (may be a spatially and/or temporally varying quantity, usually a constant)
-
- * - :math:`n`
- - distance profiled normal to the wall
- * - :math:`{\rho }_{\infty }`
- - freestream density
- * - :math:`{U}_{\infty }`
- - freestream velocity magnitude
- * - :math:`{\left(\frac{\partial u}{\partial n}\right)}_{n=0}`
-     - tangent (parallel to surface) component of
-       the velocity-magnitude gradient in the normal direction at the wall
-
-
-This is a non-dimensional measure of the fluid shear
-stress at the surface. An important property of the skin-friction coefficient
-is that :math:`{C}_{f\left(\infty \right)}=0` indicates boundary-layer separation.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 2D part
- * - velocity
- - vector variable
- * - viscosity
- - scalar variable, constant variable, or constant number
- * - free density
- - constant number
- * - free velocity
- - constant number
- * - grad
- - -1 flags the computing of the velocity-magnitude gradient via three-point interpolation
-
- vector variable = Grad(velocity magnitude)
-
-
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
-   Server of Servers (SoS) decomposition.
-
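-A sketch with the free-stream values supplied as constants (all numbers here
-are illustrative), assuming a vector variable named ``Velocity``::
-
-    session.ensight.part.select_all()
-    session.ensight.variables.evaluate(
-        "cf_wall = BL_CfWall(plist,Velocity,1.8e-5,1.225,34.0,-1)"
-    )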
-
-.. _BL_CfWallCmp:
-
---------------
-BL_CfWallCmp()
---------------
-
-**Boundary Layer: Wall Skin-Friction Coefficient Components**
-
-``BL_CfWallCmp(boundary parts, velocity, viscosity,
-free-stream density, free-stream velocity-mag., ymax, flow comp(1or2),
-grad)``
-
-
-Computes a scalar variable that is a component of the
-skin friction coefficient :math:`{C}_{f}` tangent (or parallel) to the wall, either in the
-stream-wise :math:`{C}_{fs(·)}` or in the cross-flow :math:`{C}_{fc(·)}` direction. This
-scalar variable is defined as:
-
-Component 1 = Stream-wise (flow) component tangent (parallel) to wall:
-
-:math:`{C}_{fs\left(\infty \right)}=2{\tau }_{ws}/\left({\rho }_{\infty }{U}_{\infty }^{2}\right)`
-
-Component 2 = Cross-flow component tangent (parallel) to wall:
-
-:math:`{C}_{fc\left(\infty \right)}=2{\tau }_{wc}/\left({\rho }_{\infty }{U}_{\infty }^{2}\right)`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`{\tau }_{ws}=\mu {\left(\partial {u}_{s}/\partial n\right)}_{n=0}`
- - stream-wise component of :math:`{\tau }_{w}`
- * - :math:`{\tau }_{wc}=\mu {\left(\partial {u}_{c}/\partial n\right)}_{n=0}`
- - cross-flow component of :math:`{\tau }_{w}`
- * - :math:`{\tau }_{w}`
- - fluid shear stress magnitude at the wall :math:`=\mu {\left(\partial u/\partial n\right)}_{n=0}=\sqrt{\left({\tau }_{ws}^{2}+{\tau }_{wc}^{2}\right)}`
- * - :math:`\mu`
- - dynamic viscosity of the fluid at the wall
- * - :math:`{\left(\partial {u}_{s}/\partial n\right)}_{n=0}`
- - stream-wise component of the velocity-magnitude gradient in the normal direction at the wall
- * - :math:`{\left(\partial {u}_{c}/\partial n\right)}_{n=0}`
- - cross-flow component of the velocity-magnitude gradient in the normal direction at the wall
-   * - :math:`{\rho }_{\infty }`
-     - free-stream density
-   * - :math:`{U}_{\infty }`
-     - free-stream velocity magnitude
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 2D part
- * - velocity
- - vector variable
- * - viscosity
- - scalar variable, constant variable, or constant number
- * - density
- - scalar variable (compressible flow) or constant number (incompressible flow)
- * - velocity mag
- - constant variable or constant number
- * - ymax
- - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
-
-     See the algorithm note under :ref:`Boundary Layer Thickness <BL_Thick>`.
-
- * - flow comp
- - constant number (1 = stream-wise component tangent (parallel) to wall, 2 = cross-flow
-       component tangent (parallel) to wall)
- * - grad
- - -1 flags the computing of the
- velocity-magnitude gradient via three-point interpolation
-
- vector variable = Grad(velocity magnitude)
-
-
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
-   Server of Servers (SoS) decomposition.
-
-
-.. _BL_CfWallTau:
-
---------------
-BL_CfWallTau()
---------------
-
-**Boundary Layer: Wall Fluid Shear-Stress**
-
-``BL_CfWallTau(boundary parts, velocity, viscosity, ymax, flow comp(0,1,or 2), grad)``
-
-
-Computes a scalar variable that is the fluid
-shear-stress at the wall :math:`{\tau }_{w}` or in its stream-wise :math:`{\tau }_{ws}` or cross-flow :math:`{\tau }_{wc}`
-component direction. This scalar variable is defined as:
-
-Component 0 = Total fluid shear-stress magnitude at the wall:
-
-:math:`{\tau }_{w}=\mu {\left(\frac{\partial u}{\partial n}\right)}_{n=0}=\sqrt{\left({\tau }_{ws}^{2}+{\tau }_{wc}^{2}\right)}`
-
-Component 1 = Stream-wise component of the fluid shear-stress at the wall:
-
-:math:`{\tau }_{ws}=\mu {\left(\frac{\partial {u}_{s}}{\partial n}\right)}_{n=0}`
-
-Component 2 = Cross-flow component of the fluid shear-stress at the wall:
-
-:math:`{\tau }_{wc}=\mu {\left(\frac{\partial {u}_{c}}{\partial n}\right)}_{n=0}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`\mu`
- - dynamic viscosity of the fluid at the wall
-
- * - :math:`{\left(\frac{\partial u}{\partial n}\right)}_{n=0}`
-
- - magnitude of the velocity-magnitude gradient in the normal direction at the wall
-
- * - :math:`{\left(\frac{\partial {u}_{s}}{\partial n}\right)}_{n=0}`
- - stream-wise component of the velocity-magnitude gradient in
- the normal direction at the wall
-
- * - :math:`{\left(\frac{\partial {u}_{c}}{\partial n}\right)}_{n=0}`
- - cross-flow component of the velocity-magnitude gradient in
- the normal direction at the wall
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 2D part
- * - velocity
- - vector variable
- * - viscosity
- - scalar variable, constant variable, or constant number
- * - ymax
- - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
-
-     See the algorithm note under :ref:`Boundary Layer Thickness <BL_Thick>`.
-
- * - flow comp
- - constant number (0 = RMS of the stream-wise and cross-flow components,
- 1 = stream-wise component at the wall, 2 = cross-flow component at the wall)
- * - grad
- - -1 flags the computing of the velocity-magnitude gradient via three-point interpolation
-
- vector variable = Grad(velocity magnitude)
-
-
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
-   Server of Servers (SoS) decomposition.
-
-
-.. _BL_DispThick:
-
---------------
-BL_DispThick()
---------------
-
-**Boundary Layer: Displacement Thickness**
-
-``BL_DispThick(boundary parts, velocity, density, ymax, flow comp(0,1,or 2), grad)``
-
-
-Computes a scalar variable that is the boundary layer
-displacement thickness :math:`{\delta }^{*}` , :math:`{\delta }_{s}^{*}` , or :math:`{\delta }_{c}^{*}` defined as:
-
-Component: 0 = Total tangential-flow parallel to the
-wall
-
-:math:`{\delta }_{tot}^{*}={\displaystyle {\int }_{0}^{\delta }\left(1-\frac{\rho u}{{\rho }_{e}{U}_{e}}\right)}dn`
-
-Component: 1 = Stream-wise flow component tangent (parallel)
-to the wall
-
-:math:`{\delta }_{s}^{*}={\displaystyle {\int }_{0}^{\delta }\left(1-\frac{\rho {u}_{s}}{{\rho }_{e}{U}_{e}}\right)}dn`
-
-Component: 2 = Cross-flow component tangent (parallel) to the
-wall
-
-:math:`{\delta }_{c}^{*}={\displaystyle {\int }_{0}^{\delta }\left(1-\frac{\rho {u}_{c}}{{\rho }_{e}{U}_{e}}\right)}dn`
-
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`n`
- - distance profiled normal to the wall
- * - :math:`\delta`
- - boundary-layer thickness (distance to edge of boundary layer)
- * - :math:`\rho`
- - density at given profile location
- * - :math:`{\rho }_{e}`
- - density at the edge of the boundary layer
- * - :math:`u`
- - magnitude of the velocity component parallel
- to the wall at a given profile location in the boundary layer
- * - :math:`{u}_{s}`
- - stream-wise component of the velocity magnitude parallel to the
- wall at a given profile location in the boundary layer
- * - :math:`{u}_{c}`
- - cross-flow component of the velocity magnitude parallel to the
- wall at a given profile location in the boundary layer
- * - :math:`{U}_{e}`
- - u at the edge of the boundary layer
- * - :math:`{y}_{max}`
- - distance from wall to freestream
- * - comp
- - flow direction option
- * - grad
- - flag for gradient of velocity magnitude
-
-
-This scalar variable provides a measure for the effect of the boundary layer
-on the **outside** flow. The boundary layer causes a
-displacement of the streamlines around the body.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 2D part
- * - velocity
- - vector variable
- * - density
- - scalar variable (compressible flow), constant number (incompressible flow)
- * - :math:`{y}_{max}`
- - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
-
-     See the algorithm note under :ref:`Boundary Layer Thickness <BL_Thick>`.
-
- * - flow comp
- - constant number (0 = total tangential flow direction parallel to wall,
- 1 = stream-wise flow component direction parallel to wall, 2 = cross-flow
-       component direction parallel to wall)
-
- * - grad
- - -1 flags the computing of the velocity-magnitude
- gradient via four-point interpolation
-
- vector variable = Grad(velocity magnitude)
-
-
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
-   Server of Servers (SoS) decomposition.
-
-.. _BL_DistToValue:
-
-----------------
-BL_DistToValue()
-----------------
-
-**Boundary Layer: Distance to Value from Wall**
-
-``BL_DistToValue(boundary parts, scalar, scalar value)``
-
-
-Computes a scalar variable that is the distance
-:math:`d` from the wall to the specified value. This scalar variable is
-defined as:
-
-:math:`d={n|}_{f\left(\alpha \right)=c}`
-
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`n`
-     - distance profiled normal to the boundary surface
- * - :math:`f\left(\alpha \right)`
- - scalar field (variable)
- * - :math:`\alpha`
- - scalar field values
- * - :math:`c`
- - scalar value at which to assign d
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 0D, 1D, or 2D part
- * - scalar
- - scalar variable
- * - scalar value
- - constant number or constant variable
-
-
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
-   Server of Servers (SoS) decomposition.
-
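-For example, a sketch computing the distance from the wall to where an assumed
-scalar variable named ``Temperature`` reaches 300::
-
-    session.ensight.part.select_all()
-    session.ensight.variables.evaluate(
-        "dist300 = BL_DistToValue(plist,Temperature,300.0)"
-    )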
-
-.. _BL_MomeThick:
-
---------------
-BL_MomeThick()
---------------
-
-
-**Boundary Layer: Momentum Thickness**
-
-``BL_MomeThick(boundary parts, velocity, density, ymax, flow compi(0,1,or2), flow compj(0,1,or2), grad)``
-
-
-Computes a scalar variable that is the boundary-layer
-momentum thickness :math:`{\theta }_{tot}` , :math:`{\theta }_{ss}` , :math:`{\theta }_{sc}` , :math:`{\theta }_{cs}` , or :math:`{\theta }_{cc}`.
-This scalar variable is defined as:
-
-Components: (0,0) = Total tangential-flow parallel to the
-wall
-
-:math:`{\theta }_{tot}=\frac{1}{{\rho }_{e}{U}_{e}^{2}}{\displaystyle {\int }_{0}^{\delta }\left({U}_{e}-u\right)}\rho udn`
-
-Components: (1,1) = stream-wise, stream-wise component
-
-:math:`{\theta }_{ss}=\frac{1}{{\rho }_{e}{U}_{e}^{2}}{\displaystyle {\int }_{0}^{\delta }\left({U}_{e}-{u}_{s}\right)}\rho {u}_{s}dn`
-
-Components: (1,2) = Stream-wise, cross-flow component
-
-:math:`{\theta }_{sc}=\frac{1}{{\rho }_{e}{U}_{e}^{2}}{\displaystyle {\int }_{0}^{\delta }\left({U}_{e}-{u}_{s}\right)}\rho {u}_{c}dn`
-
-Components: (2,1) = cross-flow, stream-wise component
-
-:math:`{\theta }_{cs}=\frac{-1}{{\rho }_{e}{U}_{e}^{2}}{\displaystyle {\int }_{0}^{\delta }\rho {u}_{c}{u}_{s}}dn`
-
-Components: (2,2) = cross-flow, cross-flow component
-
-:math:`{\theta }_{cc}=\frac{-1}{{\rho }_{e}{U}_{e}^{2}}{\displaystyle {\int }_{0}^{\delta }\rho {u}_{c}^{2}}dn`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`n`
- - distance profiled normal to the wall
- * - :math:`\delta`
- - boundary-layer thickness (or distance to edge
- of boundary layer)
- * - :math:`\rho`
- - density at given profile location
- * - :math:`{\rho }_{e}`
- - density at the edge of the boundary layer
- * - :math:`u`
- - magnitude of the velocity component parallel
- to the wall at a given profile location in the boundary layer
- * - :math:`{u}_{s}`
- - stream-wise component of the velocity magnitude parallel to
- the wall at a given profile location in the boundary layer
- * - :math:`{u}_{c}`
- - cross-flow component of the velocity magnitude parallel to
- the wall at a given profile location in the boundary layer
- * - :math:`{U}_{e}`
- - u at the edge of the boundary layer
- * - :math:`{y}_{max}`
- - distance from wall to freestream
- * - :math:`com{p}_{i}`
- - first flow direction option
- * - :math:`com{p}_{j}`
- - second flow direction option
- * - grad
- - flag for gradient of velocity magnitude
-
-
-This scalar variable relates to the momentum loss in the boundary layer.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 2D part
- * - velocity
- - vector variable
- * - density
- - scalar variable (compressible flow), constant number (incompressible flow)
- * - ymax
- - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
-
-     See the algorithm note under :ref:`Boundary Layer Thickness <BL_Thick>`.
-
- * - compi
- - constant number (0 = total tangential flow direction parallel to wall,
- 1 = stream-wise flow component direction parallel to wall, 2 = cross-flow
- component direction parallel to wall)
- * - compj
- - constant number (0 = total tangential flow direction parallel to wall,
- 1 = stream-wise flow component direction parallel to wall, 2 = cross-flow
-       component direction parallel to wall)
- * - grad
- - -1 flags the computing of the
- velocity-magnitude gradient via four-point interpolation
-
- vector variable = Grad(velocity magnitude)
-
-     See :ref:`BL_aGradOfVelMag <BL_aGradOfVelMag>`.
-
-
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
-   Server of Servers (SoS) decomposition.
-
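-For example, a sketch computing the stream-wise/cross-flow component
-:math:`{\theta }_{sc}`, assuming variables named ``Velocity`` and ``Density``::
-
-    session.ensight.part.select_all()
-    session.ensight.variables.evaluate(
-        "theta_sc = BL_MomeThick(plist,Velocity,Density,0.0,1,2,-1)"
-    )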
-
-.. _BL_Scalar:
-
------------
-BL_Scalar()
------------
-
-
-**Boundary Layer: Scalar**
-
-``BL_Scalar(boundary parts, velocity, scalar, ymax, grad)``
-
-
-Computes a scalar variable that is the scalar value of
-the corresponding scalar field at the edge of the boundary layer. The function
-extracts the scalar value while computing the boundary-layer
-thickness. (See :ref:`Boundary Layer Thickness <BL_Thick>`.)
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 2D part
- * - velocity
- - vector variable
- * - scalar
- - scalar variable
- * - ymax
- - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
-
-     See the algorithm note under :ref:`Boundary Layer Thickness <BL_Thick>`.
-
- * - grad
- - -1 flags the computing of the
- velocity-magnitude gradient via four-point interpolation
-
- vector variable = Grad(velocity magnitude)
-
-
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
-   Server of Servers (SoS) decomposition.
-
-
-.. _BL_RecoveryThick:
-
-------------------
-BL_RecoveryThick()
-------------------
-
-
-**Boundary Layer: Recovery Thickness**
-
-``BL_RecoveryThick(boundary parts, velocity, total pressure, ymax, grad)``
-
-
-Computes a scalar variable that is the boundary-layer
-recovery thickness :math:`{\delta }_{rec}`. This scalar variable is defined as:
-
-:math:`{\delta }_{rec}={\displaystyle {\int }_{0}^{\delta }\left(1-\frac{{p}_{t}}{{p}_{te}}\right)}dn`
-
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`n`
- - distance profiled normal to the wall
- * - :math:`\delta`
- - boundary-layer thickness (distance to edge of boundary layer)
-
- * - :math:`{p}_{t}`
- - total pressure at given profile location
-
- * - :math:`{p}_{te}`
- - pt at the edge of the boundary layer
- * - ymax
- - distance from wall to freestream
- * - grad
- - flag for gradient of velocity magnitude option
-
-
-This quantity does not appear in any physical
-conservation equations, but is sometimes used in the evaluation of inlet flows.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 2D part
- * - velocity
- - vector variable
- * - total pressure
- - scalar variable
- * - ymax
- - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
-
-     See the algorithm note under :ref:`Boundary Layer Thickness <BL_Thick>`.
-
- * - grad
- - -1 flags the computing of the
- velocity-magnitude gradient via four-point interpolation.
-
- vector variable = Grad(velocity magnitude)
-
-     See :ref:`BL_aGradOfVelMag <BL_aGradOfVelMag>`.
-
-
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
-   Server of Servers (SoS) decomposition.
-
-
-.. _BL_Shape:
-
-----------
-BL_Shape()
-----------
-
-
-**Boundary Layer: Shape Parameter**
-
-``BL_Shape()`` is not explicitly listed as a general function, but it can
-be computed as a scalar variable via the calculator by
-dividing a displacement thickness by a momentum thickness:
-
-:math:`H=\frac{{\delta }^{*}}{\theta }`
-
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`{\delta }^{*}`
- - boundary-layer displacement thickness
- * - :math:`\theta`
- - boundary-layer momentum thickness
-
-
-This scalar variable is used to characterize boundary-layer flows, especially to
-indicate potential for separation. This variable increases as a
-separation point is approached, and it varies rapidly near a separation
-point.
-
-.. note::
- Separation has not been observed for H < 1.8, but it definitely
- has been observed for H = 2.6. Thus, separation is considered
- in some analytical methods to occur in turbulent boundary layers for H = 2.0.
-
-   In a Blasius laminar layer (that is, flat-plate boundary
-   layer growth with zero pressure gradient), H = 2.605. In a turbulent boundary layer,
- H ~= 1.4 to 1.5, and with extreme variations, H ~= 1.2 to 2.5.
-
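-Because there is no built-in ``BL_Shape()`` function, here is a sketch of the
-division, assuming ``dispthick`` and ``momethick`` were created with
-:ref:`BL_DispThick <BL_DispThick>` and :ref:`BL_MomeThick <BL_MomeThick>`::
-
-    session.ensight.variables.evaluate("shape_H = dispthick/momethick")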
-
-
-.. _BL_Thick:
-
-----------
-BL_Thick()
-----------
-
-
-**Boundary Layer: Thickness**
-
-``BL_Thick(boundary parts, velocity, ymax, grad)``
-
-
-Computes a scalar variable that is the boundary-layer
-thickness :math:`\delta`. This scalar variable is defined as:
-
-:math:`\delta ={n|}_{u/U=0.995}`
-
-That is, the distance normal to the surface at which :math:`u/U=0.995`.
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`u`
- - magnitude of the velocity component parallel
- to the wall at a given location in the boundary layer
- * - :math:`U`
- - magnitude of the velocity just outside the boundary layer
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 2D part
- * - velocity
- - vector variable
- * - ymax
- - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
-
- See the algorithm note that follows.
-
- * - grad
- - -1 = flags the computing of the
- velocity-magnitude gradient via three-point interpolation
-
-     vector variable = Grad(velocity magnitude). See :ref:`BL_aGradOfVelMag <BL_aGradOfVelMag>`.
-
-
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
-   Server of Servers (SoS) decomposition.
-
-
-.. admonition:: Algorithm: Boundary Layer Thickness
-
- The ``ymax`` argument allows the edge of the boundary layer to be approximated by two
- different algorithms: the Baldwin-Lomax-Spalart algorithm and the convergence algorithm.
- Both algorithms profile velocity data normal to the boundary surface (wall).
- Specifying ``ymax > 0`` leverages results from both the Baldwin-Lomax and vorticity
- functions over the entire profile to produce a fading function that approximates the edge
- of the boundary layer, whereas specifying ``ymax = 0`` uses velocity and
- velocity gradient differences to converge to the edge of the boundary
- layer.
-
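-A sketch showing both edge-detection choices, assuming a vector variable named
-``Velocity`` (the ``ymax`` distance is illustrative)::
-
-    session.ensight.part.select_all()
-    # Convergence algorithm (ymax = 0)
-    session.ensight.variables.evaluate("delta = BL_Thick(plist,Velocity,0.0,-1)")
-    # Baldwin-Lomax-Spalart algorithm (ymax > 0)
-    session.ensight.variables.evaluate("delta_bls = BL_Thick(plist,Velocity,0.1,-1)")
-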
-**References**
-
-For more information, see these references:
-
-1. P.M. Gerhart, R.J. Gross, & J.I. Hochstein, Fundamentals
- of Fluid Mechanics, second Ed.,(Addison-Wesley: New York, 1992)
-2. P. Spalart, A Reasonable Method to Compute Boundary-Layer
- Parameters from Navier-Stokes Results, (Unpublished: Boeing, 1992)
-3. H. Schlichting & K. Gersten, Boundary Layer Theory, eighth
- Ed., (Springer-Verlag: Berlin, 2003)
-
-
-
-.. _BL_VelocityAtEdge:
-
--------------------
-BL_VelocityAtEdge()
--------------------
-
-
-**Boundary Layer: Velocity at Edge**
-
-``BL_VelocityAtEdge(boundary parts, velocity, ymax,comp(0,1,2),grad)``
-
-Extracts a vector variable that is a velocity vector
-:math:`{V}_{e}`, :math:`{V}_{p}`, or :math:`{V}_{n}`. This vector variable is defined as:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`{V}_{e}`
- - :math:`{V}_{e}\left(x,y,z\right)` = velocity vector at the edge of the boundary
- layer :math:`\delta`
-
- * - :math:`{V}_{n}`
-
- - :math:`Dot\left({V}_{e},N\right)` = decomposed velocity vector normal to
- the wall at the edge of the boundary layer :math:`\delta`
-
- * - :math:`{V}_{p}`
-     - :math:`{V}_{e}-{V}_{n}` = decomposed velocity
- vector parallel to the wall at the edge of the boundary layer :math:`\delta`
-
-
-To locate the edge of the boundary layer, this function first computes the
-boundary-layer thickness :math:`\delta`. (See :ref:`Boundary Layer Thickness <BL_Thick>`.)
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 2D part
- * - velocity
- - vector variable
- * - density
- - scalar variable (compressible flow), constant number (incompressible flow)
- * - ymax
- - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
-
-     See the algorithm note under :ref:`Boundary Layer Thickness <BL_Thick>`.
-
- * - comp
- - constant number (0 = velocity vector at edge of boundary layer, 1 = decomposed
- velocity vector parallel to wall tangent to surface, 2 = decomposed velocity
- vector normal to wall)
- * - grad
- - -1 flags the computing of the
- velocity-magnitude gradient via four-point interpolation
-
-     vector variable = Grad(velocity magnitude). See :ref:`BL_aGradOfVelMag <BL_aGradOfVelMag>`.
-
-
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
-   Server of Servers (SoS) decomposition.
-
-
-.. _BL_Y1Plus:
-
------------
-BL_Y1Plus()
------------
-
-
-**Boundary Layer: Y1+ off Wall**
-
-``BL_Y1Plus(boundary parts, density, viscosity, grad option, vector variable)``
-
-
-Computes a scalar variable that is :math:`{y}_{1}^{+}`, the non-dimensional
-distance off the wall to the first field cell centroid. This scalar variable is defined as:
-
-:math:`{y}_{1}^{+}=\frac{{y}_{1}{\rho }_{w}}{{\mu }_{w}}\sqrt{\frac{{\tau }_{w}}{{\rho }_{w}}}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`n`
- - distance profiled normal to the wall
- * - :math:`{\tau }_{w}`
- - :math:`={\mu }_{w}{\left(\frac{\partial u}{\partial n}\right)}_{n=0}`
- = fluid shear stress at the wall
- * - :math:`{\mu }_{w}`
- - dynamic viscosity of fluid at the wall (may be a
- spatially and/or temporally varying quantity and is usually a constant)
- * - :math:`{\rho }_{w}`
- - density at the wall
- * - :math:`{y}_{1}`
- - distance from first field element centroid to
- outer face, profiled normal to wall
- * - :math:`u`
- - fluid velocity vector
-
-
-Normally :math:`{y}^{+}` is used to estimate or confirm the required first grid spacing
-for proper capturing of viscous-layer properties. The values are dependent on
-various factors, including what variables at the wall are sought, the turbulence
-models used, and whether the law of the wall is used. For correct interpretation of
-the values for your application, consult a boundary-layer text.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 2D (wall or surface) part
- * - density
- - scalar variable
- * - viscosity
- - scalar variable, constant variable, or constant number
- * - gradient option
- - One of three values (1 = Use field velocity (used to calculate wall gradient),
- 2 = Use gradient at boundary part (wall or surface), 3 = Use gradient in
- corresponding field part)
- * - vector variable
- - One of three values corresponding to the gradient option (1 = Use field velocity = velocity vector,
- 2 = Use gradient at boundary = gradient variable on 2D boundary (wall or surface) part, 3 =
- Use gradient in field = gradient variable defined in 3D field part). The vector
- can also be the gradient calculated using Grad(velocity magnitude), that is,
- :ref:`BL_aGradfVelMag <BL_aGradfVelMag>`.
-
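-As an illustration, the following sketch creates this variable from PyEnSight.
-It is an assumption-laden example: the wall part is selected, and the dataset
-variables ``density``, ``viscosity``, and ``velocity`` are illustrative names.
-
-.. code-block:: python
-
-    # Hypothetical sketch: launch a local session and load data beforehand.
-    from ansys.pyensight.core import LocalLauncher
-
-    session = LocalLauncher().start()
-    # ... load a dataset and select the wall part(s) here ...
-    # Gradient option 1 = use field velocity to compute the wall gradient.
-    session.ensight.variables.evaluate(
-        "y1plus = BL_Y1Plus(plist, density, viscosity, 1, velocity)"
-    )
-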
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
- Server of Server (SoS) decomposition.
-
-
-.. _BL_Y1PlusDist:
-
----------------
-BL_Y1PlusDist()
----------------
-
-**Boundary Layer: Distance off Wall**
-
-``BL_Y1PlusDist(boundary parts, velocity)``
-
-
-Computes a scalar variable that is the off-the-wall distance :math:`{y}_{1}`,
-that is, the distance off the wall to the first field cell
-centroid. The velocity variable is used only to determine whether the variable
-is nodal or elemental, to maintain consistency with the previous :math:`{y}_{1}^{+}`
-calculation.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - boundary part
- - 2D part
- * - velocity
- - vector variable
-
-
-.. note::
- Boundary layer (``BL_*``) functions are not supported for
- Server of Server (SoS) decomposition.
-
-
-.. _CaseMap:
-
----------
-CaseMap()
----------
-
-
-**Case Map**
-
-``CaseMap(2D or 3D parts, case to map from, scalar/vector/tensor, parts to map from, search option flag)``
-
-
-For all locations on the selected parts, this function
-finds the specified variable value (scalar, vector, or tensor) from
-the *case to map from* using a variety of user-specified
-search options.
-
-- If the variable in the *case to map from* is located at the nodes, the
- case-mapped variable is defined on the nodes of the selected parts.
-- If the variable is located at the elements, the case-mapped variable is
- defined at the elements of the selected parts.
-
-The idea is to map onto the selected parts a variable from another case,
-usually for comparison purposes. The function does this by taking the
-location of the nodes or element centroids and looking at the other case
-to see if the variable in question is defined at that location in the field. If
-so, the value is mapped to the part's nodes or elements. This algorithm can
-be fairly expensive, so options are provided to guide the search for a
-matching variable location.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - case to map from
- - constant number
- * - scalar/vector/tensor
- - scalar, vector, or tensor variable
- * - search option
- - If the mapping search is successful, the exact value found is
- always assigned. If the search is not successful because
- there is not an exact match of node or element location, the
- following occurs:
-
- If the search option is set to *search only* (0), an undefined value
- is assigned.
-
- If the search option is set to *nearest value* (1), the defined variable
- value at the closest node or element is assigned (no undefined values).
- This option takes time to search the *from case* according to
- the following *parts to map from* selection.
-
- * - parts to map from
- - The values for a location must be found by
- searching the geometry in the *case to map from*. By setting this
- option, you can hint to EnSight where in the geometry it should
- search, which can vastly improve performance.
-
- *Global search* (0) - This is the legacy scheme. It
- performs a methodical but uninformed search of the 3D,
- then 2D, then 1D, and then even 0D (point) elements to find the first
- defined variable value. This works well for mapping onto a 3D or 2D part
- that is completely enclosed in a 3D *from* volume. It works poorly
- if the 2D part is not fully enclosed (such as on
- the edge of a 3D part) or if you want to map a 2D part onto a 2D part and
- other 3D parts exist.
-
- *Dimensionality match* (1) - Only parts of the same
- dimension in the from and to are searched. For example, only 3D
- *from* parts are used to map onto a 3D
- selected part. This is the option that you should use most
- often.
-
- *Part number match* (2) - The order of the parts is
- used; that is, if you are computing the case map on the third part,
- then the third part is used in the *case to map from*. This is best
- used if you have exactly the same dataset in terms of the part list
- ordering, but perhaps calculated differently so only the variable
- values differ.
-
- *Parts selected for case to map from* (3) - Select
- parts in the *from* case as well as the *to* case. Only selected parts
- are used in the two cases.
-
-
-.. note::
- This function uses EnSight's search capability to do the mapping. It is
- critical that the nodes of the parts being mapped onto lie within the
- geometry of all of the parts of the case being mapped from. Mapping from a
- 2D surface to a 2D surface only works reliably if the surfaces are the
- same (or extremely close, and the ``flag=1`` option is chosen).
-
- Mapping nodal variables is faster than mapping elemental variables. This function is
- threaded so an Enterprise (formerly Gold or HPS) license key may improve
- performance.
-
- Select only the parts that you require, and use search option ``0`` if at all possible.
-
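-As a sketch of typical usage, assuming a connected PyEnSight ``session``, a
-second case already loaded, and an illustrative ``pressure`` variable:
-
-.. code-block:: python
-
-    # Hypothetical sketch: map "pressure" from case 2 onto the selected parts.
-    # 4th argument 1 = dimensionality match; 5th argument 1 = nearest value.
-    session.ensight.variables.evaluate(
-        "mapped_pressure = CaseMap(plist, 2, pressure, 1, 1)"
-    )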
-
-
-.. _CaseMapDiff:
-
--------------
-CaseMapDiff()
--------------
-
-
-**Case Map Diff**
-
-``CaseMapDiff(2D or 3D parts, case to map from, scalar/vector/tensor, 0/1 0=search only 1=if search fails find closest)``
-
-
-This function is equivalent to the expression:
-
-``Variable - CaseMap[Variable]``
-
-For information on how this function works, see :ref:`CaseMap <CaseMap>`.
-
-
-
-.. _CaseMapImage:
-
---------------
-CaseMapImage()
---------------
-
-**Case Map Image**
-
-``CaseMapImage(2D or 3D parts, part to map from, scalar, viewport number, Undefined value limit)``
-
-
-This function projects a 2D part variable from a different case onto a
-3D geometry, taking into account the view orientation from the specified viewport number,
-similar to a texture mapping. In effect, it maps 2D results onto a 3D geometry,
-accounting for view orientation and surface visibility.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - part to map from
- - part number of the 2D part (This 2D part is
- usually data from an infrared camera.)
- * - scalar
- - scalar variable
- * - viewport number
- - Viewport number showing parts that the
- variable is being computed on, from the same camera view as the part to
- map from
- * - Undefined value limit
- - Values on the 2D part that are under this
- value are considered undefined
-
-
-
-.. _Coeff:
-
--------
-Coeff()
--------
-
-**Coefficient**
-
-``Coeff(any 1D or 2D parts, scalar, component [, Compute_Per_part])``
-
-
-Computes a constant or constant per part variable whose
-value is a coefficient :math:`{C}_{x}` , :math:`{C}_{y}` , or :math:`{C}_{z}`
-such that :math:`{C}_{x}={\displaystyle {\int }_{S}f{n}_{x}dS}`,
-:math:`{C}_{y}={\displaystyle {\int }_{S}f{n}_{y}dS}`,
-:math:`{C}_{z}={\displaystyle {\int }_{S}f{n}_{z}dS}`
-
-where:
-
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`f`
- - any scalar variable
- * - :math:`S`
- - 1D or 2D domain
- * - :math:`{n}_{x}`
- - x component of normal
- * - :math:`{n}_{y}`
- - y component of normal
- * - :math:`{n}_{z}`
- - z component of normal
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - variable
- - scalar or vector
- * - component
- - if variable is a vector: [X], [Y], or [Z]
-
-
-Specify [X], [Y], or [Z] to get the corresponding coefficient.
-
-.. note::
- Normal for a 1D part is parallel to the plane of the plane tool.
-
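-For example, the following sketch computes the X coefficient of an
-illustrative ``pressure`` scalar over the selected 2D parts; the component
-token is written here as ``X``, following the [X]/[Y]/[Z] choice above:
-
-.. code-block:: python
-
-    # Hypothetical sketch: integrate pressure * nx over the selected surface.
-    session.ensight.variables.evaluate("Cx = Coeff(plist, pressure, X)")
-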
-
-.. _Cmplx:
-
--------
-Cmplx()
--------
-
-**Complex**
-
-``Cmplx(any parts, scalar/vector(real portion), scalar/vector(complex portion), [optional frequency(Degrees)])``
-
-
-Creates a complex scalar or vector from two scalar or
-vector variables. The frequency is optional and is used only for
-reference.
-
-
-:math:`\text{Z = A + Bi}`
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - real portion
- - scalar or vector variable
- * - complex portion
- - scalar or vector variable (but must be same as the real portion)
- * - [frequency]
- - constant number (optional)
-
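-A minimal sketch, assuming two illustrative scalars ``p_real`` and ``p_imag``
-from a harmonic analysis and a reference frequency of 90 degrees:
-
-.. code-block:: python
-
-    # Hypothetical sketch: build a complex scalar Z = p_real + p_imag*i.
-    session.ensight.variables.evaluate(
-        "p_cmplx = Cmplx(plist, p_real, p_imag, 90.0)"
-    )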
-
-.. _CmplxArg:
-
-----------
-CmplxArg()
-----------
-
-**Complex Argument**
-
-``CmplxArg(any parts, complex scalar or vector)``
-
-
-Computes the argument of a complex scalar or vector. The
-resulting scalar is given in a range between -180 and 180
-degrees.
-
-:math:`\text{Arg = atan(Vi/Vr)}`
-
-
-
-.. _CmplxConj:
-
------------
-CmplxConj()
------------
-
-**Complex Conjugate**
-
-``CmplxConj(any parts, complex scalar or vector)``
-
-
-Computes the conjugate of a complex scalar or vector.
-
-Returns a complex scalar or vector, where:
-
-:math:`\text{Nr = Vr}`
-
-
-:math:`\text{Ni = -Vi}`
-
-
-
-.. _CmplxImag:
-
------------
-CmplxImag()
------------
-
-**Complex Imaginary**
-
-``CmplxImag(any parts, complex scalar or vector)``
-
-
-Extracts the imaginary portion of a complex scalar or vector
-into a real scalar or vector:
-
-:math:`\text{N = Vi}`
-
-
-
-.. _CmplxModu:
-
------------
-CmplxModu()
------------
-
-**Complex Modulus**
-
-``CmplxModu(any parts, complex scalar or vector)``
-
-
-Returns a real scalar or vector that is the modulus of the
-given scalar or vector:
-
-:math:`\text{N = SQRT(Vr*Vr + Vi*Vi)}`
-
-
-
-.. _CmplxReal:
-
------------
-CmplxReal()
------------
-
-**Complex Real**
-
-``CmplxReal(any parts, complex scalar or vector)``
-
-
-Extracts the real portion of a complex scalar or vector
-into a real scalar or vector:
-
-:math:`\text{N = Vr}`
-
-
-
-.. _CmplxTransResp:
-
-----------------
-CmplxTransResp()
-----------------
-
-**Complex Transient Response**
-
-``CmplxTransResp(any parts, complex scalar or vector, constant PHI(0.0-360.0 Degrees))``
-
-
-Returns a real scalar or vector that is the real
-transient response:
-
-:math:`\text{Re(Vt) = Re(Vc)Cos(phi) - Im(Vc)Sin(phi)}`
-
-which is a function of the transient phase angle
-:math:`\text{phi}` defined by:
-
-:math:`\text{phi = 2 Pi f t}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - t
- - harmonic response time parameter
- * - f
- - frequency of the complex variable :math:`\text{Vc}`
-
-
-and the complex field :math:`\text{Vc}`, defined as:
-
-:math:`\text{Vc = Vc(x,y,z) = Re(Vc) + i Im(Vc)}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - Vc
- - complex variable field
- * - Re(Vc)
- - real portion of Vc
- * - Im(Vc)
- - imaginary portion of Vc
- * - i
- - Sqrt(-1)
-
-
-.. note::
- The transient complex function is a composition of Vc and Euler's
- relation, namely:
-
- Vt = Vt(x,y,z,t) = Re(Vt) + i Im(Vt) = Vc * e^(i phi)
-
- where:
-
- e^(i phi) = Cos(phi) + i Sin(phi)
-
- The real portion, Re(Vt), is as designated in the preceding equation.
-
- This function is only valid for harmonic variations, that is, fields with a
- defined frequency.
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - phi angle
- - constant number between 0 and 360 degrees.
-
-
-.. _ConstPerPart:
-
---------------
-ConstPerPart()
---------------
-
-
-**ConstPerPart**
-
-``ConstPerPart(any parts, constant)``
-
-
-This function assigns a value to the selected parts. The value can be either a
-floating point value entered into the field or a case constant. This value
-does not change over time. At a later point, other parts can be selected
-and this value can be recalculated. These other parts are then assigned the new value.
-The existing parts that were previously selected retain their previously assigned
-value. In other words, each successive time that this value is recalculated for an
-existing variable, the values assigned to the most recently selected parts are updated
-without removing previously assigned values.
-
-
-.. _Curl:
-
-------
-Curl()
-------
-
-**Curl**
-
-``Curl(any parts, vector)``
-
-
-Computes a vector variable that is the curl of the input vector:
-
-:math:`Cur{l}_{f}=\overline{\nabla }\times \dot{f}=\left(\frac{\partial {f}_{3}}{\partial y}-\frac{\partial {f}_{2}}{\partial z}\right)\widehat{i}+\left(\frac{\partial {f}_{1}}{\partial z}-\frac{\partial {f}_{3}}{\partial x}\right)\widehat{j}+\left(\frac{\partial {f}_{2}}{\partial x}-\frac{\partial {f}_{1}}{\partial y}\right)\widehat{k}`
-
-
-
-.. _Defect_Functions:
-
----------------------------------------------
-Porosity characterization functions (defects)
----------------------------------------------
-
-Consider a mesh with a scalar per element variable representing the micro porosity of each
-cell, where ``0`` means no porosity (the cell is completely full) and ``100`` means that the cell is
-fully porous (the cell is empty). Cells with a non-zero porosity are considered to have
-defects. Defects that span multiple cells may indicate an unacceptable defect.
-
-Six ``Defect_*`` functions are provided to help calculate factors of interest in characterizing
-the defects that occur over multiple cells. To use the following ``Defect_*`` functions, you would
-create an isovolume of your porosity variable between the desired ranges (perhaps 5 to 100) and
-select this isovolume part.
-
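-A minimal sketch of this workflow, assuming an isovolume part of an
-illustrative ``porosity`` variable has already been created and is selected:
-
-.. code-block:: python
-
-    # Hypothetical sketch: characterize defects on the selected isovolume part.
-    session.ensight.variables.evaluate("defect_vol = Defect_BulkVolume(plist)")
-    # Count defects whose bulk volume lies between two illustrative bounds.
-    session.ensight.variables.evaluate(
-        "n_defects = Defect_Count(plist, defect_vol, 0.001, 1000.0)"
-    )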
-
-.. _Defect_BulkVolume:
-
--------------------
-Defect_BulkVolume()
--------------------
-
-**Defect Bulk Volume**
-
-``Defect_BulkVolume(2D or 3D parts)``
-
-
-Returns a per element scalar that is the sum of the
-volume of all the cells comprising the defect. Each cell with the
-defect is then assigned this value.
-
-For input specifications, see :ref:`Defect Functions <Defect_Functions>`.
-
-
-.. _Defect_Count:
-
---------------
-Defect_Count()
---------------
-
-**Defect Count**
-
-``Defect_Count(2D or 3D parts, Defect scalar per elem, min value, max value [,Compute_Per_part])``
-
-
-Returns a case constant that is the count of the defects whose
-values lie between the minimum value and the maximum value. This
-function uses a ``defect scalar per elem`` variable that has been previously calculated by any of
-the other five :ref:`Defect functions <Defect_Functions>`.
-
-For input specifications, see :ref:`Defect Functions <Defect_Functions>`.
-
-
-.. _Defect_LargestLinearExtent:
-
-----------------------------
-Defect_LargestLinearExtent()
-----------------------------
-
-**Defect Largest Linear Extent**
-
-``Defect_LargestLinearExtent(2D or 3D parts)``
-
-
-Returns a per element scalar that is the largest linear
-extent of all the cells comprising the defect, where each cell of the defect is
-assigned this value. The largest linear extent is the root-mean-squared
-distance.
-
-For input specifications, see :ref:`Defect Functions <Defect_Functions>`.
-
-
-.. _Defect_NetVolume:
-
-------------------
-Defect_NetVolume()
-------------------
-
-**Defect NetVolume**
-
-``Defect_NetVolume(2D or 3D parts, scalar per elem, scale factor)``
-
-
-Returns a per element scalar that is the sum of the cell
-volumes multiplied by the scalar per element variable multiplied by the scale
-factor of all the cells comprising the defect, where each cell of the defect is
-assigned this value. The ``scalar per elem`` variable is usually porosity,
-but you can use any per element scalar variable. The scale factor
-adjusts the scalar per element variable values. For example, if the porosity range is
-from 0.0 to 100.0, then a scale factor of 0.01 can be used to normalize the
-porosity values to volume fraction values ranging from 0.0 to 1.0.
-
-For input specifications, see :ref:`Defect Functions <Defect_Functions>`.
-
-
-.. _Defect_ShapeFactor:
-
---------------------
-Defect_ShapeFactor()
---------------------
-
-**Defect ShapeFactor**
-
-``Defect_ShapeFactor(2D or 3D parts)``
-
-
-Returns a per element scalar that is the *largest linear extent* divided by the diameter of the
-sphere with a volume equal to the *bulk volume* of the defect, where each cell of the defect
-is assigned this value.
-
-For input specifications, see :ref:`Defect Functions <Defect_Functions>`.
-
-
-.. _Defect_SurfaceArea:
-
---------------------
-Defect_SurfaceArea()
---------------------
-
-**Defect SurfaceArea**
-
-``Defect_SurfaceArea(2D or 3D parts)``
-
-
-Returns a per element scalar that is the surface area of
-the defect, where each cell of the defect is assigned this value.
-
-For input specifications, see :ref:`Defect Functions <Defect_Functions>`.
-
-
-.. _Density:
-
----------
-Density()
----------
-
-**Density**
-
-``Density(any parts, pressure, temperature, gas constant)``
-
-
-Computes a scalar variable that is the density :math:`\rho`. This scalar variable
-is defined as:
-
-:math:`\rho =\frac{p}{RT}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`p`
- - pressure
- * - :math:`T`
- - temperature
- * - :math:`R`
- - gas constant
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - pressure
- - scalar variable
- * - temperature
- - scalar variable
- * - gas constant
- - scalar, constant, or constant per part variable, or constant number
-
-
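-For example, air at standard sea-level conditions (:math:`p=101325` Pa,
-:math:`T=288.15` K, :math:`R=287.05` J/(kg·K)) gives
-:math:`\rho =101325/\left(287.05\times 288.15\right)\approx 1.225\text{ kg/m}^{3}`.
-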
-
-.. _DensityLogNorm:
-
-----------------
-DensityLogNorm()
-----------------
-
-
-**Log of Normalized Density**
-
-``DensityLogNorm(any parts, density, freestream density)``
-
-
-Computes a scalar variable that is the natural log of *normalized density*. This
-scalar variable is defined as:
-
-:math:`\mathrm{ln}{\rho }_{n}=\mathrm{ln}\left(\rho /{\rho }_{i}\right)`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`\rho`
- - density
- * - :math:`{\rho }_{i}`
- - freestream density
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar variable, constant variable, or constant number
- * - freestream density
- - constant or constant per part variable or constant number
-
-
-.. _DensityNorm:
-
--------------
-DensityNorm()
--------------
-
-**Normalized Density**
-
-``DensityNorm(any parts, density, freestream density)``
-
-
-Computes a scalar variable that is the *normalized density* :math:`{\rho }_{n}`.
-This scalar variable is defined as:
-
-:math:`{\rho }_{n}=\rho /{\rho }_{i}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`\rho`
- - density
- * - :math:`{\rho }_{i}`
- - freestream density
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar variable, constant variable, or constant number
- * - freestream density
- - constant or constant per part variable or constant number
-
-
-
-.. _DensityNormStag:
-
------------------
-DensityNormStag()
------------------
-
-
-**Normalized Stagnation Density**
-
-``DensityNormStag(any parts, density, total energy,
-velocity, ratio of specific heats, freestream density, freestream speed of sound,
-freestream velocity magnitude)``
-
-
-Computes a scalar variable that is the *normalized stagnation density*.
-This scalar variable is defined as:
-
-:math:`{\rho }_{on}={\rho }_{o}/{\rho }_{oi}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`{\rho }_{o}`
- - stagnation density
- * - :math:`{\rho }_{oi}`
- - freestream stagnation density
-
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant or constant per part variable, or constant number
- * - freestream density
- - constant or constant per part variable or constant number
- * - freestream speed of sound
- - constant or constant per part variable or constant number
- * - freestream velocity magnitude
- - constant or constant per part variable or constant number
-
-
-
-.. _DensityStag:
-
--------------
-DensityStag()
--------------
-
-**Stagnation Density**
-
-``DensityStag(any parts, density, total energy, velocity, ratio of specific heats)``
-
-
-Computes a scalar variable that is the *stagnation
-density* :math:`{\rho }_{o}`. This scalar variable is defined as:
-
-:math:`{\rho }_{o}=\rho {\left(1+\left(\frac{\gamma -1}{2}\right){M}^{2}\right)}^{\left(1/\left(\gamma -1\right)\right)}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`\rho`
- - density
- * - :math:`\gamma`
- - ratio of specific heats
- * - :math:`M`
- - Mach number
-
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part variable, or constant number
-
-
-
-.. _Dist2Nodes:
-
-------------
-Dist2Nodes()
-------------
-
-
-**Distance Between Nodes**
-
-``Dist2Nodes(any parts, nodeID1, nodeID2)``
-
-
-Computes a constant, positive variable that is the distance between any two nodes.
-This function searches down the part list until it finds *nodeID1* and
-then searches until it finds *nodeID2*. It returns ``Undefined`` if *nodeID1* or *nodeID2*
-cannot be found. Nodes are designated by their node IDs, so the part must have node IDs.
-
-.. note::
- Most created parts do not have node IDs.
-
- The geometry type is important for using this function. There are three geometry types:
- static, changing coordinate, and changing connectivity. You can find out your geometry
- type by selecting **Query→Dataset** and looking in the **General Geometric section** of the
- popup window.
-
- If you have a static geometry with visual displacement turned on, the ``Dist2Nodes``
- function does not use the displacement in its calculations. You must enable server-side
- (computational) displacement. If you have changing coordinate geometry, the ``Dist2Nodes``
- function works without adjustment. If you have changing connectivity, the ``Dist2Nodes``
- function should not be used, as it can give nonsensical results because connectivity is
- reevaluated each timestep and node IDs may be reassigned.
-
- For transient results, to find the distance between two nodes on different parts, or
- between two nodes if one or both nodes don't have IDs or the IDs are not unique for the model
- (namely, more than one part has the same node ID), use the line tool.
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - nodeID1
- - constant number
- * - nodeID2
- - constant number
-
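-A minimal sketch, assuming the selected part has node IDs and that the
-illustrative IDs 101 and 202 exist:
-
-.. code-block:: python
-
-    # Hypothetical sketch: constant distance between two labeled nodes.
-    session.ensight.variables.evaluate("gap = Dist2Nodes(plist, 101, 202)")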
-
-.. _Dist2Part:
-
------------
-Dist2Part()
------------
-
-
-**Distance to Parts: Node to Nodes**
-
-``Dist2Part(origin part + field parts, origin part, origin part normal)``
-
-
-Computes a scalar variable on the origin part and field
-parts that is the minimum distance at each node of the origin and field parts to
-any node in the origin part. This distance is unsigned by default. The origin
-part is the origin of a Euclidean distance field. So, by definition, the scalar
-variable is always zero at the origin part because the distance to the
-origin part is always zero.
-
-The origin part normal vector must be a per node
-variable. If the origin part normal is calculated using the ``Normal`` calculator
-function, it is a per element variable and must be moved to the nodes using
-the calculator.
-
-.. note::
- The origin part must be included in the field part list (although, as
- mentioned earlier, the scalar variable is zero for all nodes on the
- origin part). This algorithm has an execution time on the order of the
- number of nodes in the field parts times the number of nodes in the origin
- part. While the implementation is both SOS-aware and threaded, the run time
- is dominated by the number of nodes in the computation.
-
-
-This function is computed between the nodes of the
-origin and field parts. As a result, the accuracy of its approximation to the
-distance field is limited to the density of nodes (effectively the size of the
-elements) in the origin part. If a more accurate approximation is required, use
-the :ref:`Dist2PartElem() <Dist2PartElem>` function. While this function is slower,
-it is less dependent on the nodal distribution in the origin part because it uses the
-nodes plus the element faces to calculate the minimum distance.
-
-**Usage**
-
-You typically use an arbitrary 2D part to create a clip in a 3D field. You
-then use the 2D part as your origin part and select the origin part as well
-as your 3D field parts. There is no need to have normal vectors. After creating your
-scalar variable, which you might call ``distTo2Dpart``, you create
-an ``isosurface=0`` in your field using ``distTo2Dpart`` as your variable.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - origin part
- - part number to compute the distance to
- * - origin part normal
- - constant for unsigned computation or a
- nodal vector variable defined on the origin part for a signed computation
-
-
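-A sketch of the usage described above, with an illustrative part number 5
-for the 2D clip used as the origin part:
-
-.. code-block:: python
-
-    # Hypothetical sketch: unsigned distance field from origin part 5.
-    # Passing the constant 0 for the normal requests an unsigned result.
-    session.ensight.variables.evaluate("distTo2Dpart = Dist2Part(plist, 5, 0)")
-    # An isosurface at distTo2Dpart = 0 can then be created interactively.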
-
-.. _Dist2PartElem:
-
----------------
-Dist2PartElem()
----------------
-
-
-**Distance to Parts: Node to Elements**
-
-``Dist2PartElem(origin part + field parts, origin part, origin part normal)``
-
-
-Computes a scalar variable that is the minimum distance
-at each node of the origin part and field parts and the closest point on any
-element in the origin part.
-
-- If the origin part normal vector is not supplied, this distance is unsigned.
-- If the origin part normal vector is supplied, the distance is signed.
-
-.. note::
- The origin part normal vector must be a per node variable. If the origin part
- normal is calculated using the :ref:`Normal() <Normal>` function,
- it is a per element variable and must be moved to the nodes using the
- :ref:`ElemToNode() <ElemToNode>` function. If it is per node and
- the origin part normal vector variable defined at the origin part is supplied,
- the direction of the normal is used to return a signed distance function
- with distances in the direction of the normal being positive.
-
-
-Once the closest point in the origin part has been found
-for a node in a field part, the dot product of the origin node normal and a
-vector between the two nodes is used to select the sign of the result.
-
-.. note::
- The origin part must be included in the field part list (although the
- output is zero for all nodes of the origin part because it is the
- origin of the Euclidean distance). This algorithm has an execution time on
- the order of the number of nodes in the field parts multiplied by the number of
- elements in the origin part. While the implementation is both SOS-aware and
- threaded, the run time is dominated by the number of nodes in the
- computation.
-
-
-This function is a more accurate estimation of the distance field than the :ref:`Dist2Part() <Dist2Part>`
-function because it allows for distances between nodes and element surfaces on the origin part. This
-improved accuracy results in increased computational complexity. As a result, the ``Dist2PartElem`` function
-can be several times slower than the :ref:`Dist2Part() <Dist2Part>` function.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - origin part
- - part number to compute the distance to
- * - origin part normal
- - constant for unsigned computation or a
- nodal vector variable defined on the origin part for a signed computation
-
-
-.. _Div:
-
------
-Div()
------
-
-
-**Divergence**
-
-``Div(2D or 3D parts, vector)``
-
-
-Computes a scalar variable whose value is the divergence. This scalar variable is
-defined as:
-
-:math:`Div=\frac{\partial u}{\partial x}+\frac{\partial v}{\partial y}+\frac{\partial w}{\partial z}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - u,v,w
- - velocity components in the X, Y, Z
- directions
-
-
-.. _EleMetric:
-
------------
-EleMetric()
------------
-
-
-**Element Metric**
-
-``EleMetric(any parts, metric_function).``
-
-
-Calculates an element mesh metric at each element,
-creating a scalar, element-based variable depending upon the selected metric
-function. The various metrics are valid for specific element types. If the
-element is not of the type supported by the metric function, the value at the
-element is the EnSight undefined value. Metrics exist for the following
-element types: ``tri``, ``quad``, ``tet``, and ``hex``. A metric can be any
-one of the following:
-
-.. list-table::
- :widths: 10 25 25 40
- :header-rows: 1
-
- * - #
- - Name
- - Elem types
- - Description
- * - 0
- - Element type
- - All
- - EnSight element type number. See the table that follows this one.
- * - 1
- - Condition
- - hexa8, tetra4, quad4, tria3
- - Condition number of the weighted Jacobian matrix.
- * - 2
- - Scaled Jacobian
- - hexa8, tetra4, quad4, tria3
- - Jacobian scaled by the edge length
- products.
-
- * - 3
- - Shape
- - hexa8, tetra4, quad4, tria3
- - Varies by element type.
- * - 4
- - Distortion
- - hexa8, tetra4, quad4, tria3
- - Distortion is a measure of how well behaved the
- mapping from parameter space to world coordinates is.
-
- * - 5
- - Edge ratio
- - hexa8, tetra4, quad4, tria3
- - Ratio of longest edge length over shortest
- edge length.
-
- * - 6
- - Jacobian
- - hexa8, tetra4, quad4
- - Minimum determinant of the Jacobian
- computed at each vertex.
-
- * - 7
- - Radius ratio
- - tetra4, quad4, tria3
- - Normalized ratio of the radius of the inscribed
- sphere to the radius of the circumsphere.
-
- * - 8
- - Minimum angle
- - tetra4, quad4, tria3
- - Minimum included angle in degrees.
- * - 9
- - Maximum edge ratio
- - hexa8, quad4
- - Largest ratio of principal axis
- lengths.
-
- * - 10
- - Skew
- - hexa8, quad4
- - Maximum degree to which a pair of vectors are parallel,
- measured using the dot product.
-
- * - 11
- - Taper
- - hexa8, quad4
- - Maximum ratio of a cross-derivative to its
- shortest associated principal axis.
-
- * - 12
- - Stretch
- - hexa8, quad4
- - Ratio of minimum edge length to maximum
- diagonal.
-
- * - 13
- - Oddy
- - hexa8, quad4
- - Maximum deviation of the metric tensor from the
- identity matrix, evaluated at the corners and element center.
-
- * - 14
- - Max aspect Frobenius
- - hexa8, quad4
- - Maximum of aspect Frobenius computed for the
- element decomposed into triangles.
-
- * - 15
- - Min aspect Frobenius
- - hexa8, quad4
- - Minimum of aspect Frobenius computed for the
- element decomposed into triangles.
-
- * - 16
- - Shear
- - hexa8, quad4
- - Scaled Jacobian with a truncated
- range.
-
- * - 17
- - Signed volume
- - hexa8, tetra4
- - Volume computed, preserving the sign.
- * - 18
- - Signed area
- - tria3, quad4
- - Area preserving the sign.
- * - 19
- - Maximum angle
- - tria3, quad4
- - Maximum included angle in degrees.
-
- * - 20
- - Aspect ratio
- - tetra4, quad4
- - Maximum edge length over area.
- * - 21
- - Aspect Frobenius
- - tetra4, tria3
- - Sum of the edge lengths squared divided by the
- area and normalized.
-
- * - 22
- - Diagonal
- - hexa8
- - Ratio of the minimum diagonal length to the
- maximum diagonal length.
-
- * - 23
- - Dimension
- - hexa8
- - :math:`\frac{V}{2\nabla V}`
- * - 24
- - Aspect beta
- - tetra4
- - Radius ratio of a positively oriented tetrahedron.
-
- * - 25
- - Aspect gamma
- - tetra4
- - Root-mean-square edge length to volume.
-
- * - 26
- - Collapse ratio
- - tetra4
- - Smallest ratio of the height of a vertex above
- its opposing triangle to the longest edge of that opposing triangle
- across all vertices of the tetrahedron.
-
- * - 27
- - Warpage
- - quad4
- - Cosine of the minimum dihedral angle formed by
- planes intersecting in diagonals.
-
- * - 28
- - Centroid
- - All
- - Returns each element centroid as a vector value
- at that element.
-
- * - 29
- - Volume Test
- - 3D elements
- - Returns 0.0 for non-3D elements.
- Each 3D element is decomposed into Tet04 elements. This option
- returns a scalar equal to 0.0, 1.0, or 2.0. It
- returns 0.0 if none of the Tet04 element volumes is negative, 1.0 if
- all of the Tet04 element volumes are negative, and 2.0 if some of
- the Tet04 element volumes are negative.
-
- * - 30
- - Signed Volume
- - 3D elements
- - Returns 0.0 for non-3D elements. Returns a scalar
- that is the sum of the signed volumes of the Tet4 decomposition for
- 3D elements.
-
- * - 31
- - Part Number
- - All
- - Returns a scalar at each element that is the
- EnSight part ID number of that element.
-
- * - 32
- - Face Count
- - All
- - Returns a scalar that is the number of faces in
- that element.
-
-
-**EnSight element types**
-
-.. list-table::
- :widths: 10 90
-
- * - 0
- - Point
- * - 1
- - Point ghost
- * - 2
- - 2 node bar
- * - 3
- - 2 node bar ghost
- * - 4
- - 3 node bar
- * - 5
- - 3 node bar ghost
- * - 6
- - 3 node triangle (tria3)
- * - 7
- - 3 node triangle ghost
- * - 10
- - 6 node triangle
- * - 11
- - 6 node triangle ghost
- * - 12
- - 4 node quadrilateral (quad4)
- * - 13
- - 4 node quadrilateral ghost
- * - 14
- - 8 node quadrilateral
- * - 15
- - 8 node quadrilateral ghost
- * - 16
- - 4 node tetrahedron (tetra4)
- * - 17
- - 4 node tetrahedron ghost
- * - 20
- - 10 node tetrahedron
- * - 21
- - 10 node tetrahedron ghost
- * - 22
- - 5 node pyramid
- * - 23
- - 5 node pyramid ghost
- * - 24
- - 13 node pyramid
- * - 25
- - 13 node pyramid ghost
- * - 26
- - 6 node pentahedron
- * - 27
- - 6 node pentahedron ghost
- * - 28
- - 15 node pentahedron
- * - 29
- - 15 node pentahedron ghost
- * - 30
- - 8 node hexahedron (hexa8)
- * - 31
- - 8 node hexahedron ghost
- * - 32
- - 20 node hexahedron
- * - 33
- - 20 node hexahedron ghost
- * - 34
- - N-sided polygon
- * - 35
- - N-sided polygon ghost
- * - 38
- - N-faced polyhedron
- * - 39
- - N-faced polyhedron ghost
-
-
-The implementation is based on the BSD implementation of
-the *Sandia Verdict Library*.
-
-**References**
-
-For more information on individual metrics, see these references:
-
-1. C. J. Stimpson, C. D. Ernst, P. Knupp, P. P. Pebay, & D.
- Thompson, The Verdict Library Reference Manual, May 8, 2007.
-2. The Verdict Library Reference Manual (http://www.vtk.org/Wiki/images/6/6b/VerdictManual-revA.pdf)
-
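-A minimal sketch, computing the "Minimum angle" metric (8 in the table above)
-on the selected parts:
-
-.. code-block:: python
-
-    # Hypothetical sketch: per-element scalar of metric 8 (minimum angle).
-    session.ensight.variables.evaluate("min_angle = EleMetric(plist, 8)")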
-
-
-.. _EleSize:
-
----------
-EleSize()
----------
-
-
-**Element Size**
-
-``EleSize(any parts).``
-
-Calculates the volume, area, or length of 3D, 2D, or 1D elements,
-respectively, at each element, creating a scalar, element-based variable.
-
-.. note::
- This function uses the coordinates of the element to calculate the volume of each
- element. If you want to use displacement in the calculation of the volume, you must
- turn on computational (server-side) displacement, rather than visual only
- (client side) displacement. When computational displacement is turned on, displacement
- values are applied to the coordinates on the server prior to calculating the element
- size.
-
- If you calculate the element size of a part and then use that part to create a
- child part, the child part inherits the values of the ``EleSize`` calculation, which
- are the size of the parent elements and not the size of the child elements. If you want the
- ``EleSize`` of the child part, then you must select the child part and recalculate a new
- variable.
-
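-A minimal sketch on the selected parts:
-
-.. code-block:: python
-
-    # Hypothetical sketch: per-element volume/area/length scalar.
-    session.ensight.variables.evaluate("esize = EleSize(plist)")
-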
-
-
-.. _ElemToNode:
-
-------------
-ElemToNode()
-------------
-
-
-**Element to Node**
-
-``ElemToNode(any parts, element-based scalar or vector).``
-
-
-Averages an element-based variable to produce a
-node-based variable.
-
-For each **node[i]** → :math:`\text{val += (elem[j]->val * elem[j]->wt) | node[i]}`
-
-For each **node[i]** → :math:`\text{wt += elem[j]->wt | node[i]}`
-
-Results: **node[i]** → :math:`\text{val /= node[i]->wt}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - wt
- - 1 for this algorithm and the weighting scalar in the :ref:`ElemToNodeWeighted() <ElemToNodeWeighted>` method
- * - j
- - iterator on all part elements
- * - i
- - iterator on all part nodes (nodes[i] must be on elem[j] to contribute)
- * - | node[i]
- - indicates node that is associated with elem[j]
-
-
-By default, this function uses all
-parts that share each node of the one or more selected parts. Parts that are not
-selected, whose elements are shared by nodes of the selected parts, have
-their element values averaged in with those of the selected parts.
-
-
-
-.. _ElemToNodeWeighted:
-
---------------------
-ElemToNodeWeighted()
---------------------
-
-
-**Element to Node Weighted**
-
-``ElemToNodeWeighted(any parts, element-based scalar or vector, element-based weighting scalar).``
-
-
-This function is the same as the :ref:`ElemToNode() <ElemToNode>` function, except
-that the value of the variable at the element is weighted by an element scalar.
-That is, elem[j] → wt is the value of the weighting scalar in the :ref:`ElemToNode() <ElemToNode>`
-function previously described.
-
-One use of this function might be to use the element
-size as a weighting factor so that larger elements contribute more to the nodal
-value than smaller elements.
-
-
-.. _EnergyT:
-
----------
-EnergyT()
----------
-
-
-**Energy: Total Energy**
-
-``EnergyT(any parts, density, pressure, velocity, ratio of specific heats).``
-
-
-Computes a scalar variable of total energy per unit volume.
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`e=\rho \left({e}_{i}+\frac{{V}^{2}}{2}\right)`
- - total energy
- * - :math:`{e}_{i}={e}_{0}-\frac{{V}^{2}}{2}`
- - internal energy
- * - :math:`{e}_{0}=\frac{e}{\rho }`
- - stagnation energy
-
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`\rho`
- - density
- * - :math:`V`
- - velocity
-
-
-Or based on gamma, pressure, and velocity:
-
-
-:math:`e=\frac{p}{\left(\gamma -1\right)}+\rho \frac{{V}^{2}}{2}`
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - pressure
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part variable, or constant number
-
-
-.. _KinEn:
-
--------
-KinEn()
--------
-
-
-**Kinetic Energy**
-
-``KinEn(any parts, velocity, density)``
-
-
-Computes a scalar variable whose value is the kinetic
-energy :math:`{E}_{k}`. This scalar variable is defined as:
-
-:math:`{E}_{k}=\frac{1}{2}\rho {V}^{2}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`\rho`
- - density
- * - :math:`V`
- - velocity variable
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - velocity
- - vector variable
- * - density
- - scalar, constant, or constant per part variable, or constant number
-
-
-.. _Enthalpy:
-
-----------
-Enthalpy()
-----------
-
-
-**Enthalpy**
-
-``Enthalpy(any parts, density, total energy, velocity, ratio of specific heats)``
-
-
-Computes a scalar variable that is enthalpy,
-:math:`h`. This scalar variable is defined as:
-
-:math:`h=\gamma \left(\frac{E}{\rho }-\frac{{V}^{2}}{2}\right)`
-
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`E`
- - total energy per unit volume
- * - :math:`\rho`
- - density
- * - :math:`V`
- - velocity magnitude
- * - :math:`\gamma`
- - ratio of specific heats
-
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part variable, or constant number
-
-
-
-.. _EnthalpyNorm:
-
---------------
-EnthalpyNorm()
---------------
-
-
-**Normalized Enthalpy**
-
-``EnthalpyNorm(any parts, density, total energy, velocity, ratio of specific heats, freestream density, freestream speed of sound)``
-
-
-Computes a scalar variable that is the normalized enthalpy
-:math:`{h}_{n}`. This scalar variable is defined as:
-
-:math:`{h}_{n}=h/{h}_{i}`
-
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`h`
- - enthalpy
- * - :math:`{h}_{i}`
- - freestream enthalpy
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part variable, or constant number
- * - freestream density
- - constant or constant per part variable or constant number
- * - freestream speed of sound
- - constant or constant per part variable or constant number
-
-
-
-
-.. _EnthalpyStag:
-
---------------
-EnthalpyStag()
---------------
-
-
-**Stagnation Enthalpy**
-
-``EnthalpyStag(any parts, density, total energy, velocity, ratio of specific heats)``
-
-
-Computes a scalar variable that is the stagnation enthalpy :math:`{h}_{o}`. This
-scalar variable is defined as:
-
-:math:`{h}_{o}=h+\frac{{V}^{2}}{2}`
-
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`h`
- - enthalpy
- * - :math:`V`
- - velocity magnitude
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part variable, or constant number
-
-
-
-.. _EnthalpyNormStag:
-
-------------------
-EnthalpyNormStag()
-------------------
-
-
-**Normalized Stagnation Enthalpy**
-
-``EnthalpyNormStag(any parts, density, total energy,
-velocity, ratio of specific heats, freestream density, freestream speed of
-sound, freestream velocity magnitude)``
-
-
-Computes a scalar variable that is the normalized stagnation enthalpy :math:`{h}_{on}`.
-This scalar variable is defined as:
-
-:math:`{h}_{on}={h}_{o}/{h}_{oi}`
-
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`{h}_{o}`
- - stagnation enthalpy
- * - :math:`{h}_{oi}`
- - freestream stagnation enthalpy
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part variable, or constant number
- * - freestream density
- - constant or constant per part variable or constant number
- * - freestream speed of sound
- - constant or constant per part variable or constant number
- * - freestream velocity magnitude
- - constant or constant per part variable or constant number
-
-
-
-.. _Entropy:
-
----------
-Entropy()
----------
-
-
-**Entropy**
-
-``Entropy(any parts, density, total energy, velocity,
-ratio of specific heats, gas constant, freestream density, freestream speed of sound)``
-
-Computes a scalar variable that is the entropy :math:`s`. This scalar variable is defined as:
-
-:math:`s=\mathrm{ln}\left(\frac{\frac{p}{{p}_{\infty }}}{{\left(\frac{\rho }{{\rho }_{\infty }}\right)}^{\gamma }}\right)\left(\frac{R}{\gamma -1}\right)`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`\rho`
- - density
- * - :math:`R`
- - gas constant
- * - :math:`\gamma`
- - ratio of specific heats
- * - :math:`{a}_{\infty }`
- - freestream speed of sound
- * - :math:`{\rho }_{\infty }`
- - freestream density
-
-
-Pressure, :math:`p`, is calculated from the total energy, :math:`e` , and velocity, :math:`V`:
-
-
-:math:`p=\left(\gamma -1\right)\left[e-\rho \frac{{V}^{2}}{2}\right]`
-
-
-with freestream pressure:
-
-
-:math:`{p}_{\infty }=\frac{{\rho }_{\infty }{a}_{\infty }^{2}}{\gamma }`
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part variable, or constant number
- * - gas constant
- - scalar, constant, or constant per part variable or constant number
- * - freestream density
- - constant or constant per part variable or constant number
- * - freestream speed of sound
- - constant or constant per part variable or constant number
-
-
-
-.. _Flow:
-
-------
-Flow()
-------
-
-
-**Flow**
-
-``Flow(any 1D or 2D parts, velocity [,Compute_Per_part]).``
-
-
-Computes a constant or constant per part variable whose
-value is the volume flow rate :math:`{Q}_{c}`. This function is defined as:
-
-:math:`{Q}_{c}={\displaystyle \underset{S}{\int }\left(V·\widehat{n}\right)}dS`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`V`
- - velocity vector
- * - :math:`\widehat{n}`
- - unit vector normal to surface
- * - :math:`S`
- - 1D or 2D domain
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - velocity
- - vector variable
-
-
-.. note::
- The normal for each 2D element is calculated using the right-hand rule of
- the 2D element connectivity and must be consistent over the part. Otherwise,
- your results are incorrect. To calculate the mass flow rate, multiply the
- velocity vector by the density scalar and then substitute this vector value
- for the velocity vector in the previous equation, as in the sketch that follows.
-
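-Following the note above, a sketch of the mass flow rate computation,
-assuming illustrative ``density`` and ``velocity`` variables and that the
-calculator's arithmetic syntax is used to build the intermediate vector:
-
-.. code-block:: python
-
-    # Hypothetical sketch: build the mass-flux vector, then integrate it.
-    session.ensight.variables.evaluate("massflux = density*velocity")
-    session.ensight.variables.evaluate("mdot = Flow(plist, massflux)")
-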
-
-.. _FlowRate:
-
-----------
-FlowRate()
-----------
-
-
-**Flow Rate**
-
-``FlowRate(any 1D or 2D parts, velocity).``
-
-
-Computes a scalar :math:`{V}_{n}`, which is the component of velocity normal to the surface.
-This scalar variable is defined as:
-
-:math:`{V}_{n}=V·\widehat{n}`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`V`
- - velocity
- * - :math:`\widehat{n}`
- - unit vector normal to surface
- * - :math:`S`
- - 1D or 2D domain
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - velocity
- - vector variable
-
-
-.. note::
- This function is equivalent to calculating the dot product of the velocity
- vector and the surface normal using the :ref:`Normal() <Normal>` function.
-
-
-
-.. _FluidShear:
-
-------------
-FluidShear()
-------------
-
-
-**Fluid Shear**
-
-``FluidShear(2D parts, velocity magnitude gradient, viscosity)``
-
-
-Computes a scalar variable :math:`\tau` whose value is defined as:
-
-:math:`\tau =\mu \frac{\partial V}{\partial n}`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`\tau`
- - shear stress
- * - :math:`\mu`
- - dynamic viscosity
- * - :math:`\frac{\partial V}{\partial n}`
- - velocity gradient in direction of surface normal
-
-
-.. note::
- To compute fluid shear stress:
-
- #. Use the :func:`Grad()` function on the velocity to obtain the ``Velocity Grad``
- variable in the 3D parts of interest.
-
- #. Create a part clip or extract the outer surface of the part using part extract,
- creating a 2D part from the 3D parts used in the previous step on the surface
- where you want to see the fluid shear stress.
-
- #. Compute the ``Fluid Shear`` variable on the 2D surface, as in the
- sketch that follows this note.
-
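-A minimal sketch of these steps, with illustrative variable names and a
-constant viscosity value:
-
-.. code-block:: python
-
-    # Hypothetical sketch. Step 1: on the selected 3D parts, compute the
-    # gradient of the velocity magnitude.
-    session.ensight.variables.evaluate("velgrad = Grad(plist, velocity)")
-    # Step 2: create and select the 2D clip/extracted surface interactively.
-    # Step 3: on the selected 2D part, compute the shear with a constant
-    # dynamic viscosity (1.8e-5 is illustrative).
-    session.ensight.variables.evaluate("shear = FluidShear(plist, velgrad, 1.8e-5)")
-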
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - velocity gradient
- - vector variable
- * - viscosity
- - scalar, constant, or constant per part variable, or constant number
-
-
-
-.. _FluidShearMax:
-
----------------
-FluidShearMax()
----------------
-
-
-**Fluid Shear Stress Max**
-
-``FluidShearMax(2D or 3D parts, velocity, density, turbulent kinetic energy, turbulent dissipation, laminar viscosity)``
-
-
-Computes a scalar variable :math:`\Sigma`. This scalar variable is defined as:
-
-:math:`\Sigma =F/A=\left({\mu }_{t}+{\mu }_{l}\right)E`
-
-where:
-
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`F`
- - force
- * - :math:`A`
- - unit area
- * - :math:`{\mu }_{t}`
- - turbulent (eddy) viscosity
- * - :math:`{\mu }_{l}`
- - laminar viscosity (treated as a constant)
- * - :math:`E`
- - local strain
-
-
-The turbulent viscosity :math:`{\mu }_{t}` is defined as:
-
-:math:`{\mu }_{t}=\frac{\rho 0.09{k}^{2}}{\epsilon }`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`\rho`
- - density
- * - :math:`k`
- - turbulent kinetic energy
- * - :math:`\epsilon`
- - turbulent dissipation
-
-
-A measure of local strain :math:`E` (that is the local elongation in three directions)
-is given by:
-
-:math:`E=\sqrt{\left(2tr\left(D·D\right)\right)}`
-
-where:
-
-:math:`\left(2tr\left(D·D\right)\right)=2\left({d}_{11}^{2}+{d}_{22}^{2}+{d}_{33}^{2}\right)+\left({d}_{12}^{2}+{d}_{13}^{2}+{d}_{23}^{2}\right)`
-
-The *Euclidean norm* is defined by:
-
-:math:`tr\left(D·D\right)={d}_{11}^{2}+{d}_{22}^{2}+{d}_{33}^{2}+\frac{1}{2}\left({d}_{12}^{2}+{d}_{13}^{2}+{d}_{23}^{2}\right)`
-
-The rate of deformation tensor :math:`{d}_{ij}` is defined by:
-
-:math:`D=\left[{d}_{ij}\right]=\frac{1}{2}\left[\begin{array}{ccc}2{d}_{11}& {d}_{12}& {d}_{13}\\ {d}_{21}& 2{d}_{22}& {d}_{23}\\ {d}_{13}& {d}_{23}& 2{d}_{33}\end{array}\right]`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`{d}_{11}`
- - :math:`=\partial u/\partial x`
- * - :math:`{d}_{22}`
- - :math:`=\partial v/\partial y`
- * - :math:`{d}_{33}`
- - :math:`=\partial w/\partial z`
- * - :math:`{d}_{12}`
- - :math:`=\partial u/\partial y+\partial v/\partial x={d}_{21}`
- * - :math:`{d}_{13}`
- - :math:`=\partial u/\partial z+\partial w/\partial x={d}_{31}`
- * - :math:`{d}_{23}`
- - :math:`=\partial v/\partial z+\partial w/\partial y={d}_{32}`
-
-
-The strain tensor :math:`{e}_{ij}` is defined by :math:`{e}_{ij}=\frac{1}{2}{d}_{ij}`.
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - velocity
- - vector variable
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - turbulent kinetic energy
- - scalar variable
- * - turbulent dissipation
- - scalar variable
- * - laminar viscosity
- - constant or constant per part variable or constant number
-
-
-.. _Force:
-
--------
-Force()
--------
-
-
-**Force**
-
-``Force(2D parts, pressure)``
-
-
-Computes a vector variable whose value is the force
-:math:`F`. This scalar variable is defined as:
-
-:math:`F=pA`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`p`
- - pressure
- * - :math:`A`
- - unit area
-
-
-.. note::
- The force acts in the surface normal direction.
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - pressure
- - scalar variable
-
-
-
-.. _Force1D:
-
----------
-Force1D()
----------
-
-
-**Force 1D**
-
-``Force1D(1D planar parts, pressure, surface normal)``
-
-
-Computes a vector variable whose value is the force :math:`F`. This function
-is defined as:
-
-:math:`F=pL`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`p`
- - pressure
- * - :math:`L`
- - unit length normal vector
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - pressure
- - scalar variable
- * - surface normal
- - vector variable
-
-
-.. _Grad:
-
-------
-Grad()
-------
-
-
-**Gradient**
-
-``Grad(2D or 3D parts, scalar or vector(Magnitude is used))``
-
-
-Computes a vector variable whose value is the gradient :math:`GRA{D}_{f}`.
-This vector variable is defined as:
-
-:math:`GRA{D}_{f}=\frac{\partial f}{\partial x}\widehat{i}+\frac{\partial f}{\partial y}\widehat{j}+\frac{\partial f}{\partial z}\widehat{k}`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`f`
- - any scalar variable (or the magnitude of the specified vector)
- * - :math:`x,y,z`
- - coordinate directions
- * - :math:`i,j,k`
- - unit vectors in coordinate directions
-
-
-.. admonition:: Algorithm: Gradient
-
- If the variable is at the element, it is moved to the nodes. Each element
- is then mapped to a normalized element and the Jacobian is calculated
- for the transformation from the element to the normalized element.
- Next, the inverse Jacobian is calculated for this transformation and used to
- compute the Jacobian for the scalar variable. Therefore, the chain rule is used
- with the inverse Jacobian of the transformation and the Jacobian of the scalar
- variable to calculate the gradient for each node of each element. The
- contributions of the gradient from all the elements are moved to all the nodes
- using an unweighted average. Finally, if the original variable is per element,
- the gradient is moved from the nodes to the elements using an unweighted
- average.
-
-
-.. _GradTensor:
-
-------------
-GradTensor()
-------------
-
-
-**Gradient Tensor**
-
-``GradTensor(2D or 3D parts, vector)``
-
-
-Computes a tensor variable whose value is the gradient
-:math:`GRA{D}_{F}`. This tensor variable is defined as:
-
-:math:`GRA{D}_{F}=\frac{\partial F}{\partial x}\widehat{i}+\frac{\partial F}{\partial y}\widehat{j}+\frac{\partial F}{\partial z}\widehat{k}`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`F`
- - any vector variable
- * - :math:`x,y,z`
- - coordinate directions
- * - :math:`i,j,k`
- - unit vectors in coordinate directions
-
-
-
-
-.. _HelicityDensity:
-
------------------
-HelicityDensity()
------------------
-
-
-**Helicity Density**
-
-``HelicityDensity(any parts, velocity)``
-
-
-Computes a scalar variable :math:`{H}_{d}` whose value is:
-
-:math:`{H}_{d}=V·\Omega`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`V`
- - velocity
- * - :math:`\Omega`
- - vorticity
-
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - velocity
- - vector variable
-
-
-
-.. _HelicityRelative:
-
-------------------
-HelicityRelative()
-------------------
-
-
-**Relative Helicity**
-
-``HelicityRelative(any parts, velocity)``
-
-
-Computes a scalar variable :math:`{H}_{r}` whose value is:
-
-:math:`{H}_{r}=\mathrm{cos}\varphi =\frac{V·\Omega }{\left|V\right|\left|\Omega \right|}`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`\varphi`
- - angle between the velocity vector and the vorticity
- vector
-
- * - V
- - velocity
- * - Ω
- - vorticity
-
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - velocity
- - vector variable
-
-
-
-.. _HelicityRelFilter:
-
--------------------
-HelicityRelFilter()
--------------------
-
-
-**Filtered Relative Helicity**
-
-``HelicityRelFilter(any parts, velocity, freestream velocity magnitude).``
-
-
-Computes a scalar variable :math:`{H}_{rf}` whose value is:
-
-:math:`{H}_{rf}={H}_{r}`, if :math:`\left|{H}_{d}\right|\ge filter`, or :math:`{H}_{rf}=0`,
-if :math:`\left|{H}_{d}\right|<filter`, where the filter value is derived from the
-freestream velocity magnitude argument and :math:`{H}_{d}` is the helicity density
-computed via the :ref:`HelicityDensity() <HelicityDensity>` function.
-
-**References**
-
-For more information, see these references:
-
-1. Haller, G., "An objective definition of a vortex," Journal of
- Fluid Mechanics, 2005, vol. 525, pp. 1-26.
-2. Jeong, J. and Hussain, F., "On the identification of a
- vortex," Journal of Fluid Mechanics, 1995, vol. 285, pp. 69-94.
-
-
-.. _Mach:
-
-------
-Mach()
-------
-
-
-**Mach Number**
-
-``Mach(any parts, density, total energy, velocity, ratio of specific heats)``
-
-
-Computes a scalar variable whose value is the Mach
-number :math:`M`. This scalar variable is defined as:
-
-:math:`M=\frac{u}{\sqrt{\frac{\gamma p}{\rho }}}=\frac{u}{c}`
-
-where:
-
-.. list-table::
- :widths: 50 50
-
- * - :math:`m`
- - momentum
- * - :math:`\rho`
- - density
- * - :math:`u`
- - speed, computed from velocity input.
- * - :math:`\gamma`
- - ratio of specific heats (1.4 for air)
- * - :math:`p`
- - pressure (see :ref:`Pres() <Pres>` below)
- * - :math:`c`
- - speed of sound
-
-
-For a description, see :ref:`Energy: Total Energy <EnergyT>`.
-
-.. list-table:: **Function arguments**
- :widths: 50 50
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part variable, or constant number
-
-
-
-.. _MakeScalElem:
-
---------------
-MakeScalElem()
---------------
-
-
-**Make Scalar at Elements**
-
-``MakeScalElem(any parts, constant number or constant or constant per part variable)``
-
-
-Assigns the specified constant value to each element,
-making a scalar variable.
-
-
-.. _MakeScalElemId:
-
-----------------
-MakeScalElemId()
-----------------
-
-
-**Make Scalar from Element ID**
-
-``MakeScalElemId(any parts)``
-
-
-Creates a scalar variable set to the element ID of the part. If the
-element ID does not exist or is undefined, the scalar value is set to ``Undefined``.
-
-
-.. _MakeScalNode:
-
---------------
-MakeScalNode()
---------------
-
-
-**Make Scalar at Nodes**
-
-``MakeScalNode(any parts, constant number or constant or constant per part variable)``
-
-
-Assigns the specified constant value to each node,
-making a scalar variable.
-
-
-.. _MakeScalNodeId:
-
-----------------
-MakeScalNodeId()
-----------------
-
-
-**Make Scalar from Node ID**
-
-``MakeScalNodeId(any parts)``
-
-
-Creates a scalar variable set to the node ID of the part. If the node
-ID does not exist or is undefined, the scalar value is set to ``Undefined``.
-
-
-.. _MakeVect:
-
-----------
-MakeVect()
-----------
-
-
-**Make Vector**
-
-``MakeVect(any parts, scalar or zero, scalar or zero, scalar or zero)``
-
-
-Computes a vector variable formed from scalar variables.
-
-- The first scalar becomes the X component of the vector.
-- The second scalar becomes the Y component of the vector.
-- The third scalar becomes the Z component of the vector.
-
-A zero can be specified for some of the scalars, creating a 2D or 1D vector field.
-
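-A minimal sketch, assuming illustrative scalars ``u``, ``v``, and ``w``:
-
-.. code-block:: python
-
-    # Hypothetical sketch: assemble a vector from three scalars; pass 0 for a
-    # component to build a 2D or 1D field instead.
-    session.ensight.variables.evaluate("vel = MakeVect(plist, u, v, w)")
-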
-.. note::
-
- To quickly make a vector, you can select the parts that you want to use,
- select three scalars (*scalar_x*, *scalar_y*, and *scalar_z*) in the
- variable list, and right-click and choose **Make Vector** in the pull-down menu.
- Then choose whether to use all (or all available, if you have measured)
- parts or your currently selected parts (if you have parts selected) to calculate
- the vector. A vector variable is calculated and named using the
- scalar names (which should be adequate). If the order of the variables or the
- name of the vector cannot be definitively determined, then a GUI pops up with the
- proposed components in a pull-down menu and a proposed name for the created vector
- variable.
-
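-
-The following minimal sketch shows the equivalent array operation in NumPy
-(the array names are assumptions for the example); a zero third component
-yields a 2D vector field:
-
-.. code-block:: python
-
-    import numpy as np
-
-    scalar_x = np.array([1.0, 2.0, 3.0])
-    scalar_y = np.array([0.5, 0.6, 0.7])
-
-    # Third component supplied as zero, as in MakeVect(plist, sx, sy, 0)
-    vec = np.column_stack([scalar_x, scalar_y, np.zeros_like(scalar_x)])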
-
-.. _MassedParticle:
-
-----------------
-MassedParticle()
-----------------
-
-
-**Massed Particle Scalar**
-
-``MassedParticle(massed particle trace parts)``
-
-
-This scalar creates a massed-particle per element scalar
-variable for each of the parent parts of the massed-particle traces. This per
-element variable is the mass of the particle multiplied by the sum of the number of
-times each element is exited by a mass-particle trace. For more information, see
-`Particle-Mass Scalar on Boundaries
-`_
-in the *Ansys EnSight User Manual*.
-
-
-.. _MassFluxAvg:
-
--------------
-MassFluxAvg()
--------------
-
-
-**Mass-Flux Average**
-
-``MassFluxAvg(any 1D or 2D parts, scalar, velocity, density [,Compute_Per_part])``
-
-
-Computes a constant or constant per part variable whose
-value is the mass flux average :math:`{b}_{avg}`. This function is
-defined as:
-
-:math:`{b}_{avg}=\frac{{\displaystyle \underset{A}{\int }\rho b\left(V·N\right)}dA}{{\displaystyle \underset{A}{\int }\rho \left(V·N\right)}dA}=\frac{\text{Mass Flux of Scalar}}{\text{Mass Flux}}=\frac{Flow(plist,b\rho V)}{Flow(plist,\rho V)}`
-
-where:
-
-.. list-table::
- :widths: 10 90
-
- * - :math:`b`
- - any scalar variable, such as pressure, mach, or a
- vector component
-
- * - :math:`\rho`
- - density (constant or scalar) variable
- * - :math:`V`
- - velocity (vector) variable
- * - :math:`dA`
- - area of some 2D domain
- * - :math:`N`
- - unit vector normal to :math:`dA`
-
-
-
-.. list-table:: **Function arguments**
- :widths: 10 90
-
- * - scalar
- - any scalar variable, such as pressure, mach, or a
- vector component
- * - velocity
- - vector variable
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
-
-
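-As a rough sketch of the discrete form (assumed face-centered arrays, not the
-EnSight implementation), the average is the mass flux of the scalar divided by
-the mass flux:
-
-.. code-block:: python
-
-    import numpy as np
-
-    # Assumed per-face quantities on a 2D surface: scalar b, density rho,
-    # velocity V (n, 3), unit normals N (n, 3), and face areas dA.
-    b = np.array([1.0, 1.5, 2.0])
-    rho = np.array([1.2, 1.2, 1.1])
-    V = np.array([[10.0, 0, 0], [12.0, 0, 0], [9.0, 0, 0]])
-    N = np.array([[1.0, 0, 0], [1.0, 0, 0], [1.0, 0, 0]])
-    dA = np.array([0.5, 0.5, 0.25])
-
-    vn = np.einsum("ij,ij->i", V, N)  # V . N per face
-    b_avg = np.sum(rho * b * vn * dA) / np.sum(rho * vn * dA)
-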
-
-
-.. _MatSpecies:
-
-------------
-MatSpecies()
-------------
-
-
-**MatSpecies**
-
-``MatSpecies(any model parts, any materials, any species, scalar per element)``
-
-
-Computes a scalar per element variable whose value
-:math:`\sigma` is the sum of all specified material and species combinations
-multiplied by the specified element variable on specified *model* parts with
-defined material species. This scalar variable is defined as:
-
-:math:`\sigma ={e}_{s}\Sigma m{s}_{ij}`
-
-where:
-
-.. list-table::
- :widths: 10 90
-
- * - :math:`{\text{e}}_{\text{s}}`
- - scalar per element variable value or value
- * - :math:`{\text{ms}}_{\text{ij}}`
-
- - * :math:`{\text{m}}_{\text{i}}{\text{ * s}}_{\text{j}}`
-
- * Product of the material fraction :math:`{\text{m}}_{\text{i}}` and its corresponding specie value :math:`{\text{s}}_{\text{j}}`
-
- * 0, if specie :math:`{\text{s}}_{\text{j}}` does not exist for material :math:`{\text{m}}_{\text{i}}`
-
- * :math:`{\text{m}}_{\text{i}}` if no species are specified
-
-
-This function only operates on model parts with
-predefined species. The specified materials can either be a list of materials
-or a single material value. The specified species can either be a list, a single
-specie, or no specie (that is a null species list that then computes an element
-value based on only material fraction contributions). The scalar per element
-value can either be an active variable or a scalar value (that is the value 1
-would give pure material fraction and/or specie value extraction).
-
-Both material and specie names are selected from the
-context-sensitive **Active Variables** list, which changes to the **Materials** list and
-**Species** list for their respective prompts.
-
-
-.. _MatToScalar:
-
--------------
-MatToScalar()
--------------
-
-
-**MatToScalar**
-
-``MatToScalar(any model parts, a material)``
-
-
-Computes a scalar per element variable whose value ``s`` is
-the specified material's value ``m`` of the element on the specified parts.
-This function is defined as:
-
-``s = m``
-
-where:
-
-.. list-table::
- :widths: 10 90
-
- * - ``s``
- - scalar per element variable value of each element
- * - ``m``
- - corresponding material fraction value of each element
-
-
-
-This function only operates on model parts with
-predefined materials that are given by sparse mixed material definitions. Only
-one material can be converted into one per element scalar variable at a time.
-The material cannot be the null material.
-
-For more information on materials, see these topics in the *Ansys EnSight User Manual*:
-
-- `Material Interface Parts
- `_
-- `Utility Programs `_,
- which supplies information on the EnSight Case Gold Writer. See both "MATERIALS"
- sections for file formats and the example material dataset.
-
-
-.. _Max:
-
------
-Max()
------
-
-
-**Max**
-
-``Max(any parts, scalar or (vector, component) [,Compute_Per_part])``
-
-
-Computes a constant or constant per part variable whose
-value is the maximum value of the scalar (or vector component) in the parts
-selected. The component is not requested if a scalar is selected.
-
-.. list-table::
- :widths: 10 90
-
- * - [component]
- - if vector variable, magnitude is the default; otherwise specify [x], [y], or [z]
-
-
-.. _Min:
-
------
-Min()
------
-
-
-**Min**
-
-``Min(any parts, scalar or (vector, component) [,Compute_Per_part])``
-
-
-Computes a constant or constant per part variable whose
-value is the minimum value of the scalar (or vector component) in the parts
-selected.
-
-.. list-table::
- :widths: 10 90
-
- * - [component]
- - if vector variable, magnitude is the default; otherwise specify [x], [y], or [z]
-
-
-
-.. _Moment:
-
---------
-Moment()
---------
-
-
-**Moment**
-
-``Moment(any parts, vector, component [,Compute_Per_part])``
-
-
-Computes a constant or constant per part variable (the
-moment about the cursor tool location) whose value is the x, y, or z component of
-Moment :math:`M`.
-
-:math:`{M}_{x}=\Sigma \left({F}_{y}{d}_{z}-{F}_{z}{d}_{y}\right)`
-
-
-:math:`{M}_{y}=\Sigma \left({F}_{z}{d}_{x}-{F}_{x}{d}_{z}\right)`
-
-
-:math:`{M}_{z}=\Sigma \left({F}_{x}{d}_{y}-{F}_{y}{d}_{x}\right)`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`{F}_{i}`
- - force vector component in direction *i* of
- vector :math:`F\left(x,y,z\right)=\left(\text{Fx,Fy,Fz}\right)`
- * - :math:`{d}_{i}`
- - signed moment arm (the perpendicular distance from the line of action of the vector
- component :math:`{F}_{i}` to the moment axis, which is the current cursor tool position)
-
-
-.. list-table::
- :widths: 20 80
-
- * - vector
- - any vector variable
- * - component
- - [X], [Y], or [Z]
-
-
-
-.. _MomentVector:
-
---------------
-MomentVector()
---------------
-
-
-**MomentVector**
-
-``MomentVector(any parts, force vector)``
-
-
-Computes a nodal vector variable (the moment is computed
-about each point of the selected parts) whose value is the x, y, or z
-component of Moment :math:`M`.
-
-:math:`{M}_{x}=\Sigma \left({F}_{y}{d}_{z}-{F}_{z}{d}_{y}\right)`
-
-
-:math:`{M}_{y}=\Sigma \left({F}_{z}{d}_{x}-{F}_{x}{d}_{z}\right)`
-
-
-:math:`{M}_{z}=\Sigma \left({F}_{x}{d}_{y}-{F}_{y}{d}_{x}\right)`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`{F}_{i}`
- - force vector component in direction *i* of vector :math:`F\left(x,y,z\right)=\left(\text{Fx,Fy,Fz}\right)`
- * - :math:`{d}_{i}`
- - signed moment arm (the perpendicular distance from the line
- of action of the vector component :math:`{F}_{i}` to the moment axis (model point position))
-
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - force vector
- - any vector variable (per node or per element)
-
-
-
-.. _Momentum:
-
-----------
-Momentum()
-----------
-
-
-**Momentum**
-
-``Momentum(any parts, velocity, density)``
-
-
-Computes a vector variable :math:`m`. This vector variable is defined as:
-
-:math:`m=\rho V`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`\rho`
- - density
- * - :math:`V`
- - velocity
-
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - velocity
- - vector variable
- * - density
- - scalar, constant, constant per part variable, or constant number
-
-
-
-.. _NodeCount:
-
------------
-NodeCount()
------------
-
-
-**Node Count**
-
-``NodeCount(any parts [,Compute_Per_part])``
-
-
-Produces a constant or constant per part variable
-containing the node count of the parts specified.
-
-
-.. _NodeToElem:
-
-------------
-NodeToElem()
-------------
-
-
-**Node to Element**
-
-``NodeToElem(any parts, node-based scalar or vector)``
-
-
-Averages a node-based variable to produce an element-based variable.
-
-For each: ``elem[j]->val += node[i]->val | elem[j]``
-
-Results: ``elem[j]->val /= elem[j]->num_cell_nodes``
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - j
- - iterator on all part elements
- * - i
- - iterator on all part nodes
-
- elem[j] indicates element that is associated with node[i]
-
-
-.. note::
- ``elem[j]`` must contain ``node[i]`` to contribute.
-
-
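-A minimal sketch of this averaging in Python (the connectivity list is an
-assumption for the example):
-
-.. code-block:: python
-
-    import numpy as np
-
-    node_val = np.array([1.0, 2.0, 3.0, 4.0])
-    # Each element lists the node indices it contains (assumed connectivity).
-    elements = [[0, 1, 2], [1, 2, 3]]
-
-    # elem[j]->val = sum of contained node values / num_cell_nodes
-    elem_val = np.array([node_val[nodes].mean() for nodes in elements])
-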
-
-.. _Normal:
-
---------
-Normal()
---------
-
-
-**Normal**
-
-``Normal(2D parts or 1D planar parts)``
-
-
-Computes a vector variable that is the normal to the
-surface at each element for 2D parts, or for 1D planar parts, lies normal to
-the 1D elements in the plane of the part.
-
-
-.. _NormC:
-
--------
-NormC()
--------
-
-
-**Normal Constraints**
-
-``NormC(2D or 3D parts, pressure, velocity, viscosity [,Compute_Per_part])``
-
-
-Computes a constant or constant per part variable whose
-value is the normal constraints :math:`NC`. It is defined as:
-
-:math:`NC={\displaystyle \underset{S}{\int }\left(-p+\mu \frac{\partial V}{\partial n}\widehat{n}\right)}dS`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`p`
- - pressure
- * - :math:`V`
- - velocity
- * - :math:`\mu`
- - dynamic viscosity
- * - :math:`n`
- - direction of normal
- * - :math:`S`
- - border of a 2D or 3D domain
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - pressure
- - scalar variable
- * - velocity
- - vector variable
- * - viscosity
- - scalar, constant, or constant per part variable, or constant number
-
-
-
-.. _NormVect:
-
-----------
-NormVect()
-----------
-
-
-**Normalize Vector**
-
-``NormVect(any parts, vector)``
-
-
-Computes a vector variable whose value is the unit vector
-:math:`U` of the given vector :math:`V` .
-
-:math:`U=\frac{V\left({V}_{x},{V}_{y},{V}_{z}\right)}{\Vert V\Vert }`
-
-
-.. list-table::
- :widths: 50 50
-
- * - :math:`V`
- - vector variable field
- * - :math:`\Vert V\Vert`
- - :math:`\sqrt{{V}_{x}^{2}+{V}_{y}^{2}+{V}_{z}^{2}}`
-
-
-
-
-.. _OffsetField:
-
--------------
-OffsetField()
--------------
-
-
-**Offset Field**
-
-``OffsetField(2D or 3D parts)``
-
-
-Computes a scalar field of offset values. The values
-are in model distance units perpendicular to the boundary of the part.
-
-.. note::
- An isosurface created in this field would mimic the part boundary, but at the offset
- distance into the field.
-
- This function is not supported for Server of Servers (SOS)
- decomposition because SOS was designed to benefit from independent
- server computations in parallel. The interdependent computational
- mapping of the field results from the fluid part onto the boundary part
- violates this assumption. In other words, because you cannot be sure that you
- have all of the fluid information on one server for the mapping, this
- function is disabled.
-
-
-
-.. _OffsetVar:
-
------------
-OffsetVar()
------------
-
-
-**Offset Variable**
-
-``OffsetVar(2D or 3D parts, scalar or vector, constant offset value)``
-
-
-Computes a scalar (or vector) variable defined as the
-offset value into the field of that variable that exists in the normal direction
-from the boundary of the selected part. This assigns near surface values of a
-variable to the surface of the selected parts from the neighboring 3D field,
-which is found automatically using the selected parts surfaces.
-
-In other words, this function gets the value of a
-variable from surrounding fields, a fixed distance from the surface of the
-selected parts, and assigns it to the surface of the selected part. For
-example, you might use this function to get the value of the velocity in the
-flow field a slight distance above your vehicle surface and assign that value to
-your vehicle surface.
-
-To use this function, select the parts in the part list
-that you want to use and enter both a variable and an offset. EnSight automatically
-detects the 3D field parts adjacent to the surfaces of your selected parts and reaches
-into these fields by your offset in the normal direction to obtain the variable value
-and then assign it to the surface of your selected parts.
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - constant offset value
- - constant number (constant variable is not valid)
-
-.. note::
- Choose a negative offset if your normals do not point into the field.
-
- This function is not supported for Server of Servers (SOS)
- decomposition because SOS was designed to benefit from independent server
- computations in parallel. Recall that EnSight must find the field adjacent
- to the surfaces of your selected parts. Because some of these fields might be
- on other servers, creating dependencies that preclude independent
- servers, this function is disabled.
-
-
-.. _PartNumber:
-
-------------
-PartNumber()
-------------
-
-
-**Part Number**
-
-``PartNumber(any parts [,Compute_Per_part])``
-
-
-Computes a constant per part variable that is the GUI
-part number if the part is a server-side part. If computed as ``Compute_Per_case``,
-the value is the maximum part number.
-
-
-.. note::
- Any client-side part (for example, a vector arrow, particle trace, or profile)
- is assigned the ``Undefined`` value. Model parts are always server-side parts.
-
-
-
-.. _Pres:
-
-------
-Pres()
-------
-
-
-**Pressure**
-
-``Pres(any parts, density, total energy, velocity, ratio of specific heats)``
-
-
-Computes a scalar variable whose value is the pressure
-:math:`p`. This scalar variable is defined as:
-
-:math:`p=\left(\gamma -1\right)\rho \left(\frac{E}{\rho }-\frac{1}{2}{V}^{2}\right)`
-
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`m`
- - momentum
- * - :math:`E`
- - total energy
- * - :math:`\rho`
- - density
- * - :math:`V`
- - velocity :math:`=m/\rho`
- * - :math:`\gamma`
- - ratio of specific heats (1.4 for air)
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part variable, or constant number
-
-
-.. _PresCoef:
-
-----------
-PresCoef()
-----------
-
-
-**Pressure Coefficient**
-
-``PresCoef(any parts, density, total energy, velocity, ratio
-of specific heats, freestream density, freestream speed of sound, freestream
-velocity magnitude)``
-
-
-Computes a scalar variable that is the pressure coefficient
-:math:`{C}_{p}`. This scalar variable is defined as:
-
-:math:`{C}_{p}=\frac{p-{p}_{i}}{\frac{{\rho }_{i}{V}_{i}^{2}}{2}}`
-
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`p`
- - pressure
- * - :math:`{p}_{i}`
- - freestream pressure
- * - :math:`{\rho }_{i}`
- - freestream density
- * - :math:`{V}_{i}`
- - freestream velocity magnitude
-
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, constant per part variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part variable, or constant number
- * - freestream density
- - constant or constant per part variable or constant number
- * - freestream speed of sound
- - constant or constant per part variable or constant number
- * - freestream velocity magnitude
- - constant or constant per part variable or constant number
-
-
-
-
-.. _PresDynam:
-
------------
-PresDynam()
------------
-
-
-**Dynamic Pressure**
-
-``PresDynam(any parts, density, velocity)``
-
-
-Computes a scalar variable that is the dynamic pressure :math:`q`. This
-scalar variable is defined as:
-
-:math:`q=\frac{\rho {V}^{2}}{2}`
-
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`\rho`
- - density
- * - :math:`V`
- - velocity magnitude
-
-
-See :ref:`KinEn() <KinEn>`.
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - velocity
- - vector variable
-
-
-.. _PresNorm:
-
-----------
-PresNorm()
-----------
-
-
-**Normalized Pressure**
-
-``PresNorm(any parts, density, total energy, velocity, ratio of
-specific heats, freestream density, freestream speed of sound)``
-
-
-Computes a scalar variable that is the normalized pressure :math:`{p}_{n}`.
-This scalar variable is defined as:
-
-:math:`{p}_{n}=p/{p}_{i}`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`{p}_{i}`
- - freestream pressure = :math:`1/\gamma`
- * - :math:`\gamma`
- - ratio of specific heats
- * - :math:`p`
- - pressure
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number
- * - freestream density
- - constant or constant per part variable or
- constant number
- * - freestream speed of sound
- - constant or constant per part variable or
- constant number
-
-
-.. _PresLogNorm:
-
--------------
-PresLogNorm()
--------------
-
-
-**Log of Normalized Pressure**
-
-``PresLogNorm(any parts, density, total energy, velocity,
-ratio of specific heats, freestream density, freestream speed of sound)``
-
-
-Computes a scalar variable that is the natural log of
-normalized pressure :math:`\mathrm{ln}{p}_{n}`. This scalar variable
-is defined as:
-
-:math:`\mathrm{ln}{p}_{n}=\mathrm{ln}\left(p/{p}_{i}\right)`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`{p}_{i}`
- - freestream pressure = :math:`1/\gamma`
- * - :math:`\gamma`
- - ratio of specific heats
- * - :math:`p`
- - pressure
-
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number
- * - freestream density
- - constant or constant per part variable or
- constant number
- * - freestream speed of sound
- - constant or constant per part variable or
- constant number
-
-
-.. _PresStag:
-
-----------
-PresStag()
-----------
-
-
-**Stagnation Pressure**
-
-``PresStag(any parts, density, total energy, velocity, ratio
-of specific heats)``
-
-
-Computes a scalar variable that is the stagnation
-pressure :math:`{p}_{o}`. This scalar variable is defined as:
-
-:math:`{p}_{o}=p{\left(1+\left(\frac{\gamma -1}{2}\right){M}^{2}\right)}^{\left(\gamma /\left(\gamma -1\right)\right)}`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`p`
- - pressure
- * - :math:`\gamma`
- - ratio of specific heats
- * - :math:`M`
- - mach number
-
-
-.. note::
- In literature, stagnation pressure is used interchangeably with total pressure.
- The stagnation pressure (or total pressure) uses two different equations, depending
- upon the flow regime: compressible or incompressible. EnSight has chosen to define
- stagnation pressure using the preceding compressible flow equation and total pressure
- using the incompressible flow equation. See :ref:`PresT() <PresT>`.
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number
-
-
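-
-A small worked sketch of the formula with assumed values (illustration only):
-
-.. code-block:: python
-
-    # Stagnation pressure from static pressure and Mach number
-    # (the values are assumptions for the example).
-    p = 101325.0  # static pressure
-    gamma = 1.4   # ratio of specific heats
-    mach = 0.8    # Mach number (see Mach())
-
-    p0 = p * (1.0 + 0.5 * (gamma - 1.0) * mach**2) ** (gamma / (gamma - 1.0))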
-
-.. _PresNormStag:
-
---------------
-PresNormStag()
---------------
-
-
-**Normalized Stagnation Pressure**
-
-``PresNormStag(any parts, density, total energy, velocity,
-ratio of specific heats, freestream density, freestream speed of sound,
-freestream velocity magnitude)``
-
-
-Computes a scalar variable that is the normalized
-stagnation pressure :math:`{p}_{on}`. This scalar variable
-is defined as:
-
-:math:`{p}_{on}=\left({p}_{o}/{p}_{oi}\right)`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`{p}_{o}`
- - stagnation pressure
- * - :math:`{p}_{oi}`
- - freestream stagnation pressure
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number
- * - freestream density
- - constant or constant per part variable or
- constant number
- * - freestream speed of sound
- - constant or constant per part variable or
- constant number
- * - freestream velocity magnitude
- - constant or constant per part variable or
- constant number
-
-
-
-.. _PresStagCoef:
-
---------------
-PresStagCoef()
---------------
-
-
-**Stagnation Pressure Coefficient**
-
-``PresStagCoef(any parts, density, total energy, velocity,
-ratio of specific heats, freestream density, freestream speed of sound,
-freestream velocity magnitude)``
-
-
-Computes a scalar variable that is the stagnation pressure
-coefficient :math:`{C}_{{p}_{o}}`. This scalar variable is
-defined as:
-
-:math:`{C}_{{p}_{o}}=\left({p}_{o}-{p}_{i}\right)/\left(\frac{{\rho }_{i}{V}^{2}}{2}\right)`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`{p}_{o}`
- - stagnation pressure
- * - :math:`{p}_{i}`
- - freestream pressure = :math:`1/\gamma`
- * - :math:`\gamma`
- - ratio of specific heats
- * - :math:`{\rho }_{i}`
- - freestream density
- * - :math:`V`
- - velocity magnitude
-
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number
- * - freestream density
- - constant or constant per part variable or
- constant number
- * - freestream speed of sound
- - constant or constant per part variable or
- constant number
- * - freestream velocity magnitude
- - constant or constant per part variable or
- constant number
-
-
-
-.. _PresPitot:
-
------------
-PresPitot()
------------
-
-
-**Pitot Pressure**
-
-``PresPitot(any parts, density, total energy, velocity,
-ratio of specific heats)``
-
-
-Computes a scalar variable that is the pitot pressure
-:math:`{p}_{p}`. This scalar variable is defined as:
-
-:math:`\begin{array}{ll}{p}_{p}\hfill & =sp\hfill \\ s\hfill & =\frac{{\left(\left(\frac{\gamma +1}{2}\right)\left(\frac{{V}^{2}}{\gamma \left(\gamma -1\right)\left(\frac{E}{\rho }-\frac{{V}^{2}}{2}\right)}\right)\right)}^{\left(\gamma /\left(\gamma -1\right)\right)}}{{\left(\left(\frac{2\gamma }{\gamma +1}\right)\left(\frac{{V}^{2}}{\gamma \left(\gamma -1\right)\left(\frac{E}{\rho }-\frac{{V}^{2}}{2}\right)}\right)-\left(\frac{\gamma -1}{\gamma +1}\right)\right)}^{\left(\gamma /\left(\gamma -1\right)\right)}}\hfill \end{array}`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`\gamma`
- - ratio of specific heats
- * - :math:`E`
- - total energy per unit volume
- * - :math:`\rho`
- - density
- * - :math:`V`
- - velocity magnitude
- * - :math:`p`
- - pressure
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number
-
-
-.. note::
- For mach numbers less than 1.0, the pitot pressure is the same as the
- stagnation pressure. For mach numbers greater than or equal to 1.0, the
- pitot pressure is equivalent to the stagnation pressure behind a normal
- shock.
-
-
-
-.. _PresPitotRatio:
-
-----------------
-PresPitotRatio()
-----------------
-
-
-**Pitot Pressure Ratio**
-
-``PresPitotRatio(any parts, density, total energy, velocity,
-ratio of specific heats, freestream density, freestream speed of sound)``
-
-
-Computes a scalar variable that is the pitot pressure ratio
-:math:`{p}_{pr}`. This scalar variable is defined as:
-
-:math:`{p}_{pr}=s\left(\gamma -1\right)\left(E-\frac{\rho {V}^{2}}{2}\right)`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`s`
- - (as defined in :ref:`PresPitot() <PresPitot>`)
- * - :math:`\gamma`
- - ratio of specific heats
- * - :math:`E`
- - total energy per unit volume
- * - :math:`\rho`
- - density
- * - :math:`V`
- - velocity magnitude
-
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number
- * - freestream density
- - constant or constant per part variable or
- constant number
- * - freestream speed of sound
- - constant or constant per part variable or
- constant number
-
-
-
-.. _PresT:
-
--------
-PresT()
--------
-
-
-**Total Pressure**
-
-``PresT(any parts, pressure, velocity, density)``
-
-
-Computes a scalar variable whose value is the total
-pressure :math:`{p}_{t}`. This scalar variable is defined as:
-
-:math:`{p}_{t}=p+\rho \frac{{V}^{2}}{2}`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`\rho`
- - density
- * - :math:`V`
- - velocity
- * - :math:`p`
- - pressure
-
-
-.. note::
- In literature, total pressure is used interchangeably with stagnation
- pressure. The total pressure (or stagnation pressure) uses two different
- equations, depending upon the flow regime: incompressible or compressible.
- EnSight has chosen to define total pressure using the preceding incompressible flow
- equation and stagnation pressure using the compressible flow
- equation. See :ref:`PresStag() <PresStag>`.
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - pressure
- - scalar variable
- * - velocity
- - vector variable
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
-
-
-
-.. _Q_criteria:
-
-------------
-Q_criteria()
-------------
-
-
-**Q_criteria**
-
-``Q_criteria(any parts, Grad_Vel_x, Grad_Vel_y, Grad_Vel_z)``
-
-
-Computes a scalar variable that is the second
-invariant, or Q-criterion, of the velocity gradient tensor. Vortex shells may
-then be visualized as an iso-surface of Q-criterion > 0. Here is how inputs
-to this function are calculated:
-
-First calculate the three components of velocity:
-
-Vel_x = Velocity[X] = x-component of the velocity vector
-
-Vel_y = Velocity[Y] = y-component of the velocity vector
-
-Vel_z = Velocity[Z] = z-component of the velocity vector
-
-Then calculate the gradients using the intermediate variable:
-
-Grad_Vel_x = Grad(any parts, Vel_x) = gradient of x component Velocity
-
-Grad_Vel_y = Grad(any parts, Vel_y) = gradient of y component Velocity
-
-Grad_Vel_z = Grad(any parts, Vel_z) = gradient of z component Velocity
-
-with:
-
-Velocity = velocity vector variable
-
-.. note::
- A common mistake is to try to calculate the gradient
- from the component of the velocity without using the intermediate ``Vel_x``,
- ``Vel_y``, and ``Vel_z`` variables. For example, the following calculation is wrong.
- It uses only the velocity magnitude.
-
- Grad_Vel_x = Grad(any parts, Velocity[X])
-
- This is a *User-Defined Math Function (UDMF)*, which you may modify and
- recompile. For more information, see the *EnSight Interface Manual*.
-
-
-.. admonition:: Algorithm: Q_criteria
-
- The three gradient vectors of the components of the
- velocity vector constitute the velocity gradient tensor. Using the nine components
- of this velocity gradient tensor, :math:`\nabla \nu`, construct both the
- symmetric, :math:`S`, and antisymmetric, :math:`\Omega`, parts of the
- velocity gradient tensor; the :math:`Q` criterion is then established as follows.
-
- :math:`\nabla \nu =S+\Omega`
-
- where
-
- :math:`S=\frac{1}{2}\left[\nabla \nu +{\left(\nabla \nu \right)}^{T}\right]`
-
-
- :math:`\Omega =\frac{1}{2}\left[\nabla \nu -{\left(\nabla \nu \right)}^{T}\right]`
-
- solving for :math:`Q` (hence the :math:`Q` criterion) when
-
- :math:`Q=\frac{1}{2}\left[{\left|\Omega \right|}^{2}-{\left|S\right|}^{2}\right]`
-
- which (in terms of EnSight variables) reduces to:
-
- .. code-block::
-
- Q = - 0.5 * ( Grad_Vel_x[X] * Grad_Vel_x[X] + Grad_Vel_y[Y] * Grad_Vel_y[Y] + Grad_Vel_z[Z] * Grad_Vel_z[Z] +
- 2 * (Grad_Vel_x[Y] * Grad_Vel_y[X] + Grad_Vel_x[Z] * Grad_Vel_z[X] + Grad_Vel_y[Z] * Grad_Vel_z[Y])) > 0
-
-
- Now, to find the vortices, create an isosurface where Q
- is positive (Q > 0). This is because an isosurface with positive Q isolates
- areas where the strength of the rotation overcomes the strain, thus making those
- surfaces eligible as vortex envelopes.
-
- See also the :ref:`Lambda2() <Lambda2>` function.
-
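-
-A rough NumPy sketch of this reduction, starting from three assumed gradient
-arrays (each of shape ``(n, 3)``, ordered [X, Y, Z]):
-
-.. code-block:: python
-
-    import numpy as np
-
-    n = 4  # assumed number of nodes/elements
-    rng = np.random.default_rng(0)
-    grad_vel_x = rng.normal(size=(n, 3))  # Grad(plist, Vel_x)
-    grad_vel_y = rng.normal(size=(n, 3))  # Grad(plist, Vel_y)
-    grad_vel_z = rng.normal(size=(n, 3))  # Grad(plist, Vel_z)
-
-    X, Y, Z = 0, 1, 2
-    q = -0.5 * (
-        grad_vel_x[:, X] ** 2
-        + grad_vel_y[:, Y] ** 2
-        + grad_vel_z[:, Z] ** 2
-        + 2.0 * (
-            grad_vel_x[:, Y] * grad_vel_y[:, X]
-            + grad_vel_x[:, Z] * grad_vel_z[:, X]
-            + grad_vel_y[:, Z] * grad_vel_z[:, Y]
-        )
-    )
-    vortex_candidate = q > 0.0  # isosurface Q > 0 marks vortex envelopes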
-**References**
-
-For more information, see these references:
-
-1. Dubief, Y and Delcayre, F., "On coherent-vortex
- identification in turbulence", Journal of Turbulence, (jot.iop.org) 1
- (2000) 11, pp.1-22.
-2. Haller, G., "An objective definition of a vortex," Journal of
- Fluid Mechanics, 2005, vol. 525, pp. 1-26.
-3. Jeong, J. and Hussain, F., "On the identification of a
- vortex," Journal of Fluid Mechanics, 1995, vol. 285, pp. 69-94.
-
-
-.. _Radiograph_grid:
-
------------------
-Radiograph_grid()
------------------
-
-
-**Radiograph_grid**
-
-``Radiograph_grid(1D or 2D parts, dir X, dir Y, dir Z, num_points, variable, [component])``
-
-
-Computes a per element scalar variable on the designated
-1D or 2D parts, that is a directional integration from these parts of a scalar
-variable or vector component through the model.
-
-Think of rays being cast from the center of each element of the 1D or 2D parents
-in the direction specified (and long enough to extend through the model). Along
-each ray, the desired variable is integrated and the integral value is assigned
-to the element from which the ray was cast. This function integrates the ray in
-a constant delta, grid-like fashion. You control the delta by the number of points that is
-specified in the integration direction.
-
-.. note::
- While this function is not generally as time-consuming as the
- :ref:`Radiograph_mesh() <Radiograph_mesh>` function (and you have
- some resolution control with the ``num_points`` argument), it
- can still take some computation time. You might want to set the Abort server
- operations performance preference to avoid being stuck in a computation
- loop that exceeds your patience.
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - dir X
- - constant number: Integration direction vector x component
- * - dir Y
- - constant number: Integration direction vector y component
- * - dir Z
- - constant number: Integration direction vector z component
- * - num_points
- - constant number: Number of points along ray in the integration direction
- (The integration delta is the ray length divided by the number of
- points.)
- * - variable
- - Variable that is integrated along the ray
- * - component
- - If the variable is a vector [X], [Y], [Z], or [] for magnitude
-
-
-.. note::
- This function does not work properly for Server of Servers (SOS) mode.
- Each portion only gives its local value.
-
-
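-A conceptual sketch of one ray's constant-delta integration (the sampling
-function ``sample_var`` and the geometry are assumptions for the example):
-
-.. code-block:: python
-
-    import numpy as np
-
-    def sample_var(point):
-        # Stand-in for evaluating the integrated variable at a point in the model.
-        return 1.0
-
-    origin = np.array([0.0, 0.0, 0.0])     # element center on the 1D/2D parent
-    direction = np.array([0.0, 0.0, 1.0])  # (dir X, dir Y, dir Z), unit length
-    ray_length = 10.0                      # long enough to span the model
-    num_points = 100
-
-    delta = ray_length / num_points
-    points = origin + np.outer(np.arange(num_points) * delta, direction)
-    integral = sum(sample_var(p) for p in points) * delta
-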
-
-.. _Radiograph_mesh:
-
------------------
-Radiograph_mesh()
------------------
-
-
-**Radiograph_mesh**
-
-``Radiograph_mesh(1D or 2D parts, dir X, dir Y, dir Z, variable, [component])``
-
-
-Computes a per element scalar variable on the designated
-1D or 2D parts, that is a directional integration from these parts of a scalar
-variable or vector component through the model. Think of rays being cast from
-the center of each element of the 1D or 2D parents in the direction specified
-(and long enough to extend through the model). Along each ray the desired
-variable is integrated and the integral value is assigned to the element from
-which the ray was cast. This function integrates the ray at each domain element
-face intersection.
-
-.. note::
- Running this function can be a very time-consuming process.
- You might want to set the **Abort server operations** performance preference
- in EnSight to avoid being stuck in a computation loop that exceeds your patience.
- The :ref:`Radiograph_grid() <Radiograph_grid>` function is generally much quicker.
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - dir X
- - constant number: Integration direction vector x component
- * - dir Y
- - constant number: Integration direction vector y component
- * - dir Z
- - constant number: Integration direction vector z component
- * - variable
- - Variable that is integrated along the ray.
- * - component
- - If the variable is a vector [X], [Y], [Z], or [] for magnitude
-
-
-.. note::
- This function does not work properly for Server of Servers (SOS) mode.
- Each portion only gives its local value.
-
-
-.. _RectToCyl:
-
------------
-RectToCyl()
------------
-
-
-**Rectangular To Cylindrical Vector**
-
-``RectToCyl(any parts, vector)``
-
-
-Produces a vector variable with cylindrical components
-according to frame 0.
-
-(Intended for calculation purposes)
-
-x = radial component
-y = tangential component
-z = z component
-
-
-.. _ServerNumber:
-
---------------
-ServerNumber()
---------------
-
-
-**Server Number**
-
-``ServerNumber(any parts)``
-
-
-Produces a per-element scalar variable that is the
-server number containing the element. This function is useful for decomposed models using
-Server of Servers (SOS) mode so that the distribution can be visualized.
-
-
-.. _ShockPlot3d:
-
--------------
-ShockPlot3d()
--------------
-
-
-**Shock Plot3d**
-
-``ShockPlot3d(2D or 3D parts, density, total energy,
-velocity, ratio of specific heats)``
-
-
-Computes a scalar variable ShockPlot3d whose value is:
-
-:math:`ShockPlot3d=\frac{V}{c}·\frac{grad(p)}{\left|grad(p)\right|}`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`V`
- - velocity
- * - :math:`c`
- - speed of sound
- * - :math:`p`
- - pressure
- * - :math:`grad(p)`
- - gradient of pressure
-
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number.
-
-
-
-To compute candidate shock surfaces, create an
-isosurface of the calculated variable, ``shockplot3d = 1.0``.
-These shock regions can be verified by overlaying them with :math:`Mach\ge 1.0`.
-
-Also consider comparing with the shock region/surface
-feature visualization.
-
-
-.. _SmoothMesh:
-
-------------
-SmoothMesh()
-------------
-
-
-**Mesh Smoothing**
-
-``SmoothMesh(any 1D or 2D parts, number of passes, weight)``
-
-
-Performs a mesh "smoothing" operation. This function
-returns a vector variable which, when applied to the mesh as a displacement,
-results in a "smoother" mesh representation. The function computes new node
-locations resulting from a "normalization" of the mesh elements.
-
-The result of this function tends to be a mesh with equal-sized elements.
-The algorithm applies a form of convolution to the mesh edges repeatedly
-(number of passes) using a weighting factor to control how much change
-in position is allowed in each pass. In most cases, the weight is supplied
-as a constant, but the weight can be specified as a nodal scalar array.
-This allows for local control over the region of the mesh to be smoothed.
-The algorithm is fully threaded.
-
-
-.. note::
- Nodes on the outer boundary of a mesh (or nodes bounded by ghost elements) are not
- allowed to move. A good set of initial parameters might be 50 passes with a
- weight constant of 0.05.
-
-
-For each pass, the following formula is applied:
-
-:math:`{x}_{i+1}={x}_{i}+w{\displaystyle \sum _{j=0}^{n}\left({x}_{j}-{x}_{i}\right)}`
-
-where
-
-:math:`x` = nodal position at pass (i)
-
-:math:`w` = nodal weight
-
-:math:`n` = edge connected nodes
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - number of passes
- - number of smoothing passes to be applied:
- constant
-
- * - weight
- - fraction of the length of a node's edges that a
- node is allowed to move with each pass: nodal scalar variable or constant
-
-
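-A minimal sketch of the per-pass update on an assumed 1D polyline (interior
-nodes only, matching the rule that boundary nodes do not move):
-
-.. code-block:: python
-
-    import numpy as np
-
-    x = np.array([[0.0, 0, 0], [0.9, 0, 0], [2.2, 0, 0], [3.0, 0, 0]])
-    weight = 0.05
-    passes = 50
-
-    for _ in range(passes):
-        new_x = x.copy()
-        # Edge-connected neighbors of interior node i are i-1 and i+1.
-        for i in range(1, len(x) - 1):
-            new_x[i] = x[i] + weight * ((x[i - 1] - x[i]) + (x[i + 1] - x[i]))
-        x = new_x
-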
-
-.. _SOSConstant:
-
--------------
-SOSConstant()
--------------
-
-
-**SOS Constant**
-
-``SOSConstant(any parts, variable, reduction operation (0-3))``
-
-
-.. note::
- Generally, this function should not be necessary. The :ref:`SOSConstant() <SOSConstant>`
- function has been pulled into the server/SOS infrastructure. However, this function
- remains for backward compatibility.
-
-Computes a constant variable whose value is the result of applying a reduction operation
-on that constant variable over the values on each of the servers. If there is no SOS involved
-or only a single server, the result is the same as the constant variable value on the single
-server.
-
-The selected part is used to select the case from which the constant variable is
-used. The constant variable itself is specified (from the dataset or a computed
-value). The operation to perform is selected as an integer from ``0`` to ``3``:
-
-- ``0``: A simple summation of the values from each of the servers.
-- ``1``: An average of the values from the servers. (The weight given to each server
- in the average is the same, so this is essentially the sum operation divided by the number of servers.)
-- ``2``: The minimum of the values on each of the servers.
-- ``3``: The maximum of the values on each of the servers.
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - variable
- - constant variable (from the data or computed)
-
- * - reduction operation
- - value from 0 to 3 that selects from the following operations:
-
- 0=sum 1=average 2=minimum 3=maximum
-
-
-
-.. _SpaMean:
-
----------
-SpaMean()
----------
-
-
-**Spatial Mean**
-
-``SpaMean(any parts, scalar or (vector, component) [,Compute_Per_part])``
-
-
-Computes a constant or constant per part variable whose
-value is the volume (or area or length) weighted mean value of a scalar (or
-vector component) at the current time. This value can change with time. The
-component is not requested if a scalar variable is used.
-
-The spatial mean is computed by summing the product of
-the volume (3D, area 2D, or length 1D) of each element by the value of the
-scalar (or vector component) taken at the centroid of the element (nodal
-variables are interpolated at each cell centroid using cell shape blending or
-metric functions) for each element over the entire part. The final sum is then
-divided by the total volume (or area) of the part.
-
-:math:`\text{Spatial Mean}=\frac{{\displaystyle \sum {s}_{i}vo{l}_{i}}}{{\displaystyle \sum vo{l}_{i}}}`
-
-where:
-
-:math:`{s}_{i}` = scalar taken at centroid of element i
-
-:math:`vo{l}_{i}` = volume (or area or length) of element i
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - [component]
- - if vector variable, magnitude is the default, or specify [x], [y], or [z]
-
-
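-
-A rough sketch of the discrete form with assumed per-element arrays:
-
-.. code-block:: python
-
-    import numpy as np
-
-    s = np.array([1.0, 2.0, 4.0])      # scalar at each element centroid
-    vol = np.array([0.5, 0.25, 0.25])  # element volume (or area or length)
-
-    spatial_mean = np.sum(s * vol) / np.sum(vol)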
-
-.. _SpaMeanWeighted:
-
------------------
-SpaMeanWeighted()
------------------
-
-
-**Spatial Mean Weighted**
-
-``SpaMeanWeighted(any parts, scalar or (vector, component), weight, component [,Compute_Per_part])``
-
-
-Computes a constant or constant per part variable whose value is
-weighted both by the volume (or area or length) and a weighting variable.
-This value can change with time. For both the variable
-itself and the weighting variable, the component is not requested if a scalar
-variable is used.
-
-The weighted spatial mean is computed by summing the
-product of the volume (3D, area 2D, or length if 1D) of each element by the
-value of the scalar (or vector component) taken at the centroid of the element
-(nodal variables are interpolated at each cell centroid using cell shape
-blending or metric functions) with the product of the weighting scalar/vector
-component taken at the centroid of the element (again, if a nodal variable,
-similarly evaluated at the element centroid) for each element over the entire
-part. The final sum is then divided by the total scalar/vector weighted (again
-if a nodal weighting variable is similarly evaluated at the element centroid)
-volume (or area or length) of the part as follows:
-
-:math:`\text{Spatial Mean Weighted}=\frac{{\displaystyle \sum {w}_{i}{s}_{i}vo{l}_{i}}}{{\displaystyle \sum {w}_{i}vo{l}_{i}}}`
-
-where:
-
-:math:`{s}_{i}` = scalar or vector component taken at centroid of element i
-
-:math:`{w}_{i}` = weighting scalar or vector component taken at centroid of element i
-
-:math:`vo{l}_{i}` = volume (or area or length) of element i
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - [component]
- - if vector variable, magnitude is the default,
- or specify [x], [y], or [z]
-
-
-
-.. _Speed:
-
--------
-Speed()
--------
-
-
-**Speed**
-
-``Speed(any parts, velocity)``
-
-
-Computes a scalar variable whose value is the speed. This function
-is defined as:
-
-:math:`speed=\sqrt{{u}^{2}+{v}^{2}+{w}^{2}}`
-
-where:
-
-:math:`u,v,w` = velocity components in the :math:`x,y,z` directions.
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - velocity
- - vector variable
-
-
-
-.. _SonicSpeed:
-
-------------
-SonicSpeed()
-------------
-
-
-**Sonic Speed**
-
-``SonicSpeed(any parts, density, total energy, velocity, ratio of specific heats)``
-
-
-Computes a scalar variable :math:`c` whose value is:
-
-:math:`c=\sqrt{\frac{\gamma p}{\rho }}`
-
-where:
-
-:math:`\gamma` = ratio of specific heats
-
-:math:`\rho` = density
-
-:math:`p` = pressure
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part variable, or constant number
-
-
-
-.. _StatMoment:
-
-------------
-StatMoment()
-------------
-
-
-**Statistics Moments**
-
-``StatMoment(any parts, v, function [,Compute_Per_part])``
-
-
-Computes a constant or constant per part that is the
-sum, mean, variance, skew, or kurtosis by applying a selected statistical
-function over all of the nodes or elements of the selected parts, given the
-selected scalar or constant variable. Five functions are defined:
-
-:math:`sum={\displaystyle \sum _{i=1}^{N}{\nu }_{i}}`
-
-
-:math:`mean=\frac{1}{N}{\displaystyle \sum _{i=1}^{N}{\nu }_{i}}`
-
-
-:math:`\mathrm{var}=\frac{1}{N-1}{\displaystyle \sum _{i=1}^{N}{\left({\nu }_{i}-mean\right)}^{2}}`
-
-
-:math:`skew=\frac{1}{N}{\displaystyle \sum _{i=1}^{N}{\left(\frac{{\nu }_{i}-mean}{\sqrt{\mathrm{var}}}\right)}^{3}}`
-
-
-:math:`kurt=\left\{\frac{1}{N}{\displaystyle \sum _{i=1}^{N}{\left(\frac{{\nu }_{i}-mean}{\sqrt{\mathrm{var}}}\right)}^{4}}\right\}-3`
-
-- The ``mean`` is the simple average (unweighted, arithmetic mean) of all the
- samples.
-- The ``var`` is the variance, which is an indication of the spread of a
- sample of numbers out from the mean. It is the square of the standard
- deviation.
-- The ``skew`` is an indication of the degree of asymmetry about the mean. A
- positive skew indicates an asymmetric tail toward more positive values.
- A negative skew indicates an asymmetric tail toward more negative values.
-- The ``kurt`` is the kurtosis, which is an indication of the peakness or
- flatness of the distribution compared to a normal distribution. A positive
- kurtosis indicates more peakness. A negative kurtosis indicates a more flat
- distribution.
-
-If the variable (``v``) is a constant, the operation is computed as if the
-variable was a nodal variable with the given value at all nodes. If the
-computation is over an element variable, the size of the element is not
-used in the computation. If volume or area weighting is desired, the
-variable must be pre-weighted.
-
-.. note::
-
- ``StatMoment(plist,scalar,0)`` should be used in place of the
- example user-defined math function, ``udmf_sum``, because the
- :ref:`StatMoment() <StatMoment>` function is threaded and
- properly handles ghost cells. However, for parallel (SOS) computation,
- because nodes at the interface are shared among servers, the values at
- the interface nodes are used in computations multiple times. Therefore,
- the ``StatMoment`` value computed from a nodal variable using SOS deviates
- from the true value calculated using only one server. Elemental variables
- do not suffer from this issue as ghost elements are handled properly and
- elements are not shared among servers.
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - v
- - scalar variable, constant or constant per part variable,
- or constant number
- * - function
- - constant number selecting the moment to compute
- (0=sum, 1=mean, 2=variance, 3=skewness, 4=kurtosis)
-
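-
-A quick NumPy check of the five definitions on an assumed sample:
-
-.. code-block:: python
-
-    import numpy as np
-
-    v = np.array([1.0, 2.0, 2.0, 3.0, 7.0])
-    n = v.size
-
-    total = v.sum()                          # function 0: sum
-    mean = v.mean()                          # function 1: mean
-    var = np.sum((v - mean) ** 2) / (n - 1)  # function 2: variance
-    skew = np.mean(((v - mean) / np.sqrt(var)) ** 3)         # function 3: skew
-    kurt = np.mean(((v - mean) / np.sqrt(var)) ** 4) - 3.0   # function 4: kurtosis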
-
-**References**
-
-For more information, see these references:
-
-1. Numerical Recipes, Press et al., Cambridge Univ. Press, 1997, pp. 454-459.
-
-
-
-.. _StatRegSpa:
-
-------------
-StatRegSpa()
-------------
-
-
-**Statistics Regression**
-
-``StatRegSpa(any parts, y, x0, x1, x2, x3, x4, weight)``
-
-
-Performs classical multivariate linear regression,
-predicting ``y = f(x0,x1,x2,x3,x4)``. The regression is performed at the current
-timestep using all of the nodes/elements of the selected parts. At each
-node/element, the input values y, x0, x1, x2, x3, and x4 and the weight are evaluated
-and added as an observation to the regression with the supplied weight (in the
-range [0.0-1.0]). If the model does not require five inputs, any of them can be
-specified as the constant number ``0.0`` to remove it. If the constant ``1.0`` is
-supplied as an input, an intercept is computed. You should avoid
-co-linearity in the inputs (which is especially easy when supplying constants as
-regressors).
-
-For example, to model simple linearity (``y = Ax0 + B``), the function
-parameters would be ``StatRegSpa(plist, yvar, xvar, 1., 0., 0., 0., 1.)``. This
-example specifies that all observations are to be weighted the same.
-If weighting by element volume is desired, compute a field variable of
-element volume, normalized by the largest individual element volume, and pass
-that variable as the weight. The function returns a scalar constant whose value
-is the R-squared value for the regression.
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - y
- - scalar, constant, or constant per part
- variable or constant number
-
- * - x0, x1, x2, x3, x4
- - scalar, constant, or constant per part
- variable or constant number
-
- * - weight
- - scalar, constant, or constant per part
- variable or constant number
-
-
-For a full set of estimated values and statistical
-diagnostic output, see :ref:`StatRegVal1() <StatRegVal1>` and
-:ref:`StatRegVal2() <StatRegVal2>`.
-
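-
-The simple-linearity example above corresponds to an ordinary weighted
-least-squares fit. A rough NumPy sketch under those assumptions (equal
-weights, regressor ``xvar`` plus an intercept; the data values are invented
-for the example):
-
-.. code-block:: python
-
-    import numpy as np
-
-    xvar = np.array([0.0, 1.0, 2.0, 3.0])
-    yvar = np.array([1.1, 2.9, 5.2, 6.8])
-
-    # Design matrix: x0 = xvar, x1 = 1.0 (intercept); weights all 1.0.
-    X = np.column_stack([xvar, np.ones_like(xvar)])
-    coef, _, _, _ = np.linalg.lstsq(X, yvar, rcond=None)
-
-    resid = yvar - X @ coef
-    ss_err = np.sum(resid**2)
-    ss_tot = np.sum((yvar - yvar.mean()) ** 2)
-    r_squared = 1.0 - ss_err / ss_tot  # the value StatRegSpa() returns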
-
-
-.. _StatRegVal1:
-
--------------
-StatRegVal1()
--------------
-
-
-**Statistics Regression Info**
-
-``StatRegVal1(any parts, regression_variable, function)``
-
-
-This function returns basic statistical diagnostics for a regression computed
-using ``StatRegSpa()``. The function is passed the output variable of a previously
-computed ``StatRegSpa()`` and the function number of a specific statistical quantity
-to return. The values include the standard sum of squares values for the regression
-as well as the R-squared value.
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - regression_variable
- - a scalar variable which is the output of an earlier ``StatRegSpa()`` function
-
- * - function
- - the statistical quantity to return (0=sum of
- squares error, 1=sum of squares total, 2=sum of squares model, 3=R-squared)
-
-
-See also the :ref:`StatRegSpa() <StatRegSpa>` and :ref:`StatRegVal2() <StatRegVal2>`
-functions.
-
-
-.. _StatRegVal2:
-
--------------
-StatRegVal2()
--------------
-
-
-**Statistics Regression Info**
-
-``StatRegVal2(any parts, regression_variable, function, selection)``
-
-
-This function returns statistical diagnostics specific to individual input coefficients
-for a regression computed using the ``StatRegSpa()`` function. The ``StatRegVal2()``
-function is passed the output variable previously computed by the ``StatRegSpa`` function,
-the function number of the specific statistical quantity to return, and the coefficient
-selected. The values include the sum of squares and partial sum of squares for the individual
-coefficients as well as the estimated coefficient itself and its standard error.
-
-
-.. list-table:: **Function arguments**
- :widths: 45 55
-
- * - regression_variable
- - scalar variable that is the output of an earlier ``StatRegSpa()`` function
-
- * - function
- - statistical quantity to return
-
- 0 = estimated coefficient
- 1 = sum of squares for the variable
- 2 = partial sum of squares for the variable
- 3 = standard error for the coefficient
-
- * - selection
- - constant or constant per part variable or
- constant number that selects the specific coefficient for which to
- retrieve the statistical quantity (0 = x0, 1 = x1, 2 = x2, 3 = x3,
- 4 = x4)
-
-See also the :ref:`StatRegSpa() <StatRegSpa>` and :ref:`StatRegVal1() <StatRegVal1>`
-functions.
-
-
-.. _sumPerPart:
-
-------------
-sumPerPart()
-------------
-
-
-**sumPerPart**
-
-``sumPerPart(plist, scalar, result_type)``
-
-
-Sums scalar values of each part as a constant per part or a constant per case
-value.
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - plist
- - any parts
-
- * - scalar
- - scalar variable
-
- * - result_type
- - ``Per case`` or ``Per part``
-
-
-.. _sumPerPartArg:
-
----------------
-sumPerPartArg()
----------------
-
-
-**sumPerPartArg**
-
-``sumPerPartArg(part, ConstantPerPart, result_type)``
-
-
-Sums the constant per part value of each part into a case constant
-value.
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - part
- - any parts
-
- * - ConstantPerPart
- - constant per part variable
-
- * - result_type
- - "Per case" or "Per part"
-
-
-.. _Swirl:
-
--------
-Swirl()
--------
-
-**Swirl**
-
-``Swirl(any parts, density, velocity)``
-
-
-Computes a scalar variable ``swirl`` whose value is:
-
-:math:`swirl=\frac{\Omega ·V}{\rho {V}^{2}}`
-
-where:
-
-.. list-table::
- :widths: 20 80
-
- * - :math:`\Omega`
- - vorticity
-
- * - :math:`\rho`
- - density
-
- * - :math:`V`
- - velocity
-
-
-.. list-table:: **Function arguments**
- :widths: 20 80
-
- * - density
- - scalar, constant, or constant per part variable, or constant number
-
- * - velocity
- - vector variable
-
-
-.. _Temperature:
-
--------------
-Temperature()
--------------
-
-
-**Temperature**
-
-``Temperature(any parts, density, total energy, velocity, ratio of specific heats, gas constant)``
-
-
-Computes a scalar variable whose value is the
-temperature :math:`T`. The scalar variable is defined as:
-
-:math:`T=\frac{\left(\gamma -1\right)}{R}\left(\frac{E}{\rho }-\frac{1}{2}{V}^{2}\right)`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`m`
- - momentum
-
- * - :math:`E`
- - total energy per unit volume
-
- * - :math:`\rho`
- - density
-
- * - :math:`V`
- - velocity = :math:`m/\rho`
-
- * - :math:`\gamma`
- - ratio of specific heats (1.4 for air)
-
- * - :math:`R`
- - gas constant
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number
- * - gas constant
- - constant or constant per part variable or
- constant number
-
-
-
-.. _TemperNorm:
-
-------------
-TemperNorm()
-------------
-
-
-**Normalized Temperature**
-
-``TemperNorm(any parts, density, total energy, velocity,
-ratio of specific heats, freestream density, freestream speed of sound, gas
-constant)``
-
-
-Computes a scalar variable that is the normalized temperature :math:`{T}_{n}`.
-This scalar variable is defined as:
-
-:math:`{T}_{n}=\frac{T}{{T}_{i}}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`T`
- - temperature
- * - :math:`{T}_{i}`
- - freestream temperature
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number
- * - freestream density
- - constant or constant per part variable or
- constant number
- * - freestream speed of sound
- - constant or constant per part variable or
- constant number
- * - gas constant
- - constant or constant per part variable or
- constant number
-
-
-
-.. _TemperLogNorm:
-
----------------
-TemperLogNorm()
----------------
-
-
-**Log of Normalized Temperature**
-
-``TemperLogNorm(any parts, density, total energy, velocity,
-ratio of specific heats, freestream density, freestream speed of sound, gas
-constant)``
-
-
-Computes a scalar variable that is the natural log of
-the normalized temperature :math:`\mathrm{ln}{T}_{n}`.
-This scalar variable is defined as:
-
-:math:`\mathrm{ln}{T}_{n}=\mathrm{ln}\left(T/{T}_{i}\right)`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`T`
- - temperature
- * - :math:`{T}_{i}`
- - freestream temperature
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number
- * - freestream density
- - constant or constant per part variable or
- constant number
- * - freestream speed of sound
- - constant or constant per part variable or
- constant number
- * - gas constant
- - constant or constant per part variable or
- constant number
-
-
-
-
-.. _TemperStag:
-
-------------
-TemperStag()
-------------
-
-
-**Stagnation Temperature**
-
-``TemperStag(any parts, density, total energy, velocity,
-ratio of specific heats, gas constant)``
-
-
-Computes a scalar variable that is the stagnation
-temperature :math:`{T}_{o}`. This scalar variable is defined as:
-
-:math:`{T}_{o}=T\left(1+\left(\frac{\gamma -1}{2}\right){M}^{2}\right)`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`T`
- - temperature
- * - :math:`\gamma`
- - ratio of specific heats
- * - :math:`M`
- - mach number
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number
- * - gas constant
- - constant or constant per part variable or
- constant number
-
-
-
-
-.. _TemperNormStag:
-
-----------------
-TemperNormStag()
-----------------
-
-
-**Normalized Stagnation Temperature**
-
-``TemperNormStag(any parts, density, total energy, velocity,
-ratio of specific heats, freestream density, freestream speed of sound,
-freestream velocity magnitude, gas constant)``
-
-
-Computes a scalar variable that is the normalized
-stagnation temperature :math:`{T}_{on}`. This function
-is defined as:
-
-:math:`{T}_{on}={T}_{o}/{T}_{oi}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`{T}_{o}`
- - stagnation temperature
- * - :math:`{T}_{oi}`
- - freestream stagnation temperature
-
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
- * - total energy
- - scalar variable
- * - velocity
- - vector variable
- * - ratio of specific heats
- - scalar, constant, or constant per part
- variable, or constant number
- * - freestream density
- - constant or constant per part variable or
- constant number
- * - freestream speed of sound
- - constant or constant per part variable or
- constant number
- * - freestream velocity magnitude
- - constant or constant per part variable or
- constant number
- * - gas constant
- - constant or constant per part variable or
- constant number
-
-
-
-
-
-.. _TempMean:
-
-----------
-TempMean()
-----------
-
-
-**Temporal Mean**
-
-``TempMean(any model parts, scalar,
-vector, or constant, timestep1, timestep2)``
-
-
-Computes a scalar, vector, or constant variable, depending on which
-type was selected, whose value is the mean value of the selected variable over
-the interval from timestep 1 to timestep 2. Therefore, the resultant variable is
-independent of time. The temporal mean is the discrete integral of the variable
-over time (using the *Trapezoidal Rule*) divided by the total
-time interval. Because any derived parts may vary in size over time, this
-function is only allowed on model parts. Model parts with changing connectivity
-are also not allowed.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - timestep1
- - constant number
- * - timestep2
- - constant number
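-
-As an illustrative sketch (assuming a connected ``session`` and a model
-scalar named ``Pressure``, both hypothetical names), the temporal mean over
-timesteps 0 through 10 could be computed with the native API::
-
-    session.ensight.part.select_all()
-    session.ensight.variables.evaluate("p_mean = TempMean(plist,Pressure,0,10)")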
-
-
-
-
-.. _TempMinMaxField:
-
------------------
-TempMinMaxField()
------------------
-
-
-**Temporal Minmax Field**
-
-``TempMinMaxField(any model parts, scalar or vector,
-timestep1, timestep2, 0 or 1, 0 = compute minimum, 1 = compute maximum)``
-
-
-Computes a scalar or vector variable, depending on which
-type was selected, whose value is the minimum or maximum at each location (node
-or element) of a scalar or vector variable over the interval from timestep1 to
-timestep2. Therefore, the resultant scalar or vector is independent of time. If
-the input variable is a vector, the maximum or minimum is the maximum or minimum
-of each component of the vector. Because any derived parts can vary in size over time,
-this function is only allowed on model parts. Model parts with changing
-connectivity are also not allowed.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - timestep1
- - constant number
- * - timestep2
- - constant number
-
-
-
-
-.. _TensorComponent:
-
------------------
-TensorComponent()
------------------
-
-
-**Tensor Component**
-
-``TensorComponent(any parts, tensor, tensor row(1-3), tensor
-col(1-3))``
-
-
-Creates a scalar variable that is the specified row and
-column of a tensor variable.
-
-:math:`S={T}_{ij}`
-
-where i = given row (1 to 3) and j = given column (1 to 3).
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - tensor row
- - constant number (1 to 3)
- * - tensor col
- - constant number (1 to 3)
-
-
-
-
-.. _TensorDeterminant:
-
--------------------
-TensorDeterminant()
--------------------
-
-
-**Tensor Determinant**
-
-``TensorDeterminant(any parts, Tensor or 3 Principals or 6
-Tensor Components)``
-
-
-Computes the determinant of a tensor variable, three
-principal scalar variables, or six tensor component scalar variables. This
-function requires either one or six entries beyond the parts, as indicated
-in the following examples.
-
-If computing from a tensor variable, a single tensor
-variable is needed.
-
-``TensorDeterminant(plist, Stress)``
-
-If computing from three principals, three scalar variables
-representing ``sigma_1``, ``sigma_2``, and ``sigma_3`` are needed. Additionally, you
-must enter a ``-1`` constant for the last three entries.
-
-``TensorDeterminant(plist, sigma_1, sigma_2, sigma_3, -1, -1, -1)``
-
-If computing from six tensor components, six scalar
-variables are needed. They must be the following variables in the
-order shown: ``t_11``, ``t_22``, ``t_33``, ``t_12``, ``t_13``,
-and ``t_23``.
-
-``TensorDeterminant(plist, t_11, t_22, t_33, t_12, t_13, t_23)``
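-
-As an illustrative pyensight sketch (assuming a connected ``session`` and a
-tensor variable named ``Stress``), the single-tensor form maps directly onto
-the native API::
-
-    session.ensight.part.select_all()
-    session.ensight.variables.evaluate("det = TensorDeterminant(plist,Stress)")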
-
-
-.. _TensorEigenvalue:
-
-------------------
-TensorEigenvalue()
-------------------
-
-
-**Tensor Eigenvalue**
-
-``TensorEigenvalue(any parts, tensor, which number(1-3))``
-
-
-Computes the eigenvalue of a tensor based on the number given (1-3).
-The first eigenvalue is always the largest, while the third eigenvalue
-is always the smallest.
-
-
-.. _TensorEigenvector:
-
--------------------
-TensorEigenvector()
--------------------
-
-
-**Tensor Eigenvector**
-
-``TensorEigenvector(any parts, tensor, which number(1-3))``
-
-
-Computes the eigenvector of a tensor based on the number given (1-3).
-The eigenvectors are ordered by their corresponding eigenvalues: the first
-eigenvalue is always the largest, while the third is always the smallest.
-
-
-.. _TensorMake:
-
-------------
-TensorMake()
-------------
-
-
-**Tensor Make**
-
-``TensorMake(any parts, T11, T22, T33, T12, T13, T23)``
-
-
-Creates a tensor from six scalars.
-
-
-.. _TensorMakeAsym:
-
-----------------
-TensorMakeAsym()
-----------------
-
-
-**Tensor Make Asymmetric**
-
-``TensorMakeAsym(any parts, T11,T12,T13, T21,T22,T23, T31,T32,T33)``
-
-
-Creates a tensor from nine scalars.
-
-
-.. _TensorTresca:
-
---------------
-TensorTresca()
---------------
-
-
-**Tensor Tresca**
-
-``TensorTresca(any parts, Tensor or 3 Principals or 6 Tensor Components)``
-
-
-Computes Tresca stress/strain from a tensor variable,
-three principal scalar variables, or six tensor component scalar variables. This
-function requires either one or six entries beyond the parts, as indicated
-in the following examples.
-
-If computing from a tensor variable, a single tensor
-variable is needed.
-
-``TensorTresca(plist, Stress)``
-
-If computing from three principals, three scalar variables
-representing ``sigma_1``, ``sigma_2``, and ``sigma_3`` are needed. Additionally, you
-must enter a ``-1`` constant for the last three entries.
-
-``TensorTresca(plist, sigma_1, sigma_2, sigma_3, -1, -1, -1)``
-
-If computing from six tensor components, six scalar
-variables are needed. They must be the following variables in the
-order shown: ``t_11``, ``t_22``, ``t_33``, ``t_12``, ``t_13``,
-and ``t_23``.
-
-``TensorTresca(plist, t_11, t_22, t_33, t_12, t_13, t_23)``
-
-The basic equation follows. If needed, the
-principal stresses/strains are first computed from the tensor or its
-components.
-
-:math:`{\sigma }_{yp}=\left|{\sigma }_{1}-{\sigma }_{3}\right|`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`{\sigma }_{yp}`
- - yield stress
- * - :math:`{\sigma }_{1}`
- - greatest principal stress/strain
- * - :math:`{\sigma }_{3}`
- - least principal stress/strain
-
-
-
-
-.. _TensorVonMises:
-
-----------------
-TensorVonMises()
-----------------
-
-
-**Tensor Von Mises**
-
-``TensorVonMises(any parts, Tensor or 3 Principals or 6 Tensor Components)``
-
-
-Computes Von Mises stress/strain from a tensor variable,
-three principal scalar variables, or six tensor component scalar variables. This
-function requires either one or six entries beyond the parts, as indicated
-in the following examples.
-
-If computing from a tensor variable, a single tensor
-variable is needed.
-
-``TensorVonMises(plist, Stress)``
-
-If computing from three principals, three scalar variables
-representing ``sigma_1``, ``sigma_2``, and ``sigma_3`` are needed. Additionally, you
-must enter a ``-1`` constant for the last three entries.
-
-``TensorVonMises(plist, sigma_1, sigma_2, sigma_3, -1, -1, -1)``
-
-If computing from six tensor components, six scalar
-variables are needed. They must be the following variables in the
-order shown: ``t_11``, ``t_22``, ``t_33``, ``t_12``, ``t_13``,
-and ``t_23``.
-
-``TensorVonMises(plist, t_11, t_22, t_33, t_12, t_13, t_23)``
-
-The basic equation follows. If needed, the
-principal stresses/strains are first computed from the tensor or its
-components.
-
-:math:`{\sigma }_{yp}=\sqrt{\frac{1}{2}\left({\left({\sigma }_{1}-{\sigma }_{2}\right)}^{2}+{\left({\sigma }_{2}-{\sigma }_{3}\right)}^{2}+{\left({\sigma }_{3}-{\sigma }_{1}\right)}^{2}\right)}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`{\sigma }_{yp}`
- - yield stress
- * - :math:`{\sigma }_{1}`
- - greatest principal stress/strain
- * - :math:`{\sigma }_{2}`
- - middle principal stress/strain
- * - :math:`{\sigma }_{3}`
- - least principal stress/strain
-
-
-.. _udmf_sum:
-
-----------
-udmf_sum()
-----------
-
-**udmf_sum**
-
-.. note::
- The :ref:`StatMoment() ` function has replaced the
-   ``udmf_sum`` function. Use ``StatMoment(plist,scalar,0)`` instead.
-
-
-
-.. _VectorCylProjection:
-
----------------------
-VectorCylProjection()
----------------------
-
-
-**Vector Cyl Projection**
-
-``VectorCylProjection(any parts, vector, frame, axis)``
-
-
-Computes a new vector variable by projecting a vector
-onto a cylindrical coordinate system. A coordinate frame is used as the basis
-for the system and can be frame 0 (the center for the global coordinate system)
-or any other defined frame in any arbitrary orientation.
-
-The axial direction is defined to be the frame's Z axis. The radial direction
-is a vector from the Z axis to the position being computed. The Theta direction
-is then Cross(Z,R). The resulting new vector variable is in the direction of
-the chosen axis (Z, R, or Theta) with a magnitude computed by the dot product
-of the vector variable against the direction vector.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - vector
- - model vector variable
- * - frame
- - frame number (0-based) with frame 0 being the
- global reference.
- * - axis
-     - radial (R), angular (Theta), or Axial (Frame Z
-       direction)
-
-
-
-.. _VectorRectProjection:
-
-----------------------
-VectorRectProjection()
-----------------------
-
-
-**Vector Rect Projection**
-
-``VectorRectProjection(any parts, vector, frame, axis)``
-
-
-Computes a new vector variable by projecting a vector
-onto a rectangular coordinate system. A coordinate frame is used for the new
-rectangular system and can be frame 0 (the center for the global coordinate
-system) or any other defined frame in any arbitrary orientation. The resulting new
-vector variable is in the direction of the chosen axis (X, Y, or Z) with a magnitude
-computed by the dot product of the vector variable against the direction vector.
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - vector
- - model vector variable
- * - frame
- - frame number (0-based) with frame 0 being the
- global reference.
- * - axis
- - X, Y, or Z frame direction
-
-
-
-.. _Velo:
-
-------
-Velo()
-------
-
-
-**Velocity**
-
-``Velo(any parts, momentum, density)``
-
-
-Computes a vector variable whose value is the velocity ``V``.
-This vector variable is defined as:
-
-:math:`V=\frac{m}{\rho }`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`\rho`
- - density
- * - :math:`m`
- - momentum
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - momentum
- - vector variable
- * - density
- - scalar, constant, or constant per part
- variable, or constant number
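-
-As an illustrative sketch (assuming a connected ``session`` plus variables
-named ``Momentum`` and ``Density``, both hypothetical names)::
-
-    session.ensight.part.select_all()
-    session.ensight.variables.evaluate("velocity = Velo(plist,Momentum,Density)")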
-
-
-
-.. _Vol:
-
------
-Vol()
------
-
-
-**Volume**
-
-``Vol(3D parts [,Compute_Per_part])``
-
-
-Computes a constant or constant per part variable whose
-value is the volume of 3D parts.
-
-.. note::
- This function uses the coordinates of the element to calculate the volume of
- each element. If you want to use displacement in the calculation of the
- volume, you must turn on computational (server-side) displacement, rather
- than visual only (client-side) displacement so that the displacement values
- are applied to the coordinates on the server prior to calculating each
- element size that is used to sum up the volume of the part.
-
-
-.. _Vort:
-
-------
-Vort()
-------
-
-
-**Vorticity**
-
-``Vort(any 2D or 3D parts, velocity)``
-
-
-Computes a vector variable that is the rotation of the
-flow in units of radians per second with components :math:`{\zeta }_{x},{\zeta }_{y},{\zeta }_{z}`.
-This vector variable is defined as:
-
-:math:`\begin{array}{ccc}{\zeta }_{x}=\frac{\partial w}{\partial y}-\frac{\partial v}{\partial z}& {\zeta }_{y}=\frac{\partial u}{\partial z}-\frac{\partial w}{\partial x}& {\zeta }_{z}=\frac{\partial v}{\partial x}-\frac{\partial u}{\partial y}\end{array}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - u,v,w
- - velocity components in the X, Y, Z
- directions
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - velocity
- - vector variable
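-
-As an illustrative sketch (assuming a connected ``session`` and a vector
-variable named ``velocity``)::
-
-    session.ensight.part.select_all()
-    session.ensight.variables.evaluate("vorticity = Vort(plist,velocity)")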
-
-
-
-.. _VortGamma:
-
------------
-VortGamma()
------------
-
-
-**Vorticity Gamma**
-
-``VortGamma(2D part clips, velocity, gamma function number k (1 or 2), proximity radius, proximity option)``
-
-
-Computes a dimensionless scalar variable on a 2D clip
-part whose value is the vorticity-gamma
-function, :math:`{\Gamma }_{k}\left(P\right)`, defined at each node
-(or element centroid for cell centered data), ``P``. This scalar variable
-is defined as follows:
-
-:math:`{\Gamma }_{k}\left(P\right)=\frac{1}{S}{\displaystyle \int \mathrm{sin}\left({\Theta }_{M}\right)dS}=\frac{1}{S}{\displaystyle \underset{\left(M\in S\right)}{\int }\left(\frac{\left(\stackrel{\rightharpoonup }{PM}\times {\stackrel{\rightharpoonup }{V}}_{M}\right)·\widehat{n}}{\Vert \stackrel{\rightharpoonup }{PM}\Vert ·\Vert {\stackrel{\rightharpoonup }{V}}_{M}\Vert }\right)dS}`
-
-where:
-
-.. list-table::
- :widths: 30 70
-
- * - :math:`{\Gamma }_{1}`
- - Gamma function number k=1 is a (non-Galilean invariant)
- vortex center approximation method "...a dimensionless scalar,
-       with Γ1 bounded by 1. It can be shown that this bound is
-       reached at the location of the vortex centre if the vortex is
-       axis symmetrical. Therefore, this scalar function provides a way
-       to quantify the streamline topology of the flow in the vicinity
-       of P and the rotation sign of the vortex. ... Typically, near
-       the vortex centre, Γ1 reaches values ranging from 0.9 to 1.0."
- [ref.2, pp. 1424-1425].
-
- * - :math:`{\Gamma }_{2}`
- - Gamma function number k=2 is a (Galilean invariant) vortex
- boundary approximation method resulting in a dimensionless
- scalar, "... a local function depending only on W and μ,
- where W is the rotation rate corresponding to the antisymmetrical
- part of the velocity gradient at P and μ is the eigenvalue of the
-       symmetrical part of this tensor" [ref.2, 1425]. (See the
- note following the function arguments.)
-
- * - k
-     - Gamma function number 1 or 2 used to determine :math:`V_M`.
-
- * - P
- - Base node (or element centroid for per-element data) around
- which the proximity area (or zone of influence) is being
- considered.
-
- * - S
- - Proximity area (or zone of influence) surrounding P,
- determined by a proximity radius measured from the base P and
- the proximity option. The proximity option is used to determine
- which set of elements to include in S as follows.
-
- If the proximity option is 0, S includes all elements
- with any nodes within the proximity radius.
-
- If the proximity option is 1, S includes only elements with
- every node within the proximity radius. Both options also
- include all elements that contain P.
-
- * - M
- - Node (or element center) within S.
-
- * - PM
- - Vector from the base node P to M.
-
- * - V(P)
- - Velocity vector at P.
-
- * - V(M)
- - Velocity vector at each M.
-
-   * - :math:`V_M`
-     - If the gamma function number k = 1, :math:`V_M` = V(M). If the gamma
-       function number k = 2, :math:`V_M` = V(M) - V(P).
-
- * - n
- - Unit vector normal to the 2D plane parent clip
- part.
-
-   * - :math:`θ_M`
-     - Angle between :math:`V_M` and PM. Because -1 < sin(:math:`θ_M`) < 1
-       (and n is a unit vector), then
-       -1 < :math:`{\Gamma }_{k}\left(P\right)` < 1.
-
-
-.. list-table:: **Function arguments**
- :widths: 30 70
-
- * - velocity
- - vector variable
- * - gamma function number
-     - single integer (k=1 or k=2) that determines
-       which value of :math:`V_M` to use.
-
- A value of 1 is useful for finding vortex cores (centers).
-
- A value of 2 is useful for finding vortex boundaries.
-
- * - proximity radius
- - Float value greater than or equal to 0.0 that is used to
- determine the proximity area around each base node or element P over
- which the vorticity gamma is calculated on the 2D part clip.
-
-       The larger the proximity radius, the more
-       nodes (or elements) are used to calculate Γ and the slower the
-       calculation. A proximity radius less than or equal to 0.0
-       always uses a proximity area of only elements that contain P and is
-       the lower bound of this parameter, resulting in the smallest
-       proximity area around P (and the fastest calculation). A radius of
-       0.0 is a good value for the first run.
-
-       As the proximity radius approaches the parent plane size,
-       this calculation approaches using every node (or element) in
-       the calculation for each node (or element), resulting in an
-       :math:`{n}^{2}` operation whose solution may be measured in
-       calendar time rather than wristwatch time.
-
- The radius should be large enough to sample
- sufficient elements for a meaningful average, but small enough so
- that the vortex result remains a local calculation reported at each
- element. Again, a radius of 0.0 is a good value for the first run.
- A radius with a small scaling of the element size is a good
- second run.
-
- * - proximity option
- - 0 to include all cells with any nodes in the proximity area.
-
- 1 to include only cells entirely located in the proximity area.
-
- Use this option along with the radius to control the
- number of nodes (or elements) used in the calculation for each node
- (or element) P.
-
- Consider using option 0 as the radius gets small
- relative to element size and using option 1 as the radius is enlarged.
-       At a minimum, the proximity area always includes elements that
-       contain P.
-
-
-.. note::
-
-   Recall that ω is the rotation rate for the antisymmetrical part of
-   the velocity gradient and that μ is the eigenvalue of the symmetric part
-   of the tensor. The local character of the flow may be classified for
-   Γ2 in the following manner (based on figure 4 in [ref.2, 1425], which
-   plots Γ2 as a function of the ratio ω/μ):
-
-   ω/μ < 1: flow locally dominated by strain, Γ2 < 2/π
-
-   ω/μ = 1: pure shear, Γ2 = 2/π
-
-   ω/μ > 1: flow locally dominated by rotation, Γ2 > 2/π
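-
-Following the guidance above, an illustrative first-run sketch (assuming a
-connected ``session``, a selected 2D clip part, and a vector variable named
-``velocity``) might use k=1, a proximity radius of 0.0, and proximity
-option 0::
-
-    session.ensight.variables.evaluate("vgamma = VortGamma(plist,velocity,1,0.0,0)")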
-
-
-**References**
-
-For more information, see these references:
-
-1. Jeong, J. and Hussain, F., "On the identification
- of a vortex," Journal of Fluid Mechanics, 1995, vol. 285,
- pp. 69-94.
-2. Laurent Graftieaux, Marc Michard, & Nathalie
- Grosjean "Combining PIV, POD and vortex identification
- algorithms for the study of unsteady turbulent swirling
- flows", Institute Of Physics Publishing Ltd in UK,
- Measurement Science & Technology, 12 (2001), pp. 1422-1429.
-3. PSA via Distene (personal communication).
-
+.. vale off
+
+.. _calculator_functions:
+
+====================
+Calculator functions
+====================
+
+EnSight supports a large number of calculator functions.
+An :class:`ENS_VAR` object can represent
+either fields read from disk or the use of a calculator function
+to compute a new variable from an input collection of parts, variables, and
+user-specified parameters.
+
+You can use the native API function :func:`pyensight.ensight_api.variables.evaluate`
+and the object API function :func:`pyensight.ens_globals.ENS_GLOBALS.create_variable`
+to create new EnSight variables that leverage these calculator functions.
+These functions use a string representation of the function to create the
+new variable. Most calculator functions take a partlist (``plist``) as
+a parameter. In the native API, you use the ``plist`` string to refer to the currently
+selected parts. In the object API, you use the same ``plist`` text string, but you
+can specify the selection of parts to be used directly via the ``sources`` keyword.
+
+This code provides some examples::
+
+ # Create a variable named 'newvar1' using the :ref:`CmplxTransResp `
+ # calculator function and the input variable 'c_scalar'. The variable is to be defined on
+ # all of the current parts.
+ session.ensight.part.select_all()
+ session.ensight.variables.evaluate("newvar1 = CmplxTransResp(plist,c_scalar,90.0)")
+ # Create a variable named 'newvar2' using the :ref:`EleSize `
+ # calculator function. The variable is to be defined on all of the current parts.
+ varobj = session.ensight.objs.core.create_variable("newvar2", "EleSize(plist)",
+ sources=session.ensight.objs.core.PARTS)
+
+
+.. admonition:: Per-part constants
+
+ Some calculator functions (such as :ref:`Area() `) return constant values. EnSight
+ supports constant values that are per-case and per-part. For example, if the :ref:`Area() `
+ function is computed as a per-case constant, the value is the sum of the area values computed
+ part by part. If the :ref:`Area() ` function is computed as a per-part constant, the individual
+ values for each part are stored on each part.
+
+ All constant values are computed as per-case by default. For per-part computation, you
+ must add an optional additional argument to the function. For example, ``Area(plist)``
+ results in a per-case constant by default. ``Area(plist,Compute_Per_case)`` is also computed
+ as per-case explicitly. ``Area(plist,Compute_Per_part)`` results in the variable being
+ computed as per-part.
+
+   Not all calculator functions support this. For those that do, the notation ``[,Compute_Per_part]``
+ appears in the documentation. For an example, see: :ref:`Area() `.
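+
+The difference can be seen directly in the native API (an illustrative
+sketch, assuming a connected ``session`` with parts selected)::
+
+    # One value per case: the sum of the per-part areas
+    session.ensight.variables.evaluate("area_case = Area(plist)")
+    # One value stored on each selected part
+    session.ensight.variables.evaluate("area_part = Area(plist,Compute_Per_part)")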
+
+
+.. _Area:
+
+------
+Area()
+------
+
+**Area**
+
+``Area(any part or parts [, Compute_Per_part])``
+
+Computes a constant or constant per part variable whose
+value is the area of the selected parts. If a part is composed of 3D elements,
+the area is of the border representation of the part. The area of 1D elements is
+zero.
+
+
+.. _BL_aGradOfVelMag:
+
+------------------
+BL_aGradOfVelMag()
+------------------
+
+**Boundary Layer: A Gradient of Velocity Magnitude**
+
+``BL_aGradOfVelMag(boundary part or parts, velocity)``
+
+Computes a vector variable that is the gradient of the
+magnitude of the specified velocity variable on the selected boundary part
+or parts. The vector variable is defined as:
+
+:math:`GRA{D}_{BP}\left|V\right|={\nabla }_{BP}\left|V\right|=\frac{\partial V}{\partial x}\widehat{i}+\frac{\partial V}{\partial y}\widehat{j}+\frac{\partial V}{\partial z}\widehat{k}`
+
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`BP`
+ - on boundary part
+ * - :math:`V=V\left(x,y,z\right)`
+ - velocity vector
+ * - :math:`\left|V\right|`
+ - magnitude of velocity vector = :math:`\sqrt{V·V}`
+ * - x, y, z
+ - coordinate directions
+ * - i, j, k
+ - unit vectors in coordinate directions
+
+
+.. note::
+ For each boundary part, this function finds its corresponding field part
+   magnitude on the field part (``Grad(pfield,velocity)``), and
+ magnitude on the field part (``Grad(pfield,velocity``), and
+ then maps these computed values onto the boundary part.
+
+ Node or element IDs are used if they exist. Otherwise, the coordinate
+ values between the field part and boundary part are mapped and resolved via
+ a floating-point hashing scheme.
+
+ This velocity-magnitude gradient variable can be used as an argument for
+ the following boundary-layer functions that require this variable.
+
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - *Boundary part*
+ - 2D part
+ * - *Velocity*
+ - vector variable
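+
+Because several ``BL_*`` functions accept this gradient through their ``grad``
+argument, it can be convenient to compute it once and reuse it (an
+illustrative sketch, assuming a connected ``session``, a selected 2D boundary
+part, and a vector variable named ``velocity``)::
+
+    session.ensight.variables.evaluate("gradvmag = BL_aGradOfVelMag(plist,velocity)")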
+
+
+.. _BL_CfEdge:
+
+-----------
+BL_CfEdge()
+-----------
+
+
+**Boundary Layer: Edge Skin-Friction Coefficient**
+
+``BL_CfEdge(boundary part or parts, velocity, density, viscosity, ymax, flow comp(0,1,or2), grad)``
+
+
+Computes a scalar variable that is the edge skin-friction
+coefficient :math:`{C}_{f\left(e\right)}` (that is, using the density :math:`{\rho }_{e}` and velocity :math:`{U}_{e}` values at the edge of the boundary layer, not
+the free-stream density :math:`{\rho }_{\infty }` and velocity :math:`{U}_{\infty }` values).
+This scalar variable is defined as:
+
+Component: 0 = Total tangential-flow (parallel) to wall:
+
+:math:`{C}_{f\left(e\right)}=2{\tau }_{w}/\left({\rho }_{e}{U}_{e}^{2}\right)`
+
+Component: 1 = Stream-wise (flow) component tangent (parallel) to wall:
+
+:math:`{C}_{fs\left(e\right)}=2{\tau }_{ws}/\left({\rho }_{e}{U}_{e}^{2}\right)`
+
+Component: 2 = Cross-flow component tangent (parallel) to wall:
+
+:math:`{C}_{fc\left(e\right)}=2{\tau }_{wc}/\left({\rho }_{e}{U}_{e}^{2}\right)`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`{\tau }_{w}`
+ - fluid shear stress magnitude at the boundary :math:`=\mu {\left(\partial u/\partial n\right)}_{n=0}=\sqrt{\left({\tau }_{ws}^{2}+{\tau }_{wc}^{2}\right)}`
+ * - :math:`{\tau }_{ws}=\mu {\left(\partial {u}_{s}/\partial n\right)}_{n=0}`
+ - stream-wise component of :math:`{\tau }_{w}`
+ * - :math:`{\tau }_{wc}=\mu {\left(\partial {u}_{c}/\partial n\right)}_{n=0}`
+ - cross-flow component of :math:`{\tau }_{w}`
+ * - :math:`\mu`
+ - dynamic viscosity of the fluid at the wall
+ * - :math:`{\left(\partial u/\partial n\right)}_{n=0}`
+ - magnitude of the velocity-magnitude gradient in the normal
+ direction at the wall
+ * - :math:`{\left(\partial {u}_{s}/\partial n\right)}_{n=0}`
+ - stream-wise component of the velocity-magnitude gradient in
+ the normal direction at the wall
+ * - :math:`{\left(\partial {u}_{c}/\partial n\right)}_{n=0}`
+ - cross-flow component of the velocity-magnitude gradient in
+ the normal direction at the wall
+ * - :math:`{\rho }_{e}`
+ - density at the edge of the boundary layer
+ * - :math:`{U}_{e}`
+ - velocity at the edge of the boundary layer
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 2D part
+ * - velocity
+ - vector variable
+ * - density
+ - scalar variable (compressible flow), constant number (incompressible flow)
+ * - viscosity
+ - scalar variable, constant variable, or constant number
+ * - ymax
+ - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
+
+ See the algorithm note under :ref:`Boundary Layer Thickness `.
+
+ * - flow comp
+     - constant number (0 = tangent flow parallel to surface, 1 = stream-wise component
+       tangent (parallel) to wall, 2 = cross-flow component tangent (parallel) to wall)
+ * - grad
+ - -1 = flags the computing of the velocity-magnitude gradient via three-point interpolation
+
+ vector variable = Grad(velocity magnitude)
+
+
+This scalar variable provides a measure of the skin-friction coefficient in the
+tangent (parallel-to-surface) direction and in its tangent's respective
+stream-wise and cross-flow directions, respective to the decomposed velocity
+parallel to the surface at the edge of the boundary layer.
+
+This is a non-dimensional measure of the fluid shear
+stress at the surface based on the local density and velocity at the edge of the
+boundary layer. The following figure illustrates the derivations of the computed
+*edge*-related velocity values: :math:`{U}_{e}`, :math:`{u}_{s}`,
+and :math:`{u}_{c}`.
+
+.. image:: /_static/UM-C7-12.png
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. _BL_CfWall:
+
+-----------
+BL_CfWall()
+-----------
+
+
+**Boundary Layer: Wall Skin-Friction Coefficient**
+
+``BL_CfWall(boundary parts, velocity, viscosity, free density, free velocity, grad)``
+
+
+Computes a scalar variable that is the skin-friction
+coefficient :math:`{C}_{f\left(\infty \right)}`. This scalar variable
+is defined as:
+
+:math:`{C}_{f}{}_{\left(\infty \right)}=\frac{{\tau }_{w}}{0.5{\rho }_{\infty }{\left({U}_{\infty }\right)}^{2}}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`{\tau }_{w}={\mu }_{w}{\left(\frac{\partial u}{\partial n}\right)}_{n=0}`
+ - fluid shear stress at the wall
+ * - :math:`{\mu }_{w}`
+ - dynamic viscosity of the fluid at the wall
+
+       (may be a spatially and/or temporally varying quantity, usually a constant)
+
+ * - :math:`n`
+ - distance profiled normal to the wall
+ * - :math:`{\rho }_{\infty }`
+ - freestream density
+ * - :math:`{U}_{\infty }`
+ - freestream velocity magnitude
+ * - :math:`{\left(\frac{\partial u}{\partial n}\right)}_{n=0}`
+     - tangent (parallel to surface) component of
+       the velocity-magnitude gradient in the normal direction
+       at the wall
+
+
+This is a non-dimensional measure of the fluid shear
+stress at the surface. An important aspect of the skin friction coefficient
+is that :math:`{C}_{f\left(\infty \right)}=0`, which indicates boundary layer separation.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 2D part
+ * - velocity
+ - vector variable
+ * - viscosity
+ - scalar variable, constant variable, or constant number
+ * - free density
+ - constant number
+ * - free velocity
+ - constant number
+ * - grad
+ - -1 flags the computing of the velocity-magnitude gradient via three-point interpolation
+
+ vector variable = Grad(velocity magnitude)
+
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. _BL_CfWallCmp:
+
+--------------
+BL_CfWallCmp()
+--------------
+
+**Boundary Layer: Wall Skin-Friction Coefficient Components**
+
+``BL_CfWallCmp(boundary parts, velocity, viscosity,
+free-stream density, free-stream velocity-mag., ymax, flow comp(1or2),
+grad)``
+
+
+Computes a scalar variable that is a component of the
+skin friction coefficient :math:`{C}_{f}` tangent (or parallel) to the wall, either in the
+stream-wise :math:`{C}_{fs(·)}` or in the cross-flow :math:`{C}_{fc(·)}` direction. This
+scalar variable is defined as:
+
+Component 1 = Stream-wise (flow) component tangent (parallel) to wall:
+
+:math:`{C}_{fs\left(\infty \right)}=2{\tau }_{ws}/\left({\rho }_{\infty }{U}_{\infty }^{2}\right)`
+
+Component 2 = Cross-flow component tangent (parallel) to wall:
+
+:math:`{C}_{fc\left(\infty \right)}=2{\tau }_{wc}/\left({\rho }_{\infty }{U}_{\infty }^{2}\right)`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`{\tau }_{ws}=\mu {\left(\partial {u}_{s}/\partial n\right)}_{n=0}`
+ - stream-wise component of :math:`{\tau }_{w}`
+ * - :math:`{\tau }_{wc}=\mu {\left(\partial {u}_{c}/\partial n\right)}_{n=0}`
+ - cross-flow component of :math:`{\tau }_{w}`
+ * - :math:`{\tau }_{w}`
+ - fluid shear stress magnitude at the wall :math:`=\mu {\left(\partial u/\partial n\right)}_{n=0}=\sqrt{\left({\tau }_{ws}^{2}+{\tau }_{wc}^{2}\right)}`
+ * - :math:`\mu`
+ - dynamic viscosity of the fluid at the wall
+ * - :math:`{\left(\partial {u}_{s}/\partial n\right)}_{n=0}`
+ - stream-wise component of the velocity-magnitude gradient in the normal direction at the wall
+ * - :math:`{\left(\partial {u}_{c}/\partial n\right)}_{n=0}`
+ - cross-flow component of the velocity-magnitude gradient in the normal direction at the wall
+   * - :math:`{\rho }_{\infty }`
+     - freestream density
+   * - :math:`{U}_{\infty }`
+     - freestream velocity magnitude
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 2D part
+ * - velocity
+ - vector variable
+ * - viscosity
+ - scalar variable, constant variable, or constant number
+ * - density
+ - scalar variable (compressible flow) or constant number (incompressible flow)
+ * - velocity mag
+ - constant variable or constant number
+ * - ymax
+ - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
+
+ See the algorithm note under :ref:`Boundary Layer Thickness `.
+
+ * - flow comp
+     - constant number (1 = stream-wise component tangent (parallel) to wall, 2 = cross-flow
+       component tangent (parallel) to wall)
+ * - grad
+ - -1 flags the computing of the
+ velocity-magnitude gradient via three-point interpolation
+
+ vector variable = Grad(velocity magnitude)
+
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. _BL_CfWallTau:
+
+--------------
+BL_CfWallTau()
+--------------
+
+**Boundary Layer: Wall Fluid Shear-Stress**
+
+``BL_CfWallTau(boundary parts, velocity, viscosity, ymax, flow comp(0,1,or 2), grad)``
+
+
+Computes a scalar variable that is the fluid
+shear-stress at the wall :math:`{\tau }_{w}` or in its stream-wise :math:`{\tau }_{ws}` or cross-flow :math:`{\tau }_{wc}`
+component direction. This scalar variable is defined as:
+
+Component 0 = Total fluid shear-stress magnitude at the wall:
+
+:math:`{\tau }_{w}=\mu {\left(\frac{\partial u}{\partial n}\right)}_{n=0}=\sqrt{\left({\tau }_{ws}^{2}+{\tau }_{wc}^{2}\right)}`
+
+Component 1 = Stream-wise component of the fluid shear-stress at the wall:
+
+:math:`{\tau }_{ws}=\mu {\left(\frac{\partial {u}_{s}}{\partial n}\right)}_{n=0}`
+
+Component 2 = Cross-flow component of the fluid shear-stress at the wall:
+
+:math:`{\tau }_{wc}=\mu {\left(\frac{\partial {u}_{c}}{\partial n}\right)}_{n=0}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`\mu`
+ - dynamic viscosity of the fluid at the wall
+
+ * - :math:`{\left(\frac{\partial u}{\partial n}\right)}_{n=0}`
+
+ - magnitude of the velocity-magnitude gradient in the normal direction at the wall
+
+ * - :math:`{\left(\frac{\partial {u}_{s}}{\partial n}\right)}_{n=0}`
+ - stream-wise component of the velocity-magnitude gradient in
+ the normal direction at the wall
+
+ * - :math:`{\left(\frac{\partial {u}_{c}}{\partial n}\right)}_{n=0}`
+ - cross-flow component of the velocity-magnitude gradient in
+ the normal direction at the wall
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 2D part
+ * - velocity
+ - vector variable
+ * - viscosity
+ - scalar variable, constant variable, or constant number
+ * - ymax
+ - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
+
+ See the algorithm note under :ref:`Boundary Layer Thickness `.
+
+ * - flow comp
+ - constant number (0 = RMS of the stream-wise and cross-flow components,
+ 1 = stream-wise component at the wall, 2 = cross-flow component at the wall)
+ * - grad
+ - -1 flags the computing of the velocity-magnitude gradient via three-point interpolation
+
+ vector variable = Grad(velocity magnitude)
+
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. _BL_DispThick:
+
+--------------
+BL_DispThick()
+--------------
+
+**Boundary Layer: Displacement Thickness**
+
+``BL_DispThick(boundary parts, velocity, density, ymax, flow comp(0,1,or 2), grad)``
+
+
+Computes a scalar variable that is the boundary layer
+displacement thickness :math:`{\delta }^{*}` , :math:`{\delta }_{s}^{*}` , or :math:`{\delta }_{c}^{*}` defined as:
+
+Component: 0 = Total tangential-flow parallel to the
+wall
+
+:math:`{\delta }_{tot}^{*}={\displaystyle {\int }_{0}^{\delta }\left(1-\frac{\rho u}{{\rho }_{e}{U}_{e}}\right)}dn`
+
+Component: 1 = Stream-wise flow component tangent (parallel)
+to the wall
+
+:math:`{\delta }_{s}^{*}={\displaystyle {\int }_{0}^{\delta }\left(1-\frac{\rho {u}_{s}}{{\rho }_{e}{U}_{e}}\right)}dn`
+
+Component: 2 = Cross-flow component tangent (parallel) to the
+wall
+
+:math:`{\delta }_{c}^{*}={\displaystyle {\int }_{0}^{\delta }\left(1-\frac{\rho {u}_{c}}{{\rho }_{e}{U}_{e}}\right)}dn`
+
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`n`
+ - distance profiled normal to the wall
+ * - :math:`\delta`
+ - boundary-layer thickness (distance to edge of boundary layer)
+ * - :math:`\rho`
+ - density at given profile location
+ * - :math:`{\rho }_{e}`
+ - density at the edge of the boundary layer
+ * - :math:`u`
+ - magnitude of the velocity component parallel
+ to the wall at a given profile location in the boundary layer
+ * - :math:`{u}_{s}`
+ - stream-wise component of the velocity magnitude parallel to the
+ wall at a given profile location in the boundary layer
+ * - :math:`{u}_{c}`
+ - cross-flow component of the velocity magnitude parallel to the
+ wall at a given profile location in the boundary layer
+ * - :math:`{U}_{e}`
+ - u at the edge of the boundary layer
+ * - :math:`{y}_{max}`
+ - distance from wall to freestream
+ * - comp
+ - flow direction option
+ * - grad
+ - flag for gradient of velocity magnitude
+
+
+This scalar variable provides a measure for the effect of the boundary layer
+on the **outside** flow. The boundary layer causes a
+displacement of the streamlines around the body.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 2D part
+ * - velocity
+ - vector variable
+ * - density
+ - scalar variable (compressible flow), constant number (incompressible flow)
+ * - :math:`{y}_{max}`
+ - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
+
+ See the algorithm note under :ref:`Boundary Layer Thickness `.
+
+ * - flow comp
+     - constant number (0 = total tangential flow direction parallel to wall,
+       1 = stream-wise flow component direction parallel to wall, 2 = cross-flow
+       component direction parallel to wall)
+
+ * - grad
+ - -1 flags the computing of the velocity-magnitude
+ gradient via four-point interpolation
+
+ vector variable = Grad(velocity magnitude)
+
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+.. _BL_DistToValue:
+
+----------------
+BL_DistToValue()
+----------------
+
+**Boundary Layer: Distance to Value from Wall**
+
+``BL_DistToValue(boundary parts, scalar, scalar value)``
+
+
+Computes a scalar variable that is the distance
+:math:`d` from the wall to the specified value. This scalar variable is
+defined as:
+
+:math:`d={n|}_{f\left(\alpha \right)=c}`
+
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`n`
+     - distance profiled normal to the boundary surface
+ * - :math:`f\left(\alpha \right)`
+ - scalar field (variable)
+ * - :math:`\alpha`
+ - scalar field values
+ * - :math:`c`
+ - scalar value at which to assign d
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 0D, 1D, or 2D part
+ * - scalar
+ - scalar variable
+ * - scalar value
+ - constant number or constant variable
+
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. _BL_MomeThick:
+
+--------------
+BL_MomeThick()
+--------------
+
+
+**Boundary Layer: Momentum Thickness**
+
+``BL_MomeThick(boundary parts, velocity, density, ymax, flow compi(0,1,or2), flow compj(0,1,or2), grad)``
+
+
+Computes a scalar variable that is the boundary-layer
+momentum thickness :math:`{\theta }_{tot}` , :math:`{\theta }_{ss}` , :math:`{\theta }_{sc}` , :math:`{\theta }_{cs}` , or :math:`{\theta }_{cc}`.
+This scalar variable is defined as:
+
+Components: (0,0) = Total tangential-flow parallel to the
+wall
+
+:math:`{\theta }_{tot}=\frac{1}{{\rho }_{e}{U}_{e}^{2}}{\displaystyle {\int }_{0}^{\delta }\left({U}_{e}-u\right)}\rho udn`
+
+Components: (1,1) = stream-wise, stream-wise component
+
+:math:`{\theta }_{ss}=\frac{1}{{\rho }_{e}{U}_{e}^{2}}{\displaystyle {\int }_{0}^{\delta }\left({U}_{e}-{u}_{s}\right)}\rho {u}_{s}dn`
+
+Components: (1,2) = Stream-wise, cross-flow component
+
+:math:`{\theta }_{sc}=\frac{1}{{\rho }_{e}{U}_{e}^{2}}{\displaystyle {\int }_{0}^{\delta }\left({U}_{e}-{u}_{s}\right)}\rho {u}_{c}dn`
+
+Components: (2,1) = cross-flow, stream-wise component
+
+:math:`{\theta }_{cs}=\frac{-1}{{\rho }_{e}{U}_{e}^{2}}{\displaystyle {\int }_{0}^{\delta }\rho {u}_{c}{u}_{s}}dn`
+
+Components: (2,2) = cross-flow, cross-flow component
+
+:math:`{\theta }_{cc}=\frac{-1}{{\rho }_{e}{U}_{e}^{2}}{\displaystyle {\int }_{0}^{\delta }\rho {u}_{{}_{c}}^{2}}dn`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`n`
+ - distance profiled normal to the wall
+ * - :math:`\delta`
+ - boundary-layer thickness (or distance to edge
+ of boundary layer)
+ * - :math:`\rho`
+ - density at given profile location
+ * - :math:`{\rho }_{e}`
+ - density at the edge of the boundary layer
+ * - :math:`u`
+ - magnitude of the velocity component parallel
+ to the wall at a given profile location in the boundary layer
+ * - :math:`{u}_{s}`
+ - stream-wise component of the velocity magnitude parallel to
+ the wall at a given profile location in the boundary layer
+ * - :math:`{u}_{c}`
+ - cross-flow component of the velocity magnitude parallel to
+ the wall at a given profile location in the boundary layer
+ * - :math:`{U}_{e}`
+ - u at the edge of the boundary layer
+ * - :math:`{y}_{max}`
+ - distance from wall to freestream
+ * - :math:`com{p}_{i}`
+ - first flow direction option
+ * - :math:`com{p}_{j}`
+ - second flow direction option
+ * - grad
+ - flag for gradient of velocity magnitude
+
+
+This scalar variable relates to the momentum loss in the boundary layer.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 2D part
+ * - velocity
+ - vector variable
+ * - density
+ - scalar variable (compressible flow), constant number (incompressible flow)
+ * - ymax
+ - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
+
+ See the algorithm note under :ref:`Boundary Layer Thickness `.
+
+ * - compi
+ - constant number (0 = total tangential flow direction parallel to wall,
+ 1 = stream-wise flow component direction parallel to wall, 2 = cross-flow
+ component direction parallel to wall)
+ * - compj
+     - constant number (0 = total tangential flow direction parallel to wall,
+       1 = stream-wise flow component direction parallel to wall, 2 = cross-flow
+       component direction parallel to wall)
+ * - grad
+ - -1 flags the computing of the
+ velocity-magnitude gradient via four-point interpolation
+
+ vector variable = Grad(velocity magnitude)
+
+       See :ref:`BL_aGradOfVelMag `.
+
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. _BL_Scalar:
+
+-----------
+BL_Scalar()
+-----------
+
+
+**Boundary Layer: Scalar**
+
+``BL_Scalar(boundary parts, velocity, scalar, ymax, grad)``
+
+
+Computes a scalar variable that is the scalar value of
+the corresponding scalar field at the edge of the boundary layer. The function
+extracts the scalar value while computing the boundary-layer
+thickness. (See :ref:`Boundary Layer: Thickness`.)
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 2D part
+ * - velocity
+ - vector variable
+ * - scalar
+ - scalar variable
+ * - ymax
+ - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
+
+ See the algorithm note under :ref:`Boundary Layer Thickness `.
+
+ * - grad
+ - -1 flags the computing of the
+ velocity-magnitude gradient via four-point interpolation
+
+ vector variable = Grad(velocity magnitude)
+
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. _BL_RecoveryThick:
+
+------------------
+BL_RecoveryThick()
+------------------
+
+
+**Boundary Layer: Recovery Thickness**
+
+``BL_RecoveryThick(boundary parts, velocity, total pressure, ymax, grad)``
+
+
+Computes a scalar variable that is the boundary-layer
+recovery thickness :math:`{\delta }_{rec}`. This scalar variable is defined as:
+
+:math:`{\delta }_{rec}={\displaystyle {\int }_{0}^{\delta }\left(1-\frac{{p}_{t}}{{p}_{te}}\right)}dn`
+
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`n`
+ - distance profiled normal to the wall
+ * - :math:`\delta`
+ - boundary-layer thickness (distance to edge of boundary layer)
+
+ * - :math:`{p}_{t}`
+ - total pressure at given profile location
+
+ * - :math:`{p}_{te}`
+ - pt at the edge of the boundary layer
+ * - ymax
+ - distance from wall to freestream
+ * - grad
+ - flag for gradient of velocity magnitude option
+
+
+This quantity does not appear in any physical
+conservation equations, but is sometimes used in the evaluation of inlet flows.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 2D part
+ * - velocity
+ - vector variable
+ * - total pressure
+ - scalar variable
+ * - ymax
+ - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
+
+ See the algorithm note under :ref:`Boundary Layer Thickness `.
+
+ * - grad
+ - -1 flags the computing of the
+ velocity-magnitude gradient via four-point interpolation.
+
+ vector variable = Grad(velocity magnitude)
+
+       See :ref:`BL_aGradOfVelMag `.
+
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. _BL_Shape:
+
+----------
+BL_Shape()
+----------
+
+
+**Boundary Layer: Shape Parameter**
+
+``BL_Shape()`` is not explicitly listed as a general function, but it can
+be computed as a scalar variable via the calculator by
+dividing a displacement thickness by a momentum thickness:
+
+:math:`H=\frac{{\delta }^{*}}{\theta }`
+
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`{\delta }^{*}`
+ - boundary-layer displacement thickness
+ * - :math:`\theta`
+ - boundary-layer momentum thickness
+
+
+This scalar variable is used to characterize boundary-layer flows, especially to
+indicate potential for separation. This variable increases as a
+separation point is approached, and it varies rapidly near a separation
+point.
+
+.. note::
+ Separation has not been observed for H < 1.8, but it definitely
+ has been observed for H = 2.6. Thus, separation is considered
+ in some analytical methods to occur in turbulent boundary layers for H = 2.0.
+
+   In a Blasius laminar layer (that is, flat-plate boundary
+ layer growth with zero pressure gradient), H = 2.605. In a turbulent boundary layer,
+ H ~= 1.4 to 1.5, and with extreme variations, H ~= 1.2 to 2.5.
+
+
+
+.. _BL_Thick:
+
+----------
+BL_Thick()
+----------
+
+
+**Boundary Layer: Thickness**
+
+``BL_Thick(boundary parts, velocity, ymax, grad)``
+
+
+Computes a scalar variable that is the boundary-layer
+thickness :math:`\delta`. This scalar variable is defined as:
+
+:math:`\delta ={n|}_{u/U=0.995}`
+
+That is, the distance normal to the surface at which :math:`u/U=0.995`.
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`u`
+ - magnitude of the velocity component parallel
+ to the wall at a given location in the boundary layer
+ * - :math:`U`
+ - magnitude of the velocity just outside the boundary layer
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 2D part
+ * - velocity
+ - vector variable
+ * - ymax
+ - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
+
+ See the algorithm note that follows.
+
+ * - grad
+ - -1 = flags the computing of the
+ velocity-magnitude gradient via three-point interpolation
+
+       vector variable = Grad(velocity magnitude). See :ref:`BL_aGradOfVelMag `.
+
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. admonition:: Algorithm: Boundary Layer Thickness
+
+ The ``ymax`` argument allows the edge of the boundary layer to be approximated by two
+ different algorithms: the Baldwin-Lomax-Spalart algorithm and the convergence algorithm.
+ Both algorithms profile velocity data normal to the boundary surface (wall).
+ Specifying ``ymax > 0`` leverages results from both the Baldwin-Lomax and vorticity
+ functions over the entire profile to produce a fading function that approximates the edge
+ of the boundary layer, whereas specifying ``ymax = 0`` uses velocity and
+ velocity gradient differences to converge to the edge of the boundary
+ layer.
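+
+An illustrative sketch using the convergence algorithm (assuming a connected
+``session``, a selected 2D boundary part, and a vector variable named
+``velocity``; ``ymax = 0`` selects the convergence algorithm and ``grad = -1``
+requests the interpolated velocity-magnitude gradient, as described above)::
+
+    session.ensight.variables.evaluate("bl_delta = BL_Thick(plist,velocity,0,-1)")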
+
+**References**
+
+For more information, see these references:
+
+1. P.M. Gerhart, R.J. Gross, & J.I. Hochstein, Fundamentals
+ of Fluid Mechanics, second Ed.,(Addison-Wesley: New York, 1992)
+2. P. Spalart, A Reasonable Method to Compute Boundary-Layer
+ Parameters from Navier-Stokes Results, (Unpublished: Boeing, 1992)
+3. H. Schlichting & K. Gersten, Boundary Layer Theory, eighth
+ Ed., (Springer-Verlag: Berlin, 2003)
+
+
+
+.. _BL_VelocityAtEdge:
+
+-------------------
+BL_VelocityAtEdge()
+-------------------
+
+
+**Boundary Layer: Velocity at Edge**
+
+``BL_VelocityAtEdge(boundary parts, velocity, ymax, comp(0,1,2), grad)``
+
+Extracts a vector variable that is a velocity vector
+:math:`{V}_{e}`, :math:`{V}_{p}`, or :math:`{V}_{n}`. This vector variable is defined as:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`{V}_{e}`
+ - :math:`{V}_{e}\left(x,y,z\right)` = velocity vector at the edge of the boundary
+ layer :math:`\delta`
+
+ * - :math:`{V}_{n}`
+
+ - :math:`Dot\left({V}_{e},N\right)` = decomposed velocity vector normal to
+ the wall at the edge of the boundary layer :math:`\delta`
+
+ * - :math:`{V}_{p}`
+     - :math:`{V}_{e}-{V}_{n}` = decomposed velocity
+ vector parallel to the wall at the edge of the boundary layer :math:`\delta`
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 2D part
+ * - velocity
+ - vector variable
+ * - ymax
+ - constant number (> 0 = Baldwin-Lomax-Spalart algorithm, 0 = convergence algorithm)
+
+ See the algorithm note under :ref:`Boundary Layer Thickness `.
+
+ * - comp
+ - constant number (0 = velocity vector at edge of boundary layer, 1 = decomposed
+ velocity vector parallel to wall tangent to surface, 2 = decomposed velocity
+ vector normal to wall)
+ * - grad
+ - -1 flags the computing of the
+ velocity-magnitude gradient via four-point interpolation
+
+       vector variable = Grad(velocity magnitude). See :ref:`BL_aGradOfVelMag `.
+
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. _BL_Y1Plus:
+
+-----------
+BL_Y1Plus()
+-----------
+
+
+**Boundary Layer: Y1Plus off Wall**
+
+``BL_Y1Plus(boundary parts, density, viscosity, grad option, vector variable)``
+
+
+Computes a scalar variable that is the coefficient :math:`{y}_{1}^{+}` off the
+wall to the first field cell centroid. This scalar variable is defined as:
+
+:math:`{y}_{1}^{+}=\frac{{y}_{1}{\rho }_{w}}{{\mu }_{w}}\sqrt{\frac{{\tau }_{w}}{{\rho }_{w}}}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`n`
+ - distance profiled normal to the wall
+ * - :math:`{\tau }_{w}`
+ - :math:`={\mu }_{w}{\left(\frac{\partial u}{\partial n}\right)}_{n=0}`
+ = fluid shear stress at the wall
+ * - :math:`{\mu }_{w}`
+ - dynamic viscosity of fluid at the wall (may be a
+ spatially and/or temporally varying quantity and is usually a constant)
+ * - :math:`{\rho }_{w}`
+ - density at the wall
+ * - :math:`{y}_{1}`
+ - distance from first field element centroid to
+ outer face, profiled normal to wall
+ * - :math:`u`
+ - fluid velocity vector
+
+
+Normally :math:`{y}^{+}` is used to estimate or confirm the required first grid spacing
+for proper capturing of viscous-layer properties. The values are dependent on
+various factors, including what variables at the wall are sought, the turbulent
+models used, and whether the law of the wall is used. For correct interpretation of
+the values for your application, consult a boundary-layer text.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 2D (wall or surface) part
+ * - density
+ - scalar variable
+ * - viscosity
+ - scalar variable, constant variable, or constant number
+ * - gradient option
+ - One of three values (1 = Use field velocity (used to calculate wall gradient),
+ 2 = Use gradient at boundary part (wall or surface), 3 = Use gradient in
+ corresponding field part)
+ * - vector variable
+     - One of three values depending on the gradient option (1 = Use field velocity = velocity vector,
+       2 = Use gradient at boundary = gradient variable on 2D boundary (wall or surface) part, 3 =
+       Use gradient in field = gradient variable defined in 3D field part; or it
+       could be the gradient calculated using Grad(velocity magnitude), that is
+       :ref:`BL_aGradOfVelMag `).
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. _BL_Y1PlusDist:
+
+---------------
+BL_Y1PlusDist()
+---------------
+
+**Boundary Layer: Distance off Wall**
+
+``BL_Y1PlusDist(boundary parts, velocity)``
+
+
+Computes a scalar variable that is the off-the-wall distance
+:math:`{y}_{1}`, the distance from the wall to the first field cell
+centroid. The velocity variable is only used to determine whether the variable
+is nodal or elemental to maintain consistency with the previous :math:`{y}_{1}^{+}`
+calculation.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - boundary part
+ - 2D part
+ * - velocity
+ - vector variable
+
+
+.. note::
+ Boundary layer (``BL_*``) functions are not supported for
+ Server of Server (SoS) decomposition.
+
+
+.. _CaseMap:
+
+---------
+CaseMap()
+---------
+
+
+**Case Map**
+
+``CaseMap(2D or 3D parts, case to map from, scalar/vector/tensor, parts to map from, search option flag)``
+
+
+For all locations on the selected parts, this function
+finds the specified variable value (scalar, vector, or tensor) from
+the *case to map from* using a variety of user-specified
+search options.
+
+- If the variable in the *case to map from* is located at the nodes, the
+ case-mapped variable is defined on the nodes of the selected parts.
+- If the variable is located at the elements, the case-mapped variable is
+ defined at the elements of the selected parts.
+
+The idea is to map onto the selected parts a variable from another case,
+usually for comparison purposes. It does this by taking the
+location of the nodes or centroid of the elements and looking at the other case
+to see if the variable in question is defined at that location in the field. If
+so, the value is mapped to the part's nodes or elements. This algorithm can
+be fairly expensive, so there are options to guide the search for a
+matching variable location.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - case to map from
+ - constant number
+ * - scalar/vector/tensor
+ - scalar, vector, or tensor variable
+ * - search option
+     - If the mapping search is successful, the exact value found is always
+       assigned. If the search is not successful because
+       there is not an exact match of node or element location, the
+       following occurs:
+
+ If the search option is set to *search only* (0), an undefined value
+ is assigned.
+
+ If the search option is set to *nearest value* (1), the defined variable
+ value at the closest node or element is assigned (no undefined values).
+ This option takes time to search the *from case* according to
+ the following *parts to map from* selection.
+
+ * - parts to map from
+ - The values for a location must be found by
+ searching the geometry in the *case to map from*. By setting this
+ option, you can hint to EnSight where in the geometry it should
+ search, which can vastly improve performance.
+
+ *Global search* (0) - This is the legacy scheme. It
+ performs a methodical but uninformed search of the 3D,
+ then 2D, then 1D, and then even 0D (point) elements to find the first
+ defined variable value. This works well for mapping onto a 3D or 2D
+ that is completely enclosed in a 3D *from* volume. It works poorly
+ if the 2D is not fully enclosed (such as on
+ the edge of a 3D part) or if you want to map a 2D onto a 2D part and
+ other 3D parts exist.
+
+ *Dimensionality match* (1) - Only parts of the same
+ dimension in the from and to are searched. For example, only 3D
+ *from* parts are used to map onto a 3D
+       selected part. This is the option that you should use most
+ often.
+
+ *Part number match* (2) - The order of the parts is
+       used; that is, if you are computing the case map on the third part,
+ then the third part is used in the *case to map from*. This is best
+ used if you have exactly the same dataset in terms of the part list
+ ordering, but perhaps calculated differently so only the variable
+ values differ.
+
+ *Parts selected for case to map from* (3) - Select
+       parts in the case to map *from* as well as the case to map *to*. Only selected parts
+ are used in the two cases.
+
+
+.. note::
+ This function uses EnSight's search capability to do the mapping. It is
+ critical that the nodes of the parts being mapped onto lie within the
+ geometry of all of the parts of the case being mapped from. Mapping from a
+ 2D surface to a 2D surface only works reliably if the surfaces are the
+ same (or extremely close, and the ``flag=1`` option is chosen).
+
+   Mapping nodal variables is faster than mapping elemental variables. This function is
+   threaded, so an Enterprise (formerly Gold or HPS) license key may improve
+   performance.
+
+ Select only the parts that you require, and use search option ``0`` if at all possible.
+
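+The following is a minimal PyEnSight sketch of this mapping. The dataset path,
+the case number (``2``), and the variable name ``pressure`` are placeholders;
+the expression follows the argument order in the preceding table, selecting
+*nearest value* (1) and *dimensionality match* (1).
+
+.. code-block:: python
+
+    from ansys.pyensight.core import LocalLauncher
+
+    # Start a local EnSight session and load the target case (hypothetical path).
+    session = LocalLauncher().start()
+    session.load_data("/path/to/local.case")
+    core = session.ensight.objs.core
+    # Evaluate the calculator expression on the selected parts ("plist").
+    core.create_variable(
+        "mapped_pressure", "CaseMap(plist, 2, pressure, 1, 1)", sources=core.PARTS
+    )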
+
+
+.. _CaseMapDiff:
+
+-------------
+CaseMapDiff()
+-------------
+
+
+**Case Map Diff**
+
+``CaseMapDiff(2D or 3D parts, case to map from, scalar/vector/tensor, 0/1 (0=search only, 1=if search fails, find closest))``
+
+
+This function is equivalent to the expression:
+
+``Variable - CaseMap[Variable]``
+
+For information on how this function works, see :ref:`CaseMap <CaseMap>`.
+
+
+
+.. _CaseMapImage:
+
+--------------
+CaseMapImage()
+--------------
+
+**Case Map Image**
+
+``CaseMapImage(2D or 3D parts, part to map from, scalar, viewport number, Undefined value limit)``
+
+
+This function projects a 2D part variable from a different case onto a
+3D geometry, similar to a texture mapping. The projection takes into account
+the view orientation and surface visibility from the specified viewport
+number, in effect mapping 2D results onto the 3D geometry.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - part to map from
+ - part number of the 2D part (This 2D part is
+ usually data from an infrared camera.)
+ * - scalar
+ - scalar variable
+ * - viewport number
+ - Viewport number showing parts that the
+ variable is being computed on, from the same camera view as the part to
+ map from
+ * - Undefined value limit
+ - Values on the 2D part that are under this
+ value are considered undefined
+
+
+
+.. _Coeff:
+
+-------
+Coeff()
+-------
+
+**Coefficient**
+
+``Coeff(any 1D or 2D parts, scalar, component [, Compute_Per_part])``
+
+
+Computes a constant or constant per part variable whose
+value is a coefficient :math:`{C}_{x}` , :math:`{C}_{y}` , or :math:`{C}_{z}`
+such that :math:`{C}_{x}={\displaystyle {\int }_{S}f{n}_{x}dS}`,
+:math:`{C}_{y}={\displaystyle {\int }_{S}f{n}_{y}dS}`,
+:math:`{C}_{z}={\displaystyle {\int }_{S}f{n}_{z}dS}`
+
+where:
+
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`f`
+ - any scalar variable
+ * - :math:`S`
+ - 1D or 2D domain
+ * - :math:`{n}_{x}`
+ - x component of normal
+ * - :math:`{n}_{y}`
+ - y component of normal
+ * - :math:`{n}_{z}`
+ - z component of normal
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - variable
+ - scalar or vector
+ * - component
+ - if variable is a vector: [X], [Y], or [Z]
+
+
+Specify [X], [Y], or [Z] to get the corresponding coefficient.
+
+.. note::
+ Normal for a 1D part is parallel to the plane of the plane tool.
+
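+A short sketch, assuming a connected ``session`` (as in the earlier CaseMap
+sketch) and a hypothetical 2D part named ``inlet_surface`` with a ``pressure``
+variable:
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    surf = core.PARTS["inlet_surface"]  # hypothetical 2D part name
+    # X component of the coefficient integral over the selected surface
+    core.create_variable("Cx", "Coeff(plist, pressure, [X])", sources=surf)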
+
+.. _Cmplx:
+
+-------
+Cmplx()
+-------
+
+**Complex**
+
+``Cmplx(any parts, scalar/vector(real portion), scalar/vector(complex portion), [optional frequency(Degrees)])``
+
+
+Creates a complex scalar or vector from two scalar or
+vector variables. The frequency is optional and is used only for
+reference.
+
+
+:math:`Z = A + Bi`
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - real portion
+ - scalar or vector variable
+ * - complex portion
+ - scalar or vector variable (but must be same as the real portion)
+ * - [frequency]
+ - constant number (optional)
+
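+A sketch combining this function with :ref:`CmplxModu() <CmplxModu>`, assuming
+a connected ``session`` and hypothetical real and imaginary scalars ``p_real``
+and ``p_imag``:
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    parts = core.PARTS
+    # Combine real and imaginary scalars into one complex scalar at 90 Hz,
+    # then extract its modulus with CmplxModu() (described later).
+    core.create_variable("p_cmplx", "Cmplx(plist, p_real, p_imag, 90.0)", sources=parts)
+    core.create_variable("p_mag", "CmplxModu(plist, p_cmplx)", sources=parts)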
+
+.. _CmplxArg:
+
+----------
+CmplxArg()
+----------
+
+**Complex Argument**
+
+``CmplxArg(any parts, complex scalar or vector)``
+
+
+Computes the argument of a complex scalar or vector. The
+resulting scalar is given in a range between -180 and 180
+degrees.
+
+:math:`\text{Arg = atan(Vi/Vr)}`
+
+
+
+.. _CmplxConj:
+
+-----------
+CmplxConj()
+-----------
+
+**Complex Conjugate**
+
+``CmplxConj(any parts, complex scalar or vector)``
+
+
+Computes the conjugate of a complex scalar or vector.
+
+Returns a complex scalar or vector, where:
+
+:math:`\text{Nr = Vr}`
+
+
+:math:`\text{Ni = -Vi}`
+
+
+
+.. _CmplxImag:
+
+-----------
+CmplxImag()
+-----------
+
+**Complex Imaginary**
+
+``CmplxImag(any parts, complex scalar or vector)``
+
+
+Extracts the imaginary portion of a complex scalar or vector
+into a real scalar or vector:
+
+:math:`\text{N = Vi}`
+
+
+
+.. _CmplxModu:
+
+-----------
+CmplxModu()
+-----------
+
+**Complex Modulus**
+
+``CmplxModu(any parts, complex scalar or vector)``
+
+
+Returns a real scalar or vector that is the modulus of the
+given scalar or vector:
+
+:math:`\text{N = SQRT(Vr*Vr + Vi*Vi)}`
+
+
+
+.. _CmplxReal:
+
+-----------
+CmplxReal()
+-----------
+
+**Complex Real**
+
+``CmplxReal(any parts, complex scalar or vector)``
+
+
+Extracts the real portion of a complex scalar or vector
+into a real scalar or vector:
+
+:math:`\text{N = Vr}`
+
+
+
+.. _CmplxTransResp:
+
+----------------
+CmplxTransResp()
+----------------
+
+**Complex Transient Response**
+
+``CmplxTransResp(any parts, complex scalar or vector, constant PHI(0.0-360.0 Degrees))``
+
+
+Returns a real scalar or vector that is the real
+transient response:
+
+:math:`\text{Re(Vt) = Re(Vc)Cos(phi) - Im(Vc)Sin(phi)}`
+
+which is a function of the transient phase angle
+:math:`\text{phi}` defined by:
+
+:math:`\text{phi = 2 Pi f t}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - t
+ - harmonic response time parameter
+ * - f
+ - frequency of the complex variable :math:`\text{Vc}`
+
+
+and the complex field :math:`\text{Vc}`, defined as:
+
+:math:`\text{Vc = Vc(x,y,z) = Re(Vc) + i Im(Vc)}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - Vc
+ - complex variable field
+ * - Re(Vc)
+ - real portion of Vc
+ * - Im(Vc)
+ - imaginary portion of Vc
+ * - i
+ - Sqrt(-1)
+
+
+.. note::
+   The transient complex function is a composition of Vc and Euler's
+ relation, namely:
+
+ Vt = Vt(x,y,z,t) = Re(Vt) + i Im(Vt) = Vc * e^(i phi)
+
+ where:
+
+ e^(i phi) = Cos(phi) + i Sin(phi)
+
+ The real portion, Re(Vt), is as designated in the preceding equation.
+
+ This function is only good for harmonic variations, thus fields with a
+ defined frequency.
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - phi angle
+ - constant number between 0 and 360 degrees.
+
+
+.. _ConstPerPart:
+
+--------------
+ConstPerPart()
+--------------
+
+
+**ConstPerPart**
+
+``ConstPerPart(any parts, constant)``
+
+
+This function assigns a value to the selected parts. The value can be either a
+floating point value entered into the field or a case constant. This value
+does not change over time. At a later point, other parts can be selected
+and this value can be recalculated. These other parts are then assigned the new value.
+The existing parts that were previously selected retain their previously assigned
+value. In other words, each successive time that this value is recalculated for an
+existing variable, the values assigned to the most recently selected parts are updated
+without removing previously assigned values.
+
+
+.. _Curl:
+
+------
+Curl()
+------
+
+**Curl**
+
+``Curl(any parts, vector)``
+
+
+Computes a vector variable that is the curl of the input vector:
+
+:math:`\mathrm{curl}\text{ }f=\nabla \times f=\left(\frac{\partial {f}_{3}}{\partial y}-\frac{\partial {f}_{2}}{\partial z}\right)\widehat{i}+\left(\frac{\partial {f}_{1}}{\partial z}-\frac{\partial {f}_{3}}{\partial x}\right)\widehat{j}+\left(\frac{\partial {f}_{2}}{\partial x}-\frac{\partial {f}_{1}}{\partial y}\right)\widehat{k}`
+
+
+
+.. _Defect_Functions:
+
+---------------------------------------------
+Porosity characterization functions (defects)
+---------------------------------------------
+
+Consider a mesh with a scalar per element variable representing the micro porosity of each
+cell, where ``0`` means no porosity (the cell is completely full) and ``100`` means that the cell is
+fully porous (the cell is empty). Cells with a non-zero porosity are considered to have
+defects. Defects that span multiple cells may indicate an unacceptable defect.
+
+Six ``Defect_*`` functions are provided to help calculate factors of interest in characterizing
+the defects that occur over multiple cells. To use the following ``Defect_*`` functions, you would
+create an isovolume of your porosity variable between the desired ranges (perhaps 5 to 100) and
+select this isovolume part.
+
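+A sketch of this workflow, assuming a connected ``session`` and that the
+isovolume part has already been created and is named ``porosity_isovolume``
+(hypothetical name and bounds):
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    iso = core.PARTS["porosity_isovolume"]  # hypothetical isovolume part (porosity 5-100)
+    # Per-element bulk volume of each defect, then a case constant counting the
+    # defects whose bulk volume lies between the two bounds (placeholder values).
+    core.create_variable("defect_vol", "Defect_BulkVolume(plist)", sources=iso)
+    core.create_variable(
+        "defect_count", "Defect_Count(plist, defect_vol, 0.0, 0.001)", sources=iso
+    )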
+
+.. _Defect_BulkVolume:
+
+-------------------
+Defect_BulkVolume()
+-------------------
+
+**Defect Bulk Volume**
+
+``Defect_BulkVolume(2D or 3D parts)``
+
+
+Returns a per element scalar that is the sum of the
+volume of all the cells comprising the defect. Each cell in the
+defect is then assigned this value.
+
+For input specifications, see :ref:`Defect Functions <Defect_Functions>`.
+
+
+.. _Defect_Count:
+
+--------------
+Defect_Count()
+--------------
+
+**Defect Count**
+
+``Defect_Count(2D or 3D parts, Defect scalar per elem, min value, max value [,Compute_Per_part])``
+
+
+Returns a case constant that is the count of the defects whose value
+lies between the minimum value and the maximum value. This
+function uses a ``defect scalar per elem`` variable that has been previously calculated by any of
+the other five :ref:`Defect functions <Defect_Functions>`.
+
+For input specifications, see :ref:`Defect Functions <Defect_Functions>`.
+
+
+.. _Defect_LargestLinearExtent:
+
+----------------------------
+Defect_LargestLinearExtent()
+----------------------------
+
+**Defect Largest Linear Extent**
+
+``Defect_LargestLinearExtent(2D or 3D parts)``
+
+
+Returns a per element scalar that is the largest linear
+extent of all the cells comprising the defect, where each cell of the defect is
+assigned this value. The largest linear extent is the root-mean-squared
+distance.
+
+For input specifications, see :ref:`Defect Functions <Defect_Functions>`.
+
+
+.. _Defect_NetVolume:
+
+------------------
+Defect_NetVolume()
+------------------
+
+**Defect NetVolume**
+
+``Defect_NetVolume(2D or 3D parts, scalar per elem, scale factor)``
+
+
+Returns a per element scalar that is the sum of the cell
+volumes multiplied by the scalar per element variable multiplied by the scale
+factor of all the cells comprising the defect, where each cell of the defect is
+assigned this value. The ``scalar per elem`` variable is usually porosity,
+but you can use any per element scalar variable. The scale factor
+adjusts the scalar per element variable values. That is, if the porosity range is
+from 0.0 to 100.0, then a scale factor of 0.01 can be used to normalize the
+porosity values to volume fraction values ranging from 0.0 to 1.0.
+
+For input specifications, see :ref:`Defect Functions <Defect_Functions>`.
+
+
+.. _Defect_ShapeFactor:
+
+--------------------
+Defect_ShapeFactor()
+--------------------
+
+**Defect ShapeFactor**
+
+``Defect_ShapeFactor(2D or 3D parts)``
+
+
+Returns a per element scalar that is the *largest linear extent* divided by the diameter of the
+sphere with a volume equal to the *bulk volume* of the defect, where each cell of the defect
+is assigned this value.
+
+For input specifications, see :ref:`Defect Functions <Defect_Functions>`.
+
+
+.. _Defect_SurfaceArea:
+
+--------------------
+Defect_SurfaceArea()
+--------------------
+
+**Defect SurfaceArea**
+
+``Defect_SurfaceArea(2D or 3D parts)``
+
+
+Returns a per element scalar that is the surface area of
+the defect, where each cell of the defect is assigned this value.
+
+For input specifications, see :ref:`Defect Functions <Defect_Functions>`.
+
+
+.. _Density:
+
+---------
+Density()
+---------
+
+**Density**
+
+``Density(any parts, pressure, temperature, gas constant)``
+
+
+Computes a scalar variable that is the density :math:`\rho`. This scalar variable
+is defined as:
+
+:math:`\rho =\frac{p}{RT}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`p`
+ - pressure
+ * - :math:`T`
+ - temperature
+ * - :math:`R`
+ - gas constant
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - pressure
+ - scalar variable
+ * - temperature
+ - scalar variable
+ * - gas constant
+ - scalar, constant, or constant per part variable, or constant number
+
+
+
+.. _DensityLogNorm:
+
+----------------
+DensityLogNorm()
+----------------
+
+
+**Log of Normalized Density**
+
+``DensityLogNorm(any parts, density, freestream density)``
+
+
+Computes a scalar variable that is the natural log of *normalized density*. This
+scalar variable is defined as:
+
+:math:`\mathrm{ln}{\rho }_{n}=\mathrm{ln}\left(\rho /{\rho }_{i}\right)`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`\rho`
+ - density
+ * - :math:`{\rho }_{i}`
+ - freestream density
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar variable, constant variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or constant number
+
+
+.. _DensityNorm:
+
+-------------
+DensityNorm()
+-------------
+
+**Normalized Density**
+
+``DensityNorm(any parts, density, freestream density)``
+
+
+Computes a scalar variable that is the *normalized density* :math:`{\rho }_{n}`.
+This scalar variable is defined as:
+
+:math:`{\rho }_{n}=\rho /{\rho }_{i}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`\rho`
+ - density
+ * - :math:`{\rho }_{i}`
+ - freestream density
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar variable, constant variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or constant number
+
+
+
+.. _DensityNormStag:
+
+-----------------
+DensityNormStag()
+-----------------
+
+
+**Normalized Stagnation Density**
+
+``DensityNormStag(any parts, density, total energy,
+velocity, ratio of specific heats, freestream density, freestream speed of sound,
+freestream velocity magnitude)``
+
+
+Computes a scalar variable that is the *normalized stagnation density*.
+This scalar variable is defined as:
+
+:math:`{\rho }_{on}={\rho }_{o}/{\rho }_{oi}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`{\rho }_{o}`
+ - stagnation density
+ * - :math:`{\rho }_{oi}`
+ - freestream stagnation density
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant or constant per part variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or constant number
+ * - freestream speed of sound
+ - constant or constant per part variable or constant number
+ * - freestream velocity magnitude
+ - constant or constant per part variable or constant number
+
+
+
+.. _DensityStag:
+
+-------------
+DensityStag()
+-------------
+
+**Stagnation Density**
+
+``DensityStag(any parts, density, total energy, velocity, ratio of specific heats)``
+
+
+Computes a scalar variable that is the *stagnation
+density* :math:`{\rho }_{o}`. This scalar variable is defined as:
+
+:math:`{\rho }_{o}=\rho {\left(1+\left(\frac{\gamma -1}{2}\right){M}^{2}\right)}^{\left(1/\left(\gamma -1\right)\right)}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`\rho`
+ - density
+ * - :math:`\gamma`
+ - ratio of specific heats
+ * - :math:`M`
+     - Mach number
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part variable, or constant number
+
+
+
+.. _Dist2Nodes:
+
+------------
+Dist2Nodes()
+------------
+
+
+**Distance Between Nodes**
+
+``Dist2Nodes(any parts, nodeID1, nodeID2)``
+
+
+Computes a constant, positive variable that is the distance between any two nodes.
+This function searches down the part list until it finds *nodeID1* and
+then searches until it finds *nodeID2*. It returns ``Undefined`` if *nodeID1* or *nodeID2*
+cannot be found. Nodes are designated by their node IDs, so the part must have node IDs.
+
+.. note::
+ Most created parts do not have node IDs.
+
+ The geometry type is important for using this function. There are three geometry types:
+ static, changing coordinate, and changing connectivity. You can find out your geometry
+ type by selecting **Query→Dataset** and looking in the **General Geometric section** of the
+ popup window.
+
+   If you have a static geometry with visual displacement turned on, the ``Dist2Nodes``
+   function does not use the displacement in its calculations. You must enable server-side
+   (computational) displacement. If you have changing coordinate geometry, the ``Dist2Nodes``
+   function works without adjustment. If you have changing connectivity, the ``Dist2Nodes``
+   function should not be used because it can give nonsensical results: connectivity is
+   reevaluated each timestep, and node IDs may be reassigned.
+
+ For transient results, to find the distance between two nodes on different parts, or
+ between two nodes if one or both nodes don't have IDs or the IDs are not unique for the model
+ (namely, more than one part has the same node ID), use the line tool.
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - nodeID1
+ - constant number
+ * - nodeID2
+ - constant number
+
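+A sketch, assuming a connected ``session``, a hypothetical part named
+``blade`` that has node IDs, and two hypothetical node IDs:
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    part = core.PARTS["blade"]  # hypothetical part with node IDs
+    # Constant distance between node IDs 101 and 256 (both placeholders).
+    core.create_variable("gap", "Dist2Nodes(plist, 101, 256)", sources=part)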
+
+.. _Dist2Part:
+
+-----------
+Dist2Part()
+-----------
+
+
+**Distance to Parts: Node to Nodes**
+
+``Dist2Part(origin part + field parts, origin part, origin part normal)``
+
+
+Computes a scalar variable on the origin part and field
+parts that is the minimum distance at each node of the origin and field parts to
+any node in the origin part. This distance is unsigned by default. The origin
+part is the origin of a Euclidean distance field. So, by definition, the scalar
+variable is always zero at the origin part because the distance to the
+origin part is always zero.
+
+The origin part normal vector must be a per node
+variable. If the origin part normal is calculated using the ``Normal`` calculator
+function, it is a per element variable and must be moved to the nodes using
+the :ref:`ElemToNode() <ElemToNode>` function.
+
+.. note::
+ The origin part must be included in the field part list (although, as
+ mentioned earlier, the scalar variable is zero for all nodes on the
+ origin part). This algorithm has an execution time on the order of the
+ number of nodes in the field parts times the number of nodes in the origin
+ part. While the implementation is both SOS-aware and threaded, the run time
+ is dominated by the number of nodes in the computation.
+
+
+This function is computed between the nodes of the
+origin and field parts. As a result, the accuracy of its approximation to the
+distance field is limited to the density of nodes (effectively the size of the
+elements) in the origin part. If a more accurate approximation is required, use
+the :ref:`Dist2PartElem() <Dist2PartElem>` function. While this function is slower,
+it is less dependent on the nodal distribution in the origin part because it uses the
+nodes plus the element faces to calculate the minimum distance.
+
+**Usage**
+
+You typically use an arbitrary 2D part to create a clip in a 3D field. You
+then use the 2D part as your origin part and select the origin part as well
+as your 3D field parts. There is no need to have normal vectors. After creating your
+scalar variable, which you might call ``distTo2Dpart``, you create
+an ``isosurface=0`` in your field using ``distTo2Dpart`` as your variable.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - origin part
+ - part number to compute the distance to
+ * - origin part normal
+ - constant for unsigned computation or a
+ nodal vector variable defined on the origin part for a signed computation
+
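+A sketch of the usage described above, assuming a connected ``session`` and
+that the 2D clip is EnSight part number ``5`` (hypothetical):
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    fields = core.PARTS  # the origin part must be included in the selection
+    # Unsigned distance field to part 5 (pass a nodal normal variable instead
+    # of 0 for a signed computation).
+    core.create_variable("distTo2Dpart", "Dist2Part(plist, 5, 0)", sources=fields)
+    # An isosurface of distTo2Dpart at 0.0 then reproduces the clip surface.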
+
+
+.. _Dist2PartElem:
+
+---------------
+Dist2PartElem()
+---------------
+
+
+**Distance to Parts: Node to Elements**
+
+``Dist2PartElem(origin part + field parts, origin part, origin part normal)``
+
+
+Computes a scalar variable that is the minimum distance
+between each node of the origin and field parts and the closest point on any
+element in the origin part.
+
+- If the origin part normal vector is not supplied, this distance is unsigned.
+- If the origin part normal vector is supplied, the distance is signed.
+
+.. note::
+ The origin part normal vector must be a per node variable. If the origin part
+   normal is calculated using the :ref:`Normal() <Normal>` function,
+   it is a per element variable and must be moved to the nodes using the
+   :ref:`ElemToNode() <ElemToNode>` function. If it is per node and
+ the origin part normal vector variable defined at the origin part is supplied,
+ the direction of the normal is used to return a signed distance function
+ with distances in the direction of the normal being positive.
+
+
+Once the closest point in the origin part has been found
+for a node in a field part, the dot product of the origin node normal and a
+vector between the two nodes is used to select the sign of the result.
+
+.. note::
+ The origin part must be included in the field part list (although the
+ output is zero for all nodes of the origin part because it is the
+ origin of the Euclidean distance). This algorithm has an execution time on
+ the order of the number of nodes in the field parts multiplied by the number of
+ elements in the origin part. While the implementation is both SOS-aware and
+ threaded, the run time is dominated by the number of nodes in the
+ computation.
+
+
+This function is a more accurate estimation of the distance field than the :ref:`Dist2Part() <Dist2Part>`
+function because it allows for distances between nodes and element surfaces on the origin part. This
+improved accuracy results in increased computational complexity. As a result, the ``Dist2PartElem`` function
+can be several times slower than the :ref:`Dist2Part() <Dist2Part>` function.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - origin part
+ - part number to compute the distance to
+ * - origin part normal
+ - constant for unsigned computation or a
+ nodal vector variable defined on the origin part for a signed computation
+
+
+.. _Div:
+
+-----
+Div()
+-----
+
+
+**Divergence**
+
+``Div(2D or 3D parts, vector)``
+
+
+Computes a scalar variable whose value is the divergence. This scalar variable is
+defined as:
+
+:math:`Div=\frac{\partial u}{\partial x}+\frac{\partial v}{\partial y}+\frac{\partial w}{\partial z}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - u,v,w
+ - velocity components in the X, Y, Z
+ directions
+
+
+.. _EleMetric:
+
+-----------
+EleMetric()
+-----------
+
+
+**Element Metric**
+
+``EleMetric(any parts, metric_function)``
+
+
+Calculates an element mesh metric at each element,
+creating a scalar, element-based variable depending upon the selected metric
+function. The various metrics are valid for specific element types. If the
+element is not of the type supported by the metric function, the value at the
+element is the EnSight undefined value. Metrics exist for the following
+element types: ``tri``, ``quad``, ``tet``, and ``hex``. A metric can be any
+one of the following:
+
+.. list-table::
+ :widths: 10 25 25 40
+ :header-rows: 1
+
+ * - #
+ - Name
+ - Elem types
+ - Description
+ * - 0
+ - Element type
+ - All
+ - EnSight element type number. See the table that follows this one.
+ * - 1
+ - Condition
+ - hexa8, tetra4, quad4, tria3
+ - Condition number of the weighted Jacobian matrix.
+ * - 2
+ - Scaled Jacobian
+ - hexa8, tetra4, quad4, tria3
+ - Jacobian scaled by the edge length
+ products.
+
+ * - 3
+ - Shape
+ - hexa8, tetra4, quad4, tria3
+ - Varies by element type.
+ * - 4
+ - Distortion
+ - hexa8, tetra4, quad4, tria3
+ - Distortion is a measure of how well behaved the
+ mapping from parameter space to world coordinates is.
+
+ * - 5
+ - Edge ratio
+ - hexa8, tetra4, quad4, tria3
+ - Ratio of longest edge length over shortest
+ edge length.
+
+ * - 6
+ - Jacobian
+ - hexa8, tetra4, quad4
+     - Minimum determinant of the Jacobian
+ computed at each vertex.
+
+ * - 7
+ - Radius ratio
+ - tetra4, quad4, tria3
+ - Normalized ratio of the radius of the inscribed
+ sphere to the radius of the circumsphere.
+
+ * - 8
+ - Minimum angle
+ - tetra4, quad4, tria3
+ - Minimum included angle in degrees.
+ * - 9
+ - Maximum edge ratio
+ - hexa8, quad4
+     - Largest ratio of principal axis
+ lengths.
+
+ * - 10
+ - Skew
+ - hexa8, quad4
+     - Maximum degree to which a pair of element vectors
+       are parallel, measured using the dot product.
+
+ * - 11
+ - Taper
+ - hexa8, quad4
+ - Maximum ratio of a cross-derivative to its
+ shortest associated principal axis.
+
+ * - 12
+ - Stretch
+ - hexa8, quad4
+ - Ratio of minimum edge length to maximum
+ diagonal.
+
+ * - 13
+ - Oddy
+ - hexa8, quad4
+ - Maximum deviation of the metric tensor from the
+ identity matrix, evaluated at the corners and element center.
+
+ * - 14
+ - Max aspect Frobenius
+ - hexa8, quad4
+ - Maximum of aspect Frobenius computed for the
+ element decomposed into triangles.
+
+   * - 15
+     - Min aspect Frobenius
+     - hexa8, quad4
+     - Minimum of aspect Frobenius computed for the
+       element decomposed into triangles.
+
+ * - 16
+ - Shear
+ - hexa8, quad4
+ - Scaled Jacobian with a truncated
+ range.
+
+ * - 17
+ - Signed volume
+ - hexa8, tetra4
+ - Volume computed, preserving the sign.
+ * - 18
+ - Signed area
+ - tria3, quad4
+ - Area preserving the sign.
+ * - 19
+ - Maximum angle
+ - tria3, quad4
+ - Maximum
+ included angle in degrees.
+
+ * - 20
+ - Aspect ratio
+ - tetra4, quad4
+ - Maximum edge length over area.
+ * - 21
+ - Aspect Frobenius
+ - tetra4, tria3
+ - Sum of the edge lengths squared divided by the
+ area and normalized.
+
+ * - 22
+ - Diagonal
+ - hexa8
+ - Ratio of the minimum diagonal length to the
+ maximum diagonal length.
+
+ * - 23
+ - Dimension
+ - hexa8
+ - :math:`\frac{V}{2\nabla V}`
+ * - 24
+ - Aspect beta
+ - tetra4
+ - Radius ratio of a positively oriented tetrahedron.
+
+ * - 25
+ - Aspect gamma
+ - tetra4
+ - Root-mean-square edge length to volume.
+
+ * - 26
+ - Collapse ratio
+ - tetra4
+ - Smallest ratio of the height of a vertex above
+ its opposing triangle to the longest edge of that opposing triangle
+ across all vertices of the tetrahedron.
+
+ * - 27
+ - Warpage
+ - quad4
+ - Cosine of the minimum dihedral angle formed by
+ planes intersecting in diagonals.
+
+ * - 28
+ - Centroid
+ - All
+ - Returns each element centroid as a vector value
+ at that element.
+
+ * - 29
+ - Volume Test
+ - 3D elements
+ - Returns 0.0 for non-3D elements.
+ Each 3D element is decomposed into Tet04 elements. This option
+ returns a scalar equal to 0.0, 1.0, or 2.0. It
+ returns 0.0 if none of the Tet04 element volumes is negative, 1.0 if
+ all of the Tet04 element volumes are negative, and 2.0 if some of
+ the Tet04 element volumes are negative.
+
+ * - 30
+ - Signed Volume
+ - 3D elements
+ - Returns 0.0 for non-3D elements. Returns a scalar
+ that is the sum of the signed volumes of the Tet4 decomposition for
+ 3D elements.
+
+ * - 31
+ - Part Number
+ - All
+ - Returns a scalar at each element that is the
+ EnSight part ID number of that element.
+
+ * - 32
+ - Face Count
+ - All
+ - Returns a scalar that is the number of faces in
+ that element.
+
+
+**EnSight element types**
+
+.. list-table::
+ :widths: 10 90
+
+ * - 0
+ - Point
+ * - 1
+ - Point ghost
+ * - 2
+ - 2 node bar
+ * - 3
+ - 2 node bar ghost
+ * - 4
+ - 3 node bar
+ * - 5
+ - 3 node bar ghost
+ * - 6
+ - 3 node triangle (tria3)
+ * - 7
+ - 3 node triangle ghost
+ * - 10
+ - 6 node triangle
+ * - 11
+ - 6 node triangle ghost
+ * - 12
+ - 4 node quadrilateral (quad4)
+ * - 13
+ - 4 node quadrilateral ghost
+ * - 14
+ - 8 node quadrilateral
+ * - 15
+ - 8 node quadrilateral ghost
+ * - 16
+ - 4 node tetrahedron (tetra4)
+ * - 17
+ - 4 node tetrahedron ghost
+ * - 20
+ - 10 node tetrahedron
+ * - 21
+ - 10 node tetrahedron ghost
+ * - 22
+ - 5 node pyramid
+ * - 23
+ - 5 node pyramid ghost
+ * - 24
+ - 13 node pyramid
+ * - 25
+ - 13 node pyramid ghost
+ * - 26
+ - 6 node pentahedron
+ * - 27
+ - 6 node pentahedron ghost
+ * - 28
+ - 15 node pentahedron
+ * - 29
+ - 15 node pentahedron ghost
+ * - 30
+ - 8 node hexahedron (hexa8)
+ * - 31
+ - 8 node hexahedron ghost
+ * - 32
+ - 20 node hexahedron
+ * - 33
+ - 20 node hexahedron ghost
+ * - 34
+ - N-sided polygon
+ * - 35
+ - N-sided polygon ghost
+ * - 38
+ - N-faced polyhedron
+ * - 39
+ - N-faced polyhedron ghost
+
+
+The implementation is based on the BSD implementation of
+the *Sandia Verdict Library*.
+
+**References**
+
+For more information on individual metrics, see these references:
+
+1. C. J. Stimpson, C. D. Ernst, P. Knupp, P. P. Pebay, & D.
+ Thompson, The Verdict Library Reference Manual, May 8, 2007.
+2. The Verdict Library Reference Manual (http://www.vtk.org/Wiki/images/6/6b/VerdictManual-revA.pdf)
+
+
+
+.. _EleSize:
+
+---------
+EleSize()
+---------
+
+
+**Element Size**
+
+``EleSize(any parts)``
+
+Calculates the volume (3D), area (2D), or length (1D) of each element,
+creating a scalar, element-based variable.
+
+.. note::
+ This function uses the coordinates of the element to calculate the volume of each
+ element. If you want to use displacement in the calculation of the volume, you must
+ turn on computational (server-side) displacement, rather than visual only
+   (client side) displacement. When computational displacement is turned on, displacement
+ values are applied to the coordinates on the server prior to calculating the element
+ size.
+
+ If you calculate the element size of a part and then use that part to create a
+ child part, the child part inherits the values of the ``EleSize`` calculation, which
+ are the size of the parent elements and not the size of the child elements. If you want the
+ ``EleSize`` of the child part, then you must select the child part and recalculate a new
+ variable.
+
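+A sketch, assuming a connected ``session``:
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    # Per-element volume/area/length scalar. As the note explains, recompute
+    # on child parts if you need their own element sizes rather than the
+    # inherited parent values.
+    core.create_variable("EleSize_var", "EleSize(plist)", sources=core.PARTS)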
+
+
+.. _ElemToNode:
+
+------------
+ElemToNode()
+------------
+
+
+**Element to Node**
+
+``ElemToNode(any parts, element-based scalar or vector)``
+
+
+Averages an element-based variable to produce a
+node-based variable.
+
+::
+
+    For each node[i]:  val += elem[j]->val * elem[j]->wt  | node[i]
+    For each node[i]:  wt  += elem[j]->wt                 | node[i]
+    Result:            node[i]->val /= node[i]->wt
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - wt
+     - 1 for this algorithm and the weighting scalar in the :ref:`ElemToNodeWeighted() <ElemToNodeWeighted>` function
+ * - j
+ - iterator on all part elements
+ * - i
+ - iterator on all part nodes (nodes[i] must be on elem[j] to contribute)
+ * - | node[i]
+ - indicates node that is associated with elem[j]
+
+
+By default, this function uses all
+parts that share each node of the one or more selected parts. Parts that are not
+selected, whose elements are shared by nodes of the selected parts, have
+their element values averaged in with those of the selected parts.
+
+
+
+.. _ElemToNodeWeighted:
+
+--------------------
+ElemToNodeWeighted()
+--------------------
+
+
+**Element to Node Weighted**
+
+``ElemToNodeWeighted(any parts, element-based scalar or vector, element-based weighting scalar)``
+
+
+This function is the same as the :ref:`ElemToNode() <ElemToNode>` function, except
+that the value of the variable at the element is weighted by an element scalar.
+That is, elem[j] → wt is the value of the weighting scalar in the :ref:`ElemToNode() <ElemToNode>`
+algorithm previously described.
+
+One use of this function might be to use the element
+size as a weighting factor so that larger elements contribute more to the nodal
+value than smaller elements. A sketch of this workflow follows.
+
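+This sketch assumes a connected ``session``, a hypothetical per element scalar
+``elem_scalar``, and the ``EleSize_var`` variable from the
+:ref:`EleSize() <EleSize>` sketch:
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    # Move a per-element scalar to the nodes, weighting each element's
+    # contribution by its size.
+    core.create_variable(
+        "nodal_val",
+        "ElemToNodeWeighted(plist, elem_scalar, EleSize_var)",
+        sources=core.PARTS,
+    )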
+
+.. _EnergyT:
+
+---------
+EnergyT()
+---------
+
+
+**Energy: Total Energy**
+
+``EnergyT(any parts, density, pressure, velocity, ratio of specific heats)``
+
+
+Computes a scalar variable of total energy per unit volume.
+
+.. list-table::
+ :widths: 30 70
+
+   * - :math:`e=\rho \left({e}_{i}+\frac{{V}^{2}}{2}\right)`
+     - total energy
+   * - :math:`{e}_{i}={e}_{0}-\frac{{V}^{2}}{2}`
+     - internal energy
+   * - :math:`{e}_{0}=\frac{e}{\rho }`
+     - stagnation energy
+
+
+.. list-table::
+ :widths: 30 70
+
+   * - :math:`\rho`
+ - density
+ * - :math:`V`
+ - velocity
+
+
+Or based on gamma, pressure, and velocity:
+
+
+:math:`e=\frac{p}{\left(\gamma -1\right)}+\rho \frac{{V}^{2}}{2}`
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - pressure
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part variable, or constant number
+
+
+.. _KinEn:
+
+-------
+KinEn()
+-------
+
+
+**Kinetic Energy**
+
+``KinEn(any parts, velocity, density)``
+
+
+Computes a scalar variable whose value is the kinetic
+energy :math:`{E}_{k}`. This scalar variable is defined as:
+
+:math:`{E}_{k}=\frac{1}{2}\rho {V}^{2}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`\rho`
+ - density
+ * - :math:`V`
+ - velocity variable
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - velocity
+ - vector variable
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+
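+A sketch, assuming a connected ``session`` and dataset variables named
+``velocity`` and ``density`` (placeholders):
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    # 0.5 * rho * V^2 from a velocity vector and a density scalar.
+    core.create_variable("kin_energy", "KinEn(plist, velocity, density)", sources=core.PARTS)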
+
+.. _Enthalpy:
+
+----------
+Enthalpy()
+----------
+
+
+**Enthalpy**
+
+``Enthalpy(any parts, density, total energy, velocity, ratio of specific heats)``
+
+
+Computes a scalar variable that is enthalpy,
+:math:`h`. This scalar variable is defined as:
+
+:math:`h=\gamma \left(\frac{E}{\rho }-\frac{{V}^{2}}{2}\right)`
+
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`E`
+ - total energy per unit volume
+ * - :math:`\rho`
+ - density
+ * - :math:`V`
+ - velocity magnitude
+ * - :math:`\gamma`
+ - ratio of specific heats
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part variable, or constant number
+
+
+
+.. _EnthalpyNorm:
+
+--------------
+EnthalpyNorm()
+--------------
+
+
+**Normalized Enthalpy**
+
+``EnthalpyNorm(any parts, density, total energy, velocity, ratio of specific heats, freestream density, freestream speed of sound)``
+
+
+Computes a scalar variable that is the normalized enthalpy
+:math:`{h}_{n}`. This scalar variable is defined as:
+
+:math:`{h}_{n}=h/{h}_{i}`
+
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`h`
+ - enthalpy
+ * - :math:`{h}_{i}`
+ - freestream enthalpy
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or constant number
+ * - freestream speed of sound
+ - constant or constant per part variable or constant number
+
+
+
+
+.. _EnthalpyStag:
+
+--------------
+EnthalpyStag()
+--------------
+
+
+**Stagnation Enthalpy**
+
+``EnthalpyStag(any parts, density, total energy, velocity, ratio of specific heats)``
+
+
+Computes a scalar variable that is the stagnation enthalpy :math:`{h}_{o}`. This
+scalar variable is defined as:
+
+:math:`{h}_{o}=h+\frac{{V}^{2}}{2}`
+
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`h`
+ - enthalpy
+ * - :math:`V`
+ - velocity magnitude
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part variable, or constant number
+
+
+
+.. _EnthalpyNormStag:
+
+------------------
+EnthalpyNormStag()
+------------------
+
+
+**Normalized Stagnation Enthalpy**
+
+``EnthalpyNormStag(any parts, density, total energy,
+velocity, ratio of specific heats, freestream density, freestream speed of
+sound, freestream velocity magnitude)``
+
+
+Computes a scalar variable that is the normalized stagnation enthalpy :math:`{h}_{on}`.
+This scalar variable is defined as:
+
+:math:`{h}_{on}={h}_{o}/{h}_{oi}`
+
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`{h}_{o}`
+ - stagnation enthalpy
+ * - :math:`{h}_{oi}`
+ - freestream stagnation enthalpy
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or constant number
+ * - freestream speed of sound
+     - constant or constant per part variable or constant number
+ * - freestream velocity magnitude
+ - constant or constant per part variable or constant number
+
+
+
+.. _Entropy:
+
+---------
+Entropy()
+---------
+
+
+**Entropy**
+
+``Entropy(any parts, density, total energy, velocity,
+ratio of specific heats, gas constant, freestream density, freestream speed of sound)``
+
+Computes a scalar variable that is the entropy, :math:`s`. This scalar variable is defined as:
+
+:math:`s=\mathrm{ln}\left(\frac{\frac{p}{{p}_{\infty }}}{{\left(\frac{\rho }{{\rho }_{\infty }}\right)}^{\gamma }}\right)\left(\frac{R}{\gamma -1}\right)`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`\rho`
+ - density
+ * - :math:`R`
+ - gas constant
+ * - :math:`\gamma`
+ - ratio of specific heats
+ * - :math:`{a}_{\infty }`
+ - freestream speed of sound
+ * - :math:`{\rho }_{\infty }`
+ - freestream density
+
+
+Pressure, :math:`p`, is calculated from the total energy, :math:`e`, and velocity, :math:`V`:
+
+
+:math:`p=\left(\gamma -1\right)\left[e-\rho \frac{{V}^{2}}{2}\right]`
+
+
+with freestream pressure:
+
+
+:math:`{p}_{\infty }=\frac{{\rho }_{\infty }{a}_{\infty }^{2}}{\gamma }`
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part variable, or constant number
+ * - gas constant
+ - scalar, constant, or constant per part variable or constant number
+ * - freestream density
+ - constant or constant per part variable or constant number
+ * - freestream speed of sound
+ - constant or constant per part variable or constant number
+
+
+
+.. _Flow:
+
+------
+Flow()
+------
+
+
+**Flow**
+
+``Flow(any 1D or 2D parts, velocity [,Compute_Per_part])``
+
+
+Computes a constant or constant per part variable whose
+value is the volume flow rate :math:`{Q}_{c}`. This scalar variable is defined as:
+
+:math:`{Q}_{c}={\displaystyle \underset{S}{\int }\left(V·\widehat{n}\right)}dS`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`V`
+ - velocity vector
+ * - :math:`\widehat{n}`
+ - unit vector normal to surface
+ * - :math:`S`
+ - 1D or 2D domain
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - velocity
+ - vector variable
+
+
+.. note::
+ The normal for each 2D element is calculated using the right-hand rule of
+ the 2D element connectivity and must be consistent over the part. Otherwise,
+ your results are incorrect. To calculate the mass flow rate, multiply the
+ velocity vector by the density scalar and then substitute this vector value
+ for the velocity vector in the previous equation.
+
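+Following the note, a mass flow rate can be sketched by first forming the
+momentum vector with :ref:`Momentum() <Momentum>` and then integrating it with
+``Flow()``. The part and variable names are placeholders, and ``session`` is a
+connected PyEnSight session:
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    outlet = core.PARTS["outlet"]  # hypothetical 2D boundary part
+    # rho*V, then integrate (rho*V . n) dS over the surface.
+    core.create_variable("mom", "Momentum(plist, velocity, density)", sources=outlet)
+    core.create_variable("mdot", "Flow(plist, mom)", sources=outlet)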
+
+.. _FlowRate:
+
+----------
+FlowRate()
+----------
+
+
+**Flow Rate**
+
+``FlowRate(any 1D or 2D parts, velocity)``
+
+
+Computes a scalar :math:`{V}_{n}`, which is the component of velocity normal to the surface.
+This scalar variable is defined as:
+
+:math:`{V}_{n}=V·\widehat{n}`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`V`
+ - velocity
+ * - :math:`\widehat{n}`
+ - unit vector normal to surface
+ * - :math:`S`
+     - 1D or 2D domain
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - velocity
+ - vector variable
+
+
+.. note::
+ This function is equivalent to calculating the dot product of the velocity
+ vector and the surface normal using the :ref:`Normal() ` function.
+
+
+
+.. _FluidShear:
+
+------------
+FluidShear()
+------------
+
+
+**Fluid Shear**
+
+``FluidShear(2D parts, velocity magnitude gradient, viscosity)``
+
+
+Computes a scalar variable :math:`\tau` whose value is defined as:
+
+:math:`\tau =\mu \frac{\partial V}{\partial n}`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`\tau`
+ - shear stress
+ * - :math:`\mu`
+ - dynamic viscosity
+ * - :math:`\frac{\partial V}{\partial n}`
+ - velocity gradient in direction of surface normal
+
+
+.. note::
+ To compute fluid shear stress:
+
+   #. Use the :ref:`Grad() <Grad>` function on the velocity to obtain the ``Velocity Grad``
+ variable in the 3D parts of interest.
+
+ #. Create a part clip or extract the outer surface of the part using part extract,
+ creating a 2D part from the 3D parts used in the previous step on the surface
+ where you want to see the fluid shear stress.
+
+   #. Compute the ``Fluid Shear`` variable on the 2D surface, as shown in the sketch after the following table.
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - velocity gradient
+ - vector variable
+ * - viscosity
+ - scalar, constant, or constant per part variable, or constant number
+
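+A sketch of the three steps in the note, assuming a connected ``session``,
+hypothetical part names, and a constant placeholder viscosity:
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    vol = core.PARTS["fluid_volume"]  # hypothetical 3D part
+    wall = core.PARTS["wall_clip"]    # hypothetical 2D surface from that volume
+    # Grad() of a vector uses its magnitude, giving the velocity-magnitude gradient.
+    core.create_variable("velgrad", "Grad(plist, velocity)", sources=vol)
+    # Shear stress on the wall, with a constant dynamic viscosity (placeholder value).
+    core.create_variable("tau", "FluidShear(plist, velgrad, 1.8e-5)", sources=wall)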
+
+
+.. _FluidShearMax:
+
+---------------
+FluidShearMax()
+---------------
+
+
+**Fluid Shear Stress Max**
+
+``FluidShearMax(2D or 3D parts, velocity, density, turbulent kinetic energy, turbulent dissipation, laminar viscosity)``
+
+
+Computes a scalar variable :math:`\Sigma`. This scalar variable is defined as:
+
+:math:`\Sigma =F/A=\left({\mu }_{t}+{\mu }_{l}\right)E`
+
+where:
+
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`F`
+ - force
+ * - :math:`A`
+ - unit area
+ * - :math:`{\mu }_{t}`
+ - turbulent (eddy) viscosity
+ * - :math:`{\mu }_{l}`
+ - laminar viscosity (treated as a constant)
+ * - :math:`E`
+ - local strain
+
+
+The turbulent viscosity :math:`{\mu }_{t}` is defined as:
+
+:math:`{\mu }_{t}=\frac{\rho 0.09{k}^{2}}{\epsilon }`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`\rho`
+ - density
+ * - :math:`k`
+ - turbulent kinetic energy
+ * - :math:`\epsilon`
+ - turbulent dissipation
+
+
+A measure of local strain :math:`E` (that is, the local elongation in three directions)
+is given by:
+
+:math:`E=\sqrt{\left(2tr\left(D·D\right)\right)}`
+
+where:
+
+:math:`\left(2tr\left(D·D\right)\right)=2\left({d}_{11}^{2}+{d}_{22}^{2}+{d}_{33}^{2}\right)+\left({d}_{12}^{2}+{d}_{13}^{2}+{d}_{23}^{2}\right)`
+
+The *Euclidean norm* is defined by:
+
+:math:`tr\left(D·D\right)={d}_{11}^{2}+{d}_{22}^{2}+{d}_{33}^{2}+\frac{1}{2}\left({d}_{12}^{2}+{d}_{13}^{2}+{d}_{23}^{2}\right)`
+
+The rate of deformation tensor :math:`{d}_{ij}` is defined by:
+
+:math:`D=\left[{d}_{ij}\right]=\frac{1}{2}\left[\begin{array}{ccc}2{d}_{11}& {d}_{12}& {d}_{13}\\ {d}_{21}& 2{d}_{22}& {d}_{23}\\ {d}_{13}& {d}_{23}& 2{d}_{33}\end{array}\right]`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+   * - :math:`{d}_{11}`
+     - :math:`=\partial u/\partial x`
+   * - :math:`{d}_{22}`
+     - :math:`=\partial v/\partial y`
+   * - :math:`{d}_{33}`
+     - :math:`=\partial w/\partial z`
+   * - :math:`{d}_{12}`
+     - :math:`=\partial u/\partial y+\partial v/\partial x={d}_{21}`
+   * - :math:`{d}_{13}`
+     - :math:`=\partial u/\partial z+\partial w/\partial x={d}_{31}`
+   * - :math:`{d}_{23}`
+     - :math:`=\partial v/\partial z+\partial w/\partial y={d}_{32}`
+
+
+The strain tensor :math:`{e}_{ij}` is defined by :math:`{e}_{ij}=\frac{1}{2}{d}_{ij}`.
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - velocity
+ - vector variable
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - turbulent kinetic energy
+ - scalar variable
+ * - turbulent dissipation
+ - scalar variable
+ * - laminar viscosity
+ - constant or constant per part variable or constant number
+
+
+.. _Force:
+
+-------
+Force()
+-------
+
+
+**Force**
+
+``Force(2D parts, pressure)``
+
+
+Computes a vector variable whose value is the force
+:math:`F`. This vector variable is defined as:
+
+:math:`F=pA`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`p`
+ - pressure
+ * - :math:`A`
+ - unit area
+
+
+.. note::
+ The force acts in the surface normal direction.
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - pressure
+ - scalar variable
+
+
+
+.. _Force1D:
+
+---------
+Force1D()
+---------
+
+
+**Force 1D**
+
+``Force1D(1D planar parts, pressure, surface normal)``
+
+
+Computes a vector variable whose value is the force :math:`F`. This function
+is defined as:
+
+:math:`F=pL`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`p`
+ - pressure
+ * - :math:`L`
+ - unit length normal vector
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - pressure
+ - scalar variable
+ * - surface normal
+ - vector variable
+
+
+.. _Grad:
+
+------
+Grad()
+------
+
+
+**Gradient**
+
+``Grad(2D or 3D parts, scalar or vector(Magnitude is used))``
+
+
+Computes a vector variable whose value is the gradient :math:`GRA{D}_{f}`.
+This vector variable is defined as:
+
+:math:`GRA{D}_{f}=\frac{\partial f}{\partial x}\widehat{i}+\frac{\partial f}{\partial y}\widehat{j}+\frac{\partial f}{\partial z}\widehat{k}`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`f`
+ - any scalar variable (or the magnitude of the specified vector)
+ * - :math:`x,y,z`
+ - coordinate directions
+ * - :math:`i,j,k`
+ - unit vectors in coordinate directions
+
+
+.. admonition:: Algorithm: Gradient
+
+ If the variable is at the element, it is moved to the nodes. Each element
+ is then mapped to a normalized element and the Jacobian is calculated
+ for the transformation from the element to the normalized element.
+ Next, the inverse Jacobian is calculated for this transformation and used to
+ compute the Jacobian for the scalar variable. Therefore, the chain rule is used
+ with the inverse Jacobian of the transformation and the Jacobian of the scalar
+ variable to calculate the gradient for each node of each element. The
+ contributions of the gradient from all the elements are moved to all the nodes
+ using an unweighted average. Finally, if the original variable is per element,
+ the gradient is moved from the nodes to the elements using an unweighted
+ average.
+
+
+.. _GradTensor:
+
+------------
+GradTensor()
+------------
+
+
+**Gradient Tensor**
+
+``GradTensor(2D or 3D parts, vector)``
+
+
+Computes a tensor variable whose value is the gradient
+:math:`GRA{D}_{F}`. This tensor variable is defined as:
+
+:math:`GRA{D}_{F}=\frac{\partial F}{\partial x}\widehat{i}+\frac{\partial F}{\partial y}\widehat{j}+\frac{\partial F}{\partial z}\widehat{k}`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`F`
+ - any vector variable
+ * - :math:`x,y,z`
+ - coordinate directions
+ * - :math:`i,j,k`
+ - unit vectors in coordinate directions
+
+
+
+
+.. _HelicityDensity:
+
+-----------------
+HelicityDensity()
+-----------------
+
+
+**Helicity Density**
+
+``HelicityDensity(any parts, velocity)``
+
+
+Computes a scalar variable :math:`{H}_{d}` whose value is:
+
+:math:`{H}_{d}=V·\Omega`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`V`
+ - velocity
+ * - :math:`\Omega`
+ - vorticity
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - velocity
+ - vector variable
+
+
+
+.. _HelicityRelative:
+
+------------------
+HelicityRelative()
+------------------
+
+
+**Relative Helicity**
+
+``HelicityRelative(any parts, velocity)``
+
+
+Computes a scalar variable :math:`{H}_{r}` whose value is:
+
+:math:`{H}_{r}=\mathrm{cos}\varphi =\frac{V·\Omega }{\left|V\right|\left|\Omega \right|}`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`\varphi`
+ - angle between the velocity vector and the vorticity
+ vector
+
+ * - V
+ - velocity
+ * - Ω
+ - vorticity
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - velocity
+ - vector variable
+
+
+
+.. _HelicityRelFilter:
+
+-------------------
+HelicityRelFilter()
+-------------------
+
+
+**Filtered Relative Helicity**
+
+``HelicityRelFilter(any parts, velocity, freestream velocity magnitude)``
+
+
+Computes a scalar variable :math:`{H}_{rf}` whose value is:
+
+:math:`{H}_{rf}={H}_{r}` if :math:`\left|{H}_{d}\right|\ge filter`, or
+:math:`{H}_{rf}=0` if :math:`\left|{H}_{d}\right|<filter`, where the filter
+value is derived from the freestream velocity magnitude argument.
+
+**References**
+
+For more information, see these references:
+
+1. Haller, G., "An objective definition of a vortex," Journal of
+ Fluid Mechanics, 2005, vol. 525, pp. 1-26.
+2. Jeong, J. and Hussain, F., "On the identification of a
+ vortex," Journal of Fluid Mechanics, 1995, vol. 285, pp. 69-94.
+
+
+.. _Mach:
+
+------
+Mach()
+------
+
+
+**Mach Number**
+
+``Mach(any parts, density, total energy, velocity, ratio of specific heats)``
+
+
+Computes a scalar variable whose value is the Mach
+number :math:`M`. This scalar variable is defined as:
+
+:math:`M=\frac{u}{\sqrt{\frac{\gamma p}{\rho }}}=\frac{u}{c}`
+
+where:
+
+.. list-table::
+ :widths: 50 50
+
+ * - :math:`m`
+ - momentum
+ * - :math:`\rho`
+ - density
+ * - :math:`u`
+ - speed, computed from velocity input.
+ * - :math:`\gamma`
+ - ratio of specific heats (1.4 for air)
+ * - :math:`p`
+     - pressure (see the :ref:`Pres() <Pres>` function)
+ * - :math:`c`
+ - speed of sound
+
+
+For a description, see :ref:`Energy: Total Energy <EnergyT>`.
+
+.. list-table:: **Function arguments**
+ :widths: 50 50
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part variable, or constant number
+
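+A sketch, assuming a connected ``session`` and placeholder variable names:
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    # Mach number with gamma = 1.4 (air); the density, total energy, and
+    # velocity names are dataset-specific placeholders.
+    core.create_variable(
+        "mach", "Mach(plist, density, total_energy, velocity, 1.4)", sources=core.PARTS
+    )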
+
+
+.. _MakeScalElem:
+
+--------------
+MakeScalElem()
+--------------
+
+
+**Make Scalar at Elements**
+
+``MakeScalElem(any parts, constant number or constant or constant per part variable)``
+
+
+Assigns the specified constant value to each element,
+making a scalar variable.
+
+
+.. _MakeScalElemId:
+
+----------------
+MakeScalElemId()
+----------------
+
+
+**Make Scalar from Element ID**
+
+``MakeScalElemId(any parts)``
+
+
+Creates a scalar variable set to the element ID of the part. If the
+element ID does not exist or is undefined, the scalar value is set to ``Undefined``.
+
+
+.. _MakeScalNode:
+
+--------------
+MakeScalNode()
+--------------
+
+
+**Make Scalar at Nodes**
+
+``MakeScalNode(any parts, constant number or constant or constant per part variable)``
+
+
+Assigns the specified constant value to each node,
+making a scalar variable.
+
+
+.. _MakeScalNodeId:
+
+----------------
+MakeScalNodeId()
+----------------
+
+
+**Make Scalar from Node ID**
+
+``MakeScalNodeId(any parts)``
+
+
+Creates a scalar variable set to the node ID of the part. If the node
+ID does not exist or is undefined, the scalar value is set to ``Undefined``.
+
+
+.. _MakeVect:
+
+----------
+MakeVect()
+----------
+
+
+**Make Vector**
+
+``MakeVect(any parts, scalar or zero, scalar or zero, scalar or zero)``
+
+
+Computes a vector variable formed from scalar variables.
+
+- The first scalar becomes the X component of the vector.
+- The second scalar becomes the Y component of the vector.
+- The third scalar becomes the Z component of the vector.
+
+A zero can be specified for some of the scalars, creating a 2D or 1D vector field.
+
+.. note::
+
+   To quickly make a vector, you can select the parts that you want to use,
+   select three scalars (*scalar_x*, *scalar_y*, and *scalar_z*) in the
+   variable list, and then right-click and choose **Make Vector** from the pull-down menu.
+   Next, choose whether to use all (or all available, if you have measured)
+   parts or your currently selected parts (if you have parts selected) to calculate
+   the vector. A vector variable is calculated and named using the
+ scalar names (which should be adequate). If the order of the variables or the
+ name of the vector cannot be definitively determined, then a GUI pops up with the
+ proposed components in a pull-down menu and a proposed name for the created vector
+ variable.
+
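+A sketch building a 2D vector field, assuming a connected ``session`` and
+hypothetical scalars ``u_vel`` and ``v_vel``:
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    # Two scalars become the X and Y components; zero fills the Z component.
+    core.create_variable("vel2d", "MakeVect(plist, u_vel, v_vel, 0)", sources=core.PARTS)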
+
+.. _MassedParticle:
+
+----------------
+MassedParticle()
+----------------
+
+
+**Massed Particle Scalar**
+
+``MassedParticle(massed particle trace parts)``
+
+
+This function creates a massed-particle per element scalar
+variable for each of the parent parts of the massed-particle traces. This per
+element variable is the mass of the particle multiplied by the number of
+times each element is exited by a massed-particle trace. For more information, see
+`Particle-Mass Scalar on Boundaries
+`_
+in the *Ansys EnSight User Manual*.
+
+
+.. _MassFluxAvg:
+
+-------------
+MassFluxAvg()
+-------------
+
+
+**Mass-Flux Average**
+
+``MassFluxAvg(any 1D or 2D parts, scalar, velocity, density [,Compute_Per_part])``
+
+
+Computes a constant or constant per part variable whose
+value is the mass flux average :math:`{b}_{avg}`. This function is
+defined as:
+
+:math:`{b}_{avg}=\frac{{\displaystyle \underset{A}{\int }\rho b\left(V·N\right)}dA}{{\displaystyle \underset{A}{\int }\rho \left(V·N\right)}dA}=\frac{\text{Mass Flux of Scalar}}{\text{Mass Flux}}=\frac{Flow(plist,b\rho V)}{Flow(plist,\rho V)}`
+
+where:
+
+.. list-table::
+ :widths: 10 90
+
+ * - :math:`b`
+ - any scalar variable, such as pressure, mach, or a
+ vector component
+
+ * - :math:`\rho`
+ - density (constant or scalar) variable
+ * - :math:`V`
+ - velocity (vector) variable
+ * - :math:`dA`
+ - area of some 2D domain
+ * - :math:`N`
+ - unit vector normal to :math:`dA`
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 10 90
+
+ * - scalar
+ - any scalar variable, such as pressure, mach, or a
+ vector component
+ * - velocity
+ - vector variable
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+
+
+
+
+.. _MatSpecies:
+
+------------
+MatSpecies()
+------------
+
+
+**MatSpecies**
+
+``MatSpecies(any model parts, any materials, any species, scalar per element)``
+
+
+Computes a scalar per element variable whose value
+:math:`\sigma` is the sum of all specified material and species combinations
+multiplied by the specified element variable on specified *model* parts with
+defined material species. This scalar variable is defined as:
+
+:math:`\sigma ={e}_{s}\Sigma m{s}_{ij}`
+
+where:
+
+.. list-table::
+ :widths: 10 90
+
+ * - :math:`{\text{e}}_{\text{s}}`
+ - scalar per element variable value or value
+ * - :math:`{\text{ms}}_{\text{ij}}`
+
+ - * :math:`{\text{m}}_{\text{i}}{\text{ * s}}_{\text{j}}`
+
+ * Product of the material fraction :math:`{\text{m}}_{\text{i}}` and its corresponding specie value :math:`{\text{s}}_{\text{j}}`
+
+       * 0, if specie :math:`{\text{s}}_{\text{j}}` does not exist for material :math:`{\text{m}}_{\text{i}}`
+
+ * :math:`{\text{m}}_{\text{i}}` if no species are specified
+
+
+This function only operates on model parts with
+predefined species. The specified materials can either be a list of materials
+or a single material value. The specified species can either be a list, a single
+specie, or no specie (that is, a null species list, which then computes an element
+value based on only material fraction contributions). The scalar per element
+value can either be an active variable or a scalar value (for example, the value 1
+gives pure material fraction and/or specie value extraction).
+
+Both material and specie names are selected from the
+context-sensitive **Active Variables** list, which changes to the **Materials** list and
+**Species** list for their respective prompts.
+
+
+.. _MatToScalar:
+
+-------------
+MatToScalar()
+-------------
+
+
+**MatToScalar**
+
+``MatToScalar(any model parts, a material)``
+
+
+Computes a scalar per element variable whose value ``s`` is
+the specified material's value ``m`` of the element on the specified parts.
+This function is defined as:
+
+``s = m``
+
+where:
+
+.. list-table::
+ :widths: 10 90
+
+ * - ``s``
+ - scalar per element variable value of each element
+ * - ``m``
+ - corresponding material fraction value of each element
+
+
+
+This function only operates on model parts with
+predefined materials that are given by sparse mixed material definitions. Only
+one material can be converted into one per element scalar variable at a time.
+The material cannot be the null material.
+
+For more information on materials, see these topics in the *Ansys EnSight User Manual*:
+
+- `Material Interface Parts
+ `_
+- `Utility Programs `_,
+ which supplies information on the EnSight Case Gold Writer. See both "MATERIALS"
+ sections for file formats and the example material dataset.
+
+
+.. _Max:
+
+-----
+Max()
+-----
+
+
+**Max**
+
+``Max(any parts, scalar or (vector, component) [,Compute_Per_part])``
+
+
+Computes a constant or constant per part variable whose
+value is the maximum value of the scalar (or vector component) in the parts
+selected. The component is not requested if a scalar is selected.
+
+.. list-table::
+ :widths: 10 90
+
+ * - [component]
+ - if vector variable, magnitude is the default; otherwise specify [x], [y], or [z]
+
+
+.. _Min:
+
+-----
+Min()
+-----
+
+
+**Min**
+
+``Min(any parts, scalar or (vector, component) [,Compute_Per_part])``
+
+
+Computes a constant or constant per part variable whose
+value is the minimum value of the scalar (or vector component) in the parts
+selected.
+
+.. list-table::
+ :widths: 10 90
+
+ * - [component]
+ - if vector variable, magnitude is the default; otherwise specify [x], [y], or [z]
+
+
+
+.. _Moment:
+
+--------
+Moment()
+--------
+
+
+**Moment**
+
+``Moment(any parts, vector, component [,Compute_Per_part])``
+
+
+Computes a constant or constant per part variable (the
+moment about the cursor tool location) whose value is the x, y, or z component of
+Moment :math:`M`.
+
+:math:`{M}_{x}=\Sigma \left({F}_{y}{d}_{z}-{F}_{z}{d}_{y}\right)`
+
+
+:math:`{M}_{y}=\Sigma \left({F}_{z}{d}_{x}-{F}_{x}{d}_{z}\right)`
+
+
+:math:`{M}_{z}=\Sigma \left({F}_{x}{d}_{y}-{F}_{y}{d}_{x}\right)`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`{F}_{i}`
+ - force vector component in direction *i* of
+ vector :math:`F\left(x,y,z\right)=\left(\text{Fx,Fy,Fz}\right)`
+ * - :math:`{d}_{i}`
+ - signed moment arm (the perpendicular distance from the line of action of the vector
+ component :math:`{F}_{i}` to the moment axis, which is the current cursor tool position)
+
+
+.. list-table::
+ :widths: 20 80
+
+ * - vector
+ - any vector variable
+ * - component
+ - [X], [Y], or [Z]
+
+
+
+.. _MomentVector:
+
+--------------
+MomentVector()
+--------------
+
+
+**MomentVector**
+
+``MomentVector(any parts, force vector)``
+
+
+Computes a nodal vector variable (the moment is computed
+about each point of the selected parts) whose components are the x, y, and z
+components of the moment :math:`M`.
+
+:math:`{M}_{x}=\Sigma \left({F}_{y}{d}_{z}-{F}_{z}{d}_{y}\right)`
+
+
+:math:`{M}_{y}=\Sigma \left({F}_{z}{d}_{x}-{F}_{x}{d}_{z}\right)`
+
+
+:math:`{M}_{z}=\Sigma \left({F}_{x}{d}_{y}-{F}_{y}{d}_{x}\right)`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`{F}_{i}`
+ - force vector component in direction *i* of vector :math:`F\left(x,y,z\right)=\left(\text{Fx,Fy,Fz}\right)`
+ * - :math:`{d}_{i}`
+ - signed moment arm (the perpendicular distance from the line
+ of action of the vector component :math:`{F}_{i}` to the moment axis (model point position))
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - force vector
+ - any vector variable (per node or per element)
+
+
+
+.. _Momentum:
+
+----------
+Momentum()
+----------
+
+
+**Momentum**
+
+``Momentum(any parts, velocity, density)``
+
+
+Computes a vector variable :math:`m`. This vector variable is defined as:
+
+:math:`m=\rho V`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`\rho`
+ - density
+ * - :math:`V`
+ - velocity
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - velocity
+ - vector variable
+ * - density
+ - scalar, constant, constant per part variable, or constant number
+
+
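+As a minimal PyEnSight sketch (assuming a connected ``session`` whose dataset
+already defines ``Velocity`` and ``Density`` variables; the new variable name is
+a placeholder):
+
+.. code-block:: python
+
+    # Illustrative only: evaluate Momentum() over all current parts.
+    core = session.ensight.objs.core
+    core.create_variable(
+        "momentum",                            # name for the new vector variable
+        "Momentum(plist, Velocity, Density)",  # calculator expression
+        sources=core.PARTS,                    # parts bound to 'plist'
+    )
+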
+
+.. _NodeCount:
+
+-----------
+NodeCount()
+-----------
+
+
+**Node Count**
+
+``NodeCount(any parts [,Compute_Per_part])``
+
+
+Produces a constant or constant per part variable
+containing the node count of the parts specified.
+
+
+.. _NodeToElem:
+
+------------
+NodeToElem()
+------------
+
+
+**Node to Element**
+
+``NodeToElem(any parts, node-based scalar or vector)``
+
+
+Averages a node-based variable to produce an element-based variable.
+
+For each: ``elem[j]->val += node[i]->val | elem[j]``
+
+Results: ``elem[j]->val /= elem[j]->num_cell_nodes``
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - j
+ - iterator on all part elements
+ * - i
+ - iterator on all part nodes
+
+Here, ``elem[j]`` denotes an element that is associated with (contains) ``node[i]``.
+
+
+.. note::
+ ``elem[j]`` must contain ``node[i]`` to contribute.
+
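+The averaging can be pictured with a small, self-contained NumPy sketch
+(illustrative only; EnSight performs this computation server-side):
+
+.. code-block:: python
+
+    import numpy as np
+
+    # Stand-in data: a nodal scalar on 4 nodes and 2 triangles.
+    node_vals = np.array([1.0, 2.0, 3.0, 4.0])
+    elems = np.array([[0, 1, 2], [1, 2, 3]])  # node indices per element
+
+    # elem[j]->val = sum of its nodes' values / number of nodes
+    elem_vals = node_vals[elems].sum(axis=1) / elems.shape[1]
+    print(elem_vals)  # [2. 3.]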
+
+
+.. _Normal:
+
+--------
+Normal()
+--------
+
+
+**Normal**
+
+``Normal(2D parts or 1D planar parts)``
+
+
+Computes a vector variable that is the normal to the
+surface at each element for 2D parts, or for 1D planar parts, lies normal to
+the 1D elements in the plane of the part.
+
+
+.. _NormC:
+
+-------
+NormC()
+-------
+
+
+**Normal Constraints**
+
+``NormC(2D or 3D parts, pressure, velocity, viscosity [,Compute_Per_part])``
+
+
+Computes a constant or constant per part variable whose
+value is the normal constraints :math:`NC`. It is defined as:
+
+:math:`NC={\displaystyle \underset{S}{\int }\left(-p+\mu \frac{\partial V}{\partial n}\widehat{n}\right)}dS`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`p`
+ - pressure
+ * - :math:`V`
+ - velocity
+ * - :math:`\mu`
+ - dynamic viscosity
+ * - :math:`n`
+ - direction of normal
+ * - :math:`S`
+ - border of a 2D or 3D domain
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - pressure
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - viscosity
+ - scalar, constant, or constant per part variable, or constant number
+
+
+
+.. _NormVect:
+
+----------
+NormVect()
+----------
+
+
+**Normalize Vector**
+
+``NormVect(any parts, vector)``
+
+
+Computes a vector variable whose value is the unit vector
+:math:`U` of the given vector :math:`V` .
+
+:math:`U=\frac{V\left({V}_{x},{V}_{y},{V}_{z}\right)}{\Vert V\Vert }`
+
+
+.. list-table::
+ :widths: 50 50
+
+ * - :math:`V`
+ - vector variable field
+ * - :math:`\Vert V\Vert`
+ - :math:`\sqrt{{V}_{x}^{2}+{V}_{y}^{2}+{V}_{z}^{2}}`
+
+
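+For example, a vector value :math:`V=\left(3,4,0\right)` has
+:math:`\Vert V\Vert =\sqrt{{3}^{2}+{4}^{2}}=5`, so the resulting unit vector is
+:math:`U=\left(0.6,0.8,0\right)`.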
+
+
+.. _OffsetField:
+
+-------------
+OffsetField()
+-------------
+
+
+**Offset Field**
+
+``OffsetField(2D or 3D parts)``
+
+
+Computes a scalar field of offset values. The values
+are in model distance units perpendicular to the boundary of the part.
+
+.. note::
+ An isosurface created in this field would mimic the part boundary, but at the offset
+ distance into the field.
+
+ This function is not supported for Server of Servers (SOS)
+ decomposition because SOS was designed to benefit from independent
+ server computations in parallel. The interdependent computational
+ mapping of the field results from the fluid part onto the boundary part
+ violates this assumption. In other words, because you cannot be sure that you
+ have all of the fluid information on one server for the mapping, this
+ function is disabled.
+
+
+
+.. _OffsetVar:
+
+-----------
+OffsetVar()
+-----------
+
+
+**Offset Variable**
+
+``OffsetVar(2D or 3D parts, scalar or vector, constant offset value)``
+
+
+Computes a scalar (or vector) variable defined as the
+offset value into the field of that variable that exists in the normal direction
+from the boundary of the selected part. This assigns near surface values of a
+variable to the surface of the selected parts from the neighboring 3D field,
+which is found automatically using the selected parts surfaces.
+
+In other words, this function gets the value of a
+variable from surrounding fields, a fixed distance from the surface of the
+selected parts, and assigns it to the surface of the selected part. For
+example, you might use this function to get the value of the velocity in the
+flow field a slight distance above your vehicle surface and assign that value to
+your vehicle surface.
+
+To use this function, select the parts in the part list
+that you want to use and enter both a variable and an offset. EnSight automatically
+detects the 3D field parts adjacent to the surfaces of your selected parts and reaches
+into these fields by your offset in the normal direction to obtain the variable value
+and then assign it to the surface of your selected parts.
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - constant offset value
+ - constant number (constant variable is not valid)
+
+.. note::
+ Choose a negative offset if your normals do not point into the field.
+
+ This function is not supported for Server of Servers (SOS)
+ decomposition because SOS was designed to benefit from independent server
+ computations in parallel. Recall that EnSight must find the field adjacent
+ to the surfaces of your selected parts. Because some of these fields might be
+ on other servers, creating dependencies that preclude independent
+ servers, this function is disabled.
+
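+For example (a PyEnSight sketch, assuming a connected ``session``, the surface
+parts of interest in ``core.PARTS``, and a ``Velocity`` variable; the names and
+the offset value are placeholders):
+
+.. code-block:: python
+
+    # Illustrative only: sample Velocity a fixed distance off the surface.
+    core = session.ensight.objs.core
+    core.create_variable(
+        "near_surface_vel",
+        "OffsetVar(plist, Velocity, 0.002)",  # 0.002 model units into the field
+        sources=core.PARTS,
+    )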
+
+.. _PartNumber:
+
+------------
+PartNumber()
+------------
+
+
+**Part Number**
+
+``PartNumber(any parts [,Compute_Per_part])``
+
+
+Computes a constant per part variable that is the GUI
+part number if the part is a server-side part. If computed as ``Compute_Per_case``,
+the value is the maximum part number.
+
+
+.. note::
+ Any client-side part (for example, a vector arrow, particle trace, or profile)
+ is assigned the ``Undefined`` value. Model parts are always server-side parts.
+
+
+
+.. _Pres:
+
+------
+Pres()
+------
+
+
+**Pressure**
+
+``Pres(any parts, density, total energy, velocity, ratio of specific heats)``
+
+
+Computes a scalar variable whose value is the pressure
+:math:`p`. This scalar variable is defined as:
+
+:math:`p=\left(\gamma -1\right)\rho \left(\frac{E}{\rho }-\frac{1}{2}{V}^{2}\right)`
+
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`m`
+ - momentum
+ * - :math:`E`
+ - total energy
+ * - :math:`\rho`
+ - density
+ * - :math:`V`
+ - velocity :math:`=m/\rho`
+ * - :math:`\gamma`
+ - ratio of specific heats (1.4 for air)
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part variable, or constant number
+
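+As a sketch (assuming a connected PyEnSight ``session`` and dataset variables
+named ``Density``, ``Energy``, and ``Velocity``, all placeholders; 1.4 is the
+ratio of specific heats for air):
+
+.. code-block:: python
+
+    # Illustrative only: compute pressure from conserved quantities.
+    core = session.ensight.objs.core
+    core.create_variable(
+        "pressure",
+        "Pres(plist, Density, Energy, Velocity, 1.4)",
+        sources=core.PARTS,
+    )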
+
+.. _PresCoef:
+
+----------
+PresCoef()
+----------
+
+
+**Pressure Coefficient**
+
+``PresCoef(any parts, density, total energy, velocity, ratio
+of specific heats, freestream density, freestream speed of sound, freestream
+velocity magnitude)``
+
+
+Computes a scalar variable that is the pressure coefficient
+:math:`{C}_{p}`. This scalar variable is defined as:
+
+:math:`{C}_{p}=\frac{p-{p}_{i}}{\frac{{\rho }_{i}{V}_{i}^{2}}{2}}`
+
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`p`
+ - pressure
+ * - :math:`{p}_{i}`
+ - freestream pressure
+ * - :math:`{\rho }_{i}`
+ - freestream density
+ * - :math:`{V}_{i}`
+ - freestream velocity magnitude
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, constant per part variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or constant number
+ * - freestream speed of sound
+ - constant or constant per part variable or constant number
+ * - freestream velocity magnitude
+ - constant or constant per part variable or constant number
+
+
+
+
+.. _PresDynam:
+
+-----------
+PresDynam()
+-----------
+
+
+**Dynamic Pressure**
+
+``PresDynam(any parts, density, velocity)``
+
+
+Computes a scalar variable that is the dynamic pressure :math:`q`. This
+scalar variable is defined as:
+
+:math:`q=\frac{\rho {V}^{2}}{2}`
+
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`\rho`
+ - density
+ * - :math:`V`
+ - velocity magnitude
+
+
+See :ref:`KinEn() `.
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - velocity
+ - vector variable
+
+
+.. _PresNorm:
+
+----------
+PresNorm()
+----------
+
+
+**Normalized Pressure**
+
+``PresNorm(any parts, density, total energy, velocity, ratio of specific
+heats, freestream density, freestream speed of sound)``
+
+
+Computes a scalar variable that is the normalized pressure :math:`{p}_{n}`.
+This scalar variable is defined as:
+
+:math:`{p}_{n}=p/{p}_{i}`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`{p}_{i}`
+ - freestream pressure = :math:`1/\gamma`
+ * - :math:`\gamma`
+ - ratio of specific heats
+ * - :math:`p`
+ - pressure
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or
+ constant number
+ * - freestream speed of sound
+ - constant or constant per part variable or
+ constant number
+
+
+.. _PresLogNorm:
+
+-------------
+PresLogNorm()
+-------------
+
+
+**Log of Normalized Pressure**
+
+``PresLogNorm(any parts, density, total energy, velocity,
+ratio of specific heats, freestream density, freestream speed of sound)``
+
+
+Computes a scalar variable that is the natural log of
+normalized pressure :math:`\mathrm{ln}{p}_{n}`. This scalar variable
+is defined as:
+
+:math:`\mathrm{ln}{p}_{n}=\mathrm{ln}\left(p/{p}_{i}\right)`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`{p}_{i}`
+ - freestream pressure = :math:`1/\gamma`
+ * - :math:`\gamma`
+ - ratio of specific heats
+ * - :math:`p`
+ - pressure
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or
+ constant number
+ * - freestream speed of sound
+ - constant or constant per part variable or
+ constant number
+
+
+.. _PresStag:
+
+----------
+PresStag()
+----------
+
+
+**Stagnation Pressure**
+
+``PresStag(any parts, density, total energy, velocity, ratio
+of specific heats)``
+
+
+Computes a scalar variable that is the stagnation
+pressure :math:`{p}_{o}`. This scalar variable is defined as:
+
+:math:`{p}_{o}=p{\left(1+\left(\frac{\gamma -1}{2}\right){M}^{2}\right)}^{\left(\gamma /\left(\gamma -1\right)\right)}`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`p`
+ - pressure
+ * - :math:`\gamma`
+ - ratio of specific heats
+ * - :math:`M`
+ - Mach number
+
+
+.. note::
+ In literature, stagnation pressure is used interchangeably with total pressure.
+ The stagnation pressure (or total pressure) uses two different equations, depending
+ upon the flow regime: compressible or incompressible. EnSight has chosen to define
+ stagnation pressure using the preceding compressible flow equation and total pressure
+ using the incompressible flow equation. See :ref:`PresT() `.
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number
+
+
+
+.. _PresNormStag:
+
+--------------
+PresNormStag()
+--------------
+
+
+**Normalized Stagnation Pressure**
+
+``PresNormStag(any parts, density, total energy, velocity,
+ratio of specific heats, freestream density, freestream speed of sound,
+freestream velocity magnitude)``
+
+
+Computes a scalar variable that is the normalized
+stagnation pressure :math:`{p}_{on}`. This scalar variable
+is defined as:
+
+:math:`{p}_{on}=\left({p}_{o}/{p}_{oi}\right)`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`{p}_{o}`
+ - stagnation pressure
+ * - :math:`{p}_{oi}`
+ - freestream stagnation pressure
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or
+ constant number
+ * - freestream speed of sound
+ - constant or constant per part variable or
+ constant number
+ * - freestream velocity magnitude
+ - constant or constant per part variable or
+ constant number
+
+
+
+.. _PresStagCoef:
+
+--------------
+PresStagCoef()
+--------------
+
+
+**Stagnation Pressure Coefficient**
+
+``PresStagCoef(any parts, density, total energy, velocity,
+ratio of specific heats, freestream density, freestream speed of sound,
+freestream velocity magnitude)``
+
+
+Computes a scalar variable that is the stagnation pressure
+coefficient :math:`{C}_{{p}_{o}}`. This scalar variable is
+defined as:
+
+:math:`{C}_{{p}_{o}}=\left({p}_{o}-{p}_{i}\right)/\left(\frac{{\rho }_{i}{V}^{2}}{2}\right)`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`{p}_{o}`
+ - stagnation pressure
+ * - :math:`{p}_{i}`
+ - freestream pressure = :math:`1/\gamma`
+ * - :math:`\gamma`
+ - ratio of specific heats
+ * - :math:`{\rho }_{i}`
+ - freestream density
+ * - :math:`V`
+ - velocity magnitude
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or
+ constant number
+ * - freestream speed of sound
+ - constant or constant per part variable or
+ constant number
+ * - freestream velocity magnitude
+ - constant or constant per part variable or
+ constant number
+
+
+
+.. _PresPitot:
+
+-----------
+PresPitot()
+-----------
+
+
+**Pitot Pressure**
+
+``PresPitot(any parts, density, total energy, velocity,
+ratio of specific heats)``
+
+
+Computes a scalar variable that is the pitot pressure
+:math:`{p}_{p}`. This scalar variable is defined as:
+
+:math:`\begin{array}{ll}{p}_{p}\hfill & =sp\hfill \\ s\hfill & =\frac{{\left(\left(\frac{\gamma +1}{2}\right)\left(\frac{{V}^{2}}{\gamma \left(\gamma -1\right)\left(\frac{E}{\rho }-\frac{{V}^{2}}{2}\right)}\right)\right)}^{\left(\gamma /\left(\gamma -1\right)\right)}}{{\left(\left(\frac{2\gamma }{\gamma +1}\right)\left(\frac{{V}^{2}}{\gamma \left(\gamma -1\right)\left(\frac{E}{\rho }-\frac{{V}^{2}}{2}\right)}\right)-\left(\frac{\gamma -1}{\gamma +1}\right)\right)}^{\left(\gamma /\left(\gamma -1\right)\right)}}\hfill \end{array}`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`\gamma`
+ - ratio of specific heats
+ * - :math:`E`
+ - total energy per unit volume
+ * - :math:`\rho`
+ - density
+ * - :math:`V`
+ - velocity magnitude
+ * - :math:`p`
+ - pressure
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number
+
+
+.. note::
+ For Mach numbers less than 1.0, the pitot pressure is the same as the
+ stagnation pressure. For Mach numbers greater than or equal to 1.0, the
+ pitot pressure is equivalent to the stagnation pressure behind a normal
+ shock.
+
+
+
+.. _PresPitotRatio:
+
+----------------
+PresPitotRatio()
+----------------
+
+
+**Pitot Pressure Ratio**
+
+``PresPitotRatio(any parts, density, total energy, velocity,
+ratio of specific heats, freestream density, freestream speed of sound)``
+
+
+Computes a scalar variable that is the pitot pressure ratio
+:math:`{p}_{pr}`. This scalar variable is defined as:
+
+:math:`{p}_{pr}=s\left(\gamma -1\right)\left(E-\frac{\rho {V}^{2}}{2}\right)`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`s`
+ - (as defined in :ref:`PresPitot() `)
+ * - :math:`\gamma`
+ - ratio of specific heats
+ * - :math:`E`
+ - total energy per unit volume
+ * - :math:`\rho`
+ - density
+ * - :math:`V`
+ - velocity magnitude
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or
+ constant number
+ * - freestream speed of sound
+ - constant or constant per part variable or
+ constant number
+
+
+
+.. _PresT:
+
+-------
+PresT()
+-------
+
+
+**Total Pressure**
+
+``PresT(any parts, pressure, velocity, density)``
+
+
+Computes a scalar variable whose value is the total
+pressure :math:`{p}_{t}`. This scalar variable is defined as:
+
+:math:`{p}_{t}=p+\rho \frac{{V}^{2}}{2}`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`\rho`
+ - density
+ * - :math:`V`
+ - velocity
+ * - :math:`p`
+ - pressure
+
+
+.. note::
+ In literature, total pressure is used interchangeably with stagnation
+ pressure. The total pressure (or stagnation pressure) uses two different
+ equations, depending upon the flow regime: incompressible or compressible.
+ EnSight has chosen to define total pressure using the preceding incompressible flow
+ equation and stagnation pressure using the compressible flow
+ equation. See :ref:`PresStag() `.
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - pressure
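+A PyEnSight sketch (assuming a connected ``session`` and a dataset material
+named ``steel``; the variable and material names are placeholders):
+
+.. code-block:: python
+
+    # Illustrative only: extract the fraction of one material per element.
+    core = session.ensight.objs.core
+    core.create_variable(
+        "steel_fraction",
+        "MatToScalar(plist, steel)",
+        sources=core.PARTS,
+    )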
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+
+
+
+.. _Q_criteria:
+
+------------
+Q_criteria()
+------------
+
+
+**Q_criteria**
+
+``Q_criteria(any parts, Grad_Vel_x, Grad_Vel_y, Grad_Vel_z)``
+
+
+Computes a scalar variable that is the second
+invariant, or Q-criterion, of the velocity gradient tensor. Vortex shells may
+then be visualized as an isosurface of Q-criterion > 0. The inputs to this
+function are calculated as follows:
+
+First calculate the three components of velocity:
+
+Vel_x = Velocity[X] = x-component of the velocity vector
+
+Vel_y = Velocity[Y] = y-component of the velocity vector
+
+Vel_z = Velocity[Z] = z-component of the velocity vector
+
+Then calculate the gradients using the intermediate variables:
+
+Grad_Vel_x = Grad(any parts, Vel_x) = gradient of x component Velocity
+
+Grad_Vel_y = Grad(any parts, Vel_y) = gradient of y component Velocity
+
+Grad_Vel_z = Grad(any parts, Vel_z) = gradient of z component Velocity
+
+with:
+
+Velocity = velocity vector variable
+
+.. note::
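+For example, the per-part extrema of a pressure field could be computed as
+follows (a PyEnSight sketch, assuming a connected ``session`` and a ``Pressure``
+scalar; the optional flag is written literally, as in the signatures above):
+
+.. code-block:: python
+
+    # Illustrative only: per-part minimum and maximum of a scalar.
+    core = session.ensight.objs.core
+    core.create_variable(
+        "p_max", "Max(plist, Pressure, Compute_Per_part)", sources=core.PARTS
+    )
+    core.create_variable(
+        "p_min", "Min(plist, Pressure, Compute_Per_part)", sources=core.PARTS
+    )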
+ A common mistake is to try to calculate the gradient
+ from the component of the velocity without using the intermediate ``Vel_x``,
+ ``Vel_y``, and ``Vel_z`` variables. For example, the following calculation is wrong.
+ It uses only the velocity magnitude.
+
+ Grad_Vel_x = Grad(any parts, Velocity[X])
+
+ This is a *User-Defined Math Function (UDMF)*, which you may modify and
+ recompile. For more information, see the *EnSight Interface Manual*.
+
+
+.. admonition:: Algorithm: Q_criteria
+
+ The three gradient vectors of the components of the
+ velocity vector constitute the velocity gradient tensor. Using the nine components
+ of this (asymmetric) velocity gradient tensor, ``Lv``, construct both the
+ symmetric, :math:`S`, and antisymmetric, :math:`\Omega`, parts of the
+ velocity gradient tensor; the :math:`Q` criterion is then established as follows.
+
+ :math:`\nabla \nu =S+\Omega`
+
+ where
+
+ :math:`S=\frac{1}{2}\left[\nabla \nu +{\left(\nabla \nu \right)}^{T}\right]`
+
+
+ :math:`\Omega =\frac{1}{2}\left[\nabla \nu -{\left(\nabla \nu \right)}^{T}\right]`
+
+ solving for :math:`Q` (hence the :math:`Q` criterion) when
+
+ :math:`Q=\frac{1}{2}\left[{\left|\Omega \right|}^{2}-{\left|S\right|}^{2}\right]`
+
+ which (in terms of EnSight variables) reduces to:
+
+ .. code-block::
+
+ Q = -0.5 * ( Grad_Vel_x[X] * Grad_Vel_x[X] + Grad_Vel_y[Y] * Grad_Vel_y[Y] + Grad_Vel_z[Z] * Grad_Vel_z[Z] +
+ 2 * (Grad_Vel_x[Y] * Grad_Vel_y[X] + Grad_Vel_x[Z] * Grad_Vel_z[X] + Grad_Vel_y[Z] * Grad_Vel_z[Y]))
+
+
+ Now, to find the vortices, create an isosurface where Q
+ is positive (Q > 0). This is because an isosurface with positive Q isolates
+ areas where the strength of the rotation overcomes the strain, thus making those
+ surfaces eligible as vortex envelopes.
+
+ See also the :ref:`Lambda2() ` function.
+
+**References**
+
+For more information, see these references:
+
+1. Dubief, Y. and Delcayre, F., "On coherent-vortex
+   identification in turbulence," Journal of Turbulence, (jot.iop.org) 1
+   (2000) 11, pp. 1-22.
+2. Haller, G., "An objective definition of a vortex," Journal of
+ Fluid Mechanics, 2005, vol. 525, pp. 1-26.
+3. Jeong, J. and Hussain, F., "On the identification of a
+ vortex," Journal of Fluid Mechanics, 1995, vol. 285, pp. 69-94.
+
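+Putting the preceding steps together (a PyEnSight sketch, assuming a connected
+``session`` and a ``Velocity`` vector variable; every intermediate variable name
+is a placeholder):
+
+.. code-block:: python
+
+    # Illustrative only: build the intermediate variables, then Q.
+    core = session.ensight.objs.core
+    parts = core.PARTS
+    core.create_variable("Vel_x", "Velocity[X]", sources=parts)
+    core.create_variable("Vel_y", "Velocity[Y]", sources=parts)
+    core.create_variable("Vel_z", "Velocity[Z]", sources=parts)
+    core.create_variable("Grad_Vel_x", "Grad(plist, Vel_x)", sources=parts)
+    core.create_variable("Grad_Vel_y", "Grad(plist, Vel_y)", sources=parts)
+    core.create_variable("Grad_Vel_z", "Grad(plist, Vel_z)", sources=parts)
+    core.create_variable(
+        "Q", "Q_criteria(plist, Grad_Vel_x, Grad_Vel_y, Grad_Vel_z)", sources=parts
+    )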
+
+.. _Radiograph_grid:
+
+-----------------
+Radiograph_grid()
+-----------------
+
+
+**Radiograph_grid**
+
+``Radiograph_grid(1D or 2D parts, dir X, dir Y, dir Z, num_points, variable, [component])``
+
+
+Computes a per element scalar variable on the designated
+1D or 2D parts that is a directional integration from these parts of a scalar
+variable or vector component through the model.
+
+Think of rays being cast from the center of each element of the 1D or 2D parents
+in the direction specified (and long enough to extend through the model). Along
+each ray, the desired variable is integrated and the integral value is assigned
+to the element from which the ray was cast. This function integrates the ray in
+a constant delta, grid-like fashion. You control the delta by the number of points that is
+specified in the integration direction.
+
+.. note::
+ While this function is not generally as time-consuming as the
+ :ref:`Radiograph_mesh() ` function (and you have
+ some resolution control with the ``num_points`` argument), it
+ can still take some computation time. You might want to set the **Abort server
+ operations** performance preference in EnSight to avoid being stuck in a computation
+ loop that exceeds your patience.
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - dir X
+ - constant number: Integration direction vector x component
+ * - dir Y
+ - constant number: Integration direction vector y component
+ * - dir Z
+ - constant number: Integration direction vector z component
+ * - num_points
+ - constant number: Number of points along ray in the integration direction
+ (The integration delta is the ray length divided by the number of
+ points.)
+ * - variable
+ - Variable that is integrated along the ray
+ * - component
+ - If the variable is a vector [X], [Y], [Z], or [] for magnitude
+
+
+.. note::
+ This function does not work properly for Server of Servers (SOS) mode.
+ Each portion only gives its local value.
+
+
+
+.. _Radiograph_mesh:
+
+-----------------
+Radiograph_mesh()
+-----------------
+
+
+**Radiograph_mesh**
+
+``Radiograph_mesh(1D or 2D parts, dir X, dir Y, dir Z, variable, [component])``
+
+
+Computes a per element scalar variable on the designated
+1D or 2D parts that is a directional integration from these parts of a scalar
+variable or vector component through the model. Think of rays being cast from
+the center of each element of the 1D or 2D parents in the direction specified
+(and long enough to extend through the model). Along each ray the desired
+variable is integrated and the integral value is assigned to the element from
+which the ray was cast. This function integrates the ray at each domain element
+face intersection.
+
+.. note::
+ Running this function can be a very time-consuming process.
+ You might want to set the **Abort server operations** performance preference
+ in EnSight to avoid being stuck in a computation loop that exceeds your patience.
+ The :ref:`Radiograph_grid() ` function is generally much quicker.
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - dir X
+ - constant number: Integration direction vector x component
+ * - dir Y
+ - constant number: Integration direction vector y component
+ * - dir Z
+ - constant number: Integration direction vector z component
+ * - variable
+ - Variable that is integrated along the ray
+ * - component
+ - If the variable is a vector [X], [Y], [Z], or [] for magnitude
+
+
+.. note::
+ This function does not work properly for Server of Servers (SOS) mode.
+ Each portion only gives its local value.
+
+
+.. _RectToCyl:
+
+-----------
+RectToCyl()
+-----------
+
+
+**Rectangular To Cylindrical Vector**
+
+``RectToCyl(any parts, vector)``
+
+
+Produces a vector variable with cylindrical components
+according to frame 0.
+
+(Intended for calculation purposes)
+
+x = radial component
+
+y = tangential component
+
+z = z component
+
+
+.. _ServerNumber:
+
+--------------
+ServerNumber()
+--------------
+
+
+**Server Number**
+
+``ServerNumber(any parts)``
+
+
+Produces a per-element scalar variable that is the
+server number containing the element. This function is useful for decomposed models using
+Server of Servers (SOS) mode so that the distribution can be visualized.
+
+
+.. _ShockPlot3d:
+
+-------------
+ShockPlot3d()
+-------------
+
+
+**Shock Plot3d**
+
+``ShockPlot3d(2D or 3D parts, density, total energy,
+velocity, ratio of specific heats)``
+
+
+Computes a scalar variable ShockPlot3d whose value is:
+
+:math:`ShockPlot3d=\frac{V}{c}\cdot \frac{grad(p)}{\left|grad(p)\right|}`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`V`
+ - velocity
+ * - :math:`c`
+ - speed of sound
+ * - :math:`p`
+ - pressure
+ * - :math:`grad(p)`
+ - gradient of pressure
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number.
+
+
+
+To compute candidate shock surfaces, create an
+isosurface of the calculated variable, ``shockplot3d = 1.0``.
+These shock regions can be verified by overlaying them with :math:`Mach\ge 1.0`.
+
+Also consider comparing with the shock region/surface
+feature visualization.
+
+
+.. _SmoothMesh:
+
+------------
+SmoothMesh()
+------------
+
+
+**Mesh Smoothing**
+
+``SmoothMesh(any 1D or 2D parts, number of passes, weight)``
+
+
+Performs a mesh "smoothing" operation. This function
+returns a vector variable which, when applied to the mesh as a displacement,
+results in a "smoother" mesh representation. The function computes new node
+locations resulting from a "normalization" of the mesh elements.
+
+The result of this function tends to be a mesh with equal-sized elements.
+The algorithm applies a form of convolution to the mesh edges repeatedly
+(number of passes) using a weighting factor to control how much change
+in position is allowed in each pass. In most cases, the weight is supplied
+as a constant, but the weight can be specified as a nodal scalar array.
+This allows for local control over the region of the mesh to be smoothed.
+The algorithm is fully threaded.
+
+
+.. note::
+ Nodes on the outer boundary of a mesh (or nodes bounded by ghost elements) are not
+ allowed to move. A good set of initial parameters might be 50 passes with a
+ weight constant of 0.05.
+
+
+For each pass, the following formula is applied:
+
+:math:`{x}_{i+1}={x}_{i}+w{\displaystyle \sum _{j=0}^{n}\left({x}_{j}-{x}_{i}\right)}`
+
+where
+
+:math:`x` = nodal position at pass (i)
+
+:math:`w` = nodal weight
+
+:math:`n` = edge connected nodes
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - number of passes
+ - number of smoothing passes to be applied:
+ constant
+
+ * - weight
+ - fraction of the length of a node's edges that a
+ node is allowed to move with each pass: nodal scalar variable or constant
+
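+A PyEnSight sketch using the suggested starting parameters (assuming a connected
+``session`` with the 1D or 2D parts of interest in ``core.PARTS``):
+
+.. code-block:: python
+
+    # Illustrative only: 50 passes with a weight of 0.05.
+    core = session.ensight.objs.core
+    core.create_variable(
+        "smooth_disp",
+        "SmoothMesh(plist, 50, 0.05)",
+        sources=core.PARTS,
+    )
+    # The resulting vector can then be applied to the parts as a
+    # displacement variable to visualize the smoothed mesh.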
+
+
+.. _SOSConstant:
+
+-------------
+SOSConstant()
+-------------
+
+
+**SOS Constant**
+
+``SOSConstant(any parts, variable, reduction operation (0-3))``
+
+
+.. note::
+ Generally this function should not be necessary. The :ref:`SOSConstant() `
+ function has been pulled into the server/SOS infrastructure. However, this function
+ remains for backward compatibility.
+
+Computes a constant variable whose value is the result of applying a reduction operation
+on that constant variable over the values on each of the servers. If there is no SOS involved
+or only a single server, the result is the same as the constant variable value on the single
+server.
+
+The selected part is used to select the case from which the constant variable is
+used. The constant variable itself is specified (from the dataset or a computed
+value). The operation to perform is selected as an integer from ``0`` to ``3``:
+
+- ``0``: A simple summation of the values from each of the servers.
+- ``1``: An average of the values from the servers. (The weight given to each server
+ in the average is the same, so this is essentially the sum operation divided by the number of servers.)
+- ``2``: The minimum of the values on each of the servers.
+- ``3``: The maximum of the values on each of the servers.
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - variable
+ - constant variable (from the data or computed)
+
+ * - reduction operation
+ - value from 0 to 3 that selects from the following operations:
+
+ 0=sum 1=average 2=minimum 3=maximum
+
+
+
+.. _SpaMean:
+
+---------
+SpaMean()
+---------
+
+
+**Spatial Mean**
+
+``SpaMean(any parts, scalar or (vector, component) [,Compute_Per_part])``
+
+
+Computes a constant or constant per part variable whose
+value is the volume (or area or length) weighted mean value of a scalar (or
+vector component) at the current time. This value can change with time. The
+component is not requested if a scalar variable is used.
+
+The spatial mean is computed by summing the product of
+the volume (3D, area 2D, or length 1D) of each element by the value of the
+scalar (or vector component) taken at the centroid of the element (nodal
+variables are interpolated at each cell centroid using cell shape blending or
+metric functions) for each element over the entire part. The final sum is then
+divided by the total volume (or area) of the part.
+
+:math:`\text{Spatial Mean}=\frac{{\displaystyle \sum {s}_{i}vo{l}_{i}}}{{\displaystyle \sum vo{l}_{i}}}`
+
+where:
+
+:math:`{s}_{i}` = scalar taken at centroid of element i
+
+:math:`vo{l}_{i}` = volume (or area or length) of element i
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - [component]
+ - if vector variable, magnitude is the default, or specify [x], [y], or [z]
+
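+For example, two elements with volumes 1 and 3 and centroid scalar values 2 and 4
+give a spatial mean of :math:`\left(2\cdot 1+4\cdot 3\right)/\left(1+3\right)=3.5`.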
+
+
+.. _SpaMeanWeighted:
+
+-----------------
+SpaMeanWeighted()
+-----------------
+
+
+**Spatial Mean Weighted**
+
+``SpaMeanWeighted(any parts, scalar or (vector, component), weight, component [,Compute_Per_part])``
+
+
+Computes a constant or constant per part variable whose value is
+weighted both by the volume (or area or length) and a weighting variable.
+This value can change with time. For both the variable
+itself and the weighting variable, the component is not requested if a scalar
+variable is used.
+
+The weighted spatial mean is computed by summing the
+product of the volume (3D, area 2D, or length if 1D) of each element by the
+value of the scalar (or vector component) taken at the centroid of the element
+(nodal variables are interpolated at each cell centroid using cell shape
+blending or metric functions) with the product of the weighting scalar/vector
+component taken at the centroid of the element (again, if a nodal variable,
+similarly evaluated at the element centroid) for each element over the entire
+part. The final sum is then divided by the total scalar/vector weighted (again
+if a nodal weighting variable is similarly evaluated at the element centroid)
+volume (or area or length) of the part as follows:
+
+:math:`\text{Spatial Mean Weighted}=\frac{{\displaystyle \sum {w}_{i}{s}_{i}vo{l}_{i}}}{{\displaystyle \sum {w}_{i}vo{l}_{i}}}`
+
+where:
+
+:math:`{s}_{i}` = scalar or vector component taken at centroid of element i
+
+:math:`{w}_{i}` = weighting scalar or vector component taken at centroid of element i
+
+:math:`vo{l}_{i}` = volume (or area or length) of element i
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - [component]
+ - if vector variable, magnitude is the default,
+ or specify [x], [y], or [z]
+
+
+
+.. _Speed:
+
+-------
+Speed()
+-------
+
+
+**Speed**
+
+``Speed(any parts, velocity)``
+
+
+Computes a scalar variable whose value is the speed. This function
+is defined as:
+
+:math:`speed=\sqrt{{u}^{2}+{v}^{2}+{w}^{2}}`
+
+where:
+
+:math:`u,v,w` = velocity components in the :math:`x,y,z` directions.
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - velocity
+ - vector variable
+
+
+
+.. _SonicSpeed:
+
+------------
+SonicSpeed()
+------------
+
+
+**Sonic Speed**
+
+``SonicSpeed(any parts, density, total energy, velocity, ratio of specific heats)``
+
+
+Computes a scalar variable :math:`c` whose value is:
+
+:math:`c=\sqrt{\frac{\gamma p}{\rho }}`
+
+where:
+
+:math:`\gamma` = ratio of specific heats
+
+:math:`\rho` = density
+
+:math:`p` = pressure
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part variable, or constant number
+
+
+
+.. _StatMoment:
+
+------------
+StatMoment()
+------------
+
+
+**Statistics Moments**
+
+``StatMoment(any parts, v, function [,Compute_Per_part])``
+
+
+Computes a constant or constant per part variable that is the
+sum, mean, variance, skew, or kurtosis, obtained by applying the selected statistical
+function over all of the nodes or elements of the selected parts, given the
+selected scalar or constant variable. Five functions are defined:
+
+:math:`sum={\displaystyle \sum _{i=1}^{N}{\nu }_{i}}`
+
+
+:math:`mean=\frac{1}{N}{\displaystyle \sum _{i=1}^{N}{\nu }_{i}}`
+
+
+:math:`\mathrm{var}=\frac{1}{N-1}{\displaystyle \sum _{i=1}^{N}{\left({\nu }_{i}-mean\right)}^{2}}`
+
+
+:math:`skew=\frac{1}{N}{\displaystyle \sum _{i=1}^{N}{\left(\frac{{\nu }_{i}-mean}{\sqrt{\mathrm{var}}}\right)}^{3}}`
+
+
+:math:`kurt=\left\{\frac{1}{N}{\displaystyle \sum _{i=1}^{N}{\left(\frac{{\nu }_{i}-mean}{\sqrt{\mathrm{var}}}\right)}^{4}}\right\}-3`
+
+- The ``mean`` is the simple average (unweighted, arithmetic mean) of all the
+ samples.
+- The ``var`` is the variance, which is an indication of the spread of a
+ sample of numbers out from the mean. It is the square of the standard
+ deviation.
+- The ``skew`` is an indication of the degree of asymmetry about the mean. A
+ positive skew indicates an asymmetric tail toward more positive values.
+ A negative skew indicates an asymmetric tail toward more negative values.
+- The ``kurt`` is the kurtosis, which is an indication of the peakedness or
+ flatness of the distribution compared to a normal distribution. A positive
+ kurtosis indicates more peakedness. A negative kurtosis indicates a flatter
+ distribution.
+
+If the variable (``v``) is a constant, the operation is computed as if the
+variable was a nodal variable with the given value at all nodes. If the
+computation is over an element variable, the size of the element is not
+used in the computation. If volume or area weighting is desired, the
+variable must be pre-weighted.
+
+.. note::
+
+ ``StatMoment(plist,scalar,0)`` should be used in place of the
+ example user-defined math function, ``udmf_sum``, because the
+ :ref:`StatMoment() ` function is threaded and
+ properly handles ghost cells. However, for parallel (SOS) computation,
+ because nodes at the interface are shared among servers, the values at
+ the interface nodes are used in computations multiple times. Therefore,
+ the ``StatMoment`` value computed from a nodal variable using SOS deviates
+ from the true value calculated using only one server. Elemental variables
+ do not suffer from this issue as ghost elements are handled properly and
+ elements are not shared among servers.
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - v
+ - scalar variable, constant or constant per part variable,
+ or constant number
+ * - function
+ - constant number selecting the moment to compute
+ (0=sum, 1=mean, 2=variance, 3=skewness, 4=kurtosis)
+
+
+**References**
+
+For more information, see these references:
+
+1. Press et al., *Numerical Recipes*, Cambridge Univ. Press, 1997, pp. 454-459.
+
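+For example (a PyEnSight sketch, assuming a connected ``session`` and a
+``Pressure`` scalar; the function code ``1`` selects the mean):
+
+.. code-block:: python
+
+    # Illustrative only: unweighted mean of a scalar over the selected parts.
+    core = session.ensight.objs.core
+    core.create_variable(
+        "p_mean",
+        "StatMoment(plist, Pressure, 1)",
+        sources=core.PARTS,
+    )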
+
+
+.. _StatRegSpa:
+
+------------
+StatRegSpa()
+------------
+
+
+**Statistics Regression**
+
+``StatRegSpa(any parts, y, x0, x1, x2, x3, x4, weight)``
+
+
+Performs classical multivariate linear regression,
+predicting ``y = f(x0,x1,x2,x3,x4)``. The regression is performed at the current
+timestep using all of the nodes/elements of the selected parts. At each
+node/element, the input values y, x0, x1, x2, x3, and x4 and the weight are evaluated
+and added as an observation to the regression with the supplied weight (in the
+range [0.0-1.0]). If the model does not require five inputs, any of them can be
+specified as the constant number ``0.0`` to remove it. If the constant ``1.0`` is
+supplied as an input, an intercept is computed. You should avoid
+collinearity in the inputs (which is especially easy to introduce when supplying
+constants as regressors).
+
+For example, to model simple linearity (``y = Ax0 + B``), the function
+parameters would be ``StatRegSpa(plist, yvar, xvar, 1., 0., 0., 0., 1.)``. This
+example specifies that all observations are to be weighted the same.
+If weighting by element volume is desired, compute a field variable of
+element volume, normalized by the largest individual element volume, and pass
+that variable as the weight. The function returns a scalar constant whose value
+is the R-squared value for the regression.
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - y
+ - scalar, constant, or constant per part
+ variable or constant number
+
+ * - x0, x1, x2, x3, x4
+ - scalar, constant, or constant per part
+ variable or constant number
+
+ * - weight
+ - scalar, constant, or constant per part
+ variable or constant number
+
+
+For a full set of estimated values and statistical
+diagnostic output, see: :ref:`StatRegVal1() ` and
+:ref:`StatRegVal2() `.
+
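+The simple linear model above, followed by coefficient retrieval, might look
+like this (a PyEnSight sketch, assuming a connected ``session`` and scalars
+``yvar`` and ``xvar``; all variable names are placeholders):
+
+.. code-block:: python
+
+    # Illustrative only: fit y = A*x0 + B, then pull out the coefficients.
+    core = session.ensight.objs.core
+    parts = core.PARTS
+    # The value of 'fit' is the R-squared of the regression.
+    core.create_variable(
+        "fit", "StatRegSpa(plist, yvar, xvar, 1., 0., 0., 0., 1.)", sources=parts
+    )
+    # Estimated coefficient (function=0) for x0 (selection=0): the slope A.
+    core.create_variable("slope_A", "StatRegVal2(plist, fit, 0, 0)", sources=parts)
+    # Estimated coefficient for x1 (the constant 1. column): the intercept B.
+    core.create_variable("intercept_B", "StatRegVal2(plist, fit, 0, 1)", sources=parts)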
+
+
+.. _StatRegVal1:
+
+-------------
+StatRegVal1()
+-------------
+
+
+**Statistics Regression Info**
+
+``StatRegVal1(any parts, regression_variable, function)``
+
+
+This function returns basic statistical diagnostics for a regression computed
+using ``StatRegSpa()``. The function is passed the output variable of a previously
+computed ``StatRegSpa()`` and the function number of a specific statistical quantity
+to return. The values include the standard sum of squares values for the regression
+as well as the R-squared value.
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - regression_variable
+ - a scalar variable which is the output of an earlier ``StatRegSpa()`` function
+
+ * - function
+ - the statistical quantity to return (0=sum of
+ squares error, 1=sum of squares total, 2=sum of squares model, 3=R-squared)
+
+
+See also the :ref:`StatRegSpa() ` and :ref:`StatRegVal2() `
+functions.
+
+
+.. _StatRegVal2:
+
+-------------
+StatRegVal2()
+-------------
+
+
+**Statistics Regression Info**
+
+``StatRegVal2(any parts, regression_variable, function, selection)``
+
+
+This function returns statistical diagnostics specific to individual input coefficients
+for a regression computed using the ``StatRegSpa()`` function. The ``StatRegVal2()``
+function is passed the output variable previously computed by the ``StatRegSpa`` function,
+the function number of the specific statistical quantity to return, and the coefficient
+selected. The values include the sum of squares and partial sum of squares for the individual
+coefficients as well as the estimated coefficient itself and its standard error.
+
+
+.. list-table:: **Function arguments**
+ :widths: 45 55
+
+ * - regression_variable
+ - scalar variable that is the output of an earlier ``StatRegSpa()`` function
+
+ * - function
+ - statistical quantity to return
+
+ 0 = estimated coefficient
+ 1 = sum of squares for the variable
+ 2 = partial sum of squares for the variable
+ 3 = standard error for the coefficient
+
+ * - selection
+ - constant or constant per part variable or
+ constant number that selects the specific coefficient for which to
+ retrieve the statistical quantity (0 = x0, 1 = x1, 2 = x2, 3 = x3,
+ 4 = x4)
+
+See also the :ref:`StatRegSpa() ` and :ref:`StatRegVal1() `
+functions.
+
+
+.. _sumPerPart:
+
+------------
+sumPerPart()
+------------
+
+
+**sumPerPart**
+
+``sumPerPart(plist, scalar, result_type)``
+
+
+Sums scalar values of each part as a constant per part or a constant per case
+value.
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - plist
+ - any parts
+
+ * - scalar
+ - scalar variable
+
+ * - result_type
+ - ``Per case`` or ``Per part``
+
+
+.. _sumPerPartArg:
+
+---------------
+sumPerPartArg()
+---------------
+
+
+**sumPerPartArg**
+
+``sumPerPartArg(part, ConstantPerPart, result_type)``
+
+
+Sums the constant per part value of each part into a case constant
+value.
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - part
+ - any part
+
+ * - ConstantPerPart
+ - constant per part variable
+
+ * - result_type
+ - ``Per case`` or ``Per part``
+
+
+.. _Swirl:
+
+-------
+Swirl()
+-------
+
+**Swirl**
+
+``Swirl(any parts, density, velocity)``
+
+
+Computes a scalar variable ``swirl`` whose value is:
+
+:math:`swirl=\frac{\Omega \cdot V}{\rho {V}^{2}}`
+
+where:
+
+.. list-table::
+ :widths: 20 80
+
+ * - :math:`\Omega`
+ - vorticity
+
+ * - :math:`\rho`
+ - density
+
+ * - :math:`V`
+ - velocity
+
+
+.. list-table:: **Function arguments**
+ :widths: 20 80
+
+ * - density
+ - scalar, constant, or constant per part variable, or constant number
+
+ * - velocity
+ - vector variable
+
+
+.. _Temperature:
+
+-------------
+Temperature()
+-------------
+
+
+**Temperature**
+
+``Temperature(any parts, density, total energy, velocity, ratio of specific heats, gas constant)``
+
+
+Computes a scalar variable whose value is the
+temperature :math:`T`. This scalar variable is defined as:
+
+:math:`T=\frac{\left(\gamma -1\right)}{R}\left(\frac{E}{\rho }-\frac{1}{2}{V}^{2}\right)`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`m`
+ - momentum
+
+ * - :math:`E`
+ - total energy per unit volume
+
+ * - :math:`\rho`
+ - density
+
+ * - :math:`V`
+ - velocity = :math:`m/\rho`
+
+ * - :math:`\gamma`
+ - ratio of specific heats (1.4 for air)
+
+ * - :math:`R`
+ - gas constant
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - gas constant
+ - constant or constant per part variable or
+ constant number
+
+
+
+.. _TemperNorm:
+
+------------
+TemperNorm()
+------------
+
+
+**Normalized Temperature**
+
+``TemperNorm(any parts, density, total energy, velocity,
+ratio of specific heats, freestream density, freestream speed of sound, gas
+constant)``
+
+
+Computes a scalar variable that is the normalized temperature :math:`{T}_{n}`.
+This scalar variable is defined as:
+
+:math:`{T}_{n}=\frac{T}{{T}_{i}}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`T`
+ - temperature
+ * - :math:`{T}_{i}`
+ - freestream temperature
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or
+ constant number
+ * - freestream speed of sound
+ - constant or constant per part variable or
+ constant number
+ * - gas constant
+ - constant or constant per part variable or
+ constant number
+
+
+
+.. _TemperLogNorm:
+
+---------------
+TemperLogNorm()
+---------------
+
+
+**Log of Normalized Temperature**
+
+``TemperLogNorm(any parts, density, total energy, velocity,
+ratio of specific heats, freestream density, freestream speed of sound, gas
+constant)``
+
+
+Computes a scalar variable that is the natural log of
+the normalized temperature :math:`\mathrm{ln}{T}_{n}`.
+This scalar variable is defined as:
+
+:math:`\mathrm{ln}{T}_{n}=\mathrm{ln}\left(T/{T}_{i}\right)`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`T`
+ - temperature
+ * - :math:`{T}_{i}`
+ - freestream temperature
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or
+ constant number
+ * - freestream speed of sound
+ - constant or constant per part variable or
+ constant number
+ * - gas constant
+ - constant or constant per part variable or
+ constant number
+
+
+
+
+.. _TemperStag:
+
+------------
+TemperStag()
+------------
+
+
+**Stagnation Temperature**
+
+``TemperStag(any parts, density, total energy, velocity,
+ratio of specific heats, gas constant)``
+
+
+Computes a scalar variable that is the stagnation
+temperature :math:`{T}_{o}`. This scalar variable is defined as:
+
+:math:`{T}_{o}=T\left(1+\left(\frac{\gamma -1}{2}\right){M}^{2}\right)`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`T`
+ - temperature
+ * - :math:`\gamma`
+ - ratio of specific heats
+ * - :math:`M`
+ - Mach number
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - gas constant
+ - constant or constant per part variable or
+ constant number
+
+
+
+
+.. _TemperNormStag:
+
+----------------
+TemperNormStag()
+----------------
+
+
+**Normalized Stagnation Temperature**
+
+``TemperNormStag(any parts, density, total energy, velocity,
+ratio of specific heats, freestream density, freestream speed of sound,
+freestream velocity magnitude, gas constant)``
+
+
+Computes a scalar variable that is the normalized
+stagnation temperature :math:`{T}_{on}`. This function
+is defined as:
+
+:math:`{T}_{on}={T}_{o}/{T}_{oi}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`{T}_{o}`
+ - stagnation temperature
+ * - :math:`{T}_{oi}`
+ - freestream stagnation temperature
+
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - total energy
+ - scalar variable
+ * - velocity
+ - vector variable
+ * - ratio of specific heats
+ - scalar, constant, or constant per part
+ variable, or constant number
+ * - freestream density
+ - constant or constant per part variable or
+ constant number
+ * - freestream speed of sound
+ - constant or constant per part variable or
+ constant number
+ * - freestream velocity magnitude
+ - constant or constant per part variable or
+ constant number
+ * - gas constant
+ - constant or constant per part variable or
+ constant number
+
+
+
+
+
+.. _TempMean:
+
+----------
+TempMean()
+----------
+
+
+**Temporal Mean**
+
+``TempMean(any model parts, scalar,
+vector, or constant, timestep1, timestep2)``
+
+
+Computes a scalar, vector, or constant variable, depending on which
+type was selected, whose value is the mean value of the selected variable over
+the interval from timestep 1 to timestep 2. Therefore, the resultant variable is
+independent of time. The temporal mean is the discrete integral of the variable
+over time (using the *Trapezoidal Rule*) divided by the total
+time interval. Because any derived parts may vary in size over time, this
+function is only allowed on model parts. Model parts with changing connectivity
+are also not allowed.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - timestep1
+ - constant number
+ * - timestep2
+ - constant number
+
+
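+For example (a PyEnSight sketch, assuming a connected ``session``, a transient
+dataset with a ``Pressure`` scalar, and at least 11 timesteps; names are
+placeholders):
+
+.. code-block:: python
+
+    # Illustrative only: time-averaged pressure over timesteps 0 through 10.
+    core = session.ensight.objs.core
+    core.create_variable(
+        "p_tavg",
+        "TempMean(plist, Pressure, 0, 10)",
+        sources=core.PARTS,
+    )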
+
+
+.. _TempMinMaxField:
+
+-----------------
+TempMinMaxField()
+-----------------
+
+
+**Temporal Minmax Field**
+
+``TempMinMaxField(any model parts, scalar or vector,
+timestep1, timestep2, 0 or 1)``
+
+
+Computes a scalar or vector variable, depending on which
+type was selected, whose value is the minimum or maximum at each location (node
+or element) of a scalar or vector variable over the interval from timestep1 to
+timestep2. Therefore, the resultant scalar or vector is independent of time. If
+the input variable is a vector, the maximum or minimum is the maximum or minimum
+of each component of the vector. Because any derived parts can vary in size over time,
+this function is only allowed on model parts. Model parts with changing
+connectivity are also not allowed.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - timestep1
+ - constant number
+ * - timestep2
+ - constant number
+ * - 0 or 1
+ - constant number (0 = compute minimum, 1 = compute maximum)
+
+
+
+
+.. _TensorComponent:
+
+-----------------
+TensorComponent()
+-----------------
+
+
+**Tensor Component**
+
+``TensorComponent(any parts, tensor, tensor row(1-3), tensor
+col(1-3))``
+
+
+Creates a scalar variable that is the specified row and
+column of a tensor variable.
+
+:math:`S={T}_{ij}`
+
+where:
+
+i = given row (1 to 3)
+
+j = given column (1 to 3)
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - tensor row
+ - constant number (1 to 3)
+ * - tensor col
+ - constant number (1 to 3)
+
+
+
+
+.. _TensorDeterminant:
+
+-------------------
+TensorDeterminant()
+-------------------
+
+
+**Tensor Determinant**
+
+``TensorDeterminant(any parts, Tensor or 3 Principals or 6
+Tensor Components)``
+
+
+Computes the determinant of a tensor variable, three
+principal scalar variables, or six tensor component scalar variables. This
+function requires either one or six entries beyond the parts, as indicated
+in the following examples.
+
+If computing from a tensor variable, a single tensor
+variable is needed.
+
+``TensorDeterminant(plist, Stress)``
+
+If computing from three principals, three scalar variables
+representing ``sigma_1``, ``sigma_2``, and ``sigma_3`` are needed. Additionally, you
+must enter a ``-1`` constant for the last three entries.
+
+``TensorDeterminant(plist, sigma_1, sigma_2, sigma_3, -1, -1, -1)``
+
+If computing from six tensor components, six scalar
+variables are needed. They must be the following variables in the
+order shown: ``t_11``, ``t_22``, ``t_33``, ``t_12``, ``t_13``,
+and ``t_23``.
+
+``TensorDeterminant(plist, t_11, t_22, t_33, t_12, t_13, t_23)``
+
+
+.. _TensorEigenvalue:
+
+------------------
+TensorEigenvalue()
+------------------
+
+
+**Tensor Eigenvalue**
+
+``TensorEigenvalue(any parts, tensor, which number(1-3))``
+
+
+Computes the eigenvalue of a tensor based on the number given (1-3).
+The first eigenvalue is always the largest, while the third eigenvalue
+is always the smallest.
+
+
+.. _TensorEigenvector:
+
+-------------------
+TensorEigenvector()
+-------------------
+
+
+**Tensor Eigenvector**
+
+``TensorEigenvector(any parts, tensor, which number(1-3))``
+
+
+Computes the eigenvector of a tensor based on the number given (1-3).
+Eigenvectors are ordered by their corresponding eigenvalues: the first
+corresponds to the largest eigenvalue and the third to the smallest.
+
+
+.. _TensorMake:
+
+------------
+TensorMake()
+------------
+
+
+**Tensor Make**
+
+``TensorMake(any parts, T11, T22, T33, T12, T13, T23)``
+
+
+Creates a tensor from six scalars.
+
+
+.. _TensorMakeAsym:
+
+----------------
+TensorMakeAsym()
+----------------
+
+
+**Tensor Make Asymmetric**
+
+``TensorMakeAsym(any parts, T11,T12,T13, T21,T22,T23, T31,T32,T33)``
+
+
+Creates a tensor from nine scalars.
+
+
+.. _TensorTresca:
+
+--------------
+TensorTresca()
+--------------
+
+
+**Tensor Tresca**
+
+``TensorTresca(any parts, Tensor or 3 Principals or 6 Tensor Components)``
+
+
+Computes Tresca stress/strain from a tensor variable,
+three principal scalar variables, or six tensor component scalar variables. This
+function requires either one or six entries beyond the parts, as indicated
+in the following examples.
+
+If computing from a tensor variable, a single tensor
+variable is needed.
+
+``TensorTresca(plist, Stress)``
+
+If computing from three principals, three scalar variables
+representing ``sigma_1``, ``sigma_2``, and ``sigma_3`` are needed. Additionally, you
+must enter a ``-1`` constant for the last three entries.
+
+``TensorTresca(plist, sigma_1, sigma_2, sigma_3, -1, -1, -1)``
+
+If computing from six tensor components, six scalar
+variables are needed. They must be the following variables in the
+order shown: ``t_11``, ``t_22``, ``t_33``, ``t_12``, ``t_13``,
+and ``t_23``.
+
+``TensorTresca(plist, t_11, t_22, t_33, t_12, t_13, t_23)``
+
+The basic equation follows. If needed, the
+principal stresses/strains are first computed from the tensor or its
+components.
+
+:math:`{\sigma }_{yp}=\left|{\sigma }_{1}-{\sigma }_{3}\right|`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`{\sigma }_{yp}`
+ - yield stress
+ * - :math:`{\sigma }_{1}`
+ - greatest principal stress/strain
+ * - :math:`{\sigma }_{3}`
+ - least principal stress/strain
+
+
+
+
+.. _TensorVonMises:
+
+----------------
+TensorVonMises()
+----------------
+
+
+**Tensor Von Mises**
+
+``TensorVonMises(any parts, Tensor or 3 Principals or 6 Tensor Components)``
+
+
+Computes Von Mises stress/strain from a tensor variable,
+three principal scalar variables, or six tensor component scalar variables. This
+function requires either one or six entries beyond the parts, as indicated
+in the following examples.
+
+If computing from a tensor variable, a single tensor
+variable is needed.
+
+``TensorVonMises(plist, Stress)``
+
+If computing from three principals, three scalar variables
+representing ``sigma_1``, ``sigma_2``, and ``sigma_3`` are needed. Additionally, you
+must enter a ``-1`` constant for the last three entries.
+
+``TensorVonMises(plist, sigma_1, sigma_2, sigma_3, -1, -1, -1)``
+
+If computing from six tensor components, six scalar
+variables are needed. They must be the following variables in the
+order shown: ``t_11``, ``t_22``, ``t_33``, ``t_12``, ``t_13``,
+and ``t_23``.
+
+``TensorVonMises(plist, t_11, t_22, t_33, t_12, t_13, t_23)``
+
+The basic equation follows. If needed, the
+principal stresses/strains are first computed from the tensor or its
+components.
+
+:math:`{\sigma }_{yp}=\sqrt{\frac{1}{2}\left({\left({\sigma }_{1}-{\sigma }_{2}\right)}^{2}+{\left({\sigma }_{2}-{\sigma }_{3}\right)}^{2}+{\left({\sigma }_{3}-{\sigma }_{1}\right)}^{2}\right)}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`{\sigma }_{yp}`
+ - yield stress
+ * - :math:`{\sigma }_{1}`
+ - greatest principal stress/strain
+ * - :math:`{\sigma }_{2}`
+ - middle principal stress/strain
+ * - :math:`{\sigma }_{3}`
+ - least principal stress/strain
+
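+As an illustrative sketch (assuming the six component scalars ``t_11`` through
+``t_23`` exist in the loaded dataset), the six-component form might look like
+this:
+
+.. code-block:: python
+
+    session.ensight.part.select_all()
+    session.ensight.variables.evaluate(
+        "VonMises = TensorVonMises(plist,t_11,t_22,t_33,t_12,t_13,t_23)"
+    )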
+
+.. _udmf_sum:
+
+----------
+udmf_sum()
+----------
+
+**udmf_sum**
+
+.. note::
+ The :ref:`StatMoment() <StatMoment>` function has replaced the
+ ``udmf_sum`` function. Use ``StatMoment(plist,scalar,0)`` instead.
+
+
+
+.. _VectorCylProjection:
+
+---------------------
+VectorCylProjection()
+---------------------
+
+
+**Vector Cyl Projection**
+
+``VectorCylProjection(any parts, vector, frame, axis)``
+
+
+Computes a new vector variable by projecting a vector
+onto a cylindrical coordinate system. A coordinate frame is used as the basis
+for the system and can be frame 0 (the center for the global coordinate system)
+or any other defined frame in any arbitrary orientation.
+
+The axial direction is defined to be the frame's Z axis. The radial direction
+is a vector from the Z axis to the position being computed. The Theta direction
+is then Cross(Z,R). The resulting new vector variable is in the direction of
+the chosen axis (Z, R, or Theta) with a magnitude computed by the dot product
+of the vector variable against the direction vector.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - vector
+ - model vector variable
+ * - frame
+ - frame number (0-based) with frame 0 being the
+ global reference.
+ * - axis
+ - radial (R), angular (Theta), or axial (frame Z
+ direction)
+
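+For example, a minimal sketch (assuming a vector variable named ``Velocity`` in
+the loaded dataset; the ``Theta`` axis token shown here is an assumption about
+the literal argument form) might compute the angular component about the global
+frame:
+
+.. code-block:: python
+
+    session.ensight.part.select_all()
+    # Frame 0 is the global frame; "Theta" selects the angular direction.
+    session.ensight.variables.evaluate("V_theta = VectorCylProjection(plist,Velocity,0,Theta)")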
+
+
+.. _VectorRectProjection:
+
+----------------------
+VectorRectProjection()
+----------------------
+
+
+**Vector Rect Projection**
+
+``VectorRectProjection(any parts, vector, frame, axis)``
+
+
+Computes a new vector variable by projecting a vector
+onto a rectangular coordinate system. A coordinate frame is used for the new
+rectangular system and can be frame 0 (the center for the global coordinate
+system) or any other defined frame in any arbitrary orientation. The resulting new
+vector variable is in the direction of the chosen axis (X, Y, or Z) with a magnitude
+computed by the dot product of the vector variable against the direction vector.
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - vector
+ - model vector variable
+ * - frame
+ - frame number (0-based) with frame 0 being the
+ global reference.
+ * - axis
+ - X, Y, or Z frame direction
+
+
+
+.. _Velo:
+
+------
+Velo()
+------
+
+
+**Velocity**
+
+``Velo(any parts, momentum, density)``
+
+
+Computes a vector variable whose value is the velocity ``V``.
+This vector variable is defined as:
+
+:math:`V=\frac{m}{\rho }`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`\rho`
+ - density
+ * - :math:`m`
+ - momentum
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - momentum
+ - vector variable
+ * - density
+ - scalar, constant, or constant per part
+ variable, or constant number
+
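+For instance, assuming a vector variable named ``Momentum`` is present, a
+sketch using a constant density (permitted by the argument table above) might
+be:
+
+.. code-block:: python
+
+    session.ensight.part.select_all()
+    # 1.225 is used here purely as an illustrative constant density value.
+    session.ensight.variables.evaluate("Velocity = Velo(plist,Momentum,1.225)")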
+
+
+.. _Vol:
+
+-----
+Vol()
+-----
+
+
+**Volume**
+
+``Vol(3D parts [,Compute_Per_part])``
+
+
+Computes a constant or constant per part variable whose
+value is the volume of 3D parts.
+
+.. note::
+ This function uses the coordinates of each element to calculate its volume.
+ If you want displacement to be included in the volume calculation, you must
+ turn on computational (server-side) displacement rather than visual-only
+ (client-side) displacement. This ensures that the displacement values are
+ applied to the coordinates on the server before each element size is
+ calculated and summed into the volume of the part.
+
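+A short sketch follows (assuming 3D parts are selected; the ``Compute_Per_part``
+keyword is assumed to be passed literally, as shown in the signature above):
+
+.. code-block:: python
+
+    session.ensight.part.select_all()
+    session.ensight.variables.evaluate("TotalVol = Vol(plist)")
+    session.ensight.variables.evaluate("PartVol = Vol(plist,Compute_Per_part)")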
+
+.. _Vort:
+
+------
+Vort()
+------
+
+
+**Vorticity**
+
+``Vort(any 2D or 3D parts, velocity)``
+
+
+Computes a vector variable that is the rotation of the
+flow in units of radians per second with components :math:`{\zeta }_{x},{\zeta }_{y},{\zeta }_{z}`.
+This vector variable is defined as:
+
+:math:`\begin{array}{ccc}{\zeta }_{x}=\frac{\partial w}{\partial y}-\frac{\partial v}{\partial z}& {\zeta }_{y}=\frac{\partial u}{\partial z}-\frac{\partial w}{\partial x}& {\zeta }_{z}=\frac{\partial v}{\partial x}-\frac{\partial u}{\partial y}\end{array}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - u,v,w
+ - velocity components in the X, Y, Z
+ directions
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - velocity
+ - vector variable
+
+
+
+.. _VortGamma:
+
+-----------
+VortGamma()
+-----------
+
+
+**Vorticity Gamma**
+
+``VortGamma(2D part clips, velocity, gamma function number k (1 or 2), proximity radius, proximity option)``
+
+
+Computes a dimensionless scalar variable on a 2D clip
+part whose value is the vorticity-gamma
+function, :math:`{\Gamma }_{k}\left(P\right)`, defined at each node
+(or element centroid for cell centered data), ``P``. This scalar variable
+is defined as follows:
+
+:math:`{\Gamma }_{k}\left(P\right)=\frac{1}{S}{\displaystyle \int \mathrm{sin}\left({\Theta }_{M}\right)dS}=\frac{1}{S}{\displaystyle \underset{\left(M\in S\right)}{\int }\left(\frac{\left(\stackrel{\rightharpoonup }{PM}\times {\stackrel{\rightharpoonup }{V}}_{M}\right)·\widehat{n}}{\Vert \stackrel{\rightharpoonup }{PM}\Vert ·\Vert {\stackrel{\rightharpoonup }{V}}_{M}\Vert }\right)dS}`
+
+where:
+
+.. list-table::
+ :widths: 30 70
+
+ * - :math:`{\Gamma }_{1}`
+ - Gamma function number k=1 is a (non-Galilean invariant)
+ vortex center approximation method resulting in "...a dimensionless scalar,
+ with :math:`{\Gamma }_{1}` bounded by 1. It can be shown that this bound is
+ reached at the location of the vortex centre if the vortex is
+ axis symmetrical. Therefore, this scalar function provides a way
+ to quantify the streamline topology of the flow in the vicinity
+ of P and the rotation sign of the vortex. ... Typically, near
+ the vortex centre, :math:`{\Gamma }_{1}` reaches values ranging from 0.9 to 1.0."
+ [ref.2, pp. 1424-1425].
+
+ * - :math:`{\Gamma }_{2}`
+ - Gamma function number k=2 is a (Galilean invariant) vortex
+ boundary approximation method resulting in a dimensionless
+ scalar, "... a local function depending only on ω and μ,
+ where ω is the rotation rate corresponding to the antisymmetrical
+ part of the velocity gradient at P and μ is the eigenvalue of the
+ symmetrical part of this tensor" [ref.2, p. 1425]. (See the
+ note following the function arguments.)
+
+ * - k
+ - Gamma function number 1 or 2 used to determine VM.
+
+ * - P
+ - Base node (or element centroid for per-element data) around
+ which the proximity area (or zone of influence) is being
+ considered.
+
+ * - S
+ - Proximity area (or zone of influence) surrounding P,
+ determined by a proximity radius measured from the base P and
+ the proximity option. The proximity option is used to determine
+ which set of elements to include in S as follows.
+
+ If the proximity option is 0, S includes all elements
+ with any nodes within the proximity radius.
+
+ If the proximity option is 1, S includes only elements with
+ every node within the proximity radius. Both options also
+ include all elements that contain P.
+
+ * - M
+ - Node (or element center) within S.
+
+ * - PM
+ - Vector from the base node P to M.
+
+ * - V(P)
+ - Velocity vector at P.
+
+ * - V(M)
+ - Velocity vector at each M.
+
+ * - :math:`V_M`
+ - If the gamma function number k=1, VM = V(M). If the gamma
+ function number k=2, VM = V(M) - V(P).
+
+
+ * - n
+ - Unit vector normal to the 2D plane parent clip
+ part.
+
+ * - :math:`θ_M`
+ - Angle between VM and PM. Because -1 < sin(:math:`θ_M`) < 1
+ (and n is a unit vector), then
+ -1 < :math:`{\Gamma }_{k}\left(P\right)` < 1.
+
+
+.. list-table:: **Function arguments**
+ :widths: 30 70
+
+ * - velocity
+ - vector variable
+ * - gamma function number
+ - single integer (k=1 or k=2) that determines
+ which value of VM to use.
+
+ A value of 1 is useful for finding vortex cores (centers).
+
+ A value of 2 is useful for finding vortex boundaries.
+
+ * - proximity radius
+ - Float value greater than or equal to 0.0 that is used to
+ determine the proximity area around each base node or element P over
+ which the vorticity gamma is calculated on the 2D part clip.
+
+ The larger the proximity radius, the more
+ nodes (or elements) that are used to calculate G and the slower the
+ calculation. A proximity radius less than or equal to 0.0
+ always uses a proximity area of only elements that contain P and is
+ the lower bound of this parameter, resulting in the smallest
+ proximity area around P (and the fastest calculation). A radius of
+ 0.0 is a good value for the first run.
+
+ As the proximity radius approaches the parent plane size,
+ this calculation approaches using every node (or element) in
+ the calculation for each node (or element), resulting in an
+ :math:`n^{2}` operation whose runtime may be measured in calendar time
+ rather than wristwatch time.
+
+ The radius should be large enough to sample
+ sufficient elements for a meaningful average, but small enough so
+ that the vortex result remains a local calculation reported at each
+ element. Again, a radius of 0.0 is a good value for the first run.
+ A radius equal to a small multiple of the element size is a good
+ second run.
+
+ * - proximity option
+ - 0 to include all cells with any nodes in the proximity area.
+
+ 1 to include only cells entirely located in the proximity area.
+
+ Use this option along with the radius to control the
+ number of nodes (or elements) used in the calculation for each node
+ (or element) P.
+
+ Consider using option 0 as the radius gets small
+ relative to element size and using option 1 as the radius is enlarged.
+ At a minimum, the proximity area always includes elements that
+ contain P.
+
+
+.. note::
+
+ Recall that ω is the rotation rate for the antisymmetrical part of
+ the velocity gradient and that μ is the eigenvalue of the symmetric part
+ of the tensor. The local character of the flow may be classified using
+ :math:`{\Gamma }_{2}` in the following manner (based on figure 4 in [ref.2, p. 1425],
+ which plots :math:`{\Gamma }_{2}` as a function of the ratio ω/μ):
+
+ ω/μ < 1: flow locally dominated by strain, :math:`{\Gamma }_{2}` < 2/π
+
+ ω/μ = 1: pure shear, :math:`{\Gamma }_{2}` = 2/π
+
+ ω/μ > 1: flow locally dominated by rotation, :math:`{\Gamma }_{2}` > 2/π
+
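+As a first-run sketch (assuming a 2D clip part is selected and a vector
+variable named ``Velocity`` exists; the argument values follow the guidance
+above):
+
+.. code-block:: python
+
+    # k=1 (vortex centers), proximity radius 0.0 (fastest), proximity option 0.
+    session.ensight.variables.evaluate("Gamma1 = VortGamma(plist,Velocity,1,0.0,0)")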
+
+**References**
+
+For more information, see these references:
+
+1. Jeong, J. and Hussain, F., "On the identification
+ of a vortex," Journal of Fluid Mechanics, 1995, vol. 285,
+ pp. 69-94.
+2. Laurent Graftieaux, Marc Michard, & Nathalie
+ Grosjean "Combining PIV, POD and vortex identification
+ algorithms for the study of unsteady turbulent swirling
+ flows", Institute Of Physics Publishing Ltd in UK,
+ Measurement Science & Technology, 12 (2001), pp. 1422-1429.
+3. PSA via Distene (personal communication).
+
.. vale on
\ No newline at end of file
diff --git a/doc/source/examples_source/00-basic/03-ptrace.py b/doc/source/examples_source/00-basic/03-ptrace.py
index 9ac2501fcd0..db0c505e93f 100644
--- a/doc/source/examples_source/00-basic/03-ptrace.py
+++ b/doc/source/examples_source/00-basic/03-ptrace.py
@@ -1,151 +1,151 @@
-"""
-.. _ptrace_basic_example:
-
-Particle Trace (Streamline/Pathline) usage
-==========================================
-
-Utilize EnSight Particle Trace (aka Streamline/Pathline) to visualize a
-vector through the domain.
-Create Streamline, animate, allow for dynamic change.
-
-"""
-
-###############################################################################
-# Start an EnSight session
-# ------------------------
-# Launch and connect to an instance of EnSight.
-# This example uses a local EnSight installation.
-
-from ansys.pyensight.core import LocalLauncher
-
-session = LocalLauncher().start()
-# Setup shortcuts for long winded calls.
-eocore = session.ensight.objs.core
-eonums = session.ensight.objs.enums
-eoutil = session.ensight.utils
-
-###############################################################################
-# Load a dataset
-# --------------
-# Load Shuttle data included in the EnSight installation and render
-#
-# .. image:: /_static/03_ptrace_0.png
-
-xyz_file = f"{session.cei_home}/ensight{session.cei_suffix}/data/plot3d/shuttle.xyz"
-q_file = f"{session.cei_home}/ensight{session.cei_suffix}/data/plot3d/shuttle.q"
-session.load_data(
- data_file=xyz_file,
- result_file=q_file,
- file_format="PLOT3D",
- representation="3D_feature_2D_full",
-)
-session.show("image", width=800, height=600)
-
-
-###############################################################################
-# Extract an IJK Range
-# --------------------
-# The PLOT3D reader only reads the volume by default. Extract a
-# particular IJK range for the surface of the shuttle
-
-session.ensight.data_partbuild.begin()
-session.ensight.case.select("Case 1")
-session.ensight.data_partbuild.data_type("structured")
-session.ensight.data_partbuild.group("OFF")
-session.ensight.data_partbuild.select_begin(1)
-session.ensight.data_partbuild.domain("all")
-session.ensight.data_partbuild.noderange_i(1, 53)
-session.ensight.data_partbuild.noderange_j(1, 63)
-session.ensight.data_partbuild.noderange_k(1, 1)
-session.ensight.data_partbuild.nodestep(1, 1, 1)
-session.ensight.data_partbuild.nodedelta(0, 0, 0)
-session.ensight.data_partbuild.description("Shuttle")
-session.ensight.data_partbuild.create()
-session.ensight.part.select_byname_begin("(CASE:Case 1)Shuttle")
-session.ensight.case.select("Case 1")
-session.ensight.data_partbuild.end()
-
-
-###############################################################################
-# Setup the View
-# --------------
-# Set View, Turn on Symmetry, turn off visibility of flow domain.
-
-session.ensight.view_transf.rotate(-109.084335, -1.64276719, 0)
-session.ensight.view_transf.rotate(-2.38553524, 115.462845, 0)
-session.ensight.view_transf.zoom(0.489253074)
-session.ensight.view_transf.look_at(2.4110589, 0.567389309, 0.241451085)
-session.ensight.view_transf.look_from(2.4110589, 0.567389309, 5.69335651)
-
-session.ensight.part.select_all()
-session.ensight.part.modify_begin()
-session.ensight.part.symmetry_type("mirror")
-session.ensight.part.symmetry_mirror_y("ON")
-session.ensight.part.modify_end()
-
-session.ensight.part.select_begin(1)
-session.ensight.part.modify_begin()
-session.ensight.part.visible("OFF")
-session.ensight.part.modify_end()
-
-
-###############################################################################
-# Create a Particle Trace using the Line Tool Specification
-# ---------------------------------------------------------
-# Using the 3D parts as the parent, with the line tool as the emission type
-# "Momentum" as the vector, and 50 points alone the line as emitter locations.
-#
-# .. image:: /_static/03_ptrace_1.png
-
-pt1 = [0.1245, 0.064366, -0.03438]
-pt2 = [1.0018, 0.399756, -0.03258]
-parent_parts = eoutil.parts.select_parts_by_dimension(3)
-npts = 50 # number of emitters
-vector_var = eocore.VARIABLES["Momentum"][0] # Vector variable to use
-strpart = eoutil.parts.create_particle_trace_from_line(
- "Streamline",
- vector_var,
- point1=pt1,
- point2=pt2,
- num_points=npts,
- source_parts=parent_parts,
- direction="+/-",
-)
-session.show("image", width=800, height=600)
-
-###############################################################################
-# Change Visual Attributes
-# ----------------------------------------------------------
-# Modify the attributes of the Streamline for visual clarity
-#
-# .. image:: /_static/03_ptrace_2.png
-
-strpart.REPRESENTATION = eonums.TRACE_TUBE
-strpart.WIDTHSCALEFACTOR = 0.012
-strpart.COLORBYPALETTE = "Momentum"
-session.show("image", width=800, height=600)
-
-###############################################################################
-# Animate the Streamlines
-# ----------------------------------------------------------
-# Turn OFF the streamlines (to see the animate under)
-# Turn ON the animate streamlines.
-# Change to Sphere representation, size, and adjust speed and length.
-#
-# .. image:: /_static/03_ptrace_3.png
-
-strpart.VISIBLE = False
-strpart.ANIMATE = True
-eocore.HEADTYPE = eonums.ATRACE_HEAD_SPHERE
-eocore.HEADSCALE = 0.03
-eocore.PARTICLETIME = 2.0
-eocore.DELTATIME = 0.065
-eocore.MULTIPLEPULSES = True
-session.show("image", width=800, height=600)
-
-###############################################################################
-# Close the session
-#
-
-# sphinx_gallery_thumbnail_path = '_static/03_ptrace_2.png'
-session.close()
+"""
+.. _ptrace_basic_example:
+
+Particle Trace (Streamline/Pathline) usage
+==========================================
+
+Utilize EnSight Particle Trace (aka Streamline/Pathline) to visualize a
+vector through the domain.
+Create Streamline, animate, allow for dynamic change.
+
+"""
+
+###############################################################################
+# Start an EnSight session
+# ------------------------
+# Launch and connect to an instance of EnSight.
+# This example uses a local EnSight installation.
+
+from ansys.pyensight.core import LocalLauncher
+
+session = LocalLauncher().start()
+# Set up shortcuts for long-winded calls.
+eocore = session.ensight.objs.core
+eonums = session.ensight.objs.enums
+eoutil = session.ensight.utils
+
+###############################################################################
+# Load a dataset
+# --------------
+# Load Shuttle data included in the EnSight installation and render it.
+#
+# .. image:: /_static/03_ptrace_0.png
+
+xyz_file = f"{session.cei_home}/ensight{session.cei_suffix}/data/plot3d/shuttle.xyz"
+q_file = f"{session.cei_home}/ensight{session.cei_suffix}/data/plot3d/shuttle.q"
+session.load_data(
+ data_file=xyz_file,
+ result_file=q_file,
+ file_format="PLOT3D",
+ representation="3D_feature_2D_full",
+)
+session.show("image", width=800, height=600)
+
+
+###############################################################################
+# Extract an IJK Range
+# --------------------
+# The PLOT3D reader only reads the volume by default. Extract a
+# particular IJK range for the surface of the shuttle
+
+session.ensight.data_partbuild.begin()
+session.ensight.case.select("Case 1")
+session.ensight.data_partbuild.data_type("structured")
+session.ensight.data_partbuild.group("OFF")
+session.ensight.data_partbuild.select_begin(1)
+session.ensight.data_partbuild.domain("all")
+session.ensight.data_partbuild.noderange_i(1, 53)
+session.ensight.data_partbuild.noderange_j(1, 63)
+session.ensight.data_partbuild.noderange_k(1, 1)
+session.ensight.data_partbuild.nodestep(1, 1, 1)
+session.ensight.data_partbuild.nodedelta(0, 0, 0)
+session.ensight.data_partbuild.description("Shuttle")
+session.ensight.data_partbuild.create()
+session.ensight.part.select_byname_begin("(CASE:Case 1)Shuttle")
+session.ensight.case.select("Case 1")
+session.ensight.data_partbuild.end()
+
+
+###############################################################################
+# Set up the view
+# ----------------
+# Set the view, turn on symmetry, and turn off visibility of the flow domain.
+
+session.ensight.view_transf.rotate(-109.084335, -1.64276719, 0)
+session.ensight.view_transf.rotate(-2.38553524, 115.462845, 0)
+session.ensight.view_transf.zoom(0.489253074)
+session.ensight.view_transf.look_at(2.4110589, 0.567389309, 0.241451085)
+session.ensight.view_transf.look_from(2.4110589, 0.567389309, 5.69335651)
+
+session.ensight.part.select_all()
+session.ensight.part.modify_begin()
+session.ensight.part.symmetry_type("mirror")
+session.ensight.part.symmetry_mirror_y("ON")
+session.ensight.part.modify_end()
+
+session.ensight.part.select_begin(1)
+session.ensight.part.modify_begin()
+session.ensight.part.visible("OFF")
+session.ensight.part.modify_end()
+
+
+###############################################################################
+# Create a Particle Trace using the Line Tool Specification
+# ---------------------------------------------------------
+# Using the 3D parts as the parent, with the line tool as the emission type,
+# "Momentum" as the vector, and 50 points along the line as emitter locations.
+#
+# .. image:: /_static/03_ptrace_1.png
+
+pt1 = [0.1245, 0.064366, -0.03438]
+pt2 = [1.0018, 0.399756, -0.03258]
+parent_parts = eoutil.parts.select_parts_by_dimension(3)
+npts = 50 # number of emitters
+vector_var = eocore.VARIABLES["Momentum"][0] # Vector variable to use
+strpart = eoutil.parts.create_particle_trace_from_line(
+ "Streamline",
+ vector_var,
+ point1=pt1,
+ point2=pt2,
+ num_points=npts,
+ source_parts=parent_parts,
+ direction="+/-",
+)
+session.show("image", width=800, height=600)
+
+###############################################################################
+# Change Visual Attributes
+# ----------------------------------------------------------
+# Modify the attributes of the Streamline for visual clarity
+#
+# .. image:: /_static/03_ptrace_2.png
+
+strpart.REPRESENTATION = eonums.TRACE_TUBE
+strpart.WIDTHSCALEFACTOR = 0.012
+strpart.COLORBYPALETTE = "Momentum"
+session.show("image", width=800, height=600)
+
+###############################################################################
+# Animate the Streamlines
+# ----------------------------------------------------------
+# Turn OFF the streamlines (to see the animation underneath).
+# Turn ON the animated streamlines.
+# Change to the sphere representation, set its size, and adjust the speed and length.
+#
+# .. image:: /_static/03_ptrace_3.png
+
+strpart.VISIBLE = False
+strpart.ANIMATE = True
+eocore.HEADTYPE = eonums.ATRACE_HEAD_SPHERE
+eocore.HEADSCALE = 0.03
+eocore.PARTICLETIME = 2.0
+eocore.DELTATIME = 0.065
+eocore.MULTIPLEPULSES = True
+session.show("image", width=800, height=600)
+
+###############################################################################
+# Close the session
+#
+
+# sphinx_gallery_thumbnail_path = '_static/03_ptrace_2.png'
+session.close()
diff --git a/doc/source/examples_source/25-intermediate/02-utils.py b/doc/source/examples_source/25-intermediate/02-utils.py
index c5cff5ddc9f..a32ac9019cb 100644
--- a/doc/source/examples_source/25-intermediate/02-utils.py
+++ b/doc/source/examples_source/25-intermediate/02-utils.py
@@ -1,193 +1,193 @@
-"""
-.. _ref_utils_example:
-
-EnSight Utilities
-=====================
-The PyEnSight ``utils`` modules have been designed to expose standard
-postprocessing operations via simplified APIs. This example shows how to
-use the ``utils`` to easily perform specific operations.
-
-"""
-
-###############################################################################
-# Start an EnSight session
-# ------------------------
-# Launch and connect to an instance of EnSight.
-# This example uses a local EnSight installation.
-
-from ansys.pyensight.core import LocalLauncher
-from ansys.pyensight.core.enscontext import EnsContext
-
-session = LocalLauncher().start()
-
-
-###############################################################################
-# Load the data
-# -------------
-# Use a remote session to load a simple time-varying dataset of
-# waterflow over a break.
-#
-# .. image:: /_static/02_utils_0.png
-
-session.load_example("waterbreak.ens")
-session.show("image", width=800, height=600)
-
-
-###############################################################################
-# Load the ``utils`` modules
-# --------------------------
-#
-# The ``utils`` modules are available as instances of ``ensight.utils``. To
-# provide a simple use case, this example casts them into new variables
-# with the same names.
-
-parts = session.ensight.utils.parts
-views = session.ensight.utils.views
-query = session.ensight.utils.query
-
-
-###############################################################################
-# Capture a context of the current state
-# --------------------------------------
-# Use the :func:`capture_context`
-# method to save an in-memory context, which is retrieved later in this example.
-# Also save the context to a file for use in a future PyEnSight session.
-
-init_state = session.capture_context()
-init_state.save("init_state.ctxz")
-
-
-###############################################################################
-# Change view direction and restore an in-memory context
-# ------------------------------------------------------
-# Save an isometric view along the direction vector (1,1,1) and a new in-memory
-# context. Save the view, naming it ``isometric``. Use the
-# :func:`select_parts_by_tag`
-# method to select all parts. (All parts are returned because no tags have been supplied
-# and because the dataset has no metadata for the parts).
-#
-# When no tags are supplied, all parts are selected.
-#
-# .. image:: /_static/02_utils_1.png
-#
-# All parts are hidden.
-#
-# .. image:: /_static/02_utils_2.png
-#
-# Restore the state, showing the isometric view once again.
-#
-# .. image:: /_static/02_utils_1.png
-
-views.set_view_direction(1, 1, 1, name="isometric")
-iso_state = session.capture_context()
-session.show("image", width=800, height=600)
-# Because no tags are supplied, all parts are selected
-
-# Hide the parts.
-parts.select_parts_by_tag().set_attr("VISIBLE", False)
-
-# Restore the state, showing the isometric view once again.
-
-session.show("image", width=800, height=600)
-session.restore_context(iso_state)
-session.show("image", width=800, height=600)
-
-
-###############################################################################
-# Create scoped name
-# ------------------
-# A scoped name provides for easily using EnSight submodules to generate
-# distance queries. PyEnSight supports the generation of context managers for
-# the PyEnSight modules. Its context manager features, along with the context
-# manager features in Python, can simplify the workflow.
-#
-# This code generates a query along a 1D part on the fly. It uses the ``Parts`` class to
-# select the parent part and the
-# :func:`select_parts_by_dimension`
-# method to select all 3D parts. Lastly, it saves a context for later use.
-#
-# The rendering view should look like this:
-#
-# .. image:: /_static/02_utils_3.png
-
-sn = session.ensight.utils.support.scoped_name
-zclip_state = None
-with sn(session.ensight) as ensight, sn(session.ensight.objs.core) as core:
- clip_default = core.DEFAULTPARTS[ensight.PART_CLIP_PLANE]
- clip = clip_default.createpart(name="XClip", sources=parts.select_parts_by_dimension(3))[0]
- attrs = []
- attrs.append(["MESHPLANE", 2]) # Z axis
- attrs.append(["TOOL", 9]) # XYZ Tool
- attrs.append(["VALUE", 0.55]) # Z value
- zclip = clip_default.createpart(name="ZClip", sources=clip)[0]
- query.create_distance(
- "zlip_query", query.DISTANCE_PART1D, [zclip], core.VARIABLES["p"][0], new_plotter=True
- )
- zclip_state = session.capture_context()
-session.show("image", width=800, height=600)
-
-
-###############################################################################
-# Restore a view
-# --------------
-# This code chances the model orientation, position, and zoom. It then restores
-# the isometric view. While restoring a context restores the orientation,
-# position, zoom level, and the objects available at the time that the context
-# was saved, restoring a view only restores the orientation and
-# position.
-#
-# The rendering view should look like this:
-#
-# .. image:: /_static/02_utils_4.png
-
-session.ensight.view_transf.rotate(-66.5934067, 1.71428561, 0)
-session.ensight.view_transf.rotate(18.0219765, -31.6363659, 0)
-session.ensight.view_transf.rotate(-4.83516455, 9.5064888, 0)
-session.ensight.view_transf.zoom(0.740957975)
-session.ensight.view_transf.zoom(0.792766333)
-session.ensight.view_transf.translate(0.0719177574, 0.0678303316, 0)
-session.ensight.view_transf.rotate(4.83516455, 3.42857122, 0)
-views.restore_view("isometric")
-session.show("image", width=800, height=600)
-
-
-###############################################################################
-# Create a temporal query
-# -----------------------
-# This code restores the distance query context and then generates a temporal
-# query. This query is applied to a specific XYZ point, querying the
-# ``"alpha1"`` variable. The XYZ point is set to the model centroid and computed
-# via the ``Views`` class. The data generated is then printed.
-#
-# The value returned should look like this:
-#
-# .. image:: /_static/02_utils_5.png
-
-session.restore_context(zclip_state)
-temp_query = query.create_temporal(
- "temporal_query",
- query.TEMPORAL_XYZ,
- parts.select_parts_by_dimension(3),
- "alpha1",
- xyz=views.compute_model_centroid(),
-)
-print(temp_query.QUERY_DATA)
-
-
-###############################################################################
-# Restore a context from disk
-# ---------------------------
-# This code shows how to restore a context previously saved on disk.
-# Because PyEnSight context files do not store the location of the dataset by
-# default, you must load the dataset before restoring the context.
-#
-# The rendering view should look like this.
-#
-# .. image:: /_static/02_utils_6.png
-
-ctx = EnsContext()
-ctx.load("init_state.ctxz")
-session.restore_context(ctx)
-session.show("image", width=800, height=600)
-
-# sphinx_gallery_thumbnail_path = '_static/02_utils_3.png'
+"""
+.. _ref_utils_example:
+
+EnSight Utilities
+=====================
+The PyEnSight ``utils`` modules have been designed to expose standard
+postprocessing operations via simplified APIs. This example shows how to
+use the ``utils`` to easily perform specific operations.
+
+"""
+
+###############################################################################
+# Start an EnSight session
+# ------------------------
+# Launch and connect to an instance of EnSight.
+# This example uses a local EnSight installation.
+
+from ansys.pyensight.core import LocalLauncher
+from ansys.pyensight.core.enscontext import EnsContext
+
+session = LocalLauncher().start()
+
+
+###############################################################################
+# Load the data
+# -------------
+# Use a remote session to load a simple time-varying dataset of
+# water flow over a break.
+#
+# .. image:: /_static/02_utils_0.png
+
+session.load_example("waterbreak.ens")
+session.show("image", width=800, height=600)
+
+
+###############################################################################
+# Load the ``utils`` modules
+# --------------------------
+#
+# The ``utils`` modules are available as instances of ``ensight.utils``. To
+# provide a simple use case, this example casts them into new variables
+# with the same names.
+
+parts = session.ensight.utils.parts
+views = session.ensight.utils.views
+query = session.ensight.utils.query
+
+
+###############################################################################
+# Capture a context of the current state
+# --------------------------------------
+# Use the :func:`capture_context`
+# method to save an in-memory context, which is retrieved later in this example.
+# Also save the context to a file for use in a future PyEnSight session.
+
+init_state = session.capture_context()
+init_state.save("init_state.ctxz")
+
+
+###############################################################################
+# Change view direction and restore an in-memory context
+# ------------------------------------------------------
+# Save an isometric view along the direction vector (1,1,1) and a new in-memory
+# context. Save the view, naming it ``isometric``. Use the
+# :func:`select_parts_by_tag`
+# method to select all parts. (All parts are returned because no tags have been supplied
+# and because the dataset has no metadata for the parts).
+#
+# When no tags are supplied, all parts are selected.
+#
+# .. image:: /_static/02_utils_1.png
+#
+# All parts are hidden.
+#
+# .. image:: /_static/02_utils_2.png
+#
+# Restore the state, showing the isometric view once again.
+#
+# .. image:: /_static/02_utils_1.png
+
+views.set_view_direction(1, 1, 1, name="isometric")
+iso_state = session.capture_context()
+session.show("image", width=800, height=600)
+# Because no tags are supplied, all parts are selected
+
+# Hide the parts.
+parts.select_parts_by_tag().set_attr("VISIBLE", False)
+
+# Restore the state, showing the isometric view once again.
+
+session.show("image", width=800, height=600)
+session.restore_context(iso_state)
+session.show("image", width=800, height=600)
+
+
+###############################################################################
+# Create scoped name
+# ------------------
+# A scoped name makes it easy to use EnSight submodules to generate
+# distance queries. PyEnSight supports the generation of context managers for
+# the PyEnSight modules. Its context manager features, along with the context
+# manager features in Python, can simplify the workflow.
+#
+# This code generates a query along a 1D part on the fly. It uses the ``Parts`` class to
+# select the parent part and the
+# :func:`select_parts_by_dimension`
+# method to select all 3D parts. Lastly, it saves a context for later use.
+#
+# The rendering view should look like this:
+#
+# .. image:: /_static/02_utils_3.png
+
+sn = session.ensight.utils.support.scoped_name
+zclip_state = None
+with sn(session.ensight) as ensight, sn(session.ensight.objs.core) as core:
+ clip_default = core.DEFAULTPARTS[ensight.PART_CLIP_PLANE]
+ clip = clip_default.createpart(name="XClip", sources=parts.select_parts_by_dimension(3))[0]
+ attrs = []
+ attrs.append(["MESHPLANE", 2])  # Z axis
+ attrs.append(["TOOL", 9])  # XYZ Tool
+ attrs.append(["VALUE", 0.55])  # Z value
+ # Apply the collected attributes so the new clip actually uses the Z plane
+ # at 0.55 (without this call, attrs is never used); setattrs() is the
+ # ENSOBJ bulk attribute-setting method.
+ clip_default.setattrs(attrs)
+ zclip = clip_default.createpart(name="ZClip", sources=clip)[0]
+ query.create_distance(
+ "zlip_query", query.DISTANCE_PART1D, [zclip], core.VARIABLES["p"][0], new_plotter=True
+ )
+ zclip_state = session.capture_context()
+session.show("image", width=800, height=600)
+
+
+###############################################################################
+# Restore a view
+# --------------
+# This code changes the model orientation, position, and zoom. It then restores
+# the isometric view. While restoring a context restores the orientation,
+# position, zoom level, and the objects available at the time that the context
+# was saved, restoring a view only restores the orientation and
+# position.
+#
+# The rendering view should look like this:
+#
+# .. image:: /_static/02_utils_4.png
+
+session.ensight.view_transf.rotate(-66.5934067, 1.71428561, 0)
+session.ensight.view_transf.rotate(18.0219765, -31.6363659, 0)
+session.ensight.view_transf.rotate(-4.83516455, 9.5064888, 0)
+session.ensight.view_transf.zoom(0.740957975)
+session.ensight.view_transf.zoom(0.792766333)
+session.ensight.view_transf.translate(0.0719177574, 0.0678303316, 0)
+session.ensight.view_transf.rotate(4.83516455, 3.42857122, 0)
+views.restore_view("isometric")
+session.show("image", width=800, height=600)
+
+
+###############################################################################
+# Create a temporal query
+# -----------------------
+# This code restores the distance query context and then generates a temporal
+# query. This query is applied to a specific XYZ point, querying the
+# ``"alpha1"`` variable. The XYZ point is set to the model centroid and computed
+# via the ``Views`` class. The data generated is then printed.
+#
+# The value returned should look like this:
+#
+# .. image:: /_static/02_utils_5.png
+
+session.restore_context(zclip_state)
+temp_query = query.create_temporal(
+ "temporal_query",
+ query.TEMPORAL_XYZ,
+ parts.select_parts_by_dimension(3),
+ "alpha1",
+ xyz=views.compute_model_centroid(),
+)
+print(temp_query.QUERY_DATA)
+
+
+###############################################################################
+# Restore a context from disk
+# ---------------------------
+# This code shows how to restore a context previously saved on disk.
+# Because PyEnSight context files do not store the location of the dataset by
+# default, you must load the dataset before restoring the context.
+#
+# The rendering view should look like this.
+#
+# .. image:: /_static/02_utils_6.png
+
+ctx = EnsContext()
+ctx.load("init_state.ctxz")
+session.restore_context(ctx)
+session.show("image", width=800, height=600)
+
+# sphinx_gallery_thumbnail_path = '_static/02_utils_3.png'
diff --git a/doc/source/libuserd_documentation.rst b/doc/source/libuserd_documentation.rst
index 9c0ea3b823f..ff9fb92ceee 100644
--- a/doc/source/libuserd_documentation.rst
+++ b/doc/source/libuserd_documentation.rst
@@ -1,40 +1,40 @@
-.. _ref_libuserd_api_docs:
-
-**********************
-LibUserd API reference
-**********************
-
-The ``libuserd`` module allows PyEnSight to directly access EnSight
-user-defined readers (USERD). Any file format for which EnSight
-uses a USERD interface can be read using this API.
-
-
-.. note::
- This module was first introduced with the Ansys 2025 R1 distribution.
- It should be considered **Beta** at this point in time. Please report
- issues via github.
-
-
-
-.. toctree::
- :hidden:
- :maxdepth: 4
-
-
-.. autosummary::
- :toctree: _autosummary/
-
- ansys.pyensight.core.libuserd
- ansys.pyensight.core.libuserd.LibUserd
- ansys.pyensight.core.libuserd.ReaderInfo
- ansys.pyensight.core.libuserd.Reader
- ansys.pyensight.core.libuserd.Part
- ansys.pyensight.core.libuserd.PartHints
- ansys.pyensight.core.libuserd.ElementType
- ansys.pyensight.core.libuserd.Variable
- ansys.pyensight.core.libuserd.VariableType
- ansys.pyensight.core.libuserd.VariableLocation
- ansys.pyensight.core.libuserd.Query
- ansys.pyensight.core.libuserd.LibUserdError
- ansys.pyensight.core.libuserd.ErrorCodes
-
+.. _ref_libuserd_api_docs:
+
+**********************
+LibUserd API reference
+**********************
+
+The ``libuserd`` module allows PyEnSight to directly access EnSight
+user-defined readers (USERD). Any file format for which EnSight
+uses a USERD interface can be read using this API.
+
+
+.. note::
+ This module was first introduced with the Ansys 2025 R1 distribution.
+ It should be considered **Beta** at this point in time. Please report
+ issues via GitHub.
+
+
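+For example, a minimal sketch of querying readers for a file (the filename is a
+placeholder, and as a Beta module the exact method names may evolve):
+
+.. code-block:: python
+
+    from ansys.pyensight.core.libuserd import LibUserd
+
+    userd = LibUserd()
+    userd.initialize()
+    # Ask which installed readers claim they can read a given file.
+    readers = userd.query_format("/path/to/data.case")
+    print(readers)
+    userd.shutdown()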
+
+.. toctree::
+ :hidden:
+ :maxdepth: 4
+
+
+.. autosummary::
+ :toctree: _autosummary/
+
+ ansys.pyensight.core.libuserd
+ ansys.pyensight.core.libuserd.LibUserd
+ ansys.pyensight.core.libuserd.ReaderInfo
+ ansys.pyensight.core.libuserd.Reader
+ ansys.pyensight.core.libuserd.Part
+ ansys.pyensight.core.libuserd.PartHints
+ ansys.pyensight.core.libuserd.ElementType
+ ansys.pyensight.core.libuserd.Variable
+ ansys.pyensight.core.libuserd.VariableType
+ ansys.pyensight.core.libuserd.VariableLocation
+ ansys.pyensight.core.libuserd.Query
+ ansys.pyensight.core.libuserd.LibUserdError
+ ansys.pyensight.core.libuserd.ErrorCodes
+
diff --git a/doc/source/native_documentation.rst b/doc/source/native_documentation.rst
index 6df08f1f59c..1b051c3bfb4 100644
--- a/doc/source/native_documentation.rst
+++ b/doc/source/native_documentation.rst
@@ -1,110 +1,110 @@
-.. _ref_native_api_docs:
-
-********************
-Native API reference
-********************
-
-The :ref:`"native Python API"` is the result of direct
-conversion from EnSight command language into Python syntax. In EnSight,
-you can select a block of command language from the command dialog and copy
-it to the clipboard in this format. There are many limitations
-to this interface. For example, there is no mechanism to query values and scripts
-are highly order dependent. For new development, consider using
-the :ref:`"Ensight object API"` where possible.
-
-
-.. toctree::
- :hidden:
- :maxdepth: 4
-
-
-.. autosummary::
- :toctree: _autosummary/
-
- ansys.api.pyensight.ensight_api
- ansys.api.pyensight.ensight_api.anim
- ansys.api.pyensight.ensight_api.anim_flipbook
- ansys.api.pyensight.ensight_api.anim_keyframe
- ansys.api.pyensight.ensight_api.anim_quick
- ansys.api.pyensight.ensight_api.anim_recorders
- ansys.api.pyensight.ensight_api.anim_screens
- ansys.api.pyensight.ensight_api.anim_traces
- ansys.api.pyensight.ensight_api.annot_backgr
- ansys.api.pyensight.ensight_api.annot_entlbl
- ansys.api.pyensight.ensight_api.annotation
- ansys.api.pyensight.ensight_api.arrow
- ansys.api.pyensight.ensight_api.auxgeom
- ansys.api.pyensight.ensight_api.boundarylayer
- ansys.api.pyensight.ensight_api.case
- ansys.api.pyensight.ensight_api.clip
- ansys.api.pyensight.ensight_api.collab
- ansys.api.pyensight.ensight_api.command
- ansys.api.pyensight.ensight_api.connect
- ansys.api.pyensight.ensight_api.context_restore
- ansys.api.pyensight.ensight_api.contour
- ansys.api.pyensight.ensight_api.curve
- ansys.api.pyensight.ensight_api.data
- ansys.api.pyensight.ensight_api.data_partbuild
- ansys.api.pyensight.ensight_api.define
- ansys.api.pyensight.ensight_api.devsrf
- ansys.api.pyensight.ensight_api.dial
- ansys.api.pyensight.ensight_api.dpart
- ansys.api.pyensight.ensight_api.elevsurf
- ansys.api.pyensight.ensight_api.ensight
- ansys.api.pyensight.ensight_api.enums
- ansys.api.pyensight.ensight_api.extrude
- ansys.api.pyensight.ensight_api.file
- ansys.api.pyensight.ensight_api.filterpart
- ansys.api.pyensight.ensight_api.format
- ansys.api.pyensight.ensight_api.frame
- ansys.api.pyensight.ensight_api.function
- ansys.api.pyensight.ensight_api.gauge
- ansys.api.pyensight.ensight_api.help
- ansys.api.pyensight.ensight_api.isos
- ansys.api.pyensight.ensight_api.legend
- ansys.api.pyensight.ensight_api.lightsource
- ansys.api.pyensight.ensight_api.line
- ansys.api.pyensight.ensight_api.logo
- ansys.api.pyensight.ensight_api.material
- ansys.api.pyensight.ensight_api.message_window
- ansys.api.pyensight.ensight_api.model
- ansys.api.pyensight.ensight_api.nplot
- ansys.api.pyensight.ensight_api.nvc
- ansys.api.pyensight.ensight_api.objs
- ansys.api.pyensight.ensight_api.part
- ansys.api.pyensight.ensight_api.plot
- ansys.api.pyensight.ensight_api.pointpart
- ansys.api.pyensight.ensight_api.prefs
- ansys.api.pyensight.ensight_api.profile
- ansys.api.pyensight.ensight_api.ptrace
- ansys.api.pyensight.ensight_api.ptrace_emitr
- ansys.api.pyensight.ensight_api.query_ent_var
- ansys.api.pyensight.ensight_api.query_interact
- ansys.api.pyensight.ensight_api.savegeom
- ansys.api.pyensight.ensight_api.scene
- ansys.api.pyensight.ensight_api.sepattach
- ansys.api.pyensight.ensight_api.set_tdata
- ansys.api.pyensight.ensight_api.set_visenv
- ansys.api.pyensight.ensight_api.shape
- ansys.api.pyensight.ensight_api.shell
- ansys.api.pyensight.ensight_api.shock
- ansys.api.pyensight.ensight_api.show_info
- ansys.api.pyensight.ensight_api.solution_time
- ansys.api.pyensight.ensight_api.species
- ansys.api.pyensight.ensight_api.subset
- ansys.api.pyensight.ensight_api.tensor
- ansys.api.pyensight.ensight_api.text
- ansys.api.pyensight.ensight_api.texture
- ansys.api.pyensight.ensight_api.tools
- ansys.api.pyensight.ensight_api.user
- ansys.api.pyensight.ensight_api.varextcfd
- ansys.api.pyensight.ensight_api.variables
- ansys.api.pyensight.ensight_api.vctarrow
- ansys.api.pyensight.ensight_api.view
- ansys.api.pyensight.ensight_api.view_transf
- ansys.api.pyensight.ensight_api.viewport
- ansys.api.pyensight.ensight_api.viewport_axis
- ansys.api.pyensight.ensight_api.viewport_bounds
- ansys.api.pyensight.ensight_api.views
- ansys.api.pyensight.ensight_api.vof
- ansys.api.pyensight.ensight_api.vortexcore
+.. _ref_native_api_docs:
+
+********************
+Native API reference
+********************
+
+The :ref:`"native Python API"` is the result of direct
+conversion from EnSight command language into Python syntax. In EnSight,
+you can select a block of command language from the command dialog and copy
+it to the clipboard in this format. There are many limitations
+to this interface. For example, there is no mechanism to query values, and
+scripts are highly order-dependent. For new development, consider using
+the :ref:`"Ensight object API"` where possible.
+
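+For example, a command language line such as ``view_transf: rotate 30. 0. 0.``
+maps directly onto the native API (a sketch, assuming an active ``session``):
+
+.. code-block:: python
+
+    session.ensight.view_transf.rotate(30.0, 0.0, 0.0)
+    session.ensight.view_transf.zoom(0.9)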
+
+.. toctree::
+ :hidden:
+ :maxdepth: 4
+
+
+.. autosummary::
+ :toctree: _autosummary/
+
+ ansys.api.pyensight.ensight_api
+ ansys.api.pyensight.ensight_api.anim
+ ansys.api.pyensight.ensight_api.anim_flipbook
+ ansys.api.pyensight.ensight_api.anim_keyframe
+ ansys.api.pyensight.ensight_api.anim_quick
+ ansys.api.pyensight.ensight_api.anim_recorders
+ ansys.api.pyensight.ensight_api.anim_screens
+ ansys.api.pyensight.ensight_api.anim_traces
+ ansys.api.pyensight.ensight_api.annot_backgr
+ ansys.api.pyensight.ensight_api.annot_entlbl
+ ansys.api.pyensight.ensight_api.annotation
+ ansys.api.pyensight.ensight_api.arrow
+ ansys.api.pyensight.ensight_api.auxgeom
+ ansys.api.pyensight.ensight_api.boundarylayer
+ ansys.api.pyensight.ensight_api.case
+ ansys.api.pyensight.ensight_api.clip
+ ansys.api.pyensight.ensight_api.collab
+ ansys.api.pyensight.ensight_api.command
+ ansys.api.pyensight.ensight_api.connect
+ ansys.api.pyensight.ensight_api.context_restore
+ ansys.api.pyensight.ensight_api.contour
+ ansys.api.pyensight.ensight_api.curve
+ ansys.api.pyensight.ensight_api.data
+ ansys.api.pyensight.ensight_api.data_partbuild
+ ansys.api.pyensight.ensight_api.define
+ ansys.api.pyensight.ensight_api.devsrf
+ ansys.api.pyensight.ensight_api.dial
+ ansys.api.pyensight.ensight_api.dpart
+ ansys.api.pyensight.ensight_api.elevsurf
+ ansys.api.pyensight.ensight_api.ensight
+ ansys.api.pyensight.ensight_api.enums
+ ansys.api.pyensight.ensight_api.extrude
+ ansys.api.pyensight.ensight_api.file
+ ansys.api.pyensight.ensight_api.filterpart
+ ansys.api.pyensight.ensight_api.format
+ ansys.api.pyensight.ensight_api.frame
+ ansys.api.pyensight.ensight_api.function
+ ansys.api.pyensight.ensight_api.gauge
+ ansys.api.pyensight.ensight_api.help
+ ansys.api.pyensight.ensight_api.isos
+ ansys.api.pyensight.ensight_api.legend
+ ansys.api.pyensight.ensight_api.lightsource
+ ansys.api.pyensight.ensight_api.line
+ ansys.api.pyensight.ensight_api.logo
+ ansys.api.pyensight.ensight_api.material
+ ansys.api.pyensight.ensight_api.message_window
+ ansys.api.pyensight.ensight_api.model
+ ansys.api.pyensight.ensight_api.nplot
+ ansys.api.pyensight.ensight_api.nvc
+ ansys.api.pyensight.ensight_api.objs
+ ansys.api.pyensight.ensight_api.part
+ ansys.api.pyensight.ensight_api.plot
+ ansys.api.pyensight.ensight_api.pointpart
+ ansys.api.pyensight.ensight_api.prefs
+ ansys.api.pyensight.ensight_api.profile
+ ansys.api.pyensight.ensight_api.ptrace
+ ansys.api.pyensight.ensight_api.ptrace_emitr
+ ansys.api.pyensight.ensight_api.query_ent_var
+ ansys.api.pyensight.ensight_api.query_interact
+ ansys.api.pyensight.ensight_api.savegeom
+ ansys.api.pyensight.ensight_api.scene
+ ansys.api.pyensight.ensight_api.sepattach
+ ansys.api.pyensight.ensight_api.set_tdata
+ ansys.api.pyensight.ensight_api.set_visenv
+ ansys.api.pyensight.ensight_api.shape
+ ansys.api.pyensight.ensight_api.shell
+ ansys.api.pyensight.ensight_api.shock
+ ansys.api.pyensight.ensight_api.show_info
+ ansys.api.pyensight.ensight_api.solution_time
+ ansys.api.pyensight.ensight_api.species
+ ansys.api.pyensight.ensight_api.subset
+ ansys.api.pyensight.ensight_api.tensor
+ ansys.api.pyensight.ensight_api.text
+ ansys.api.pyensight.ensight_api.texture
+ ansys.api.pyensight.ensight_api.tools
+ ansys.api.pyensight.ensight_api.user
+ ansys.api.pyensight.ensight_api.varextcfd
+ ansys.api.pyensight.ensight_api.variables
+ ansys.api.pyensight.ensight_api.vctarrow
+ ansys.api.pyensight.ensight_api.view
+ ansys.api.pyensight.ensight_api.view_transf
+ ansys.api.pyensight.ensight_api.viewport
+ ansys.api.pyensight.ensight_api.viewport_axis
+ ansys.api.pyensight.ensight_api.viewport_bounds
+ ansys.api.pyensight.ensight_api.views
+ ansys.api.pyensight.ensight_api.vof
+ ansys.api.pyensight.ensight_api.vortexcore
diff --git a/doc/source/object_documentation.rst b/doc/source/object_documentation.rst
index a19ec516fa0..454de4c007b 100644
--- a/doc/source/object_documentation.rst
+++ b/doc/source/object_documentation.rst
@@ -1,88 +1,88 @@
-.. _ref_object_api_docs:
-
-********************
-Object API reference
-********************
-
-The :ref:`"object"` API is a direct interface to the EnSight object structures.
-It includes the ability to set/query attributes as well as connect callback
-function that execute when attribute values change, etc. While it is a more
-advanced interface than the :ref:`"native"` API, there are some
-operations that can only be performed via that interface.
-
-
-.. toctree::
- :hidden:
- :maxdepth: 4
-
-
-.. autosummary::
- :toctree: _autosummary/
-
- ansys.pyensight.core.ensobj.ENSOBJ
- ansys.pyensight.core.ensobjlist
- ansys.api.pyensight.calc_funcs.ens_calculator
- ansys.api.pyensight.ens_annot.ENS_ANNOT
- ansys.api.pyensight.ens_annot_text.ENS_ANNOT_TEXT
- ansys.api.pyensight.ens_annot_line.ENS_ANNOT_LINE
- ansys.api.pyensight.ens_annot_logo.ENS_ANNOT_LOGO
- ansys.api.pyensight.ens_annot_lgnd.ENS_ANNOT_LGND
- ansys.api.pyensight.ens_annot_marker.ENS_ANNOT_MARKER
- ansys.api.pyensight.ens_annot_arrow.ENS_ANNOT_ARROW
- ansys.api.pyensight.ens_annot_dial.ENS_ANNOT_DIAL
- ansys.api.pyensight.ens_annot_gauge.ENS_ANNOT_GAUGE
- ansys.api.pyensight.ens_annot_shape.ENS_ANNOT_SHAPE
- ansys.api.pyensight.ens_camera.ENS_CAMERA
- ansys.api.pyensight.ens_case.ENS_CASE
- ansys.api.pyensight.ens_flipbook.ENS_FLIPBOOK
- ansys.api.pyensight.ens_frame.ENS_FRAME
- ansys.api.pyensight.ens_geom.ENS_GEOM
- ansys.api.pyensight.ens_globals.ENS_GLOBALS
- ansys.api.pyensight.ens_group.ENS_GROUP
- ansys.api.pyensight.ens_lightsource.ENS_LIGHTSOURCE
- ansys.api.pyensight.ens_lpart.ENS_LPART
- ansys.api.pyensight.ens_mat.ENS_MAT
- ansys.api.pyensight.ens_palette.ENS_PALETTE
- ansys.api.pyensight.ens_part.ENS_PART
- ansys.api.pyensight.ens_part_model.ENS_PART_MODEL
- ansys.api.pyensight.ens_part_clip.ENS_PART_CLIP
- ansys.api.pyensight.ens_part_contour.ENS_PART_CONTOUR
- ansys.api.pyensight.ens_part_discrete_particle.ENS_PART_DISCRETE_PARTICLE
- ansys.api.pyensight.ens_part_frame.ENS_PART_FRAME
- ansys.api.pyensight.ens_part_isosurface.ENS_PART_ISOSURFACE
- ansys.api.pyensight.ens_part_particle_trace.ENS_PART_PARTICLE_TRACE
- ansys.api.pyensight.ens_part_profile.ENS_PART_PROFILE
- ansys.api.pyensight.ens_part_vector_arrow.ENS_PART_VECTOR_ARROW
- ansys.api.pyensight.ens_part_elevated_surface.ENS_PART_ELEVATED_SURFACE
- ansys.api.pyensight.ens_part_developed_surface.ENS_PART_DEVELOPED_SURFACE
- ansys.api.pyensight.ens_part_built_up.ENS_PART_BUILT_UP
- ansys.api.pyensight.ens_part_tensor_glyph.ENS_PART_TENSOR_GLYPH
- ansys.api.pyensight.ens_part_fx_vortex_core.ENS_PART_FX_VORTEX_CORE
- ansys.api.pyensight.ens_part_fx_shock.ENS_PART_FX_SHOCK
- ansys.api.pyensight.ens_part_fx_sep_att.ENS_PART_FX_SEP_ATT
- ansys.api.pyensight.ens_part_mat_interface.ENS_PART_MAT_INTERFACE
- ansys.api.pyensight.ens_part_point.ENS_PART_POINT
- ansys.api.pyensight.ens_part_axisymmetric.ENS_PART_AXISYMMETRIC
- ansys.api.pyensight.ens_part_vof.ENS_PART_VOF
- ansys.api.pyensight.ens_part_aux_geom.ENS_PART_AUX_GEOM
- ansys.api.pyensight.ens_part_filter.ENS_PART_FILTER
- ansys.api.pyensight.ens_plotter.ENS_PLOTTER
- ansys.api.pyensight.ens_polyline.ENS_POLYLINE
- ansys.api.pyensight.ens_probe.ENS_PROBE
- ansys.api.pyensight.ens_query.ENS_QUERY
- ansys.api.pyensight.ens_scene.ENS_SCENE
- ansys.api.pyensight.ens_source.ENS_SOURCE
- ansys.api.pyensight.ens_spec.ENS_SPEC
- ansys.api.pyensight.ens_state.ENS_STATE
- ansys.api.pyensight.ens_texture.ENS_TEXTURE
- ansys.api.pyensight.ens_tool.ENS_TOOL
- ansys.api.pyensight.ens_tool_cursor.ENS_TOOL_CURSOR
- ansys.api.pyensight.ens_tool_line.ENS_TOOL_LINE
- ansys.api.pyensight.ens_tool_plane.ENS_TOOL_PLANE
- ansys.api.pyensight.ens_tool_box.ENS_TOOL_BOX
- ansys.api.pyensight.ens_tool_cylinder.ENS_TOOL_CYLINDER
- ansys.api.pyensight.ens_tool_cone.ENS_TOOL_CONE
- ansys.api.pyensight.ens_tool_sphere.ENS_TOOL_SPHERE
- ansys.api.pyensight.ens_tool_revolution.ENS_TOOL_REVOLUTION
- ansys.api.pyensight.ens_var.ENS_VAR
- ansys.api.pyensight.ens_vport.ENS_VPORT
+.. _ref_object_api_docs:
+
+********************
+Object API reference
+********************
+
+The :ref:`"object"` API is a direct interface to the EnSight object structures.
+It includes the ability to set and query attributes as well as to connect
+callback functions that execute when attribute values change. While it is a
+more advanced interface than the :ref:`"native"` API, there are some
+operations that can only be performed via this interface.
+
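+For example, a minimal sketch of an attribute-change callback (the tag string
+and attribute list here are illustrative):
+
+.. code-block:: python
+
+    def on_parts_changed(uri: str) -> None:
+        # Called whenever the PARTS attribute of the core object changes.
+        print("Part list changed:", uri)
+
+    session.add_callback("ensight.objs.core", "partlist", ["PARTS"], on_parts_changed)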
+
+.. toctree::
+ :hidden:
+ :maxdepth: 4
+
+
+.. autosummary::
+ :toctree: _autosummary/
+
+ ansys.pyensight.core.ensobj.ENSOBJ
+ ansys.pyensight.core.ensobjlist
+ ansys.api.pyensight.calc_funcs.ens_calculator
+ ansys.api.pyensight.ens_annot.ENS_ANNOT
+ ansys.api.pyensight.ens_annot_text.ENS_ANNOT_TEXT
+ ansys.api.pyensight.ens_annot_line.ENS_ANNOT_LINE
+ ansys.api.pyensight.ens_annot_logo.ENS_ANNOT_LOGO
+ ansys.api.pyensight.ens_annot_lgnd.ENS_ANNOT_LGND
+ ansys.api.pyensight.ens_annot_marker.ENS_ANNOT_MARKER
+ ansys.api.pyensight.ens_annot_arrow.ENS_ANNOT_ARROW
+ ansys.api.pyensight.ens_annot_dial.ENS_ANNOT_DIAL
+ ansys.api.pyensight.ens_annot_gauge.ENS_ANNOT_GAUGE
+ ansys.api.pyensight.ens_annot_shape.ENS_ANNOT_SHAPE
+ ansys.api.pyensight.ens_camera.ENS_CAMERA
+ ansys.api.pyensight.ens_case.ENS_CASE
+ ansys.api.pyensight.ens_flipbook.ENS_FLIPBOOK
+ ansys.api.pyensight.ens_frame.ENS_FRAME
+ ansys.api.pyensight.ens_geom.ENS_GEOM
+ ansys.api.pyensight.ens_globals.ENS_GLOBALS
+ ansys.api.pyensight.ens_group.ENS_GROUP
+ ansys.api.pyensight.ens_lightsource.ENS_LIGHTSOURCE
+ ansys.api.pyensight.ens_lpart.ENS_LPART
+ ansys.api.pyensight.ens_mat.ENS_MAT
+ ansys.api.pyensight.ens_palette.ENS_PALETTE
+ ansys.api.pyensight.ens_part.ENS_PART
+ ansys.api.pyensight.ens_part_model.ENS_PART_MODEL
+ ansys.api.pyensight.ens_part_clip.ENS_PART_CLIP
+ ansys.api.pyensight.ens_part_contour.ENS_PART_CONTOUR
+ ansys.api.pyensight.ens_part_discrete_particle.ENS_PART_DISCRETE_PARTICLE
+ ansys.api.pyensight.ens_part_frame.ENS_PART_FRAME
+ ansys.api.pyensight.ens_part_isosurface.ENS_PART_ISOSURFACE
+ ansys.api.pyensight.ens_part_particle_trace.ENS_PART_PARTICLE_TRACE
+ ansys.api.pyensight.ens_part_profile.ENS_PART_PROFILE
+ ansys.api.pyensight.ens_part_vector_arrow.ENS_PART_VECTOR_ARROW
+ ansys.api.pyensight.ens_part_elevated_surface.ENS_PART_ELEVATED_SURFACE
+ ansys.api.pyensight.ens_part_developed_surface.ENS_PART_DEVELOPED_SURFACE
+ ansys.api.pyensight.ens_part_built_up.ENS_PART_BUILT_UP
+ ansys.api.pyensight.ens_part_tensor_glyph.ENS_PART_TENSOR_GLYPH
+ ansys.api.pyensight.ens_part_fx_vortex_core.ENS_PART_FX_VORTEX_CORE
+ ansys.api.pyensight.ens_part_fx_shock.ENS_PART_FX_SHOCK
+ ansys.api.pyensight.ens_part_fx_sep_att.ENS_PART_FX_SEP_ATT
+ ansys.api.pyensight.ens_part_mat_interface.ENS_PART_MAT_INTERFACE
+ ansys.api.pyensight.ens_part_point.ENS_PART_POINT
+ ansys.api.pyensight.ens_part_axisymmetric.ENS_PART_AXISYMMETRIC
+ ansys.api.pyensight.ens_part_vof.ENS_PART_VOF
+ ansys.api.pyensight.ens_part_aux_geom.ENS_PART_AUX_GEOM
+ ansys.api.pyensight.ens_part_filter.ENS_PART_FILTER
+ ansys.api.pyensight.ens_plotter.ENS_PLOTTER
+ ansys.api.pyensight.ens_polyline.ENS_POLYLINE
+ ansys.api.pyensight.ens_probe.ENS_PROBE
+ ansys.api.pyensight.ens_query.ENS_QUERY
+ ansys.api.pyensight.ens_scene.ENS_SCENE
+ ansys.api.pyensight.ens_source.ENS_SOURCE
+ ansys.api.pyensight.ens_spec.ENS_SPEC
+ ansys.api.pyensight.ens_state.ENS_STATE
+ ansys.api.pyensight.ens_texture.ENS_TEXTURE
+ ansys.api.pyensight.ens_tool.ENS_TOOL
+ ansys.api.pyensight.ens_tool_cursor.ENS_TOOL_CURSOR
+ ansys.api.pyensight.ens_tool_line.ENS_TOOL_LINE
+ ansys.api.pyensight.ens_tool_plane.ENS_TOOL_PLANE
+ ansys.api.pyensight.ens_tool_box.ENS_TOOL_BOX
+ ansys.api.pyensight.ens_tool_cylinder.ENS_TOOL_CYLINDER
+ ansys.api.pyensight.ens_tool_cone.ENS_TOOL_CONE
+ ansys.api.pyensight.ens_tool_sphere.ENS_TOOL_SPHERE
+ ansys.api.pyensight.ens_tool_revolution.ENS_TOOL_REVOLUTION
+ ansys.api.pyensight.ens_var.ENS_VAR
+ ansys.api.pyensight.ens_vport.ENS_VPORT
diff --git a/doc/source/rest_api/ensight_rest_v1.yaml b/doc/source/rest_api/ensight_rest_v1.yaml
index e35f9045761..c30646e7c94 100644
--- a/doc/source/rest_api/ensight_rest_v1.yaml
+++ b/doc/source/rest_api/ensight_rest_v1.yaml
@@ -1,595 +1,595 @@
-openapi: 3.0.0
-info:
- title: pyensight - OpenAPI 3.0
- description: |-
- The pyensight module provides a mechanism for launching an EnSight instance
- and control it through a gRPC interface through Python. The pyensight REST
- api provides a mechanism by which a Javascript application can leverage the
- pyensight API directly via REST.
-
- The REST API requires a pyensight initiated ensight instance. The API
- itself is implemented in the websocketserver instance associated with the
- pyensight instance.
- version: 1.0.0
-
-tags:
- - name: Basic API
- description: Generic Python interface.
- externalDocs:
- description: pyensight.Session
- url: https://ensight.docs.pyansys.com/version/dev/_autosummary/pyensight.Session.html
-
- - name: Native API
- description: Interface to the EnSight "native" Python API. Created by converting command
- language into Python.
- externalDocs:
- description: EnSight native command language API
- url: https://ensight.docs.pyansys.com/version/dev/user_guide/cmdlang_native.html
-
- - name: Object API
- description: Interface to the EnSight Python object API
- externalDocs:
- description: EnSight Python Object API
- url: https://ensight.docs.pyansys.com/version/dev/user_guide/index.html
-
-paths:
-
- /ensight/v1/{sessionid}/exec:
- put:
- summary: Exec Python command(s)
- description: Run one or more strings as Python commands in the remote EnSight session.
- operationId: execPython
- tags:
- - Basic API
- parameters:
- - name: sessionid
- in: path
- description: The PyEnSight session secret key.
- required: true
- schema:
- type: string
- example: "ab32cefa055411eeb8a654435d7ef902"
- requestBody:
- content:
- application/json:
- schema:
- type: array
- items:
- type: string
- example:
- [
- "ensight.part.modify_begin()",
- "ensight.part.elt_representation('3D_feature_2D_full')",
- "ensight.part.modify_end()"
- ]
- responses:
- '200':
- description: Successful operation
- '400':
- description: Python error encountered during execution
- '401':
- description: Invalid sessionid
-
- /ensight/v1/{sessionid}/eval:
- put:
- summary: Eval a Python command
- description: Execute one string as a Python command in the remote EnSight session and return
- the result.
- operationId: evalPython
- tags:
- - Basic API
- parameters:
- - name: sessionid
- in: path
- description: The PyEnSight session secret key.
- required: true
- schema:
- type: string
- example: "ab32cefa055411eeb8a654435d7ef902"
- - name: returns
- in: query
- description: A comma separated list of attribute names to return when an ENSOBJ object is returned. ENSOBJ is the default which returns @ENSOBJ=123@.
- required: false
- schema:
- type: string
- examples:
- Object ID:
- value: "__OBJID__"
- Description and visibility:
- value: "VISIBLE,DESCRIPTION"
- requestBody:
- content:
- application/json:
- schema:
- type: string
- example:
- ensight.objs.core.unit_system()
- responses:
- '200':
- description: Successful operation
- content:
- application/json:
- schema:
- type: object
- example:
- [
- "SI",
- "Metric SI",
- true,
- {
- "M": "kg",
- "L": "m",
- "T": "s",
- "K": "K",
- "Q": "A",
- "D": "rad",
- "I": "cd",
- "A": "mol"
- }
- ]
- '400':
- description: Python error encountered during execution
- '401':
- description: Invalid sessionid
-
- /ensight/v1/{sessionid}/def_func/{app}/{funcname}:
- put:
- summary: Define a remote function
- description: Create a function in the remote EnSight instance that can be called
- directly via the REST API.
- operationId: newRemoteFunc
- tags:
- - Basic API
- parameters:
- - name: sessionid
- in: path
- description: The PyEnSight session secret key.
- required: true
- schema:
- type: string
- example: "ab32cefa055411eeb8a654435d7ef902"
- - name: app
- in: path
- description: A namespace to place the new function in. Generally used to avoid
- name collisions and to provide a place to store session data. It
- must be a valid Python variable name.
- required: true
- schema:
- type: string
- example: "myapp"
- - name: funcname
- in: path
- description: The name of the function to be defined
- required: true
- schema:
- type: string
- example: "funcname"
- - name: imports
- in: query
- description: A list of modules that should be import before the function is defined.
- required: false
- schema:
- type: string
- example:
- "time,math,numpy,typing"
- requestBody:
- description: The string source code of a Python function. The first parameter will always be
- the namespace the function is defined in. Subsequent args should be keyword args.
- The modules listed in the imports query will have been imported as well as
- SimpleNamespace. The function return value will be returned via json.dumps().
- content:
- application/json:
- schema:
- type: string
- example: |-
- "def funcname(app: SimpleNamespace, o: typing.Optional['ENSOBJ'] = None, s : str = 'VISIBLE') -> dict:
- v = o.getattr(s)
- return dict(s=v)"
- responses:
- '200':
- description: Successful operation
- '400':
- description: Python error encountered during execution
- '401':
- description: Invalid sessionid
-
- /ensight/v1/{sessionid}/call_func/{app}/{funcname}:
- put:
- summary: Call a remote function
- description: Call a previously defined remote function.
- operationId: callRemoteFunc
- tags:
- - Basic API
- parameters:
- - name: sessionid
- in: path
- description: The PyEnSight session secret key.
- required: true
- schema:
- type: string
- example: "ab32cefa055411eeb8a654435d7ef902"
- - name: app
- in: path
- description: A namespace to place the new function in. Generally used to avoid
- name collisions and to provide a place to store session data. It
- must be a valid Python variable name.
- required: true
- schema:
- type: string
- example: "myapp"
- - name: funcname
- in: path
- description: The name of the function to be called.
- required: true
- schema:
- type: string
- example: "funcname"
- - name: returns
- in: query
- description: A comma separated list of attribute names to return when an ENSOBJ object is returned. ENSOBJ is the default which returns @ENSOBJ=123@.
- required: false
- schema:
- type: string
- examples:
- Object ID:
- value: "__OBJID__"
- Description and visibility:
- value: "VISIBLE,DESCRIPTION"
- requestBody:
- description: The keyword parameters for the Python call.
- An input string of the form '@ENSOBJ=v@' will be converted into
- ensight.objs.wrap_id(v).
- In the example,
- app.funcname(app, num_samples=4, source=ensight.objs.wrap_id(120)) will
- be called. The value returned by the function will be output using
- json.dumps().
- content:
- application/json:
- schema:
- type: object
- example:
- {
- "num_samples": 4,
- "source": "@ENSOBJ=120@"
- }
- responses:
- '200':
- description: Successful operation
- '400':
- description: Python error encountered during execution
- '401':
- description: Invalid sessionid, app or funcname
-
- /ensight/v1/{sessionid}/cmd/{nativecommandname}:
- put:
- summary: Run an EnSight native Python command
- description: Execute the named EnSight native Python command with the parameters passed in
- the list of object in the request body.
- operationId: nativeCmd
- tags:
- - Native API
- parameters:
- - name: sessionid
- in: path
- description: The PyEnSight session secret key.
- required: true
- schema:
- type: string
- example: "ab32cefa055411eeb8a654435d7ef902"
- - name: nativecommandname
- in: path
- description: The native python binding for a command language command.
- required: true
- schema:
- type: string
- example: "ensight.view_transf.rotate"
- requestBody:
- content:
- application/json:
- schema:
- type: array
- items:
- type: object
- example:
- [
- 33.7, -0.402, 0.0
- ]
- responses:
- '200':
- description: Successful operation
- '400':
- description: Python error encountered during execution
- '401':
- description: Invalid sessionid
-
- /ensight/v1/{sessionid}/ensobjs/{objectid}/{attributeid}:
- get:
- summary: ENSOBJ attributes
- description: Get an ENSOBJ attribute value
- operationId: getobjAttr
- tags:
- - Object API
- parameters:
- - name: sessionid
- in: path
- description: The PyEnSight session secret key.
- required: true
- schema:
- type: string
- example: "ab32cefa055411eeb8a654435d7ef902"
- - name: objectid
- in: path
- description: The name of an EnSight object or an object id
- required: true
- schema:
- type: string
- examples:
- Object reference:
- value: "ensight.objs.core"
- Object ID:
- value: 1234
- - name: attributeid
- in: path
- description: An ENSOBJ attribute name like VISIBLE.
- required: true
- schema:
- type: string
- example: "VISIBLE"
- - name: returns
- in: query
- description: A comma separated list of attribute names to return when an ENSOBJ object is returned. ENSOBJ is the default which returns @ENSOBJ=123@.
- required: false
- schema:
- type: string
- examples:
- Object ID:
- value: "__OBJID__"
- Description and visibility:
- value: "VISIBLE,DESCRIPTION"
- responses:
- '200':
- description: Successful operation
- '400':
- description: Python error encountered during execution
- '401':
- description: Invalid sessionid
-
- put:
- summary: ENSOBJ attributes
- description: Set an ENSOBJ attribute value
- operationId: setobjAttr
- tags:
- - Object API
- parameters:
- - name: sessionid
- in: path
- description: The PyEnSight session secret key.
- required: true
- schema:
- type: string
- example: "ab32cefa055411eeb8a654435d7ef902"
- - name: objectid
- in: path
- description: The name of an EnSight object or an object id
- required: true
- schema:
- type: string
- examples:
- Object reference:
- value: "ensight.objs.core"
- Object ID:
- value: 1234
- - name: attributeid
- in: path
- description: The native python binding for a command language command.
- required: true
- schema:
- type: string
- example: "VISIBLE"
- requestBody:
- content:
- application/json:
- schema:
- type: object
- responses:
- '200':
- description: Successful operation
- '400':
- description: Python error encountered during execution
- '401':
- description: Invalid sessionid
-
- /ensight/v1/{sessionid}/ensobjs/setattrs:
- put:
- summary: Multi-ENSOBJ setattrs
- description: Set a collection of attributes on a collection of ENSOBJ objects.
- operationId: setobjsAttr
- tags:
- - Object API
- parameters:
- - name: sessionid
- in: path
- description: The PyEnSight session secret key.
- required: true
- schema:
- type: string
- example: "ab32cefa055411eeb8a654435d7ef902"
- - name: suppress_errors
- in: query
- description: If this is true and there is an error during the set operation, do not
- return an error/stop processing the entire list.
- required: false
- schema:
- type: boolean
- requestBody:
- description: The object contains a list of the objects to have the set operation
- applied to and a value object keyed by the attribute names to set.
- content:
- application/json:
- schema:
- type: object
- properties:
- objects:
- type: array
- items:
- oneOf:
- - type: string
- - type: integer
- value:
- type: object
- example:
- {
- "objects": ["ensight.objs.core", 1234],
- "values": {
- "VISIBLE": true,
- "DESCRIPTION": "example"
- }
- }
- responses:
- '200':
- description: Successful operation
- '400':
- description: Python error encountered during execution
- '401':
- description: Invalid sessionid
-
- /ensight/v1/{sessionid}/ensobjs/getattrs:
- put:
- summary: Multi-ENSOBJ getattrs
- description: Get a collection of attributes on a collection of ENSOBJ objects.
- operationId: getobjsAttr
- tags:
- - Object API
- parameters:
- - name: sessionid
- in: path
- description: The PyEnSight session secret key.
- required: true
- schema:
- type: string
- example: "ab32cefa055411eeb8a654435d7ef902"
- - name: returns
- in: query
- description: A comma separated list of attribute names to return when an ENSOBJ object is returned. ENSOBJ is the default which returns @ENSOBJ=123@.
- required: false
- schema:
- type: string
- examples:
- Object ID:
- value: "__OBJID__"
- Description and visibility:
- value: "VISIBLE,DESCRIPTION"
- - name: suppress_errors
- in: query
- description: If this is true and there is an error during the set operation, do not
- return an error/stop processing the entire list. Also, no key will appear in
- the output object for objects which returned an error.
- required: false
- schema:
- type: boolean
- requestBody:
- description: A list of object names and ids to query the attributes listed by the returns= query
- content:
- application/json:
- schema:
- type: array
- items:
- oneOf:
- - type: string
- - type: integer
- example:
- ["ensight.objs.core", 1234]
- responses:
- '200':
- description: Successful operation. The object has keys for each value in the input array
- and the value will be an array of the output attribute values.
- content:
- application/json:
- schema:
- type: object
- example:
- {
- "ensight.objs.core": [false, "global object"],
- "1234": [true, "wheels"]
- }
- '400':
- description: Python error encountered during execution
- '401':
- description: Invalid sessionid
-
- /ensight/v1/{sessionid}/ensobjs/{objectid}/methods/{method}:
- put:
- summary: Run ENSOBJ method
- description: Execute an ENSOBJ method and return the result.
- operationId: objMethod
- tags:
- - Object API
- parameters:
- - name: sessionid
- in: path
- description: The PyEnSight session secret key.
- required: true
- schema:
- type: string
- example: "ab32cefa055411eeb8a654435d7ef902"
- - name: objectid
- in: path
- description: The name of an EnSight object or an object id.
- required: true
- schema:
- type: string
- examples:
- Object reference:
- value: "ensight.objs.core"
- Object ID:
- value: 1234
- - name: method
- in: path
- description: The name of a method on the selected object.
- required: true
- schema:
- type: string
- example: "createvariable"
- - name: returns
- in: query
- description: A comma separated list of attribute names to return when an ENSOBJ object is returned. ENSOBJ is the default which returns @ENSOBJ=123@.
- required: false
- schema:
- type: string
- examples:
- Object ID:
- value: "__OBJID__"
- Description and visibility:
- value: "VISIBLE,DESCRIPTION"
- requestBody:
- description: The positional and keyword parameters for the Python call.
- An input string starting with '@ENSOBJ=v@' will be converted into
- ensight.objs.wrap_id(v).
- In the example,
- foo("param1", 1.0, num_samples=4, source=ensight.objs.wrap_id(120))
- content:
- application/json:
- schema:
- type: object
- properties:
- args:
- type: array
- items:
- type: object
- kwargs:
- type: object
- example:
- {
- "args": ["param1", 1.0],
- "kwargs": {
- "num_samples": 4,
- "source": "@ENSOBJ=120@"
- }
- }
- responses:
- '200':
- description: Successful operation
- '400':
- description: Python error encountered during execution
- '401':
- description: Invalid sessionid
+openapi: 3.0.0
+info:
+ title: pyensight - OpenAPI 3.0
+ description: |-
+    The pyensight module provides a mechanism for launching an EnSight instance
+    and controlling it through a gRPC interface from Python. The pyensight REST
+    API provides a mechanism by which a JavaScript application can leverage the
+    pyensight API directly via REST.
+
+    The REST API requires a PyEnSight-initiated EnSight instance. The API
+    itself is implemented in the websocketserver instance associated with the
+    PyEnSight instance.
+ version: 1.0.0
+
+tags:
+ - name: Basic API
+ description: Generic Python interface.
+ externalDocs:
+ description: pyensight.Session
+ url: https://ensight.docs.pyansys.com/version/dev/_autosummary/pyensight.Session.html
+
+ - name: Native API
+ description: Interface to the EnSight "native" Python API. Created by converting command
+ language into Python.
+ externalDocs:
+ description: EnSight native command language API
+ url: https://ensight.docs.pyansys.com/version/dev/user_guide/cmdlang_native.html
+
+ - name: Object API
+ description: Interface to the EnSight Python object API
+ externalDocs:
+ description: EnSight Python Object API
+ url: https://ensight.docs.pyansys.com/version/dev/user_guide/index.html
+
+paths:
+
+ /ensight/v1/{sessionid}/exec:
+ put:
+ summary: Exec Python command(s)
+ description: Run one or more strings as Python commands in the remote EnSight session.
+ operationId: execPython
+ tags:
+ - Basic API
+ parameters:
+ - name: sessionid
+ in: path
+ description: The PyEnSight session secret key.
+ required: true
+ schema:
+ type: string
+ example: "ab32cefa055411eeb8a654435d7ef902"
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ type: string
+ example:
+ [
+ "ensight.part.modify_begin()",
+ "ensight.part.elt_representation('3D_feature_2D_full')",
+ "ensight.part.modify_end()"
+ ]
+ responses:
+ '200':
+ description: Successful operation
+ '400':
+ description: Python error encountered during execution
+ '401':
+ description: Invalid sessionid
+
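+  # Hypothetical usage sketch (Python requests; base and secret are placeholders):
+  #   requests.put(f"{base}/ensight/v1/{secret}/exec",
+  #                json=["ensight.part.select_begin(1)"])
+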
+ /ensight/v1/{sessionid}/eval:
+ put:
+ summary: Eval a Python command
+ description: Execute one string as a Python command in the remote EnSight session and return
+ the result.
+ operationId: evalPython
+ tags:
+ - Basic API
+ parameters:
+ - name: sessionid
+ in: path
+ description: The PyEnSight session secret key.
+ required: true
+ schema:
+ type: string
+ example: "ab32cefa055411eeb8a654435d7ef902"
+ - name: returns
+ in: query
+        description: A comma-separated list of attribute names to return when an ENSOBJ object is returned. ENSOBJ is the default, which returns @ENSOBJ=123@.
+ required: false
+ schema:
+ type: string
+ examples:
+ Object ID:
+ value: "__OBJID__"
+ Description and visibility:
+ value: "VISIBLE,DESCRIPTION"
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ example:
+ ensight.objs.core.unit_system()
+ responses:
+ '200':
+ description: Successful operation
+ content:
+ application/json:
+ schema:
+ type: object
+ example:
+ [
+ "SI",
+ "Metric SI",
+ true,
+ {
+ "M": "kg",
+ "L": "m",
+ "T": "s",
+ "K": "K",
+ "Q": "A",
+ "D": "rad",
+ "I": "cd",
+ "A": "mol"
+ }
+ ]
+ '400':
+ description: Python error encountered during execution
+ '401':
+ description: Invalid sessionid
+
+ /ensight/v1/{sessionid}/def_func/{app}/{funcname}:
+ put:
+ summary: Define a remote function
+ description: Create a function in the remote EnSight instance that can be called
+ directly via the REST API.
+ operationId: newRemoteFunc
+ tags:
+ - Basic API
+ parameters:
+ - name: sessionid
+ in: path
+ description: The PyEnSight session secret key.
+ required: true
+ schema:
+ type: string
+ example: "ab32cefa055411eeb8a654435d7ef902"
+ - name: app
+ in: path
+ description: A namespace to place the new function in. Generally used to avoid
+ name collisions and to provide a place to store session data. It
+ must be a valid Python variable name.
+ required: true
+ schema:
+ type: string
+ example: "myapp"
+ - name: funcname
+ in: path
+ description: The name of the function to be defined
+ required: true
+ schema:
+ type: string
+ example: "funcname"
+ - name: imports
+ in: query
+        description: A comma-separated list of modules that should be imported before the function is defined.
+ required: false
+ schema:
+ type: string
+ example:
+ "time,math,numpy,typing"
+ requestBody:
+        description: The string source code of a Python function. The first parameter is always
+          the namespace the function is defined in. Subsequent arguments should be keyword
+          arguments. The modules listed in the imports query will have been imported, as will
+          SimpleNamespace. The function return value is returned via json.dumps().
+ content:
+ application/json:
+ schema:
+ type: string
+ example: |-
+ "def funcname(app: SimpleNamespace, o: typing.Optional['ENSOBJ'] = None, s : str = 'VISIBLE') -> dict:
+ v = o.getattr(s)
+ return dict(s=v)"
+ responses:
+ '200':
+ description: Successful operation
+ '400':
+ description: Python error encountered during execution
+ '401':
+ description: Invalid sessionid
+
+ /ensight/v1/{sessionid}/call_func/{app}/{funcname}:
+ put:
+ summary: Call a remote function
+ description: Call a previously defined remote function.
+ operationId: callRemoteFunc
+ tags:
+ - Basic API
+ parameters:
+ - name: sessionid
+ in: path
+ description: The PyEnSight session secret key.
+ required: true
+ schema:
+ type: string
+ example: "ab32cefa055411eeb8a654435d7ef902"
+ - name: app
+ in: path
+        description: The namespace the function was defined in. Generally used to avoid
+ name collisions and to provide a place to store session data. It
+ must be a valid Python variable name.
+ required: true
+ schema:
+ type: string
+ example: "myapp"
+ - name: funcname
+ in: path
+ description: The name of the function to be called.
+ required: true
+ schema:
+ type: string
+ example: "funcname"
+ - name: returns
+ in: query
+        description: A comma-separated list of attribute names to return when an ENSOBJ object is returned. ENSOBJ is the default, which returns @ENSOBJ=123@.
+ required: false
+ schema:
+ type: string
+ examples:
+ Object ID:
+ value: "__OBJID__"
+ Description and visibility:
+ value: "VISIBLE,DESCRIPTION"
+ requestBody:
+ description: The keyword parameters for the Python call.
+ An input string of the form '@ENSOBJ=v@' will be converted into
+ ensight.objs.wrap_id(v).
+ In the example,
+ app.funcname(app, num_samples=4, source=ensight.objs.wrap_id(120)) will
+ be called. The value returned by the function will be output using
+ json.dumps().
+ content:
+ application/json:
+ schema:
+ type: object
+ example:
+ {
+ "num_samples": 4,
+ "source": "@ENSOBJ=120@"
+ }
+ responses:
+ '200':
+ description: Successful operation
+ '400':
+ description: Python error encountered during execution
+ '401':
+ description: Invalid sessionid, app or funcname
+
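+  # Hypothetical usage sketch (Python requests; mirrors the call_func example in
+  # the REST API guide; base and secret are placeholders):
+  #   requests.put(f"{base}/ensight/v1/{secret}/call_func/myapp/foo",
+  #                json={"n": 3}).json()
+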
+ /ensight/v1/{sessionid}/cmd/{nativecommandname}:
+ put:
+ summary: Run an EnSight native Python command
+ description: Execute the named EnSight native Python command with the parameters passed in
+        the list of objects in the request body.
+ operationId: nativeCmd
+ tags:
+ - Native API
+ parameters:
+ - name: sessionid
+ in: path
+ description: The PyEnSight session secret key.
+ required: true
+ schema:
+ type: string
+ example: "ab32cefa055411eeb8a654435d7ef902"
+ - name: nativecommandname
+ in: path
+        description: The native Python binding for a command language command.
+ required: true
+ schema:
+ type: string
+ example: "ensight.view_transf.rotate"
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ type: object
+ example:
+ [
+ 33.7, -0.402, 0.0
+ ]
+ responses:
+ '200':
+ description: Successful operation
+ '400':
+ description: Python error encountered during execution
+ '401':
+ description: Invalid sessionid
+
+ /ensight/v1/{sessionid}/ensobjs/{objectid}/{attributeid}:
+ get:
+ summary: ENSOBJ attributes
+ description: Get an ENSOBJ attribute value
+ operationId: getobjAttr
+ tags:
+ - Object API
+ parameters:
+ - name: sessionid
+ in: path
+ description: The PyEnSight session secret key.
+ required: true
+ schema:
+ type: string
+ example: "ab32cefa055411eeb8a654435d7ef902"
+ - name: objectid
+ in: path
+        description: The name of an EnSight object or an object ID.
+ required: true
+ schema:
+ type: string
+ examples:
+ Object reference:
+ value: "ensight.objs.core"
+ Object ID:
+ value: 1234
+ - name: attributeid
+ in: path
+ description: An ENSOBJ attribute name like VISIBLE.
+ required: true
+ schema:
+ type: string
+ example: "VISIBLE"
+ - name: returns
+ in: query
+        description: A comma-separated list of attribute names to return when an ENSOBJ object is returned. ENSOBJ is the default, which returns @ENSOBJ=123@.
+ required: false
+ schema:
+ type: string
+ examples:
+ Object ID:
+ value: "__OBJID__"
+ Description and visibility:
+ value: "VISIBLE,DESCRIPTION"
+ responses:
+ '200':
+ description: Successful operation
+ '400':
+ description: Python error encountered during execution
+ '401':
+ description: Invalid sessionid
+
+ put:
+ summary: ENSOBJ attributes
+ description: Set an ENSOBJ attribute value
+ operationId: setobjAttr
+ tags:
+ - Object API
+ parameters:
+ - name: sessionid
+ in: path
+ description: The PyEnSight session secret key.
+ required: true
+ schema:
+ type: string
+ example: "ab32cefa055411eeb8a654435d7ef902"
+ - name: objectid
+ in: path
+        description: The name of an EnSight object or an object ID.
+ required: true
+ schema:
+ type: string
+ examples:
+ Object reference:
+ value: "ensight.objs.core"
+ Object ID:
+ value: 1234
+ - name: attributeid
+ in: path
+        description: An ENSOBJ attribute name like VISIBLE.
+ required: true
+ schema:
+ type: string
+ example: "VISIBLE"
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ responses:
+ '200':
+ description: Successful operation
+ '400':
+ description: Python error encountered during execution
+ '401':
+ description: Invalid sessionid
+
+ /ensight/v1/{sessionid}/ensobjs/setattrs:
+ put:
+ summary: Multi-ENSOBJ setattrs
+ description: Set a collection of attributes on a collection of ENSOBJ objects.
+ operationId: setobjsAttr
+ tags:
+ - Object API
+ parameters:
+ - name: sessionid
+ in: path
+ description: The PyEnSight session secret key.
+ required: true
+ schema:
+ type: string
+ example: "ab32cefa055411eeb8a654435d7ef902"
+ - name: suppress_errors
+ in: query
+        description: If this is true and an error occurs during the set operation, do not
+          return an error or stop processing the entire list.
+ required: false
+ schema:
+ type: boolean
+ requestBody:
+        description: The object contains a list of the objects to apply the set operation
+          to and a values object keyed by the attribute names to set.
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ objects:
+ type: array
+ items:
+ oneOf:
+ - type: string
+ - type: integer
+              values:
+ type: object
+ example:
+ {
+ "objects": ["ensight.objs.core", 1234],
+ "values": {
+ "VISIBLE": true,
+ "DESCRIPTION": "example"
+ }
+ }
+ responses:
+ '200':
+ description: Successful operation
+ '400':
+ description: Python error encountered during execution
+ '401':
+ description: Invalid sessionid
+
+ /ensight/v1/{sessionid}/ensobjs/getattrs:
+ put:
+ summary: Multi-ENSOBJ getattrs
+ description: Get a collection of attributes on a collection of ENSOBJ objects.
+ operationId: getobjsAttr
+ tags:
+ - Object API
+ parameters:
+ - name: sessionid
+ in: path
+ description: The PyEnSight session secret key.
+ required: true
+ schema:
+ type: string
+ example: "ab32cefa055411eeb8a654435d7ef902"
+ - name: returns
+ in: query
+        description: A comma-separated list of attribute names to return when an ENSOBJ object is returned. ENSOBJ is the default, which returns @ENSOBJ=123@.
+ required: false
+ schema:
+ type: string
+ examples:
+ Object ID:
+ value: "__OBJID__"
+ Description and visibility:
+ value: "VISIBLE,DESCRIPTION"
+ - name: suppress_errors
+ in: query
+        description: If this is true and an error occurs during the get operation, do not
+          return an error or stop processing the entire list. Also, no key appears in
+          the output object for objects that returned an error.
+ required: false
+ schema:
+ type: boolean
+ requestBody:
+        description: A list of object names and IDs for which to query the attributes listed by the returns= query option.
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ oneOf:
+ - type: string
+ - type: integer
+ example:
+ ["ensight.objs.core", 1234]
+ responses:
+ '200':
+          description: Successful operation. The returned object has a key for each entry in the
+            input array, and each value is an array of the requested attribute values.
+ content:
+ application/json:
+ schema:
+ type: object
+ example:
+ {
+ "ensight.objs.core": [false, "global object"],
+ "1234": [true, "wheels"]
+ }
+ '400':
+ description: Python error encountered during execution
+ '401':
+ description: Invalid sessionid
+
+ /ensight/v1/{sessionid}/ensobjs/{objectid}/methods/{method}:
+ put:
+ summary: Run ENSOBJ method
+ description: Execute an ENSOBJ method and return the result.
+ operationId: objMethod
+ tags:
+ - Object API
+ parameters:
+ - name: sessionid
+ in: path
+ description: The PyEnSight session secret key.
+ required: true
+ schema:
+ type: string
+ example: "ab32cefa055411eeb8a654435d7ef902"
+ - name: objectid
+ in: path
+        description: The name of an EnSight object or an object ID.
+ required: true
+ schema:
+ type: string
+ examples:
+ Object reference:
+ value: "ensight.objs.core"
+ Object ID:
+ value: 1234
+ - name: method
+ in: path
+ description: The name of a method on the selected object.
+ required: true
+ schema:
+ type: string
+ example: "createvariable"
+ - name: returns
+ in: query
+        description: A comma-separated list of attribute names to return when an ENSOBJ object is returned. ENSOBJ is the default, which returns @ENSOBJ=123@.
+ required: false
+ schema:
+ type: string
+ examples:
+ Object ID:
+ value: "__OBJID__"
+ Description and visibility:
+ value: "VISIBLE,DESCRIPTION"
+ requestBody:
+ description: The positional and keyword parameters for the Python call.
+ An input string starting with '@ENSOBJ=v@' will be converted into
+ ensight.objs.wrap_id(v).
+ In the example,
+ foo("param1", 1.0, num_samples=4, source=ensight.objs.wrap_id(120))
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ args:
+ type: array
+ items:
+ type: object
+ kwargs:
+ type: object
+ example:
+ {
+ "args": ["param1", 1.0],
+ "kwargs": {
+ "num_samples": 4,
+ "source": "@ENSOBJ=120@"
+ }
+ }
+ responses:
+ '200':
+ description: Successful operation
+ '400':
+ description: Python error encountered during execution
+ '401':
+ description: Invalid sessionid
diff --git a/doc/source/rest_api/rest_api.rst b/doc/source/rest_api/rest_api.rst
index ce78cd3a35d..decb6b0599f 100644
--- a/doc/source/rest_api/rest_api.rst
+++ b/doc/source/rest_api/rest_api.rst
@@ -1,149 +1,149 @@
-.. _rest_api:
-
-
-********
-REST API
-********
-
-An EnSight session that is started using PyEnSight may enable the direct REST API.
-The REST API, which allows JavaScript code to directly access the EnSight Python
-APIs, is only available in EnSight 2024 R1 and later. To enable the REST API,
-you set the ``enable_rest_api`` keyword to ``True`` for the Launcher subclass ctor.
-
-.. note::
-
- The information here is for informational purposes only. The REST API has
- been defined, but it is not currently enabled in EnSight. It is scheduled
- for release in EnSight 2024 R1.
-
-
-REST API enablement via PyEnSight
----------------------------------
-
-You can start the REST API service via the PyEnSight local launcher::
-
- >>> from ansys.pyensight.core import LocalLauncher
- >>> s = LocalLauncher(enable_rest_api=True).start()
- >>> s.load_data(f"{s.cei_home}/ensight{s.cei_suffix}/data/cube/cube.case")
- >>> uri_base = f"http://{s.hostname}:{s.html_port}/ensight/v1/{s.secret_key}"
-
-
-The base URI looks something like this, but the port and GUID can vary:
-``http://127.0.0.1:36474/ensight/v1/b7c04700-0a27-11ee-be68-381428170733``.
-
-
-Basic REST API
---------------
-
-You can use the string from the previous example to execute REST calls via
-Python ``requests``::
-
- >>> import requests
- >>> requests.put(uri_base+"/eval", json="ensight.objs.core.PARTS").json()
- ['@ENSOBJ=1022@']
- >>> requests.put(uri_base+"/eval", json="ensight.objs.core.PARTS", params=dict(returns="DESCRIPTION,VISIBLE")).json()
- [['Computational mesh', True]]
-
-
-The REST calls use the REST API to run the ``ensight.objs.core.PARTS`` command and output
-something like ``['@ENSOBJ=1022@']``, a reference to object 1022. What the query
-option returns is then used to return the ``DESCRIPTION`` and ``VISIBLE`` attributes. In this
-case, the output for the second ``PUT`` is ``[['Computational mesh', True]]``.
-
-.. note::
-
- Examples here leverage Python requests to execute REST calls, but tools like
- cURL and Swagger can also be leveraged. The intended use of the REST API is via
- JavaScript to use ``fetch()`` from within a web page, making it possible to control and interact
- with a PyEnSight-launched EnSight instance directly from the browser. Moreover, both
- PyEnSight and REST calls can be used to talk to the same EnSight session, making it
- possible to communicate between browser JavaScript and PyEnSight Python scripts using
- the EnSight instance as a common communication hub.
-
-
-Remote Python functions
------------------------
-
-Continuing the example, the REST API can be used to define a Python function in the
-remote EnSight session. First define the function::
-
- >>> foo_src = "def foo(n:int = 1):\n return list(numpy.random.rand(n))\n"
- >>> requests.put(uri_base+"/def_func/myapp/foo", json=foo_src, params=dict(imports="numpy"))
-
-
-
-The preceding code uses the provided function source code to define a function named ``foo``
-in the ``myapp`` namespace. The function being defined should use keywords only, no
-positional arguments.
-
-.. note::
- If the namespace does not exist, it is created.
-
-The function also makes use of the ``numpy`` module. A function must either import
-the module inside of the function or include the names of the modules in the ``imports``
-query options as a comma-separated list of module names. Because numpy arrays do not
-directly support serialization to JSON, a list is used for the returned value.
-
-Once the function has been defined, it can be called like this::
-
- >>> requests.put(uri_base+"/call_func/myapp/foo", json=dict(n=3)).json()
- [0.2024879142048186, 0.7627361155568255, 0.6102904199228575]
-
-
-The returned JSON is a list of three random floating point numbers.
-
-
-Direct commands
----------------
-
-The native API can be called directly using the REST API::
-
- >>> requests.put(uri_base+"/cmd/ensight.view_transf.rotate", json=[5.2,10.4,0]).json()
- 0
-
-
-The EnSight view rotates accordingly. The object API can also be called directly.
-You can get or set object attributes in various forms on single objects or lists of objects::
-
- >>> requests.get(uri_base+"/ensobjs/ensight.objs.core/PARTS").json()
- ['@ENSOBJ=1022@']
- >>> requests.get(uri_base+"/ensobjs/ensight.objs.core/PARTS", params=dict(returns="VISIBLE,__OBJID__")).json()
- [[True, 1022]]
- >>> requests.put(uri_base+"/ensobjs/1022/VISIBLE", json=False)
-
- >>> requests.put(uri_base+"/ensobjs/setattrs", json=dict(objects=["1022"], values=dict(VISIBLE=False)))
-
- >>> requests.put(uri_base+"/ensobjs/getattrs", json=[1022], params=dict(returns="DESCRIPTION,VISIBLE")).json()
- {'1022': ['Computational mesh', False]}
- >>> requests.put(uri_base+"/eval", json="ensight.objs.core").json()
- '@ENSOBJ=220@'
- >>> requests.put(uri_base+"/eval", json="ensight.objs.core", params=dict(returns="__OBJID__")).json()
- 220
-
-
-You can specify objects by name (``ensight.objs.core``) or by number (``220``) and return
-any attributes of the objects in a single call, reducing the number of REST calls needed
-for complex operations.
-
-
-Shared token security
----------------------
-
-The REST API leverages shared secrets to control access to the EnSight instance. Every
-PyEnSight-launched instance has a shared secret token that must be provided in all REST calls.
-This shared secret token can be accessed using the PyEnSight :attr:`secret_key`
-attribute. All REST APIs expect that the token be specified in one of two ways:
-
-- The token can be passed as part of the URL path in this form:
- ``{LOCATION}/ensight/v1/{TOKEN}/{OPERATION}``.
-- The token can be passed in an ``Authorization: Bearer TOKEN`` header. When you use
- this approach, you can pass any value in the URL path.
-
-If you supply tokens using both methods, the token in the header is used.
-
-REST API reference
-------------------
-
-.. openapi:: ensight_rest_v1.yaml
- :examples:
-
+.. _rest_api:
+
+
+********
+REST API
+********
+
+An EnSight session that is started using PyEnSight may enable the direct REST API.
+The REST API, which allows JavaScript code to directly access the EnSight Python
+APIs, is only available in EnSight 2024 R1 and later. To enable the REST API,
+you set the ``enable_rest_api`` keyword to ``True`` in the ``Launcher`` subclass constructor.
+
+.. note::
+
+    The information here is provided for reference only. The REST API has
+ been defined, but it is not currently enabled in EnSight. It is scheduled
+ for release in EnSight 2024 R1.
+
+
+REST API enablement via PyEnSight
+---------------------------------
+
+You can start the REST API service via the PyEnSight local launcher::
+
+ >>> from ansys.pyensight.core import LocalLauncher
+ >>> s = LocalLauncher(enable_rest_api=True).start()
+ >>> s.load_data(f"{s.cei_home}/ensight{s.cei_suffix}/data/cube/cube.case")
+ >>> uri_base = f"http://{s.hostname}:{s.html_port}/ensight/v1/{s.secret_key}"
+
+
+The base URI looks something like this, but the port and GUID can vary:
+``http://127.0.0.1:36474/ensight/v1/b7c04700-0a27-11ee-be68-381428170733``.
+
+
+Basic REST API
+--------------
+
+You can use the string from the previous example to execute REST calls via
+Python ``requests``::
+
+ >>> import requests
+ >>> requests.put(uri_base+"/eval", json="ensight.objs.core.PARTS").json()
+ ['@ENSOBJ=1022@']
+ >>> requests.put(uri_base+"/eval", json="ensight.objs.core.PARTS", params=dict(returns="DESCRIPTION,VISIBLE")).json()
+ [['Computational mesh', True]]
+
+
+These REST calls run the ``ensight.objs.core.PARTS`` command and output
+something like ``['@ENSOBJ=1022@']``, a reference to object 1022. The ``returns`` query
+option is then used to return the ``DESCRIPTION`` and ``VISIBLE`` attributes. In this
+case, the output for the second ``PUT`` is ``[['Computational mesh', True]]``.
+
+.. note::
+
+   Examples here use Python ``requests`` to execute REST calls, but tools like
+   cURL and Swagger can also be used. The intended use of the REST API is from
+   JavaScript, via ``fetch()`` within a web page, making it possible to control and interact
+ with a PyEnSight-launched EnSight instance directly from the browser. Moreover, both
+ PyEnSight and REST calls can be used to talk to the same EnSight session, making it
+ possible to communicate between browser JavaScript and PyEnSight Python scripts using
+ the EnSight instance as a common communication hub.
+
+
+Remote Python functions
+-----------------------
+
+Continuing the example, the REST API can be used to define a Python function in the
+remote EnSight session. First define the function::
+
+ >>> foo_src = "def foo(n:int = 1):\n return list(numpy.random.rand(n))\n"
+ >>> requests.put(uri_base+"/def_func/myapp/foo", json=foo_src, params=dict(imports="numpy"))
+
+
+
+The preceding code uses the provided function source code to define a function named ``foo``
+in the ``myapp`` namespace. The function being defined should use keyword arguments only, no
+positional arguments.
+
+.. note::
+ If the namespace does not exist, it is created.
+
+The function also makes use of the ``numpy`` module. A function must either import
+the module inside the function or include the module names in the ``imports``
+query option as a comma-separated list. Because numpy arrays do not
+directly support serialization to JSON, a list is used for the return value.
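+
+As a sketch of the alternative approach (importing inside the function body instead
+of using the ``imports`` query option), the following hypothetical function behaves
+the same way::
+
+    >>> bar_src = "def bar(n:int = 1):\n    import numpy\n    return list(numpy.random.rand(n))\n"
+    >>> requests.put(uri_base+"/def_func/myapp/bar", json=bar_src)
+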
+
+Once the function has been defined, it can be called like this::
+
+ >>> requests.put(uri_base+"/call_func/myapp/foo", json=dict(n=3)).json()
+ [0.2024879142048186, 0.7627361155568255, 0.6102904199228575]
+
+
+The returned JSON is a list of three random floating-point numbers.
+
+
+Direct commands
+---------------
+
+The native API can be called directly using the REST API::
+
+ >>> requests.put(uri_base+"/cmd/ensight.view_transf.rotate", json=[5.2,10.4,0]).json()
+ 0
+
+
+The EnSight view rotates accordingly. The object API can also be called directly.
+You can get or set object attributes in various forms on single objects or lists of objects::
+
+ >>> requests.get(uri_base+"/ensobjs/ensight.objs.core/PARTS").json()
+ ['@ENSOBJ=1022@']
+ >>> requests.get(uri_base+"/ensobjs/ensight.objs.core/PARTS", params=dict(returns="VISIBLE,__OBJID__")).json()
+ [[True, 1022]]
+ >>> requests.put(uri_base+"/ensobjs/1022/VISIBLE", json=False)
+
+ >>> requests.put(uri_base+"/ensobjs/setattrs", json=dict(objects=["1022"], values=dict(VISIBLE=False)))
+
+ >>> requests.put(uri_base+"/ensobjs/getattrs", json=[1022], params=dict(returns="DESCRIPTION,VISIBLE")).json()
+ {'1022': ['Computational mesh', False]}
+ >>> requests.put(uri_base+"/eval", json="ensight.objs.core").json()
+ '@ENSOBJ=220@'
+ >>> requests.put(uri_base+"/eval", json="ensight.objs.core", params=dict(returns="__OBJID__")).json()
+ 220
+
+
+You can specify objects by name (``ensight.objs.core``) or by number (``220``) and return
+any attributes of the objects in a single call, reducing the number of REST calls needed
+for complex operations.
+
+
+Shared token security
+---------------------
+
+The REST API leverages shared secrets to control access to the EnSight instance. Every
+PyEnSight-launched instance has a shared secret token that must be provided in all REST calls.
+This shared secret token can be accessed using the PyEnSight :attr:`secret_key`
+attribute. All REST calls expect the token to be specified in one of two ways:
+
+- The token can be passed as part of the URL path in this form:
+ ``{LOCATION}/ensight/v1/{TOKEN}/{OPERATION}``.
+- The token can be passed in an ``Authorization: Bearer TOKEN`` header. When you use
+ this approach, you can pass any value in the URL path.
+
+If you supply tokens using both methods, the token in the header is used.
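+
+For example, a minimal sketch of the header approach using Python ``requests``
+(any placeholder can stand in for the token segment of the path)::
+
+    >>> headers = {"Authorization": f"Bearer {s.secret_key}"}
+    >>> uri = f"http://{s.hostname}:{s.html_port}/ensight/v1/unused/eval"
+    >>> requests.put(uri, json="ensight.objs.core.PARTS", headers=headers).json()
+    ['@ENSOBJ=1022@']
+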
+
+REST API reference
+------------------
+
+.. openapi:: ensight_rest_v1.yaml
+ :examples:
+
diff --git a/doc/source/user_guide/api_differences.rst b/doc/source/user_guide/api_differences.rst
index 095ed146115..c1db5817018 100644
--- a/doc/source/user_guide/api_differences.rst
+++ b/doc/source/user_guide/api_differences.rst
@@ -1,31 +1,31 @@
-.. _api_differences:
-
-Python API differences in EnSight versus PyEnSight
-==================================================
-
-There are a few differences between the EnSight Python API and the
-PyEnSight API. Generally, the ``ensight`` module in EnSight and the
-``Session.ensight`` class instance have the same interface. Most
-source code written against this API runs in both environment.
-
-Free ``ENS_GROUP`` objects
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-In EnSight, the following code is legal::
-
- group = ensight.objs.core.create_group()
-
-
-This code is not legal in PyEnSight because the target object (*group*) does not exist
-in the EnSight session. In general, methods that create free ``ENS_GROUP``
-objects have been removed from the PyEnSight API.
-
-Object class specialization
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-In EnSight, ``ENS_PART`` proxy objects are used for all part types. Model,
-Clip, and Iso-contours are all of the ``ENS_PART`` class. In PyEnSight,
-model parts are of the ``ENS_PART_MODEL`` class, and clips are of the
-``ENS_PART_CLIP`` class. These are both subclasses of PyEnSight's ``ENS_PART``
-class. This mechanism applies to ``ENS_PART``, ``ENS_ANNOT``, and ``ENS_TOOL``
-classes.
+.. _api_differences:
+
+Python API differences in EnSight versus PyEnSight
+==================================================
+
+There are a few differences between the EnSight Python API and the
+PyEnSight API. Generally, the ``ensight`` module in EnSight and the
+``Session.ensight`` class instance have the same interface. Most
+source code written against this API runs in both environments.
+
+Free ``ENS_GROUP`` objects
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In EnSight, the following code is legal::
+
+ group = ensight.objs.core.create_group()
+
+
+This code is not legal in PyEnSight because the target object (*group*) does not exist
+in the EnSight session. In general, methods that create free ``ENS_GROUP``
+objects have been removed from the PyEnSight API.
+
+Object class specialization
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In EnSight, ``ENS_PART`` proxy objects are used for all part types. Model parts,
+clips, and iso-contours are all of the ``ENS_PART`` class. In PyEnSight,
+model parts are of the ``ENS_PART_MODEL`` class, and clips are of the
+``ENS_PART_CLIP`` class. These are both subclasses of PyEnSight's ``ENS_PART``
+class. This mechanism applies to ``ENS_PART``, ``ENS_ANNOT``, and ``ENS_TOOL``
+classes.
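+
+For example, a minimal sketch of the class specialization, assuming a connected
+``session`` and a clip part object ``clip`` created through the PyEnSight API::
+
+    from ansys.api.pyensight.ens_part import ENS_PART
+    from ansys.api.pyensight.ens_part_clip import ENS_PART_CLIP
+
+    # a clip created through PyEnSight is an ENS_PART_CLIP instance
+    assert isinstance(clip, ENS_PART_CLIP)
+    # ENS_PART_CLIP is a subclass of ENS_PART, so this check also holds
+    assert isinstance(clip, ENS_PART)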
diff --git a/doc/source/user_guide/cmdlang_native.rst b/doc/source/user_guide/cmdlang_native.rst
index fec32d08b24..3f63a05aa8d 100644
--- a/doc/source/user_guide/cmdlang_native.rst
+++ b/doc/source/user_guide/cmdlang_native.rst
@@ -1,264 +1,264 @@
-.. _ref_cmdlang_native:
-
-EnSight command language native Python API
-==========================================
-
-Since its inception, EnSight has had a journaling language, commonly referred to as
-the *command language*. Every operation in EnSight can be captured in command language.
-Scripts can be generated using this language and played back in the GUI or in batch mode.
-Furthermore, features like context files (that capture a session state) do so using
-a command language variant. Command language is not documented, but it roughly follows
-the :ref:`EnSight architecture `. The :ref: `native Python API `
-is a binding to this command language interface.
-
-Command language overview
--------------------------
-
-A command language call has this general form: :samp:`{class}: {command} [values...]`.
-For example, :samp:`view_transf: rotate -4. 24. 0.` rotates the current view, and
-:samp:`data: replace data.case` loads a dataset into the current case.
-
-Commands may also follow a *begin/end* or *modify begin/end* structure, where commands
-of the same class are used between the begin and end commands. This form is used to *batch*
-a collection of changes into a single update operation. Finally, there is a
-*select begin/end* form for expanding values onto multiple lines.
-
-The language separates the specification of target objects and attribute changes into
-separate commands. Thus, there is always a *current selection* of all objects of
-a specific type, and many commands operate on the current selection. For example,
-this code sets the current part selection to part number 1 and then modifies the
-line width and part color::
-
- part: select_begin
- 1
- part: select_end
- part: modify_begin
- part: line_width 2
- part: colorby_rgb 1.0 0.0 0.0
- part: modify_end
-
-
-.. note::
- While most of the command language follows these two command forms, there are
- some commands that do not follow this form.
-
-As noted previously, EnSight maintains a *default* object of every type. You can
-use commands to modify most attributes on the default object. For example, you can
-use the ``create`` command to create a new object instance. The following code creates
-a clip by selecting the default clip object and then setting up the type and clip position
-value on the default clip. It then changes the current part selection to what parts should
-become the parent of the clip and calls the :samp:`clip: create` command to create the clip::
-
- clip: select_default
- clip: begin
- clip: domain intersect
- clip: tool xyz
- clip: value 0.5
- clip: end
- part: select_begin
- 1
- 2
- part: select_end
- clip: create
-
-
-PyEnSight command language binding
-----------------------------------
-
-The native Python API binding is a simple syntax conversion from command language
-into Python syntax. The ``command`` class is a module in Python under the ``session.ensight``
-module. All values are passed as native Python parameters. For the *select/begin/end*
-command form, only the ``begin`` command is used. All of the value lines can be specified
-as Python parameters or a list. The previous example becomes this Python script:
-
-.. code-block:: python
-
- session.ensight.clip.select_default()
- session.ensight.clip.begin()
- session.ensight.clip.domain("intersect")
- session.ensight.clip.tool("xyz")
- session.ensight.clip.value(0.5)
- session.ensight.clip.end()
- session.ensight.part.select_begin(1, 2)
- session.ensight.clip.create()
-
-
-Lists of objects can be used where multiple values are specified
-as parameters, which means that this syntax is also valid:
-
-.. code-block:: python
-
- session.ensight.part.select_begin([1, 2])
- session.ensight.part.modify_begin()
- session.ensight.part.colorby_rgb([0.0, 0.0, 1.0])
- session.ensight.part.modify_end()
-
-
-Native API debugging
-^^^^^^^^^^^^^^^^^^^^
-
-Every command also returns an error code, which is ``0`` on success. For example,
-:samp:`err = session.ensight.part.colorby_rgb([0.0,0.0,"sad"])` sets ``err`` to ``-1``.
-You can use the :func:`attrtree`
-method to enable Python exception handling instead of returning an error code. You should
-use the :func:`attrtree`
-method when debugging native API scripts.
-
-.. note::
- Because the exception setting is global, care should be taken to reset the error
- handling status on an error to ensure proper EnSight operation.
-
-
-This example shows how you can convert error return values into exceptions:
-
-.. code-block:: python
-
- try:
- session.ensight.sendmesgoptions(exception=True)
- session.ensight.part.select_begin([1, 2])
- session.ensight.part.colorby_rgb([0.0,0.0,"sad"])
- except RuntimeError as e:
- print("Error", e)
- finally:
- session.ensight.sendmesgoptions(exception=False)
-
-
-The code prints this error:
-
-:samp:`RuntimeError: Command: (part: colorby_rgb 0.0 0.0 sad ) returned: RGB color: bad parameter`
-
-
-GUI conversion
---------------
-There is a built-in mechanism to convert code in command language into Python. To do this,
-you first paste the command language into the Python editor. On EnSight's **Execution** tab,
-you can use the right-mouse button menu to select and copy lines of command language.
-
-Next, select the text in the editor and use the **Edit** menu to select either
-the **Convert selection to sendmesg()** or **Convert selection to native Python**
-option. In general, the native Python conversion results in much more readable Python code
-that is far easier to edit than the **Convert selection to sendmesg()** option. You should
-use the **Convert selection to native Python** option for all but legacy development.
-
-The **File** menu provides two items to execute the current file text in the EnSight Python
-interpreter. The **Run script** option causes the file contents to be executed in the global
-namespace (for example, like the ``execfile()`` function). The **Import script as module**
-option first saves the current file to disk and then executes a Python import operation on the
-file, which executes in a private namespace. Both options verify the syntax of the current
-file and allow for rapid prototyping.
-
-Special cases
--------------
-
-There are a number of commands in the EnSight command language that are not valid
-Python names. Here are a few examples::
-
- function: #_of_levels 5
- annotation: 3d_label_size 10.0
- command: print "hello"
- viewport: raise
-
-Here are some reasons that a name might be invalid:
-
-* Name contains an invalid character (such as ``#``).
-
-* Name begins with a digit (such as ``1``).
-
-* Name is a Python-reserved word (such as ``raise``).
-
-Invalid names are transformed using these rules:
-
-* ``#`` characters are replaced with the text *number*.
-
-* Names that start with a digit are prefixed with an underscore (_).
-
-* Names that are the same as a Python-reserved word are prefixed with an underscore (_).
-
-The previous examples are transformed as follows:
-
-.. code-block:: python
-
- session.ensight.function.number_of_levels(5)
- session.ensight.annotation._3d_label_size(10.0)
- session.ensight.command.print("hello")
- session.ensight.viewport._raise()
-
-
-.. _selection_transfer:
-
-Selection and the object API
-----------------------------
-
-The native API maintains a notion of a *current selection* with a collection
-of commands to manipulate it, such as :func: `select_begin()`.
-The object API reflects the EnSight GUI via SELECTED attributes and selection ``ENS_GROUP`` objects.
-Due to the implicit nature of the native API, until it is used, the native selection
-is not reflected in EnSight objects. When using both APIs in a single script, it can
-become necessary to synchronize the two notions of selection. This is done with the
-the :func:`get_mainpartlist_select()`
-command. This command sets the native selection to match the object selection. For example, this
-code allows the object selection mechanisms to be used to set up the part selection for
-subsequent native commands:
-
-.. code-block:: python
-
- p = session.ensight.objs.core.PARTS["rear body"][0]
- session.ensight.objs.core.selection().addchild(p, replace=1)
- session.ensight.part.get_mainpartlist_select()
- session.ensight.part.modify_begin()
- session.ensight.part.colorby_rgb(0.0,1.0,0.0)
- session.ensight.part.modify_end()
-
-
-.. _ensight_to_pyensight:
-
-Convert existing EnSight scripts to PyEnSight
----------------------------------------------
-
-PyEnSight has been designed to be fully compatible with the existing EnSight Python language,
-supporting both the *native* Python API and the *object* API. Indeed, the ``ensight`` attribute
-of a PyEnSight ``session`` object is a *clone* of the ``ensight`` module generated via
-introspection. This means that any attribute, object, instance, variable, and more
-available in the ``ensight`` module is also available in PyEnSight, which manages the
-communication with EnSight and the conversion of a command to its corresponding EnSight counterpart.
-However, you must make a few adjustments to port an existing EnSight script into
-PyEnSight. Here is a list of operations to perform to make the conversion:
-
-* All the calls to the ``ensight`` module and its attributes must be pre-fixed
- with the current session instance. For example:
-
-.. code-block:: python
-
- # Old syntax
- # ensight.objs.core.PARTS
- # New syntax
- session.ensight.objs.core.PARTS
-
-* The ``ensight`` module cannot be imported anymore because it is an attribute of the PyEnSight ``session`` object.
- However, you can use the ``scoped_name`` utility to mimic the syntax that you would obtain importing a module
- or a submodule:
-
-.. code-block:: python
-
- # Remove previous imports
- # import ensight
- # from ensight.objs import *
- # from ensight.objs import core
-
- # Create a ``scoped_name`` instance
- sn = session.ensight.utils.support.scoped_name
-
- # Create a context manager with the ``scoped_name`` instance, where you can
- # use the old syntax
-
- with sn(session.ensight) as ensight, sn(session.ensight.objs.core) as core:
- core.PARTS[0].DESCRIPTION
- ensight.view.bounds("ON")
-
-* The main advantage of using the ``scoped_Name`` instance is that the new syntax is also
- supported directly in EnSight. This greatly simplifies the porting of a PyEnSight script
- into EnSight.
-
-
-
-
+.. _ref_cmdlang_native:
+
+EnSight command language native Python API
+==========================================
+
+Since its inception, EnSight has had a journaling language, commonly referred to as
+the *command language*. Every operation in EnSight can be captured in command language.
+Scripts can be generated using this language and played back in the GUI or in batch mode.
+Furthermore, features like context files (which capture session state) are implemented using
+a command language variant. Command language is not documented, but it roughly follows
+the :ref:`EnSight architecture `. The :ref:`native Python API `
+is a binding to this command language interface.
+
+Command language overview
+-------------------------
+
+A command language call has this general form: :samp:`{class}: {command} [values...]`.
+For example, :samp:`view_transf: rotate -4. 24. 0.` rotates the current view, and
+:samp:`data: replace data.case` loads a dataset into the current case.
+
+Commands may also follow a *begin/end* or *modify begin/end* structure, where commands
+of the same class are used between the begin and end commands. This form is used to *batch*
+a collection of changes into a single update operation. Finally, there is a
+*select begin/end* form for expanding values onto multiple lines.
+
+The language separates the specification of target objects from the application of
+attribute changes, using separate commands for each. Thus, there is always a *current selection* of all objects of
+a specific type, and many commands operate on the current selection. For example,
+this code sets the current part selection to part number 1 and then modifies the
+line width and part color::
+
+ part: select_begin
+ 1
+ part: select_end
+ part: modify_begin
+ part: line_width 2
+ part: colorby_rgb 1.0 0.0 0.0
+ part: modify_end
+
+
+.. note::
+    While most of the command language follows these command forms, some
+    commands do not.
+
+As noted previously, EnSight maintains a *default* object of every type. You can
+use commands to modify most attributes on the default object and then use the
+``create`` command to create a new object instance from it. The following code creates
+a clip by selecting the default clip object and setting the type and clip position
+value on the default clip. It then changes the current part selection to the parts that
+should become the parents of the clip and calls the :samp:`clip: create` command to create the clip::
+
+ clip: select_default
+ clip: begin
+ clip: domain intersect
+ clip: tool xyz
+ clip: value 0.5
+ clip: end
+ part: select_begin
+ 1
+ 2
+ part: select_end
+ clip: create
+
+
+PyEnSight command language binding
+----------------------------------
+
+The native Python API binding is a simple syntax conversion from command language
+into Python. Each command language *class* becomes a Python module under the
+``session.ensight`` module, and all values are passed as native Python parameters.
+For the *select begin/end* command form, only the ``begin`` command is used; all of
+the value lines can be specified as Python parameters or a list. The previous
+example becomes this Python script:
+
+.. code-block:: python
+
+ session.ensight.clip.select_default()
+ session.ensight.clip.begin()
+ session.ensight.clip.domain("intersect")
+ session.ensight.clip.tool("xyz")
+ session.ensight.clip.value(0.5)
+ session.ensight.clip.end()
+ session.ensight.part.select_begin(1, 2)
+ session.ensight.clip.create()
+
+
+Lists of objects can be used where multiple values are specified
+as parameters, which means that this syntax is also valid:
+
+.. code-block:: python
+
+ session.ensight.part.select_begin([1, 2])
+ session.ensight.part.modify_begin()
+ session.ensight.part.colorby_rgb([0.0, 0.0, 1.0])
+ session.ensight.part.modify_end()
+
+
+Native API debugging
+^^^^^^^^^^^^^^^^^^^^
+
+Every command also returns an error code, which is ``0`` on success. For example,
+:samp:`err = session.ensight.part.colorby_rgb([0.0, 0.0, "sad"])` sets ``err`` to ``-1``.
+You can use the :func:`sendmesgoptions` method (used in the example below) to enable
+Python exception handling instead of error code returns. Enabling exceptions is
+recommended when debugging native API scripts.
+
+.. note::
+    Because the exception setting is global, care should be taken to restore the
+    error-handling state after an error (for example, in a ``finally`` block, as
+    shown below) to ensure proper EnSight operation.
+
+
+This example shows how you can convert error return values into exceptions:
+
+.. code-block:: python
+
+ try:
+ session.ensight.sendmesgoptions(exception=True)
+ session.ensight.part.select_begin([1, 2])
+        session.ensight.part.colorby_rgb([0.0, 0.0, "sad"])
+ except RuntimeError as e:
+ print("Error", e)
+ finally:
+ session.ensight.sendmesgoptions(exception=False)
+
+
+The code prints this error:
+
+:samp:`RuntimeError: Command: (part: colorby_rgb 0.0 0.0 sad ) returned: RGB color: bad parameter`
+
+
+GUI conversion
+--------------
+There is a built-in mechanism to convert code in command language into Python.
+On EnSight's **Execution** tab, use the right-mouse-button menu to select and copy
+lines of command language, and then paste them into the Python editor.
+
+Next, select the text in the editor and use the **Edit** menu to select either
+the **Convert selection to sendmesg()** or **Convert selection to native Python**
+option. In general, the native Python conversion results in much more readable Python code
+that is far easier to edit than the **Convert selection to sendmesg()** option. You should
+use the **Convert selection to native Python** option for all but legacy development.
+
+The **File** menu provides two items to execute the current file text in the EnSight Python
+interpreter. The **Run script** option executes the file contents in the global
+namespace (much like the Python 2 ``execfile()`` function). The **Import script as module**
+option first saves the current file to disk and then executes a Python import operation on the
+file, which executes in a private namespace. Both options verify the syntax of the current
+file and allow for rapid prototyping.
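+
+To make the distinction concrete, here is a small, generic Python sketch (not an
+EnSight API) of the two execution models: running file contents in the caller's
+global namespace versus importing the file as a module with a private namespace:
+
+.. code-block:: python
+
+    import importlib.util
+
+    # Model of "Run script": execute the file contents in the current global
+    # namespace, so any names the script defines become globals here.
+    with open("example.py") as f:
+        exec(f.read(), globals())
+
+    # Model of "Import script as module": the file executes in its own private
+    # module namespace, and its names are reached through the module object.
+    spec = importlib.util.spec_from_file_location("example", "example.py")
+    module = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(module)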
+
+Special cases
+-------------
+
+There are a number of commands in the EnSight command language that are not valid
+Python names. Here are a few examples::
+
+ function: #_of_levels 5
+ annotation: 3d_label_size 10.0
+ command: print "hello"
+ viewport: raise
+
+Here are some reasons that a name might be invalid:
+
+* Name contains an invalid character (such as ``#``).
+
+* Name begins with a digit (such as ``1``).
+
+* Name is a Python-reserved word (such as ``raise``).
+
+Invalid names are transformed using these rules:
+
+* ``#`` characters are replaced with the text *number*.
+
+* Names that start with a digit are prefixed with an underscore (_).
+
+* Names that are the same as a Python-reserved word are prefixed with an underscore (_).
+
+The previous examples are transformed as follows:
+
+.. code-block:: python
+
+ session.ensight.function.number_of_levels(5)
+ session.ensight.annotation._3d_label_size(10.0)
+ session.ensight.command.print("hello")
+ session.ensight.viewport._raise()
+
+
+.. _selection_transfer:
+
+Selection and the object API
+----------------------------
+
+The native API maintains a notion of a *current selection* with a collection
+of commands to manipulate it, such as :func:`select_begin()`.
+The object API reflects the EnSight GUI via ``SELECTED`` attributes and selection ``ENS_GROUP`` objects.
+Due to the implicit nature of the native API, until it is used, the native selection
+is not reflected in EnSight objects. When using both APIs in a single script, it can
+become necessary to synchronize the two notions of selection. This is done with
+the :func:`get_mainpartlist_select()`
+command, which sets the native selection to match the object selection. For example, this
+code allows the object selection mechanisms to be used to set up the part selection for
+subsequent native commands:
+
+.. code-block:: python
+
+ p = session.ensight.objs.core.PARTS["rear body"][0]
+ session.ensight.objs.core.selection().addchild(p, replace=1)
+ session.ensight.part.get_mainpartlist_select()
+ session.ensight.part.modify_begin()
+    session.ensight.part.colorby_rgb(0.0, 1.0, 0.0)
+ session.ensight.part.modify_end()
+
+
+.. _ensight_to_pyensight:
+
+Convert existing EnSight scripts to PyEnSight
+---------------------------------------------
+
+PyEnSight has been designed to be fully compatible with the existing EnSight Python language,
+supporting both the *native* Python API and the *object* API. Indeed, the ``ensight`` attribute
+of a PyEnSight ``session`` object is a *clone* of the ``ensight`` module generated via
+introspection. This means that any attribute, object, instance, variable, and more
+available in the ``ensight`` module is also available in PyEnSight, which manages the
+communication with EnSight and the conversion of a command to its corresponding EnSight counterpart.
+However, you must make a few adjustments to port an existing EnSight script into
+PyEnSight. Here is a list of operations to perform to make the conversion:
+
+* All the calls to the ``ensight`` module and its attributes must be prefixed
+ with the current session instance. For example:
+
+.. code-block:: python
+
+ # Old syntax
+ # ensight.objs.core.PARTS
+ # New syntax
+ session.ensight.objs.core.PARTS
+
+* The ``ensight`` module cannot be imported anymore because it is an attribute of the PyEnSight ``session`` object.
+  However, you can use the ``scoped_name`` utility to mimic the syntax that you would obtain
+  by importing a module or a submodule:
+
+.. code-block:: python
+
+ # Remove previous imports
+ # import ensight
+ # from ensight.objs import *
+ # from ensight.objs import core
+
+ # Create a ``scoped_name`` instance
+ sn = session.ensight.utils.support.scoped_name
+
+ # Create a context manager with the ``scoped_name`` instance, where you can
+ # use the old syntax
+
+ with sn(session.ensight) as ensight, sn(session.ensight.objs.core) as core:
+ core.PARTS[0].DESCRIPTION
+ ensight.view.bounds("ON")
+
+* The main advantage of using the ``scoped_name`` instance is that the new syntax is also
+ supported directly in EnSight. This greatly simplifies the porting of a PyEnSight script
+ into EnSight.
+
+
+
+
diff --git a/doc/source/user_guide/ensight_scripts.rst b/doc/source/user_guide/ensight_scripts.rst
index 007d43fc20c..7018e68420e 100644
--- a/doc/source/user_guide/ensight_scripts.rst
+++ b/doc/source/user_guide/ensight_scripts.rst
@@ -1,116 +1,116 @@
-.. _ref_ensight_scripts:
-
-EnSight Python scripts: Running and debugging
-=============================================
-
-EnSight supports the notion of a Python script, a parallel construct to the command language
-journaling script (``.enc`` file). This script is a file of Python commands that can be directly
-run from the EnSight command line or via the Python script editor built into EnSight.
-Many EnSight Python scripts are written using the **Python** tab script editor in the command
-dialog, often by translating EnSight command language into Python via the built-in tools.
-
-Here is an example of such a script::
-
- ensight.legend.select_palette_begin("Coordinates")
- ensight.legend.visible("ON")
- ensight.part.select_begin(4)
- ensight.variables.activate("Coordinates")
- ensight.part.modify_begin()
- ensight.part.colorby_palette("Coordinates")
- ensight.part.modify_end()
- ensight.legend.select_palette_begin("Coordinates")
- ensight.legend.visible("ON")
-
-
-The preceding script is effectively a line-by-line translation from the EnSight command
-language into the native Python bindings (:ref:`ref_cmdlang_native`). Such scripts can be
-executed by the script editor's **Run script** or **Import script as module** commands.
-These scripts assume that the ``ensight`` module has been imported, making
-it difficult to run them from inside of a PyEnSight session, where the ``ensight`` module is a
-property of the :class:`Session` object instance.
-
-
-Running EnSight Python scripts
-------------------------------
-
-To run scripts like the one in the preceding example in PyEnSight, you use the
-:func:`run_script` method. For example,
-assume that you have an EnSight Python script named ``"/home/ensight/example.py"``.
-You can use this code to run this script via the PyEnSight module::
-
- from ansys.pyensight.core import LocalLauncher
-
- session = LocalLauncher().start()
- _ = session.run_script("/home/ensight/example.py")
-
-
-The preceding code imports the ``example.py`` file into the interpreter. This results in
-an ``example`` module being imported. The imported module has the symbol ``ensight`` set to
-the current session ``ensight`` property and uses that interface to execute the script
-commands remotely.
-
-
-Debugging EnSight Python scripts
---------------------------------
-
-A common request is to be able to write and debug EnSight Python scripts in integrated development
-environments (IDEs) like Visual Studio Code. Assume that you have a file
-named ``example.py``::
-
- for vp in ensight.objs.core.VPORTS:
- print(vp.DESCRIPTION)
-
-
-In the same directory, assume that you have a launching script, ``runme.py``::
-
- from ansys.pyensight.core import LocalLauncher
-
- session = LocalLauncher(batch=False).start()
- _ = session.run_script("./example.py")
-
-
-In Visual Studio Code, you can insert a breakpoint on the ``print()`` line and debug the
-``example.py`` script when the ``runme.py`` script is run in debug mode from Visual Studio Code.
-Note that in this example, ``batch=False`` is specified in the ``LocalLauncher`` constructor.
-This causes the EnSight GUI to display as well, enabling direct interaction with the
-full EnSight app and debugging.
-
-
-Limitations
------------
-
-It is important to note that there are some important differences between an EnSight Python
-script run in EnSight versus in an IDE via the PyEnSight interface.
-
-Using the :func:`run_script` method causes the directory
-containing the EnSight Python script to be added to ``sys.path``, if it is not already added.
-
-
-Speed
-`````
-
-There is a significant difference in the speed with which the code can be executed. This
-is because the ``ensight`` commands are executed remotely and the results are returned. The
-workaround for this is to use the :func:`exec`
-method, but it requires that the code in the Python script be rewritten as a function.
-In debugging situations, this may not be a major issue.
-
-
-``ensight`` module
-```````````````````
-
-Another difference is that the nature of the ``ensight`` object in the script is very different.
-When running in EnSight, it is a true Python module. When running via the
-:func:`run_script` method, the object
-is an instance of the ``ensight_api`` class. In general, these both provide the same API, but
-they are not identical. This approach includes the general API limitations
-described in :ref:`api_differences`.
-
-
-Import versus run
-`````````````````
-
-The :func:`run_script` method always uses
-the module import mechanism to "run" the scripts. EnSight Python scripts that do not
-run in the EnSight script editor using the **Import script as module** menu command
-cannot be used with this system.
+.. _ref_ensight_scripts:
+
+EnSight Python scripts: Running and debugging
+=============================================
+
+EnSight supports the notion of a Python script, a parallel construct to the command language
+journaling script (``.enc`` file). This script is a file of Python commands that can be directly
+run from the EnSight command line or via the Python script editor built into EnSight.
+Many EnSight Python scripts are written using the **Python** tab script editor in the command
+dialog, often by translating EnSight command language into Python via the built-in tools.
+
+Here is an example of such a script::
+
+ ensight.legend.select_palette_begin("Coordinates")
+ ensight.legend.visible("ON")
+ ensight.part.select_begin(4)
+ ensight.variables.activate("Coordinates")
+ ensight.part.modify_begin()
+ ensight.part.colorby_palette("Coordinates")
+ ensight.part.modify_end()
+ ensight.legend.select_palette_begin("Coordinates")
+ ensight.legend.visible("ON")
+
+
+The preceding script is effectively a line-by-line translation from the EnSight command
+language into the native Python bindings (:ref:`ref_cmdlang_native`). Such scripts can be
+executed by the script editor's **Run script** or **Import script as module** commands.
+These scripts assume that the ``ensight`` module has been imported, making
+it difficult to run them from inside a PyEnSight session, where the ``ensight`` module is a
+property of the :class:`Session` object instance.
+
+
+Running EnSight Python scripts
+------------------------------
+
+To run scripts like the one in the preceding example in PyEnSight, you use the
+:func:`run_script` method. For example,
+assume that you have an EnSight Python script named ``"/home/ensight/example.py"``.
+You can use this code to run this script via the PyEnSight module::
+
+ from ansys.pyensight.core import LocalLauncher
+
+ session = LocalLauncher().start()
+ _ = session.run_script("/home/ensight/example.py")
+
+
+The preceding code imports the ``example.py`` file into the interpreter, resulting in
+an ``example`` module. The imported module has the symbol ``ensight`` set to
+the current session ``ensight`` property and uses that interface to execute the script
+commands remotely.
+
+
+Debugging EnSight Python scripts
+--------------------------------
+
+A common request is to be able to write and debug EnSight Python scripts in integrated development
+environments (IDEs) like Visual Studio Code. Assume that you have a file
+named ``example.py``::
+
+ for vp in ensight.objs.core.VPORTS:
+ print(vp.DESCRIPTION)
+
+
+In the same directory, assume that you have a launching script, ``runme.py``::
+
+ from ansys.pyensight.core import LocalLauncher
+
+ session = LocalLauncher(batch=False).start()
+ _ = session.run_script("./example.py")
+
+
+In Visual Studio Code, you can insert a breakpoint on the ``print()`` line and debug the
+``example.py`` script when the ``runme.py`` script is run in debug mode from Visual Studio Code.
+Note that in this example, ``batch=False`` is specified in the ``LocalLauncher`` constructor.
+This causes the EnSight GUI to display as well, enabling direct interaction with the
+full EnSight app and debugging.
+
+
+Limitations
+-----------
+
+There are some important differences between running an EnSight Python
+script in EnSight versus running it in an IDE via the PyEnSight interface.
+
+Using the :func:`run_script` method causes the directory
+containing the EnSight Python script to be added to ``sys.path``, if it is not already present.
+
+
+Speed
+`````
+
+Code executes significantly more slowly via PyEnSight because each ``ensight``
+command is executed remotely and its results are returned over the network. The
+workaround is to use the :func:`exec`
+method, but it requires that the code in the Python script be rewritten as a function.
+In debugging situations, this may not be a major issue.
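+
+As a rough illustration, the following is a minimal sketch of that rewrite. It assumes
+the :func:`exec` convention in which the wrapped function receives the ``ensight``
+interface as its first argument and ``remote=True`` requests execution inside the
+EnSight interpreter:
+
+.. code-block:: python
+
+    def list_viewports(ensight):
+        # Executes as a single remote call rather than one round trip per line
+        return [vp.DESCRIPTION for vp in ensight.objs.core.VPORTS]
+
+    names = session.exec(list_viewports, remote=True)
+    print(names)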
+
+
+``ensight`` module
+```````````````````
+
+The nature of the ``ensight`` object in the script also differs.
+When running in EnSight, it is a true Python module. When running via the
+:func:`run_script` method, the object
+is an instance of the ``ensight_api`` class. In general, these both provide the same API, but
+they are not identical. This approach includes the general API limitations
+described in :ref:`api_differences`.
+
+
+Import versus run
+`````````````````
+
+The :func:`run_script` method always uses
+the module import mechanism to "run" the scripts. EnSight Python scripts that do not
+run in the EnSight script editor using the **Import script as module** menu command
+cannot be used with this system.
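+
+As a generic Python illustration (not an EnSight-specific rule), a script that guards
+its work behind a ``__main__`` check does nothing when imported as a module, so it
+cannot be driven through the :func:`run_script` method:
+
+.. code-block:: python
+
+    # example_guarded.py
+    def setup_scene(ensight):
+        ensight.view.bounds("ON")
+
+    if __name__ == "__main__":
+        # This block runs only when the file is executed directly. Under an
+        # import (and therefore under run_script), it is skipped, so the
+        # scene setup never happens.
+        setup_scene(ensight)  # noqa: F821 -- ensight exists only inside EnSight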
diff --git a/doc/source/user_guide/object_api.rst b/doc/source/user_guide/object_api.rst
index 39e584b3255..cefa04e78a6 100644
--- a/doc/source/user_guide/object_api.rst
+++ b/doc/source/user_guide/object_api.rst
@@ -1,139 +1,139 @@
-.. _ref_object_api:
-
-EnSight object API
-==================
-
-The object interface directly exposes core EnSight objects using *proxy* Python objects
-that hold references to the underlying C++ objects. These proxy objects can be used to
-get or set attributes on the C++ objects as well as call methods on them. The object
-interface has a number of advantages over the native interface.
-
-- Attributes support the event callback mechanism. This makes it possible to
- register callbacks in Python that are executed when a specific set of attributes
- change on a single object or a class of objects.
-- The interface supports the specification of objects via name, ID, or object. This helps
- remove ambiguity and adds flexibility when passing things like parts.
-- In general, the object API does not necessitate the use of stored state, meaning
- the command language's *currently selected parts* notion. The API provides access
- to and the ability to change the state for interoperability with the native API,
- but it does not require its use.
-
-Proxy objects: ``ENSOBJ`` class
--------------------------------
-
-The object interface revolves around proxy object classes. The base class for these
-objects is the ``ENSOBJ`` class. The object is a wrapper around an EnSight
-object ID. An EnSight object ID is a monotonically increasing 64-bit integer, unique for a
-given EnSight session. The proxy object stores the object ID in the :samp:`objid` object and
-can make method and attribute calls directly on the C++ core objects via that ID. The
-``ENSOBJ`` interface supports attribute introspection, including attribute names, types, and
-general organization. In most cases where an attribute takes an object, the API supports
-objects, descriptions, and IDs, making transition between the various APIs fairly seamless.
-For example, the Python bindings search the variable list for names and IDs if
-those types are provided::
-
- part = session.objs.core.PARTS["Clip"][0]
- var = session.ensight.objs.core.VARIABLES["temperature"][0]
- print(var, var.DESCRIPTION, var.ID)
- # The COLORBYPALETTE attribute is defined as being a ENS_VAR object.
- # It is legal to pass the object, the object name or the object ID.
- p.COLORBYPALETTE = var
- p.COLORBYPALETTE = var.DESCRIPTION
- p.COLORBYPALETTE = var.ID
-
-
-Interface root: ``ensight.objs`` module
----------------------------------------
-
-The session ``ensight.objs`` module serves as the access point into the object interface.
-All other EnSight objects can be accessed from methods and objects in this
-module. Key modules and objects include ``ensight.objs.enums`` (enumerations used for
-attribute IDs and values) and all proxy object base classes (such as the
-``ensight.objs.ENS_VAR`` class). There are also methods for searching for objects
-and manipulating object IDs.
-
-
-Global state: ``ensight.objs.core`` object
-------------------------------------------
-
-Access to the global state of the EnSight session is stored in an ``ENS_GLOBALS`` singleton object
-accessed by the :samp:`session.ensight.objs.core` object. All other object instances can be
-accessed through attributes or methods on this object. For example, ``ENS_PART`` objects can
-be accessed via the ``PARTS`` property, and ``ENS_VAR`` objects can be accessed via the
-``VARIABLES`` property.
-
-
-Attributes
-----------
-
-The state of all objects is stored as a collection of attributes on the object.
-There are a collection of methods for accessing attributes. Here is an example::
-
- p = session.ensight.objs.core.PARTS[0]
- p.DESCRIPTION = "HELLO"
- p.setattr("DESCRIPTION", "HELLO")
- p.setattr(session.ensight.objs.enums.DESCRIPTION, "HELLO")
- p.setattrs(dict(DESCRIPTION="HELLO"))
-
-
-Attribute names may be specified using string names or enumerations. There are multiple
-interfaces to get or set attributes. You can use the :func:`attrinfo`
-method or the :func:`attrtree`
-method to access detailed information about an attribute. Descriptions of attributes
-are available in multiple languages, which can be selected via the :samp:`Session.language`
-property.
-
-One nuance to the attribute interface is that all object attributes are lists. Thus,
-while an attribute like :samp:`COLORBYPALETTE` is a single variable object, it is always
-returned as a list.
-
-Finally, objects are always returned as ``ensobjlist`` instances. This is a subclass
-of a Python list object that includes extra methods for searching via ``'[]'``
-indexing and the :func:`find` method as well as
-calls to get or set attribute values in bulk on all the objects in the container.
-
-It is also possible to create user-defined attributes. These may hold simple
-values, string, integers, or floats. They are stored in the ``METADATA`` attribute,
-but they behave the same as intrinsic attributes.
-
-Events
-------
-
-Whenever an attribute changes its value, an event is generated. Callback functions
-can be attached to these events. Thus, a PyEnSight app can respond to changes
-in state caused by Python calls or intrinsic changes in the EnSight core state (such
-as a time-varying animation playback). Here is a simple example that connects the
-``part_event()`` function to any changes in the ``VISIBLE`` or ``COLORBYRGB`` properties
-on any ``ENS_PART`` subclass object::
-
- def part_event(uri: str):
- p = urlparse(uri)
- q = parse_qs(p.query)
- obj = session.ensight.objs.wrap_id(int(q["uid"][0]))
- value = obj.getattr(q["enum"][0])
- part_disp.value = f"Part: {obj}, Attribute: {q['enum'][0]} Value: {value}"
-
- attribs = [session.ensight.objs.enums.VISIBLE, session.ensight.objs.enums.COLORBYRGB]
- session.add_callback("'ENS_PART'", "partattr", attribs, part_event)
-
-
-Replacing the ``ENS_PART`` string with a specific ``ENSOBJ`` instance would limit the
-function to the one specific object instance rather than a class of objects.
-
-
-Selection and the native API
-----------------------------
-
-Unlike the native API, the object API does not require a "current selection" as
-the target of all operations is explicit. The object API supports SELECTED attributes
-and selection group objects. These directly represent the state of the EnSight GUI.
-In cases where one would like to use both APIs in a single script, it can become
-necessary to synchronize these two selections. See: :ref:`selection_transfer` for details
-on how this can be done.
-
-
-Tips and tricks
----------------
-
-You can access an additional collection of EnSight-specific Python notes in the
-`Python and EnSight documentation `_.
+.. _ref_object_api:
+
+EnSight object API
+==================
+
+The object interface directly exposes core EnSight objects using *proxy* Python objects
+that hold references to the underlying C++ objects. These proxy objects can be used to
+get or set attributes on the C++ objects as well as call methods on them. The object
+interface has a number of advantages over the native interface.
+
+- Attributes support the event callback mechanism. This makes it possible to
+ register callbacks in Python that are executed when a specific set of attributes
+ change on a single object or a class of objects.
+- The interface supports the specification of objects via name, ID, or object. This helps
+ remove ambiguity and adds flexibility when passing things like parts.
+- In general, the object API does not rely on stored state, such as the command
+  language's notion of *currently selected parts*. The API provides access to this
+  state, and the ability to change it, for interoperability with the native API,
+  but it does not require its use.
+
+Proxy objects: ``ENSOBJ`` class
+-------------------------------
+
+The object interface revolves around proxy object classes. The base class for these
+objects is the ``ENSOBJ`` class. Each proxy object is a wrapper around an EnSight
+object ID: a monotonically increasing 64-bit integer, unique for a
+given EnSight session. The proxy object stores this ID in its :samp:`objid` property and
+can make method and attribute calls directly on the C++ core objects via that ID. The
+``ENSOBJ`` interface supports attribute introspection, including attribute names, types, and
+general organization. In most cases where an attribute takes an object, the API supports
+objects, descriptions, and IDs, making transition between the various APIs fairly seamless.
+For example, the Python bindings search the variable list for names and IDs if
+those types are provided::
+
+    p = session.ensight.objs.core.PARTS["Clip"][0]
+    var = session.ensight.objs.core.VARIABLES["temperature"][0]
+    print(var, var.DESCRIPTION, var.ID)
+    # The COLORBYPALETTE attribute is defined as being an ENS_VAR object.
+    # It is legal to pass the object, the object name, or the object ID.
+    p.COLORBYPALETTE = var
+    p.COLORBYPALETTE = var.DESCRIPTION
+    p.COLORBYPALETTE = var.ID
+
+
+Interface root: ``ensight.objs`` module
+---------------------------------------
+
+The session ``ensight.objs`` module serves as the access point into the object interface.
+All other EnSight objects can be accessed from methods and objects in this
+module. Key modules and objects include ``ensight.objs.enums`` (enumerations used for
+attribute IDs and values) and all proxy object base classes (such as the
+``ensight.objs.ENS_VAR`` class). There are also methods for searching for objects
+and manipulating object IDs.
+
+
+Global state: ``ensight.objs.core`` object
+------------------------------------------
+
+The global state of the EnSight session is stored in an ``ENS_GLOBALS`` singleton object,
+accessed as :samp:`session.ensight.objs.core`. All other object instances can be
+accessed through attributes or methods on this object. For example, ``ENS_PART`` objects can
+be accessed via the ``PARTS`` property, and ``ENS_VAR`` objects can be accessed via the
+``VARIABLES`` property.
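+
+For example, you can enumerate the names of the current parts and variables directly
+from this object:
+
+.. code-block:: python
+
+    core = session.ensight.objs.core
+    # DESCRIPTION holds the display name of each object
+    print([p.DESCRIPTION for p in core.PARTS])
+    print([v.DESCRIPTION for v in core.VARIABLES])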
+
+
+Attributes
+----------
+
+The state of all objects is stored as a collection of attributes on the object.
+A collection of methods is available for accessing attributes. Here is an example::
+
+ p = session.ensight.objs.core.PARTS[0]
+ p.DESCRIPTION = "HELLO"
+ p.setattr("DESCRIPTION", "HELLO")
+ p.setattr(session.ensight.objs.enums.DESCRIPTION, "HELLO")
+ p.setattrs(dict(DESCRIPTION="HELLO"))
+
+
+Attribute names may be specified using string names or enumerations. There are multiple
+interfaces to get or set attributes. You can use the :func:`attrinfo`
+method or the :func:`attrtree`
+method to access detailed information about an attribute. Descriptions of attributes
+are available in multiple languages, which can be selected via the :samp:`Session.language`
+property.
+
+One nuance to the attribute interface is that all object attributes are lists. Thus,
+while an attribute like :samp:`COLORBYPALETTE` is a single variable object, it is always
+returned as a list.
+
+Finally, objects are always returned as ``ensobjlist`` instances. This is a subclass
+of the Python list object that includes extra methods for searching via ``[]``
+indexing and the :func:`find` method, as well as
+calls to get or set attribute values in bulk on all the objects in the container.
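+
+The following is a short sketch of that container interface. The string-indexing
+behavior is the one used in the earlier ``PARTS["Clip"]`` example; the exact
+signatures of the bulk accessors are assumptions, so check the ``ensobjlist``
+reference before relying on them:
+
+.. code-block:: python
+
+    parts = session.ensight.objs.core.PARTS
+    # String indexing searches the container, returning another ensobjlist
+    clips = parts["Clip"]
+    # find() performs the same kind of search explicitly
+    clips = parts.find("Clip")
+    # Bulk get/set of an attribute over every object in the container
+    names = parts.get_attr("DESCRIPTION")
+    parts.set_attr("VISIBLE", True)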
+
+It is also possible to create user-defined attributes. These may hold simple
+values: strings, integers, or floats. They are stored in the ``METADATA`` attribute,
+but they behave the same as intrinsic attributes.
+
+Events
+------
+
+Whenever an attribute changes its value, an event is generated. Callback functions
+can be attached to these events. Thus, a PyEnSight app can respond to changes
+in state caused by Python calls or intrinsic changes in the EnSight core state (such
+as a time-varying animation playback). Here is a simple example that connects the
+``part_event()`` function to any changes in the ``VISIBLE`` or ``COLORBYRGB`` properties
+on any ``ENS_PART`` subclass object::
+
+    from urllib.parse import parse_qs, urlparse
+
+    def part_event(uri: str):
+        # The event arrives as a URI; parse out the object ID and attribute enum
+        p = urlparse(uri)
+        q = parse_qs(p.query)
+        obj = session.ensight.objs.wrap_id(int(q["uid"][0]))
+        value = obj.getattr(q["enum"][0])
+        # part_disp is assumed to be a pre-existing display widget
+        part_disp.value = f"Part: {obj}, Attribute: {q['enum'][0]} Value: {value}"
+
+    attribs = [session.ensight.objs.enums.VISIBLE, session.ensight.objs.enums.COLORBYRGB]
+    session.add_callback("'ENS_PART'", "partattr", attribs, part_event)
+
+
+Replacing the ``'ENS_PART'`` string with a specific ``ENSOBJ`` instance limits the
+callback to that one object instance rather than to a class of objects, as shown below.
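+
+For example, reusing the callback defined above (a sketch):
+
+.. code-block:: python
+
+    # Register for a single part instance instead of the whole ENS_PART class
+    one_part = session.ensight.objs.core.PARTS[0]
+    session.add_callback(one_part, "partattr", attribs, part_event)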
+
+
+Selection and the native API
+----------------------------
+
+Unlike the native API, the object API does not require a "current selection" because
+the target of every operation is explicit. The object API supports ``SELECTED`` attributes
+and selection group objects, which directly represent the state of the EnSight GUI.
+In cases where one would like to use both APIs in a single script, it can become
+necessary to synchronize the two selections. See :ref:`selection_transfer` for details
+on how this can be done.
+
+
+Tips and tricks
+---------------
+
+You can access an additional collection of EnSight-specific Python notes in the
+`Python and EnSight documentation `_.
diff --git a/doc/source/user_guide/omniverse_info.rst b/doc/source/user_guide/omniverse_info.rst
index ca1e2c8bc1e..b453c2d640f 100644
--- a/doc/source/user_guide/omniverse_info.rst
+++ b/doc/source/user_guide/omniverse_info.rst
@@ -1,335 +1,335 @@
-.. _omniverse_info:
-
-PyEnSight/ANSYS Omniverse Interface
-===================================
-
-PyEnSight includes an interface to export geometry representing the
-current EnSight scene to an Omniverse server. The interface supports
-EnSight 2023 R2 or later. It leverages the EnSight Dynamic Scene Graph
-gRPC interface to access the scene. It is also possible to export
-EnSight scene as glTF (GLB) format files and convert them into USD
-format.
-
-The API is available through a PyEnSight session instance, from EnSight
-Python directly as (ensight.utils.omniverse for 2025 R1 and later) and
-from within Omniverse applications via the ansys.tools.omniverse.core and
-ansys.tools.omniverse.dsgui kit extensions.
-
-The Python API is defined here: :class:`Omniverse`.
-
-
-PyEnSight and EnSight Python API
---------------------------------
-
-The API can be used directly from a local Python installation of the
-`ansys-pyensight-core `_ module:
-
-.. code-block:: python
-
- from ansys.pyensight.core import LocalLauncher
- s = LocalLauncher(batch=False).start()
- s.load_example("waterbreak.ens")
- # The directory to save USD representation into
- usd_directory = "/omniverse/examples/water"
- # Start a new connection between EnSight and Omniverse
- s.ensight.utils.omniverse.create_connection(usd_directory)
- # Do some work...
- # Push a scene update
- s.ensight.utils.omniverse.update()
-
-
-.. note::
-
- The ``batch=False`` option used in the examples causes the EnSight
- GUI to be displayed together with the Omniverse Composer GUI.
-
- It is possible to run a pyensight script from inside of an Omniverse
- kit application. In this case, care must be taken to close the EnSight
- session before exiting the Omniverse application hosting the PyEnSight
- session or is it possible to leave the EnSight instance running.
-
-
-From inside an EnSight session, the API is similar:
-
-.. code-block:: python
-
- # Start a DSG server in EnSight first
- (_, grpc_port, security) = ensight.objs.core.grpc_server(port=0, start=True)
- # Start a new connection between the EnSight DSG server and Omniverse
- options = {"host": "127.0.0.1", "port": str(grpc_port)}
- if security:
- options["security"] = security
- usd_directory = "/omniverse/examples/water"
- ensight.utils.omniverse.create_connection(usd_directory, options=options)
- # Do some more work...
- # Push a scene update
- ensight.utils.omniverse.update()
-
-
-After running the script, the scene will appear in any Omniverse kit tree view
-under the specified directory. The file ``dsg_scene.usd`` can be loaded into
-Omniverse applications such as Create. The ``ensight.utils.omniverse.update()`` command
-may be used to update the USD data in Omniverse, reflecting any recent changes in
-the EnSight scene.
-
-Starting with 2025 R1, one can also access Omniverse via an EnSight
-user-defined tool:
-
-.. image:: /_static/omniverse_tool.png
-
-Clicking on "Start export service" executes something
-similar to the previous Python snippet and the button will change to
-a mode where it just executes ``ensight.utils.omniverse.update()``
-when the "Export scene" button is clicked.
-
-.. note::
-
- Several of the options are locked in once the service is started.
- To change options like "Temporal", the service must often be stopped
- and restarted using this dialog.
-
-
-PyEnSight/Omniverse kit from an Omniverse Kit Application
----------------------------------------------------------
-
-To install the service into an Omniverse application, one can install
-it via the third party extensions dialog. Select the ``Extensions`` option
-from the ``Window`` menu. Select third party extensions and filter
-by ``ANSYS``. Enabling the extension will install the kit extension.
-The kit extension will find the most recent Ansys install and use the
-version of the pyensight found in the install to perform export
-operations.
-
-.. image:: /_static/omniverse_extension.png
-
-The ``ansys.tools.omniverse.dsgui`` kit includes a GUI similar to the
-EnSight 2025 R1 user-defined tool. It allows one to select a
-target directory and the details of a gRPC connection
-to a running EnSight. For example, if one launches EnSight with
-``ensight.bat -grpc_server 2345``, then the uri: ``grpc://127.0.0.1:2345``
-can to used to request a locally running EnSight to push the current
-scene to Omniverse.
-
-.. note::
-
- If the ``ansys.tools.omniverse.core`` and ``ansys.tools.omniverse.dsgui``
- do not show up in the Community extensions list in Omniverse, then
- it can be added to the ``Extension Search Paths`` list as:
- ``git://github.com/ansys/pyensight.git?branch=main&dir=exts``.
-
-
-Running the Scene Exporter via Command Line
--------------------------------------------
-
-A pyensight install includes the omniverse_cli module which
-may be used to execute an export operation from the
-command line or launch the export service. The Python included
-in the EnSight distribution includes this module as well. Assuming
-the pyensight repository has been cloned to: ``D:\repos\pyensight`` the
-following can be run in the Python virtual environment that was
-used to build the module and the module installed:
-
-.. code-block:: bat
-
- cd "D:\repos\pyensight"
- .\venv\Scripts\activate.ps1
- python -m build
- python -m pip uninstall ansys.pyensight.core -y
- python -m pip install .\dist\ansys_pyensight_core-0.9.0.dev0-py3-none-any.whl
- python -m ansys.pyensight.core.utils.omniverse_cli -h
-
-
-The following help output will be generated:
-
-
-.. code-block::
-
- usage: omniverse_cli.py [-h] [--verbose verbose_level] [--log_file log_filename] [--dsg_uri DSG_URI]
- [--security_token token] [--monitor_directory glb_directory] [--time_scale time_scale]
- [--normalize_geometry yes|no|true|false|1|0] [--include_camera yes|no|true|false|1|0]
- [--temporal yes|no|true|false|1|0] [--oneshot yes|no|true|false|1|0]
- [--line_width line_width]
- destination
-
- PyEnSight Omniverse Geometry Service
-
- positional arguments:
- destination The directory to save the USD scene graph into.
-
- options:
- -h, --help show this help message and exit
- --verbose verbose_level
- Enable logging information (0-3). Default: 0
- --log_file log_filename
- Save logging output to the named log file instead of stdout.
- --dsg_uri DSG_URI The URI of the EnSight Dynamic Scene Graph server. Default: grpc://127.0.0.1:5234
- --security_token token
- Dynamic scene graph API security token. Default: none
- --monitor_directory glb_directory
- Monitor specified directory for GLB files to be exported. Default: none
- --time_scale time_scale
- Scaling factor to be applied to input time values. Default: 1.0
- --normalize_geometry yes|no|true|false|1|0
- Enable mapping of geometry to a normalized Cartesian space. Default: false
- --include_camera yes|no|true|false|1|0
- Include the camera in the output USD scene graph. Default: true
- --temporal yes|no|true|false|1|0
- Export a temporal scene graph. Default: false
- --oneshot yes|no|true|false|1|0
- Convert a single geometry into USD and exit. Default: false
- --line_width line_width
- Width of lines: >0=absolute size. <0=fraction of diagonal. 0=wireframe. Default: None
-
-
-Listing the various command line options.
-
-The core operation of this CLI tool is to convert a scene into USD format. The resulting USD data
-can be read into Omniverse, Blender and other DCC asset pipelines. The input data for this
-conversion can come from one of two sources: the EnSight Dynamic Scene Graph gRPC server or
-via GLB files.
-
-The command line tool can be run in two different modes: *server* and *one-shot*. In *one-shot* mode,
-a single conversion, export is performed and the CLI tool exits. In server mode, an initial
-conversion is performed and the server continues to run, either monitoring a directory for
-scene updates or listening for DSG scene push operations. The advantage of the latter
-approach is that it is possible for the tool to push incremental updates to the USD scene
-which can be faster and may result in cleaner output over time.
-
-
-Scene Source: DSG Connection
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-This is the default scene source. It is selected by the ``--dsg_uri`` command line option which
-defaults to: ``grpc://127.0.0.1:5234``. In this mode, the tool will attempt to connect to an
-EnSight session via the DSG protocol. EnSight run with the command line option ``-grpc_server 5234``
-will start the DSG server. The port number may be varied as needed by the deployment and the
-EnSight session can run on an entirely different system (e.g. remote PyEnSight session) by supplying
-the hostname in the grpc URI specification.
-
-.. note::
-
- If using remote connections, it is strongly suggested that ``--security_token`` be used to
- prevent the scene from being observed by other applications.
- Additionally, if EnSight is not started with a gRPC server option specified, the server
- can be started later using the EnSight Python APIs.
-
-
-Once the EnSight session has been established, the following command line may be used to start
-a server:
-
-.. code-block:: bat
-
- python -m ansys.pyensight.core.utils.omniverse_cli --dsg_uri grpc://127.0.0.1:12342 d:\save\usd_files
-
-
-The server will cause an initial scene push and will then wait, pushing geometry as requested until the
-EnSight DSG session ends.
-
-
-Scene Source: GLB Files
-^^^^^^^^^^^^^^^^^^^^^^^
-
-The GLB file support is restricted to specially formatted GLB files written using the Ansys GLTFWriter
-library from Ansys applications like EnSight and Fluent. These files contain additional, Ansys specific
-hints that are used to enhance and accelerate the conversions. The GLB conversion mode is selected
-using the ``--monitor_directory`` command line option. In server mode, it should point to a directory
-into which ``.glb`` files are copied. The server watches for a file of the same base name as the GLB file,
-but the extension ``.upload``. The server will then convert the file into USD form and delete both the
-GLB file and the upload file.
-
-For example, if one has a GLB file named: ``car_body.glb``, one should first copy the file into the
-directory specified by ``--monitor_directory`` and once the copy is complete, an empty file
-named: ``car_body.upload`` should be created in the directory as well (the server will not attempt to
-read the GLB file until the ``.upload`` file exists). Once the conversion is complete, the two files
-will be deleted by the server.
-
-.. note::
-
- The ``.upload`` file can also be a JSON description of the scene which allows for importing
- of multiple GLB files and setting other options. This format will be documented in a future
- release.
-
-
-If the tool is being run in *one-shot* mode, the single GLB file that should be specified using
-``--monitor_directory`` along with the ``--oneshot`` options. For example:
-
-
-.. code-block:: bat
-
- python -m ansys.pyensight.core.utils.omniverse_cli --monitor_directory d:\source\in_file.glb --oneshot 1 d:\save\usd_files
-
-
-Will convert the single GLB file into USD format and then exit.
-
-.. _OneShotMode:
-
-Server vs One-Shot Mode
-^^^^^^^^^^^^^^^^^^^^^^^
-
-If the ``--oneshot`` option is not specified, the tool will run in server mode. It will monitor either
-the DSG protocol or the directory specified by ``--monitor_directory`` option for geometry data. In
-this mode, the USD scene in the ``destination`` will be updated to reflect the last scene pushed.
-Unused files will be removed and items that do not change will not be updated. Thus, server
-mode is best suited for dynamic, interactive applications. If server mode is initiated via the command line,
-a single scene push will automatically be performed. One can start subsequent push operations
-from the EnSight python interpreter with the following commands.
-
-
-.. code-block:: python
-
- import enspyqtgui_int
- # Current timestep
- enspyqtgui_int.dynamic_scene_graph_command("dynamicscenegraph://localhost/client/update")
- # All timesteps
- enspyqtgui_int.dynamic_scene_graph_command("dynamicscenegraph://localhost/client/update?timesteps=1")
-
-
-If ``--oneshot`` is specified, only a single conversion is performed and the tool will not maintain
-a notion of the scene state. This makes the operation simpler and avoids the need for extra processes,
-however old files from previous export operations will not be removed and the USD directory may need
-to be manually cleaned between export operations.
-
-
-General Options
-^^^^^^^^^^^^^^^
-
-Output options:
-
-* ``--verbose verbose_level`` - Controls the amount of progress and debug information that will be
- generated.
-* ``--log_file log_filename`` - If specified, the verbose output will be saved to the named file
- instead of stdout.
-
-
-Several options can be used to customize the scaling of various aspects of the generated output.
-
-* ``--time_scale time_scale`` - If specified, the timestep values in the input geometry stream will be
- multiplied by this value before being sent into the USD file. This can be used to do things like
- transform solution times into video time lines.
-* ``--normalize_geometry yes|no|true|false|1|0`` - If enabled, the largest axis in the input geometry
- will be scaled to a unit cube and the other axis will be scaled by the same ratio.
-* ``--line_width line_width`` - Input scenes may include lines. If this option is specified, those
- lines will be include in the USD output. The size of the lines are specified in the scene geometry
- space units by this option. If this option is negative, the size of the lines will be set to the
- diagonal of the first geometry block with lines, multiplied by the absolute value of the option.
- The environmental variable ``ANSYS_OV_LINE_WIDTH`` can be used to specify the default value for
- this option.
-
-
-Miscellaneous features:
-
-* ``--include_camera yes|no|true|false|1|0`` - By default, the tool will attempt to include
- the input scene camera in the USD output. This can be useful when trying to reproduce a
- specific view. However, when exporting assets that will be combined later or in
- interactive/VR/AR use-cases the camera specification can be disabled using this option.
-* ``--temporal yes|no|true|false|1|0`` - When using the DSG geometry source, this option can
- be used to force time-varying export from EnSight. The default is to export only the
- the current timestep.
-* ``--oneshot yes|no|true|false|1|0`` - As discussed earlier, this option is used to disable
- server mode. See :ref:`OneShotMode` for details.
-
-
-Material Conversions
-^^^^^^^^^^^^^^^^^^^^
-
-A mechanism for semi-automated mapping of materials is currently a work in progress.
+.. _omniverse_info:
+
+PyEnSight/ANSYS Omniverse Interface
+===================================
+
+PyEnSight includes an interface to export geometry representing the
+current EnSight scene to an Omniverse server. The interface supports
+EnSight 2023 R2 or later. It leverages the EnSight Dynamic Scene Graph
+gRPC interface to access the scene. It is also possible to export the
+EnSight scene as glTF (GLB) files and convert them into USD format.
+
+The API is available through a PyEnSight session instance, from EnSight
+Python directly as ``ensight.utils.omniverse`` (2025 R1 and later), and
+from within Omniverse applications via the ``ansys.tools.omniverse.core`` and
+``ansys.tools.omniverse.dsgui`` kit extensions.
+
+The Python API is defined here: :class:`Omniverse`.
+
+
+PyEnSight and EnSight Python API
+--------------------------------
+
+The API can be used directly from a local Python installation of the
+`ansys-pyensight-core `_ module:
+
+.. code-block:: python
+
+ from ansys.pyensight.core import LocalLauncher
+ s = LocalLauncher(batch=False).start()
+ s.load_example("waterbreak.ens")
+ # The directory to save USD representation into
+ usd_directory = "/omniverse/examples/water"
+ # Start a new connection between EnSight and Omniverse
+ s.ensight.utils.omniverse.create_connection(usd_directory)
+ # Do some work...
+ # Push a scene update
+ s.ensight.utils.omniverse.update()
+
+
+.. note::
+
+    The ``batch=False`` option used in the examples causes the EnSight
+    GUI to be displayed together with the Omniverse Composer GUI.
+
+    It is possible to run a PyEnSight script from inside an Omniverse
+    kit application. In this case, care must be taken to close the EnSight
+    session before exiting the Omniverse application hosting the PyEnSight
+    session; otherwise, the EnSight instance may be left running.
+
+
+From inside an EnSight session, the API is similar:
+
+.. code-block:: python
+
+ # Start a DSG server in EnSight first
+ (_, grpc_port, security) = ensight.objs.core.grpc_server(port=0, start=True)
+ # Start a new connection between the EnSight DSG server and Omniverse
+ options = {"host": "127.0.0.1", "port": str(grpc_port)}
+ if security:
+ options["security"] = security
+ usd_directory = "/omniverse/examples/water"
+ ensight.utils.omniverse.create_connection(usd_directory, options=options)
+ # Do some more work...
+ # Push a scene update
+ ensight.utils.omniverse.update()
+
+
+After running the script, the scene will appear in any Omniverse kit tree view
+under the specified directory. The file ``dsg_scene.usd`` can be loaded into
+Omniverse applications such as Create. The ``ensight.utils.omniverse.update()`` command
+may be used to update the USD data in Omniverse, reflecting any recent changes in
+the EnSight scene.
+
+Starting with 2025 R1, one can also access Omniverse via an EnSight
+user-defined tool:
+
+.. image:: /_static/omniverse_tool.png
+
+Clicking **Start export service** executes something similar to the previous
+Python snippet. The button then changes to a mode where it simply executes
+``ensight.utils.omniverse.update()`` when the **Export scene** button is clicked.
+
+.. note::
+
+    Several of the options are locked in once the service is started.
+    To change options like **Temporal**, the service must often be stopped
+    and restarted using this dialog.
+
+
+PyEnSight/Omniverse kit from an Omniverse Kit Application
+---------------------------------------------------------
+
+To install the service into an Omniverse application, use the third-party
+extensions dialog. Select the ``Extensions`` option from the ``Window`` menu,
+select third-party extensions, and filter by ``ANSYS``. Enabling the extension
+installs the kit extension, which finds the most recent Ansys installation and
+uses the version of PyEnSight found in that installation to perform export
+operations.
+
+.. image:: /_static/omniverse_extension.png
+
+The ``ansys.tools.omniverse.dsgui`` kit includes a GUI similar to the
+EnSight 2025 R1 user-defined tool. It allows one to select a
+target directory and to specify the details of a gRPC connection
+to a running EnSight. For example, if one launches EnSight with
+``ensight.bat -grpc_server 2345``, then the URI ``grpc://127.0.0.1:2345``
+can be used to request a locally running EnSight to push the current
+scene to Omniverse.
+
+.. note::
+
+    If the ``ansys.tools.omniverse.core`` and ``ansys.tools.omniverse.dsgui``
+    extensions do not show up in the Community extensions list in Omniverse,
+    they can be added to the ``Extension Search Paths`` list as:
+    ``git://github.com/ansys/pyensight.git?branch=main&dir=exts``.
+
+
+Running the Scene Exporter via Command Line
+-------------------------------------------
+
+A PyEnSight install includes the ``omniverse_cli`` module, which
+may be used to execute an export operation from the
+command line or to launch the export service. The Python included
+in the EnSight distribution includes this module as well. Assuming
+the pyensight repository has been cloned to ``D:\repos\pyensight``, the
+following can be run in the Python virtual environment to build and
+install the module and then display the CLI help:
+
+.. code-block:: bat
+
+ cd "D:\repos\pyensight"
+ .\venv\Scripts\activate.ps1
+ python -m build
+ python -m pip uninstall ansys.pyensight.core -y
+ python -m pip install .\dist\ansys_pyensight_core-0.9.0.dev0-py3-none-any.whl
+ python -m ansys.pyensight.core.utils.omniverse_cli -h
+
+
+The following help output will be generated:
+
+
+.. code-block::
+
+ usage: omniverse_cli.py [-h] [--verbose verbose_level] [--log_file log_filename] [--dsg_uri DSG_URI]
+ [--security_token token] [--monitor_directory glb_directory] [--time_scale time_scale]
+ [--normalize_geometry yes|no|true|false|1|0] [--include_camera yes|no|true|false|1|0]
+ [--temporal yes|no|true|false|1|0] [--oneshot yes|no|true|false|1|0]
+ [--line_width line_width]
+ destination
+
+ PyEnSight Omniverse Geometry Service
+
+ positional arguments:
+ destination The directory to save the USD scene graph into.
+
+ options:
+ -h, --help show this help message and exit
+ --verbose verbose_level
+ Enable logging information (0-3). Default: 0
+ --log_file log_filename
+ Save logging output to the named log file instead of stdout.
+ --dsg_uri DSG_URI The URI of the EnSight Dynamic Scene Graph server. Default: grpc://127.0.0.1:5234
+ --security_token token
+ Dynamic scene graph API security token. Default: none
+ --monitor_directory glb_directory
+ Monitor specified directory for GLB files to be exported. Default: none
+ --time_scale time_scale
+ Scaling factor to be applied to input time values. Default: 1.0
+ --normalize_geometry yes|no|true|false|1|0
+ Enable mapping of geometry to a normalized Cartesian space. Default: false
+ --include_camera yes|no|true|false|1|0
+ Include the camera in the output USD scene graph. Default: true
+ --temporal yes|no|true|false|1|0
+ Export a temporal scene graph. Default: false
+ --oneshot yes|no|true|false|1|0
+ Convert a single geometry into USD and exit. Default: false
+ --line_width line_width
+ Width of lines: >0=absolute size. <0=fraction of diagonal. 0=wireframe. Default: None
+
+
+
+The core operation of this CLI tool is to convert a scene into USD format. The resulting USD data
+can be read into Omniverse, Blender, and other DCC asset pipelines. The input data for this
+conversion can come from one of two sources: the EnSight Dynamic Scene Graph gRPC server or
+GLB files.
+
+The command line tool can be run in two different modes: *server* and *one-shot*. In *one-shot* mode,
+a single conversion and export is performed and the CLI tool exits. In server mode, an initial
+conversion is performed and the server continues to run, either monitoring a directory for
+scene updates or listening for DSG scene push operations. The advantage of the latter
+approach is that the tool can push incremental updates to the USD scene,
+which can be faster and may result in cleaner output over time.
+
+
+Scene Source: DSG Connection
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This is the default scene source. It is selected by the ``--dsg_uri`` command line option, which
+defaults to ``grpc://127.0.0.1:5234``. In this mode, the tool attempts to connect to an
+EnSight session via the DSG protocol. Running EnSight with the command line option ``-grpc_server 5234``
+starts the DSG server. The port number may be varied as needed by the deployment, and the
+EnSight session can run on an entirely different system (for example, a remote PyEnSight session) by supplying
+the hostname in the gRPC URI specification.
+
+.. note::
+
+ If using remote connections, it is strongly suggested that ``--security_token`` be used to
+ prevent the scene from being observed by other applications.
+ Additionally, if EnSight is not started with a gRPC server option specified, the server
+ can be started later using the EnSight Python APIs.
+
+
+Once the EnSight session has been established, the following command line may be used to start
+a server:
+
+.. code-block:: bat
+
+ python -m ansys.pyensight.core.utils.omniverse_cli --dsg_uri grpc://127.0.0.1:12342 d:\save\usd_files
+
+
+The server performs an initial scene push and then waits, converting geometry as it is pushed,
+until the EnSight DSG session ends.
+
+
+Scene Source: GLB Files
+^^^^^^^^^^^^^^^^^^^^^^^
+
+GLB file support is restricted to specially formatted GLB files written using the Ansys GLTFWriter
+library from Ansys applications such as EnSight and Fluent. These files contain additional, Ansys-specific
+hints that are used to enhance and accelerate the conversions. The GLB conversion mode is selected
+using the ``--monitor_directory`` command line option. In server mode, it should point to a directory
+into which ``.glb`` files are copied. The server watches for a file with the same base name as the
+GLB file but with the extension ``.upload``. The server then converts the file into USD form and
+deletes both the GLB file and the upload file.
+
+For example, given a GLB file named ``car_body.glb``, first copy the file into the
+directory specified by ``--monitor_directory``. Once the copy is complete, create an empty file
+named ``car_body.upload`` in the same directory (the server will not attempt to
+read the GLB file until the ``.upload`` file exists). Once the conversion is complete, the server
+deletes both files.
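+
+A minimal Python sketch of this copy-then-mark protocol is shown below; the source and monitored
+directory paths are hypothetical, and only standard library calls are used.
+
+.. code-block:: python
+
+    import pathlib
+    import shutil
+
+    # Hypothetical paths for illustration.
+    glb_file = pathlib.Path(r"d:\source\car_body.glb")
+    monitor_dir = pathlib.Path(r"d:\watch")  # the --monitor_directory value
+
+    # 1. Copy the GLB file into the monitored directory.
+    shutil.copy2(glb_file, monitor_dir / glb_file.name)
+
+    # 2. Create the empty ".upload" marker. The server only reads the GLB
+    #    file once this marker exists and deletes both files afterwards.
+    (monitor_dir / glb_file.with_suffix(".upload").name).touch()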
+
+.. note::
+
+ The ``.upload`` file can also be a JSON description of the scene which allows for importing
+ of multiple GLB files and setting other options. This format will be documented in a future
+ release.
+
+
+If the tool is being run in *one-shot* mode, the single GLB file should be specified using
+``--monitor_directory`` along with the ``--oneshot`` option. For example:
+
+
+.. code-block:: bat
+
+ python -m ansys.pyensight.core.utils.omniverse_cli --monitor_directory d:\source\in_file.glb --oneshot 1 d:\save\usd_files
+
+
+This converts the single GLB file into USD format and then exits.
+
+.. _OneShotMode:
+
+Server vs One-Shot Mode
+^^^^^^^^^^^^^^^^^^^^^^^
+
+If the ``--oneshot`` option is not specified, the tool runs in server mode. It monitors either
+the DSG protocol or the directory specified by the ``--monitor_directory`` option for geometry data. In
+this mode, the USD scene in the ``destination`` directory is updated to reflect the last scene pushed.
+Unused files are removed, and items that do not change are not updated. Thus, server
+mode is best suited for dynamic, interactive applications. If server mode is initiated via the command line,
+a single scene push is performed automatically. Subsequent push operations can be started
+from the EnSight Python interpreter with the following commands:
+
+
+.. code-block:: python
+
+ import enspyqtgui_int
+ # Current timestep
+ enspyqtgui_int.dynamic_scene_graph_command("dynamicscenegraph://localhost/client/update")
+ # All timesteps
+ enspyqtgui_int.dynamic_scene_graph_command("dynamicscenegraph://localhost/client/update?timesteps=1")
+
+
+If ``--oneshot`` is specified, only a single conversion is performed and the tool does not maintain
+a notion of the scene state. This makes the operation simpler and avoids the need for extra processes;
+however, old files from previous export operations are not removed, and the USD directory may need
+to be manually cleaned between export operations.
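+
+For example, a small sketch (with a hypothetical destination path) that clears stale output
+before the next one-shot export:
+
+.. code-block:: python
+
+    import pathlib
+    import shutil
+
+    # Hypothetical USD output directory used in the examples above.
+    destination = pathlib.Path(r"d:\save\usd_files")
+
+    # Remove files left over from a previous one-shot export, then recreate
+    # the directory so the next export starts from a clean state.
+    if destination.exists():
+        shutil.rmtree(destination)
+    destination.mkdir(parents=True)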
+
+
+General Options
+^^^^^^^^^^^^^^^
+
+Output options:
+
+* ``--verbose verbose_level`` - Controls the amount of progress and debug information that will be
+ generated.
+* ``--log_file log_filename`` - If specified, the verbose output will be saved to the named file
+ instead of stdout.
+
+
+Several options can be used to customize the scaling of various aspects of the generated output
+(an example follows this list).
+
+* ``--time_scale time_scale`` - If specified, the timestep values in the input geometry stream are
+  multiplied by this value before being written to the USD file. This can be used, for example, to
+  transform solution times into video timelines.
+* ``--normalize_geometry yes|no|true|false|1|0`` - If enabled, the largest axis in the input geometry
+  is scaled to a unit cube and the other axes are scaled by the same ratio.
+* ``--line_width line_width`` - Input scenes may include lines. If this option is specified, those
+  lines are included in the USD output. A positive value specifies the line width in scene geometry
+  units. A negative value sets the line width to the diagonal of the first geometry block
+  containing lines, multiplied by the absolute value of the option.
+  The environment variable ``ANSYS_OV_LINE_WIDTH`` can be used to specify the default value for
+  this option.
+
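+For example, a command line (with a hypothetical output path) that maps solution times onto a
+video timeline and normalizes the geometry might look like this:
+
+.. code-block:: bat
+
+    python -m ansys.pyensight.core.utils.omniverse_cli --time_scale 0.1 --normalize_geometry true d:\save\usd_files
+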
+
+Miscellaneous features:
+
+* ``--include_camera yes|no|true|false|1|0`` - By default, the tool attempts to include
+  the input scene camera in the USD output. This can be useful when trying to reproduce a
+  specific view. However, when exporting assets that will be combined later, or in
+  interactive/VR/AR use cases, the camera specification can be disabled using this option.
+* ``--temporal yes|no|true|false|1|0`` - When using the DSG geometry source, this option can
+  be used to force time-varying export from EnSight. The default is to export only the
+  current timestep (see the example after this list).
+* ``--oneshot yes|no|true|false|1|0`` - As discussed earlier, this option is used to disable
+  server mode. See :ref:`OneShotMode` for details.
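+
+For example, to force a time-varying export over the DSG connection (hypothetical output path):
+
+.. code-block:: bat
+
+    python -m ansys.pyensight.core.utils.omniverse_cli --temporal true d:\save\usd_files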
+
+
+Material Conversions
+^^^^^^^^^^^^^^^^^^^^
+
+A mechanism for semi-automated mapping of materials is currently a work in progress.
diff --git a/exts/ansys.tools.omniverse.core/ansys/tools/omniverse/core/__init__.py b/exts/ansys.tools.omniverse.core/ansys/tools/omniverse/core/__init__.py
index 5d375081f69..53b61d60bec 100644
--- a/exts/ansys.tools.omniverse.core/ansys/tools/omniverse/core/__init__.py
+++ b/exts/ansys.tools.omniverse.core/ansys/tools/omniverse/core/__init__.py
@@ -1 +1 @@
-from .extension import *
+from .extension import *
diff --git a/exts/ansys.tools.omniverse.core/ansys/tools/omniverse/core/extension.py b/exts/ansys.tools.omniverse.core/ansys/tools/omniverse/core/extension.py
index aa82e310b4a..513584c138c 100644
--- a/exts/ansys.tools.omniverse.core/ansys/tools/omniverse/core/extension.py
+++ b/exts/ansys.tools.omniverse.core/ansys/tools/omniverse/core/extension.py
@@ -1,398 +1,398 @@
-import json
-import logging
-import os
-import platform
-import subprocess
-import sys
-import tempfile
-from typing import Optional
-import uuid
-
-import carb.settings
-import omni.ext
-import omni.kit.app
-import omni.kit.pipapi
-
-"""
-The current kit leverages an EnSight installation. It can find this via
-environmental variable/directory scanning (looking for an Ansys installation)
-or by looking at CEI_HOME. CEI_HOME is tried first.
-"""
-
-
-def find_kit_filename() -> Optional[str]:
- """
- Use a combination of the current omniverse application and the information
- in the local .nvidia-omniverse/config/omniverse.toml file to come up with
- the pathname of a kit executable suitable for hosting another copy of the
- ansys.tools.omniverse.core kit.
-
- Returns
- -------
- The pathname of a kit executable or None
-
- """
- # get the current application
- app = omni.kit.app.get_app()
- app_name = app.get_app_filename().split(".")[-1]
- app_version = app.get_app_version().split("-")[0]
-
- # parse the toml config file for the location of the installed apps
- try:
- import tomllib
- except ModuleNotFoundError:
- import pip._vendor.tomli as tomllib
-
- homedir = os.path.expanduser("~")
- ov_config = os.path.join(homedir, ".nvidia-omniverse", "config", "omniverse.toml")
- with open(ov_config, "r") as ov_file:
- ov_data = ov_file.read()
- config = tomllib.loads(ov_data)
- appdir = config.get("paths", {}).get("library_root", None)
- appdir = os.path.join(appdir, f"{app_name}-{app_version}")
-
- # Windows: 'kit.bat' in '.' or 'kit' followed by 'kit.exe' in '.' or 'kit'
- # Linux: 'kit.sh' in '.' or 'kit' followed by 'kit' in '.' or 'kit'
- exe_names = ["kit.sh", "kit"]
- if sys.platform.startswith("win"):
- exe_names = ["kit.bat", "kit.exe"]
-
- # look in 4 places...
- for dir_name in [appdir, os.path.join(appdir, "kit")]:
- for name in exe_names:
- if os.path.exists(os.path.join(dir_name, name)):
- return os.path.join(dir_name, name)
-
- return None
-
-
-class AnsysToolsOmniverseCoreServerExtension(omni.ext.IExt):
- """
- This class is an Omniverse kit. The kit is capable of creating a
- connection to an Ansys Distributed Scene Graph service and pushing
- the graph into an Omniverse Nucleus.
- """
-
- _service_instance = None
-
- def __init__(self, *args, **kwargs) -> None:
- super().__init__(*args, **kwargs)
- ext_name = __name__.rsplit(".", 1)[0]
- self._logger = logging.getLogger(ext_name)
- self._dsg_uri = self._setting("dsgUrl", "ENSIGHT_GRPC_URI")
- self._omni_uri = self._setting("omniUrl", "ENSIGHT_OMNI_URI")
- if self._omni_uri.startswith("omniverse://"):
- self._omni_uri = "~"
- self._omni_uri = os.path.expanduser(self._omni_uri)
- self._security_token = self._setting("securityCode", "ENSIGHT_SECURITY_TOKEN")
- self._temporal = self._setting("temporal") != "0"
- self._vrmode = self._setting("vrmode") != "0"
- try:
- scale = float(self._setting("timeScale"))
- except ValueError:
- scale = 1.0
- self._time_scale: float = scale
- self._normalize_geometry = self._setting("normalizeGeometry") != "0"
- self._version: str = ""
- self._shutdown: bool = False
- self._server_process = None
- self._status_filename: str = ""
- self._interpreter = self._find_ensight_cpython()
-
- @property
- def version(self) -> str:
- return self._version
-
- @property
- def dsg_uri(self) -> str:
- """The endpoint of a Dynamic Scene Graph service: grpc://{hostname}:{port}"""
- return self._dsg_uri
-
- @dsg_uri.setter
- def dsg_uri(self, uri: str) -> None:
- self._dsg_uri = uri
-
- @property
- def destination(self) -> str:
- """The output USD directory name"""
- return self._omni_uri
-
- @destination.setter
- def destination(self, value: str) -> None:
- self._omni_uri = value
-
- @property
- def security_token(self) -> str:
- """The security token of the DSG service instance."""
- return self._security_token
-
- @security_token.setter
- def security_token(self, value: str) -> None:
- self._security_token = value
-
- @property
- def temporal(self) -> bool:
- """If True, the DSG update should include all timesteps."""
- return self._temporal
-
- @temporal.setter
- def temporal(self, value: bool) -> None:
- self._temporal = bool(value)
-
- @property
- def vrmode(self) -> bool:
- """If True, the DSG update should not include camera transforms."""
- return self._vrmode
-
- @vrmode.setter
- def vrmode(self, value: bool) -> None:
- self._vrmode = bool(value)
-
- @property
- def normalize_geometry(self) -> bool:
- """If True, the DSG geometry should be remapped into normalized space."""
- return self._normalize_geometry
-
- @normalize_geometry.setter
- def normalize_geometry(self, val: bool) -> None:
- self._normalize_geometry = val
-
- @property
- def time_scale(self) -> float:
- """Value to multiply DSG time values by before passing to Omniverse"""
- return self._time_scale
-
- @time_scale.setter
- def time_scale(self, value: float) -> None:
- self._time_scale = value
-
- @property
- def interpreter(self) -> str:
- """Fully qualified path to the python.exe or .bat wrapper in which pyensight is installed."""
- return self._interpreter
-
- @interpreter.setter
- def interpreter(self, value: str) -> None:
- self._interpreter = value
-
- @classmethod
- def get_instance(cls) -> Optional["AnsysToolsOmniverseCoreServerExtension"]:
- return cls._service_instance
-
- @classmethod
- def _setting(cls, name: str, env_varname: str = "") -> str:
- """
- Get a CLI option value. First check if any specified
- environment variable is present and if so, return that value.
- Next, check to see if a command line value is set and return
- that. Finally, fall back to the value (if any) specified in
- the kit toml file.
-
- Parameters
- ----------
- name
- The name of the command line flag to check the value of.
- env_varname
- Optional name of the environment variable to check the value of.
-
- Returns
- -------
- A string or None.
- """
- # any environmental variable trumps them all.
- if env_varname:
- value = os.environ.get(env_varname, None)
- if value:
- return value
- settings = carb.settings.get_settings()
- ext_name = __name__.rsplit(".", 1)[0]
- s = f"/exts/{ext_name}/{name}"
- return settings.get(s)
-
- def info(self, text: str) -> None:
- """
- Send message to the logger at the info level.
-
- Parameters
- ----------
- text
- The message to send.
- """
- self._logger.info(text)
-
- def warning(self, text: str) -> None:
- """
- Send message to the logger at the warning level.
-
- Parameters
- ----------
- text
- The message to send.
- """
- self._logger.warning(text)
-
- def error(self, text: str) -> None:
- """
- Send message to the logger at the error level.
-
- Parameters
- ----------
- text
- The message to send.
- """
- self._logger.error(text)
-
- def _find_ensight_cpython(self) -> Optional[str]:
- """
- Scan the current system, looking for EnSight installations, specifically, cpython.
- Check: PYENSIGHT_ANSYS_INSTALLATION, CEI_HOME, AWP_ROOT* in that order
-
- Returns
- -------
- The first cpython found or None
-
- """
-
- cpython = "cpython"
- if platform.system() == "Windows":
- cpython += ".bat"
-
- dirs_to_check = []
- if "PYENSIGHT_ANSYS_INSTALLATION" in os.environ:
- env_inst = os.environ["PYENSIGHT_ANSYS_INSTALLATION"]
- dirs_to_check.append(env_inst)
- # Note: PYENSIGHT_ANSYS_INSTALLATION is designed for devel builds
- # where there is no CEI directory, but for folks using it in other
- # ways, we'll add that one too, just in case.
- dirs_to_check.append(os.path.join(env_inst, "CEI"))
-
- # Look for most recent Ansys install, 25.1 or later
- awp_roots = []
- for env_name in dict(os.environ).keys():
- if env_name.startswith("AWP_ROOT") and int(env_name[len("AWP_ROOT") :]) >= 251:
- awp_roots.append(env_name)
- awp_roots.sort(reverse=True)
- for env_name in awp_roots:
- dirs_to_check.append(os.path.join(os.environ[env_name], "CEI"))
-
- # check all the collected locations in order
- for install_dir in dirs_to_check:
- launch_file = os.path.join(install_dir, "bin", cpython)
- if os.path.isfile(launch_file):
- if self.validate_interpreter(launch_file):
- return launch_file
- return ""
-
- def on_startup(self, ext_id: str) -> None:
- """
- Called by Omniverse when the kit instance is started.
-
- Parameters
- ----------
- ext_id
- The specific version of the kit.
- """
- self._version = ext_id.split("-")[-1]
- self.info(f"ANSYS tools omniverse core server startup: {self._version}")
- AnsysToolsOmniverseCoreServerExtension._service_instance = self
-
- def on_shutdown(self) -> None:
- """
- Called by Omniverse when the kit instance is shutting down.
- """
- self.info("ANSYS tools omniverse core server shutdown")
- self.shutdown()
- AnsysToolsOmniverseCoreServerExtension._service_instance = None
-
- def validate_interpreter(self, launch_file: str) -> bool:
- if len(launch_file) == 0:
- return False
- has_ov_module = False
- try:
- cmd = [launch_file, "-m", "ansys.pyensight.core.utils.omniverse_cli", "-h"]
- env_vars = os.environ.copy()
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env_vars)
- proc.wait(timeout=60)
- has_ov_module = proc.communicate()[0].decode("utf-8").startswith("usage: omniverse_cli")
- except Exception as error:
- self.warning(f"Exception thrown while testing python {str(launch_file)}: {str(error)}")
- has_ov_module = False
- return has_ov_module
-
- def dsg_export(self) -> None:
- """
- Use the oneshot feature of the pyensight omniverse_cli to push the current
- EnSight scene to the supplied directory in USD format.
- """
- if self._interpreter is None:
- self.warning("Unable to determine a kit executable pathname.")
- return
- self.info(f"Using {self._interpreter} to run the server")
- cmd = [self._interpreter]
- cmd.extend(["-m", "ansys.pyensight.core.utils.omniverse_cli"])
- cmd.append(self.destination)
- if self.security_token:
- cmd.extend(["--security_token", self.security_token])
- if self.temporal:
- cmd.extend(["--temporal", "true"])
- if self.vrmode:
- cmd.extend(["--include_camera", "false"])
- if self.normalize_geometry:
- cmd.extend(["--normalize_geometry", "true"])
- if self.time_scale != 1.0:
- cmd.extend(["--time_scale", str(self.time_scale)])
- cmd.extend(["--dsg_uri", self.dsg_uri])
- cmd.extend(["--oneshot", "true"])
- env_vars = os.environ.copy()
- # we are launching the kit from an Omniverse app. In this case, we
- # inform the kit instance of:
- # (1) the name of the "server status" file, if any
- self._new_status_file()
- env_vars["ANSYS_OV_SERVER_STATUS_FILENAME"] = self._status_filename
- try:
- self.info(f"Running {' '.join(cmd)}")
- self._server_process = subprocess.Popen(cmd, close_fds=True, env=env_vars)
- except Exception as error:
- self.warning(f"Error running translator: {error}")
- self._new_status_file(new=False)
-
- def _new_status_file(self, new=True) -> None:
- """
- Remove any existing status file and create a new one if requested.
-
- Parameters
- ----------
- new : bool
- If True, create a new status file.
- """
- if self._status_filename:
- if os.path.exists(self._status_filename):
- try:
- os.remove(self._status_filename)
- except OSError:
- self.warning(f"Unable to delete the status file: {self._status_filename}")
- self._status_filename = ""
- if new:
- self._status_filename = os.path.join(
- tempfile.gettempdir(), str(uuid.uuid1()) + "_gs_status.txt"
- )
-
- def read_status_file(self) -> dict:
- """Read the status file and return its contents as a dictionary.
-
- Note: this can fail if the file is being written to when this call is made, so expect
- failures.
-
- Returns
- -------
- Optional[dict]
- A dictionary with the fields 'status', 'start_time', 'processed_buffers', 'total_buffers' or empty
- """
- if not self._status_filename:
- return {}
- try:
- with open(self._status_filename, "r") as status_file:
- data = json.load(status_file)
- except Exception:
- return {}
- return data
+import json
+import logging
+import os
+import platform
+import subprocess
+import sys
+import tempfile
+from typing import Optional
+import uuid
+
+import carb.settings
+import omni.ext
+import omni.kit.app
+import omni.kit.pipapi
+
+"""
+The current kit leverages an EnSight installation. It can find this via
+environmental variable/directory scanning (looking for an Ansys installation)
+or by looking at CEI_HOME. CEI_HOME is tried first.
+"""
+
+
+def find_kit_filename() -> Optional[str]:
+ """
+ Use a combination of the current omniverse application and the information
+ in the local .nvidia-omniverse/config/omniverse.toml file to come up with
+ the pathname of a kit executable suitable for hosting another copy of the
+ ansys.tools.omniverse.core kit.
+
+ Returns
+ -------
+ The pathname of a kit executable or None
+
+ """
+ # get the current application
+ app = omni.kit.app.get_app()
+ app_name = app.get_app_filename().split(".")[-1]
+ app_version = app.get_app_version().split("-")[0]
+
+ # parse the toml config file for the location of the installed apps
+ try:
+ import tomllib
+ except ModuleNotFoundError:
+ import pip._vendor.tomli as tomllib
+
+ homedir = os.path.expanduser("~")
+ ov_config = os.path.join(homedir, ".nvidia-omniverse", "config", "omniverse.toml")
+ with open(ov_config, "r") as ov_file:
+ ov_data = ov_file.read()
+ config = tomllib.loads(ov_data)
+ appdir = config.get("paths", {}).get("library_root", None)
+ appdir = os.path.join(appdir, f"{app_name}-{app_version}")
+
+ # Windows: 'kit.bat' in '.' or 'kit' followed by 'kit.exe' in '.' or 'kit'
+ # Linux: 'kit.sh' in '.' or 'kit' followed by 'kit' in '.' or 'kit'
+ exe_names = ["kit.sh", "kit"]
+ if sys.platform.startswith("win"):
+ exe_names = ["kit.bat", "kit.exe"]
+
+ # look in 4 places...
+ for dir_name in [appdir, os.path.join(appdir, "kit")]:
+ for name in exe_names:
+ if os.path.exists(os.path.join(dir_name, name)):
+ return os.path.join(dir_name, name)
+
+ return None
+
+
+class AnsysToolsOmniverseCoreServerExtension(omni.ext.IExt):
+ """
+ This class is an Omniverse kit. The kit is capable of creating a
+ connection to an Ansys Distributed Scene Graph service and pushing
+ the graph into an Omniverse Nucleus.
+ """
+
+ _service_instance = None
+
+ def __init__(self, *args, **kwargs) -> None:
+ super().__init__(*args, **kwargs)
+ ext_name = __name__.rsplit(".", 1)[0]
+ self._logger = logging.getLogger(ext_name)
+ self._dsg_uri = self._setting("dsgUrl", "ENSIGHT_GRPC_URI")
+ self._omni_uri = self._setting("omniUrl", "ENSIGHT_OMNI_URI")
+ if self._omni_uri.startswith("omniverse://"):
+ self._omni_uri = "~"
+ self._omni_uri = os.path.expanduser(self._omni_uri)
+ self._security_token = self._setting("securityCode", "ENSIGHT_SECURITY_TOKEN")
+ self._temporal = self._setting("temporal") != "0"
+ self._vrmode = self._setting("vrmode") != "0"
+ try:
+ scale = float(self._setting("timeScale"))
+ except ValueError:
+ scale = 1.0
+ self._time_scale: float = scale
+ self._normalize_geometry = self._setting("normalizeGeometry") != "0"
+ self._version: str = ""
+ self._shutdown: bool = False
+ self._server_process = None
+ self._status_filename: str = ""
+ self._interpreter = self._find_ensight_cpython()
+
+ @property
+ def version(self) -> str:
+ return self._version
+
+ @property
+ def dsg_uri(self) -> str:
+ """The endpoint of a Dynamic Scene Graph service: grpc://{hostname}:{port}"""
+ return self._dsg_uri
+
+ @dsg_uri.setter
+ def dsg_uri(self, uri: str) -> None:
+ self._dsg_uri = uri
+
+ @property
+ def destination(self) -> str:
+ """The output USD directory name"""
+ return self._omni_uri
+
+ @destination.setter
+ def destination(self, value: str) -> None:
+ self._omni_uri = value
+
+ @property
+ def security_token(self) -> str:
+ """The security token of the DSG service instance."""
+ return self._security_token
+
+ @security_token.setter
+ def security_token(self, value: str) -> None:
+ self._security_token = value
+
+ @property
+ def temporal(self) -> bool:
+ """If True, the DSG update should include all timesteps."""
+ return self._temporal
+
+ @temporal.setter
+ def temporal(self, value: bool) -> None:
+ self._temporal = bool(value)
+
+ @property
+ def vrmode(self) -> bool:
+ """If True, the DSG update should not include camera transforms."""
+ return self._vrmode
+
+ @vrmode.setter
+ def vrmode(self, value: bool) -> None:
+ self._vrmode = bool(value)
+
+ @property
+ def normalize_geometry(self) -> bool:
+ """If True, the DSG geometry should be remapped into normalized space."""
+ return self._normalize_geometry
+
+ @normalize_geometry.setter
+ def normalize_geometry(self, val: bool) -> None:
+ self._normalize_geometry = val
+
+ @property
+ def time_scale(self) -> float:
+ """Value to multiply DSG time values by before passing to Omniverse"""
+ return self._time_scale
+
+ @time_scale.setter
+ def time_scale(self, value: float) -> None:
+ self._time_scale = value
+
+ @property
+ def interpreter(self) -> str:
+ """Fully qualified path to the python.exe or .bat wrapper in which pyensight is installed."""
+ return self._interpreter
+
+ @interpreter.setter
+ def interpreter(self, value: str) -> None:
+ self._interpreter = value
+
+ @classmethod
+ def get_instance(cls) -> Optional["AnsysToolsOmniverseCoreServerExtension"]:
+ return cls._service_instance
+
+ @classmethod
+ def _setting(cls, name: str, env_varname: str = "") -> str:
+ """
+ Get a CLI option value. First check if any specified
+ environment variable is present and if so, return that value.
+ Next, check to see if a command line value is set and return
+ that. Finally, fall back to the value (if any) specified in
+ the kit toml file.
+
+ Parameters
+ ----------
+ name
+ The name of the command line flag to check the value of.
+ env_varname
+ Optional name of the environment variable to check the value of.
+
+ Returns
+ -------
+ A string or None.
+ """
+ # any environmental variable trumps them all.
+ if env_varname:
+ value = os.environ.get(env_varname, None)
+ if value:
+ return value
+ settings = carb.settings.get_settings()
+ ext_name = __name__.rsplit(".", 1)[0]
+ s = f"/exts/{ext_name}/{name}"
+ return settings.get(s)
+
+ def info(self, text: str) -> None:
+ """
+ Send message to the logger at the info level.
+
+ Parameters
+ ----------
+ text
+ The message to send.
+ """
+ self._logger.info(text)
+
+ def warning(self, text: str) -> None:
+ """
+ Send message to the logger at the warning level.
+
+ Parameters
+ ----------
+ text
+ The message to send.
+ """
+ self._logger.warning(text)
+
+ def error(self, text: str) -> None:
+ """
+ Send message to the logger at the error level.
+
+ Parameters
+ ----------
+ text
+ The message to send.
+ """
+ self._logger.error(text)
+
+ def _find_ensight_cpython(self) -> Optional[str]:
+ """
+ Scan the current system, looking for EnSight installations, specifically, cpython.
+ Check: PYENSIGHT_ANSYS_INSTALLATION, CEI_HOME, AWP_ROOT* in that order
+
+ Returns
+ -------
+ The first cpython found or None
+
+ """
+
+ cpython = "cpython"
+ if platform.system() == "Windows":
+ cpython += ".bat"
+
+ dirs_to_check = []
+ if "PYENSIGHT_ANSYS_INSTALLATION" in os.environ:
+ env_inst = os.environ["PYENSIGHT_ANSYS_INSTALLATION"]
+ dirs_to_check.append(env_inst)
+ # Note: PYENSIGHT_ANSYS_INSTALLATION is designed for devel builds
+ # where there is no CEI directory, but for folks using it in other
+ # ways, we'll add that one too, just in case.
+ dirs_to_check.append(os.path.join(env_inst, "CEI"))
+
+ # Look for most recent Ansys install, 25.1 or later
+ awp_roots = []
+ for env_name in dict(os.environ).keys():
+ if env_name.startswith("AWP_ROOT") and int(env_name[len("AWP_ROOT") :]) >= 251:
+ awp_roots.append(env_name)
+ awp_roots.sort(reverse=True)
+ for env_name in awp_roots:
+ dirs_to_check.append(os.path.join(os.environ[env_name], "CEI"))
+
+ # check all the collected locations in order
+ for install_dir in dirs_to_check:
+ launch_file = os.path.join(install_dir, "bin", cpython)
+ if os.path.isfile(launch_file):
+ if self.validate_interpreter(launch_file):
+ return launch_file
+ return ""
+
+ def on_startup(self, ext_id: str) -> None:
+ """
+ Called by Omniverse when the kit instance is started.
+
+ Parameters
+ ----------
+ ext_id
+ The specific version of the kit.
+ """
+ self._version = ext_id.split("-")[-1]
+ self.info(f"ANSYS tools omniverse core server startup: {self._version}")
+ AnsysToolsOmniverseCoreServerExtension._service_instance = self
+
+ def on_shutdown(self) -> None:
+ """
+ Called by Omniverse when the kit instance is shutting down.
+ """
+ self.info("ANSYS tools omniverse core server shutdown")
+ self.shutdown()
+ AnsysToolsOmniverseCoreServerExtension._service_instance = None
+
+ def validate_interpreter(self, launch_file: str) -> bool:
+ if len(launch_file) == 0:
+ return False
+ has_ov_module = False
+ try:
+ cmd = [launch_file, "-m", "ansys.pyensight.core.utils.omniverse_cli", "-h"]
+ env_vars = os.environ.copy()
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env_vars)
+ proc.wait(timeout=60)
+ has_ov_module = proc.communicate()[0].decode("utf-8").startswith("usage: omniverse_cli")
+ except Exception as error:
+ self.warning(f"Exception thrown while testing python {str(launch_file)}: {str(error)}")
+ has_ov_module = False
+ return has_ov_module
+
+ def dsg_export(self) -> None:
+ """
+ Use the oneshot feature of the pyensight omniverse_cli to push the current
+ EnSight scene to the supplied directory in USD format.
+ """
+ if self._interpreter is None:
+ self.warning("Unable to determine a kit executable pathname.")
+ return
+ self.info(f"Using {self._interpreter} to run the server")
+ cmd = [self._interpreter]
+ cmd.extend(["-m", "ansys.pyensight.core.utils.omniverse_cli"])
+ cmd.append(self.destination)
+ if self.security_token:
+ cmd.extend(["--security_token", self.security_token])
+ if self.temporal:
+ cmd.extend(["--temporal", "true"])
+ if self.vrmode:
+ cmd.extend(["--include_camera", "false"])
+ if self.normalize_geometry:
+ cmd.extend(["--normalize_geometry", "true"])
+ if self.time_scale != 1.0:
+ cmd.extend(["--time_scale", str(self.time_scale)])
+ cmd.extend(["--dsg_uri", self.dsg_uri])
+ cmd.extend(["--oneshot", "true"])
+ env_vars = os.environ.copy()
+ # we are launching the kit from an Omniverse app. In this case, we
+ # inform the kit instance of:
+ # (1) the name of the "server status" file, if any
+ self._new_status_file()
+ env_vars["ANSYS_OV_SERVER_STATUS_FILENAME"] = self._status_filename
+ try:
+ self.info(f"Running {' '.join(cmd)}")
+ self._server_process = subprocess.Popen(cmd, close_fds=True, env=env_vars)
+ except Exception as error:
+ self.warning(f"Error running translator: {error}")
+ self._new_status_file(new=False)
+
+ def _new_status_file(self, new=True) -> None:
+ """
+ Remove any existing status file and create a new one if requested.
+
+ Parameters
+ ----------
+ new : bool
+ If True, create a new status file.
+ """
+ if self._status_filename:
+ if os.path.exists(self._status_filename):
+ try:
+ os.remove(self._status_filename)
+ except OSError:
+ self.warning(f"Unable to delete the status file: {self._status_filename}")
+ self._status_filename = ""
+ if new:
+ self._status_filename = os.path.join(
+ tempfile.gettempdir(), str(uuid.uuid1()) + "_gs_status.txt"
+ )
+
+ def read_status_file(self) -> dict:
+ """Read the status file and return its contents as a dictionary.
+
+ Note: this can fail if the file is being written to when this call is made, so expect
+ failures.
+
+ Returns
+ -------
+ Optional[dict]
+ A dictionary with the fields 'status', 'start_time', 'processed_buffers', 'total_buffers' or empty
+ """
+ if not self._status_filename:
+ return {}
+ try:
+ with open(self._status_filename, "r") as status_file:
+ data = json.load(status_file)
+ except Exception:
+ return {}
+ return data
diff --git a/exts/ansys.tools.omniverse.core/config/extension.toml b/exts/ansys.tools.omniverse.core/config/extension.toml
index b74d43a4c91..caf8493f6fa 100644
--- a/exts/ansys.tools.omniverse.core/config/extension.toml
+++ b/exts/ansys.tools.omniverse.core/config/extension.toml
@@ -1,61 +1,61 @@
-[package]
-# Semantic Versioning is used: https://semver.org/
-version = "0.8.12"
-
-# Lists people or organizations that are considered the "authors" of the package.
-authors = ["ANSYS"]
-
-# The title and description fields are primarily for displaying extension info in UI
-title = "ANSYS Tools Omniverse Core Server"
-description = "A geometry synchronization service that enables export of geometry scenes from ANSYS products to Omniverse."
-
-# Path (relative to the root) or content of readme markdown file for UI.
-readme = "docs/README.md"
-
-# URL of the extension source repository.
-repository = "https://github.com/ansys/pyensight"
-
-# One of categories for UI.
-category = "simulation"
-
-# Keywords for the extension
-keywords = ["ANSYS", "EnSight", "PyEnSight", "Fluent", "kit"]
-
-# Location of change log file in target (final) folder of extension, relative to the root.
-# More info on writing changelog: https://keepachangelog.com/en/1.0.0/
-changelog = "docs/CHANGELOG.md"
-
-# Preview image and icon. Folder named "data" automatically goes in git lfs (see .gitattributes file).
-# Preview image is shown in "Overview" of Extensions window. Screenshot of an extension might be a good preview image.
-preview_image = "data/preview.png"
-
-# Icon is shown in Extensions window, it is recommended to be square, of size 256x256.
-icon = "data/icon.png"
-
-# Use omni.ui to build simple UI
-[dependencies]
-# "omni.kit.pipapi" = {}
-"omni.client" = {}
-"omni.usd" = {}
-
-# Main python module this extension provides, it will be publicly available as "import ansys.tools.omniverse.core".
-[[python.module]]
-name = "ansys.tools.omniverse.core"
-
-[[test]]
-# Extra dependencies only to be used during test run
-dependencies = [
- "omni.kit.ui_test" # UI testing extension
-]
-
-[settings]
-# CLI setting defaults (note: "help" and "run" are also supported)
-exts."ansys.tools.omniverse.core".dsgUrl = "grpc://127.0.0.1:5234"
-exts."ansys.tools.omniverse.core".omniUrl = "~"
-exts."ansys.tools.omniverse.core".securityCode = ""
-exts."ansys.tools.omniverse.core".temporal = "0"
-exts."ansys.tools.omniverse.core".vrmode = "0"
-exts."ansys.tools.omniverse.core".normalizeGeometry = "0"
-exts."ansys.tools.omniverse.core".timeScale = "1.0"
-exts."ansys.tools.omniverse.core".interpreter = ""
-
+[package]
+# Semantic Versioning is used: https://semver.org/
+version = "0.8.12"
+
+# Lists people or organizations that are considered the "authors" of the package.
+authors = ["ANSYS"]
+
+# The title and description fields are primarily for displaying extension info in UI
+title = "ANSYS Tools Omniverse Core Server"
+description = "A geometry synchronization service that enables export of geometry scenes from ANSYS products to Omniverse."
+
+# Path (relative to the root) or content of readme markdown file for UI.
+readme = "docs/README.md"
+
+# URL of the extension source repository.
+repository = "https://github.com/ansys/pyensight"
+
+# One of categories for UI.
+category = "simulation"
+
+# Keywords for the extension
+keywords = ["ANSYS", "EnSight", "PyEnSight", "Fluent", "kit"]
+
+# Location of change log file in target (final) folder of extension, relative to the root.
+# More info on writing changelog: https://keepachangelog.com/en/1.0.0/
+changelog = "docs/CHANGELOG.md"
+
+# Preview image and icon. Folder named "data" automatically goes in git lfs (see .gitattributes file).
+# Preview image is shown in "Overview" of Extensions window. Screenshot of an extension might be a good preview image.
+preview_image = "data/preview.png"
+
+# Icon is shown in Extensions window, it is recommended to be square, of size 256x256.
+icon = "data/icon.png"
+
+# Use omni.ui to build simple UI
+[dependencies]
+# "omni.kit.pipapi" = {}
+"omni.client" = {}
+"omni.usd" = {}
+
+# Main python module this extension provides, it will be publicly available as "import ansys.tools.omniverse.core".
+[[python.module]]
+name = "ansys.tools.omniverse.core"
+
+[[test]]
+# Extra dependencies only to be used during test run
+dependencies = [
+ "omni.kit.ui_test" # UI testing extension
+]
+
+[settings]
+# CLI setting defaults (note: "help" and "run" are also supported)
+exts."ansys.tools.omniverse.core".dsgUrl = "grpc://127.0.0.1:5234"
+exts."ansys.tools.omniverse.core".omniUrl = "~"
+exts."ansys.tools.omniverse.core".securityCode = ""
+exts."ansys.tools.omniverse.core".temporal = "0"
+exts."ansys.tools.omniverse.core".vrmode = "0"
+exts."ansys.tools.omniverse.core".normalizeGeometry = "0"
+exts."ansys.tools.omniverse.core".timeScale = "1.0"
+exts."ansys.tools.omniverse.core".interpreter = ""
+
diff --git a/exts/ansys.tools.omniverse.core/docs/CHANGELOG.md b/exts/ansys.tools.omniverse.core/docs/CHANGELOG.md
index 58b67690d12..1dde8e84d48 100644
--- a/exts/ansys.tools.omniverse.core/docs/CHANGELOG.md
+++ b/exts/ansys.tools.omniverse.core/docs/CHANGELOG.md
@@ -1,13 +1,13 @@
-# Changelog
-
-The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
-
-## [0.8.12] - 2024-10-17
-- Update to let the path to the python installation be validated and edited
-
-## [0.8.11] - 2024-09-17
-- Update for new pyensight Omniverse interface
-
-## [0.1.0] - 2024-07-30
-- First version
-
+# Changelog
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## [0.8.12] - 2024-10-17
+- Update to let the path to the python installation be validated and edited
+
+## [0.8.11] - 2024-09-17
+- Update for new pyensight Omniverse interface
+
+## [0.1.0] - 2024-07-30
+- First version
+
diff --git a/exts/ansys.tools.omniverse.core/docs/README.md b/exts/ansys.tools.omniverse.core/docs/README.md
index ff645fa984a..27c6b2580aa 100644
--- a/exts/ansys.tools.omniverse.core/docs/README.md
+++ b/exts/ansys.tools.omniverse.core/docs/README.md
@@ -1,11 +1,11 @@
-# ANSYS Tools Omniverse Core [ansys.tools.omniverse.core]
-
-The Omniverse extension provides a dynamic connection between an ANSYS
-geometry source (e.g. ANSYS EnSight) and an Omniverse instance. It can
-also be launched via pyensight and via a simple GUI provided by
-the [ansys.tools.omniverse.dsgui] kit extension.
-
-For more details on this extension see:
-https://ensight.docs.pyansys.com/version/dev/user_guide/omniverse_info.html
-
-
+# ANSYS Tools Omniverse Core [ansys.tools.omniverse.core]
+
+The Omniverse extension provides a dynamic connection between an ANSYS
+geometry source (e.g. ANSYS EnSight) and an Omniverse instance. It can
+also be launched via pyensight and via a simple GUI provided by
+the [ansys.tools.omniverse.dsgui] kit extension.
+
+For more details on this extension see:
+https://ensight.docs.pyansys.com/version/dev/user_guide/omniverse_info.html
+
+
diff --git a/exts/ansys.tools.omniverse.core/docs/index.rst b/exts/ansys.tools.omniverse.core/docs/index.rst
index e98621c056a..f2acbbaf653 100644
--- a/exts/ansys.tools.omniverse.core/docs/index.rst
+++ b/exts/ansys.tools.omniverse.core/docs/index.rst
@@ -1,18 +1,18 @@
-ansys.tools.omniverse.core
-######################
-
-
-.. toctree::
- :maxdepth: 1
-
- README
- CHANGELOG
-
-
-.. automodule::"ansys.tools.omniverse.core"
- :platform: Windows-x86_64, Linux-x86_64
- :members:
- :undoc-members:
- :show-inheritance:
- :imported-members:
- :exclude-members: contextmanager
+ansys.tools.omniverse.core
+##########################
+
+
+.. toctree::
+ :maxdepth: 1
+
+ README
+ CHANGELOG
+
+
+.. automodule::"ansys.tools.omniverse.core"
+ :platform: Windows-x86_64, Linux-x86_64
+ :members:
+ :undoc-members:
+ :show-inheritance:
+ :imported-members:
+ :exclude-members: contextmanager
diff --git a/exts/ansys.tools.omniverse.dsgui/ansys/tools/omniverse/dsgui/__init__.py b/exts/ansys.tools.omniverse.dsgui/ansys/tools/omniverse/dsgui/__init__.py
index 5d375081f69..53b61d60bec 100644
--- a/exts/ansys.tools.omniverse.dsgui/ansys/tools/omniverse/dsgui/__init__.py
+++ b/exts/ansys.tools.omniverse.dsgui/ansys/tools/omniverse/dsgui/__init__.py
@@ -1 +1 @@
-from .extension import *
+from .extension import *
diff --git a/exts/ansys.tools.omniverse.dsgui/ansys/tools/omniverse/dsgui/extension.py b/exts/ansys.tools.omniverse.dsgui/ansys/tools/omniverse/dsgui/extension.py
index 6eb46dfc1fb..8e1d0772c76 100644
--- a/exts/ansys.tools.omniverse.dsgui/ansys/tools/omniverse/dsgui/extension.py
+++ b/exts/ansys.tools.omniverse.dsgui/ansys/tools/omniverse/dsgui/extension.py
@@ -1,212 +1,212 @@
-import logging
-import threading
-import time
-from typing import Any, Optional
-
-import ansys.tools.omniverse.core
-import omni.ext
-import omni.ui as ui
-
-
-class AnsysToolsOmniverseDSGUIExtension(omni.ext.IExt):
- def __init__(self, *args, **kwargs) -> None:
- super().__init__(*args, **kwargs)
- self._window: Any = None
- self._label_w: Any = None
- self._logger = logging.getLogger(__name__.rsplit(".", 1)[0])
- self._grpc = None
- self._dsg_uri_w = None
- self._dsg_token_w = None
- self._interpreter_w = None
- self._destination_w = None
- self._temporal_w = None
- self._vrmode_w = None
- self._normalize_w = None
- self._time_scale_w = None
- self._connect_w = None
- self._update_w = None
- self._connected = False
- self._error_msg = ""
-
- @property
- def service(self) -> Optional["AnsysToolsOmniverseDSGUIExtension"]:
- return ansys.tools.omniverse.core.AnsysToolsOmniverseCoreServerExtension.get_instance()
-
- def info(self, text: str) -> None:
- self._logger.info(text)
-
- def warning(self, text: str) -> None:
- self._logger.warning(text)
-
- def error(self, text: str) -> None:
- self._logger.error(text)
-
- def start_server(self) -> None:
- if self._connected:
- return
- self.service.dsg_uri = self._dsg_uri_w.model.as_string
- self.service.security_token = self._dsg_token_w.model.as_string
- self.service.interpreter = self._interpreter_w.model.as_string
- self.service.destination = self._destination_w.model.as_string
- self.service.temporal = self._temporal_w.model.as_bool
- self.service.vrmode = self._vrmode_w.model.as_bool
- self.service.normalize_geometry = self._normalize_w.model.as_bool
- scale = self._time_scale_w.model.as_float
- if scale <= 0.0:
- scale = 1.0
- self.service.time_scale = scale
- self.info("Connected to DSG service")
- self._connected = True
-
- def stop_server(self) -> None:
- if not self._connected:
- return
- self.info("Disconnect from DSG service")
- self._connected = False
-
- def connect_cb(self) -> None:
- if self.service is None:
- self.error("Unable to find ansys.tools.omniverse.core instance")
- return
- if self._connected:
- self.stop_server()
- else:
- pypath = self._interpreter_w.model.as_string
- if not self.service.validate_interpreter(pypath):
- self._error_msg = ". Invalid Python path."
- else:
- self._error_msg = ""
- self.start_server()
- self.update_ui()
-
- def update_cb(self) -> None:
- if not self._connected:
- self.error("No DSG service connected")
- return
- self.service.dsg_export()
-
- def on_startup(self, ext_id: str) -> None:
- self.info(f"ANSYS tools omniverse DSG GUI startup: {ext_id}")
- if self.service is None:
- self.error("Unable to find ansys.tools.omniverse.core instance")
- self.build_ui()
- self._update_callback()
-
- def _update_callback(self) -> None:
- self.update_ui()
- threading.Timer(0.5, self._update_callback).start()
-
- def update_ui(self) -> None:
- status = self.service.read_status_file()
- if self._connected:
- self._connect_w.text = "Disconnect from DSG Server"
- tmp = f"Connected to: {self.service.dsg_uri}"
- if status.get("status", "idle") == "working":
- count = status.get("processed_buffers", 0)
- total = status.get("total_buffers", 0)
- dt = time.time() - status.get("start_time", 0.0)
- percent = 0
- if total > 0:
- percent = int((count / total) * 100)
- tmp = f"Transfer: {percent}% : {dt:.2f}s"
- self._label_w.text = tmp
- else:
- self._connect_w.text = "Connect to DSG Server"
- self._label_w.text = "No connected DSG server" + self._error_msg
- self._update_w.enabled = self._connected and (status.get("status", "idle") == "idle")
- self._connect_w.enabled = status.get("status", "idle") == "idle"
- self._temporal_w.enabled = True
- self._vrmode_w.enabled = not self._connected
- self._normalize_w.enabled = not self._connected
- self._time_scale_w.enabled = not self._connected
- self._dsg_uri_w.enabled = not self._connected
- self._dsg_token_w.enabled = not self._connected
- self._interpreter_w.enabled = not self._connected
- self._destination_w.enabled = not self._connected
-
- def build_ui(self) -> None:
- self._window = ui.Window(f"ANSYS Tools Omniverse DSG ({self.service.version})")
- with self._window.frame:
- with ui.VStack(height=0, spacing=5):
- self._label_w = ui.Label("No connected DSG server" + self._error_msg)
-
- with ui.HStack(spacing=5):
- ui.Label(
- "DSG Service URI:",
- alignment=ui.Alignment.RIGHT_CENTER,
- width=0,
- )
- self._dsg_uri_w = ui.StringField()
- self._dsg_uri_w.model.as_string = self.service.dsg_uri
-
- with ui.HStack(spacing=5):
- ui.Label(
- "DSG security code:",
- alignment=ui.Alignment.RIGHT_CENTER,
- width=0,
- )
- self._dsg_token_w = ui.StringField(password_mode=True)
- self._dsg_token_w.model.as_string = self.service.security_token
-
- with ui.HStack(spacing=5):
- ui.Label(
- "Python path:",
- alignment=ui.Alignment.RIGHT_CENTER,
- width=0,
- )
- self._interpreter_w = ui.StringField()
- self._interpreter_w.model.as_string = str(self.service.interpreter)
-
- with ui.HStack(spacing=5):
- ui.Label(
- "Export directory:",
- alignment=ui.Alignment.RIGHT_CENTER,
- width=0,
- )
- self._destination_w = ui.StringField()
- self._destination_w.model.as_string = self.service.destination
-
- with ui.HStack(spacing=5):
- with ui.HStack(spacing=5):
- self._temporal_w = ui.CheckBox(width=0)
- self._temporal_w.model.set_value(self.service.temporal)
- ui.Label("Temporal", alignment=ui.Alignment.LEFT_CENTER)
-
- with ui.HStack(spacing=5):
- self._vrmode_w = ui.CheckBox(width=0)
- self._vrmode_w.model.set_value(self.service.vrmode)
- ui.Label("VR Mode", alignment=ui.Alignment.LEFT_CENTER)
-
- with ui.HStack(spacing=5):
- self._normalize_w = ui.CheckBox(width=0)
- self._normalize_w.model.set_value(self.service.normalize_geometry)
- ui.Label("Normalize", alignment=ui.Alignment.LEFT_CENTER)
-
- with ui.HStack(spacing=5):
- ui.Label(
- "Temporal scaling factor:",
- alignment=ui.Alignment.RIGHT_CENTER,
- width=0,
- )
- self._time_scale_w = ui.FloatField()
- self._time_scale_w.model.as_float = self.service.time_scale
-
- with ui.HStack():
- self._connect_w = ui.Button("Connect to DSG Server", clicked_fn=self.connect_cb)
- self._update_w = ui.Button("Request Update", clicked_fn=self.update_cb)
-
- def on_shutdown(self) -> None:
- self.info("ANSYS Tools Omniverse DSG shutdown")
- self.stop_server()
- self._window = None
- self._label_w = None
- self._dsg_uri_w = None
- self._dsg_token_w = None
- self._interpreter_w = None
- self._destination_w = None
- self._temporal_w = None
- self._vrmode_w = None
- self._normalize_w = None
- self._time_scale_w = None
- self._connect_w = None
- self._update_w = None
+import logging
+import threading
+import time
+from typing import Any, Optional
+
+import ansys.tools.omniverse.core
+import omni.ext
+import omni.ui as ui
+
+
+class AnsysToolsOmniverseDSGUIExtension(omni.ext.IExt):
+ def __init__(self, *args, **kwargs) -> None:
+ super().__init__(*args, **kwargs)
+ self._window: Any = None
+ self._label_w: Any = None
+ self._logger = logging.getLogger(__name__.rsplit(".", 1)[0])
+ self._grpc = None
+ self._dsg_uri_w = None
+ self._dsg_token_w = None
+ self._interpreter_w = None
+ self._destination_w = None
+ self._temporal_w = None
+ self._vrmode_w = None
+ self._normalize_w = None
+ self._time_scale_w = None
+ self._connect_w = None
+ self._update_w = None
+ self._connected = False
+ self._error_msg = ""
+
+ @property
+ def service(self) -> Optional["AnsysToolsOmniverseDSGUIExtension"]:
+ return ansys.tools.omniverse.core.AnsysToolsOmniverseCoreServerExtension.get_instance()
+
+ def info(self, text: str) -> None:
+ self._logger.info(text)
+
+ def warning(self, text: str) -> None:
+ self._logger.warning(text)
+
+ def error(self, text: str) -> None:
+ self._logger.error(text)
+
+ def start_server(self) -> None:
+ if self._connected:
+ return
+ self.service.dsg_uri = self._dsg_uri_w.model.as_string
+ self.service.security_token = self._dsg_token_w.model.as_string
+ self.service.interpreter = self._interpreter_w.model.as_string
+ self.service.destination = self._destination_w.model.as_string
+ self.service.temporal = self._temporal_w.model.as_bool
+ self.service.vrmode = self._vrmode_w.model.as_bool
+ self.service.normalize_geometry = self._normalize_w.model.as_bool
+ scale = self._time_scale_w.model.as_float
+ if scale <= 0.0:
+ scale = 1.0
+ self.service.time_scale = scale
+ self.info("Connected to DSG service")
+ self._connected = True
+
+ def stop_server(self) -> None:
+ if not self._connected:
+ return
+ self.info("Disconnect from DSG service")
+ self._connected = False
+
+ def connect_cb(self) -> None:
+ if self.service is None:
+ self.error("Unable to find ansys.tools.omniverse.core instance")
+ return
+ if self._connected:
+ self.stop_server()
+ else:
+ pypath = self._interpreter_w.model.as_string
+ if not self.service.validate_interpreter(pypath):
+ self._error_msg = ". Invalid Python path."
+ else:
+ self._error_msg = ""
+ self.start_server()
+ self.update_ui()
+
+ def update_cb(self) -> None:
+ if not self._connected:
+ self.error("No DSG service connected")
+ return
+ self.service.dsg_export()
+
+ def on_startup(self, ext_id: str) -> None:
+ self.info(f"ANSYS tools omniverse DSG GUI startup: {ext_id}")
+ if self.service is None:
+ self.error("Unable to find ansys.tools.omniverse.core instance")
+ self.build_ui()
+ self._update_callback()
+
+ def _update_callback(self) -> None:
+ self.update_ui()
+ threading.Timer(0.5, self._update_callback).start()
+
+ def update_ui(self) -> None:
+ status = self.service.read_status_file()
+ if self._connected:
+ self._connect_w.text = "Disconnect from DSG Server"
+ tmp = f"Connected to: {self.service.dsg_uri}"
+ if status.get("status", "idle") == "working":
+ count = status.get("processed_buffers", 0)
+ total = status.get("total_buffers", 0)
+ dt = time.time() - status.get("start_time", 0.0)
+ percent = 0
+ if total > 0:
+ percent = int((count / total) * 100)
+ tmp = f"Transfer: {percent}% : {dt:.2f}s"
+ self._label_w.text = tmp
+ else:
+ self._connect_w.text = "Connect to DSG Server"
+ self._label_w.text = "No connected DSG server" + self._error_msg
+ self._update_w.enabled = self._connected and (status.get("status", "idle") == "idle")
+ self._connect_w.enabled = status.get("status", "idle") == "idle"
+ self._temporal_w.enabled = True
+ self._vrmode_w.enabled = not self._connected
+ self._normalize_w.enabled = not self._connected
+ self._time_scale_w.enabled = not self._connected
+ self._dsg_uri_w.enabled = not self._connected
+ self._dsg_token_w.enabled = not self._connected
+ self._interpreter_w.enabled = not self._connected
+ self._destination_w.enabled = not self._connected
+
+ def build_ui(self) -> None:
+ self._window = ui.Window(f"ANSYS Tools Omniverse DSG ({self.service.version})")
+ with self._window.frame:
+ with ui.VStack(height=0, spacing=5):
+ self._label_w = ui.Label("No connected DSG server" + self._error_msg)
+
+ with ui.HStack(spacing=5):
+ ui.Label(
+ "DSG Service URI:",
+ alignment=ui.Alignment.RIGHT_CENTER,
+ width=0,
+ )
+ self._dsg_uri_w = ui.StringField()
+ self._dsg_uri_w.model.as_string = self.service.dsg_uri
+
+ with ui.HStack(spacing=5):
+ ui.Label(
+ "DSG security code:",
+ alignment=ui.Alignment.RIGHT_CENTER,
+ width=0,
+ )
+ self._dsg_token_w = ui.StringField(password_mode=True)
+ self._dsg_token_w.model.as_string = self.service.security_token
+
+ with ui.HStack(spacing=5):
+ ui.Label(
+ "Python path:",
+ alignment=ui.Alignment.RIGHT_CENTER,
+ width=0,
+ )
+ self._interpreter_w = ui.StringField()
+ self._interpreter_w.model.as_string = str(self.service.interpreter)
+
+ with ui.HStack(spacing=5):
+ ui.Label(
+ "Export directory:",
+ alignment=ui.Alignment.RIGHT_CENTER,
+ width=0,
+ )
+ self._destination_w = ui.StringField()
+ self._destination_w.model.as_string = self.service.destination
+
+ with ui.HStack(spacing=5):
+ with ui.HStack(spacing=5):
+ self._temporal_w = ui.CheckBox(width=0)
+ self._temporal_w.model.set_value(self.service.temporal)
+ ui.Label("Temporal", alignment=ui.Alignment.LEFT_CENTER)
+
+ with ui.HStack(spacing=5):
+ self._vrmode_w = ui.CheckBox(width=0)
+ self._vrmode_w.model.set_value(self.service.vrmode)
+ ui.Label("VR Mode", alignment=ui.Alignment.LEFT_CENTER)
+
+ with ui.HStack(spacing=5):
+ self._normalize_w = ui.CheckBox(width=0)
+ self._normalize_w.model.set_value(self.service.normalize_geometry)
+ ui.Label("Normalize", alignment=ui.Alignment.LEFT_CENTER)
+
+ with ui.HStack(spacing=5):
+ ui.Label(
+ "Temporal scaling factor:",
+ alignment=ui.Alignment.RIGHT_CENTER,
+ width=0,
+ )
+ self._time_scale_w = ui.FloatField()
+ self._time_scale_w.model.as_float = self.service.time_scale
+
+ with ui.HStack():
+ self._connect_w = ui.Button("Connect to DSG Server", clicked_fn=self.connect_cb)
+ self._update_w = ui.Button("Request Update", clicked_fn=self.update_cb)
+
+ def on_shutdown(self) -> None:
+ self.info("ANSYS Tools Omniverse DSG shutdown")
+ self.stop_server()
+ self._window = None
+ self._label_w = None
+ self._dsg_uri_w = None
+ self._dsg_token_w = None
+ self._interpreter_w = None
+ self._destination_w = None
+ self._temporal_w = None
+ self._vrmode_w = None
+ self._normalize_w = None
+ self._time_scale_w = None
+ self._connect_w = None
+ self._update_w = None
diff --git a/exts/ansys.tools.omniverse.dsgui/config/extension.toml b/exts/ansys.tools.omniverse.dsgui/config/extension.toml
index f337498522b..d0a74d3d900 100644
--- a/exts/ansys.tools.omniverse.dsgui/config/extension.toml
+++ b/exts/ansys.tools.omniverse.dsgui/config/extension.toml
@@ -1,49 +1,49 @@
-[package]
-# Semantic Versioning is used: https://semver.org/
-version = "0.8.12"
-
-# Lists people or organizations that are considered the "authors" of the package.
-authors = ["ANSYS"]
-
-# The title and description fields are primarily for displaying extension info in UI
-title = "ANSYS Tools Omniverse DSG GUI"
-description = "A geometry synchronization service that enables export of geometry scenes from ANSYS products to Omniverse."
-
-# Path (relative to the root) or content of readme markdown file for UI.
-readme = "docs/README.md"
-
-# URL of the extension source repository.
-repository = "https://github.com/ansys/pyensight"
-
-# One of categories for UI.
-category = "simulation"
-
-# Keywords for the extension
-keywords = ["ANSYS", "EnSight", "PyEnSight", "Fluent", "kit"]
-
-# Location of change log file in target (final) folder of extension, relative to the root.
-# More info on writing changelog: https://keepachangelog.com/en/1.0.0/
-changelog = "docs/CHANGELOG.md"
-
-# Preview image and icon. Folder named "data" automatically goes in git lfs (see .gitattributes file).
-# Preview image is shown in "Overview" of Extensions window. Screenshot of an extension might be a good preview image.
-preview_image = "data/preview.png"
-
-# Icon is shown in Extensions window, it is recommended to be square, of size 256x256.
-icon = "data/icon.png"
-
-# Use omni.ui to build simple UI
-[dependencies]
-"omni.kit.uiapp" = {}
-"ansys.tools.omniverse.core" = {}
-
-# Main python module this extension provides, it will be publicly available as "import ansys.tools.omniverse.dsgui".
-[[python.module]]
-name = "ansys.tools.omniverse.dsgui"
-
-[[test]]
-# Extra dependencies only to be used during test run
-dependencies = [
- "omni.kit.ui_test" # UI testing extension
-]
-
+[package]
+# Semantic Versioning is used: https://semver.org/
+version = "0.8.12"
+
+# Lists people or organizations that are considered the "authors" of the package.
+authors = ["ANSYS"]
+
+# The title and description fields are primarily for displaying extension info in UI
+title = "ANSYS Tools Omniverse DSG GUI"
+description = "A geometry synchronization service that enables export of geometry scenes from ANSYS products to Omniverse."
+
+# Path (relative to the root) or content of readme markdown file for UI.
+readme = "docs/README.md"
+
+# URL of the extension source repository.
+repository = "https://github.com/ansys/pyensight"
+
+# One of categories for UI.
+category = "simulation"
+
+# Keywords for the extension
+keywords = ["ANSYS", "EnSight", "PyEnSight", "Fluent", "kit"]
+
+# Location of change log file in target (final) folder of extension, relative to the root.
+# More info on writing changelog: https://keepachangelog.com/en/1.0.0/
+changelog = "docs/CHANGELOG.md"
+
+# Preview image and icon. Folder named "data" automatically goes in git lfs (see .gitattributes file).
+# Preview image is shown in "Overview" of Extensions window. Screenshot of an extension might be a good preview image.
+preview_image = "data/preview.png"
+
+# Icon is shown in Extensions window, it is recommended to be square, of size 256x256.
+icon = "data/icon.png"
+
+# Use omni.ui to build simple UI
+[dependencies]
+"omni.kit.uiapp" = {}
+"ansys.tools.omniverse.core" = {}
+
+# Main python module this extension provides, it will be publicly available as "import ansys.tools.omniverse.dsgui".
+[[python.module]]
+name = "ansys.tools.omniverse.dsgui"
+
+[[test]]
+# Extra dependencies only to be used during test run
+dependencies = [
+ "omni.kit.ui_test" # UI testing extension
+]
+
diff --git a/exts/ansys.tools.omniverse.dsgui/docs/CHANGELOG.md b/exts/ansys.tools.omniverse.dsgui/docs/CHANGELOG.md
index 11d717c6cf7..2418d39ae78 100644
--- a/exts/ansys.tools.omniverse.dsgui/docs/CHANGELOG.md
+++ b/exts/ansys.tools.omniverse.dsgui/docs/CHANGELOG.md
@@ -1,12 +1,12 @@
-# Changelog
-
-The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
-
-## [0.8.11] - 2024-10-17
-- Add UI to view, edit, and validate the path to the python containing pyensight
-
-## [0.8.11] - 2024-09-17
-- Update for new pyensight Omniverse interface
-
-## [0.1.0] - 2024-07-30
-- First version
+# Changelog
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## [0.8.12] - 2024-10-17
+- Add UI to view, edit, and validate the path to the Python interpreter containing PyEnSight
+
+## [0.8.11] - 2024-09-17
+- Update for the new PyEnSight Omniverse interface
+
+## [0.1.0] - 2024-07-30
+- First version
diff --git a/exts/ansys.tools.omniverse.dsgui/docs/README.md b/exts/ansys.tools.omniverse.dsgui/docs/README.md
index 5dad45106fb..b36d0131d5b 100644
--- a/exts/ansys.tools.omniverse.dsgui/docs/README.md
+++ b/exts/ansys.tools.omniverse.dsgui/docs/README.md
@@ -1,13 +1,13 @@
-# ANSYS Tools Omniverse DSG GUI [ansys.tools.omniverse.dsgui]
-
-This Omniverse extension is a UI interface to the [ansys.tools.omniverse.core]
-kit extension. It allows an Omniverse application user to connect to
-a running copy of ANSYS EnSight or other application that supports the
-Dynamic Scene Graph gRPC protocol. The GUI allows for the remote scene
-to be pulled, on request, into a specified location in USD format.
-
-
-For more details on this extension see:
-https://ensight.docs.pyansys.com/version/dev/user_guide/omniverse_info.html
-
-
+# ANSYS Tools Omniverse DSG GUI [ansys.tools.omniverse.dsgui]
+
+This Omniverse extension is a user interface to the [ansys.tools.omniverse.core]
+kit extension. It allows an Omniverse application user to connect to
+a running copy of ANSYS EnSight or another application that supports the
+Dynamic Scene Graph gRPC protocol. On request, the GUI pulls the remote
+scene into a specified location in USD format.
+
+
+For more details on this extension, see:
+https://ensight.docs.pyansys.com/version/dev/user_guide/omniverse_info.html
+
+
diff --git a/exts/ansys.tools.omniverse.dsgui/docs/index.rst b/exts/ansys.tools.omniverse.dsgui/docs/index.rst
index d2258522b7f..85f2a9ee27e 100644
--- a/exts/ansys.tools.omniverse.dsgui/docs/index.rst
+++ b/exts/ansys.tools.omniverse.dsgui/docs/index.rst
@@ -1,18 +1,18 @@
-ansys.tools.omniverse.dsgui
-########################
-
-
-.. toctree::
- :maxdepth: 1
-
- README
- CHANGELOG
-
-
-.. automodule::"ansys.tools.omniverse.dsgui"
- :platform: Windows-x86_64, Linux-x86_64
- :members:
- :undoc-members:
- :show-inheritance:
- :imported-members:
- :exclude-members: contextmanager
+ansys.tools.omniverse.dsgui
+###########################
+
+
+.. toctree::
+ :maxdepth: 1
+
+ README
+ CHANGELOG
+
+
+.. automodule:: ansys.tools.omniverse.dsgui
+ :platform: Windows-x86_64, Linux-x86_64
+ :members:
+ :undoc-members:
+ :show-inheritance:
+ :imported-members:
+ :exclude-members: contextmanager
diff --git a/pyproject.toml b/pyproject.toml
index fee03b84300..76cc7d98814 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,191 +1,191 @@
-[build-system]
-requires = [
- "flit_core>=3.2,<4"
-]
-build-backend = "flit_core.buildapi"
-
-[project]
-name = "ansys-pyensight-core"
-version = "0.9.0-dev0"
-description = "A python wrapper for Ansys EnSight"
-readme = "README.rst"
-requires-python = ">=3.9,<4"
-license = {file = "LICENSE"}
-authors = [{name = "ANSYS, Inc.", email = "pyansys.core@ansys.com"}]
-maintainers = [{name = "ANSYS, Inc.", email = "pyansys.core@ansys.com"}]
-classifiers = [
- "Development Status :: 4 - Beta",
- "Intended Audience :: Science/Research",
- "Topic :: Scientific/Engineering :: Information Analysis",
- "License :: OSI Approved :: MIT License",
- "Operating System :: OS Independent",
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
- "Programming Language :: Python :: 3.11",
- "Programming Language :: Python :: 3.12",
-]
-
-dependencies = [
- "importlib-metadata>=4.0; python_version<='3.8'",
- "ansys-api-pyensight==0.4.2",
- "requests>=2.28.2",
- "docker>=6.1.0",
- "urllib3<3.0.0",
- "numpy>=1.21.0,<2",
- "Pillow>=9.3.0",
- "pypng>=0.0.20",
- "psutil>=5.9.2",
- "usd-core>=24.8",
- "pygltflib>=1.16.2",
-]
-
-[project.optional-dependencies]
-dev = [
- "build>=0.10.0",
- "bump2version>=1.0.1",
- "ipdb>=0.9.4",
- "dill>=0.3.5.1",
- "pre-commit>=3.3.3",
-]
-tests = [
- "pytest==8.3.2",
- "pytest-cov==5.0.0",
- "dill>=0.3.5.1",
- "pytest-mock==3.10.0",
- "urllib3==2.2.2",
- "requests>=2.28.2",
- "docker>=6.1.0",
- "pytest-xdist==3.6.1",
- "pytest-rerunfailures~=14.0",
-]
-doc = [
- "Sphinx==8.0.2",
- "numpydoc==1.8.0",
- "ansys-sphinx-theme==1.1.1",
- "sphinx-copybutton==0.5.2",
- "sphinx-gallery==0.17.1",
- "sphinxcontrib-mermaid==0.9.2",
- "docker>=6.1.0",
- "matplotlib==3.9.1.post1",
- "requests>=2.28.2",
- "sphinxcontrib.jquery==4.1",
- "sphinxcontrib-openapi==0.8.4",
- "coverage-badge==1.1.2",
- "sphinxcontrib-video==0.2.1",
- "usd-core>=24.8",
- "pygltflib>=1.16.2",
-]
-
-[project.urls]
-Documentation = "https://ensight.docs.pyansys.com/"
-Homepage = "https://github.com/ansys/pyensight"
-Source = "https://github.com/ansys/pyensight"
-Tracker = "https://github.com/ansys/pyensight/issues"
-Changelog = "https://github.com/ansys/pyensight/blob/main/CHANGELOG.rst"
-
-[tool.flit.module]
-name = "ansys.pyensight.core"
-
-[tool.coverage.run]
-branch = true
-omit = [
- "*/locallauncher.py",
- "*/adr.py",
- "*/omniverse*.py",
- "*/dsg_server.py",
- "*/readers.py",
- "*/omniverse/core/*.py",
- "*/omniverse/dsgui/*.py",
-]
-
-[tool.coverage.report]
-exclude_lines = [
- "pragma: no cover",
- "def __repr__",
- "if selfdebug",
- "raise AssertionError",
- "raise NotImplementedError",
- "if 0:",
- "if __name__ == .__main__.:",
- "@(abc.)?abstractmethod",
- "if TYPE_CHECKING:",
- "def _repr_pretty_",
- "self._pim_instance",
- "self._pim_file_service",
- "def _launch_ensight_with_pim"]
-
-ignore_errors = true
-show_missing = true
-
-[tool.coverage.html]
-show_contexts = true
-
-[tool.black]
-line-length = 100
-src_paths = ["src/ansys", "doc"]
-
-[tool.pytest.ini_options]
-minversion = "7.1"
-testpaths = [
- "tests",
-]
-addopts = "--setup-show --cov=ansys.pyensight.core --cov-report html:coverage-html --cov-report term --cov-config=.coveragerc --capture=tee-sys --tb=native -p no:warnings"
-markers =[
- "integration:Run integration tests",
- "smoke:Run the smoke tests",
- "unit:Run the unit tests",
- ]
-norecursedirs = ".git .idea"
-filterwarnings = "ignore:.+:DeprecationWarning"
-
-
-[tool.codespell]
-ignore-words-list = "ro, te, pres"
-quiet-level = 3
-skip = "*.pyc,*.xml,*.gif,*.png,*.jpg,*.js,*.html,*.svg,tests/ensigth_api_test_assets.txt"
-
-[tool.isort]
-profile = "black"
-skip_gitignore = true
-force_sort_within_sections = true
-line_length = 100
-default_section = "THIRDPARTY"
-filter_files = true
-src_paths = ["ansys", "doc", "tests"]
-
-[tool.bandit]
-level = "HIGH"
-number = "3"
-recursive = true
-exclude = ["venv/*", "tests/*"]
-
-[tool.mypy]
-python_version = 3.9
-strict = false
-namespace_packages = true
-explicit_package_bases = true
-exclude = ["tests/", "^doc"]
-no_site_packages = true
-follow_imports = "skip"
-
-[[tool.mypy.overrides]]
-module = ["grpc",
- "grpc_health",
- "grpc_bindings",
- "requests",
- "ansys",
- "ansys.*",
- "docker",
- "dill",
- "IPython.display",
- "enve",
- "urllib3",
- "ensight",
- "numpy",
- "PIL",
- "simple_upload_server.*"
-]
-ignore_missing_imports = true
-
-[pydocstyle]
-convention = "numpy"
+[build-system]
+requires = [
+ "flit_core>=3.2,<4"
+]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "ansys-pyensight-core"
+version = "0.9.0-dev0"
+description = "A python wrapper for Ansys EnSight"
+readme = "README.rst"
+requires-python = ">=3.9,<4"
+license = {file = "LICENSE"}
+authors = [{name = "ANSYS, Inc.", email = "pyansys.core@ansys.com"}]
+maintainers = [{name = "ANSYS, Inc.", email = "pyansys.core@ansys.com"}]
+classifiers = [
+ "Development Status :: 4 - Beta",
+ "Intended Audience :: Science/Research",
+ "Topic :: Scientific/Engineering :: Information Analysis",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+]
+
+dependencies = [
+ "importlib-metadata>=4.0; python_version<='3.8'",
+ "ansys-api-pyensight==0.4.2",
+ "requests>=2.28.2",
+ "docker>=6.1.0",
+ "urllib3<3.0.0",
+ "numpy>=1.21.0,<2",
+ "Pillow>=9.3.0",
+ "pypng>=0.0.20",
+ "psutil>=5.9.2",
+ "usd-core>=24.8",
+ "pygltflib>=1.16.2",
+]
+
+[project.optional-dependencies]
+dev = [
+ "build>=0.10.0",
+ "bump2version>=1.0.1",
+ "ipdb>=0.9.4",
+ "dill>=0.3.5.1",
+ "pre-commit>=3.3.3",
+]
+tests = [
+ "pytest==8.3.2",
+ "pytest-cov==5.0.0",
+ "dill>=0.3.5.1",
+ "pytest-mock==3.10.0",
+ "urllib3==2.2.2",
+ "requests>=2.28.2",
+ "docker>=6.1.0",
+ "pytest-xdist==3.6.1",
+ "pytest-rerunfailures~=14.0",
+]
+doc = [
+ "Sphinx==8.0.2",
+ "numpydoc==1.8.0",
+ "ansys-sphinx-theme==1.1.1",
+ "sphinx-copybutton==0.5.2",
+ "sphinx-gallery==0.17.1",
+ "sphinxcontrib-mermaid==0.9.2",
+ "docker>=6.1.0",
+ "matplotlib==3.9.1.post1",
+ "requests>=2.28.2",
+ "sphinxcontrib.jquery==4.1",
+ "sphinxcontrib-openapi==0.8.4",
+ "coverage-badge==1.1.2",
+ "sphinxcontrib-video==0.2.1",
+ "usd-core>=24.8",
+ "pygltflib>=1.16.2",
+]
+
+[project.urls]
+Documentation = "https://ensight.docs.pyansys.com/"
+Homepage = "https://github.com/ansys/pyensight"
+Source = "https://github.com/ansys/pyensight"
+Tracker = "https://github.com/ansys/pyensight/issues"
+Changelog = "https://github.com/ansys/pyensight/blob/main/CHANGELOG.rst"
+
+[tool.flit.module]
+name = "ansys.pyensight.core"
+
+[tool.coverage.run]
+branch = true
+omit = [
+ "*/locallauncher.py",
+ "*/adr.py",
+ "*/omniverse*.py",
+ "*/dsg_server.py",
+ "*/readers.py",
+ "*/omniverse/core/*.py",
+ "*/omniverse/dsgui/*.py",
+]
+
+[tool.coverage.report]
+exclude_lines = [
+ "pragma: no cover",
+ "def __repr__",
+ "if selfdebug",
+ "raise AssertionError",
+ "raise NotImplementedError",
+ "if 0:",
+ "if __name__ == .__main__.:",
+ "@(abc.)?abstractmethod",
+ "if TYPE_CHECKING:",
+ "def _repr_pretty_",
+ "self._pim_instance",
+ "self._pim_file_service",
+ "def _launch_ensight_with_pim"]
+
+ignore_errors = true
+show_missing = true
+
+[tool.coverage.html]
+show_contexts = true
+
+[tool.black]
+line-length = 100
+src_paths = ["src/ansys", "doc"]
+
+[tool.pytest.ini_options]
+minversion = "7.1"
+testpaths = [
+ "tests",
+]
+addopts = "--setup-show --cov=ansys.pyensight.core --cov-report html:coverage-html --cov-report term --cov-config=.coveragerc --capture=tee-sys --tb=native -p no:warnings"
+markers = [
+ "integration:Run integration tests",
+ "smoke:Run the smoke tests",
+ "unit:Run the unit tests",
+ ]
+norecursedirs = ".git .idea"
+filterwarnings = "ignore:.+:DeprecationWarning"
+
+
+[tool.codespell]
+ignore-words-list = "ro, te, pres"
+quiet-level = 3
+skip = "*.pyc,*.xml,*.gif,*.png,*.jpg,*.js,*.html,*.svg,tests/ensigth_api_test_assets.txt"
+
+[tool.isort]
+profile = "black"
+skip_gitignore = true
+force_sort_within_sections = true
+line_length = 100
+default_section = "THIRDPARTY"
+filter_files = true
+src_paths = ["ansys", "doc", "tests"]
+
+[tool.bandit]
+level = "HIGH"
+number = "3"
+recursive = true
+exclude = ["venv/*", "tests/*"]
+
+[tool.mypy]
+python_version = "3.9"
+strict = false
+namespace_packages = true
+explicit_package_bases = true
+exclude = ["tests/", "^doc"]
+no_site_packages = true
+follow_imports = "skip"
+
+[[tool.mypy.overrides]]
+module = ["grpc",
+ "grpc_health",
+ "grpc_bindings",
+ "requests",
+ "ansys",
+ "ansys.*",
+ "docker",
+ "dill",
+ "IPython.display",
+ "enve",
+ "urllib3",
+ "ensight",
+ "numpy",
+ "PIL",
+ "simple_upload_server.*"
+]
+ignore_missing_imports = true
+
+[pydocstyle]
+convention = "numpy"
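As a quick sanity check of the [project] metadata above, a small sketch using
only the standard library (the version string "0.9.0-dev0" normalizes to
"0.9.0.dev0" on install):

    from importlib.metadata import requires, version

    print(version("ansys-pyensight-core"))  # e.g. "0.9.0.dev0"
    # base dependencies only; extras such as "tests" carry an 'extra ==' marker
    print([r for r in requires("ansys-pyensight-core") if "extra" not in r])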
diff --git a/rename_nightly_wheel.py b/rename_nightly_wheel.py
index 091b54051ee..068af42444e 100644
--- a/rename_nightly_wheel.py
+++ b/rename_nightly_wheel.py
@@ -1,16 +1,16 @@
-import datetime
-import glob
-import os
-
-# rename: ansys_pyensight_core-0.2.dev0-py3-none-any.whl to
-# ansys_pyensight_core-0.2.dev0-{date_tag}-py3-none-any.whl
-# monotonically increasing number with minute level
-# resolution so a nightly can be run once a minute
-date_tag = datetime.datetime.now().strftime("%Y%m%d%H%M")
-for name in glob.glob("dist/*.whl"):
- chunks = name.split("-")
- if len(chunks) == 5:
- chunks.insert(2, date_tag)
- new_name = "-".join(chunks)
- os.rename(name, new_name)
- print(f"Rename wheel to: '{new_name}'")
+import datetime
+import glob
+import os
+
+# rename: ansys_pyensight_core-0.2.dev0-py3-none-any.whl to
+# ansys_pyensight_core-0.2.dev0-{date_tag}-py3-none-any.whl
+# monotonically increasing number with minute level
+# resolution so a nightly can be run once a minute
+date_tag = datetime.datetime.now().strftime("%Y%m%d%H%M")
+for name in glob.glob("dist/*.whl"):
+ chunks = name.split("-")
+ if len(chunks) == 5:
+ chunks.insert(2, date_tag)
+ new_name = "-".join(chunks)
+ os.rename(name, new_name)
+ print(f"Rename wheel to: '{new_name}'")
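A worked example of the rename logic above on a hypothetical wheel name
(pure string manipulation, no file I/O):

    name = "dist/ansys_pyensight_core-0.2.dev0-py3-none-any.whl"
    chunks = name.split("-")          # 5 chunks: name, version, py tag, abi, platform
    chunks.insert(2, "202411011520")  # date tag lands between version and python tag
    print("-".join(chunks))
    # dist/ansys_pyensight_core-0.2.dev0-202411011520-py3-none-any.whl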
diff --git a/src/ansys/pyensight/core/common.py b/src/ansys/pyensight/core/common.py
index ef51c3dc838..daf0606f065 100644
--- a/src/ansys/pyensight/core/common.py
+++ b/src/ansys/pyensight/core/common.py
@@ -1,216 +1,216 @@
-""" This module provides a list of common utilities shared between different PyEnSight modules."""
-
-import random
-import re
-import socket
-import time
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
-
-from ansys.pyensight.core import enshell_grpc
-import urllib3
-
-try:
- from simple_upload_server.client import Client
-
- simple_upload_server_is_available = True # pragma: no cover
-except Exception:
- simple_upload_server_is_available = False
-
-if TYPE_CHECKING:
- from docker import DockerClient
-
-
-def find_unused_ports(count: int, avoid: Optional[List[int]] = None) -> Optional[List[int]]:
- """Find "count" unused ports on the host system
-
- A port is considered unused if it does not respond to a "connect" attempt. Walk
- the ports from 'start' to 'end' looking for unused ports and avoiding any ports
- in the 'avoid' list. Stop once the desired number of ports have been
- found. If an insufficient number of ports were found, return None.
-
- Parameters
- ----------
- count: int :
- Number of unused ports to find
- avoid: Optional[List[int]] :
- An optional list of ports not to check
-
- Returns
- -------
- The detected ports or None on failure
-
- """
- if avoid is None:
- avoid = []
- ports = list()
-
- # pick a starting port number
- start = random.randint(1024, 64000)
- # We will scan for 65530 ports unless end is specified
- port_mod = 65530
- end = start + port_mod - 1
- # walk the "virtual" port range
- for base_port in range(start, end + 1):
- # Map to physical port range
- # There have been some issues with 65534+ so we stop at 65530
- port = base_port % port_mod
- # port 0 is special
- if port == 0: # pragma: no cover
- continue # pragma: no cover
- # avoid admin ports
- if port < 1024: # pragma: no cover
- continue # pragma: no cover
- # are we supposed to skip this one?
- if port in avoid: # pragma: no cover
- continue # pragma: no cover
- # is anyone listening?
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- result = sock.connect_ex(("127.0.0.1", port))
- if result != 0:
- ports.append(port)
- else:
- sock.close() # pragma: no cover
- if len(ports) >= count:
- return ports
- # in case we failed...
- if len(ports) < count: # pragma: no cover
- return None # pragma: no cover
- return ports # pragma: no cover
-
-
-def get_host_port(uri: str) -> Tuple[str, int]:
- """Get the host port for the input uri
-
- Parameters
- ----------
-
- uri: str
- The Uri to inspect
-
- Returns
- -------
- (tuple):
- A tuple containing the host and the port of the input uri
- """
- parse_results = urllib3.util.parse_url(uri)
- port = (
- parse_results.port
- if parse_results.port
- else (443 if re.search("^https|wss$", parse_results.scheme) else None)
- )
- return (parse_results.host, port)
-
-
-def get_file_service(pim_instance: Any) -> Optional[Any]: # pragma: no cover
- """Get the file service object for the input pim instance.
-
- Parameters
- ----------
-
- pim_instance:
- the PIM instance to get the service from.
-
- Returns
- -------
-
- pim_file_service:
- the PIM file service object
- """
- if simple_upload_server_is_available is False:
- return None
- if pim_instance is None:
- return None
-
- if "http-simple-upload-server" in pim_instance.services:
- pim_file_service = Client(
- token="token",
- url=pim_instance.services["http-simple-upload-server"].uri,
- headers=pim_instance.services["http-simple-upload-server"].headers,
- )
- return pim_file_service
- return None
-
-
-def populate_service_host_port( # pragma: no cover
- pim_instance: Any, service_host_port: Dict[str, Tuple[str, int]], webui: bool = False
-) -> Dict[str, Tuple[str, int]]:
- """Populate the service host port dictionary with the services available in the PIM instance.
-
- Parameters
- ----------
- pim_instance:
- the PIM instance to get the servicea from.
- service_host_port: dict
- the dictionary to be updated with the services from the PIM instance
- webui: bool
- if True retrieve also the webUI service
-
- Returns
- -------
- service_host_port: dict
- the dictionary updated with the services from the PIM instance
- """
- if not set(("grpc_private", "http", "ws")).issubset(pim_instance.services):
- raise RuntimeError(
- "If channel is specified, the PIM instance must have a list of length 3 "
- + "containing the appropriate service URIs. It does not."
- )
- service_host_port["grpc_private"] = get_host_port(pim_instance.services["grpc_private"].uri)
- service_host_port["http"] = get_host_port(pim_instance.services["http"].uri)
- service_host_port["ws"] = get_host_port(pim_instance.services["ws"].uri)
- service_host_port["grpc"] = ("127.0.0.1", -1)
- if webui:
- service_host_port["webui"] = get_host_port(pim_instance.services["webui"].uri)
- return service_host_port
-
-
-def launch_enshell_interface(
- enshell_grpc_channel: Any, grpc_port: int, timeout: float
-) -> enshell_grpc.EnShellGRPC:
- """Launch the EnShell gRPC Interface.
-
- Parameters
- ----------
- enshell_grpc_channel:
- An eventual gRPC channel already available, like in the PIM case
- grpc_port: int
- the gRPC port to connect to
- timeout: float
- a timeout to wait for the gRPC connection
-
- Returns
- -------
- enshell: enshell_grpc.EnShellGRPC
- the enshell gRPC interface
- """
- if enshell_grpc_channel: # pragma: no cover
- enshell = enshell_grpc.EnShellGRPC() # pragma: no cover
- enshell.connect_existing_channel(enshell_grpc_channel) # pragma: no cover
- else:
- enshell = enshell_grpc.EnShellGRPC(port=grpc_port)
- time_start = time.time()
- while time.time() - time_start < timeout: # pragma: no cover
- if enshell.is_connected():
- break
- try:
- enshell.connect(timeout=timeout)
- except OSError: # pragma: no cover
- pass # pragma: no cover
- return enshell
-
-
-def pull_image(docker_client: "DockerClient", image_name: str) -> None:
- """Pull the input docker image using the input Docker Client
-
- Parameters
- ----------
- docker_client: DockerClient
- the current DockerClient to pull the image with
- image_name: str
- the image to pull
- """
- try:
- if docker_client is not None: # pragma: no cover
- docker_client.images.pull(image_name)
- except Exception:
- raise RuntimeError(f"Can't pull Docker image: {image_name}")
+""" This module provides a list of common utilities shared between different PyEnSight modules."""
+
+import random
+import re
+import socket
+import time
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
+
+from ansys.pyensight.core import enshell_grpc
+import urllib3
+
+try:
+ from simple_upload_server.client import Client
+
+ simple_upload_server_is_available = True # pragma: no cover
+except Exception:
+ simple_upload_server_is_available = False
+
+if TYPE_CHECKING:
+ from docker import DockerClient
+
+
+def find_unused_ports(count: int, avoid: Optional[List[int]] = None) -> Optional[List[int]]:
+ """Find "count" unused ports on the host system
+
+ A port is considered unused if it does not respond to a "connect" attempt. Walk
+ the ports from 'start' to 'end' looking for unused ports and avoiding any ports
+ in the 'avoid' list. Stop once the desired number of ports have been
+ found. If an insufficient number of ports were found, return None.
+
+ Parameters
+ ----------
+    count : int
+        Number of unused ports to find
+    avoid : Optional[List[int]]
+        An optional list of ports not to check
+
+ Returns
+ -------
+ The detected ports or None on failure
+
+ """
+ if avoid is None:
+ avoid = []
+ ports = list()
+
+ # pick a starting port number
+ start = random.randint(1024, 64000)
+    # We will scan up to 65530 ports
+ port_mod = 65530
+ end = start + port_mod - 1
+ # walk the "virtual" port range
+ for base_port in range(start, end + 1):
+ # Map to physical port range
+ # There have been some issues with 65534+ so we stop at 65530
+ port = base_port % port_mod
+ # port 0 is special
+ if port == 0: # pragma: no cover
+ continue # pragma: no cover
+ # avoid admin ports
+ if port < 1024: # pragma: no cover
+ continue # pragma: no cover
+ # are we supposed to skip this one?
+ if port in avoid: # pragma: no cover
+ continue # pragma: no cover
+ # is anyone listening?
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ result = sock.connect_ex(("127.0.0.1", port))
+ if result != 0:
+ ports.append(port)
+ else:
+ sock.close() # pragma: no cover
+ if len(ports) >= count:
+ return ports
+ # in case we failed...
+ if len(ports) < count: # pragma: no cover
+ return None # pragma: no cover
+ return ports # pragma: no cover
+
+
+def get_host_port(uri: str) -> Tuple[str, int]:
+ """Get the host port for the input uri
+
+ Parameters
+ ----------
+
+ uri: str
+ The Uri to inspect
+
+ Returns
+ -------
+ (tuple):
+ A tuple containing the host and the port of the input uri
+ """
+ parse_results = urllib3.util.parse_url(uri)
+ port = (
+ parse_results.port
+ if parse_results.port
+ else (443 if re.search("^https|wss$", parse_results.scheme) else None)
+ )
+ return (parse_results.host, port)
+
+
+def get_file_service(pim_instance: Any) -> Optional[Any]: # pragma: no cover
+ """Get the file service object for the input pim instance.
+
+ Parameters
+ ----------
+
+ pim_instance:
+ the PIM instance to get the service from.
+
+ Returns
+ -------
+
+ pim_file_service:
+ the PIM file service object
+ """
+ if simple_upload_server_is_available is False:
+ return None
+ if pim_instance is None:
+ return None
+
+ if "http-simple-upload-server" in pim_instance.services:
+ pim_file_service = Client(
+ token="token",
+ url=pim_instance.services["http-simple-upload-server"].uri,
+ headers=pim_instance.services["http-simple-upload-server"].headers,
+ )
+ return pim_file_service
+ return None
+
+
+def populate_service_host_port( # pragma: no cover
+ pim_instance: Any, service_host_port: Dict[str, Tuple[str, int]], webui: bool = False
+) -> Dict[str, Tuple[str, int]]:
+ """Populate the service host port dictionary with the services available in the PIM instance.
+
+ Parameters
+ ----------
+ pim_instance:
+        the PIM instance to get the services from.
+ service_host_port: dict
+ the dictionary to be updated with the services from the PIM instance
+ webui: bool
+        if True, also retrieve the webUI service
+
+ Returns
+ -------
+ service_host_port: dict
+ the dictionary updated with the services from the PIM instance
+ """
+ if not set(("grpc_private", "http", "ws")).issubset(pim_instance.services):
+ raise RuntimeError(
+ "If channel is specified, the PIM instance must have a list of length 3 "
+ + "containing the appropriate service URIs. It does not."
+ )
+ service_host_port["grpc_private"] = get_host_port(pim_instance.services["grpc_private"].uri)
+ service_host_port["http"] = get_host_port(pim_instance.services["http"].uri)
+ service_host_port["ws"] = get_host_port(pim_instance.services["ws"].uri)
+ service_host_port["grpc"] = ("127.0.0.1", -1)
+ if webui:
+ service_host_port["webui"] = get_host_port(pim_instance.services["webui"].uri)
+ return service_host_port
+
+
+def launch_enshell_interface(
+ enshell_grpc_channel: Any, grpc_port: int, timeout: float
+) -> enshell_grpc.EnShellGRPC:
+ """Launch the EnShell gRPC Interface.
+
+ Parameters
+ ----------
+ enshell_grpc_channel:
+        An existing gRPC channel, if one is already available (as in the PIM case)
+ grpc_port: int
+ the gRPC port to connect to
+ timeout: float
+ a timeout to wait for the gRPC connection
+
+ Returns
+ -------
+ enshell: enshell_grpc.EnShellGRPC
+        the EnShell gRPC interface
+ """
+ if enshell_grpc_channel: # pragma: no cover
+ enshell = enshell_grpc.EnShellGRPC() # pragma: no cover
+ enshell.connect_existing_channel(enshell_grpc_channel) # pragma: no cover
+ else:
+ enshell = enshell_grpc.EnShellGRPC(port=grpc_port)
+ time_start = time.time()
+ while time.time() - time_start < timeout: # pragma: no cover
+ if enshell.is_connected():
+ break
+ try:
+ enshell.connect(timeout=timeout)
+ except OSError: # pragma: no cover
+ pass # pragma: no cover
+ return enshell
+
+
+def pull_image(docker_client: "DockerClient", image_name: str) -> None:
+ """Pull the input docker image using the input Docker Client
+
+ Parameters
+ ----------
+ docker_client: DockerClient
+ the current DockerClient to pull the image with
+ image_name: str
+ the image to pull
+ """
+ try:
+ if docker_client is not None: # pragma: no cover
+ docker_client.images.pull(image_name)
+ except Exception:
+ raise RuntimeError(f"Can't pull Docker image: {image_name}")
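Usage sketch for the port and URI helpers above (the hostnames are examples
and the returned port numbers will vary from run to run):

    ports = find_unused_ports(2, avoid=[12345])
    print(ports)  # e.g. [38311, 38312]

    print(get_host_port("https://ensight.example.com/session"))  # ("ensight.example.com", 443)
    print(get_host_port("http://127.0.0.1:8000"))                # ("127.0.0.1", 8000)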
diff --git a/src/ansys/pyensight/core/ensight_grpc.py b/src/ansys/pyensight/core/ensight_grpc.py
index 6ccf9f9d6ef..855770668c2 100644
--- a/src/ansys/pyensight/core/ensight_grpc.py
+++ b/src/ansys/pyensight/core/ensight_grpc.py
@@ -1,432 +1,432 @@
-"""ensight_grpc module
-
-This package defines the EnSightGRPC class which provides a simpler
-interface to the EnSight gRPC interface, including event streams.
-
-"""
-import threading
-from typing import Any, Callable, List, Optional, Tuple, Union
-import uuid
-
-from ansys.api.pyensight.v0 import dynamic_scene_graph_pb2_grpc, ensight_pb2, ensight_pb2_grpc
-import grpc
-
-
-class EnSightGRPC(object):
- """Wrapper around a gRPC connection to an EnSight instance
-
- This class provides an asynchronous interface to the EnSight
- core gRPC interface. It can handle remote event
- streams, providing a much simpler interface to the EnSight
- application. The default is to make a connection to an EnSight
- gRPC server on port 12345 on the loopback host.
-
- Parameters
- ----------
- host: str, optional
- Hostname where there EnSight gRPC server is running.
- port: int, optional
- Port to make the gRPC connection to
- secret_key: str, optional
- Connection secret key
- """
-
- def __init__(self, host: str = "127.0.0.1", port: int = 12345, secret_key: str = ""):
- self._host = host
- self._port = port
- self._channel = None
- self._stub = None
- self._dsg_stub = None
- self._security_token = secret_key
- self._session_name: str = ""
- # Streaming APIs
- # Event (strings)
- self._event_stream = None
- self._event_thread: Optional[threading.Thread] = None
- self._events: List[Any] = list()
- # Callback for events (self._events not used)
- self._event_callback: Optional[Callable] = None
- self._prefix: Optional[str] = None
-
- @property
- def host(self) -> str:
- """The gRPC server (EnSight) hostname"""
- return self._host
-
- def port(self) -> int:
- """The gRPC server (EnSight) port number"""
- return self._port
-
- @property
- def security_token(self) -> str:
- """The gRPC server (EnSight) secret key
-
- EnSight supports a security token in either numeric (-security {int}) or
- string (ENSIGHT_SECURITY_TOKEN environmental variable) form. If EnSight
- is using a security token, all gRPC calls must include this token. This
- call sets the token for all grPC calls made by this class.
- """
- return self._security_token
-
- @security_token.setter
- def security_token(self, name: str) -> None:
- self._security_token = name # pragma: no cover
-
- @property
- def session_name(self) -> str:
- """The gRPC server session name
-
- EnSight gRPC calls can include the session name via 'session_name' metadata.
- A client session may provide a session name via this property.
- """
- return self._session_name
-
- @session_name.setter
- def session_name(self, name: str) -> None:
- self._session_name = name
-
- def shutdown(self, stop_ensight: bool = False, force: bool = False) -> None:
- """Close down the gRPC connection
-
- Disconnect all connections to the gRPC server. If stop_ensight is True, send the
- 'Exit' command to the EnSight gRPC server.
-
- Parameters
- ----------
- stop_ensight: bool, optional
- if True, send an 'Exit' command to the gRPC server.
- force: bool, optional
- if stop_ensight and force are true, stop EnSight aggressively
- """
- if self.is_connected(): # pragma: no cover
- # if requested, send 'Exit'
- if stop_ensight: # pragma: no cover
- # the gRPC ExitRequest is exactly that, a request in some
- # cases the operation needs to be forced
- if force: # pragma: no cover
- try:
- self.command("ensight.exit(0)", do_eval=False)
- except IOError: # pragma: no cover
- # we expect this as the exit can result in the gRPC call failing
- pass # pragma: no cover
- else:
- if self._stub: # pragma: no cover
- _ = self._stub.Exit(
- ensight_pb2.ExitRequest(), metadata=self._metadata()
- ) # pragma: no cover
- # clean up control objects
- self._stub = None
- self._dsg_stub = None
- if self._channel:
- self._channel.close()
- self._channel = None
-
- def is_connected(self) -> bool:
- """Check to see if the gRPC connection is live
-
- Returns
- -------
- True if the connection is active.
- """
- return self._channel is not None
-
- def connect(self, timeout: float = 15.0) -> None:
- """Establish the gRPC connection to EnSight
-
- Attempt to connect to an EnSight gRPC server using the host and port
- established by the constructor. Note on failure, this function just
- returns, but is_connected() will return False.
-
- Parameters
- ----------
- timeout: float
- how long to wait for the connection to timeout
- """
- if self.is_connected():
- return
- # set up the channel
- self._channel = grpc.insecure_channel(
- "{}:{}".format(self._host, self._port),
- options=[
- ("grpc.max_receive_message_length", -1),
- ("grpc.max_send_message_length", -1),
- ("grpc.testing.fixed_reconnect_backoff_ms", 1100),
- ],
- )
- try:
- grpc.channel_ready_future(self._channel).result(timeout=timeout)
- except grpc.FutureTimeoutError: # pragma: no cover
- self._channel = None # pragma: no cover
- return # pragma: no cover
- # hook up the stub interface
- self._stub = ensight_pb2_grpc.EnSightServiceStub(self._channel)
- self._dsg_stub = dynamic_scene_graph_pb2_grpc.DynamicSceneGraphServiceStub(self._channel)
-
- def _metadata(self) -> List[Tuple[bytes, Union[str, bytes]]]:
- """Compute the gRPC stream metadata
-
- Compute the list to be passed to the gRPC calls for things like security
- and the session name.
-
- """
- ret: List[Tuple[bytes, Union[str, bytes]]] = list()
- s: Union[str, bytes]
- if self._security_token: # pragma: no cover
- s = self._security_token
- if type(s) == str: # pragma: no cover
- s = s.encode("utf-8")
- ret.append((b"shared_secret", s))
- if self.session_name: # pragma: no cover
- s = self.session_name.encode("utf-8")
- ret.append((b"session_name", s))
- return ret
-
- def render(
- self,
- width: int = 640,
- height: int = 480,
- aa: int = 1,
- png: bool = True,
- highlighting: bool = False,
- ) -> bytes:
- """Generate a rendering of the current EnSight scene
-
- Render the current scene at a specific size and using a specific number of anti-aliasing
- passes. The return value can be a byte array (width*height*3) bytes or a PNG image.
-
- Parameters
- ----------
- width: int, optional
- width of the image to render
- height: int, optional
- height of the image to render
- aa: int, optional
- number of antialiasing passes to use in generating the image
- png: bool, optional
- if True, the return value is a PNG image bytestream. Otherwise, it is a simple
- bytes object with width*height*3 values.
- highlighting: bool, optional
- if True, selection highlighting will be included in the image.
-
- Returns
- -------
- bytes
- bytes object representation of the rendered image
-
- Raises
- ------
- IOError if the operation fails
- """
- self.connect()
- ret_type = ensight_pb2.RenderRequest.IMAGE_RAW
- if png: # pragma: no cover
- ret_type = ensight_pb2.RenderRequest.IMAGE_PNG
- response: Any
- try:
- if self._stub: # pragma: no cover
- response = self._stub.RenderImage(
- ensight_pb2.RenderRequest(
- type=ret_type,
- image_width=width,
- image_height=height,
- image_aa_passes=aa,
- include_highlighting=highlighting,
- ),
- metadata=self._metadata(),
- )
- except Exception: # pragma: no cover
- raise IOError("gRPC connection dropped") # pragma: no cover
- return response.value
-
- def geometry(self) -> bytes:
- """Return the current scene geometry in glTF format
-
- Package up the geometry currently being viewed in the EnSight session as
- a glTF stream. Return this stream as an array of byte. Note: no
- intermediate files are utilized.
-
- Note: currently there is a limitation of glTF files to 2GB
-
- Returns
- -------
- bytes object representation of the glTF file
-
- Raises
- ------
- IOError if the operation fails
- """
- self.connect()
- response: Any
- try:
- if self._stub: # pragma: no cover
- response = self._stub.GetGeometry(
- ensight_pb2.GeometryRequest(type=ensight_pb2.GeometryRequest.GEOMETRY_GLB),
- metadata=self._metadata(),
- )
- except Exception: # pragma: no cover
- raise IOError("gRPC connection dropped") # pragma: no cover
- return response.value
-
- def command(self, command_string: str, do_eval: bool = True, json: bool = False) -> Any:
- """Send a Python command string to be executed in EnSight
-
- The string will be run or evaluated in the EnSight Python interpreter via the
- EnSightService::RunPython() gRPC all. If an exception or other error occurs, this
- function will throw a RuntimeError. If do_eval is False, the return value will be None,
- otherwise it will be the returned string (eval() will not be performed). If json is True,
- the return value will be a JSON representation of the report execution result.
-
- Parameters
- ----------
- command_string: str
- The string to execute
- do_eval: bool, optional
- If True, a return value will be computed and returned
- json: bool, optional
- If True and do_eval is True, the return value will be a JSON representation of
- the evaluated value.
-
- Returns
- -------
- Any
- None, a string ready for Python eval() or a JSON string.
-
- Raises
- ------
- RuntimeError if the operation fails.
- IOError if the communication fails.
- """
- self.connect()
- flags = ensight_pb2.PythonRequest.EXEC_RETURN_PYTHON
- response: Any
- if json: # pragma: no cover
- flags = ensight_pb2.PythonRequest.EXEC_RETURN_JSON # pragma: no cover
- if not do_eval:
- flags = ensight_pb2.PythonRequest.EXEC_NO_RESULT
- try:
- if self._stub: # pragma: no cover
- response = self._stub.RunPython(
- ensight_pb2.PythonRequest(type=flags, command=command_string),
- metadata=self._metadata(),
- )
- except Exception:
- raise IOError("gRPC connection dropped")
- if response.error < 0: # pragma: no cover
- raise RuntimeError(response.value) # pragma: no cover
- if flags == ensight_pb2.PythonRequest.EXEC_NO_RESULT:
- return None
- # This was moved externally so pre-processing could be performed
- # elif flags == ensight_pb2.PythonRequest.EXEC_RETURN_PYTHON:
- # return eval(response.value)
- return response.value
-
- def prefix(self) -> str:
- """Return the unique prefix for this instance.
-
- Some EnSight gRPC APIs require a unique prefix so that EnSight can handle
- multiple, simultaneous remote connections. This method will generate a GUID-based
- prefix.
-
- Returns
- -------
- str
- A unique (for this session) prefix string of the form: grpc://{uuid}/
- """
- # prefix URIs will have the format: "grpc://{uuid}/{callbackname}?enum={}&uid={}"
- if self._prefix is None:
- self._prefix = "grpc://" + str(uuid.uuid1()) + "/"
- return self._prefix
-
- def event_stream_enable(self, callback: Optional[Callable] = None) -> None:
- """Enable a simple gRPC-based event stream from EnSight
-
- This method makes a EnSightService::GetEventStream() gRPC call into EnSight, returning
- an ensightservice::EventReply stream. The method creates a thread to hold this
- stream open and read new events from it. The thread adds the event strings to
- a list of events stored on this instance. If callback is not None, the object
- will be called with the event string, otherwise they can be retrieved using get_event().
- """
- if self._event_stream is not None: # pragma: no cover
- return # pragma: no cover
- self._event_callback = callback
- self.connect()
- if self._stub: # pragma: no cover
- self._event_stream = self._stub.GetEventStream(
- ensight_pb2.EventStreamRequest(prefix=self.prefix()),
- metadata=self._metadata(),
- )
- self._event_thread = threading.Thread(target=self._poll_events)
- self._event_thread.daemon = True
- self._event_thread.start()
-
- def event_stream_is_enabled(self) -> bool:
- """Check to see if the event stream is enabled
-
- If an event stream has been successfully established via
- event_stream_enable(), then this function returns True.
-
- Returns
- -------
- True if a ensightservice::EventReply steam is active
- """
- return self._event_stream is not None # pragma: no cover
-
- def dynamic_scene_graph_stream(self, client_cmds): # pragma: no cover
- """Open up a dynamic scene graph stream
-
- Make a DynamicSceneGraphService::GetSceneStream() rpc call and return
- a ensightservice::SceneUpdateCommand stream instance.
-
- Parameters
- ----------
- client_cmds
- iterator that produces ensightservice::SceneClientCommand objects
-
- Returns
- -------
- ensightservice::SceneUpdateCommand stream instance
- """
- self.connect()
- return self._dsg_stub.GetSceneStream(client_cmds, metadata=self._metadata())
-
- def get_event(self) -> Optional[str]: # pragma: no cover
- """Retrieve and remove the oldest ensightservice::EventReply string
-
- When any of the event streaming systems is enabled, Python threads will receive the
- event records and store them in this instance in an ordered fashion. This method
- retrieves the oldest ensightservice::EventReply string in the queue.
-
- Returns
- -------
- None or the oldest event string in the queue.
- """
- try:
- return self._events.pop(0)
- except IndexError:
- return None
-
- def _put_event(self, evt: "ensight_pb2.EventReply") -> None:
- """Add an event record to the event queue on this instance
-
- This method is used by threads to make the events they receive available to
- calling applications via get_event().
- """
- if self._event_callback: # pragma: no cover
- self._event_callback(evt.tag)
- return
- self._events.append(evt.tag) # pragma: no cover
-
- def _poll_events(self) -> None:
- """Internal method to handle event streams
-
- This method is called by a Python thread to read events via the established
- ensightservice::EventReply stream.
- """
- try:
- while self._stub is not None: # pragma: no cover
- evt = self._event_stream.next()
- self._put_event(evt)
- except Exception:
- # signal that the gRPC connection has broken
- self._event_stream = None
- self._event_thread = None
+"""ensight_grpc module
+
+This package defines the EnSightGRPC class which provides a simpler
+interface to the EnSight gRPC interface, including event streams.
+
+"""
+import threading
+from typing import Any, Callable, List, Optional, Tuple, Union
+import uuid
+
+from ansys.api.pyensight.v0 import dynamic_scene_graph_pb2_grpc, ensight_pb2, ensight_pb2_grpc
+import grpc
+
+
+class EnSightGRPC(object):
+ """Wrapper around a gRPC connection to an EnSight instance
+
+ This class provides an asynchronous interface to the EnSight
+ core gRPC interface. It can handle remote event
+ streams, providing a much simpler interface to the EnSight
+ application. The default is to make a connection to an EnSight
+ gRPC server on port 12345 on the loopback host.
+
+ Parameters
+ ----------
+ host: str, optional
+        Hostname where the EnSight gRPC server is running.
+ port: int, optional
+ Port to make the gRPC connection to
+ secret_key: str, optional
+ Connection secret key
+ """
+
+ def __init__(self, host: str = "127.0.0.1", port: int = 12345, secret_key: str = ""):
+ self._host = host
+ self._port = port
+ self._channel = None
+ self._stub = None
+ self._dsg_stub = None
+ self._security_token = secret_key
+ self._session_name: str = ""
+ # Streaming APIs
+ # Event (strings)
+ self._event_stream = None
+ self._event_thread: Optional[threading.Thread] = None
+ self._events: List[Any] = list()
+ # Callback for events (self._events not used)
+ self._event_callback: Optional[Callable] = None
+ self._prefix: Optional[str] = None
+
+ @property
+ def host(self) -> str:
+ """The gRPC server (EnSight) hostname"""
+ return self._host
+
+ def port(self) -> int:
+ """The gRPC server (EnSight) port number"""
+ return self._port
+
+ @property
+ def security_token(self) -> str:
+ """The gRPC server (EnSight) secret key
+
+ EnSight supports a security token in either numeric (-security {int}) or
+ string (ENSIGHT_SECURITY_TOKEN environmental variable) form. If EnSight
+ is using a security token, all gRPC calls must include this token. This
+    call sets the token for all gRPC calls made by this class.
+ """
+ return self._security_token
+
+ @security_token.setter
+ def security_token(self, name: str) -> None:
+ self._security_token = name # pragma: no cover
+
+ @property
+ def session_name(self) -> str:
+ """The gRPC server session name
+
+ EnSight gRPC calls can include the session name via 'session_name' metadata.
+ A client session may provide a session name via this property.
+ """
+ return self._session_name
+
+ @session_name.setter
+ def session_name(self, name: str) -> None:
+ self._session_name = name
+
+ def shutdown(self, stop_ensight: bool = False, force: bool = False) -> None:
+ """Close down the gRPC connection
+
+ Disconnect all connections to the gRPC server. If stop_ensight is True, send the
+ 'Exit' command to the EnSight gRPC server.
+
+ Parameters
+ ----------
+ stop_ensight: bool, optional
+ if True, send an 'Exit' command to the gRPC server.
+ force: bool, optional
+ if stop_ensight and force are true, stop EnSight aggressively
+ """
+ if self.is_connected(): # pragma: no cover
+ # if requested, send 'Exit'
+ if stop_ensight: # pragma: no cover
+            # the gRPC ExitRequest is exactly that, a request; in some
+            # cases the operation needs to be forced
+ if force: # pragma: no cover
+ try:
+ self.command("ensight.exit(0)", do_eval=False)
+ except IOError: # pragma: no cover
+ # we expect this as the exit can result in the gRPC call failing
+ pass # pragma: no cover
+ else:
+ if self._stub: # pragma: no cover
+ _ = self._stub.Exit(
+ ensight_pb2.ExitRequest(), metadata=self._metadata()
+ ) # pragma: no cover
+ # clean up control objects
+ self._stub = None
+ self._dsg_stub = None
+ if self._channel:
+ self._channel.close()
+ self._channel = None
+
+ def is_connected(self) -> bool:
+ """Check to see if the gRPC connection is live
+
+ Returns
+ -------
+ True if the connection is active.
+ """
+ return self._channel is not None
+
+ def connect(self, timeout: float = 15.0) -> None:
+ """Establish the gRPC connection to EnSight
+
+ Attempt to connect to an EnSight gRPC server using the host and port
+        established by the constructor. Note that on failure, this function simply
+        returns, but is_connected() will return False.
+
+ Parameters
+ ----------
+ timeout: float
+            how long to wait before the connection attempt times out
+ """
+ if self.is_connected():
+ return
+ # set up the channel
+ self._channel = grpc.insecure_channel(
+ "{}:{}".format(self._host, self._port),
+ options=[
+ ("grpc.max_receive_message_length", -1),
+ ("grpc.max_send_message_length", -1),
+ ("grpc.testing.fixed_reconnect_backoff_ms", 1100),
+ ],
+ )
+ try:
+ grpc.channel_ready_future(self._channel).result(timeout=timeout)
+ except grpc.FutureTimeoutError: # pragma: no cover
+ self._channel = None # pragma: no cover
+ return # pragma: no cover
+ # hook up the stub interface
+ self._stub = ensight_pb2_grpc.EnSightServiceStub(self._channel)
+ self._dsg_stub = dynamic_scene_graph_pb2_grpc.DynamicSceneGraphServiceStub(self._channel)
+
+ def _metadata(self) -> List[Tuple[bytes, Union[str, bytes]]]:
+ """Compute the gRPC stream metadata
+
+ Compute the list to be passed to the gRPC calls for things like security
+ and the session name.
+
+ """
+ ret: List[Tuple[bytes, Union[str, bytes]]] = list()
+ s: Union[str, bytes]
+ if self._security_token: # pragma: no cover
+ s = self._security_token
+ if type(s) == str: # pragma: no cover
+ s = s.encode("utf-8")
+ ret.append((b"shared_secret", s))
+ if self.session_name: # pragma: no cover
+ s = self.session_name.encode("utf-8")
+ ret.append((b"session_name", s))
+ return ret
+
+ def render(
+ self,
+ width: int = 640,
+ height: int = 480,
+ aa: int = 1,
+ png: bool = True,
+ highlighting: bool = False,
+ ) -> bytes:
+ """Generate a rendering of the current EnSight scene
+
+ Render the current scene at a specific size and using a specific number of anti-aliasing
+        passes. The return value is either a raw byte array of width*height*3 bytes or a PNG image.
+
+ Parameters
+ ----------
+ width: int, optional
+ width of the image to render
+ height: int, optional
+ height of the image to render
+ aa: int, optional
+ number of antialiasing passes to use in generating the image
+ png: bool, optional
+ if True, the return value is a PNG image bytestream. Otherwise, it is a simple
+ bytes object with width*height*3 values.
+ highlighting: bool, optional
+ if True, selection highlighting will be included in the image.
+
+ Returns
+ -------
+ bytes
+ bytes object representation of the rendered image
+
+ Raises
+ ------
+ IOError if the operation fails
+ """
+ self.connect()
+ ret_type = ensight_pb2.RenderRequest.IMAGE_RAW
+ if png: # pragma: no cover
+ ret_type = ensight_pb2.RenderRequest.IMAGE_PNG
+ response: Any
+ try:
+ if self._stub: # pragma: no cover
+ response = self._stub.RenderImage(
+ ensight_pb2.RenderRequest(
+ type=ret_type,
+ image_width=width,
+ image_height=height,
+ image_aa_passes=aa,
+ include_highlighting=highlighting,
+ ),
+ metadata=self._metadata(),
+ )
+ except Exception: # pragma: no cover
+ raise IOError("gRPC connection dropped") # pragma: no cover
+ return response.value
+
+ def geometry(self) -> bytes:
+ """Return the current scene geometry in glTF format
+
+ Package up the geometry currently being viewed in the EnSight session as
+        a glTF stream. Return this stream as an array of bytes. Note: no
+ intermediate files are utilized.
+
+        Note: glTF files are currently limited to 2GB
+
+ Returns
+ -------
+ bytes object representation of the glTF file
+
+ Raises
+ ------
+ IOError if the operation fails
+ """
+ self.connect()
+ response: Any
+ try:
+ if self._stub: # pragma: no cover
+ response = self._stub.GetGeometry(
+ ensight_pb2.GeometryRequest(type=ensight_pb2.GeometryRequest.GEOMETRY_GLB),
+ metadata=self._metadata(),
+ )
+ except Exception: # pragma: no cover
+ raise IOError("gRPC connection dropped") # pragma: no cover
+ return response.value
+
+ def command(self, command_string: str, do_eval: bool = True, json: bool = False) -> Any:
+ """Send a Python command string to be executed in EnSight
+
+ The string will be run or evaluated in the EnSight Python interpreter via the
+        EnSightService::RunPython() gRPC call. If an exception or other error occurs, this
+ function will throw a RuntimeError. If do_eval is False, the return value will be None,
+ otherwise it will be the returned string (eval() will not be performed). If json is True,
+ the return value will be a JSON representation of the report execution result.
+
+ Parameters
+ ----------
+ command_string: str
+ The string to execute
+ do_eval: bool, optional
+ If True, a return value will be computed and returned
+ json: bool, optional
+ If True and do_eval is True, the return value will be a JSON representation of
+ the evaluated value.
+
+ Returns
+ -------
+ Any
+ None, a string ready for Python eval() or a JSON string.
+
+ Raises
+ ------
+ RuntimeError if the operation fails.
+ IOError if the communication fails.
+ """
+ self.connect()
+ flags = ensight_pb2.PythonRequest.EXEC_RETURN_PYTHON
+ response: Any
+ if json: # pragma: no cover
+ flags = ensight_pb2.PythonRequest.EXEC_RETURN_JSON # pragma: no cover
+ if not do_eval:
+ flags = ensight_pb2.PythonRequest.EXEC_NO_RESULT
+ try:
+ if self._stub: # pragma: no cover
+ response = self._stub.RunPython(
+ ensight_pb2.PythonRequest(type=flags, command=command_string),
+ metadata=self._metadata(),
+ )
+ except Exception:
+ raise IOError("gRPC connection dropped")
+ if response.error < 0: # pragma: no cover
+ raise RuntimeError(response.value) # pragma: no cover
+ if flags == ensight_pb2.PythonRequest.EXEC_NO_RESULT:
+ return None
+ # This was moved externally so pre-processing could be performed
+ # elif flags == ensight_pb2.PythonRequest.EXEC_RETURN_PYTHON:
+ # return eval(response.value)
+ return response.value
+
+ def prefix(self) -> str:
+ """Return the unique prefix for this instance.
+
+ Some EnSight gRPC APIs require a unique prefix so that EnSight can handle
+ multiple, simultaneous remote connections. This method will generate a GUID-based
+ prefix.
+
+ Returns
+ -------
+ str
+ A unique (for this session) prefix string of the form: grpc://{uuid}/
+ """
+ # prefix URIs will have the format: "grpc://{uuid}/{callbackname}?enum={}&uid={}"
+ if self._prefix is None:
+ self._prefix = "grpc://" + str(uuid.uuid1()) + "/"
+ return self._prefix
+
+ def event_stream_enable(self, callback: Optional[Callable] = None) -> None:
+ """Enable a simple gRPC-based event stream from EnSight
+
+        This method makes an EnSightService::GetEventStream() gRPC call into EnSight, returning
+ an ensightservice::EventReply stream. The method creates a thread to hold this
+ stream open and read new events from it. The thread adds the event strings to
+        a list of events stored on this instance. If callback is not None, the callback
+        will be invoked with each event string; otherwise events can be retrieved using get_event().
+ """
+ if self._event_stream is not None: # pragma: no cover
+ return # pragma: no cover
+ self._event_callback = callback
+ self.connect()
+ if self._stub: # pragma: no cover
+ self._event_stream = self._stub.GetEventStream(
+ ensight_pb2.EventStreamRequest(prefix=self.prefix()),
+ metadata=self._metadata(),
+ )
+ self._event_thread = threading.Thread(target=self._poll_events)
+ self._event_thread.daemon = True
+ self._event_thread.start()
+
+ def event_stream_is_enabled(self) -> bool:
+ """Check to see if the event stream is enabled
+
+ If an event stream has been successfully established via
+ event_stream_enable(), then this function returns True.
+
+ Returns
+ -------
+        True if an ensightservice::EventReply stream is active
+ """
+ return self._event_stream is not None # pragma: no cover
+
+ def dynamic_scene_graph_stream(self, client_cmds): # pragma: no cover
+ """Open up a dynamic scene graph stream
+
+        Make a DynamicSceneGraphService::GetSceneStream() RPC call and return
+        an ensightservice::SceneUpdateCommand stream instance.
+
+ Parameters
+ ----------
+ client_cmds
+ iterator that produces ensightservice::SceneClientCommand objects
+
+ Returns
+ -------
+ ensightservice::SceneUpdateCommand stream instance
+ """
+ self.connect()
+ return self._dsg_stub.GetSceneStream(client_cmds, metadata=self._metadata())
+
+ def get_event(self) -> Optional[str]: # pragma: no cover
+ """Retrieve and remove the oldest ensightservice::EventReply string
+
+ When any of the event streaming systems is enabled, Python threads will receive the
+ event records and store them in this instance in an ordered fashion. This method
+ retrieves the oldest ensightservice::EventReply string in the queue.
+
+ Returns
+ -------
+ None or the oldest event string in the queue.
+ """
+ try:
+ return self._events.pop(0)
+ except IndexError:
+ return None
+
+ def _put_event(self, evt: "ensight_pb2.EventReply") -> None:
+ """Add an event record to the event queue on this instance
+
+ This method is used by threads to make the events they receive available to
+ calling applications via get_event().
+ """
+ if self._event_callback: # pragma: no cover
+ self._event_callback(evt.tag)
+ return
+ self._events.append(evt.tag) # pragma: no cover
+
+ def _poll_events(self) -> None:
+ """Internal method to handle event streams
+
+ This method is called by a Python thread to read events via the established
+ ensightservice::EventReply stream.
+ """
+ try:
+ while self._stub is not None: # pragma: no cover
+ evt = self._event_stream.next()
+ self._put_event(evt)
+ except Exception:
+ # signal that the gRPC connection has broken
+ self._event_stream = None
+ self._event_thread = None
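A minimal end-to-end sketch of the class above (assumes an EnSight gRPC
server is already listening on the default loopback port; the port and
secret key depend on how EnSight was launched):

    link = EnSightGRPC(host="127.0.0.1", port=12345)
    link.connect(timeout=15.0)
    if link.is_connected():
        # command() returns a string ready for Python eval()
        print(link.command("1 + 2"))  # -> "3"
        png_bytes = link.render(width=800, height=600, aa=4, png=True)
        with open("scene.png", "wb") as f:
            f.write(png_bytes)
        link.shutdown(stop_ensight=False)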
diff --git a/src/ansys/pyensight/core/libuserd.py b/src/ansys/pyensight/core/libuserd.py
index 92311bfaf32..7fcc55ff239 100644
--- a/src/ansys/pyensight/core/libuserd.py
+++ b/src/ansys/pyensight/core/libuserd.py
@@ -1,1953 +1,1953 @@
-"""
-The ``libuserd`` module allows PyEnSight to directly access EnSight
-user-defined readers (USERD). Any file format for which EnSight
-uses a USERD interface can be read using this API.
-
-Examples
---------
-
->>> from ansys.pyensight.core import libuserd
->>> userd = libuserd.LibUserd()
->>> userd.initialize()
->>> print(userd.library_version())
->>> datafile = "/example/data/CFX/Axial_001.res"
->>> readers = userd.query_format(datafile)
->>> data = readers[0].read_dataset(datafile)
->>> print(data.parts())
->>> print(data.variables())
->>> userd.shutdown()
-
-"""
-import enum
-import logging
-import os
-import platform
-import shutil
-import subprocess
-import tempfile
-import time
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
-import uuid
-import warnings
-
-from ansys.api.pyensight.v0 import libuserd_pb2, libuserd_pb2_grpc
-from ansys.pyensight.core.common import (
- find_unused_ports,
- get_file_service,
- launch_enshell_interface,
- populate_service_host_port,
- pull_image,
-)
-import grpc
-import numpy
-import psutil
-import requests
-
-try:
- import docker
-except ModuleNotFoundError: # pragma: no cover
- raise RuntimeError("The docker module must be installed for DockerLauncher")
-except Exception: # pragma: no cover
- raise RuntimeError("Cannot initialize Docker")
-
-
-if TYPE_CHECKING:
- from docker import DockerClient
- from docker.models.containers import Container
- from enshell_grpc import EnShellGRPC
-
-# This code is currently in development/beta state
-warnings.warn(
- "The libuserd interface/API is still under active development and should be considered beta.",
- stacklevel=2,
-)
-
-
-def _build_enum(name: str, pb_enum: Any, flag: bool = False) -> Union[enum.IntEnum, enum.IntFlag]:
- values = {}
- for v in pb_enum:
- values[v[0]] = v[1]
- if flag:
- return enum.IntFlag(name, values)
- return enum.IntEnum(name, values)
-
-
-ErrorCodes = _build_enum("ErrorCodes", libuserd_pb2.ErrorCodes.items())
-ElementType = _build_enum("ElementType", libuserd_pb2.ElementType.items())
-VariableLocation = _build_enum("VariableLocation", libuserd_pb2.VariableLocation.items())
-VariableType = _build_enum("VariableType", libuserd_pb2.VariableType.items())
-PartHints = _build_enum("PartHints", libuserd_pb2.PartHints.items(), flag=True)
-
-
-class LibUserdError(Exception):
- """
- This class is an exception object raised from the libuserd
- library itself (not the gRPC remote interface). The associated
- numeric LibUserd.ErrorCode is available via the 'code' attribute.
-
- Parameters
- ----------
- msg : str
- The message text to be included in the exception.
-
- Attributes
- ----------
- code : int
- The LibUserd ErrorCodes enum value for this error.
- """
-
- def __init__(self, msg) -> None:
- super(LibUserdError, self).__init__(msg)
- self._code = libuserd_pb2.ErrorCodes.UNKNOWN
- if msg.startswith("LibUserd("):
- try:
- self._code = int(msg[len("LibUserd(") :].split(")")[0])
- except Exception:
- pass
-
- @property
- def code(self) -> int:
- """The numeric error code: LibUserd.ErrorCodes"""
- return self._code
-
-
-class Query(object):
- """
- The class represents a reader "query" instance. It includes
- the query name as well as the preferred titles. The ``data``
- method may be used to access the X,Y plot values.
-
- Parameters
- ----------
- userd
- The LibUserd instance this query is associated with.
- pb
- The protobuffer that represents this object.
-
- Attributes
- ----------
- id : int
- The id of this query.
- name : str
- The name of this query.
- x_title : str
- String to use as the x-axis title.
- y_title : str
- String to use as the y-axis title.
- metadata : Dict[str, str]
- The metadata for this query.
- """
-
- def __init__(self, userd: "LibUserd", pb: libuserd_pb2.QueryInfo) -> None:
- self._userd = userd
- self.id = pb.id
- self.name = pb.name
- self.x_title = pb.xTitle
- self.y_title = pb.yTitle
- self.metadata = {}
- for key in pb.metadata.keys():
- self.metadata[key] = pb.metadata[key]
-
- def __str__(self) -> str:
- return f"Query id: {self.id}, name: '{self.name}'"
-
- def __repr__(self):
- return f"<{self.__class__.__name__} object, id: {self.id}, name: '{self.name}'>"
-
- def data(self) -> List["numpy.array"]:
- """
- Get the X,Y values for this query.
-
- Returns
- -------
- List[numpy.array]
- A list of two numpy arrays [X, Y].
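-
-        Examples
-        --------
-        A usage sketch; assumes ``reader`` is an open ``Reader`` instance.
-
-        >>> query = reader.queries()[0]
-        >>> x, y = query.data()
-        >>> print(query.x_title, x)
-        >>> print(query.y_title, y)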
- """
- self._userd.connect_check()
- pb = libuserd_pb2.Query_dataRequest()
- try:
- reply = self._userd.stub.Query_data(pb, metadata=self._userd.metadata())
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- return [numpy.array(reply.x), numpy.array(reply.y)]
-
-
-class Variable(object):
- """
- The class represents a reader "variable" instance. It includes
-    information about the variable, including its type (vector, scalar, etc.),
-    location (nodal, elemental, etc.), name, and units.
-
- Parameters
- ----------
- userd
-        The LibUserd instance this variable is associated with.
- pb
- The protobuffer that represents this object.
-
- Attributes
- ----------
- id : int
- The id of this variable.
- name : str
- The name of this variable.
- unitLabel : str
- The unit label of this variable, "Pa" for example.
- unitDims : str
- The dimensions of this variable, "L/S" for distance per second.
- location : "VariableLocation"
- The location of this variable.
- type : "VariableType"
- The type of this variable.
- timeVarying : bool
- True if the variable is time-varying.
- isComplex : bool
- True if the variable is complex.
- numOfComponents : int
- The number of components of this variable. A scalar is 1 and
- a vector is 3.
- metadata : Dict[str, str]
-        The metadata for this variable.
- """
-
- def __init__(self, userd: "LibUserd", pb: libuserd_pb2.VariableInfo) -> None:
- self._userd = userd
- self.id = pb.id
- self.name = pb.name
- self.unitLabel = pb.unitLabel
- self.unitDims = pb.unitDims
- self.location = VariableLocation(pb.varLocation) # type: ignore
- self.type = VariableType(pb.type) # type: ignore
- self.timeVarying = pb.timeVarying
- self.isComplex = pb.isComplex
- self.interleaveFlag = pb.interleaveFlag
- self.numOfComponents = pb.numOfComponents
- self.metadata = {}
- for key in pb.metadata.keys():
- self.metadata[key] = pb.metadata[key]
-
- def __str__(self) -> str:
- return f"Variable id: {self.id}, name: '{self.name}', type: {self.type.name}, location: {self.location.name}"
-
- def __repr__(self):
- return f"<{self.__class__.__name__} object, id: {self.id}, name: '{self.name}'>"
-
-
-class Part(object):
- """
- This class represents the EnSight notion of a part. A part is a single mesh consisting
- of a nodal array along with a collection of element specifications. Methods are provided
- to access the nodes and connectivity as well as any variables that might be defined
- on the nodes or elements of this mesh.
-
- Parameters
- ----------
- userd
-        The LibUserd instance this part is associated with.
- pb
- The protobuffer that represents this object.
-
- Attributes
- ----------
- id : int
- The id of this part.
- name : str
- The name of this part.
- reader_id : int
- The id of the Reader this part is associated with.
- hints : int
- See: `PartHints`.
- reader_api_version : float
- The API version number of the USERD reader this part was read with.
- metadata : Dict[str, str]
-        The metadata for this part.
- """
-
- def __init__(self, userd: "LibUserd", pb: libuserd_pb2.PartInfo):
- self._userd = userd
- self.index = pb.index
- self.id = pb.id
- self.name = pb.name
- self.reader_id = pb.reader_id
- self.hints = pb.hints
- self.reader_api_version = pb.reader_api_version
- self.metadata = {}
- for key in pb.metadata.keys():
- self.metadata[key] = pb.metadata[key]
-
- def __str__(self):
- return f"Part id: {self.id}, name: '{self.name}'"
-
- def __repr__(self):
- return f"<{self.__class__.__name__} object, id: {self.id}, name: '{self.name}'>"
-
- def nodes(self) -> "numpy.array":
- """
- Return the vertex array for the part.
-
- Returns
- -------
- numpy.array
-            A numpy array of packed values: x,y,z,x,y,z, ...
-
- Examples
- --------
-
- >>> part = reader.parts()[0]
- >>> nodes = part.nodes()
- >>> nodes.shape = (len(nodes)//3, 3)
- >>> print(nodes)
-
- """
- self._userd.connect_check()
- pb = libuserd_pb2.Part_nodesRequest()
- pb.part_id = self.id
- try:
- stream = self._userd.stub.Part_nodes(pb, metadata=self._userd.metadata())
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- nodes = numpy.empty(0, dtype=numpy.float32)
- for chunk in stream:
- if len(nodes) < chunk.total_size:
- nodes = numpy.empty(chunk.total_size, dtype=numpy.float32)
- offset = chunk.offset
- values = numpy.array(chunk.xyz)
- nodes[offset : offset + len(values)] = values
- return nodes
-
- def num_elements(self) -> dict:
- """
- Get the number of elements of a given type in the current part.
-
- Returns
- -------
- dict
- A dictionary with keys being the element type and the values being the number of
- such elements. Element types with zero elements are not included in the dictionary.
-
- Examples
- --------
-
- >>> part = reader.parts()[0]
-        >>> elements = part.num_elements()
- >>> for etype, count in elements.items():
- ... print(libuserd.ElementType(etype).name, count)
-
- """
- self._userd.connect_check()
- pb = libuserd_pb2.Part_num_elementsRequest()
- pb.part_id = self.id
- try:
- reply = self._userd.stub.Part_num_elements(pb, metadata=self._userd.metadata())
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- elements = {}
- for key in reply.elementCount.keys():
- if reply.elementCount[key] > 0:
- elements[key] = reply.elementCount[key]
- return elements
-
- def element_conn(self, elem_type: int) -> "numpy.array":
- """
- For "zoo" element types, return the part element connectivity for the specified
- element type.
-
- Parameters
- ----------
- elem_type : int
- The element type. All but NFACED and NSIDED element types are allowed.
-
- Returns
- -------
- numpy.array
- A numpy array of the node indices.
-
- Examples
- --------
-
- >>> part = reader.parts()[0]
- >>> conn = part.element_conn(libuserd.ElementType.HEX08)
- >>> nodes_per_elem = libuserd_instance.nodes_per_element(libuserd.ElementType.HEX08)
- >>> conn.shape = (len(conn)//nodes_per_elem, nodes_per_elem)
- >>> for element in conn:
- ... print(element)
-
- """
- if elem_type >= ElementType.NSIDED: # type: ignore
- raise RuntimeError(f"Element type {elem_type} is not valid for this call")
- pb = libuserd_pb2.Part_element_connRequest()
- pb.part_id = self.id
- pb.elemType = elem_type
- try:
- stream = self._userd.stub.Part_element_conn(pb, metadata=self._userd.metadata())
- conn = numpy.empty(0, dtype=numpy.uint32)
- for chunk in stream:
- if len(conn) < chunk.total_size:
- conn = numpy.empty(chunk.total_size, dtype=numpy.uint32)
- offset = chunk.offset
- values = numpy.array(chunk.connectivity)
- conn[offset : offset + len(values)] = values
- except grpc.RpcError as e:
- error = self._userd.libuserd_exception(e)
- # if we get an "UNKNOWN" error, then return an empty array
- if isinstance(error, LibUserdError):
- if error.code == ErrorCodes.UNKNOWN: # type: ignore
- return numpy.empty(0, dtype=numpy.uint32)
- raise error
- return conn
-
- def element_conn_nsided(self, elem_type: int) -> List["numpy.array"]:
- """
- For an N-Sided element type (regular or ghost), return the connectivity information
- for the elements of that type in this part at this timestep.
-
- Two arrays are returned in a list:
-
-        - num_nodes_per_element : one number per element that represents the number of nodes in that element
- - nodes : the actual node indices
-
- Arrays are packed sequentially. Walking the elements sequentially, if the number of
- nodes for an element is 4, then there are 4 entries added to the nodes array
- for that element.
-
- Parameters
- ----------
- elem_type: int
- NSIDED or NSIDED_GHOST.
-
- Returns
- -------
- List[numpy.array]
- Two numpy arrays: num_nodes_per_element, nodes
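-
-        Examples
-        --------
-        A sketch of walking the packed arrays; assumes ``part`` is a ``Part``
-        instance that contains N-sided elements.
-
-        >>> num_nodes, nodes = part.element_conn_nsided(libuserd.ElementType.NSIDED)
-        >>> offset = 0
-        >>> for count in num_nodes:
-        ...     print(nodes[offset : offset + count])
-        ...     offset += count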
- """
- self._userd.connect_check()
- pb = libuserd_pb2.Part_element_conn_nsidedRequest()
- pb.part_id = self.id
- pb.elemType = elem_type
- try:
- stream = self._userd.stub.Part_element_conn_nsided(pb, metadata=self._userd.metadata())
- nodes = numpy.empty(0, dtype=numpy.uint32)
- indices = numpy.empty(0, dtype=numpy.uint32)
- for chunk in stream:
- if len(nodes) < chunk.nodes_total_size:
- nodes = numpy.empty(chunk.nodes_total_size, dtype=numpy.uint32)
- if len(indices) < chunk.indices_total_size:
- indices = numpy.empty(chunk.indices_total_size, dtype=numpy.uint32)
- if len(chunk.nodesPerPolygon):
- offset = chunk.nodes_offset
- values = numpy.array(chunk.nodesPerPolygon)
- nodes[offset : offset + len(values)] = values
- if len(chunk.nodeIndices):
- offset = chunk.indices_offset
- values = numpy.array(chunk.nodeIndices)
- indices[offset : offset + len(values)] = values
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- return [nodes, indices]
-
- def element_conn_nfaced(self, elem_type: int) -> List["numpy.array"]:
- """
- For an N-Faced element type (regular or ghost), return the connectivity information
- for the elements of that type in this part at this timestep.
-
- Three arrays are returned in a list:
-
-        - num_faces_per_element : one number per element that represents the number of faces in that element
- - num_nodes_per_face : for each face, the number of nodes in the face.
- - face_nodes : the actual node indices
-
- All arrays are packed sequentially. Walking the elements sequentially, if the number of
- faces for an element is 4, then there are 4 entries added to the num_nodes_per_face array
- for that element. Likewise, the nodes for each face are appended in order to the
- face_nodes array.
-
- Parameters
- ----------
- elem_type: int
- NFACED or NFACED_GHOST.
-
- Returns
- -------
- List[numpy.array]
- Three numpy arrays: num_faces_per_element, num_nodes_per_face, face_nodes
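-
-        Examples
-        --------
-        A sketch of walking the packed arrays; assumes ``part`` is a ``Part``
-        instance that contains N-faced elements.
-
-        >>> faces, npf, nodes = part.element_conn_nfaced(libuserd.ElementType.NFACED)
-        >>> face_idx, node_idx = 0, 0
-        >>> for num_faces in faces:
-        ...     for count in npf[face_idx : face_idx + num_faces]:
-        ...         print(nodes[node_idx : node_idx + count])
-        ...         node_idx += count
-        ...     face_idx += num_faces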
- """
- self._userd.connect_check()
- pb = libuserd_pb2.Part_element_conn_nfacedRequest()
- pb.part_id = self.id
- pb.elemType = elem_type
- try:
- stream = self._userd.stub.Part_element_conn_nfaced(pb, metadata=self._userd.metadata())
- face = numpy.empty(0, dtype=numpy.uint32)
- npf = numpy.empty(0, dtype=numpy.uint32)
- nodes = numpy.empty(0, dtype=numpy.uint32)
- for chunk in stream:
- if len(face) < chunk.face_total_size:
- face = numpy.empty(chunk.face_total_size, dtype=numpy.uint32)
- if len(npf) < chunk.npf_total_size:
- npf = numpy.empty(chunk.npf_total_size, dtype=numpy.uint32)
- if len(nodes) < chunk.nodes_total_size:
- nodes = numpy.empty(chunk.nodes_total_size, dtype=numpy.uint32)
- if len(chunk.facesPerElement):
- offset = chunk.face_offset
- values = numpy.array(chunk.facesPerElement)
- face[offset : offset + len(values)] = values
- if len(chunk.nodesPerFace):
- offset = chunk.npf_offset
- values = numpy.array(chunk.nodesPerFace)
- npf[offset : offset + len(values)] = values
- if len(chunk.nodeIndices):
- offset = chunk.nodes_offset
- values = numpy.array(chunk.nodeIndices)
- nodes[offset : offset + len(values)] = values
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- return [face, npf, nodes]
-
- def variable_values(
- self, variable: "Variable", elem_type: int = 0, imaginary: bool = False, component: int = 0
- ) -> "numpy.array":
- """
- Return a numpy array containing the value(s) of a variable. If the variable is a
- part variable, a single float value is returned. If the variable is a nodal variable,
- the resulting numpy array will have the same number of values as there are nodes.
- If the variable is elemental, the `elem_type` selects the block of elements to return
- the variable values for (`elem_type` is ignored for other variable types).
-
- Parameters
- ----------
- variable : Variable
- The variable to return the values for.
- elem_type : int
- Used only if the variable location is elemental, this keyword selects the element
- type to return the variable values for.
- imaginary : bool
- If the variable is of type complex, setting this to True will select the imaginary
- portion of the data.
- component : int
- Select the channel for a multivalued variable type. For example, if the variable
- is a vector, setting component to 1 will select the 'Y' component.
-
- Returns
- -------
- numpy.array
- A numpy array or a single scalar float.
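-
-        Examples
-        --------
-        A sketch for a nodal scalar variable; assumes ``reader`` is an open
-        ``Reader`` instance.
-
-        >>> var = reader.variables()[0]
-        >>> part = reader.parts()[0]
-        >>> values = part.variable_values(var)
-        >>> print(var.name, values.min(), values.max())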
- """
- self._userd.connect_check()
- pb = libuserd_pb2.Part_variable_valuesRequest()
- pb.part_id = self.id
- pb.var_id = variable.id
- pb.elemType = elem_type
- pb.varComponent = component
- pb.complex = imaginary
- try:
- stream = self._userd.stub.Part_variable_values(pb, metadata=self._userd.metadata())
- v = numpy.empty(0, dtype=numpy.float32)
- for chunk in stream:
- if len(v) < chunk.total_size:
- v = numpy.empty(chunk.total_size, dtype=numpy.float32)
- offset = chunk.offset
- values = numpy.array(chunk.varValues)
- v[offset : offset + len(values)] = values
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- return v
-
- def rigid_body_transform(self) -> dict:
- """
- Return the rigid body transform for this part at the current timestep. The
- returned dictionary includes the following fields:
-
- - "translation" : Translation 3 floats x,y,z
- - "euler_value" : Euler values 4 floats e0,e1,e2,e3
- - "center_of_gravity" : Center of transform 3 floats x,y,z
- - "rotation_order" : The order rotations are applied 1 float
- - "rotation_angles" : The rotations in radians 3 floats rx,ry,rz
-
- Returns
- -------
- dict
- The transform dictionary.
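-
-        Examples
-        --------
-        A usage sketch; assumes ``part`` is a ``Part`` instance from an
-        open reader.
-
-        >>> xform = part.rigid_body_transform()
-        >>> print(xform["translation"], xform["rotation_angles"])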
- """
- self._userd.connect_check()
- pb = libuserd_pb2.Part_rigid_body_transformRequest()
- pb.part_id = self.id
- try:
- reply = self._userd.stub.Part_rigid_body_transform(pb, metadata=self._userd.metadata())
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- out = {
- "translation": numpy.array(reply.transform.translation),
- "euler_value": numpy.array(reply.transform.euler_value),
- "center_of_gravity": numpy.array(reply.transform.center_of_gravity),
- "rotation_order": reply.transform.rotation_order,
- "rotation_angles": numpy.array(reply.transform.rotation_angles),
- }
- return out
-
-
-class Reader(object):
- """
-    This class represents an instance of a user-defined reader that is actively reading a
- dataset.
-
- Parameters
- ----------
- userd
-        The LibUserd instance this reader is associated with.
- pb
- The protobuffer that represents this object.
-
- Attributes
- ----------
- unit_system : str
- The units system provided by the dataset.
- metadata : Dict[str, str]
-        The metadata for this reader.
-
- Notes
- -----
- There can only be one reader active in a single `LibUserd` instance.
-
- """
-
- def __init__(self, userd: "LibUserd", pb: libuserd_pb2.Reader) -> None:
- self._userd = userd
- self.unit_system = pb.unitSystem
- self.metadata = {}
- for key in pb.metadata.keys():
- self.metadata[key] = pb.metadata[key]
- self.raw_metadata = pb.raw_metadata
- self._timesets: List["numpy.array"] = []
- self._update_timesets()
-
- def _update_timesets(self) -> None:
- """
- To simplify the interface to time, the timesets are all queried and
- cached. Additionally, a "common timeset" is generated that combines
- all the timevalues from all timesets into a single timeset which is
- saved as "timeset 0".
-
- This method reads all the timesets and generates the common timeset.
- """
- if len(self._timesets):
- return
- num_timesets = self.get_number_of_time_sets()
- # The common timeset
- common = set()
- self._timesets = [numpy.array([], dtype="float32")]
- # The other timesets
- for ts in range(1, num_timesets + 1):
- v = self.timevalues(timeset=ts)
- # merge into the common timeset
- common.update(v)
- self._timesets.append(v)
- self._timesets[0] = numpy.array(sorted(list(common)))
-
- def _common_set_step(self, s: int) -> None:
- """
- When the common timeset is used in a ``set_timestep()`` call, this
- method selects the time value from the common timeset and then calls
- ``_common_set_time()`` to change the current simulation time.
-
- Parameters
- ----------
- s : int
- The index (timestep) in the common timeset to change the current time to.
-
- Raises
- ------
- RuntimeError
- If the timestep index is invalid.
-
- """
- try:
- v = self._timesets[0][s]
- self._common_set_time(v)
- except IndexError:
- raise RuntimeError(f"Invalid step number {s}.") from None
-
- def _common_set_time(self, t: float) -> None:
- """
- Change the current time value to the passed time value. This method
- walks all the timesets. It selects the largest time value in each timeset
- that is less than or equal to the specified time value. It then sets
- the time value for each timeset accordingly.
-
- Parameters
- ----------
- t : float
- The time value (in the common timeset) to change the reader simulation time to.
-
- """
- # given the time float from the common timeline,
- # change the timestep in all the timesets to match.
- for timeset in range(1, len(self._timesets)):
- # check for perfect match first (avoids rounding)
- where = numpy.where(self._timesets[timeset] == t)
- if len(where[0]):
- timestep = where[0][0]
- else:
- timestep = numpy.searchsorted(self._timesets[timeset], t)
- timestep = min(timestep, len(self._timesets[timeset]) - 1)
- self.set_timestep(timestep, timeset=timeset)
-
- def parts(self) -> List[Part]:
- """
- Get a list of the parts this reader can access.
-
- Returns
- -------
- List[Part]
- A list of Part objects.
- """
- self._userd.connect_check()
- pb = libuserd_pb2.Reader_partsRequest()
- try:
- parts = self._userd.stub.Reader_parts(pb, metadata=self._userd.metadata())
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- out = []
- for part in parts.partList:
- out.append(Part(self._userd, part))
- return out
-
- def variables(self) -> List[Variable]:
- """
- Get a list of the variables this reader can access.
-
- Returns
- -------
- List[Variable]
- A list of Variable objects.
- """
- self._userd.connect_check()
- pb = libuserd_pb2.Reader_variablesRequest()
- try:
- variables = self._userd.stub.Reader_variables(pb, metadata=self._userd.metadata())
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- out = []
- for variable in variables.variableList:
- out.append(Variable(self._userd, variable))
- return out
-
- def queries(self) -> List[Query]:
- """
- Get a list of the queries this reader can access.
-
- Returns
- -------
- List[Query]
- A list of Query objects.
- """
- self._userd.connect_check()
- pb = libuserd_pb2.Reader_queriesRequest()
- try:
- queries = self._userd.stub.Reader_queries(pb, metadata=self._userd.metadata())
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- out = []
- for query in queries.queryList:
- out.append(Query(self._userd, query))
- return out
-
- def get_number_of_time_sets(self) -> int:
- """
- Get the number of timesets in the dataset.
-
- Returns
- -------
- int
- The number of timesets.
- """
- if len(self._timesets):
- return len(self._timesets) - 1
- self._userd.connect_check()
- pb = libuserd_pb2.Reader_get_number_of_time_setsRequest()
- try:
- reply = self._userd.stub.Reader_get_number_of_time_sets(
- pb, metadata=self._userd.metadata()
- )
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- return reply.numberOfTimeSets
-
-    def timevalues(self, timeset: int = 0) -> "numpy.array":
- """
- Get a list of the time step values in this dataset for the specified timeset.
- The default timeset is ``0`` which is a special, "common" timeset formed by
- merging all the timesets in the data into a single timeset.
-
- Parameters
- ----------
- timeset : int, optional
-            The timeset to query (default is 0)
-
- Returns
- -------
- numpy.array
- The simulation time value floats.
- """
- if timeset == 0:
- try:
- return self._timesets[timeset]
- except IndexError:
- return numpy.array([], dtype="float32")
- self._userd.connect_check()
- pb = libuserd_pb2.Reader_timevaluesRequest()
- pb.timeSetNumber = timeset
- try:
- timevalues = self._userd.stub.Reader_timevalues(pb, metadata=self._userd.metadata())
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- return numpy.array(timevalues.timeValues)
-
- def set_timevalue(self, timevalue: float, timeset: int = 0) -> None:
- """
- Change the current time within the selected timeset to the specified value.
- The default timeset selected is the merged "common" timeset ``0``. If the
- "common" timeset is used, the appropriate time value will be set for
- all timesets by this method.
-
- Parameters
- ----------
- timevalue : float
-            The time value to change the current time to.
- timeset : int, optional
- The timeset to change (default is 0)
-
- Examples
- --------
- >>> from ansys.pyensight.core import libuserd
- >>> import numpy
- >>> s = libuserd.LibUserd()
- >>> s.initialize()
- >>> opt = {'Long names': 0, 'Number of timesteps': 5, 'Number of scalars': 3,
- ... 'Number of spheres': 2, 'Number of cubes': 2}
- >>> d = s.load_data("foo", file_format="Synthetic", reader_options=opt)
- >>> parts = d.parts()
- >>> for t in d.timevalues():
- ... d.set_timevalue(t)
- ... for p in parts:
- ... nodes = p.nodes()
- ... nodes.shape = (len(nodes)//3, 3)
- ... centroid = numpy.average(nodes, 0)
- ... print(f"Time: {t} Part: {p.name} Centroid: {centroid}")
- >>> s.shutdown()
-
- """
- if timeset == 0:
- self._common_set_time(timevalue)
- return
- self._userd.connect_check()
- pb = libuserd_pb2.Reader_set_timevalueRequest()
- pb.timesetNumber = timeset
- pb.timeValue = timevalue
- try:
- _ = self._userd.stub.Reader_set_timevalue(pb, metadata=self._userd.metadata())
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
-
- def set_timestep(self, timestep: int, timeset: int = 0) -> None:
- """
- Change the current time to the specified timestep. This call is the same as:
- ``reader.set_timevalue(reader.timevalues()[timestep])``.
- The default timeset selected is the merged "common" timeset ``0``. If the
- "common" timeset is used, the appropriate time value will be set for
- all timesets by this method.
-
- Parameters
- ----------
- timestep : int
- The timestep to change to.
- timeset : int, optional
- The timeset to change (default is 0)
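-
-        Examples
-        --------
-        A sketch that steps through the common timeset; assumes ``reader``
-        is an open ``Reader`` instance.
-
-        >>> for step in range(len(reader.timevalues())):
-        ...     reader.set_timestep(step)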
- """
- if timeset == 0:
- self._common_set_step(timestep)
- return
- self._userd.connect_check()
- pb = libuserd_pb2.Reader_set_timestepRequest()
- pb.timeSetNumber = timeset
- pb.timeStep = timestep
- try:
- _ = self._userd.stub.Reader_set_timestep(pb, metadata=self._userd.metadata())
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
-
- def is_geometry_changing(self) -> bool:
- """
-        Check to see if the geometry in this dataset is changing over time.
-
- Returns
- -------
- bool
- True if the geometry is changing, False otherwise.
- """
- self._userd.connect_check()
- pb = libuserd_pb2.Reader_is_geometry_changingRequest()
- try:
- reply = self._userd.stub.Reader_is_geometry_changing(
- pb, metadata=self._userd.metadata()
- )
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- return reply.isGeomChanging
-
- def variable_value(self, variable: "Variable") -> float:
- """
- For any "case" variable (e.g. time), the value of the variable.
-
- Parameters
- ----------
- variable
- The variable to query. Note, this variable location must be on a CASE.
-
- Returns
- -------
- float
- The value of the variable.
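-
-        Examples
-        --------
-        A sketch; assumes ``reader`` is an open ``Reader`` instance whose
-        first variable happens to be case-located.
-
-        >>> var = reader.variables()[0]
-        >>> print(var.name, reader.variable_value(var))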
- """
- self._userd.connect_check()
- pb = libuserd_pb2.Reader_variable_valueRequest()
- pb.variable_id = variable.id
- try:
- reply = self._userd.stub.Reader_variable_value(pb, metadata=self._userd.metadata())
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- return reply.value
-
-
-class ReaderInfo(object):
- """
- This class represents an available reader, before it has been instantiated.
- The read_dataset() function actually tries to open a dataset and returns
- a `Reader` instance that is reading the data.
-
- The class contains a list of options that can control/configure the reader.
- These include "boolean", "option" and "field" options. These include defaults
- supplied by the reader. To use these, change the value or value_index fields
- to the desired values before calling `read_dataset`.
-
- Parameters
- ----------
- userd
-        The LibUserd instance this ``ReaderInfo`` instance is associated with.
- pb
- The protobuffer that represents this object.
-
- Attributes
- ----------
- id : int
- The reader id.
- name : str
- The reader name.
- description : str
- A brief description of the reader and in some cases its operation.
- fileLabel1 : str
- A string appropriate for a "file select" button for the primary filename.
- fileLabel2 : str
- A string appropriate for a "file select" button for the secondary filename.
- opt_booleans : List[dict]
- The boolean user options.
- opt_options : List[dict]
- The option user options suitable for display via an option menu.
- opt_fields : List[dict]
- The field user options suitable for display via a text field.
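-
-    Examples
-    --------
-    A sketch of adjusting an option before reading; assumes ``userd`` is an
-    initialized ``LibUserd`` instance and "Verbose mode" is a hypothetical
-    boolean option exposed by the selected reader.
-
-    >>> reader_info = userd.query_format("/path/to/data.case")[0]
-    >>> for b in reader_info.opt_booleans:
-    ...     if b["name"] == "Verbose mode":  # hypothetical option name
-    ...         b["value"] = True
-    >>> reader = reader_info.read_dataset("/path/to/data.case")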
- """
-
- def __init__(self, userd: "LibUserd", pb: libuserd_pb2.ReaderInfo):
- self._userd = userd
- self.id = pb.id
- self.name = pb.name
- self.description = pb.description
- self.fileLabel1 = pb.fileLabel1
- self.fileLabel2 = pb.fileLabel2
- self.opt_booleans = []
- for b in pb.options.booleans:
- self.opt_booleans.append(dict(name=b.name, value=b.value, default=b.default_value))
- self.opt_options = []
- for o in pb.options.options:
- values = []
- for v in o.values:
- values.append(v)
- self.opt_options.append(
- dict(name=o.name, values=values, value=o.value_index, default=o.default_value_index)
- )
- self.opt_fields = []
- for f in pb.options.fields:
- self.opt_fields.append(dict(name=f.name, value=f.value, default=f.default_value))
-
- def read_dataset(self, file1: str, file2: str = "") -> "Reader":
- """
- Attempt to read some files on disk using this reader and the specified options.
- If successful, return an actual reader instance.
-
- Parameters
- ----------
- file1 : str
- The primary filename (e.g. "foo.cas")
- file2 : str
- An optional secondary filename (e.g. "foo.dat")
-
- Returns
- -------
- Reader
- An instance of the `Reader` class.
- """
- self._userd.connect_check()
- pb = libuserd_pb2.ReaderInfo_read_datasetRequest()
- pb.filename_1 = file1
- if file2:
- pb.filename_2 = file2
- pb.reader_id = self.id
- options = self._get_option_values()
- for b in options["booleans"]:
- pb.option_values_bools.append(b)
- for o in options["options"]:
- pb.option_values_options.append(o)
- for f in options["fields"]:
- pb.option_values_fields.append(f)
- try:
- reader = self._userd.stub.ReaderInfo_read_dataset(pb, metadata=self._userd.metadata())
- except grpc.RpcError as e:
- raise self._userd.libuserd_exception(e)
- return Reader(self._userd, reader.reader)
-
- def _get_option_values(self) -> dict:
- """Extract the current option values from the options dictionaries"""
- out = dict()
- booleans = []
- for b in self.opt_booleans:
- booleans.append(b["value"])
- out["booleans"] = booleans
- options = []
- for o in self.opt_options:
- options.append(o["value"])
- out["options"] = options
- fields = []
- for f in self.opt_fields:
- fields.append(f["value"])
- out["fields"] = fields
- return out
-
- def __str__(self) -> str:
- return f"ReaderInfo id: {self.id}, name: {self.name}, description: {self.description}"
-
- def __repr__(self):
- return f"<{self.__class__.__name__} object, id: {self.id}, name: '{self.name}'>"
-
-
-class LibUserd(object):
- """
- LibUserd is the primary interface to the USERD library. All interaction starts at this object.
-
- Parameters
- ----------
- ansys_installation
- Optional location to search for an Ansys software installation.
-
- Examples
- --------
-
- >>> from ansys.pyensight.core import libuserd
- >>> l = libuserd.LibUserd()
- >>> l.initialize()
- >>> readers = l.query_format(r"D:\data\Axial_001.res")
- >>> data = readers[0].read_dataset(r"D:\data\Axial_001.res")
- >>> part = data.parts[0]
- >>> print(part, part.nodes())
- >>> l.shutdown()
-
- """
-
- def __init__(
- self,
- ansys_installation: str = "",
- use_docker: bool = False,
- data_directory: Optional[str] = None,
- docker_image_name: Optional[str] = None,
- use_dev: bool = False,
- product_version: Optional[str] = None,
- channel: Optional[grpc.Channel] = None,
- pim_instance: Optional[Any] = None,
- timeout: float = 120.0,
- pull_image_if_not_available: bool = False,
- ):
- self._server_pathname: Optional[str] = None
- self._host = "127.0.0.1"
- self._security_token = str(uuid.uuid1())
- self._grpc_port = 0
- self._server_process: Optional[subprocess.Popen] = None
- self._channel: Optional[grpc.Channel] = None
- self._stub = None
- self._security_file: Optional[str] = None
- # Docker attributes
- self._pull_image = pull_image_if_not_available
- self._timeout = timeout
- self._product_version = product_version
- self._data_directory = data_directory
- self._image_name = "ghcr.io/ansys-internal/ensight"
- if use_dev:
- self._image_name = "ghcr.io/ansys-internal/ensight_dev"
- if docker_image_name:
- self._image_name = docker_image_name
- self._docker_client: Optional["DockerClient"] = None
- self._container: Optional["Container"] = None
- self._enshell: Optional["EnShellGRPC"] = None
- self._pim_instance = pim_instance
- self._enshell_grpc_channel: Optional[grpc.Channel] = channel
- self._pim_file_service: Optional[Any] = None
- self._service_host_port: Dict[str, Tuple[str, int]] = {}
- local_launch = True
- if any([use_docker, use_dev, self._pim_instance]):
- local_launch = False
- self._launch_enshell()
- else:
- # find the pathname to the server
- self._server_pathname = self._find_ensight_server_name(
- ansys_installation=ansys_installation
- )
- if self._server_pathname is None:
- raise RuntimeError("Unable to detect an EnSight server installation.")
- # enums
- self._build_enums()
- if local_launch:
- self._local_launch()
- # Build the gRPC connection
- self._connect()
-
- def _local_launch(self) -> None:
- """Launch the gRPC server from a local installation."""
- # have the server save status so we can read it later
- with tempfile.TemporaryDirectory() as tmpdirname:
- self._security_file = os.path.join(tmpdirname, "security.grpc")
-
- # Build the command line
- cmd = [str(self.server_pathname)]
- cmd.extend(["-grpc_server", str(self.grpc_port)])
- cmd.extend(["-security_file", self._security_file])
- env_vars = os.environ.copy()
- if self.security_token:
- env_vars["ENSIGHT_SECURITY_TOKEN"] = self.security_token
- env_vars["ENSIGHT_GRPC_SECURITY_FILE"] = self._security_file
- # start the server
- try:
- self._server_process = subprocess.Popen(
- cmd,
- close_fds=True,
- env=env_vars,
- stderr=subprocess.DEVNULL,
- stdout=subprocess.DEVNULL,
- )
- except Exception as error:
- raise error
-
- start_time = time.time()
- while (self._grpc_port == 0) and (time.time() - start_time < 120.0):
- try:
- # Read the port and security token from the security file
- with open(self._security_file, "r") as f:
- for line in f:
- line = line.strip()
- if line.startswith("grpc_port:"):
- self._grpc_port = int(line[len("grpc_port:") :])
- elif line.startswith("grpc_password:"):
- self._security_token = line[len("grpc_password:") :]
- except (OSError, IOError):
- pass
-
- # Unable to get the grpc port/password
- if self._grpc_port == 0:
- self.shutdown()
- raise RuntimeError(f"Unable to start the gRPC server ({str(self.server_pathname)})")
-
- def _build_enums(self) -> None:
- # retained for backward compatibility
- self.ErrorCodes = ErrorCodes
- self.ElementType = ElementType
- self.VariableLocation = VariableLocation
- self.VariableType = VariableType
- self.PartHints = PartHints
-
- def _pull_docker_image(self) -> None:
- """Pull the docker image if not available"""
- pull_image(self._docker_client, self._image_name)
-
- def _check_if_image_available(self) -> bool:
- """Check if the input docker image is available."""
- if not self._docker_client:
- return False
- filtered_images = self._docker_client.images.list(filters={"reference": self._image_name})
- if len(filtered_images) > 0:
- return True
- return False
-
- def _launch_enshell(self) -> None:
- """Create an enshell entry point and use it to launch a Container."""
- if self._pim_instance:
- self._service_host_port = populate_service_host_port(self._pim_instance, {})
- self._pim_file_service = get_file_service(self._pim_instance)
- self._grpc_port = int(self._service_host_port["grpc_private"][1])
- self._host = self._service_host_port["grpc_private"][0]
- else:
- if not self._data_directory:
- self._data_directory = tempfile.mkdtemp(prefix="pyensight_")
- available = self._check_if_image_available()
- if not available and self._pull_image and not self._pim_instance:
- self._pull_docker_image()
- ports = find_unused_ports(2, avoid=[1999])
- self._service_host_port = {
- "grpc": ("127.0.0.1", ports[0]),
- "grpc_private": ("127.0.0.1", ports[1]),
- }
- self._grpc_port = ports[1]
- if not self._pim_instance:
- self._launch_container()
- self._enshell = launch_enshell_interface(
- self._enshell_grpc_channel, self._service_host_port["grpc"][1], self._timeout
- )
- self._cei_home = self._enshell.cei_home()
- self._ansys_version = self._enshell.ansys_version()
- print("CEI_HOME=", self._cei_home)
- print("Ansys Version=", self._ansys_version)
- grpc_port = self._service_host_port["grpc_private"][1]
- ensight_args = f"-grpc_server {grpc_port}"
- container_env_str = f"ENSIGHT_SECURITY_TOKEN={self._security_token}\n"
- ret = self._enshell.start_ensight_server(ensight_args, container_env_str)
- if ret[0] != 0: # pragma: no cover
- self._stop_container_and_enshell() # pragma: no cover
- raise RuntimeError(
- f"Error starting EnSight Server with args: {ensight_args}"
- ) # pragma: no cover
-
- def _launch_container(self) -> None:
- """Launch a docker container for the input image."""
- self._docker_client = docker.from_env()
- grpc_port = self._service_host_port["grpc"][1]
- private_grpc_port = self._service_host_port["grpc_private"][1]
- ports_to_map = {
- str(self._service_host_port["grpc"][1]) + "/tcp": str(grpc_port),
- str(self._service_host_port["grpc_private"][1]) + "/tcp": str(private_grpc_port),
- }
- enshell_cmd = "-app -v 3 -grpc_server " + str(grpc_port)
- container_env = {
- "ENSIGHT_SECURITY_TOKEN": self.security_token,
- }
- data_volume = {self._data_directory: {"bind": "/data", "mode": "rw"}}
-
- if not self._docker_client:
- raise RuntimeError("Could not startup docker.")
- self._container = self._docker_client.containers.run( # pragma: no cover
- self._image_name,
- command=enshell_cmd,
- volumes=data_volume,
- environment=container_env,
- ports=ports_to_map,
- tty=True,
- detach=True,
- auto_remove=True,
- remove=True,
- )
-
- def _stop_container_and_enshell(self) -> None:
- """Release any additional resources allocated during launching."""
- if self._enshell:
- if self._enshell.is_connected(): # pragma: no cover
- try:
- logging.debug("Stopping EnShell.\n")
- self._enshell.stop_server()
- except Exception: # pragma: no cover
- pass # pragma: no cover
- self._enshell = None
- if self._container:
- try:
- logging.debug("Stopping the Docker Container.\n")
- self._container.stop()
- except Exception:
- pass
- try:
- logging.debug("Removing the Docker Container.\n")
- self._container.remove()
- except Exception:
- pass
- self._container = None
-
- if self._pim_instance is not None:
- logging.debug("Deleting the PIM instance.\n")
- self._pim_instance.delete()
- self._pim_instance = None
-
- @property
- def stub(self):
- """A libuserd_pb2_grpc.LibUSERDServiceStub instance bound to a gRPC connection channel"""
- return self._stub
-
- @property
- def server_pathname(self) -> Optional[str]:
- """The pathanme of the detected EnSight server executable used as the gRPC server"""
- return self._server_pathname
-
- @property
- def security_token(self) -> str:
- """The current gRPC security token"""
- return self._security_token
-
- @property
- def grpc_port(self) -> int:
- """The current gRPC port"""
- return self._grpc_port
-
- def __del__(self) -> None:
- self.shutdown()
-
- @staticmethod
- def _find_ensight_server_name(ansys_installation: str = "") -> Optional[str]:
- """
- Parameters
- ----------
- ansys_installation : str
- Path to the local Ansys installation, including the version
- directory. The default is ``None``, in which case common locations
- are scanned to detect the latest local Ansys installation. The
- ``PYENSIGHT_ANSYS_INSTALLATION`` environmental variable is checked first.
-
- Returns
- -------
- str
- The first valid ensight_server found or None
-
- """
- dirs_to_check = []
- if ansys_installation:
- dirs_to_check.append(ansys_installation)
-
- if "PYENSIGHT_ANSYS_INSTALLATION" in os.environ:
- env_inst = os.environ["PYENSIGHT_ANSYS_INSTALLATION"]
- dirs_to_check.append(env_inst)
- # Note: PYENSIGHT_ANSYS_INSTALLATION is designed for devel builds
- # where there is no CEI directory, but for folks using it in other
- # ways, we'll add that one too, just in case.
- dirs_to_check.append(os.path.join(env_inst, "CEI"))
-
- try:
- import enve
-
- dirs_to_check.append(enve.home())
- except ModuleNotFoundError:
- pass
-
- if "CEI_HOME" in os.environ:
- env_inst = os.environ["CEI_HOME"]
- dirs_to_check.append(env_inst)
-
- # Look for most recent Ansys install
- awp_roots = []
- for env_name in dict(os.environ).keys():
- if env_name.startswith("AWP_ROOT"):
- try:
- version = int(env_name[len("AWP_ROOT") :])
- # this API is new in 2025 R1 distributions
- if version >= 251:
- awp_roots.append(env_name)
- except ValueError:
- pass
- awp_roots.sort(reverse=True)
- for env_name in awp_roots:
- dirs_to_check.append(os.path.join(os.environ[env_name], "CEI"))
-
- # check all the collected locations in order
- app_name = "ensight_server"
- if platform.system() == "Windows":
- app_name += ".bat"
- for install_dir in dirs_to_check:
- launch_file = os.path.join(install_dir, "bin", app_name)
- if os.path.isfile(launch_file):
- return launch_file
- return None
-
- def _is_connected(self) -> bool:
- """Check to see if the gRPC connection is live
-
- Returns
- -------
- bool
- True if the connection is active.
- """
- return self._channel is not None
-
- def _connect(self) -> None:
- """Establish the gRPC connection to EnSight
-
- Attempt to connect to an EnSight gRPC server using the host and port
-        established by the constructor, waiting up to the timeout passed to
-        the constructor. Note, on failure this method simply returns, but
-        ``_is_connected()`` will return False.
- """
- if self._is_connected():
- return
- # set up the channel
-
- self._channel = grpc.insecure_channel(
- "{}:{}".format(self._host, self._grpc_port),
- options=[
- ("grpc.max_receive_message_length", -1),
- ("grpc.max_send_message_length", -1),
- ("grpc.testing.fixed_reconnect_backoff_ms", 1100),
- ],
- )
- try:
- grpc.channel_ready_future(self._channel).result(timeout=self._timeout)
- except grpc.FutureTimeoutError: # pragma: no cover
- self._channel = None # pragma: no cover
- return # pragma: no cover
- # hook up the stub interface
- self._stub = libuserd_pb2_grpc.LibUSERDServiceStub(self._channel)
-
- def metadata(self) -> List[Tuple[bytes, Union[str, bytes]]]:
- """Compute the gRPC stream metadata
-
- Compute the list to be passed to the gRPC calls for things like security
- and the session name.
-
- Returns
- -------
- List[Tuple[bytes, Union[str, bytes]]]
- A list object of the metadata elements needed in a gRPC call to
- satisfy the EnSight server gRPC requirements.
- """
- ret: List[Tuple[bytes, Union[str, bytes]]] = list()
- s: Union[str, bytes]
- if self._security_token: # pragma: no cover
- s = self._security_token
- if type(s) == str: # pragma: no cover
- s = s.encode("utf-8")
- ret.append((b"shared_secret", s))
- return ret
-
- def libuserd_exception(self, e: "grpc.RpcError") -> Exception:
- """
- Given an exception raised as the result of a gRPC call, return either
- the input exception or a LibUserdError exception object to differentiate
- between gRPC issues and libuserd issues.
-
- Parameters
- ----------
- e
- The exception raised by a gRPC call.
-
- Returns
- -------
- Exception
- Either the original exception or a LibUserdError exception instance, depending on
- the original exception message details.
- """
- msg = e.details()
- if msg.startswith("LibUserd("):
- return LibUserdError(msg)
- return e
-
- def _disconnect(self, no_error: bool = False) -> None:
- """Close down the gRPC connection
-
- Disconnect all connections to the gRPC server. Send the shutdown request gRPC command
- to the server first.
-
- Parameters
- ----------
- no_error
- If true, ignore errors resulting from the shutdown operation.
- """
- if not self._is_connected(): # pragma: no cover
- return
- # Note: this is expected to return an error
- try:
- pb = libuserd_pb2.Libuserd_shutdownRequest()
- self._stub.Libuserd_shutdown(pb, metadata=self.metadata()) # type: ignore
- if self._channel:
- self._channel.close()
- except grpc.RpcError as e:
- if not no_error:
- raise self.libuserd_exception(e)
- finally:
- # clean up control objects
- self._stub = None
- self._channel = None
-
- def connect_check(self) -> None:
- """
-        Verify that there is an active gRPC connection established. If not, raise
-        a RuntimeError.
-
- Raises
- ------
- RuntimeError
- If there is no active connection.
- """
- if not self._is_connected():
- raise RuntimeError("gRPC connection not established")
-
- """
- gRPC method bindings
- """
-
- def shutdown(self) -> None:
- """
- Close any active gRPC connection and shut down the EnSight server.
- The object is no longer usable.
- """
- self._disconnect(no_error=True)
- # Just in case, we will try to kill the server directly as well
- if self._server_process:
- if psutil.pid_exists(self._server_process.pid):
- proc = psutil.Process(self._server_process.pid)
- for child in proc.children(recursive=True):
- if psutil.pid_exists(child.pid):
- # This can be a race condition, so it is ok if the child is dead already
- try:
- child.kill()
- except psutil.NoSuchProcess:
- pass
- # Same issue, this process might already be shutting down, so NoSuchProcess is ok.
- try:
- proc.kill()
- except psutil.NoSuchProcess:
- pass
- if self._container:
- self._stop_container_and_enshell()
- self._server_process = None
-
- def ansys_release_string(self) -> str:
- """
- Return the Ansys release for the library.
-
- Returns
- -------
- str
- Return a string like "2025 R1"
- """
- self.connect_check()
- pb = libuserd_pb2.Libuserd_ansys_release_stringRequest()
- try:
- ret = self.stub.Libuserd_ansys_release_string(pb, metadata=self.metadata())
- except grpc.RpcError as e:
- raise self.libuserd_exception(e)
- return ret.result
-
- def ansys_release_number(self) -> int:
- """
- Return the Ansys release number of the library.
-
- Returns
- -------
- int
- A version number like 251 (for "2025 R1")
- """
- self.connect_check()
- pb = libuserd_pb2.Libuserd_ansys_release_numberRequest()
- try:
- ret = self.stub.Libuserd_ansys_release_number(pb, metadata=self.metadata())
- except grpc.RpcError as e:
- raise self.libuserd_exception(e)
- return ret.result
-
- def library_version(self) -> str:
- """
- The library version number. This string is the version of the
- library interface itself. This is not the same as the version
- number of the Ansys release that corresponds to the library.
-
- This number follows semantic versioning rules: "1.0.0" or
- "0.4.3-rc.1" would be examples of valid library_version() strings.
-
- Returns
- -------
- str
- The library interface version number string.
- """
- self.connect_check()
- pb = libuserd_pb2.Libuserd_library_versionRequest()
- try:
- ret = self.stub.Libuserd_library_version(pb, metadata=self.metadata())
- except grpc.RpcError as e:
- raise self.libuserd_exception(e)
- return ret.result
-
- def nodes_per_element(self, element_type: int) -> int:
- """
- For a given element type (e.g. HEX20), return the number of nodes used by the element.
- Note, this is not supported for NSIDED and NFACED element types.
-
- Parameters
- ----------
- element_type
- The element type: ElementType enum value
-
- Returns
- -------
- int
- Number of nodes per element or 0 if elem_type is not a valid zoo element type.
- """
- self.connect_check()
- pb = libuserd_pb2.Libuserd_nodes_per_elementRequest()
- pb.elemType = element_type
- try:
- ret = self.stub.Libuserd_nodes_per_element(pb, metadata=self.metadata())
- except grpc.RpcError as e:
- raise self.libuserd_exception(e)
- return ret.result
-
- def element_is_ghost(self, element_type: int) -> bool:
- """
- For a given element type (e.g. HEX20), determine if the element type should be considered
- a "ghost" element.
-
- Parameters
- ----------
- element_type
- The element type: ElementType enum value
-
- Returns
- -------
- bool
- True if the element is a ghost (or an invalid element type).
- """
- self.connect_check()
- pb = libuserd_pb2.Libuserd_element_is_ghostRequest()
- pb.elemType = element_type
- try:
- ret = self.stub.Libuserd_element_is_ghost(pb, metadata=self.metadata())
- except grpc.RpcError as e:
- raise self.libuserd_exception(e)
- return ret.result
-
- def element_is_zoo(self, element_type: int) -> bool:
- """
- For a given element type (e.g. HEX20), determine if the element type is zoo or not
-
- Parameters
- ----------
- element_type
- The element type: ElementType enum value
-
- Returns
- -------
- bool
-            True if the element is a zoo element and False if it is NSIDED or NFACED.
- """
- self.connect_check()
- pb = libuserd_pb2.Libuserd_element_is_zooRequest()
- pb.elemType = element_type
- try:
- ret = self.stub.Libuserd_element_is_zoo(pb, metadata=self.metadata())
- except grpc.RpcError as e:
- raise self.libuserd_exception(e)
- return ret.result
-
- def element_is_nsided(self, element_type: int) -> bool:
- """
- For a given element type, determine if the element type is n-sided or not
-
- Parameters
- ----------
- element_type
- The element type: ElementType enum value
-
- Returns
- -------
- bool
- True if the element is a NSIDED or NSIDED_GHOST and False otherwise.
- """
- self.connect_check()
- pb = libuserd_pb2.Libuserd_element_is_nsidedRequest()
- pb.elemType = element_type
- try:
- ret = self.stub.Libuserd_element_is_nsided(pb, metadata=self.metadata())
- except grpc.RpcError as e:
- raise self.libuserd_exception(e)
- return ret.result
-
- def element_is_nfaced(self, element_type: int) -> bool:
- """
- For a given element type, determine if the element type is n-faced or not
-
- Parameters
- ----------
- element_type
- The element type: ElementType enum value
-
- Returns
- -------
- bool
- True if the element is a NFACED or NFACED_GHOST and False otherwise.
- """
- self.connect_check()
- pb = libuserd_pb2.Libuserd_element_is_nfacedRequest()
- pb.elemType = element_type
- try:
- ret = self.stub.Libuserd_element_is_nfaced(pb, metadata=self.metadata())
- except grpc.RpcError as e:
- raise self.libuserd_exception(e)
- return ret.result
-
- def number_of_simple_element_types(self) -> int:
- """
- There is a consecutive range of element type enums that are supported by the
-        Part.element_conn() method. This function returns the number of those element
-        types and may be useful in common element type handling code.
-
- Note: The value is effectively int(ElementType.NSIDED).
-
- Returns
- -------
- int
- The number of zoo element types.
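-
-        Examples
-        --------
-        A sketch of generic zoo-element handling; assumes ``userd`` is a
-        connected, initialized ``LibUserd`` instance and that the zoo element
-        enum values form the consecutive range noted above.
-
-        >>> for etype in range(userd.number_of_simple_element_types()):
-        ...     print(libuserd.ElementType(etype).name, userd.nodes_per_element(etype))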
- """
- self.connect_check()
- pb = libuserd_pb2.Libuserd_number_of_simple_element_typesRequest()
- try:
- ret = self.stub.Libuserd_number_of_simple_element_types(pb, metadata=self.metadata())
- except grpc.RpcError as e:
- raise self.libuserd_exception(e)
- return ret.result
-
- def initialize(self) -> None:
- """
- This call initializes the libuserd system. It causes the library to scan for available
- readers and set up any required reduction engine bits. It can only be called once.
- """
- self.connect_check()
- pb = libuserd_pb2.Libuserd_initializeRequest()
- try:
- _ = self.stub.Libuserd_initialize(pb, metadata=self.metadata())
- except grpc.RpcError as e:
- raise self.libuserd_exception(e)
-
- def get_all_readers(self) -> List["ReaderInfo"]:
- """
- Return a list of the readers that are available.
-
- Returns
- -------
- List[ReaderInfo]
- List of all ReaderInfo objects.
- """
- self.connect_check()
- pb = libuserd_pb2.Libuserd_get_all_readersRequest()
- try:
- readers = self.stub.Libuserd_get_all_readers(pb, metadata=self.metadata())
- except grpc.RpcError as e:
- raise self.libuserd_exception(e)
- out = []
- for reader in readers.readerInfo:
- out.append(ReaderInfo(self, reader))
- return out
-
- def query_format(self, name1: str, name2: str = "") -> List["ReaderInfo"]:
- """
- For a given dataset (filename(s)), ask the readers if they should be able to read
- that data.
-
- Parameters
- ----------
- name1
- Primary input filename
-
- name2
- Optional, secondary input filename
-
- Returns
- -------
- List[ReaderInfo]
- List of ReaderInfo objects that might be able to read the dataset
- """
- self.connect_check()
- pb = libuserd_pb2.Libuserd_query_formatRequest()
- pb.name1 = name1
- if name2:
- pb.name2 = name2
- try:
- readers = self.stub.Libuserd_query_format(pb, metadata=self.metadata())
- except grpc.RpcError as e:
- raise self.libuserd_exception(e)
- out = []
- for reader in readers.readerInfo:
- out.append(ReaderInfo(self, reader))
- return out
-
- def load_data(
- self,
- data_file: str,
- result_file: str = "",
- file_format: Optional[str] = None,
- reader_options: Dict[str, Any] = {},
- ) -> "Reader":
- """Use the reader to load a dataset and return an instance
- to the resulting ``Reader`` interface.
-
- Parameters
- ----------
- data_file : str
- Name of the data file to load.
- result_file : str, optional
- Name of the second data file for dual-file datasets.
- file_format : str, optional
- Name of the USERD reader to use. The default is ``None``,
- in which case libuserd selects a reader.
- reader_options : dict, optional
- Dictionary of reader-specific option-value pairs that can be used
-            to customize the reader behavior. The default is ``{}``.
-
- Returns
- -------
- Reader
- Resulting Reader object instance.
-
- Raises
- ------
- RuntimeError
-            If libuserd cannot guess the file format or an error occurs while the
- data is being read.
-
- Examples
- --------
-
- >>> from ansys.pyensight.core import libuserd
- >>> userd = libuserd.LibUserd()
- >>> userd.initialize()
- >>> opt = {'Long names': False, 'Number of timesteps': '10', 'Number of scalars': '3'}
-        >>> data = userd.load_data("foo", file_format="Synthetic", reader_options=opt)
- >>> print(data.parts())
- >>> print(data.variables())
- >>> userd.shutdown()
-
- """
- the_reader: Optional[ReaderInfo] = None
- if file_format:
- for reader in self.get_all_readers():
- if reader.name == file_format:
- the_reader = reader
- break
- if the_reader is None:
- raise RuntimeError(f"The reader '{file_format}' could not be found.")
- else:
- readers = self.query_format(data_file, name2=result_file)
- if len(readers):
- the_reader = readers[0]
- if the_reader is None:
- raise RuntimeError(f"Unable to find a reader for '{data_file}':'{result_file}'.")
- for key, value in reader_options.items():
- for b in the_reader.opt_booleans:
- if key == b["name"]:
- b["value"] = bool(value)
- for o in the_reader.opt_options:
- if key == o["name"]:
- o["value"] = int(value)
- for f in the_reader.opt_fields:
- if key == f["name"]:
- f["value"] = str(value)
- try:
- output = the_reader.read_dataset(data_file, result_file)
- except Exception:
- raise RuntimeError("Unable to open the specified dataset.") from None
-
- return output
-
- @staticmethod
- def _download_files(uri: str, pathname: str, folder: bool = False):
- """Download files from the input uri and save them on the input pathname.
-
- Parameters:
- ----------
-
- uri: str
- The uri to get files from
- pathname: str
- The location were to save the files. It could be either a file or a folder.
- folder: bool
- True if the uri will server files from a directory. In this case,
- pathname will be used as the directory were to save the files.
- """
- if not folder:
- with requests.get(uri, stream=True) as r:
- with open(pathname, "wb") as f:
- shutil.copyfileobj(r.raw, f)
- else:
- with requests.get(uri) as r:
- data = r.json()
- os.makedirs(pathname, exist_ok=True)
- for item in data:
- if item["type"] == "file":
- file_url = item["download_url"]
- filename = os.path.join(pathname, item["name"])
- r = requests.get(file_url, stream=True)
- with open(filename, "wb") as f:
- f.write(r.content)
-
- def file_service(self) -> Optional[Any]:
- """Get the PIM file service object if available."""
- return self._pim_file_service
-
- def download_pyansys_example(
- self,
- filename: str,
- directory: Optional[str] = None,
- root: Optional[str] = None,
- folder: bool = False,
- ) -> str:
- """Download an example dataset from the ansys/example-data repository.
-        The dataset is downloaded local to the EnSight server location, so that
-        it can be read even if running from a container.
-
- Parameters
- ----------
- filename: str
- The filename to download
- directory: str
- The directory to download the filename from
- root: str
-            If set, the download happens from this alternative root location.
- folder: bool
- If set to True, it marks the filename to be a directory rather
- than a single file
-
- Returns
- -------
- pathname: str
- The download location, local to the EnSight server directory.
- If folder is set to True, the download location will be a folder containing
- all the items available in the repository location under that folder.
-
- Examples
- --------
- >>> from ansys.pyensight.core import libuserd
- >>> l = libuserd.LibUserd()
- >>> cas_file = l.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
- >>> dat_file = l.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
- """
- base_uri = "https://github.com/ansys/example-data/raw/master"
- base_api_uri = "https://api.github.com/repos/ansys/example-data/contents"
- if not folder:
- if root is not None:
- base_uri = root
- else:
- base_uri = base_api_uri
- uri = f"{base_uri}/{filename}"
- if directory:
- uri = f"{base_uri}/{directory}/{filename}"
- # Local installs and PIM instances
- download_path = f"{os.getcwd()}/{filename}"
- if self._container and self._data_directory:
- # Docker Image
- download_path = os.path.join(self._data_directory, filename)
- self._download_files(uri, download_path, folder=folder)
- pathname = download_path
- if self._container:
- # Convert local path to Docker mounted volume path
- pathname = f"/data/{filename}"
- return pathname
+"""
+The ``libuserd`` module allows PyEnSight to directly access EnSight
+user-defined readers (USERD). Any file format for which EnSight
+uses a USERD interface can be read using this API.
+
+Examples
+--------
+
+>>> from ansys.pyensight.core import libuserd
+>>> userd = libuserd.LibUserd()
+>>> userd.initialize()
+>>> print(userd.library_version())
+>>> datafile = "/example/data/CFX/Axial_001.res"
+>>> readers = userd.query_format(datafile)
+>>> data = readers[0].read_dataset(datafile)
+>>> print(data.parts())
+>>> print(data.variables())
+>>> userd.shutdown()
+
+"""
+import enum
+import logging
+import os
+import platform
+import shutil
+import subprocess
+import tempfile
+import time
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+import uuid
+import warnings
+
+from ansys.api.pyensight.v0 import libuserd_pb2, libuserd_pb2_grpc
+from ansys.pyensight.core.common import (
+ find_unused_ports,
+ get_file_service,
+ launch_enshell_interface,
+ populate_service_host_port,
+ pull_image,
+)
+import grpc
+import numpy
+import psutil
+import requests
+
+try:
+ import docker
+except ModuleNotFoundError: # pragma: no cover
+ raise RuntimeError("The docker module must be installed for DockerLauncher")
+except Exception: # pragma: no cover
+ raise RuntimeError("Cannot initialize Docker")
+
+
+if TYPE_CHECKING:
+ from docker import DockerClient
+ from docker.models.containers import Container
+ from enshell_grpc import EnShellGRPC
+
+# This code is currently in development/beta state
+warnings.warn(
+ "The libuserd interface/API is still under active development and should be considered beta.",
+ stacklevel=2,
+)
+
+
+def _build_enum(name: str, pb_enum: Any, flag: bool = False) -> Union[enum.IntEnum, enum.IntFlag]:
+ values = {}
+ for v in pb_enum:
+ values[v[0]] = v[1]
+ if flag:
+ return enum.IntFlag(name, values)
+ return enum.IntEnum(name, values)
+
+
+ErrorCodes = _build_enum("ErrorCodes", libuserd_pb2.ErrorCodes.items())
+ElementType = _build_enum("ElementType", libuserd_pb2.ElementType.items())
+VariableLocation = _build_enum("VariableLocation", libuserd_pb2.VariableLocation.items())
+VariableType = _build_enum("VariableType", libuserd_pb2.VariableType.items())
+PartHints = _build_enum("PartHints", libuserd_pb2.PartHints.items(), flag=True)
+
+
+class LibUserdError(Exception):
+ """
+ This class is an exception object raised from the libuserd
+    library itself (not the gRPC remote interface). The associated
+    numeric LibUserd.ErrorCodes value is available via the 'code' attribute.
+
+ Parameters
+ ----------
+ msg : str
+ The message text to be included in the exception.
+
+ Attributes
+ ----------
+ code : int
+ The LibUserd ErrorCodes enum value for this error.
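+
+    Examples
+    --------
+    A minimal sketch of telling a libuserd error apart from a plain gRPC
+    error; assumes ``reader_info`` is a `ReaderInfo` instance.
+
+    >>> try:
+    ...     reader = reader_info.read_dataset("file_that_does_not_exist")
+    ... except libuserd.LibUserdError as e:
+    ...     print("libuserd error code:", e.code)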
+ """
+
+ def __init__(self, msg) -> None:
+ super(LibUserdError, self).__init__(msg)
+ self._code = libuserd_pb2.ErrorCodes.UNKNOWN
+ if msg.startswith("LibUserd("):
+ try:
+ self._code = int(msg[len("LibUserd(") :].split(")")[0])
+ except Exception:
+ pass
+
+ @property
+ def code(self) -> int:
+ """The numeric error code: LibUserd.ErrorCodes"""
+ return self._code
+
+
+class Query(object):
+ """
+ The class represents a reader "query" instance. It includes
+ the query name as well as the preferred titles. The ``data``
+ method may be used to access the X,Y plot values.
+
+ Parameters
+ ----------
+ userd
+ The LibUserd instance this query is associated with.
+ pb
+ The protobuffer that represents this object.
+
+ Attributes
+ ----------
+ id : int
+ The id of this query.
+ name : str
+ The name of this query.
+ x_title : str
+ String to use as the x-axis title.
+ y_title : str
+ String to use as the y-axis title.
+ metadata : Dict[str, str]
+ The metadata for this query.
+ """
+
+ def __init__(self, userd: "LibUserd", pb: libuserd_pb2.QueryInfo) -> None:
+ self._userd = userd
+ self.id = pb.id
+ self.name = pb.name
+ self.x_title = pb.xTitle
+ self.y_title = pb.yTitle
+ self.metadata = {}
+ for key in pb.metadata.keys():
+ self.metadata[key] = pb.metadata[key]
+
+ def __str__(self) -> str:
+ return f"Query id: {self.id}, name: '{self.name}'"
+
+ def __repr__(self):
+ return f"<{self.__class__.__name__} object, id: {self.id}, name: '{self.name}'>"
+
+ def data(self) -> List["numpy.array"]:
+ """
+ Get the X,Y values for this query.
+
+ Returns
+ -------
+ List[numpy.array]
+ A list of two numpy arrays [X, Y].
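+
+        Examples
+        --------
+        A minimal sketch; assumes ``reader`` is an open `Reader` instance
+        with at least one query.
+
+        >>> query = reader.queries()[0]
+        >>> x, y = query.data()
+        >>> print(query.x_title, x)
+        >>> print(query.y_title, y)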
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.Query_dataRequest()
+ try:
+ reply = self._userd.stub.Query_data(pb, metadata=self._userd.metadata())
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ return [numpy.array(reply.x), numpy.array(reply.y)]
+
+
+class Variable(object):
+ """
+ The class represents a reader "variable" instance. It includes
+    information about the variable, including its type (vector, scalar, etc.),
+    location (nodal, elemental, etc.), name, and units.
+
+ Parameters
+ ----------
+ userd
+        The LibUserd instance this variable is associated with.
+ pb
+ The protobuffer that represents this object.
+
+ Attributes
+ ----------
+ id : int
+ The id of this variable.
+ name : str
+ The name of this variable.
+ unitLabel : str
+ The unit label of this variable, "Pa" for example.
+ unitDims : str
+ The dimensions of this variable, "L/S" for distance per second.
+ location : "VariableLocation"
+ The location of this variable.
+ type : "VariableType"
+ The type of this variable.
+ timeVarying : bool
+ True if the variable is time-varying.
+ isComplex : bool
+ True if the variable is complex.
+ numOfComponents : int
+ The number of components of this variable. A scalar is 1 and
+ a vector is 3.
+ metadata : Dict[str, str]
+        The metadata for this variable.
+ """
+
+ def __init__(self, userd: "LibUserd", pb: libuserd_pb2.VariableInfo) -> None:
+ self._userd = userd
+ self.id = pb.id
+ self.name = pb.name
+ self.unitLabel = pb.unitLabel
+ self.unitDims = pb.unitDims
+ self.location = VariableLocation(pb.varLocation) # type: ignore
+ self.type = VariableType(pb.type) # type: ignore
+ self.timeVarying = pb.timeVarying
+ self.isComplex = pb.isComplex
+ self.interleaveFlag = pb.interleaveFlag
+ self.numOfComponents = pb.numOfComponents
+ self.metadata = {}
+ for key in pb.metadata.keys():
+ self.metadata[key] = pb.metadata[key]
+
+ def __str__(self) -> str:
+ return f"Variable id: {self.id}, name: '{self.name}', type: {self.type.name}, location: {self.location.name}"
+
+ def __repr__(self):
+ return f"<{self.__class__.__name__} object, id: {self.id}, name: '{self.name}'>"
+
+
+class Part(object):
+ """
+ This class represents the EnSight notion of a part. A part is a single mesh consisting
+ of a nodal array along with a collection of element specifications. Methods are provided
+ to access the nodes and connectivity as well as any variables that might be defined
+ on the nodes or elements of this mesh.
+
+ Parameters
+ ----------
+ userd
+        The LibUserd instance this part is associated with.
+ pb
+ The protobuffer that represents this object.
+
+ Attributes
+ ----------
+ id : int
+ The id of this part.
+ name : str
+ The name of this part.
+ reader_id : int
+ The id of the Reader this part is associated with.
+ hints : int
+ See: `PartHints`.
+ reader_api_version : float
+ The API version number of the USERD reader this part was read with.
+ metadata : Dict[str, str]
+        The metadata for this part.
+ """
+
+ def __init__(self, userd: "LibUserd", pb: libuserd_pb2.PartInfo):
+ self._userd = userd
+ self.index = pb.index
+ self.id = pb.id
+ self.name = pb.name
+ self.reader_id = pb.reader_id
+ self.hints = pb.hints
+ self.reader_api_version = pb.reader_api_version
+ self.metadata = {}
+ for key in pb.metadata.keys():
+ self.metadata[key] = pb.metadata[key]
+
+ def __str__(self):
+ return f"Part id: {self.id}, name: '{self.name}'"
+
+ def __repr__(self):
+ return f"<{self.__class__.__name__} object, id: {self.id}, name: '{self.name}'>"
+
+ def nodes(self) -> "numpy.array":
+ """
+ Return the vertex array for the part.
+
+ Returns
+ -------
+ numpy.array
+            A numpy array of packed values: x,y,z,x,y,z, ...
+
+ Examples
+ --------
+
+ >>> part = reader.parts()[0]
+ >>> nodes = part.nodes()
+ >>> nodes.shape = (len(nodes)//3, 3)
+ >>> print(nodes)
+
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.Part_nodesRequest()
+ pb.part_id = self.id
+ try:
+ stream = self._userd.stub.Part_nodes(pb, metadata=self._userd.metadata())
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ nodes = numpy.empty(0, dtype=numpy.float32)
+ for chunk in stream:
+ if len(nodes) < chunk.total_size:
+ nodes = numpy.empty(chunk.total_size, dtype=numpy.float32)
+ offset = chunk.offset
+ values = numpy.array(chunk.xyz)
+ nodes[offset : offset + len(values)] = values
+ return nodes
+
+ def num_elements(self) -> dict:
+ """
+ Get the number of elements of a given type in the current part.
+
+ Returns
+ -------
+ dict
+ A dictionary with keys being the element type and the values being the number of
+ such elements. Element types with zero elements are not included in the dictionary.
+
+ Examples
+ --------
+
+ >>> part = reader.parts()[0]
+        >>> elements = part.num_elements()
+ >>> for etype, count in elements.items():
+ ... print(libuserd.ElementType(etype).name, count)
+
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.Part_num_elementsRequest()
+ pb.part_id = self.id
+ try:
+ reply = self._userd.stub.Part_num_elements(pb, metadata=self._userd.metadata())
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ elements = {}
+ for key in reply.elementCount.keys():
+ if reply.elementCount[key] > 0:
+ elements[key] = reply.elementCount[key]
+ return elements
+
+ def element_conn(self, elem_type: int) -> "numpy.array":
+ """
+ For "zoo" element types, return the part element connectivity for the specified
+ element type.
+
+ Parameters
+ ----------
+ elem_type : int
+ The element type. All but NFACED and NSIDED element types are allowed.
+
+ Returns
+ -------
+ numpy.array
+ A numpy array of the node indices.
+
+ Examples
+ --------
+
+ >>> part = reader.parts()[0]
+ >>> conn = part.element_conn(libuserd.ElementType.HEX08)
+ >>> nodes_per_elem = libuserd_instance.nodes_per_element(libuserd.ElementType.HEX08)
+ >>> conn.shape = (len(conn)//nodes_per_elem, nodes_per_elem)
+ >>> for element in conn:
+ ... print(element)
+
+ """
+ if elem_type >= ElementType.NSIDED: # type: ignore
+ raise RuntimeError(f"Element type {elem_type} is not valid for this call")
+ pb = libuserd_pb2.Part_element_connRequest()
+ pb.part_id = self.id
+ pb.elemType = elem_type
+ try:
+ stream = self._userd.stub.Part_element_conn(pb, metadata=self._userd.metadata())
+ conn = numpy.empty(0, dtype=numpy.uint32)
+ for chunk in stream:
+ if len(conn) < chunk.total_size:
+ conn = numpy.empty(chunk.total_size, dtype=numpy.uint32)
+ offset = chunk.offset
+ values = numpy.array(chunk.connectivity)
+ conn[offset : offset + len(values)] = values
+ except grpc.RpcError as e:
+ error = self._userd.libuserd_exception(e)
+ # if we get an "UNKNOWN" error, then return an empty array
+ if isinstance(error, LibUserdError):
+ if error.code == ErrorCodes.UNKNOWN: # type: ignore
+ return numpy.empty(0, dtype=numpy.uint32)
+ raise error
+ return conn
+
+ def element_conn_nsided(self, elem_type: int) -> List["numpy.array"]:
+ """
+ For an N-Sided element type (regular or ghost), return the connectivity information
+ for the elements of that type in this part at this timestep.
+
+ Two arrays are returned in a list:
+
+        - num_nodes_per_element : one number per element that represents the number of nodes in that element
+ - nodes : the actual node indices
+
+ Arrays are packed sequentially. Walking the elements sequentially, if the number of
+ nodes for an element is 4, then there are 4 entries added to the nodes array
+ for that element.
+
+ Parameters
+ ----------
+ elem_type: int
+ NSIDED or NSIDED_GHOST.
+
+ Returns
+ -------
+ List[numpy.array]
+ Two numpy arrays: num_nodes_per_element, nodes
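+
+        Examples
+        --------
+        A minimal sketch of unpacking the packed arrays; assumes the part
+        contains N-sided elements.
+
+        >>> num_nodes, indices = part.element_conn_nsided(libuserd.ElementType.NSIDED)
+        >>> offset = 0
+        >>> for count in num_nodes:
+        ...     element_nodes = indices[offset : offset + count]
+        ...     offset += count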
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.Part_element_conn_nsidedRequest()
+ pb.part_id = self.id
+ pb.elemType = elem_type
+ try:
+ stream = self._userd.stub.Part_element_conn_nsided(pb, metadata=self._userd.metadata())
+ nodes = numpy.empty(0, dtype=numpy.uint32)
+ indices = numpy.empty(0, dtype=numpy.uint32)
+ for chunk in stream:
+ if len(nodes) < chunk.nodes_total_size:
+ nodes = numpy.empty(chunk.nodes_total_size, dtype=numpy.uint32)
+ if len(indices) < chunk.indices_total_size:
+ indices = numpy.empty(chunk.indices_total_size, dtype=numpy.uint32)
+ if len(chunk.nodesPerPolygon):
+ offset = chunk.nodes_offset
+ values = numpy.array(chunk.nodesPerPolygon)
+ nodes[offset : offset + len(values)] = values
+ if len(chunk.nodeIndices):
+ offset = chunk.indices_offset
+ values = numpy.array(chunk.nodeIndices)
+ indices[offset : offset + len(values)] = values
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ return [nodes, indices]
+
+ def element_conn_nfaced(self, elem_type: int) -> List["numpy.array"]:
+ """
+ For an N-Faced element type (regular or ghost), return the connectivity information
+ for the elements of that type in this part at this timestep.
+
+ Three arrays are returned in a list:
+
+        - num_faces_per_element : one number per element that represents the number of faces in that element
+ - num_nodes_per_face : for each face, the number of nodes in the face.
+ - face_nodes : the actual node indices
+
+ All arrays are packed sequentially. Walking the elements sequentially, if the number of
+ faces for an element is 4, then there are 4 entries added to the num_nodes_per_face array
+ for that element. Likewise, the nodes for each face are appended in order to the
+ face_nodes array.
+
+ Parameters
+ ----------
+ elem_type: int
+ NFACED or NFACED_GHOST.
+
+ Returns
+ -------
+ List[numpy.array]
+ Three numpy arrays: num_faces_per_element, num_nodes_per_face, face_nodes
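+
+        Examples
+        --------
+        A minimal sketch of walking the three packed arrays; assumes the part
+        contains N-faced elements.
+
+        >>> faces, npf, nodes = part.element_conn_nfaced(libuserd.ElementType.NFACED)
+        >>> face_idx, node_idx = 0, 0
+        >>> for num_faces in faces:
+        ...     for _ in range(num_faces):
+        ...         count = npf[face_idx]
+        ...         face_nodes = nodes[node_idx : node_idx + count]
+        ...         face_idx += 1
+        ...         node_idx += count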
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.Part_element_conn_nfacedRequest()
+ pb.part_id = self.id
+ pb.elemType = elem_type
+ try:
+ stream = self._userd.stub.Part_element_conn_nfaced(pb, metadata=self._userd.metadata())
+ face = numpy.empty(0, dtype=numpy.uint32)
+ npf = numpy.empty(0, dtype=numpy.uint32)
+ nodes = numpy.empty(0, dtype=numpy.uint32)
+ for chunk in stream:
+ if len(face) < chunk.face_total_size:
+ face = numpy.empty(chunk.face_total_size, dtype=numpy.uint32)
+ if len(npf) < chunk.npf_total_size:
+ npf = numpy.empty(chunk.npf_total_size, dtype=numpy.uint32)
+ if len(nodes) < chunk.nodes_total_size:
+ nodes = numpy.empty(chunk.nodes_total_size, dtype=numpy.uint32)
+ if len(chunk.facesPerElement):
+ offset = chunk.face_offset
+ values = numpy.array(chunk.facesPerElement)
+ face[offset : offset + len(values)] = values
+ if len(chunk.nodesPerFace):
+ offset = chunk.npf_offset
+ values = numpy.array(chunk.nodesPerFace)
+ npf[offset : offset + len(values)] = values
+ if len(chunk.nodeIndices):
+ offset = chunk.nodes_offset
+ values = numpy.array(chunk.nodeIndices)
+ nodes[offset : offset + len(values)] = values
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ return [face, npf, nodes]
+
+ def variable_values(
+ self, variable: "Variable", elem_type: int = 0, imaginary: bool = False, component: int = 0
+ ) -> "numpy.array":
+ """
+ Return a numpy array containing the value(s) of a variable. If the variable is a
+ part variable, a single float value is returned. If the variable is a nodal variable,
+ the resulting numpy array will have the same number of values as there are nodes.
+ If the variable is elemental, the `elem_type` selects the block of elements to return
+ the variable values for (`elem_type` is ignored for other variable types).
+
+ Parameters
+ ----------
+ variable : Variable
+ The variable to return the values for.
+ elem_type : int
+ Used only if the variable location is elemental, this keyword selects the element
+ type to return the variable values for.
+ imaginary : bool
+ If the variable is of type complex, setting this to True will select the imaginary
+ portion of the data.
+ component : int
+ Select the channel for a multivalued variable type. For example, if the variable
+ is a vector, setting component to 1 will select the 'Y' component.
+
+ Returns
+ -------
+ numpy.array
+ A numpy array or a single scalar float.
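+
+        Examples
+        --------
+        A minimal sketch; assumes the first variable is nodal and defined
+        on this part.
+
+        >>> var = reader.variables()[0]
+        >>> v = part.variable_values(var)
+        >>> print(var.name, len(v))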
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.Part_variable_valuesRequest()
+ pb.part_id = self.id
+ pb.var_id = variable.id
+ pb.elemType = elem_type
+ pb.varComponent = component
+ pb.complex = imaginary
+ try:
+ stream = self._userd.stub.Part_variable_values(pb, metadata=self._userd.metadata())
+ v = numpy.empty(0, dtype=numpy.float32)
+ for chunk in stream:
+ if len(v) < chunk.total_size:
+ v = numpy.empty(chunk.total_size, dtype=numpy.float32)
+ offset = chunk.offset
+ values = numpy.array(chunk.varValues)
+ v[offset : offset + len(values)] = values
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ return v
+
+ def rigid_body_transform(self) -> dict:
+ """
+ Return the rigid body transform for this part at the current timestep. The
+ returned dictionary includes the following fields:
+
+ - "translation" : Translation 3 floats x,y,z
+ - "euler_value" : Euler values 4 floats e0,e1,e2,e3
+ - "center_of_gravity" : Center of transform 3 floats x,y,z
+ - "rotation_order" : The order rotations are applied 1 float
+ - "rotation_angles" : The rotations in radians 3 floats rx,ry,rz
+
+ Returns
+ -------
+ dict
+ The transform dictionary.
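+
+        Examples
+        --------
+        A minimal sketch; assumes the reader supplies rigid body transforms.
+
+        >>> xform = part.rigid_body_transform()
+        >>> print("translation:", xform["translation"])
+        >>> print("rotation angles:", xform["rotation_angles"])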
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.Part_rigid_body_transformRequest()
+ pb.part_id = self.id
+ try:
+ reply = self._userd.stub.Part_rigid_body_transform(pb, metadata=self._userd.metadata())
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ out = {
+ "translation": numpy.array(reply.transform.translation),
+ "euler_value": numpy.array(reply.transform.euler_value),
+ "center_of_gravity": numpy.array(reply.transform.center_of_gravity),
+ "rotation_order": reply.transform.rotation_order,
+ "rotation_angles": numpy.array(reply.transform.rotation_angles),
+ }
+ return out
+
+
+class Reader(object):
+ """
+    This class represents an instance of a user-defined reader that is actively reading a
+ dataset.
+
+ Parameters
+ ----------
+ userd
+        The LibUserd instance this reader is associated with.
+ pb
+ The protobuffer that represents this object.
+
+ Attributes
+ ----------
+ unit_system : str
+ The units system provided by the dataset.
+ metadata : Dict[str, str]
+        The metadata for this reader.
+
+ Notes
+ -----
+ There can only be one reader active in a single `LibUserd` instance.
+
+ """
+
+ def __init__(self, userd: "LibUserd", pb: libuserd_pb2.Reader) -> None:
+ self._userd = userd
+ self.unit_system = pb.unitSystem
+ self.metadata = {}
+ for key in pb.metadata.keys():
+ self.metadata[key] = pb.metadata[key]
+ self.raw_metadata = pb.raw_metadata
+ self._timesets: List["numpy.array"] = []
+ self._update_timesets()
+
+ def _update_timesets(self) -> None:
+ """
+ To simplify the interface to time, the timesets are all queried and
+ cached. Additionally, a "common timeset" is generated that combines
+ all the timevalues from all timesets into a single timeset which is
+ saved as "timeset 0".
+
+ This method reads all the timesets and generates the common timeset.
+ """
+ if len(self._timesets):
+ return
+ num_timesets = self.get_number_of_time_sets()
+ # The common timeset
+ common = set()
+ self._timesets = [numpy.array([], dtype="float32")]
+ # The other timesets
+ for ts in range(1, num_timesets + 1):
+ v = self.timevalues(timeset=ts)
+ # merge into the common timeset
+ common.update(v)
+ self._timesets.append(v)
+ self._timesets[0] = numpy.array(sorted(list(common)))
+
+ def _common_set_step(self, s: int) -> None:
+ """
+ When the common timeset is used in a ``set_timestep()`` call, this
+ method selects the time value from the common timeset and then calls
+ ``_common_set_time()`` to change the current simulation time.
+
+ Parameters
+ ----------
+ s : int
+ The index (timestep) in the common timeset to change the current time to.
+
+ Raises
+ ------
+ RuntimeError
+ If the timestep index is invalid.
+
+ """
+ try:
+ v = self._timesets[0][s]
+ self._common_set_time(v)
+ except IndexError:
+ raise RuntimeError(f"Invalid step number {s}.") from None
+
+ def _common_set_time(self, t: float) -> None:
+ """
+ Change the current time value to the passed time value. This method
+ walks all the timesets. It selects the largest time value in each timeset
+ that is less than or equal to the specified time value. It then sets
+ the time value for each timeset accordingly.
+
+ Parameters
+ ----------
+ t : float
+ The time value (in the common timeset) to change the reader simulation time to.
+
+ """
+ # given the time float from the common timeline,
+ # change the timestep in all the timesets to match.
+ for timeset in range(1, len(self._timesets)):
+ # check for perfect match first (avoids rounding)
+ where = numpy.where(self._timesets[timeset] == t)
+ if len(where[0]):
+ timestep = where[0][0]
+ else:
+ timestep = numpy.searchsorted(self._timesets[timeset], t)
+ timestep = min(timestep, len(self._timesets[timeset]) - 1)
+ self.set_timestep(timestep, timeset=timeset)
+
+ def parts(self) -> List[Part]:
+ """
+ Get a list of the parts this reader can access.
+
+ Returns
+ -------
+ List[Part]
+ A list of Part objects.
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.Reader_partsRequest()
+ try:
+ parts = self._userd.stub.Reader_parts(pb, metadata=self._userd.metadata())
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ out = []
+ for part in parts.partList:
+ out.append(Part(self._userd, part))
+ return out
+
+ def variables(self) -> List[Variable]:
+ """
+ Get a list of the variables this reader can access.
+
+ Returns
+ -------
+ List[Variable]
+ A list of Variable objects.
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.Reader_variablesRequest()
+ try:
+ variables = self._userd.stub.Reader_variables(pb, metadata=self._userd.metadata())
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ out = []
+ for variable in variables.variableList:
+ out.append(Variable(self._userd, variable))
+ return out
+
+ def queries(self) -> List[Query]:
+ """
+ Get a list of the queries this reader can access.
+
+ Returns
+ -------
+ List[Query]
+ A list of Query objects.
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.Reader_queriesRequest()
+ try:
+ queries = self._userd.stub.Reader_queries(pb, metadata=self._userd.metadata())
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ out = []
+ for query in queries.queryList:
+ out.append(Query(self._userd, query))
+ return out
+
+ def get_number_of_time_sets(self) -> int:
+ """
+ Get the number of timesets in the dataset.
+
+ Returns
+ -------
+ int
+ The number of timesets.
+ """
+ if len(self._timesets):
+ return len(self._timesets) - 1
+ self._userd.connect_check()
+ pb = libuserd_pb2.Reader_get_number_of_time_setsRequest()
+ try:
+ reply = self._userd.stub.Reader_get_number_of_time_sets(
+ pb, metadata=self._userd.metadata()
+ )
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ return reply.numberOfTimeSets
+
+ def timevalues(self, timeset: int = 0) -> List[float]:
+ """
+ Get a list of the time step values in this dataset for the specified timeset.
+ The default timeset is ``0`` which is a special, "common" timeset formed by
+ merging all the timesets in the data into a single timeset.
+
+ Parameters
+ ----------
+ timeset : int, optional
+            The timeset to query (default is 0)
+
+ Returns
+ -------
+ numpy.array
+ The simulation time value floats.
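+
+        Examples
+        --------
+        A minimal sketch that prints the time values of every timeset,
+        including the merged "common" timeset ``0``.
+
+        >>> for ts in range(reader.get_number_of_time_sets() + 1):
+        ...     print(ts, reader.timevalues(timeset=ts))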
+ """
+ if timeset == 0:
+ try:
+ return self._timesets[timeset]
+ except IndexError:
+ return numpy.array([], dtype="float32")
+ self._userd.connect_check()
+ pb = libuserd_pb2.Reader_timevaluesRequest()
+ pb.timeSetNumber = timeset
+ try:
+ timevalues = self._userd.stub.Reader_timevalues(pb, metadata=self._userd.metadata())
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ return numpy.array(timevalues.timeValues)
+
+ def set_timevalue(self, timevalue: float, timeset: int = 0) -> None:
+ """
+ Change the current time within the selected timeset to the specified value.
+ The default timeset selected is the merged "common" timeset ``0``. If the
+ "common" timeset is used, the appropriate time value will be set for
+ all timesets by this method.
+
+ Parameters
+ ----------
+ timevalue : float
+            The target time value. The timestep closest to this value is selected.
+ timeset : int, optional
+ The timeset to change (default is 0)
+
+ Examples
+ --------
+ >>> from ansys.pyensight.core import libuserd
+ >>> import numpy
+ >>> s = libuserd.LibUserd()
+ >>> s.initialize()
+ >>> opt = {'Long names': 0, 'Number of timesteps': 5, 'Number of scalars': 3,
+ ... 'Number of spheres': 2, 'Number of cubes': 2}
+ >>> d = s.load_data("foo", file_format="Synthetic", reader_options=opt)
+ >>> parts = d.parts()
+ >>> for t in d.timevalues():
+ ... d.set_timevalue(t)
+ ... for p in parts:
+ ... nodes = p.nodes()
+ ... nodes.shape = (len(nodes)//3, 3)
+ ... centroid = numpy.average(nodes, 0)
+ ... print(f"Time: {t} Part: {p.name} Centroid: {centroid}")
+ >>> s.shutdown()
+
+ """
+ if timeset == 0:
+ self._common_set_time(timevalue)
+ return
+ self._userd.connect_check()
+ pb = libuserd_pb2.Reader_set_timevalueRequest()
+ pb.timesetNumber = timeset
+ pb.timeValue = timevalue
+ try:
+ _ = self._userd.stub.Reader_set_timevalue(pb, metadata=self._userd.metadata())
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+
+ def set_timestep(self, timestep: int, timeset: int = 0) -> None:
+ """
+ Change the current time to the specified timestep. This call is the same as:
+ ``reader.set_timevalue(reader.timevalues()[timestep])``.
+ The default timeset selected is the merged "common" timeset ``0``. If the
+ "common" timeset is used, the appropriate time value will be set for
+ all timesets by this method.
+
+ Parameters
+ ----------
+ timestep : int
+ The timestep to change to.
+ timeset : int, optional
+ The timeset to change (default is 0)
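+
+        Examples
+        --------
+        A minimal sketch stepping through every timestep in the common timeset.
+
+        >>> for step in range(len(reader.timevalues())):
+        ...     reader.set_timestep(step)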
+ """
+ if timeset == 0:
+ self._common_set_step(timestep)
+ return
+ self._userd.connect_check()
+ pb = libuserd_pb2.Reader_set_timestepRequest()
+ pb.timeSetNumber = timeset
+ pb.timeStep = timestep
+ try:
+ _ = self._userd.stub.Reader_set_timestep(pb, metadata=self._userd.metadata())
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+
+ def is_geometry_changing(self) -> bool:
+ """
+        Check to see if the geometry in this dataset is changing over time.
+
+ Returns
+ -------
+ bool
+ True if the geometry is changing, False otherwise.
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.Reader_is_geometry_changingRequest()
+ try:
+ reply = self._userd.stub.Reader_is_geometry_changing(
+ pb, metadata=self._userd.metadata()
+ )
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ return reply.isGeomChanging
+
+ def variable_value(self, variable: "Variable") -> float:
+ """
+ For any "case" variable (e.g. time), the value of the variable.
+
+ Parameters
+ ----------
+ variable
+            The variable to query. Note that this variable's location must be on a CASE.
+
+ Returns
+ -------
+ float
+ The value of the variable.
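+
+        Examples
+        --------
+        A minimal sketch; assumes ``case_var`` is a `Variable` whose location
+        is on the case.
+
+        >>> value = reader.variable_value(case_var)
+        >>> print(case_var.name, value)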
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.Reader_variable_valueRequest()
+ pb.variable_id = variable.id
+ try:
+ reply = self._userd.stub.Reader_variable_value(pb, metadata=self._userd.metadata())
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ return reply.value
+
+
+class ReaderInfo(object):
+ """
+ This class represents an available reader, before it has been instantiated.
+ The read_dataset() function actually tries to open a dataset and returns
+ a `Reader` instance that is reading the data.
+
+    The class contains lists of options that can control/configure the reader.
+    These include "boolean", "option", and "field" options, each with defaults
+    supplied by the reader. To use them, change the value or value_index fields
+    to the desired values before calling `read_dataset`.
+
+ Parameters
+ ----------
+ userd
+        The LibUserd instance this reader is associated with.
+ pb
+ The protobuffer that represents this object.
+
+ Attributes
+ ----------
+ id : int
+ The reader id.
+ name : str
+ The reader name.
+ description : str
+ A brief description of the reader and in some cases its operation.
+ fileLabel1 : str
+ A string appropriate for a "file select" button for the primary filename.
+ fileLabel2 : str
+ A string appropriate for a "file select" button for the secondary filename.
+ opt_booleans : List[dict]
+ The boolean user options.
+ opt_options : List[dict]
+ The option user options suitable for display via an option menu.
+ opt_fields : List[dict]
+ The field user options suitable for display via a text field.
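+
+    Examples
+    --------
+    A minimal sketch of overriding a boolean option before reading; the
+    option name used here is hypothetical.
+
+    >>> info = userd.query_format("foo.cas")[0]
+    >>> for opt in info.opt_booleans:
+    ...     if opt["name"] == "Verbose mode":  # hypothetical option name
+    ...         opt["value"] = 1
+    >>> reader = info.read_dataset("foo.cas")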
+ """
+
+ def __init__(self, userd: "LibUserd", pb: libuserd_pb2.ReaderInfo):
+ self._userd = userd
+ self.id = pb.id
+ self.name = pb.name
+ self.description = pb.description
+ self.fileLabel1 = pb.fileLabel1
+ self.fileLabel2 = pb.fileLabel2
+ self.opt_booleans = []
+ for b in pb.options.booleans:
+ self.opt_booleans.append(dict(name=b.name, value=b.value, default=b.default_value))
+ self.opt_options = []
+ for o in pb.options.options:
+ values = []
+ for v in o.values:
+ values.append(v)
+ self.opt_options.append(
+ dict(name=o.name, values=values, value=o.value_index, default=o.default_value_index)
+ )
+ self.opt_fields = []
+ for f in pb.options.fields:
+ self.opt_fields.append(dict(name=f.name, value=f.value, default=f.default_value))
+
+ def read_dataset(self, file1: str, file2: str = "") -> "Reader":
+ """
+ Attempt to read some files on disk using this reader and the specified options.
+ If successful, return an actual reader instance.
+
+ Parameters
+ ----------
+ file1 : str
+ The primary filename (e.g. "foo.cas")
+ file2 : str
+ An optional secondary filename (e.g. "foo.dat")
+
+ Returns
+ -------
+ Reader
+ An instance of the `Reader` class.
+ """
+ self._userd.connect_check()
+ pb = libuserd_pb2.ReaderInfo_read_datasetRequest()
+ pb.filename_1 = file1
+ if file2:
+ pb.filename_2 = file2
+ pb.reader_id = self.id
+ options = self._get_option_values()
+ for b in options["booleans"]:
+ pb.option_values_bools.append(b)
+ for o in options["options"]:
+ pb.option_values_options.append(o)
+ for f in options["fields"]:
+ pb.option_values_fields.append(f)
+ try:
+ reader = self._userd.stub.ReaderInfo_read_dataset(pb, metadata=self._userd.metadata())
+ except grpc.RpcError as e:
+ raise self._userd.libuserd_exception(e)
+ return Reader(self._userd, reader.reader)
+
+ def _get_option_values(self) -> dict:
+ """Extract the current option values from the options dictionaries"""
+ out = dict()
+ booleans = []
+ for b in self.opt_booleans:
+ booleans.append(b["value"])
+ out["booleans"] = booleans
+ options = []
+ for o in self.opt_options:
+ options.append(o["value"])
+ out["options"] = options
+ fields = []
+ for f in self.opt_fields:
+ fields.append(f["value"])
+ out["fields"] = fields
+ return out
+
+ def __str__(self) -> str:
+ return f"ReaderInfo id: {self.id}, name: {self.name}, description: {self.description}"
+
+ def __repr__(self):
+ return f"<{self.__class__.__name__} object, id: {self.id}, name: '{self.name}'>"
+
+
+class LibUserd(object):
+ """
+ LibUserd is the primary interface to the USERD library. All interaction starts at this object.
+
+ Parameters
+ ----------
+ ansys_installation
+ Optional location to search for an Ansys software installation.
+
+ Examples
+ --------
+
+ >>> from ansys.pyensight.core import libuserd
+ >>> l = libuserd.LibUserd()
+ >>> l.initialize()
+ >>> readers = l.query_format(r"D:\data\Axial_001.res")
+ >>> data = readers[0].read_dataset(r"D:\data\Axial_001.res")
+    >>> part = data.parts()[0]
+ >>> print(part, part.nodes())
+ >>> l.shutdown()
+
+ """
+
+ def __init__(
+ self,
+ ansys_installation: str = "",
+ use_docker: bool = False,
+ data_directory: Optional[str] = None,
+ docker_image_name: Optional[str] = None,
+ use_dev: bool = False,
+ product_version: Optional[str] = None,
+ channel: Optional[grpc.Channel] = None,
+ pim_instance: Optional[Any] = None,
+ timeout: float = 120.0,
+ pull_image_if_not_available: bool = False,
+ ):
+ self._server_pathname: Optional[str] = None
+ self._host = "127.0.0.1"
+ self._security_token = str(uuid.uuid1())
+ self._grpc_port = 0
+ self._server_process: Optional[subprocess.Popen] = None
+ self._channel: Optional[grpc.Channel] = None
+ self._stub = None
+ self._security_file: Optional[str] = None
+ # Docker attributes
+ self._pull_image = pull_image_if_not_available
+ self._timeout = timeout
+ self._product_version = product_version
+ self._data_directory = data_directory
+ self._image_name = "ghcr.io/ansys-internal/ensight"
+ if use_dev:
+ self._image_name = "ghcr.io/ansys-internal/ensight_dev"
+ if docker_image_name:
+ self._image_name = docker_image_name
+ self._docker_client: Optional["DockerClient"] = None
+ self._container: Optional["Container"] = None
+ self._enshell: Optional["EnShellGRPC"] = None
+ self._pim_instance = pim_instance
+ self._enshell_grpc_channel: Optional[grpc.Channel] = channel
+ self._pim_file_service: Optional[Any] = None
+ self._service_host_port: Dict[str, Tuple[str, int]] = {}
+ local_launch = True
+ if any([use_docker, use_dev, self._pim_instance]):
+ local_launch = False
+ self._launch_enshell()
+ else:
+ # find the pathname to the server
+ self._server_pathname = self._find_ensight_server_name(
+ ansys_installation=ansys_installation
+ )
+ if self._server_pathname is None:
+ raise RuntimeError("Unable to detect an EnSight server installation.")
+ # enums
+ self._build_enums()
+ if local_launch:
+ self._local_launch()
+ # Build the gRPC connection
+ self._connect()
+
+ def _local_launch(self) -> None:
+ """Launch the gRPC server from a local installation."""
+ # have the server save status so we can read it later
+ with tempfile.TemporaryDirectory() as tmpdirname:
+ self._security_file = os.path.join(tmpdirname, "security.grpc")
+
+ # Build the command line
+ cmd = [str(self.server_pathname)]
+ cmd.extend(["-grpc_server", str(self.grpc_port)])
+ cmd.extend(["-security_file", self._security_file])
+ env_vars = os.environ.copy()
+ if self.security_token:
+ env_vars["ENSIGHT_SECURITY_TOKEN"] = self.security_token
+ env_vars["ENSIGHT_GRPC_SECURITY_FILE"] = self._security_file
+ # start the server
+        self._server_process = subprocess.Popen(
+            cmd,
+            close_fds=True,
+            env=env_vars,
+            stderr=subprocess.DEVNULL,
+            stdout=subprocess.DEVNULL,
+        )
+
+ start_time = time.time()
+ while (self._grpc_port == 0) and (time.time() - start_time < 120.0):
+ try:
+ # Read the port and security token from the security file
+ with open(self._security_file, "r") as f:
+ for line in f:
+ line = line.strip()
+ if line.startswith("grpc_port:"):
+ self._grpc_port = int(line[len("grpc_port:") :])
+ elif line.startswith("grpc_password:"):
+ self._security_token = line[len("grpc_password:") :]
+ except (OSError, IOError):
+ pass
+
+ # Unable to get the grpc port/password
+ if self._grpc_port == 0:
+ self.shutdown()
+ raise RuntimeError(f"Unable to start the gRPC server ({str(self.server_pathname)})")
+
+ def _build_enums(self) -> None:
+ # retained for backward compatibility
+ self.ErrorCodes = ErrorCodes
+ self.ElementType = ElementType
+ self.VariableLocation = VariableLocation
+ self.VariableType = VariableType
+ self.PartHints = PartHints
+
+ def _pull_docker_image(self) -> None:
+ """Pull the docker image if not available"""
+ pull_image(self._docker_client, self._image_name)
+
+ def _check_if_image_available(self) -> bool:
+ """Check if the input docker image is available."""
+ if not self._docker_client:
+ return False
+ filtered_images = self._docker_client.images.list(filters={"reference": self._image_name})
+ if len(filtered_images) > 0:
+ return True
+ return False
+
+ def _launch_enshell(self) -> None:
+ """Create an enshell entry point and use it to launch a Container."""
+ if self._pim_instance:
+ self._service_host_port = populate_service_host_port(self._pim_instance, {})
+ self._pim_file_service = get_file_service(self._pim_instance)
+ self._grpc_port = int(self._service_host_port["grpc_private"][1])
+ self._host = self._service_host_port["grpc_private"][0]
+ else:
+ if not self._data_directory:
+ self._data_directory = tempfile.mkdtemp(prefix="pyensight_")
+ available = self._check_if_image_available()
+ if not available and self._pull_image and not self._pim_instance:
+ self._pull_docker_image()
+ ports = find_unused_ports(2, avoid=[1999])
+ self._service_host_port = {
+ "grpc": ("127.0.0.1", ports[0]),
+ "grpc_private": ("127.0.0.1", ports[1]),
+ }
+ self._grpc_port = ports[1]
+ if not self._pim_instance:
+ self._launch_container()
+ self._enshell = launch_enshell_interface(
+ self._enshell_grpc_channel, self._service_host_port["grpc"][1], self._timeout
+ )
+ self._cei_home = self._enshell.cei_home()
+ self._ansys_version = self._enshell.ansys_version()
+ print("CEI_HOME=", self._cei_home)
+ print("Ansys Version=", self._ansys_version)
+ grpc_port = self._service_host_port["grpc_private"][1]
+ ensight_args = f"-grpc_server {grpc_port}"
+ container_env_str = f"ENSIGHT_SECURITY_TOKEN={self._security_token}\n"
+ ret = self._enshell.start_ensight_server(ensight_args, container_env_str)
+ if ret[0] != 0: # pragma: no cover
+ self._stop_container_and_enshell() # pragma: no cover
+ raise RuntimeError(
+ f"Error starting EnSight Server with args: {ensight_args}"
+ ) # pragma: no cover
+
+ def _launch_container(self) -> None:
+ """Launch a docker container for the input image."""
+ self._docker_client = docker.from_env()
+ grpc_port = self._service_host_port["grpc"][1]
+ private_grpc_port = self._service_host_port["grpc_private"][1]
+ ports_to_map = {
+ str(self._service_host_port["grpc"][1]) + "/tcp": str(grpc_port),
+ str(self._service_host_port["grpc_private"][1]) + "/tcp": str(private_grpc_port),
+ }
+ enshell_cmd = "-app -v 3 -grpc_server " + str(grpc_port)
+ container_env = {
+ "ENSIGHT_SECURITY_TOKEN": self.security_token,
+ }
+ data_volume = {self._data_directory: {"bind": "/data", "mode": "rw"}}
+
+ if not self._docker_client:
+ raise RuntimeError("Could not startup docker.")
+ self._container = self._docker_client.containers.run( # pragma: no cover
+ self._image_name,
+ command=enshell_cmd,
+ volumes=data_volume,
+ environment=container_env,
+ ports=ports_to_map,
+ tty=True,
+ detach=True,
+ auto_remove=True,
+ remove=True,
+ )
+
+ def _stop_container_and_enshell(self) -> None:
+ """Release any additional resources allocated during launching."""
+ if self._enshell:
+ if self._enshell.is_connected(): # pragma: no cover
+ try:
+ logging.debug("Stopping EnShell.\n")
+ self._enshell.stop_server()
+ except Exception: # pragma: no cover
+ pass # pragma: no cover
+ self._enshell = None
+ if self._container:
+ try:
+ logging.debug("Stopping the Docker Container.\n")
+ self._container.stop()
+ except Exception:
+ pass
+ try:
+ logging.debug("Removing the Docker Container.\n")
+ self._container.remove()
+ except Exception:
+ pass
+ self._container = None
+
+ if self._pim_instance is not None:
+ logging.debug("Deleting the PIM instance.\n")
+ self._pim_instance.delete()
+ self._pim_instance = None
+
+ @property
+ def stub(self):
+ """A libuserd_pb2_grpc.LibUSERDServiceStub instance bound to a gRPC connection channel"""
+ return self._stub
+
+ @property
+ def server_pathname(self) -> Optional[str]:
+ """The pathanme of the detected EnSight server executable used as the gRPC server"""
+ return self._server_pathname
+
+ @property
+ def security_token(self) -> str:
+ """The current gRPC security token"""
+ return self._security_token
+
+ @property
+ def grpc_port(self) -> int:
+ """The current gRPC port"""
+ return self._grpc_port
+
+ def __del__(self) -> None:
+ self.shutdown()
+
+ @staticmethod
+ def _find_ensight_server_name(ansys_installation: str = "") -> Optional[str]:
+ """
+ Parameters
+ ----------
+ ansys_installation : str
+ Path to the local Ansys installation, including the version
+ directory. The default is ``None``, in which case common locations
+ are scanned to detect the latest local Ansys installation. The
+ ``PYENSIGHT_ANSYS_INSTALLATION`` environmental variable is checked first.
+
+ Returns
+ -------
+ str
+ The first valid ensight_server found or None
+
+ """
+ dirs_to_check = []
+ if ansys_installation:
+ dirs_to_check.append(ansys_installation)
+
+ if "PYENSIGHT_ANSYS_INSTALLATION" in os.environ:
+ env_inst = os.environ["PYENSIGHT_ANSYS_INSTALLATION"]
+ dirs_to_check.append(env_inst)
+ # Note: PYENSIGHT_ANSYS_INSTALLATION is designed for devel builds
+ # where there is no CEI directory, but for folks using it in other
+ # ways, we'll add that one too, just in case.
+ dirs_to_check.append(os.path.join(env_inst, "CEI"))
+
+ try:
+ import enve
+
+ dirs_to_check.append(enve.home())
+ except ModuleNotFoundError:
+ pass
+
+ if "CEI_HOME" in os.environ:
+ env_inst = os.environ["CEI_HOME"]
+ dirs_to_check.append(env_inst)
+
+ # Look for most recent Ansys install
+ awp_roots = []
+ for env_name in dict(os.environ).keys():
+ if env_name.startswith("AWP_ROOT"):
+ try:
+ version = int(env_name[len("AWP_ROOT") :])
+ # this API is new in 2025 R1 distributions
+ if version >= 251:
+ awp_roots.append(env_name)
+ except ValueError:
+ pass
+ awp_roots.sort(reverse=True)
+ for env_name in awp_roots:
+ dirs_to_check.append(os.path.join(os.environ[env_name], "CEI"))
+
+ # check all the collected locations in order
+ app_name = "ensight_server"
+ if platform.system() == "Windows":
+ app_name += ".bat"
+ for install_dir in dirs_to_check:
+ launch_file = os.path.join(install_dir, "bin", app_name)
+ if os.path.isfile(launch_file):
+ return launch_file
+ return None
+
+ def _is_connected(self) -> bool:
+ """Check to see if the gRPC connection is live
+
+ Returns
+ -------
+ bool
+ True if the connection is active.
+ """
+ return self._channel is not None
+
+ def _connect(self) -> None:
+ """Establish the gRPC connection to EnSight
+
+        Attempt to connect to an EnSight gRPC server using the host and port
+        established by the constructor. The connection attempt times out after
+        ``self._timeout`` seconds. Note that on failure this method simply
+        returns, but ``_is_connected()`` will return False.
+ """
+ if self._is_connected():
+ return
+ # set up the channel
+
+ self._channel = grpc.insecure_channel(
+ "{}:{}".format(self._host, self._grpc_port),
+ options=[
+ ("grpc.max_receive_message_length", -1),
+ ("grpc.max_send_message_length", -1),
+ ("grpc.testing.fixed_reconnect_backoff_ms", 1100),
+ ],
+ )
+ try:
+ grpc.channel_ready_future(self._channel).result(timeout=self._timeout)
+ except grpc.FutureTimeoutError: # pragma: no cover
+ self._channel = None # pragma: no cover
+ return # pragma: no cover
+ # hook up the stub interface
+ self._stub = libuserd_pb2_grpc.LibUSERDServiceStub(self._channel)
+
+ def metadata(self) -> List[Tuple[bytes, Union[str, bytes]]]:
+ """Compute the gRPC stream metadata
+
+ Compute the list to be passed to the gRPC calls for things like security
+ and the session name.
+
+ Returns
+ -------
+ List[Tuple[bytes, Union[str, bytes]]]
+ A list object of the metadata elements needed in a gRPC call to
+ satisfy the EnSight server gRPC requirements.
+ """
+ ret: List[Tuple[bytes, Union[str, bytes]]] = list()
+ s: Union[str, bytes]
+ if self._security_token: # pragma: no cover
+ s = self._security_token
+            if isinstance(s, str):  # pragma: no cover
+ s = s.encode("utf-8")
+ ret.append((b"shared_secret", s))
+ return ret
+
+ def libuserd_exception(self, e: "grpc.RpcError") -> Exception:
+ """
+ Given an exception raised as the result of a gRPC call, return either
+ the input exception or a LibUserdError exception object to differentiate
+ between gRPC issues and libuserd issues.
+
+ Parameters
+ ----------
+ e
+ The exception raised by a gRPC call.
+
+ Returns
+ -------
+ Exception
+ Either the original exception or a LibUserdError exception instance, depending on
+ the original exception message details.
+ """
+ msg = e.details()
+ if msg.startswith("LibUserd("):
+ return LibUserdError(msg)
+ return e
+
+ def _disconnect(self, no_error: bool = False) -> None:
+ """Close down the gRPC connection
+
+ Disconnect all connections to the gRPC server. Send the shutdown request gRPC command
+ to the server first.
+
+ Parameters
+ ----------
+ no_error
+ If true, ignore errors resulting from the shutdown operation.
+ """
+ if not self._is_connected(): # pragma: no cover
+ return
+ # Note: this is expected to return an error
+ try:
+ pb = libuserd_pb2.Libuserd_shutdownRequest()
+ self._stub.Libuserd_shutdown(pb, metadata=self.metadata()) # type: ignore
+ if self._channel:
+ self._channel.close()
+ except grpc.RpcError as e:
+ if not no_error:
+ raise self.libuserd_exception(e)
+ finally:
+ # clean up control objects
+ self._stub = None
+ self._channel = None
+
+ def connect_check(self) -> None:
+ """
+        Verify that there is an active gRPC connection established. If not,
+        raise a RuntimeError.
+
+ Raises
+ ------
+ RuntimeError
+ If there is no active connection.
+ """
+ if not self._is_connected():
+ raise RuntimeError("gRPC connection not established")
+
+ """
+ gRPC method bindings
+ """
+
+ def shutdown(self) -> None:
+ """
+ Close any active gRPC connection and shut down the EnSight server.
+ The object is no longer usable.
+ """
+ self._disconnect(no_error=True)
+ # Just in case, we will try to kill the server directly as well
+ if self._server_process:
+ if psutil.pid_exists(self._server_process.pid):
+ proc = psutil.Process(self._server_process.pid)
+ for child in proc.children(recursive=True):
+ if psutil.pid_exists(child.pid):
+ # This can be a race condition, so it is ok if the child is dead already
+ try:
+ child.kill()
+ except psutil.NoSuchProcess:
+ pass
+ # Same issue, this process might already be shutting down, so NoSuchProcess is ok.
+ try:
+ proc.kill()
+ except psutil.NoSuchProcess:
+ pass
+ if self._container:
+ self._stop_container_and_enshell()
+ self._server_process = None
+
+ def ansys_release_string(self) -> str:
+ """
+ Return the Ansys release for the library.
+
+ Returns
+ -------
+ str
+ Return a string like "2025 R1"
+ """
+ self.connect_check()
+ pb = libuserd_pb2.Libuserd_ansys_release_stringRequest()
+ try:
+ ret = self.stub.Libuserd_ansys_release_string(pb, metadata=self.metadata())
+ except grpc.RpcError as e:
+ raise self.libuserd_exception(e)
+ return ret.result
+
+ def ansys_release_number(self) -> int:
+ """
+ Return the Ansys release number of the library.
+
+ Returns
+ -------
+ int
+ A version number like 251 (for "2025 R1")
+ """
+ self.connect_check()
+ pb = libuserd_pb2.Libuserd_ansys_release_numberRequest()
+ try:
+ ret = self.stub.Libuserd_ansys_release_number(pb, metadata=self.metadata())
+ except grpc.RpcError as e:
+ raise self.libuserd_exception(e)
+ return ret.result
+
+ def library_version(self) -> str:
+ """
+ The library version number. This string is the version of the
+ library interface itself. This is not the same as the version
+ number of the Ansys release that corresponds to the library.
+
+ This number follows semantic versioning rules: "1.0.0" or
+ "0.4.3-rc.1" would be examples of valid library_version() strings.
+
+ Returns
+ -------
+ str
+ The library interface version number string.
+ """
+ self.connect_check()
+ pb = libuserd_pb2.Libuserd_library_versionRequest()
+ try:
+ ret = self.stub.Libuserd_library_version(pb, metadata=self.metadata())
+ except grpc.RpcError as e:
+ raise self.libuserd_exception(e)
+ return ret.result
+
+ def nodes_per_element(self, element_type: int) -> int:
+ """
+ For a given element type (e.g. HEX20), return the number of nodes used by the element.
+ Note, this is not supported for NSIDED and NFACED element types.
+
+ Parameters
+ ----------
+ element_type
+ The element type: ElementType enum value
+
+ Returns
+ -------
+ int
+            Number of nodes per element or 0 if ``element_type`` is not a valid zoo element type.
+ """
+ self.connect_check()
+ pb = libuserd_pb2.Libuserd_nodes_per_elementRequest()
+ pb.elemType = element_type
+ try:
+ ret = self.stub.Libuserd_nodes_per_element(pb, metadata=self.metadata())
+ except grpc.RpcError as e:
+ raise self.libuserd_exception(e)
+ return ret.result
+
+ def element_is_ghost(self, element_type: int) -> bool:
+ """
+        For a given element type (e.g. HEX20), determine if the element type should be considered
+ a "ghost" element.
+
+ Parameters
+ ----------
+ element_type
+ The element type: ElementType enum value
+
+ Returns
+ -------
+ bool
+ True if the element is a ghost (or an invalid element type).
+ """
+ self.connect_check()
+ pb = libuserd_pb2.Libuserd_element_is_ghostRequest()
+ pb.elemType = element_type
+ try:
+ ret = self.stub.Libuserd_element_is_ghost(pb, metadata=self.metadata())
+ except grpc.RpcError as e:
+ raise self.libuserd_exception(e)
+ return ret.result
+
+ def element_is_zoo(self, element_type: int) -> bool:
+ """
+        For a given element type (e.g. HEX20), determine if the element type is a "zoo" element or not
+
+ Parameters
+ ----------
+ element_type
+ The element type: ElementType enum value
+
+ Returns
+ -------
+ bool
+ True if the element is a zoo element and false if it is NSIDED or NFACED.
+ """
+ self.connect_check()
+ pb = libuserd_pb2.Libuserd_element_is_zooRequest()
+ pb.elemType = element_type
+ try:
+ ret = self.stub.Libuserd_element_is_zoo(pb, metadata=self.metadata())
+ except grpc.RpcError as e:
+ raise self.libuserd_exception(e)
+ return ret.result
+
+ def element_is_nsided(self, element_type: int) -> bool:
+ """
+ For a given element type, determine if the element type is n-sided or not
+
+ Parameters
+ ----------
+ element_type
+ The element type: ElementType enum value
+
+ Returns
+ -------
+ bool
+ True if the element is a NSIDED or NSIDED_GHOST and False otherwise.
+ """
+ self.connect_check()
+ pb = libuserd_pb2.Libuserd_element_is_nsidedRequest()
+ pb.elemType = element_type
+ try:
+ ret = self.stub.Libuserd_element_is_nsided(pb, metadata=self.metadata())
+ except grpc.RpcError as e:
+ raise self.libuserd_exception(e)
+ return ret.result
+
+ def element_is_nfaced(self, element_type: int) -> bool:
+ """
+ For a given element type, determine if the element type is n-faced or not
+
+ Parameters
+ ----------
+ element_type
+ The element type: ElementType enum value
+
+ Returns
+ -------
+ bool
+ True if the element is a NFACED or NFACED_GHOST and False otherwise.
+ """
+ self.connect_check()
+ pb = libuserd_pb2.Libuserd_element_is_nfacedRequest()
+ pb.elemType = element_type
+ try:
+ ret = self.stub.Libuserd_element_is_nfaced(pb, metadata=self.metadata())
+ except grpc.RpcError as e:
+ raise self.libuserd_exception(e)
+ return ret.result
+
+ def number_of_simple_element_types(self) -> int:
+ """
+        There is a consecutive range of element type enums that are supported by the
+        Part.element_conn() method. This function returns the number of those element
+        types and may be useful in common element type handling code.
+
+ Note: The value is effectively int(ElementType.NSIDED).
+
+ Returns
+ -------
+ int
+ The number of zoo element types.
+ """
+ self.connect_check()
+ pb = libuserd_pb2.Libuserd_number_of_simple_element_typesRequest()
+ try:
+ ret = self.stub.Libuserd_number_of_simple_element_types(pb, metadata=self.metadata())
+ except grpc.RpcError as e:
+ raise self.libuserd_exception(e)
+ return ret.result
+
+ def initialize(self) -> None:
+ """
+ This call initializes the libuserd system. It causes the library to scan for available
+ readers and set up any required reduction engine bits. It can only be called once.
+ """
+ self.connect_check()
+ pb = libuserd_pb2.Libuserd_initializeRequest()
+ try:
+ _ = self.stub.Libuserd_initialize(pb, metadata=self.metadata())
+ except grpc.RpcError as e:
+ raise self.libuserd_exception(e)
+
+ def get_all_readers(self) -> List["ReaderInfo"]:
+ """
+ Return a list of the readers that are available.
+
+ Returns
+ -------
+ List[ReaderInfo]
+ List of all ReaderInfo objects.
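+
+        Examples
+        --------
+        A minimal sketch listing the available readers.
+
+        >>> for info in userd.get_all_readers():
+        ...     print(info.name, "-", info.description)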
+ """
+ self.connect_check()
+ pb = libuserd_pb2.Libuserd_get_all_readersRequest()
+ try:
+ readers = self.stub.Libuserd_get_all_readers(pb, metadata=self.metadata())
+ except grpc.RpcError as e:
+ raise self.libuserd_exception(e)
+ out = []
+ for reader in readers.readerInfo:
+ out.append(ReaderInfo(self, reader))
+ return out
+
+ def query_format(self, name1: str, name2: str = "") -> List["ReaderInfo"]:
+ """
+ For a given dataset (filename(s)), ask the readers if they should be able to read
+ that data.
+
+ Parameters
+ ----------
+ name1
+ Primary input filename
+
+ name2
+ Optional, secondary input filename
+
+ Returns
+ -------
+ List[ReaderInfo]
+ List of ReaderInfo objects that might be able to read the dataset
+ """
+ self.connect_check()
+ pb = libuserd_pb2.Libuserd_query_formatRequest()
+ pb.name1 = name1
+ if name2:
+ pb.name2 = name2
+ try:
+ readers = self.stub.Libuserd_query_format(pb, metadata=self.metadata())
+ except grpc.RpcError as e:
+ raise self.libuserd_exception(e)
+ out = []
+ for reader in readers.readerInfo:
+ out.append(ReaderInfo(self, reader))
+ return out
+
+ def load_data(
+ self,
+ data_file: str,
+ result_file: str = "",
+ file_format: Optional[str] = None,
+ reader_options: Dict[str, Any] = {},
+ ) -> "Reader":
+ """Use the reader to load a dataset and return an instance
+ to the resulting ``Reader`` interface.
+
+ Parameters
+ ----------
+ data_file : str
+ Name of the data file to load.
+ result_file : str, optional
+ Name of the second data file for dual-file datasets.
+ file_format : str, optional
+ Name of the USERD reader to use. The default is ``None``,
+ in which case libuserd selects a reader.
+ reader_options : dict, optional
+ Dictionary of reader-specific option-value pairs that can be used
+            to customize the reader behavior. The default is an empty dictionary.
+
+ Returns
+ -------
+ Reader
+ Resulting Reader object instance.
+
+ Raises
+ ------
+ RuntimeError
+            If libuserd cannot guess the file format or an error occurs while the
+ data is being read.
+
+ Examples
+ --------
+
+ >>> from ansys.pyensight.core import libuserd
+ >>> userd = libuserd.LibUserd()
+ >>> userd.initialize()
+ >>> opt = {'Long names': False, 'Number of timesteps': '10', 'Number of scalars': '3'}
+        >>> data = userd.load_data("foo", file_format="Synthetic", reader_options=opt)
+ >>> print(data.parts())
+ >>> print(data.variables())
+ >>> userd.shutdown()
+
+ """
+ the_reader: Optional[ReaderInfo] = None
+ if file_format:
+ for reader in self.get_all_readers():
+ if reader.name == file_format:
+ the_reader = reader
+ break
+ if the_reader is None:
+ raise RuntimeError(f"The reader '{file_format}' could not be found.")
+ else:
+ readers = self.query_format(data_file, name2=result_file)
+ if len(readers):
+ the_reader = readers[0]
+ if the_reader is None:
+ raise RuntimeError(f"Unable to find a reader for '{data_file}':'{result_file}'.")
+ for key, value in reader_options.items():
+ for b in the_reader.opt_booleans:
+ if key == b["name"]:
+ b["value"] = bool(value)
+ for o in the_reader.opt_options:
+ if key == o["name"]:
+ o["value"] = int(value)
+ for f in the_reader.opt_fields:
+ if key == f["name"]:
+ f["value"] = str(value)
+ try:
+ output = the_reader.read_dataset(data_file, result_file)
+ except Exception:
+ raise RuntimeError("Unable to open the specified dataset.") from None
+
+ return output
+
+ @staticmethod
+ def _download_files(uri: str, pathname: str, folder: bool = False):
+ """Download files from the input uri and save them on the input pathname.
+
+ Parameters:
+ ----------
+
+ uri: str
+ The uri to get files from
+ pathname: str
+ The location were to save the files. It could be either a file or a folder.
+ folder: bool
+ True if the uri will server files from a directory. In this case,
+ pathname will be used as the directory were to save the files.
+ """
+ if not folder:
+ with requests.get(uri, stream=True) as r:
+ with open(pathname, "wb") as f:
+ shutil.copyfileobj(r.raw, f)
+ else:
+ with requests.get(uri) as r:
+ data = r.json()
+ os.makedirs(pathname, exist_ok=True)
+ for item in data:
+ if item["type"] == "file":
+ file_url = item["download_url"]
+ filename = os.path.join(pathname, item["name"])
+ r = requests.get(file_url, stream=True)
+ with open(filename, "wb") as f:
+ f.write(r.content)
+
+ def file_service(self) -> Optional[Any]:
+ """Get the PIM file service object if available."""
+ return self._pim_file_service
+
+ def download_pyansys_example(
+ self,
+ filename: str,
+ directory: Optional[str] = None,
+ root: Optional[str] = None,
+ folder: bool = False,
+ ) -> str:
+ """Download an example dataset from the ansys/example-data repository.
+        The dataset is downloaded local to the EnSight server location, so that it
+        can be loaded even when running from a container.
+
+ Parameters
+ ----------
+ filename: str
+ The filename to download
+ directory: str
+ The directory to download the filename from
+ root: str
+            If set, download from this location instead of the default repository
+        folder: bool
+            If set to True, treat the filename as a directory rather
+            than a single file
+
+ Returns
+ -------
+ pathname: str
+ The download location, local to the EnSight server directory.
+ If folder is set to True, the download location will be a folder containing
+ all the items available in the repository location under that folder.
+
+ Examples
+ --------
+ >>> from ansys.pyensight.core import libuserd
+ >>> l = libuserd.LibUserd()
+ >>> cas_file = l.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
+ >>> dat_file = l.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
+ """
+ base_uri = "https://github.com/ansys/example-data/raw/master"
+ base_api_uri = "https://api.github.com/repos/ansys/example-data/contents"
+ if not folder:
+ if root is not None:
+ base_uri = root
+ else:
+ base_uri = base_api_uri
+ uri = f"{base_uri}/{filename}"
+ if directory:
+ uri = f"{base_uri}/{directory}/{filename}"
+ # Local installs and PIM instances
+ download_path = f"{os.getcwd()}/{filename}"
+ if self._container and self._data_directory:
+ # Docker Image
+ download_path = os.path.join(self._data_directory, filename)
+ self._download_files(uri, download_path, folder=folder)
+ pathname = download_path
+ if self._container:
+ # Convert local path to Docker mounted volume path
+ pathname = f"/data/{filename}"
+ return pathname
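When ``folder=True``, the request goes through the GitHub contents API and every file
listed for that directory is mirrored locally. A usage sketch, assuming a running
``LibUserd`` instance (the directory name here is hypothetical):

    >>> from ansys.pyensight.core import libuserd
    >>> l = libuserd.LibUserd()
    >>> path = l.download_pyansys_example("some_case_dir", "pyensight", folder=True)
    >>> # 'path' is a folder holding all files listed under that repository directory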
diff --git a/src/ansys/pyensight/core/renderable.py b/src/ansys/pyensight/core/renderable.py
index b3e0e66e974..03c72ea3fc7 100644
--- a/src/ansys/pyensight/core/renderable.py
+++ b/src/ansys/pyensight/core/renderable.py
@@ -1,853 +1,853 @@
-"""Renderable module.
-
-This module provides the interface for creating objects in the EnSight session
-that can be displayed via HTML over the websocket server interface.
-"""
-import hashlib
-import os
-import shutil
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, no_type_check
-import uuid
-import warnings
-import webbrowser
-
-import requests
-
-if TYPE_CHECKING:
- from ansys.pyensight.core import Session
-
-
-def _get_ansysnexus_version(version: Union[int, str]) -> str:
- if int(version) < 242:
- return ""
- return str(version)
-
-
-class Renderable:
- """Generates HTML pages for renderable entities.
-
- This class provides the underlying HTML remote webpage generation for
- the :func:`show` method. The approach
- is to generate the renderable in the EnSight session and make the
- artifacts available via the websocket server. The artifacts are then
- wrapped with simple HTML pages, which are also served up by the websocket
- server. These HTML pages can then be used to populate iframes.
-
- Parameters
- ----------
- session :
- PyEnSight session to generate renderables for.
- cell_handle :
- Jupyter notebook cell handle (if any). The default is ``None``.
- width : int, optional
- Width of the renderable. The default is ``None``.
- height : int, optional
- Height of the renderable. The default is ``None``.
- temporal : bool, optional
- Whether to show data for all timesteps in an interactive
- WebGL-based browser viewer. The default is ``False``.
- aa : int, optional
- Number of antialiasing passes to use when rendering images.
- The default is ``1``.
- fps : float, optional
- Number of frames per second to use for animation playback. The
- default is ``30.0``.
- num_frames : int, optional
-        Number of frames to record at a static timestep for animation playback.
- The default is ``None``.
-
- """
-
- def __init__(
- self,
- session: "Session",
- cell_handle: Optional[Any] = None,
- width: Optional[int] = None,
- height: Optional[int] = None,
- temporal: bool = False,
- aa: int = 1,
- fps: float = 30.0,
- num_frames: Optional[int] = None,
- ) -> None:
- self._session = session
- self._filename_index: int = 0
- self._guid: str = str(uuid.uuid1()).replace("-", "")
- self._download_names: List[str] = []
- # The Jupyter notebook cell handle (if any)
- self._cell_handle = cell_handle
- # the URL to the base HTML file for this entity
- self._url: Optional[str] = None
- # the pathname of the HTML file in the remote EnSight session
- self._url_remote_pathname: Optional[str] = None
- # the name passed to the pyensight session show() string
- self._rendertype: str = ""
- # Common attributes used by various subclasses
- self._width: Optional[int] = width
- self._height: Optional[int] = height
- self._temporal: bool = temporal
- self._aa: int = aa
- self._fps: float = fps
- self._num_frames: Optional[int] = num_frames
- #
- self._using_proxy = False
- # if we're talking directly to WS, then use 'http' otherwise 'https' for the proxy
- self._http_protocol = "http"
- try:
- if self._session.launcher._pim_instance is not None:
- self._using_proxy = True
- self._http_protocol = "https"
- except Exception:
- # the launcher may not be PIM aware; that's ok
- pass
-
- def __repr__(self) -> str:
- name = self.__class__.__name__
- return f"{name}( url='{self._url}' )"
-
- @no_type_check
- def _repr_pretty_(self, p: "pretty", cycle: bool) -> None:
- """Support the pretty module for better IPython support.
-
- Parameters
- ----------
-        p : pretty
-            IPython pretty printer instance used to format the representation.
-        cycle : bool
-            Whether a reference cycle was detected during formatting.
-
- """
- name = self.__class__.__name__
- p.text(f"{name}( url='{self._url}' )")
-
- def _get_query_parameters_str(self, params: Optional[Dict[str, str]] = None) -> str:
- """Generate any optional http query parameters.
-        Return a string formatted as a URL query to append to the
-        end of the URL. The string may be empty if there
-        aren't any parameters. The method takes a dict
-        of parameters, possibly empty or None, and combines it with the
-        parameters from the launcher, which may also be empty.
- """
- qp_dict = self._session.launcher._get_query_parameters()
- if qp_dict is None:
- # just in case
- qp_dict = {}
- if params:
- qp_dict.update(params)
- query_parameter_str = ""
- symbol = "?"
- for p in qp_dict.items():
- query_parameter_str += f"{symbol}{p[0]}={p[1]}"
- symbol = "&"
- return query_parameter_str
-
- def _generate_filename(self, suffix: str) -> Tuple[str, str]:
- """Create session-specific files and URLs.
-
- Every time this method is called, a new filename (on the EnSight host)
- and the associated URL for that file are generated. The caller
- provides the suffix for the names.
-
- Parameters
- ----------
- suffix: str
- Suffix of the file.
-
- Returns
- -------
- Tuple[str, str]
- Filename to use on the host system and the URL that accesses the
- file via REST calls to the websocket server.
-
- """
- filename = f"{self._session.secret_key}_{self._guid}_{self._filename_index}{suffix}"
- # Note: cannot use os.path.join here as the OS of the EnSight session might not match
- # the client OS.
- pathname = f"{self._session.launcher.session_directory}/{filename}"
- self._filename_index += 1
- return pathname, filename
-
- def _generate_url(self) -> None:
- """Build the remote HTML filename and associated URL.
-
-        On the remote system, the pathname to the HTML file is
- ``{session_directory}/{session}_{guid}_{index}_{type}.html``.
- The URL to the file (through the session HTTP server) is
- ``http://{system}:{websocketserverhtmlport}/{session}_{guid}_{index}_{type}.html``.
-
- Note that there may be optional http query parameters at the end of the URL.
-
- After this call, ``_url`` and ``_url_remote_pathname`` reflect these names.
-
- """
- suffix = f"_{self._rendertype}.html"
- filename_index = self._filename_index
- remote_pathname, _ = self._generate_filename(suffix)
- simple_filename = f"{self._session.secret_key}_{self._guid}_{filename_index}{suffix}"
- url = f"{self._http_protocol}://{self._session.html_hostname}:{self._session.html_port}"
- self._url = f"{url}/{simple_filename}{self._get_query_parameters_str()}"
- self._url_remote_pathname = remote_pathname
-
- def _save_remote_html_page(self, html: str) -> None:
- """Create an HTML webpage on the remote host.
-
- Given a snippet of HTML, create a file on the remote server with the
- name generated by the ``_generate_url()`` method.
- The most common use is to generate an "iframe" wrapper around some HTML
- snippet.
-
- Parameters
- ----------
- html : str
- HTML snippet to wrap remotely.
-
- """
- # save "html" into a file on the remote server with filename .html
- cmd = f'open(r"""{self._url_remote_pathname}""", "w").write("""{html}""")'
- self._session.grpc.command(cmd, do_eval=False)
-
- def browser(self) -> None:
- """Open a web browser page to display the renderable content."""
- if self._url:
- webbrowser.open(self._url)
-
- @property
- def url(self) -> Optional[str]:
- """URL to the renderable content."""
- return self._url
-
- def _default_size(self, width: int, height: int) -> Tuple[int, int]:
- """Propose and return a size for a rectangle.
-
- The renderable may have been constructed with user-supplied width and height
- information. If so, that information is returned. If not, the width and
- height values passed to this method are returned.
-
- Parameters
- ----------
- width : int
- Width value to return if the renderable does not have a width.
- height : int
- Height value to return if the renderable does not have a height.
-
- Returns
- -------
- Tuple[int, int]
- Tuple (width, height) of the size values to use.
-
- """
- out_w = self._width
- if out_w is None:
- out_w = width
- out_h = self._height
- if out_h is None:
- out_h = height
- return out_w, out_h
-
- def update(self) -> None:
- """Update the visualization and display it.
-
- When this method is called, the graphics content is updated to the
- current EnSight instance state. For example, an image might be re-captured.
- The URL of the content stays the same, but the content that the URL displays is
- updated.
-
- If the renderable was created in the context of a Jupyter notebook cell,
- the original cell display is updated.
-
- """
- if self._cell_handle:
- from IPython.display import IFrame
-
- width, height = self._default_size(800, 600)
- self._cell_handle.update(IFrame(src=self._url, width=width, height=height))
-
- def delete(self) -> None:
- """Delete all server resources for the renderable.
-
- A renderable occupies resources in the EnSight :class:`Session`
- instance. This method releases those resources. Once this method is called, the renderable
- can no longer be displayed.
-
- Notes
- -----
- This method has not yet been implemented.
-
- """
- pass
-
- def download(self, dirname: str) -> List[str]:
- """Download the content files for the renderable.
-
- A renderable saves files (such as images, mpegs, and geometry) in the EnSight instance.
- Normally, these files are accessed via the webpage specified in the URL property.
- This method allows for those files to be downloaded to a local directory so that they
- can be used for other purposes.
-
- .. note::
- Any previously existing files with the same name are overwritten.
-
- Parameters
- ----------
- dirname : str
- Name of the existing directory to save the files to.
-
-
- Returns
- -------
- list
- List of names for the downloaded files.
-
- Examples
- --------
- Download the PNG file generated by the image renderable.
-
- >>> img = session.show('image", width=640, height=480, aa=4)
- >>> names = img.download("/tmp")
- >>> png_pathname = os.path.join("/tmp", names[0])
-
- """
- for filename in self._download_names:
- url = f"{self._http_protocol}://{self._session.html_hostname}:{self._session.html_port}/{filename}{self._get_query_parameters_str()}"
- outpath = os.path.join(dirname, filename)
- with requests.get(url, stream=True) as r:
- with open(outpath, "wb") as f:
- shutil.copyfileobj(r.raw, f)
- return self._download_names
-
-
-class RenderableImage(Renderable):
- """Renders an image on the EnSight host system and makes it available via a webpage."""
-
- def __init__(self, *args, **kwargs) -> None:
- """Initialize RenderableImage."""
- super().__init__(*args, **kwargs)
- self._rendertype = "image"
- self._generate_url()
- # the HTML serves up a PNG file
- pathname, filename = self._generate_filename(".png")
- self._png_pathname = pathname
- self._png_filename = filename
- # the download is the png file
- self._download_names.append(self._png_filename)
- self.update()
-
- def update(self):
- """Update the image and display it.
-
- If the renderable is part of a Jupyter notebook cell, that cell is updated
- as an iframe reference.
-
- """
- # save the image file on the remote host
- w, h = self._default_size(1920, 1080)
- cmd = f'ensight.render({w},{h},num_samples={self._aa}).save(r"""{self._png_pathname}""")'
- self._session.cmd(cmd)
- # generate HTML page with file references local to the websocket server root
- html = '\n'
- html += f'\n'
- html += "\n"
- # refresh the remote HTML
- self._save_remote_html_page(html)
- super().update()
-
-
-class RenderableDeepPixel(Renderable):
- """Renders a deep pixel image on the EnSight host system and makes it available via a webpage."""
-
- def __init__(self, *args, **kwargs) -> None:
- super().__init__(*args, **kwargs)
- self._rendertype = "deep_pixel"
- self._generate_url()
- pathname, filename = self._generate_filename(".tif")
- self._tif_pathname = pathname
- self._tif_filename = filename
- # the download is the tiff file
- self._download_names.append(self._tif_filename)
- self.update()
-
- def update(self):
- """Update the deep pixel image and display it.
-
- If the renderable is part of a Jupyter notebook cell, that cell is updated as
- an iframe reference.
- """
- # save the (deep) image file
- # get the optional query parameters which may be an empty string
- # needed for proxy servers like ansys lab
- optional_query = self._get_query_parameters_str()
- w, h = self._default_size(1920, 1080)
- deep = f",num_samples={self._aa},enhanced=1"
- cmd = f'ensight.render({w},{h}{deep}).save(r"""{self._tif_pathname}""")'
- self._session.cmd(cmd)
- html_source = os.path.join(os.path.dirname(__file__), "deep_pixel_view.html")
- with open(html_source, "r") as fp:
- html = fp.read()
- # copy some files from Nexus
- cmd = "import shutil, enve, ceiversion, os.path\n"
- base_name = "os.path.join(enve.home(), f'nexus{ceiversion.nexus_suffix}', 'django', "
- base_name += "'website', 'static', 'website', 'scripts', "
- for script in ["geotiff.js", "geotiff_nexus.js", "bootstrap.min.js"]:
- name = base_name + f"'{script}')"
- cmd += f'shutil.copy({name}, r"""{self._session.launcher.session_directory}""")\n'
- name = "os.path.join(enve.home(), f'nexus{ceiversion.nexus_suffix}', 'django', "
- name += "'website', 'static', 'website', 'content', 'bootstrap.min.css')"
- cmd += f'shutil.copy({name}, r"""{self._session.launcher.session_directory}""")\n'
- self._session.cmd(cmd, do_eval=False)
-        # With Bootstrap 5 (2025 R1), data attribute names include 'bs-', e.g. 'data-bs-toggle' vs 'data-toggle'
- bs_prefix = "bs-"
- jquery_version = ""
- if int(self._session._cei_suffix) < 251:
- bs_prefix = ""
- jquery_version = "-3.4.1"
- jquery = f"jquery{jquery_version}.min.js"
- cmd = "import shutil, enve, ceiversion, os.path\n"
- name = base_name + f"'{jquery}')"
- cmd += "try:"
- cmd += f' shutil.copy({name}, r"""{self._session.launcher.session_directory}""")\n'
- cmd += "except Exception:"
- cmd += " pass"
- name = "os.path.join(enve.home(), f'nexus{ceiversion.nexus_suffix}', 'django', "
- name += "'website', 'static', 'website', 'content', 'bootstrap.min.css')"
- cmd += f'shutil.copy({name}, r"""{self._session.launcher.session_directory}""")\n'
- self._session.cmd(cmd, do_eval=False)
- url = f"{self._http_protocol}://{self._session.html_hostname}:{self._session.html_port}"
- tiff_url = f"{url}/{self._tif_filename}{optional_query}"
- # replace some bits in the HTML
- html = html.replace("TIFF_URL", tiff_url)
- html = html.replace("ITEMID", self._guid)
- html = html.replace("OPTIONAL_QUERY", optional_query)
- html = html.replace("JQUERY_VERSION", jquery_version)
- html = html.replace("BS_PREFIX", bs_prefix)
- # refresh the remote HTML
- self._save_remote_html_page(html)
- super().update()
-
-
-class RenderableMP4(Renderable):
- """Renders the timesteps of the current dataset into an MP4 file and displays the results."""
-
- def __init__(self, *args, **kwargs) -> None:
- super().__init__(*args, **kwargs)
- self._rendertype = "animation"
- self._generate_url()
- # the HTML serves up a PNG file
- pathname, filename = self._generate_filename(".mp4")
- self._mp4_pathname = pathname
- self._mp4_filename = filename
- # the download is the mp4 file
- self._download_names.append(self._mp4_filename)
- self.update()
-
- def update(self):
- """Update the animation and display it.
-
- If the renderable is part of a Jupyter notebook cell, that cell is updated as an
- iframe reference.
-
- """
- # save the image file on the remote host
- w, h = self._default_size(1920, 1080)
- # Assume this is a particle trace animation save...
- num_frames = self._num_frames
- st = 0
- if self._num_frames is None:
- # get the timestep limits, [0,0] is non-time varying
- st, en = self._session.ensight.objs.core.TIMESTEP_LIMITS
- num_frames = en - st + 1
- self._session.ensight.file.animation_rend_offscreen("ON")
- self._session.ensight.file.animation_screen_tiling(1, 1)
- self._session.ensight.file.animation_format("mpeg4")
- self._session.ensight.file.animation_format_options("Quality High Type 1")
- self._session.ensight.file.animation_frame_rate(self._fps)
- self._session.ensight.file.animation_rend_offscreen("ON")
- self._session.ensight.file.animation_numpasses(self._aa)
- self._session.ensight.file.animation_stereo("mono")
- self._session.ensight.file.animation_screen_tiling(1, 1)
- self._session.ensight.file.animation_file(self._mp4_pathname)
- self._session.ensight.file.animation_window_size("user_defined")
- self._session.ensight.file.animation_window_xy(w, h)
- self._session.ensight.file.animation_frames(num_frames)
- self._session.ensight.file.animation_start_number(st)
- self._session.ensight.file.animation_multiple_images("OFF")
- self._session.ensight.file.animation_raytrace_it("OFF")
- self._session.ensight.file.animation_raytrace_ext("OFF")
- self._session.ensight.file.animation_play_flipbook("OFF")
- self._session.ensight.file.animation_play_keyframe("OFF")
-
- if self._num_frames is None:
- # playing over time
- self._session.ensight.file.animation_play_time("ON")
- self._session.ensight.file.animation_reset_traces("OFF")
- self._session.ensight.file.animation_reset_time("ON")
- else:
- # recording particle traces/etc
- self._session.ensight.file.animation_play_time("OFF")
- self._session.ensight.file.animation_reset_traces("ON")
- self._session.ensight.file.animation_reset_time("OFF")
-
- self._session.ensight.file.animation_reset_flipbook("OFF")
- self._session.ensight.file.animation_reset_keyframe("OFF")
- self._session.ensight.file.save_animation()
-
- # generate HTML page with file references local to the websocket server root
- html = '\n'
-        html += f'\n'
- html += "\n"
-
- # refresh the remote HTML
- self._save_remote_html_page(html)
- super().update()
-
-
-class RenderableWebGL(Renderable):
- """Renders an AVZ file (WebGL renderable) on the EnSight host system and makes it available via
- a webpage.
- """
-
- def __init__(self, *args, **kwargs) -> None:
- super().__init__(*args, **kwargs)
- self._rendertype = "webgl"
- self._generate_url()
- pathname, filename = self._generate_filename(".avz")
- self._avz_pathname = pathname
- self._avz_filename = filename
- # the download is the avz file
- self._download_names.append(self._avz_filename)
- self.update()
-
- def update(self):
- """Update the WebGL geometry and display it.
-
- If the renderable is part of a Jupyter notebook cell, that cell is updated as
- an iframe reference.
- """
- # save the .avz file
- self._session.ensight.part.select_all()
- self._session.ensight.savegeom.format("avz")
- # current timestep or all of the timesteps
- ts = self._session.ensight.objs.core.TIMESTEP
- st = ts
- en = ts
- if self._temporal:
- st, en = self._session.ensight.objs.core.TIMESTEP_LIMITS
- self._session.ensight.savegeom.begin_step(st)
- self._session.ensight.savegeom.end_step(en)
- self._session.ensight.savegeom.step_by(1)
- # Save the file
- self._session.ensight.savegeom.save_geometric_entities(self._avz_pathname)
- # generate HTML page with file references local to the websocket server root
- version = _get_ansysnexus_version(self._session._cei_suffix)
- if self._using_proxy:
- # if using pim we get the static content from the front end and not
-            # from where ensight is running; thus we use a specific URI host rather than a relative one.
- html = f"\n"
- html += f"\n"
- else:
- html = f"\n"
- html += f"\n"
- # refresh the remote HTML
- self._save_remote_html_page(html)
- super().update()
-
-
-class RenderableVNC(Renderable):
- """Generates an ansys-nexus-viewer component that can be used to connect to the EnSight VNC remote image renderer."""
-
- def __init__(self, *args, **kwargs) -> None:
- super().__init__(*args, **kwargs)
- self._generate_url()
- self._rendertype = "remote"
- self.update()
-
- def _update_2023R2_or_less(self):
- """Update the remote rendering widget and display it for
-        a backend EnSight version earlier than 2024 R2.
- """
- query_params = {
- "autoconnect": "true",
- "host": self._session.html_hostname,
- "port": self._session.ws_port,
- }
- url = f"{self._http_protocol}://{self._session.html_hostname}:{self._session.html_port}"
- url += "/ansys/nexus/novnc/vnc_envision.html"
- url += self._get_query_parameters_str(query_params)
- self._url = url
-
- def update(self):
- """Update the remote rendering widget and display it.
-
- If the renderable is part of a Jupyter notebook cell, that cell is updated as an
- iframe reference.
-
- """
- optional_query = self._get_query_parameters_str()
- version = _get_ansysnexus_version(self._session._cei_suffix)
- if int(self._session._cei_suffix) < 242: # pragma: no cover
- version = ""
- self._update_2023R2_or_less() # pragma: no cover
- else:
- html = (
- f"\n"
- )
- rest_uri = (
- f"{self._http_protocol}://{self._session.html_hostname}:{self._session.html_port}"
- )
- ws_uri = (
- f"{self._http_protocol}://{self._session.html_hostname}:{self._session.ws_port}"
- )
-
- query_args = ""
- if self._using_proxy and optional_query: # pragma: no cover
- query_args = f', "extra_query_args":"{optional_query[1:]}"' # pragma: no cover
-
- attributes = ' renderer="envnc"'
- attributes += ' ui="simple"'
- attributes += ' active="true"'
- attributes += (
- " renderer_options='"
- + f'{{ "ws":"{ws_uri}", "http":"{rest_uri}", "security_token":"{self._session.secret_key}", "connect_to_running_ens":true {query_args} }}'
- + "'"
- )
-
- html += f"\n"
-
- # refresh the remote HTML
- self._save_remote_html_page(html)
- super().update()
-
-
-# Undocumented class
-class RenderableVNCAngular(Renderable):
- def __init__(self, *args, **kwargs) -> None:
- super().__init__(*args, **kwargs)
- self._generate_url()
- self._rendertype = "remote"
- self.update()
-
- def update(self):
- optional_query = self._get_query_parameters_str()
- version = _get_ansysnexus_version(self._session._cei_suffix)
- base_content = f"""
-
-
-
-
- WebEnSight
-
-
-
-
-
-"""
- module_with_attributes = "\n \n"
- script_src = '\n'
- content = base_content + module_with_attributes + script_src
- self._save_remote_html_page(content)
- super().update()
-
-
-class RenderableEVSN(Renderable):
- """Generates a URL that can be used to connect to the EnVision VNC remote image renderer."""
-
- def __init__(self, *args, **kwargs) -> None:
- super().__init__(*args, **kwargs)
- self._rendertype = "remote_scene"
- self._generate_url()
- pathname, filename = self._generate_filename(".evsn")
- self._evsn_pathname = pathname
- self._evsn_filename = filename
- pathname, filename = self._generate_filename(".png")
- self._proxy_pathname = pathname
- self._proxy_filename = filename
- # the download is the evsn file
- self._download_names.append(self._evsn_filename)
- self.update()
-
- def update(self):
- """Update the remote rendering widget and display it.
-
- If the renderable is part of a Jupyter notebook cell, that cell is updated as an
- iframe reference.
-
- """
- # Save the proxy image
- w, h = self._default_size(1920, 1080)
- cmd = f'ensight.render({w},{h},num_samples={self._aa}).save(r"""{self._proxy_pathname}""")'
- self._session.cmd(cmd)
- # save the .evsn file
- self._session.ensight.file.save_scenario_which_parts("all")
- self._session.ensight.file.scenario_format("envision")
- # current timestep or all of the timesteps
- if self._temporal:
- st, en = self._session.ensight.objs.core.TIMESTEP_LIMITS
- self._session.ensight.file.scenario_steptime_anim(1, st, en, 1.0)
- else:
- self._session.ensight.file.scenario_steptime_anim(0, 1, 1, 1)
- varlist = self._session.ensight.objs.core.VARIABLES.find(True, "ACTIVE")
- vars = [x.DESCRIPTION for x in varlist]
- self._session.ensight.variables.select_byname_begin(vars)
- # Save the file
- self._session.ensight.file.save_scenario_fileslct(self._evsn_pathname)
-
- # generate HTML page with file references local to the websocketserver root
- optional_query = self._get_query_parameters_str()
- version = _get_ansysnexus_version(self._session._cei_suffix)
- html = f"\n"
- server = f"{self._http_protocol}://{self._session.html_hostname}:{self._session.html_port}"
-
- # FIXME: This method doesn't work with Ansys Lab since the viewer seems to require
- # a full pathname to the file being generated by EnSight on a shared file system.
-        # The commented-out line below should replace the assignment just above it, but that
- # prevents running locally from working since it's not using the full pathname to
- # the shared file. -MFK
- cleanname = self._evsn_filename.replace("\\", "/")
- attributes = f"src='{cleanname}'"
- # attributes = f"src='{cleanname}{optional_query}'"
-
- attributes += f" proxy_img='/{self._proxy_filename}{optional_query}'"
- attributes += " aspect_ratio='proxy'"
- attributes += " renderer='envnc'"
- http_uri = f'"http":"{server}"'
- ws_uri = (
- f'"ws":"{self._http_protocol}://{self._session.html_hostname}:{self._session.ws_port}"'
- )
- secrets = f'"security_token":"{self._session.secret_key}"'
- if not self._using_proxy or not optional_query: # pragma: no cover
- attributes += f" renderer_options='{{ {http_uri}, {ws_uri}, {secrets} }}'"
- elif self._using_proxy and optional_query: # pragma: no cover
- query_args = f'"extra_query_args":"{optional_query[1:]}"' # pragma: no cover
- attributes += f" renderer_options='{{ {http_uri}, {ws_uri}, {secrets}, {query_args} }}'" # pragma: no cover
- html += f"\n"
- # refresh the remote HTML
- self._save_remote_html_page(html)
- super().update()
-
-
-class RenderableSGEO(Renderable): # pragma: no cover
- """Generates a WebGL-based renderable that leverages the SGEO format/viewer interface for progressive geometry transport."""
-
- def __init__(self, *args, **kwargs) -> None: # pragma: no cover
- super().__init__(*args, **kwargs)
- self._generate_url()
- pathname, filename = self._generate_filename("")
- # on the server, a JSON block can be accessed via:
- # {_sgeo_base_pathname}/geometry.sgeo
- # and the update files:
- # {_sgeo_base_pathname}/{names}.bin
- self._sgeo_base_pathname = pathname
- self._sgeo_base_filename = filename
- # Create the directory where the sgeo files will go '/{filename}/' URL base
- cmd = f'import os\nos.mkdir(r"""{self._sgeo_base_pathname}""")\n'
- self._session.cmd(cmd, do_eval=False)
- # get a stream ID
- self._stream_id = self._session.ensight.dsg_new_stream(sgeo=1)
- #
- self._revision = 0
- self.update()
-
- def update(self): # pragma: no cover
- """Generate a SGEO geometry file.
-
- This method causes the EnSight session to generate an updated geometry SGEO
- file and content and then display the results in any attached WebGL viewer.
-
- If the renderable is part of a Jupyter notebook cell, that cell is updated as an
- iframe reference.
-
- """
- # Ask for an update to be generated
- remote_filename = f"{self._sgeo_base_pathname}/geometry.sgeo"
- self._session.ensight.dsg_save_update(
- remote_filename,
- urlprefix=f"/{self._sgeo_base_filename}/",
- stream=self._stream_id,
- )
-
- # Update the proxy image
- self._update_proxy()
-
- # If the first update, generate the HTML
- if self._revision == 0:
- # generate HTML page with file references local to the websocketserver root
- attributes = (
- f"src='/{self._sgeo_base_filename}/geometry.sgeo{self._get_query_parameters_str()}'"
- )
- attributes += f" proxy_img='/{self._sgeo_base_filename}/proxy.png{self._get_query_parameters_str()}'"
- attributes += " aspect_ratio='proxy'"
- attributes += " renderer='sgeo'"
- version = _get_ansysnexus_version(self._session._cei_suffix)
- html = f"\n"
- html += f"\n"
- html += self._periodic_script()
- # refresh the remote HTML
- self._save_remote_html_page(html)
- # Subsequent updates are handled by the component itself
- super().update()
-
- # update the revision file
- rev_filename = f"{self._sgeo_base_pathname}/geometry.rev"
- cmd = f'with open(r"""{rev_filename}""", "w") as fp:\n'
- cmd += f' fp.write("{self._revision}")\n'
- self._session.cmd(cmd, do_eval=False)
-
- self._revision += 1
-
- def _update_proxy(self):
- """Replace the current proxy image with the current view."""
- # save a proxy image
- w, h = self._default_size(1920, 1080)
- remote_filename = f"{self._sgeo_base_pathname}/proxy.png"
- cmd = f'ensight.render({w},{h},num_samples={self._aa}).save(r"""{remote_filename}""")'
- self._session.cmd(cmd, do_eval=False)
-
- def delete(self) -> None:
- try:
- _ = self._session.ensight.dsg_close_stream(self._stream_id)
- except Exception:
- pass
- super().delete()
-
- def _periodic_script(self) -> str:
- html_source = os.path.join(os.path.dirname(__file__), "sgeo_poll.html")
- with open(html_source, "r") as fp:
- html = fp.read()
- revision_uri = f"/{self._sgeo_base_filename}/geometry.rev{self._get_query_parameters_str()}"
- html = html.replace("REVURL_ITEMID", revision_uri)
- html = html.replace("ITEMID", self._guid)
- return html
-
-
-class RenderableFluidsWebUI(Renderable):
- def __init__(self, *args, **kwargs) -> None:
- super().__init__(*args, **kwargs)
- self._session.ensight_version_check("2025 R1")
- warnings.warn("The webUI is still under active development and should be considered beta.")
- self._rendertype = "webui"
- self._generate_url()
- self.update()
-
- def _generate_url(self) -> None:
- sha256_hash = hashlib.sha256()
- sha256_hash.update(self._session._secret_key.encode())
- token = sha256_hash.hexdigest()
- optional_query = self._get_query_parameters_str()
- port = self._session._webui_port
- if "instance_name" in self._session._launcher._get_query_parameters():
-            # If using PIM, the port needs to be the 443 HTTPS port
-            port = self._session.html_port
-            # In the webUI code there's already a workflow to pass down the query parameter
-            # ans_instance_id, just use it
-            instance_name = self._session._launcher._get_query_parameters()["instance_name"]
- optional_query = f"?ans_instance_id={instance_name}"
- url = f"{self._http_protocol}://{self._session.html_hostname}:{port}"
- url += f"{optional_query}#{token}"
- self._url = url
+"""Renderable module.
+
+This module provides the interface for creating objects in the EnSight session
+that can be displayed via HTML over the websocket server interface.
+"""
+import hashlib
+import os
+import shutil
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, no_type_check
+import uuid
+import warnings
+import webbrowser
+
+import requests
+
+if TYPE_CHECKING:
+ from ansys.pyensight.core import Session
+
+
+def _get_ansysnexus_version(version: Union[int, str]) -> str:
+ if int(version) < 242:
+ return ""
+ return str(version)
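The helper returns an empty string for CEI suffixes below 242, so that the unversioned
``/ansys/nexus/`` path is used; newer suffixes are embedded in the viewer URL. From the
implementation above:

    >>> _get_ansysnexus_version(241)
    ''
    >>> _get_ansysnexus_version("243")
    '243'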
+
+
+class Renderable:
+ """Generates HTML pages for renderable entities.
+
+ This class provides the underlying HTML remote webpage generation for
+ the :func:`show` method. The approach
+ is to generate the renderable in the EnSight session and make the
+ artifacts available via the websocket server. The artifacts are then
+ wrapped with simple HTML pages, which are also served up by the websocket
+ server. These HTML pages can then be used to populate iframes.
+
+ Parameters
+ ----------
+ session :
+ PyEnSight session to generate renderables for.
+ cell_handle :
+ Jupyter notebook cell handle (if any). The default is ``None``.
+ width : int, optional
+ Width of the renderable. The default is ``None``.
+ height : int, optional
+ Height of the renderable. The default is ``None``.
+ temporal : bool, optional
+ Whether to show data for all timesteps in an interactive
+ WebGL-based browser viewer. The default is ``False``.
+ aa : int, optional
+ Number of antialiasing passes to use when rendering images.
+ The default is ``1``.
+ fps : float, optional
+ Number of frames per second to use for animation playback. The
+ default is ``30.0``.
+ num_frames : int, optional
+        Number of frames to record at a static timestep for animation playback.
+ The default is ``None``.
+
+ """
+
+ def __init__(
+ self,
+ session: "Session",
+ cell_handle: Optional[Any] = None,
+ width: Optional[int] = None,
+ height: Optional[int] = None,
+ temporal: bool = False,
+ aa: int = 1,
+ fps: float = 30.0,
+ num_frames: Optional[int] = None,
+ ) -> None:
+ self._session = session
+ self._filename_index: int = 0
+ self._guid: str = str(uuid.uuid1()).replace("-", "")
+ self._download_names: List[str] = []
+ # The Jupyter notebook cell handle (if any)
+ self._cell_handle = cell_handle
+ # the URL to the base HTML file for this entity
+ self._url: Optional[str] = None
+ # the pathname of the HTML file in the remote EnSight session
+ self._url_remote_pathname: Optional[str] = None
+ # the name passed to the pyensight session show() string
+ self._rendertype: str = ""
+ # Common attributes used by various subclasses
+ self._width: Optional[int] = width
+ self._height: Optional[int] = height
+ self._temporal: bool = temporal
+ self._aa: int = aa
+ self._fps: float = fps
+ self._num_frames: Optional[int] = num_frames
+ #
+ self._using_proxy = False
+ # if we're talking directly to WS, then use 'http' otherwise 'https' for the proxy
+ self._http_protocol = "http"
+ try:
+ if self._session.launcher._pim_instance is not None:
+ self._using_proxy = True
+ self._http_protocol = "https"
+ except Exception:
+ # the launcher may not be PIM aware; that's ok
+ pass
+
+ def __repr__(self) -> str:
+ name = self.__class__.__name__
+ return f"{name}( url='{self._url}' )"
+
+ @no_type_check
+ def _repr_pretty_(self, p: "pretty", cycle: bool) -> None:
+ """Support the pretty module for better IPython support.
+
+ Parameters
+ ----------
+        p : pretty
+            IPython pretty printer instance used to format the representation.
+        cycle : bool
+            Whether a reference cycle was detected during formatting.
+
+ """
+ name = self.__class__.__name__
+ p.text(f"{name}( url='{self._url}' )")
+
+ def _get_query_parameters_str(self, params: Optional[Dict[str, str]] = None) -> str:
+ """Generate any optional http query parameters.
+        Return a string formatted as a URL query to append to the
+        end of the URL. The string may be empty if there
+        aren't any parameters. The method takes a dict
+        of parameters, possibly empty or None, and combines it with the
+        parameters from the launcher, which may also be empty.
+ """
+ qp_dict = self._session.launcher._get_query_parameters()
+ if qp_dict is None:
+ # just in case
+ qp_dict = {}
+ if params:
+ qp_dict.update(params)
+ query_parameter_str = ""
+ symbol = "?"
+ for p in qp_dict.items():
+ query_parameter_str += f"{symbol}{p[0]}={p[1]}"
+ symbol = "&"
+ return query_parameter_str
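The loop chains parameters with ``?`` and then ``&`` in insertion order. A
self-contained sketch of the same logic, assuming the launcher and caller parameters
are already merged into one dict:

    qp_dict = {"autoconnect": "true", "port": "8008"}
    query = ""
    symbol = "?"
    for key, value in qp_dict.items():
        query += f"{symbol}{key}={value}"
        symbol = "&"
    assert query == "?autoconnect=true&port=8008"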
+
+ def _generate_filename(self, suffix: str) -> Tuple[str, str]:
+ """Create session-specific files and URLs.
+
+ Every time this method is called, a new filename (on the EnSight host)
+ and the associated URL for that file are generated. The caller
+ provides the suffix for the names.
+
+ Parameters
+ ----------
+ suffix: str
+ Suffix of the file.
+
+ Returns
+ -------
+ Tuple[str, str]
+ Filename to use on the host system and the URL that accesses the
+ file via REST calls to the websocket server.
+
+ """
+ filename = f"{self._session.secret_key}_{self._guid}_{self._filename_index}{suffix}"
+ # Note: cannot use os.path.join here as the OS of the EnSight session might not match
+ # the client OS.
+ pathname = f"{self._session.launcher.session_directory}/{filename}"
+ self._filename_index += 1
+ return pathname, filename
+
+ def _generate_url(self) -> None:
+ """Build the remote HTML filename and associated URL.
+
+        On the remote system, the pathname to the HTML file is
+ ``{session_directory}/{session}_{guid}_{index}_{type}.html``.
+ The URL to the file (through the session HTTP server) is
+ ``http://{system}:{websocketserverhtmlport}/{session}_{guid}_{index}_{type}.html``.
+
+ Note that there may be optional http query parameters at the end of the URL.
+
+ After this call, ``_url`` and ``_url_remote_pathname`` reflect these names.
+
+ """
+ suffix = f"_{self._rendertype}.html"
+ filename_index = self._filename_index
+ remote_pathname, _ = self._generate_filename(suffix)
+ simple_filename = f"{self._session.secret_key}_{self._guid}_{filename_index}{suffix}"
+ url = f"{self._http_protocol}://{self._session.html_hostname}:{self._session.html_port}"
+ self._url = f"{url}/{simple_filename}{self._get_query_parameters_str()}"
+ self._url_remote_pathname = remote_pathname
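For a concrete feel of the naming scheme, a secret key of ``abc``, a GUID of ``123``,
index ``0``, and the ``image`` render type would produce (all values illustrative):

    # remote pathname: {session_directory}/abc_123_0_image.html
    # url:             http://{hostname}:{html_port}/abc_123_0_image.html
    #                  plus any optional query string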
+
+ def _save_remote_html_page(self, html: str) -> None:
+ """Create an HTML webpage on the remote host.
+
+ Given a snippet of HTML, create a file on the remote server with the
+ name generated by the ``_generate_url()`` method.
+ The most common use is to generate an "iframe" wrapper around some HTML
+ snippet.
+
+ Parameters
+ ----------
+ html : str
+ HTML snippet to wrap remotely.
+
+ """
+ # save "html" into a file on the remote server with filename .html
+ cmd = f'open(r"""{self._url_remote_pathname}""", "w").write("""{html}""")'
+ self._session.grpc.command(cmd, do_eval=False)
+
+ def browser(self) -> None:
+ """Open a web browser page to display the renderable content."""
+ if self._url:
+ webbrowser.open(self._url)
+
+ @property
+ def url(self) -> Optional[str]:
+ """URL to the renderable content."""
+ return self._url
+
+ def _default_size(self, width: int, height: int) -> Tuple[int, int]:
+ """Propose and return a size for a rectangle.
+
+ The renderable may have been constructed with user-supplied width and height
+ information. If so, that information is returned. If not, the width and
+ height values passed to this method are returned.
+
+ Parameters
+ ----------
+ width : int
+ Width value to return if the renderable does not have a width.
+ height : int
+ Height value to return if the renderable does not have a height.
+
+ Returns
+ -------
+ Tuple[int, int]
+ Tuple (width, height) of the size values to use.
+
+ """
+ out_w = self._width
+ if out_w is None:
+ out_w = width
+ out_h = self._height
+ if out_h is None:
+ out_h = height
+ return out_w, out_h
+
+ def update(self) -> None:
+ """Update the visualization and display it.
+
+ When this method is called, the graphics content is updated to the
+ current EnSight instance state. For example, an image might be re-captured.
+ The URL of the content stays the same, but the content that the URL displays is
+ updated.
+
+ If the renderable was created in the context of a Jupyter notebook cell,
+ the original cell display is updated.
+
+ """
+ if self._cell_handle:
+ from IPython.display import IFrame
+
+ width, height = self._default_size(800, 600)
+ self._cell_handle.update(IFrame(src=self._url, width=width, height=height))
+
+ def delete(self) -> None:
+ """Delete all server resources for the renderable.
+
+ A renderable occupies resources in the EnSight :class:`Session`
+ instance. This method releases those resources. Once this method is called, the renderable
+ can no longer be displayed.
+
+ Notes
+ -----
+ This method has not yet been implemented.
+
+ """
+ pass
+
+ def download(self, dirname: str) -> List[str]:
+ """Download the content files for the renderable.
+
+ A renderable saves files (such as images, mpegs, and geometry) in the EnSight instance.
+ Normally, these files are accessed via the webpage specified in the URL property.
+ This method allows for those files to be downloaded to a local directory so that they
+ can be used for other purposes.
+
+ .. note::
+ Any previously existing files with the same name are overwritten.
+
+ Parameters
+ ----------
+ dirname : str
+ Name of the existing directory to save the files to.
+
+
+ Returns
+ -------
+ list
+ List of names for the downloaded files.
+
+ Examples
+ --------
+ Download the PNG file generated by the image renderable.
+
+ >>> img = session.show('image", width=640, height=480, aa=4)
+ >>> names = img.download("/tmp")
+ >>> png_pathname = os.path.join("/tmp", names[0])
+
+ """
+ for filename in self._download_names:
+ url = f"{self._http_protocol}://{self._session.html_hostname}:{self._session.html_port}/{filename}{self._get_query_parameters_str()}"
+ outpath = os.path.join(dirname, filename)
+ with requests.get(url, stream=True) as r:
+ with open(outpath, "wb") as f:
+ shutil.copyfileobj(r.raw, f)
+ return self._download_names
+
+
+class RenderableImage(Renderable):
+ """Renders an image on the EnSight host system and makes it available via a webpage."""
+
+ def __init__(self, *args, **kwargs) -> None:
+ """Initialize RenderableImage."""
+ super().__init__(*args, **kwargs)
+ self._rendertype = "image"
+ self._generate_url()
+ # the HTML serves up a PNG file
+ pathname, filename = self._generate_filename(".png")
+ self._png_pathname = pathname
+ self._png_filename = filename
+ # the download is the png file
+ self._download_names.append(self._png_filename)
+ self.update()
+
+ def update(self):
+ """Update the image and display it.
+
+ If the renderable is part of a Jupyter notebook cell, that cell is updated
+ as an iframe reference.
+
+ """
+ # save the image file on the remote host
+ w, h = self._default_size(1920, 1080)
+ cmd = f'ensight.render({w},{h},num_samples={self._aa}).save(r"""{self._png_pathname}""")'
+ self._session.cmd(cmd)
+ # generate HTML page with file references local to the websocket server root
+ html = '\n'
+ html += f'\n'
+ html += "\n"
+ # refresh the remote HTML
+ self._save_remote_html_page(html)
+ super().update()
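A typical way to exercise this class is through the session ``show()`` call used in the
``download()`` example above; a short sketch assuming a live session:

    >>> from ansys.pyensight.core import LocalLauncher
    >>> session = LocalLauncher().start()
    >>> img = session.show("image", width=640, height=480, aa=4)
    >>> img.browser()  # open the served page in a local web browser
    >>> img.update()   # re-capture the image after changing the scene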
+
+
+class RenderableDeepPixel(Renderable):
+ """Renders a deep pixel image on the EnSight host system and makes it available via a webpage."""
+
+ def __init__(self, *args, **kwargs) -> None:
+ super().__init__(*args, **kwargs)
+ self._rendertype = "deep_pixel"
+ self._generate_url()
+ pathname, filename = self._generate_filename(".tif")
+ self._tif_pathname = pathname
+ self._tif_filename = filename
+ # the download is the tiff file
+ self._download_names.append(self._tif_filename)
+ self.update()
+
+ def update(self):
+ """Update the deep pixel image and display it.
+
+ If the renderable is part of a Jupyter notebook cell, that cell is updated as
+ an iframe reference.
+ """
+ # save the (deep) image file
+ # get the optional query parameters which may be an empty string
+ # needed for proxy servers like ansys lab
+ optional_query = self._get_query_parameters_str()
+ w, h = self._default_size(1920, 1080)
+ deep = f",num_samples={self._aa},enhanced=1"
+ cmd = f'ensight.render({w},{h}{deep}).save(r"""{self._tif_pathname}""")'
+ self._session.cmd(cmd)
+ html_source = os.path.join(os.path.dirname(__file__), "deep_pixel_view.html")
+ with open(html_source, "r") as fp:
+ html = fp.read()
+ # copy some files from Nexus
+ cmd = "import shutil, enve, ceiversion, os.path\n"
+ base_name = "os.path.join(enve.home(), f'nexus{ceiversion.nexus_suffix}', 'django', "
+ base_name += "'website', 'static', 'website', 'scripts', "
+ for script in ["geotiff.js", "geotiff_nexus.js", "bootstrap.min.js"]:
+ name = base_name + f"'{script}')"
+ cmd += f'shutil.copy({name}, r"""{self._session.launcher.session_directory}""")\n'
+ name = "os.path.join(enve.home(), f'nexus{ceiversion.nexus_suffix}', 'django', "
+ name += "'website', 'static', 'website', 'content', 'bootstrap.min.css')"
+ cmd += f'shutil.copy({name}, r"""{self._session.launcher.session_directory}""")\n'
+ self._session.cmd(cmd, do_eval=False)
+        # With Bootstrap 5 (2025 R1), data attribute names include 'bs-', e.g. 'data-bs-toggle' vs 'data-toggle'
+ bs_prefix = "bs-"
+ jquery_version = ""
+ if int(self._session._cei_suffix) < 251:
+ bs_prefix = ""
+ jquery_version = "-3.4.1"
+ jquery = f"jquery{jquery_version}.min.js"
+ cmd = "import shutil, enve, ceiversion, os.path\n"
+ name = base_name + f"'{jquery}')"
+ cmd += "try:"
+ cmd += f' shutil.copy({name}, r"""{self._session.launcher.session_directory}""")\n'
+ cmd += "except Exception:"
+ cmd += " pass"
+ name = "os.path.join(enve.home(), f'nexus{ceiversion.nexus_suffix}', 'django', "
+ name += "'website', 'static', 'website', 'content', 'bootstrap.min.css')"
+ cmd += f'shutil.copy({name}, r"""{self._session.launcher.session_directory}""")\n'
+ self._session.cmd(cmd, do_eval=False)
+ url = f"{self._http_protocol}://{self._session.html_hostname}:{self._session.html_port}"
+ tiff_url = f"{url}/{self._tif_filename}{optional_query}"
+ # replace some bits in the HTML
+ html = html.replace("TIFF_URL", tiff_url)
+ html = html.replace("ITEMID", self._guid)
+ html = html.replace("OPTIONAL_QUERY", optional_query)
+ html = html.replace("JQUERY_VERSION", jquery_version)
+ html = html.replace("BS_PREFIX", bs_prefix)
+ # refresh the remote HTML
+ self._save_remote_html_page(html)
+ super().update()
+
+
+class RenderableMP4(Renderable):
+ """Renders the timesteps of the current dataset into an MP4 file and displays the results."""
+
+ def __init__(self, *args, **kwargs) -> None:
+ super().__init__(*args, **kwargs)
+ self._rendertype = "animation"
+ self._generate_url()
+ # the HTML serves up a PNG file
+ pathname, filename = self._generate_filename(".mp4")
+ self._mp4_pathname = pathname
+ self._mp4_filename = filename
+ # the download is the mp4 file
+ self._download_names.append(self._mp4_filename)
+ self.update()
+
+ def update(self):
+ """Update the animation and display it.
+
+ If the renderable is part of a Jupyter notebook cell, that cell is updated as an
+ iframe reference.
+
+ """
+ # save the image file on the remote host
+ w, h = self._default_size(1920, 1080)
+ # Assume this is a particle trace animation save...
+ num_frames = self._num_frames
+ st = 0
+ if self._num_frames is None:
+ # get the timestep limits, [0,0] is non-time varying
+ st, en = self._session.ensight.objs.core.TIMESTEP_LIMITS
+ num_frames = en - st + 1
+ self._session.ensight.file.animation_rend_offscreen("ON")
+ self._session.ensight.file.animation_screen_tiling(1, 1)
+ self._session.ensight.file.animation_format("mpeg4")
+ self._session.ensight.file.animation_format_options("Quality High Type 1")
+ self._session.ensight.file.animation_frame_rate(self._fps)
+ self._session.ensight.file.animation_rend_offscreen("ON")
+ self._session.ensight.file.animation_numpasses(self._aa)
+ self._session.ensight.file.animation_stereo("mono")
+ self._session.ensight.file.animation_screen_tiling(1, 1)
+ self._session.ensight.file.animation_file(self._mp4_pathname)
+ self._session.ensight.file.animation_window_size("user_defined")
+ self._session.ensight.file.animation_window_xy(w, h)
+ self._session.ensight.file.animation_frames(num_frames)
+ self._session.ensight.file.animation_start_number(st)
+ self._session.ensight.file.animation_multiple_images("OFF")
+ self._session.ensight.file.animation_raytrace_it("OFF")
+ self._session.ensight.file.animation_raytrace_ext("OFF")
+ self._session.ensight.file.animation_play_flipbook("OFF")
+ self._session.ensight.file.animation_play_keyframe("OFF")
+
+ if self._num_frames is None:
+ # playing over time
+ self._session.ensight.file.animation_play_time("ON")
+ self._session.ensight.file.animation_reset_traces("OFF")
+ self._session.ensight.file.animation_reset_time("ON")
+ else:
+ # recording particle traces/etc
+ self._session.ensight.file.animation_play_time("OFF")
+ self._session.ensight.file.animation_reset_traces("ON")
+ self._session.ensight.file.animation_reset_time("OFF")
+
+ self._session.ensight.file.animation_reset_flipbook("OFF")
+ self._session.ensight.file.animation_reset_keyframe("OFF")
+ self._session.ensight.file.save_animation()
+
+ # generate HTML page with file references local to the websocket server root
+ html = '\n'
+        html += f'\n'
+ html += "\n"
+
+ # refresh the remote HTML
+ self._save_remote_html_page(html)
+ super().update()
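The keywords map onto the ``Renderable`` constructor: ``fps`` sets the playback rate,
and a non-``None`` ``num_frames`` switches from over-time playback to trace recording.
A sketch, assuming the ``animation`` render type maps to this class:

    >>> mp4 = session.show("animation", fps=15.0)          # all timesteps, reset time
    >>> trace = session.show("animation", num_frames=90)   # e.g. particle-trace recording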
+
+
+class RenderableWebGL(Renderable):
+ """Renders an AVZ file (WebGL renderable) on the EnSight host system and makes it available via
+ a webpage.
+ """
+
+ def __init__(self, *args, **kwargs) -> None:
+ super().__init__(*args, **kwargs)
+ self._rendertype = "webgl"
+ self._generate_url()
+ pathname, filename = self._generate_filename(".avz")
+ self._avz_pathname = pathname
+ self._avz_filename = filename
+ # the download is the avz file
+ self._download_names.append(self._avz_filename)
+ self.update()
+
+ def update(self):
+ """Update the WebGL geometry and display it.
+
+ If the renderable is part of a Jupyter notebook cell, that cell is updated as
+ an iframe reference.
+ """
+ # save the .avz file
+ self._session.ensight.part.select_all()
+ self._session.ensight.savegeom.format("avz")
+ # current timestep or all of the timesteps
+ ts = self._session.ensight.objs.core.TIMESTEP
+ st = ts
+ en = ts
+ if self._temporal:
+ st, en = self._session.ensight.objs.core.TIMESTEP_LIMITS
+ self._session.ensight.savegeom.begin_step(st)
+ self._session.ensight.savegeom.end_step(en)
+ self._session.ensight.savegeom.step_by(1)
+ # Save the file
+ self._session.ensight.savegeom.save_geometric_entities(self._avz_pathname)
+ # generate HTML page with file references local to the websocket server root
+ version = _get_ansysnexus_version(self._session._cei_suffix)
+ if self._using_proxy:
+ # if using pim we get the static content from the front end and not
+            # from where ensight is running; thus we use a specific URI host rather than a relative one.
+ html = f"\n"
+ html += f"\n"
+ else:
+ html = f"\n"
+ html += f"\n"
+ # refresh the remote HTML
+ self._save_remote_html_page(html)
+ super().update()
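The ``temporal`` flag widens the saved step range from the current timestep to the full
``TIMESTEP_LIMITS`` span. A sketch, assuming the ``webgl`` render type maps to this
class:

    >>> avz = session.show("webgl", temporal=True)  # export every timestep into the AVZ file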
+
+
+class RenderableVNC(Renderable):
+ """Generates an ansys-nexus-viewer component that can be used to connect to the EnSight VNC remote image renderer."""
+
+ def __init__(self, *args, **kwargs) -> None:
+ super().__init__(*args, **kwargs)
+ self._generate_url()
+ self._rendertype = "remote"
+ self.update()
+
+ def _update_2023R2_or_less(self):
+ """Update the remote rendering widget and display it for
+        a backend EnSight version earlier than 2024 R2.
+ """
+ query_params = {
+ "autoconnect": "true",
+ "host": self._session.html_hostname,
+ "port": self._session.ws_port,
+ }
+ url = f"{self._http_protocol}://{self._session.html_hostname}:{self._session.html_port}"
+ url += "/ansys/nexus/novnc/vnc_envision.html"
+ url += self._get_query_parameters_str(query_params)
+ self._url = url
+
+ def update(self):
+ """Update the remote rendering widget and display it.
+
+ If the renderable is part of a Jupyter notebook cell, that cell is updated as an
+ iframe reference.
+
+ """
+ optional_query = self._get_query_parameters_str()
+ version = _get_ansysnexus_version(self._session._cei_suffix)
+ if int(self._session._cei_suffix) < 242: # pragma: no cover
+ version = ""
+ self._update_2023R2_or_less() # pragma: no cover
+ else:
+ html = (
+ f"\n"
+ )
+ rest_uri = (
+ f"{self._http_protocol}://{self._session.html_hostname}:{self._session.html_port}"
+ )
+ ws_uri = (
+ f"{self._http_protocol}://{self._session.html_hostname}:{self._session.ws_port}"
+ )
+
+ query_args = ""
+ if self._using_proxy and optional_query: # pragma: no cover
+ query_args = f', "extra_query_args":"{optional_query[1:]}"' # pragma: no cover
+
+ attributes = ' renderer="envnc"'
+ attributes += ' ui="simple"'
+ attributes += ' active="true"'
+ attributes += (
+ " renderer_options='"
+ + f'{{ "ws":"{ws_uri}", "http":"{rest_uri}", "security_token":"{self._session.secret_key}", "connect_to_running_ens":true {query_args} }}'
+ + "'"
+ )
+
+ html += f"\n"
+
+ # refresh the remote HTML
+ self._save_remote_html_page(html)
+ super().update()
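The ``renderer_options`` attribute carries a small JSON object consumed by the viewer
component. A sketch of its shape with placeholder values:

    renderer_options = {
        "ws": "http://host:1234",        # websocket endpoint (placeholder)
        "http": "http://host:5678",      # REST endpoint (placeholder)
        "security_token": "secret",      # session secret key (placeholder)
        "connect_to_running_ens": True,  # attach to the running EnSight instance
    }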
+
+
+# Undocumented class
+class RenderableVNCAngular(Renderable):
+ def __init__(self, *args, **kwargs) -> None:
+ super().__init__(*args, **kwargs)
+ self._generate_url()
+ self._rendertype = "remote"
+ self.update()
+
+ def update(self):
+ optional_query = self._get_query_parameters_str()
+ version = _get_ansysnexus_version(self._session._cei_suffix)
+ base_content = f"""
+
+
+
+
+ WebEnSight
+
+
+
+
+
+"""
+ module_with_attributes = "\n \n"
+ script_src = '\n'
+ content = base_content + module_with_attributes + script_src
+ self._save_remote_html_page(content)
+ super().update()
+
+
+class RenderableEVSN(Renderable):
+ """Generates a URL that can be used to connect to the EnVision VNC remote image renderer."""
+
+ def __init__(self, *args, **kwargs) -> None:
+ super().__init__(*args, **kwargs)
+ self._rendertype = "remote_scene"
+ self._generate_url()
+ pathname, filename = self._generate_filename(".evsn")
+ self._evsn_pathname = pathname
+ self._evsn_filename = filename
+ pathname, filename = self._generate_filename(".png")
+ self._proxy_pathname = pathname
+ self._proxy_filename = filename
+ # the download is the evsn file
+ self._download_names.append(self._evsn_filename)
+ self.update()
+
+ def update(self):
+ """Update the remote rendering widget and display it.
+
+ If the renderable is part of a Jupyter notebook cell, that cell is updated as an
+ iframe reference.
+
+ """
+ # Save the proxy image
+ w, h = self._default_size(1920, 1080)
+ cmd = f'ensight.render({w},{h},num_samples={self._aa}).save(r"""{self._proxy_pathname}""")'
+ self._session.cmd(cmd)
+ # save the .evsn file
+ self._session.ensight.file.save_scenario_which_parts("all")
+ self._session.ensight.file.scenario_format("envision")
+ # current timestep or all of the timesteps
+ if self._temporal:
+ st, en = self._session.ensight.objs.core.TIMESTEP_LIMITS
+ self._session.ensight.file.scenario_steptime_anim(1, st, en, 1.0)
+ else:
+ self._session.ensight.file.scenario_steptime_anim(0, 1, 1, 1)
+ varlist = self._session.ensight.objs.core.VARIABLES.find(True, "ACTIVE")
+ vars = [x.DESCRIPTION for x in varlist]
+ self._session.ensight.variables.select_byname_begin(vars)
+ # Save the file
+ self._session.ensight.file.save_scenario_fileslct(self._evsn_pathname)
+
+ # generate HTML page with file references local to the websocketserver root
+ optional_query = self._get_query_parameters_str()
+ version = _get_ansysnexus_version(self._session._cei_suffix)
+ html = f"\n"
+ server = f"{self._http_protocol}://{self._session.html_hostname}:{self._session.html_port}"
+
+ # FIXME: This method doesn't work with Ansys Lab since the viewer seems to require
+ # a full pathname to the file being generated by EnSight on a shared file system.
+        # The commented-out line below should replace the assignment just above it, but that
+ # prevents running locally from working since it's not using the full pathname to
+ # the shared file. -MFK
+ cleanname = self._evsn_filename.replace("\\", "/")
+ attributes = f"src='{cleanname}'"
+ # attributes = f"src='{cleanname}{optional_query}'"
+
+ attributes += f" proxy_img='/{self._proxy_filename}{optional_query}'"
+ attributes += " aspect_ratio='proxy'"
+ attributes += " renderer='envnc'"
+ http_uri = f'"http":"{server}"'
+ ws_uri = (
+ f'"ws":"{self._http_protocol}://{self._session.html_hostname}:{self._session.ws_port}"'
+ )
+ secrets = f'"security_token":"{self._session.secret_key}"'
+ if not self._using_proxy or not optional_query: # pragma: no cover
+ attributes += f" renderer_options='{{ {http_uri}, {ws_uri}, {secrets} }}'"
+ elif self._using_proxy and optional_query: # pragma: no cover
+ query_args = f'"extra_query_args":"{optional_query[1:]}"' # pragma: no cover
+ attributes += f" renderer_options='{{ {http_uri}, {ws_uri}, {secrets}, {query_args} }}'" # pragma: no cover
+ html += f"\n"
+ # refresh the remote HTML
+ self._save_remote_html_page(html)
+ super().update()
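A usage sketch, assuming the ``remote_scene`` render type maps to this class:

    >>> scene = session.show("remote_scene", width=800, height=600, temporal=True)
    >>> scene.download("/tmp")  # retrieves the saved .evsn scenario file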
+
+
+class RenderableSGEO(Renderable): # pragma: no cover
+ """Generates a WebGL-based renderable that leverages the SGEO format/viewer interface for progressive geometry transport."""
+
+ def __init__(self, *args, **kwargs) -> None: # pragma: no cover
+ super().__init__(*args, **kwargs)
+ self._generate_url()
+ pathname, filename = self._generate_filename("")
+ # on the server, a JSON block can be accessed via:
+ # {_sgeo_base_pathname}/geometry.sgeo
+ # and the update files:
+ # {_sgeo_base_pathname}/{names}.bin
+ self._sgeo_base_pathname = pathname
+ self._sgeo_base_filename = filename
+ # Create the directory where the sgeo files will go '/{filename}/' URL base
+ cmd = f'import os\nos.mkdir(r"""{self._sgeo_base_pathname}""")\n'
+ self._session.cmd(cmd, do_eval=False)
+ # get a stream ID
+ self._stream_id = self._session.ensight.dsg_new_stream(sgeo=1)
+ #
+ self._revision = 0
+ self.update()
+
+ def update(self): # pragma: no cover
+ """Generate a SGEO geometry file.
+
+ This method causes the EnSight session to generate an updated geometry SGEO
+ file and content and then display the results in any attached WebGL viewer.
+
+ If the renderable is part of a Jupyter notebook cell, that cell is updated as an
+ iframe reference.
+
+ """
+ # Ask for an update to be generated
+ remote_filename = f"{self._sgeo_base_pathname}/geometry.sgeo"
+ self._session.ensight.dsg_save_update(
+ remote_filename,
+ urlprefix=f"/{self._sgeo_base_filename}/",
+ stream=self._stream_id,
+ )
+
+ # Update the proxy image
+ self._update_proxy()
+
+ # If the first update, generate the HTML
+ if self._revision == 0:
+ # generate HTML page with file references local to the websocketserver root
+ attributes = (
+ f"src='/{self._sgeo_base_filename}/geometry.sgeo{self._get_query_parameters_str()}'"
+ )
+ attributes += f" proxy_img='/{self._sgeo_base_filename}/proxy.png{self._get_query_parameters_str()}'"
+ attributes += " aspect_ratio='proxy'"
+ attributes += " renderer='sgeo'"
+ version = _get_ansysnexus_version(self._session._cei_suffix)
+ html = f"\n"
+ html += f"\n"
+ html += self._periodic_script()
+ # refresh the remote HTML
+ self._save_remote_html_page(html)
+ # Subsequent updates are handled by the component itself
+ super().update()
+
+ # update the revision file
+ rev_filename = f"{self._sgeo_base_pathname}/geometry.rev"
+ cmd = f'with open(r"""{rev_filename}""", "w") as fp:\n'
+ cmd += f' fp.write("{self._revision}")\n'
+ self._session.cmd(cmd, do_eval=False)
+
+ self._revision += 1
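+ # At this point the server-side directory holds geometry.sgeo (the current
+ # geometry) and geometry.rev (a monotonically increasing revision number).
+ # The viewer polls geometry.rev (see _periodic_script) and reloads the
+ # geometry whenever the revision changes.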
+
+ def _update_proxy(self):
+ """Replace the current proxy image with the current view."""
+ # save a proxy image
+ w, h = self._default_size(1920, 1080)
+ remote_filename = f"{self._sgeo_base_pathname}/proxy.png"
+ cmd = f'ensight.render({w},{h},num_samples={self._aa}).save(r"""{remote_filename}""")'
+ self._session.cmd(cmd, do_eval=False)
+
+ def delete(self) -> None:
+ try:
+ _ = self._session.ensight.dsg_close_stream(self._stream_id)
+ except Exception:
+ pass
+ super().delete()
+
+ def _periodic_script(self) -> str:
+ html_source = os.path.join(os.path.dirname(__file__), "sgeo_poll.html")
+ with open(html_source, "r") as fp:
+ html = fp.read()
+ revision_uri = f"/{self._sgeo_base_filename}/geometry.rev{self._get_query_parameters_str()}"
+ html = html.replace("REVURL_ITEMID", revision_uri)
+ html = html.replace("ITEMID", self._guid)
+ return html
+
+
+class RenderableFluidsWebUI(Renderable):
+ def __init__(self, *args, **kwargs) -> None:
+ super().__init__(*args, **kwargs)
+ self._session.ensight_version_check("2025 R1")
+ warnings.warn("The webUI is still under active development and should be considered beta.")
+ self._rendertype = "webui"
+ self._generate_url()
+ self.update()
+
+ def _generate_url(self) -> None:
+ sha256_hash = hashlib.sha256()
+ sha256_hash.update(self._session._secret_key.encode())
+ token = sha256_hash.hexdigest()
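+ # The token is the SHA-256 hex digest of the session secret key; it is
+ # appended to the viewer URL as a fragment, e.g. (hypothetical values)
+ # http://localhost:8000?ans_instance_id=inst0#<64 hex chars>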
+ optional_query = self._get_query_parameters_str()
+ port = self._session._webui_port
+ if "instance_name" in self._session._launcher._get_query_parameters():
+ instance_name = self._session._launcher._get_query_parameters()["instance_name"]
+ # If using PIM, the port needs to be the 443 HTTPS Port;
+ port = self._session.html_port
+ # In the webUI code there's already a workflow to pass down the query parameter
+ # ans_instance_id, just use it
+ instance_name = self._session._launcher._get_query_parameters()["instance_name"]
+ optional_query = f"?ans_instance_id={instance_name}"
+ url = f"{self._http_protocol}://{self._session.html_hostname}:{port}"
+ url += f"{optional_query}#{token}"
+ self._url = url
diff --git a/src/ansys/pyensight/core/session.py b/src/ansys/pyensight/core/session.py
index 29e806c33b0..7c8be146a4d 100644
--- a/src/ansys/pyensight/core/session.py
+++ b/src/ansys/pyensight/core/session.py
@@ -1,1820 +1,1820 @@
-"""Session module.
-
-The ``Session`` module allows PyEnSight to control the EnSight session.
-
-Examples:
-
->>> from ansys.pyensight.core import LocalLauncher
->>> session = LocalLauncher().start()
->>> type(session)
-ansys.pyensight.Session
-
-"""
-import atexit
-import importlib.util
-from os import listdir
-import os.path
-import platform
-import sys
-import textwrap
-import time
-import types
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
-from urllib.parse import urlparse
-from urllib.request import url2pathname
-import uuid
-import webbrowser
-
-from ansys.pyensight.core.enscontext import EnsContext
-from ansys.pyensight.core.launcher import Launcher
-from ansys.pyensight.core.listobj import ensobjlist
-from ansys.pyensight.core.renderable import (
- RenderableDeepPixel,
- RenderableEVSN,
- RenderableFluidsWebUI,
- RenderableImage,
- RenderableMP4,
- RenderableSGEO,
- RenderableVNC,
- RenderableVNCAngular,
- RenderableWebGL,
-)
-import requests
-
-if TYPE_CHECKING:
- from ansys.api.pyensight import ensight_api
- from ansys.pyensight.core import enscontext, ensight_grpc, renderable
- from ansys.pyensight.core.ensobj import ENSOBJ
-
-
-class InvalidEnSightVersion(Exception):
- pass
-
-
-class Session:
- """Provides for accessing an EnSight ``Session`` instance.
-
- The ``Session`` object wraps the various connections to an EnSight instance. It includes
- the location of the installation and the gRPC, HTML and WS ports used to talk to the
- EnSight session. In most cases, a ``Session`` instance is created using Launcher
- class methods, but if the EnSight session is already running, an instance can be
- created directly to wrap this running EnSight session.
-
- If the ``Session`` object is created via a Launcher ``start()`` method call, when the
- session object is garbage collected, the EnSight instance is automatically stopped.
- To prevent this behavior (and leave the EnSight instance running), set the
- ``halt_ensight_on_close`` property to ``False``.
-
- A gRPC connection is required to interact with an EnSight session. The host, gRPC
- port number, and secret key must be specified. The HTML and WS ports, which are used to
- enable the :func:`show` method, also require that
- an instance of the websocket server be running.
-
- Parameters
- ----------
- host : str, optional
- Name of the host that the EnSight gRPC service is running on.
- The default is ``"127.0.0.1"``, which is the localhost.
- install_path : str, optional
- Path to the CEI directory to launch EnSight from.
- The default is ``None``.
- secret_key : str, optional
- Shared session secret key for validating the gRPC communication.
- The default is ``""``.
- grpc_port : int, optional
- Port number of the EnSight gRPC service. The default is ``12345``.
- html_hostname : str, optional
- Hostname for HTML connections if different from ``host``. Used by
- Ansys Lab and reverse proxy servers. The default is ``None``.
- html_port : int, optional
- Port number of the websocket server's HTTP server. The default is
- ``None``.
- ws_port : int, optional
- Port number of the websocket server's WS server. The default is
- ``None``.
- session_directory : str, optional
- Directory on the server for local data storage. The default is
- ``None``.
- timeout : float, optional
- Number of seconds to try a gRPC connection before giving up.
- The default is ``120``.
- rest_api : bool, optional
- Whether to enable the EnSight REST API for the remote EnSight instance.
- The default is ``False``.
- sos : bool, optional
- Whether the remote EnSight instance is to use the SOS (Server
- of Servers) feature. The default is ``False``.
-
- Examples
- --------
-
- >>> from ansys.pyensight.core import Session
- >>> session = Session(host="127.0.0.1", grpc_port=12345, html_port=8000, ws_port=8100)
-
- >>> from ansys.pyensight.core import LocalLauncher
- >>> session = LocalLauncher().start()
-
- >>> # Launch an instance of EnSight, then create a second connection to the instance
- >>> from ansys.pyensight.core import LocalLauncher, Session
- >>> launched_session = LocalLauncher().start()
- >>> # Get a string that can be used to create a second connection
- >>> session_string = str(launched_session)
- >>> # Create a second connection to the same EnSight instance
- >>> connected_session = eval(session_string)
-
- """
-
- def __init__(
- self,
- host: str = "127.0.0.1",
- install_path: Optional[str] = None,
- secret_key: str = "",
- grpc_port: int = 12345,
- html_hostname: Optional[str] = None,
- html_port: Optional[int] = None,
- ws_port: Optional[int] = None,
- session_directory: Optional[str] = None,
- timeout: float = 120.0,
- rest_api: bool = False,
- sos: bool = False,
- webui_port: Optional[int] = None,
- ) -> None:
- # every session instance needs a unique name that can be used as a cache key
- self._session_name = str(uuid.uuid1())
- # when objects come into play, we can reuse them, so hash ID to instance here
- self._ensobj_hash: Dict[int, "ENSOBJ"] = {}
- self._language = "en"
- self._rest_api_enabled = rest_api
- self._sos_enabled = sos
- self._timeout = timeout
- self._cei_home = ""
- self._cei_suffix = ""
- self._hostname = host
- self._install_path = install_path
- self._launcher = None
- if html_hostname == "" or html_hostname is None:
- # if we weren't given an html host, use the hostname
- self._html_hostname = self._hostname
- else:
- self._html_hostname = html_hostname
- self._html_port = html_port
- self._ws_port = ws_port
- self._secret_key = secret_key
- self._grpc_port = grpc_port
- self._halt_ensight_on_close = True
- self._callbacks: Dict[str, Tuple[int, Any]] = dict()
- self._webui_port = webui_port
- # if the caller passed a session directory we will assume they are
- # creating effectively a proxy Session and create a (stub) launcher
- if session_directory is not None:
- self._launcher = Launcher()
- self._launcher.session_directory = session_directory
- # The stub will not know about us
- self._halt_ensight_on_close = False
-
- # are we in a jupyter notebook?
- try:
- _ = get_ipython() # type: ignore
- self._jupyter_notebook = True # pragma: no cover
- except NameError:
- self._jupyter_notebook = False
-
- # Connect to the EnSight instance
- from ansys.api.pyensight import ensight_api # pylint: disable=import-outside-toplevel
- from ansys.pyensight.core import ensight_grpc # pylint: disable=import-outside-toplevel
-
- self._ensight = ensight_api.ensight(self)
- self._build_utils_interface()
- self._grpc = ensight_grpc.EnSightGRPC(
- host=self._hostname, port=self._grpc_port, secret_key=self._secret_key
- )
- self._grpc.session_name = self._session_name
-
- # establish the connection with retry
- self._establish_connection(validate=True)
-
- # update the enums to match current EnSight instance
- cmd = "{key: getattr(ensight.objs.enums, key) for key in dir(ensight.objs.enums)}"
- new_enums = self.cmd(cmd)
- for key, value in new_enums.items():
- if key.startswith("__") and (key != "__OBJID__"):
- continue
- setattr(self._ensight.objs.enums, key, value)
-
- # create ensight.core
- self._ensight.objs.core = self.cmd("ensight.objs.core")
-
- # get the remote Python interpreter version
- self.cmd("import platform", do_eval=False)
- self._ensight_python_version = self.cmd("platform.python_version_tuple()")
-
- # Because this session can have allocated significant external resources
- # we very much want a chance to close it up cleanly. It is legal to
- # call close() twice on this class if needed.
- atexit.register(self.close)
-
- # Speed up subtype lookups:
- self._subtype_tables = {}
- part_lookup_dict = dict()
- part_lookup_dict[0] = "ENS_PART_MODEL"
- part_lookup_dict[1] = "ENS_PART_CLIP"
- part_lookup_dict[2] = "ENS_PART_CONTOUR"
- part_lookup_dict[3] = "ENS_PART_DISCRETE_PARTICLE"
- part_lookup_dict[4] = "ENS_PART_FRAME"
- part_lookup_dict[5] = "ENS_PART_ISOSURFACE"
- part_lookup_dict[6] = "ENS_PART_PARTICLE_TRACE"
- part_lookup_dict[7] = "ENS_PART_PROFILE"
- part_lookup_dict[8] = "ENS_PART_VECTOR_ARROW"
- part_lookup_dict[9] = "ENS_PART_ELEVATED_SURFACE"
- part_lookup_dict[10] = "ENS_PART_DEVELOPED_SURFACE"
- part_lookup_dict[15] = "ENS_PART_BUILT_UP"
- part_lookup_dict[16] = "ENS_PART_TENSOR_GLYPH"
- part_lookup_dict[17] = "ENS_PART_FX_VORTEX_CORE"
- part_lookup_dict[18] = "ENS_PART_FX_SHOCK"
- part_lookup_dict[19] = "ENS_PART_FX_SEP_ATT"
- part_lookup_dict[20] = "ENS_PART_MAT_INTERFACE"
- part_lookup_dict[21] = "ENS_PART_POINT"
- part_lookup_dict[22] = "ENS_PART_AXISYMMETRIC"
- part_lookup_dict[24] = "ENS_PART_VOF"
- part_lookup_dict[25] = "ENS_PART_AUX_GEOM"
- part_lookup_dict[26] = "ENS_PART_FILTER"
- self._subtype_tables["ENS_PART"] = part_lookup_dict
- annot_lookup_dict = dict()
- annot_lookup_dict[0] = "ENS_ANNOT_TEXT"
- annot_lookup_dict[1] = "ENS_ANNOT_LINE"
- annot_lookup_dict[2] = "ENS_ANNOT_LOGO"
- annot_lookup_dict[3] = "ENS_ANNOT_LGND"
- annot_lookup_dict[4] = "ENS_ANNOT_MARKER"
- annot_lookup_dict[5] = "ENS_ANNOT_ARROW"
- annot_lookup_dict[6] = "ENS_ANNOT_DIAL"
- annot_lookup_dict[7] = "ENS_ANNOT_GAUGE"
- annot_lookup_dict[8] = "ENS_ANNOT_SHAPE"
- self._subtype_tables["ENS_ANNOT"] = annot_lookup_dict
- tool_lookup_dict = dict()
- tool_lookup_dict[0] = "ENS_TOOL_CURSOR"
- tool_lookup_dict[1] = "ENS_TOOL_LINE"
- tool_lookup_dict[2] = "ENS_TOOL_PLANE"
- tool_lookup_dict[3] = "ENS_TOOL_BOX"
- tool_lookup_dict[4] = "ENS_TOOL_CYLINDER"
- tool_lookup_dict[5] = "ENS_TOOL_CONE"
- tool_lookup_dict[6] = "ENS_TOOL_SPHERE"
- tool_lookup_dict[7] = "ENS_TOOL_REVOLUTION"
- self._subtype_tables["ENS_TOOL"] = tool_lookup_dict
-
- def __repr__(self):
- # if this is called from within the ctor, self.launcher might be None.
- session_dir = ""
- if self.launcher:
- session_dir = self.launcher.session_directory
- s = f"Session(host='{self.hostname}', secret_key='{self.secret_key}', "
- s += f"sos={self.sos}, rest_api={self.rest_api}, "
- s += f"html_hostname='{self.html_hostname}', html_port={self.html_port}, "
- s += f"grpc_port={self._grpc_port}, "
- s += f"ws_port={self.ws_port}, session_directory=r'{session_dir}')"
- return s
-
- def _establish_connection(self, validate: bool = False) -> None:
- """Establish a gRPC connection to the EnSight instance.
-
- Parameters
- ----------
- validate : bool
- If ``True``, verify communication with EnSight by issuing commands. The default is ``False``.
- """
- time_start = time.time()
- while time.time() - time_start < self._timeout: # pragma: no cover
- if self._grpc.is_connected():
- try:
- if validate:
- self._cei_home = self.cmd("ensight.version('CEI_HOME')")
- self._cei_suffix = self.cmd("ensight.version('suffix')")
- self._check_rest_connection()
- return
- except OSError: # pragma: no cover
- pass # pragma: no cover
- self._grpc.connect(timeout=self._timeout)
- raise RuntimeError("Unable to establish a gRPC connection to EnSight.") # pragma: no cover
-
- def _check_rest_connection(self) -> None:
- """Validate the REST API connection works
-
- Use requests to see if the REST API is up and running (it takes time
- for websocketserver to make a gRPC connection as well).
-
- """
- if not self.rest_api:
- return
- #
- #
- # even when using PIM and a proxy server (Ansys Lab) this connects
- # directly from the python running in the Notebook (the front-end)
- # to the EnSight Docker Container and not the proxy server.
- # Thus, here we use 'http', the private hostname, and the html port
- # (which is the same on the proxy server).
- url = f"http://{self._hostname}:{self.html_port}/ensight/v1/session/exec"
- time_start = time.time()
- while time.time() - time_start < self._timeout:
- try:
- _ = requests.put(
- url,
- json="enscl.rest_test = 30*20",
- headers=dict(Authorization=f"Bearer {self.secret_key}"),
- )
- return
- except Exception:
- pass
- time.sleep(0.5)
- raise RuntimeError("Unable to establish a REST connection to EnSight.") # pragma: no cover
-
- @property
- def name(self) -> str:
- """The session name is a unique identifier for this Session instance. It
- is used by EnSight to maintain session specific data values within the
- EnSight instance."""
- return self._session_name
-
- @property
- def language(self) -> str:
- """Current language specification for the EnSight session. Various
- information calls return their information in the target language
- if possible. The default is ``"en"``.
-
- Examples
- --------
-
- >>> session.language = "en"
- >>> session.ensight.objs.core.attrinfo(session.ensight.objs.enums.PREDEFINEDPALETTES)
- >>> session.language = "zh"
- >>> session.ensight.objs.core.attrinfo(session.ensight.objs.enums.PREDEFINEDPALETTES)
-
- """
- return self._language
-
- @language.setter
- def language(self, value: str) -> None:
- self._language = value
- self.cmd(f"ensight.core.tr.changelang(lin='{self._language}')", do_eval=False)
-
- @property
- def halt_ensight_on_close(self) -> bool:
- """Flag for indicating whether to halt EnSight on close. If this property
- is ``True`` and the session was created via a launcher, when the session
- is closed, the EnSight instance is stopped.
-
- .. Note::
- While this flag prevents the :func:`close`
- method from shutting down EnSight, depending on how the host Python interpreter is configured,
- the EnSight session may still be halted. For example, this behavior can
- occur in Jupyter Lab.
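-
- Examples
- --------
- A minimal sketch, assuming a session started with ``LocalLauncher``, that
- leaves the EnSight instance running after this session object is closed:
-
- >>> from ansys.pyensight.core import LocalLauncher
- >>> session = LocalLauncher().start()
- >>> session.halt_ensight_on_close = False
- >>> session.close()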
- """
- return self._halt_ensight_on_close
-
- @halt_ensight_on_close.setter
- def halt_ensight_on_close(self, value: bool) -> None:
- self._halt_ensight_on_close = value
-
- @property
- def timeout(self) -> float:
- """Amount of time in seconds to try a gRPC connection before giving up."""
- return self._timeout
-
- @timeout.setter
- def timeout(self, value: float) -> None:
- self._timeout = value
-
- @property
- def cei_home(self) -> str:
- """Value of ``CEI_HOME`` for the connected EnSight session."""
- return self._cei_home
-
- @property
- def cei_suffix(self) -> str:
- """Suffix string of the connected EnSight session. For example, ``222``."""
- return self._cei_suffix
-
- @property
- def jupyter_notebook(self) -> bool:
- """Flag indicating if the session is running in a Jupyter notebook and should use
- the display features of that interface.
-
- """
- return self._jupyter_notebook
-
- @jupyter_notebook.setter
- def jupyter_notebook(self, value: bool) -> None:
- self._jupyter_notebook = value
-
- @property
- def ensight(self) -> "ensight_api.ensight":
- """Core EnSight API wrapper."""
- return self._ensight
-
- @property
- def grpc(self) -> "ensight_grpc.EnSightGRPC":
- """The gRPC wrapper instance used by this session to access EnSight."""
- return self._grpc
-
- @property
- def secret_key(self) -> str:
- """Secret key used for communication validation in the gRPC instance."""
- return self._secret_key
-
- @property
- def html_port(self) -> Optional[int]:
- """Port supporting HTML interaction with EnSight."""
- return self._html_port
-
- @property
- def ws_port(self) -> Optional[int]:
- """Port supporting WS interaction with EnSight."""
- return self._ws_port
-
- @property
- def hostname(self) -> str:
- """Hostname of the system hosting the EnSight instance."""
- return self._hostname
-
- @property
- def html_hostname(self) -> str:
- """Hostname of the system hosting the EnSight web server instance."""
- return self._html_hostname
-
- @property
- def launcher(self) -> "Launcher":
- """Reference to the launcher instance if a launcher was used to instantiate the session."""
- return self._launcher
-
- @launcher.setter
- def launcher(self, value: "Launcher"):
- self._launcher = value
-
- @property
- def sos(self) -> bool:
- """
- Flag indicating if the remote EnSight session is running in SOS (Server of Server) mode.
- """
- return self._sos_enabled
-
- @property
- def rest_api(self) -> bool:
- """
- Flag indicating if the remote EnSight session supports the REST API.
- """
- return self._rest_api_enabled
-
- @staticmethod
- def help():
- """Open the documentation for PyEnSight in a web browser."""
- url = "https://ensight.docs.pyansys.com/"
- webbrowser.open(url)
-
- def copy_to_session(
- self,
- local_prefix: str,
- filelist: List[str],
- remote_prefix: Optional[str] = None,
- progress: bool = False,
- ) -> list:
- """Copy a collection of files into the EnSight session.
-
- Copy files from the local filesystem into the filesystem that is hosting
- the EnSight instance.
-
- .. note::
- For a :class:`LocalLauncher <ansys.pyensight.core.LocalLauncher>`
- instance, these are the same filesystems.
-
- Parameters
- ----------
- local_prefix : str
- URL prefix to use for all files specified for the ``filelist``
- parameter. The only protocol supported is ``'file://'``, which
- is the local filesystem.
- filelist : list
- List of files to copy. These files are prefixed with ``local_prefix``
- and written relative to the ``remote_prefix`` parameter appended to
- ``session.launcher.session_directory``.
- remote_prefix : str, optional
- Directory on the remote (EnSight) filesystem, which is the
- destination for the files. This prefix is appended to
- ``session.launcher.session_directory``.
- progress : bool, optional
- Whether to show a progress bar. The default is ``False``. If ``True`` and
- the ``tqdm`` module is available, a progress bar is shown.
-
- Returns
- -------
- list
- List of the filenames that were copied and their sizes.
-
- Examples
- --------
- >>> the_files = ["fluent_data_dir", "ensight_script.py"]
- >>> session.copy_to_session("file:///D:/data", the_files, progress=True)
-
- >>> the_files = ["fluent_data_dir", "ensight_script.py"]
- >>> session.copy_to_session("file:///scratch/data", the_files, remote_prefix="data")
-
- """
- uri = urlparse(local_prefix)
- if uri.scheme != "file":
- raise RuntimeError("Only the file:// protocol is supported for the local_prefix")
- localdir = url2pathname(uri.path)
-
- remote_functions = textwrap.dedent(
- """\
- import os
- def copy_write_function__(filename: str, data: bytes) -> None:
- os.makedirs(os.path.dirname(filename), exist_ok=True)
- with open(filename, "ab") as fp:
- fp.write(data)
- """
- )
-
- self.cmd(remote_functions, do_eval=False)
-
- out = []
- dirlen = 0
- if localdir: # pragma: no cover
- # we use dirlen + 1 here to remove the '/' inserted by os.path.join()
- dirlen = len(localdir) + 1
- for item in filelist:
- try:
- name = os.path.join(localdir, item)
- if os.path.isfile(name):
- out.append((name[dirlen:], os.stat(name).st_size))
- else:
- for root, _, files in os.walk(name):
- for filename in files:
- fullname = os.path.join(root, filename)
- out.append((fullname[dirlen:], os.stat(fullname).st_size))
- except Exception:
- pass
- if progress: # pragma: no cover
- try:
- from tqdm.auto import tqdm
- except ImportError:
- tqdm = list
- else:
- tqdm = list
- for item in tqdm(out):
- filename = os.path.join(localdir, item[0])
- out_dir = self.launcher.session_directory.replace("\\", "/")
- if remote_prefix:
- out_dir += f"/{remote_prefix}"
- name = out_dir + f"/{item[0]}"
- name = name.replace("\\", "/")
- # Walk the file in chunk size blocks
- chunk_size = 1024 * 1024
- with open(filename, "rb") as fp:
- while True:
- data = fp.read(chunk_size)
- if data == b"":
- break
- self.cmd(
- f"copy_write_function__(r'{name}', {data!r})", do_eval=False
- ) # pragma: no cover
- return out
-
- def copy_from_session(
- self,
- local_prefix: str,
- filelist: List[str],
- remote_prefix: Optional[str] = None,
- progress: bool = False,
- ) -> list:
- """Copy a collection of files out of the EnSight session.
-
- Copy files from the filesystem of the remote EnSight instance to the
- filesystem of the local PyEnsight instance.
-
- .. note::
- For a :class:`LocalLauncher <ansys.pyensight.core.LocalLauncher>`
- instance, these are the same filesystems.
-
- Parameters
- ----------
- local_prefix : str
- URL prefix of the location to save the files to. The only
- protocol currently supported is ``'file://'``, which is the
- local filesystem.
- filelist : list
- List of the files to copy. These files are prefixed
- with ``session.launcher.session_directory/remote_prefix`` and written
- relative to URL prefix specified for the ``local_prefix`` parameter.
- remote_prefix : str, optional
- Directory on the remote (EnSight) filesystem that is the source
- for the files. This prefix is appended to ``session.launcher.session_directory``.
- progress : bool, optional
- Whether to show a progress bar. The default is ``False``. If ``True`` and
- the ``tqdm`` module is available, a progress bar is shown.
-
- Returns
- -------
- list
- List of the files that were copied.
-
- Examples
- --------
- >>> the_files = ["fluent_data_dir", "ensight_script.py"]
- >>> session.copy_from_session("file:///D:/restored_data", the_files, progress=True)
-
- >>> the_files = ["fluent_data_dir", "ensight_script.py"]
- >>> session.copy_from_session("file:///scratch/restored_data", the_files,
- remote_prefix="data")
- """
-
- uri = urlparse(local_prefix)
- if uri.scheme != "file":
- raise RuntimeError("Only the file:// protocol is supported for the local_prefix")
- localdir = url2pathname(uri.path)
-
- remote_functions = textwrap.dedent(
- """\
- import os
- def copy_walk_function__(remotedir: str, filelist: list) -> None:
- out = []
- dirlen = 0
- if remotedir:
- dirlen = len(remotedir) + 1
- for item in filelist:
- try:
- name = os.path.join(remotedir, item)
- if os.path.isfile(name):
- out.append((name[dirlen:], os.stat(name).st_size))
- else:
- for root, _, files in os.walk(name):
- for filename in files:
- fullname = os.path.join(root, filename)
- out.append((fullname[dirlen:], os.stat(fullname).st_size))
- except Exception:
- pass
- return out
- # (needed for flake8)
- def copy_read_function__(filename: str, offset: int, numbytes: int) -> bytes:
- with open(filename, "rb") as fp:
- fp.seek(offset)
- data = fp.read(numbytes)
- return data
- """
- )
-
- self.cmd(remote_functions, do_eval=False)
-
- remote_directory = self.launcher.session_directory
- if remote_prefix:
- remote_directory = f"{remote_directory}/{remote_prefix}"
- remote_directory = remote_directory.replace("\\", "/")
- names = self.cmd(f"copy_walk_function__(r'{remote_directory}', {filelist})", do_eval=True)
- if progress:
- try:
- from tqdm.auto import tqdm
- except ImportError:
- tqdm = list
- else:
- tqdm = list
- for item in tqdm(names):
- name = f"{remote_directory}/{item[0]}".replace("\\", "/")
- full_name = os.path.join(localdir, item[0])
- os.makedirs(os.path.dirname(full_name), exist_ok=True)
- with open(full_name, "wb") as fp:
- offset = 0
- chunk_size = 1024 * 1024
- while True:
- data = self.cmd(
- f"copy_read_function__(r'{name}', {offset}, {chunk_size})", do_eval=True
- )
- if len(data) == 0:
- break
- fp.write(data)
- offset += chunk_size
- return names
-
- def run_script(self, filename: str) -> Optional[types.ModuleType]:
- """Run an EnSight Python script file.
-
- In EnSight, there is a notion of a Python *script* that is normally run line
- by line. In such scripts, the ``ensight`` module is assumed to be preloaded.
- This method runs such scripts by importing them as modules and running the commands
- through the PyEnSight interface. This is done by installing the PyEnSight ``Session``
- object into the module before it is imported. This makes it possible to use a
- Python debugger with an EnSight Python script, using the PyEnSight interface.
-
- .. note::
-
- Because the Python script is imported as a module, the script filename must
- have a ``.py`` extension.
-
-
- Parameters
- ----------
- filename : str
- Filename of the Python script to run, which is loaded as a module by PyEnSight.
-
- Returns
- -------
- types.ModuleType
- Imported module.
-
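- Examples
- --------
- A minimal sketch, assuming ``my_script.py`` (a hypothetical EnSight Python
- script) exists in the current directory:
-
- >>> module = session.run_script("my_script.py")
-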
- """
- dirname = os.path.dirname(filename)
- if not dirname: # pragma: no cover
- dirname = "." # pragma: no cover
- if dirname not in sys.path:
- sys.path.append(dirname)
- module_name, _ = os.path.splitext(os.path.basename(filename))
- # get the module reference
- spec = importlib.util.find_spec(module_name)
- if spec: # pragma: no cover
- module = importlib.util.module_from_spec(spec)
- # insert an ensight interface into the module
- if self.ensight:
- module.ensight = self.ensight # type: ignore
- # load (run) the module
- if spec.loader: # pragma: no cover
- spec.loader.exec_module(module)
- return module
- return None # pragma: no cover
-
- def exec(self, function: Callable, *args, remote: bool = False, **kwargs) -> Any:
- """Run a function containing EnSight API calls locally or in the EnSight interpreter.
-
- The function is in this form::
-
- def myfunc(ensight, *args, **kwargs):
- ...
- return value
-
- The ``exec()`` method allows for the function to be executed in the PyEnSight Python
- interpreter or the (remote) EnSight interpreter. Thus, a function making a large
- number of RPC calls can run much faster than if it runs solely in the PyEnSight
- interpreter.
-
- These constraints exist on this capability:
-
- - The function may only use arguments passed to the ``exec()`` method and can only
- return a single value.
- - The function cannot modify the input arguments.
- - The input arguments must be serializable and the PyEnSight Python interpreter
- version must match the version in EnSight.
-
- Parameters
- ----------
- remote : bool, optional
- Whether to execute the function in the (remote) EnSight interpreter.
-
- Examples
- --------
- >>> from ansys.pyensight.core import LocalLauncher
- >>> session = LocalLauncher().start()
- >>> options = dict()
- >>> options['Verbose mode'] = 'OFF'
- >>> options['Use ghost elements'] = 'OFF'
- >>> options['Long names'] = 'OFF'
- >>> options['Compatibility mode'] = 'ON'
- >>> options['Move Transient Parts'] = 'ON'
- >>> options['Element type'] = 'Tri 3'
- >>> options['Boundary ghosts'] = 'None'
- >>> options['Spread out parts'] = 'Legacy'
- >>> options['Number of spheres'] = 100
- >>> options['Number of cubes'] = 100
- >>> options['Number of planes'] = 0
- >>> options['Number of elements start'] = 1000
- >>> options['Number of elements end'] = 1000
- >>> options['Number of timesteps'] = 1
- >>> options['Part scaling factor'] = 1.000000e+00
- >>> options['Random number seed'] = 0
- >>> options['Number of scalars'] = 3
- >>> options['Number of vectors'] = 3
- >>> options['Number of constants'] = 3
- >>> session.load_data("dummy", file_format="Synthetic", reader_options=options)
-
- >>> def count(ensight, attr, value):
- >>> count = 0
- >>> for p in ensight.objs.core.PARTS:
- >>> if p.getattr(attr) == value:
- >>> count += 1
- >>> return count
- >>> print(count(session.ensight, "VISIBLE", True))
- >>> print(session.exec(count, "VISIBLE", True))
- >>> print(session.exec(count, "VISIBLE", True, remote=True))
-
- """
- if remote:
- # remote execution only supported in 2023 R1 or later
- if int(self._cei_suffix) < 231:
- raise RuntimeError("Remote function execution only supported in 2023 R1 and later")
- local_python_version = platform.python_version_tuple()
- if self._ensight_python_version[0:2] != local_python_version[0:2]:
- vers = "Local and remote Python versions must match: "
- vers += ".".join(local_python_version)
- vers += " vs "
- vers += ".".join(self._ensight_python_version)
- raise RuntimeError(vers)
- import dill # pylint: disable=import-outside-toplevel
-
- # Create a bound object that allows for direct encoding of the args/kwargs params
- # The new function would be bound_function(ensight) where the args are captured
- # in the lambda.
- bound_function = lambda ens: function( # noqa: E731 # pragma: no cover
- ens, *args, **kwargs
- )
- # Serialize the bound function
- serialized_function = dill.dumps(bound_function, recurse=True)
- self.cmd("import dill", do_eval=False)
- # Run it remotely, passing the instance ensight instead of self._ensight
- cmd = f"dill.loads(eval(repr({serialized_function})))(ensight)"
- return self.cmd(cmd)
- else:
- return function(self._ensight, *args, **kwargs)
-
- def show(
- self,
- what: str = "image",
- width: Optional[int] = None,
- height: Optional[int] = None,
- temporal: bool = False,
- aa: int = 4,
- fps: float = 30.0,
- num_frames: Optional[int] = None,
- ) -> "renderable.Renderable":
- """Capture the current EnSight scene or otherwise make it available for
- display in a web browser.
-
- This method generates the appropriate visuals and returns the renderable
- object for viewing. If the session is in a Jupyter notebook, the cell
- in which the ``show()`` method is issued is updated with the renderable display.
-
- Parameters
- ----------
- what : str, optional
- Type of scene display to generate. The default is ``"image"``.
- Options are:
-
- * ``image``: Simple rendered PNG image
- * ``deep_pixel``: EnSight deep pixel image
- * ``animation``: MPEG4 movie
- * ``webgl``: Interactive WebGL-based browser viewer
- * ``remote``: Remote rendering-based interactive EnSight viewer
- * ``remote_scene``: Remote rendering-based interactive viewer for the scene saved as an EnVision file
- * ``sgeo``: Interactive WebGL-based viewer using the progressive SGEO format (2023 R1 and later; falls back to ``webgl`` on older versions)
-
- width : int, optional
- Width of the rendered entity. The default is ``None``.
- height : int, optional
- Height of the rendered entity. The default is ``None``.
- temporal : bool, optional
- Whether to include all timesteps in WebGL views. The default is ``False``.
- aa : int, optional
- Number of antialiasing passes to use when rendering images. The
- default is ``4``.
- fps : float, optional
- Number of frames per second to use for animation playback. The default
- is ``30``.
- num_frames : int, optional
- Number of frames of the current, static timestep to record for animation playback. The default is ``None``.
-
- Returns
- -------
- renderable.Renderable
-
- Raises
- ------
- RuntimeError
- If it is not possible to generate the content.
-
- Examples
- --------
- Render an image and display it in a browser. Rotate the scene and update the display.
-
- >>> image = session.show('image', width=800, height=600)
- >>> image.browser()
- >>> session.ensight.view_transf.rotate(30, 30, 0)
- >>> image.update()
- >>> image.browser()
-
- """
- self._establish_connection()
- if self._html_port is None:
- raise RuntimeError("No websocketserver has been associated with this Session")
-
- kwargs = dict(
- height=height, width=width, temporal=temporal, aa=aa, fps=fps, num_frames=num_frames
- )
- if self._jupyter_notebook: # pragma: no cover
- from IPython.display import display
-
- # get the cell DisplayHandle instance
- kwargs["cell_handle"] = display("", display_id=True)
-
- render = None
- if what == "image":
- render = RenderableImage(self, **kwargs)
- elif what == "deep_pixel":
- render = RenderableDeepPixel(self, **kwargs)
- elif what == "animation":
- render = RenderableMP4(self, **kwargs)
- elif what == "webgl":
- render = RenderableWebGL(self, **kwargs)
- elif what == "sgeo":
- # the SGEO protocol is only supported in 2023 R1 and higher
- if int(self._cei_suffix) < 231:
- # Use the AVZ viewer in older versions of EnSight
- render = RenderableWebGL(self, **kwargs)
- else:
- render = RenderableSGEO(self, **kwargs)
- elif what == "remote":
- render = RenderableVNC(self, **kwargs)
- elif what == "remote_scene":
- render = RenderableEVSN(self, **kwargs)
- # Undocumented. Available only internally
- elif what == "webensight":
- render = RenderableVNCAngular(self, **kwargs)
- elif what == "webui":
- render = RenderableFluidsWebUI(self, **kwargs)
-
- if render is None:
- raise RuntimeError("Unable to generate requested visualization")
-
- return render
-
- def cmd(self, value: str, do_eval: bool = True) -> Any:
- """Run a command in EnSight and return the results.
-
- Parameters
- ----------
- value : str
- String of the command to run
- do_eval : bool, optional
- Whether to perform an evaluation. The default is ``True``.
-
-
- Returns
- -------
- result
- Result of the string being executed as Python inside EnSight.
-
- Examples
- --------
-
- >>> print(session.cmd("10+4"))
- 14
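-
- Statements can also be run without evaluation. A minimal sketch, assuming
- the standard ``math`` module is available in the EnSight interpreter:
-
- >>> session.cmd("import math", do_eval=False)
- >>> session.cmd("math.sqrt(2.0)")
- 1.4142135623730951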
- """
- self._establish_connection()
- ret = self._grpc.command(value, do_eval=do_eval)
- if do_eval:
- ret = self._convert_ctor(ret)
- value = eval(ret, dict(session=self, ensobjlist=ensobjlist))
- return value
- return ret
-
- def geometry(self, what: str = "glb") -> bytes:
- """Return the current EnSight scene as a geometry file.
-
- Parameters
- ----------
- what : str, optional
- File format to return. The default is ``"glb"``.
-
- Returns
- -------
- obj
- Generated geometry file as a bytes object.
-
- Examples
- --------
- >>> data = session.geometry()
- >>> with open("file.glb", "wb") as fp:
- >>> fp.write(data)
-
- """
- self._establish_connection()
- return self._grpc.geometry()
-
- def render(self, width: int, height: int, aa: int = 1) -> bytes:
- """Render the current EnSight scene and return a PNG image.
-
- Parameters
- ----------
- width : int
- Width of the rendered image in pixels.
- height : int
- Height of the rendered image in pixels.
- aa : int, optional
- Number of antialiasing passes to use. The default is ``1``.
-
- Returns
- -------
- obj
- PNG image as a bytes object.
-
- Examples
- --------
- >>> data = session.render(1920, 1080, aa=4)
- >>> with open("file.png", "wb") as fp:
- >>> fp.write(data)
-
- """
- self._establish_connection()
- return self._grpc.render(width=width, height=height, aa=aa)
-
- def _release_remote_objects(self, object_id: Optional[int] = None):
- """
- Send a command to the remote EnSight session to drop a specific object
- or all objects from the remote object cache.
-
- Parameters
- ----------
- object_id: int, optional
- The specific object to drop from the cache. If no object ID is specified,
- all remote objects associated with this session are dropped.
-
- """
- obj_str = ""
- if object_id: # pragma: no cover
- obj_str = f", id={object_id}" # pragma: no cover
- cmd = f"ensight.objs.release_id('{self.name}'{obj_str})"
- _ = self.cmd(cmd, do_eval=False)
-
- def close(self) -> None:
- """Close the session.
-
- Close the current session and its gRPC connection.
- """
- # if version 242 or higher, free any objects we have cached there
- if self.cei_suffix >= "242":
- try:
- self._release_remote_objects()
- except RuntimeError: # pragma: no cover
- # handle some intermediate EnSight builds.
- pass
- if self._launcher and self._halt_ensight_on_close:
- self._launcher.close(self)
- else:
- # lightweight shutdown; just close the gRPC connection
- self._grpc.shutdown(stop_ensight=False)
- self._launcher = None
-
- def _build_utils_interface(self) -> None:
- """Build the ``ensight.utils`` interface.
-
- This method walks the Python files in the ``utils`` directory, creating an
- instance of the class in each file and placing it in the
- ``Session.ensight.utils`` namespace.
- """
- self._ensight.utils = types.SimpleNamespace()
- _utils_dir = os.path.join(os.path.dirname(__file__), "utils")
- if _utils_dir not in sys.path:
- sys.path.insert(0, _utils_dir)
- onlyfiles = [f for f in listdir(_utils_dir) if os.path.isfile(os.path.join(_utils_dir, f))]
- for _basename in onlyfiles:
- # skip over any files with the "_server" in their names
- if "_server" in _basename or "_cli" in _basename:
- continue
- _filename = os.path.join(_utils_dir, _basename)
- try:
- # get the module and class names
- _name = os.path.splitext(os.path.basename(_filename))[0]
- if _name == "__init__":
- continue
- _cap_name = _name[0].upper() + _name[1:]
- # import the module
- spec = importlib.util.spec_from_file_location(
- f"ansys.pyensight.core.utils.{_name}", _filename
- )
- if spec: # pragma: no cover
- _module = importlib.util.module_from_spec(spec)
- if spec.loader: # pragma: no cover
- spec.loader.exec_module(_module)
- # get the class from the module (query.py filename -> Query() object)
- _the_class = getattr(_module, _cap_name)
- # Create an instance, using ensight as the EnSight interface
- # and place it in this module.
- setattr(self._ensight.utils, _name, _the_class(self._ensight))
- except Exception as e: # pragma: no cover
- # Warn on import errors
- print(f"Error loading ensight.utils from: '{_filename}' : {e}")
-
- MONITOR_NEW_TIMESTEPS_OFF = "off"
- MONITOR_NEW_TIMESTEPS_STAY_AT_CURRENT = "stay_at_current"
- MONITOR_NEW_TIMESTEPS_JUMP_TO_END = "jump_to_end"
-
- def load_data(
- self,
- data_file: str,
- result_file: Optional[str] = None,
- file_format: Optional[str] = None,
- reader_options: Optional[dict] = None,
- new_case: bool = False,
- representation: str = "3D_feature_2D_full",
- monitor_new_timesteps: str = MONITOR_NEW_TIMESTEPS_OFF,
- ) -> None:
- """Load a dataset into the EnSight instance.
-
- Load the data from a given file into EnSight. The new data
- replaces any currently loaded data in the session.
-
- Parameters
- ----------
- data_file : str
- Name of the data file to load.
- result_file : str, optional
- Name of the second data file for dual-file datasets.
- file_format : str, optional
- Name of the EnSight reader to use. The default is ``None``,
- in which case EnSight selects a reader.
- reader_options : dict, optional
- Dictionary of reader-specific option-value pairs that can be used
- to customize the reader behavior. The default is ``None``.
- new_case : bool, optional
- Whether to load the dataset in another case. The default is ``False``,
- in which case the dataset replaces the one (if any) loaded in the existing
- current case.
- representation : str, optional
- Default representation for the parts loaded. The default is
- ``"3D_feature_2D_full"``.
- monitor_new_timesteps : str, optional
- Whether EnSight monitors the dataset for new timesteps. The default is
- ``MONITOR_NEW_TIMESTEPS_OFF``. The allowed values are ``MONITOR_NEW_TIMESTEPS_OFF``,
- ``MONITOR_NEW_TIMESTEPS_STAY_AT_CURRENT``, and ``MONITOR_NEW_TIMESTEPS_JUMP_TO_END``.
-
- Raises
- ------
- RuntimeError
- If EnSight cannot guess the file format or an error occurs while the
- data is being read.
-
- Examples
- --------
- >>> from ansys.pyensight.core import LocalLauncher
- >>> session = LocalLauncher().start()
- >>> session.load_data(r'D:\data\CFX\example_data.res')
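-
- A sketch of monitoring a transient dataset (hypothetical path) for new timesteps:
-
- >>> session.load_data("/data/transient.case",
- ... monitor_new_timesteps=session.MONITOR_NEW_TIMESTEPS_JUMP_TO_END)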
-
- """
- self._establish_connection()
- # what application are we talking to?
- target = self.cmd("ensight.version('product').lower()")
- if target == "envision":
- cmd = f'ensight.data.replace(r"""{data_file}""")'
- if self.cmd(cmd) != 0:
- raise RuntimeError("Unable to load the dataset.")
- return
-
- # Handle case changes...
- cmds = [
- 'ensight.case.link_modelparts_byname("OFF")',
- 'ensight.case.create_viewport("OFF")',
- 'ensight.case.apply_context("OFF")',
- "ensight.case.reflect_model_in(\"'none'\")",
- ]
- for cmd in cmds:
- self.cmd(cmd, do_eval=False)
-
- if new_case:
- # New case
- new_case_name = None
- for case in self.ensight.objs.core.CASES:
- if case.ACTIVE == 0:
- new_case_name = case.DESCRIPTION
- break
- if new_case_name is None:
- raise RuntimeError("No cases available for adding.")
- cmd = f'ensight.case.add("{new_case_name}")'
- self.cmd(cmd, do_eval=False)
- cmd = f'ensight.case.select("{new_case_name}")'
- self.cmd(cmd, do_eval=False)
- else:
- # Case replace
- current_case_name = self.ensight.objs.core.CURRENTCASE[0].DESCRIPTION
- cmd = f'ensight.case.replace("{current_case_name}", "{current_case_name}")'
- self.cmd(cmd, do_eval=False)
- cmd = f'ensight.case.select("{current_case_name}")'
- self.cmd(cmd, do_eval=False)
-
- # Attempt to find the file format if none is specified
- if file_format is None:
- try:
- cmd = "ensight.objs.core.CURRENTCASE[0]"
- cmd += f'.queryfileformat(r"""{data_file}""")["reader"]'
- file_format = self.cmd(cmd)
- except RuntimeError:
- raise RuntimeError(f"Unable to determine file format for {data_file}")
-
- # Load the data
- cmds = [
- "ensight.part.select_default()",
- "ensight.part.modify_begin()",
- f'ensight.part.elt_representation("{representation}")',
- "ensight.part.modify_end()",
- 'ensight.data.binary_files_are("native")',
- f'ensight.data.format("{file_format}")',
- ]
- if reader_options:
- for key, value in reader_options.items():
- option = f"""ensight.data.reader_option("{repr(key)} {repr(value)}")"""
- cmds.append(option)
- if result_file:
- cmds.append(f'ensight.data.result(r"""{result_file}""")')
- cmds.append("ensight.data.shift_time(1.000000, 0.000000, 0.000000)")
- cmds.append(f'ensight.solution_time.monitor_for_new_steps("{monitor_new_timesteps}")')
- cmds.append(f'ensight.data.replace(r"""{data_file}""")')
- for cmd in cmds:
- if self.cmd(cmd) != 0:
- raise RuntimeError("Unable to load the dataset.")
-
- def download_pyansys_example(
- self,
- filename: str,
- directory: Optional[str] = None,
- root: Optional[str] = None,
- folder: Optional[bool] = None,
- ) -> str:
- """Download an example dataset from the ansys/example-data repository.
- The dataset is downloaded local to the EnSight server location so that it can
- be loaded even when EnSight is running in a container.
-
- Parameters
- ----------
- filename : str
- Name of the file to download.
- directory : str, optional
- Directory in the repository to download the file from.
- root : str, optional
- If set, the URL root to download from instead of the default repository location.
- folder : bool, optional
- If ``True``, ``filename`` marks a directory to download rather than
- a single file.
-
- Returns
- -------
- pathname : str
- The download location, local to the EnSight server directory.
- If ``folder`` is ``True``, the download location is a folder containing
- all items available in the repository under that folder.
-
- Examples
- --------
- >>> from ansys.pyensight.core import DockerLauncher
- >>> session = DockerLauncher().start(data_directory="D:\\")
- >>> cas_file = session.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
- >>> dat_file = session.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
- >>> session.load_data(cas_file, result_file=dat_file)
- >>> remote = session.show("remote")
- >>> remote.browser()
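-
- A sketch of downloading an entire folder; the repository path here is hypothetical:
-
- >>> path = session.download_pyansys_example("some_case_dir", "pyensight", folder=True)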
- """
- base_uri = "https://github.com/ansys/example-data/raw/master"
- base_api_uri = "https://api.github.com/repos/ansys/example-data/contents"
- if not folder:
- if root is not None:
- base_uri = root
- else:
- base_uri = base_api_uri
- uri = f"{base_uri}/{filename}"
- if directory:
- uri = f"{base_uri}/{directory}/{filename}"
- pathname = f"{self.launcher.session_directory}/{filename}"
- if not folder:
- script = "import requests\n"
- script += "import shutil\n"
- script += "import os\n"
- script += f'url = "{uri}"\n'
- script += f'outpath = r"""{pathname}"""\n'
- script += "with requests.get(url, stream=True) as r:\n"
- script += " with open(outpath, 'wb') as f:\n"
- script += " shutil.copyfileobj(r.raw, f)\n"
- self.cmd(script, do_eval=False)
- else:
- script = "import requests\n"
- script += "import shutil\n"
- script += "import os\n"
- script += f'url = "{uri}"\n'
- script += "with requests.get(url) as r:\n"
- script += " data = r.json()\n"
- script += f' output_directory = r"""{pathname}"""\n'
- script += " os.makedirs(output_directory, exist_ok=True)\n"
- script += " for item in data:\n"
- script += " if item['type'] == 'file':\n"
- script += " file_url = item['download_url']\n"
- script += " filename = os.path.join(output_directory, item['name'])\n"
- script += " r = requests.get(file_url, stream=True)\n"
- script += " with open(filename, 'wb') as f:\n"
- script += " f.write(r.content)\n"
- self.cmd(script, do_eval=False)
- return pathname
-
- def load_example(
- self, example_name: str, uncompress: bool = False, root: Optional[str] = None
- ) -> str:
- """Load an example dataset.
-
- This method downloads an EnSight session file from a known location and loads
- it into the current EnSight instance. The URL for the dataset is formed by
- combining the value given for the ``example_name`` parameter with a root URL.
- The default base URL is provided by Ansys, but it can be overridden by specifying
- a value for the ``root`` parameter.
-
- Parameters
- ----------
- example_name : str
- Name of the EnSight session file (``.ens``) to download and load.
- uncompress : bool, optional
- Whether to unzip the downloaded file into the returned directory name.
- The default is ``False``.
- root : str, optional
- Base URL for the download.
-
- Returns
- -------
- str
- Path to the downloaded file in the EnSight session.
-
- Examples
- --------
- >>> from ansys.pyensight.core import LocalLauncher
- >>> session = LocalLauncher().start()
- >>> session.load_example("fluent_wing_example.ens")
- >>> remote = session.show("remote")
- >>> remote.browser()
-
- """
- base_uri = "https://s3.amazonaws.com/www3.ensight.com/PyEnSight/ExampleData"
- if root is not None: # pragma: no cover
- base_uri = root # pragma: no cover
- pathname = self.download_pyansys_example(example_name, root=base_uri)
- script = f'outpath = r"""{pathname}"""\n'
- if uncompress:
- # in this case, remove the extension and unzip the file
- pathname_dir = os.path.splitext(pathname)[0]
- script += "outpath_dir = os.path.splitext(outpath)[0]\n"
- script += "os.mkdir(outpath_dir)\n"
- script += "shutil.unpack_archive(outpath, outpath_dir, 'zip')\n"
- # return the directory name
- pathname = pathname_dir
- else:
- script += "ensight.objs.ensxml_restore_file(outpath)\n"
- self.cmd(script, do_eval=False)
- return pathname
-
- def add_callback(
- self, target: Any, tag: str, attr_list: list, method: Callable, compress: bool = True
- ) -> None:
- """Register a callback with an event tuple.
-
- For a given target object (such as ``"ensight.objs.core"``) and a list
- of attributes (such as ``["PARTS", "VARIABLES"]``), this method sets up a
- callback to be made when any of those attribute change on the target object.
- The target can also be an EnSight (not PyEnSight) class name, for example
- "ENS_PART". In this latter form, all objects of that type are watched for
- specified attribute changes.
-
- The callback is made with a single argument, a string encoded in URL format
- with the supplied tag, the name of the attribute that changed and the UID
- of the object that changed. The string passed to the callback is in this form:
- ``grpc://{sessionguid}/{tag}?enum={attribute}&uid={objectid}``.
-
- Only one callback with the noted tag can be used in the session.
-
- Parameters
- ----------
- target : obj, str
- Name of the target object or name of a class as a string to
- match all objects of that class. A proxy class reference is
- also allowed. For example, ``session.ensight.objs.core``.
- tag : str
- Unique name for the callback. A tag can end with macros of
- the form ``{{attrname}}`` to return the value of an attribute of the
- target object. The macros should take the form of URI queries to
- simplify parsing.
- attr_list : list
- List of attributes of the target that are to result in the callback
- being called if changed.
- method : Callable
- Callable that is called with the returned URL.
- compress : bool, optional
- Whether to call only the last event if a repeated event is generated
- as a result of an action. The default is ``True``. If ``False``, every
- event results in a callback.
-
- Examples
- --------
- A string similar to this is printed when the dataset is loaded and the part list
- changes:
-
- ``Event: grpc://f6f74dae-f0ed-11ec-aa58-381428170733/partlist?enum=PARTS&uid=221``
-
- >>> from ansys.pyensight.core import LocalLauncher
- >>> s = LocalLauncher().start()
- >>> def cb(v: str):
- >>> print("Event:", v)
- >>> s.add_callback("ensight.objs.core", "partlist", ["PARTS"], cb)
- >>> s.load_data(r"D:\ANSYSDev\data\CFX\HeatingCoil_001.res")
- """
- self._establish_connection()
- # shorten the tag up to the query block. Macros are only legal in the query block.
- try:
- idx = tag.index("?")
- short_tag = tag[:idx]
- except ValueError:
- short_tag = tag
- if short_tag in self._callbacks:
- raise RuntimeError(f"A callback for tag '{short_tag}' already exists")
- # Build the addcallback string against the full tag
- flags = ""
- if compress:
- flags = ",flags=ensight.objs.EVENTMAP_FLAG_COMP_GLOBAL"
- if hasattr(target, "__OBJID__"):
- target = self.remote_obj(target.__OBJID__)
- cmd = f"ensight.objs.addcallback({target},None,"
- cmd += f"'{self._grpc.prefix()}{tag}',attrs={repr(attr_list)}{flags})"
- callback_id = self.cmd(cmd)
- # if this is the first callback, start the event stream
- if len(self._callbacks) == 0:
- self._grpc.event_stream_enable(callback=self._event_callback)
- # record the callback id along with the callback
- # if the callback URL starts with the short_tag, we make the callback
- self._callbacks[short_tag] = (callback_id, method)
-
- def remove_callback(self, tag: str) -> None:
- """Remove a callback that the :func`add_callback`
- method started.
-
- Given a tag used to register a previous callback (``add_callback()``), remove
- this callback from the EnSight callback system.
-
- Parameters
- ----------
- tag : str
- Callback string tag.
-
- Raises
- ------
- RuntimeError
- If an invalid tag is supplied.
-
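- Examples
- --------
- Remove the callback registered under the tag ``"partlist"`` in the
- :func:`add_callback` example:
-
- >>> session.remove_callback("partlist")
-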
- """
- if tag not in self._callbacks:
- raise RuntimeError(f"A callback for tag '{tag}' does not exist")
- callback_id = self._callbacks[tag][0]
- del self._callbacks[tag]
- cmd = f"ensight.objs.removecallback({callback_id})"
- _ = self.cmd(cmd, do_eval=False)
-
- def _event_callback(self, cmd: str) -> None:
- """Pass the URL back to the registered callback.
-
- This method matches the ``cmd`` URL with the registered callback and then
- makes the callback.
-
- Parameters
- ----------
- cmd : str
- URL callback from the gRPC event stream. The URL has this
- form: ``grpc://{sessionguid}/{tag}?enum={attribute}&uid={objectid}``.
-
- """
- # EnSight will always tack on '?enum='. If our tag uses ?macro={{attr}},
- # you will get too many '?' in the URL, making it difficult to parse.
- # So, we look for "?..." and a following "?enum=". If we see this, convert
- # "?enum=" into "&enum=".
- idx_question = cmd.find("?")
- idx_enum = cmd.find("?enum=")
- if idx_question < idx_enum:
- cmd = cmd.replace("?enum=", "&enum=")
- parse = urlparse(cmd)
- tag = parse.path[1:]
- for key, value in self._callbacks.items():
- # remember "key" is a shortened version of tag
- if tag.startswith(key):
- value[1](cmd)
- return
- print(f"Unhandled event: {cmd}")
-
- # Object API helper functions
- @staticmethod
- def remote_obj(ensobjid: int) -> str:
- """Generate a string that, for a given ``ENSOBJ`` object ID, returns
- a proxy object instance.
-
- Parameters
- ----------
- ensobjid: int
- ID of the ``ENSOBJ`` object.
-
- Returns
- -------
- str
- String for the proxy object instance.
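-
- Examples
- --------
- The generated string embeds the ID directly:
-
- >>> Session.remote_obj(221)
- 'ensight.objs.wrap_id(221)'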
- """
- return f"ensight.objs.wrap_id({ensobjid})"
-
- def _prune_hash(self) -> None:
- """Prune the ``ENSOBJ`` hash table.
-
- The ``ENSOBJ`` hash table may need flushing if it gets too big. Do that here."""
- if len(self._ensobj_hash) > 1000000:
- self._ensobj_hash = {}
-
- def add_ensobj_instance(self, obj: "ENSOBJ") -> None:
- """Add a new ``ENSOBJ`` object instance to the hash table.
-
- Parameters
- ----------
- obj : ENSOBJ
- ``ENSOBJ`` object instance.
- """
- self._ensobj_hash[obj.__OBJID__] = obj
-
- def obj_instance(self, ensobjid: int) -> Optional["ENSOBJ"]:
- """Get any existing proxy object associated with an ID.
-
- Parameters
- ----------
- ensobjid: int
- ID of the ``ENSOBJ`` object.
-
- """
- return self._ensobj_hash.get(ensobjid, None)
-
- def _obj_attr_subtype(self, classname: str) -> Tuple[Optional[int], Optional[dict]]:
- """Get subtype information for a given class.
-
- For an input class name, this method returns the proper Python proxy class name and,
- if the class supports subclasses, the attribute ID of the differentiating attribute.
-
- Parameters
- ----------
- classname : str
- Root class name to look up.
-
- Returns
- -------
- Tuple[Optional[int], Optional[dict]]
- (attr_id, subclassnamedict): Attribute used to differentiate between classes
- and a dictionary of the class names for each value of the attribute.
-
- """
- if classname == "ENS_PART":
- return self.ensight.objs.enums.PARTTYPE, self._subtype_tables[classname]
-
- elif classname == "ENS_ANNOT":
- return self.ensight.objs.enums.ANNOTTYPE, self._subtype_tables[classname]
-
- elif classname == "ENS_TOOL":
- return self.ensight.objs.enums.TOOLTYPE, self._subtype_tables[classname]
-
- return None, None
-
- def _convert_ctor(self, s: str) -> str:
- """Convert ENSOBJ object references into executable code in __repl__ strings.
-
- The __repr__() implementation for an ENSOBJ subclass generates strings like these::
-
- Class: ENS_GLOBALS, CvfObjID: 221, cached:yes
- Class: ENS_PART, desc: 'Sphere', CvfObjID: 1078, cached:no
- Class: ENS_PART, desc: 'engine', PartType: 0, CvfObjID: 1097, cached:no
- Class: ENS_GROUP, desc: '', Owned, CvfObjID: 1043, cached:no
-
- This method detects strings like those and converts them into strings like these::
-
- session.ensight.objs.ENS_GLOBALS(session, 221)
- session.ensight.objs.ENS_PART_MODEL(session, 1078, attr_id=1610612792, attr_value=0)
-
- where:
-
- 1610612792 is ensight.objs.enums.PARTTYPE.
-
- If a proxy object for the ID already exists, it can also generate strings like this::
-
- session.obj_instance(221)
-
-
- Parameters
- ----------
- s : str
- String to convert.
-
- """
- self._prune_hash()
- offset = 0
- while True:
- # Find the object repl block to replace
- id = s.find("CvfObjID:", offset)
- if id == -1:
- break
- start = s.find("Class: ", offset)
- if (start == -1) or (start > id):
- break
- tail_len = 11
- tail = s.find(", cached:no", offset)
- if tail == -1:
- tail_len = 12
- tail = s.find(", cached:yes", offset)
- if tail == -1: # pragma: no cover
- break # pragma: no cover
- # just this object substring
- tmp = s[start + 7 : tail]
- # Subtype (PartType:, AnnotType:, ToolType:)
- subtype = None
- for name in ("PartType:", "AnnotType:", "ToolType:"):
- location = tmp.find(name)
- if location != -1:
- subtype = int(tmp[location + len(name) :].split(",")[0])
- break
- # Owned flag
- owned_flag = "Owned," in tmp
- # isolate the block to replace
- prefix = s[:start]
- suffix = s[tail + tail_len :]
- # parse out the object id and classname
- objid = int(s[id + 9 : tail])
- classname = s[start + 7 : tail]
- comma = classname.find(",")
- classname = classname[:comma]
- # pick the subclass based on the classname
- attr_id, classname_lookup = self._obj_attr_subtype(classname)
- # generate the replacement text
- if objid in self._ensobj_hash:
- replace_text = f"session.obj_instance({objid})"
- else:
- subclass_info = ""
- if attr_id is not None:
- if subtype is not None:
- # the 2024 R2 interface includes the subtype
- if (classname_lookup is not None) and (subtype in classname_lookup):
- classname = classname_lookup[subtype]
- subclass_info = f",attr_id={attr_id}, attr_value={subtype}"
- elif classname_lookup is not None: # pragma: no cover
- # if a "subclass" case and no subclass attrid value, ask for it...
- remote_name = self.remote_obj(objid)
- cmd = f"{remote_name}.getattr({attr_id})"
- attr_value = self.cmd(cmd)
- if attr_value in classname_lookup:
- classname = classname_lookup[attr_value]
- subclass_info = f",attr_id={attr_id}, attr_value={attr_value}"
- if owned_flag:
- subclass_info += ",owned=True"
- replace_text = f"session.ensight.objs.{classname}(session, {objid}{subclass_info})"
- if replace_text is None: # pragma: no cover
- break # pragma: no cover
- offset = start + len(replace_text)
- s = prefix + replace_text + suffix
- s = s.strip()
- if s.startswith("[") and s.endswith("]"):
- s = f"ensobjlist({s}, session=session)"
- return s
-
- def capture_context(self, full_context: bool = False) -> "enscontext.EnsContext":
- """Capture the current EnSight instance state.
-
- This method causes the EnSight instance to save a context and return an ``EnsContext``
- object representing that saved state.
-
- Parameters
- ----------
- full_context : bool, optional
-            Whether to include all aspects of the EnSight instance. The default is ``False``.
-
- Returns
- -------
- enscontext.EnsContext
-
- Examples
- --------
- >>> ctx = session.capture_context()
- >>> ctx.save("session_context.ctxz")
-
- """
- self.cmd("import ansys.pyensight.core.enscontext", do_eval=False)
- data_str = self.cmd(
- f"ansys.pyensight.core.enscontext._capture_context(ensight,{full_context})",
- do_eval=True,
- )
- context = EnsContext()
- context._from_data(data_str)
- return context
-
- def restore_context(self, context: "enscontext.EnsContext") -> None:
- """Restore the current EnSight instance state.
-
- This method restores EnSight to the state stored in an ``EnsContext``
- object that was either read from disk or returned by the
- :func:`capture_context` method.
-
- Parameters
- ----------
- context : enscontext.EnsContext
- Context to set the current EnSight instance to.
-
- Examples
- --------
- >>> tmp_ctx = session.capture_context()
- >>> session.restore_context(EnsContext("session_context.ctxz"))
- >>> session.restore_context(tmp_ctx)
- """
- data_str = context._data(b64=True)
- self.cmd("import ansys.pyensight.core.enscontext", do_eval=False)
- self.cmd(
- f"ansys.pyensight.core.enscontext._restore_context(ensight,'{data_str}')", do_eval=False
- )
-
- def ensight_version_check(
- self,
- version: Union[int, str],
- message: str = "",
- exception: bool = True,
- strict: bool = False,
- ) -> bool:
- """Check if the session is a specific version.
-
-        Different PyEnSight sessions may host different versions of EnSight.
-        This method compares the version of the remote EnSight session to a specific
-        version number. If the remote EnSight version is at least the specified version,
-        this method returns True. If it is earlier, an exception is raised. The caller
-        can supply the error text to include in the exception and can require an exact
-        version match rather than "the specified version or later". The exception can
-        also be suppressed so that the method simply returns True or False, for cases
-        where an alternative implementation might be used.
-
- Parameters
- ----------
- version : Union[int, str]
- The version number to compare the EnSight version against.
- message : str
- The message string to be used as the text for any raised exception.
- exception : bool
- If True, and the version comparison fails, an InvalidEnSightVersion is raised.
- Otherwise, the result of the comparison is returned.
- strict : bool
- If True, the comparison of the two versions will only pass if they
- are identical. If False, if the EnSight version is greater than or
- equal to the specified version the comparison will pass.
-
- Returns
- -------
- True if the comparison succeeds, False otherwise.
-
- Raises
- ------
- InvalidEnSightVersion if the comparison fails and exception is True.
- """
- ens_version = int(self.ensight.version("suffix"))
- # handle various input formats
- target = version
- if isinstance(target, str): # pragma: no cover
- # could be 'year RX' or the suffix as a string
- if "R" in target:
- tmp = [int(x) for x in target.split("R")]
- target = (tmp[0] - 2000) * 10 + tmp[1]
- else:
- target = int(target)
- # check validity
- valid = ens_version == target
- at_least = ""
- if not strict: # pragma: no cover
- at_least = "at least "
- valid = ens_version >= target
- if (not valid) and exception:
- ens_version = self.ensight.version("version-full")
- base_msg = f" ({at_least}'{version}' required, '{ens_version}' current)"
- if message: # pragma: no cover
- message += base_msg # pragma: no cover
- else:
- message = f"A newer version of EnSight is required to use this API:{base_msg}"
- raise InvalidEnSightVersion(message)
- return valid
-
- def find_remote_unused_ports(
- self,
- count: int,
- start: Optional[int] = None,
- end: Optional[int] = None,
-        avoid: Optional[List[int]] = None,
- ) -> Optional[List[int]]:
- """
- Find "count" unused ports on the host system. A port is considered
- unused if it does not respond to a "connect" attempt. Walk the ports
- from 'start' to 'end' looking for unused ports and avoiding any ports
- in the 'avoid' list. Stop once the desired number of ports have been
- found. If an insufficient number of ports were found, return None.
- An admin user check is used to skip [1-1023].
-
- Parameters
- ----------
- count: int
- number of unused ports to find
- start: int
- the first port to check or None (random start)
- end: int
- the last port to check or None (full range check)
- avoid: list
- an optional list of ports not to check
-
- Returns
- -------
- the detected ports or None on failure
- """
- cmd = "from cei import find_unused_ports\n"
- cmd += f"ports = find_unused_ports({count}, start={start}, end={end}, avoid={avoid})"
- self.cmd(cmd, do_eval=False)
- return self.cmd("ports")
+"""Session module.
+
+The ``Session`` module allows PyEnSight to control the EnSight session.
+
+Examples:
+
+>>> from ansys.pyensight.core import LocalLauncher
+>>> session = LocalLauncher().start()
+>>> type(session)
+<class 'ansys.pyensight.core.session.Session'>
+
+"""
+import atexit
+import importlib.util
+from os import listdir
+import os.path
+import platform
+import sys
+import textwrap
+import time
+import types
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
+from urllib.parse import urlparse
+from urllib.request import url2pathname
+import uuid
+import webbrowser
+
+from ansys.pyensight.core.enscontext import EnsContext
+from ansys.pyensight.core.launcher import Launcher
+from ansys.pyensight.core.listobj import ensobjlist
+from ansys.pyensight.core.renderable import (
+ RenderableDeepPixel,
+ RenderableEVSN,
+ RenderableFluidsWebUI,
+ RenderableImage,
+ RenderableMP4,
+ RenderableSGEO,
+ RenderableVNC,
+ RenderableVNCAngular,
+ RenderableWebGL,
+)
+import requests
+
+if TYPE_CHECKING:
+ from ansys.api.pyensight import ensight_api
+ from ansys.pyensight.core import enscontext, ensight_grpc, renderable
+ from ansys.pyensight.core.ensobj import ENSOBJ
+
+
+class InvalidEnSightVersion(Exception):
+ pass
+
+
+class Session:
+ """Provides for accessing an EnSight ``Session`` instance.
+
+ The ``Session`` object wraps the various connections to an EnSight instance. It includes
+ the location of the installation and the gRPC, HTML and WS ports used to talk to the
+ EnSight session. In most cases, a ``Session`` instance is created using Launcher
+ class methods, but if the EnSight session is already running, an instance can be
+ created directly to wrap this running EnSight session.
+
+ If the ``Session`` object is created via a Launcher ``start()`` method call, when the
+ session object is garbage collected, the EnSight instance is automatically stopped.
+ To prevent this behavior (and leave the EnSight instance running), set the
+ ``halt_ensight_on_close`` property to ``False``.
+
+    A gRPC connection is required to interact with an EnSight session. The host, gRPC
+    port number, and secret key must be specified. The HTML and WS ports, which are
+    used to enable the :func:`show` method, also require that an instance of the
+    websocket server is running.
+
+ Parameters
+ ----------
+ host : str, optional
+ Name of the host that the EnSight gRPC service is running on.
+ The default is ``"127.0.0.1"``, which is the localhost.
+ install_path : str, optional
+ Path to the CEI directory to launch EnSight from.
+ The default is ``None``.
+ secret_key : str, optional
+ Shared session secret key for validating the gRPC communication.
+ The default is ``""``.
+ grpc_port : int, optional
+ Port number of the EnSight gRPC service. The default is ``12345``.
+    html_hostname : str, optional
+        Optional hostname for HTML connections if different from ``host``.
+        Used by Ansys Lab and reverse proxy servers.
+ html_port : int, optional
+ Port number of the websocket server's HTTP server. The default is
+ ``None``.
+ ws_port : int, optional
+ Port number of the websocket server's WS server. The default is
+ ``None``.
+ session_directory : str, optional
+ Directory on the server for local data storage. The default is
+ ``None``.
+ timeout : float, optional
+ Number of seconds to try a gRPC connection before giving up.
+ The default is ``120``.
+ rest_api : bool, optional
+ Whether to enable the EnSight REST API for the remote EnSight instance.
+ The default is ``False``.
+    sos : bool, optional
+        Whether the remote EnSight instance is to use the SOS (Server
+        of Servers) feature. The default is ``False``.
+    webui_port : int, optional
+        Port number of the EnSight web UI, if available. The default
+        is ``None``.
+
+ Examples
+ --------
+
+ >>> from ansys.pyensight.core import Session
+    >>> session = Session(host="127.0.0.1", grpc_port=12345, html_port=8000, ws_port=8100)
+
+ >>> from ansys.pyensight.core import LocalLauncher
+ >>> session = LocalLauncher().start()
+
+ >>> # Launch an instance of EnSight, then create a second connection to the instance
+ >>> from ansys.pyensight.core import LocalLauncher, Session
+ >>> launched_session = LocalLauncher().start()
+ >>> # Get a string that can be used to create a second connection
+ >>> session_string = str(launched_session)
+ >>> # Create a second connection to the same EnSight instance
+ >>> connected_session = eval(session_string)
+
+ """
+
+ def __init__(
+ self,
+ host: str = "127.0.0.1",
+ install_path: Optional[str] = None,
+ secret_key: str = "",
+ grpc_port: int = 12345,
+ html_hostname: Optional[str] = None,
+ html_port: Optional[int] = None,
+ ws_port: Optional[int] = None,
+ session_directory: Optional[str] = None,
+ timeout: float = 120.0,
+ rest_api: bool = False,
+ sos: bool = False,
+ webui_port: Optional[int] = None,
+ ) -> None:
+ # every session instance needs a unique name that can be used as a cache key
+ self._session_name = str(uuid.uuid1())
+ # when objects come into play, we can reuse them, so hash ID to instance here
+ self._ensobj_hash: Dict[int, "ENSOBJ"] = {}
+ self._language = "en"
+ self._rest_api_enabled = rest_api
+ self._sos_enabled = sos
+ self._timeout = timeout
+ self._cei_home = ""
+ self._cei_suffix = ""
+ self._hostname = host
+ self._install_path = install_path
+ self._launcher = None
+ if html_hostname == "" or html_hostname is None:
+ # if we weren't given an html host, use the hostname
+ self._html_hostname = self._hostname
+ else:
+ self._html_hostname = html_hostname
+ self._html_port = html_port
+ self._ws_port = ws_port
+ self._secret_key = secret_key
+ self._grpc_port = grpc_port
+ self._halt_ensight_on_close = True
+ self._callbacks: Dict[str, Tuple[int, Any]] = dict()
+ self._webui_port = webui_port
+ # if the caller passed a session directory we will assume they are
+ # creating effectively a proxy Session and create a (stub) launcher
+ if session_directory is not None:
+ self._launcher = Launcher()
+ self._launcher.session_directory = session_directory
+ # The stub will not know about us
+ self._halt_ensight_on_close = False
+
+ # are we in a jupyter notebook?
+ try:
+ _ = get_ipython() # type: ignore
+ self._jupyter_notebook = True # pragma: no cover
+ except NameError:
+ self._jupyter_notebook = False
+
+ # Connect to the EnSight instance
+ from ansys.api.pyensight import ensight_api # pylint: disable=import-outside-toplevel
+ from ansys.pyensight.core import ensight_grpc # pylint: disable=import-outside-toplevel
+
+ self._ensight = ensight_api.ensight(self)
+ self._build_utils_interface()
+ self._grpc = ensight_grpc.EnSightGRPC(
+ host=self._hostname, port=self._grpc_port, secret_key=self._secret_key
+ )
+ self._grpc.session_name = self._session_name
+
+ # establish the connection with retry
+ self._establish_connection(validate=True)
+
+ # update the enums to match current EnSight instance
+ cmd = "{key: getattr(ensight.objs.enums, key) for key in dir(ensight.objs.enums)}"
+ new_enums = self.cmd(cmd)
+ for key, value in new_enums.items():
+ if key.startswith("__") and (key != "__OBJID__"):
+ continue
+ setattr(self._ensight.objs.enums, key, value)
+
+ # create ensight.core
+ self._ensight.objs.core = self.cmd("ensight.objs.core")
+
+ # get the remote Python interpreter version
+ self.cmd("import platform", do_eval=False)
+ self._ensight_python_version = self.cmd("platform.python_version_tuple()")
+
+ # Because this session can have allocated significant external resources
+ # we very much want a chance to close it up cleanly. It is legal to
+ # call close() twice on this class if needed.
+ atexit.register(self.close)
+
+ # Speed up subtype lookups:
+ self._subtype_tables = {}
+        self._subtype_tables["ENS_PART"] = {
+            0: "ENS_PART_MODEL",
+            1: "ENS_PART_CLIP",
+            2: "ENS_PART_CONTOUR",
+            3: "ENS_PART_DISCRETE_PARTICLE",
+            4: "ENS_PART_FRAME",
+            5: "ENS_PART_ISOSURFACE",
+            6: "ENS_PART_PARTICLE_TRACE",
+            7: "ENS_PART_PROFILE",
+            8: "ENS_PART_VECTOR_ARROW",
+            9: "ENS_PART_ELEVATED_SURFACE",
+            10: "ENS_PART_DEVELOPED_SURFACE",
+            15: "ENS_PART_BUILT_UP",
+            16: "ENS_PART_TENSOR_GLYPH",
+            17: "ENS_PART_FX_VORTEX_CORE",
+            18: "ENS_PART_FX_SHOCK",
+            19: "ENS_PART_FX_SEP_ATT",
+            20: "ENS_PART_MAT_INTERFACE",
+            21: "ENS_PART_POINT",
+            22: "ENS_PART_AXISYMMETRIC",
+            24: "ENS_PART_VOF",
+            25: "ENS_PART_AUX_GEOM",
+            26: "ENS_PART_FILTER",
+        }
+        self._subtype_tables["ENS_ANNOT"] = {
+            0: "ENS_ANNOT_TEXT",
+            1: "ENS_ANNOT_LINE",
+            2: "ENS_ANNOT_LOGO",
+            3: "ENS_ANNOT_LGND",
+            4: "ENS_ANNOT_MARKER",
+            5: "ENS_ANNOT_ARROW",
+            6: "ENS_ANNOT_DIAL",
+            7: "ENS_ANNOT_GAUGE",
+            8: "ENS_ANNOT_SHAPE",
+        }
+        self._subtype_tables["ENS_TOOL"] = {
+            0: "ENS_TOOL_CURSOR",
+            1: "ENS_TOOL_LINE",
+            2: "ENS_TOOL_PLANE",
+            3: "ENS_TOOL_BOX",
+            4: "ENS_TOOL_CYLINDER",
+            5: "ENS_TOOL_CONE",
+            6: "ENS_TOOL_SPHERE",
+            7: "ENS_TOOL_REVOLUTION",
+        }
+
+ def __repr__(self):
+ # if this is called from in the ctor, self.launcher might be None.
+ session_dir = ""
+ if self.launcher:
+ session_dir = self.launcher.session_directory
+ s = f"Session(host='{self.hostname}', secret_key='{self.secret_key}', "
+ s += f"sos={self.sos}, rest_api={self.rest_api}, "
+ s += f"html_hostname='{self.html_hostname}', html_port={self.html_port}, "
+ s += f"grpc_port={self._grpc_port}, "
+ s += f"ws_port={self.ws_port}, session_directory=r'{session_dir}')"
+ return s
+
+ def _establish_connection(self, validate: bool = False) -> None:
+ """Establish a gRPC connection to the EnSight instance.
+
+ Parameters
+ ----------
+        validate : bool
+            If ``True``, verify the connection by actually communicating with
+            EnSight. The default is ``False``.
+ """
+ time_start = time.time()
+ while time.time() - time_start < self._timeout: # pragma: no cover
+ if self._grpc.is_connected():
+ try:
+ if validate:
+ self._cei_home = self.cmd("ensight.version('CEI_HOME')")
+ self._cei_suffix = self.cmd("ensight.version('suffix')")
+ self._check_rest_connection()
+ return
+ except OSError: # pragma: no cover
+ pass # pragma: no cover
+ self._grpc.connect(timeout=self._timeout)
+ raise RuntimeError("Unable to establish a gRPC connection to EnSight.") # pragma: no cover
+
+ def _check_rest_connection(self) -> None:
+ """Validate the REST API connection works
+
+ Use requests to see if the REST API is up and running (it takes time
+ for websocketserver to make a gRPC connection as well).
+
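+        A hedged sketch of the probe this method performs, built only from the
+        endpoint, payload, and Bearer token visible in the code below:
+
+        >>> import requests
+        >>> url = f"http://{session.hostname}:{session.html_port}/ensight/v1/session/exec"
+        >>> headers = {"Authorization": f"Bearer {session.secret_key}"}
+        >>> requests.put(url, json="enscl.rest_test = 30*20", headers=headers)
+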
+ """
+ if not self.rest_api:
+ return
+ #
+ #
+ # even when using PIM and a proxy server (Ansys Lab) this connects
+ # directly from the python running in the Notebook (the front-end)
+ # to the EnSight Docker Container and not the proxy server.
+ # Thus, here we use 'http', the private hostname, and the html port
+ # (which is the same on the proxy server).
+ url = f"http://{self._hostname}:{self.html_port}/ensight/v1/session/exec"
+ time_start = time.time()
+ while time.time() - time_start < self._timeout:
+ try:
+ _ = requests.put(
+ url,
+ json="enscl.rest_test = 30*20",
+ headers=dict(Authorization=f"Bearer {self.secret_key}"),
+ )
+ return
+ except Exception:
+ pass
+ time.sleep(0.5)
+ raise RuntimeError("Unable to establish a REST connection to EnSight.") # pragma: no cover
+
+ @property
+ def name(self) -> str:
+ """The session name is a unique identifier for this Session instance. It
+ is used by EnSight to maintain session specific data values within the
+ EnSight instance."""
+ return self._session_name
+
+ @property
+ def language(self) -> str:
+ """Current language specification for the EnSight session. Various
+ information calls return their information in the target language
+ if possible. The default is ``"en"``.
+
+ Examples
+ --------
+
+ >>> session.language = "en"
+ >>> session.ensight.objs.core.attrinfo(session.ensight.objs.enums.PREDEFINEDPALETTES)
+ >>> session.language = "zh"
+ >>> session.ensight.objs.core.attrinfo(session.ensight.objs.enums.PREDEFINEDPALETTES)
+
+ """
+ return self._language
+
+ @language.setter
+ def language(self, value: str) -> None:
+ self._language = value
+ self.cmd(f"ensight.core.tr.changelang(lin='{self._language}')", do_eval=False)
+
+ @property
+ def halt_ensight_on_close(self) -> bool:
+ """Flag for indicating whether to halt EnSight on close. If this property
+ is ``True`` and the session was created via a launcher, when the session
+ is closed, the EnSight instance is stopped.
+
+ .. Note::
+ While this flag prevents the :func:`close`
+ method from shutting down EnSight, depending on how the host Python interpreter is configured,
+ the EnSight session may still be halted. For example, this behavior can
+ occur in Jupyter Lab.
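+
+        Examples
+        --------
+        A minimal sketch; assumes the session was created by a launcher:
+
+        >>> session.halt_ensight_on_close = False
+        >>> session.close()  # the EnSight instance is left running
+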
+ """
+ return self._halt_ensight_on_close
+
+ @halt_ensight_on_close.setter
+ def halt_ensight_on_close(self, value: bool) -> None:
+ self._halt_ensight_on_close = value
+
+ @property
+ def timeout(self) -> float:
+ """Amount of time in seconds to try a gRPC connection before giving up."""
+ return self._timeout
+
+ @timeout.setter
+ def timeout(self, value: float) -> None:
+ self._timeout = value
+
+ @property
+ def cei_home(self) -> str:
+ """Value of ``CEI_HOME`` for the connected EnSight session."""
+ return self._cei_home
+
+ @property
+ def cei_suffix(self) -> str:
+ """Suffix string of the connected EnSight session. For example, ``222``."""
+ return self._cei_suffix
+
+ @property
+ def jupyter_notebook(self) -> bool:
+ """Flag indicating if the session is running in a Jupyter notebook and should use
+ the display features of that interface.
+
+ """
+ return self._jupyter_notebook
+
+ @jupyter_notebook.setter
+ def jupyter_notebook(self, value: bool) -> None:
+ self._jupyter_notebook = value
+
+ @property
+ def ensight(self) -> "ensight_api.ensight":
+ """Core EnSight API wrapper."""
+ return self._ensight
+
+ @property
+ def grpc(self) -> "ensight_grpc.EnSightGRPC":
+ """The gRPC wrapper instance used by this session to access EnSight."""
+ return self._grpc
+
+ @property
+ def secret_key(self) -> str:
+ """Secret key used for communication validation in the gRPC instance."""
+ return self._secret_key
+
+ @property
+ def html_port(self) -> Optional[int]:
+ """Port supporting HTML interaction with EnSight."""
+ return self._html_port
+
+ @property
+ def ws_port(self) -> Optional[int]:
+ """Port supporting WS interaction with EnSight."""
+ return self._ws_port
+
+ @property
+ def hostname(self) -> str:
+ """Hostname of the system hosting the EnSight instance."""
+ return self._hostname
+
+ @property
+ def html_hostname(self) -> str:
+ """Hostname of the system hosting the EnSight web server instance."""
+ return self._html_hostname
+
+ @property
+ def launcher(self) -> "Launcher":
+ """Reference to the launcher instance if a launcher was used to instantiate the session."""
+ return self._launcher
+
+ @launcher.setter
+ def launcher(self, value: "Launcher"):
+ self._launcher = value
+
+ @property
+ def sos(self) -> bool:
+ """
+ Flag indicating if the remote EnSight session is running in SOS (Server of Server) mode.
+ """
+ return self._sos_enabled
+
+ @property
+ def rest_api(self) -> bool:
+ """
+ Flag indicating if the remote EnSight session supports the REST API.
+ """
+ return self._rest_api_enabled
+
+ @staticmethod
+ def help():
+ """Open the documentation for PyEnSight in a web browser."""
+ url = "https://ensight.docs.pyansys.com/"
+ webbrowser.open(url)
+
+ def copy_to_session(
+ self,
+ local_prefix: str,
+ filelist: List[str],
+ remote_prefix: Optional[str] = None,
+ progress: bool = False,
+ ) -> list:
+ """Copy a collection of files into the EnSight session.
+
+ Copy files from the local filesystem into the filesystem that is hosting
+ the EnSight instance.
+
+ .. note::
+            For a :class:`LocalLauncher <ansys.pyensight.core.LocalLauncher>`
+ instance, these are the same filesystems.
+
+ Parameters
+ ----------
+ local_prefix : str
+ URL prefix to use for all files specified for the ``filelist``
+ parameter. The only protocol supported is ``'file://'``, which
+ is the local filesystem.
+ filelist : list
+ List of files to copy. These files are prefixed with ``local_prefix``
+ and written relative to the ``remote_prefix`` parameter appended to
+ ``session.launcher.session_directory``.
+ remote_prefix : str
+ Directory on the remote (EnSight) filesystem, which is the
+ destination for the files. This prefix is appended to
+ ``session.launcher.session_directory``.
+ progress : bool, optional
+ Whether to show a progress bar. The default is ``False``. If ``True`` and
+ the ``tqdm`` module is available, a progress bar is shown.
+
+ Returns
+ -------
+ list
+ List of the filenames that were copied and their sizes.
+
+ Examples
+ --------
+ >>> the_files = ["fluent_data_dir", "ensight_script.py"]
+ >>> session.copy_to_session("file:///D:/data", the_files, progress=True)
+
+ >>> the_files = ["fluent_data_dir", "ensight_script.py"]
+ >>> session.copy_to_session("file:///scratch/data", the_files, remote_prefix="data")
+
+ """
+ uri = urlparse(local_prefix)
+ if uri.scheme != "file":
+ raise RuntimeError("Only the file:// protocol is supported for the local_prefix")
+ localdir = url2pathname(uri.path)
+
+ remote_functions = textwrap.dedent(
+ """\
+ import os
+ def copy_write_function__(filename: str, data: bytes) -> None:
+ os.makedirs(os.path.dirname(filename), exist_ok=True)
+ with open(filename, "ab") as fp:
+ fp.write(data)
+ """
+ )
+
+ self.cmd(remote_functions, do_eval=False)
+
+ out = []
+ dirlen = 0
+ if localdir: # pragma: no cover
+ # we use dirlen + 1 here to remove the '/' inserted by os.path.join()
+ dirlen = len(localdir) + 1
+ for item in filelist:
+ try:
+ name = os.path.join(localdir, item)
+ if os.path.isfile(name):
+ out.append((name[dirlen:], os.stat(name).st_size))
+ else:
+ for root, _, files in os.walk(name):
+ for filename in files:
+ fullname = os.path.join(root, filename)
+ out.append((fullname[dirlen:], os.stat(fullname).st_size))
+ except Exception:
+ pass
+ if progress: # pragma: no cover
+ try:
+ from tqdm.auto import tqdm
+ except ImportError:
+ tqdm = list
+ else:
+ tqdm = list
+ for item in tqdm(out):
+ filename = os.path.join(localdir, item[0])
+ out_dir = self.launcher.session_directory.replace("\\", "/")
+ if remote_prefix:
+ out_dir += f"/{remote_prefix}"
+ name = out_dir + f"/{item[0]}"
+ name = name.replace("\\", "/")
+ # Walk the file in chunk size blocks
+ chunk_size = 1024 * 1024
+ with open(filename, "rb") as fp:
+ while True:
+ data = fp.read(chunk_size)
+ if data == b"":
+ break
+ self.cmd(
+ f"copy_write_function__(r'{name}', {data!r})", do_eval=False
+ ) # pragma: no cover
+ return out
+
+ def copy_from_session(
+ self,
+ local_prefix: str,
+ filelist: List[str],
+ remote_prefix: Optional[str] = None,
+ progress: bool = False,
+ ) -> list:
+ """Copy a collection of files out of the EnSight session.
+
+ Copy files from the filesystem of the remote EnSight instance to the
+ filesystem of the local PyEnsight instance.
+
+ .. note::
+            For a :class:`LocalLauncher <ansys.pyensight.core.LocalLauncher>`
+ instance, these are the same filesystems.
+
+ Parameters
+ ----------
+ local_prefix : str
+ URL prefix of the location to save the files to. The only
+ protocol currently supported is ``'file://'``, which is the
+ local filesystem.
+ filelist : list
+ List of the files to copy. These files are prefixed
+ with ``session.launcher.session_directory/remote_prefix`` and written
+ relative to URL prefix specified for the ``local_prefix`` parameter.
+ remote_prefix : str, optional
+ Directory on the remote (EnSight) filesystem that is the source
+ for the files. This prefix is appended to ``session.launcher.session_directory``.
+ progress : bool, optional
+ Whether to show a progress bar. The default is ``False``. If ``True`` and
+ the ``tqdm`` module is available, a progress bar is shown.
+
+ Returns
+ -------
+ list
+ List of the files that were copied.
+
+ Examples
+ --------
+ >>> the_files = ["fluent_data_dir", "ensight_script.py"]
+ >>> session.copy_from_session("file:///D:/restored_data", the_files, progress=True)
+
+ >>> the_files = ["fluent_data_dir", "ensight_script.py"]
+ >>> session.copy_from_session("file:///scratch/restored_data", the_files,
+ remote_prefix="data")
+ """
+
+ uri = urlparse(local_prefix)
+ if uri.scheme != "file":
+ raise RuntimeError("Only the file:// protocol is supported for the local_prefix")
+ localdir = url2pathname(uri.path)
+
+ remote_functions = textwrap.dedent(
+ """\
+ import os
+ def copy_walk_function__(remotedir: str, filelist: list) -> None:
+ out = []
+ dirlen = 0
+ if remotedir:
+ dirlen = len(remotedir) + 1
+ for item in filelist:
+ try:
+ name = os.path.join(remotedir, item)
+ if os.path.isfile(name):
+ out.append((name[dirlen:], os.stat(name).st_size))
+ else:
+ for root, _, files in os.walk(name):
+ for filename in files:
+ fullname = os.path.join(root, filename)
+ out.append((fullname[dirlen:], os.stat(fullname).st_size))
+ except Exception:
+ pass
+ return out
+ # (needed for flake8)
+ def copy_read_function__(filename: str, offset: int, numbytes: int) -> bytes:
+ with open(filename, "rb") as fp:
+ fp.seek(offset)
+ data = fp.read(numbytes)
+ return data
+ """
+ )
+
+ self.cmd(remote_functions, do_eval=False)
+
+ remote_directory = self.launcher.session_directory
+ if remote_prefix:
+ remote_directory = f"{remote_directory}/{remote_prefix}"
+ remote_directory = remote_directory.replace("\\", "/")
+ names = self.cmd(f"copy_walk_function__(r'{remote_directory}', {filelist})", do_eval=True)
+ if progress:
+ try:
+ from tqdm.auto import tqdm
+ except ImportError:
+ tqdm = list
+ else:
+ tqdm = list
+ for item in tqdm(names):
+ name = f"{remote_directory}/{item[0]}".replace("\\", "/")
+ full_name = os.path.join(localdir, item[0])
+ os.makedirs(os.path.dirname(full_name), exist_ok=True)
+ with open(full_name, "wb") as fp:
+ offset = 0
+ chunk_size = 1024 * 1024
+ while True:
+ data = self.cmd(
+ f"copy_read_function__(r'{name}', {offset}, {chunk_size})", do_eval=True
+ )
+ if len(data) == 0:
+ break
+ fp.write(data)
+ offset += chunk_size
+ return names
+
+ def run_script(self, filename: str) -> Optional[types.ModuleType]:
+ """Run an EnSight Python script file.
+
+ In EnSight, there is a notion of a Python *script* that is normally run line by
+ line in EnSight. In such scripts, the ``ensight`` module is assumed to be preloaded.
+ This method runs such scripts by importing them as modules and running the commands
+        through the PyEnSight interface. This is done by installing the PyEnSight
+        ``ensight`` interface into the module before it is imported. This makes it
+        possible to use a Python debugger with an EnSight Python script, using the
+        PyEnSight interface.
+
+ .. note::
+
+ Because the Python script is imported as a module, the script filename must
+ have a ``.py`` extension.
+
+
+ Parameters
+ ----------
+ filename : str
+ Filename of the Python script to run, which is loaded as a module by PyEnSight.
+
+ Returns
+ -------
+ types.ModuleType
+ Imported module.
+
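+        Examples
+        --------
+        A hedged sketch; the script path is illustrative:
+
+        >>> module = session.run_script("/home/user/example_script.py")
+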
+ """
+ dirname = os.path.dirname(filename)
+ if not dirname: # pragma: no cover
+ dirname = "." # pragma: no cover
+ if dirname not in sys.path:
+ sys.path.append(dirname)
+ module_name, _ = os.path.splitext(os.path.basename(filename))
+ # get the module reference
+ spec = importlib.util.find_spec(module_name)
+ if spec: # pragma: no cover
+ module = importlib.util.module_from_spec(spec)
+ # insert an ensight interface into the module
+ if self.ensight:
+ module.ensight = self.ensight # type: ignore
+ # load (run) the module
+ if spec.loader: # pragma: no cover
+ spec.loader.exec_module(module)
+ return module
+ return None # pragma: no cover
+
+ def exec(self, function: Callable, *args, remote: bool = False, **kwargs) -> Any:
+ """Run a function containing EnSight API calls locally or in the EnSight interpreter.
+
+ The function is in this form::
+
+ def myfunc(ensight, *args, **kwargs):
+ ...
+ return value
+
+ The ``exec()`` method allows for the function to be executed in the PyEnSight Python
+ interpreter or the (remote) EnSight interpreter. Thus, a function making a large
+ number of RPC calls can run much faster than if it runs solely in the PyEnSight
+ interpreter.
+
+ These constraints exist on this capability:
+
+ - The function may only use arguments passed to the ``exec()`` method and can only
+ return a single value.
+ - The function cannot modify the input arguments.
+ - The input arguments must be serializable and the PyEnSight Python interpreter
+ version must match the version in EnSight.
+
+        Parameters
+        ----------
+        function : Callable
+            Function to run. It must accept the ``ensight`` interface as its
+            first argument, followed by any additional ``args`` and ``kwargs``.
+        remote : bool, optional
+            Whether to execute the function in the (remote) EnSight interpreter.
+            The default is ``False``.
+
+ Examples
+ --------
+ >>> from ansys.pyensight.core import LocalLauncher
+ >>> session = LocalLauncher().start()
+ >>> options = dict()
+ >>> options['Verbose mode'] = 'OFF'
+ >>> options['Use ghost elements'] = 'OFF'
+ >>> options['Long names'] = 'OFF'
+ >>> options['Compatibility mode'] = 'ON'
+ >>> options['Move Transient Parts'] = 'ON'
+ >>> options['Element type'] = 'Tri 3'
+ >>> options['Boundary ghosts'] = 'None'
+ >>> options['Spread out parts'] = 'Legacy'
+ >>> options['Number of spheres'] = 100
+ >>> options['Number of cubes'] = 100
+ >>> options['Number of planes'] = 0
+ >>> options['Number of elements start'] = 1000
+ >>> options['Number of elements end'] = 1000
+ >>> options['Number of timesteps'] = 1
+ >>> options['Part scaling factor'] = 1.000000e+00
+ >>> options['Random number seed'] = 0
+ >>> options['Number of scalars'] = 3
+ >>> options['Number of vectors'] = 3
+ >>> options['Number of constants'] = 3
+ >>> session.load_data("dummy", file_format="Synthetic", reader_options=options)
+
+        >>> def count(ensight, attr, value):
+        >>>     count = 0
+        >>>     for p in ensight.objs.core.PARTS:
+        >>>         if p.getattr(attr) == value:
+        >>>             count += 1
+        >>>     return count
+        >>> print(count(session.ensight, "VISIBLE", True))
+ >>> print(session.exec(count, "VISIBLE", True))
+ >>> print(session.exec(count, "VISIBLE", True, remote=True))
+
+ """
+ if remote:
+ # remote execution only supported in 2023 R1 or later
+ if int(self._cei_suffix) < 231:
+ raise RuntimeError("Remote function execution only supported in 2023 R1 and later")
+ local_python_version = platform.python_version_tuple()
+ if self._ensight_python_version[0:2] != local_python_version[0:2]:
+ vers = "Local and remote Python versions must match: "
+ vers += ".".join(local_python_version)
+ vers += " vs "
+ vers += ".".join(self._ensight_python_version)
+ raise RuntimeError(vers)
+ import dill # pylint: disable=import-outside-toplevel
+
+ # Create a bound object that allows for direct encoding of the args/kwargs params
+ # The new function would be bound_function(ensight) where the args are captured
+ # in the lambda.
+ bound_function = lambda ens: function( # noqa: E731 # pragma: no cover
+ ens, *args, **kwargs
+ )
+ # Serialize the bound function
+ serialized_function = dill.dumps(bound_function, recurse=True)
+ self.cmd("import dill", do_eval=False)
+ # Run it remotely, passing the instance ensight instead of self._ensight
+ cmd = f"dill.loads(eval(repr({serialized_function})))(ensight)"
+ return self.cmd(cmd)
+ else:
+ return function(self._ensight, *args, **kwargs)
+
+ def show(
+ self,
+ what: str = "image",
+ width: Optional[int] = None,
+ height: Optional[int] = None,
+ temporal: bool = False,
+ aa: int = 4,
+ fps: float = 30.0,
+ num_frames: Optional[int] = None,
+ ) -> "renderable.Renderable":
+ """Capture the current EnSight scene or otherwise make it available for
+ display in a web browser.
+
+ This method generates the appropriate visuals and returns the renderable
+ object for viewing. If the session is in a Jupyter notebook, the cell
+ in which the ``show()`` method is issued is updated with the renderable display.
+
+ Parameters
+ ----------
+ what : str, optional
+ Type of scene display to generate. The default is ``"image"``.
+ Options are:
+
+ * ``image``: Simple rendered PNG image
+ * ``deep_pixel``: EnSight deep pixel image
+ * ``animation``: MPEG4 movie
+ * ``webgl``: Interactive WebGL-based browser viewer
+            * ``remote``: Remote rendering-based interactive EnSight viewer
+            * ``remote_scene``: Remote rendering-based interactive EnSight viewer
+            * ``sgeo``: Interactive WebGL-based browser viewer using the SGEO
+              protocol (EnSight 2023 R1 and later)
+
+ width : int, optional
+ Width of the rendered entity. The default is ``None``.
+ height : int, optional
+ Height of the rendered entity. The default is ``None``.
+ temporal : bool, optional
+ Whether to include all timesteps in WebGL views. The default is ``False``.
+ aa : int, optional
+ Number of antialiasing passes to use when rendering images. The
+ default is ``4``.
+ fps : float, optional
+ Number of frames per second to use for animation playback. The default
+ is ``30``.
+        num_frames : int, optional
+            Number of frames to record at a static timestep for animation
+            playback. The default is ``None``.
+
+ Returns
+ -------
+ renderable.Renderable
+
+ Raises
+ ------
+ RuntimeError
+ If it is not possible to generate the content.
+
+ Examples
+ --------
+ Render an image and display it in a browser. Rotate the scene and update the display.
+
+ >>> image = session.show('image', width=800, height=600)
+ >>> image.browser()
+ >>> session.ensight.view_transf.rotate(30, 30, 0)
+ >>> image.update()
+ >>> image.browser()
+
+ """
+ self._establish_connection()
+ if self._html_port is None:
+ raise RuntimeError("No websocketserver has been associated with this Session")
+
+ kwargs = dict(
+ height=height, width=width, temporal=temporal, aa=aa, fps=fps, num_frames=num_frames
+ )
+ if self._jupyter_notebook: # pragma: no cover
+ from IPython.display import display
+
+ # get the cell DisplayHandle instance
+ kwargs["cell_handle"] = display("", display_id=True)
+
+ render = None
+ if what == "image":
+ render = RenderableImage(self, **kwargs)
+ elif what == "deep_pixel":
+ render = RenderableDeepPixel(self, **kwargs)
+ elif what == "animation":
+ render = RenderableMP4(self, **kwargs)
+ elif what == "webgl":
+ render = RenderableWebGL(self, **kwargs)
+ elif what == "sgeo":
+ # the SGEO protocol is only supported in 2023 R1 and higher
+ if int(self._cei_suffix) < 231:
+ # Use the AVZ viewer in older versions of EnSight
+ render = RenderableWebGL(self, **kwargs)
+ else:
+ render = RenderableSGEO(self, **kwargs)
+ elif what == "remote":
+ render = RenderableVNC(self, **kwargs)
+ elif what == "remote_scene":
+ render = RenderableEVSN(self, **kwargs)
+ # Undocumented. Available only internally
+ elif what == "webensight":
+ render = RenderableVNCAngular(self, **kwargs)
+ elif what == "webui":
+ render = RenderableFluidsWebUI(self, **kwargs)
+
+ if render is None:
+ raise RuntimeError("Unable to generate requested visualization")
+
+ return render
+
+ def cmd(self, value: str, do_eval: bool = True) -> Any:
+ """Run a command in EnSight and return the results.
+
+ Parameters
+ ----------
+ value : str
+            String of the command to run.
+ do_eval : bool, optional
+ Whether to perform an evaluation. The default is ``True``.
+
+ Returns
+ -------
+ result
+ Result of the string being executed as Python inside EnSight.
+
+ Examples
+ --------
+
+ >>> print(session.cmd("10+4"))
+ 14
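+
+        A statement can be run without evaluation by passing ``do_eval=False``:
+
+        >>> session.cmd("import math", do_eval=False)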
+ """
+ self._establish_connection()
+ ret = self._grpc.command(value, do_eval=do_eval)
+ if do_eval:
+ ret = self._convert_ctor(ret)
+ value = eval(ret, dict(session=self, ensobjlist=ensobjlist))
+ return value
+ return ret
+
+ def geometry(self, what: str = "glb") -> bytes:
+ """Return the current EnSight scene as a geometry file.
+
+ Parameters
+ ----------
+ what : str, optional
+ File format to return. The default is ``"glb"``.
+
+ Returns
+ -------
+        bytes
+            Generated geometry file as a bytes object.
+
+ Examples
+ --------
+ >>> data = session.geometry()
+ >>> with open("file.glb", "wb") as fp:
+ >>> fp.write(data)
+
+ """
+ self._establish_connection()
+ return self._grpc.geometry()
+
+ def render(self, width: int, height: int, aa: int = 1) -> bytes:
+ """Render the current EnSight scene and return a PNG image.
+
+ Parameters
+ ----------
+ width : int
+ Width of the rendered image in pixels.
+ height : int
+ Height of the rendered image in pixels.
+ aa : int, optional
+ Number of antialiasing passes to use. The default is ``1``.
+
+ Returns
+ -------
+        bytes
+            PNG image as a bytes object.
+
+ Examples
+ --------
+ >>> data = session.render(1920, 1080, aa=4)
+ >>> with open("file.png", "wb") as fp:
+ >>> fp.write(data)
+
+ """
+ self._establish_connection()
+ return self._grpc.render(width=width, height=height, aa=aa)
+
+ def _release_remote_objects(self, object_id: Optional[int] = None):
+ """
+ Send a command to the remote EnSight session to drop a specific object
+ or all objects from the remote object cache.
+
+ Parameters
+ ----------
+        object_id : int, optional
+            The specific object to drop from the cache. If no object is specified,
+            all remote objects associated with this session are dropped.
+
+ """
+ obj_str = ""
+ if object_id: # pragma: no cover
+ obj_str = f", id={object_id}" # pragma: no cover
+ cmd = f"ensight.objs.release_id('{self.name}'{obj_str})"
+ _ = self.cmd(cmd, do_eval=False)
+
+ def close(self) -> None:
+ """Close the session.
+
+ Close the current session and its gRPC connection.
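+
+        Examples
+        --------
+        A minimal sketch; with the default ``halt_ensight_on_close`` of ``True``,
+        a launcher-started EnSight instance is shut down as well:
+
+        >>> session.close()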
+ """
+ # if version 242 or higher, free any objects we have cached there
+ if self.cei_suffix >= "242":
+ try:
+ self._release_remote_objects()
+ except RuntimeError: # pragma: no cover
+ # handle some intermediate EnSight builds.
+ pass
+ if self._launcher and self._halt_ensight_on_close:
+ self._launcher.close(self)
+ else:
+            # lightweight shutdown, just close the gRPC connection
+ self._grpc.shutdown(stop_ensight=False)
+ self._launcher = None
+
+ def _build_utils_interface(self) -> None:
+ """Build the ``ensight.utils`` interface.
+
+        This method walks the Python files in the ``utils`` directory, creating
+        instances of the classes in those files and placing them in the
+        ``Session.ensight.utils`` namespace.
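+
+        For example, ``utils/query.py`` is imported and an instance of its
+        ``Query`` class is placed at ``session.ensight.utils.query``.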
+ """
+ self._ensight.utils = types.SimpleNamespace()
+ _utils_dir = os.path.join(os.path.dirname(__file__), "utils")
+ if _utils_dir not in sys.path:
+ sys.path.insert(0, _utils_dir)
+ onlyfiles = [f for f in listdir(_utils_dir) if os.path.isfile(os.path.join(_utils_dir, f))]
+ for _basename in onlyfiles:
+            # skip over any files with "_server" or "_cli" in their names
+ if "_server" in _basename or "_cli" in _basename:
+ continue
+ _filename = os.path.join(_utils_dir, _basename)
+ try:
+ # get the module and class names
+ _name = os.path.splitext(os.path.basename(_filename))[0]
+ if _name == "__init__":
+ continue
+ _cap_name = _name[0].upper() + _name[1:]
+ # import the module
+ spec = importlib.util.spec_from_file_location(
+ f"ansys.pyensight.core.utils.{_name}", _filename
+ )
+ if spec: # pragma: no cover
+ _module = importlib.util.module_from_spec(spec)
+ if spec.loader: # pragma: no cover
+ spec.loader.exec_module(_module)
+ # get the class from the module (query.py filename -> Query() object)
+ _the_class = getattr(_module, _cap_name)
+ # Create an instance, using ensight as the EnSight interface
+ # and place it in this module.
+ setattr(self._ensight.utils, _name, _the_class(self._ensight))
+ except Exception as e: # pragma: no cover
+ # Warn on import errors
+ print(f"Error loading ensight.utils from: '{_filename}' : {e}")
+
+ MONITOR_NEW_TIMESTEPS_OFF = "off"
+ MONITOR_NEW_TIMESTEPS_STAY_AT_CURRENT = "stay_at_current"
+ MONITOR_NEW_TIMESTEPS_JUMP_TO_END = "jump_to_end"
+
+ def load_data(
+ self,
+ data_file: str,
+ result_file: Optional[str] = None,
+ file_format: Optional[str] = None,
+ reader_options: Optional[dict] = None,
+ new_case: bool = False,
+ representation: str = "3D_feature_2D_full",
+ monitor_new_timesteps: str = MONITOR_NEW_TIMESTEPS_OFF,
+ ) -> None:
+ """Load a dataset into the EnSight instance.
+
+ Load the data from a given file into EnSight. The new data
+ replaces any currently loaded data in the session.
+
+ Parameters
+ ----------
+ data_file : str
+ Name of the data file to load.
+ result_file : str, optional
+ Name of the second data file for dual-file datasets.
+ file_format : str, optional
+ Name of the EnSight reader to use. The default is ``None``,
+ in which case EnSight selects a reader.
+ reader_options : dict, optional
+ Dictionary of reader-specific option-value pairs that can be used
+ to customize the reader behavior. The default is ``None``.
+ new_case : bool, optional
+ Whether to load the dataset in another case. The default is ``False``,
+ in which case the dataset replaces the one (if any) loaded in the existing
+ current case.
+ representation : str, optional
+ Default representation for the parts loaded. The default is
+ ``"3D_feature_2D_full"``.
+        monitor_new_timesteps : str, optional
+            Whether EnSight monitors for new timesteps. The default is
+            ``MONITOR_NEW_TIMESTEPS_OFF``. The allowed values are
+            ``MONITOR_NEW_TIMESTEPS_OFF``, ``MONITOR_NEW_TIMESTEPS_STAY_AT_CURRENT``,
+            and ``MONITOR_NEW_TIMESTEPS_JUMP_TO_END``.
+
+ Raises
+ ------
+ RuntimeError
+ If EnSight cannot guess the file format or an error occurs while the
+ data is being read.
+
+ Examples
+ --------
+ >>> from ansys.pyensight.core import LocalLauncher
+ >>> session = LocalLauncher().start()
+ >>> session.load_data(r'D:\data\CFX\example_data.res')
+
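+        A hedged sketch passing reader options; the option names shown are from
+        the "Synthetic" reader options used in the :func:`exec` example:
+
+        >>> options = {'Number of spheres': 100, 'Number of timesteps': 1}
+        >>> session.load_data("dummy", file_format="Synthetic", reader_options=options)
+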
+ """
+ self._establish_connection()
+ # what application are we talking to?
+ target = self.cmd("ensight.version('product').lower()")
+ if target == "envision":
+ cmd = f'ensight.data.replace(r"""{data_file}""")'
+ if self.cmd(cmd) != 0:
+ raise RuntimeError("Unable to load the dataset.")
+ return
+
+ # Handle case changes...
+ cmds = [
+ 'ensight.case.link_modelparts_byname("OFF")',
+ 'ensight.case.create_viewport("OFF")',
+ 'ensight.case.apply_context("OFF")',
+ "ensight.case.reflect_model_in(\"'none'\")",
+ ]
+ for cmd in cmds:
+ self.cmd(cmd, do_eval=False)
+
+ if new_case:
+ # New case
+ new_case_name = None
+ for case in self.ensight.objs.core.CASES:
+ if case.ACTIVE == 0:
+ new_case_name = case.DESCRIPTION
+ break
+ if new_case_name is None:
+ raise RuntimeError("No cases available for adding.")
+ cmd = f'ensight.case.add("{new_case_name}")'
+ self.cmd(cmd, do_eval=False)
+ cmd = f'ensight.case.select("{new_case_name}")'
+ self.cmd(cmd, do_eval=False)
+ else:
+ # Case replace
+ current_case_name = self.ensight.objs.core.CURRENTCASE[0].DESCRIPTION
+ cmd = f'ensight.case.replace("{current_case_name}", "{current_case_name}")'
+ self.cmd(cmd, do_eval=False)
+ cmd = f'ensight.case.select("{current_case_name}")'
+ self.cmd(cmd, do_eval=False)
+
+ # Attempt to find the file format if none is specified
+ if file_format is None:
+ try:
+ cmd = "ensight.objs.core.CURRENTCASE[0]"
+ cmd += f'.queryfileformat(r"""{data_file}""")["reader"]'
+ file_format = self.cmd(cmd)
+ except RuntimeError:
+ raise RuntimeError(f"Unable to determine file format for {data_file}")
+
+ # Load the data
+ cmds = [
+ "ensight.part.select_default()",
+ "ensight.part.modify_begin()",
+ f'ensight.part.elt_representation("{representation}")',
+ "ensight.part.modify_end()",
+ 'ensight.data.binary_files_are("native")',
+ f'ensight.data.format("{file_format}")',
+ ]
+ if reader_options:
+ for key, value in reader_options.items():
+ option = f"""ensight.data.reader_option("{repr(key)} {repr(value)}")"""
+ cmds.append(option)
+ if result_file:
+ cmds.append(f'ensight.data.result(r"""{result_file}""")')
+ cmds.append("ensight.data.shift_time(1.000000, 0.000000, 0.000000)")
+ cmds.append(f'ensight.solution_time.monitor_for_new_steps("{monitor_new_timesteps}")')
+ cmds.append(f'ensight.data.replace(r"""{data_file}""")')
+ for cmd in cmds:
+ if self.cmd(cmd) != 0:
+ raise RuntimeError("Unable to load the dataset.")
+
+ def download_pyansys_example(
+ self,
+ filename: str,
+ directory: Optional[str] = None,
+ root: Optional[str] = None,
+ folder: Optional[bool] = None,
+ ) -> str:
+ """Download an example dataset from the ansys/example-data repository.
+ The dataset is downloaded local to the EnSight server location, so that it can
+ be downloaded even if running from a container.
+
+ Parameters
+ ----------
+        filename : str
+            Name of the file to download.
+        directory : str, optional
+            Directory in the repository to download the file from.
+        root : str, optional
+            If set, the download happens from this alternate base URL.
+        folder : bool, optional
+            If ``True``, ``filename`` is treated as a directory rather
+            than a single file.
+
+        Returns
+        -------
+        str
+            The download location, local to the EnSight server directory.
+            If ``folder`` is ``True``, the location is a folder containing
+            all the items available in the repository under that folder.
+
+ Examples
+ --------
+ >>> from ansys.pyensight.core import DockerLauncher
+ >>> session = DockerLauncher().start(data_directory="D:\\")
+ >>> cas_file = session.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
+ >>> dat_file = session.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
+ >>> session.load_data(cas_file, result_file=dat_file)
+ >>> remote = session.show("remote")
+ >>> remote.browser()
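+
+        A hedged sketch of a folder download; the repository path here is
+        illustrative, not a verified location:
+
+        >>> path = session.download_pyansys_example("a_folder_name", "a_directory", folder=True)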
+ """
+ base_uri = "https://github.com/ansys/example-data/raw/master"
+ base_api_uri = "https://api.github.com/repos/ansys/example-data/contents"
+ if not folder:
+ if root is not None:
+ base_uri = root
+ else:
+ base_uri = base_api_uri
+ uri = f"{base_uri}/{filename}"
+ if directory:
+ uri = f"{base_uri}/{directory}/{filename}"
+ pathname = f"{self.launcher.session_directory}/{filename}"
+ if not folder:
+ script = "import requests\n"
+ script += "import shutil\n"
+ script += "import os\n"
+ script += f'url = "{uri}"\n'
+ script += f'outpath = r"""{pathname}"""\n'
+ script += "with requests.get(url, stream=True) as r:\n"
+ script += " with open(outpath, 'wb') as f:\n"
+ script += " shutil.copyfileobj(r.raw, f)\n"
+ self.cmd(script, do_eval=False)
+ else:
+ script = "import requests\n"
+ script += "import shutil\n"
+ script += "import os\n"
+ script += f'url = "{uri}"\n'
+ script += "with requests.get(url) as r:\n"
+ script += " data = r.json()\n"
+ script += f' output_directory = r"""{pathname}"""\n'
+ script += " os.makedirs(output_directory, exist_ok=True)\n"
+ script += " for item in data:\n"
+ script += " if item['type'] == 'file':\n"
+ script += " file_url = item['download_url']\n"
+ script += " filename = os.path.join(output_directory, item['name'])\n"
+ script += " r = requests.get(file_url, stream=True)\n"
+ script += " with open(filename, 'wb') as f:\n"
+ script += " f.write(r.content)\n"
+ self.cmd(script, do_eval=False)
+ return pathname
+
+ def load_example(
+ self, example_name: str, uncompress: bool = False, root: Optional[str] = None
+ ) -> str:
+ """Load an example dataset.
+
+ This method downloads an EnSight session file from a known location and loads
+ it into the current EnSight instance. The URL for the dataset is formed by
+ combining the value given for the ``example_name`` parameter with a root URL.
+ The default base URL is provided by Ansys, but it can be overridden by specifying
+ a value for the ``root`` parameter.
+
+ Parameters
+ ----------
+ example_name : str
+ Name of the EnSight session file (``.ens``) to download and load.
+ uncompress : bool, optional
+ Whether to unzip the downloaded file into the returned directory name.
+ The default is ``False``.
+ root : str, optional
+ Base URL for the download.
+
+ Returns
+ -------
+ str
+ Path to the downloaded file in the EnSight session.
+
+ Examples
+ --------
+ >>> from ansys.pyensight.core import LocalLauncher
+ >>> session = LocalLauncher().start()
+ >>> session.load_example("fluent_wing_example.ens")
+ >>> remote = session.show("remote")
+ >>> remote.browser()
+
+ """
+ base_uri = "https://s3.amazonaws.com/www3.ensight.com/PyEnSight/ExampleData"
+ if root is not None: # pragma: no cover
+ base_uri = root # pragma: no cover
+ pathname = self.download_pyansys_example(example_name, root=base_uri)
+ script = f'outpath = r"""{pathname}"""\n'
+ if uncompress:
+ # in this case, remove the extension and unzip the file
+ pathname_dir = os.path.splitext(pathname)[0]
+ script += "outpath_dir = os.path.splitext(outpath)[0]\n"
+ script += "os.mkdir(outpath_dir)\n"
+ script += "shutil.unpack_archive(outpath, outpath_dir, 'zip')\n"
+ # return the directory name
+ pathname = pathname_dir
+ else:
+ script += "ensight.objs.ensxml_restore_file(outpath)\n"
+ self.cmd(script, do_eval=False)
+ return pathname
+
+ def add_callback(
+ self, target: Any, tag: str, attr_list: list, method: Callable, compress: bool = True
+ ) -> None:
+ """Register a callback with an event tuple.
+
+ For a given target object (such as ``"ensight.objs.core"``) and a list
+ of attributes (such as ``["PARTS", "VARIABLES"]``), this method sets up a
+ callback to be made when any of those attribute change on the target object.
+ The target can also be an EnSight (not PyEnSight) class name, for example
+ "ENS_PART". In this latter form, all objects of that type are watched for
+ specified attribute changes.
+
+ The callback is made with a single argument, a string encoded in URL format
+ with the supplied tag, the name of the attribute that changed and the UID
+ of the object that changed. The string passed to the callback is in this form:
+ ``grpc://{sessionguid}/{tag}?enum={attribute}&uid={objectid}``.
+
+ Only one callback with the noted tag can be used in the session.
+
+ Parameters
+ ----------
+ target : obj, str
+ Name of the target object or name of a class as a string to
+ match all objects of that class. A proxy class reference is
+ also allowed. For example, ``session.ensight.objs.core``.
+ tag : str
+ Unique name for the callback. A tag can end with macros of
+ the form ``{{attrname}}`` to return the value of an attribute of the
+ target object. The macros should take the form of URI queries to
+ simplify parsing.
+ attr_list : list
+ List of attributes of the target that are to result in the callback
+ being called if changed.
+ method : Callable
+ Callable that is called with the returned URL.
+ compress : bool, optional
+ Whether to call only the last event if a repeated event is generated
+ as a result of an action. The default is ``True``. If ``False``, every
+ event results in a callback.
+
+ Examples
+ --------
+ A string similar to this is printed when the dataset is loaded and the part list
+ changes:
+
+        ``Event: grpc://f6f74dae-f0ed-11ec-aa58-381428170733/partlist?enum=PARTS&uid=221``
+
+ >>> from ansys.pyensight.core import LocalLauncher
+ >>> s = LocalLauncher().start()
+        >>> def cb(v: str):
+        >>>     print("Event:", v)
+ >>> s.add_callback("ensight.objs.core", "partlist", ["PARTS"], cb)
+ >>> s.load_data(r"D:\ANSYSDev\data\CFX\HeatingCoil_001.res")
+ """
+ self._establish_connection()
+ # shorten the tag up to the query block. Macros are only legal in the query block.
+ try:
+ idx = tag.index("?")
+ short_tag = tag[:idx]
+ except ValueError:
+ short_tag = tag
+ if short_tag in self._callbacks:
+ raise RuntimeError(f"A callback for tag '{short_tag}' already exists")
+ # Build the addcallback string against the full tag
+ flags = ""
+ if compress:
+ flags = ",flags=ensight.objs.EVENTMAP_FLAG_COMP_GLOBAL"
+ if hasattr(target, "__OBJID__"):
+ target = self.remote_obj(target.__OBJID__)
+ cmd = f"ensight.objs.addcallback({target},None,"
+ cmd += f"'{self._grpc.prefix()}{tag}',attrs={repr(attr_list)}{flags})"
+ callback_id = self.cmd(cmd)
+ # if this is the first callback, start the event stream
+ if len(self._callbacks) == 0:
+ self._grpc.event_stream_enable(callback=self._event_callback)
+ # record the callback id along with the callback
+ # if the callback URL starts with the short_tag, we make the callback
+ self._callbacks[short_tag] = (callback_id, method)
+
+ def remove_callback(self, tag: str) -> None:
+ """Remove a callback that the :func`add_callback`
+ method started.
+
+ Given a tag used to register a previous callback (``add_callback()``), remove
+ this callback from the EnSight callback system.
+
+ Parameters
+ ----------
+ tag : str
+ Callback string tag.
+
+ Raises
+ ------
+ RuntimeError
+ If an invalid tag is supplied.
+
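+        Examples
+        --------
+        Remove the callback registered under the tag from the
+        :func:`add_callback` example:
+
+        >>> session.remove_callback("partlist")
+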
+ """
+ if tag not in self._callbacks:
+ raise RuntimeError(f"A callback for tag '{tag}' does not exist")
+ callback_id = self._callbacks[tag][0]
+ del self._callbacks[tag]
+ cmd = f"ensight.objs.removecallback({callback_id})"
+ _ = self.cmd(cmd, do_eval=False)
+
+ def _event_callback(self, cmd: str) -> None:
+ """Pass the URL back to the registered callback.
+
+ This method matches the ``cmd`` URL with the registered callback and then
+ makes the callback.
+
+ Parameters
+ ----------
+ cmd : str
+ URL callback from the gRPC event stream. The URL has this
+ form: ``grpc://{sessionguid}/{tag}?enum={attribute}&uid={objectid}``.
+
+ """
+ # EnSight will always tack on '?enum='. If our tag uses ?macro={{attr}},
+ # you will get too many '?' in the URL, making it difficult to parse.
+ # So, we look for "?..." and a following "?enum=". If we see this, convert
+ # "?enum=" into "&enum=".
+ idx_question = cmd.find("?")
+ idx_enum = cmd.find("?enum=")
+ if idx_question < idx_enum:
+ cmd = cmd.replace("?enum=", "&enum=")
+ parse = urlparse(cmd)
+ tag = parse.path[1:]
+ for key, value in self._callbacks.items():
+ # remember "key" is a shortened version of tag
+ if tag.startswith(key):
+ value[1](cmd)
+ return
+ print(f"Unhandled event: {cmd}")
+
+ # Object API helper functions
+ @staticmethod
+ def remote_obj(ensobjid: int) -> str:
+ """Generate a string that, for a given ``ENSOBJ`` object ID, returns
+ a proxy object instance.
+
+ Parameters
+ ----------
+ ensobjid: int
+ ID of the ``ENSOBJ`` object.
+
+ Returns
+ -------
+ str
+ String for the proxy object instance.
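+
+        Examples
+        --------
+        A sketch of the generated string (``Session`` here stands for the
+        enclosing class of this static method):
+
+        >>> Session.remote_obj(221)
+        'ensight.objs.wrap_id(221)'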
+ """
+ return f"ensight.objs.wrap_id({ensobjid})"
+
+ def _prune_hash(self) -> None:
+ """Prune the ``ENSOBJ`` hash table.
+
+        If the ``ENSOBJ`` hash table grows too large (over a million entries), clear it."""
+ if len(self._ensobj_hash) > 1000000:
+ self._ensobj_hash = {}
+
+ def add_ensobj_instance(self, obj: "ENSOBJ") -> None:
+ """Add a new ``ENSOBJ`` object instance to the hash table.
+
+ Parameters
+ ----------
+ obj : ENSOBJ
+ ``ENSOBJ`` object instance.
+ """
+ self._ensobj_hash[obj.__OBJID__] = obj
+
+ def obj_instance(self, ensobjid: int) -> Optional["ENSOBJ"]:
+ """Get any existing proxy object associated with an ID.
+
+ Parameters
+ ----------
+ ensobjid: int
+ ID of the ``ENSOBJ`` object.
+
+        Returns
+        -------
+        Optional[ENSOBJ]
+            The existing proxy object, or ``None`` if no proxy exists for the ID.
+        """
+ return self._ensobj_hash.get(ensobjid, None)
+
+ def _obj_attr_subtype(self, classname: str) -> Tuple[Optional[int], Optional[dict]]:
+ """Get subtype information for a given class.
+
+ For an input class name, this method returns the proper Python proxy class name and,
+ if the class supports subclasses, the attribute ID of the differentiating attribute.
+
+ Parameters
+ ----------
+ classname : str
+ Root class name to look up.
+
+ Returns
+ -------
+ Tuple[Optional[int], Optional[dict]]
+ (attr_id, subclassnamedict): Attribute used to differentiate between classes
+ and a dictionary of the class names for each value of the attribute.
+
+ """
+ if classname == "ENS_PART":
+ return self.ensight.objs.enums.PARTTYPE, self._subtype_tables[classname]
+
+ elif classname == "ENS_ANNOT":
+ return self.ensight.objs.enums.ANNOTTYPE, self._subtype_tables[classname]
+
+ elif classname == "ENS_TOOL":
+ return self.ensight.objs.enums.TOOLTYPE, self._subtype_tables[classname]
+
+ return None, None
+
+ def _convert_ctor(self, s: str) -> str:
+ """Convert ENSOBJ object references into executable code in __repl__ strings.
+
+ The __repl__() implementation for an ENSOBJ subclass generates strings like these::
+
+ Class: ENS_GLOBALS, CvfObjID: 221, cached:yes
+ Class: ENS_PART, desc: 'Sphere', CvfObjID: 1078, cached:no
+ Class: ENS_PART, desc: 'engine', PartType: 0, CvfObjID: 1097, cached:no
+ Class: ENS_GROUP, desc: '', Owned, CvfObjID: 1043, cached:no
+
+ This method detects strings like those and converts them into strings like these::
+
+ session.ensight.objs.ENS_GLOBALS(session, 221)
+ session.ensight.objs.ENS_PART_MODEL(session, 1078, attr_id=1610612792, attr_value=0)
+
+ where:
+
+ 1610612792 is ensight.objs.enums.PARTTYPE.
+
+ If a proxy object for the ID already exists, it can also generate strings like this::
+
+ session.obj_instance(221)
+
+ Parameters
+ ----------
+ s : str
+ String to convert.
+
+        Returns
+        -------
+        str
+            The converted string with object references replaced by executable code.
+        """
+ self._prune_hash()
+ offset = 0
+ while True:
+ # Find the object repl block to replace
+ id = s.find("CvfObjID:", offset)
+ if id == -1:
+ break
+ start = s.find("Class: ", offset)
+ if (start == -1) or (start > id):
+ break
+ tail_len = 11
+ tail = s.find(", cached:no", offset)
+ if tail == -1:
+ tail_len = 12
+ tail = s.find(", cached:yes", offset)
+ if tail == -1: # pragma: no cover
+ break # pragma: no cover
+ # just this object substring
+ tmp = s[start + 7 : tail]
+ # Subtype (PartType:, AnnotType:, ToolType:)
+ subtype = None
+ for name in ("PartType:", "AnnotType:", "ToolType:"):
+ location = tmp.find(name)
+ if location != -1:
+ subtype = int(tmp[location + len(name) :].split(",")[0])
+ break
+ # Owned flag
+ owned_flag = "Owned," in tmp
+ # isolate the block to replace
+ prefix = s[:start]
+ suffix = s[tail + tail_len :]
+ # parse out the object id and classname
+ objid = int(s[id + 9 : tail])
+ classname = s[start + 7 : tail]
+ comma = classname.find(",")
+ classname = classname[:comma]
+ # pick the subclass based on the classname
+ attr_id, classname_lookup = self._obj_attr_subtype(classname)
+ # generate the replacement text
+ if objid in self._ensobj_hash:
+ replace_text = f"session.obj_instance({objid})"
+ else:
+ subclass_info = ""
+ if attr_id is not None:
+ if subtype is not None:
+ # the 2024 R2 interface includes the subtype
+ if (classname_lookup is not None) and (subtype in classname_lookup):
+ classname = classname_lookup[subtype]
+ subclass_info = f",attr_id={attr_id}, attr_value={subtype}"
+ elif classname_lookup is not None: # pragma: no cover
+ # if a "subclass" case and no subclass attrid value, ask for it...
+ remote_name = self.remote_obj(objid)
+ cmd = f"{remote_name}.getattr({attr_id})"
+ attr_value = self.cmd(cmd)
+ if attr_value in classname_lookup:
+ classname = classname_lookup[attr_value]
+ subclass_info = f",attr_id={attr_id}, attr_value={attr_value}"
+ if owned_flag:
+ subclass_info += ",owned=True"
+ replace_text = f"session.ensight.objs.{classname}(session, {objid}{subclass_info})"
+ if replace_text is None: # pragma: no cover
+ break # pragma: no cover
+ offset = start + len(replace_text)
+ s = prefix + replace_text + suffix
+ s = s.strip()
+ if s.startswith("[") and s.endswith("]"):
+ s = f"ensobjlist({s}, session=session)"
+ return s
+
+ def capture_context(self, full_context: bool = False) -> "enscontext.EnsContext":
+ """Capture the current EnSight instance state.
+
+ This method causes the EnSight instance to save a context and return an ``EnsContext``
+ object representing that saved state.
+
+ Parameters
+ ----------
+ full_context : bool, optional
+            Whether to include all aspects of the EnSight instance. The default is ``False``.
+
+ Returns
+ -------
+ enscontext.EnsContext
+
+ Examples
+ --------
+ >>> ctx = session.capture_context()
+ >>> ctx.save("session_context.ctxz")
+
+ """
+ self.cmd("import ansys.pyensight.core.enscontext", do_eval=False)
+ data_str = self.cmd(
+ f"ansys.pyensight.core.enscontext._capture_context(ensight,{full_context})",
+ do_eval=True,
+ )
+ context = EnsContext()
+ context._from_data(data_str)
+ return context
+
+ def restore_context(self, context: "enscontext.EnsContext") -> None:
+ """Restore the current EnSight instance state.
+
+ This method restores EnSight to the state stored in an ``EnsContext``
+ object that was either read from disk or returned by the
+ :func:`capture_context` method.
+
+ Parameters
+ ----------
+ context : enscontext.EnsContext
+ Context to set the current EnSight instance to.
+
+ Examples
+ --------
+ >>> tmp_ctx = session.capture_context()
+ >>> session.restore_context(EnsContext("session_context.ctxz"))
+ >>> session.restore_context(tmp_ctx)
+ """
+ data_str = context._data(b64=True)
+ self.cmd("import ansys.pyensight.core.enscontext", do_eval=False)
+ self.cmd(
+ f"ansys.pyensight.core.enscontext._restore_context(ensight,'{data_str}')", do_eval=False
+ )
+
+ def ensight_version_check(
+ self,
+ version: Union[int, str],
+ message: str = "",
+ exception: bool = True,
+ strict: bool = False,
+ ) -> bool:
+ """Check if the session is a specific version.
+
+ Different versions of pyensight Sessions may host different versions of EnSight.
+ This method compares the version of the remote EnSight session to a specific version
+ number. If the remote EnSight version is at least the specified version, then
+ this method returns True. If the version of EnSight is earlier than the specified
+ version, this method will raise an exception. The caller can specify the
+ error string to be included. They may also specify if the version check should
+ be for a specific version vs the specified version or higher. It is also possible
+ to avoid the exception and instead just return True or False for cases when an
+ alternative implementation might be used.
+
+ Parameters
+ ----------
+ version : Union[int, str]
+ The version number to compare the EnSight version against.
+ message : str
+ The message string to be used as the text for any raised exception.
+ exception : bool
+ If True, and the version comparison fails, an InvalidEnSightVersion is raised.
+ Otherwise, the result of the comparison is returned.
+ strict : bool
+ If True, the comparison of the two versions will only pass if they
+ are identical. If False, if the EnSight version is greater than or
+ equal to the specified version the comparison will pass.
+
+        Returns
+        -------
+        bool
+            ``True`` if the comparison succeeds, ``False`` otherwise.
+
+        Raises
+        ------
+        InvalidEnSightVersion
+            If the comparison fails and ``exception`` is ``True``.
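+
+        Examples
+        --------
+        A sketch of guarding a newer API without raising; the version string
+        is illustrative ("2024 R2" maps to the internal suffix 242), and the
+        two functions are hypothetical:
+
+        >>> if session.ensight_version_check("2024 R2", exception=False):
+        ...     use_new_api()  # hypothetical newer code path
+        ... else:
+        ...     use_fallback()  # hypothetical alternative implementation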
+ """
+ ens_version = int(self.ensight.version("suffix"))
+ # handle various input formats
+ target = version
+ if isinstance(target, str): # pragma: no cover
+ # could be 'year RX' or the suffix as a string
+ if "R" in target:
+ tmp = [int(x) for x in target.split("R")]
+ target = (tmp[0] - 2000) * 10 + tmp[1]
+ else:
+ target = int(target)
+ # check validity
+ valid = ens_version == target
+ at_least = ""
+ if not strict: # pragma: no cover
+ at_least = "at least "
+ valid = ens_version >= target
+ if (not valid) and exception:
+ ens_version = self.ensight.version("version-full")
+ base_msg = f" ({at_least}'{version}' required, '{ens_version}' current)"
+ if message: # pragma: no cover
+ message += base_msg # pragma: no cover
+ else:
+ message = f"A newer version of EnSight is required to use this API:{base_msg}"
+ raise InvalidEnSightVersion(message)
+ return valid
+
+ def find_remote_unused_ports(
+ self,
+ count: int,
+ start: Optional[int] = None,
+ end: Optional[int] = None,
+        avoid: Optional[List[int]] = None,
+ ) -> Optional[List[int]]:
+ """
+ Find "count" unused ports on the host system. A port is considered
+ unused if it does not respond to a "connect" attempt. Walk the ports
+ from 'start' to 'end' looking for unused ports and avoiding any ports
+ in the 'avoid' list. Stop once the desired number of ports have been
+ found. If an insufficient number of ports were found, return None.
+ An admin user check is used to skip [1-1023].
+
+        Parameters
+        ----------
+        count : int
+            Number of unused ports to find.
+        start : int, optional
+            The first port to check. The default is ``None``, in which case
+            a random starting port is selected.
+        end : int, optional
+            The last port to check. The default is ``None``, in which case
+            the full port range is checked.
+        avoid : list, optional
+            An optional list of ports not to check.
+
+        Returns
+        -------
+        Optional[List[int]]
+            The detected ports or ``None`` on failure.
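+
+        Examples
+        --------
+        A minimal sketch; the port numbers are illustrative:
+
+        >>> ports = session.find_remote_unused_ports(2, avoid=[12345])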
+ """
+ cmd = "from cei import find_unused_ports\n"
+ cmd += f"ports = find_unused_ports({count}, start={start}, end={end}, avoid={avoid})"
+ self.cmd(cmd, do_eval=False)
+ return self.cmd("ports")
diff --git a/src/ansys/pyensight/core/utils/dsg_server.py b/src/ansys/pyensight/core/utils/dsg_server.py
index 603912ee335..e229a48de3c 100644
--- a/src/ansys/pyensight/core/utils/dsg_server.py
+++ b/src/ansys/pyensight/core/utils/dsg_server.py
@@ -1,1085 +1,1085 @@
-import hashlib
-import json
-import logging
-import os
-import queue
-import sys
-import threading
-import time
-from typing import Any, Dict, List, Optional
-
-from ansys.api.pyensight.v0 import dynamic_scene_graph_pb2
-from ansys.pyensight.core import ensight_grpc
-import numpy
-
-
-class Part(object):
- def __init__(self, session: "DSGSession"):
- """
-        This object roughly represents an EnSight "Part". It contains the connectivity,
-        coordinates, normals, and texture coordinate information for one DSG entity.
-
-        This object stores basic geometry information coming from the DSG protocol. The
-        update_geom() method parses an "UpdateGeom" protobuffer and merges the results
-        into the Part object.
-
- Parameters
- ----------
- session:
- The DSG connection session object.
- """
- self.session = session
- self.conn_tris = numpy.array([], dtype="int32")
- self.conn_lines = numpy.array([], dtype="int32")
- self.coords = numpy.array([], dtype="float32")
- self.normals = numpy.array([], dtype="float32")
- self.normals_elem = False
- self.tcoords = numpy.array([], dtype="float32")
- self.tcoords_elem = False
- self.node_sizes = numpy.array([], dtype="float32")
- self.cmd: Optional[Any] = None
- self.hash = hashlib.new("sha256")
- self._material: Optional[Any] = None
- self.reset()
-
- def reset(self, cmd: Any = None) -> None:
- """
- Reset the part object state to prepare the object
- for a new part representation. Numpy arrays are cleared
- and the state reset.
-
- Parameters
- ----------
- cmd: Any
- The DSG command that triggered this reset. Most likely
-            this is an UPDATE_PART command.
-
- """
- self.conn_tris = numpy.array([], dtype="int32")
- self.conn_lines = numpy.array([], dtype="int32")
- self.coords = numpy.array([], dtype="float32")
- self.normals = numpy.array([], dtype="float32")
- self.normals_elem = False
- self.tcoords = numpy.array([], dtype="float32")
- self.tcoords_var_id = None
- self.tcoords_elem = False
- self.node_sizes = numpy.array([], dtype="float32")
- self.hash = hashlib.new("sha256")
- if cmd is not None:
- self.hash.update(cmd.hash.encode("utf-8"))
- self.cmd = cmd
- self._material = None
-
- def _parse_material(self) -> None:
- """
-        Parse the JSON content of the part command's material string and
-        make it accessible via material_names() and material().
- """
- if self._material is not None:
- return
- try:
- if self.cmd.material_name: # type: ignore
- self._material = json.loads(self.cmd.material_name) # type: ignore
- for key, value in self._material.items():
- value["name"] = key
- else:
- self._material = {}
- except Exception as e:
- self.session.warn(f"Unable to parse JSON material: {str(e)}")
- self._material = {}
-
- def material_names(self) -> List[str]:
- """
- Return the list of material names included in the part material.
-
- Returns
- -------
- List[str]
- The list of defined material names.
- """
- self._parse_material()
- if self._material is None:
- return []
- return list(self._material.keys())
-
- def material(self, name: str = "") -> dict:
- """
- Return the material dictionary for the specified material name.
-
- Parameters
- ----------
- name: str
- The material name to query. If no material name is given, the
- first name in the material_names() list is used.
-
- Returns
- -------
- dict
- The material description dictionary or an empty dictionary.
- """
- self._parse_material()
- if not name:
- names = self.material_names()
- if len(names):
- name = names[0]
- if self._material is None:
- return {}
- return self._material.get(name, {})
-
- def update_geom(self, cmd: dynamic_scene_graph_pb2.UpdateGeom) -> None:
- """
- Merge an update geometry command into the numpy buffers being cached in this object
-
- Parameters
- ----------
- cmd:
- This is an array update command. It could be for coordinates, normals, variables, connectivity, etc.
- """
- if cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.COORDINATES:
- if self.coords.size != cmd.total_array_size:
- self.coords = numpy.resize(self.coords, cmd.total_array_size)
- self.coords[cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)] = cmd.flt_array
- elif cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.TRIANGLES:
- if self.conn_tris.size != cmd.total_array_size:
- self.conn_tris = numpy.resize(self.conn_tris, cmd.total_array_size)
- self.conn_tris[cmd.chunk_offset : cmd.chunk_offset + len(cmd.int_array)] = cmd.int_array
- elif cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.LINES:
- if self.conn_lines.size != cmd.total_array_size:
- self.conn_lines = numpy.resize(self.conn_lines, cmd.total_array_size)
- self.conn_lines[
- cmd.chunk_offset : cmd.chunk_offset + len(cmd.int_array)
- ] = cmd.int_array
- elif (cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_NORMALS) or (
- cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.NODE_NORMALS
- ):
- self.normals_elem = cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_NORMALS
- if self.normals.size != cmd.total_array_size:
- self.normals = numpy.resize(self.normals, cmd.total_array_size)
- self.normals[cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)] = cmd.flt_array
- elif (cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_VARIABLE) or (
- cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.NODE_VARIABLE
- ):
- # Get the variable definition
- if cmd.variable_id in self.session.variables:
- if self.cmd.color_variableid == cmd.variable_id: # type: ignore
- # Receive the colorby var values
- self.tcoords_elem = (
- cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_VARIABLE
- )
- if self.tcoords.size != cmd.total_array_size:
- self.tcoords = numpy.resize(self.tcoords, cmd.total_array_size)
- self.tcoords[
- cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)
- ] = cmd.flt_array
-
- # Add the variable hash to the Part's hash, to pick up palette changes
- var_cmd = self.session.variables.get(cmd.variable_id, None)
- if var_cmd is not None:
- self.hash.update(var_cmd.hash.encode("utf-8"))
-
- if self.cmd.node_size_variableid == cmd.variable_id: # type: ignore
- # Receive the node size var values
- if self.node_sizes.size != cmd.total_array_size:
- self.node_sizes = numpy.resize(self.node_sizes, cmd.total_array_size)
- self.node_sizes[
- cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)
- ] = cmd.flt_array
- # Combine the hashes for the UpdatePart and all UpdateGeom messages
- self.hash.update(cmd.hash.encode("utf-8"))
-
- def nodal_surface_rep(self):
- """
-        This function processes the geometry arrays and converts them into a nodal
-        representation. It duplicates triangles as needed (to preserve element normals)
-        and converts variable data into texture coordinates.
-
- Returns
- -------
- On failure, the method returns None for the first return value. The returned tuple is:
-
-        (part_command, vertices, connectivity, normals, tcoords, var_command)
-
- part_command: UPDATE_PART command object
- vertices: numpy array of the nodal coordinates
- connectivity: numpy array of the triangle indices into the vertices array
- normals: numpy array of per vertex normal values (optional)
- tcoords: numpy array of per vertex texture coordinates (optional)
-        var_command: UPDATE_VARIABLE command object for the variable the texture coordinates correspond to, if any
- """
- if self.cmd is None:
- return None, None, None, None, None, None
- if self.conn_tris.size == 0:
- self.session.log(f"Note: part '{self.cmd.name}' contains no triangles.")
- return None, None, None, None, None, None
- verts = self.coords
- _ = self._normalize_verts(verts)
-
- conn = self.conn_tris
- normals = self.normals
- tcoords = None
- if self.tcoords.size:
- tcoords = self.tcoords
- if self.tcoords_elem or self.normals_elem:
- verts_per_prim = 3
- num_prims = conn.size // verts_per_prim
- # "flatten" the triangles to move values from elements to nodes
- new_verts = numpy.ndarray((num_prims * verts_per_prim * 3,), dtype="float32")
- new_conn = numpy.ndarray((num_prims * verts_per_prim,), dtype="int32")
- new_tcoords = None
- if tcoords is not None:
- # remember that the input values are 1D at this point, we will expand to 2D later
- new_tcoords = numpy.ndarray((num_prims * verts_per_prim,), dtype="float32")
- new_normals = None
- if normals is not None:
- if normals.size == 0:
- self.session.log("Warning: zero length normals!")
- else:
- new_normals = numpy.ndarray((num_prims * verts_per_prim * 3,), dtype="float32")
- j = 0
- for i0 in range(num_prims):
- for i1 in range(verts_per_prim):
- idx = conn[i0 * verts_per_prim + i1]
- # new connectivity (identity)
- new_conn[j] = j
- # copy the vertex
- new_verts[j * 3 + 0] = verts[idx * 3 + 0]
- new_verts[j * 3 + 1] = verts[idx * 3 + 1]
- new_verts[j * 3 + 2] = verts[idx * 3 + 2]
- if new_normals is not None:
- if self.normals_elem:
- # copy the normal associated with the face
- new_normals[j * 3 + 0] = normals[i0 * 3 + 0]
- new_normals[j * 3 + 1] = normals[i0 * 3 + 1]
- new_normals[j * 3 + 2] = normals[i0 * 3 + 2]
- else:
- # copy the same normal as the vertex
- new_normals[j * 3 + 0] = normals[idx * 3 + 0]
- new_normals[j * 3 + 1] = normals[idx * 3 + 1]
- new_normals[j * 3 + 2] = normals[idx * 3 + 2]
- if new_tcoords is not None:
- # remember, 1D texture coords at this point
- if self.tcoords_elem:
- # copy the texture coord associated with the face
- new_tcoords[j] = tcoords[i0]
- else:
- # copy the same texture coord as the vertex
- new_tcoords[j] = tcoords[idx]
- j += 1
- # new arrays.
- verts = new_verts
- conn = new_conn
- normals = new_normals
- if tcoords is not None:
- tcoords = new_tcoords
-
- var_cmd = None
- # texture coords need transformation from variable value to [ST]
- if tcoords is not None:
- tcoords, var_cmd = self._build_st_coords(tcoords, verts.size // 3)
-
- self.session.log(
- f"Part '{self.cmd.name}' defined: {self.coords.size // 3} verts, {self.conn_tris.size // 3} tris."
- )
- command = self.cmd
-
- return command, verts, conn, normals, tcoords, var_cmd
-
- def _normalize_verts(self, verts: numpy.ndarray) -> float:
- """
- This function scales and translates vertices, so the longest axis in the scene is of
- length 1.0, and data is centered at the origin
-
- Returns the scale factor
- """
- s = 1.0
- if self.session.normalize_geometry and self.session.scene_bounds is not None:
- num_verts = verts.size // 3
- midx = (self.session.scene_bounds[3] + self.session.scene_bounds[0]) * 0.5
- midy = (self.session.scene_bounds[4] + self.session.scene_bounds[1]) * 0.5
- midz = (self.session.scene_bounds[5] + self.session.scene_bounds[2]) * 0.5
- dx = self.session.scene_bounds[3] - self.session.scene_bounds[0]
- dy = self.session.scene_bounds[4] - self.session.scene_bounds[1]
- dz = self.session.scene_bounds[5] - self.session.scene_bounds[2]
- s = dx
- if dy > s:
- s = dy
- if dz > s:
- s = dz
- if s == 0:
- s = 1.0
- for i in range(num_verts):
- j = i * 3
- verts[j + 0] = (verts[j + 0] - midx) / s
- verts[j + 1] = (verts[j + 1] - midy) / s
- verts[j + 2] = (verts[j + 2] - midz) / s
- return 1.0 / s
-
- def _build_st_coords(self, tcoords: numpy.ndarray, num_verts: int):
- """
- The Omniverse interface uses 2D texturing (s,t) to reference the texture map.
- This method converts DSG texture coordinates (1D and in "variable" units) into
-        2D OpenGL-style [0.,1.] normalized coordinate space. The "t" coordinate will
- always be 0.5.
-
- Parameters
- ----------
- tcoords: numpy.ndarray
- The DSG 1D texture coordinates, which are actually variable values.
-
- num_verts: int
- The number of vertices in the mesh.
-
- Returns
- -------
- numpy.ndarray, Any
-            The OpenGL ST texture coordinate array and the variable definition DSG command.
- """
- var_dsg_id = self.cmd.color_variableid # type: ignore
- var_cmd = self.session.variables[var_dsg_id]
- v_min = None
- v_max = None
- for lvl in var_cmd.levels:
- if (v_min is None) or (v_min > lvl.value):
- v_min = lvl.value
- if (v_max is None) or (v_max < lvl.value):
- v_max = lvl.value
- var_minmax: List[float] = [v_min, v_max] # type: ignore
- # build a power of two x 1 texture
- num_texels = len(var_cmd.texture) // 4
- half_texel = 1 / (num_texels * 2.0)
- tmp = numpy.ndarray((num_verts * 2,), dtype="float32")
- tmp.fill(0.5) # fill in the T coordinate...
- tex_width = half_texel * 2 * (num_texels - 1) # center to center of num_texels
- # if the range is 0, adjust the min by -1. The result is that the texture
- # coords will get mapped to S=1.0 which is what EnSight does in this situation
- if (var_minmax[1] - var_minmax[0]) == 0.0:
- var_minmax[0] = var_minmax[0] - 1.0
- var_width = var_minmax[1] - var_minmax[0]
- for idx in range(num_verts):
- # normalized S coord value (clamp)
- s = (tcoords[idx] - var_minmax[0]) / var_width
- if s < 0.0:
- s = 0.0
- if s > 1.0:
- s = 1.0
- # map to the texture range and set the S value
- tmp[idx * 2] = s * tex_width + half_texel
- return tmp, var_cmd
-
- def line_rep(self):
- """
- This function processes the geometry arrays and returns values to represent line data.
- The vertex array embeds the connectivity, so every two points represent a line segment.
- The tcoords similarly follow the vertex array notion.
-
- Returns
- -------
- On failure, the method returns None for the first return value. The returned tuple is:
-
-        (part_command, vertices, tcoords, var_command)
-
- part_command: UPDATE_PART command object
- vertices: numpy array of per-node coordinates (two per line segment)
- tcoords: numpy array of per vertex texture coordinates (optional)
-        var_command: UPDATE_VARIABLE command object for the variable the texture coordinates correspond to, if any
- """
- if self.cmd is None:
- return None, None, None, None
- if self.cmd.render != self.cmd.CONNECTIVITY:
-            # Early out. The rendering type for this object is not a line rep.
- return None, None, None, None
-
- num_lines = self.conn_lines.size // 2
- if num_lines == 0:
- return None, None, None, None
- verts = numpy.ndarray((num_lines * 2 * 3,), dtype="float32")
- tcoords = None
- if self.tcoords.size:
- tcoords = numpy.ndarray((num_lines * 2,), dtype="float32")
- # TODO: handle elemental line values (self.tcoords_elem) by converting to nodal...
- # if self.tcoords_elem:
- for i in range(num_lines):
- i0 = self.conn_lines[i * 2]
- i1 = self.conn_lines[i * 2 + 1]
- offset = i * 6
- verts[offset + 0] = self.coords[i0 * 3 + 0]
- verts[offset + 1] = self.coords[i0 * 3 + 1]
- verts[offset + 2] = self.coords[i0 * 3 + 2]
- verts[offset + 3] = self.coords[i1 * 3 + 0]
- verts[offset + 4] = self.coords[i1 * 3 + 1]
- verts[offset + 5] = self.coords[i1 * 3 + 2]
- if tcoords is not None:
- # tcoords are 1D at this point
- offset = i * 2
- tcoords[offset + 0] = self.tcoords[i0]
- tcoords[offset + 1] = self.tcoords[i1]
-
- _ = self._normalize_verts(verts)
-
- var_cmd = None
- # texture coords need transformation from variable value to [ST]
- if tcoords is not None:
- tcoords, var_cmd = self._build_st_coords(tcoords, verts.size // 3)
-
- self.session.log(f"Part '{self.cmd.name}' defined: {num_lines} lines.")
- command = self.cmd
-
- return command, verts, tcoords, var_cmd
-
- def point_rep(self):
- """
-        This function processes the geometry arrays and returns values to represent point data.
-
- Returns
- -------
- On failure, the method returns None for the first return value. The returned tuple is:
-
- (part_command, vertices, sizes, colors, var_command)
-
- part_command: UPDATE_PART command object
- vertices: numpy array of per-node coordinates
- sizes: numpy array of per-node radii
- colors: numpy array of per-node rgb colors
- var_command: UPDATE_VARIABLE command object for the variable the colors correspond to, if any
- """
- if self.cmd is None:
- return None, None, None, None, None
- if self.cmd.render != self.cmd.NODES:
- # Early out. Rendering type for this object is a surface rep, not a point rep
- return None, None, None, None, None
- verts = self.coords
- num_verts = verts.size // 3
- norm_scale = self._normalize_verts(verts)
-
- # Convert var values in self.tcoords to RGB colors
- # For now, look up RGB colors. Planned USD enhancements should allow tex coords instead.
- colors = None
- var_cmd = None
-
- if self.tcoords.size and self.tcoords.size == num_verts:
- var_dsg_id = self.cmd.color_variableid
- var_cmd = self.session.variables[var_dsg_id]
- if len(var_cmd.levels) == 0:
- self.session.log(
- f"Note: Node rep not created for part '{self.cmd.name}'. It has var values, but a palette with 0 levels."
- )
- return None, None, None, None, None
-
- p_min = None
- p_max = None
- for lvl in var_cmd.levels:
- if (p_min is None) or (p_min > lvl.value):
- p_min = lvl.value
- if (p_max is None) or (p_max < lvl.value):
- p_max = lvl.value
-
- num_texels = int(len(var_cmd.texture) / 4)
-
- colors = numpy.ndarray((num_verts * 3,), dtype="float32")
- low_color = [c / 255.0 for c in var_cmd.texture[0:3]]
- high_color = [
- c / 255.0 for c in var_cmd.texture[4 * (num_texels - 1) : 4 * (num_texels - 1) + 3]
- ]
- if p_min == p_max:
- # Special case where palette min == palette max
-                # The mid texel must come from the texture array and be normalized
-                # to [0,1], matching the low_color/high_color handling above.
-                mid_color = [
-                    c / 255.0
-                    for c in var_cmd.texture[4 * (num_texels // 2) : 4 * (num_texels // 2) + 3]
-                ]
- for idx in range(num_verts):
- val = self.tcoords[idx]
- if val == p_min:
- colors[idx * 3 : idx * 3 + 3] = mid_color
- elif val < p_min:
- colors[idx * 3 : idx * 3 + 3] = low_color
- elif val > p_min:
- colors[idx * 3 : idx * 3 + 3] = high_color
- else:
- for idx in range(num_verts):
- val = self.tcoords[idx]
- if val <= p_min:
- colors[idx * 3 : idx * 3 + 3] = low_color
- else:
- pal_pos = (num_texels - 1) * (val - p_min) / (p_max - p_min)
- pal_idx, pal_sub = divmod(pal_pos, 1)
- pal_idx = int(pal_idx)
-
- if pal_idx >= num_texels - 1:
- colors[idx * 3 : idx * 3 + 3] = high_color
- else:
- col0 = var_cmd.texture[pal_idx * 4 : pal_idx * 4 + 3]
- col1 = var_cmd.texture[4 + pal_idx * 4 : 4 + pal_idx * 4 + 3]
-                            for ii in range(0, 3):
-                                # linear interpolation between adjacent palette texels
-                                # (pal_sub is the fractional offset from col0 toward col1)
-                                colors[idx * 3 + ii] = (
-                                    col0[ii] * (1.0 - pal_sub) + col1[ii] * pal_sub
-                                ) / 255.0
-
- node_sizes = None
- if self.node_sizes.size and self.node_sizes.size == num_verts:
- # Pass out the node sizes if there is a size-by variable
- node_size_default = self.cmd.node_size_default * norm_scale
- node_sizes = numpy.ndarray((num_verts,), dtype="float32")
- for ii in range(0, num_verts):
- node_sizes[ii] = self.node_sizes[ii] * node_size_default
- elif norm_scale != 1.0:
- # Pass out the node sizes if the model is normalized to fit in a unit cube
- node_size_default = self.cmd.node_size_default * norm_scale
- node_sizes = numpy.ndarray((num_verts,), dtype="float32")
- for ii in range(0, num_verts):
- node_sizes[ii] = node_size_default
-
- self.session.log(f"Part '{self.cmd.name}' defined: {self.coords.size // 3} points.")
- command = self.cmd
-
- return command, verts, node_sizes, colors, var_cmd
-
-
-class UpdateHandler(object):
- """
-    This class serves as the interface between a DSGSession and a hosting application.
-    The DSGSession processes the general aspects of the gRPC pipeline and collects the
-    various DSG objects into collections of groups, variables, etc. It also coalesces
-    the individual array updates into a "Part" object, which represents a single
-    addressable mesh chunk.
-    UpdateHandler methods are called as the various updates happen, including when
-    a mesh chunk has been entirely defined. In most scenarios, a subclass of UpdateHandler
-    is passed to the DSGSession to handshake the mesh data to the application target.
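-
-    A minimal subclass sketch (illustrative only; the hooks shown are methods
-    of this class)::
-
-        class PrintHandler(UpdateHandler):
-            def finalize_part(self, part):
-                cmd, verts, conn, normals, tcoords, var = part.nodal_surface_rep()
-                if cmd is not None:
-                    self.session.log(f"Mesh chunk: {verts.size // 3} vertices")
-                # call the superclass last: it clears the saved part command
-                super().finalize_part(part)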
- """
-
- def __init__(self) -> None:
- self._session: "DSGSession"
-
- @property
- def session(self) -> "DSGSession":
- """The session object this handler has been associated with"""
- return self._session
-
- @session.setter
- def session(self, session: "DSGSession") -> None:
- self._session = session
-
- def add_group(self, id: int, view: bool = False) -> None:
- """Called when a new group command has been added: self.session.groups[id]"""
- if view:
- self.session.log(f"Adding view: {self.session.groups[id]}")
- else:
- self.session.log(f"Adding group: {self.session.groups[id].name}")
-
- def add_variable(self, id: int) -> None:
- """Called when a new group command has been added: self.session.variables[id]"""
- self.session.log(f"Adding variable: {self.session.variables[id].name}")
-
- def finalize_part(self, part: Part) -> None:
- """Called when all the updates on a Part object have been completed.
-
- Note: this superclass method should be called after the subclass has processed
- the part geometry as the saved part command will be destroyed by this call.
- """
- if part.cmd:
- self.session.log(f"Part finalized: {part.cmd.name}")
- part.cmd = None
-
- def start_connection(self) -> None:
- """A new gRPC connection has been established: self.session.grpc"""
- grpc = self.session.grpc
- self.session.log(f"gRPC connection established to: {grpc.host}:{grpc.port}")
-
- def end_connection(self) -> None:
- """The previous gRPC connection has been closed"""
- self.session.log("gRPC connection closed")
-
- def begin_update(self) -> None:
- """A new scene update is about to begin"""
- self.session.log("Begin update ------------------------")
-
- def end_update(self) -> None:
- """The scene update is complete"""
- self.session.log("End update ------------------------")
-
- def get_dsg_cmd_attribute(self, obj: Any, name: str, default: Any = None) -> Optional[str]:
- """Utility function to get an attribute from a DSG update object
-
- Note: UpdateVariable and UpdateGroup commands support generic attributes
- """
- return obj.attributes.get(name, default)
-
- def group_matrix(self, group: Any) -> Any:
- matrix = group.matrix4x4
- # The Case matrix is basically the camera transform. In vrmode, we only want
- # the raw geometry, so use the identity matrix.
- if (
- self.get_dsg_cmd_attribute(group, "ENS_OBJ_TYPE") == "ENS_CASE"
- ) and self.session.vrmode:
- matrix = [
- 1.0,
- 0.0,
- 0.0,
- 0.0,
- 0.0,
- 1.0,
- 0.0,
- 0.0,
- 0.0,
- 0.0,
- 1.0,
- 0.0,
- 0.0,
- 0.0,
- 0.0,
- 1.0,
- ]
- return matrix
-
-
-class DSGSession(object):
- def __init__(
- self,
- port: int = 12345,
- host: str = "127.0.0.1",
- security_code: str = "",
- verbose: int = 0,
- normalize_geometry: bool = False,
- vrmode: bool = False,
- time_scale: float = 1.0,
- handler: UpdateHandler = UpdateHandler(),
- ):
- """
- Manage a gRPC connection and link it to an UpdateHandler instance
-
- This class makes a DSG gRPC connection via the specified port and host (leveraging
- the passed security code). As DSG protobuffers arrive, they are merged into Part
- object instances and the UpdateHandler is invoked to further process them.
-
- Parameters
- ----------
- port : int
- The port number the EnSight gRPC service is running on.
- The default is ``12345``.
- host : str
- Name of the host that the EnSight gRPC service is running on.
- The default is ``"127.0.0.1"``, which is the localhost.
- security_code : str
- Shared security code for validating the gRPC communication.
- The default is ``""``.
- verbose : int
- The verbosity level. If set to 1 or higher the class will call logging.info
- for log output. The default is ``0``.
- normalize_geometry : bool
-            If True, the scene coordinates will be remapped into the volume
-            [-1,-1,-1] - [1,1,1]. The default is not to remap coordinates.
- vrmode : bool
- If True, do not include the EnSight camera in the generated view group. The default
- is to include the EnSight view in the scene transformations.
- time_scale : float
-            All DSG protobuffer time values will be multiplied by this factor after
-            being received. The default is ``1.0``.
- handler : UpdateHandler
- This is an UpdateHandler subclass that is called back when the state of
- a scene transfer changes. For example, methods are called when the
- transfer begins or ends and when a Part (mesh block) is ready for processing.
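-
-        A minimal usage sketch (assumes an EnSight gRPC server is already
-        listening on the given port)::
-
-            session = DSGSession(port=12345, handler=UpdateHandler())
-            if session.start() == 0:
-                session.request_an_update()
-                session.handle_one_update()
-                session.end()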
- """
- super().__init__()
- self._grpc = ensight_grpc.EnSightGRPC(port=port, host=host, secret_key=security_code)
- self._callback_handler = handler
- self._verbose = verbose
- self._thread: Optional[threading.Thread] = None
- self._message_queue: queue.Queue = queue.Queue() # Messages coming from EnSight
- self._dsg_queue: Optional[queue.SimpleQueue] = None # Outgoing messages to EnSight
- self._shutdown = False
- self._dsg = None
- self._normalize_geometry = normalize_geometry
- self._vrmode = vrmode
- self._time_scale = time_scale
- self._time_limits = [
- sys.float_info.max,
- -sys.float_info.max,
- ] # Min/max across all time steps
- self._mesh_block_count = 0
- self._variables: Dict[int, Any] = dict()
- self._groups: Dict[int, Any] = dict()
- self._part: Part = Part(self)
- self._scene_bounds: Optional[List] = None
- self._cur_timeline: List = [0.0, 0.0] # Start/End time for current update
- self._callback_handler.session = self
- # log any status changes to this file. external apps will be monitoring
- self._status_file = os.environ.get("ANSYS_OV_SERVER_STATUS_FILENAME", "")
- self._status = dict(status="idle", start_time=0.0, processed_buffers=0, total_buffers=0)
-
- @property
- def scene_bounds(self) -> Optional[List]:
- return self._scene_bounds
-
- @property
- def mesh_block_count(self) -> int:
- return self._mesh_block_count
-
- @property
- def vrmode(self) -> bool:
- return self._vrmode
-
- @vrmode.setter
- def vrmode(self, value: bool) -> None:
- self._vrmode = value
-
- @property
- def normalize_geometry(self) -> bool:
- return self._normalize_geometry
-
- @normalize_geometry.setter
- def normalize_geometry(self, value: bool) -> None:
- self._normalize_geometry = value
-
- @property
- def variables(self) -> dict:
- return self._variables
-
- @property
- def groups(self) -> dict:
- return self._groups
-
- @property
- def part(self) -> Part:
- return self._part
-
- @property
- def time_limits(self) -> List:
- return self._time_limits
-
- @property
- def cur_timeline(self) -> List:
- return self._cur_timeline
-
- @cur_timeline.setter
- def cur_timeline(self, timeline: List) -> None:
- self._cur_timeline = timeline
- self._time_limits[0] = min(self._time_limits[0], self._cur_timeline[0])
- self._time_limits[1] = max(self._time_limits[1], self._cur_timeline[1])
-
- @property
- def grpc(self) -> ensight_grpc.EnSightGRPC:
- return self._grpc
-
- def log(self, s: str, level: int = 0) -> None:
- """Log a string to the logging system
-
- If the message level is less than the current verbosity,
- emit the message.
- """
- if level < self._verbose:
- logging.info(s)
-
- @staticmethod
- def warn(s: str) -> None:
- """Issue a warning to the logging system
-
- The logging message is mapped to "warn" and cannot be blocked via verbosity
- checks.
- """
- logging.warning(s)
-
- @staticmethod
- def error(s: str) -> None:
- """Issue an error to the logging system
-
- The logging message is mapped to "error" and cannot be blocked via verbosity
- checks.
- """
- logging.error(s)
-
- def start(self) -> int:
- """Start a gRPC connection to an EnSight instance
-
- Make a gRPC connection and start a DSG stream handler.
-
- Returns
- -------
- 0 on success, -1 on an error.
- """
- # Start by setting up and verifying the connection
- self._grpc.connect()
- if not self._grpc.is_connected():
- self.log(f"Unable to establish gRPC connection to: {self._grpc.host}:{self._grpc.port}")
- return -1
-        # The streaming API requires an iterator, so we make one from a queue;
-        # the call also returns an iterator. self._dsg_queue is the input stream
-        # interface and self._dsg is the returned stream iterator.
- if self._dsg is not None:
- return 0
- self._dsg_queue = queue.SimpleQueue()
- self._dsg = self._grpc.dynamic_scene_graph_stream(
- iter(self._dsg_queue.get, None) # type:ignore
- )
- self._thread = threading.Thread(target=self._poll_messages)
- if self._thread is not None:
- self._thread.start()
- self._callback_handler.start_connection()
- return 0
-
- def end(self):
- """Stop a gRPC connection to the EnSight instance"""
- self._callback_handler.end_connection()
- self._grpc.shutdown()
- self._shutdown = True
- self._thread.join()
- self._grpc.shutdown()
- self._dsg = None
- self._thread = None
- self._dsg_queue = None
-
- def is_shutdown(self):
- """Check the service shutdown request status"""
- return self._shutdown
-
- def _update_status_file(self, timed: bool = False):
- """
- Update the status file contents. The status file will contain the
- following json object, stored as: self._status
-
- {
- 'status' : "working|idle",
- 'start_time' : timestamp_of_update_begin,
- 'processed_buffers' : number_of_protobuffers_processed,
- 'total_buffers' : number_of_protobuffers_total,
- }
-
- Parameters
- ----------
- timed : bool, optional:
- if True, only update every second.
-
- """
- if self._status_file:
- current_time = time.time()
- if timed:
- last_time = self._status.get("last_time", 0.0)
- if current_time - last_time < 1.0: # type: ignore
- return
- self._status["last_time"] = current_time
- try:
- message = json.dumps(self._status)
- with open(self._status_file, "w") as status_file:
- status_file.write(message)
- except IOError:
- pass # Note failure is expected here in some cases
-
- def request_an_update(self, animation: bool = False, allow_spontaneous: bool = True) -> None:
- """Start a DSG update
- Send a command to the DSG protocol to "init" an update.
-
- Parameters
- ----------
- animation:
- if True, export all EnSight timesteps.
- allow_spontaneous:
- if True, allow EnSight to trigger async updates.
- """
- # Send an INIT command to trigger a stream of update packets
- cmd = dynamic_scene_graph_pb2.SceneClientCommand()
- cmd.command_type = dynamic_scene_graph_pb2.SceneClientCommand.INIT
- # Allow EnSight push commands, but full scene only for now...
- cmd.init.allow_spontaneous = allow_spontaneous
- cmd.init.include_temporal_geometry = animation
- cmd.init.allow_incremental_updates = False
- cmd.init.maximum_chunk_size = 1024 * 1024
- self._dsg_queue.put(cmd) # type:ignore
-
- def _poll_messages(self) -> None:
- """Core interface to grab DSG events from gRPC and queue them for processing
-
-        This is run by a thread that monitors the DSG RPC call for update messages,
-        placing them in _message_queue as it finds them. They are picked up by the
-        main thread via _get_next_message().
- """
- while not self._shutdown:
- try:
- self._message_queue.put(next(self._dsg)) # type:ignore
- except Exception:
- self._shutdown = True
- self.log("DSG connection broken, calling exit")
- os._exit(0)
-
- def _get_next_message(self, wait: bool = True) -> Any:
- """Get the next queued up protobuffer message
-
- Called by the main thread to get any messages that were pulled in from the
- dsg stream and placed here by _poll_messages()
- """
- try:
- return self._message_queue.get(block=wait)
- except queue.Empty:
- return None
-
- def _reset(self):
- self._variables = {}
- self._groups = {}
- self._part = Part(self)
- self._scene_bounds = None
- self._mesh_block_count = 0 # reset when a new group shows up
-
- def handle_one_update(self) -> None:
- """Monitor the DSG stream and handle a single update operation
-
-        Wait until we get the scene update begin message. From there, reset the current
-        scene buckets and then parse all the incoming commands until we get the scene
-        update end command, at which point the generated stage (started in the view
-        command handler) is saved. Note: a part bucket is available at all times;
-        when a new part update comes in or the scene update ends, the current part is "finished".
- """
- # An update starts with a UPDATE_SCENE_BEGIN command
- cmd = self._get_next_message()
- while (cmd is not None) and (
- cmd.command_type != dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_SCENE_BEGIN
- ):
- # Look for a begin command
- cmd = self._get_next_message()
-
- # Start anew
- self._reset()
- self._callback_handler.begin_update()
-
- # Update our status
- self._status = dict(
- status="working", start_time=time.time(), processed_buffers=1, total_buffers=1
- )
- self._update_status_file()
-
- # handle the various commands until UPDATE_SCENE_END
- cmd = self._get_next_message()
- while (cmd is not None) and (
- cmd.command_type != dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_SCENE_END
- ):
- self._handle_update_command(cmd)
- self._status["processed_buffers"] += 1 # type: ignore
- self._status["total_buffers"] = self._status["processed_buffers"] + self._message_queue.qsize() # type: ignore
- self._update_status_file(timed=True)
- cmd = self._get_next_message()
-
- # Flush the last part
- self._finish_part()
-
- self._callback_handler.end_update()
-
- # Update our status
- self._status = dict(status="idle", start_time=0.0, processed_buffers=0, total_buffers=0)
- self._update_status_file()
-
- def _handle_update_command(self, cmd: dynamic_scene_graph_pb2.SceneUpdateCommand) -> None:
- """Dispatch out a scene update command to the proper handler
-
- Given a command object, pull out the correct portion of the protobuffer union and
- pass it to the appropriate handler.
-
- Parameters
- ----------
- cmd:
- The command to be dispatched.
- """
- name = "Unknown"
- if cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.DELETE_ID:
- name = "Delete IDs"
- elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_PART:
- name = "Part update"
- tmp = cmd.update_part
- self._handle_part(tmp)
- elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GROUP:
- name = "Group update"
- tmp = cmd.update_group
- self._handle_group(tmp)
- elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GEOM:
- name = "Geom update"
- tmp = cmd.update_geom
- self._part.update_geom(tmp)
- elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VARIABLE:
- name = "Variable update"
- tmp = cmd.update_variable
- self._handle_variable(tmp)
- elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VIEW:
- name = "View update"
- tmp = cmd.update_view
- self._handle_view(tmp)
- elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_TEXTURE:
- name = "Texture update"
- self.log(f"{name} --------------------------")
-
- def _finish_part(self) -> None:
- """Complete the current part
-
- There is always a part being modified. This method completes the current part, committing
- it to the handler.
- """
- try:
- self._callback_handler.finalize_part(self.part)
- except Exception as e:
- import traceback
-
- self.warn(f"Error encountered while finalizing part geometry: {str(e)}")
- traceback_str = "".join(traceback.format_tb(e.__traceback__))
- logging.debug(f"Traceback: {traceback_str}")
- self._mesh_block_count += 1
-
- def _handle_part(self, part_cmd: Any) -> None:
- """Handle a DSG UPDATE_PART command
-
- Finish the current part and set up the next part.
-
- Parameters
- ----------
-        part_cmd:
- The command coming from the EnSight stream.
- """
- self._finish_part()
- self._part.reset(part_cmd)
-
- def _handle_group(self, group: Any) -> None:
- """Handle a DSG UPDATE_GROUP command
-
- Parameters
- ----------
- group:
- The command coming from the EnSight stream.
- """
- # reset current mesh (part) count for unique "part" naming in USD
- self._mesh_block_count = 0
-
- # record the scene bounds in case they are needed later
- self._groups[group.id] = group
- bounds = group.attributes.get("ENS_SCENE_BOUNDS", None)
- if bounds:
- minmax = list()
- for v in bounds.split(","):
- try:
- minmax.append(float(v))
- except ValueError:
- pass
- if len(minmax) == 6:
- self._scene_bounds = minmax
- # callback
- self._callback_handler.add_group(group.id)
-
- def _handle_variable(self, var: Any) -> None:
- """Handle a DSG UPDATE_VARIABLE command
-
- Save off the EnSight variable DSG command object.
-
- Parameters
- ----------
- var:
- The command coming from the EnSight stream.
- """
- self._variables[var.id] = var
- self._callback_handler.add_variable(var.id)
-
- def _handle_view(self, view: Any) -> None:
- """Handle a DSG UPDATE_VIEW command
-
- Parameters
- ----------
- view:
- The command coming from the EnSight stream.
- """
- self._finish_part()
- self._scene_bounds = None
- self._groups[view.id] = view
- if len(view.timeline) == 2:
- view.timeline[0] *= self._time_scale
- view.timeline[1] *= self._time_scale
- self.cur_timeline = [view.timeline[0], view.timeline[1]]
- self._callback_handler.add_group(view.id, view=True)
+import hashlib
+import json
+import logging
+import os
+import queue
+import sys
+import threading
+import time
+from typing import Any, Dict, List, Optional
+
+from ansys.api.pyensight.v0 import dynamic_scene_graph_pb2
+from ansys.pyensight.core import ensight_grpc
+import numpy
+
+
+class Part(object):
+ def __init__(self, session: "DSGSession"):
+ """
+        This object roughly represents an EnSight "Part". It contains the connectivity,
+        coordinates, normals, and texture coordinate information for one DSG entity.
+
+        This object stores basic geometry information coming from the DSG protocol. The
+        update_geom() method parses an "UpdateGeom" protobuffer and merges the results
+        into the Part object.
+
+ Parameters
+ ----------
+ session:
+ The DSG connection session object.
+ """
+ self.session = session
+ self.conn_tris = numpy.array([], dtype="int32")
+ self.conn_lines = numpy.array([], dtype="int32")
+ self.coords = numpy.array([], dtype="float32")
+ self.normals = numpy.array([], dtype="float32")
+ self.normals_elem = False
+ self.tcoords = numpy.array([], dtype="float32")
+ self.tcoords_elem = False
+ self.node_sizes = numpy.array([], dtype="float32")
+ self.cmd: Optional[Any] = None
+ self.hash = hashlib.new("sha256")
+ self._material: Optional[Any] = None
+ self.reset()
+
+ def reset(self, cmd: Any = None) -> None:
+ """
+ Reset the part object state to prepare the object
+ for a new part representation. Numpy arrays are cleared
+ and the state reset.
+
+ Parameters
+ ----------
+ cmd: Any
+ The DSG command that triggered this reset. Most likely
+            this is an UPDATE_PART command.
+
+ """
+ self.conn_tris = numpy.array([], dtype="int32")
+ self.conn_lines = numpy.array([], dtype="int32")
+ self.coords = numpy.array([], dtype="float32")
+ self.normals = numpy.array([], dtype="float32")
+ self.normals_elem = False
+ self.tcoords = numpy.array([], dtype="float32")
+ self.tcoords_var_id = None
+ self.tcoords_elem = False
+ self.node_sizes = numpy.array([], dtype="float32")
+ self.hash = hashlib.new("sha256")
+ if cmd is not None:
+ self.hash.update(cmd.hash.encode("utf-8"))
+ self.cmd = cmd
+ self._material = None
+
+ def _parse_material(self) -> None:
+ """
+        Parse the JSON content of the part command's material string and
+        make it accessible via material_names() and material().
+ """
+ if self._material is not None:
+ return
+ try:
+ if self.cmd.material_name: # type: ignore
+ self._material = json.loads(self.cmd.material_name) # type: ignore
+ for key, value in self._material.items():
+ value["name"] = key
+ else:
+ self._material = {}
+ except Exception as e:
+ self.session.warn(f"Unable to parse JSON material: {str(e)}")
+ self._material = {}
+
+ def material_names(self) -> List[str]:
+ """
+ Return the list of material names included in the part material.
+
+ Returns
+ -------
+ List[str]
+ The list of defined material names.
+ """
+ self._parse_material()
+ if self._material is None:
+ return []
+ return list(self._material.keys())
+
+ def material(self, name: str = "") -> dict:
+ """
+ Return the material dictionary for the specified material name.
+
+ Parameters
+ ----------
+ name: str
+ The material name to query. If no material name is given, the
+ first name in the material_names() list is used.
+
+ Returns
+ -------
+ dict
+ The material description dictionary or an empty dictionary.
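+
+        Example (illustrative; the available names depend on the scene)::
+
+            names = part.material_names()
+            mat = part.material(names[0]) if names else {}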
+ """
+ self._parse_material()
+ if not name:
+ names = self.material_names()
+ if len(names):
+ name = names[0]
+ if self._material is None:
+ return {}
+ return self._material.get(name, {})
+
+ def update_geom(self, cmd: dynamic_scene_graph_pb2.UpdateGeom) -> None:
+ """
+ Merge an update geometry command into the numpy buffers being cached in this object
+
+ Parameters
+ ----------
+ cmd:
+ This is an array update command. It could be for coordinates, normals, variables, connectivity, etc.
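+
+        Notes
+        -----
+        Arrays can arrive in multiple chunks: the target numpy buffer is
+        resized to ``total_array_size`` when needed and each chunk is copied
+        in at its ``chunk_offset``.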
+ """
+ if cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.COORDINATES:
+ if self.coords.size != cmd.total_array_size:
+ self.coords = numpy.resize(self.coords, cmd.total_array_size)
+ self.coords[cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)] = cmd.flt_array
+ elif cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.TRIANGLES:
+ if self.conn_tris.size != cmd.total_array_size:
+ self.conn_tris = numpy.resize(self.conn_tris, cmd.total_array_size)
+ self.conn_tris[cmd.chunk_offset : cmd.chunk_offset + len(cmd.int_array)] = cmd.int_array
+ elif cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.LINES:
+ if self.conn_lines.size != cmd.total_array_size:
+ self.conn_lines = numpy.resize(self.conn_lines, cmd.total_array_size)
+ self.conn_lines[
+ cmd.chunk_offset : cmd.chunk_offset + len(cmd.int_array)
+ ] = cmd.int_array
+ elif (cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_NORMALS) or (
+ cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.NODE_NORMALS
+ ):
+ self.normals_elem = cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_NORMALS
+ if self.normals.size != cmd.total_array_size:
+ self.normals = numpy.resize(self.normals, cmd.total_array_size)
+ self.normals[cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)] = cmd.flt_array
+ elif (cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_VARIABLE) or (
+ cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.NODE_VARIABLE
+ ):
+ # Get the variable definition
+ if cmd.variable_id in self.session.variables:
+ if self.cmd.color_variableid == cmd.variable_id: # type: ignore
+ # Receive the colorby var values
+ self.tcoords_elem = (
+ cmd.payload_type == dynamic_scene_graph_pb2.UpdateGeom.ELEM_VARIABLE
+ )
+ if self.tcoords.size != cmd.total_array_size:
+ self.tcoords = numpy.resize(self.tcoords, cmd.total_array_size)
+ self.tcoords[
+ cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)
+ ] = cmd.flt_array
+
+ # Add the variable hash to the Part's hash, to pick up palette changes
+ var_cmd = self.session.variables.get(cmd.variable_id, None)
+ if var_cmd is not None:
+ self.hash.update(var_cmd.hash.encode("utf-8"))
+
+ if self.cmd.node_size_variableid == cmd.variable_id: # type: ignore
+ # Receive the node size var values
+ if self.node_sizes.size != cmd.total_array_size:
+ self.node_sizes = numpy.resize(self.node_sizes, cmd.total_array_size)
+ self.node_sizes[
+ cmd.chunk_offset : cmd.chunk_offset + len(cmd.flt_array)
+ ] = cmd.flt_array
+ # Combine the hashes for the UpdatePart and all UpdateGeom messages
+ self.hash.update(cmd.hash.encode("utf-8"))
+
+ def nodal_surface_rep(self):
+ """
+        This function processes the geometry arrays and converts them into a nodal
+        representation. It duplicates triangles as needed (to preserve element normals)
+        and converts variable data into texture coordinates.
+
+ Returns
+ -------
+ On failure, the method returns None for the first return value. The returned tuple is:
+
+        (part_command, vertices, connectivity, normals, tcoords, var_command)
+
+ part_command: UPDATE_PART command object
+ vertices: numpy array of the nodal coordinates
+ connectivity: numpy array of the triangle indices into the vertices array
+ normals: numpy array of per vertex normal values (optional)
+ tcoords: numpy array of per vertex texture coordinates (optional)
+        var_command: UPDATE_VARIABLE command object for the variable the texture coordinates correspond to, if any
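+
+        For example, with elemental (per-face) normals or texture coordinates,
+        a mesh of N triangles is expanded to 3*N unique vertices so each
+        triangle carries its own values; the returned connectivity is then
+        simply the identity [0..3N-1].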
+ """
+ if self.cmd is None:
+ return None, None, None, None, None, None
+ if self.conn_tris.size == 0:
+ self.session.log(f"Note: part '{self.cmd.name}' contains no triangles.")
+ return None, None, None, None, None, None
+ verts = self.coords
+ _ = self._normalize_verts(verts)
+
+ conn = self.conn_tris
+ normals = self.normals
+ tcoords = None
+ if self.tcoords.size:
+ tcoords = self.tcoords
+ if self.tcoords_elem or self.normals_elem:
+ verts_per_prim = 3
+ num_prims = conn.size // verts_per_prim
+ # "flatten" the triangles to move values from elements to nodes
+ new_verts = numpy.ndarray((num_prims * verts_per_prim * 3,), dtype="float32")
+ new_conn = numpy.ndarray((num_prims * verts_per_prim,), dtype="int32")
+ new_tcoords = None
+ if tcoords is not None:
+ # remember that the input values are 1D at this point, we will expand to 2D later
+ new_tcoords = numpy.ndarray((num_prims * verts_per_prim,), dtype="float32")
+ new_normals = None
+ if normals is not None:
+ if normals.size == 0:
+ self.session.log("Warning: zero length normals!")
+ else:
+ new_normals = numpy.ndarray((num_prims * verts_per_prim * 3,), dtype="float32")
+ j = 0
+ for i0 in range(num_prims):
+ for i1 in range(verts_per_prim):
+ idx = conn[i0 * verts_per_prim + i1]
+ # new connectivity (identity)
+ new_conn[j] = j
+ # copy the vertex
+ new_verts[j * 3 + 0] = verts[idx * 3 + 0]
+ new_verts[j * 3 + 1] = verts[idx * 3 + 1]
+ new_verts[j * 3 + 2] = verts[idx * 3 + 2]
+ if new_normals is not None:
+ if self.normals_elem:
+ # copy the normal associated with the face
+ new_normals[j * 3 + 0] = normals[i0 * 3 + 0]
+ new_normals[j * 3 + 1] = normals[i0 * 3 + 1]
+ new_normals[j * 3 + 2] = normals[i0 * 3 + 2]
+ else:
+ # copy the same normal as the vertex
+ new_normals[j * 3 + 0] = normals[idx * 3 + 0]
+ new_normals[j * 3 + 1] = normals[idx * 3 + 1]
+ new_normals[j * 3 + 2] = normals[idx * 3 + 2]
+ if new_tcoords is not None:
+ # remember, 1D texture coords at this point
+ if self.tcoords_elem:
+ # copy the texture coord associated with the face
+ new_tcoords[j] = tcoords[i0]
+ else:
+ # copy the same texture coord as the vertex
+ new_tcoords[j] = tcoords[idx]
+ j += 1
+ # new arrays.
+ verts = new_verts
+ conn = new_conn
+ normals = new_normals
+ if tcoords is not None:
+ tcoords = new_tcoords
+
+ var_cmd = None
+ # texture coords need transformation from variable value to [ST]
+ if tcoords is not None:
+ tcoords, var_cmd = self._build_st_coords(tcoords, verts.size // 3)
+
+ self.session.log(
+ f"Part '{self.cmd.name}' defined: {self.coords.size // 3} verts, {self.conn_tris.size // 3} tris."
+ )
+ command = self.cmd
+
+ return command, verts, conn, normals, tcoords, var_cmd
+
+ def _normalize_verts(self, verts: numpy.ndarray) -> float:
+ """
+        This function scales and translates the vertices so that the longest axis in
+        the scene has length 1.0 and the data is centered at the origin.
+
+        Returns the scale factor that was applied (1.0 / longest axis length)
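+
+        For example (illustrative numbers): with scene_bounds [0, 0, 0, 10, 4, 2], the
+        longest axis is dx = 10, so each coordinate is shifted by the axis midpoints
+        (5, 2, 1) and divided by 10; the returned scale factor is 0.1.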
+ """
+ s = 1.0
+ if self.session.normalize_geometry and self.session.scene_bounds is not None:
+ num_verts = verts.size // 3
+ midx = (self.session.scene_bounds[3] + self.session.scene_bounds[0]) * 0.5
+ midy = (self.session.scene_bounds[4] + self.session.scene_bounds[1]) * 0.5
+ midz = (self.session.scene_bounds[5] + self.session.scene_bounds[2]) * 0.5
+ dx = self.session.scene_bounds[3] - self.session.scene_bounds[0]
+ dy = self.session.scene_bounds[4] - self.session.scene_bounds[1]
+ dz = self.session.scene_bounds[5] - self.session.scene_bounds[2]
+ s = dx
+ if dy > s:
+ s = dy
+ if dz > s:
+ s = dz
+ if s == 0:
+ s = 1.0
+ for i in range(num_verts):
+ j = i * 3
+ verts[j + 0] = (verts[j + 0] - midx) / s
+ verts[j + 1] = (verts[j + 1] - midy) / s
+ verts[j + 2] = (verts[j + 2] - midz) / s
+ return 1.0 / s
+
+ def _build_st_coords(self, tcoords: numpy.ndarray, num_verts: int):
+ """
+ The Omniverse interface uses 2D texturing (s,t) to reference the texture map.
+ This method converts DSG texture coordinates (1D and in "variable" units) into
+        2D OpenGL-style [0.,1.] normalized coordinate space. The "t" coordinate will
+        always be 0.5.
+
+ Parameters
+ ----------
+ tcoords: numpy.ndarray
+ The DSG 1D texture coordinates, which are actually variable values.
+
+ num_verts: int
+ The number of vertices in the mesh.
+
+ Returns
+ -------
+ numpy.ndarray, Any
+            The OpenGL-style ST texture coordinate array and the variable definition DSG command.
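+
+        As a worked example (illustrative numbers): with a 256-texel palette and a
+        variable range of [0, 10], a value of 5.0 normalizes to s = 0.5 and is then
+        remapped to ``0.5 * tex_width + half_texel``, sampling texel centers rather
+        than texel edges.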
+ """
+ var_dsg_id = self.cmd.color_variableid # type: ignore
+ var_cmd = self.session.variables[var_dsg_id]
+ v_min = None
+ v_max = None
+ for lvl in var_cmd.levels:
+ if (v_min is None) or (v_min > lvl.value):
+ v_min = lvl.value
+ if (v_max is None) or (v_max < lvl.value):
+ v_max = lvl.value
+ var_minmax: List[float] = [v_min, v_max] # type: ignore
+ # build a power of two x 1 texture
+ num_texels = len(var_cmd.texture) // 4
+ half_texel = 1 / (num_texels * 2.0)
+ tmp = numpy.ndarray((num_verts * 2,), dtype="float32")
+ tmp.fill(0.5) # fill in the T coordinate...
+ tex_width = half_texel * 2 * (num_texels - 1) # center to center of num_texels
+ # if the range is 0, adjust the min by -1. The result is that the texture
+ # coords will get mapped to S=1.0 which is what EnSight does in this situation
+ if (var_minmax[1] - var_minmax[0]) == 0.0:
+ var_minmax[0] = var_minmax[0] - 1.0
+ var_width = var_minmax[1] - var_minmax[0]
+ for idx in range(num_verts):
+ # normalized S coord value (clamp)
+ s = (tcoords[idx] - var_minmax[0]) / var_width
+ if s < 0.0:
+ s = 0.0
+ if s > 1.0:
+ s = 1.0
+ # map to the texture range and set the S value
+ tmp[idx * 2] = s * tex_width + half_texel
+ return tmp, var_cmd
+
+ def line_rep(self):
+ """
+ This function processes the geometry arrays and returns values to represent line data.
+        The vertex array embeds the connectivity, so every two points represent a line segment.
+        The tcoords array follows the same per-vertex layout as the vertex array.
+
+        Returns
+        -------
+        On failure, the method returns None for the first return value. The returned tuple is:
+
+        (part_command, vertices, tcoords, var_command)
+
+        part_command: UPDATE_PART command object
+        vertices: numpy array of per-node coordinates (two nodes per line segment)
+        tcoords: numpy array of per-vertex texture coordinates (optional)
+        var_command: UPDATE_VARIABLE command object for the variable the colors correspond to, if any
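+
+        A minimal caller sketch (hypothetical usage; assumes ``part`` is a populated
+        Part instance)::
+
+            cmd, verts, tcoords, var_cmd = part.line_rep()
+            if cmd is not None:
+                num_segments = verts.size // 6  # two xyz points per segment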
+ """
+ if self.cmd is None:
+ return None, None, None, None
+        if self.cmd.render != self.cmd.CONNECTIVITY:
+            # Early out. The rendering type for this object is not a line (connectivity) rep
+            return None, None, None, None
+
+ num_lines = self.conn_lines.size // 2
+ if num_lines == 0:
+ return None, None, None, None
+ verts = numpy.ndarray((num_lines * 2 * 3,), dtype="float32")
+ tcoords = None
+ if self.tcoords.size:
+ tcoords = numpy.ndarray((num_lines * 2,), dtype="float32")
+ # TODO: handle elemental line values (self.tcoords_elem) by converting to nodal...
+ # if self.tcoords_elem:
+ for i in range(num_lines):
+ i0 = self.conn_lines[i * 2]
+ i1 = self.conn_lines[i * 2 + 1]
+ offset = i * 6
+ verts[offset + 0] = self.coords[i0 * 3 + 0]
+ verts[offset + 1] = self.coords[i0 * 3 + 1]
+ verts[offset + 2] = self.coords[i0 * 3 + 2]
+ verts[offset + 3] = self.coords[i1 * 3 + 0]
+ verts[offset + 4] = self.coords[i1 * 3 + 1]
+ verts[offset + 5] = self.coords[i1 * 3 + 2]
+ if tcoords is not None:
+ # tcoords are 1D at this point
+ offset = i * 2
+ tcoords[offset + 0] = self.tcoords[i0]
+ tcoords[offset + 1] = self.tcoords[i1]
+
+ _ = self._normalize_verts(verts)
+
+ var_cmd = None
+ # texture coords need transformation from variable value to [ST]
+ if tcoords is not None:
+ tcoords, var_cmd = self._build_st_coords(tcoords, verts.size // 3)
+
+ self.session.log(f"Part '{self.cmd.name}' defined: {num_lines} lines.")
+ command = self.cmd
+
+ return command, verts, tcoords, var_cmd
+
+ def point_rep(self):
+ """
+        This function processes the geometry arrays and returns values to represent point data.
+
+ Returns
+ -------
+ On failure, the method returns None for the first return value. The returned tuple is:
+
+ (part_command, vertices, sizes, colors, var_command)
+
+ part_command: UPDATE_PART command object
+ vertices: numpy array of per-node coordinates
+ sizes: numpy array of per-node radii
+ colors: numpy array of per-node rgb colors
+ var_command: UPDATE_VARIABLE command object for the variable the colors correspond to, if any
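+
+        A minimal caller sketch (hypothetical usage; assumes ``part`` is a populated
+        Part instance)::
+
+            cmd, verts, sizes, colors, var_cmd = part.point_rep()
+            if cmd is not None:
+                num_points = verts.size // 3  # sizes and colors may be None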
+ """
+ if self.cmd is None:
+ return None, None, None, None, None
+ if self.cmd.render != self.cmd.NODES:
+            # Early out. The rendering type for this object is not a point (nodes) rep
+ return None, None, None, None, None
+ verts = self.coords
+ num_verts = verts.size // 3
+ norm_scale = self._normalize_verts(verts)
+
+ # Convert var values in self.tcoords to RGB colors
+ # For now, look up RGB colors. Planned USD enhancements should allow tex coords instead.
+ colors = None
+ var_cmd = None
+
+ if self.tcoords.size and self.tcoords.size == num_verts:
+ var_dsg_id = self.cmd.color_variableid
+ var_cmd = self.session.variables[var_dsg_id]
+ if len(var_cmd.levels) == 0:
+ self.session.log(
+ f"Note: Node rep not created for part '{self.cmd.name}'. It has var values, but a palette with 0 levels."
+ )
+ return None, None, None, None, None
+
+ p_min = None
+ p_max = None
+ for lvl in var_cmd.levels:
+ if (p_min is None) or (p_min > lvl.value):
+ p_min = lvl.value
+ if (p_max is None) or (p_max < lvl.value):
+ p_max = lvl.value
+
+ num_texels = int(len(var_cmd.texture) / 4)
+
+ colors = numpy.ndarray((num_verts * 3,), dtype="float32")
+ low_color = [c / 255.0 for c in var_cmd.texture[0:3]]
+ high_color = [
+ c / 255.0 for c in var_cmd.texture[4 * (num_texels - 1) : 4 * (num_texels - 1) + 3]
+ ]
+ if p_min == p_max:
+ # Special case where palette min == palette max
+                mid_color = [
+                    c / 255.0
+                    for c in var_cmd.texture[4 * (num_texels // 2) : 4 * (num_texels // 2) + 3]
+                ]
+ for idx in range(num_verts):
+ val = self.tcoords[idx]
+ if val == p_min:
+ colors[idx * 3 : idx * 3 + 3] = mid_color
+ elif val < p_min:
+ colors[idx * 3 : idx * 3 + 3] = low_color
+ elif val > p_min:
+ colors[idx * 3 : idx * 3 + 3] = high_color
+ else:
+ for idx in range(num_verts):
+ val = self.tcoords[idx]
+ if val <= p_min:
+ colors[idx * 3 : idx * 3 + 3] = low_color
+ else:
+ pal_pos = (num_texels - 1) * (val - p_min) / (p_max - p_min)
+ pal_idx, pal_sub = divmod(pal_pos, 1)
+ pal_idx = int(pal_idx)
+
+ if pal_idx >= num_texels - 1:
+ colors[idx * 3 : idx * 3 + 3] = high_color
+ else:
+ col0 = var_cmd.texture[pal_idx * 4 : pal_idx * 4 + 3]
+ col1 = var_cmd.texture[4 + pal_idx * 4 : 4 + pal_idx * 4 + 3]
+                            for ii in range(0, 3):
+                                # interpolate between adjacent texels (pal_sub is the
+                                # fractional distance from col0 toward col1)
+                                colors[idx * 3 + ii] = (
+                                    col0[ii] * (1.0 - pal_sub) + col1[ii] * pal_sub
+                                ) / 255.0
+ self.session.log(f"Part '{self.cmd.name}' defined: {self.coords.size // 3} points.")
+
+ node_sizes = None
+ if self.node_sizes.size and self.node_sizes.size == num_verts:
+ # Pass out the node sizes if there is a size-by variable
+ node_size_default = self.cmd.node_size_default * norm_scale
+ node_sizes = numpy.ndarray((num_verts,), dtype="float32")
+ for ii in range(0, num_verts):
+ node_sizes[ii] = self.node_sizes[ii] * node_size_default
+ elif norm_scale != 1.0:
+ # Pass out the node sizes if the model is normalized to fit in a unit cube
+ node_size_default = self.cmd.node_size_default * norm_scale
+ node_sizes = numpy.ndarray((num_verts,), dtype="float32")
+ for ii in range(0, num_verts):
+ node_sizes[ii] = node_size_default
+
+ self.session.log(f"Part '{self.cmd.name}' defined: {self.coords.size // 3} points.")
+ command = self.cmd
+
+ return command, verts, node_sizes, colors, var_cmd
+
+
+class UpdateHandler(object):
+ """
+ This class serves as the interface between a DSGSession and a hosting application.
+ The DSGSession processes the general aspects of the gRPC pipeline and collects the
+    various DSG objects into collections of groups, variables, etc. It also coalesces
+    the individual array updates into a "Part" object, which represents a single addressable
+    mesh chunk.
+    UpdateHandler methods are called as the various updates happen, including when
+    a mesh chunk has been entirely defined. In most scenarios, a subclass of UpdateHandler
+    is passed to the DSGSession to hand the mesh data off to the target application.
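+
+    A minimal subclass sketch (hypothetical names)::
+
+        class PrintHandler(UpdateHandler):
+            def finalize_part(self, part):
+                cmd, verts, *_ = part.nodal_surface_rep()
+                if cmd is not None:
+                    print(f"{cmd.name}: {verts.size // 3} vertices")
+                # call the superclass last: it clears the saved part command
+                super().finalize_part(part)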
+ """
+
+ def __init__(self) -> None:
+ self._session: "DSGSession"
+
+ @property
+ def session(self) -> "DSGSession":
+ """The session object this handler has been associated with"""
+ return self._session
+
+ @session.setter
+ def session(self, session: "DSGSession") -> None:
+ self._session = session
+
+ def add_group(self, id: int, view: bool = False) -> None:
+ """Called when a new group command has been added: self.session.groups[id]"""
+ if view:
+ self.session.log(f"Adding view: {self.session.groups[id]}")
+ else:
+ self.session.log(f"Adding group: {self.session.groups[id].name}")
+
+ def add_variable(self, id: int) -> None:
+ """Called when a new group command has been added: self.session.variables[id]"""
+ self.session.log(f"Adding variable: {self.session.variables[id].name}")
+
+ def finalize_part(self, part: Part) -> None:
+ """Called when all the updates on a Part object have been completed.
+
+        Note: this superclass method should be called after the subclass has processed
+        the part geometry, as the saved part command is destroyed by this call.
+ """
+ if part.cmd:
+ self.session.log(f"Part finalized: {part.cmd.name}")
+ part.cmd = None
+
+ def start_connection(self) -> None:
+ """A new gRPC connection has been established: self.session.grpc"""
+ grpc = self.session.grpc
+ self.session.log(f"gRPC connection established to: {grpc.host}:{grpc.port}")
+
+ def end_connection(self) -> None:
+ """The previous gRPC connection has been closed"""
+ self.session.log("gRPC connection closed")
+
+ def begin_update(self) -> None:
+ """A new scene update is about to begin"""
+ self.session.log("Begin update ------------------------")
+
+ def end_update(self) -> None:
+ """The scene update is complete"""
+ self.session.log("End update ------------------------")
+
+ def get_dsg_cmd_attribute(self, obj: Any, name: str, default: Any = None) -> Optional[str]:
+ """Utility function to get an attribute from a DSG update object
+
+ Note: UpdateVariable and UpdateGroup commands support generic attributes
+ """
+ return obj.attributes.get(name, default)
+
+ def group_matrix(self, group: Any) -> Any:
+ matrix = group.matrix4x4
+ # The Case matrix is basically the camera transform. In vrmode, we only want
+ # the raw geometry, so use the identity matrix.
+ if (
+ self.get_dsg_cmd_attribute(group, "ENS_OBJ_TYPE") == "ENS_CASE"
+ ) and self.session.vrmode:
+            # 4x4 identity matrix (row-major)
+            matrix = [
+                1.0, 0.0, 0.0, 0.0,
+                0.0, 1.0, 0.0, 0.0,
+                0.0, 0.0, 1.0, 0.0,
+                0.0, 0.0, 0.0, 1.0,
+            ]
+ return matrix
+
+
+class DSGSession(object):
+ def __init__(
+ self,
+ port: int = 12345,
+ host: str = "127.0.0.1",
+ security_code: str = "",
+ verbose: int = 0,
+ normalize_geometry: bool = False,
+ vrmode: bool = False,
+ time_scale: float = 1.0,
+ handler: UpdateHandler = UpdateHandler(),
+ ):
+ """
+ Manage a gRPC connection and link it to an UpdateHandler instance
+
+ This class makes a DSG gRPC connection via the specified port and host (leveraging
+ the passed security code). As DSG protobuffers arrive, they are merged into Part
+ object instances and the UpdateHandler is invoked to further process them.
+
+ Parameters
+ ----------
+ port : int
+ The port number the EnSight gRPC service is running on.
+ The default is ``12345``.
+ host : str
+ Name of the host that the EnSight gRPC service is running on.
+ The default is ``"127.0.0.1"``, which is the localhost.
+ security_code : str
+ Shared security code for validating the gRPC communication.
+ The default is ``""``.
+ verbose : int
+ The verbosity level. If set to 1 or higher the class will call logging.info
+ for log output. The default is ``0``.
+ normalize_geometry : bool
+            If True, the scene coordinates are remapped into the volume [-1,-1,-1] - [1,1,1].
+            The default is not to remap coordinates.
+ vrmode : bool
+ If True, do not include the EnSight camera in the generated view group. The default
+ is to include the EnSight view in the scene transformations.
+ time_scale : float
+            All DSG protobuffer time values are multiplied by this factor after
+            being received. The default is ``1.0``.
+ handler : UpdateHandler
+ This is an UpdateHandler subclass that is called back when the state of
+ a scene transfer changes. For example, methods are called when the
+ transfer begins or ends and when a Part (mesh block) is ready for processing.
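+
+        A typical lifecycle, as a sketch (assumes an EnSight DSG gRPC server is
+        reachable on the given port)::
+
+            session = DSGSession(port=12345, handler=UpdateHandler())
+            if session.start() == 0:
+                session.request_an_update()
+                session.handle_one_update()
+                session.end()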
+ """
+ super().__init__()
+ self._grpc = ensight_grpc.EnSightGRPC(port=port, host=host, secret_key=security_code)
+ self._callback_handler = handler
+ self._verbose = verbose
+ self._thread: Optional[threading.Thread] = None
+ self._message_queue: queue.Queue = queue.Queue() # Messages coming from EnSight
+ self._dsg_queue: Optional[queue.SimpleQueue] = None # Outgoing messages to EnSight
+ self._shutdown = False
+ self._dsg = None
+ self._normalize_geometry = normalize_geometry
+ self._vrmode = vrmode
+ self._time_scale = time_scale
+ self._time_limits = [
+ sys.float_info.max,
+ -sys.float_info.max,
+ ] # Min/max across all time steps
+ self._mesh_block_count = 0
+ self._variables: Dict[int, Any] = dict()
+ self._groups: Dict[int, Any] = dict()
+ self._part: Part = Part(self)
+ self._scene_bounds: Optional[List] = None
+ self._cur_timeline: List = [0.0, 0.0] # Start/End time for current update
+ self._callback_handler.session = self
+        # Log any status changes to this file. External apps will be monitoring it.
+ self._status_file = os.environ.get("ANSYS_OV_SERVER_STATUS_FILENAME", "")
+ self._status = dict(status="idle", start_time=0.0, processed_buffers=0, total_buffers=0)
+
+ @property
+ def scene_bounds(self) -> Optional[List]:
+ return self._scene_bounds
+
+ @property
+ def mesh_block_count(self) -> int:
+ return self._mesh_block_count
+
+ @property
+ def vrmode(self) -> bool:
+ return self._vrmode
+
+ @vrmode.setter
+ def vrmode(self, value: bool) -> None:
+ self._vrmode = value
+
+ @property
+ def normalize_geometry(self) -> bool:
+ return self._normalize_geometry
+
+ @normalize_geometry.setter
+ def normalize_geometry(self, value: bool) -> None:
+ self._normalize_geometry = value
+
+ @property
+ def variables(self) -> dict:
+ return self._variables
+
+ @property
+ def groups(self) -> dict:
+ return self._groups
+
+ @property
+ def part(self) -> Part:
+ return self._part
+
+ @property
+ def time_limits(self) -> List:
+ return self._time_limits
+
+ @property
+ def cur_timeline(self) -> List:
+ return self._cur_timeline
+
+ @cur_timeline.setter
+ def cur_timeline(self, timeline: List) -> None:
+ self._cur_timeline = timeline
+ self._time_limits[0] = min(self._time_limits[0], self._cur_timeline[0])
+ self._time_limits[1] = max(self._time_limits[1], self._cur_timeline[1])
+
+ @property
+ def grpc(self) -> ensight_grpc.EnSightGRPC:
+ return self._grpc
+
+ def log(self, s: str, level: int = 0) -> None:
+ """Log a string to the logging system
+
+ If the message level is less than the current verbosity,
+ emit the message.
+ """
+ if level < self._verbose:
+ logging.info(s)
+
+ @staticmethod
+ def warn(s: str) -> None:
+ """Issue a warning to the logging system
+
+ The logging message is mapped to "warn" and cannot be blocked via verbosity
+ checks.
+ """
+ logging.warning(s)
+
+ @staticmethod
+ def error(s: str) -> None:
+ """Issue an error to the logging system
+
+ The logging message is mapped to "error" and cannot be blocked via verbosity
+ checks.
+ """
+ logging.error(s)
+
+ def start(self) -> int:
+ """Start a gRPC connection to an EnSight instance
+
+ Make a gRPC connection and start a DSG stream handler.
+
+ Returns
+ -------
+ 0 on success, -1 on an error.
+ """
+ # Start by setting up and verifying the connection
+ self._grpc.connect()
+ if not self._grpc.is_connected():
+ self.log(f"Unable to establish gRPC connection to: {self._grpc.host}:{self._grpc.port}")
+ return -1
+        # The streaming API requires an iterator, so we make one from a queue;
+        # it also returns an iterator. self._dsg_queue is the input stream interface
+        # and self._dsg is the returned stream iterator.
+ if self._dsg is not None:
+ return 0
+ self._dsg_queue = queue.SimpleQueue()
+ self._dsg = self._grpc.dynamic_scene_graph_stream(
+ iter(self._dsg_queue.get, None) # type:ignore
+ )
+ self._thread = threading.Thread(target=self._poll_messages)
+ if self._thread is not None:
+ self._thread.start()
+ self._callback_handler.start_connection()
+ return 0
+
+ def end(self):
+ """Stop a gRPC connection to the EnSight instance"""
+ self._callback_handler.end_connection()
+ self._grpc.shutdown()
+ self._shutdown = True
+ self._thread.join()
+ self._dsg = None
+ self._thread = None
+ self._dsg_queue = None
+
+ def is_shutdown(self):
+ """Check the service shutdown request status"""
+ return self._shutdown
+
+ def _update_status_file(self, timed: bool = False):
+ """
+ Update the status file contents. The status file will contain the
+ following json object, stored as: self._status
+
+ {
+ 'status' : "working|idle",
+ 'start_time' : timestamp_of_update_begin,
+ 'processed_buffers' : number_of_protobuffers_processed,
+ 'total_buffers' : number_of_protobuffers_total,
+ }
+
+ Parameters
+ ----------
+ timed : bool, optional:
+ if True, only update every second.
+
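+        An external monitor might read it like this (sketch; the filename comes from
+        the ANSYS_OV_SERVER_STATUS_FILENAME environment variable)::
+
+            import json
+            with open(status_filename) as f:
+                status = json.load(f)
+            if status["status"] == "working" and status["total_buffers"]:
+                pct = 100.0 * status["processed_buffers"] / status["total_buffers"]
+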
+ """
+ if self._status_file:
+ current_time = time.time()
+ if timed:
+ last_time = self._status.get("last_time", 0.0)
+ if current_time - last_time < 1.0: # type: ignore
+ return
+ self._status["last_time"] = current_time
+ try:
+ message = json.dumps(self._status)
+ with open(self._status_file, "w") as status_file:
+ status_file.write(message)
+ except IOError:
+ pass # Note failure is expected here in some cases
+
+ def request_an_update(self, animation: bool = False, allow_spontaneous: bool = True) -> None:
+ """Start a DSG update
+ Send a command to the DSG protocol to "init" an update.
+
+ Parameters
+ ----------
+ animation:
+ if True, export all EnSight timesteps.
+ allow_spontaneous:
+ if True, allow EnSight to trigger async updates.
+ """
+ # Send an INIT command to trigger a stream of update packets
+ cmd = dynamic_scene_graph_pb2.SceneClientCommand()
+ cmd.command_type = dynamic_scene_graph_pb2.SceneClientCommand.INIT
+ # Allow EnSight push commands, but full scene only for now...
+ cmd.init.allow_spontaneous = allow_spontaneous
+ cmd.init.include_temporal_geometry = animation
+ cmd.init.allow_incremental_updates = False
+ cmd.init.maximum_chunk_size = 1024 * 1024
+ self._dsg_queue.put(cmd) # type:ignore
+
+ def _poll_messages(self) -> None:
+ """Core interface to grab DSG events from gRPC and queue them for processing
+
+        This is run by a thread that monitors the dsg RPC call for update messages,
+        placing them in _message_queue as it finds them. They are picked up by the
+        main thread via _get_next_message()
+ """
+ while not self._shutdown:
+ try:
+ self._message_queue.put(next(self._dsg)) # type:ignore
+ except Exception:
+ self._shutdown = True
+ self.log("DSG connection broken, calling exit")
+ os._exit(0)
+
+ def _get_next_message(self, wait: bool = True) -> Any:
+ """Get the next queued up protobuffer message
+
+ Called by the main thread to get any messages that were pulled in from the
+ dsg stream and placed here by _poll_messages()
+ """
+ try:
+ return self._message_queue.get(block=wait)
+ except queue.Empty:
+ return None
+
+ def _reset(self):
+ self._variables = {}
+ self._groups = {}
+ self._part = Part(self)
+ self._scene_bounds = None
+ self._mesh_block_count = 0 # reset when a new group shows up
+
+ def handle_one_update(self) -> None:
+ """Monitor the DSG stream and handle a single update operation
+
+        Wait until we get the scene update begin message. From there, reset the current
+        scene buckets and then parse all the incoming commands until we get the scene
+        update end command, at which point the generated stage (started in the
+        view command handler) is saved. Note: a part bucket is always available;
+        when a new part update comes in or the scene update ends, the current part is "finished".
+ """
+ # An update starts with a UPDATE_SCENE_BEGIN command
+ cmd = self._get_next_message()
+ while (cmd is not None) and (
+ cmd.command_type != dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_SCENE_BEGIN
+ ):
+ # Look for a begin command
+ cmd = self._get_next_message()
+
+ # Start anew
+ self._reset()
+ self._callback_handler.begin_update()
+
+ # Update our status
+ self._status = dict(
+ status="working", start_time=time.time(), processed_buffers=1, total_buffers=1
+ )
+ self._update_status_file()
+
+ # handle the various commands until UPDATE_SCENE_END
+ cmd = self._get_next_message()
+ while (cmd is not None) and (
+ cmd.command_type != dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_SCENE_END
+ ):
+ self._handle_update_command(cmd)
+ self._status["processed_buffers"] += 1 # type: ignore
+ self._status["total_buffers"] = self._status["processed_buffers"] + self._message_queue.qsize() # type: ignore
+ self._update_status_file(timed=True)
+ cmd = self._get_next_message()
+
+ # Flush the last part
+ self._finish_part()
+
+ self._callback_handler.end_update()
+
+ # Update our status
+ self._status = dict(status="idle", start_time=0.0, processed_buffers=0, total_buffers=0)
+ self._update_status_file()
+
+ def _handle_update_command(self, cmd: dynamic_scene_graph_pb2.SceneUpdateCommand) -> None:
+ """Dispatch out a scene update command to the proper handler
+
+ Given a command object, pull out the correct portion of the protobuffer union and
+ pass it to the appropriate handler.
+
+ Parameters
+ ----------
+ cmd:
+ The command to be dispatched.
+ """
+ name = "Unknown"
+ if cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.DELETE_ID:
+ name = "Delete IDs"
+ elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_PART:
+ name = "Part update"
+ tmp = cmd.update_part
+ self._handle_part(tmp)
+ elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GROUP:
+ name = "Group update"
+ tmp = cmd.update_group
+ self._handle_group(tmp)
+ elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GEOM:
+ name = "Geom update"
+ tmp = cmd.update_geom
+ self._part.update_geom(tmp)
+ elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VARIABLE:
+ name = "Variable update"
+ tmp = cmd.update_variable
+ self._handle_variable(tmp)
+ elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VIEW:
+ name = "View update"
+ tmp = cmd.update_view
+ self._handle_view(tmp)
+ elif cmd.command_type == dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_TEXTURE:
+ name = "Texture update"
+ self.log(f"{name} --------------------------")
+
+ def _finish_part(self) -> None:
+ """Complete the current part
+
+ There is always a part being modified. This method completes the current part, committing
+ it to the handler.
+ """
+ try:
+ self._callback_handler.finalize_part(self.part)
+ except Exception as e:
+ import traceback
+
+ self.warn(f"Error encountered while finalizing part geometry: {str(e)}")
+ traceback_str = "".join(traceback.format_tb(e.__traceback__))
+ logging.debug(f"Traceback: {traceback_str}")
+ self._mesh_block_count += 1
+
+ def _handle_part(self, part_cmd: Any) -> None:
+ """Handle a DSG UPDATE_PART command
+
+ Finish the current part and set up the next part.
+
+ Parameters
+ ----------
+        part_cmd:
+ The command coming from the EnSight stream.
+ """
+ self._finish_part()
+ self._part.reset(part_cmd)
+
+ def _handle_group(self, group: Any) -> None:
+ """Handle a DSG UPDATE_GROUP command
+
+ Parameters
+ ----------
+ group:
+ The command coming from the EnSight stream.
+ """
+ # reset current mesh (part) count for unique "part" naming in USD
+ self._mesh_block_count = 0
+
+ # record the scene bounds in case they are needed later
+ self._groups[group.id] = group
+ bounds = group.attributes.get("ENS_SCENE_BOUNDS", None)
+ if bounds:
+ minmax = list()
+ for v in bounds.split(","):
+ try:
+ minmax.append(float(v))
+ except ValueError:
+ pass
+ if len(minmax) == 6:
+ self._scene_bounds = minmax
+ # callback
+ self._callback_handler.add_group(group.id)
+
+ def _handle_variable(self, var: Any) -> None:
+ """Handle a DSG UPDATE_VARIABLE command
+
+ Save off the EnSight variable DSG command object.
+
+ Parameters
+ ----------
+ var:
+ The command coming from the EnSight stream.
+ """
+ self._variables[var.id] = var
+ self._callback_handler.add_variable(var.id)
+
+ def _handle_view(self, view: Any) -> None:
+ """Handle a DSG UPDATE_VIEW command
+
+ Parameters
+ ----------
+ view:
+ The command coming from the EnSight stream.
+ """
+ self._finish_part()
+ self._scene_bounds = None
+ self._groups[view.id] = view
+ if len(view.timeline) == 2:
+ view.timeline[0] *= self._time_scale
+ view.timeline[1] *= self._time_scale
+ self.cur_timeline = [view.timeline[0], view.timeline[1]]
+ self._callback_handler.add_group(view.id, view=True)
diff --git a/src/ansys/pyensight/core/utils/export.py b/src/ansys/pyensight/core/utils/export.py
index 50c9cecacbb..f092e7b9cac 100644
--- a/src/ansys/pyensight/core/utils/export.py
+++ b/src/ansys/pyensight/core/utils/export.py
@@ -1,584 +1,584 @@
-import glob
-import os
-import tempfile
-from types import ModuleType
-from typing import Any, List, Optional, Union
-import uuid
-
-from PIL import Image
-import numpy
-
-try:
- import ensight
- import enve
-except ImportError:
- from ansys.api.pyensight import ensight_api
-
-
-class Export:
- """Provides the ``ensight.utils.export`` interface.
-
- The methods in this class implement simplified interfaces to common
- image and animation export operations.
-
- This class is instantiated as ``ensight.utils.export`` in EnSight Python
- and as ``Session.ensight.utils.export`` in PyEnSight. The constructor is
- passed the interface, which serves as the ``ensight`` module for either
- case. As a result, the methods can be accessed as ``ensight.utils.export.image()``
- in EnSight Python or ``session.ensight.utils.export.animation()`` in PyEnSight.
-
- Parameters
- ----------
- interface :
- Entity that provides the ``ensight`` namespace. In the case of
- EnSight Python, the ``ensight`` module is passed. In the case
- of PyEnSight, ``Session.ensight`` is passed.
- """
-
- def __init__(self, interface: Union["ensight_api.ensight", "ensight"]):
- self._ensight = interface
-
- def _remote_support_check(self):
- """Determine if ``ensight.utils.export`` exists on the remote system.
-
- Before trying to use this module, use this method to determine if this
- module is available in the EnSight instance.
-
- Raises
- ------
- RuntimeError if the module is not present.
- """
- # if a module, then we are inside EnSight
- if isinstance(self._ensight, ModuleType): # pragma: no cover
- return # pragma: no cover
- try:
- _ = self._ensight._session.cmd("dir(ensight.utils.export)")
- except RuntimeError: # pragma: no cover
- import ansys.pyensight.core # pragma: no cover
-
- raise RuntimeError( # pragma: no cover
- f"Remote EnSight session must have PyEnsight version \
- {ansys.pyensight.core.DEFAULT_ANSYS_VERSION} or higher installed to use this API."
- )
-
- TIFFTAG_IMAGEDESCRIPTION: int = 0x010E
-
- def image(
- self,
- filename: str,
- width: Optional[int] = None,
- height: Optional[int] = None,
- passes: int = 4,
- enhanced: bool = False,
- raytrace: bool = False,
- ) -> None:
- """Render an image of the current EnSight scene.
-
- Parameters
- ----------
- filename : str
- Name of the local file to save the image to.
- width : int, optional
- Width of the image in pixels. The default is ``None``, in which case
- ```ensight.objs.core.WINDOWSIZE[0]`` is used.
- height : int, optional
- Height of the image in pixels. The default is ``None``, in which case
- ``ensight.objs.core.WINDOWSIZE[1]`` is used.
- passes : int, optional
- Number of antialiasing passes. The default is ``4``.
- enhanced : bool, optional
- Whether to save the image to the filename specified in the TIFF format.
- The default is ``False``. The TIFF format includes additional channels
- for the per-pixel object and variable information.
- raytrace : bool, optional
- Whether to render the image with the raytracing engine. The default is ``False``.
-
- Examples
- --------
- >>> s = LocalLauncher().start()
- >>> s.load_data(f"{s.cei_home}/ensight{s.cei_suffix}/data/cube/cube.case")
- >>> s.ensight.utils.export.image("example.png")
-
- """
- self._remote_support_check()
-
- win_size = self._ensight.objs.core.WINDOWSIZE
- if width is None:
- width = win_size[0]
- if height is None:
- height = win_size[1]
-
- if isinstance(self._ensight, ModuleType): # pragma: no cover
- raw_image = self._image_remote(
- width, height, passes, enhanced, raytrace
- ) # pragma: no cover
- else:
- cmd = f"ensight.utils.export._image_remote({width}, {height}, {passes}, "
- cmd += f"{enhanced}, {raytrace})"
- raw_image = self._ensight._session.cmd(cmd)
-
- pil_image = self._dict_to_pil(raw_image)
- if enhanced:
- tiffinfo_dir = {self.TIFFTAG_IMAGEDESCRIPTION: raw_image["metadata"]}
- pil_image[0].save(
- filename,
- save_all=True,
- append_images=[pil_image[1], pil_image[2]],
- tiffinfo=tiffinfo_dir,
- )
- else:
- pil_image[0].save(filename)
-
- def _dict_to_pil(self, data: dict) -> list:
- """Convert the contents of the dictionary into a PIL image.
-
- Parameters
- ----------
- data : dict
- Dictionary representation of the contents of the ``enve`` object.
-
- Returns
- -------
- list
- List of one or three image objects, [RGB {, pick, variable}].
- """
- images = [
- Image.fromarray(self._numpy_from_dict(data["pixeldata"])).transpose(
- Image.FLIP_TOP_BOTTOM
- )
- ]
- if data.get("variabledata", None) and data.get("pickdata", None):
- images.append(
- Image.fromarray(self._numpy_from_dict(data["pickdata"])).transpose(
- Image.FLIP_TOP_BOTTOM
- )
- )
- images.append(
- Image.fromarray(self._numpy_from_dict(data["variabledata"])).transpose(
- Image.FLIP_TOP_BOTTOM
- )
- )
- return images
-
- @staticmethod
- def _numpy_to_dict(array: Any) -> Optional[dict]:
- """Convert a numpy array into a dictionary.
-
- Parameters
- ----------
- array:
- Numpy array or None.
-
- Returns
- -------
- ``None`` or a dictionary that can be serialized.
- """
- if array is None:
- return None
- return dict(shape=array.shape, dtype=array.dtype.str, data=array.tostring())
-
- @staticmethod
- def _numpy_from_dict(obj: Optional[dict]) -> Any:
- """Convert a dictionary into a numpy array.
-
- Parameters
- ----------
- obj:
- Dictionary generated by ``_numpy_to_dict`` or ``None``.
-
- Returns
- -------
- ``None`` or a numpy array.
- """
- if obj is None:
- return None
- return numpy.frombuffer(obj["data"], dtype=obj["dtype"]).reshape(obj["shape"])
-
- def _image_remote(
- self, width: int, height: int, passes: int, enhanced: bool, raytrace: bool
- ) -> dict:
- """EnSight-side implementation.
-
- Parameters
- ----------
- width : int
- Width of the image in pixels.
- height : int
- Height of the image in pixels.
- passes : int
- Number of antialiasing passes.
- enhanced : bool
- Whether to returned the image as a "deep pixel" TIFF image file.
- raytrace :
- Whether to render the image with the raytracing engine.
-
- Returns
- -------
- dict
- Dictionary of the various channels.
- """
- if not raytrace:
- img = ensight.render(x=width, y=height, num_samples=passes, enhanced=enhanced)
- else:
- with tempfile.TemporaryDirectory() as tmpdirname:
- tmpfilename = os.path.join(tmpdirname, str(uuid.uuid1()))
- ensight.file.image_format("png")
- ensight.file.image_file(tmpfilename)
- ensight.file.image_window_size("user_defined")
- ensight.file.image_window_xy(width, height)
- ensight.file.image_rend_offscreen("ON")
- ensight.file.image_numpasses(passes)
- ensight.file.image_stereo("current")
- ensight.file.image_screen_tiling(1, 1)
- ensight.file.raytracer_options("fgoverlay 1 imagedenoise 1 quality 5")
- ensight.file.image_raytrace_it("ON")
- ensight.file.save_image()
- img = enve.image()
- img.load(f"{tmpfilename}.png")
- # get the channels from the enve.image instance
- output = dict(width=width, height=height, metadata=img.metadata)
- # extract the channels from the image
- output["pixeldata"] = self._numpy_to_dict(img.pixeldata)
- output["variabledata"] = self._numpy_to_dict(img.variabledata)
- output["pickdata"] = self._numpy_to_dict(img.pickdata)
- return output
-
- ANIM_TYPE_SOLUTIONTIME: int = 0
- ANIM_TYPE_ANIMATEDTRACES: int = 1
- ANIM_TYPE_FLIPBOOK: int = 2
- ANIM_TYPE_KEYFRAME: int = 3
-
- def animation(
- self,
- filename: str,
- width: Optional[int] = None,
- height: Optional[int] = None,
- passes: int = 4,
- anim_type: int = ANIM_TYPE_SOLUTIONTIME,
- frames: Optional[int] = None,
- starting_frame: int = 0,
- frames_per_second: float = 60.0,
- format_options: Optional[str] = "",
- raytrace: bool = False,
- ) -> None:
- """Generate an MPEG4 animation file.
-
- An MPEG4 animation file can be generated from temporal data, flipbooks, keyframes,
- or animated traces.
-
- Parameters
- ----------
- filename : str
- Name for the MPEG4 file to save to local disk.
- width : int, optional
- Width of the image in pixels. The default is ``None``, in which case
- ``ensight.objs.core.WINDOWSIZE[0]`` is used.
- height : int, optional
- Height of the image in pixels. The default is ``None``, in which case
- ``ensight.objs.core.WINDOWSIZE[1]`` is used.
- passes : int, optional
- Number of antialiasing passes. The default is ``4``.
- anim_type : int, optional
- Type of the animation to render. The default is ``0``, in which case
- ``"ANIM_TYPE_SOLUTIONTIME"`` is used. This table provides descriptions
- by each option number and name:
-
- =========================== ========================================
- Name Animation type
- =========================== ========================================
- 0: ANIM_TYPE_SOLUTIONTIME Animation over all solution times
- 1: ANIM_TYPE_ANIMATEDTRACES Records animated rotations and traces
- 2: ANIM_TYPE_FLIPBOOK Records current flipbook animation
- 3: ANIM_TYPE_KEYFRAME Records current kKeyframe animation
- =========================== ========================================
-
- frames : int, optional
- Number of frames to save. The default is ``None``. The default for
- all but ``ANIM_TYPE_ANIMATEDTRACES`` covers all timesteps, flipbook
- pages, or keyframe steps. If ``ANIM_TYPE_ANIMATEDTRACES`` is specified,
- this keyword is required.
- starting_frame : int, optional
- Keyword for saving a subset of the complete collection of frames.
- The default is ``0``.
- frames_per_second : float, optional
- Number of frames per second for playback in the saved animation.
- The default is ``60.0``.
- format_options : str, optional
- More specific options for the MPEG4 encoder. The default is ``""``.
- raytrace : bool, optional
- Whether to render the image with the raytracing engine. The default is ``False``.
-
- Examples
- --------
- >>> s = LocalLauncher().start()
- >>> data = f"{s.cei_home}/ensight{s.cei_suffix}gui/demos/Crash Queries.ens"
- >>> s.ensight.objs.ensxml_restore_file(data)
- >>> quality = "Quality Best Type 1"
- >>> s.ensight.utils.export.animation("local_file.mp4", format_options=quality)
-
- """
- self._remote_support_check()
-
- win_size = self._ensight.objs.core.WINDOWSIZE
- if width is None:
- width = win_size[0]
- if height is None:
- height = win_size[1]
-
- if format_options is None:
- format_options = "Quality High Type 1"
-
- num_frames: int = 0
- if frames is None:
- if anim_type == self.ANIM_TYPE_SOLUTIONTIME:
- num_timesteps = self._ensight.objs.core.TIMESTEP_LIMITS[1]
- num_frames = num_timesteps - starting_frame
- elif anim_type == self.ANIM_TYPE_ANIMATEDTRACES:
- raise RuntimeError("frames is a required keyword with ANIMATEDTRACES animations")
- elif anim_type == self.ANIM_TYPE_FLIPBOOK:
- num_flip_pages = len(self._ensight.objs.core.FLIPBOOKS[0].PAGE_DETAILS)
- num_frames = num_flip_pages - starting_frame
- elif anim_type == self.ANIM_TYPE_KEYFRAME:
- num_keyframe_pages = self._ensight.objs.core.KEYFRAMEDATA["totalFrames"]
- num_frames = num_keyframe_pages - starting_frame
- else:
- num_frames = frames
-
- if num_frames < 1: # pragma: no cover
- raise RuntimeError( # pragma: no cover
- "No frames selected. Perhaps a static dataset SOLUTIONTIME request \
- or no FLIPBOOK/KEYFRAME defined."
- )
-
- if isinstance(self._ensight, ModuleType): # pragma: no cover
- raw_mpeg4 = self._animation_remote( # pragma: no cover
- width,
- height,
- passes,
- anim_type,
- starting_frame,
- num_frames,
- frames_per_second,
- format_options,
- raytrace,
- )
- else:
- cmd = f"ensight.utils.export._animation_remote({width}, {height}, {passes}, "
- cmd += f"{anim_type}, {starting_frame}, {num_frames}, "
- cmd += f"{frames_per_second}, '{format_options}', {raytrace})"
- raw_mpeg4 = self._ensight._session.cmd(cmd)
-
- with open(filename, "wb") as fp:
- fp.write(raw_mpeg4)
-
- def _animation_remote(
- self,
- width: int,
- height: int,
- passes: int,
- anim_type: int,
- start: int,
- frames: int,
- fps: float,
- options: str,
- raytrace: bool,
- ) -> bytes:
- """EnSight-side implementation.
-
- Parameters
- ----------
- width : int
- Width of the image in pixels.
- height : int
- Height of the image in pixels.
- passes : int
- Number of antialiasing passes.
- anim_type : int
- Type of animation to save.
- start : int
- First frame number to save.
- frames : int
- Number of frames to save.
- fps : float
- Output framerate.
- options : str
- MPEG4 configuration options.
- raytrace : bool
- Whether to render the image with the raytracing engine.
-
- Returns
- -------
- bytes
- MPEG4 stream in bytes.
- """
-
- with tempfile.TemporaryDirectory() as tmpdirname:
- tmpfilename = os.path.join(tmpdirname, str(uuid.uuid1()) + ".mp4")
- self._ensight.file.animation_rend_offscreen("ON")
- self._ensight.file.animation_screen_tiling(1, 1)
- self._ensight.file.animation_format("mpeg4")
- if options:
- self._ensight.file.animation_format_options(options)
- self._ensight.file.animation_frame_rate(fps)
- self._ensight.file.animation_rend_offscreen("ON")
- self._ensight.file.animation_numpasses(passes)
- self._ensight.file.animation_stereo("mono")
- self._ensight.file.animation_screen_tiling(1, 1)
- self._ensight.file.animation_file(tmpfilename)
- self._ensight.file.animation_window_size("user_defined")
- self._ensight.file.animation_window_xy(width, height)
- self._ensight.file.animation_frames(frames)
- self._ensight.file.animation_start_number(start)
- self._ensight.file.animation_multiple_images("OFF")
- if raytrace:
- self._ensight.file.animation_raytrace_it("ON")
- else:
- self._ensight.file.animation_raytrace_it("OFF")
- self._ensight.file.animation_raytrace_ext("OFF")
-
- self._ensight.file.animation_play_time("OFF")
- self._ensight.file.animation_play_flipbook("OFF")
- self._ensight.file.animation_play_keyframe("OFF")
-
- self._ensight.file.animation_reset_time("OFF")
- self._ensight.file.animation_reset_traces("OFF")
- self._ensight.file.animation_reset_flipbook("OFF")
- self._ensight.file.animation_reset_keyframe("OFF")
-
- if anim_type == self.ANIM_TYPE_SOLUTIONTIME:
- # playing over time
- self._ensight.file.animation_play_time("ON")
- self._ensight.file.animation_reset_time("ON")
- elif anim_type == self.ANIM_TYPE_ANIMATEDTRACES:
- # recording particle traces/etc
- self._ensight.file.animation_reset_traces("ON")
- elif anim_type == self.ANIM_TYPE_KEYFRAME:
- self._ensight.file.animation_reset_keyframe("ON")
- self._ensight.file.animation_play_keyframe("ON")
- elif anim_type == self.ANIM_TYPE_FLIPBOOK:
- self._ensight.file.animation_play_flipbook("ON")
- self._ensight.file.animation_reset_flipbook("ON")
-
- self._ensight.file.save_animation()
-
- with open(tmpfilename, "rb") as fp:
- mp4_data = fp.read()
-
- return mp4_data
-
- GEOM_EXPORT_GLTF = "gltf2"
- GEOM_EXPORT_AVZ = "avz"
- GEOM_EXPORT_PLY = "ply"
- GEOM_EXPORT_STL = "stl"
-
- extension_map = {
- GEOM_EXPORT_GLTF: ".glb",
- GEOM_EXPORT_AVZ: ".avz",
- GEOM_EXPORT_PLY: ".ply",
- GEOM_EXPORT_STL: ".stl",
- }
-
- def _geometry_remote( # pragma: no cover
- self, format: str, starting_timestep: int, frames: int, delta_timestep: int
- ) -> List[bytes]:
- """EnSight-side implementation.
-
- Parameters
- ----------
- format : str
- The format to export
- starting_timestep: int
- The first timestep to export. If None, defaults to the current timestep
- frames: int
- Number of timesteps to save. If None, defaults from the current timestep to the last
- delta_timestep: int
- The delta timestep to use when exporting
-
- Returns
- -------
- bytes
- Geometry export in bytes
- """
- rawdata = None
- extension = self.extension_map.get(format)
- rawdata_list = []
- if not extension:
- raise RuntimeError("The geometry export format provided is not supported.")
- with tempfile.TemporaryDirectory() as tmpdirname:
- self._ensight.part.select_all()
- self._ensight.savegeom.format(format)
- self._ensight.savegeom.begin_step(starting_timestep)
- # frames is 1-indexed, so I need to decrease of 1
- self._ensight.savegeom.end_step(starting_timestep + frames - 1)
- self._ensight.savegeom.step_by(delta_timestep)
- tmpfilename = os.path.join(tmpdirname, str(uuid.uuid1()))
- self._ensight.savegeom.save_geometric_entities(tmpfilename)
- files = glob.glob(f"{tmpfilename}*{extension}")
- for export_file in files:
- with open(export_file, "rb") as tmpfile:
- rawdata = tmpfile.read()
- rawdata_list.append(rawdata)
- return rawdata_list
-
- def geometry(
- self,
- filename: str,
- format: str = GEOM_EXPORT_GLTF,
- starting_timestep: Optional[int] = None,
- frames: Optional[int] = 1,
- delta_timestep: Optional[int] = None,
- ) -> None:
- """Export a geometry file.
-
- Parameters
- ----------
- filename: str
- The location where to export the geometry
- format : str
- The format to export
- starting_timestep: int
- The first timestep to export. If None, defaults to the current timestep
- frames: int
- Number of timesteps to save. If None, defaults from the current timestep to the last
- delta_timestep: int
- The delta timestep to use when exporting
-
- Examples
- --------
- >>> s = LocalLauncher().start()
- >>> data = f"{s.cei_home}/ensight{s.cei_suffix}gui/demos/Crash Queries.ens"
- >>> s.ensight.objs.ensxml_restore_file(data)
- >>> s.ensight.utils.export.geometry("local_file.glb", format=s.ensight.utils.export.GEOM_EXPORT_GLTF)
- """
- if starting_timestep is None:
- starting_timestep = int(self._ensight.objs.core.TIMESTEP)
- if frames is None or frames == -1:
- # Timesteps are 0-indexed so frames need to be increased of 1
- frames = int(self._ensight.objs.core.TIMESTEP_LIMITS[1]) + 1
- if not delta_timestep:
- delta_timestep = 1
- self._remote_support_check()
- raw_data_list = None
- if isinstance(self._ensight, ModuleType): # pragma: no cover
- raw_data_list = self._geometry_remote( # pragma: no cover
- format,
- starting_timestep=starting_timestep,
- frames=frames,
- delta_timestep=delta_timestep,
- )
- else:
- self._ensight._session.ensight_version_check("2024 R2")
- cmd = f"ensight.utils.export._geometry_remote('{format}', {starting_timestep}, {frames}, {delta_timestep})"
- raw_data_list = self._ensight._session.cmd(cmd)
- if raw_data_list: # pragma: no cover
- if len(raw_data_list) == 1:
- with open(filename, "wb") as fp:
- fp.write(raw_data_list[0])
- else:
- for idx, raw_data in enumerate(raw_data_list):
- filename_base, extension = os.path.splitext(filename)
- _filename = f"{filename_base}{str(idx).zfill(3)}{extension}"
- with open(_filename, "wb") as fp:
- fp.write(raw_data)
- else: # pragma: no cover
- raise IOError("Export was not successful") # pragma: no cover
+import glob
+import os
+import tempfile
+from types import ModuleType
+from typing import Any, List, Optional, Union
+import uuid
+
+from PIL import Image
+import numpy
+
+try:
+ import ensight
+ import enve
+except ImportError:
+ from ansys.api.pyensight import ensight_api
+
+
+class Export:
+ """Provides the ``ensight.utils.export`` interface.
+
+ The methods in this class implement simplified interfaces to common
+ image and animation export operations.
+
+ This class is instantiated as ``ensight.utils.export`` in EnSight Python
+ and as ``Session.ensight.utils.export`` in PyEnSight. The constructor is
+ passed the interface, which serves as the ``ensight`` module for either
+ case. As a result, the methods can be accessed as ``ensight.utils.export.image()``
+ in EnSight Python or ``session.ensight.utils.export.animation()`` in PyEnSight.
+
+ Parameters
+ ----------
+ interface :
+ Entity that provides the ``ensight`` namespace. In the case of
+ EnSight Python, the ``ensight`` module is passed. In the case
+ of PyEnSight, ``Session.ensight`` is passed.
+ """
+
+ def __init__(self, interface: Union["ensight_api.ensight", "ensight"]):
+ self._ensight = interface
+
+ def _remote_support_check(self):
+ """Determine if ``ensight.utils.export`` exists on the remote system.
+
+ Before trying to use this module, use this method to determine if this
+ module is available in the EnSight instance.
+
+ Raises
+ ------
+ RuntimeError if the module is not present.
+ """
+ # if a module, then we are inside EnSight
+ if isinstance(self._ensight, ModuleType): # pragma: no cover
+ return # pragma: no cover
+ try:
+ _ = self._ensight._session.cmd("dir(ensight.utils.export)")
+ except RuntimeError: # pragma: no cover
+ import ansys.pyensight.core # pragma: no cover
+
+ raise RuntimeError( # pragma: no cover
+ f"Remote EnSight session must have PyEnsight version \
+ {ansys.pyensight.core.DEFAULT_ANSYS_VERSION} or higher installed to use this API."
+ )
+
+ TIFFTAG_IMAGEDESCRIPTION: int = 0x010E
+
+ def image(
+ self,
+ filename: str,
+ width: Optional[int] = None,
+ height: Optional[int] = None,
+ passes: int = 4,
+ enhanced: bool = False,
+ raytrace: bool = False,
+ ) -> None:
+ """Render an image of the current EnSight scene.
+
+ Parameters
+ ----------
+ filename : str
+ Name of the local file to save the image to.
+ width : int, optional
+ Width of the image in pixels. The default is ``None``, in which case
+            ``ensight.objs.core.WINDOWSIZE[0]`` is used.
+ height : int, optional
+ Height of the image in pixels. The default is ``None``, in which case
+ ``ensight.objs.core.WINDOWSIZE[1]`` is used.
+ passes : int, optional
+ Number of antialiasing passes. The default is ``4``.
+ enhanced : bool, optional
+            Whether to save the image to the specified filename in TIFF format.
+ The default is ``False``. The TIFF format includes additional channels
+ for the per-pixel object and variable information.
+ raytrace : bool, optional
+ Whether to render the image with the raytracing engine. The default is ``False``.
+
+ Examples
+ --------
+ >>> s = LocalLauncher().start()
+ >>> s.load_data(f"{s.cei_home}/ensight{s.cei_suffix}/data/cube/cube.case")
+ >>> s.ensight.utils.export.image("example.png")
+
+ """
+ self._remote_support_check()
+
+ win_size = self._ensight.objs.core.WINDOWSIZE
+ if width is None:
+ width = win_size[0]
+ if height is None:
+ height = win_size[1]
+
+ if isinstance(self._ensight, ModuleType): # pragma: no cover
+ raw_image = self._image_remote(
+ width, height, passes, enhanced, raytrace
+ ) # pragma: no cover
+ else:
+ cmd = f"ensight.utils.export._image_remote({width}, {height}, {passes}, "
+ cmd += f"{enhanced}, {raytrace})"
+ raw_image = self._ensight._session.cmd(cmd)
+
+ pil_image = self._dict_to_pil(raw_image)
+ if enhanced:
+ tiffinfo_dir = {self.TIFFTAG_IMAGEDESCRIPTION: raw_image["metadata"]}
+ pil_image[0].save(
+ filename,
+ save_all=True,
+ append_images=[pil_image[1], pil_image[2]],
+ tiffinfo=tiffinfo_dir,
+ )
+ else:
+ pil_image[0].save(filename)
+
+ def _dict_to_pil(self, data: dict) -> list:
+ """Convert the contents of the dictionary into a PIL image.
+
+ Parameters
+ ----------
+ data : dict
+ Dictionary representation of the contents of the ``enve`` object.
+
+ Returns
+ -------
+ list
+ List of one or three image objects, [RGB {, pick, variable}].
+ """
+ images = [
+ Image.fromarray(self._numpy_from_dict(data["pixeldata"])).transpose(
+ Image.FLIP_TOP_BOTTOM
+ )
+ ]
+ if data.get("variabledata", None) and data.get("pickdata", None):
+ images.append(
+ Image.fromarray(self._numpy_from_dict(data["pickdata"])).transpose(
+ Image.FLIP_TOP_BOTTOM
+ )
+ )
+ images.append(
+ Image.fromarray(self._numpy_from_dict(data["variabledata"])).transpose(
+ Image.FLIP_TOP_BOTTOM
+ )
+ )
+ return images
+
+ @staticmethod
+ def _numpy_to_dict(array: Any) -> Optional[dict]:
+ """Convert a numpy array into a dictionary.
+
+ Parameters
+ ----------
+ array:
+ Numpy array or None.
+
+ Returns
+ -------
+ ``None`` or a dictionary that can be serialized.
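+
+        Round-trip sketch (assumes ``a`` is a numpy array)::
+
+            d = Export._numpy_to_dict(a)
+            b = Export._numpy_from_dict(d)  # reconstructs an equal array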
+ """
+ if array is None:
+ return None
+        # tobytes() is the non-deprecated numpy spelling of tostring()
+        return dict(shape=array.shape, dtype=array.dtype.str, data=array.tobytes())
+
+ @staticmethod
+ def _numpy_from_dict(obj: Optional[dict]) -> Any:
+ """Convert a dictionary into a numpy array.
+
+ Parameters
+ ----------
+ obj:
+ Dictionary generated by ``_numpy_to_dict`` or ``None``.
+
+ Returns
+ -------
+ ``None`` or a numpy array.
+ """
+ if obj is None:
+ return None
+ return numpy.frombuffer(obj["data"], dtype=obj["dtype"]).reshape(obj["shape"])
+
+ def _image_remote(
+ self, width: int, height: int, passes: int, enhanced: bool, raytrace: bool
+ ) -> dict:
+ """EnSight-side implementation.
+
+ Parameters
+ ----------
+ width : int
+ Width of the image in pixels.
+ height : int
+ Height of the image in pixels.
+ passes : int
+ Number of antialiasing passes.
+ enhanced : bool
+            Whether to return the image as a "deep pixel" TIFF image file.
+ raytrace :
+ Whether to render the image with the raytracing engine.
+
+ Returns
+ -------
+ dict
+ Dictionary of the various channels.
+ """
+ if not raytrace:
+ img = ensight.render(x=width, y=height, num_samples=passes, enhanced=enhanced)
+ else:
+ with tempfile.TemporaryDirectory() as tmpdirname:
+ tmpfilename = os.path.join(tmpdirname, str(uuid.uuid1()))
+ ensight.file.image_format("png")
+ ensight.file.image_file(tmpfilename)
+ ensight.file.image_window_size("user_defined")
+ ensight.file.image_window_xy(width, height)
+ ensight.file.image_rend_offscreen("ON")
+ ensight.file.image_numpasses(passes)
+ ensight.file.image_stereo("current")
+ ensight.file.image_screen_tiling(1, 1)
+ ensight.file.raytracer_options("fgoverlay 1 imagedenoise 1 quality 5")
+ ensight.file.image_raytrace_it("ON")
+ ensight.file.save_image()
+ img = enve.image()
+ img.load(f"{tmpfilename}.png")
+ # get the channels from the enve.image instance
+ output = dict(width=width, height=height, metadata=img.metadata)
+ # extract the channels from the image
+ output["pixeldata"] = self._numpy_to_dict(img.pixeldata)
+ output["variabledata"] = self._numpy_to_dict(img.variabledata)
+ output["pickdata"] = self._numpy_to_dict(img.pickdata)
+ return output
+
+ ANIM_TYPE_SOLUTIONTIME: int = 0
+ ANIM_TYPE_ANIMATEDTRACES: int = 1
+ ANIM_TYPE_FLIPBOOK: int = 2
+ ANIM_TYPE_KEYFRAME: int = 3
+
+ def animation(
+ self,
+ filename: str,
+ width: Optional[int] = None,
+ height: Optional[int] = None,
+ passes: int = 4,
+ anim_type: int = ANIM_TYPE_SOLUTIONTIME,
+ frames: Optional[int] = None,
+ starting_frame: int = 0,
+ frames_per_second: float = 60.0,
+ format_options: Optional[str] = "",
+ raytrace: bool = False,
+ ) -> None:
+ """Generate an MPEG4 animation file.
+
+ An MPEG4 animation file can be generated from temporal data, flipbooks, keyframes,
+ or animated traces.
+
+ Parameters
+ ----------
+ filename : str
+ Name for the MPEG4 file to save to local disk.
+ width : int, optional
+ Width of the image in pixels. The default is ``None``, in which case
+ ``ensight.objs.core.WINDOWSIZE[0]`` is used.
+ height : int, optional
+ Height of the image in pixels. The default is ``None``, in which case
+ ``ensight.objs.core.WINDOWSIZE[1]`` is used.
+ passes : int, optional
+ Number of antialiasing passes. The default is ``4``.
+ anim_type : int, optional
+ Type of the animation to render. The default is ``0``, in which case
+ ``"ANIM_TYPE_SOLUTIONTIME"`` is used. This table provides descriptions
+            for each option number and name:
+
+ =========================== ========================================
+ Name Animation type
+ =========================== ========================================
+ 0: ANIM_TYPE_SOLUTIONTIME Animation over all solution times
+ 1: ANIM_TYPE_ANIMATEDTRACES Records animated rotations and traces
+ 2: ANIM_TYPE_FLIPBOOK Records current flipbook animation
+        3: ANIM_TYPE_KEYFRAME       Records current keyframe animation
+ =========================== ========================================
+
+ frames : int, optional
+ Number of frames to save. The default is ``None``. The default for
+ all but ``ANIM_TYPE_ANIMATEDTRACES`` covers all timesteps, flipbook
+ pages, or keyframe steps. If ``ANIM_TYPE_ANIMATEDTRACES`` is specified,
+ this keyword is required.
+ starting_frame : int, optional
+ Keyword for saving a subset of the complete collection of frames.
+ The default is ``0``.
+ frames_per_second : float, optional
+ Number of frames per second for playback in the saved animation.
+ The default is ``60.0``.
+ format_options : str, optional
+ More specific options for the MPEG4 encoder. The default is ``""``.
+ raytrace : bool, optional
+ Whether to render the image with the raytracing engine. The default is ``False``.
+
+ Examples
+ --------
+ >>> s = LocalLauncher().start()
+ >>> data = f"{s.cei_home}/ensight{s.cei_suffix}gui/demos/Crash Queries.ens"
+ >>> s.ensight.objs.ensxml_restore_file(data)
+ >>> quality = "Quality Best Type 1"
+ >>> s.ensight.utils.export.animation("local_file.mp4", format_options=quality)
+
+ """
+ self._remote_support_check()
+
+ win_size = self._ensight.objs.core.WINDOWSIZE
+ if width is None:
+ width = win_size[0]
+ if height is None:
+ height = win_size[1]
+
+ if format_options is None:
+ format_options = "Quality High Type 1"
+
+ num_frames: int = 0
+ if frames is None:
+ if anim_type == self.ANIM_TYPE_SOLUTIONTIME:
+ num_timesteps = self._ensight.objs.core.TIMESTEP_LIMITS[1]
+ num_frames = num_timesteps - starting_frame
+ elif anim_type == self.ANIM_TYPE_ANIMATEDTRACES:
+ raise RuntimeError("frames is a required keyword with ANIMATEDTRACES animations")
+ elif anim_type == self.ANIM_TYPE_FLIPBOOK:
+ num_flip_pages = len(self._ensight.objs.core.FLIPBOOKS[0].PAGE_DETAILS)
+ num_frames = num_flip_pages - starting_frame
+ elif anim_type == self.ANIM_TYPE_KEYFRAME:
+ num_keyframe_pages = self._ensight.objs.core.KEYFRAMEDATA["totalFrames"]
+ num_frames = num_keyframe_pages - starting_frame
+ else:
+ num_frames = frames
+
+ if num_frames < 1: # pragma: no cover
+ raise RuntimeError( # pragma: no cover
+ "No frames selected. Perhaps a static dataset SOLUTIONTIME request \
+ or no FLIPBOOK/KEYFRAME defined."
+ )
+
+ if isinstance(self._ensight, ModuleType): # pragma: no cover
+ raw_mpeg4 = self._animation_remote( # pragma: no cover
+ width,
+ height,
+ passes,
+ anim_type,
+ starting_frame,
+ num_frames,
+ frames_per_second,
+ format_options,
+ raytrace,
+ )
+ else:
+ cmd = f"ensight.utils.export._animation_remote({width}, {height}, {passes}, "
+ cmd += f"{anim_type}, {starting_frame}, {num_frames}, "
+ cmd += f"{frames_per_second}, '{format_options}', {raytrace})"
+ raw_mpeg4 = self._ensight._session.cmd(cmd)
+
+ with open(filename, "wb") as fp:
+ fp.write(raw_mpeg4)
+
+ def _animation_remote(
+ self,
+ width: int,
+ height: int,
+ passes: int,
+ anim_type: int,
+ start: int,
+ frames: int,
+ fps: float,
+ options: str,
+ raytrace: bool,
+ ) -> bytes:
+ """EnSight-side implementation.
+
+ Parameters
+ ----------
+ width : int
+ Width of the image in pixels.
+ height : int
+ Height of the image in pixels.
+ passes : int
+ Number of antialiasing passes.
+ anim_type : int
+ Type of animation to save.
+ start : int
+ First frame number to save.
+ frames : int
+ Number of frames to save.
+ fps : float
+ Output framerate.
+ options : str
+ MPEG4 configuration options.
+ raytrace : bool
+ Whether to render the image with the raytracing engine.
+
+ Returns
+ -------
+ bytes
+ MPEG4 stream in bytes.
+ """
+
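+        # Configure the EnSight animation exporter: offscreen MPEG4 rendering at a
+        # user-defined window size; the playback/reset mode is selected further below.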
+ with tempfile.TemporaryDirectory() as tmpdirname:
+ tmpfilename = os.path.join(tmpdirname, str(uuid.uuid1()) + ".mp4")
+ self._ensight.file.animation_rend_offscreen("ON")
+ self._ensight.file.animation_screen_tiling(1, 1)
+ self._ensight.file.animation_format("mpeg4")
+ if options:
+ self._ensight.file.animation_format_options(options)
+ self._ensight.file.animation_frame_rate(fps)
+ self._ensight.file.animation_rend_offscreen("ON")
+ self._ensight.file.animation_numpasses(passes)
+ self._ensight.file.animation_stereo("mono")
+ self._ensight.file.animation_screen_tiling(1, 1)
+ self._ensight.file.animation_file(tmpfilename)
+ self._ensight.file.animation_window_size("user_defined")
+ self._ensight.file.animation_window_xy(width, height)
+ self._ensight.file.animation_frames(frames)
+ self._ensight.file.animation_start_number(start)
+ self._ensight.file.animation_multiple_images("OFF")
+ if raytrace:
+ self._ensight.file.animation_raytrace_it("ON")
+ else:
+ self._ensight.file.animation_raytrace_it("OFF")
+ self._ensight.file.animation_raytrace_ext("OFF")
+
+ self._ensight.file.animation_play_time("OFF")
+ self._ensight.file.animation_play_flipbook("OFF")
+ self._ensight.file.animation_play_keyframe("OFF")
+
+ self._ensight.file.animation_reset_time("OFF")
+ self._ensight.file.animation_reset_traces("OFF")
+ self._ensight.file.animation_reset_flipbook("OFF")
+ self._ensight.file.animation_reset_keyframe("OFF")
+
+ if anim_type == self.ANIM_TYPE_SOLUTIONTIME:
+ # playing over time
+ self._ensight.file.animation_play_time("ON")
+ self._ensight.file.animation_reset_time("ON")
+ elif anim_type == self.ANIM_TYPE_ANIMATEDTRACES:
+ # recording particle traces/etc
+ self._ensight.file.animation_reset_traces("ON")
+ elif anim_type == self.ANIM_TYPE_KEYFRAME:
+ self._ensight.file.animation_reset_keyframe("ON")
+ self._ensight.file.animation_play_keyframe("ON")
+ elif anim_type == self.ANIM_TYPE_FLIPBOOK:
+ self._ensight.file.animation_play_flipbook("ON")
+ self._ensight.file.animation_reset_flipbook("ON")
+
+ self._ensight.file.save_animation()
+
+ with open(tmpfilename, "rb") as fp:
+ mp4_data = fp.read()
+
+ return mp4_data
+
+ GEOM_EXPORT_GLTF = "gltf2"
+ GEOM_EXPORT_AVZ = "avz"
+ GEOM_EXPORT_PLY = "ply"
+ GEOM_EXPORT_STL = "stl"
+
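+    # Map each geometry export format to its on-disk file extension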
+ extension_map = {
+ GEOM_EXPORT_GLTF: ".glb",
+ GEOM_EXPORT_AVZ: ".avz",
+ GEOM_EXPORT_PLY: ".ply",
+ GEOM_EXPORT_STL: ".stl",
+ }
+
+ def _geometry_remote( # pragma: no cover
+ self, format: str, starting_timestep: int, frames: int, delta_timestep: int
+ ) -> List[bytes]:
+ """EnSight-side implementation.
+
+ Parameters
+ ----------
+        format : str
+            Format to export.
+        starting_timestep : int
+            First timestep to export. If None, the current timestep is used.
+        frames : int
+            Number of timesteps to save. If None, the export runs from the
+            current timestep to the last.
+        delta_timestep : int
+            Delta between exported timesteps.
+
+        Returns
+        -------
+        List[bytes]
+            List of geometry exports in bytes, one per exported file.
+ """
+ rawdata = None
+ extension = self.extension_map.get(format)
+ rawdata_list = []
+ if not extension:
+ raise RuntimeError("The geometry export format provided is not supported.")
+ with tempfile.TemporaryDirectory() as tmpdirname:
+ self._ensight.part.select_all()
+ self._ensight.savegeom.format(format)
+ self._ensight.savegeom.begin_step(starting_timestep)
+            # frames is 1-based, so subtract 1 to get the inclusive end step
+ self._ensight.savegeom.end_step(starting_timestep + frames - 1)
+ self._ensight.savegeom.step_by(delta_timestep)
+ tmpfilename = os.path.join(tmpdirname, str(uuid.uuid1()))
+ self._ensight.savegeom.save_geometric_entities(tmpfilename)
+ files = glob.glob(f"{tmpfilename}*{extension}")
+ for export_file in files:
+ with open(export_file, "rb") as tmpfile:
+ rawdata = tmpfile.read()
+ rawdata_list.append(rawdata)
+ return rawdata_list
+
+ def geometry(
+ self,
+ filename: str,
+ format: str = GEOM_EXPORT_GLTF,
+ starting_timestep: Optional[int] = None,
+ frames: Optional[int] = 1,
+ delta_timestep: Optional[int] = None,
+ ) -> None:
+ """Export a geometry file.
+
+ Parameters
+ ----------
+        filename : str
+            Location to export the geometry to.
+        format : str
+            Format to export.
+        starting_timestep : int
+            First timestep to export. If None, the current timestep is used.
+        frames : int
+            Number of timesteps to save. If None, the export runs from the
+            current timestep to the last.
+        delta_timestep : int
+            Delta between exported timesteps.
+
+ Examples
+ --------
+ >>> s = LocalLauncher().start()
+ >>> data = f"{s.cei_home}/ensight{s.cei_suffix}gui/demos/Crash Queries.ens"
+ >>> s.ensight.objs.ensxml_restore_file(data)
+ >>> s.ensight.utils.export.geometry("local_file.glb", format=s.ensight.utils.export.GEOM_EXPORT_GLTF)
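+
+        Exporting more than one timestep writes one file per step, with a
+        zero-padded index inserted before the extension (``local_file000.glb``,
+        ``local_file001.glb``, ...); the timestep values here are illustrative:
+
+        >>> s.ensight.utils.export.geometry("local_file.glb", starting_timestep=0, frames=2)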
+ """
+ if starting_timestep is None:
+ starting_timestep = int(self._ensight.objs.core.TIMESTEP)
+ if frames is None or frames == -1:
+            # Timesteps are 0-indexed, so the frame count must be incremented by 1
+ frames = int(self._ensight.objs.core.TIMESTEP_LIMITS[1]) + 1
+ if not delta_timestep:
+ delta_timestep = 1
+ self._remote_support_check()
+ raw_data_list = None
+ if isinstance(self._ensight, ModuleType): # pragma: no cover
+ raw_data_list = self._geometry_remote( # pragma: no cover
+ format,
+ starting_timestep=starting_timestep,
+ frames=frames,
+ delta_timestep=delta_timestep,
+ )
+ else:
+ self._ensight._session.ensight_version_check("2024 R2")
+ cmd = f"ensight.utils.export._geometry_remote('{format}', {starting_timestep}, {frames}, {delta_timestep})"
+ raw_data_list = self._ensight._session.cmd(cmd)
+ if raw_data_list: # pragma: no cover
+ if len(raw_data_list) == 1:
+ with open(filename, "wb") as fp:
+ fp.write(raw_data_list[0])
+ else:
+ for idx, raw_data in enumerate(raw_data_list):
+ filename_base, extension = os.path.splitext(filename)
+ _filename = f"{filename_base}{str(idx).zfill(3)}{extension}"
+ with open(_filename, "wb") as fp:
+ fp.write(raw_data)
+ else: # pragma: no cover
+ raise IOError("Export was not successful") # pragma: no cover
diff --git a/src/ansys/pyensight/core/utils/omniverse.py b/src/ansys/pyensight/core/utils/omniverse.py
index 722152434f9..f2f5f663d74 100644
--- a/src/ansys/pyensight/core/utils/omniverse.py
+++ b/src/ansys/pyensight/core/utils/omniverse.py
@@ -1,362 +1,362 @@
-import glob
-import json
-import os
-import platform
-import subprocess
-import sys
-import tempfile
-from types import ModuleType
-from typing import TYPE_CHECKING, Optional, Union
-import uuid
-
-import psutil
-
-if TYPE_CHECKING:
- try:
- import ensight
- except ImportError:
- from ansys.api.pyensight import ensight_api
-
-
-class Omniverse:
- """Provides the ``ensight.utils.omniverse`` interface.
-
- The omniverse class methods provide an interface between an EnSight session
- and an Omniverse instance. See :ref:`omniverse_info` for additional details.
-
- Parameters
- ----------
- interface: Union["ensight_api.ensight", "ensight"]
- Entity that provides the ``ensight`` namespace. In the case of
- EnSight Python, the ``ensight`` module is passed. In the case
- of PyEnSight, ``Session.ensight`` is passed.
-
- Notes
- -----
- This interface is only available when using pyensight (they do not work with
- the ensight Python interpreter) and the module must be used in an interpreter
- that includes the Omniverse Python modules (e.g. omni and pxr). Only a single
- Omniverse connection can be established within a single pyensight session.
-
- Examples
- --------
-
- >>> from ansys.pyensight.core import LocalLauncher
- >>> session = LocalLauncher().start()
- >>> ov = session.ensight.utils.omniverse
- >>> ov.create_connection(r"D:\Omniverse\Example")
- >>> ov.update()
- >>> ov.close_connection()
-
- """
-
- def __init__(self, interface: Union["ensight_api.ensight", "ensight"]):
- self._ensight = interface
- self._server_pid: Optional[int] = None
- self._interpreter: str = ""
- self._status_filename: str = ""
-
- @staticmethod
- def find_kit_filename(fallback_directory: Optional[str] = None) -> Optional[str]:
- """
- Use a combination of the current omniverse application and the information
- in the local .nvidia-omniverse/config/omniverse.toml file to come up with
- the pathname of a kit executable suitable for hosting another copy of the
- ansys.geometry.server kit.
-
- Returns
- -------
- Optional[str]
- The pathname of a kit executable or None
-
- """
- # parse the toml config file for the location of the installed apps
- try:
- import tomllib
- except ModuleNotFoundError:
- import pip._vendor.tomli as tomllib
-
- homedir = os.path.expanduser("~")
- ov_config = os.path.join(homedir, ".nvidia-omniverse", "config", "omniverse.toml")
- if not os.path.exists(ov_config):
- return None
- # read the Omniverse configuration toml file
- with open(ov_config, "r") as ov_file:
- ov_data = ov_file.read()
- config = tomllib.loads(ov_data)
- appdir = config.get("paths", {}).get("library_root", fallback_directory)
-
- # If we are running inside an Omniverse app, use that information
- try:
- import omni.kit.app
-
- # get the current application
- app = omni.kit.app.get_app()
- app_name = app.get_app_filename().split(".")[-1]
- app_version = app.get_app_version().split("-")[0]
- # and where it is installed
- appdir = os.path.join(appdir, f"{app_name}-{app_version}")
- except ModuleNotFoundError:
- # Names should be like: "C:\\Users\\foo\\AppData\\Local\\ov\\pkg\\create-2023.2.3\\launcher.toml"
- target = None
- target_version = None
- for d in glob.glob(os.path.join(appdir, "*", "launcher.toml")):
- test_dir = os.path.dirname(d)
- # the name will be something like "create-2023.2.3"
- name = os.path.basename(test_dir).split("-")
- if len(name) != 2:
- continue
- if name[0] not in ("kit", "create", "view"):
- continue
- if (target_version is None) or (name[1] > target_version):
- target = test_dir
- target_version = name[1]
- if target is None:
- return None
- appdir = target
-
- # Windows: 'kit.bat' in '.' or 'kit' followed by 'kit.exe' in '.' or 'kit'
- # Linux: 'kit.sh' in '.' or 'kit' followed by 'kit' in '.' or 'kit'
- exe_names = ["kit.sh", "kit"]
- if sys.platform.startswith("win"):
- exe_names = ["kit.bat", "kit.exe"]
-
- # look in 4 places...
- for dir_name in [appdir, os.path.join(appdir, "kit")]:
- for exe_name in exe_names:
- if os.path.exists(os.path.join(dir_name, exe_name)):
- return os.path.join(dir_name, exe_name)
-
- return None
-
- def _check_modules(self) -> None:
- """Verify that the Python interpreter is correct
-
- Check for module dependencies. If not present, raise an exception.
-
- Raises
- ------
- RuntimeError
- if the necessary modules are missing.
-
- """
- # One time check for this
- if len(self._interpreter):
- return
-
- # if a module, then we are inside EnSight
- if isinstance(self._ensight, ModuleType): # pragma: no cover
- # in this case, we can just use cpython
- import ceiversion
- import enve
-
- cei_home = os.environ.get("CEI_HOME", enve.home())
- self._interpreter = os.path.join(cei_home, "bin", f"cpython{ceiversion.apex_suffix}")
- if platform.system() == "Windows":
- self._interpreter += ".bat"
- return
- # Using the python interpreter running this code
- self._interpreter = sys.executable
-
- # in the future, these will be part of the pyensight wheel
- # dependencies, but for now we include this check.
- try:
- import pxr # noqa: F401
- import pygltflib # noqa: F401
- except Exception:
- raise RuntimeError("Unable to detect omniverse dependencies: usd-core, pygltflib.")
-
- def is_running_omniverse(self) -> bool:
- """Check that an Omniverse connection is active
-
- Returns
- -------
- bool
- True if the connection is active, False otherwise.
- """
- if self._server_pid is None:
- return False
- if psutil.pid_exists(self._server_pid):
- return True
- self._server_pid = None
- return False
-
- def create_connection(
- self,
- omniverse_path: str,
- include_camera: bool = False,
- normalize_geometry: bool = False,
- temporal: bool = False,
- live: bool = True,
- debug_filename: str = "",
- time_scale: float = 1.0,
- options: dict = {},
- ) -> None:
- """Ensure that an EnSight dsg -> omniverse server is running
-
- Connect the current EnSight session to an Omniverse server.
- This is done by launching a new service that makes a dynamic scene graph
- connection to the EnSight session and pushes updates to the Omniverse server.
- The initial EnSight scene will be pushed after the connection is established.
-
- Parameters
- ----------
- omniverse_path : str
- The directory name where the USD files should be saved. For example:
- "C:/Users/test/OV/usdfiles"
- include_camera : bool
- If True, apply the EnSight camera to the Omniverse scene. This option
- should be used if the target viewer is in AR/VR mode. Defaults to False.
- normalize_geometry : bool
- Omniverse units are in meters. If the source dataset is not in the correct
- unit system or is just too large/small, this option will remap the geometry
- to a unit cube. Defaults to False.
- temporal : bool
- If True, save all timesteps.
- live : bool
- If True, one can call 'update()' to send updated geometry to Omniverse.
- If False, the Omniverse connection will push a single update and then
- disconnect. Defaults to True.
- time_scale : float
- Multiply all EnSight time values by this factor before exporting to Omniverse.
- The default is 1.0.
- debug_filename : str
- If the name of a file is provided, it will be used to save logging information on
- the connection between EnSight and Omniverse. This option is no longer supported,
- but the API remains for backwards compatibility.
- options : dict
- Allows for a fallback for the grpc host/port and the security token.
- """
- if not isinstance(self._ensight, ModuleType):
- self._ensight._session.ensight_version_check("2023 R2")
- self._check_modules()
- if self.is_running_omniverse():
- raise RuntimeError("An Omniverse server connection is already active.")
- if not isinstance(self._ensight, ModuleType):
- # Make sure the internal ui module is loaded
- self._ensight._session.cmd("import enspyqtgui_int", do_eval=False)
- # Get the gRPC connection details and use them to launch the service
- port = self._ensight._session.grpc.port()
- hostname = self._ensight._session.grpc.host
- token = self._ensight._session.grpc.security_token
- else:
- hostname = options.get("host", "127.0.0.1")
- port = options.get("port", 12345)
- token = options.get("security", "")
-
- # Launch the server via the 'ansys.pyensight.core.utils.omniverse_cli' module
- dsg_uri = f"grpc://{hostname}:{port}"
- cmd = [self._interpreter]
- cmd.extend(["-m", "ansys.pyensight.core.utils.omniverse_cli"])
- cmd.append(omniverse_path)
- if token:
- cmd.extend(["--security_token", token])
- if temporal:
- cmd.extend(["--temporal", "true"])
- if not include_camera:
- cmd.extend(["--include_camera", "false"])
- if normalize_geometry:
- cmd.extend(["--normalize_geometry", "true"])
- if time_scale != 1.0:
- cmd.extend(["--time_scale", str(time_scale)])
- if not live:
- cmd.extend(["--oneshot", "1"])
- cmd.extend(["--dsg_uri", dsg_uri])
- env_vars = os.environ.copy()
- # we are launching the kit from EnSight or PyEnSight. In these cases, we
- # inform the kit instance of:
- # (1) the name of the "server status" file, if any
- self._new_status_file()
- env_vars["ANSYS_OV_SERVER_STATUS_FILENAME"] = self._status_filename
- process = subprocess.Popen(cmd, close_fds=True, env=env_vars)
- self._server_pid = process.pid
-
- def _new_status_file(self, new=True) -> None:
- """
- Remove any existing status file and create a new one if requested.
-
- Parameters
- ----------
- new : bool
- If True, create a new status file.
- """
- if self._status_filename:
- try:
- os.remove(self._status_filename)
- except OSError:
- pass
- self._status_filename = ""
- if new:
- self._status_filename = os.path.join(
- tempfile.gettempdir(), str(uuid.uuid1()) + "_gs_status.txt"
- )
-
- def read_status_file(self) -> dict:
- """Read the status file and return its contents as a dictionary.
-
- Note: this can fail if the file is being written to when this call is made, so expect
- failures.
-
- Returns
- -------
- Optional[dict]
- A dictionary with the fields 'status', 'start_time', 'processed_buffers', 'total_buffers' or empty
- """
- if not self._status_filename:
- return {}
- try:
- with open(self._status_filename, "r") as status_file:
- data = json.load(status_file)
- except Exception:
- return {}
- return data
-
- def close_connection(self) -> None:
- """Shut down the open EnSight dsg -> omniverse server
-
- Break the connection between the EnSight instance and Omniverse.
-
- """
- self._check_modules()
- if not self.is_running_omniverse():
- return
- proc = psutil.Process(self._server_pid)
- for child in proc.children(recursive=True):
- if psutil.pid_exists(child.pid):
- # This can be a race condition, so it is ok if the child is dead already
- try:
- child.kill()
- except psutil.NoSuchProcess:
- pass
- # Same issue, this process might already be shutting down, so NoSuchProcess is ok.
- try:
- proc.kill()
- except psutil.NoSuchProcess:
- pass
- self._server_pid = None
- self._new_status_file(new=False)
-
- def update(self, temporal: bool = False) -> None:
- """Update the geometry in Omniverse
-
- Export the current EnSight scene to the current Omniverse connection.
-
- Parameters
- ----------
- temporal : bool
- If True, export all timesteps.
- """
- update_cmd = "dynamicscenegraph://localhost/client/update"
- if temporal:
- update_cmd += "?timesteps=1"
- self._check_modules()
- if not self.is_running_omniverse():
- raise RuntimeError("No Omniverse server connection is currently active.")
- if not isinstance(self._ensight, ModuleType):
- self._ensight._session.ensight_version_check("2023 R2")
- cmd = f'enspyqtgui_int.dynamic_scene_graph_command("{update_cmd}")'
- self._ensight._session.cmd(cmd, do_eval=False)
- else:
- import enspyqtgui_int
-
- enspyqtgui_int.dynamic_scene_graph_command(f"{update_cmd}")
+import glob
+import json
+import os
+import platform
+import subprocess
+import sys
+import tempfile
+from types import ModuleType
+from typing import TYPE_CHECKING, Optional, Union
+import uuid
+
+import psutil
+
+if TYPE_CHECKING:
+ try:
+ import ensight
+ except ImportError:
+ from ansys.api.pyensight import ensight_api
+
+
+class Omniverse:
+ """Provides the ``ensight.utils.omniverse`` interface.
+
+ The omniverse class methods provide an interface between an EnSight session
+ and an Omniverse instance. See :ref:`omniverse_info` for additional details.
+
+ Parameters
+ ----------
+ interface: Union["ensight_api.ensight", "ensight"]
+ Entity that provides the ``ensight`` namespace. In the case of
+ EnSight Python, the ``ensight`` module is passed. In the case
+ of PyEnSight, ``Session.ensight`` is passed.
+
+ Notes
+ -----
+    This interface is only available when using PyEnSight (it does not work in
+    the EnSight Python interpreter), and the module must be used in an interpreter
+ that includes the Omniverse Python modules (e.g. omni and pxr). Only a single
+ Omniverse connection can be established within a single pyensight session.
+
+ Examples
+ --------
+
+ >>> from ansys.pyensight.core import LocalLauncher
+ >>> session = LocalLauncher().start()
+ >>> ov = session.ensight.utils.omniverse
+ >>> ov.create_connection(r"D:\Omniverse\Example")
+ >>> ov.update()
+ >>> ov.close_connection()
+
+ """
+
+ def __init__(self, interface: Union["ensight_api.ensight", "ensight"]):
+ self._ensight = interface
+ self._server_pid: Optional[int] = None
+ self._interpreter: str = ""
+ self._status_filename: str = ""
+
+ @staticmethod
+ def find_kit_filename(fallback_directory: Optional[str] = None) -> Optional[str]:
+ """
+        Use a combination of the current Omniverse application and the information
+        in the local .nvidia-omniverse/config/omniverse.toml file to determine
+ the pathname of a kit executable suitable for hosting another copy of the
+ ansys.geometry.server kit.
+
+ Returns
+ -------
+ Optional[str]
+ The pathname of a kit executable or None
+
+ """
+ # parse the toml config file for the location of the installed apps
+ try:
+ import tomllib
+ except ModuleNotFoundError:
+ import pip._vendor.tomli as tomllib
+
+ homedir = os.path.expanduser("~")
+ ov_config = os.path.join(homedir, ".nvidia-omniverse", "config", "omniverse.toml")
+ if not os.path.exists(ov_config):
+ return None
+ # read the Omniverse configuration toml file
+ with open(ov_config, "r") as ov_file:
+ ov_data = ov_file.read()
+ config = tomllib.loads(ov_data)
+ appdir = config.get("paths", {}).get("library_root", fallback_directory)
+
+ # If we are running inside an Omniverse app, use that information
+ try:
+ import omni.kit.app
+
+ # get the current application
+ app = omni.kit.app.get_app()
+ app_name = app.get_app_filename().split(".")[-1]
+ app_version = app.get_app_version().split("-")[0]
+ # and where it is installed
+ appdir = os.path.join(appdir, f"{app_name}-{app_version}")
+ except ModuleNotFoundError:
+ # Names should be like: "C:\\Users\\foo\\AppData\\Local\\ov\\pkg\\create-2023.2.3\\launcher.toml"
+ target = None
+ target_version = None
+ for d in glob.glob(os.path.join(appdir, "*", "launcher.toml")):
+ test_dir = os.path.dirname(d)
+ # the name will be something like "create-2023.2.3"
+ name = os.path.basename(test_dir).split("-")
+ if len(name) != 2:
+ continue
+ if name[0] not in ("kit", "create", "view"):
+ continue
+ if (target_version is None) or (name[1] > target_version):
+ target = test_dir
+ target_version = name[1]
+ if target is None:
+ return None
+ appdir = target
+
+ # Windows: 'kit.bat' in '.' or 'kit' followed by 'kit.exe' in '.' or 'kit'
+ # Linux: 'kit.sh' in '.' or 'kit' followed by 'kit' in '.' or 'kit'
+ exe_names = ["kit.sh", "kit"]
+ if sys.platform.startswith("win"):
+ exe_names = ["kit.bat", "kit.exe"]
+
+ # look in 4 places...
+ for dir_name in [appdir, os.path.join(appdir, "kit")]:
+ for exe_name in exe_names:
+ if os.path.exists(os.path.join(dir_name, exe_name)):
+ return os.path.join(dir_name, exe_name)
+
+ return None
+
+ def _check_modules(self) -> None:
+ """Verify that the Python interpreter is correct
+
+ Check for module dependencies. If not present, raise an exception.
+
+ Raises
+ ------
+ RuntimeError
+ if the necessary modules are missing.
+
+ """
+ # One time check for this
+ if len(self._interpreter):
+ return
+
+ # if a module, then we are inside EnSight
+ if isinstance(self._ensight, ModuleType): # pragma: no cover
+ # in this case, we can just use cpython
+ import ceiversion
+ import enve
+
+ cei_home = os.environ.get("CEI_HOME", enve.home())
+ self._interpreter = os.path.join(cei_home, "bin", f"cpython{ceiversion.apex_suffix}")
+ if platform.system() == "Windows":
+ self._interpreter += ".bat"
+ return
+ # Using the python interpreter running this code
+ self._interpreter = sys.executable
+
+ # in the future, these will be part of the pyensight wheel
+ # dependencies, but for now we include this check.
+ try:
+ import pxr # noqa: F401
+ import pygltflib # noqa: F401
+ except Exception:
+ raise RuntimeError("Unable to detect omniverse dependencies: usd-core, pygltflib.")
+
+ def is_running_omniverse(self) -> bool:
+ """Check that an Omniverse connection is active
+
+ Returns
+ -------
+ bool
+ True if the connection is active, False otherwise.
+ """
+ if self._server_pid is None:
+ return False
+ if psutil.pid_exists(self._server_pid):
+ return True
+ self._server_pid = None
+ return False
+
+ def create_connection(
+ self,
+ omniverse_path: str,
+ include_camera: bool = False,
+ normalize_geometry: bool = False,
+ temporal: bool = False,
+ live: bool = True,
+ debug_filename: str = "",
+ time_scale: float = 1.0,
+ options: dict = {},
+ ) -> None:
+ """Ensure that an EnSight dsg -> omniverse server is running
+
+ Connect the current EnSight session to an Omniverse server.
+ This is done by launching a new service that makes a dynamic scene graph
+ connection to the EnSight session and pushes updates to the Omniverse server.
+ The initial EnSight scene will be pushed after the connection is established.
+
+ Parameters
+ ----------
+ omniverse_path : str
+ The directory name where the USD files should be saved. For example:
+ "C:/Users/test/OV/usdfiles"
+ include_camera : bool
+ If True, apply the EnSight camera to the Omniverse scene. This option
+ should be used if the target viewer is in AR/VR mode. Defaults to False.
+ normalize_geometry : bool
+ Omniverse units are in meters. If the source dataset is not in the correct
+ unit system or is just too large/small, this option will remap the geometry
+ to a unit cube. Defaults to False.
+ temporal : bool
+ If True, save all timesteps.
+ live : bool
+ If True, one can call 'update()' to send updated geometry to Omniverse.
+ If False, the Omniverse connection will push a single update and then
+ disconnect. Defaults to True.
+ time_scale : float
+ Multiply all EnSight time values by this factor before exporting to Omniverse.
+ The default is 1.0.
+ debug_filename : str
+ If the name of a file is provided, it will be used to save logging information on
+ the connection between EnSight and Omniverse. This option is no longer supported,
+ but the API remains for backwards compatibility.
+ options : dict
+            Fallback values for the gRPC host/port and the security token when
+            they cannot be obtained from the session.
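+            For example (values are illustrative):
+            ``{"host": "127.0.0.1", "port": 12345, "security": "my_token"}``.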
+ """
+ if not isinstance(self._ensight, ModuleType):
+ self._ensight._session.ensight_version_check("2023 R2")
+ self._check_modules()
+ if self.is_running_omniverse():
+ raise RuntimeError("An Omniverse server connection is already active.")
+ if not isinstance(self._ensight, ModuleType):
+ # Make sure the internal ui module is loaded
+ self._ensight._session.cmd("import enspyqtgui_int", do_eval=False)
+ # Get the gRPC connection details and use them to launch the service
+ port = self._ensight._session.grpc.port()
+ hostname = self._ensight._session.grpc.host
+ token = self._ensight._session.grpc.security_token
+ else:
+ hostname = options.get("host", "127.0.0.1")
+ port = options.get("port", 12345)
+ token = options.get("security", "")
+
+ # Launch the server via the 'ansys.pyensight.core.utils.omniverse_cli' module
+ dsg_uri = f"grpc://{hostname}:{port}"
+ cmd = [self._interpreter]
+ cmd.extend(["-m", "ansys.pyensight.core.utils.omniverse_cli"])
+ cmd.append(omniverse_path)
+ if token:
+ cmd.extend(["--security_token", token])
+ if temporal:
+ cmd.extend(["--temporal", "true"])
+ if not include_camera:
+ cmd.extend(["--include_camera", "false"])
+ if normalize_geometry:
+ cmd.extend(["--normalize_geometry", "true"])
+ if time_scale != 1.0:
+ cmd.extend(["--time_scale", str(time_scale)])
+ if not live:
+ cmd.extend(["--oneshot", "1"])
+ cmd.extend(["--dsg_uri", dsg_uri])
+ env_vars = os.environ.copy()
+ # we are launching the kit from EnSight or PyEnSight. In these cases, we
+ # inform the kit instance of:
+ # (1) the name of the "server status" file, if any
+ self._new_status_file()
+ env_vars["ANSYS_OV_SERVER_STATUS_FILENAME"] = self._status_filename
+ process = subprocess.Popen(cmd, close_fds=True, env=env_vars)
+ self._server_pid = process.pid
+
+ def _new_status_file(self, new=True) -> None:
+ """
+ Remove any existing status file and create a new one if requested.
+
+ Parameters
+ ----------
+ new : bool
+ If True, create a new status file.
+ """
+ if self._status_filename:
+ try:
+ os.remove(self._status_filename)
+ except OSError:
+ pass
+ self._status_filename = ""
+ if new:
+ self._status_filename = os.path.join(
+ tempfile.gettempdir(), str(uuid.uuid1()) + "_gs_status.txt"
+ )
+
+ def read_status_file(self) -> dict:
+ """Read the status file and return its contents as a dictionary.
+
+        Note: reading can fail if the file is being written to at the time of
+        this call, in which case an empty dictionary is returned.
+
+        Returns
+        -------
+        dict
+            A dictionary with the fields 'status', 'start_time',
+            'processed_buffers', and 'total_buffers', or an empty dictionary.
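+
+        A minimal polling sketch (field names as listed above):
+
+        >>> info = ov.read_status_file()
+        >>> if info:
+        ...     print(info.get("status"), info.get("processed_buffers"))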
+ """
+ if not self._status_filename:
+ return {}
+ try:
+ with open(self._status_filename, "r") as status_file:
+ data = json.load(status_file)
+ except Exception:
+ return {}
+ return data
+
+ def close_connection(self) -> None:
+ """Shut down the open EnSight dsg -> omniverse server
+
+ Break the connection between the EnSight instance and Omniverse.
+
+ """
+ self._check_modules()
+ if not self.is_running_omniverse():
+ return
+ proc = psutil.Process(self._server_pid)
+ for child in proc.children(recursive=True):
+ if psutil.pid_exists(child.pid):
+ # This can be a race condition, so it is ok if the child is dead already
+ try:
+ child.kill()
+ except psutil.NoSuchProcess:
+ pass
+ # Same issue, this process might already be shutting down, so NoSuchProcess is ok.
+ try:
+ proc.kill()
+ except psutil.NoSuchProcess:
+ pass
+ self._server_pid = None
+ self._new_status_file(new=False)
+
+ def update(self, temporal: bool = False) -> None:
+ """Update the geometry in Omniverse
+
+ Export the current EnSight scene to the current Omniverse connection.
+
+ Parameters
+ ----------
+ temporal : bool
+ If True, export all timesteps.
+ """
+ update_cmd = "dynamicscenegraph://localhost/client/update"
+ if temporal:
+ update_cmd += "?timesteps=1"
+ self._check_modules()
+ if not self.is_running_omniverse():
+ raise RuntimeError("No Omniverse server connection is currently active.")
+ if not isinstance(self._ensight, ModuleType):
+ self._ensight._session.ensight_version_check("2023 R2")
+ cmd = f'enspyqtgui_int.dynamic_scene_graph_command("{update_cmd}")'
+ self._ensight._session.cmd(cmd, do_eval=False)
+ else:
+ import enspyqtgui_int
+
+ enspyqtgui_int.dynamic_scene_graph_command(f"{update_cmd}")
diff --git a/src/ansys/pyensight/core/utils/omniverse_cli.py b/src/ansys/pyensight/core/utils/omniverse_cli.py
index 9d7f3a8481e..5293b1fe598 100644
--- a/src/ansys/pyensight/core/utils/omniverse_cli.py
+++ b/src/ansys/pyensight/core/utils/omniverse_cli.py
@@ -1,520 +1,520 @@
-import argparse
-from functools import partial
-import glob
-import json
-import logging
-import os
-import pathlib
-import time
-from typing import Any, List, Optional
-from urllib.parse import urlparse
-
-import ansys.pyensight.core
-import ansys.pyensight.core.utils.dsg_server as dsg_server
-import ansys.pyensight.core.utils.omniverse_dsg_server as ov_dsg_server
-import ansys.pyensight.core.utils.omniverse_glb_server as ov_glb_server
-
-
-def str2bool_type(v: Any) -> bool:
- """
- This function is designed to be a 'type=' filter for an argparse entry returning a boolean.
- It allows for additional, common alternative strings as booleans. These include 'yes','no',
- 'true','false','t','f','y','n','1' and '0'. If the value does not meet the requirements,
- the function will raise the argparse.ArgumentTypeError exception.
- :param v: The (potential) boolean argument.
- :return: The actual boolean value.
- :raises: argparse.ArgumentTypeError
- """
- if isinstance(v, bool):
- return v
- if v.lower() in ("yes", "true", "t", "y", "1"):
- return True
- elif v.lower() in ("no", "false", "f", "n", "0"):
- return False
- else:
- raise argparse.ArgumentTypeError("Boolean value expected.")
-
-
-def int_range_type(
- v: Any, min_value: int = 0, max_value: int = 100, allow: Optional[List[int]] = None
-) -> int:
- """
- This function is designed to be a 'type=' filter for an argparse entry returning an integer value within
- a specified range. If the value does not meet the requirements, the function will raise the
- argparse.ArgumentTypeError exception. This function is normally used with functools.partial to bind
- the minimum and maximum values. For example: type=partial(int_range_type, min_value=0, max_value=65535)
- :param v: The (potential) integer argument.
- :param min_value: The minimum legal integer value.
- :param max_value: The maximum legal integer value.
- :param allow:A list of additional, legal values
- :return: The validated integer value.
- :raises: argparse.ArgumentTypeError
- """
- try:
- value = int(v)
- except ValueError:
- raise argparse.ArgumentTypeError("Integer value expected.")
- if allow is None:
- allow = []
- if (value >= min_value) and (value <= max_value):
- return value
- elif value in allow:
- return value
- else:
- msg = f"Integer value is not in the range [{min_value},{max_value}]"
- if allow:
- msg += f" or in the list {allow}"
- raise argparse.ArgumentTypeError(msg + ".")
-
-
-class OmniverseGeometryServer(object):
- def __init__(
- self,
- security_token: str = "",
- destination: str = "",
- temporal: bool = False,
- vrmode: bool = False,
- time_scale: float = 1.0,
- normalize_geometry: bool = False,
- dsg_uri: str = "",
- monitor_directory: str = "",
- line_width: float = -0.0001,
- use_lines: bool = False,
- ) -> None:
- self._dsg_uri = dsg_uri
- self._destination = destination
- self._security_token = security_token
- if not self._security_token:
- self._security_token = os.environ.get("ENSIGHT_SECURITY_TOKEN", "")
- self._temporal = temporal
- self._vrmode = vrmode
- self._time_scale = time_scale
- self._normalize_geometry = normalize_geometry
- self._version = "unknown"
- self._shutdown = False
- self._server_process = None
- self._status_filename: str = ""
- self._monitor_directory: str = monitor_directory
- self._line_width = line_width
- self._use_lines = use_lines
-
- @property
- def monitor_directory(self) -> Optional[str]:
- if self._monitor_directory:
- return self._monitor_directory
- # converts "" -> None
- return None
-
- @property
- def pyensight_version(self) -> str:
- """The ansys.pyensight.core version"""
- return ansys.pyensight.core.VERSION
-
- @property
- def dsg_uri(self) -> str:
- """The endpoint of a Dynamic Scene Graph service: grpc://{hostname}:{port}"""
- return self._dsg_uri
-
- @dsg_uri.setter
- def dsg_uri(self, uri: str) -> None:
- self._dsg_uri = uri
-
- @property
- def destination(self) -> str:
- """The endpoint of an Omniverse Nucleus service: omniverse://{hostname}/{path}"""
- return self._destination
-
- @destination.setter
- def destination(self, value: str) -> None:
- self._destination = value
-
- @property
- def security_token(self) -> str:
- """The security token of the DSG service instance."""
- return self._security_token
-
- @security_token.setter
- def security_token(self, value: str) -> None:
- self._security_token = value
-
- @property
- def temporal(self) -> bool:
- """If True, the DSG update should include all timesteps."""
- return self._temporal
-
- @temporal.setter
- def temporal(self, value: bool) -> None:
- self._temporal = bool(value)
-
- @property
- def vrmode(self) -> bool:
- """If True, the DSG update should not include camera transforms."""
- return self._vrmode
-
- @vrmode.setter
- def vrmode(self, value: bool) -> None:
- self._vrmode = bool(value)
-
- @property
- def normalize_geometry(self) -> bool:
- """If True, the DSG geometry should be remapped into normalized space."""
- return self._normalize_geometry
-
- @normalize_geometry.setter
- def normalize_geometry(self, val: bool) -> None:
- self._normalize_geometry = val
-
- @property
- def time_scale(self) -> float:
- """Value to multiply DSG time values by before passing to Omniverse"""
- return self._time_scale
-
- @time_scale.setter
- def time_scale(self, value: float) -> None:
- self._time_scale = value
-
- def run_server(self, one_shot: bool = False) -> None:
- """
- Run a DSG to Omniverse server in process.
-
- Note: this method does not return until the DSG connection is dropped or
- self.stop_server() has been called.
-
- Parameters
- ----------
- one_shot : bool
- If True, only run the server to transfer a single scene and
- then return.
- """
-
- # Build the Omniverse connection
- omni_link = ov_dsg_server.OmniverseWrapper(
- destination=self._destination, line_width=self._line_width, use_lines=self._use_lines
- )
- logging.info("Omniverse connection established.")
-
- # parse the DSG USI
- parsed = urlparse(self.dsg_uri)
- port = parsed.port
- host = parsed.hostname
-
- # link it to a DSG session
- update_handler = ov_dsg_server.OmniverseUpdateHandler(omni_link)
- dsg_link = dsg_server.DSGSession(
- port=port,
- host=host,
- vrmode=self.vrmode,
- security_code=self.security_token,
- verbose=1,
- normalize_geometry=self.normalize_geometry,
- time_scale=self.time_scale,
- handler=update_handler,
- )
-
- # Start the DSG link
- logging.info(f"Making DSG connection to: {self.dsg_uri}")
- err = dsg_link.start()
- if err < 0:
- logging.error("Omniverse connection failed.")
- return
-
- # Initial pull request
- dsg_link.request_an_update(animation=self.temporal)
-
- # until the link is dropped, continue
- while not dsg_link.is_shutdown() and not self._shutdown:
- dsg_link.handle_one_update()
- if one_shot:
- break
-
- logging.info("Shutting down DSG connection")
- dsg_link.end()
- omni_link.shutdown()
-
- def run_monitor(self):
- """
- Run monitor and upload GLB files to Omniverse in process. There are two cases:
-
- 1) the "directory name" is actually a .glb file. In this case, simply push
- the glb file contents to Omniverse.
-
- 2) If a directory, then we periodically scan the directory for files named "*.upload".
- If this file is found, there are two cases:
-
- a) The file is empty. In this case, for a file named ABC.upload, the file
- ABC.glb will be read and uploaded before both files are deleted.
-
- b) The file contains valid json. In this case, the json object is parsed with
- the following format (two glb files for the first timestep and one for the second):
-
- {
- "version": 1,
- "destination": "",
- "files": ["a.glb", "b.glb", "c.glb"],
- "times": [0.0, 0.0, 1.0]
- }
-
- "times" is optional and defaults to [0*len("files")]. Once processed,
- all the files referenced in the json and the json file itself are deleted.
- "omniuri" is optional and defaults to the passed Omniverse path.
-
- Note: In this mode, the method does not return until a "shutdown" file or
- an error is encountered.
-
- TODO: add "push" mechanism to trigger a DSG push from the connected session. This
- can be done via the monitor mechanism and used by the Omniverse kit to implement
- a "pull".
- """
- the_dir = self.monitor_directory
- single_file_upload = False
- if os.path.isfile(the_dir) and the_dir.lower().endswith(".glb"):
- single_file_upload = True
- else:
- if not os.path.isdir(the_dir):
- logging.error(f"The monitor directory {the_dir} does not exist.")
- return
-
- # Build the Omniverse connection
- omni_link = ov_dsg_server.OmniverseWrapper(
- destination=self._destination, line_width=self._line_width, use_lines=self._use_lines
- )
- logging.info("Omniverse connection established.")
-
- # use an OmniverseUpdateHandler
- update_handler = ov_dsg_server.OmniverseUpdateHandler(omni_link)
-
- # Link it to the GLB file monitoring service
- glb_link = ov_glb_server.GLBSession(verbose=1, handler=update_handler, vrmode=self.vrmode)
- if single_file_upload:
- start_time = time.time()
- logging.info(f"Uploading file: {the_dir}.")
- try:
- glb_link.start_uploads([0.0, 0.0])
- glb_link.upload_file(the_dir)
- glb_link.end_uploads()
- except Exception as error:
- logging.error(f"Unable to upload file: {the_dir}: {error}")
- logging.info(f"Uploaded in {(time.time() - start_time):.2f}")
- else:
- logging.info(f"Starting file monitoring for {the_dir}.")
- the_dir_path = pathlib.Path(the_dir)
- try:
- stop_file = os.path.join(the_dir, "shutdown")
- orig_destination = omni_link.destination
- while not os.path.exists(stop_file):
- loop_time = time.time()
- files_to_remove = []
- for filename in glob.glob(os.path.join(the_dir, "*.upload")):
- # reset to the launch URI/directory
- omni_link.destination = orig_destination
- # Keep track of the files and time values
- files_to_remove.append(filename)
- files_to_process = []
- file_timestamps = []
- if os.path.getsize(filename) == 0:
- # replace the ".upload" extension with ".glb"
- glb_file = os.path.splitext(filename)[0] + ".glb"
- if os.path.exists(glb_file):
- files_to_process.append(glb_file)
- file_timestamps.append(0.0)
- files_to_remove.append(glb_file)
- else:
- # read the .upload file json content
- try:
- with open(filename, "r") as fp:
- glb_info = json.load(fp)
- except Exception:
- logging.error(f"Unable to read file: {filename}")
- continue
- # if specified, set the URI/directory target
- omni_link.destination = glb_info.get("destination", orig_destination)
- # Get the GLB files to process
- the_files = glb_info.get("files", [])
- files_to_remove.extend(the_files)
- # Times not used for now, but parse them anyway
- the_times = glb_info.get("times", [0.0] * len(the_files))
- file_timestamps.extend(the_times)
- # Validate a few things
- if len(the_files) != len(the_times):
- logging.error(
- f"Number of times and files are not the same in: {filename}"
- )
- continue
- files_to_process.extend(the_files)
- # manage time
- timeline = sorted(set(file_timestamps))
- if len(timeline) != 1:
- logging.warning("Time values not currently supported.")
- if len(files_to_process) > 1:
- logging.warning("Multiple glb files not currently fully supported.")
- # Upload the files
- glb_link.start_uploads([timeline[0], timeline[-1]])
- for glb_file, timestamp in zip(files_to_process, file_timestamps):
- start_time = time.time()
- logging.info(f"Uploading file: {glb_file} to {omni_link.destination}.")
- try:
- time_idx = timeline.index(timestamp) + 1
- if time_idx == len(timeline):
- time_idx -= 1
- limits = [timestamp, timeline[time_idx]]
- glb_link.upload_file(glb_file, timeline=limits)
- except Exception as error:
- logging.error(f"Unable to upload file: {glb_file}: {error}")
- logging.info(f"Uploaded in {(time.time() - start_time):.2f}s")
- glb_link.end_uploads()
- for filename in files_to_remove:
- try:
- # Only delete the file if it is in the_dir_path
- filename_path = pathlib.Path(filename)
- if filename_path.is_relative_to(the_dir_path):
- os.remove(filename)
- except IOError:
- pass
- if time.time() - loop_time < 0.1:
- time.sleep(0.25)
- except Exception as error:
- logging.error(f"Error encountered while monitoring: {error}")
- logging.info("Stopping file monitoring.")
- try:
- os.remove(stop_file)
- except IOError:
- logging.error("Unable to remove 'shutdown' file.")
-
- omni_link.shutdown()
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser(description="PyEnSight Omniverse Geometry Service")
- parser.add_argument(
- "destination", default="", type=str, help="The directory to save the USD scene graph into."
- )
- parser.add_argument(
- "--verbose",
- metavar="verbose_level",
- default=0,
- type=partial(int_range_type, min_value=0, max_value=3),
- help="Enable logging information (0-3). Default: 0",
- )
- parser.add_argument(
- "--log_file",
- metavar="log_filename",
- default="",
- type=str,
- help="Save logging output to the named log file instead of stdout.",
- )
- parser.add_argument(
- "--dsg_uri",
- default="grpc://127.0.0.1:5234",
- type=str,
- help="The URI of the EnSight Dynamic Scene Graph server. Default: grpc://127.0.0.1:5234",
- )
- parser.add_argument(
- "--security_token",
- metavar="token",
- default="",
- type=str,
- help="Dynamic scene graph API security token. Default: none",
- )
- parser.add_argument(
- "--monitor_directory",
- metavar="glb_directory",
- default="",
- type=str,
- help="Monitor specified directory for GLB files to be exported. Default: none",
- )
- parser.add_argument(
- "--time_scale",
- metavar="time_scale",
- default=1.0,
- type=float,
- help="Scaling factor to be applied to input time values. Default: 1.0",
- )
- parser.add_argument(
- "--normalize_geometry",
- metavar="yes|no|true|false|1|0",
- default=False,
- type=str2bool_type,
- help="Enable mapping of geometry to a normalized Cartesian space. Default: false",
- )
- parser.add_argument(
- "--include_camera",
- metavar="yes|no|true|false|1|0",
- default=True,
- type=str2bool_type,
- help="Include the camera in the output USD scene graph. Default: true",
- )
- parser.add_argument(
- "--temporal",
- metavar="yes|no|true|false|1|0",
- default=False,
- type=str2bool_type,
- help="Export a temporal scene graph. Default: false",
- )
- parser.add_argument(
- "--oneshot",
- metavar="yes|no|true|false|1|0",
- default=False,
- type=str2bool_type,
- help="Convert a single geometry into USD and exit. Default: false",
- )
- line_default: Any = os.environ.get("ANSYS_OV_LINE_WIDTH", None)
- if line_default is not None:
- try:
- line_default = float(line_default)
- except ValueError:
- line_default = None
- # Potential future default: -0.0001
- parser.add_argument(
- "--line_width",
- metavar="line_width",
- default=line_default,
- type=float,
- help=f"Width of lines: >0=absolute size. <0=fraction of diagonal. 0=wireframe. Default: {line_default}",
- )
-
- # parse the command line
- args = parser.parse_args()
-
- # set up logging
- level = logging.ERROR
- if args.verbose == 1:
- level = logging.WARN
- elif args.verbose == 2:
- level = logging.INFO
- elif args.verbose == 3:
- level = logging.DEBUG
- log_args = dict(format="GeometryService:%(levelname)s:%(message)s", level=level)
- if args.log_file:
- log_args["filename"] = args.log_file
- # start with a clean logging instance
- while logging.root.hasHandlers():
- logging.root.removeHandler(logging.root.handlers[0])
- logging.basicConfig(**log_args) # type: ignore
-
- # size of lines in data units or fraction of bounding box diagonal
- use_lines = args.line_width is not None
- line_width = -0.0001
- if args.line_width is not None:
- line_width = args.line_width
-
- # Build the server object
- server = OmniverseGeometryServer(
- destination=args.destination,
- dsg_uri=args.dsg_uri,
- security_token=args.security_token,
- monitor_directory=args.monitor_directory,
- time_scale=args.time_scale,
- normalize_geometry=args.normalize_geometry,
- vrmode=not args.include_camera,
- temporal=args.temporal,
- line_width=line_width,
- use_lines=use_lines,
- )
-
- # run the server
- logging.info("Server startup.")
- if server.monitor_directory:
- server.run_monitor()
- else:
- server.run_server(one_shot=args.oneshot)
- logging.info("Server shutdown.")
+import argparse
+from functools import partial
+import glob
+import json
+import logging
+import os
+import pathlib
+import time
+from typing import Any, List, Optional
+from urllib.parse import urlparse
+
+import ansys.pyensight.core
+import ansys.pyensight.core.utils.dsg_server as dsg_server
+import ansys.pyensight.core.utils.omniverse_dsg_server as ov_dsg_server
+import ansys.pyensight.core.utils.omniverse_glb_server as ov_glb_server
+
+
+def str2bool_type(v: Any) -> bool:
+ """
+ This function is designed to be a 'type=' filter for an argparse entry returning a boolean.
+    It accepts common alternative strings for booleans: 'yes', 'no', 'true', 'false',
+    't', 'f', 'y', 'n', '1', and '0'. If the value is not one of these, the function
+    raises the argparse.ArgumentTypeError exception.
+ :param v: The (potential) boolean argument.
+ :return: The actual boolean value.
+ :raises: argparse.ArgumentTypeError
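+
+    For example, str2bool_type("yes") returns True, while str2bool_type("maybe")
+    raises argparse.ArgumentTypeError.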
+ """
+ if isinstance(v, bool):
+ return v
+ if v.lower() in ("yes", "true", "t", "y", "1"):
+ return True
+ elif v.lower() in ("no", "false", "f", "n", "0"):
+ return False
+ else:
+ raise argparse.ArgumentTypeError("Boolean value expected.")
+
+
+def int_range_type(
+ v: Any, min_value: int = 0, max_value: int = 100, allow: Optional[List[int]] = None
+) -> int:
+ """
+ This function is designed to be a 'type=' filter for an argparse entry returning an integer value within
+ a specified range. If the value does not meet the requirements, the function will raise the
+ argparse.ArgumentTypeError exception. This function is normally used with functools.partial to bind
+ the minimum and maximum values. For example: type=partial(int_range_type, min_value=0, max_value=65535)
+ :param v: The (potential) integer argument.
+ :param min_value: The minimum legal integer value.
+ :param max_value: The maximum legal integer value.
+    :param allow: A list of additional legal values.
+ :return: The validated integer value.
+ :raises: argparse.ArgumentTypeError
+ """
+ try:
+ value = int(v)
+ except ValueError:
+ raise argparse.ArgumentTypeError("Integer value expected.")
+ if allow is None:
+ allow = []
+ if (value >= min_value) and (value <= max_value):
+ return value
+ elif value in allow:
+ return value
+ else:
+ msg = f"Integer value is not in the range [{min_value},{max_value}]"
+ if allow:
+ msg += f" or in the list {allow}"
+ raise argparse.ArgumentTypeError(msg + ".")
+
+
+class OmniverseGeometryServer(object):
+ def __init__(
+ self,
+ security_token: str = "",
+ destination: str = "",
+ temporal: bool = False,
+ vrmode: bool = False,
+ time_scale: float = 1.0,
+ normalize_geometry: bool = False,
+ dsg_uri: str = "",
+ monitor_directory: str = "",
+ line_width: float = -0.0001,
+ use_lines: bool = False,
+ ) -> None:
+ self._dsg_uri = dsg_uri
+ self._destination = destination
+ self._security_token = security_token
+ if not self._security_token:
+ self._security_token = os.environ.get("ENSIGHT_SECURITY_TOKEN", "")
+ self._temporal = temporal
+ self._vrmode = vrmode
+ self._time_scale = time_scale
+ self._normalize_geometry = normalize_geometry
+ self._version = "unknown"
+ self._shutdown = False
+ self._server_process = None
+ self._status_filename: str = ""
+ self._monitor_directory: str = monitor_directory
+ self._line_width = line_width
+ self._use_lines = use_lines
+
+ @property
+ def monitor_directory(self) -> Optional[str]:
+ if self._monitor_directory:
+ return self._monitor_directory
+ # converts "" -> None
+ return None
+
+ @property
+ def pyensight_version(self) -> str:
+ """The ansys.pyensight.core version"""
+ return ansys.pyensight.core.VERSION
+
+ @property
+ def dsg_uri(self) -> str:
+ """The endpoint of a Dynamic Scene Graph service: grpc://{hostname}:{port}"""
+ return self._dsg_uri
+
+ @dsg_uri.setter
+ def dsg_uri(self, uri: str) -> None:
+ self._dsg_uri = uri
+
+ @property
+ def destination(self) -> str:
+ """The endpoint of an Omniverse Nucleus service: omniverse://{hostname}/{path}"""
+ return self._destination
+
+ @destination.setter
+ def destination(self, value: str) -> None:
+ self._destination = value
+
+ @property
+ def security_token(self) -> str:
+ """The security token of the DSG service instance."""
+ return self._security_token
+
+ @security_token.setter
+ def security_token(self, value: str) -> None:
+ self._security_token = value
+
+ @property
+ def temporal(self) -> bool:
+ """If True, the DSG update should include all timesteps."""
+ return self._temporal
+
+ @temporal.setter
+ def temporal(self, value: bool) -> None:
+ self._temporal = bool(value)
+
+ @property
+ def vrmode(self) -> bool:
+ """If True, the DSG update should not include camera transforms."""
+ return self._vrmode
+
+ @vrmode.setter
+ def vrmode(self, value: bool) -> None:
+ self._vrmode = bool(value)
+
+ @property
+ def normalize_geometry(self) -> bool:
+ """If True, the DSG geometry should be remapped into normalized space."""
+ return self._normalize_geometry
+
+ @normalize_geometry.setter
+ def normalize_geometry(self, val: bool) -> None:
+ self._normalize_geometry = val
+
+ @property
+ def time_scale(self) -> float:
+ """Value to multiply DSG time values by before passing to Omniverse"""
+ return self._time_scale
+
+ @time_scale.setter
+ def time_scale(self, value: float) -> None:
+ self._time_scale = value
+
+ def run_server(self, one_shot: bool = False) -> None:
+ """
+ Run a DSG to Omniverse server in process.
+
+ Note: this method does not return until the DSG connection is dropped or
+ self.stop_server() has been called.
+
+ Parameters
+ ----------
+ one_shot : bool
+ If True, only run the server to transfer a single scene and
+ then return.
+ """
+
+ # Build the Omniverse connection
+ omni_link = ov_dsg_server.OmniverseWrapper(
+ destination=self._destination, line_width=self._line_width, use_lines=self._use_lines
+ )
+ logging.info("Omniverse connection established.")
+
+        # parse the DSG URI
+ parsed = urlparse(self.dsg_uri)
+ port = parsed.port
+ host = parsed.hostname
+
+ # link it to a DSG session
+ update_handler = ov_dsg_server.OmniverseUpdateHandler(omni_link)
+ dsg_link = dsg_server.DSGSession(
+ port=port,
+ host=host,
+ vrmode=self.vrmode,
+ security_code=self.security_token,
+ verbose=1,
+ normalize_geometry=self.normalize_geometry,
+ time_scale=self.time_scale,
+ handler=update_handler,
+ )
+
+ # Start the DSG link
+ logging.info(f"Making DSG connection to: {self.dsg_uri}")
+ err = dsg_link.start()
+ if err < 0:
+ logging.error("Omniverse connection failed.")
+ return
+
+ # Initial pull request
+ dsg_link.request_an_update(animation=self.temporal)
+
+ # until the link is dropped, continue
+ while not dsg_link.is_shutdown() and not self._shutdown:
+ dsg_link.handle_one_update()
+ if one_shot:
+ break
+
+ logging.info("Shutting down DSG connection")
+ dsg_link.end()
+ omni_link.shutdown()
+
+ def run_monitor(self):
+ """
+        Run the file monitor and upload GLB files to Omniverse in process. There are two cases:
+
+ 1) the "directory name" is actually a .glb file. In this case, simply push
+ the glb file contents to Omniverse.
+
+        2) If a directory, then the directory is periodically scanned for files
+           named "*.upload". When such a file is found, there are two cases:
+
+ a) The file is empty. In this case, for a file named ABC.upload, the file
+ ABC.glb will be read and uploaded before both files are deleted.
+
+ b) The file contains valid json. In this case, the json object is parsed with
+ the following format (two glb files for the first timestep and one for the second):
+
+ {
+ "version": 1,
+ "destination": "",
+ "files": ["a.glb", "b.glb", "c.glb"],
+ "times": [0.0, 0.0, 1.0]
+ }
+
+ "times" is optional and defaults to [0*len("files")]. Once processed,
+ all the files referenced in the json and the json file itself are deleted.
+ "omniuri" is optional and defaults to the passed Omniverse path.
+
+ Note: In this mode, the method does not return until a "shutdown" file or
+ an error is encountered.
+
+ TODO: add "push" mechanism to trigger a DSG push from the connected session. This
+ can be done via the monitor mechanism and used by the Omniverse kit to implement
+ a "pull".
+ """
+ the_dir = self.monitor_directory
+ single_file_upload = False
+ if os.path.isfile(the_dir) and the_dir.lower().endswith(".glb"):
+ single_file_upload = True
+ else:
+ if not os.path.isdir(the_dir):
+ logging.error(f"The monitor directory {the_dir} does not exist.")
+ return
+
+ # Build the Omniverse connection
+ omni_link = ov_dsg_server.OmniverseWrapper(
+ destination=self._destination, line_width=self._line_width, use_lines=self._use_lines
+ )
+ logging.info("Omniverse connection established.")
+
+ # use an OmniverseUpdateHandler
+ update_handler = ov_dsg_server.OmniverseUpdateHandler(omni_link)
+
+ # Link it to the GLB file monitoring service
+ glb_link = ov_glb_server.GLBSession(verbose=1, handler=update_handler, vrmode=self.vrmode)
+ if single_file_upload:
+ start_time = time.time()
+ logging.info(f"Uploading file: {the_dir}.")
+ try:
+ glb_link.start_uploads([0.0, 0.0])
+ glb_link.upload_file(the_dir)
+ glb_link.end_uploads()
+ except Exception as error:
+ logging.error(f"Unable to upload file: {the_dir}: {error}")
+ logging.info(f"Uploaded in {(time.time() - start_time):.2f}")
+ else:
+ logging.info(f"Starting file monitoring for {the_dir}.")
+ the_dir_path = pathlib.Path(the_dir)
+ try:
+ stop_file = os.path.join(the_dir, "shutdown")
+ orig_destination = omni_link.destination
+ while not os.path.exists(stop_file):
+ loop_time = time.time()
+ files_to_remove = []
+ for filename in glob.glob(os.path.join(the_dir, "*.upload")):
+ # reset to the launch URI/directory
+ omni_link.destination = orig_destination
+ # Keep track of the files and time values
+ files_to_remove.append(filename)
+ files_to_process = []
+ file_timestamps = []
+ if os.path.getsize(filename) == 0:
+ # replace the ".upload" extension with ".glb"
+ glb_file = os.path.splitext(filename)[0] + ".glb"
+ if os.path.exists(glb_file):
+ files_to_process.append(glb_file)
+ file_timestamps.append(0.0)
+ files_to_remove.append(glb_file)
+ else:
+ # read the .upload file json content
+ try:
+ with open(filename, "r") as fp:
+ glb_info = json.load(fp)
+ except Exception:
+ logging.error(f"Unable to read file: {filename}")
+ continue
+ # if specified, set the URI/directory target
+ omni_link.destination = glb_info.get("destination", orig_destination)
+ # Get the GLB files to process
+ the_files = glb_info.get("files", [])
+ files_to_remove.extend(the_files)
+ # Times not used for now, but parse them anyway
+ the_times = glb_info.get("times", [0.0] * len(the_files))
+ file_timestamps.extend(the_times)
+ # Validate a few things
+ if len(the_files) != len(the_times):
+ logging.error(
+ f"Number of times and files are not the same in: {filename}"
+ )
+ continue
+ files_to_process.extend(the_files)
+ # manage time
+ timeline = sorted(set(file_timestamps))
+ if len(timeline) != 1:
+ logging.warning("Time values not currently supported.")
+ if len(files_to_process) > 1:
+ logging.warning("Multiple glb files not currently fully supported.")
+ # Upload the files
+ glb_link.start_uploads([timeline[0], timeline[-1]])
+ for glb_file, timestamp in zip(files_to_process, file_timestamps):
+ start_time = time.time()
+ logging.info(f"Uploading file: {glb_file} to {omni_link.destination}.")
+ try:
+ time_idx = timeline.index(timestamp) + 1
+ if time_idx == len(timeline):
+ time_idx -= 1
+ limits = [timestamp, timeline[time_idx]]
+ glb_link.upload_file(glb_file, timeline=limits)
+ except Exception as error:
+ logging.error(f"Unable to upload file: {glb_file}: {error}")
+ logging.info(f"Uploaded in {(time.time() - start_time):.2f}s")
+ glb_link.end_uploads()
+ for filename in files_to_remove:
+ try:
+ # Only delete the file if it is in the_dir_path
+ filename_path = pathlib.Path(filename)
+ if filename_path.is_relative_to(the_dir_path):
+ os.remove(filename)
+ except IOError:
+ pass
+ if time.time() - loop_time < 0.1:
+ time.sleep(0.25)
+ except Exception as error:
+ logging.error(f"Error encountered while monitoring: {error}")
+ logging.info("Stopping file monitoring.")
+ try:
+ os.remove(stop_file)
+ except IOError:
+ logging.error("Unable to remove 'shutdown' file.")
+
+ omni_link.shutdown()
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="PyEnSight Omniverse Geometry Service")
+ parser.add_argument(
+ "destination", default="", type=str, help="The directory to save the USD scene graph into."
+ )
+ parser.add_argument(
+ "--verbose",
+ metavar="verbose_level",
+ default=0,
+ type=partial(int_range_type, min_value=0, max_value=3),
+ help="Enable logging information (0-3). Default: 0",
+ )
+ parser.add_argument(
+ "--log_file",
+ metavar="log_filename",
+ default="",
+ type=str,
+ help="Save logging output to the named log file instead of stdout.",
+ )
+ parser.add_argument(
+ "--dsg_uri",
+ default="grpc://127.0.0.1:5234",
+ type=str,
+ help="The URI of the EnSight Dynamic Scene Graph server. Default: grpc://127.0.0.1:5234",
+ )
+ parser.add_argument(
+ "--security_token",
+ metavar="token",
+ default="",
+ type=str,
+ help="Dynamic scene graph API security token. Default: none",
+ )
+ parser.add_argument(
+ "--monitor_directory",
+ metavar="glb_directory",
+ default="",
+ type=str,
+ help="Monitor specified directory for GLB files to be exported. Default: none",
+ )
+ parser.add_argument(
+ "--time_scale",
+ metavar="time_scale",
+ default=1.0,
+ type=float,
+ help="Scaling factor to be applied to input time values. Default: 1.0",
+ )
+ parser.add_argument(
+ "--normalize_geometry",
+ metavar="yes|no|true|false|1|0",
+ default=False,
+ type=str2bool_type,
+ help="Enable mapping of geometry to a normalized Cartesian space. Default: false",
+ )
+ parser.add_argument(
+ "--include_camera",
+ metavar="yes|no|true|false|1|0",
+ default=True,
+ type=str2bool_type,
+ help="Include the camera in the output USD scene graph. Default: true",
+ )
+ parser.add_argument(
+ "--temporal",
+ metavar="yes|no|true|false|1|0",
+ default=False,
+ type=str2bool_type,
+ help="Export a temporal scene graph. Default: false",
+ )
+ parser.add_argument(
+ "--oneshot",
+ metavar="yes|no|true|false|1|0",
+ default=False,
+ type=str2bool_type,
+ help="Convert a single geometry into USD and exit. Default: false",
+ )
+ line_default: Any = os.environ.get("ANSYS_OV_LINE_WIDTH", None)
+ if line_default is not None:
+ try:
+ line_default = float(line_default)
+ except ValueError:
+ line_default = None
+ # Potential future default: -0.0001
+ parser.add_argument(
+ "--line_width",
+ metavar="line_width",
+ default=line_default,
+ type=float,
+ help=f"Width of lines: >0=absolute size. <0=fraction of diagonal. 0=wireframe. Default: {line_default}",
+ )
+
+ # parse the command line
+ args = parser.parse_args()
+
+ # set up logging
+ level = logging.ERROR
+ if args.verbose == 1:
+ level = logging.WARN
+ elif args.verbose == 2:
+ level = logging.INFO
+ elif args.verbose == 3:
+ level = logging.DEBUG
+ log_args = dict(format="GeometryService:%(levelname)s:%(message)s", level=level)
+ if args.log_file:
+ log_args["filename"] = args.log_file
+ # start with a clean logging instance
+ while logging.root.hasHandlers():
+ logging.root.removeHandler(logging.root.handlers[0])
+ logging.basicConfig(**log_args) # type: ignore
+
+ # size of lines in data units or fraction of bounding box diagonal
+ use_lines = args.line_width is not None
+ line_width = -0.0001
+ if args.line_width is not None:
+ line_width = args.line_width
+
+ # Build the server object
+ server = OmniverseGeometryServer(
+ destination=args.destination,
+ dsg_uri=args.dsg_uri,
+ security_token=args.security_token,
+ monitor_directory=args.monitor_directory,
+ time_scale=args.time_scale,
+ normalize_geometry=args.normalize_geometry,
+ vrmode=not args.include_camera,
+ temporal=args.temporal,
+ line_width=line_width,
+ use_lines=use_lines,
+ )
+
+ # run the server
+ logging.info("Server startup.")
+ if server.monitor_directory:
+ server.run_monitor()
+ else:
+ server.run_server(one_shot=args.oneshot)
+ logging.info("Server shutdown.")
diff --git a/src/ansys/pyensight/core/utils/omniverse_dsg_server.py b/src/ansys/pyensight/core/utils/omniverse_dsg_server.py
index b93e392fa06..06bd776d100 100644
--- a/src/ansys/pyensight/core/utils/omniverse_dsg_server.py
+++ b/src/ansys/pyensight/core/utils/omniverse_dsg_server.py
@@ -1,882 +1,882 @@
-#
-# This file borrows heavily from the Omniverse Example Connector which
-# contains the following notice:
-#
-###############################################################################
-# Copyright 2020 NVIDIA Corporation
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy of
-# this software and associated documentation files (the "Software"), to deal in
-# the Software without restriction, including without limitation the rights to
-# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-# the Software, and to permit persons to whom the Software is furnished to do so,
-# subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-###############################################################################
-import logging
-import math
-import os
-import shutil
-import tempfile
-from typing import Any, Dict, List, Optional
-
-from ansys.pyensight.core.utils.dsg_server import Part, UpdateHandler
-import numpy
-import png
-from pxr import Gf, Kind, Sdf, Usd, UsdGeom, UsdLux, UsdShade
-
-
-class OmniverseWrapper(object):
- def __init__(
- self,
- live_edit: bool = False,
- destination: str = "",
- line_width: float = -0.0001,
- use_lines: bool = False,
- ) -> None:
- self._cleaned_index = 0
- self._cleaned_names: dict = {}
- self._connectionStatusSubscription = None
- self._stage = None
- self._destinationPath: str = ""
- self._old_stages: list = []
- self._stagename = "dsg_scene.usd"
- self._live_edit: bool = live_edit
- if self._live_edit:
- self._stagename = "dsg_scene.live"
- # USD time slider will have 120 tick marks per second of animation time
- self._time_codes_per_second = 120.0
-
- if destination:
- self.destination = destination
-
- self._line_width = line_width
- self._use_lines = use_lines
-
- @property
- def destination(self) -> str:
- """The current output directory."""
- return self._destinationPath
-
- @destination.setter
- def destination(self, directory: str) -> None:
- self._destinationPath = directory
- if not self.is_valid_destination(directory):
- logging.warning(f"Invalid destination path: {directory}")
-
- @property
- def line_width(self) -> float:
- return self._line_width
-
- @line_width.setter
- def line_width(self, line_width: float) -> None:
- self._line_width = line_width
-
- @property
- def use_lines(self) -> bool:
- return self._use_lines
-
- def shutdown(self) -> None:
- """
-        Shut down the connection to Omniverse cleanly.
- """
- self._connectionStatusSubscription = None
-
- @staticmethod
- def is_valid_destination(path: str) -> bool:
- """
- Verify that the target path is a writeable directory.
-
- Parameters
- ----------
- path
- The path to check
-
- Returns
- -------
- True if the path is a writeable directory, False otherwise.
- """
- return os.access(path, os.W_OK)
-
- def stage_url(self, name: Optional[str] = None) -> str:
- """
- For a given object name, create the URL for the item.
- Parameters
- ----------
- name: the name of the object to generate the URL for. If None, it will be the URL for the
- stage name.
-
- Returns
- -------
- The URL for the object.
- """
- if name is None:
- name = self._stagename
- return os.path.join(self._destinationPath, name)
-
- def delete_old_stages(self) -> None:
- """
- Remove all the stages included in the "_old_stages" list.
- If a stage is in use and cannot be removed, keep its name in _old_stages
- to retry later.
- """
- stages_unremoved = list()
- while self._old_stages:
- stage = self._old_stages.pop()
- try:
- if os.path.isfile(stage):
- os.remove(stage)
- else:
- shutil.rmtree(stage, ignore_errors=True, onerror=None)
- except OSError:
- stages_unremoved.append(stage)
- self._old_stages = stages_unremoved
-
- def create_new_stage(self) -> None:
- """
-        Create a new stage using the current stage name.
- """
- logging.info(f"Creating Omniverse stage: {self.stage_url()}")
- if self._stage:
- self._stage.Unload()
- self._stage = None
- self.delete_old_stages()
- self._stage = Usd.Stage.CreateNew(self.stage_url())
- # record the stage in the "_old_stages" list.
- self._old_stages.append(self.stage_url())
- UsdGeom.SetStageUpAxis(self._stage, UsdGeom.Tokens.y)
- # in M
- UsdGeom.SetStageMetersPerUnit(self._stage, 1.0)
- logging.info(f"Created stage: {self.stage_url()}")
-
- def save_stage(self, comment: str = "") -> None:
- """
- For live connections, save the current edit and allow live processing.
-
- Presently, live connections are disabled.
- """
- self._stage.GetRootLayer().Save() # type:ignore
-
- def clear_cleaned_names(self) -> None:
- """
- Clear the list of cleaned names
- """
- self._cleaned_names = {}
- self._cleaned_index = 0
-
- def clean_name(self, name: str, id_name: Any = None) -> str:
- """Generate a valid USD name
-
- From a base (EnSight) varname, partname, etc. and the DSG id, generate
- a unique, valid USD name. Save the names so that if the same name
- comes in again, the previously computed name is returned and if the
- manipulation results in a conflict, the name can be made unique.
-
- Parameters
- ----------
- name:
- The name to generate a USD name for.
-
- id_name:
- The DSG id associated with the DSG name, if any.
-
- Returns
- -------
- A unique USD name.
- """
- orig_name = name
- # return any previously generated name
- if (name, id_name) in self._cleaned_names:
- return self._cleaned_names[(name, id_name)]
- # replace invalid characters. EnSight uses a number of characters that are illegal in USD names.
- replacements = {
- ord("+"): "_",
- ord("-"): "_",
- ord("."): "_",
- ord(":"): "_",
- ord("["): "_",
- ord("]"): "_",
- ord("("): "_",
- ord(")"): "_",
- ord("<"): "_",
- ord(">"): "_",
- ord("/"): "_",
- ord("="): "_",
- ord(","): "_",
- ord(" "): "_",
- ord("\\"): "_",
- }
- name = name.translate(replacements)
- if name[0].isdigit():
- name = f"_{name}"
- if id_name is not None:
- name = name + "_" + str(id_name)
- if name in self._cleaned_names.values():
- # Make the name unique
- while f"{name}_{self._cleaned_index}" in self._cleaned_names.values():
- self._cleaned_index += 1
- name = f"{name}_{self._cleaned_index}"
- # store off the cleaned name
- self._cleaned_names[(orig_name, id_name)] = name
- return name
-
- @staticmethod
- def decompose_matrix(values: Any) -> Any:
- """
-        Decompose an array of floats (representing a 4x4 matrix) into scale, rotation and translation.
-
- Parameters
- ----------
- values:
- 16 values (input to Gf.Matrix4f CTOR)
-
- Returns
- -------
- (scale, rotation, translation)
- """
- # ang_convert = 180.0/math.pi
- ang_convert = 1.0
- trans_convert = 1.0
- m = Gf.Matrix4f(*values)
- m = m.GetTranspose()
-
- s = math.sqrt(m[0][0] * m[0][0] + m[0][1] * m[0][1] + m[0][2] * m[0][2])
- # cleanup scale
- m = m.RemoveScaleShear()
- # r = m.ExtractRotation()
- R = m.ExtractRotationMatrix()
- r = [
- math.atan2(R[2][1], R[2][2]) * ang_convert,
- math.atan2(-R[2][0], 1.0) * ang_convert,
- math.atan2(R[1][0], R[0][0]) * ang_convert,
- ]
- t = m.ExtractTranslation()
- t = [t[0] * trans_convert, t[1] * trans_convert, t[2] * trans_convert]
- return s, r, t
-
- def create_dsg_mesh_block(
- self,
- name,
- id,
- part_hash,
- parent_prim,
- verts,
- conn,
- normals,
- tcoords,
- matrix=[1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0],
- diffuse=[1.0, 1.0, 1.0, 1.0],
- variable=None,
- timeline=[0.0, 0.0],
- first_timestep=False,
- mat_info={},
- ):
- # 1D texture map for variables https://graphics.pixar.com/usd/release/tut_simple_shading.html
- # create the part usd object
- partname = self.clean_name(name + part_hash.hexdigest())
- stage_name = "/Parts/" + partname + ".usd"
- part_stage_url = self.stage_url(os.path.join("Parts", partname + ".usd"))
- part_stage = None
-
- if not os.path.exists(part_stage_url):
- part_stage = Usd.Stage.CreateNew(part_stage_url)
- self._old_stages.append(part_stage_url)
- xform = UsdGeom.Xform.Define(part_stage, "/" + partname)
- mesh = UsdGeom.Mesh.Define(part_stage, "/" + partname + "/Mesh")
- # mesh.CreateDisplayColorAttr()
- mesh.CreateDoubleSidedAttr().Set(True)
- mesh.CreatePointsAttr(verts)
- mesh.CreateNormalsAttr(normals)
- mesh.CreateFaceVertexCountsAttr([3] * (conn.size // 3))
- mesh.CreateFaceVertexIndicesAttr(conn)
- if (tcoords is not None) and variable:
- primvarsAPI = UsdGeom.PrimvarsAPI(mesh)
- texCoords = primvarsAPI.CreatePrimvar(
- "st", Sdf.ValueTypeNames.TexCoord2fArray, UsdGeom.Tokens.varying
- )
- texCoords.Set(tcoords)
- texCoords.SetInterpolation("vertex")
- part_prim = part_stage.GetPrimAtPath("/" + partname)
- part_stage.SetDefaultPrim(part_prim)
-
- # Currently, this will never happen, but it is a setup for rigid body transforms
- # At present, the group transforms have been cooked into the vertices so this is not needed
- matrixOp = xform.AddXformOp(
- UsdGeom.XformOp.TypeTransform, UsdGeom.XformOp.PrecisionDouble
- )
- matrixOp.Set(Gf.Matrix4d(*matrix).GetTranspose())
-
- self.create_dsg_material(
- part_stage,
- mesh,
- "/" + partname,
- diffuse=diffuse,
- variable=variable,
- mat_info=mat_info,
- )
-
- timestep_prim = self.add_timestep_group(parent_prim, timeline, first_timestep)
-
- # glue it into our stage
- path = timestep_prim.GetPath().AppendChild("part_ref_" + partname)
- part_ref = self._stage.OverridePrim(path)
- part_ref.GetReferences().AddReference("." + stage_name)
-
- if part_stage is not None:
- part_stage.GetRootLayer().Save()
-
- return part_stage_url
-
- def add_timestep_group(
- self, parent_prim: UsdGeom.Xform, timeline: List[float], first_timestep: bool
- ) -> UsdGeom.Xform:
- # add a layer in the group hierarchy for the timestep
- timestep_group_path = parent_prim.GetPath().AppendChild(
- self.clean_name("t" + str(timeline[0]), None)
- )
- timestep_prim = UsdGeom.Xform.Define(self._stage, timestep_group_path)
- visibility_attr = UsdGeom.Imageable(timestep_prim).GetVisibilityAttr()
- if first_timestep:
- visibility_attr.Set("inherited", Usd.TimeCode.EarliestTime())
- else:
- visibility_attr.Set("invisible", Usd.TimeCode.EarliestTime())
- visibility_attr.Set("inherited", timeline[0] * self._time_codes_per_second)
- # Final timestep has timeline[0]==timeline[1]. Leave final timestep visible.
- if timeline[0] < timeline[1]:
- visibility_attr.Set("invisible", timeline[1] * self._time_codes_per_second)
- return timestep_prim
-
- def create_dsg_lines(
- self,
- name,
- id,
- part_hash,
- parent_prim,
- verts,
- tcoords,
- matrix=[1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0],
- diffuse=[1.0, 1.0, 1.0, 1.0],
- variable=None,
- timeline=[0.0, 0.0],
- first_timestep=False,
- mat_info={},
- ):
- # TODO: GLB extension maps to DSG PART attribute map
- width = self.line_width
- wireframe = width == 0.0
- if width < 0.0:
- tmp = verts.reshape(-1, 3)
- mins = numpy.min(tmp, axis=0)
- maxs = numpy.max(tmp, axis=0)
- dx = maxs[0] - mins[0]
- dy = maxs[1] - mins[1]
- dz = maxs[2] - mins[2]
- diagonal = math.sqrt(dx * dx + dy * dy + dz * dz)
- width = diagonal * math.fabs(width)
- self.line_width = width
-
- # include the line width in the hash
- part_hash.update(str(self.line_width).encode("utf-8"))
-
- # 1D texture map for variables https://graphics.pixar.com/usd/release/tut_simple_shading.html
- # create the part usd object
- partname = self.clean_name(name + part_hash.hexdigest()) + "_l"
- stage_name = "/Parts/" + partname + ".usd"
- part_stage_url = self.stage_url(os.path.join("Parts", partname + ".usd"))
- part_stage = None
-
- var_cmd = variable
-
- if not os.path.exists(part_stage_url):
- part_stage = Usd.Stage.CreateNew(part_stage_url)
- self._old_stages.append(part_stage_url)
- xform = UsdGeom.Xform.Define(part_stage, "/" + partname)
- lines = UsdGeom.BasisCurves.Define(part_stage, "/" + partname + "/Lines")
- lines.CreateDoubleSidedAttr().Set(True)
- lines.CreatePointsAttr(verts)
- lines.CreateCurveVertexCountsAttr([2] * (verts.size // 6))
- lines.CreatePurposeAttr().Set("render")
- lines.CreateTypeAttr().Set("linear")
- lines.CreateWidthsAttr([width])
- lines.SetWidthsInterpolation("constant")
-            # Rounded endpoints are a primvar
- primvarsAPI = UsdGeom.PrimvarsAPI(lines)
- endCaps = primvarsAPI.CreatePrimvar(
- "endcaps", Sdf.ValueTypeNames.Int, UsdGeom.Tokens.constant
- )
- endCaps.Set(2) # Rounded = 2
-
- prim = lines.GetPrim()
- prim.CreateAttribute(
- "omni:scene:visualization:drawWireframe", Sdf.ValueTypeNames.Bool
- ).Set(wireframe)
- if (tcoords is not None) and var_cmd:
- primvarsAPI = UsdGeom.PrimvarsAPI(lines)
- texCoords = primvarsAPI.CreatePrimvar(
- "st", Sdf.ValueTypeNames.TexCoord2fArray, UsdGeom.Tokens.varying
- )
- texCoords.Set(tcoords)
- texCoords.SetInterpolation("vertex")
- part_prim = part_stage.GetPrimAtPath("/" + partname)
- part_stage.SetDefaultPrim(part_prim)
-
- # Currently, this will never happen, but it is a setup for rigid body transforms
- # At present, the group transforms have been cooked into the vertices so this is not needed
- matrixOp = xform.AddXformOp(
- UsdGeom.XformOp.TypeTransform, UsdGeom.XformOp.PrecisionDouble
- )
- matrixOp.Set(Gf.Matrix4d(*matrix).GetTranspose())
-
- self.create_dsg_material(
- part_stage,
- lines,
- "/" + partname,
- diffuse=diffuse,
- variable=var_cmd,
- mat_info=mat_info,
- )
-
- timestep_prim = self.add_timestep_group(parent_prim, timeline, first_timestep)
-
- # glue it into our stage
- path = timestep_prim.GetPath().AppendChild("part_ref_" + partname)
- part_ref = self._stage.OverridePrim(path)
- part_ref.GetReferences().AddReference("." + stage_name)
-
- if part_stage is not None:
- part_stage.GetRootLayer().Save()
-
- return part_stage_url
-
- def create_dsg_points(
- self,
- name,
- id,
- part_hash,
- parent_prim,
- verts,
- sizes,
- colors,
- matrix=[1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0],
- default_size=1.0,
- default_color=[1.0, 1.0, 1.0, 1.0],
- timeline=[0.0, 0.0],
- first_timestep=False,
- ):
- # create the part usd object
- partname = self.clean_name(name + part_hash.hexdigest())
- stage_name = "/Parts/" + partname + ".usd"
- part_stage_url = self.stage_url(os.path.join("Parts", partname + ".usd"))
- part_stage = None
-
- if not os.path.exists(part_stage_url):
- part_stage = Usd.Stage.CreateNew(part_stage_url)
- self._old_stages.append(part_stage_url)
- xform = UsdGeom.Xform.Define(part_stage, "/" + partname)
-
- points = UsdGeom.Points.Define(part_stage, "/" + partname + "/Points")
- # points.GetPointsAttr().Set(Vt.Vec3fArray(verts.tolist()))
- points.GetPointsAttr().Set(verts)
- if sizes is not None and sizes.size == (verts.size // 3):
- points.GetWidthsAttr().Set(sizes)
- else:
- points.GetWidthsAttr().Set([default_size] * (verts.size // 3))
-
- colorAttr = points.GetPrim().GetAttribute("primvars:displayColor")
- colorAttr.SetMetadata("interpolation", "vertex")
- if colors is not None and colors.size == verts.size:
- colorAttr.Set(colors)
- else:
- colorAttr.Set([default_color[0:3]] * (verts.size // 3))
-
- part_prim = part_stage.GetPrimAtPath("/" + partname)
- part_stage.SetDefaultPrim(part_prim)
-
- # Currently, this will never happen, but it is a setup for rigid body transforms
- # At present, the group transforms have been cooked into the vertices so this is not needed
- matrixOp = xform.AddXformOp(
- UsdGeom.XformOp.TypeTransform, UsdGeom.XformOp.PrecisionDouble
- )
- matrixOp.Set(Gf.Matrix4d(*matrix).GetTranspose())
-
- timestep_prim = self.add_timestep_group(parent_prim, timeline, first_timestep)
-
- # glue it into our stage
- path = timestep_prim.GetPath().AppendChild("part_ref_" + partname)
- part_ref = self._stage.OverridePrim(path)
- part_ref.GetReferences().AddReference("." + stage_name)
-
- if part_stage is not None:
- part_stage.GetRootLayer().Save()
-
- return part_stage_url
-
- def create_dsg_material(
- self, stage, mesh, root_name, diffuse=[1.0, 1.0, 1.0, 1.0], variable=None, mat_info={}
- ):
- # https://graphics.pixar.com/usd/release/spec_usdpreviewsurface.html
- # Use ior==1.0 to be more like EnSight - rays of light do not bend when passing through transparent objs
- material = UsdShade.Material.Define(stage, root_name + "/Material")
- pbrShader = UsdShade.Shader.Define(stage, root_name + "/Material/PBRShader")
- pbrShader.CreateIdAttr("UsdPreviewSurface")
- smoothness = mat_info.get("smoothness", 0.0)
- pbrShader.CreateInput("roughness", Sdf.ValueTypeNames.Float).Set(1.0 - smoothness)
- metallic = mat_info.get("metallic", 0.0)
- pbrShader.CreateInput("metallic", Sdf.ValueTypeNames.Float).Set(metallic)
- opacity = mat_info.get("opacity", diffuse[3])
- pbrShader.CreateInput("opacity", Sdf.ValueTypeNames.Float).Set(opacity)
- pbrShader.CreateInput("ior", Sdf.ValueTypeNames.Float).Set(1.0)
- pbrShader.CreateInput("useSpecularWorkflow", Sdf.ValueTypeNames.Int).Set(1)
- if variable:
- stReader = UsdShade.Shader.Define(stage, root_name + "/Material/stReader")
- stReader.CreateIdAttr("UsdPrimvarReader_float2")
- diffuseTextureSampler = UsdShade.Shader.Define(
- stage, root_name + "/Material/diffuseTexture"
- )
- diffuseTextureSampler.CreateIdAttr("UsdUVTexture")
- name = self.clean_name(variable.name)
- filename = f"./Textures/palette_{name}.png"
- diffuseTextureSampler.CreateInput("file", Sdf.ValueTypeNames.Asset).Set(filename)
- diffuseTextureSampler.CreateInput("st", Sdf.ValueTypeNames.Float2).ConnectToSource(
- stReader.ConnectableAPI(), "result"
- )
- diffuseTextureSampler.CreateOutput("rgb", Sdf.ValueTypeNames.Float3)
- pbrShader.CreateInput("diffuseColor", Sdf.ValueTypeNames.Color3f).ConnectToSource(
- diffuseTextureSampler.ConnectableAPI(), "rgb"
- )
- stInput = material.CreateInput("frame:stPrimvarName", Sdf.ValueTypeNames.Token)
- stInput.Set("st")
- stReader.CreateInput("varname", Sdf.ValueTypeNames.Token).ConnectToSource(stInput)
- else:
- # The colors are a mixture of content from the DSG PART protocol buffer
- # and the JSON material block from the material_name field.
- kd = 1.0
- diffuse_color = [diffuse[0], diffuse[1], diffuse[2]]
- ke = 1.0
- emissive_color = [0.0, 0.0, 0.0]
- ks = 1.0
- specular_color = [0.0, 0.0, 0.0]
- mat_name = mat_info.get("name", "")
- if mat_name.startswith("ensight"):
- diffuse_color = mat_info.get("diffuse", diffuse_color)
- if mat_name != "ensight/Default":
- ke = mat_info.get("ke", ke)
- emissive_color = mat_info.get("emissive", emissive_color)
- ks = mat_info.get("ks", ks)
- specular_color = mat_info.get("specular", specular_color)
- # Set the colors
- color = Gf.Vec3f(diffuse_color[0] * kd, diffuse_color[1] * kd, diffuse_color[2] * kd)
- pbrShader.CreateInput("diffuseColor", Sdf.ValueTypeNames.Color3f).Set(color)
- color = Gf.Vec3f(emissive_color[0] * ke, emissive_color[1] * ke, emissive_color[2] * ke)
- pbrShader.CreateInput("emissiveColor", Sdf.ValueTypeNames.Color3f).Set(color)
- color = Gf.Vec3f(specular_color[0] * ks, specular_color[1] * ks, specular_color[2] * ks)
- pbrShader.CreateInput("specularColor", Sdf.ValueTypeNames.Color3f).Set(color)
-
- material.CreateSurfaceOutput().ConnectToSource(pbrShader.ConnectableAPI(), "surface")
- UsdShade.MaterialBindingAPI(mesh).Bind(material)
-
- return material
-
- def create_dsg_variable_textures(self, variables):
- with tempfile.TemporaryDirectory() as tempdir:
- # make folder: {tempdir}/scratch/Textures/{palette_*.png}
- os.makedirs(f"{tempdir}/scratch/Textures", exist_ok=True)
- for var in variables.values():
- data = bytearray(var.texture)
- n_pixels = int(len(data) / 4)
- row = []
- for i in range(n_pixels):
- row.append(data[i * 4 + 0])
- row.append(data[i * 4 + 1])
- row.append(data[i * 4 + 2])
- io = png.Writer(width=n_pixels, height=2, bitdepth=8, greyscale=False)
- rows = [row, row]
- name = self.clean_name(var.name)
- with open(f"{tempdir}/scratch/Textures/palette_{name}.png", "wb") as fp:
- io.write(fp, rows)
- uriPath = self._destinationPath + "/Parts/Textures"
- shutil.rmtree(uriPath, ignore_errors=True, onerror=None)
- shutil.copytree(f"{tempdir}/scratch/Textures", uriPath)
-
- def create_dsg_root(self):
- root_name = "/Root"
- root_prim = UsdGeom.Xform.Define(self._stage, root_name)
- # Define the defaultPrim as the /Root prim
- root_prim = self._stage.GetPrimAtPath(root_name)
- self._stage.SetDefaultPrim(root_prim)
- return root_prim
-
- def update_camera(self, camera):
- if camera is not None:
- cam_name = "/Root/Cam"
- cam_prim = UsdGeom.Xform.Define(self._stage, cam_name)
- cam_pos = Gf.Vec3d(camera.lookfrom[0], camera.lookfrom[1], camera.lookfrom[2])
- target_pos = Gf.Vec3d(camera.lookat[0], camera.lookat[1], camera.lookat[2])
- up_vec = Gf.Vec3d(camera.upvector[0], camera.upvector[1], camera.upvector[2])
- cam_prim = self._stage.GetPrimAtPath(cam_name)
- geom_cam = UsdGeom.Camera(cam_prim)
- if not geom_cam:
- geom_cam = UsdGeom.Camera.Define(self._stage, cam_name)
- # Set camera values
- # center of interest attribute unique for Kit defines the pivot for tumbling the camera
- # Set as an attribute on the prim
- coi_attr = cam_prim.GetAttribute("omni:kit:centerOfInterest")
- if not coi_attr.IsValid():
- coi_attr = cam_prim.CreateAttribute(
- "omni:kit:centerOfInterest", Sdf.ValueTypeNames.Vector3d
- )
- coi_attr.Set(target_pos)
- # get the camera
- cam = geom_cam.GetCamera()
-            # Not entirely clear why this is correct, but empirically it works.
- cam.focalLength = camera.fieldofview
- cam.clippingRange = Gf.Range1f(0.1, 10)
- look_at = Gf.Matrix4d()
- look_at.SetLookAt(cam_pos, target_pos, up_vec)
- trans_row = look_at.GetRow(3)
- trans_row = Gf.Vec4d(-trans_row[0], -trans_row[1], -trans_row[2], trans_row[3])
- look_at.SetRow(3, trans_row)
- cam.transform = look_at
-
- # set the updated camera
- geom_cam.SetFromCamera(cam)
-
- def create_dsg_group(
- self,
- name: str,
- parent_prim,
- obj_type: Any = None,
- matrix: List[float] = [
- 1.0,
- 0.0,
- 0.0,
- 0.0,
- 0.0,
- 1.0,
- 0.0,
- 0.0,
- 0.0,
- 0.0,
- 1.0,
- 0.0,
- 0.0,
- 0.0,
- 0.0,
- 1.0,
- ],
- ):
- path = parent_prim.GetPath().AppendChild(self.clean_name(name))
- group_prim = UsdGeom.Xform.Get(self._stage, path)
- if not group_prim:
- group_prim = UsdGeom.Xform.Define(self._stage, path)
- # At present, the group transforms have been cooked into the vertices so this is not needed
- matrix_op = group_prim.AddXformOp(
- UsdGeom.XformOp.TypeTransform, UsdGeom.XformOp.PrecisionDouble
- )
- matrix_op.Set(Gf.Matrix4d(*matrix).GetTranspose())
- # Map kinds
- kind = Kind.Tokens.group
- if obj_type == "ENS_CASE":
- kind = Kind.Tokens.assembly
- elif obj_type == "ENS_PART":
- kind = Kind.Tokens.component
- Usd.ModelAPI(group_prim).SetKind(kind)
- logging.info(f"Created group:'{name}' {str(obj_type)}")
- return group_prim
-
- def uploadMaterial(self):
- uriPath = self._destinationPath + "/Materials"
- shutil.rmtree(uriPath, ignore_errors=True, onerror=None)
- fullpath = os.path.join(os.path.dirname(__file__), "resources", "Materials")
- shutil.copytree(fullpath, uriPath)
-
- # Create a dome light in the scene.
- def createDomeLight(self, texturePath):
- newLight = UsdLux.DomeLight.Define(self._stage, "/Root/DomeLight")
- newLight.CreateIntensityAttr(2200.0)
- newLight.CreateTextureFileAttr(texturePath)
- newLight.CreateTextureFormatAttr("latlong")
-
- # Set rotation on domelight
- xForm = newLight
- rotateOp = xForm.AddXformOp(UsdGeom.XformOp.TypeRotateZYX, UsdGeom.XformOp.PrecisionFloat)
- rotateOp.Set(Gf.Vec3f(270, 0, 0))
-
-
-class OmniverseUpdateHandler(UpdateHandler):
- """
- Implement the Omniverse glue to a DSGSession instance
- """
-
- def __init__(self, omni: OmniverseWrapper):
- super().__init__()
- self._omni = omni
- self._group_prims: Dict[int, Any] = dict()
- self._root_prim = None
- self._sent_textures = False
-
- def add_group(self, id: int, view: bool = False) -> None:
- super().add_group(id, view)
- group = self.session.groups[id]
- if not view:
- parent_prim = self._group_prims[group.parent_id]
- obj_type = self.get_dsg_cmd_attribute(group, "ENS_OBJ_TYPE")
- matrix = self.group_matrix(group)
- prim = self._omni.create_dsg_group(
- group.name, parent_prim, matrix=matrix, obj_type=obj_type
- )
- self._group_prims[id] = prim
- else:
- # Map a view command into a new Omniverse stage and populate it with materials/lights.
- # Create a new root stage in Omniverse
-
- # Create or update the root group/camera
- if not self.session.vrmode:
- self._omni.update_camera(camera=group)
-
- # record
- self._group_prims[id] = self._root_prim
-
- if self._omni._stage is not None:
- self._omni._stage.SetStartTimeCode(
- self.session.time_limits[0] * self._omni._time_codes_per_second
- )
- self._omni._stage.SetEndTimeCode(
- self.session.time_limits[1] * self._omni._time_codes_per_second
- )
- self._omni._stage.SetTimeCodesPerSecond(self._omni._time_codes_per_second)
-
- # Send the variable textures. Safe to do so once the first view is processed.
- if not self._sent_textures:
- self._omni.create_dsg_variable_textures(self.session.variables)
- self._sent_textures = True
-
- def add_variable(self, id: int) -> None:
- super().add_variable(id)
-
- def finalize_part(self, part: Part) -> None:
- # generate an Omniverse compliant mesh from the Part
- if part is None or part.cmd is None:
- return
- parent_prim = self._group_prims[part.cmd.parent_id]
- obj_id = self.session.mesh_block_count
- matrix = part.cmd.matrix4x4
- name = part.cmd.name
- color = [
- part.cmd.fill_color[0] * part.cmd.diffuse,
- part.cmd.fill_color[1] * part.cmd.diffuse,
- part.cmd.fill_color[2] * part.cmd.diffuse,
- part.cmd.fill_color[3],
- ]
-
- mat_info = part.material()
- if part.cmd.render == part.cmd.CONNECTIVITY:
- has_triangles = False
- command, verts, conn, normals, tcoords, var_cmd = part.nodal_surface_rep()
- if command is not None:
- has_triangles = True
- # Generate the mesh block
- _ = self._omni.create_dsg_mesh_block(
- name,
- obj_id,
- part.hash,
- parent_prim,
- verts,
- conn,
- normals,
- tcoords,
- matrix=matrix,
- diffuse=color,
- variable=var_cmd,
- timeline=self.session.cur_timeline,
- first_timestep=(self.session.cur_timeline[0] == self.session.time_limits[0]),
- mat_info=mat_info,
- )
- if self._omni.use_lines:
- command, verts, tcoords, var_cmd = part.line_rep()
- if command is not None:
- # If there are no triangle (ideally if these are not hidden line
- # edges), then use the base color for the part. If there are
- # triangles, then assume these are hidden line edges and use the
- # line_color.
- line_color = color
- if has_triangles:
- line_color = [
- part.cmd.line_color[0] * part.cmd.diffuse,
- part.cmd.line_color[1] * part.cmd.diffuse,
- part.cmd.line_color[2] * part.cmd.diffuse,
- part.cmd.line_color[3],
- ]
-                    # TODO: texture coordinates on lines are currently invalid in OV
- var_cmd = None
- tcoords = None
- # Generate the lines
- _ = self._omni.create_dsg_lines(
- name,
- obj_id,
- part.hash,
- parent_prim,
- verts,
- tcoords,
- matrix=matrix,
- diffuse=line_color,
- variable=var_cmd,
- timeline=self.session.cur_timeline,
- first_timestep=(
- self.session.cur_timeline[0] == self.session.time_limits[0]
- ),
- )
-
- elif part.cmd.render == part.cmd.NODES:
- command, verts, sizes, colors, var_cmd = part.point_rep()
- if command is not None:
- _ = self._omni.create_dsg_points(
- name,
- obj_id,
- part.hash,
- parent_prim,
- verts,
- sizes,
- colors,
- matrix=matrix,
- default_size=part.cmd.node_size_default,
- default_color=color,
- timeline=self.session.cur_timeline,
- first_timestep=(self.session.cur_timeline[0] == self.session.time_limits[0]),
- )
- super().finalize_part(part)
-
- def start_connection(self) -> None:
- super().start_connection()
-
- def end_connection(self) -> None:
- super().end_connection()
-
- def begin_update(self) -> None:
- super().begin_update()
- # restart the name tables
- self._omni.clear_cleaned_names()
- # clear the group Omni prims list
- self._group_prims = dict()
-
- self._omni.create_new_stage()
- self._root_prim = self._omni.create_dsg_root()
- # Create a distance and dome light in the scene
- self._omni.createDomeLight("./Materials/000_sky.exr")
- # Upload a material to the Omniverse server
- self._omni.uploadMaterial()
- self._sent_textures = False
-
- def end_update(self) -> None:
- super().end_update()
- # Stage update complete
- self._omni.save_stage()
+#
+# This file borrows heavily from the Omniverse Example Connector which
+# contains the following notice:
+#
+###############################################################################
+# Copyright 2020 NVIDIA Corporation
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy of
+# this software and associated documentation files (the "Software"), to deal in
+# the Software without restriction, including without limitation the rights to
+# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+# the Software, and to permit persons to whom the Software is furnished to do so,
+# subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+###############################################################################
+import logging
+import math
+import os
+import shutil
+import tempfile
+from typing import Any, Dict, List, Optional
+
+from ansys.pyensight.core.utils.dsg_server import Part, UpdateHandler
+import numpy
+import png
+from pxr import Gf, Kind, Sdf, Usd, UsdGeom, UsdLux, UsdShade
+
+
+class OmniverseWrapper(object):
+ def __init__(
+ self,
+ live_edit: bool = False,
+ destination: str = "",
+ line_width: float = -0.0001,
+ use_lines: bool = False,
+ ) -> None:
+ self._cleaned_index = 0
+ self._cleaned_names: dict = {}
+ self._connectionStatusSubscription = None
+ self._stage = None
+ self._destinationPath: str = ""
+ self._old_stages: list = []
+ self._stagename = "dsg_scene.usd"
+ self._live_edit: bool = live_edit
+ if self._live_edit:
+ self._stagename = "dsg_scene.live"
+ # USD time slider will have 120 tick marks per second of animation time
+ self._time_codes_per_second = 120.0
+
+ if destination:
+ self.destination = destination
+
+ self._line_width = line_width
+ self._use_lines = use_lines
+
+ @property
+ def destination(self) -> str:
+ """The current output directory."""
+ return self._destinationPath
+
+ @destination.setter
+ def destination(self, directory: str) -> None:
+ self._destinationPath = directory
+ if not self.is_valid_destination(directory):
+ logging.warning(f"Invalid destination path: {directory}")
+
+ @property
+ def line_width(self) -> float:
+ return self._line_width
+
+ @line_width.setter
+ def line_width(self, line_width: float) -> None:
+ self._line_width = line_width
+
+ @property
+ def use_lines(self) -> bool:
+ return self._use_lines
+
+ def shutdown(self) -> None:
+ """
+        Shut down the connection to Omniverse cleanly.
+ """
+ self._connectionStatusSubscription = None
+
+ @staticmethod
+ def is_valid_destination(path: str) -> bool:
+ """
+ Verify that the target path is a writeable directory.
+
+ Parameters
+ ----------
+ path
+ The path to check
+
+ Returns
+ -------
+ True if the path is a writeable directory, False otherwise.
+ """
+ return os.access(path, os.W_OK)
+
+ def stage_url(self, name: Optional[str] = None) -> str:
+ """
+ For a given object name, create the URL for the item.
+ Parameters
+ ----------
+ name: the name of the object to generate the URL for. If None, it will be the URL for the
+ stage name.
+
+ Returns
+ -------
+ The URL for the object.
+ """
+ if name is None:
+ name = self._stagename
+ return os.path.join(self._destinationPath, name)
+
+ def delete_old_stages(self) -> None:
+ """
+ Remove all the stages included in the "_old_stages" list.
+ If a stage is in use and cannot be removed, keep its name in _old_stages
+ to retry later.
+ """
+ stages_unremoved = list()
+ while self._old_stages:
+ stage = self._old_stages.pop()
+ try:
+ if os.path.isfile(stage):
+ os.remove(stage)
+ else:
+ shutil.rmtree(stage, ignore_errors=True, onerror=None)
+ except OSError:
+ stages_unremoved.append(stage)
+ self._old_stages = stages_unremoved
+
+ def create_new_stage(self) -> None:
+ """
+        Create a new stage using the current stage name.
+ """
+ logging.info(f"Creating Omniverse stage: {self.stage_url()}")
+ if self._stage:
+ self._stage.Unload()
+ self._stage = None
+ self.delete_old_stages()
+ self._stage = Usd.Stage.CreateNew(self.stage_url())
+ # record the stage in the "_old_stages" list.
+ self._old_stages.append(self.stage_url())
+ UsdGeom.SetStageUpAxis(self._stage, UsdGeom.Tokens.y)
+ # in M
+ UsdGeom.SetStageMetersPerUnit(self._stage, 1.0)
+ logging.info(f"Created stage: {self.stage_url()}")
+
+ def save_stage(self, comment: str = "") -> None:
+ """
+ For live connections, save the current edit and allow live processing.
+
+ Presently, live connections are disabled.
+ """
+ self._stage.GetRootLayer().Save() # type:ignore
+
+ def clear_cleaned_names(self) -> None:
+ """
+ Clear the list of cleaned names
+ """
+ self._cleaned_names = {}
+ self._cleaned_index = 0
+
+ def clean_name(self, name: str, id_name: Any = None) -> str:
+ """Generate a valid USD name
+
+ From a base (EnSight) varname, partname, etc. and the DSG id, generate
+ a unique, valid USD name. Save the names so that if the same name
+ comes in again, the previously computed name is returned and if the
+ manipulation results in a conflict, the name can be made unique.
+
+ Parameters
+ ----------
+ name:
+ The name to generate a USD name for.
+
+ id_name:
+ The DSG id associated with the DSG name, if any.
+
+ Returns
+ -------
+ A unique USD name.
+ """
+ orig_name = name
+ # return any previously generated name
+ if (name, id_name) in self._cleaned_names:
+ return self._cleaned_names[(name, id_name)]
+ # replace invalid characters. EnSight uses a number of characters that are illegal in USD names.
+ replacements = {
+ ord("+"): "_",
+ ord("-"): "_",
+ ord("."): "_",
+ ord(":"): "_",
+ ord("["): "_",
+ ord("]"): "_",
+ ord("("): "_",
+ ord(")"): "_",
+ ord("<"): "_",
+ ord(">"): "_",
+ ord("/"): "_",
+ ord("="): "_",
+ ord(","): "_",
+ ord(" "): "_",
+ ord("\\"): "_",
+ }
+ name = name.translate(replacements)
+ if name[0].isdigit():
+ name = f"_{name}"
+ if id_name is not None:
+ name = name + "_" + str(id_name)
+ if name in self._cleaned_names.values():
+ # Make the name unique
+ while f"{name}_{self._cleaned_index}" in self._cleaned_names.values():
+ self._cleaned_index += 1
+ name = f"{name}_{self._cleaned_index}"
+ # store off the cleaned name
+ self._cleaned_names[(orig_name, id_name)] = name
+ return name
+
+ @staticmethod
+ def decompose_matrix(values: Any) -> Any:
+ """
+        Decompose an array of floats (representing a 4x4 matrix) into scale, rotation and translation.
+
+ Parameters
+ ----------
+ values:
+ 16 values (input to Gf.Matrix4f CTOR)
+
+ Returns
+ -------
+ (scale, rotation, translation)
+ """
+ # ang_convert = 180.0/math.pi
+ ang_convert = 1.0
+ trans_convert = 1.0
+ m = Gf.Matrix4f(*values)
+ m = m.GetTranspose()
+
+ s = math.sqrt(m[0][0] * m[0][0] + m[0][1] * m[0][1] + m[0][2] * m[0][2])
+ # cleanup scale
+ m = m.RemoveScaleShear()
+ # r = m.ExtractRotation()
+ R = m.ExtractRotationMatrix()
+ r = [
+ math.atan2(R[2][1], R[2][2]) * ang_convert,
+ math.atan2(-R[2][0], 1.0) * ang_convert,
+ math.atan2(R[1][0], R[0][0]) * ang_convert,
+ ]
+ t = m.ExtractTranslation()
+ t = [t[0] * trans_convert, t[1] * trans_convert, t[2] * trans_convert]
+ return s, r, t
+
+ def create_dsg_mesh_block(
+ self,
+ name,
+ id,
+ part_hash,
+ parent_prim,
+ verts,
+ conn,
+ normals,
+ tcoords,
+ matrix=[1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0],
+ diffuse=[1.0, 1.0, 1.0, 1.0],
+ variable=None,
+ timeline=[0.0, 0.0],
+ first_timestep=False,
+ mat_info={},
+ ):
+ # 1D texture map for variables https://graphics.pixar.com/usd/release/tut_simple_shading.html
+ # create the part usd object
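+        # Each part lives in its own layer, Parts/<name><hash>.usd, referenced
+        # from the main stage under a per-timestep Xform; because the content
+        # hash is part of the name, an unchanged part's layer is reused as-is.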
+ partname = self.clean_name(name + part_hash.hexdigest())
+ stage_name = "/Parts/" + partname + ".usd"
+ part_stage_url = self.stage_url(os.path.join("Parts", partname + ".usd"))
+ part_stage = None
+
+ if not os.path.exists(part_stage_url):
+ part_stage = Usd.Stage.CreateNew(part_stage_url)
+ self._old_stages.append(part_stage_url)
+ xform = UsdGeom.Xform.Define(part_stage, "/" + partname)
+ mesh = UsdGeom.Mesh.Define(part_stage, "/" + partname + "/Mesh")
+ # mesh.CreateDisplayColorAttr()
+ mesh.CreateDoubleSidedAttr().Set(True)
+ mesh.CreatePointsAttr(verts)
+ mesh.CreateNormalsAttr(normals)
+ mesh.CreateFaceVertexCountsAttr([3] * (conn.size // 3))
+ mesh.CreateFaceVertexIndicesAttr(conn)
+ if (tcoords is not None) and variable:
+ primvarsAPI = UsdGeom.PrimvarsAPI(mesh)
+ texCoords = primvarsAPI.CreatePrimvar(
+ "st", Sdf.ValueTypeNames.TexCoord2fArray, UsdGeom.Tokens.varying
+ )
+ texCoords.Set(tcoords)
+ texCoords.SetInterpolation("vertex")
+ part_prim = part_stage.GetPrimAtPath("/" + partname)
+ part_stage.SetDefaultPrim(part_prim)
+
+ # Currently, this will never happen, but it is a setup for rigid body transforms
+ # At present, the group transforms have been cooked into the vertices so this is not needed
+ matrixOp = xform.AddXformOp(
+ UsdGeom.XformOp.TypeTransform, UsdGeom.XformOp.PrecisionDouble
+ )
+ matrixOp.Set(Gf.Matrix4d(*matrix).GetTranspose())
+
+ self.create_dsg_material(
+ part_stage,
+ mesh,
+ "/" + partname,
+ diffuse=diffuse,
+ variable=variable,
+ mat_info=mat_info,
+ )
+
+ timestep_prim = self.add_timestep_group(parent_prim, timeline, first_timestep)
+
+ # glue it into our stage
+ path = timestep_prim.GetPath().AppendChild("part_ref_" + partname)
+ part_ref = self._stage.OverridePrim(path)
+ part_ref.GetReferences().AddReference("." + stage_name)
+
+ if part_stage is not None:
+ part_stage.GetRootLayer().Save()
+
+ return part_stage_url
+
+ def add_timestep_group(
+ self, parent_prim: UsdGeom.Xform, timeline: List[float], first_timestep: bool
+ ) -> UsdGeom.Xform:
+ # add a layer in the group hierarchy for the timestep
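+        # Visibility is keyed on the stage timeline: invisible before
+        # timeline[0], "inherited" (visible) over [timeline[0], timeline[1]),
+        # then invisible again. The first timestep starts visible and the final
+        # timestep (timeline[0] == timeline[1]) stays visible.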
+ timestep_group_path = parent_prim.GetPath().AppendChild(
+ self.clean_name("t" + str(timeline[0]), None)
+ )
+ timestep_prim = UsdGeom.Xform.Define(self._stage, timestep_group_path)
+ visibility_attr = UsdGeom.Imageable(timestep_prim).GetVisibilityAttr()
+ if first_timestep:
+ visibility_attr.Set("inherited", Usd.TimeCode.EarliestTime())
+ else:
+ visibility_attr.Set("invisible", Usd.TimeCode.EarliestTime())
+ visibility_attr.Set("inherited", timeline[0] * self._time_codes_per_second)
+ # Final timestep has timeline[0]==timeline[1]. Leave final timestep visible.
+ if timeline[0] < timeline[1]:
+ visibility_attr.Set("invisible", timeline[1] * self._time_codes_per_second)
+ return timestep_prim
+
+ def create_dsg_lines(
+ self,
+ name,
+ id,
+ part_hash,
+ parent_prim,
+ verts,
+ tcoords,
+ matrix=[1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0],
+ diffuse=[1.0, 1.0, 1.0, 1.0],
+ variable=None,
+ timeline=[0.0, 0.0],
+ first_timestep=False,
+ mat_info={},
+ ):
+ # TODO: GLB extension maps to DSG PART attribute map
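+        # Line width semantics match the --line_width flag: >0 is an absolute
+        # width in scene units, 0 requests wireframe rendering, and <0 is
+        # resolved here to that fraction of the part bounding box diagonal.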
+ width = self.line_width
+ wireframe = width == 0.0
+ if width < 0.0:
+ tmp = verts.reshape(-1, 3)
+ mins = numpy.min(tmp, axis=0)
+ maxs = numpy.max(tmp, axis=0)
+ dx = maxs[0] - mins[0]
+ dy = maxs[1] - mins[1]
+ dz = maxs[2] - mins[2]
+ diagonal = math.sqrt(dx * dx + dy * dy + dz * dz)
+ width = diagonal * math.fabs(width)
+ self.line_width = width
+
+ # include the line width in the hash
+ part_hash.update(str(self.line_width).encode("utf-8"))
+
+ # 1D texture map for variables https://graphics.pixar.com/usd/release/tut_simple_shading.html
+ # create the part usd object
+ partname = self.clean_name(name + part_hash.hexdigest()) + "_l"
+ stage_name = "/Parts/" + partname + ".usd"
+ part_stage_url = self.stage_url(os.path.join("Parts", partname + ".usd"))
+ part_stage = None
+
+ var_cmd = variable
+
+ if not os.path.exists(part_stage_url):
+ part_stage = Usd.Stage.CreateNew(part_stage_url)
+ self._old_stages.append(part_stage_url)
+ xform = UsdGeom.Xform.Define(part_stage, "/" + partname)
+ lines = UsdGeom.BasisCurves.Define(part_stage, "/" + partname + "/Lines")
+ lines.CreateDoubleSidedAttr().Set(True)
+ lines.CreatePointsAttr(verts)
+ lines.CreateCurveVertexCountsAttr([2] * (verts.size // 6))
+ lines.CreatePurposeAttr().Set("render")
+ lines.CreateTypeAttr().Set("linear")
+ lines.CreateWidthsAttr([width])
+ lines.SetWidthsInterpolation("constant")
+            # Rounded endpoints are a primvar
+ primvarsAPI = UsdGeom.PrimvarsAPI(lines)
+ endCaps = primvarsAPI.CreatePrimvar(
+ "endcaps", Sdf.ValueTypeNames.Int, UsdGeom.Tokens.constant
+ )
+ endCaps.Set(2) # Rounded = 2
+
+ prim = lines.GetPrim()
+ prim.CreateAttribute(
+ "omni:scene:visualization:drawWireframe", Sdf.ValueTypeNames.Bool
+ ).Set(wireframe)
+ if (tcoords is not None) and var_cmd:
+ primvarsAPI = UsdGeom.PrimvarsAPI(lines)
+ texCoords = primvarsAPI.CreatePrimvar(
+ "st", Sdf.ValueTypeNames.TexCoord2fArray, UsdGeom.Tokens.varying
+ )
+ texCoords.Set(tcoords)
+ texCoords.SetInterpolation("vertex")
+ part_prim = part_stage.GetPrimAtPath("/" + partname)
+ part_stage.SetDefaultPrim(part_prim)
+
+ # Currently, this will never happen, but it is a setup for rigid body transforms
+ # At present, the group transforms have been cooked into the vertices so this is not needed
+ matrixOp = xform.AddXformOp(
+ UsdGeom.XformOp.TypeTransform, UsdGeom.XformOp.PrecisionDouble
+ )
+ matrixOp.Set(Gf.Matrix4d(*matrix).GetTranspose())
+
+ self.create_dsg_material(
+ part_stage,
+ lines,
+ "/" + partname,
+ diffuse=diffuse,
+ variable=var_cmd,
+ mat_info=mat_info,
+ )
+
+ timestep_prim = self.add_timestep_group(parent_prim, timeline, first_timestep)
+
+ # glue it into our stage
+ path = timestep_prim.GetPath().AppendChild("part_ref_" + partname)
+ part_ref = self._stage.OverridePrim(path)
+ part_ref.GetReferences().AddReference("." + stage_name)
+
+ if part_stage is not None:
+ part_stage.GetRootLayer().Save()
+
+ return part_stage_url
+
+ def create_dsg_points(
+ self,
+ name,
+ id,
+ part_hash,
+ parent_prim,
+ verts,
+ sizes,
+ colors,
+ matrix=[1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0],
+ default_size=1.0,
+ default_color=[1.0, 1.0, 1.0, 1.0],
+ timeline=[0.0, 0.0],
+ first_timestep=False,
+ ):
+ # create the part usd object
+ partname = self.clean_name(name + part_hash.hexdigest())
+ stage_name = "/Parts/" + partname + ".usd"
+ part_stage_url = self.stage_url(os.path.join("Parts", partname + ".usd"))
+ part_stage = None
+
+ if not os.path.exists(part_stage_url):
+ part_stage = Usd.Stage.CreateNew(part_stage_url)
+ self._old_stages.append(part_stage_url)
+ xform = UsdGeom.Xform.Define(part_stage, "/" + partname)
+
+ points = UsdGeom.Points.Define(part_stage, "/" + partname + "/Points")
+ # points.GetPointsAttr().Set(Vt.Vec3fArray(verts.tolist()))
+ points.GetPointsAttr().Set(verts)
+ if sizes is not None and sizes.size == (verts.size // 3):
+ points.GetWidthsAttr().Set(sizes)
+ else:
+ points.GetWidthsAttr().Set([default_size] * (verts.size // 3))
+
+ colorAttr = points.GetPrim().GetAttribute("primvars:displayColor")
+ colorAttr.SetMetadata("interpolation", "vertex")
+ if colors is not None and colors.size == verts.size:
+ colorAttr.Set(colors)
+ else:
+ colorAttr.Set([default_color[0:3]] * (verts.size // 3))
+
+ part_prim = part_stage.GetPrimAtPath("/" + partname)
+ part_stage.SetDefaultPrim(part_prim)
+
+ # Currently, this will never happen, but it is a setup for rigid body transforms
+ # At present, the group transforms have been cooked into the vertices so this is not needed
+ matrixOp = xform.AddXformOp(
+ UsdGeom.XformOp.TypeTransform, UsdGeom.XformOp.PrecisionDouble
+ )
+ matrixOp.Set(Gf.Matrix4d(*matrix).GetTranspose())
+
+ timestep_prim = self.add_timestep_group(parent_prim, timeline, first_timestep)
+
+ # glue it into our stage
+ path = timestep_prim.GetPath().AppendChild("part_ref_" + partname)
+ part_ref = self._stage.OverridePrim(path)
+ part_ref.GetReferences().AddReference("." + stage_name)
+
+ if part_stage is not None:
+ part_stage.GetRootLayer().Save()
+
+ return part_stage_url
+
+ def create_dsg_material(
+ self, stage, mesh, root_name, diffuse=[1.0, 1.0, 1.0, 1.0], variable=None, mat_info={}
+ ):
+ # https://graphics.pixar.com/usd/release/spec_usdpreviewsurface.html
+ # Use ior==1.0 to be more like EnSight - rays of light do not bend when passing through transparent objs
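+        # mat_info keys consumed here (all optional): "smoothness", "metallic",
+        # "opacity", "name", "diffuse", "ke"/"emissive", "ks"/"specular".
+        # Illustrative example: {"name": "ensight/Default", "smoothness": 0.8}
+        # yields roughness=0.2 with the DSG part diffuse color passed through.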
+ material = UsdShade.Material.Define(stage, root_name + "/Material")
+ pbrShader = UsdShade.Shader.Define(stage, root_name + "/Material/PBRShader")
+ pbrShader.CreateIdAttr("UsdPreviewSurface")
+ smoothness = mat_info.get("smoothness", 0.0)
+ pbrShader.CreateInput("roughness", Sdf.ValueTypeNames.Float).Set(1.0 - smoothness)
+ metallic = mat_info.get("metallic", 0.0)
+ pbrShader.CreateInput("metallic", Sdf.ValueTypeNames.Float).Set(metallic)
+ opacity = mat_info.get("opacity", diffuse[3])
+ pbrShader.CreateInput("opacity", Sdf.ValueTypeNames.Float).Set(opacity)
+ pbrShader.CreateInput("ior", Sdf.ValueTypeNames.Float).Set(1.0)
+ pbrShader.CreateInput("useSpecularWorkflow", Sdf.ValueTypeNames.Int).Set(1)
+ if variable:
+ stReader = UsdShade.Shader.Define(stage, root_name + "/Material/stReader")
+ stReader.CreateIdAttr("UsdPrimvarReader_float2")
+ diffuseTextureSampler = UsdShade.Shader.Define(
+ stage, root_name + "/Material/diffuseTexture"
+ )
+ diffuseTextureSampler.CreateIdAttr("UsdUVTexture")
+ name = self.clean_name(variable.name)
+ filename = f"./Textures/palette_{name}.png"
+ diffuseTextureSampler.CreateInput("file", Sdf.ValueTypeNames.Asset).Set(filename)
+ diffuseTextureSampler.CreateInput("st", Sdf.ValueTypeNames.Float2).ConnectToSource(
+ stReader.ConnectableAPI(), "result"
+ )
+ diffuseTextureSampler.CreateOutput("rgb", Sdf.ValueTypeNames.Float3)
+ pbrShader.CreateInput("diffuseColor", Sdf.ValueTypeNames.Color3f).ConnectToSource(
+ diffuseTextureSampler.ConnectableAPI(), "rgb"
+ )
+ stInput = material.CreateInput("frame:stPrimvarName", Sdf.ValueTypeNames.Token)
+ stInput.Set("st")
+ stReader.CreateInput("varname", Sdf.ValueTypeNames.Token).ConnectToSource(stInput)
+ else:
+ # The colors are a mixture of content from the DSG PART protocol buffer
+ # and the JSON material block from the material_name field.
+ kd = 1.0
+ diffuse_color = [diffuse[0], diffuse[1], diffuse[2]]
+ ke = 1.0
+ emissive_color = [0.0, 0.0, 0.0]
+ ks = 1.0
+ specular_color = [0.0, 0.0, 0.0]
+ mat_name = mat_info.get("name", "")
+ if mat_name.startswith("ensight"):
+ diffuse_color = mat_info.get("diffuse", diffuse_color)
+ if mat_name != "ensight/Default":
+ ke = mat_info.get("ke", ke)
+ emissive_color = mat_info.get("emissive", emissive_color)
+ ks = mat_info.get("ks", ks)
+ specular_color = mat_info.get("specular", specular_color)
+ # Set the colors
+ color = Gf.Vec3f(diffuse_color[0] * kd, diffuse_color[1] * kd, diffuse_color[2] * kd)
+ pbrShader.CreateInput("diffuseColor", Sdf.ValueTypeNames.Color3f).Set(color)
+ color = Gf.Vec3f(emissive_color[0] * ke, emissive_color[1] * ke, emissive_color[2] * ke)
+ pbrShader.CreateInput("emissiveColor", Sdf.ValueTypeNames.Color3f).Set(color)
+ color = Gf.Vec3f(specular_color[0] * ks, specular_color[1] * ks, specular_color[2] * ks)
+ pbrShader.CreateInput("specularColor", Sdf.ValueTypeNames.Color3f).Set(color)
+
+ material.CreateSurfaceOutput().ConnectToSource(pbrShader.ConnectableAPI(), "surface")
+ UsdShade.MaterialBindingAPI(mesh).Bind(material)
+
+ return material
+
+ def create_dsg_variable_textures(self, variables):
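+        # Each variable carries an RGBA palette in var.texture; write it out
+        # as an n_pixels x 2 RGB PNG (alpha dropped, the row duplicated) that
+        # the UsdUVTexture built in create_dsg_material references by filename.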
+ with tempfile.TemporaryDirectory() as tempdir:
+ # make folder: {tempdir}/scratch/Textures/{palette_*.png}
+ os.makedirs(f"{tempdir}/scratch/Textures", exist_ok=True)
+ for var in variables.values():
+ data = bytearray(var.texture)
+ n_pixels = int(len(data) / 4)
+ row = []
+ for i in range(n_pixels):
+ row.append(data[i * 4 + 0])
+ row.append(data[i * 4 + 1])
+ row.append(data[i * 4 + 2])
+ io = png.Writer(width=n_pixels, height=2, bitdepth=8, greyscale=False)
+ rows = [row, row]
+ name = self.clean_name(var.name)
+ with open(f"{tempdir}/scratch/Textures/palette_{name}.png", "wb") as fp:
+ io.write(fp, rows)
+ uriPath = self._destinationPath + "/Parts/Textures"
+ shutil.rmtree(uriPath, ignore_errors=True, onerror=None)
+ shutil.copytree(f"{tempdir}/scratch/Textures", uriPath)
+
+ def create_dsg_root(self):
+ root_name = "/Root"
+ root_prim = UsdGeom.Xform.Define(self._stage, root_name)
+ # Define the defaultPrim as the /Root prim
+ root_prim = self._stage.GetPrimAtPath(root_name)
+ self._stage.SetDefaultPrim(root_prim)
+ return root_prim
+
+ def update_camera(self, camera):
+ if camera is not None:
+ cam_name = "/Root/Cam"
+ cam_prim = UsdGeom.Xform.Define(self._stage, cam_name)
+ cam_pos = Gf.Vec3d(camera.lookfrom[0], camera.lookfrom[1], camera.lookfrom[2])
+ target_pos = Gf.Vec3d(camera.lookat[0], camera.lookat[1], camera.lookat[2])
+ up_vec = Gf.Vec3d(camera.upvector[0], camera.upvector[1], camera.upvector[2])
+ cam_prim = self._stage.GetPrimAtPath(cam_name)
+ geom_cam = UsdGeom.Camera(cam_prim)
+ if not geom_cam:
+ geom_cam = UsdGeom.Camera.Define(self._stage, cam_name)
+ # Set camera values
+ # center of interest attribute unique for Kit defines the pivot for tumbling the camera
+ # Set as an attribute on the prim
+ coi_attr = cam_prim.GetAttribute("omni:kit:centerOfInterest")
+ if not coi_attr.IsValid():
+ coi_attr = cam_prim.CreateAttribute(
+ "omni:kit:centerOfInterest", Sdf.ValueTypeNames.Vector3d
+ )
+ coi_attr.Set(target_pos)
+ # get the camera
+ cam = geom_cam.GetCamera()
+            # Not entirely clear why this is correct, but empirically it works.
+ cam.focalLength = camera.fieldofview
+ cam.clippingRange = Gf.Range1f(0.1, 10)
+ look_at = Gf.Matrix4d()
+ look_at.SetLookAt(cam_pos, target_pos, up_vec)
+ trans_row = look_at.GetRow(3)
+ trans_row = Gf.Vec4d(-trans_row[0], -trans_row[1], -trans_row[2], trans_row[3])
+ look_at.SetRow(3, trans_row)
+ cam.transform = look_at
+
+ # set the updated camera
+ geom_cam.SetFromCamera(cam)
+
+ def create_dsg_group(
+ self,
+ name: str,
+ parent_prim,
+ obj_type: Any = None,
+ matrix: List[float] = [
+ 1.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 1.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 1.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 1.0,
+ ],
+ ):
+ path = parent_prim.GetPath().AppendChild(self.clean_name(name))
+ group_prim = UsdGeom.Xform.Get(self._stage, path)
+ if not group_prim:
+ group_prim = UsdGeom.Xform.Define(self._stage, path)
+        # At present, the group transforms have already been cooked into the vertices, so this transform is effectively redundant.
+ matrix_op = group_prim.AddXformOp(
+ UsdGeom.XformOp.TypeTransform, UsdGeom.XformOp.PrecisionDouble
+ )
+ matrix_op.Set(Gf.Matrix4d(*matrix).GetTranspose())
+ # Map kinds
+ kind = Kind.Tokens.group
+ if obj_type == "ENS_CASE":
+ kind = Kind.Tokens.assembly
+ elif obj_type == "ENS_PART":
+ kind = Kind.Tokens.component
+ Usd.ModelAPI(group_prim).SetKind(kind)
+ logging.info(f"Created group:'{name}' {str(obj_type)}")
+ return group_prim
+
+ def uploadMaterial(self):
+ uriPath = self._destinationPath + "/Materials"
+ shutil.rmtree(uriPath, ignore_errors=True, onerror=None)
+ fullpath = os.path.join(os.path.dirname(__file__), "resources", "Materials")
+ shutil.copytree(fullpath, uriPath)
+
+ # Create a dome light in the scene.
+ def createDomeLight(self, texturePath):
+ newLight = UsdLux.DomeLight.Define(self._stage, "/Root/DomeLight")
+ newLight.CreateIntensityAttr(2200.0)
+ newLight.CreateTextureFileAttr(texturePath)
+ newLight.CreateTextureFormatAttr("latlong")
+
+ # Set rotation on domelight
+ xForm = newLight
+ rotateOp = xForm.AddXformOp(UsdGeom.XformOp.TypeRotateZYX, UsdGeom.XformOp.PrecisionFloat)
+ rotateOp.Set(Gf.Vec3f(270, 0, 0))
+
+
+class OmniverseUpdateHandler(UpdateHandler):
+ """
+ Implement the Omniverse glue to a DSGSession instance
+ """
+
+ def __init__(self, omni: OmniverseWrapper):
+ super().__init__()
+ self._omni = omni
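+        # Map DSG group ids to their USD prims so child parts can locate their parent prims.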
+ self._group_prims: Dict[int, Any] = dict()
+ self._root_prim = None
+ self._sent_textures = False
+
+ def add_group(self, id: int, view: bool = False) -> None:
+ super().add_group(id, view)
+ group = self.session.groups[id]
+ if not view:
+ parent_prim = self._group_prims[group.parent_id]
+ obj_type = self.get_dsg_cmd_attribute(group, "ENS_OBJ_TYPE")
+ matrix = self.group_matrix(group)
+ prim = self._omni.create_dsg_group(
+ group.name, parent_prim, matrix=matrix, obj_type=obj_type
+ )
+ self._group_prims[id] = prim
+ else:
+ # Map a view command into a new Omniverse stage and populate it with materials/lights.
+ # Create a new root stage in Omniverse
+
+ # Create or update the root group/camera
+ if not self.session.vrmode:
+ self._omni.update_camera(camera=group)
+
+ # record
+ self._group_prims[id] = self._root_prim
+
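+        # Map the DSG time range onto the USD stage's time codes.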
+ if self._omni._stage is not None:
+ self._omni._stage.SetStartTimeCode(
+ self.session.time_limits[0] * self._omni._time_codes_per_second
+ )
+ self._omni._stage.SetEndTimeCode(
+ self.session.time_limits[1] * self._omni._time_codes_per_second
+ )
+ self._omni._stage.SetTimeCodesPerSecond(self._omni._time_codes_per_second)
+
+ # Send the variable textures. Safe to do so once the first view is processed.
+ if not self._sent_textures:
+ self._omni.create_dsg_variable_textures(self.session.variables)
+ self._sent_textures = True
+
+ def add_variable(self, id: int) -> None:
+ super().add_variable(id)
+
+ def finalize_part(self, part: Part) -> None:
+ # generate an Omniverse compliant mesh from the Part
+ if part is None or part.cmd is None:
+ return
+ parent_prim = self._group_prims[part.cmd.parent_id]
+ obj_id = self.session.mesh_block_count
+ matrix = part.cmd.matrix4x4
+ name = part.cmd.name
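+        # Pre-multiply the fill color by the diffuse coefficient; alpha passes through unchanged.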
+ color = [
+ part.cmd.fill_color[0] * part.cmd.diffuse,
+ part.cmd.fill_color[1] * part.cmd.diffuse,
+ part.cmd.fill_color[2] * part.cmd.diffuse,
+ part.cmd.fill_color[3],
+ ]
+
+ mat_info = part.material()
+ if part.cmd.render == part.cmd.CONNECTIVITY:
+ has_triangles = False
+ command, verts, conn, normals, tcoords, var_cmd = part.nodal_surface_rep()
+ if command is not None:
+ has_triangles = True
+ # Generate the mesh block
+ _ = self._omni.create_dsg_mesh_block(
+ name,
+ obj_id,
+ part.hash,
+ parent_prim,
+ verts,
+ conn,
+ normals,
+ tcoords,
+ matrix=matrix,
+ diffuse=color,
+ variable=var_cmd,
+ timeline=self.session.cur_timeline,
+ first_timestep=(self.session.cur_timeline[0] == self.session.time_limits[0]),
+ mat_info=mat_info,
+ )
+ if self._omni.use_lines:
+ command, verts, tcoords, var_cmd = part.line_rep()
+ if command is not None:
+                    # If there are no triangles (ideally meaning these are not
+                    # hidden-line edges), use the base color for the part. If there
+                    # are triangles, assume these are hidden-line edges and use
+                    # the line_color.
+ line_color = color
+ if has_triangles:
+ line_color = [
+ part.cmd.line_color[0] * part.cmd.diffuse,
+ part.cmd.line_color[1] * part.cmd.diffuse,
+ part.cmd.line_color[2] * part.cmd.diffuse,
+ part.cmd.line_color[3],
+ ]
+                    # TODO: texture coordinates on lines are currently invalid in OV
+ var_cmd = None
+ tcoords = None
+ # Generate the lines
+ _ = self._omni.create_dsg_lines(
+ name,
+ obj_id,
+ part.hash,
+ parent_prim,
+ verts,
+ tcoords,
+ matrix=matrix,
+ diffuse=line_color,
+ variable=var_cmd,
+ timeline=self.session.cur_timeline,
+ first_timestep=(
+ self.session.cur_timeline[0] == self.session.time_limits[0]
+ ),
+ )
+
+ elif part.cmd.render == part.cmd.NODES:
+ command, verts, sizes, colors, var_cmd = part.point_rep()
+ if command is not None:
+ _ = self._omni.create_dsg_points(
+ name,
+ obj_id,
+ part.hash,
+ parent_prim,
+ verts,
+ sizes,
+ colors,
+ matrix=matrix,
+ default_size=part.cmd.node_size_default,
+ default_color=color,
+ timeline=self.session.cur_timeline,
+ first_timestep=(self.session.cur_timeline[0] == self.session.time_limits[0]),
+ )
+ super().finalize_part(part)
+
+ def start_connection(self) -> None:
+ super().start_connection()
+
+ def end_connection(self) -> None:
+ super().end_connection()
+
+ def begin_update(self) -> None:
+ super().begin_update()
+ # restart the name tables
+ self._omni.clear_cleaned_names()
+ # clear the group Omni prims list
+ self._group_prims = dict()
+
+ self._omni.create_new_stage()
+ self._root_prim = self._omni.create_dsg_root()
+        # Create a dome light in the scene
+ self._omni.createDomeLight("./Materials/000_sky.exr")
+ # Upload a material to the Omniverse server
+ self._omni.uploadMaterial()
+ self._sent_textures = False
+
+ def end_update(self) -> None:
+ super().end_update()
+ # Stage update complete
+ self._omni.save_stage()
diff --git a/src/ansys/pyensight/core/utils/omniverse_glb_server.py b/src/ansys/pyensight/core/utils/omniverse_glb_server.py
index 46ece6b9fc5..f5275096173 100644
--- a/src/ansys/pyensight/core/utils/omniverse_glb_server.py
+++ b/src/ansys/pyensight/core/utils/omniverse_glb_server.py
@@ -1,631 +1,631 @@
-import io
-import logging
-import os
-import sys
-from typing import Any, List, Optional, Tuple
-import uuid
-
-from PIL import Image
-from ansys.api.pyensight.v0 import dynamic_scene_graph_pb2
-import ansys.pyensight.core.utils.dsg_server as dsg_server
-import numpy
-import pygltflib
-
-sys.path.insert(0, os.path.dirname(__file__))
-from dsg_server import UpdateHandler # noqa: E402
-
-
-class GLBSession(dsg_server.DSGSession):
- def __init__(
- self,
- verbose: int = 0,
- normalize_geometry: bool = False,
- time_scale: float = 1.0,
- vrmode: bool = False,
- handler: UpdateHandler = UpdateHandler(),
- ):
- """
- Provide an interface to read a GLB file and link it to an UpdateHandler instance
-
- This class reads GLB files and provides the data to an UpdateHandler instance for
- further processing.
-
- Parameters
- ----------
- verbose : int
- The verbosity level. If set to 1 or higher the class will call logging.info
- for log output. The default is ``0``.
- normalize_geometry : bool
-            If True, the scene coordinates will be remapped into the volume [-1,-1,-1] - [1,1,1].
- The default is not to remap coordinates.
- time_scale : float
- Scale time values by this factor after being read. The default is ``1.0``.
- vrmode : bool
- If True, do not include the camera in the output.
- handler : UpdateHandler
- This is an UpdateHandler subclass that is called back when the state of
- a scene transfer changes. For example, methods are called when the
- transfer begins or ends and when a Part (mesh block) is ready for processing.
- """
- super().__init__(
- verbose=verbose,
- normalize_geometry=normalize_geometry,
- time_scale=time_scale,
- vrmode=vrmode,
- handler=handler,
- )
- self._gltf: pygltflib.GLTF2 = pygltflib.GLTF2()
- self._id_num: int = 0
- self._node_idx: int = -1
- self._glb_textures: dict = {}
- self._scene_id: int = 0
-
- def _reset(self) -> None:
- """
- Reset the current state to prepare for a new dataset.
- """
- super()._reset()
- self._cur_timeline = [0.0, 0.0] # Start/End time for current update
- self._status = dict(status="idle", start_time=0.0, processed_buffers=0, total_buffers=0)
- self._gltf = pygltflib.GLTF2()
- self._node_idx = -1
- self._id_num = 0
- self._glb_textures = {}
- self._scene_id = 0
-
- def _next_id(self) -> int:
- """Simple sequential number source
- Called whenever a unique integer is needed.
-
- Returns
- -------
- int
- A unique, monotonically increasing integer.
- """
- self._id_num += 1
- return self._id_num
-
- def _map_material(self, glb_materialid: int, part_pb: Any) -> None:
- """
- Apply various material properties to part protocol buffer.
-
- Parameters
- ----------
- glb_materialid : int
- The GLB material ID to use as the source information.
- part_pb : Any
- The DSG UpdatePart protocol buffer to update.
- """
- mat = self._gltf.materials[glb_materialid]
- color = [1.0, 1.0, 1.0, 1.0]
- # Change the color if we can find one
- if hasattr(mat, "pbrMetallicRoughness"):
- if hasattr(mat.pbrMetallicRoughness, "baseColorFactor"):
- color = mat.pbrMetallicRoughness.baseColorFactor
- part_pb.fill_color.extend(color)
- part_pb.line_color.extend(color)
- # Constants for now
- part_pb.ambient = 1.0
- part_pb.diffuse = 1.0
- part_pb.specular_intensity = 1.0
- # if the material maps to a variable, set the variable id for coloring
- glb_varid = self._find_variable_from_glb_mat(glb_materialid)
- if glb_varid:
- part_pb.color_variableid = glb_varid
-
- def _parse_mesh(self, meshid: int, parentid: int, parentname: str) -> None:
- """
- Walk a mesh id found in a "node" instance. This amounts to
- walking the list of "primitives" in the "meshes" list indexed
- by the meshid.
-
- Parameters
- ----------
- meshid: int
- The index of the mesh in the "meshes" list.
-
- parentid: int
- The DSG parent id.
-
- parentname: str
- The name of the GROUP parent of the meshes.
- """
- mesh = self._gltf.meshes[meshid]
- for prim_idx, prim in enumerate(mesh.primitives):
- # POINTS, LINES, LINE_LOOP, LINE_STRIP, TRIANGLES, TRIANGLE_STRIP, TRIANGLE_FAN
- mode = prim.mode
- if mode not in (pygltflib.TRIANGLES, pygltflib.LINES, pygltflib.POINTS):
- self.warn(
- f"Unhandled connectivity {mode}. Currently only TRIANGLE and LINE connectivity is supported."
- )
- continue
- glb_materialid = prim.material
-
- # GLB Prim -> DSG Part
- part_name = f"{parentname}_prim{prim_idx}_"
- cmd, part_pb = self._create_pb("PART", parent_id=parentid, name=part_name)
- part_pb.render = dynamic_scene_graph_pb2.UpdatePart.RenderingMode.CONNECTIVITY
- part_pb.shading = dynamic_scene_graph_pb2.UpdatePart.ShadingMode.NODAL
- self._map_material(glb_materialid, part_pb)
- part_dsg_id = part_pb.id
- self._handle_update_command(cmd)
-
- # GLB Attributes -> DSG Geom
- conn = self._get_data(prim.indices, 0)
- cmd, conn_pb = self._create_pb("GEOM", parent_id=part_dsg_id)
- if mode == pygltflib.TRIANGLES:
- conn_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.TRIANGLES
- elif mode == pygltflib.LINES:
- conn_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.LINES
- else:
- conn_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.POINTS
- conn_pb.int_array.extend(conn)
- conn_pb.chunk_offset = 0
- conn_pb.total_array_size = len(conn)
- self._handle_update_command(cmd)
-
- if prim.attributes.POSITION is not None:
- verts = self._get_data(prim.attributes.POSITION)
- cmd, verts_pb = self._create_pb("GEOM", parent_id=part_dsg_id)
- verts_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.COORDINATES
- verts_pb.flt_array.extend(verts)
- verts_pb.chunk_offset = 0
- verts_pb.total_array_size = len(verts)
- self._handle_update_command(cmd)
-
- if prim.attributes.NORMAL is not None:
- normals = self._get_data(prim.attributes.NORMAL)
- cmd, normals_pb = self._create_pb("GEOM", parent_id=part_dsg_id)
- normals_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.NODE_NORMALS
- normals_pb.flt_array.extend(normals)
- normals_pb.chunk_offset = 0
- normals_pb.total_array_size = len(normals)
- self._handle_update_command(cmd)
-
- if prim.attributes.TEXCOORD_0 is not None:
- # Note: texture coords are stored as VEC2, so we get 2 components back
- texcoords = self._get_data(prim.attributes.TEXCOORD_0, components=2)
- # we only want the 's' component of an s,t pairing
- texcoords = texcoords[::2]
- cmd, texcoords_pb = self._create_pb("GEOM", parent_id=part_dsg_id)
- texcoords_pb.payload_type = (
- dynamic_scene_graph_pb2.UpdateGeom.ArrayType.NODE_VARIABLE
- )
- texcoords_pb.flt_array.extend(texcoords)
- texcoords_pb.chunk_offset = 0
- texcoords_pb.total_array_size = len(texcoords)
- glb_varid = self._find_variable_from_glb_mat(glb_materialid)
- if glb_varid:
- texcoords_pb.variable_id = glb_varid
- self._handle_update_command(cmd)
-
- def _get_data(
- self,
- accessorid: int,
- components: int = 3,
- ) -> numpy.ndarray:
- """
- Return the float buffer corresponding to the given accessorid. The id
- is usually obtained from a primitive: primitive.attributes.POSITION
- or primitive.attributes.NORMAL or primitive.attributes.TEXCOORD_0.
- It can also come from primitive.indices. In that case, the number of
- components needs to be set to 0.
-
- Parameters
- ----------
- accessorid: int
- The accessor index of the primitive.
-
-        components: int
-            The number of floats per vertex: 1, 2, or 3. If the number of
-            components is 0, integer indices are read instead.
-
- Returns
- -------
- numpy.ndarray
- The float buffer corresponding to the nodal data or an int buffer of connectivity.
- """
- dtypes = {}
- dtypes[pygltflib.BYTE] = numpy.int8
- dtypes[pygltflib.UNSIGNED_BYTE] = numpy.uint8
- dtypes[pygltflib.SHORT] = numpy.int16
- dtypes[pygltflib.UNSIGNED_SHORT] = numpy.uint16
- dtypes[pygltflib.UNSIGNED_INT] = numpy.uint32
- dtypes[pygltflib.FLOAT] = numpy.float32
-
- binary_blob = self._gltf.binary_blob()
- accessor = self._gltf.accessors[accessorid]
- buffer_view = self._gltf.bufferViews[accessor.bufferView]
- dtype = numpy.float32
- data_dtype = dtypes[accessor.componentType]
- count = accessor.count * components
- # connectivity
- if components == 0:
- dtype = numpy.uint32
- count = accessor.count
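-        # The data begins at the buffer view's offset plus the accessor's own byte offset.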
- offset = buffer_view.byteOffset + accessor.byteOffset
- blob = binary_blob[offset : offset + buffer_view.byteLength]
- ret = numpy.frombuffer(blob, dtype=data_dtype, count=count)
- if data_dtype != dtype:
- return ret.astype(dtype)
- return ret
-
- def _walk_node(self, nodeid: int, parentid: int) -> None:
- """
- Given a node id (likely from walking a scenes array), walk the mesh
- objects in the node. A "node" has the keys "mesh" and "name".
-
- Each node has a single mesh object in it.
-
- Parameters
- ----------
- nodeid: int
- The node id to walk.
-
- parentid: int
- The DSG parent id.
-
- """
- node = self._gltf.nodes[nodeid]
- name = self._name(node)
- matrix = self._transform(node)
-
- # GLB node -> DSG Group
- cmd, group_pb = self._create_pb("GROUP", parent_id=parentid, name=name)
- group_pb.matrix4x4.extend(matrix)
- self._handle_update_command(cmd)
-
- if node.mesh is not None:
- self._parse_mesh(node.mesh, group_pb.id, name)
-
- # Handle node.rotation, node.translation, node.scale, node.matrix
- for child_id in node.children:
- self._walk_node(child_id, group_pb.id)
-
- def start_uploads(self, timeline: List[float]) -> None:
- """
- Begin an upload process for a potential collection of files.
-
- Parameters
- ----------
- timeline : List[float]
-            The range of time values spanned by the files to be uploaded.
- """
- self._scene_id = self._next_id()
- self._cur_timeline = timeline
- self._callback_handler.begin_update()
- self._update_status_file()
-
- def end_uploads(self) -> None:
- """
- The upload process for the current collection of files is complete.
- """
- self._reset()
- self._update_status_file()
-
- def _find_variable_from_glb_mat(self, glb_material_id: int) -> Optional[int]:
- """
- Given a glb_material id, find the corresponding dsg variable id
-
- Parameters
- ----------
- glb_material_id : int
- The material id from the glb file.
-
- Returns
- -------
- Optional[int]
- The dsg variable id or None, if no variable is found.
- """
- value = self._glb_textures.get(glb_material_id, None)
- if value is not None:
- return value["pb"].id
- return None
-
- def upload_file(self, glb_filename: str, timeline: List[float] = [0.0, 0.0]) -> bool:
- """
- Parse a GLB file and call out to the handler to present the data
- to another interface (e.g. Omniverse)
-
- Parameters
- ----------
-        glb_filename : str
-            The name of the GLB file to parse.
-
-        timeline : List[float]
-            The first and last time value for which the content of this file should be
-            visible.
-
- Returns
- -------
-        bool
-            True on success, False otherwise.
- """
- try:
- ok = True
- self._gltf = pygltflib.GLTF2().load(glb_filename)
- self.log(f"File: {glb_filename} Info: {self._gltf.asset}")
-
- # check for GLTFWriter source
- if (self._gltf.asset.generator is None) or (
- ("GLTF Writer" not in self._gltf.asset.generator)
- and ("Ansys Ensight" not in self._gltf.asset.generator)
- ):
- self.error(
- f"Unable to process: {glb_filename} : Not written by GLTF Writer library"
- )
- return False
-
- # Walk texture nodes -> DSG Variable buffers
- for tex_idx, texture in enumerate(self._gltf.textures):
- image = self._gltf.images[texture.source]
- if image.uri is None:
- bv = self._gltf.bufferViews[image.bufferView]
- raw_png = self._gltf.binary_blob()[
- bv.byteOffset : bv.byteOffset + bv.byteLength
- ]
- else:
- raw_png = self._gltf.get_data_from_buffer_uri(image.uri)
- png_img = Image.open(io.BytesIO(raw_png))
- raw_rgba = png_img.tobytes()
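-                # Keep only the first row of pixels; the palette image is assumed constant down each column.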
- raw_rgba = raw_rgba[0 : len(raw_rgba) // png_img.size[1]]
- var_name = "Variable_" + str(tex_idx)
- cmd, var_pb = self._create_pb("VARIABLE", parent_id=self._scene_id, name=var_name)
- var_pb.location = dynamic_scene_graph_pb2.UpdateVariable.VarLocation.NODAL
- var_pb.dimension = dynamic_scene_graph_pb2.UpdateVariable.VarDimension.SCALAR
- var_pb.undefined_value = -1e38
- var_pb.pal_interp = (
- dynamic_scene_graph_pb2.UpdateVariable.PaletteInterpolation.CONTINUOUS
- )
- var_pb.sub_levels = 0
- var_pb.undefined_display = (
- dynamic_scene_graph_pb2.UpdateVariable.UndefinedDisplay.AS_ZERO
- )
- var_pb.texture = raw_rgba
- colors = numpy.frombuffer(raw_rgba, dtype=numpy.uint8)
- colors.shape = (-1, 4)
- num = len(colors)
- levels = []
- for i, c in enumerate(colors):
- level = dynamic_scene_graph_pb2.VariableLevel()
- level.value = float(i) / float(num - 1)
- level.red = float(c[0]) / 255.0
- level.green = float(c[1]) / 255.0
- level.blue = float(c[2]) / 255.0
- level.alpha = float(c[3]) / 255.0
- levels.append(level)
- var_pb.levels.extend(levels)
-                # map the GLB material index to this variable's protocol buffer and texture index
- d = dict(pb=var_pb, idx=tex_idx)
- # Find all the materials that map to this texture
- for mat_idx, mat in enumerate(self._gltf.materials):
- if not hasattr(mat, "pbrMetallicRoughness"):
- continue
- if not hasattr(mat.pbrMetallicRoughness, "baseColorTexture"):
- continue
- if not hasattr(mat.pbrMetallicRoughness.baseColorTexture, "index"):
- continue
- if mat.pbrMetallicRoughness.baseColorTexture.index == tex_idx:
- material_index = mat_idx
- # does this Variable/texture already exist?
- duplicate = None
- saved_id = var_pb.id
- saved_name = var_pb.name
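-                # Temporarily adopt each candidate's name and id so the equality test compares palette content only.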
- for key, value in self._glb_textures.items():
- var_pb.name = value["pb"].name
- var_pb.id = value["pb"].id
- if value["pb"] == var_pb:
- duplicate = key
- break
- var_pb.id = saved_id
- var_pb.name = saved_name
- # if a new texture, add the Variable and create an index to the material
- if duplicate is None:
- self._handle_update_command(cmd)
- self._glb_textures[material_index] = d
- else:
- # create an additional reference to this variable from this material
- self._glb_textures[material_index] = self._glb_textures[duplicate]
-
- # GLB file: general layout
- # scene: "default_index"
- # scenes: [scene_index].nodes -> [node ids]
- # was scene_id = self._gltf.scene
- num_scenes = len(self._gltf.scenes)
- for scene_idx in range(num_scenes):
- # GLB Scene -> DSG View
- cmd, view_pb = self._create_pb("VIEW", parent_id=self._scene_id)
- view_pb.lookat.extend([0.0, 0.0, -1.0])
- view_pb.lookfrom.extend([0.0, 0.0, 0.0])
- view_pb.upvector.extend([0.0, 1.0, 0.0])
- view_pb.timeline.extend(self._build_scene_timeline(scene_idx, timeline))
- if len(self._gltf.cameras) > 0:
- camera = self._gltf.cameras[0]
- if camera.type == "orthographic":
- view_pb.nearfar.extend(
- [float(camera.orthographic.znear), float(camera.orthographic.zfar)]
- )
- else:
- view_pb.nearfar.extend(
- [float(camera.perspective.znear), float(camera.perspective.zfar)]
- )
- view_pb.fieldofview = camera.perspective.yfov
-                        view_pb.aspectratio = camera.perspective.aspectRatio
- self._handle_update_command(cmd)
- for node_id in self._gltf.scenes[scene_idx].nodes:
- self._walk_node(node_id, view_pb.id)
- self._finish_part()
-
- self._callback_handler.end_update()
-
- except Exception as e:
- import traceback
-
- self.error(f"Unable to process: {glb_filename} : {e}")
- traceback_str = "".join(traceback.format_tb(e.__traceback__))
- logging.debug(f"Traceback: {traceback_str}")
- ok = False
-
- return ok
-
- def _build_scene_timeline(self, scene_idx: int, input_timeline: List[float]) -> List[float]:
- """
- For a given scene and externally supplied timeline, compute the timeline for the scene.
-
- If the ANSYS_scene_time extension is present, use that value.
- If there is only a single scene, return the supplied timeline.
- If the supplied timeline is empty, use an integer timeline based on the number of scenes in the GLB file.
- Carve up the timeline into chunks, one per scene.
-
- Parameters
- ----------
- scene_idx: int
- The index of the scene to compute for.
-
- input_timeline: List[float]
- An externally supplied timeline.
-
- Returns
- -------
- List[float]
- The computed timeline.
- """
- # if ANSYS_scene_time is used, time ranges will come from there
- if "ANSYS_scene_time" in self._gltf.scenes[scene_idx].extensions:
- return self._gltf.scenes[scene_idx].extensions["ANSYS_scene_time"]
- # if there is only one scene, then use the input timeline
- num_scenes = len(self._gltf.scenes)
- if num_scenes == 1:
- return input_timeline
- # if the timeline has zero length, we make it the number of scenes
- timeline = input_timeline
- if timeline[1] - timeline[0] <= 0.0:
- timeline = [0.0, float(num_scenes - 1)]
- # carve time into the input timeline.
- delta = (timeline[1] - timeline[0]) / float(num_scenes)
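-        # Example: 3 scenes over [0.0, 3.0] -> scene 0 spans [0.0, 1.0], scene 1 [1.0, 2.0], scene 2 [2.0, 3.0].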
- output: List[float] = []
- output.append(float(scene_idx) * delta + timeline[0])
- output.append(output[0] + delta)
- return output
-
- @staticmethod
- def _transform(node: Any) -> List[float]:
- """
- Convert the node "matrix" or "translation", "rotation" and "scale" values into
- a 4x4 matrix representation.
-
- "nodes": [
- {
- "matrix": [
- 1,0,0,0,
- 0,1,0,0,
- 0,0,1,0,
- 5,6,7,1
- ],
- ...
- },
- {
- "translation":
- [ 0,0,0 ],
- "rotation":
- [ 0,0,0,1 ],
- "scale":
- [ 1,1,1 ]
- ...
- },
- ]
-
- Parameters
- ----------
- node: Any
- The node to compute the matrix transform for.
-
- Returns
- -------
- List[float]
- The 4x4 transformation matrix.
-
- """
- identity = numpy.identity(4)
- if node.matrix:
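-            # glTF stores matrices in column-major order; transpose before flattening.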
- tmp = numpy.array(node.matrix)
- tmp.shape = (4, 4)
- tmp = tmp.transpose()
- return list(tmp.flatten())
- if node.translation:
- identity[3][0] = node.translation[0]
- identity[3][1] = node.translation[1]
- identity[3][2] = node.translation[2]
- if node.rotation:
-            # glTF quaternions are ordered [x, y, z, w] (identity [0,0,0,1]); reorder to [w, x, y, z] here
- q = [node.rotation[3], node.rotation[0], node.rotation[1], node.rotation[2]]
- rot = numpy.array(
- [
- [q[0], -q[1], -q[2], -q[3]],
- [q[1], q[0], -q[3], q[2]],
- [q[2], q[3], q[0], -q[1]],
- [q[3], -q[2], q[1], q[0]],
- ]
- )
- identity = numpy.multiply(identity, rot)
- if node.scale:
- s = node.scale
- scale = numpy.array(
- [
- [s[0], 0.0, 0.0, 0.0],
- [0.0, s[1], 0.0, 0.0],
- [0.0, 0.0, s[2], 0.0],
- [0.0, 0.0, 0.0, 1.0],
- ]
- )
- identity = numpy.multiply(identity, scale)
- return list(identity.flatten())
-
- def _name(self, node: Any) -> str:
- """
-        Given a GLB node object, return the name of the node. If the node does not
-        have a name, generate one.
-
- Parameters
- ----------
- node: Any
- The GLB node to get the name of.
-
- Returns
- -------
- str
- The name of the node.
- """
- if hasattr(node, "name") and node.name:
- return node.name
- self._node_idx += 1
- return f"Node_{self._node_idx}"
-
- def _create_pb(
-        self, cmd_type: str, parent_id: int = -1, name: str = ""
-    ) -> Tuple["dynamic_scene_graph_pb2.SceneUpdateCommand", Any]:
- cmd = dynamic_scene_graph_pb2.SceneUpdateCommand()
- if cmd_type == "PART":
- cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_PART
- subcmd = cmd.update_part
- subcmd.hash = str(uuid.uuid1())
- elif cmd_type == "GROUP":
- cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GROUP
- subcmd = cmd.update_group
- elif cmd_type == "VARIABLE":
- cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VARIABLE
- subcmd = cmd.update_variable
- elif cmd_type == "GEOM":
- cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GEOM
- subcmd = cmd.update_geom
- subcmd.hash = str(uuid.uuid1())
- elif cmd_type == "VIEW":
- cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VIEW
- subcmd = cmd.update_view
- subcmd.id = self._next_id()
- if parent_id >= 0:
- subcmd.parent_id = parent_id
- if cmd_type not in ("GEOM", "VIEW"):
- if name:
- subcmd.name = name
- return cmd, subcmd
+import io
+import logging
+import os
+import sys
+from typing import Any, List, Optional, Tuple
+import uuid
+
+from PIL import Image
+from ansys.api.pyensight.v0 import dynamic_scene_graph_pb2
+import ansys.pyensight.core.utils.dsg_server as dsg_server
+import numpy
+import pygltflib
+
+sys.path.insert(0, os.path.dirname(__file__))
+from dsg_server import UpdateHandler # noqa: E402
+
+
+class GLBSession(dsg_server.DSGSession):
+ def __init__(
+ self,
+ verbose: int = 0,
+ normalize_geometry: bool = False,
+ time_scale: float = 1.0,
+ vrmode: bool = False,
+ handler: UpdateHandler = UpdateHandler(),
+ ):
+ """
+ Provide an interface to read a GLB file and link it to an UpdateHandler instance
+
+ This class reads GLB files and provides the data to an UpdateHandler instance for
+ further processing.
+
+ Parameters
+ ----------
+ verbose : int
+ The verbosity level. If set to 1 or higher the class will call logging.info
+ for log output. The default is ``0``.
+ normalize_geometry : bool
+            If True, the scene coordinates will be remapped into the volume [-1,-1,-1] - [1,1,1].
+ The default is not to remap coordinates.
+ time_scale : float
+ Scale time values by this factor after being read. The default is ``1.0``.
+ vrmode : bool
+ If True, do not include the camera in the output.
+ handler : UpdateHandler
+ This is an UpdateHandler subclass that is called back when the state of
+ a scene transfer changes. For example, methods are called when the
+ transfer begins or ends and when a Part (mesh block) is ready for processing.
+ """
+ super().__init__(
+ verbose=verbose,
+ normalize_geometry=normalize_geometry,
+ time_scale=time_scale,
+ vrmode=vrmode,
+ handler=handler,
+ )
+ self._gltf: pygltflib.GLTF2 = pygltflib.GLTF2()
+ self._id_num: int = 0
+ self._node_idx: int = -1
+ self._glb_textures: dict = {}
+ self._scene_id: int = 0
+
+ def _reset(self) -> None:
+ """
+ Reset the current state to prepare for a new dataset.
+ """
+ super()._reset()
+ self._cur_timeline = [0.0, 0.0] # Start/End time for current update
+ self._status = dict(status="idle", start_time=0.0, processed_buffers=0, total_buffers=0)
+ self._gltf = pygltflib.GLTF2()
+ self._node_idx = -1
+ self._id_num = 0
+ self._glb_textures = {}
+ self._scene_id = 0
+
+ def _next_id(self) -> int:
+ """Simple sequential number source
+ Called whenever a unique integer is needed.
+
+ Returns
+ -------
+ int
+ A unique, monotonically increasing integer.
+ """
+ self._id_num += 1
+ return self._id_num
+
+ def _map_material(self, glb_materialid: int, part_pb: Any) -> None:
+ """
+ Apply various material properties to part protocol buffer.
+
+ Parameters
+ ----------
+ glb_materialid : int
+ The GLB material ID to use as the source information.
+ part_pb : Any
+ The DSG UpdatePart protocol buffer to update.
+ """
+ mat = self._gltf.materials[glb_materialid]
+ color = [1.0, 1.0, 1.0, 1.0]
+ # Change the color if we can find one
+ if hasattr(mat, "pbrMetallicRoughness"):
+ if hasattr(mat.pbrMetallicRoughness, "baseColorFactor"):
+ color = mat.pbrMetallicRoughness.baseColorFactor
+ part_pb.fill_color.extend(color)
+ part_pb.line_color.extend(color)
+ # Constants for now
+ part_pb.ambient = 1.0
+ part_pb.diffuse = 1.0
+ part_pb.specular_intensity = 1.0
+ # if the material maps to a variable, set the variable id for coloring
+ glb_varid = self._find_variable_from_glb_mat(glb_materialid)
+ if glb_varid:
+ part_pb.color_variableid = glb_varid
+
+ def _parse_mesh(self, meshid: int, parentid: int, parentname: str) -> None:
+ """
+ Walk a mesh id found in a "node" instance. This amounts to
+ walking the list of "primitives" in the "meshes" list indexed
+ by the meshid.
+
+ Parameters
+ ----------
+ meshid: int
+ The index of the mesh in the "meshes" list.
+
+ parentid: int
+ The DSG parent id.
+
+ parentname: str
+ The name of the GROUP parent of the meshes.
+ """
+ mesh = self._gltf.meshes[meshid]
+ for prim_idx, prim in enumerate(mesh.primitives):
+ # POINTS, LINES, LINE_LOOP, LINE_STRIP, TRIANGLES, TRIANGLE_STRIP, TRIANGLE_FAN
+ mode = prim.mode
+ if mode not in (pygltflib.TRIANGLES, pygltflib.LINES, pygltflib.POINTS):
+ self.warn(
+ f"Unhandled connectivity {mode}. Currently only TRIANGLE and LINE connectivity is supported."
+ )
+ continue
+ glb_materialid = prim.material
+
+ # GLB Prim -> DSG Part
+ part_name = f"{parentname}_prim{prim_idx}_"
+ cmd, part_pb = self._create_pb("PART", parent_id=parentid, name=part_name)
+ part_pb.render = dynamic_scene_graph_pb2.UpdatePart.RenderingMode.CONNECTIVITY
+ part_pb.shading = dynamic_scene_graph_pb2.UpdatePart.ShadingMode.NODAL
+ self._map_material(glb_materialid, part_pb)
+ part_dsg_id = part_pb.id
+ self._handle_update_command(cmd)
+
+ # GLB Attributes -> DSG Geom
+ conn = self._get_data(prim.indices, 0)
+ cmd, conn_pb = self._create_pb("GEOM", parent_id=part_dsg_id)
+ if mode == pygltflib.TRIANGLES:
+ conn_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.TRIANGLES
+ elif mode == pygltflib.LINES:
+ conn_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.LINES
+ else:
+ conn_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.POINTS
+ conn_pb.int_array.extend(conn)
+ conn_pb.chunk_offset = 0
+ conn_pb.total_array_size = len(conn)
+ self._handle_update_command(cmd)
+
+ if prim.attributes.POSITION is not None:
+ verts = self._get_data(prim.attributes.POSITION)
+ cmd, verts_pb = self._create_pb("GEOM", parent_id=part_dsg_id)
+ verts_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.COORDINATES
+ verts_pb.flt_array.extend(verts)
+ verts_pb.chunk_offset = 0
+ verts_pb.total_array_size = len(verts)
+ self._handle_update_command(cmd)
+
+ if prim.attributes.NORMAL is not None:
+ normals = self._get_data(prim.attributes.NORMAL)
+ cmd, normals_pb = self._create_pb("GEOM", parent_id=part_dsg_id)
+ normals_pb.payload_type = dynamic_scene_graph_pb2.UpdateGeom.ArrayType.NODE_NORMALS
+ normals_pb.flt_array.extend(normals)
+ normals_pb.chunk_offset = 0
+ normals_pb.total_array_size = len(normals)
+ self._handle_update_command(cmd)
+
+ if prim.attributes.TEXCOORD_0 is not None:
+ # Note: texture coords are stored as VEC2, so we get 2 components back
+ texcoords = self._get_data(prim.attributes.TEXCOORD_0, components=2)
+ # we only want the 's' component of an s,t pairing
+ texcoords = texcoords[::2]
+ cmd, texcoords_pb = self._create_pb("GEOM", parent_id=part_dsg_id)
+ texcoords_pb.payload_type = (
+ dynamic_scene_graph_pb2.UpdateGeom.ArrayType.NODE_VARIABLE
+ )
+ texcoords_pb.flt_array.extend(texcoords)
+ texcoords_pb.chunk_offset = 0
+ texcoords_pb.total_array_size = len(texcoords)
+ glb_varid = self._find_variable_from_glb_mat(glb_materialid)
+ if glb_varid:
+ texcoords_pb.variable_id = glb_varid
+ self._handle_update_command(cmd)
+
+ def _get_data(
+ self,
+ accessorid: int,
+ components: int = 3,
+ ) -> numpy.ndarray:
+ """
+ Return the float buffer corresponding to the given accessorid. The id
+ is usually obtained from a primitive: primitive.attributes.POSITION
+ or primitive.attributes.NORMAL or primitive.attributes.TEXCOORD_0.
+ It can also come from primitive.indices. In that case, the number of
+ components needs to be set to 0.
+
+ Parameters
+ ----------
+ accessorid: int
+ The accessor index of the primitive.
+
+        components: int
+            The number of floats per vertex: 1, 2, or 3. If the number of
+            components is 0, integer indices are read instead.
+
+ Returns
+ -------
+ numpy.ndarray
+ The float buffer corresponding to the nodal data or an int buffer of connectivity.
+ """
+ dtypes = {}
+ dtypes[pygltflib.BYTE] = numpy.int8
+ dtypes[pygltflib.UNSIGNED_BYTE] = numpy.uint8
+ dtypes[pygltflib.SHORT] = numpy.int16
+ dtypes[pygltflib.UNSIGNED_SHORT] = numpy.uint16
+ dtypes[pygltflib.UNSIGNED_INT] = numpy.uint32
+ dtypes[pygltflib.FLOAT] = numpy.float32
+
+ binary_blob = self._gltf.binary_blob()
+ accessor = self._gltf.accessors[accessorid]
+ buffer_view = self._gltf.bufferViews[accessor.bufferView]
+ dtype = numpy.float32
+ data_dtype = dtypes[accessor.componentType]
+ count = accessor.count * components
+ # connectivity
+ if components == 0:
+ dtype = numpy.uint32
+ count = accessor.count
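+        # The data begins at the buffer view's offset plus the accessor's own byte offset.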
+ offset = buffer_view.byteOffset + accessor.byteOffset
+ blob = binary_blob[offset : offset + buffer_view.byteLength]
+ ret = numpy.frombuffer(blob, dtype=data_dtype, count=count)
+ if data_dtype != dtype:
+ return ret.astype(dtype)
+ return ret
+
+ def _walk_node(self, nodeid: int, parentid: int) -> None:
+ """
+ Given a node id (likely from walking a scenes array), walk the mesh
+ objects in the node. A "node" has the keys "mesh" and "name".
+
+ Each node has a single mesh object in it.
+
+ Parameters
+ ----------
+ nodeid: int
+ The node id to walk.
+
+ parentid: int
+ The DSG parent id.
+
+ """
+ node = self._gltf.nodes[nodeid]
+ name = self._name(node)
+ matrix = self._transform(node)
+
+ # GLB node -> DSG Group
+ cmd, group_pb = self._create_pb("GROUP", parent_id=parentid, name=name)
+ group_pb.matrix4x4.extend(matrix)
+ self._handle_update_command(cmd)
+
+ if node.mesh is not None:
+ self._parse_mesh(node.mesh, group_pb.id, name)
+
+ # Handle node.rotation, node.translation, node.scale, node.matrix
+ for child_id in node.children:
+ self._walk_node(child_id, group_pb.id)
+
+ def start_uploads(self, timeline: List[float]) -> None:
+ """
+ Begin an upload process for a potential collection of files.
+
+ Parameters
+ ----------
+ timeline : List[float]
+            The range of time values spanned by the files to be uploaded.
+ """
+ self._scene_id = self._next_id()
+ self._cur_timeline = timeline
+ self._callback_handler.begin_update()
+ self._update_status_file()
+
+ def end_uploads(self) -> None:
+ """
+ The upload process for the current collection of files is complete.
+ """
+ self._reset()
+ self._update_status_file()
+
+ def _find_variable_from_glb_mat(self, glb_material_id: int) -> Optional[int]:
+ """
+ Given a glb_material id, find the corresponding dsg variable id
+
+ Parameters
+ ----------
+ glb_material_id : int
+ The material id from the glb file.
+
+ Returns
+ -------
+ Optional[int]
+ The dsg variable id or None, if no variable is found.
+ """
+ value = self._glb_textures.get(glb_material_id, None)
+ if value is not None:
+ return value["pb"].id
+ return None
+
+ def upload_file(self, glb_filename: str, timeline: List[float] = [0.0, 0.0]) -> bool:
+ """
+ Parse a GLB file and call out to the handler to present the data
+ to another interface (e.g. Omniverse)
+
+ Parameters
+ ----------
+        glb_filename : str
+            The name of the GLB file to parse.
+
+        timeline : List[float]
+            The first and last time value for which the content of this file should be
+            visible.
+
+ Returns
+ -------
+        bool
+            True on success, False otherwise.
+ """
+ try:
+ ok = True
+ self._gltf = pygltflib.GLTF2().load(glb_filename)
+ self.log(f"File: {glb_filename} Info: {self._gltf.asset}")
+
+ # check for GLTFWriter source
+ if (self._gltf.asset.generator is None) or (
+ ("GLTF Writer" not in self._gltf.asset.generator)
+ and ("Ansys Ensight" not in self._gltf.asset.generator)
+ ):
+ self.error(
+ f"Unable to process: {glb_filename} : Not written by GLTF Writer library"
+ )
+ return False
+
+ # Walk texture nodes -> DSG Variable buffers
+ for tex_idx, texture in enumerate(self._gltf.textures):
+ image = self._gltf.images[texture.source]
+ if image.uri is None:
+ bv = self._gltf.bufferViews[image.bufferView]
+ raw_png = self._gltf.binary_blob()[
+ bv.byteOffset : bv.byteOffset + bv.byteLength
+ ]
+ else:
+ raw_png = self._gltf.get_data_from_buffer_uri(image.uri)
+ png_img = Image.open(io.BytesIO(raw_png))
+ raw_rgba = png_img.tobytes()
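+                # Keep only the first row of pixels; the palette image is assumed constant down each column.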
+ raw_rgba = raw_rgba[0 : len(raw_rgba) // png_img.size[1]]
+ var_name = "Variable_" + str(tex_idx)
+ cmd, var_pb = self._create_pb("VARIABLE", parent_id=self._scene_id, name=var_name)
+ var_pb.location = dynamic_scene_graph_pb2.UpdateVariable.VarLocation.NODAL
+ var_pb.dimension = dynamic_scene_graph_pb2.UpdateVariable.VarDimension.SCALAR
+ var_pb.undefined_value = -1e38
+ var_pb.pal_interp = (
+ dynamic_scene_graph_pb2.UpdateVariable.PaletteInterpolation.CONTINUOUS
+ )
+ var_pb.sub_levels = 0
+ var_pb.undefined_display = (
+ dynamic_scene_graph_pb2.UpdateVariable.UndefinedDisplay.AS_ZERO
+ )
+ var_pb.texture = raw_rgba
+ colors = numpy.frombuffer(raw_rgba, dtype=numpy.uint8)
+ colors.shape = (-1, 4)
+ num = len(colors)
+ levels = []
+ for i, c in enumerate(colors):
+ level = dynamic_scene_graph_pb2.VariableLevel()
+ level.value = float(i) / float(num - 1)
+ level.red = float(c[0]) / 255.0
+ level.green = float(c[1]) / 255.0
+ level.blue = float(c[2]) / 255.0
+ level.alpha = float(c[3]) / 255.0
+ levels.append(level)
+ var_pb.levels.extend(levels)
+                # map the GLB material index to this variable's protocol buffer and texture index
+ d = dict(pb=var_pb, idx=tex_idx)
+ # Find all the materials that map to this texture
+ for mat_idx, mat in enumerate(self._gltf.materials):
+ if not hasattr(mat, "pbrMetallicRoughness"):
+ continue
+ if not hasattr(mat.pbrMetallicRoughness, "baseColorTexture"):
+ continue
+ if not hasattr(mat.pbrMetallicRoughness.baseColorTexture, "index"):
+ continue
+ if mat.pbrMetallicRoughness.baseColorTexture.index == tex_idx:
+ material_index = mat_idx
+ # does this Variable/texture already exist?
+ duplicate = None
+ saved_id = var_pb.id
+ saved_name = var_pb.name
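+                # Temporarily adopt each candidate's name and id so the equality test compares palette content only.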
+ for key, value in self._glb_textures.items():
+ var_pb.name = value["pb"].name
+ var_pb.id = value["pb"].id
+ if value["pb"] == var_pb:
+ duplicate = key
+ break
+ var_pb.id = saved_id
+ var_pb.name = saved_name
+ # if a new texture, add the Variable and create an index to the material
+ if duplicate is None:
+ self._handle_update_command(cmd)
+ self._glb_textures[material_index] = d
+ else:
+ # create an additional reference to this variable from this material
+ self._glb_textures[material_index] = self._glb_textures[duplicate]
+
+ # GLB file: general layout
+ # scene: "default_index"
+ # scenes: [scene_index].nodes -> [node ids]
+ # was scene_id = self._gltf.scene
+ num_scenes = len(self._gltf.scenes)
+ for scene_idx in range(num_scenes):
+ # GLB Scene -> DSG View
+ cmd, view_pb = self._create_pb("VIEW", parent_id=self._scene_id)
+ view_pb.lookat.extend([0.0, 0.0, -1.0])
+ view_pb.lookfrom.extend([0.0, 0.0, 0.0])
+ view_pb.upvector.extend([0.0, 1.0, 0.0])
+ view_pb.timeline.extend(self._build_scene_timeline(scene_idx, timeline))
+ if len(self._gltf.cameras) > 0:
+ camera = self._gltf.cameras[0]
+ if camera.type == "orthographic":
+ view_pb.nearfar.extend(
+ [float(camera.orthographic.znear), float(camera.orthographic.zfar)]
+ )
+ else:
+ view_pb.nearfar.extend(
+ [float(camera.perspective.znear), float(camera.perspective.zfar)]
+ )
+ view_pb.fieldofview = camera.perspective.yfov
+                        view_pb.aspectratio = camera.perspective.aspectRatio
+ self._handle_update_command(cmd)
+ for node_id in self._gltf.scenes[scene_idx].nodes:
+ self._walk_node(node_id, view_pb.id)
+ self._finish_part()
+
+ self._callback_handler.end_update()
+
+ except Exception as e:
+ import traceback
+
+ self.error(f"Unable to process: {glb_filename} : {e}")
+ traceback_str = "".join(traceback.format_tb(e.__traceback__))
+ logging.debug(f"Traceback: {traceback_str}")
+ ok = False
+
+ return ok
+
+ def _build_scene_timeline(self, scene_idx: int, input_timeline: List[float]) -> List[float]:
+ """
+ For a given scene and externally supplied timeline, compute the timeline for the scene.
+
+ If the ANSYS_scene_time extension is present, use that value.
+ If there is only a single scene, return the supplied timeline.
+ If the supplied timeline is empty, use an integer timeline based on the number of scenes in the GLB file.
+ Carve up the timeline into chunks, one per scene.
+
+ Parameters
+ ----------
+ scene_idx: int
+ The index of the scene to compute for.
+
+ input_timeline: List[float]
+ An externally supplied timeline.
+
+ Returns
+ -------
+ List[float]
+ The computed timeline.
+ """
+ # if ANSYS_scene_time is used, time ranges will come from there
+ if "ANSYS_scene_time" in self._gltf.scenes[scene_idx].extensions:
+ return self._gltf.scenes[scene_idx].extensions["ANSYS_scene_time"]
+ # if there is only one scene, then use the input timeline
+ num_scenes = len(self._gltf.scenes)
+ if num_scenes == 1:
+ return input_timeline
+ # if the timeline has zero length, we make it the number of scenes
+ timeline = input_timeline
+ if timeline[1] - timeline[0] <= 0.0:
+ timeline = [0.0, float(num_scenes - 1)]
+ # carve time into the input timeline.
+ delta = (timeline[1] - timeline[0]) / float(num_scenes)
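+        # Example: 3 scenes over [0.0, 3.0] -> scene 0 spans [0.0, 1.0], scene 1 [1.0, 2.0], scene 2 [2.0, 3.0].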
+ output: List[float] = []
+ output.append(float(scene_idx) * delta + timeline[0])
+ output.append(output[0] + delta)
+ return output
+
+ @staticmethod
+ def _transform(node: Any) -> List[float]:
+ """
+ Convert the node "matrix" or "translation", "rotation" and "scale" values into
+ a 4x4 matrix representation.
+
+ "nodes": [
+ {
+ "matrix": [
+ 1,0,0,0,
+ 0,1,0,0,
+ 0,0,1,0,
+ 5,6,7,1
+ ],
+ ...
+ },
+ {
+ "translation":
+ [ 0,0,0 ],
+ "rotation":
+ [ 0,0,0,1 ],
+ "scale":
+ [ 1,1,1 ]
+ ...
+ },
+ ]
+
+ Parameters
+ ----------
+ node: Any
+ The node to compute the matrix transform for.
+
+ Returns
+ -------
+ List[float]
+ The 4x4 transformation matrix.
+
+ """
+ identity = numpy.identity(4)
+ if node.matrix:
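+            # glTF stores matrices in column-major order; transpose before flattening.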
+ tmp = numpy.array(node.matrix)
+ tmp.shape = (4, 4)
+ tmp = tmp.transpose()
+ return list(tmp.flatten())
+ if node.translation:
+ identity[3][0] = node.translation[0]
+ identity[3][1] = node.translation[1]
+ identity[3][2] = node.translation[2]
+ if node.rotation:
+            # glTF quaternions are ordered [x, y, z, w] (identity [0,0,0,1]); reorder to [w, x, y, z] here
+ q = [node.rotation[3], node.rotation[0], node.rotation[1], node.rotation[2]]
+ rot = numpy.array(
+ [
+ [q[0], -q[1], -q[2], -q[3]],
+ [q[1], q[0], -q[3], q[2]],
+ [q[2], q[3], q[0], -q[1]],
+ [q[3], -q[2], q[1], q[0]],
+ ]
+ )
+ identity = numpy.multiply(identity, rot)
+ if node.scale:
+ s = node.scale
+ scale = numpy.array(
+ [
+ [s[0], 0.0, 0.0, 0.0],
+ [0.0, s[1], 0.0, 0.0],
+ [0.0, 0.0, s[2], 0.0],
+ [0.0, 0.0, 0.0, 1.0],
+ ]
+ )
+ identity = numpy.multiply(identity, scale)
+ return list(identity.flatten())
+
+ def _name(self, node: Any) -> str:
+ """
+        Given a GLB node object, return the name of the node. If the node does not
+        have a name, generate one.
+
+ Parameters
+ ----------
+ node: Any
+ The GLB node to get the name of.
+
+ Returns
+ -------
+ str
+ The name of the node.
+ """
+ if hasattr(node, "name") and node.name:
+ return node.name
+ self._node_idx += 1
+ return f"Node_{self._node_idx}"
+
+ def _create_pb(
+        self, cmd_type: str, parent_id: int = -1, name: str = ""
+    ) -> Tuple["dynamic_scene_graph_pb2.SceneUpdateCommand", Any]:
+ cmd = dynamic_scene_graph_pb2.SceneUpdateCommand()
+ if cmd_type == "PART":
+ cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_PART
+ subcmd = cmd.update_part
+ subcmd.hash = str(uuid.uuid1())
+ elif cmd_type == "GROUP":
+ cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GROUP
+ subcmd = cmd.update_group
+ elif cmd_type == "VARIABLE":
+ cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VARIABLE
+ subcmd = cmd.update_variable
+ elif cmd_type == "GEOM":
+ cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_GEOM
+ subcmd = cmd.update_geom
+ subcmd.hash = str(uuid.uuid1())
+ elif cmd_type == "VIEW":
+ cmd.command_type = dynamic_scene_graph_pb2.SceneUpdateCommand.UPDATE_VIEW
+ subcmd = cmd.update_view
+ subcmd.id = self._next_id()
+ if parent_id >= 0:
+ subcmd.parent_id = parent_id
+ if cmd_type not in ("GEOM", "VIEW"):
+ if name:
+ subcmd.name = name
+ return cmd, subcmd
diff --git a/src/ansys/pyensight/core/utils/parts.py b/src/ansys/pyensight/core/utils/parts.py
index d79fbe9c4de..0f70b0040d5 100644
--- a/src/ansys/pyensight/core/utils/parts.py
+++ b/src/ansys/pyensight/core/utils/parts.py
@@ -1,1199 +1,1199 @@
-"""Parts module.
-
-This module allows PyEnSight to control the parts in the EnSight session.
-
-Example for selecting all 3D parts:
-
-(PyEnSight)
->>> from ansys.pyensight.core import LocalLauncher
->>> session = LocalLauncher().start()
->>> parts = session.ensight.utils.parts
->>> parts.select_parts_by_dimension(3)
-
-(EnSight)
->>> from ensight.utils import parts
->>> parts.select_parts_by_dimension(3)
-
-"""
-from types import ModuleType
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
-
-try:
- import ensight
- from ensight.objs import ens_emitterobj, ensobjlist # type: ignore
-except ImportError:
- from ansys.api.pyensight.ens_emitterobj import ens_emitterobj
- from ansys.pyensight.core.listobj import ensobjlist
-
-if TYPE_CHECKING:
- from ansys.api.pyensight import ensight_api
- from ansys.api.pyensight.ens_part import ENS_PART
- from ansys.api.pyensight.ens_part_particle_trace import ENS_PART_PARTICLE_TRACE
- from ansys.api.pyensight.ens_var import ENS_VAR
-
-
-def convert_part(
- _ensight: Union["ensight_api.ensight", "ensight"], part: Union[str, int, "ENS_PART"]
-):
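-    """Return the part number of a part given by name, number, or ENS_PART instance."""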
- if isinstance(part, str):
- return _ensight.objs.core.PARTS[part][0].PARTNUMBER
- elif isinstance(part, int):
- return part
- elif hasattr(part, "PARTNUMBER"):
- return part.PARTNUMBER
-
-
-def convert_variable(
- _ensight: Union["ensight_api.ensight", "ensight"], var: Union[str, int, "ENS_VAR"]
-) -> Optional[int]:
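-    """Return the variable ID of a variable given by name, ID, or ENS_VAR instance."""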
- if isinstance(var, str):
- return int(_ensight.objs.core.VARIABLES[var][0].ID)
- elif isinstance(var, int):
- return var
- elif hasattr(var, "ID"):
- return int(var.ID)
- return None # pragma: no cover
-
-
-class Parts:
- """Controls the parts in the current EnSight ``Session`` instance."""
-
- class _EnSEmitterPoint(ens_emitterobj): # pragma: no cover
- def __init__( # pragma: no cover
- self,
- ensight: "ensight",
- point1: Optional[List[float]] = [0, 0, 0],
- ): # pragma: no cover
- if not isinstance(ensight, ModuleType):
- raise RuntimeError(
- "The class cannot be used directly in PyEnSight. It should not be used directly even in EnSight"
- )
- super().__init__(ensight.objs.EMIT_CURSOR)
- self.ensight = ensight
- self.ensight.view_transf.cursor(*point1)
- self.CENTROID = point1
-
- class _EnSEmitterGrid(ens_emitterobj): # pragma: no cover
- def __init__( # pragma: no cover
- self,
- ensight: "ensight",
- point1: Optional[List[float]] = [0, 0, 0],
- point2: Optional[List[float]] = [0, 0, 0],
- point3: Optional[List[float]] = [0, 0, 0],
- point4: Optional[List[float]] = [0, 0, 0],
- num_points_x: Optional[int] = 25,
- num_points_y: Optional[int] = 25,
- ): # pragma: no cover
- if not isinstance(ensight, ModuleType):
- raise RuntimeError(
- "The class cannot be used directly in PyEnSight. It should not be used directly even in EnSight"
- )
- super().__init__(ensight.objs.EMIT_PLANE)
- self.ensight = ensight
- self.ensight.view_transf.plane(1, *point1)
- self.ensight.view_transf.plane(2, *point2)
- self.ensight.view_transf.plane(3, *point3)
- self.POINT1 = point1
- self.POINT2 = point2
- self.POINT3 = point3
- self.POINT4 = point4
- self.NUM_POINTS_X = num_points_x
- self.NUM_POINTS_Y = num_points_y
-
- class _EnSEmitterLine(ens_emitterobj): # pragma: no cover
- def __init__( # pragma: no cover
- self,
- ensight: "ensight",
- point1: Optional[List[float]] = [0, 0, 0],
- point2: Optional[List[float]] = [0, 0, 0],
- num_points: Optional[int] = 100,
- ): # pragma: no cover
- if not isinstance(ensight, ModuleType):
- raise RuntimeError(
- "The class cannot be used directly in PyEnSight. It should not be used directly even in EnSight"
- )
- super().__init__(ensight.objs.EMIT_LINE)
- self.ensight = ensight
- self.ensight.view_transf.line(1, *point1)
- self.ensight.view_transf.line(2, *point2)
- self.POINT1 = point1
- self.POINT2 = point2
- self.NUM_POINTS = num_points
-
- class _EnSEmitterPart(ens_emitterobj): # pragma: no cover
- def __init__( # pragma: no cover
- self,
- ensight: "ensight",
- part: Optional[Any] = None,
- part_kind: Optional[Any] = 0,
- num_points: Optional[int] = 100,
- ): # pragma: no cover
- if not isinstance(ensight, ModuleType):
- raise RuntimeError(
- "The class cannot be used directly in PyEnSight. It should not be used directly even in EnSight"
- )
- super().__init__(ensight.objs.EMIT_PART)
- self.ensight = ensight
- if not part:
- raise RuntimeError("part is a required input")
- self.PART = convert_part(self.ensight, part)
- self.NUM_POINTS = num_points
- self.DISTRIB_TYPE = part_kind
-
- def __init__(self, ensight: Union["ensight_api.ensight", "ensight"]):
- self.ensight = ensight
-
- def select_parts_by_dimension(self, dimension: int) -> ensobjlist["ENS_PART"]:
- """Select parts by the input dimension and return the parts found.
-
- Parameters
- ----------
- dimension : int
- Dimension for selecting parts.
-
- Returns
- -------
- ensobjlist["ENS_PART"]
- found (ensobjlist): List of parts found.
-
- """
- parts = self.ensight.objs.core.PARTS
- parts.set_attr("SELECTED", False)
- found = parts.find(True, f"HAS{dimension}DELEMENTS")
- found.set_attr("SELECTED", True)
- return found
-
- def select_parts_invert(self) -> ensobjlist["ENS_PART"]:
- """Select parts currently not selected, deselecting the previously selected parts.
-
- Returns
- -------
- ensobjlist["ENS_PART"]
- Updated list of parts selected.
-
- """
- self.ensight.part.select_invert()
- parts = self.ensight.objs.core.PARTS
- return parts.find(True, "SELECTED")
-
- def select_parts_by_tag(
- self,
- tag: Optional[str] = None,
- value: Optional[str] = None,
- tagdict: Optional[Dict[str, str]] = None,
- ) -> ensobjlist["ENS_PART"]:
- """Select parts by the input dimension and return the parts found.
-
- Parameters
- ----------
- tag : str, optional
- Tag for finding the parts.
- value : str, optional
- Value for finding the parts.
- tagdict : dict, optional
- Dictionary containing the key and value pairs for finding
- the parts. Only the parts that have all the keys and corresponding
- values are returned. If a value for this parameter is supplied, it
- takes precedence over the values supplied for the ``tag`` and
- ``value`` parameters.
-
- Returns
- -------
- ensobjlist["ENS_PART"]
- List of parts found. If no arguments are given, all parts are returned.
-
- """
- parts = self.ensight.objs.core.PARTS
- metadata = {p: p.METADATA for p in parts}
- found = ensobjlist()
- if not tag and not value and not tagdict:
- self.ensight.part.select_all()
- return parts
- if not tagdict:
- if tag and value:
- found = ensobjlist([p for p, met in metadata.items() if met.get(tag) == value])
- elif value and not tag:
- found = ensobjlist([p for p, met in metadata.items() if value in met.values()])
- elif tag and not value: # pragma: no cover
- found = ensobjlist([p for p, met in metadata.items() if tag in met.keys()])
- else:
- found = ensobjlist(
- [
- p
- for p, met in metadata.items()
- if all(met.get(k) == v for k, v in tagdict.items())
- ]
- )
- if found:
- found.set_attr("SELECTED", True)
- return found
-
- _EMIT_POINT: int = 0
- _EMIT_LINE: int = 1
- _EMIT_PLANE: int = 2
- _EMIT_PART: int = 3
- PT_POS_TIME: str = "+"
- PT_NEG_TIME: str = "-"
- PT_POS_NEG_TIME: str = "+/-"
- PART_EMIT_FROM_NODES: int = 0
- PART_EMIT_FROM_AREA: int = 1
-
- def _create_emitters(
- self,
- emitter_type: int,
- points: Optional[List[List[float]]] = None,
- point1: Optional[List[float]] = None,
- point2: Optional[List[float]] = None,
- point3: Optional[List[float]] = None,
- parts: Optional[List["ENS_PART"]] = None,
- part_distribution_type: Optional[int] = 0,
- num_points: Optional[int] = 100,
- num_points_x: Optional[int] = 25,
- num_points_y: Optional[int] = 25,
- ) -> List[Any]:
- """Private routine to create emitter objects"""
- new_emitters: List[Any] = []
- if emitter_type == self._EMIT_POINT:
- if not points: # pragma: no cover
- raise RuntimeError(
- "list of points needed if particle trace emitted from points"
- ) # pragma: no cover
- for p in points:
- if isinstance(self.ensight, ModuleType): # pragma: no cover
- new_emitters.append(
- self._EnSEmitterPoint(self.ensight, point1=p)
- ) # pragma: no cover
- else:
- new_emitters.append(
- f"ensight.utils.parts._EnSEmitterPoint(ensight, point1={p})"
- )
- elif emitter_type == self._EMIT_LINE:
-            if not all([point1, point2]):
- raise RuntimeError("point1 and point2 needed if particle trace emitted from line")
- if isinstance(self.ensight, ModuleType): # pragma: no cover
- new_emitters.append( # pragma: no cover
- self._EnSEmitterLine(
- self.ensight, point1=point1, point2=point2, num_points=num_points
- )
- )
- else:
- new_emitters.append(
- f"ensight.utils.parts._EnSEmitterLine(ensight, point1={point1}, point2={point2}, num_points={num_points})"
- )
- elif emitter_type == self._EMIT_PLANE:
-            if not all([point1, point2, point3]):
- raise RuntimeError( # pragma: no cover
- "point1, point2 and point3 needed if particle trace emitted from plane"
- )
- if isinstance(self.ensight, ModuleType): # pragma: no cover
- new_emitters.append( # pragma: no cover
- self._EnSEmitterGrid(
- self.ensight,
- point1=point1,
- point2=point2,
- point3=point3,
- num_points_x=num_points_x,
- num_points_y=num_points_y,
- )
- )
- else:
- new_emitters.append(
- f"ensight.utils.parts._EnSEmitterGrid(ensight, point1={point1}, point2={point2}, point3={point3}, num_points_x={num_points_x}, num_points_y={num_points_y})"
- )
- elif emitter_type == self._EMIT_PART: # pragma: no cover
- if not parts: # pragma: no cover
- raise RuntimeError(
- "part and num_points needed if particle trace emitted from part"
- ) # pragma: no cover
- for p in parts:
- if isinstance(self.ensight, ModuleType): # pragma: no cover
- new_emitters.append( # pragma: no cover
- self._EnSEmitterPart(
- self.ensight,
- part=p,
- num_points=num_points,
- part_kind=part_distribution_type,
- )
- )
- else:
- new_emitters.append(
- f"ensight.utils.parts._EnSEmitterPart(ensight, part={convert_part(self.ensight ,p)}, num_points={num_points}, part_kind={part_distribution_type})"
- )
- else:
- raise RuntimeError(
- "No input provided to create the emitters for the particle trace"
- ) # pragma: no cover
- return new_emitters
-
- def _create_particle_trace_part(
- self,
- name: str,
- variable: Union[str, int, "ENS_VAR"],
- direction: str,
- source_parts: List["ENS_PART"],
- pathlines: Optional[bool] = False,
- emit_time: Optional[float] = None,
- total_time: Optional[float] = None,
- delta_time: Optional[float] = None,
- surface_restrict: Optional[bool] = False,
- ) -> "ENS_PART_PARTICLE_TRACE":
- """Private routine to create a particle trace part object"""
- current_timestep = None
- direction_map = {
- self.PT_POS_TIME: self.ensight.objs.enums.POS_TIME,
- self.PT_NEG_TIME: self.ensight.objs.enums.NEG_TIME,
- self.PT_POS_NEG_TIME: self.ensight.objs.enums.POS_NEG_TIME,
- }
- idx = self.ensight.objs.enums.PART_PARTICLE_TRACE
- def_part: "ENS_PART_PARTICLE_TRACE" = self.ensight.objs.core.DEFAULTPARTS[idx]
- def_part.TYPE = self.ensight.objs.enums.STREAMLINE
- if pathlines is True:
- def_part.TYPE = self.ensight.objs.enums.PATHLINE
- current_timestep = self.ensight.objs.core.TIMESTEP
- self.ensight.objs.core.TIMESTEP = self.ensight.objs.core.TIMESTEP_LIMITS[0]
- if total_time:
- def_part.TOTALTIME = total_time
- if delta_time:
- def_part.DELTATIME = delta_time
- if emit_time: # pragma: no cover
- def_part.STARTTIME = emit_time
- def_part.DESCRIPTION = name
- def_part.VARIABLE = convert_variable(self.ensight, variable)
- def_part.SURFACERESTRICTED = False
- def_part.TRACEDIRECTION = direction_map.get(direction)
- if surface_restrict:
- def_part.SURFACERESTRICTED = True
- particle_trace_part: "ENS_PART_PARTICLE_TRACE" = def_part.createpart(
- sources=source_parts, name=name
- )[0]
- if current_timestep:
- self.ensight.objs.core.TIMESTEP = current_timestep
- return particle_trace_part
-
- def _add_emitters_to_particle_trace_part(
- self,
- particle_trace_part: "ENS_PART_PARTICLE_TRACE",
- new_emitters: List[Any],
- palette: Optional[str] = None,
- clean: Optional[bool] = False,
- ) -> "ENS_PART_PARTICLE_TRACE":
- """Private utility to add emitters to an existing particle trace part."""
- if isinstance(self.ensight, ModuleType): # pragma: no cover
- if clean: # pragma: no cover
- emitters = [] # pragma: no cover
- else: # pragma: no cover
- emitters = particle_trace_part.EMITTERS.copy() # pragma: no cover
- emitters.extend(new_emitters) # pragma: no cover
- particle_trace_part.EMITTERS = emitters # pragma: no cover
- else:
- if clean:
- self.ensight._session.cmd("enscl.emitters=[]", do_eval=False)
- else:
- self.ensight._session.cmd(
- f"enscl.emitters=ensight.objs.wrap_id({particle_trace_part.objid}).EMITTERS.copy()",
- do_eval=False,
- )
- text = "enscl.emitters.extend(["
- for emitter in new_emitters:
- text += emitter + ", "
- text = text[:-2]
- text += "])"
- self.ensight._session.cmd(text, do_eval=False)
- self.ensight._session.cmd(
- f"ensight.objs.wrap_id({particle_trace_part.objid}).setattr('EMITTERS', enscl.emitters.copy())"
- )
- self.ensight._session.cmd("del enscl.emitters", do_eval=False)
- if palette:
- particle_trace_part.COLORBYPALETTE = palette
- return particle_trace_part
-
- def _cure_particle_trace_part(
- self, particle_trace_part: Union[str, int, "ENS_PART_PARTICLE_TRACE"]
- ) -> "ENS_PART_PARTICLE_TRACE":
- """Private utility to cure an input particle trace part and convert it to an ``ENS_PART`"""
-
- # the add_emitter* functions were added in 2024 R2
- if not isinstance(self.ensight, ModuleType): # pragma: no cover
- self.ensight._session.ensight_version_check("2024 R2")
-
- _particle_trace_part: "ENS_PART_PARTICLE_TRACE"
- if isinstance(particle_trace_part, (str, int)): # pragma: no cover
- temp = self.ensight.objs.core.PARTS[particle_trace_part] # pragma: no cover
- if not temp: # pragma: no cover
- raise RuntimeError(
- "particle_trace_part input is not a valid part"
- ) # pragma: no cover
- _particle_trace_part = temp[0] # pragma: no cover
- else:
- _particle_trace_part = particle_trace_part
- return _particle_trace_part
-
- def _prepare_particle_creation(
- self,
- direction: Optional[str] = None,
- source_parts: Optional[List[Union[str, int, "ENS_PART"]]] = None,
- ) -> Tuple[str, List["ENS_PART"]]:
- """Private utility to set the direction if not provided, and to cure the list of source parts."""
-
- # the create_particle* functions were added in 2024 R2
- if not isinstance(self.ensight, ModuleType): # pragma: no cover
- self.ensight._session.ensight_version_check("2024 R2")
-
- if not direction:
- direction = self.PT_POS_TIME
- if source_parts: # pragma: no cover
- converted_source_parts = [convert_part(self.ensight, p) for p in source_parts]
- if not source_parts: # pragma: no cover
- converted_source_parts = self.ensight.objs.core.selection( # pragma: no cover
- name="ENS_PART"
- )
- if not converted_source_parts: # pragma: no cover
- raise RuntimeError("No part selected for particle trace generation") # pragma: no cover
- return direction, converted_source_parts
-
- def _find_palette(self, color_by: Optional[Union[str, int, "ENS_VAR"]] = None) -> Optional[str]:
- """Private utility to find the description of the input color_by variable"""
- palette: Optional[str] = None
- if color_by:
- try:
- _color_by_var: List["ENS_VAR"] = self.ensight.objs.core.VARIABLES.find(
- [convert_variable(self.ensight, color_by)], attr="ID"
- )
- if _color_by_var:
- palette = _color_by_var[0].DESCRIPTION
- except Exception:
- raise RuntimeError(
- "The variable supplied to color the particle trace by does not exist"
- )
- return palette
-
- def create_particle_trace_from_points(
- self,
- name: str,
- variable: Union[str, int, "ENS_VAR"],
- points: List[List[float]],
- direction: Optional[str] = None,
- pathlines: Optional[bool] = False,
- source_parts: Optional[List[Union[str, int, "ENS_PART"]]] = None,
- emit_time: Optional[float] = None,
- total_time: Optional[float] = None,
- delta_time: Optional[float] = None,
- color_by: Optional[Union[str, int, "ENS_VAR"]] = None,
- ) -> "ENS_PART_PARTICLE_TRACE":
- """
-        Create a particle trace part from a list of points.
- Returns the ``ENS_PART`` generated.
-
- Parameters
- ----------
-
- name: str
- The name of part to be generated
- variable:
- The variable to compute the particle traces with.
- It can be the name, the ID or the ``ENS_VAR`` object. It must be a vector variable.
- direction: str
- The direction for the particle traces to be generated.
- This table describes the options:
-
- ================== ==============================================
- Name Query type
- ================== ==============================================
- PT_POS_TIME Follow the vector direction
- PT_NEG_TIME Go contrary to the vector direction
- PT_POS_NEG_TIME Follow and go contrary to the vector direction
- ================== ==============================================
-
- If not provided, it will default to ``PT_POS_TIME``
- pathlines: bool
- True if the particle traces need to be pathlines
- points: list
- List of coordinates for the seed points.
- source_parts: list
- A list of parts to create the particle trace in. For instance, in a CFD
- simulation this might be the fluid zone.
- If not provided, the function will try to look for the selected parts.
- emit_time: float
- The emission time to start the particle trace from. If not provided,
- it will use the current time.
- total_time: float
- The total emission time. If not provided, EnSight will provide the end time
- for a transient simulation, an internal best time for steady state simulations.
- delta_time: float
- The interval for the emissions. If not provided, EnSight will provide
- a best estimate.
- color_by
- The optional variable to color the particle trace by.
- It can be the name, the ID or the ``ENS_VAR`` object.
-
- Examples
- --------
- >>> s = LocalLauncher().start()
- >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
- >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
- >>> s.load_data(cas_file, result_file=dat_file)
- >>> s.ensight.utils.parts.create_particle_trace_from_points("mytraces", "Velocity", points=[[-0.02,-0.123,0.01576],[0.109876,-0.123,0.0123]], source_parts=parts.select_parts_by_dimension(3))
- """
- emitter_type = self._EMIT_POINT
- direction, converted_source_parts = self._prepare_particle_creation(
- direction=direction, source_parts=source_parts
- )
- particle_trace_part = self._create_particle_trace_part(
- name,
- variable,
- direction,
- converted_source_parts,
- pathlines=pathlines,
- emit_time=emit_time,
- delta_time=delta_time,
- total_time=total_time,
- )
- new_emitters = self._create_emitters(emitter_type=emitter_type, points=points)
- palette = self._find_palette(color_by=color_by)
- return self._add_emitters_to_particle_trace_part(
- particle_trace_part, new_emitters=new_emitters, palette=palette, clean=True
- )
-
- def create_particle_trace_from_line(
- self,
- name: str,
- variable: Union[str, int, "ENS_VAR"],
- point1: List[float],
- point2: List[float],
- num_points: Optional[int] = 100,
- direction: Optional[str] = None,
- pathlines: Optional[bool] = False,
- source_parts: Optional[List[Union[str, int, "ENS_PART"]]] = None,
- emit_time: Optional[float] = None,
- total_time: Optional[float] = None,
- delta_time: Optional[float] = None,
- color_by: Optional[Union[str, int, "ENS_VAR"]] = None,
- ) -> "ENS_PART_PARTICLE_TRACE":
- """
- Create a particle trace part from a line.
- Returns the ``ENS_PART`` generated.
-
- Parameters
- ----------
-
- name: str
- The name of part to be generated
- variable:
- The variable to compute the particle traces with.
- It can be the name, the ID or the ``ENS_VAR`` object. It must be a vector variable.
- direction: str
- The direction for the particle traces to be generated.
- This table describes the options:
-
- ================== ==============================================
- Name Query type
- ================== ==============================================
- PT_POS_TIME Follow the vector direction
- PT_NEG_TIME Go contrary to the vector direction
- PT_POS_NEG_TIME Follow and go contrary to the vector direction
- ================== ==============================================
-
- If not provided, it will default to ``PT_POS_TIME``
- pathlines: bool
- True if the particle traces need to be pathlines
- point1: list
- List of coordinates for point 1.
- point2: list
- List of coordinates for point 2.
- source_parts: list
- A list of parts to create the particle trace in. For instance, in a CFD
- simulation this might be the fluid zone.
- If not provided, the function will try to look for the selected parts.
- num_points: int
- The number of points to emit from. Defaults to 100.
- emit_time: float
- The emission time to start the particle trace from. If not provided,
- it will use the current time.
- total_time: float
- The total emission time. If not provided, EnSight will provide the end time
- for a transient simulation, an internal best time for steady state simulations.
- delta_time: float
- The interval for the emissions. If not provided, EnSight will provide
- a best estimate.
- color_by
- The optional variable to color the particle trace by.
- It can be the name, the ID or the ``ENS_VAR`` object.
-
- Examples
- --------
- >>> s = LocalLauncher().start()
- >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
- >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
- >>> s.load_data(cas_file, result_file=dat_file)
- >>> parts = s.ensight.utils.parts
- >>> parts.create_particle_trace_from_line("mytraces", "Velocity", point1=[-0.02,-0.123,0.01576], point2=[0.109876,-0.123,0.0123], num_points=10, source_parts=parts.select_parts_by_dimension(3))
- """
- emitter_type = self._EMIT_LINE
- direction, converted_source_parts = self._prepare_particle_creation(
- direction=direction, source_parts=source_parts
- )
- particle_trace_part = self._create_particle_trace_part(
- name,
- variable,
- direction,
- converted_source_parts,
- pathlines=pathlines,
- emit_time=emit_time,
- delta_time=delta_time,
- total_time=total_time,
- )
- new_emitters = self._create_emitters(
- emitter_type=emitter_type, point1=point1, point2=point2, num_points=num_points
- )
- palette = self._find_palette(color_by=color_by)
- return self._add_emitters_to_particle_trace_part(
- particle_trace_part, new_emitters=new_emitters, palette=palette, clean=True
- )
-
- def create_particle_trace_from_plane(
- self,
- name: str,
- variable: Union[str, int, "ENS_VAR"],
- point1: List[float],
- point2: List[float],
- point3: List[float],
- num_points_x: Optional[int] = 25,
- num_points_y: Optional[int] = 25,
- direction: Optional[str] = None,
- pathlines: Optional[bool] = False,
- source_parts: Optional[List[Union[str, int, "ENS_PART"]]] = None,
- emit_time: Optional[float] = None,
- total_time: Optional[float] = None,
- delta_time: Optional[float] = None,
- color_by: Optional[Union[str, int, "ENS_VAR"]] = None,
- ) -> "ENS_PART_PARTICLE_TRACE":
- """
- Create a particle trace part from a plane.
- Returns the ``ENS_PART`` generated.
-
- Parameters
- ----------
-
- name: str
- The name of part to be generated
- variable:
- The variable to compute the particle traces with.
- It can be the name, the ID or the ``ENS_VAR`` object. It must be a vector variable.
- direction: str
- The direction for the particle traces to be generated.
- This table describes the options:
-
- ================== ==============================================
- Name Query type
- ================== ==============================================
- PT_POS_TIME Follow the vector direction
- PT_NEG_TIME Go contrary to the vector direction
- PT_POS_NEG_TIME Follow and go contrary to the vector direction
- ================== ==============================================
-
- If not provided, it will default to ``PT_POS_TIME``
- pathlines: bool
- True if the particle traces need to be pathlines
- point1: list
- List of coordinates for point 1, being a corner of the plane.
- point2: list
- List of coordinates for point 2, being a corner of the plane.
- point3: list
- List of coordinates for point 3, being a corner of the plane.
- source_parts: list
- A list of parts to create the particle trace in. For instance, in a CFD
- simulation this might be the fluid zone.
- If not provided, the function will try to look for the selected parts.
- num_points_x: int
- The number of points on the ``X`` direction of the emission plane.
- Defaults to 25.
- num_points_y: int
- The number of points on the ``Y`` direction of the emission plane.
- Defaults to 25.
- emit_time: float
- The emission time to start the particle trace from. If not provided,
- it will use the current time.
- total_time: float
- The total emission time. If not provided, EnSight will provide the end time
- for a transient simulation, an internal best time for steady state simulations.
- delta_time: float
- The interval for the emissions. If not provided, EnSight will provide
- a best estimate.
- color_by
- The optional variable to color the particle trace by.
- It can be the name, the ID or the ``ENS_VAR`` object.
-
- Examples
- --------
- >>> s = LocalLauncher().start()
- >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
- >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
- >>> s.load_data(cas_file, result_file=dat_file)
- >>> parts = s.ensight.utils.parts
-        >>> parts.create_particle_trace_from_plane("mytraces", "Velocity", point1=[-0.02,-0.123,0.01576], point2=[0.109876,-0.123,0.0123], point3=[0.1, 0, 0.05], num_points_x=10, num_points_y=10, source_parts=parts.select_parts_by_dimension(3))
- """
- emitter_type = self._EMIT_PLANE
- direction, converted_source_parts = self._prepare_particle_creation(
- direction=direction, source_parts=source_parts
- )
- particle_trace_part = self._create_particle_trace_part(
- name,
- variable,
- direction,
- converted_source_parts,
- pathlines=pathlines,
- emit_time=emit_time,
- delta_time=delta_time,
- total_time=total_time,
- )
- new_emitters = self._create_emitters(
- emitter_type=emitter_type,
- point1=point1,
- point2=point2,
- point3=point3,
- num_points_x=num_points_x,
- num_points_y=num_points_y,
- )
- palette = self._find_palette(color_by=color_by)
- return self._add_emitters_to_particle_trace_part(
- particle_trace_part, new_emitters=new_emitters, palette=palette, clean=True
- )
-
- def create_particle_trace_from_parts(
- self,
- name: str,
- variable: Union[str, int, "ENS_VAR"],
- parts: List[Union[str, int, "ENS_PART"]],
- part_distribution_type: Optional[int] = 0,
- num_points: Optional[int] = 100,
- direction: Optional[str] = None,
- pathlines: Optional[bool] = False,
- source_parts: Optional[List[Union[str, int, "ENS_PART"]]] = None,
- emit_time: Optional[float] = None,
- total_time: Optional[float] = None,
- delta_time: Optional[float] = None,
- color_by: Optional[Union[str, int, "ENS_VAR"]] = None,
- surface_restrict: Optional[bool] = False,
- ) -> "ENS_PART_PARTICLE_TRACE":
- """
- Create a particle trace part from a list of seed parts.
- Returns the ``ENS_PART`` generated.
-
- Parameters
- ----------
-
- name: str
- The name of part to be generated
- variable:
- The variable to compute the particle traces with.
- It can be the name, the ID or the ``ENS_VAR`` object. It must be a vector variable.
- direction: str
- The direction for the particle traces to be generated.
- This table describes the options:
-
- ================== ==============================================
- Name Query type
- ================== ==============================================
- PT_POS_TIME Follow the vector direction
- PT_NEG_TIME Go contrary to the vector direction
- PT_POS_NEG_TIME Follow and go contrary to the vector direction
- ================== ==============================================
-
- If not provided, it will default to ``PT_POS_TIME``
- pathlines: bool
- True if the particle traces need to be pathlines
- source_parts: list
- A list of parts to create the particle trace in. For instance, in a CFD
- simulation this might be the fluid zone.
- If not provided, the function will try to look for the selected parts.
- parts: list
- A list of parts to emit the particle traces from.
- They can be their names, their IDs or the respective ``ENS_PART`` objects.
- part_distribution_type: int
- The distribution of emitters in case of emission from a part.
- This table describes the options:
-
- ==================== =================================================
- Name Query type
- ==================== =================================================
- PART_EMIT_FROM_NODES Emit from the nodes of the part
- PART_EMIT_FROM_AREA Create an area of equidistant points for emission
- ==================== =================================================
-
- If not provided, it will default to ``PART_EMIT_FROM_NODES``
- num_points: int
- The number of points to emit from.
- Defaults to 100.
- emit_time: float
- The emission time to start the particle trace from. If not provided,
- it will use the current time.
- total_time: float
- The total emission time. If not provided, EnSight will provide the end time
- for a transient simulation, an internal best time for steady state simulations.
- delta_time: float
- The interval for the emissions. If not provided, EnSight will provide
- a best estimate.
- color_by
- The optional variable to color the particle trace by.
- It can be the name, the ID or the ``ENS_VAR`` object.
- surface_restrict: bool
- True if the particle trace needs to be restricted to the input parts.
- Defaults to False. The flag will be applied to any additional emitter
- appended to the particle trace created.
-
- Examples
- --------
- >>> s = LocalLauncher().start()
- >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
- >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
- >>> s.load_data(cas_file, result_file=dat_file)
- >>> parts = s.ensight.utils.parts
-        >>> parts.create_particle_trace_from_parts("mytraces", "Velocity", parts=["hot-inlet", "cold-inlet"], num_points=100, source_parts=parts.select_parts_by_dimension(3))
- """
- emitter_type = self._EMIT_PART
- direction, converted_source_parts = self._prepare_particle_creation(
- direction=direction, source_parts=source_parts
- )
- particle_trace_part = self._create_particle_trace_part(
- name,
- variable,
- direction,
- converted_source_parts,
- pathlines=pathlines,
- emit_time=emit_time,
- delta_time=delta_time,
- total_time=total_time,
- surface_restrict=surface_restrict,
- )
- new_parts = [convert_part(self.ensight, p) for p in parts]
- new_emitters = self._create_emitters(
- emitter_type=emitter_type,
- parts=new_parts,
- part_distribution_type=part_distribution_type,
- num_points=num_points,
- )
- palette = self._find_palette(color_by=color_by)
- return self._add_emitters_to_particle_trace_part(
- particle_trace_part, new_emitters=new_emitters, palette=palette, clean=True
- )
-
- def add_emitter_points_to_particle_trace_part(
- self,
- particle_trace_part: Union[str, int, "ENS_PART"],
- points: List[List[float]],
- ) -> "ENS_PART_PARTICLE_TRACE":
- """
- Add point emitters to an existing particle trace. The function will return the updated
- ``ENS_PART`` object.
-
- Parameters
- ----------
-
- particle_trace_part:
- The particle trace part to be added emitters to.
- Can be the name, the ID or the ``ENS_PART`` object
- points: list
- List of list containing the coordinates for the seed points.
-
- Examples
- --------
- >>> s = LocalLauncher().start()
- >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
- >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
- >>> s.load_data(cas_file, result_file=dat_file)
- >>> p = s.ensight.utils.parts.create_particle_trace_from_points("mytraces", "Velocity", points=[[-0.02, -0.123, 0.01576]], source_parts=parts.select_parts_by_dimension(3))
- >>> p = s.ensight.utils.parts.add_emitter_points_to_particle_trace_part(p, points=[[0.109876, -0.123, 0.0123]])
- """
- emitter_type = self._EMIT_POINT
- particle_trace_part = self._cure_particle_trace_part(particle_trace_part)
- new_emitters = self._create_emitters(emitter_type=emitter_type, points=points)
- return self._add_emitters_to_particle_trace_part(particle_trace_part, new_emitters)
-
- def add_emitter_line_to_particle_trace_part(
- self,
- particle_trace_part: Union[str, int, "ENS_PART"],
- point1: List[float],
- point2: List[float],
- num_points: Optional[int] = 100,
- ) -> "ENS_PART_PARTICLE_TRACE":
- """
- Add a line emitter to an existing particle trace. The function will return the updated
- ``ENS_PART`` object.
-
- Parameters
- ----------
-
- particle_trace_part:
- The particle trace part to be added emitters to.
- Can be the name, the ID or the ``ENS_PART`` object.
- point1: list
- The coordinates for point 1.
- point2: list
- The coordinates for point 2.
- num_points: int
- The number of seed points. Defaults to 100.
-
- Examples
- --------
- >>> s = LocalLauncher().start()
- >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
- >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
- >>> s.load_data(cas_file, result_file=dat_file)
- >>> p = s.ensight.utils.parts.create_particle_trace_from_points("mytraces", "Velocity", points=[[-0.02,-0.123,0.01576]], source_parts=parts.select_parts_by_dimension(3))
- >>> p = s.ensight.utils.parts.add_emitter_line_to_particle_trace_part(p, point1=[-0.02, -0.123, 0.01576], point2=[0.109876, -0.123, 0.0123], num_points=10)
- """
- emitter_type = self._EMIT_LINE
- particle_trace_part = self._cure_particle_trace_part(particle_trace_part)
- new_emitters = self._create_emitters(
- emitter_type=emitter_type, point1=point1, point2=point2, num_points=num_points
- )
- return self._add_emitters_to_particle_trace_part(particle_trace_part, new_emitters)
-
- def add_emitter_plane_to_particle_trace_part(
- self,
- particle_trace_part: Union[str, int, "ENS_PART"],
- point1: List[float],
- point2: List[float],
- point3: List[float],
- num_points_x: Optional[int] = 25,
- num_points_y: Optional[int] = 25,
- ) -> "ENS_PART_PARTICLE_TRACE":
- """
- Add a plane emitter to an existing particle trace. The function will return the updated
- ``ENS_PART`` object.
-
- Parameters
- ----------
-
- particle_trace_part:
- The particle trace part to be added emitters to.
- Can be the name, the ID or the ``ENS_PART`` object.
- point1: list
- The coordinates for point 1, being a corner of the plane.
- point2: list
- The coordinates for point 2, being a corner of the plane.
- point3: list
- The coordinates for point 3, being a corner of the plane.
- num_points_x: int
- The number of points on the ``X`` direction of the emission plane.
- Defaults to 25.
- num_points_y: int
- The number of points on the ``Y`` direction of the emission plane.
- Defaults to 25.
-
- Examples
- --------
- >>> s = LocalLauncher().start()
- >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
- >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
- >>> s.load_data(cas_file, result_file=dat_file)
- >>> p = s.ensight.utils.parts.create_particle_trace_from_points("mytraces", "Velocity", points=[[-0.02,-0.123,0.01576]], source_parts=parts.select_parts_by_dimension(3))
- >>> p = s.ensight.utils.parts.add_emitter_plane_to_particle_trace_part(p, point1=[-0.02, -0.123, 0.01576], point2=[0.109876, -0.123, 0.0123], point3=[0.1, 0, 0.05], num_points_x=10, num_points_y=10)
- """
- emitter_type = self._EMIT_PLANE
- particle_trace_part = self._cure_particle_trace_part(particle_trace_part)
- new_emitters = self._create_emitters(
- emitter_type=emitter_type,
- point1=point1,
- point2=point2,
- point3=point3,
- num_points_x=num_points_x,
- num_points_y=num_points_y,
- )
- return self._add_emitters_to_particle_trace_part(particle_trace_part, new_emitters)
-
- def add_emitter_parts_to_particle_trace_part(
- self,
- particle_trace_part: Union[str, int, "ENS_PART"],
- parts: List[Union[str, int, "ENS_PART"]],
- part_distribution_type: Optional[int] = 0,
- num_points: Optional[int] = 100,
- ) -> "ENS_PART_PARTICLE_TRACE":
- """
- Add a list of part emitters to an existing particle trace. The function will return the updated
- ``ENS_PART`` object.
-
- Parameters
- ----------
-
- particle_trace_part:
- The particle trace part to be added emitters to.
- Can be the name, the ID or the ``ENS_PART`` object.
- parts: list
- A list of parts to emit the particle traces from.
- They can be their names, their IDs or the respective ``ENS_PART`` objects.
- part_distribution_type: int
- The distribution of emitters in case of emission from a part.
- This table describes the options:
-
- ==================== =================================================
- Name Query type
- ==================== =================================================
- PART_EMIT_FROM_NODES Emit from the nodes of the part
- PART_EMIT_FROM_AREA Create an area of equidistant points for emission
- ==================== =================================================
-
- If not provided, it will default to ``PART_EMIT_FROM_NODES``
- num_points: int
- The number of points to emit from.
- Defaults to 100.
-
- Examples
- --------
- >>> s = LocalLauncher().start()
- >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
- >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
- >>> s.load_data(cas_file, result_file=dat_file)
- >>> p = s.ensight.utils.parts.create_particle_trace_from_points("mytraces", "Velocity", points=[[-0.02, -0.123, 0.01576]], source_parts=parts.select_parts_by_dimension(3))
- >>> p = s.ensight.utils.parts.add_emitter_parts_to_particle_trace_part(p, parts=["cold-inlet", "hot-inlet"], num_points=25)
- """
- emitter_type = self._EMIT_PART
- particle_trace_part = self._cure_particle_trace_part(particle_trace_part)
- new_parts = [convert_part(self.ensight, p) for p in parts]
- new_emitters = self._create_emitters(
- emitter_type=emitter_type,
- parts=new_parts,
- part_distribution_type=part_distribution_type,
- num_points=num_points,
- )
- return self._add_emitters_to_particle_trace_part(particle_trace_part, new_emitters)
-
- def select_parts(
- self,
- p_list: Optional[List[Union[str, int, "ENS_PART"]]] = None,
- rec_flag: Optional[bool] = True,
- ) -> Optional[List["ENS_PART"]]:
- """
-        Select the given parts (each a name, an ID, or an ``ENS_PART`` object)
-        and, by default, record the selection, honoring the EnSight preference
-        to record command language by part ID or by name.
-
- Parameters
- ----------
-        p_list: list
-            The list of parts to select. It can be a list of names, a list of IDs
-            (integers or strings), or a list of ENS_PART objects
- rec_flag: bool
- True if the selection needs to be recorded
-
- Returns
- -------
- list
- A list of part objects selected or None if error.
-
-
-        NOTE: If you do not want a measured part in your selection,
-        do not include it in the list. A part is a measured part if
-        core.PARTS[0].PARTTYPE == ensight.objs.enums.PART_DISCRETE_PARTICLE == 3
- """
- #
- pobj_list = self.get_part_id_obj_name(p_list, "obj")
-
- if not pobj_list:
- raise RuntimeError("Error, select_parts: part list is empty")
- else:
- # This was formerly used to record command lang 10.1.6(c)
- # using part ids:
- # ensight.part.select_begin(pid_list,record=1)
-            # Now records the selection, honoring the preference to
-            # select parts by part id or by name (2024R1)
- record = 1 if rec_flag else 0
- self.ensight.objs.core.selection(name="ENS_PART").addchild(
- pobj_list, replace=1, record=record
- )
- # This is essential to synchronize cmd lang with the GUI, C++
- self.ensight.part.get_mainpartlist_select()
-
- return pobj_list
-
- def get_part_id_obj_name(
- self,
- plist: Optional[Union[str, int, "ENS_PART", List[str], List[int], List["ENS_PART"]]] = None,
- ret_flag="id",
- ) -> Union[Optional[List[int]], Optional[List[str]], Optional[List["ENS_PART"]]]:
- """
- Input a part or a list of parts and return an id, object, or name
- or a list of ids, objects, or names.
-
- Parameters
- ----------
-        plist: list
-            The list of parts to convert. It can be a list of names, a list of IDs
-            (integers or strings), or a list of ENS_PART objects
-
-        ret_flag: str
-            A string that determines what is returned: "id" (default), "name", or "obj"
-
- Returns
- -------
- list
- Either a list of part IDs, or a list of names or a list of ENS_PART objects
- depending on the requested ret_flag value
- """
- # To not change the interface I didn't move ret_flag to be a required argument,
- # so I need to check its value now
- if not ret_flag:
- return None
- if not plist:
- plist = [p for p in self.ensight.objs.core.PARTS]
- pobj_list: List["ENS_PART"] = []
- #
- # Basically figure out what plist is, then convert it to a list of ENS_PARTs
- #
- if (
- isinstance(plist, self.ensight.objs.ENS_PART)
- or isinstance(plist, int)
- or isinstance(plist, str)
- ):
- p_list = [plist]
- elif isinstance(plist, list) or isinstance(plist, ensobjlist):
- p_list = [p for p in plist]
- else: # pragma: no cover
- raise RuntimeError( # pragma: no cover
- "Unknown type of input var plist {}".format(type(plist))
- )
- #
- # p_list must now be a list
- #
-
- if not p_list:
- return None
- if not isinstance(p_list[0], (str, int, self.ensight.objs.ENS_PART)): # pragma: no cover
- error = "First member is neither ENS_PART, int, nor string" # pragma: no cover
- error += f"{p_list[0]} type = {type(p_list[0])}; aborting" # pragma: no cover
- raise RuntimeError(error) # pragma: no cover
- pobjs: List["ENS_PART"]
- if isinstance(p_list[0], int):
- # list of ints must be part ids
- for pid in p_list:
- pobjs = [p for p in self.ensight.objs.core.PARTS if p.PARTNUMBER == pid]
- for prt in pobjs:
- pobj_list.append(prt)
- elif isinstance(p_list[0], str):
- if not p_list[0].isdigit():
- for pname in p_list:
- pobjs = [p for p in self.ensight.objs.core.PARTS if p.DESCRIPTION == pname]
- for prt in pobjs:
- pobj_list.append(prt)
- else: # digits, must be a string list of part ids?
- for pid_str in p_list:
- pobjs = [
- p for p in self.ensight.objs.core.PARTS if p.PARTNUMBER == int(pid_str)
- ]
- for prt in pobjs:
- pobj_list.append(prt)
- else:
- for prt in p_list:
- pobj_list.append(prt)
- if ret_flag == "name":
- val_strings = [str(p.DESCRIPTION) for p in pobj_list]
- return val_strings
- if ret_flag == "obj":
- val_objs = [p for p in pobj_list]
- return val_objs
- val_ints = [int(p.PARTNUMBER) for p in pobj_list]
- return val_ints
+"""Parts module.
+
+This module allows PyEnSight to control the parts in the EnSight session.
+
+Example for selecting all 3D parts:
+
+(PyEnSight)
+>>> from ansys.pyensight.core import LocalLauncher
+>>> session = LocalLauncher().start()
+>>> parts = session.ensight.utils.parts
+>>> parts.select_parts_by_dimension(3)
+
+(EnSight)
+>>> from ensight.utils import parts
+>>> parts.select_parts_by_dimension(3)
+
+"""
+from types import ModuleType
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+
+try:
+ import ensight
+ from ensight.objs import ens_emitterobj, ensobjlist # type: ignore
+except ImportError:
+ from ansys.api.pyensight.ens_emitterobj import ens_emitterobj
+ from ansys.pyensight.core.listobj import ensobjlist
+
+if TYPE_CHECKING:
+ from ansys.api.pyensight import ensight_api
+ from ansys.api.pyensight.ens_part import ENS_PART
+ from ansys.api.pyensight.ens_part_particle_trace import ENS_PART_PARTICLE_TRACE
+ from ansys.api.pyensight.ens_var import ENS_VAR
+
+
+def convert_part(
+ _ensight: Union["ensight_api.ensight", "ensight"], part: Union[str, int, "ENS_PART"]
+):
+ if isinstance(part, str):
+ return _ensight.objs.core.PARTS[part][0].PARTNUMBER
+ elif isinstance(part, int):
+ return part
+ elif hasattr(part, "PARTNUMBER"):
+ return part.PARTNUMBER
+
+
+def convert_variable(
+ _ensight: Union["ensight_api.ensight", "ensight"], var: Union[str, int, "ENS_VAR"]
+) -> Optional[int]:
+ if isinstance(var, str):
+ return int(_ensight.objs.core.VARIABLES[var][0].ID)
+ elif isinstance(var, int):
+ return var
+ elif hasattr(var, "ID"):
+ return int(var.ID)
+ return None # pragma: no cover
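+
+
+# A minimal usage sketch (hypothetical part and variable names), assuming an
+# active PyEnSight session: both helpers accept a name, a numeric ID, or the
+# object itself and normalize it to the numeric identifier EnSight expects.
+#
+#   pnum = convert_part(session.ensight, "fluid-zone")    # -> PARTNUMBER (int)
+#   vid = convert_variable(session.ensight, "Velocity")   # -> variable ID (int)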
+
+
+class Parts:
+ """Controls the parts in the current EnSight ``Session`` instance."""
+
+ class _EnSEmitterPoint(ens_emitterobj): # pragma: no cover
+ def __init__( # pragma: no cover
+ self,
+ ensight: "ensight",
+ point1: Optional[List[float]] = [0, 0, 0],
+ ): # pragma: no cover
+ if not isinstance(ensight, ModuleType):
+ raise RuntimeError(
+ "The class cannot be used directly in PyEnSight. It should not be used directly even in EnSight"
+ )
+ super().__init__(ensight.objs.EMIT_CURSOR)
+ self.ensight = ensight
+ self.ensight.view_transf.cursor(*point1)
+ self.CENTROID = point1
+
+ class _EnSEmitterGrid(ens_emitterobj): # pragma: no cover
+ def __init__( # pragma: no cover
+ self,
+ ensight: "ensight",
+ point1: Optional[List[float]] = [0, 0, 0],
+ point2: Optional[List[float]] = [0, 0, 0],
+ point3: Optional[List[float]] = [0, 0, 0],
+ point4: Optional[List[float]] = [0, 0, 0],
+ num_points_x: Optional[int] = 25,
+ num_points_y: Optional[int] = 25,
+ ): # pragma: no cover
+ if not isinstance(ensight, ModuleType):
+ raise RuntimeError(
+ "The class cannot be used directly in PyEnSight. It should not be used directly even in EnSight"
+ )
+ super().__init__(ensight.objs.EMIT_PLANE)
+ self.ensight = ensight
+ self.ensight.view_transf.plane(1, *point1)
+ self.ensight.view_transf.plane(2, *point2)
+ self.ensight.view_transf.plane(3, *point3)
+ self.POINT1 = point1
+ self.POINT2 = point2
+ self.POINT3 = point3
+ self.POINT4 = point4
+ self.NUM_POINTS_X = num_points_x
+ self.NUM_POINTS_Y = num_points_y
+
+ class _EnSEmitterLine(ens_emitterobj): # pragma: no cover
+ def __init__( # pragma: no cover
+ self,
+ ensight: "ensight",
+ point1: Optional[List[float]] = [0, 0, 0],
+ point2: Optional[List[float]] = [0, 0, 0],
+ num_points: Optional[int] = 100,
+ ): # pragma: no cover
+ if not isinstance(ensight, ModuleType):
+ raise RuntimeError(
+ "The class cannot be used directly in PyEnSight. It should not be used directly even in EnSight"
+ )
+ super().__init__(ensight.objs.EMIT_LINE)
+ self.ensight = ensight
+ self.ensight.view_transf.line(1, *point1)
+ self.ensight.view_transf.line(2, *point2)
+ self.POINT1 = point1
+ self.POINT2 = point2
+ self.NUM_POINTS = num_points
+
+ class _EnSEmitterPart(ens_emitterobj): # pragma: no cover
+ def __init__( # pragma: no cover
+ self,
+ ensight: "ensight",
+ part: Optional[Any] = None,
+ part_kind: Optional[Any] = 0,
+ num_points: Optional[int] = 100,
+ ): # pragma: no cover
+ if not isinstance(ensight, ModuleType):
+ raise RuntimeError(
+ "The class cannot be used directly in PyEnSight. It should not be used directly even in EnSight"
+ )
+ super().__init__(ensight.objs.EMIT_PART)
+ self.ensight = ensight
+ if not part:
+ raise RuntimeError("part is a required input")
+ self.PART = convert_part(self.ensight, part)
+ self.NUM_POINTS = num_points
+ self.DISTRIB_TYPE = part_kind
+
+ def __init__(self, ensight: Union["ensight_api.ensight", "ensight"]):
+ self.ensight = ensight
+
+ def select_parts_by_dimension(self, dimension: int) -> ensobjlist["ENS_PART"]:
+ """Select parts by the input dimension and return the parts found.
+
+ Parameters
+ ----------
+ dimension : int
+ Dimension for selecting parts.
+
+ Returns
+ -------
+ ensobjlist["ENS_PART"]
+            List of parts found.
+
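+        Examples
+        --------
+        A minimal sketch, assuming an active ``session`` as in the module example.
+
+        >>> parts = session.ensight.utils.parts
+        >>> volumes = parts.select_parts_by_dimension(3)
+        >>> surfaces = parts.select_parts_by_dimension(2)
+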
+ """
+ parts = self.ensight.objs.core.PARTS
+ parts.set_attr("SELECTED", False)
+ found = parts.find(True, f"HAS{dimension}DELEMENTS")
+ found.set_attr("SELECTED", True)
+ return found
+
+ def select_parts_invert(self) -> ensobjlist["ENS_PART"]:
+ """Select parts currently not selected, deselecting the previously selected parts.
+
+ Returns
+ -------
+ ensobjlist["ENS_PART"]
+ Updated list of parts selected.
+
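+        Examples
+        --------
+        A short sketch, assuming an active ``session``; inverts whatever
+        selection the previous call produced.
+
+        >>> parts = session.ensight.utils.parts
+        >>> parts.select_parts_by_dimension(3)
+        >>> not_3d = parts.select_parts_invert()
+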
+ """
+ self.ensight.part.select_invert()
+ parts = self.ensight.objs.core.PARTS
+ return parts.find(True, "SELECTED")
+
+ def select_parts_by_tag(
+ self,
+ tag: Optional[str] = None,
+ value: Optional[str] = None,
+ tagdict: Optional[Dict[str, str]] = None,
+ ) -> ensobjlist["ENS_PART"]:
+ """Select parts by the input dimension and return the parts found.
+
+ Parameters
+ ----------
+ tag : str, optional
+ Tag for finding the parts.
+ value : str, optional
+ Value for finding the parts.
+ tagdict : dict, optional
+ Dictionary containing the key and value pairs for finding
+ the parts. Only the parts that have all the keys and corresponding
+ values are returned. If a value for this parameter is supplied, it
+ takes precedence over the values supplied for the ``tag`` and
+ ``value`` parameters.
+
+ Returns
+ -------
+ ensobjlist["ENS_PART"]
+ List of parts found. If no arguments are given, all parts are returned.
+
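+        Examples
+        --------
+        A minimal sketch with hypothetical tag names and values.
+
+        >>> parts = session.ensight.utils.parts
+        >>> inlets = parts.select_parts_by_tag(tag="ZONE", value="inlet")
+        >>> tagged = parts.select_parts_by_tag(tagdict={"ZONE": "inlet", "KIND": "wall"})
+        >>> everything = parts.select_parts_by_tag()
+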
+ """
+ parts = self.ensight.objs.core.PARTS
+ metadata = {p: p.METADATA for p in parts}
+ found = ensobjlist()
+ if not tag and not value and not tagdict:
+ self.ensight.part.select_all()
+ return parts
+ if not tagdict:
+ if tag and value:
+ found = ensobjlist([p for p, met in metadata.items() if met.get(tag) == value])
+ elif value and not tag:
+ found = ensobjlist([p for p, met in metadata.items() if value in met.values()])
+ elif tag and not value: # pragma: no cover
+ found = ensobjlist([p for p, met in metadata.items() if tag in met.keys()])
+ else:
+ found = ensobjlist(
+ [
+ p
+ for p, met in metadata.items()
+ if all(met.get(k) == v for k, v in tagdict.items())
+ ]
+ )
+ if found:
+ found.set_attr("SELECTED", True)
+ return found
+
+ _EMIT_POINT: int = 0
+ _EMIT_LINE: int = 1
+ _EMIT_PLANE: int = 2
+ _EMIT_PART: int = 3
+ PT_POS_TIME: str = "+"
+ PT_NEG_TIME: str = "-"
+ PT_POS_NEG_TIME: str = "+/-"
+ PART_EMIT_FROM_NODES: int = 0
+ PART_EMIT_FROM_AREA: int = 1
+
+ def _create_emitters(
+ self,
+ emitter_type: int,
+ points: Optional[List[List[float]]] = None,
+ point1: Optional[List[float]] = None,
+ point2: Optional[List[float]] = None,
+ point3: Optional[List[float]] = None,
+ parts: Optional[List["ENS_PART"]] = None,
+ part_distribution_type: Optional[int] = 0,
+ num_points: Optional[int] = 100,
+ num_points_x: Optional[int] = 25,
+ num_points_y: Optional[int] = 25,
+ ) -> List[Any]:
+ """Private routine to create emitter objects"""
+ new_emitters: List[Any] = []
+ if emitter_type == self._EMIT_POINT:
+ if not points: # pragma: no cover
+ raise RuntimeError(
+ "list of points needed if particle trace emitted from points"
+ ) # pragma: no cover
+ for p in points:
+ if isinstance(self.ensight, ModuleType): # pragma: no cover
+ new_emitters.append(
+ self._EnSEmitterPoint(self.ensight, point1=p)
+ ) # pragma: no cover
+ else:
+ new_emitters.append(
+ f"ensight.utils.parts._EnSEmitterPoint(ensight, point1={p})"
+ )
+ elif emitter_type == self._EMIT_LINE:
+            if not all([point1, point2]):
+ raise RuntimeError("point1 and point2 needed if particle trace emitted from line")
+ if isinstance(self.ensight, ModuleType): # pragma: no cover
+ new_emitters.append( # pragma: no cover
+ self._EnSEmitterLine(
+ self.ensight, point1=point1, point2=point2, num_points=num_points
+ )
+ )
+ else:
+ new_emitters.append(
+ f"ensight.utils.parts._EnSEmitterLine(ensight, point1={point1}, point2={point2}, num_points={num_points})"
+ )
+ elif emitter_type == self._EMIT_PLANE:
+            if not all([point1, point2, point3]):
+ raise RuntimeError( # pragma: no cover
+ "point1, point2 and point3 needed if particle trace emitted from plane"
+ )
+ if isinstance(self.ensight, ModuleType): # pragma: no cover
+ new_emitters.append( # pragma: no cover
+ self._EnSEmitterGrid(
+ self.ensight,
+ point1=point1,
+ point2=point2,
+ point3=point3,
+ num_points_x=num_points_x,
+ num_points_y=num_points_y,
+ )
+ )
+ else:
+ new_emitters.append(
+ f"ensight.utils.parts._EnSEmitterGrid(ensight, point1={point1}, point2={point2}, point3={point3}, num_points_x={num_points_x}, num_points_y={num_points_y})"
+ )
+ elif emitter_type == self._EMIT_PART: # pragma: no cover
+ if not parts: # pragma: no cover
+ raise RuntimeError(
+ "part and num_points needed if particle trace emitted from part"
+ ) # pragma: no cover
+ for p in parts:
+ if isinstance(self.ensight, ModuleType): # pragma: no cover
+ new_emitters.append( # pragma: no cover
+ self._EnSEmitterPart(
+ self.ensight,
+ part=p,
+ num_points=num_points,
+ part_kind=part_distribution_type,
+ )
+ )
+ else:
+ new_emitters.append(
+                        f"ensight.utils.parts._EnSEmitterPart(ensight, part={convert_part(self.ensight, p)}, num_points={num_points}, part_kind={part_distribution_type})"
+ )
+ else:
+ raise RuntimeError(
+                "Unknown emitter type requested for the particle trace emitters"
+ ) # pragma: no cover
+ return new_emitters
+
+ def _create_particle_trace_part(
+ self,
+ name: str,
+ variable: Union[str, int, "ENS_VAR"],
+ direction: str,
+ source_parts: List["ENS_PART"],
+ pathlines: Optional[bool] = False,
+ emit_time: Optional[float] = None,
+ total_time: Optional[float] = None,
+ delta_time: Optional[float] = None,
+ surface_restrict: Optional[bool] = False,
+ ) -> "ENS_PART_PARTICLE_TRACE":
+ """Private routine to create a particle trace part object"""
+ current_timestep = None
+ direction_map = {
+ self.PT_POS_TIME: self.ensight.objs.enums.POS_TIME,
+ self.PT_NEG_TIME: self.ensight.objs.enums.NEG_TIME,
+ self.PT_POS_NEG_TIME: self.ensight.objs.enums.POS_NEG_TIME,
+ }
+ idx = self.ensight.objs.enums.PART_PARTICLE_TRACE
+ def_part: "ENS_PART_PARTICLE_TRACE" = self.ensight.objs.core.DEFAULTPARTS[idx]
+ def_part.TYPE = self.ensight.objs.enums.STREAMLINE
+ if pathlines is True:
+ def_part.TYPE = self.ensight.objs.enums.PATHLINE
+ current_timestep = self.ensight.objs.core.TIMESTEP
+ self.ensight.objs.core.TIMESTEP = self.ensight.objs.core.TIMESTEP_LIMITS[0]
+ if total_time:
+ def_part.TOTALTIME = total_time
+ if delta_time:
+ def_part.DELTATIME = delta_time
+ if emit_time: # pragma: no cover
+ def_part.STARTTIME = emit_time
+ def_part.DESCRIPTION = name
+ def_part.VARIABLE = convert_variable(self.ensight, variable)
+ def_part.SURFACERESTRICTED = False
+ def_part.TRACEDIRECTION = direction_map.get(direction)
+ if surface_restrict:
+ def_part.SURFACERESTRICTED = True
+ particle_trace_part: "ENS_PART_PARTICLE_TRACE" = def_part.createpart(
+ sources=source_parts, name=name
+ )[0]
+ if current_timestep:
+ self.ensight.objs.core.TIMESTEP = current_timestep
+ return particle_trace_part
+
+ def _add_emitters_to_particle_trace_part(
+ self,
+ particle_trace_part: "ENS_PART_PARTICLE_TRACE",
+ new_emitters: List[Any],
+ palette: Optional[str] = None,
+ clean: Optional[bool] = False,
+ ) -> "ENS_PART_PARTICLE_TRACE":
+ """Private utility to add emitters to an existing particle trace part."""
+ if isinstance(self.ensight, ModuleType): # pragma: no cover
+ if clean: # pragma: no cover
+ emitters = [] # pragma: no cover
+ else: # pragma: no cover
+ emitters = particle_trace_part.EMITTERS.copy() # pragma: no cover
+ emitters.extend(new_emitters) # pragma: no cover
+ particle_trace_part.EMITTERS = emitters # pragma: no cover
+ else:
+ if clean:
+ self.ensight._session.cmd("enscl.emitters=[]", do_eval=False)
+ else:
+ self.ensight._session.cmd(
+ f"enscl.emitters=ensight.objs.wrap_id({particle_trace_part.objid}).EMITTERS.copy()",
+ do_eval=False,
+ )
+ text = "enscl.emitters.extend(["
+ for emitter in new_emitters:
+ text += emitter + ", "
+ text = text[:-2]
+ text += "])"
+ self.ensight._session.cmd(text, do_eval=False)
+ self.ensight._session.cmd(
+ f"ensight.objs.wrap_id({particle_trace_part.objid}).setattr('EMITTERS', enscl.emitters.copy())"
+ )
+ self.ensight._session.cmd("del enscl.emitters", do_eval=False)
+ if palette:
+ particle_trace_part.COLORBYPALETTE = palette
+ return particle_trace_part
+
+ def _cure_particle_trace_part(
+ self, particle_trace_part: Union[str, int, "ENS_PART_PARTICLE_TRACE"]
+ ) -> "ENS_PART_PARTICLE_TRACE":
+ """Private utility to cure an input particle trace part and convert it to an ``ENS_PART`"""
+
+ # the add_emitter* functions were added in 2024 R2
+ if not isinstance(self.ensight, ModuleType): # pragma: no cover
+ self.ensight._session.ensight_version_check("2024 R2")
+
+ _particle_trace_part: "ENS_PART_PARTICLE_TRACE"
+ if isinstance(particle_trace_part, (str, int)): # pragma: no cover
+ temp = self.ensight.objs.core.PARTS[particle_trace_part] # pragma: no cover
+ if not temp: # pragma: no cover
+ raise RuntimeError(
+ "particle_trace_part input is not a valid part"
+ ) # pragma: no cover
+ _particle_trace_part = temp[0] # pragma: no cover
+ else:
+ _particle_trace_part = particle_trace_part
+ return _particle_trace_part
+
+ def _prepare_particle_creation(
+ self,
+ direction: Optional[str] = None,
+ source_parts: Optional[List[Union[str, int, "ENS_PART"]]] = None,
+ ) -> Tuple[str, List["ENS_PART"]]:
+ """Private utility to set the direction if not provided, and to cure the list of source parts."""
+
+ # the create_particle* functions were added in 2024 R2
+ if not isinstance(self.ensight, ModuleType): # pragma: no cover
+ self.ensight._session.ensight_version_check("2024 R2")
+
+ if not direction:
+ direction = self.PT_POS_TIME
+        if source_parts:  # pragma: no cover
+            converted_source_parts = [convert_part(self.ensight, p) for p in source_parts]
+        else:  # pragma: no cover
+            converted_source_parts = self.ensight.objs.core.selection(  # pragma: no cover
+                name="ENS_PART"
+            )
+ if not converted_source_parts: # pragma: no cover
+ raise RuntimeError("No part selected for particle trace generation") # pragma: no cover
+ return direction, converted_source_parts
+
+ def _find_palette(self, color_by: Optional[Union[str, int, "ENS_VAR"]] = None) -> Optional[str]:
+ """Private utility to find the description of the input color_by variable"""
+ palette: Optional[str] = None
+ if color_by:
+ try:
+ _color_by_var: List["ENS_VAR"] = self.ensight.objs.core.VARIABLES.find(
+ [convert_variable(self.ensight, color_by)], attr="ID"
+ )
+ if _color_by_var:
+ palette = _color_by_var[0].DESCRIPTION
+ except Exception:
+ raise RuntimeError(
+ "The variable supplied to color the particle trace by does not exist"
+ )
+ return palette
+
+ def create_particle_trace_from_points(
+ self,
+ name: str,
+ variable: Union[str, int, "ENS_VAR"],
+ points: List[List[float]],
+ direction: Optional[str] = None,
+ pathlines: Optional[bool] = False,
+ source_parts: Optional[List[Union[str, int, "ENS_PART"]]] = None,
+ emit_time: Optional[float] = None,
+ total_time: Optional[float] = None,
+ delta_time: Optional[float] = None,
+ color_by: Optional[Union[str, int, "ENS_VAR"]] = None,
+ ) -> "ENS_PART_PARTICLE_TRACE":
+ """
+        Create a particle trace part from a list of points.
+ Returns the ``ENS_PART`` generated.
+
+ Parameters
+ ----------
+
+ name: str
+            The name of the part to be generated.
+ variable:
+ The variable to compute the particle traces with.
+ It can be the name, the ID or the ``ENS_VAR`` object. It must be a vector variable.
+ direction: str
+ The direction for the particle traces to be generated.
+ This table describes the options:
+
+ ================== ==============================================
+            Name               Description
+ ================== ==============================================
+ PT_POS_TIME Follow the vector direction
+ PT_NEG_TIME Go contrary to the vector direction
+ PT_POS_NEG_TIME Follow and go contrary to the vector direction
+ ================== ==============================================
+
+ If not provided, it will default to ``PT_POS_TIME``
+ pathlines: bool
+ True if the particle traces need to be pathlines
+ points: list
+ List of coordinates for the seed points.
+ source_parts: list
+ A list of parts to create the particle trace in. For instance, in a CFD
+ simulation this might be the fluid zone.
+ If not provided, the function will try to look for the selected parts.
+ emit_time: float
+ The emission time to start the particle trace from. If not provided,
+ it will use the current time.
+ total_time: float
+            The total emission time. If not provided, EnSight will use the end time
+            for a transient simulation, or an internal best estimate for steady-state
+            simulations.
+ delta_time: float
+ The interval for the emissions. If not provided, EnSight will provide
+ a best estimate.
+ color_by
+ The optional variable to color the particle trace by.
+ It can be the name, the ID or the ``ENS_VAR`` object.
+
+ Examples
+ --------
+ >>> s = LocalLauncher().start()
+ >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
+ >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
+ >>> s.load_data(cas_file, result_file=dat_file)
+        >>> parts = s.ensight.utils.parts
+        >>> parts.create_particle_trace_from_points("mytraces", "Velocity", points=[[-0.02,-0.123,0.01576],[0.109876,-0.123,0.0123]], source_parts=parts.select_parts_by_dimension(3))
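+        >>> # Optional sketch: color the resulting trace by a variable
+        >>> colored = parts.create_particle_trace_from_points("colored", "Velocity", points=[[0.0, 0.0, 0.0]], color_by="Velocity", source_parts=parts.select_parts_by_dimension(3))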
+ """
+ emitter_type = self._EMIT_POINT
+ direction, converted_source_parts = self._prepare_particle_creation(
+ direction=direction, source_parts=source_parts
+ )
+ particle_trace_part = self._create_particle_trace_part(
+ name,
+ variable,
+ direction,
+ converted_source_parts,
+ pathlines=pathlines,
+ emit_time=emit_time,
+ delta_time=delta_time,
+ total_time=total_time,
+ )
+ new_emitters = self._create_emitters(emitter_type=emitter_type, points=points)
+ palette = self._find_palette(color_by=color_by)
+ return self._add_emitters_to_particle_trace_part(
+ particle_trace_part, new_emitters=new_emitters, palette=palette, clean=True
+ )
+
+ def create_particle_trace_from_line(
+ self,
+ name: str,
+ variable: Union[str, int, "ENS_VAR"],
+ point1: List[float],
+ point2: List[float],
+ num_points: Optional[int] = 100,
+ direction: Optional[str] = None,
+ pathlines: Optional[bool] = False,
+ source_parts: Optional[List[Union[str, int, "ENS_PART"]]] = None,
+ emit_time: Optional[float] = None,
+ total_time: Optional[float] = None,
+ delta_time: Optional[float] = None,
+ color_by: Optional[Union[str, int, "ENS_VAR"]] = None,
+ ) -> "ENS_PART_PARTICLE_TRACE":
+ """
+ Create a particle trace part from a line.
+ Returns the ``ENS_PART`` generated.
+
+ Parameters
+ ----------
+
+ name: str
+            The name of the part to be generated.
+ variable:
+ The variable to compute the particle traces with.
+ It can be the name, the ID or the ``ENS_VAR`` object. It must be a vector variable.
+ direction: str
+ The direction for the particle traces to be generated.
+ This table describes the options:
+
+ ================== ==============================================
+ Name Query type
+ ================== ==============================================
+ PT_POS_TIME Follow the vector direction
+ PT_NEG_TIME Go contrary to the vector direction
+ PT_POS_NEG_TIME Follow and go contrary to the vector direction
+ ================== ==============================================
+
+ If not provided, it will default to ``PT_POS_TIME``
+ pathlines: bool
+ True if the particle traces need to be pathlines
+ point1: list
+ List of coordinates for point 1.
+ point2: list
+ List of coordinates for point 2.
+ source_parts: list
+ A list of parts to create the particle trace in. For instance, in a CFD
+ simulation this might be the fluid zone.
+ If not provided, the function will try to look for the selected parts.
+ num_points: int
+ The number of points to emit from. Defaults to 100.
+ emit_time: float
+ The emission time to start the particle trace from. If not provided,
+ it will use the current time.
+ total_time: float
+            The total emission time. If not provided, EnSight will use the end time
+            for a transient simulation, or an internal best estimate for steady-state
+            simulations.
+ delta_time: float
+ The interval for the emissions. If not provided, EnSight will provide
+ a best estimate.
+ color_by
+ The optional variable to color the particle trace by.
+ It can be the name, the ID or the ``ENS_VAR`` object.
+
+ Examples
+ --------
+ >>> s = LocalLauncher().start()
+ >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
+ >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
+ >>> s.load_data(cas_file, result_file=dat_file)
+ >>> parts = s.ensight.utils.parts
+ >>> parts.create_particle_trace_from_line("mytraces", "Velocity", point1=[-0.02,-0.123,0.01576], point2=[0.109876,-0.123,0.0123], num_points=10, source_parts=parts.select_parts_by_dimension(3))
+ """
+ emitter_type = self._EMIT_LINE
+ direction, converted_source_parts = self._prepare_particle_creation(
+ direction=direction, source_parts=source_parts
+ )
+ particle_trace_part = self._create_particle_trace_part(
+ name,
+ variable,
+ direction,
+ converted_source_parts,
+ pathlines=pathlines,
+ emit_time=emit_time,
+ delta_time=delta_time,
+ total_time=total_time,
+ )
+ new_emitters = self._create_emitters(
+ emitter_type=emitter_type, point1=point1, point2=point2, num_points=num_points
+ )
+ palette = self._find_palette(color_by=color_by)
+ return self._add_emitters_to_particle_trace_part(
+ particle_trace_part, new_emitters=new_emitters, palette=palette, clean=True
+ )
+
+ def create_particle_trace_from_plane(
+ self,
+ name: str,
+ variable: Union[str, int, "ENS_VAR"],
+ point1: List[float],
+ point2: List[float],
+ point3: List[float],
+ num_points_x: Optional[int] = 25,
+ num_points_y: Optional[int] = 25,
+ direction: Optional[str] = None,
+ pathlines: Optional[bool] = False,
+ source_parts: Optional[List[Union[str, int, "ENS_PART"]]] = None,
+ emit_time: Optional[float] = None,
+ total_time: Optional[float] = None,
+ delta_time: Optional[float] = None,
+ color_by: Optional[Union[str, int, "ENS_VAR"]] = None,
+ ) -> "ENS_PART_PARTICLE_TRACE":
+ """
+ Create a particle trace part from a plane.
+ Returns the ``ENS_PART`` generated.
+
+ Parameters
+ ----------
+
+ name: str
+            The name of the part to be generated.
+ variable:
+ The variable to compute the particle traces with.
+ It can be the name, the ID or the ``ENS_VAR`` object. It must be a vector variable.
+ direction: str
+ The direction for the particle traces to be generated.
+ This table describes the options:
+
+ ================== ==============================================
+ Name Query type
+ ================== ==============================================
+ PT_POS_TIME Follow the vector direction
+ PT_NEG_TIME Go contrary to the vector direction
+ PT_POS_NEG_TIME Follow and go contrary to the vector direction
+ ================== ==============================================
+
+ If not provided, it will default to ``PT_POS_TIME``
+ pathlines: bool
+ True if the particle traces need to be pathlines
+        point1: list
+            List of coordinates for point 1, a corner of the plane.
+        point2: list
+            List of coordinates for point 2, a corner of the plane.
+        point3: list
+            List of coordinates for point 3, a corner of the plane.
+ source_parts: list
+ A list of parts to create the particle trace in. For instance, in a CFD
+ simulation this might be the fluid zone.
+            If not provided, the function uses the currently selected parts.
+ num_points_x: int
+ The number of points on the ``X`` direction of the emission plane.
+ Defaults to 25.
+ num_points_y: int
+ The number of points on the ``Y`` direction of the emission plane.
+ Defaults to 25.
+ emit_time: float
+ The emission time to start the particle trace from. If not provided,
+ it will use the current time.
+ total_time: float
+            The total emission time. If not provided, EnSight uses the end time
+            for a transient simulation and a best internal estimate for steady-state simulations.
+        delta_time: float
+            The interval between emissions. If not provided, EnSight computes
+            a best estimate.
+ color_by
+ The optional variable to color the particle trace by.
+ It can be the name, the ID or the ``ENS_VAR`` object.
+
+ Examples
+ --------
+ >>> s = LocalLauncher().start()
+ >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
+ >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
+ >>> s.load_data(cas_file, result_file=dat_file)
+ >>> parts = s.ensight.utils.parts
+ >>> parts.create_particle_trace_from_plane("mytraces", "Velocity", point1=[-0.02,-0.123,0.01576], point2=[0.109876,-0.123,0.0123], point3=[0.1, 0, 0.05] ,num_points_x=10, num_points_y=10, source_parts=parts.select_parts_by_dimension(3))
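+
+        A minimal sketch of the same call computing pathlines instead,
+        assuming a transient dataset has been loaded:
+
+        >>> parts.create_particle_trace_from_plane("mypathlines", "Velocity", point1=[-0.02,-0.123,0.01576], point2=[0.109876,-0.123,0.0123], point3=[0.1, 0, 0.05], num_points_x=10, num_points_y=10, pathlines=True, source_parts=parts.select_parts_by_dimension(3))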
+ """
+ emitter_type = self._EMIT_PLANE
+ direction, converted_source_parts = self._prepare_particle_creation(
+ direction=direction, source_parts=source_parts
+ )
+ particle_trace_part = self._create_particle_trace_part(
+ name,
+ variable,
+ direction,
+ converted_source_parts,
+ pathlines=pathlines,
+ emit_time=emit_time,
+ delta_time=delta_time,
+ total_time=total_time,
+ )
+ new_emitters = self._create_emitters(
+ emitter_type=emitter_type,
+ point1=point1,
+ point2=point2,
+ point3=point3,
+ num_points_x=num_points_x,
+ num_points_y=num_points_y,
+ )
+ palette = self._find_palette(color_by=color_by)
+ return self._add_emitters_to_particle_trace_part(
+ particle_trace_part, new_emitters=new_emitters, palette=palette, clean=True
+ )
+
+ def create_particle_trace_from_parts(
+ self,
+ name: str,
+ variable: Union[str, int, "ENS_VAR"],
+ parts: List[Union[str, int, "ENS_PART"]],
+ part_distribution_type: Optional[int] = 0,
+ num_points: Optional[int] = 100,
+ direction: Optional[str] = None,
+ pathlines: Optional[bool] = False,
+ source_parts: Optional[List[Union[str, int, "ENS_PART"]]] = None,
+ emit_time: Optional[float] = None,
+ total_time: Optional[float] = None,
+ delta_time: Optional[float] = None,
+ color_by: Optional[Union[str, int, "ENS_VAR"]] = None,
+ surface_restrict: Optional[bool] = False,
+ ) -> "ENS_PART_PARTICLE_TRACE":
+ """
+ Create a particle trace part from a list of seed parts.
+ Returns the ``ENS_PART`` generated.
+
+ Parameters
+ ----------
+
+ name: str
+            The name of the part to be generated.
+ variable:
+ The variable to compute the particle traces with.
+ It can be the name, the ID or the ``ENS_VAR`` object. It must be a vector variable.
+ direction: str
+ The direction for the particle traces to be generated.
+ This table describes the options:
+
+ ================== ==============================================
+            Name               Description
+ ================== ==============================================
+ PT_POS_TIME Follow the vector direction
+ PT_NEG_TIME Go contrary to the vector direction
+ PT_POS_NEG_TIME Follow and go contrary to the vector direction
+ ================== ==============================================
+
+            If not provided, it will default to ``PT_POS_TIME``.
+ pathlines: bool
+            True if the particle traces should be computed as pathlines.
+ source_parts: list
+ A list of parts to create the particle trace in. For instance, in a CFD
+ simulation this might be the fluid zone.
+            If not provided, the function uses the currently selected parts.
+ parts: list
+ A list of parts to emit the particle traces from.
+ They can be their names, their IDs or the respective ``ENS_PART`` objects.
+ part_distribution_type: int
+            The distribution of the emitters when emitting from a part.
+ This table describes the options:
+
+ ==================== =================================================
+            Name                 Description
+ ==================== =================================================
+ PART_EMIT_FROM_NODES Emit from the nodes of the part
+ PART_EMIT_FROM_AREA Create an area of equidistant points for emission
+ ==================== =================================================
+
+            If not provided, it will default to ``PART_EMIT_FROM_NODES``.
+ num_points: int
+ The number of points to emit from.
+ Defaults to 100.
+ emit_time: float
+ The emission time to start the particle trace from. If not provided,
+ it will use the current time.
+ total_time: float
+            The total emission time. If not provided, EnSight uses the end time
+            for a transient simulation and a best internal estimate for steady-state simulations.
+        delta_time: float
+            The interval between emissions. If not provided, EnSight computes
+            a best estimate.
+ color_by
+ The optional variable to color the particle trace by.
+ It can be the name, the ID or the ``ENS_VAR`` object.
+        surface_restrict: bool
+            True to restrict the particle traces to the input parts.
+            Defaults to False. The flag is also applied to any additional
+            emitter appended to the created particle trace.
+
+ Examples
+ --------
+ >>> s = LocalLauncher().start()
+ >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
+ >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
+ >>> s.load_data(cas_file, result_file=dat_file)
+ >>> parts = s.ensight.utils.parts
+        >>> parts.create_particle_trace_from_parts("mytraces", "Velocity", parts=["hot-inlet", "cold-inlet"], num_points=100, source_parts=parts.select_parts_by_dimension(3))
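+
+        A hedged variant restricting the traces to the seed surfaces and
+        emitting from an equidistant area rather than the part nodes (the
+        enum is assumed to be exposed at ``ensight.objs.enums.PART_EMIT_FROM_AREA``):
+
+        >>> parts.create_particle_trace_from_parts("mytraces2", "Velocity", parts=["hot-inlet", "cold-inlet"], num_points=100, part_distribution_type=s.ensight.objs.enums.PART_EMIT_FROM_AREA, surface_restrict=True, source_parts=parts.select_parts_by_dimension(3))  # enum path assumed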
+ """
+ emitter_type = self._EMIT_PART
+ direction, converted_source_parts = self._prepare_particle_creation(
+ direction=direction, source_parts=source_parts
+ )
+ particle_trace_part = self._create_particle_trace_part(
+ name,
+ variable,
+ direction,
+ converted_source_parts,
+ pathlines=pathlines,
+ emit_time=emit_time,
+ delta_time=delta_time,
+ total_time=total_time,
+ surface_restrict=surface_restrict,
+ )
+ new_parts = [convert_part(self.ensight, p) for p in parts]
+ new_emitters = self._create_emitters(
+ emitter_type=emitter_type,
+ parts=new_parts,
+ part_distribution_type=part_distribution_type,
+ num_points=num_points,
+ )
+ palette = self._find_palette(color_by=color_by)
+ return self._add_emitters_to_particle_trace_part(
+ particle_trace_part, new_emitters=new_emitters, palette=palette, clean=True
+ )
+
+ def add_emitter_points_to_particle_trace_part(
+ self,
+ particle_trace_part: Union[str, int, "ENS_PART"],
+ points: List[List[float]],
+ ) -> "ENS_PART_PARTICLE_TRACE":
+ """
+ Add point emitters to an existing particle trace. The function will return the updated
+ ``ENS_PART`` object.
+
+ Parameters
+ ----------
+
+ particle_trace_part:
+            The particle trace part to add the emitters to.
+            It can be the name, the ID or the ``ENS_PART`` object.
+ points: list
+            List of lists containing the coordinates of the seed points.
+
+ Examples
+ --------
+ >>> s = LocalLauncher().start()
+ >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
+ >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
+ >>> s.load_data(cas_file, result_file=dat_file)
+        >>> parts = s.ensight.utils.parts
+        >>> p = parts.create_particle_trace_from_points("mytraces", "Velocity", points=[[-0.02, -0.123, 0.01576]], source_parts=parts.select_parts_by_dimension(3))
+        >>> p = parts.add_emitter_points_to_particle_trace_part(p, points=[[0.109876, -0.123, 0.0123]])
+ """
+ emitter_type = self._EMIT_POINT
+ particle_trace_part = self._cure_particle_trace_part(particle_trace_part)
+ new_emitters = self._create_emitters(emitter_type=emitter_type, points=points)
+ return self._add_emitters_to_particle_trace_part(particle_trace_part, new_emitters)
+
+ def add_emitter_line_to_particle_trace_part(
+ self,
+ particle_trace_part: Union[str, int, "ENS_PART"],
+ point1: List[float],
+ point2: List[float],
+ num_points: Optional[int] = 100,
+ ) -> "ENS_PART_PARTICLE_TRACE":
+ """
+ Add a line emitter to an existing particle trace. The function will return the updated
+ ``ENS_PART`` object.
+
+ Parameters
+ ----------
+
+ particle_trace_part:
+            The particle trace part to add the emitters to.
+            It can be the name, the ID or the ``ENS_PART`` object.
+ point1: list
+ The coordinates for point 1.
+ point2: list
+ The coordinates for point 2.
+ num_points: int
+ The number of seed points. Defaults to 100.
+
+ Examples
+ --------
+ >>> s = LocalLauncher().start()
+ >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
+ >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
+ >>> s.load_data(cas_file, result_file=dat_file)
+        >>> parts = s.ensight.utils.parts
+        >>> p = parts.create_particle_trace_from_points("mytraces", "Velocity", points=[[-0.02,-0.123,0.01576]], source_parts=parts.select_parts_by_dimension(3))
+        >>> p = parts.add_emitter_line_to_particle_trace_part(p, point1=[-0.02, -0.123, 0.01576], point2=[0.109876, -0.123, 0.0123], num_points=10)
+ """
+ emitter_type = self._EMIT_LINE
+ particle_trace_part = self._cure_particle_trace_part(particle_trace_part)
+ new_emitters = self._create_emitters(
+ emitter_type=emitter_type, point1=point1, point2=point2, num_points=num_points
+ )
+ return self._add_emitters_to_particle_trace_part(particle_trace_part, new_emitters)
+
+ def add_emitter_plane_to_particle_trace_part(
+ self,
+ particle_trace_part: Union[str, int, "ENS_PART"],
+ point1: List[float],
+ point2: List[float],
+ point3: List[float],
+ num_points_x: Optional[int] = 25,
+ num_points_y: Optional[int] = 25,
+ ) -> "ENS_PART_PARTICLE_TRACE":
+ """
+ Add a plane emitter to an existing particle trace. The function will return the updated
+ ``ENS_PART`` object.
+
+ Parameters
+ ----------
+
+ particle_trace_part:
+            The particle trace part to add the emitters to.
+            It can be the name, the ID or the ``ENS_PART`` object.
+        point1: list
+            The coordinates for point 1, a corner of the plane.
+        point2: list
+            The coordinates for point 2, a corner of the plane.
+        point3: list
+            The coordinates for point 3, a corner of the plane.
+ num_points_x: int
+ The number of points on the ``X`` direction of the emission plane.
+ Defaults to 25.
+ num_points_y: int
+ The number of points on the ``Y`` direction of the emission plane.
+ Defaults to 25.
+
+ Examples
+ --------
+ >>> s = LocalLauncher().start()
+ >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
+ >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
+ >>> s.load_data(cas_file, result_file=dat_file)
+        >>> parts = s.ensight.utils.parts
+        >>> p = parts.create_particle_trace_from_points("mytraces", "Velocity", points=[[-0.02,-0.123,0.01576]], source_parts=parts.select_parts_by_dimension(3))
+        >>> p = parts.add_emitter_plane_to_particle_trace_part(p, point1=[-0.02, -0.123, 0.01576], point2=[0.109876, -0.123, 0.0123], point3=[0.1, 0, 0.05], num_points_x=10, num_points_y=10)
+ """
+ emitter_type = self._EMIT_PLANE
+ particle_trace_part = self._cure_particle_trace_part(particle_trace_part)
+ new_emitters = self._create_emitters(
+ emitter_type=emitter_type,
+ point1=point1,
+ point2=point2,
+ point3=point3,
+ num_points_x=num_points_x,
+ num_points_y=num_points_y,
+ )
+ return self._add_emitters_to_particle_trace_part(particle_trace_part, new_emitters)
+
+ def add_emitter_parts_to_particle_trace_part(
+ self,
+ particle_trace_part: Union[str, int, "ENS_PART"],
+ parts: List[Union[str, int, "ENS_PART"]],
+ part_distribution_type: Optional[int] = 0,
+ num_points: Optional[int] = 100,
+ ) -> "ENS_PART_PARTICLE_TRACE":
+ """
+ Add a list of part emitters to an existing particle trace. The function will return the updated
+ ``ENS_PART`` object.
+
+ Parameters
+ ----------
+
+ particle_trace_part:
+            The particle trace part to add the emitters to.
+            It can be the name, the ID or the ``ENS_PART`` object.
+ parts: list
+ A list of parts to emit the particle traces from.
+ They can be their names, their IDs or the respective ``ENS_PART`` objects.
+ part_distribution_type: int
+            The distribution of the emitters when emitting from a part.
+ This table describes the options:
+
+ ==================== =================================================
+            Name                 Description
+ ==================== =================================================
+ PART_EMIT_FROM_NODES Emit from the nodes of the part
+ PART_EMIT_FROM_AREA Create an area of equidistant points for emission
+ ==================== =================================================
+
+            If not provided, it will default to ``PART_EMIT_FROM_NODES``.
+ num_points: int
+ The number of points to emit from.
+ Defaults to 100.
+
+ Examples
+ --------
+ >>> s = LocalLauncher().start()
+ >>> cas_file = s.download_pyansys_example("mixing_elbow.cas.h5","pyfluent/mixing_elbow")
+ >>> dat_file = s.download_pyansys_example("mixing_elbow.dat.h5","pyfluent/mixing_elbow")
+ >>> s.load_data(cas_file, result_file=dat_file)
+        >>> parts = s.ensight.utils.parts
+        >>> p = parts.create_particle_trace_from_points("mytraces", "Velocity", points=[[-0.02, -0.123, 0.01576]], source_parts=parts.select_parts_by_dimension(3))
+        >>> p = parts.add_emitter_parts_to_particle_trace_part(p, parts=["cold-inlet", "hot-inlet"], num_points=25)
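+
+        A hedged follow-up: append a second set of part emitters using an
+        area-based distribution (the enum is assumed to be exposed at
+        ``ensight.objs.enums.PART_EMIT_FROM_AREA``):
+
+        >>> p = parts.add_emitter_parts_to_particle_trace_part(p, parts=["hot-inlet"], part_distribution_type=s.ensight.objs.enums.PART_EMIT_FROM_AREA, num_points=25)  # enum path assumed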
+ """
+ emitter_type = self._EMIT_PART
+ particle_trace_part = self._cure_particle_trace_part(particle_trace_part)
+ new_parts = [convert_part(self.ensight, p) for p in parts]
+ new_emitters = self._create_emitters(
+ emitter_type=emitter_type,
+ parts=new_parts,
+ part_distribution_type=part_distribution_type,
+ num_points=num_points,
+ )
+ return self._add_emitters_to_particle_trace_part(particle_trace_part, new_emitters)
+
+ def select_parts(
+ self,
+ p_list: Optional[List[Union[str, int, "ENS_PART"]]] = None,
+ rec_flag: Optional[bool] = True,
+ ) -> Optional[List["ENS_PART"]]:
+ """
+        Select the given parts and, by default, record the selection,
+        honoring the EnSight preference to record command language by
+        part ID or by name. The input may be a string, an int, an
+        ``ensight.objs.ENS_PART``, or a list of these; the parts are
+        converted to a list of part objects and then selected.
+
+ Parameters
+ ----------
+        p_list: list
+            The list of parts to select. It can be a list of names,
+            a list of IDs (integers or strings), or a list of ``ENS_PART`` objects.
+        rec_flag: bool
+            True if the selection should be recorded.
+
+ Returns
+ -------
+ list
+            The list of selected part objects, or None on error.
+
+        Notes
+        -----
+        If you do not want a measured part in the selection, do not include
+        it in the list. A part is a measured part if its ``PARTTYPE`` equals
+        ``ensight.objs.enums.PART_DISCRETE_PARTICLE`` (3), for example
+        ``core.PARTS[0].PARTTYPE == ensight.objs.enums.PART_DISCRETE_PARTICLE``.
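+
+        Examples
+        --------
+        A minimal sketch, assuming a dataset containing a part named
+        "fluid" is loaded in the session ``s``:
+
+        >>> parts = s.ensight.utils.parts
+        >>> selected = parts.select_parts(["fluid"])
+        >>> everything = parts.select_parts(None)  # no input selects all parts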
+ """
+ #
+ pobj_list = self.get_part_id_obj_name(p_list, "obj")
+
+ if not pobj_list:
+ raise RuntimeError("Error, select_parts: part list is empty")
+ else:
+ # This was formerly used to record command lang 10.1.6(c)
+ # using part ids:
+ # ensight.part.select_begin(pid_list,record=1)
+            # Now records the selection, honoring the preference (2024R1)
+            # to record part selection by part ID or by name.
+ record = 1 if rec_flag else 0
+ self.ensight.objs.core.selection(name="ENS_PART").addchild(
+ pobj_list, replace=1, record=record
+ )
+ # This is essential to synchronize cmd lang with the GUI, C++
+ self.ensight.part.get_mainpartlist_select()
+
+ return pobj_list
+
+ def get_part_id_obj_name(
+ self,
+ plist: Optional[Union[str, int, "ENS_PART", List[str], List[int], List["ENS_PART"]]] = None,
+ ret_flag="id",
+ ) -> Union[Optional[List[int]], Optional[List[str]], Optional[List["ENS_PART"]]]:
+ """
+        Input a part or a list of parts and return an ID, an object, or a name,
+        or a list of IDs, objects, or names.
+
+ Parameters
+ ----------
+        plist: list
+            The part or list of parts to convert. It can be a name, an ID
+            (integer or string), an ``ENS_PART`` object, or a list of any of these.
+
+        ret_flag: str
+            A string that determines what is returned: ``"id"`` (default),
+            ``"name"``, or ``"obj"``.
+
+ Returns
+ -------
+ list
+            A list of part IDs, a list of names, or a list of ``ENS_PART`` objects,
+            depending on the requested ``ret_flag`` value.
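+
+        Examples
+        --------
+        A minimal sketch, assuming a dataset containing a part named
+        "fluid" is loaded in the session ``s``:
+
+        >>> parts = s.ensight.utils.parts
+        >>> ids = parts.get_part_id_obj_name("fluid")  # ret_flag defaults to "id"
+        >>> names = parts.get_part_id_obj_name(ids, ret_flag="name")
+        >>> objs = parts.get_part_id_obj_name("fluid", ret_flag="obj")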
+ """
+        # To preserve the interface, ret_flag was not made a required argument,
+        # so its value must be checked here.
+ if not ret_flag:
+ return None
+ if not plist:
+ plist = [p for p in self.ensight.objs.core.PARTS]
+ pobj_list: List["ENS_PART"] = []
+ #
+        # Determine the type of plist, then convert it to a list of ENS_PART objects
+ #
+ if (
+ isinstance(plist, self.ensight.objs.ENS_PART)
+ or isinstance(plist, int)
+ or isinstance(plist, str)
+ ):
+ p_list = [plist]
+ elif isinstance(plist, list) or isinstance(plist, ensobjlist):
+ p_list = [p for p in plist]
+ else: # pragma: no cover
+ raise RuntimeError( # pragma: no cover
+ "Unknown type of input var plist {}".format(type(plist))
+ )
+ #
+ # p_list must now be a list
+ #
+
+ if not p_list:
+ return None
+ if not isinstance(p_list[0], (str, int, self.ensight.objs.ENS_PART)): # pragma: no cover
+ error = "First member is neither ENS_PART, int, nor string" # pragma: no cover
+ error += f"{p_list[0]} type = {type(p_list[0])}; aborting" # pragma: no cover
+ raise RuntimeError(error) # pragma: no cover
+ pobjs: List["ENS_PART"]
+ if isinstance(p_list[0], int):
+ # list of ints must be part ids
+ for pid in p_list:
+ pobjs = [p for p in self.ensight.objs.core.PARTS if p.PARTNUMBER == pid]
+ for prt in pobjs:
+ pobj_list.append(prt)
+ elif isinstance(p_list[0], str):
+ if not p_list[0].isdigit():
+ for pname in p_list:
+ pobjs = [p for p in self.ensight.objs.core.PARTS if p.DESCRIPTION == pname]
+ for prt in pobjs:
+ pobj_list.append(prt)
+ else: # digits, must be a string list of part ids?
+ for pid_str in p_list:
+ pobjs = [
+ p for p in self.ensight.objs.core.PARTS if p.PARTNUMBER == int(pid_str)
+ ]
+ for prt in pobjs:
+ pobj_list.append(prt)
+ else:
+ for prt in p_list:
+ pobj_list.append(prt)
+ if ret_flag == "name":
+ val_strings = [str(p.DESCRIPTION) for p in pobj_list]
+ return val_strings
+ if ret_flag == "obj":
+ val_objs = [p for p in pobj_list]
+ return val_objs
+ val_ints = [int(p.PARTNUMBER) for p in pobj_list]
+ return val_ints
diff --git a/tests/example_tests/test_designpoints.py b/tests/example_tests/test_designpoints.py
index b1ceb160cab..1107406a2f0 100644
--- a/tests/example_tests/test_designpoints.py
+++ b/tests/example_tests/test_designpoints.py
@@ -1,88 +1,88 @@
-import glob
-import os
-
-from ansys.pyensight.core import DockerLauncher, LocalLauncher
-import pytest
-
-
-def test_designpoints(tmpdir, pytestconfig: pytest.Config):
- data_dir = tmpdir.mkdir("datadir")
- use_local = pytestconfig.getoption("use_local_launcher")
- root = None
- if use_local:
- launcher = LocalLauncher()
- root = "http://s3.amazonaws.com/www3.ensight.com/PyEnSight/ExampleData"
- else:
- launcher = DockerLauncher(data_directory=data_dir, use_dev=True)
- session = launcher.start()
- session.load_example("elbow_dp0_dp1.ens", root=root)
- image = session.show("image", width=800, height=600)
- image.download(data_dir)
- print([p.PATHNAME for p in session.ensight.objs.core.PARTS])
- # Create two more viewports (there is always one viewport)
- session.ensight.objs.core.DEFAULTVPORT[0].createviewport()
- session.ensight.objs.core.DEFAULTVPORT[0].createviewport()
- # Make these viewports visible and grab references to the viewport objects.
- session.ensight.objs.core.VPORTS.set_attr(session.ensight.objs.enums.VISIBLE, True)
- vp0 = session.ensight.objs.core.VPORTS[0]
- vp1 = session.ensight.objs.core.VPORTS[1]
- vp2 = session.ensight.objs.core.VPORTS[2]
- # Position the viewports by setting their WIDTH, HEIGHT, ORIGINX and ORIGINY attributes.
- vp0.WIDTH = 0.5
- vp1.WIDTH = 0.5
- vp2.WIDTH = 1.0
- session.ensight.objs.core.VPORTS.set_attr(session.ensight.objs.enums.HEIGHT, 0.5)
- vp0.ORIGINX = 0.0
- vp0.ORIGINY = 0.5
- vp0.BORDERVISIBLE = True
- vp1.ORIGINX = 0.5
- vp1.ORIGINY = 0.5
- vp2.ORIGINX = 0.0
- vp2.ORIGINY = 0.0
- # Link the transforms of all the viewports to each other
- session.ensight.objs.core.VPORTS.set_attr(session.ensight.objs.enums.LINKED, True)
- # Hide all but the "fluid" parts
- session.ensight.objs.core.PARTS.set_attr(session.ensight.objs.enums.VISIBLE, False)
- session.ensight.objs.core.PARTS["fluid"].set_attr(session.ensight.objs.enums.VISIBLE, True)
- session.ensight.objs.core.PARTS["fluid"].set_attr(
- session.ensight.objs.enums.ELTREPRESENTATION, session.ensight.objs.enums.BORD_FULL
- )
- image = session.show("image", width=800, height=600)
- image.download(data_dir)
- fluid0 = session.ensight.objs.core.PARTS["fluid"][0]
- fluid1 = session.ensight.objs.core.PARTS["fluid"][1]
-
- # Using an LPART: we find the ENS_LPART instance in the first case
- # for the part named "fluid". If we load() this object, we get a
- # new instance of the case 0 "fluid" mesh.
- fluid0_diff = session.ensight.objs.core.CASES[0].LPARTS.find("fluid")[0].load()
- fluid0_diff.ELTREPRESENTATION = session.ensight.objs.enums.BORD_FULL
-
- # Get the temperature variable and color the fluid parts by it.
- temperature = session.ensight.objs.core.VARIABLES["Static_Temperature"][0]
- fluid0_diff.COLORBYPALETTE = temperature
- fluid0.COLORBYPALETTE = temperature
- fluid1.COLORBYPALETTE = temperature
-
- # Each of the three parts should only be visible in one viewport.
- fluid0.VIEWPORTVIS = session.ensight.objs.enums.VIEWPORT00
- fluid1.VIEWPORTVIS = session.ensight.objs.enums.VIEWPORT01
- fluid0_diff.VIEWPORTVIS = session.ensight.objs.enums.VIEWPORT02
- image = session.show("image", width=800, height=600)
- image.download(data_dir)
- temperature_diff = session.ensight.objs.core.create_variable(
- "Temperature_Difference",
- value="CaseMapDiff(plist, 2, Static_Temperature, 0, 1)",
- sources=[fluid0_diff],
- )
-
- fluid0_diff.COLORBYPALETTE = temperature_diff
- image = session.show("image", width=800, height=600)
- image.download(data_dir)
- limits = [(round(v / 5.0) * 5) for v in temperature_diff.MINMAX]
- temperature_diff.PALETTE[0].MINMAX = limits
- session.show("remote")
- local_files = glob.glob(os.path.join(data_dir, "*"))
- png_local = [x for x in local_files if ".png" in x]
- assert len(png_local) == 4
- session.close()
+import glob
+import os
+
+from ansys.pyensight.core import DockerLauncher, LocalLauncher
+import pytest
+
+
+def test_designpoints(tmpdir, pytestconfig: pytest.Config):
+ data_dir = tmpdir.mkdir("datadir")
+ use_local = pytestconfig.getoption("use_local_launcher")
+ root = None
+ if use_local:
+ launcher = LocalLauncher()
+ root = "http://s3.amazonaws.com/www3.ensight.com/PyEnSight/ExampleData"
+ else:
+ launcher = DockerLauncher(data_directory=data_dir, use_dev=True)
+ session = launcher.start()
+ session.load_example("elbow_dp0_dp1.ens", root=root)
+ image = session.show("image", width=800, height=600)
+ image.download(data_dir)
+ print([p.PATHNAME for p in session.ensight.objs.core.PARTS])
+ # Create two more viewports (there is always one viewport)
+ session.ensight.objs.core.DEFAULTVPORT[0].createviewport()
+ session.ensight.objs.core.DEFAULTVPORT[0].createviewport()
+ # Make these viewports visible and grab references to the viewport objects.
+ session.ensight.objs.core.VPORTS.set_attr(session.ensight.objs.enums.VISIBLE, True)
+ vp0 = session.ensight.objs.core.VPORTS[0]
+ vp1 = session.ensight.objs.core.VPORTS[1]
+ vp2 = session.ensight.objs.core.VPORTS[2]
+ # Position the viewports by setting their WIDTH, HEIGHT, ORIGINX and ORIGINY attributes.
+ vp0.WIDTH = 0.5
+ vp1.WIDTH = 0.5
+ vp2.WIDTH = 1.0
+ session.ensight.objs.core.VPORTS.set_attr(session.ensight.objs.enums.HEIGHT, 0.5)
+ vp0.ORIGINX = 0.0
+ vp0.ORIGINY = 0.5
+ vp0.BORDERVISIBLE = True
+ vp1.ORIGINX = 0.5
+ vp1.ORIGINY = 0.5
+ vp2.ORIGINX = 0.0
+ vp2.ORIGINY = 0.0
+ # Link the transforms of all the viewports to each other
+ session.ensight.objs.core.VPORTS.set_attr(session.ensight.objs.enums.LINKED, True)
+ # Hide all but the "fluid" parts
+ session.ensight.objs.core.PARTS.set_attr(session.ensight.objs.enums.VISIBLE, False)
+ session.ensight.objs.core.PARTS["fluid"].set_attr(session.ensight.objs.enums.VISIBLE, True)
+ session.ensight.objs.core.PARTS["fluid"].set_attr(
+ session.ensight.objs.enums.ELTREPRESENTATION, session.ensight.objs.enums.BORD_FULL
+ )
+ image = session.show("image", width=800, height=600)
+ image.download(data_dir)
+ fluid0 = session.ensight.objs.core.PARTS["fluid"][0]
+ fluid1 = session.ensight.objs.core.PARTS["fluid"][1]
+
+ # Using an LPART: we find the ENS_LPART instance in the first case
+ # for the part named "fluid". If we load() this object, we get a
+ # new instance of the case 0 "fluid" mesh.
+ fluid0_diff = session.ensight.objs.core.CASES[0].LPARTS.find("fluid")[0].load()
+ fluid0_diff.ELTREPRESENTATION = session.ensight.objs.enums.BORD_FULL
+
+ # Get the temperature variable and color the fluid parts by it.
+ temperature = session.ensight.objs.core.VARIABLES["Static_Temperature"][0]
+ fluid0_diff.COLORBYPALETTE = temperature
+ fluid0.COLORBYPALETTE = temperature
+ fluid1.COLORBYPALETTE = temperature
+
+ # Each of the three parts should only be visible in one viewport.
+ fluid0.VIEWPORTVIS = session.ensight.objs.enums.VIEWPORT00
+ fluid1.VIEWPORTVIS = session.ensight.objs.enums.VIEWPORT01
+ fluid0_diff.VIEWPORTVIS = session.ensight.objs.enums.VIEWPORT02
+ image = session.show("image", width=800, height=600)
+ image.download(data_dir)
+ temperature_diff = session.ensight.objs.core.create_variable(
+ "Temperature_Difference",
+ value="CaseMapDiff(plist, 2, Static_Temperature, 0, 1)",
+ sources=[fluid0_diff],
+ )
+
+ fluid0_diff.COLORBYPALETTE = temperature_diff
+ image = session.show("image", width=800, height=600)
+ image.download(data_dir)
+ limits = [(round(v / 5.0) * 5) for v in temperature_diff.MINMAX]
+ temperature_diff.PALETTE[0].MINMAX = limits
+ session.show("remote")
+ local_files = glob.glob(os.path.join(data_dir, "*"))
+ png_local = [x for x in local_files if ".png" in x]
+ assert len(png_local) == 4
+ session.close()
diff --git a/tests/example_tests/test_glb_usd.py b/tests/example_tests/test_glb_usd.py
index 848b2845f16..f311880cfad 100644
--- a/tests/example_tests/test_glb_usd.py
+++ b/tests/example_tests/test_glb_usd.py
@@ -1,59 +1,59 @@
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-
-import requests
-
-
-def test_glb_usd():
- # Get the example files
- base_uri = "https://s3.amazonaws.com/www3.ensight.com/PyEnSight/ExampleData"
- filenames = ["fluent_elbow.glb", "rwing_bsl_1.glb", "rwing_bsl_2.glb"]
- with tempfile.TemporaryDirectory() as tmpdirname:
- for filename in filenames:
- outpath = os.path.join(tmpdirname, filename)
- url = f"{base_uri}/{filename}"
- with requests.get(url, stream=True) as r:
- with open(outpath, "wb") as f:
- shutil.copyfileobj(r.raw, f)
- # convert these into USD format
- with tempfile.TemporaryDirectory() as usd_dir:
- # Build the CLI command
- cmd = [sys.executable]
- cmd.extend(["-m", "ansys.pyensight.core.utils.omniverse_cli"])
- cmd.extend(["--oneshot", "1"])
- cmd.extend(["--include_camera", "0"])
- cmd.extend(["--monitor_directory", outpath])
- cmd.append(usd_dir)
- env_vars = os.environ.copy()
- subprocess.run(cmd, close_fds=True, env=env_vars)
- assert os.path.isfile(os.path.join(usd_dir, "dsg_scene.usd"))
- assert os.path.isdir(os.path.join(usd_dir, "Parts"))
-
-
-def test_ensight_glb_usd():
- # Get the example files: both generated by Ensight, _10.glb is time varying.
- base_uri = "https://s3.amazonaws.com/www3.ensight.com/PyEnSight/ExampleData"
- filenames = ["obstacle.glb", "obstacle_10.glb"]
- with tempfile.TemporaryDirectory() as tmpdirname:
- for filename in filenames:
- outpath = os.path.join(tmpdirname, filename)
- url = f"{base_uri}/{filename}"
- with requests.get(url, stream=True) as r:
- with open(outpath, "wb") as f:
- shutil.copyfileobj(r.raw, f)
- # convert these into USD format
- with tempfile.TemporaryDirectory() as usd_dir:
- # Build the CLI command
- cmd = [sys.executable]
- cmd.extend(["-m", "ansys.pyensight.core.utils.omniverse_cli"])
- cmd.extend(["--oneshot", "1"])
- cmd.extend(["--include_camera", "0"])
- cmd.extend(["--monitor_directory", outpath])
- cmd.append(usd_dir)
- env_vars = os.environ.copy()
- subprocess.run(cmd, close_fds=True, env=env_vars)
- assert os.path.isfile(os.path.join(usd_dir, "dsg_scene.usd"))
- assert os.path.isdir(os.path.join(usd_dir, "Parts"))
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+import requests
+
+
+def test_glb_usd():
+ # Get the example files
+ base_uri = "https://s3.amazonaws.com/www3.ensight.com/PyEnSight/ExampleData"
+ filenames = ["fluent_elbow.glb", "rwing_bsl_1.glb", "rwing_bsl_2.glb"]
+ with tempfile.TemporaryDirectory() as tmpdirname:
+ for filename in filenames:
+ outpath = os.path.join(tmpdirname, filename)
+ url = f"{base_uri}/{filename}"
+ with requests.get(url, stream=True) as r:
+ with open(outpath, "wb") as f:
+ shutil.copyfileobj(r.raw, f)
+ # convert these into USD format
+ with tempfile.TemporaryDirectory() as usd_dir:
+ # Build the CLI command
+ cmd = [sys.executable]
+ cmd.extend(["-m", "ansys.pyensight.core.utils.omniverse_cli"])
+ cmd.extend(["--oneshot", "1"])
+ cmd.extend(["--include_camera", "0"])
+ cmd.extend(["--monitor_directory", outpath])
+ cmd.append(usd_dir)
+ env_vars = os.environ.copy()
+ subprocess.run(cmd, close_fds=True, env=env_vars)
+ assert os.path.isfile(os.path.join(usd_dir, "dsg_scene.usd"))
+ assert os.path.isdir(os.path.join(usd_dir, "Parts"))
+
+
+def test_ensight_glb_usd():
+    # Get the example files: both generated by EnSight; _10.glb is time-varying.
+ base_uri = "https://s3.amazonaws.com/www3.ensight.com/PyEnSight/ExampleData"
+ filenames = ["obstacle.glb", "obstacle_10.glb"]
+ with tempfile.TemporaryDirectory() as tmpdirname:
+ for filename in filenames:
+ outpath = os.path.join(tmpdirname, filename)
+ url = f"{base_uri}/{filename}"
+ with requests.get(url, stream=True) as r:
+ with open(outpath, "wb") as f:
+ shutil.copyfileobj(r.raw, f)
+ # convert these into USD format
+ with tempfile.TemporaryDirectory() as usd_dir:
+ # Build the CLI command
+ cmd = [sys.executable]
+ cmd.extend(["-m", "ansys.pyensight.core.utils.omniverse_cli"])
+ cmd.extend(["--oneshot", "1"])
+ cmd.extend(["--include_camera", "0"])
+ cmd.extend(["--monitor_directory", outpath])
+ cmd.append(usd_dir)
+ env_vars = os.environ.copy()
+ subprocess.run(cmd, close_fds=True, env=env_vars)
+ assert os.path.isfile(os.path.join(usd_dir, "dsg_scene.usd"))
+ assert os.path.isdir(os.path.join(usd_dir, "Parts"))
diff --git a/tests/example_tests/test_libuserd.py b/tests/example_tests/test_libuserd.py
index d2b3fe6ca0e..b8a9ef40e95 100644
--- a/tests/example_tests/test_libuserd.py
+++ b/tests/example_tests/test_libuserd.py
@@ -1,78 +1,78 @@
-from ansys.pyensight.core.libuserd import LibUserd
-import numpy
-import pytest
-
-
-def test_libuserd_basic(tmpdir, pytestconfig: pytest.Config):
- data_dir = tmpdir.mkdir("datadir")
- use_local = pytestconfig.getoption("use_local_launcher")
- if use_local:
- libuserd = LibUserd()
- else:
- libuserd = LibUserd(use_docker=True, use_dev=True, data_directory=data_dir)
- libuserd.initialize()
- libuserd.get_all_readers()
- libuserd.ansys_release_number()
- libuserd.ansys_release_string()
- cas_file = libuserd.download_pyansys_example("mixing_elbow.cas.h5", "pyfluent/mixing_elbow")
- dat_file = libuserd.download_pyansys_example("mixing_elbow.dat.h5", "pyfluent/mixing_elbow")
- r = libuserd.query_format(cas_file, dat_file)
- d = r[0].read_dataset(cas_file, dat_file)
-
- parts = d.parts()
- p = [p for p in parts if p.name == "elbow-fluid"][0]
-
- vars = d.variables()
- v = [v for v in vars if v.name == "Static_Pressure"][0]
-
- p.nodes()
- p.num_elements()
- p.element_conn(libuserd.ElementType.HEX08)
- p.variable_values(v, libuserd.ElementType.HEX08)
-
- libuserd.shutdown()
-
-
-def test_libuserd_synthetic_time(tmpdir, pytestconfig: pytest.Config):
- data_dir = tmpdir.mkdir("datadir")
- use_local = pytestconfig.getoption("use_local_launcher")
- if use_local:
- libuserd = LibUserd()
- else:
- libuserd = LibUserd(use_docker=True, use_dev=True, data_directory=data_dir)
- libuserd.initialize()
- opt = {
- "Long names": 0,
- "Number of timesteps": 10,
- "Number of scalars": 3,
- "Number of spheres": 10,
- "Number of cubes": 10,
- }
- d = libuserd.load_data("foo", file_format="Synthetic", reader_options=opt)
- p = d.parts()[0]
- v = d.variables()[0]
-
- assert "Sphere" in p.name
- assert "Scalar" in v.name
- assert len(d.timevalues()) == 10
- assert d.get_number_of_time_sets() == 1
-
- d.set_timestep(5)
- n = p.nodes()
- n.shape = (len(n) // 3, 3)
- centroid_5 = numpy.average(n, 0)
-
- d.set_timestep(0)
- n = p.nodes()
- n.shape = (len(n) // 3, 3)
- centroid_0 = numpy.average(n, 0)
-
- d.set_timevalue(5.0)
- n = p.nodes()
- n.shape = (len(n) // 3, 3)
- centroid_50 = numpy.average(n, 0)
-
- assert numpy.array_equal(centroid_5, centroid_50)
- assert not numpy.array_equal(centroid_5, centroid_0)
-
- libuserd.shutdown()
+from ansys.pyensight.core.libuserd import LibUserd
+import numpy
+import pytest
+
+
+def test_libuserd_basic(tmpdir, pytestconfig: pytest.Config):
+ data_dir = tmpdir.mkdir("datadir")
+ use_local = pytestconfig.getoption("use_local_launcher")
+ if use_local:
+ libuserd = LibUserd()
+ else:
+ libuserd = LibUserd(use_docker=True, use_dev=True, data_directory=data_dir)
+ libuserd.initialize()
+ libuserd.get_all_readers()
+ libuserd.ansys_release_number()
+ libuserd.ansys_release_string()
+ cas_file = libuserd.download_pyansys_example("mixing_elbow.cas.h5", "pyfluent/mixing_elbow")
+ dat_file = libuserd.download_pyansys_example("mixing_elbow.dat.h5", "pyfluent/mixing_elbow")
+ r = libuserd.query_format(cas_file, dat_file)
+ d = r[0].read_dataset(cas_file, dat_file)
+
+ parts = d.parts()
+ p = [p for p in parts if p.name == "elbow-fluid"][0]
+
+ vars = d.variables()
+ v = [v for v in vars if v.name == "Static_Pressure"][0]
+
+ p.nodes()
+ p.num_elements()
+ p.element_conn(libuserd.ElementType.HEX08)
+ p.variable_values(v, libuserd.ElementType.HEX08)
+
+ libuserd.shutdown()
+
+
+def test_libuserd_synthetic_time(tmpdir, pytestconfig: pytest.Config):
+ data_dir = tmpdir.mkdir("datadir")
+ use_local = pytestconfig.getoption("use_local_launcher")
+ if use_local:
+ libuserd = LibUserd()
+ else:
+ libuserd = LibUserd(use_docker=True, use_dev=True, data_directory=data_dir)
+ libuserd.initialize()
+ opt = {
+ "Long names": 0,
+ "Number of timesteps": 10,
+ "Number of scalars": 3,
+ "Number of spheres": 10,
+ "Number of cubes": 10,
+ }
+ d = libuserd.load_data("foo", file_format="Synthetic", reader_options=opt)
+ p = d.parts()[0]
+ v = d.variables()[0]
+
+ assert "Sphere" in p.name
+ assert "Scalar" in v.name
+ assert len(d.timevalues()) == 10
+ assert d.get_number_of_time_sets() == 1
+
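+    # The synthetic sphere parts move over time: the centroids computed at
+    # timestep 5 and at time value 5.0 must match each other and differ
+    # from the centroid at timestep 0.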
+ d.set_timestep(5)
+ n = p.nodes()
+ n.shape = (len(n) // 3, 3)
+ centroid_5 = numpy.average(n, 0)
+
+ d.set_timestep(0)
+ n = p.nodes()
+ n.shape = (len(n) // 3, 3)
+ centroid_0 = numpy.average(n, 0)
+
+ d.set_timevalue(5.0)
+ n = p.nodes()
+ n.shape = (len(n) // 3, 3)
+ centroid_50 = numpy.average(n, 0)
+
+ assert numpy.array_equal(centroid_5, centroid_50)
+ assert not numpy.array_equal(centroid_5, centroid_0)
+
+ libuserd.shutdown()
diff --git a/tests/example_tests/test_queries.py b/tests/example_tests/test_queries.py
index 19d307251da..0f63bcaab43 100644
--- a/tests/example_tests/test_queries.py
+++ b/tests/example_tests/test_queries.py
@@ -1,146 +1,146 @@
-import glob
-from operator import attrgetter
-import os
-
-from ansys.pyensight.core.dockerlauncher import DockerLauncher
-from ansys.pyensight.core.locallauncher import LocalLauncher
-import numpy as np
-import pytest
-
-
-def test_queries(tmpdir, pytestconfig: pytest.Config):
- data_dir = tmpdir.mkdir("datadir")
- use_local = pytestconfig.getoption("use_local_launcher")
- root = None
- if use_local:
- launcher = LocalLauncher()
- root = "http://s3.amazonaws.com/www3.ensight.com/PyEnSight/ExampleData"
- else:
- launcher = DockerLauncher(data_directory=data_dir, use_dev=True)
- session = launcher.start()
- session.load_example("waterbreak.ens", root=root)
- # Get the core part and variable objects
- var = session.ensight.objs.core.VARIABLES["p"][0]
- part = session.ensight.objs.core.PARTS["default_region"][0]
- # Isolate the 3D part "default_region" and display it at
- # solution time 0.7, coloring it by the 'p' variable.
- session.ensight.objs.core.PARTS.set_attr("VISIBLE", False)
- part.VISIBLE = True
- part.ELTREPRESENTATION = session.ensight.objs.enums.BORD_FULL
- part.COLORBYPALETTE = var
- session.ensight.objs.core.SOLUTIONTIME = 0.7
- session.ensight.objs.core.HIDDENLINE_USE_RGB = True
- session.ensight.objs.core.HIDDENLINE = True
- # Rotate the view a bit
- session.ensight.view_transf.rotate(-66.5934067, 1.71428561, 0)
- session.ensight.view_transf.rotate(18.0219765, -31.6363659, 0)
- session.ensight.view_transf.rotate(-4.83516455, 9.5064888, 0)
- session.ensight.view_transf.zoom(0.740957975)
- session.ensight.view_transf.zoom(0.792766333)
- session.ensight.view_transf.translate(0.0719177574, 0.0678303316, 0)
- session.ensight.view_transf.rotate(4.83516455, 3.42857122, 0)
- # Display it
- image = session.show("image", width=800, height=600)
- image.download(data_dir)
- session.ensight.part.select_begin(part.PARTNUMBER)
- session.ensight.query_ent_var.begin()
- session.ensight.query_ent_var.description("Pressure vs Distance")
- session.ensight.query_ent_var.query_type("generated")
- session.ensight.query_ent_var.number_of_sample_pts(20)
- session.ensight.query_ent_var.constrain("line_tool")
- session.ensight.query_ent_var.line_loc(1, 0.00, 0.075, 0.0)
- session.ensight.query_ent_var.line_loc(2, 0.58, 0.075, 0.0)
- session.ensight.query_ent_var.distance("arc_length")
- session.ensight.query_ent_var.variable_1(var.DESCRIPTION)
- session.ensight.query_ent_var.generate_over("distance")
- session.ensight.query_ent_var.variable_2("DISTANCE")
- session.ensight.query_ent_var.end()
- session.ensight.query_ent_var.query()
- # This is an interesting trick. The above code uses the
- # 'native' command bindings. We would like to be able to
- # use the query object. EnSight object 'values' are monotonically
- # increasing numbers. Thus, the 'max()' operation on a list
- # of EnSight objects will return the most recently created one.
- line_query = max(session.ensight.objs.core.QUERIES, key=attrgetter("__OBJID__"))
- print(line_query, line_query.QUERY_DATA["xydata"])
- line_plot = session.ensight.objs.core.defaultplot[0].createplotter()
- line_query.addtoplot(line_plot)
- line_plot.rescale()
- line_plot.PLOTTITLE = f"{var.DESCRIPTION} vs Distance"
- line_plot.AXISXTITLE = "Distance"
- line_plot.AXISYTITLE = var.DESCRIPTION
- line_plot.LEGENDVISIBLE = False
- line_plot.AXISXAUTOSCALE = False
- line_plot.AXISXMIN = 0.0
- line_plot.AXISXMAX = 0.6
- line_plot.AXISXLABELFORMAT = "%.2f"
- line_plot.AXISXGRIDTYPE = 1
- line_plot.AXISYGRIDTYPE = 1
- line_plot.TIMEMARKER = False
- line_plot.AXISYAUTOSCALE = False
- line_plot.AXISYMIN = -200.0
- line_plot.AXISYMAX = 2200.0
- image = session.show("image", width=800, height=600)
- image.download(data_dir)
- elem_ids = [134, 398, 662]
- session.ensight.part.select_begin(part.PARTNUMBER)
- session.ensight.query_interact.search("exact")
- session.ensight.query_interact.query("element")
- session.ensight.query_interact.number_displayed(3)
- # Create 3 element probes using pre-selected element numbers
- for id in elem_ids:
- session.ensight.query_interact.create(id)
- # Make the probe locations a bit more visible
- session.ensight.objs.core.PROBES[0].LABELALWAYSONTOP = True
-
- # Make three queries. Again, a generated query but with
- # "time" as "variable 2" and specific simulation start and
- # end times specified
- session.ensight.part.select_begin(part.PARTNUMBER)
- elem_queries = []
- for id in elem_ids:
- session.ensight.query_ent_var.begin()
- session.ensight.query_ent_var.description(f"{id}")
- session.ensight.query_ent_var.query_type("generated")
- session.ensight.query_ent_var.number_of_sample_pts(20)
- session.ensight.query_ent_var.begin_simtime(0)
- session.ensight.query_ent_var.end_simtime(1)
- session.ensight.query_ent_var.constrain("element")
- session.ensight.query_ent_var.sample_by("value")
- session.ensight.query_ent_var.variable_1(var.DESCRIPTION)
- session.ensight.query_ent_var.elem_id(id)
- session.ensight.query_ent_var.generate_over("time")
- session.ensight.query_ent_var.variable_2("TIME")
- session.ensight.query_ent_var.update_with_newtimesteps("ON")
- session.ensight.query_ent_var.end()
- session.ensight.query_ent_var.query()
- # Just like before, grab the query objects.
- elem_queries.append(max(session.ensight.objs.core.QUERIES, key=attrgetter("__OBJID__")))
- print(elem_queries)
- elem_plot = session.ensight.objs.core.defaultplot[0].createplotter(
- xtitle="Time", ytitle=var.DESCRIPTION
- )
- for query in elem_queries:
- query.addtoplot(elem_plot)
- elem_plot.rescale()
- elem_plot.PLOTTITLE = "Elements vs Time"
- elem_plot.AXISXLABELFORMAT = "%.1f"
- elem_plot.AXISXGRIDTYPE = 1
- elem_plot.AXISYGRIDTYPE = 1
- anim = session.show("animation", width=800, height=600, fps=5)
- anim.download(data_dir)
- session.ensight.objs.core.SOLUTIONTIME = 0.7
- data = np.array(line_query.QUERY_DATA["xydata"])
- fit = np.polyfit(data[:, 0], data[:, 1], 6)
- new_y = np.polyval(fit, data[:, 0])
- data[:, 1] = new_y
- session.ensight.query_xy_create("curvefit", "fit", "Distance", data.tolist())
- fit_query = max(session.ensight.objs.core.QUERIES)
- fit_query.addtoplot(line_plot)
- session.show("remote")
- local_files = glob.glob(os.path.join(data_dir, "*"))
- png_local = [x for x in local_files if ".png" in x]
- mp4_local = [x for x in local_files if ".mp4" in x]
- assert len(png_local) == 2
- assert len(mp4_local) == 1
- session.close()
+import glob
+from operator import attrgetter
+import os
+
+from ansys.pyensight.core.dockerlauncher import DockerLauncher
+from ansys.pyensight.core.locallauncher import LocalLauncher
+import numpy as np
+import pytest
+
+
+def test_queries(tmpdir, pytestconfig: pytest.Config):
+ data_dir = tmpdir.mkdir("datadir")
+ use_local = pytestconfig.getoption("use_local_launcher")
+ root = None
+ if use_local:
+ launcher = LocalLauncher()
+ root = "http://s3.amazonaws.com/www3.ensight.com/PyEnSight/ExampleData"
+ else:
+ launcher = DockerLauncher(data_directory=data_dir, use_dev=True)
+ session = launcher.start()
+ session.load_example("waterbreak.ens", root=root)
+ # Get the core part and variable objects
+ var = session.ensight.objs.core.VARIABLES["p"][0]
+ part = session.ensight.objs.core.PARTS["default_region"][0]
+ # Isolate the 3D part "default_region" and display it at
+ # solution time 0.7, coloring it by the 'p' variable.
+ session.ensight.objs.core.PARTS.set_attr("VISIBLE", False)
+ part.VISIBLE = True
+ part.ELTREPRESENTATION = session.ensight.objs.enums.BORD_FULL
+ part.COLORBYPALETTE = var
+ session.ensight.objs.core.SOLUTIONTIME = 0.7
+ session.ensight.objs.core.HIDDENLINE_USE_RGB = True
+ session.ensight.objs.core.HIDDENLINE = True
+ # Rotate the view a bit
+ session.ensight.view_transf.rotate(-66.5934067, 1.71428561, 0)
+ session.ensight.view_transf.rotate(18.0219765, -31.6363659, 0)
+ session.ensight.view_transf.rotate(-4.83516455, 9.5064888, 0)
+ session.ensight.view_transf.zoom(0.740957975)
+ session.ensight.view_transf.zoom(0.792766333)
+ session.ensight.view_transf.translate(0.0719177574, 0.0678303316, 0)
+ session.ensight.view_transf.rotate(4.83516455, 3.42857122, 0)
+ # Display it
+ image = session.show("image", width=800, height=600)
+ image.download(data_dir)
+ session.ensight.part.select_begin(part.PARTNUMBER)
+ session.ensight.query_ent_var.begin()
+ session.ensight.query_ent_var.description("Pressure vs Distance")
+ session.ensight.query_ent_var.query_type("generated")
+ session.ensight.query_ent_var.number_of_sample_pts(20)
+ session.ensight.query_ent_var.constrain("line_tool")
+ session.ensight.query_ent_var.line_loc(1, 0.00, 0.075, 0.0)
+ session.ensight.query_ent_var.line_loc(2, 0.58, 0.075, 0.0)
+ session.ensight.query_ent_var.distance("arc_length")
+ session.ensight.query_ent_var.variable_1(var.DESCRIPTION)
+ session.ensight.query_ent_var.generate_over("distance")
+ session.ensight.query_ent_var.variable_2("DISTANCE")
+ session.ensight.query_ent_var.end()
+ session.ensight.query_ent_var.query()
+ # This is an interesting trick. The above code uses the
+ # 'native' command bindings. We would like to be able to
+ # use the query object. EnSight object 'values' are monotonically
+ # increasing numbers. Thus, the 'max()' operation on a list
+ # of EnSight objects will return the most recently created one.
+ line_query = max(session.ensight.objs.core.QUERIES, key=attrgetter("__OBJID__"))
+ print(line_query, line_query.QUERY_DATA["xydata"])
+ line_plot = session.ensight.objs.core.defaultplot[0].createplotter()
+ line_query.addtoplot(line_plot)
+ line_plot.rescale()
+ line_plot.PLOTTITLE = f"{var.DESCRIPTION} vs Distance"
+ line_plot.AXISXTITLE = "Distance"
+ line_plot.AXISYTITLE = var.DESCRIPTION
+ line_plot.LEGENDVISIBLE = False
+ line_plot.AXISXAUTOSCALE = False
+ line_plot.AXISXMIN = 0.0
+ line_plot.AXISXMAX = 0.6
+ line_plot.AXISXLABELFORMAT = "%.2f"
+ line_plot.AXISXGRIDTYPE = 1
+ line_plot.AXISYGRIDTYPE = 1
+ line_plot.TIMEMARKER = False
+ line_plot.AXISYAUTOSCALE = False
+ line_plot.AXISYMIN = -200.0
+ line_plot.AXISYMAX = 2200.0
+ image = session.show("image", width=800, height=600)
+ image.download(data_dir)
+ elem_ids = [134, 398, 662]
+ session.ensight.part.select_begin(part.PARTNUMBER)
+ session.ensight.query_interact.search("exact")
+ session.ensight.query_interact.query("element")
+ session.ensight.query_interact.number_displayed(3)
+ # Create 3 element probes using pre-selected element numbers
+ for id in elem_ids:
+ session.ensight.query_interact.create(id)
+ # Make the probe locations a bit more visible
+ session.ensight.objs.core.PROBES[0].LABELALWAYSONTOP = True
+
+ # Make three queries. Again, a generated query but with
+ # "time" as "variable 2" and specific simulation start and
+ # end times specified
+ session.ensight.part.select_begin(part.PARTNUMBER)
+ elem_queries = []
+ for id in elem_ids:
+ session.ensight.query_ent_var.begin()
+ session.ensight.query_ent_var.description(f"{id}")
+ session.ensight.query_ent_var.query_type("generated")
+ session.ensight.query_ent_var.number_of_sample_pts(20)
+ session.ensight.query_ent_var.begin_simtime(0)
+ session.ensight.query_ent_var.end_simtime(1)
+ session.ensight.query_ent_var.constrain("element")
+ session.ensight.query_ent_var.sample_by("value")
+ session.ensight.query_ent_var.variable_1(var.DESCRIPTION)
+ session.ensight.query_ent_var.elem_id(id)
+ session.ensight.query_ent_var.generate_over("time")
+ session.ensight.query_ent_var.variable_2("TIME")
+ session.ensight.query_ent_var.update_with_newtimesteps("ON")
+ session.ensight.query_ent_var.end()
+ session.ensight.query_ent_var.query()
+ # Just like before, grab the query objects.
+ elem_queries.append(max(session.ensight.objs.core.QUERIES, key=attrgetter("__OBJID__")))
+ print(elem_queries)
+ elem_plot = session.ensight.objs.core.defaultplot[0].createplotter(
+ xtitle="Time", ytitle=var.DESCRIPTION
+ )
+ for query in elem_queries:
+ query.addtoplot(elem_plot)
+ elem_plot.rescale()
+ elem_plot.PLOTTITLE = "Elements vs Time"
+ elem_plot.AXISXLABELFORMAT = "%.1f"
+ elem_plot.AXISXGRIDTYPE = 1
+ elem_plot.AXISYGRIDTYPE = 1
+ anim = session.show("animation", width=800, height=600, fps=5)
+ anim.download(data_dir)
+ session.ensight.objs.core.SOLUTIONTIME = 0.7
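+    # Fit a 6th-degree polynomial to the line query's xy data and add the
+    # fitted curve to the existing plot as a new XY query.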
+ data = np.array(line_query.QUERY_DATA["xydata"])
+ fit = np.polyfit(data[:, 0], data[:, 1], 6)
+ new_y = np.polyval(fit, data[:, 0])
+ data[:, 1] = new_y
+ session.ensight.query_xy_create("curvefit", "fit", "Distance", data.tolist())
+ fit_query = max(session.ensight.objs.core.QUERIES)
+ fit_query.addtoplot(line_plot)
+ session.show("remote")
+ local_files = glob.glob(os.path.join(data_dir, "*"))
+ png_local = [x for x in local_files if ".png" in x]
+ mp4_local = [x for x in local_files if ".mp4" in x]
+ assert len(png_local) == 2
+ assert len(mp4_local) == 1
+ session.close()
diff --git a/tests/example_tests/test_remote_execution.py b/tests/example_tests/test_remote_execution.py
index 77057dde999..47697db02ca 100644
--- a/tests/example_tests/test_remote_execution.py
+++ b/tests/example_tests/test_remote_execution.py
@@ -1,55 +1,55 @@
-import time
-
-from ansys.pyensight.core import DockerLauncher, LocalLauncher
-import pytest
-
-
-def test_remote_execution(tmpdir, pytestconfig: pytest.Config):
- def myfunc(ensight):
- names = []
- for p in ensight.objs.core.PARTS:
- names.append(p.DESCRIPTION)
- return names
-
- def count(ensight, attr, value):
- import time # time must be imported on the EnSight side as well
-
- start = time.time()
- count = 0
- for p in ensight.objs.core.PARTS:
- if p.getattr(attr) == value:
- count += 1
- return count, time.time() - start
-
- data_dir = tmpdir.mkdir("datadir")
- use_local = pytestconfig.getoption("use_local_launcher")
- if use_local:
- launcher = LocalLauncher()
- else:
- launcher = DockerLauncher(data_directory=data_dir, use_dev=True)
- session = launcher.start()
- session.load_data(f"{session.cei_home}/ensight{session.cei_suffix}/data/guard_rail/crash.case")
- start = time.time()
- names = myfunc(session.ensight)
- print(f"Remote: {time.time()-start}")
- print(names)
-
- cmd = "def myfunc():\n"
- cmd += " names = []\n"
- cmd += " for p in ensight.objs.core.PARTS:\n"
- cmd += " names.append(p.DESCRIPTION)\n"
- cmd += " return names.__repr__()\n"
- session.cmd(cmd, do_eval=False)
- start = time.time()
- names = session.cmd("myfunc()")
- print(f"Remote: {time.time()-start}")
- print(names)
- print(count(session.ensight, "VISIBLE", True))
- _grpc = session.exec(count, "VISIBLE", True)
- print(_grpc)
- try:
- remote = session.exec(count, "VISIBLE", True)
- print(remote)
- except RuntimeError: # case of mismatch between python versions
- pass
- session.close()
+import time
+
+from ansys.pyensight.core import DockerLauncher, LocalLauncher
+import pytest
+
+
+def test_remote_execution(tmpdir, pytestconfig: pytest.Config):
+ def myfunc(ensight):
+ names = []
+ for p in ensight.objs.core.PARTS:
+ names.append(p.DESCRIPTION)
+ return names
+
+ def count(ensight, attr, value):
+ import time # time must be imported on the EnSight side as well
+
+ start = time.time()
+ count = 0
+ for p in ensight.objs.core.PARTS:
+ if p.getattr(attr) == value:
+ count += 1
+ return count, time.time() - start
+
+ data_dir = tmpdir.mkdir("datadir")
+ use_local = pytestconfig.getoption("use_local_launcher")
+ if use_local:
+ launcher = LocalLauncher()
+ else:
+ launcher = DockerLauncher(data_directory=data_dir, use_dev=True)
+ session = launcher.start()
+ session.load_data(f"{session.cei_home}/ensight{session.cei_suffix}/data/guard_rail/crash.case")
+ start = time.time()
+ names = myfunc(session.ensight)
+ print(f"Remote: {time.time()-start}")
+ print(names)
+
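+    # Define the same function inside the remote EnSight interpreter and
+    # invoke it via session.cmd(); the result comes back as its repr().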
+ cmd = "def myfunc():\n"
+ cmd += " names = []\n"
+ cmd += " for p in ensight.objs.core.PARTS:\n"
+ cmd += " names.append(p.DESCRIPTION)\n"
+ cmd += " return names.__repr__()\n"
+ session.cmd(cmd, do_eval=False)
+ start = time.time()
+ names = session.cmd("myfunc()")
+ print(f"Remote: {time.time()-start}")
+ print(names)
+ print(count(session.ensight, "VISIBLE", True))
+ _grpc = session.exec(count, "VISIBLE", True)
+ print(_grpc)
+ try:
+ remote = session.exec(count, "VISIBLE", True)
+ print(remote)
+ except RuntimeError: # case of mismatch between python versions
+ pass
+ session.close()
diff --git a/tests/example_tests/test_remote_objects.py b/tests/example_tests/test_remote_objects.py
index 3fc51dc9411..6206af32e2e 100644
--- a/tests/example_tests/test_remote_objects.py
+++ b/tests/example_tests/test_remote_objects.py
@@ -1,31 +1,31 @@
-import gc
-
-from ansys.pyensight.core import DockerLauncher, LocalLauncher
-import pytest
-
-
-def test_remote_objects(tmpdir, pytestconfig: pytest.Config):
- data_dir = tmpdir.mkdir("datadir")
- use_local = pytestconfig.getoption("use_local_launcher")
- if use_local:
- launcher = LocalLauncher()
- else:
- launcher = DockerLauncher(data_directory=data_dir, use_dev=True)
- session = launcher.start()
- session.load_data(f"{session.cei_home}/ensight{session.cei_suffix}/data/guard_rail/crash.case")
-
- # call __str__ on an ENSOBJ object w/o DESCRIPTION attribute (for coverage)
- print(session.ensight.objs.core)
-
- if session.cei_suffix >= "242":
- # Create an ENS_GROUP object (a remote object)
- g = session.ensight.objs.core.PARTS.find("*rail*", wildcard=1, group=1)
- assert "ENS_GROUP" in g.__str__(), "ensobjlist.find() did not return an ENS_GROUP instance"
- assert "Owned" in g.__str__(), "Remote ENS_GROUP is not 'Owned'"
- assert "Owned" not in g.CHILDREN.__str__(), "Objects in ENS_GROUP are incorrectly 'Owned'"
-
- # Exercise the custom __del__() method
- g = None
- gc.collect()
-
- session.close()
+import gc
+
+from ansys.pyensight.core import DockerLauncher, LocalLauncher
+import pytest
+
+
+def test_remote_objects(tmpdir, pytestconfig: pytest.Config):
+ data_dir = tmpdir.mkdir("datadir")
+ use_local = pytestconfig.getoption("use_local_launcher")
+ if use_local:
+ launcher = LocalLauncher()
+ else:
+ launcher = DockerLauncher(data_directory=data_dir, use_dev=True)
+ session = launcher.start()
+ session.load_data(f"{session.cei_home}/ensight{session.cei_suffix}/data/guard_rail/crash.case")
+
+ # call __str__ on an ENSOBJ object w/o DESCRIPTION attribute (for coverage)
+ print(session.ensight.objs.core)
+
+ if session.cei_suffix >= "242":
+ # Create an ENS_GROUP object (a remote object)
+ g = session.ensight.objs.core.PARTS.find("*rail*", wildcard=1, group=1)
+ assert "ENS_GROUP" in g.__str__(), "ensobjlist.find() did not return an ENS_GROUP instance"
+ assert "Owned" in g.__str__(), "Remote ENS_GROUP is not 'Owned'"
+ assert "Owned" not in g.CHILDREN.__str__(), "Objects in ENS_GROUP are incorrectly 'Owned'"
+
+ # Exercise the custom __del__() method
+ g = None
+ gc.collect()
+
+ session.close()
diff --git a/tests/example_tests/test_rest_api.py b/tests/example_tests/test_rest_api.py
index 06d39321136..4dfc2566d6f 100644
--- a/tests/example_tests/test_rest_api.py
+++ b/tests/example_tests/test_rest_api.py
@@ -1,73 +1,73 @@
-from ansys.pyensight.core.dockerlauncher import DockerLauncher
-from ansys.pyensight.core.locallauncher import LocalLauncher
-import pytest
-import requests
-
-
-def test_rest_apis(tmpdir, pytestconfig: pytest.Config):
- data_dir = tmpdir.mkdir("datadir")
- use_local = pytestconfig.getoption("use_local_launcher")
- if use_local:
- launcher = LocalLauncher(enable_rest_api=True)
- else:
- launcher = DockerLauncher(data_directory=data_dir, use_dev=True, enable_rest_api=True)
-
- s = launcher.start()
- s.load_data(f"{s.cei_home}/ensight{s.cei_suffix}/data/cube/cube.case")
- uri_base = f"http://{s.hostname}:{s.html_port}/ensight/v1/{s.secret_key}"
-
- # Simple attempt to do some math, store it and get the value back
-
- ret = requests.put(f"{uri_base}/exec", json="enscl.rest_test = 30*20")
- assert ret.status_code == 200
-
- value = requests.put(f"{uri_base}/eval", json="enscl.rest_test").json()
- assert value == 600, "Unable to check computed value"
-
- # grab some helpful object ids
- js_part_name = requests.put(f"{uri_base}/eval", json="ensight.objs.core.PARTS[0]").json()
- part_id = requests.put(
- f"{uri_base}/eval", json="ensight.objs.core.PARTS[0]", params=dict(returns="__OBJID__")
- ).json()
- # check for '@ENSOBJ={id}@' name
- assert js_part_name == f"@ENSOBJ={part_id}@"
-
- # Simple command language example
- ret = requests.put(f"{uri_base}/cmd/ensight.view_transf.rotate", json=[5.2, 10.4, 0]).json()
- assert ret == 0
-
- # Alternate API for getting part object references
- ret = requests.get(f"{uri_base}/ensobjs/ensight.objs.core/PARTS").json()
- assert ret[0] == js_part_name
-
- # Manipulate the VISIBLE attribute in various ways
- # Start by the ensobjs API
- ret = requests.get(f"{uri_base}/ensobjs/{part_id}/VISIBLE").json()
- assert ret is True
- ret = requests.put(f"{uri_base}/ensobjs/{part_id}/VISIBLE", json=False)
- assert ret.status_code == 200
- # Verify via getattrs API
- ret = requests.put(
- f"{uri_base}/ensobjs/getattrs", json=[part_id], params=dict(returns="DESCRIPTION,VISIBLE")
- ).json()
- assert ret[f"{part_id}"][1] is False
- # try via the setatts API
- ret = requests.put(
- f"{uri_base}/ensobjs/setattrs", json=dict(objects=[f"{part_id}"], values=dict(VISIBLE=True))
- )
- assert ret.status_code == 200
- # Verify the result
- ret = requests.get(f"{uri_base}/ensobjs/{part_id}/VISIBLE").json()
- assert ret is True
-
- # Simple remote function test
- foo_src = "def foo(n:int = 1):\n return list(numpy.random.rand(n))\n"
- ret = requests.put(
- f"{uri_base}/def_func/rest_test/foo", json=foo_src, params=dict(imports="numpy")
- )
- assert ret.status_code == 200
- ret = requests.put(uri_base + "/call_func/rest_test/foo", json=dict(n=3)).json()
- assert len(ret) == 3
- assert type(ret[0]) == float
-
- s.close()
+from ansys.pyensight.core.dockerlauncher import DockerLauncher
+from ansys.pyensight.core.locallauncher import LocalLauncher
+import pytest
+import requests
+
+
+def test_rest_apis(tmpdir, pytestconfig: pytest.Config):
+ data_dir = tmpdir.mkdir("datadir")
+ use_local = pytestconfig.getoption("use_local_launcher")
+ if use_local:
+ launcher = LocalLauncher(enable_rest_api=True)
+ else:
+ launcher = DockerLauncher(data_directory=data_dir, use_dev=True, enable_rest_api=True)
+
+ s = launcher.start()
+ s.load_data(f"{s.cei_home}/ensight{s.cei_suffix}/data/cube/cube.case")
+ uri_base = f"http://{s.hostname}:{s.html_port}/ensight/v1/{s.secret_key}"
+
+    # Do some simple math, store the result, and read the value back
+
+ ret = requests.put(f"{uri_base}/exec", json="enscl.rest_test = 30*20")
+ assert ret.status_code == 200
+
+ value = requests.put(f"{uri_base}/eval", json="enscl.rest_test").json()
+ assert value == 600, "Unable to check computed value"
+
+    # Grab some helpful object IDs
+ js_part_name = requests.put(f"{uri_base}/eval", json="ensight.objs.core.PARTS[0]").json()
+ part_id = requests.put(
+ f"{uri_base}/eval", json="ensight.objs.core.PARTS[0]", params=dict(returns="__OBJID__")
+ ).json()
+ # check for '@ENSOBJ={id}@' name
+ assert js_part_name == f"@ENSOBJ={part_id}@"
+
+ # Simple command language example
+ ret = requests.put(f"{uri_base}/cmd/ensight.view_transf.rotate", json=[5.2, 10.4, 0]).json()
+ assert ret == 0
+
+ # Alternate API for getting part object references
+ ret = requests.get(f"{uri_base}/ensobjs/ensight.objs.core/PARTS").json()
+ assert ret[0] == js_part_name
+
+ # Manipulate the VISIBLE attribute in various ways
+ # Start by the ensobjs API
+ ret = requests.get(f"{uri_base}/ensobjs/{part_id}/VISIBLE").json()
+ assert ret is True
+ ret = requests.put(f"{uri_base}/ensobjs/{part_id}/VISIBLE", json=False)
+ assert ret.status_code == 200
+ # Verify via getattrs API
+ ret = requests.put(
+ f"{uri_base}/ensobjs/getattrs", json=[part_id], params=dict(returns="DESCRIPTION,VISIBLE")
+ ).json()
+ assert ret[f"{part_id}"][1] is False
+    # Try via the setattrs API
+ ret = requests.put(
+ f"{uri_base}/ensobjs/setattrs", json=dict(objects=[f"{part_id}"], values=dict(VISIBLE=True))
+ )
+ assert ret.status_code == 200
+ # Verify the result
+ ret = requests.get(f"{uri_base}/ensobjs/{part_id}/VISIBLE").json()
+ assert ret is True
+
+ # Simple remote function test
+ foo_src = "def foo(n:int = 1):\n return list(numpy.random.rand(n))\n"
+ ret = requests.put(
+ f"{uri_base}/def_func/rest_test/foo", json=foo_src, params=dict(imports="numpy")
+ )
+ assert ret.status_code == 200
+ ret = requests.put(uri_base + "/call_func/rest_test/foo", json=dict(n=3)).json()
+ assert len(ret) == 3
+    assert isinstance(ret[0], float)
+
+ s.close()
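+
+
+# A minimal sketch, assuming a session started with enable_rest_api=True: the
+# "exec" endpoint runs a statement for its side effects, while "eval" returns
+# the JSON-encoded value of an expression. The helper name is hypothetical.
+def _rest_eval(s, expression):
+    # Build the per-session URI base used by all of the REST calls above.
+    uri_base = f"http://{s.hostname}:{s.html_port}/ensight/v1/{s.secret_key}"
+    # Evaluate the expression inside the EnSight interpreter and decode JSON.
+    return requests.put(f"{uri_base}/eval", json=expression).json()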
diff --git a/tests/example_tests/test_usd_export.py b/tests/example_tests/test_usd_export.py
index 9bf82224217..f2536449324 100644
--- a/tests/example_tests/test_usd_export.py
+++ b/tests/example_tests/test_usd_export.py
@@ -1,83 +1,83 @@
-import glob
-import os
-import time
-
-from ansys.pyensight.core import DockerLauncher, LocalLauncher
-from pxr import Usd
-import pytest
-
-
-def compare_prims(prim1, prim2):
- if prim1.GetPath() != prim2.GetPath():
- return False
- if prim1.GetTypeName() != prim2.GetTypeName():
- return False
- for attr1 in prim1.GetAttributes():
- attr2 = prim2.GetAttribute(attr1.GetName())
- if not attr2 or attr1.Get() != attr2.Get():
- return False
- return True
-
-
-def compare_usd_files(stage1, stage2):
- if not stage1 or not stage2:
- raise RuntimeError("Cannot open one of the Usd files")
- prims1 = list(stage1.Traverse())
- prims2 = list(stage2.Traverse())
- if len(prims1) != len(prims2):
- print("Different number of prims.")
- return False
- for prim1, prim2 in zip(prims1, prims2):
- if not compare_prims(prim1, prim2):
- print(f"Differences found in prim: {prim1.GetPath()}")
- return False
- print("No differences found.")
- return True
-
-
-def wait_for_idle(session):
- found = False
- start = time.time()
- while not found and time.time() - start < 60:
- status = session.ensight.utils.omniverse.read_status_file()
- if status.get("status") == "idle":
- found = True
- time.sleep(0.5)
- return found
-
-
-def test_usd_export(tmpdir, pytestconfig: pytest.Config):
- data_dir = tmpdir.mkdir("datadir")
- use_local = pytestconfig.getoption("use_local_launcher")
- if use_local:
- launcher = LocalLauncher()
- else:
- launcher = DockerLauncher(data_directory=data_dir, use_dev=True)
- session = launcher.start()
- session.load_example("waterbreak.ens")
- session.ensight.utils.omniverse.create_connection(data_dir)
- assert wait_for_idle(session)
- usd_files = glob.glob(os.path.join(data_dir, "*.usd"))
- assert len(usd_files) == 1
- base_usd = usd_files[0]
- parts = glob.glob(os.path.join(data_dir, "Parts", "*.usd"))
- assert len(parts) == 5
- temp_stage = Usd.Stage.Open(usd_files[0])
- # Save off the first stage to make it static and not get live updates for
- # later comparison
- temp_stage.Export(os.path.join(data_dir, "stage1.usd"))
- stage1 = Usd.Stage.Open(os.path.join(data_dir, "stage1.usd"))
- session.ensight.objs.core.PARTS.set_attr("COLORBYPALETTE", "alpha1")
- session.ensight.utils.omniverse.update()
- assert wait_for_idle(session)
- stage2 = Usd.Stage.Open(base_usd)
- diff = compare_usd_files(stage1, stage2)
- assert diff is False
- diff = compare_usd_files(temp_stage, stage2)
- assert diff is True
- session.ensight.utils.omniverse.update(temporal=True)
- assert wait_for_idle(session)
- parts = glob.glob(os.path.join(data_dir, "Parts", "*.usd"))
- # Considering deduplication, at the end of the export there will be 39 items
- # not 100 (5*20)
- assert len(parts) > 5
+import glob
+import os
+import time
+
+from ansys.pyensight.core import DockerLauncher, LocalLauncher
+from pxr import Usd
+import pytest
+
+
+def compare_prims(prim1, prim2):
+ if prim1.GetPath() != prim2.GetPath():
+ return False
+ if prim1.GetTypeName() != prim2.GetTypeName():
+ return False
+ for attr1 in prim1.GetAttributes():
+ attr2 = prim2.GetAttribute(attr1.GetName())
+ if not attr2 or attr1.Get() != attr2.Get():
+ return False
+ return True
+
+
+def compare_usd_files(stage1, stage2):
+ if not stage1 or not stage2:
+        raise RuntimeError("Cannot open one of the USD files")
+ prims1 = list(stage1.Traverse())
+ prims2 = list(stage2.Traverse())
+ if len(prims1) != len(prims2):
+ print("Different number of prims.")
+ return False
+ for prim1, prim2 in zip(prims1, prims2):
+ if not compare_prims(prim1, prim2):
+ print(f"Differences found in prim: {prim1.GetPath()}")
+ return False
+ print("No differences found.")
+ return True
+
+
+def wait_for_idle(session):
+ found = False
+ start = time.time()
+ while not found and time.time() - start < 60:
+ status = session.ensight.utils.omniverse.read_status_file()
+ if status.get("status") == "idle":
+ found = True
+ time.sleep(0.5)
+ return found
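+
+
+# A minimal sketch with a hypothetical file name ("snapshot.usd"): snapshot a
+# live stage with Export(), then compare the static copy against the export on
+# disk using the compare_usd_files() helper above.
+def _snapshot_matches_live(data_dir, live_usd):
+    snapshot_path = os.path.join(data_dir, "snapshot.usd")
+    # Export() writes a flattened, static copy that no longer receives updates.
+    Usd.Stage.Open(live_usd).Export(snapshot_path)
+    return compare_usd_files(Usd.Stage.Open(snapshot_path), Usd.Stage.Open(live_usd))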
+
+
+def test_usd_export(tmpdir, pytestconfig: pytest.Config):
+ data_dir = tmpdir.mkdir("datadir")
+ use_local = pytestconfig.getoption("use_local_launcher")
+ if use_local:
+ launcher = LocalLauncher()
+ else:
+ launcher = DockerLauncher(data_directory=data_dir, use_dev=True)
+ session = launcher.start()
+ session.load_example("waterbreak.ens")
+ session.ensight.utils.omniverse.create_connection(data_dir)
+ assert wait_for_idle(session)
+ usd_files = glob.glob(os.path.join(data_dir, "*.usd"))
+ assert len(usd_files) == 1
+ base_usd = usd_files[0]
+ parts = glob.glob(os.path.join(data_dir, "Parts", "*.usd"))
+ assert len(parts) == 5
+ temp_stage = Usd.Stage.Open(usd_files[0])
+    # Save off the first stage so that it stays static (no live updates) for
+    # later comparison
+ temp_stage.Export(os.path.join(data_dir, "stage1.usd"))
+ stage1 = Usd.Stage.Open(os.path.join(data_dir, "stage1.usd"))
+ session.ensight.objs.core.PARTS.set_attr("COLORBYPALETTE", "alpha1")
+ session.ensight.utils.omniverse.update()
+ assert wait_for_idle(session)
+ stage2 = Usd.Stage.Open(base_usd)
+ diff = compare_usd_files(stage1, stage2)
+ assert diff is False
+ diff = compare_usd_files(temp_stage, stage2)
+ assert diff is True
+ session.ensight.utils.omniverse.update(temporal=True)
+ assert wait_for_idle(session)
+ parts = glob.glob(os.path.join(data_dir, "Parts", "*.usd"))
+    # Because of deduplication, the export ends with 39 items,
+    # not 100 (5*20)
+ assert len(parts) > 5
diff --git a/tests/example_tests/test_utils.py b/tests/example_tests/test_utils.py
index 717bcaa7890..fc297fc0a21 100644
--- a/tests/example_tests/test_utils.py
+++ b/tests/example_tests/test_utils.py
@@ -1,76 +1,76 @@
-import os
-import warnings
-
-from ansys.pyensight.core.dockerlauncher import DockerLauncher
-from ansys.pyensight.core.enscontext import EnsContext
-from ansys.pyensight.core.locallauncher import LocalLauncher
-import pytest
-
-warnings.filterwarnings("ignore")
-
-
-def test_utils(tmpdir, pytestconfig: pytest.Config):
- data_dir = tmpdir.mkdir("datadir")
- use_local = pytestconfig.getoption("use_local_launcher")
- root = None
- if use_local:
- launcher = LocalLauncher()
- root = "http://s3.amazonaws.com/www3.ensight.com/PyEnSight/ExampleData"
- else:
- launcher = DockerLauncher(data_directory=data_dir, use_dev=True)
- session = launcher.start()
- session2 = launcher.start()
- # Check that only one session is associated to a launcher
- assert session == session2
- session.load_example("waterbreak.ens", root=root)
- # "Load" the utils modules
- parts = session.ensight.utils.parts
- views = session.ensight.utils.views
- query = session.ensight.utils.query
- init_state = session.capture_context()
- init_state.save(os.path.join(data_dir, "init_state.ctxz"))
- views.set_view_direction(1, 1, 1, name="isometric")
- iso_state = session.capture_context()
- # Since no tags are supplied, all the parts are selected
- parts.select_parts_by_tag().set_attr("VISIBLE", False)
- session.restore_context(iso_state)
- sn = session.ensight.utils.support.scoped_name
- zclip_state = None
- with sn(session.ensight) as ensight, sn(session.ensight.objs.core) as core:
- clip_default = core.DEFAULTPARTS[ensight.PART_CLIP_PLANE]
- clip = clip_default.createpart(name="XClip", sources=parts.select_parts_by_dimension(3))[0]
- attrs = []
- attrs.append(["MESHPLANE", ensight.objs.enums.MESH_SLICE_Z]) # Z axis
- attrs.append(["TOOL", ensight.objs.enums.CT_XYZ]) # XYZ Tool
- attrs.append(["VALUE", 0.55]) # Z value
- zclip = clip_default.createpart(name="ZClip", sources=clip)[0]
- query.create_distance(
- "zlip_query", query.DISTANCE_PART1D, [zclip], core.VARIABLES["p"][0], new_plotter=True
- )
- zclip_state = session.capture_context()
- session.show("remote")
- # Change the view to test the view restoring
- with sn(session.ensight, native_exceptions=True) as ensight:
- ensight.view_transf.rotate(-66.5934067, 1.71428561, 0)
- ensight.view_transf.rotate(18.0219765, -31.6363659, 0)
- ensight.view_transf.rotate(-4.83516455, 9.5064888, 0)
- ensight.view_transf.zoom(0.740957975)
- ensight.view_transf.zoom(0.792766333)
- ensight.view_transf.translate(0.0719177574, 0.0678303316, 0)
- ensight.view_transf.rotate(4.83516455, 3.42857122, 0)
- views.restore_view("isometric")
- session.show("remote")
- session.restore_context(zclip_state)
- temp_query = query.create_temporal(
- "temporal_query",
- query.TEMPORAL_XYZ,
- parts.select_parts_by_dimension(3),
- "alpha1",
- xyz=views.compute_model_centroid(),
- new_plotter=True,
- )
- print(temp_query.QUERY_DATA)
- session.show("remote")
- ctx = EnsContext()
- ctx.load(os.path.join(data_dir, "init_state.ctxz"))
- session.restore_context(ctx)
+import os
+import warnings
+
+from ansys.pyensight.core.dockerlauncher import DockerLauncher
+from ansys.pyensight.core.enscontext import EnsContext
+from ansys.pyensight.core.locallauncher import LocalLauncher
+import pytest
+
+warnings.filterwarnings("ignore")
+
+
+def test_utils(tmpdir, pytestconfig: pytest.Config):
+ data_dir = tmpdir.mkdir("datadir")
+ use_local = pytestconfig.getoption("use_local_launcher")
+ root = None
+ if use_local:
+ launcher = LocalLauncher()
+ root = "http://s3.amazonaws.com/www3.ensight.com/PyEnSight/ExampleData"
+ else:
+ launcher = DockerLauncher(data_directory=data_dir, use_dev=True)
+ session = launcher.start()
+ session2 = launcher.start()
+    # Check that only one session is associated with a launcher
+ assert session == session2
+ session.load_example("waterbreak.ens", root=root)
+ # "Load" the utils modules
+ parts = session.ensight.utils.parts
+ views = session.ensight.utils.views
+ query = session.ensight.utils.query
+ init_state = session.capture_context()
+ init_state.save(os.path.join(data_dir, "init_state.ctxz"))
+ views.set_view_direction(1, 1, 1, name="isometric")
+ iso_state = session.capture_context()
+ # Since no tags are supplied, all the parts are selected
+ parts.select_parts_by_tag().set_attr("VISIBLE", False)
+ session.restore_context(iso_state)
+ sn = session.ensight.utils.support.scoped_name
+ zclip_state = None
+ with sn(session.ensight) as ensight, sn(session.ensight.objs.core) as core:
+ clip_default = core.DEFAULTPARTS[ensight.PART_CLIP_PLANE]
+ clip = clip_default.createpart(name="XClip", sources=parts.select_parts_by_dimension(3))[0]
+ attrs = []
+ attrs.append(["MESHPLANE", ensight.objs.enums.MESH_SLICE_Z]) # Z axis
+ attrs.append(["TOOL", ensight.objs.enums.CT_XYZ]) # XYZ Tool
+ attrs.append(["VALUE", 0.55]) # Z value
+        zclip = clip_default.createpart(name="ZClip", sources=clip, attributes=attrs)[0]
+ query.create_distance(
+ "zlip_query", query.DISTANCE_PART1D, [zclip], core.VARIABLES["p"][0], new_plotter=True
+ )
+ zclip_state = session.capture_context()
+ session.show("remote")
+    # Change the view to test view restoring
+ with sn(session.ensight, native_exceptions=True) as ensight:
+ ensight.view_transf.rotate(-66.5934067, 1.71428561, 0)
+ ensight.view_transf.rotate(18.0219765, -31.6363659, 0)
+ ensight.view_transf.rotate(-4.83516455, 9.5064888, 0)
+ ensight.view_transf.zoom(0.740957975)
+ ensight.view_transf.zoom(0.792766333)
+ ensight.view_transf.translate(0.0719177574, 0.0678303316, 0)
+ ensight.view_transf.rotate(4.83516455, 3.42857122, 0)
+ views.restore_view("isometric")
+ session.show("remote")
+ session.restore_context(zclip_state)
+ temp_query = query.create_temporal(
+ "temporal_query",
+ query.TEMPORAL_XYZ,
+ parts.select_parts_by_dimension(3),
+ "alpha1",
+ xyz=views.compute_model_centroid(),
+ new_plotter=True,
+ )
+ print(temp_query.QUERY_DATA)
+ session.show("remote")
+ ctx = EnsContext()
+ ctx.load(os.path.join(data_dir, "init_state.ctxz"))
+ session.restore_context(ctx)
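+
+
+# A minimal sketch, assuming a live session: scoped_name() yields a context
+# manager that shortens deep attribute paths, so "ensight" and "core" below
+# are just local aliases. The helper itself is an editorial assumption.
+def _make_x_clip(session):
+    sn = session.ensight.utils.support.scoped_name
+    with sn(session.ensight) as ensight, sn(session.ensight.objs.core) as core:
+        # Clip the 3D parts with the default clip-plane tool part.
+        clip_default = core.DEFAULTPARTS[ensight.PART_CLIP_PLANE]
+        sources = session.ensight.utils.parts.select_parts_by_dimension(3)
+        return clip_default.createpart(name="XClip", sources=sources)[0]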
diff --git a/tests/unit_tests/test_session.py b/tests/unit_tests/test_session.py
index 030b59cd400..81e60446660 100644
--- a/tests/unit_tests/test_session.py
+++ b/tests/unit_tests/test_session.py
@@ -1,302 +1,302 @@
-"""Unit tests for session.py"""
-import platform
-from unittest import mock
-import webbrowser
-
-import ansys.pyensight.core
-from ansys.pyensight.core.launcher import Launcher
-import ansys.pyensight.core.renderable
-from ansys.pyensight.core.session import Session # noqa: F401
-import pytest
-
-
-def test_show(mocked_session, mocker):
- session = mocked_session
- session.ensight.objs.core.TIMESTEP = 1
- session.ensight.objs.core.TIMESTEP_LIMITS = [0, 5]
- vars = mock.MagicMock("EnSightVARS")
- pressure_mock = mock.MagicMock("Pressure")
- density_mock = mock.MagicMock("Density")
- pressure_mock.DESCRIPTION = "Pressure"
- density_mock.DESCRIPTION = "Density"
- vars.find = lambda val, str: [pressure_mock, density_mock]
- session.ensight.objs.core.VARIABLES = vars
- filename = f"{session._launcher.session_directory}/remote_filename"
- mocker.patch.object(
- ansys.pyensight.core.renderable.Renderable,
- "_generate_filename",
- return_value=(f"{filename}", "remote_filename"),
- )
- stream_mock = mock.MagicMock("stream")
- stream_mock.side_effect = lambda *args, **kwargs: 6
- session.ensight.dsg_new_stream = stream_mock
- update_mock = mock.MagicMock("update")
- update_mock.side_effect = lambda *args, **kwargs: True
- session.ensight.dsg_new_stream = stream_mock
- session.ensight.dsg_save_update = update_mock
- session.show()
- session.show("webgl")
- session.show("remote")
- session.show("remote_scene")
- session.show("deep_pixel")
- session.show("animation")
- session._cei_suffix = "232"
- session.show("sgeo")
- session._cei_suffix = "211"
- session.show("sgeo")
- with pytest.raises(RuntimeError) as exec_info:
- session.show("3DVRRendering")
- assert "Unable to generate requested visualization" in str(exec_info)
- session._html_port = None
- with pytest.raises(RuntimeError) as exec_info:
- session.show()
- assert "No websocketserver has been associated with this Session" in str(exec_info)
-
-
-def test_clone(mocked_session):
- session = mocked_session
- cmd = str(session)
- second_connection = eval(cmd)
- assert session.secret_key == second_connection.secret_key
-
-
-def test_exec(mocked_session):
- def function(*args, **kwargs):
- return True
-
- session = mocked_session
- session._cei_suffix = 211
- fargs = ["1", "2"]
- fkwargs = {"a": 3, "b": 4}
- with pytest.raises(RuntimeError):
- session.exec(function, *fargs, remote=True, **fkwargs)
- session._cei_suffix = 232
- session._ensight_python_version("4", "5", "23")
- with pytest.raises(RuntimeError):
- session.exec(function, *fargs, remote=True, **fkwargs)
- session.cmd = lambda *args, **kwargs: "Bob"
- session._ensight_python_version = platform.python_version_tuple()
- assert session.exec(function, *fargs, remote=True, **fkwargs) == "Bob"
- assert session.exec(function, *fargs, remote=False, **fkwargs) is True
-
-
-def test_session_load_data(mocked_session):
- session = mocked_session
- session.cmd = lambda *args, **kwargs: 0
- case = mock.MagicMock("EnSightCase")
- case.DESCRIPTION = "CurrentCase"
- session.ensight.objs.core.CURRENTCASE = [case]
- session.load_data(
- data_file="/stairway/to/heaven",
- result_file="/path/do/darkness",
- file_format=".mobi",
- reader_options={"a": 1, "b": 2},
- new_case=False,
- representation="3D_feature_2D_full",
- )
- session.load_data(
- data_file="/stairway/to/heaven",
- file_format=".mobi",
- new_case=False,
- representation="3D_feature_2D_full",
- )
- case = mock.MagicMock("EnSightCase")
- case.DESCRIPTION = "CurrentCase"
- case.ACTIVE = 0
- session.ensight.objs.core.CASES = [case]
- session.load_data(
- data_file="/stairway/to/heaven",
- result_file="/path/do/darkness",
- file_format=".mobi",
- reader_options={"a": 1, "b": 2},
- new_case=True,
- representation="3D_feature_2D_full",
- )
- case.ACTIVE = 1
- with pytest.raises(RuntimeError):
- session.load_data(
- data_file="/stairway/to/heaven",
- result_file="/path/do/darkness",
- file_format=".mobi",
- reader_options={"a": 1, "b": 2},
- new_case=True,
- representation="3D_feature_2D_full",
- )
- session.cmd = mock.MagicMock("envision_cmd")
- session.cmd.side_effect = ["envision", 0]
- session.load_data(
- data_file="/stairway/to/heaven",
- result_file="/path/do/darkness",
- file_format=".mobi",
- reader_options={"a": 1, "b": 2},
- new_case=True,
- representation="3D_feature_2D_full",
- )
- session.cmd.side_effect = ["envision", -1]
- with pytest.raises(RuntimeError):
- session.load_data(
- data_file="/stairway/to/heaven",
- result_file="/path/do/darkness",
- file_format=".mobi",
- reader_options={"a": 1, "b": 2},
- new_case=True,
- representation="3D_feature_2D_full",
- )
- session.cmd = mock.MagicMock("fileformat_cmd")
- session.cmd.side_effect = [0] * 7 + [".cas"] + [0] * 12
- session.load_data(
- data_file="/stairway/to/heaven",
- result_file="/path/do/darkness",
- reader_options={"a": 1, "b": 2},
- new_case=False,
- representation="3D_feature_2D_full",
- )
- session.cmd.side_effect = [0] * 7 + [RuntimeError] + [0] * 12
- with pytest.raises(RuntimeError) as exec_info:
- session.load_data(
- data_file="/stairway/to/heaven",
- result_file="/path/do/darkness",
- reader_options={"a": 1, "b": 2},
- new_case=False,
- representation="3D_feature_2D_full",
- )
- assert "Unable to determine file format for /stairway/to/heaven" in str(exec_info)
- session.cmd.side_effect = [0] * 7 + [".encas"] + [0] * 11 + [-1]
- with pytest.raises(RuntimeError) as exec_info:
- session.load_data(
- data_file="/stairway/to/heaven",
- result_file="/path/do/darkness",
- reader_options={"a": 1, "b": 2},
- new_case=False,
- representation="3D_feature_2D_full",
- )
- assert "Unable to load the dataset." in str(exec_info)
-
-
-def test_load_example(mocked_session, mocker):
- session = mocked_session
- mocker.patch.object(session, "cmd")
- session.load_example("large_dataset")
- session.load_example("large_dataset", "www.ansys.com")
-
-
-def test_callbacks(mocked_session, mocker):
- session = mocked_session
- cmd = mocker.patch.object(session, "cmd", return_value=1)
- session._grpc.event_stream_enable = mock.MagicMock("stream")
- session._grpc.prefix = lambda: ""
- session.add_callback(
- "test",
- "vport?w={{WIDTH}}&h={{HEIGHT}}&x={{ORIGINX}}&y={{ORIGINY}}",
- ["a", "b"],
- print,
- )
- assert session._callbacks["vport"] == (1, print)
- with pytest.raises(RuntimeError) as exec_info:
- session.add_callback("test", "vport", ["a", "b"], print)
- assert "A callback for vport already exists"
- session.add_callback("test", "partlist", ["a", "b"], print)
- assert session._callbacks["partlist"] == (1, print)
- session.add_callback("test", "variablelist", ["a", "b"], print, compress=False)
- target = mock.MagicMock("testtarget")
- target.__OBJID__ = 12
- session.add_callback(target, "statelist", ["a", "b"], print, compress=False)
- session.remove_callback("statelist")
- with pytest.raises(RuntimeError) as exec_info:
- session.remove_callback("geomlist")
- assert "A callback for tag 'geomlist' does not exist" in str(exec_info)
- cmd = mock.MagicMock("event_cmd")
- cmd.find = mock.MagicMock("magic find")
- cmd.find.side_effect = [1, 5]
- url = "grpc://abcd1234-5678efgh/vport?enum=1&uid=0"
- session._event_callback(url)
- url = "grpc://abcd1234-5678efgh/?tag&vport?enum=1&uid=0"
- session._event_callback(url)
- session._callbacks.clear()
-
-
-def test_convert_ctor(mocked_session, mocker):
- session = mocked_session
- value = session._convert_ctor("Class: ENS_GLOBALS, CvfObjID: 221, cached:yes")
- assert value == "session.ensight.objs.ENS_GLOBALS(session, 221)"
- cmd = mocker.patch.object(session, "cmd", return_value=0)
- value = session._convert_ctor("Class: ENS_PART, desc: 'Sphere', CvfObjID: 1078, cached:no")
- assert (
- value
- == "session.ensight.objs.ENS_PART_MODEL(session, 1078,attr_id=1610612795, attr_value=0)"
- )
- cmd.return_value = 3
- value = session._convert_ctor("Class: ENS_ANNOT, desc: 'Pressure', CvfObjID: 4761, cached:no")
- assert (
- value
- == "session.ensight.objs.ENS_ANNOT_LGND(session, 4761,attr_id=1610612995, attr_value=3)"
- )
- cmd.return_value = 6
- value = session._convert_ctor("Class: ENS_TOOL, desc: 'Sphere', CvfObjID: 763, cached:no")
- assert (
- value
- == "session.ensight.objs.ENS_TOOL_SPHERE(session, 763,attr_id=1610613035, attr_value=6)"
- )
- session._ensobj_hash = {i: i for i in range(10000000)}
- value = session._convert_ctor("Class: ENS_GLOBALS, CvfObjID: 221, cached:yes")
- assert value == "session.ensight.objs.ENS_GLOBALS(session, 221)"
- session._convert_ctor("test")
- session._convert_ctor("CvfObjID: 221, Class: ENS_GLOBALS, cached:yes")
- session._convert_ctor("CvfObjID: 221, Class: ENS_GLOBALS, cachedcachedcached:yes")
- object = mock.MagicMock("Test")
- object.__OBJID__ = 763
- session.add_ensobj_instance(object)
- assert session.obj_instance(763) == object
- value = session._convert_ctor("Class: ENS_TOOL, desc: 'Sphere', CvfObjID: 763, cached:no")
- assert value == "session.obj_instance(763)"
-
-
-def test_close(mocked_session, mocker):
- session = mocked_session
- session._grpc.shutdown = mock.MagicMock("shutdown")
- session.close()
- session._halt_ensight_on_close = True
- session._launcher = Launcher()
- session = mocked_session
- with pytest.raises(RuntimeError) as exec_info:
- session.close()
- assert "Session not associated with this Launcher" in str(exec_info)
-
-
-def test_render(mocked_session):
- mocked_session.grpc.render = mock.MagicMock("render")
- mocked_session.render(300, 400, 5)
- mocked_session.grpc.render.assert_called_once()
-
-
-def test_geometry(mocked_session):
- mocked_session.grpc.geometry = mock.MagicMock("render")
- mocked_session.geometry()
- mocked_session.grpc.geometry.assert_called_once()
-
-
-def test_properties(mocked_session):
- assert mocked_session.timeout == 120.0
- mocked_session.timeout = 113.5
- assert mocked_session.language == "en"
- assert mocked_session.halt_ensight_on_close is False
- mocked_session._cei_home = "/new/path"
- mocked_session._cei_suffix = "178"
- assert mocked_session.cei_home == "/new/path"
- assert mocked_session.cei_suffix == "178"
- assert mocked_session.jupyter_notebook is False
- mocked_session._jupyter_notebook = True
- assert mocked_session.jupyter_notebook is True
-
-
-def test_help(mocked_session, mocker):
- web = mocker.patch.object(webbrowser, "open")
- mocked_session.help()
- web.assert_called_once_with("https://ensight.docs.pyansys.com/")
-
-
-"""
-def test_close(local_launcher_session) -> None:
- local_launcher_session.close()
- assert local_launcher_session.launcher is None
-"""
+"""Unit tests for session.py"""
+import platform
+from unittest import mock
+import webbrowser
+
+import ansys.pyensight.core
+from ansys.pyensight.core.launcher import Launcher
+import ansys.pyensight.core.renderable
+from ansys.pyensight.core.session import Session # noqa: F401
+import pytest
+
+
+def test_show(mocked_session, mocker):
+ session = mocked_session
+ session.ensight.objs.core.TIMESTEP = 1
+ session.ensight.objs.core.TIMESTEP_LIMITS = [0, 5]
+    variables = mock.MagicMock("EnSightVARS")
+ pressure_mock = mock.MagicMock("Pressure")
+ density_mock = mock.MagicMock("Density")
+ pressure_mock.DESCRIPTION = "Pressure"
+ density_mock.DESCRIPTION = "Density"
+    variables.find = lambda val, attr: [pressure_mock, density_mock]
+    session.ensight.objs.core.VARIABLES = variables
+ filename = f"{session._launcher.session_directory}/remote_filename"
+ mocker.patch.object(
+ ansys.pyensight.core.renderable.Renderable,
+ "_generate_filename",
+ return_value=(f"{filename}", "remote_filename"),
+ )
+ stream_mock = mock.MagicMock("stream")
+ stream_mock.side_effect = lambda *args, **kwargs: 6
+ session.ensight.dsg_new_stream = stream_mock
+ update_mock = mock.MagicMock("update")
+ update_mock.side_effect = lambda *args, **kwargs: True
+ session.ensight.dsg_new_stream = stream_mock
+ session.ensight.dsg_save_update = update_mock
+ session.show()
+ session.show("webgl")
+ session.show("remote")
+ session.show("remote_scene")
+ session.show("deep_pixel")
+ session.show("animation")
+ session._cei_suffix = "232"
+ session.show("sgeo")
+ session._cei_suffix = "211"
+ session.show("sgeo")
+ with pytest.raises(RuntimeError) as exec_info:
+ session.show("3DVRRendering")
+ assert "Unable to generate requested visualization" in str(exec_info)
+ session._html_port = None
+ with pytest.raises(RuntimeError) as exec_info:
+ session.show()
+ assert "No websocketserver has been associated with this Session" in str(exec_info)
+
+
+def test_clone(mocked_session):
+ session = mocked_session
+ cmd = str(session)
+ second_connection = eval(cmd)
+ assert session.secret_key == second_connection.secret_key
+
+
+def test_exec(mocked_session):
+ def function(*args, **kwargs):
+ return True
+
+ session = mocked_session
+ session._cei_suffix = 211
+ fargs = ["1", "2"]
+ fkwargs = {"a": 3, "b": 4}
+ with pytest.raises(RuntimeError):
+ session.exec(function, *fargs, remote=True, **fkwargs)
+ session._cei_suffix = 232
+ session._ensight_python_version("4", "5", "23")
+ with pytest.raises(RuntimeError):
+ session.exec(function, *fargs, remote=True, **fkwargs)
+ session.cmd = lambda *args, **kwargs: "Bob"
+ session._ensight_python_version = platform.python_version_tuple()
+ assert session.exec(function, *fargs, remote=True, **fkwargs) == "Bob"
+ assert session.exec(function, *fargs, remote=False, **fkwargs) is True
+
+
+def test_session_load_data(mocked_session):
+ session = mocked_session
+ session.cmd = lambda *args, **kwargs: 0
+ case = mock.MagicMock("EnSightCase")
+ case.DESCRIPTION = "CurrentCase"
+ session.ensight.objs.core.CURRENTCASE = [case]
+ session.load_data(
+ data_file="/stairway/to/heaven",
+ result_file="/path/do/darkness",
+ file_format=".mobi",
+ reader_options={"a": 1, "b": 2},
+ new_case=False,
+ representation="3D_feature_2D_full",
+ )
+ session.load_data(
+ data_file="/stairway/to/heaven",
+ file_format=".mobi",
+ new_case=False,
+ representation="3D_feature_2D_full",
+ )
+ case = mock.MagicMock("EnSightCase")
+ case.DESCRIPTION = "CurrentCase"
+ case.ACTIVE = 0
+ session.ensight.objs.core.CASES = [case]
+ session.load_data(
+ data_file="/stairway/to/heaven",
+ result_file="/path/do/darkness",
+ file_format=".mobi",
+ reader_options={"a": 1, "b": 2},
+ new_case=True,
+ representation="3D_feature_2D_full",
+ )
+ case.ACTIVE = 1
+ with pytest.raises(RuntimeError):
+ session.load_data(
+ data_file="/stairway/to/heaven",
+ result_file="/path/do/darkness",
+ file_format=".mobi",
+ reader_options={"a": 1, "b": 2},
+ new_case=True,
+ representation="3D_feature_2D_full",
+ )
+ session.cmd = mock.MagicMock("envision_cmd")
+ session.cmd.side_effect = ["envision", 0]
+ session.load_data(
+ data_file="/stairway/to/heaven",
+ result_file="/path/do/darkness",
+ file_format=".mobi",
+ reader_options={"a": 1, "b": 2},
+ new_case=True,
+ representation="3D_feature_2D_full",
+ )
+ session.cmd.side_effect = ["envision", -1]
+ with pytest.raises(RuntimeError):
+ session.load_data(
+ data_file="/stairway/to/heaven",
+ result_file="/path/do/darkness",
+ file_format=".mobi",
+ reader_options={"a": 1, "b": 2},
+ new_case=True,
+ representation="3D_feature_2D_full",
+ )
+ session.cmd = mock.MagicMock("fileformat_cmd")
+ session.cmd.side_effect = [0] * 7 + [".cas"] + [0] * 12
+ session.load_data(
+ data_file="/stairway/to/heaven",
+ result_file="/path/do/darkness",
+ reader_options={"a": 1, "b": 2},
+ new_case=False,
+ representation="3D_feature_2D_full",
+ )
+ session.cmd.side_effect = [0] * 7 + [RuntimeError] + [0] * 12
+ with pytest.raises(RuntimeError) as exec_info:
+ session.load_data(
+ data_file="/stairway/to/heaven",
+ result_file="/path/do/darkness",
+ reader_options={"a": 1, "b": 2},
+ new_case=False,
+ representation="3D_feature_2D_full",
+ )
+ assert "Unable to determine file format for /stairway/to/heaven" in str(exec_info)
+ session.cmd.side_effect = [0] * 7 + [".encas"] + [0] * 11 + [-1]
+ with pytest.raises(RuntimeError) as exec_info:
+ session.load_data(
+ data_file="/stairway/to/heaven",
+ result_file="/path/do/darkness",
+ reader_options={"a": 1, "b": 2},
+ new_case=False,
+ representation="3D_feature_2D_full",
+ )
+ assert "Unable to load the dataset." in str(exec_info)
+
+
+def test_load_example(mocked_session, mocker):
+ session = mocked_session
+ mocker.patch.object(session, "cmd")
+ session.load_example("large_dataset")
+ session.load_example("large_dataset", "www.ansys.com")
+
+
+def test_callbacks(mocked_session, mocker):
+ session = mocked_session
+ cmd = mocker.patch.object(session, "cmd", return_value=1)
+ session._grpc.event_stream_enable = mock.MagicMock("stream")
+ session._grpc.prefix = lambda: ""
+ session.add_callback(
+ "test",
+ "vport?w={{WIDTH}}&h={{HEIGHT}}&x={{ORIGINX}}&y={{ORIGINY}}",
+ ["a", "b"],
+ print,
+ )
+ assert session._callbacks["vport"] == (1, print)
+ with pytest.raises(RuntimeError) as exec_info:
+ session.add_callback("test", "vport", ["a", "b"], print)
+ assert "A callback for vport already exists"
+ session.add_callback("test", "partlist", ["a", "b"], print)
+ assert session._callbacks["partlist"] == (1, print)
+ session.add_callback("test", "variablelist", ["a", "b"], print, compress=False)
+ target = mock.MagicMock("testtarget")
+ target.__OBJID__ = 12
+ session.add_callback(target, "statelist", ["a", "b"], print, compress=False)
+ session.remove_callback("statelist")
+ with pytest.raises(RuntimeError) as exec_info:
+ session.remove_callback("geomlist")
+ assert "A callback for tag 'geomlist' does not exist" in str(exec_info)
+ cmd = mock.MagicMock("event_cmd")
+ cmd.find = mock.MagicMock("magic find")
+ cmd.find.side_effect = [1, 5]
+ url = "grpc://abcd1234-5678efgh/vport?enum=1&uid=0"
+ session._event_callback(url)
+ url = "grpc://abcd1234-5678efgh/?tag&vport?enum=1&uid=0"
+ session._event_callback(url)
+ session._callbacks.clear()
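+
+
+# A minimal sketch (editorial): add_callback() expands {{...}} macros in the
+# tag string when an event fires, so handlers receive a URL whose query part
+# carries the current values. The parser below is a hypothetical handler.
+def _parse_event_url(url):
+    from urllib.parse import parse_qs, urlparse
+    # e.g. "grpc://abcd1234-5678efgh/vport?w=800&h=600&x=0&y=0"
+    parsed = urlparse(url)
+    return parsed.path.lstrip("/"), parse_qs(parsed.query)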
+
+
+def test_convert_ctor(mocked_session, mocker):
+ session = mocked_session
+ value = session._convert_ctor("Class: ENS_GLOBALS, CvfObjID: 221, cached:yes")
+ assert value == "session.ensight.objs.ENS_GLOBALS(session, 221)"
+ cmd = mocker.patch.object(session, "cmd", return_value=0)
+ value = session._convert_ctor("Class: ENS_PART, desc: 'Sphere', CvfObjID: 1078, cached:no")
+ assert (
+ value
+ == "session.ensight.objs.ENS_PART_MODEL(session, 1078,attr_id=1610612795, attr_value=0)"
+ )
+ cmd.return_value = 3
+ value = session._convert_ctor("Class: ENS_ANNOT, desc: 'Pressure', CvfObjID: 4761, cached:no")
+ assert (
+ value
+ == "session.ensight.objs.ENS_ANNOT_LGND(session, 4761,attr_id=1610612995, attr_value=3)"
+ )
+ cmd.return_value = 6
+ value = session._convert_ctor("Class: ENS_TOOL, desc: 'Sphere', CvfObjID: 763, cached:no")
+ assert (
+ value
+ == "session.ensight.objs.ENS_TOOL_SPHERE(session, 763,attr_id=1610613035, attr_value=6)"
+ )
+ session._ensobj_hash = {i: i for i in range(10000000)}
+ value = session._convert_ctor("Class: ENS_GLOBALS, CvfObjID: 221, cached:yes")
+ assert value == "session.ensight.objs.ENS_GLOBALS(session, 221)"
+ session._convert_ctor("test")
+ session._convert_ctor("CvfObjID: 221, Class: ENS_GLOBALS, cached:yes")
+ session._convert_ctor("CvfObjID: 221, Class: ENS_GLOBALS, cachedcachedcached:yes")
+    obj = mock.MagicMock("Test")
+    obj.__OBJID__ = 763
+    session.add_ensobj_instance(obj)
+    assert session.obj_instance(763) == obj
+ value = session._convert_ctor("Class: ENS_TOOL, desc: 'Sphere', CvfObjID: 763, cached:no")
+ assert value == "session.obj_instance(763)"
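+
+
+# A minimal sketch (editorial): _convert_ctor() rewrites the gRPC repr string
+# "Class: <NAME>, CvfObjID: <id>, cached:<yes|no>" into a constructor call; a
+# hypothetical extractor for just the object id could look like this.
+def _objid_from_repr(text):
+    import re
+    match = re.search(r"CvfObjID:\s*(\d+)", text)
+    return int(match.group(1)) if match else None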
+
+
+def test_close(mocked_session, mocker):
+ session = mocked_session
+ session._grpc.shutdown = mock.MagicMock("shutdown")
+ session.close()
+ session._halt_ensight_on_close = True
+ session._launcher = Launcher()
+ session = mocked_session
+ with pytest.raises(RuntimeError) as exec_info:
+ session.close()
+ assert "Session not associated with this Launcher" in str(exec_info)
+
+
+def test_render(mocked_session):
+ mocked_session.grpc.render = mock.MagicMock("render")
+ mocked_session.render(300, 400, 5)
+ mocked_session.grpc.render.assert_called_once()
+
+
+def test_geometry(mocked_session):
+    mocked_session.grpc.geometry = mock.MagicMock("geometry")
+ mocked_session.geometry()
+ mocked_session.grpc.geometry.assert_called_once()
+
+
+def test_properties(mocked_session):
+ assert mocked_session.timeout == 120.0
+ mocked_session.timeout = 113.5
+ assert mocked_session.language == "en"
+ assert mocked_session.halt_ensight_on_close is False
+ mocked_session._cei_home = "/new/path"
+ mocked_session._cei_suffix = "178"
+ assert mocked_session.cei_home == "/new/path"
+ assert mocked_session.cei_suffix == "178"
+ assert mocked_session.jupyter_notebook is False
+ mocked_session._jupyter_notebook = True
+ assert mocked_session.jupyter_notebook is True
+
+
+def test_help(mocked_session, mocker):
+ web = mocker.patch.object(webbrowser, "open")
+ mocked_session.help()
+ web.assert_called_once_with("https://ensight.docs.pyansys.com/")
+
+
+"""
+def test_close(local_launcher_session) -> None:
+ local_launcher_session.close()
+ assert local_launcher_session.launcher is None
+"""