From 83dabcf7ce73031990c60641c5c21c614b2e3e79 Mon Sep 17 00:00:00 2001
From: Dmitriy
Date: Fri, 27 Dec 2024 17:28:01 +0500
Subject: [PATCH 1/9] feat: merge zeebe-grpc to pyzeebe

---
 .gitignore | 2 +
 .pre-commit-config.yaml | 5 +-
 poetry.lock | 668 ++++---
 pyproject.toml | 57 +-
 pyzeebe/grpc_internals/zeebe_adapter_base.py | 2 +-
 pyzeebe/grpc_internals/zeebe_job_adapter.py | 14 +-
 .../grpc_internals/zeebe_message_adapter.py | 6 +-
 .../grpc_internals/zeebe_process_adapter.py | 39 +-
 pyzeebe/proto/__init__.py | 2 +
 pyzeebe/proto/gateway_pb2.py | 171 ++
 pyzeebe/proto/gateway_pb2.pyi | 1687 +++++++++++++++++
 pyzeebe/proto/gateway_pb2_grpc.py | 1178 ++++++++++++
 pyzeebe/proto/gateway_pb2_grpc.pyi | 1104 +++++++++++
 tests/unit/conftest.py | 4 +-
 tests/unit/utils/gateway_mock.py | 10 +-
 update_proto.py | 36 +
 16 files changed, 4651 insertions(+), 334 deletions(-)
 create mode 100644 pyzeebe/proto/__init__.py
 create mode 100644 pyzeebe/proto/gateway_pb2.py
 create mode 100644 pyzeebe/proto/gateway_pb2.pyi
 create mode 100644 pyzeebe/proto/gateway_pb2_grpc.py
 create mode 100644 pyzeebe/proto/gateway_pb2_grpc.pyi
 create mode 100644 update_proto.py

diff --git a/.gitignore b/.gitignore
index ac3aa17f..8da334a9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -145,3 +145,5 @@ cython_debug/

 # pycharm
 .idea
+
+zeebe_grpc/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a9c1bf8c..7406fedc 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -13,6 +13,7 @@ repos:
         entry: black
         language: python
         files: \.py$
+        exclude: ^(.*pb2.*|.*\.pyi)$
   - repo: local
     hooks:
       - id: isort
@@ -20,6 +21,7 @@ repos:
         entry: isort
         language: python
         files: \.py$
+        exclude: ^(.*pb2.*|.*\.pyi)$
   - repo: local
     hooks:
       - id: mypy
@@ -32,10 +34,11 @@ repos:
     hooks:
       - id: pyupgrade
         args: [--py39-plus]
+        exclude: ^(.*pb2.*|.*\.pyi)$
   - repo: https://github.com/charliermarsh/ruff-pre-commit
     rev: v0.6.9
     hooks:
       - id: ruff
         args:
           - --fix
-        exclude: ^(tests/.*|examples/.*|docs/.*)$
+        exclude: ^(tests/.*|examples/.*|docs/.*|.*pb2.*|.*\.pyi)$
diff --git a/poetry.lock b/poetry.lock
index 9bf20deb..45769c06 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.

 [[package]]
 name = "alabaster"
@@ -109,123 +109,125 @@ uvloop = ["uvloop (>=0.15.2)"]

 [[package]]
 name = "certifi"
-version = "2024.8.30"
+version = "2024.12.14"
 description = "Python package for providing Mozilla's CA Bundle."
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
-    {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
+    {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"},
+    {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"},
 ]

 [[package]]
 name = "charset-normalizer"
-version = "3.3.2"
+version = "3.4.1"
 description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +python-versions = ">=3.7" +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = 
"charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = 
"charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = 
"charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -350,13 +352,13 @@ files = [ [[package]] name = "docutils" -version = "0.20.1" +version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] [[package]] @@ -375,70 +377,139 @@ test = ["pytest (>=6)"] [[package]] name = "grpcio" -version = "1.66.2" +version = "1.68.1" description = "HTTP/2-based RPC 
framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"}, - {file = "grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73"}, - {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf"}, - {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50"}, - {file = "grpcio-1.66.2-cp310-cp310-win32.whl", hash = "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39"}, - {file = "grpcio-1.66.2-cp310-cp310-win_amd64.whl", hash = "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249"}, - {file = "grpcio-1.66.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8"}, - {file = "grpcio-1.66.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a"}, - {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae"}, - {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01"}, - {file = "grpcio-1.66.2-cp311-cp311-win32.whl", hash = "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8"}, - {file = "grpcio-1.66.2-cp311-cp311-win_amd64.whl", hash = "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d"}, - {file = "grpcio-1.66.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf"}, - {file = "grpcio-1.66.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd"}, - {file = 
"grpcio-1.66.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee"}, - {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c"}, - {file = "grpcio-1.66.2-cp312-cp312-win32.whl", hash = "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453"}, - {file = "grpcio-1.66.2-cp312-cp312-win_amd64.whl", hash = "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679"}, - {file = "grpcio-1.66.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d"}, - {file = "grpcio-1.66.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46"}, - {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a"}, - {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b"}, - {file = "grpcio-1.66.2-cp313-cp313-win32.whl", hash = "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75"}, - {file = "grpcio-1.66.2-cp313-cp313-win_amd64.whl", hash = "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf"}, - {file = "grpcio-1.66.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3"}, - {file = "grpcio-1.66.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd"}, - {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8"}, - {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec"}, - {file = "grpcio-1.66.2-cp38-cp38-win32.whl", hash = "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3"}, - {file = "grpcio-1.66.2-cp38-cp38-win_amd64.whl", hash = "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c"}, - {file = "grpcio-1.66.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d"}, - {file = "grpcio-1.66.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = 
"sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc"}, - {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e"}, - {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e"}, - {file = "grpcio-1.66.2-cp39-cp39-win32.whl", hash = "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7"}, - {file = "grpcio-1.66.2-cp39-cp39-win_amd64.whl", hash = "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987"}, - {file = "grpcio-1.66.2.tar.gz", hash = "sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231"}, + {file = "grpcio-1.68.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:d35740e3f45f60f3c37b1e6f2f4702c23867b9ce21c6410254c9c682237da68d"}, + {file = "grpcio-1.68.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:d99abcd61760ebb34bdff37e5a3ba333c5cc09feda8c1ad42547bea0416ada78"}, + {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:f8261fa2a5f679abeb2a0a93ad056d765cdca1c47745eda3f2d87f874ff4b8c9"}, + {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0feb02205a27caca128627bd1df4ee7212db051019a9afa76f4bb6a1a80ca95e"}, + {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:919d7f18f63bcad3a0f81146188e90274fde800a94e35d42ffe9eadf6a9a6330"}, + {file = "grpcio-1.68.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:963cc8d7d79b12c56008aabd8b457f400952dbea8997dd185f155e2f228db079"}, + {file = "grpcio-1.68.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ccf2ebd2de2d6661e2520dae293298a3803a98ebfc099275f113ce1f6c2a80f1"}, + {file = "grpcio-1.68.1-cp310-cp310-win32.whl", hash = "sha256:2cc1fd04af8399971bcd4f43bd98c22d01029ea2e56e69c34daf2bf8470e47f5"}, + {file = "grpcio-1.68.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2e743e51cb964b4975de572aa8fb95b633f496f9fcb5e257893df3be854746"}, + {file = "grpcio-1.68.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:55857c71641064f01ff0541a1776bfe04a59db5558e82897d35a7793e525774c"}, + {file = "grpcio-1.68.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4b177f5547f1b995826ef529d2eef89cca2f830dd8b2c99ffd5fde4da734ba73"}, + {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:3522c77d7e6606d6665ec8d50e867f13f946a4e00c7df46768f1c85089eae515"}, + {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d1fae6bbf0816415b81db1e82fb3bf56f7857273c84dcbe68cbe046e58e1ccd"}, + {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:298ee7f80e26f9483f0b6f94cc0a046caf54400a11b644713bb5b3d8eb387600"}, + {file = "grpcio-1.68.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cbb5780e2e740b6b4f2d208e90453591036ff80c02cc605fea1af8e6fc6b1bbe"}, + {file = "grpcio-1.68.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ddda1aa22495d8acd9dfbafff2866438d12faec4d024ebc2e656784d96328ad0"}, + {file = "grpcio-1.68.1-cp311-cp311-win32.whl", hash = 
"sha256:b33bd114fa5a83f03ec6b7b262ef9f5cac549d4126f1dc702078767b10c46ed9"}, + {file = "grpcio-1.68.1-cp311-cp311-win_amd64.whl", hash = "sha256:7f20ebec257af55694d8f993e162ddf0d36bd82d4e57f74b31c67b3c6d63d8b2"}, + {file = "grpcio-1.68.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:8829924fffb25386995a31998ccbbeaa7367223e647e0122043dfc485a87c666"}, + {file = "grpcio-1.68.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3aed6544e4d523cd6b3119b0916cef3d15ef2da51e088211e4d1eb91a6c7f4f1"}, + {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:4efac5481c696d5cb124ff1c119a78bddbfdd13fc499e3bc0ca81e95fc573684"}, + {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ab2d912ca39c51f46baf2a0d92aa265aa96b2443266fc50d234fa88bf877d8e"}, + {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c87ce2a97434dffe7327a4071839ab8e8bffd0054cc74cbe971fba98aedd60"}, + {file = "grpcio-1.68.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e4842e4872ae4ae0f5497bf60a0498fa778c192cc7a9e87877abd2814aca9475"}, + {file = "grpcio-1.68.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:255b1635b0ed81e9f91da4fcc8d43b7ea5520090b9a9ad9340d147066d1d3613"}, + {file = "grpcio-1.68.1-cp312-cp312-win32.whl", hash = "sha256:7dfc914cc31c906297b30463dde0b9be48e36939575eaf2a0a22a8096e69afe5"}, + {file = "grpcio-1.68.1-cp312-cp312-win_amd64.whl", hash = "sha256:a0c8ddabef9c8f41617f213e527254c41e8b96ea9d387c632af878d05db9229c"}, + {file = "grpcio-1.68.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:a47faedc9ea2e7a3b6569795c040aae5895a19dde0c728a48d3c5d7995fda385"}, + {file = "grpcio-1.68.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:390eee4225a661c5cd133c09f5da1ee3c84498dc265fd292a6912b65c421c78c"}, + {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:66a24f3d45c33550703f0abb8b656515b0ab777970fa275693a2f6dc8e35f1c1"}, + {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c08079b4934b0bf0a8847f42c197b1d12cba6495a3d43febd7e99ecd1cdc8d54"}, + {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8720c25cd9ac25dd04ee02b69256d0ce35bf8a0f29e20577427355272230965a"}, + {file = "grpcio-1.68.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:04cfd68bf4f38f5bb959ee2361a7546916bd9a50f78617a346b3aeb2b42e2161"}, + {file = "grpcio-1.68.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c28848761a6520c5c6071d2904a18d339a796ebe6b800adc8b3f474c5ce3c3ad"}, + {file = "grpcio-1.68.1-cp313-cp313-win32.whl", hash = "sha256:77d65165fc35cff6e954e7fd4229e05ec76102d4406d4576528d3a3635fc6172"}, + {file = "grpcio-1.68.1-cp313-cp313-win_amd64.whl", hash = "sha256:a8040f85dcb9830d8bbb033ae66d272614cec6faceee88d37a88a9bd1a7a704e"}, + {file = "grpcio-1.68.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:eeb38ff04ab6e5756a2aef6ad8d94e89bb4a51ef96e20f45c44ba190fa0bcaad"}, + {file = "grpcio-1.68.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a3869a6661ec8f81d93f4597da50336718bde9eb13267a699ac7e0a1d6d0bea"}, + {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2c4cec6177bf325eb6faa6bd834d2ff6aa8bb3b29012cceb4937b86f8b74323c"}, + {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12941d533f3cd45d46f202e3667be8ebf6bcb3573629c7ec12c3e211d99cfccf"}, + {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:80af6f1e69c5e68a2be529990684abdd31ed6622e988bf18850075c81bb1ad6e"}, + {file = "grpcio-1.68.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e8dbe3e00771bfe3d04feed8210fc6617006d06d9a2679b74605b9fed3e8362c"}, + {file = "grpcio-1.68.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:83bbf5807dc3ee94ce1de2dfe8a356e1d74101e4b9d7aa8c720cc4818a34aded"}, + {file = "grpcio-1.68.1-cp38-cp38-win32.whl", hash = "sha256:8cb620037a2fd9eeee97b4531880e439ebfcd6d7d78f2e7dcc3726428ab5ef63"}, + {file = "grpcio-1.68.1-cp38-cp38-win_amd64.whl", hash = "sha256:52fbf85aa71263380d330f4fce9f013c0798242e31ede05fcee7fbe40ccfc20d"}, + {file = "grpcio-1.68.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:cb400138e73969eb5e0535d1d06cae6a6f7a15f2cc74add320e2130b8179211a"}, + {file = "grpcio-1.68.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a1b988b40f2fd9de5c820f3a701a43339d8dcf2cb2f1ca137e2c02671cc83ac1"}, + {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:96f473cdacfdd506008a5d7579c9f6a7ff245a9ade92c3c0265eb76cc591914f"}, + {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:37ea3be171f3cf3e7b7e412a98b77685eba9d4fd67421f4a34686a63a65d99f9"}, + {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ceb56c4285754e33bb3c2fa777d055e96e6932351a3082ce3559be47f8024f0"}, + {file = "grpcio-1.68.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dffd29a2961f3263a16d73945b57cd44a8fd0b235740cb14056f0612329b345e"}, + {file = "grpcio-1.68.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:025f790c056815b3bf53da850dd70ebb849fd755a4b1ac822cb65cd631e37d43"}, + {file = "grpcio-1.68.1-cp39-cp39-win32.whl", hash = "sha256:1098f03dedc3b9810810568060dea4ac0822b4062f537b0f53aa015269be0a76"}, + {file = "grpcio-1.68.1-cp39-cp39-win_amd64.whl", hash = "sha256:334ab917792904245a028f10e803fcd5b6f36a7b2173a820c0b5b076555825e1"}, + {file = "grpcio-1.68.1.tar.gz", hash = "sha256:44a8502dd5de653ae6a73e2de50a401d84184f0331d0ac3daeb044e66d5c5054"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.66.2)"] +protobuf = ["grpcio-tools (>=1.68.1)"] + +[[package]] +name = "grpcio-tools" +version = "1.68.1" +description = "Protobuf code generator for gRPC" +optional = false +python-versions = ">=3.8" +files = [ + {file = "grpcio_tools-1.68.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:3a93ea324c5cbccdff55110777410d026dc1e69c3d47684ac97f57f7a77b9c70"}, + {file = "grpcio_tools-1.68.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:94cbfb9482cfd7bdb5f081b94fa137a16e4fe031daa57a2cd85d8cb4e18dce25"}, + {file = "grpcio_tools-1.68.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bbe7e1641859c858d0f4631f7f7c09e7302433f1aa037028d2419c1410945fac"}, + {file = "grpcio_tools-1.68.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55c0f91c4294c5807796ed26af42509f3d68497942a92d9ee9f43b08768d6c3c"}, + {file = "grpcio_tools-1.68.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85adc798fd3b57ab3e998b5897c5daab6840211ac16cdf3ba99901cb9b90094a"}, + {file = "grpcio_tools-1.68.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f0bdccb00709bf6180a80a353a99fa844cc0bb2d450cdf7fc6ab22c988bb6b4c"}, + {file = "grpcio_tools-1.68.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2465e4d347b35dc0c007e074c79d5ded0a89c3aa26651e690f83593e0cc28af8"}, + {file = "grpcio_tools-1.68.1-cp310-cp310-win32.whl", hash = 
"sha256:83c124a1776c1027da7d36584c8044cfed7a9f10e90f08dafde8d2a4cb822319"}, + {file = "grpcio_tools-1.68.1-cp310-cp310-win_amd64.whl", hash = "sha256:283fd1359d619d42c3346f1d8f0a70636a036a421178803a1ab8083fa4228a38"}, + {file = "grpcio_tools-1.68.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:02f04de42834129eb54bb12469160ab631a0395d6a2b77975381c02b994086c3"}, + {file = "grpcio_tools-1.68.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:92b6aab37095879ef9ee428dd171740ff794f4c7a66bc1cc7280cd0051f8cd96"}, + {file = "grpcio_tools-1.68.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:1f0ac6ac5e1e33b998511981b3ef36489501833413354f3597b97a3452d7d7ba"}, + {file = "grpcio_tools-1.68.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28e0bca3a262af86557f30e30ddf2fadc2324ee05cd7352716924cc7f83541f1"}, + {file = "grpcio_tools-1.68.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12239cf5ca6b7b4937103953cf35c49683d935e32e98596fe52dd35168aa86e6"}, + {file = "grpcio_tools-1.68.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8e48d8884fcf6b182c73d0560a183404458e30a0f479918b88ca8fbd48b8b05f"}, + {file = "grpcio_tools-1.68.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e4e8059469847441855322da16fa2c0f9787b996c237a98778210e31188a8652"}, + {file = "grpcio_tools-1.68.1-cp311-cp311-win32.whl", hash = "sha256:21815d54a83effbd2600d16382a7897298cfeffe578557fc9a47b642cc8ddafe"}, + {file = "grpcio_tools-1.68.1-cp311-cp311-win_amd64.whl", hash = "sha256:2114528723d9f12d3e24af3d433ec6f140deea1dd64d3bb1b4ebced217f1867c"}, + {file = "grpcio_tools-1.68.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:d67a9d1ad22ff0d22715dba1d5f8f23ebd47cea84ccd20c90bf4690d988adc5b"}, + {file = "grpcio_tools-1.68.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7f1e704ff73eb01afac51b63b74868a35aaa5d6f791fc63bd41af44a51aa232"}, + {file = "grpcio_tools-1.68.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:e9f69988bd77db014795511c498e89a0db24bd47877e65921364114f88de3bee"}, + {file = "grpcio_tools-1.68.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8585ec7d11fcc2bb635b39605a4466ca9fa28dbae0c184fe58f456da72cb9031"}, + {file = "grpcio_tools-1.68.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c81d0be6c46fcbcd2cd126804060a95531cdf6d779436b2fbc68c8b4a7db2dc1"}, + {file = "grpcio_tools-1.68.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6efdb02e75baf289935b5dad665f0e0f7c3311d86aae0cd2c709e2a8a34bb620"}, + {file = "grpcio_tools-1.68.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ea367639e771e5a05f7320eb4ae2b27e09d2ec3baeae9819d1c590cc7eaaa08"}, + {file = "grpcio_tools-1.68.1-cp312-cp312-win32.whl", hash = "sha256:a5b1021c9942bba7eca1555061e2d308f506198088a3a539fcb3633499c6635f"}, + {file = "grpcio_tools-1.68.1-cp312-cp312-win_amd64.whl", hash = "sha256:315ad9c28940c95e85e57aeca309d298113175c2d5e8221501a05a51072f5477"}, + {file = "grpcio_tools-1.68.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:67e49b5ede0cc8a0f988f41f7b72f6bc03180aecdb5213bd985bc1bbfd9ffdac"}, + {file = "grpcio_tools-1.68.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:b78e38f953062d45ff92ec940da292dc9bfbf26de492c8dc44e12b13493a8e80"}, + {file = "grpcio_tools-1.68.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:8ebe9df5bab4121e8f51e013a379be2027179a0c8013e89d686a1e5800e9c205"}, + {file = "grpcio_tools-1.68.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:be553e3ea7447ed9e2e2d089f3b0a77000e86d2681b3c77498c98dddffc62d22"}, + {file = "grpcio_tools-1.68.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4877f3eabb6185b5691f5218fedc86a84a833734847a294048862ec910a2854"}, + {file = "grpcio_tools-1.68.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:b98173e536e8f2779eff84a03409cca6497dc1fad3d10a47c8d881b2cb36259b"}, + {file = "grpcio_tools-1.68.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:5b64035dcd0df70acf3af972c3f103b0ce141d29732fd94eaa8b38cf7c8e62fe"}, + {file = "grpcio_tools-1.68.1-cp313-cp313-win32.whl", hash = "sha256:573f3ed3276df20c308797ae834ac6c5595b1dd2953b243eedadbcd986a287d7"}, + {file = "grpcio_tools-1.68.1-cp313-cp313-win_amd64.whl", hash = "sha256:c4539c6231015c40db879fbc0feaaf03adb4275c1bd2b4dd26e2323f2a13655a"}, + {file = "grpcio_tools-1.68.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:3e0fc6dbc64efc7bb0fe23ce46587e0cbeb512142d543834c2bc9100c8f255ff"}, + {file = "grpcio_tools-1.68.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79337ac1b19610b99f93aa52ae05e5fbf96adbe60d54ecf192af44cc69118d19"}, + {file = "grpcio_tools-1.68.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:eb7cae5f0232aba9057f26a45ef6b0a5633d36627fe49442c0985b6f44b67822"}, + {file = "grpcio_tools-1.68.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25fe1bcbb558a477c525bec9d67e1469d47dddc9430e6e5c0d11f67f08cfc810"}, + {file = "grpcio_tools-1.68.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce901f42037d1ebc7724e721180d03e33163d5acf0a62c52728e6c36117c5e9"}, + {file = "grpcio_tools-1.68.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3c213c2208c42dce2a5fc7cfb2b952a3c22ef019812f9f27bd54c6e00ee0720e"}, + {file = "grpcio_tools-1.68.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff6ae5031a03ab90e9c508d12914438b73efd44b5eed9946bf8974c453d0ed57"}, + {file = "grpcio_tools-1.68.1-cp38-cp38-win32.whl", hash = "sha256:41e631e72b6b94eb6f3d9cd533c682249f82fc58007c7561f6e521b884a6347e"}, + {file = "grpcio_tools-1.68.1-cp38-cp38-win_amd64.whl", hash = "sha256:69fb93761f116a5b063fb4f6150023c4d785304b37adcebf561b95018f9b40ae"}, + {file = "grpcio_tools-1.68.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:31c703dba465956acb83adc105d61297459d0d14b512441d827f6c040cbffe2b"}, + {file = "grpcio_tools-1.68.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1093f441751689d225916e3fe02daf98d2becab688b9e167bd2c38454ec50906"}, + {file = "grpcio_tools-1.68.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:3543b9205e5b88d2280493aa9b55d35ce9cc45b7a0891c9d84c200652802e22a"}, + {file = "grpcio_tools-1.68.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79d575cc5a522b9920d9a07387976fc02d162bdf97ba51cf91fabdca8dfdb491"}, + {file = "grpcio_tools-1.68.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d546e4a506288d6227acc0eb625039c5e1ad96218c8cfe9ecf661a41e15e442e"}, + {file = "grpcio_tools-1.68.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:aced9c7a4edbf6eff73720bfa6fefd9053ae294535a488dfb92a372913eda10d"}, + {file = "grpcio_tools-1.68.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3c08d1a244b5025ba3f8ef81d0885b431b93cc20bc4560add4cdfcf38c1bfad"}, + {file = "grpcio_tools-1.68.1-cp39-cp39-win32.whl", hash = "sha256:049f05a3f227e9f696059a20b2858e6d7c1cd6037d8471306d7ab7627b1a4ce4"}, + {file = "grpcio_tools-1.68.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:4c3599c75b1157e6bda24cdbdadb023bf0fe1085aa1e0047a1f35a8778f9b56e"}, + {file = "grpcio_tools-1.68.1.tar.gz", hash = "sha256:2413a17ad16c9c821b36e4a67fc64c37b9e4636ab1c3a07778018801378739ba"}, +] + +[package.dependencies] +grpcio = ">=1.68.1" +protobuf = ">=5.26.1,<6.0dev" +setuptools = "*" [[package]] name = "idna" @@ -532,72 +603,72 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "markupsafe" -version = "3.0.0" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" files = [ - {file = "MarkupSafe-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:380faf314c3c84c1682ca672e6280c6c59e92d0bc13dc71758ffa2de3cd4e252"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ee9790be6f62121c4c58bbced387b0965ab7bffeecb4e17cc42ef290784e363"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ddf5cb8e9c00d9bf8b0c75949fb3ff9ea2096ba531693e2e87336d197fdb908"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b36473a2d3e882d1873ea906ce54408b9588dc2c65989664e6e7f5a2de353d7"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dba0f83119b9514bc37272ad012f0cc03f0805cc6a2bea7244e19250ac8ff29f"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:409535e0521c4630d5b5a1bf284e9d3c76d2fc2f153ebb12cf3827797798cc99"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64a7c7856c3a409011139b17d137c2924df4318dab91ee0530800819617c4381"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4deea1d9169578917d1f35cdb581bc7bab56a7e8c5be2633bd1b9549c3c22a01"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-win32.whl", hash = "sha256:3cd0bba31d484fe9b9d77698ddb67c978704603dc10cdc905512af308cfcca6b"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:4ca04c60006867610a06575b46941ae616b19da0adc85b9f8f3d9cbd7a3da385"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e64b390a306f9e849ee809f92af6a52cda41741c914358e0e9f8499d03741526"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c524203207f5b569df06c96dafdc337228921ee8c3cc5f6e891d024c6595352"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c409691696bec2b5e5c9efd9593c99025bf2f317380bf0d993ee0213516d908a"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64f7d04410be600aa5ec0626d73d43e68a51c86500ce12917e10fd013e258df5"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:105ada43a61af22acb8774514c51900dc820c481cc5ba53f17c09d294d9c07ca"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a5fd5500d4e4f7cc88d8c0f2e45126c4307ed31e08f8ec521474f2fd99d35ac3"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25396abd52b16900932e05b7104bcdc640a4d96c914f39c3b984e5a17b01fba0"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3efde9a8c56c3b6e5f3fa4baea828f8184970c7c78480fedb620d804b1c31e5c"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-win32.whl", hash = 
"sha256:12ddac720b8965332d36196f6f83477c6351ba6a25d4aff91e30708c729350d7"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:658fdf6022740896c403d45148bf0c36978c6b48c9ef8b1f8d0c7a11b6cdea86"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d261ec38b8a99a39b62e0119ed47fe3b62f7691c500bc1e815265adc016438c1"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e363440c8534bf2f2ef1b8fdc02037eb5fff8fce2a558519b22d6a3a38b3ec5e"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7835de4c56066e096407a1852e5561f6033786dd987fa90dc384e45b9bd21295"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6cc46a27d904c9be5732029769acf4b0af69345172ed1ef6d4db0c023ff603b"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0411641d31aa6f7f0cc13f0f18b63b8dc08da5f3a7505972a42ab059f479ba3"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b2a7afd24d408b907672015555bc10be2382e6c5f62a488e2d452da670bbd389"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c8ab7efeff1884c5da8e18f743b667215300e09043820d11723718de0b7db934"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8219e2207f6c188d15614ea043636c2b36d2d79bf853639c124a179412325a13"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-win32.whl", hash = "sha256:59420b5a9a5d3fee483a32adb56d7369ae0d630798da056001be1e9f674f3aa6"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:7ed789d0f7f11fcf118cf0acb378743dfdd4215d7f7d18837c88171405c9a452"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:27d6a73682b99568916c54a4bfced40e7d871ba685b580ea04bbd2e405dfd4c5"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:494a64efc535e147fcc713dba58eecfce3a79f1e93ebe81995b387f5cd9bc2e1"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5243044a927e8a6bb28517838662a019cd7f73d7f106bbb37ab5e7fa8451a92"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63dae84964a9a3d2610808cee038f435d9a111620c37ccf872c2fcaeca6865b3"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcbee57fedc9b2182c54ffc1c5eed316c3da8bbfeda8009e1b5d7220199d15da"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f846fd7c241e5bd4161e2a483663eb66e4d8e12130fcdc052f310f388f1d61c6"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:678fbceb202382aae42c1f0cd9f56b776bc20a58ae5b553ee1fe6b802983a1d6"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bd9b8e458e2bab52f9ad3ab5dc8b689a3c84b12b2a2f64cd9a0dfe209fb6b42f"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-win32.whl", hash = "sha256:1fd02f47596e00a372f5b4af2b4c45f528bade65c66dfcbc6e1ea1bfda758e98"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:b94bec9eda10111ec7102ef909eca4f3c2df979643924bfe58375f560713a7d1"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:509c424069dd037d078925b6815fc56b7271f3aaec471e55e6fa513b0a80d2aa"}, - {file = 
"MarkupSafe-3.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:81be2c0084d8c69e97e3c5d73ce9e2a6e523556f2a19c4e195c09d499be2f808"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b43ac1eb9f91e0c14aac1d2ef0f76bc7b9ceea51de47536f61268191adf52ad7"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b231255770723f1e125d63c14269bcd8b8136ecfb620b9a18c0297e046d0736"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c182d45600556917f811aa019d834a89fe4b6f6255da2fd0bdcf80e970f95918"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f91c90f8f3bf436f81c12eeb4d79f9ddd263c71125e6ad71341906832a34386"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a7171d2b869e9be238ea318c196baf58fbf272704e9c1cd4be8c380eea963342"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cb244adf2499aa37d5dc43431990c7f0b632d841af66a51d22bd89c437b60264"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-win32.whl", hash = "sha256:96e3ed550600185d34429477f1176cedea8293fa40e47fe37a05751bcb64c997"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:1d151b9cf3307e259b749125a5a08c030ba15a8f1d567ca5bfb0e92f35e761f5"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:23efb2be7221105c8eb0e905433414d2439cb0a8c5d5ca081c1c72acef0f5613"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81ee9c967956b9ea39b3a5270b7cb1740928d205b0dc72629164ce621b4debf9"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5509a8373fed30b978557890a226c3d30569746c565b9daba69df80c160365a5"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1c13c6c908811f867a8e9e66efb2d6c03d1cdd83e92788fe97f693c457dc44f"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7e63d1977d3806ce0a1a3e0099b089f61abdede5238ca6a3f3bf8877b46d095"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d2c099be5274847d606574234e494f23a359e829ba337ea9037c3a72b0851942"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e042ccf8fe5bf8b6a4b38b3f7d618eb10ea20402b0c9f4add9293408de447974"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:98fb3a2bf525ad66db96745707b93ba0f78928b7a1cb2f1cb4b143bc7e2ba3b3"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-win32.whl", hash = "sha256:a80c6740e1bfbe50cea7cbf74f48823bb57bd59d914ee22ff8a81963b08e62d2"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:5d207ff5cceef77796f8aacd44263266248cf1fbc601441524d7835613f8abec"}, - {file = "markupsafe-3.0.0.tar.gz", hash = "sha256:03ff62dea2fef3eadf2f1853bc6332bcb0458d9608b11dfb1cd5aeda1c178ea6"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] @@ -675,6 +746,21 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "mypy-protobuf" +version = "3.6.0" +description = "Generate mypy stub files from protobuf specs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-protobuf-3.6.0.tar.gz", hash = "sha256:02f242eb3409f66889f2b1a3aa58356ec4d909cdd0f93115622e9e70366eca3c"}, + {file = "mypy_protobuf-3.6.0-py3-none-any.whl", hash = "sha256:56176e4d569070e7350ea620262478b49b7efceba4103d468448f1d21492fd6c"}, +] + +[package.dependencies] +protobuf = ">=4.25.3" +types-protobuf = ">=4.24" + [[package]] name = "oauthlib" version = "3.2.2" @@ -693,13 +779,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = 
"sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -746,22 +832,22 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "protobuf" -version = "4.25.5" +version = "5.29.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"}, - {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"}, - {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"}, - {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"}, - {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"}, - {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"}, - {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"}, - {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"}, - {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"}, - {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"}, - {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"}, + {file = "protobuf-5.29.2-cp310-abi3-win32.whl", hash = "sha256:c12ba8249f5624300cf51c3d0bfe5be71a60c63e4dcf51ffe9a68771d958c851"}, + {file = "protobuf-5.29.2-cp310-abi3-win_amd64.whl", hash = "sha256:842de6d9241134a973aab719ab42b008a18a90f9f07f06ba480df268f86432f9"}, + {file = "protobuf-5.29.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a0c53d78383c851bfa97eb42e3703aefdc96d2036a41482ffd55dc5f529466eb"}, + {file = "protobuf-5.29.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:494229ecd8c9009dd71eda5fd57528395d1eacdf307dbece6c12ad0dd09e912e"}, + {file = "protobuf-5.29.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b6b0d416bbbb9d4fbf9d0561dbfc4e324fd522f61f7af0fe0f282ab67b22477e"}, + {file = "protobuf-5.29.2-cp38-cp38-win32.whl", hash = "sha256:e621a98c0201a7c8afe89d9646859859be97cb22b8bf1d8eacfd90d5bda2eb19"}, + {file = "protobuf-5.29.2-cp38-cp38-win_amd64.whl", hash = "sha256:13d6d617a2a9e0e82a88113d7191a1baa1e42c2cc6f5f1398d3b054c8e7e714a"}, + {file = "protobuf-5.29.2-cp39-cp39-win32.whl", hash = "sha256:36000f97ea1e76e8398a3f02936aac2a5d2b111aae9920ec1b769fc4a222c4d9"}, + {file = "protobuf-5.29.2-cp39-cp39-win_amd64.whl", hash = "sha256:2d2e674c58a06311c8e99e74be43e7f3a8d1e2b2fdf845eaa347fbd866f23355"}, + {file = "protobuf-5.29.2-py3-none-any.whl", hash = "sha256:fde4554c0e578a5a0bcc9a276339594848d1e89f9ea47b4427c80e5d72f90181"}, + {file = "protobuf-5.29.2.tar.gz", hash = "sha256:b2cc8e8bb7c9326996f0e160137b0861f1a82162502658df2951209d0cb0309e"}, ] [[package]] @@ -999,6 +1085,26 @@ urllib3 = ">=1.25.10,<3.0" [package.extras] tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] 
+[[package]] +name = "setuptools" +version = "75.6.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +files = [ + {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, + {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] + [[package]] name = "sniffio" version = "1.3.1" @@ -1186,13 +1292,43 @@ test = ["pytest"] [[package]] name = "tomli" -version = "2.0.2" +version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, - {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = 
"sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] @@ -1217,15 +1353,26 @@ files = [ {file = 
"types_oauthlib-3.2.0.20240806-py3-none-any.whl", hash = "sha256:581bb8e194700d16ae1f0b62a6039261ed1afd0b88e78782e1c48f6507c52f34"}, ] +[[package]] +name = "types-protobuf" +version = "5.29.1.20241207" +description = "Typing stubs for protobuf" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types_protobuf-5.29.1.20241207-py3-none-any.whl", hash = "sha256:92893c42083e9b718c678badc0af7a9a1307b92afe1599e5cba5f3d35b668b2f"}, + {file = "types_protobuf-5.29.1.20241207.tar.gz", hash = "sha256:2ebcadb8ab3ef2e3e2f067e0882906d64ba0dc65fc5b0fd7a8b692315b4a0be9"}, +] + [[package]] name = "types-requests" -version = "2.32.0.20240914" +version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.32.0.20240914.tar.gz", hash = "sha256:2850e178db3919d9bf809e434eef65ba49d0e7e33ac92d588f4a5e295fffd405"}, - {file = "types_requests-2.32.0.20240914-py3-none-any.whl", hash = "sha256:59c2f673eb55f32a99b2894faf6020e1a9f4a402ad0f192bfee0b64469054310"}, + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, ] [package.dependencies] @@ -1259,13 +1406,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.3" +version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] @@ -1274,30 +1421,15 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "zeebe-grpc" -version = "8.4.13" -description = "zeebe Python gRPC Gateway" -optional = false -python-versions = "*" -files = [ - {file = "zeebe_grpc-8.4.13-py3-none-any.whl", hash = "sha256:eeff91bb9b1cbb4844590ea0381ee5f9bfc87d2848d95d8fdfd3de693706e6ca"}, - {file = "zeebe_grpc-8.4.13.tar.gz", hash = "sha256:6036494373e06c1fe31d90a335d13ed60b7d0c8c2af2b46b31440ab5259c29cc"}, -] - -[package.dependencies] -grpcio = ">=1.49,<2.0" -protobuf = ">=4.21,<5.0" - [[package]] name = "zipp" -version = "3.20.2" +version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, - {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] @@ -1311,4 +1443,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = 
"^3.9" -content-hash = "e363708bb9784779feaaa6e3d3929b571119b18eed4938fd12aadec02366f96b" +content-hash = "47a7b6e10e9e7358786ba0ec84f862d5a9276e7879a8f921d5d45ca7af6f3e47" diff --git a/pyproject.toml b/pyproject.toml index 0231e720..ddadcd1d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,9 +3,7 @@ name = "pyzeebe" version = "4.2.0" description = "Zeebe client api" authors = ["Jonatan Martens "] -maintainers = [ - "Dmitriy ", -] +maintainers = ["Dmitriy "] license = "MIT" readme = "README.md" homepage = "https://github.com/camunda-community-hub/pyzeebe" @@ -15,19 +13,18 @@ keywords = ["zeebe", "workflow", "workflow-engine"] classifiers = [ "Programming Language :: Python :: 3", "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent" -] -packages = [ - { include = "pyzeebe" }, + "Operating System :: OS Independent", ] +packages = [{ include = "pyzeebe" }] [tool.poetry.dependencies] python = "^3.9" +anyio = "^4.6.0" +grpcio = "^1.66" +protobuf = "^5.28" oauthlib = "^3.1.0" requests-oauthlib = ">=1.3.0,<3.0.0" -zeebe-grpc = "^8.4.0" typing-extensions = "^4.11.0" -anyio = "^4.6.0" [tool.poetry.group.dev.dependencies] pytest = ">=7.4,<9.0" @@ -41,10 +38,13 @@ coveralls = "^3.3.1" responses = ">=0.23.2,<0.26.0" sphinx-rtd-theme = ">=3.0.0,<3.1.0" sphinx = ">=6,<8" +grpcio-tools = "^1.66" +mypy-protobuf = "^3.6" [tool.poetry.group.stubs.dependencies] types-oauthlib = "^3.1.0" types-requests-oauthlib = ">=1.3.0,<3.0.0" +types-protobuf = "^5.29.1.20241207" [tool.mypy] python_version = "3.9" @@ -52,46 +52,51 @@ packages = ["pyzeebe"] strict = true [[tool.mypy.overrides]] -module = [ - "grpc", - "zeebe_grpc.gateway_pb2", - "zeebe_grpc.gateway_pb2_grpc", -] +module = "grpc" ignore_missing_imports = true +[[tool.mypy.overrides]] +module = "pyzeebe.proto.*" +disable_error_code = ["import-untyped", "unused-ignore"] # "type-arg" + [tool.pylint.master] max-line-length = 120 disable = ["C0114", "C0115", "C0116"] [tool.black] line-length = 120 +extend-exclude = ''' +( + .*_pb2.py # exclude autogenerated Protocol Buffer files anywhere in the project + | .*_pb2_grpc.py +) +''' [tool.isort] profile = "black" +extend_skip_glob = ["*_pb2.py", "*_pb2_grpc.py", "*.pyi"] [tool.pytest.ini_options] asyncio_mode = "auto" -markers = [ - "e2e: end to end tests", -] +markers = ["e2e: end to end tests"] [tool.ruff] target-version = "py39" [tool.ruff.lint] select = [ - "E", # pycodestyle errors - "W", # pycodestyle warnings - "F", # pyflakes - "C", # flake8-comprehensions - "B", # flake8-bugbear - "TID", # flake8-tidy-imports - "T20", # flake8-print + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "C", # flake8-comprehensions + "B", # flake8-bugbear + "TID", # flake8-tidy-imports + "T20", # flake8-print "ASYNC", # flake8-async - "FA", # flake8-future-annotations + "FA", # flake8-future-annotations ] ignore = [ - "E501", # line too long, handled by black + "E501", # line too long, handled by black ] [build-system] diff --git a/pyzeebe/grpc_internals/zeebe_adapter_base.py b/pyzeebe/grpc_internals/zeebe_adapter_base.py index cd9f5adb..8507583a 100644 --- a/pyzeebe/grpc_internals/zeebe_adapter_base.py +++ b/pyzeebe/grpc_internals/zeebe_adapter_base.py @@ -2,7 +2,6 @@ from typing import NoReturn import grpc -from zeebe_grpc.gateway_pb2_grpc import GatewayStub from pyzeebe.errors import ( UnknownGrpcStatusCodeError, @@ -13,6 +12,7 @@ ) from pyzeebe.errors.pyzeebe_errors import PyZeebeError from pyzeebe.grpc_internals.grpc_utils import is_error_status +from 
pyzeebe.proto.gateway_pb2_grpc import GatewayStub logger = logging.getLogger(__name__) diff --git a/pyzeebe/grpc_internals/zeebe_job_adapter.py b/pyzeebe/grpc_internals/zeebe_job_adapter.py index 4b3cd0b8..49ccc6ad 100644 --- a/pyzeebe/grpc_internals/zeebe_job_adapter.py +++ b/pyzeebe/grpc_internals/zeebe_job_adapter.py @@ -6,13 +6,6 @@ from collections.abc import AsyncGenerator, Iterable import grpc -from zeebe_grpc.gateway_pb2 import ( - ActivatedJob, - ActivateJobsRequest, - CompleteJobRequest, - FailJobRequest, - ThrowErrorRequest, -) from pyzeebe.errors import ( ActivateJobsRequestInvalidError, @@ -22,6 +15,13 @@ from pyzeebe.grpc_internals.grpc_utils import is_error_status from pyzeebe.grpc_internals.zeebe_adapter_base import ZeebeAdapterBase from pyzeebe.job.job import Job +from pyzeebe.proto.gateway_pb2 import ( + ActivatedJob, + ActivateJobsRequest, + CompleteJobRequest, + FailJobRequest, + ThrowErrorRequest, +) from pyzeebe.types import Variables from .types import CompleteJobResponse, FailJobResponse, ThrowErrorResponse diff --git a/pyzeebe/grpc_internals/zeebe_message_adapter.py b/pyzeebe/grpc_internals/zeebe_message_adapter.py index ba4389a9..75de861b 100644 --- a/pyzeebe/grpc_internals/zeebe_message_adapter.py +++ b/pyzeebe/grpc_internals/zeebe_message_adapter.py @@ -3,11 +3,11 @@ import json import grpc -from zeebe_grpc.gateway_pb2 import PublishMessageRequest from pyzeebe.errors import MessageAlreadyExistsError from pyzeebe.grpc_internals.grpc_utils import is_error_status from pyzeebe.grpc_internals.zeebe_adapter_base import ZeebeAdapterBase +from pyzeebe.proto.gateway_pb2 import PublishMessageRequest from pyzeebe.types import Variables from .types import PublishMessageResponse @@ -28,10 +28,10 @@ async def publish_message( PublishMessageRequest( name=name, correlationKey=correlation_key, - messageId=message_id, + messageId=message_id, # type: ignore[arg-type] timeToLive=time_to_live_in_milliseconds, variables=json.dumps(variables), - tenantId=tenant_id, + tenantId=tenant_id, # type: ignore[arg-type] ) ) except grpc.aio.AioRpcError as grpc_error: diff --git a/pyzeebe/grpc_internals/zeebe_process_adapter.py b/pyzeebe/grpc_internals/zeebe_process_adapter.py index 39cb414c..55a8e1df 100644 --- a/pyzeebe/grpc_internals/zeebe_process_adapter.py +++ b/pyzeebe/grpc_internals/zeebe_process_adapter.py @@ -7,17 +7,6 @@ import anyio import grpc -from zeebe_grpc.gateway_pb2 import ( - CancelProcessInstanceRequest, - CreateProcessInstanceRequest, - CreateProcessInstanceWithResultRequest, - DecisionMetadata, - DecisionRequirementsMetadata, - DeployResourceRequest, - FormMetadata, - ProcessMetadata, - Resource, -) from pyzeebe.errors import ( InvalidJSONError, @@ -29,6 +18,17 @@ ) from pyzeebe.grpc_internals.grpc_utils import is_error_status from pyzeebe.grpc_internals.zeebe_adapter_base import ZeebeAdapterBase +from pyzeebe.proto.gateway_pb2 import ( + CancelProcessInstanceRequest, + CreateProcessInstanceRequest, + CreateProcessInstanceWithResultRequest, + DecisionMetadata, + DecisionRequirementsMetadata, + DeployResourceRequest, + FormMetadata, + ProcessMetadata, + Resource, +) from pyzeebe.types import Variables from .types import ( @@ -53,7 +53,7 @@ async def create_process_instance( bpmnProcessId=bpmn_process_id, version=version, variables=json.dumps(variables), - tenantId=tenant_id, + tenantId=tenant_id, # type: ignore[arg-type] ) ) except grpc.aio.AioRpcError as grpc_error: @@ -83,7 +83,7 @@ async def create_process_instance_with_result( bpmnProcessId=bpmn_process_id, 
version=version, variables=json.dumps(variables), - tenantId=tenant_id, + tenantId=tenant_id, # type: ignore[arg-type] ), requestTimeout=timeout, fetchVariables=variables_to_fetch, @@ -133,7 +133,7 @@ async def deploy_resource(self, *resource_file_path: str, tenant_id: str | None response = await self._gateway_stub.DeployResource( DeployResourceRequest( resources=[await result for result in map(_create_resource_request, resource_file_path)], - tenantId=tenant_id, + tenantId=tenant_id, # type: ignore[arg-type] ) ) except grpc.aio.AioRpcError as grpc_error: @@ -206,13 +206,10 @@ def _create_form_from_raw_form(response: FormMetadata) -> DeployResourceResponse _METADATA_PARSERS: dict[ str, - Callable[ - [ProcessMetadata | DecisionMetadata | DecisionRequirementsMetadata | FormMetadata], - DeployResourceResponse.ProcessMetadata - | DeployResourceResponse.DecisionMetadata - | DeployResourceResponse.DecisionRequirementsMetadata - | DeployResourceResponse.FormMetadata, - ], + Callable[[ProcessMetadata], DeployResourceResponse.ProcessMetadata] + | Callable[[DecisionMetadata], DeployResourceResponse.DecisionMetadata] + | Callable[[DecisionRequirementsMetadata], DeployResourceResponse.DecisionRequirementsMetadata] + | Callable[[FormMetadata], DeployResourceResponse.FormMetadata], ] = { "process": ZeebeProcessAdapter._create_process_from_raw_process, "decision": ZeebeProcessAdapter._create_decision_from_raw_decision, diff --git a/pyzeebe/proto/__init__.py b/pyzeebe/proto/__init__.py new file mode 100644 index 00000000..0c97c96c --- /dev/null +++ b/pyzeebe/proto/__init__.py @@ -0,0 +1,2 @@ +from . import gateway_pb2 as gateway_pb2 +from . import gateway_pb2_grpc as gateway_pb2_grpc diff --git a/pyzeebe/proto/gateway_pb2.py b/pyzeebe/proto/gateway_pb2.py new file mode 100644 index 00000000..e6f139fb --- /dev/null +++ b/pyzeebe/proto/gateway_pb2.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: pyzeebe/proto/gateway.proto +# Protobuf Python Version: 5.28.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 28, + 1, + '', + 'pyzeebe/proto/gateway.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bpyzeebe/proto/gateway.proto\x12\x10gateway_protocol\"u\n\x1aStreamActivatedJobsRequest\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0e\n\x06worker\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\x03\x12\x15\n\rfetchVariable\x18\x05 \x03(\t\x12\x11\n\ttenantIds\x18\x06 \x03(\t\"\xa1\x01\n\x13\x41\x63tivateJobsRequest\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0e\n\x06worker\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\x03\x12\x19\n\x11maxJobsToActivate\x18\x04 \x01(\x05\x12\x15\n\rfetchVariable\x18\x05 \x03(\t\x12\x16\n\x0erequestTimeout\x18\x06 \x01(\x03\x12\x11\n\ttenantIds\x18\x07 \x03(\t\"D\n\x14\x41\x63tivateJobsResponse\x12,\n\x04jobs\x18\x01 \x03(\x0b\x32\x1e.gateway_protocol.ActivatedJob\"\xba\x02\n\x0c\x41\x63tivatedJob\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x1a\n\x12processInstanceKey\x18\x03 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x04 \x01(\t\x12 \n\x18processDefinitionVersion\x18\x05 \x01(\x05\x12\x1c\n\x14processDefinitionKey\x18\x06 \x01(\x03\x12\x11\n\telementId\x18\x07 \x01(\t\x12\x1a\n\x12\x65lementInstanceKey\x18\x08 \x01(\x03\x12\x15\n\rcustomHeaders\x18\t \x01(\t\x12\x0e\n\x06worker\x18\n \x01(\t\x12\x0f\n\x07retries\x18\x0b \x01(\x05\x12\x10\n\x08\x64\x65\x61\x64line\x18\x0c \x01(\x03\x12\x11\n\tvariables\x18\r \x01(\t\x12\x10\n\x08tenantId\x18\x0e \x01(\t\":\n\x1c\x43\x61ncelProcessInstanceRequest\x12\x1a\n\x12processInstanceKey\x18\x01 \x01(\x03\"\x1f\n\x1d\x43\x61ncelProcessInstanceResponse\"7\n\x12\x43ompleteJobRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x11\n\tvariables\x18\x02 \x01(\t\"\x15\n\x13\x43ompleteJobResponse\"\xdf\x01\n\x1c\x43reateProcessInstanceRequest\x12\x1c\n\x14processDefinitionKey\x18\x01 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x11\n\tvariables\x18\x04 \x01(\t\x12T\n\x11startInstructions\x18\x05 \x03(\x0b\x32\x39.gateway_protocol.ProcessInstanceCreationStartInstruction\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"<\n\'ProcessInstanceCreationStartInstruction\x12\x11\n\telementId\x18\x01 \x01(\t\"\x93\x01\n\x1d\x43reateProcessInstanceResponse\x12\x1c\n\x14processDefinitionKey\x18\x01 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x1a\n\x12processInstanceKey\x18\x04 \x01(\x03\x12\x10\n\x08tenantId\x18\x05 \x01(\t\"\x99\x01\n&CreateProcessInstanceWithResultRequest\x12?\n\x07request\x18\x01 \x01(\x0b\x32..gateway_protocol.CreateProcessInstanceRequest\x12\x16\n\x0erequestTimeout\x18\x02 \x01(\x03\x12\x16\n\x0e\x66\x65tchVariables\x18\x03 \x03(\t\"\xb0\x01\n\'CreateProcessInstanceWithResultResponse\x12\x1c\n\x14processDefinitionKey\x18\x01 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x1a\n\x12processInstanceKey\x18\x04 
\x01(\x03\x12\x11\n\tvariables\x18\x05 \x01(\t\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"g\n\x17\x45valuateDecisionRequest\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x01 \x01(\x03\x12\x12\n\ndecisionId\x18\x02 \x01(\t\x12\x11\n\tvariables\x18\x03 \x01(\t\x12\x10\n\x08tenantId\x18\x04 \x01(\t\"\xd0\x02\n\x18\x45valuateDecisionResponse\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x01 \x01(\x03\x12\x12\n\ndecisionId\x18\x02 \x01(\t\x12\x14\n\x0c\x64\x65\x63isionName\x18\x03 \x01(\t\x12\x17\n\x0f\x64\x65\x63isionVersion\x18\x04 \x01(\x05\x12\x1e\n\x16\x64\x65\x63isionRequirementsId\x18\x05 \x01(\t\x12\x1f\n\x17\x64\x65\x63isionRequirementsKey\x18\x06 \x01(\x03\x12\x16\n\x0e\x64\x65\x63isionOutput\x18\x07 \x01(\t\x12?\n\x12\x65valuatedDecisions\x18\x08 \x03(\x0b\x32#.gateway_protocol.EvaluatedDecision\x12\x18\n\x10\x66\x61iledDecisionId\x18\t \x01(\t\x12\x16\n\x0e\x66\x61ilureMessage\x18\n \x01(\t\x12\x10\n\x08tenantId\x18\x0b \x01(\t\"\xab\x02\n\x11\x45valuatedDecision\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x01 \x01(\x03\x12\x12\n\ndecisionId\x18\x02 \x01(\t\x12\x14\n\x0c\x64\x65\x63isionName\x18\x03 \x01(\t\x12\x17\n\x0f\x64\x65\x63isionVersion\x18\x04 \x01(\x05\x12\x14\n\x0c\x64\x65\x63isionType\x18\x05 \x01(\t\x12\x16\n\x0e\x64\x65\x63isionOutput\x18\x06 \x01(\t\x12;\n\x0cmatchedRules\x18\x07 \x03(\x0b\x32%.gateway_protocol.MatchedDecisionRule\x12\x41\n\x0f\x65valuatedInputs\x18\x08 \x03(\x0b\x32(.gateway_protocol.EvaluatedDecisionInput\x12\x10\n\x08tenantId\x18\t \x01(\t\"P\n\x16\x45valuatedDecisionInput\x12\x0f\n\x07inputId\x18\x01 \x01(\t\x12\x11\n\tinputName\x18\x02 \x01(\t\x12\x12\n\ninputValue\x18\x03 \x01(\t\"T\n\x17\x45valuatedDecisionOutput\x12\x10\n\x08outputId\x18\x01 \x01(\t\x12\x12\n\noutputName\x18\x02 \x01(\t\x12\x13\n\x0boutputValue\x18\x03 \x01(\t\"}\n\x13MatchedDecisionRule\x12\x0e\n\x06ruleId\x18\x01 \x01(\t\x12\x11\n\truleIndex\x18\x02 \x01(\x05\x12\x43\n\x10\x65valuatedOutputs\x18\x03 \x03(\x0b\x32).gateway_protocol.EvaluatedDecisionOutput\"U\n\x14\x44\x65ployProcessRequest\x12\x39\n\tprocesses\x18\x01 \x03(\x0b\x32&.gateway_protocol.ProcessRequestObject:\x02\x18\x01\"<\n\x14ProcessRequestObject\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ndefinition\x18\x02 \x01(\x0c:\x02\x18\x01\"^\n\x15\x44\x65ployProcessResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x34\n\tprocesses\x18\x02 \x03(\x0b\x32!.gateway_protocol.ProcessMetadata:\x02\x18\x01\"X\n\x15\x44\x65ployResourceRequest\x12-\n\tresources\x18\x01 \x03(\x0b\x32\x1a.gateway_protocol.Resource\x12\x10\n\x08tenantId\x18\x02 \x01(\t\")\n\x08Resource\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\x0c\"j\n\x16\x44\x65ployResourceResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x31\n\x0b\x64\x65ployments\x18\x02 \x03(\x0b\x32\x1c.gateway_protocol.Deployment\x12\x10\n\x08tenantId\x18\x03 \x01(\t\"\x86\x02\n\nDeployment\x12\x34\n\x07process\x18\x01 \x01(\x0b\x32!.gateway_protocol.ProcessMetadataH\x00\x12\x36\n\x08\x64\x65\x63ision\x18\x02 \x01(\x0b\x32\".gateway_protocol.DecisionMetadataH\x00\x12N\n\x14\x64\x65\x63isionRequirements\x18\x03 \x01(\x0b\x32..gateway_protocol.DecisionRequirementsMetadataH\x00\x12.\n\x04\x66orm\x18\x04 \x01(\x0b\x32\x1e.gateway_protocol.FormMetadataH\x00\x42\n\n\x08Metadata\"\x7f\n\x0fProcessMetadata\x12\x15\n\rbpmnProcessId\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x1c\n\x14processDefinitionKey\x18\x03 \x01(\x03\x12\x14\n\x0cresourceName\x18\x04 \x01(\t\x12\x10\n\x08tenantId\x18\x05 \x01(\t\"\xbe\x01\n\x10\x44\x65\x63isionMetadata\x12\x15\n\rdmnDecisionId\x18\x01 
\x01(\t\x12\x17\n\x0f\x64mnDecisionName\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x04 \x01(\x03\x12!\n\x19\x64mnDecisionRequirementsId\x18\x05 \x01(\t\x12\x1f\n\x17\x64\x65\x63isionRequirementsKey\x18\x06 \x01(\x03\x12\x10\n\x08tenantId\x18\x07 \x01(\t\"\xc0\x01\n\x1c\x44\x65\x63isionRequirementsMetadata\x12!\n\x19\x64mnDecisionRequirementsId\x18\x01 \x01(\t\x12#\n\x1b\x64mnDecisionRequirementsName\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x1f\n\x17\x64\x65\x63isionRequirementsKey\x18\x04 \x01(\x03\x12\x14\n\x0cresourceName\x18\x05 \x01(\t\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"h\n\x0c\x46ormMetadata\x12\x0e\n\x06\x66ormId\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x0f\n\x07\x66ormKey\x18\x03 \x01(\x03\x12\x14\n\x0cresourceName\x18\x04 \x01(\t\x12\x10\n\x08tenantId\x18\x05 \x01(\t\"p\n\x0e\x46\x61ilJobRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x0f\n\x07retries\x18\x02 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x03 \x01(\t\x12\x14\n\x0cretryBackOff\x18\x04 \x01(\x03\x12\x11\n\tvariables\x18\x05 \x01(\t\"\x11\n\x0f\x46\x61ilJobResponse\"_\n\x11ThrowErrorRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x11\n\terrorCode\x18\x02 \x01(\t\x12\x14\n\x0c\x65rrorMessage\x18\x03 \x01(\t\x12\x11\n\tvariables\x18\x04 \x01(\t\"\x14\n\x12ThrowErrorResponse\"\x89\x01\n\x15PublishMessageRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0e\x63orrelationKey\x18\x02 \x01(\t\x12\x12\n\ntimeToLive\x18\x03 \x01(\x03\x12\x11\n\tmessageId\x18\x04 \x01(\t\x12\x11\n\tvariables\x18\x05 \x01(\t\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"7\n\x16PublishMessageResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x10\n\x08tenantId\x18\x02 \x01(\t\"-\n\x16ResolveIncidentRequest\x12\x13\n\x0bincidentKey\x18\x01 \x01(\x03\"\x19\n\x17ResolveIncidentResponse\"\x11\n\x0fTopologyRequest\"\xa2\x01\n\x10TopologyResponse\x12-\n\x07\x62rokers\x18\x01 \x03(\x0b\x32\x1c.gateway_protocol.BrokerInfo\x12\x13\n\x0b\x63lusterSize\x18\x02 \x01(\x05\x12\x17\n\x0fpartitionsCount\x18\x03 \x01(\x05\x12\x19\n\x11replicationFactor\x18\x04 \x01(\x05\x12\x16\n\x0egatewayVersion\x18\x05 \x01(\t\"z\n\nBrokerInfo\x12\x0e\n\x06nodeId\x18\x01 \x01(\x05\x12\x0c\n\x04host\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\x05\x12/\n\npartitions\x18\x04 \x03(\x0b\x32\x1b.gateway_protocol.Partition\x12\x0f\n\x07version\x18\x05 \x01(\t\"\xa0\x02\n\tPartition\x12\x13\n\x0bpartitionId\x18\x01 \x01(\x05\x12=\n\x04role\x18\x02 \x01(\x0e\x32/.gateway_protocol.Partition.PartitionBrokerRole\x12\x41\n\x06health\x18\x03 \x01(\x0e\x32\x31.gateway_protocol.Partition.PartitionBrokerHealth\"=\n\x13PartitionBrokerRole\x12\n\n\x06LEADER\x10\x00\x12\x0c\n\x08\x46OLLOWER\x10\x01\x12\x0c\n\x08INACTIVE\x10\x02\"=\n\x15PartitionBrokerHealth\x12\x0b\n\x07HEALTHY\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x08\n\x04\x44\x45\x41\x44\x10\x02\":\n\x17UpdateJobRetriesRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x0f\n\x07retries\x18\x02 \x01(\x05\"\x1a\n\x18UpdateJobRetriesResponse\":\n\x17UpdateJobTimeoutRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x0f\n\x07timeout\x18\x02 \x01(\x03\"\x1a\n\x18UpdateJobTimeoutResponse\"S\n\x13SetVariablesRequest\x12\x1a\n\x12\x65lementInstanceKey\x18\x01 \x01(\x03\x12\x11\n\tvariables\x18\x02 \x01(\t\x12\r\n\x05local\x18\x03 \x01(\x08\"#\n\x14SetVariablesResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\"\xa0\x04\n\x1cModifyProcessInstanceRequest\x12\x1a\n\x12processInstanceKey\x18\x01 \x01(\x03\x12`\n\x14\x61\x63tivateInstructions\x18\x02 
\x03(\x0b\x32\x42.gateway_protocol.ModifyProcessInstanceRequest.ActivateInstruction\x12\x62\n\x15terminateInstructions\x18\x03 \x03(\x0b\x32\x43.gateway_protocol.ModifyProcessInstanceRequest.TerminateInstruction\x1a\xae\x01\n\x13\x41\x63tivateInstruction\x12\x11\n\telementId\x18\x01 \x01(\t\x12\"\n\x1a\x61ncestorElementInstanceKey\x18\x02 \x01(\x03\x12`\n\x14variableInstructions\x18\x03 \x03(\x0b\x32\x42.gateway_protocol.ModifyProcessInstanceRequest.VariableInstruction\x1a\x39\n\x13VariableInstruction\x12\x11\n\tvariables\x18\x01 \x01(\t\x12\x0f\n\x07scopeId\x18\x02 \x01(\t\x1a\x32\n\x14TerminateInstruction\x12\x1a\n\x12\x65lementInstanceKey\x18\x01 \x01(\x03\"\x1f\n\x1dModifyProcessInstanceResponse\"\xf0\x02\n\x1dMigrateProcessInstanceRequest\x12\x1a\n\x12processInstanceKey\x18\x01 \x01(\x03\x12T\n\rmigrationPlan\x18\x02 \x01(\x0b\x32=.gateway_protocol.MigrateProcessInstanceRequest.MigrationPlan\x1a\x94\x01\n\rMigrationPlan\x12\"\n\x1atargetProcessDefinitionKey\x18\x01 \x01(\x03\x12_\n\x13mappingInstructions\x18\x02 \x03(\x0b\x32\x42.gateway_protocol.MigrateProcessInstanceRequest.MappingInstruction\x1a\x46\n\x12MappingInstruction\x12\x17\n\x0fsourceElementId\x18\x01 \x01(\t\x12\x17\n\x0ftargetElementId\x18\x02 \x01(\t\" \n\x1eMigrateProcessInstanceResponse\",\n\x15\x44\x65leteResourceRequest\x12\x13\n\x0bresourceKey\x18\x01 \x01(\x03\"\x18\n\x16\x44\x65leteResourceResponse\"Q\n\x16\x42roadcastSignalRequest\x12\x12\n\nsignalName\x18\x01 \x01(\t\x12\x11\n\tvariables\x18\x02 \x01(\t\x12\x10\n\x08tenantId\x18\x03 \x01(\t\"8\n\x17\x42roadcastSignalResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x10\n\x08tenantId\x18\x02 \x01(\t2\xdb\x11\n\x07Gateway\x12\x61\n\x0c\x41\x63tivateJobs\x12%.gateway_protocol.ActivateJobsRequest\x1a&.gateway_protocol.ActivateJobsResponse\"\x00\x30\x01\x12g\n\x13StreamActivatedJobs\x12,.gateway_protocol.StreamActivatedJobsRequest\x1a\x1e.gateway_protocol.ActivatedJob\"\x00\x30\x01\x12z\n\x15\x43\x61ncelProcessInstance\x12..gateway_protocol.CancelProcessInstanceRequest\x1a/.gateway_protocol.CancelProcessInstanceResponse\"\x00\x12\\\n\x0b\x43ompleteJob\x12$.gateway_protocol.CompleteJobRequest\x1a%.gateway_protocol.CompleteJobResponse\"\x00\x12z\n\x15\x43reateProcessInstance\x12..gateway_protocol.CreateProcessInstanceRequest\x1a/.gateway_protocol.CreateProcessInstanceResponse\"\x00\x12\x98\x01\n\x1f\x43reateProcessInstanceWithResult\x12\x38.gateway_protocol.CreateProcessInstanceWithResultRequest\x1a\x39.gateway_protocol.CreateProcessInstanceWithResultResponse\"\x00\x12k\n\x10\x45valuateDecision\x12).gateway_protocol.EvaluateDecisionRequest\x1a*.gateway_protocol.EvaluateDecisionResponse\"\x00\x12\x65\n\rDeployProcess\x12&.gateway_protocol.DeployProcessRequest\x1a\'.gateway_protocol.DeployProcessResponse\"\x03\x88\x02\x01\x12\x65\n\x0e\x44\x65ployResource\x12\'.gateway_protocol.DeployResourceRequest\x1a(.gateway_protocol.DeployResourceResponse\"\x00\x12P\n\x07\x46\x61ilJob\x12 
.gateway_protocol.FailJobRequest\x1a!.gateway_protocol.FailJobResponse\"\x00\x12Y\n\nThrowError\x12#.gateway_protocol.ThrowErrorRequest\x1a$.gateway_protocol.ThrowErrorResponse\"\x00\x12\x65\n\x0ePublishMessage\x12\'.gateway_protocol.PublishMessageRequest\x1a(.gateway_protocol.PublishMessageResponse\"\x00\x12h\n\x0fResolveIncident\x12(.gateway_protocol.ResolveIncidentRequest\x1a).gateway_protocol.ResolveIncidentResponse\"\x00\x12_\n\x0cSetVariables\x12%.gateway_protocol.SetVariablesRequest\x1a&.gateway_protocol.SetVariablesResponse\"\x00\x12S\n\x08Topology\x12!.gateway_protocol.TopologyRequest\x1a\".gateway_protocol.TopologyResponse\"\x00\x12k\n\x10UpdateJobRetries\x12).gateway_protocol.UpdateJobRetriesRequest\x1a*.gateway_protocol.UpdateJobRetriesResponse\"\x00\x12z\n\x15ModifyProcessInstance\x12..gateway_protocol.ModifyProcessInstanceRequest\x1a/.gateway_protocol.ModifyProcessInstanceResponse\"\x00\x12}\n\x16MigrateProcessInstance\x12/.gateway_protocol.MigrateProcessInstanceRequest\x1a\x30.gateway_protocol.MigrateProcessInstanceResponse\"\x00\x12k\n\x10UpdateJobTimeout\x12).gateway_protocol.UpdateJobTimeoutRequest\x1a*.gateway_protocol.UpdateJobTimeoutResponse\"\x00\x12\x65\n\x0e\x44\x65leteResource\x12\'.gateway_protocol.DeleteResourceRequest\x1a(.gateway_protocol.DeleteResourceResponse\"\x00\x12h\n\x0f\x42roadcastSignal\x12(.gateway_protocol.BroadcastSignalRequest\x1a).gateway_protocol.BroadcastSignalResponse\"\x00\x42,\n!io.camunda.zeebe.gateway.protocolP\x00Z\x05./;pbb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'pyzeebe.proto.gateway_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n!io.camunda.zeebe.gateway.protocolP\000Z\005./;pb' + _globals['_DEPLOYPROCESSREQUEST']._loaded_options = None + _globals['_DEPLOYPROCESSREQUEST']._serialized_options = b'\030\001' + _globals['_PROCESSREQUESTOBJECT']._loaded_options = None + _globals['_PROCESSREQUESTOBJECT']._serialized_options = b'\030\001' + _globals['_DEPLOYPROCESSRESPONSE']._loaded_options = None + _globals['_DEPLOYPROCESSRESPONSE']._serialized_options = b'\030\001' + _globals['_GATEWAY'].methods_by_name['DeployProcess']._loaded_options = None + _globals['_GATEWAY'].methods_by_name['DeployProcess']._serialized_options = b'\210\002\001' + _globals['_STREAMACTIVATEDJOBSREQUEST']._serialized_start=49 + _globals['_STREAMACTIVATEDJOBSREQUEST']._serialized_end=166 + _globals['_ACTIVATEJOBSREQUEST']._serialized_start=169 + _globals['_ACTIVATEJOBSREQUEST']._serialized_end=330 + _globals['_ACTIVATEJOBSRESPONSE']._serialized_start=332 + _globals['_ACTIVATEJOBSRESPONSE']._serialized_end=400 + _globals['_ACTIVATEDJOB']._serialized_start=403 + _globals['_ACTIVATEDJOB']._serialized_end=717 + _globals['_CANCELPROCESSINSTANCEREQUEST']._serialized_start=719 + _globals['_CANCELPROCESSINSTANCEREQUEST']._serialized_end=777 + _globals['_CANCELPROCESSINSTANCERESPONSE']._serialized_start=779 + _globals['_CANCELPROCESSINSTANCERESPONSE']._serialized_end=810 + _globals['_COMPLETEJOBREQUEST']._serialized_start=812 + _globals['_COMPLETEJOBREQUEST']._serialized_end=867 + _globals['_COMPLETEJOBRESPONSE']._serialized_start=869 + _globals['_COMPLETEJOBRESPONSE']._serialized_end=890 + _globals['_CREATEPROCESSINSTANCEREQUEST']._serialized_start=893 + _globals['_CREATEPROCESSINSTANCEREQUEST']._serialized_end=1116 + 
_globals['_PROCESSINSTANCECREATIONSTARTINSTRUCTION']._serialized_start=1118 + _globals['_PROCESSINSTANCECREATIONSTARTINSTRUCTION']._serialized_end=1178 + _globals['_CREATEPROCESSINSTANCERESPONSE']._serialized_start=1181 + _globals['_CREATEPROCESSINSTANCERESPONSE']._serialized_end=1328 + _globals['_CREATEPROCESSINSTANCEWITHRESULTREQUEST']._serialized_start=1331 + _globals['_CREATEPROCESSINSTANCEWITHRESULTREQUEST']._serialized_end=1484 + _globals['_CREATEPROCESSINSTANCEWITHRESULTRESPONSE']._serialized_start=1487 + _globals['_CREATEPROCESSINSTANCEWITHRESULTRESPONSE']._serialized_end=1663 + _globals['_EVALUATEDECISIONREQUEST']._serialized_start=1665 + _globals['_EVALUATEDECISIONREQUEST']._serialized_end=1768 + _globals['_EVALUATEDECISIONRESPONSE']._serialized_start=1771 + _globals['_EVALUATEDECISIONRESPONSE']._serialized_end=2107 + _globals['_EVALUATEDDECISION']._serialized_start=2110 + _globals['_EVALUATEDDECISION']._serialized_end=2409 + _globals['_EVALUATEDDECISIONINPUT']._serialized_start=2411 + _globals['_EVALUATEDDECISIONINPUT']._serialized_end=2491 + _globals['_EVALUATEDDECISIONOUTPUT']._serialized_start=2493 + _globals['_EVALUATEDDECISIONOUTPUT']._serialized_end=2577 + _globals['_MATCHEDDECISIONRULE']._serialized_start=2579 + _globals['_MATCHEDDECISIONRULE']._serialized_end=2704 + _globals['_DEPLOYPROCESSREQUEST']._serialized_start=2706 + _globals['_DEPLOYPROCESSREQUEST']._serialized_end=2791 + _globals['_PROCESSREQUESTOBJECT']._serialized_start=2793 + _globals['_PROCESSREQUESTOBJECT']._serialized_end=2853 + _globals['_DEPLOYPROCESSRESPONSE']._serialized_start=2855 + _globals['_DEPLOYPROCESSRESPONSE']._serialized_end=2949 + _globals['_DEPLOYRESOURCEREQUEST']._serialized_start=2951 + _globals['_DEPLOYRESOURCEREQUEST']._serialized_end=3039 + _globals['_RESOURCE']._serialized_start=3041 + _globals['_RESOURCE']._serialized_end=3082 + _globals['_DEPLOYRESOURCERESPONSE']._serialized_start=3084 + _globals['_DEPLOYRESOURCERESPONSE']._serialized_end=3190 + _globals['_DEPLOYMENT']._serialized_start=3193 + _globals['_DEPLOYMENT']._serialized_end=3455 + _globals['_PROCESSMETADATA']._serialized_start=3457 + _globals['_PROCESSMETADATA']._serialized_end=3584 + _globals['_DECISIONMETADATA']._serialized_start=3587 + _globals['_DECISIONMETADATA']._serialized_end=3777 + _globals['_DECISIONREQUIREMENTSMETADATA']._serialized_start=3780 + _globals['_DECISIONREQUIREMENTSMETADATA']._serialized_end=3972 + _globals['_FORMMETADATA']._serialized_start=3974 + _globals['_FORMMETADATA']._serialized_end=4078 + _globals['_FAILJOBREQUEST']._serialized_start=4080 + _globals['_FAILJOBREQUEST']._serialized_end=4192 + _globals['_FAILJOBRESPONSE']._serialized_start=4194 + _globals['_FAILJOBRESPONSE']._serialized_end=4211 + _globals['_THROWERRORREQUEST']._serialized_start=4213 + _globals['_THROWERRORREQUEST']._serialized_end=4308 + _globals['_THROWERRORRESPONSE']._serialized_start=4310 + _globals['_THROWERRORRESPONSE']._serialized_end=4330 + _globals['_PUBLISHMESSAGEREQUEST']._serialized_start=4333 + _globals['_PUBLISHMESSAGEREQUEST']._serialized_end=4470 + _globals['_PUBLISHMESSAGERESPONSE']._serialized_start=4472 + _globals['_PUBLISHMESSAGERESPONSE']._serialized_end=4527 + _globals['_RESOLVEINCIDENTREQUEST']._serialized_start=4529 + _globals['_RESOLVEINCIDENTREQUEST']._serialized_end=4574 + _globals['_RESOLVEINCIDENTRESPONSE']._serialized_start=4576 + _globals['_RESOLVEINCIDENTRESPONSE']._serialized_end=4601 + _globals['_TOPOLOGYREQUEST']._serialized_start=4603 + _globals['_TOPOLOGYREQUEST']._serialized_end=4620 + 
_globals['_TOPOLOGYRESPONSE']._serialized_start=4623 + _globals['_TOPOLOGYRESPONSE']._serialized_end=4785 + _globals['_BROKERINFO']._serialized_start=4787 + _globals['_BROKERINFO']._serialized_end=4909 + _globals['_PARTITION']._serialized_start=4912 + _globals['_PARTITION']._serialized_end=5200 + _globals['_PARTITION_PARTITIONBROKERROLE']._serialized_start=5076 + _globals['_PARTITION_PARTITIONBROKERROLE']._serialized_end=5137 + _globals['_PARTITION_PARTITIONBROKERHEALTH']._serialized_start=5139 + _globals['_PARTITION_PARTITIONBROKERHEALTH']._serialized_end=5200 + _globals['_UPDATEJOBRETRIESREQUEST']._serialized_start=5202 + _globals['_UPDATEJOBRETRIESREQUEST']._serialized_end=5260 + _globals['_UPDATEJOBRETRIESRESPONSE']._serialized_start=5262 + _globals['_UPDATEJOBRETRIESRESPONSE']._serialized_end=5288 + _globals['_UPDATEJOBTIMEOUTREQUEST']._serialized_start=5290 + _globals['_UPDATEJOBTIMEOUTREQUEST']._serialized_end=5348 + _globals['_UPDATEJOBTIMEOUTRESPONSE']._serialized_start=5350 + _globals['_UPDATEJOBTIMEOUTRESPONSE']._serialized_end=5376 + _globals['_SETVARIABLESREQUEST']._serialized_start=5378 + _globals['_SETVARIABLESREQUEST']._serialized_end=5461 + _globals['_SETVARIABLESRESPONSE']._serialized_start=5463 + _globals['_SETVARIABLESRESPONSE']._serialized_end=5498 + _globals['_MODIFYPROCESSINSTANCEREQUEST']._serialized_start=5501 + _globals['_MODIFYPROCESSINSTANCEREQUEST']._serialized_end=6045 + _globals['_MODIFYPROCESSINSTANCEREQUEST_ACTIVATEINSTRUCTION']._serialized_start=5760 + _globals['_MODIFYPROCESSINSTANCEREQUEST_ACTIVATEINSTRUCTION']._serialized_end=5934 + _globals['_MODIFYPROCESSINSTANCEREQUEST_VARIABLEINSTRUCTION']._serialized_start=5936 + _globals['_MODIFYPROCESSINSTANCEREQUEST_VARIABLEINSTRUCTION']._serialized_end=5993 + _globals['_MODIFYPROCESSINSTANCEREQUEST_TERMINATEINSTRUCTION']._serialized_start=5995 + _globals['_MODIFYPROCESSINSTANCEREQUEST_TERMINATEINSTRUCTION']._serialized_end=6045 + _globals['_MODIFYPROCESSINSTANCERESPONSE']._serialized_start=6047 + _globals['_MODIFYPROCESSINSTANCERESPONSE']._serialized_end=6078 + _globals['_MIGRATEPROCESSINSTANCEREQUEST']._serialized_start=6081 + _globals['_MIGRATEPROCESSINSTANCEREQUEST']._serialized_end=6449 + _globals['_MIGRATEPROCESSINSTANCEREQUEST_MIGRATIONPLAN']._serialized_start=6229 + _globals['_MIGRATEPROCESSINSTANCEREQUEST_MIGRATIONPLAN']._serialized_end=6377 + _globals['_MIGRATEPROCESSINSTANCEREQUEST_MAPPINGINSTRUCTION']._serialized_start=6379 + _globals['_MIGRATEPROCESSINSTANCEREQUEST_MAPPINGINSTRUCTION']._serialized_end=6449 + _globals['_MIGRATEPROCESSINSTANCERESPONSE']._serialized_start=6451 + _globals['_MIGRATEPROCESSINSTANCERESPONSE']._serialized_end=6483 + _globals['_DELETERESOURCEREQUEST']._serialized_start=6485 + _globals['_DELETERESOURCEREQUEST']._serialized_end=6529 + _globals['_DELETERESOURCERESPONSE']._serialized_start=6531 + _globals['_DELETERESOURCERESPONSE']._serialized_end=6555 + _globals['_BROADCASTSIGNALREQUEST']._serialized_start=6557 + _globals['_BROADCASTSIGNALREQUEST']._serialized_end=6638 + _globals['_BROADCASTSIGNALRESPONSE']._serialized_start=6640 + _globals['_BROADCASTSIGNALRESPONSE']._serialized_end=6696 + _globals['_GATEWAY']._serialized_start=6699 + _globals['_GATEWAY']._serialized_end=8966 +# @@protoc_insertion_point(module_scope) diff --git a/pyzeebe/proto/gateway_pb2.pyi b/pyzeebe/proto/gateway_pb2.pyi new file mode 100644 index 00000000..7f8e7dd3 --- /dev/null +++ b/pyzeebe/proto/gateway_pb2.pyi @@ -0,0 +1,1687 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class StreamActivatedJobsRequest(google.protobuf.message.Message): + """For a more complete documentation, refer to Zeebe documentation at: + https://docs.camunda.io/docs/reference/grpc + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TYPE_FIELD_NUMBER: builtins.int + WORKER_FIELD_NUMBER: builtins.int + TIMEOUT_FIELD_NUMBER: builtins.int + FETCHVARIABLE_FIELD_NUMBER: builtins.int + TENANTIDS_FIELD_NUMBER: builtins.int + type: builtins.str + """the job type, as defined in the BPMN process (e.g. ) + """ + worker: builtins.str + """the name of the worker activating the jobs, mostly used for logging purposes""" + timeout: builtins.int + """a job returned after this call will not be activated by another call until the + timeout (in ms) has been reached + """ + @property + def fetchVariable(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """a list of variables to fetch as the job variables; if empty, all visible variables at + the time of activation for the scope of the job will be returned + """ + + @property + def tenantIds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """a list of identifiers of tenants for which to stream jobs""" + + def __init__( + self, + *, + type: builtins.str = ..., + worker: builtins.str = ..., + timeout: builtins.int = ..., + fetchVariable: collections.abc.Iterable[builtins.str] | None = ..., + tenantIds: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["fetchVariable", b"fetchVariable", "tenantIds", b"tenantIds", "timeout", b"timeout", "type", b"type", "worker", b"worker"]) -> None: ... + +global___StreamActivatedJobsRequest = StreamActivatedJobsRequest + +@typing.final +class ActivateJobsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TYPE_FIELD_NUMBER: builtins.int + WORKER_FIELD_NUMBER: builtins.int + TIMEOUT_FIELD_NUMBER: builtins.int + MAXJOBSTOACTIVATE_FIELD_NUMBER: builtins.int + FETCHVARIABLE_FIELD_NUMBER: builtins.int + REQUESTTIMEOUT_FIELD_NUMBER: builtins.int + TENANTIDS_FIELD_NUMBER: builtins.int + type: builtins.str + """the job type, as defined in the BPMN process (e.g. ) + """ + worker: builtins.str + """the name of the worker activating the jobs, mostly used for logging purposes""" + timeout: builtins.int + """a job returned after this call will not be activated by another call until the + timeout (in ms) has been reached + """ + maxJobsToActivate: builtins.int + """the maximum jobs to activate by this request""" + requestTimeout: builtins.int + """The request will be completed when at least one job is activated or after the requestTimeout (in ms). + if the requestTimeout = 0, a default timeout is used. + if the requestTimeout < 0, long polling is disabled and the request is completed immediately, even when no job is activated. 
+ """ + @property + def fetchVariable(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """a list of variables to fetch as the job variables; if empty, all visible variables at + the time of activation for the scope of the job will be returned + """ + + @property + def tenantIds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """a list of IDs of tenants for which to activate jobs""" + + def __init__( + self, + *, + type: builtins.str = ..., + worker: builtins.str = ..., + timeout: builtins.int = ..., + maxJobsToActivate: builtins.int = ..., + fetchVariable: collections.abc.Iterable[builtins.str] | None = ..., + requestTimeout: builtins.int = ..., + tenantIds: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["fetchVariable", b"fetchVariable", "maxJobsToActivate", b"maxJobsToActivate", "requestTimeout", b"requestTimeout", "tenantIds", b"tenantIds", "timeout", b"timeout", "type", b"type", "worker", b"worker"]) -> None: ... + +global___ActivateJobsRequest = ActivateJobsRequest + +@typing.final +class ActivateJobsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + JOBS_FIELD_NUMBER: builtins.int + @property + def jobs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ActivatedJob]: + """list of activated jobs""" + + def __init__( + self, + *, + jobs: collections.abc.Iterable[global___ActivatedJob] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["jobs", b"jobs"]) -> None: ... + +global___ActivateJobsResponse = ActivateJobsResponse + +@typing.final +class ActivatedJob(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int + BPMNPROCESSID_FIELD_NUMBER: builtins.int + PROCESSDEFINITIONVERSION_FIELD_NUMBER: builtins.int + PROCESSDEFINITIONKEY_FIELD_NUMBER: builtins.int + ELEMENTID_FIELD_NUMBER: builtins.int + ELEMENTINSTANCEKEY_FIELD_NUMBER: builtins.int + CUSTOMHEADERS_FIELD_NUMBER: builtins.int + WORKER_FIELD_NUMBER: builtins.int + RETRIES_FIELD_NUMBER: builtins.int + DEADLINE_FIELD_NUMBER: builtins.int + VARIABLES_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + key: builtins.int + """the key, a unique identifier for the job""" + type: builtins.str + """the type of the job (should match what was requested)""" + processInstanceKey: builtins.int + """the job's process instance key""" + bpmnProcessId: builtins.str + """the bpmn process ID of the job process definition""" + processDefinitionVersion: builtins.int + """the version of the job process definition""" + processDefinitionKey: builtins.int + """the key of the job process definition""" + elementId: builtins.str + """the associated task element ID""" + elementInstanceKey: builtins.int + """the unique key identifying the associated task, unique within the scope of the + process instance + """ + customHeaders: builtins.str + """a set of custom headers defined during modelling; returned as a serialized + JSON document + """ + worker: builtins.str + """the name of the worker which activated this job""" + retries: builtins.int + """the amount of retries left to this job (should always be positive)""" + deadline: builtins.int + """when the job can be activated again, sent as a UNIX epoch timestamp""" + variables: 
builtins.str + """JSON document, computed at activation time, consisting of all visible variables to + the task scope + """ + tenantId: builtins.str + """the id of the tenant that owns the job""" + def __init__( + self, + *, + key: builtins.int = ..., + type: builtins.str = ..., + processInstanceKey: builtins.int = ..., + bpmnProcessId: builtins.str = ..., + processDefinitionVersion: builtins.int = ..., + processDefinitionKey: builtins.int = ..., + elementId: builtins.str = ..., + elementInstanceKey: builtins.int = ..., + customHeaders: builtins.str = ..., + worker: builtins.str = ..., + retries: builtins.int = ..., + deadline: builtins.int = ..., + variables: builtins.str = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bpmnProcessId", b"bpmnProcessId", "customHeaders", b"customHeaders", "deadline", b"deadline", "elementId", b"elementId", "elementInstanceKey", b"elementInstanceKey", "key", b"key", "processDefinitionKey", b"processDefinitionKey", "processDefinitionVersion", b"processDefinitionVersion", "processInstanceKey", b"processInstanceKey", "retries", b"retries", "tenantId", b"tenantId", "type", b"type", "variables", b"variables", "worker", b"worker"]) -> None: ... + +global___ActivatedJob = ActivatedJob + +@typing.final +class CancelProcessInstanceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int + processInstanceKey: builtins.int + """the process instance key (as, for example, obtained from + CreateProcessInstanceResponse) + """ + def __init__( + self, + *, + processInstanceKey: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["processInstanceKey", b"processInstanceKey"]) -> None: ... + +global___CancelProcessInstanceRequest = CancelProcessInstanceRequest + +@typing.final +class CancelProcessInstanceResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___CancelProcessInstanceResponse = CancelProcessInstanceResponse + +@typing.final +class CompleteJobRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + JOBKEY_FIELD_NUMBER: builtins.int + VARIABLES_FIELD_NUMBER: builtins.int + jobKey: builtins.int + """the unique job identifier, as obtained from ActivateJobsResponse""" + variables: builtins.str + """a JSON document representing the variables in the current task scope""" + def __init__( + self, + *, + jobKey: builtins.int = ..., + variables: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["jobKey", b"jobKey", "variables", b"variables"]) -> None: ... + +global___CompleteJobRequest = CompleteJobRequest + +@typing.final +class CompleteJobResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___CompleteJobResponse = CompleteJobResponse + +@typing.final +class CreateProcessInstanceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROCESSDEFINITIONKEY_FIELD_NUMBER: builtins.int + BPMNPROCESSID_FIELD_NUMBER: builtins.int + VERSION_FIELD_NUMBER: builtins.int + VARIABLES_FIELD_NUMBER: builtins.int + STARTINSTRUCTIONS_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + processDefinitionKey: builtins.int + """the unique key identifying the process definition (e.g. 
returned from a process + in the DeployProcessResponse message) + """ + bpmnProcessId: builtins.str + """the BPMN process ID of the process definition""" + version: builtins.int + """the version of the process; set to -1 to use the latest version""" + variables: builtins.str + """JSON document that will instantiate the variables for the root variable scope of the + process instance; it must be a JSON object, as variables will be mapped in a + key-value fashion. e.g. { "a": 1, "b": 2 } will create two variables, named "a" and + "b" respectively, with their associated values. [{ "a": 1, "b": 2 }] would not be a + valid argument, as the root of the JSON document is an array and not an object. + """ + tenantId: builtins.str + """the tenant id of the process definition""" + @property + def startInstructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProcessInstanceCreationStartInstruction]: + """List of start instructions. If empty (default) the process instance + will start at the start event. If non-empty the process instance will apply start + instructions after it has been created + """ + + def __init__( + self, + *, + processDefinitionKey: builtins.int = ..., + bpmnProcessId: builtins.str = ..., + version: builtins.int = ..., + variables: builtins.str = ..., + startInstructions: collections.abc.Iterable[global___ProcessInstanceCreationStartInstruction] | None = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bpmnProcessId", b"bpmnProcessId", "processDefinitionKey", b"processDefinitionKey", "startInstructions", b"startInstructions", "tenantId", b"tenantId", "variables", b"variables", "version", b"version"]) -> None: ... + +global___CreateProcessInstanceRequest = CreateProcessInstanceRequest + +@typing.final +class ProcessInstanceCreationStartInstruction(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ELEMENTID_FIELD_NUMBER: builtins.int + elementId: builtins.str + """for now, however, the start instruction is implicitly a + "startBeforeElement" instruction + + element ID + """ + def __init__( + self, + *, + elementId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["elementId", b"elementId"]) -> None: ... + +global___ProcessInstanceCreationStartInstruction = ProcessInstanceCreationStartInstruction + +@typing.final +class CreateProcessInstanceResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROCESSDEFINITIONKEY_FIELD_NUMBER: builtins.int + BPMNPROCESSID_FIELD_NUMBER: builtins.int + VERSION_FIELD_NUMBER: builtins.int + PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + processDefinitionKey: builtins.int + """the key of the process definition which was used to create the process instance""" + bpmnProcessId: builtins.str + """the BPMN process ID of the process definition which was used to create the process + instance + """ + version: builtins.int + """the version of the process definition which was used to create the process instance""" + processInstanceKey: builtins.int + """the unique identifier of the created process instance; to be used wherever a request + needs a process instance key (e.g. 
CancelProcessInstanceRequest) + """ + tenantId: builtins.str + """the tenant identifier of the created process instance""" + def __init__( + self, + *, + processDefinitionKey: builtins.int = ..., + bpmnProcessId: builtins.str = ..., + version: builtins.int = ..., + processInstanceKey: builtins.int = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bpmnProcessId", b"bpmnProcessId", "processDefinitionKey", b"processDefinitionKey", "processInstanceKey", b"processInstanceKey", "tenantId", b"tenantId", "version", b"version"]) -> None: ... + +global___CreateProcessInstanceResponse = CreateProcessInstanceResponse + +@typing.final +class CreateProcessInstanceWithResultRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REQUEST_FIELD_NUMBER: builtins.int + REQUESTTIMEOUT_FIELD_NUMBER: builtins.int + FETCHVARIABLES_FIELD_NUMBER: builtins.int + requestTimeout: builtins.int + """timeout (in ms). the request will be closed if the process is not completed + before the requestTimeout. + if requestTimeout = 0, uses the generic requestTimeout configured in the gateway. + """ + @property + def request(self) -> global___CreateProcessInstanceRequest: ... + @property + def fetchVariables(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """list of names of variables to be included in `CreateProcessInstanceWithResultResponse.variables` + if empty, all visible variables in the root scope will be returned. + """ + + def __init__( + self, + *, + request: global___CreateProcessInstanceRequest | None = ..., + requestTimeout: builtins.int = ..., + fetchVariables: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["request", b"request"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["fetchVariables", b"fetchVariables", "request", b"request", "requestTimeout", b"requestTimeout"]) -> None: ... + +global___CreateProcessInstanceWithResultRequest = CreateProcessInstanceWithResultRequest + +@typing.final +class CreateProcessInstanceWithResultResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROCESSDEFINITIONKEY_FIELD_NUMBER: builtins.int + BPMNPROCESSID_FIELD_NUMBER: builtins.int + VERSION_FIELD_NUMBER: builtins.int + PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int + VARIABLES_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + processDefinitionKey: builtins.int + """the key of the process definition which was used to create the process instance""" + bpmnProcessId: builtins.str + """the BPMN process ID of the process definition which was used to create the process + instance + """ + version: builtins.int + """the version of the process definition which was used to create the process instance""" + processInstanceKey: builtins.int + """the unique identifier of the created process instance; to be used wherever a request + needs a process instance key (e.g. CancelProcessInstanceRequest) + """ + variables: builtins.str + """JSON document + consists of visible variables in the root scope + """ + tenantId: builtins.str + """the tenant identifier of the process definition""" + def __init__( + self, + *, + processDefinitionKey: builtins.int = ..., + bpmnProcessId: builtins.str = ..., + version: builtins.int = ..., + processInstanceKey: builtins.int = ..., + variables: builtins.str = ..., + tenantId: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["bpmnProcessId", b"bpmnProcessId", "processDefinitionKey", b"processDefinitionKey", "processInstanceKey", b"processInstanceKey", "tenantId", b"tenantId", "variables", b"variables", "version", b"version"]) -> None: ... + +global___CreateProcessInstanceWithResultResponse = CreateProcessInstanceWithResultResponse + +@typing.final +class EvaluateDecisionRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DECISIONKEY_FIELD_NUMBER: builtins.int + DECISIONID_FIELD_NUMBER: builtins.int + VARIABLES_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + decisionKey: builtins.int + """the unique key identifying the decision to be evaluated (e.g. returned + from a decision in the DeployResourceResponse message) + """ + decisionId: builtins.str + """the ID of the decision to be evaluated""" + variables: builtins.str + """JSON document that will instantiate the variables for the decision to be + evaluated; it must be a JSON object, as variables will be mapped in a + key-value fashion, e.g. { "a": 1, "b": 2 } will create two variables, + named "a" and "b" respectively, with their associated values. + [{ "a": 1, "b": 2 }] would not be a valid argument, as the root of the + JSON document is an array and not an object. + """ + tenantId: builtins.str + """the tenant identifier of the decision""" + def __init__( + self, + *, + decisionKey: builtins.int = ..., + decisionId: builtins.str = ..., + variables: builtins.str = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["decisionId", b"decisionId", "decisionKey", b"decisionKey", "tenantId", b"tenantId", "variables", b"variables"]) -> None: ... + +global___EvaluateDecisionRequest = EvaluateDecisionRequest + +@typing.final +class EvaluateDecisionResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DECISIONKEY_FIELD_NUMBER: builtins.int + DECISIONID_FIELD_NUMBER: builtins.int + DECISIONNAME_FIELD_NUMBER: builtins.int + DECISIONVERSION_FIELD_NUMBER: builtins.int + DECISIONREQUIREMENTSID_FIELD_NUMBER: builtins.int + DECISIONREQUIREMENTSKEY_FIELD_NUMBER: builtins.int + DECISIONOUTPUT_FIELD_NUMBER: builtins.int + EVALUATEDDECISIONS_FIELD_NUMBER: builtins.int + FAILEDDECISIONID_FIELD_NUMBER: builtins.int + FAILUREMESSAGE_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + decisionKey: builtins.int + """the unique key identifying the decision which was evaluated (e.g. returned + from a decision in the DeployResourceResponse message) + """ + decisionId: builtins.str + """the ID of the decision which was evaluated""" + decisionName: builtins.str + """the name of the decision which was evaluated""" + decisionVersion: builtins.int + """the version of the decision which was evaluated""" + decisionRequirementsId: builtins.str + """the ID of the decision requirements graph that the decision which was + evaluated is part of. + """ + decisionRequirementsKey: builtins.int + """the unique key identifying the decision requirements graph that the + decision which was evaluated is part of. + """ + decisionOutput: builtins.str + """JSON document that will instantiate the result of the decision which was + evaluated; it will be a JSON object, as the result output will be mapped + in a key-value fashion, e.g. { "a": 1 }. 
+ """ + failedDecisionId: builtins.str + """an optional string indicating the ID of the decision which + failed during evaluation + """ + failureMessage: builtins.str + """an optional message describing why the decision which was evaluated failed""" + tenantId: builtins.str + """the tenant identifier of the evaluated decision""" + @property + def evaluatedDecisions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EvaluatedDecision]: + """a list of decisions that were evaluated within the requested decision evaluation""" + + def __init__( + self, + *, + decisionKey: builtins.int = ..., + decisionId: builtins.str = ..., + decisionName: builtins.str = ..., + decisionVersion: builtins.int = ..., + decisionRequirementsId: builtins.str = ..., + decisionRequirementsKey: builtins.int = ..., + decisionOutput: builtins.str = ..., + evaluatedDecisions: collections.abc.Iterable[global___EvaluatedDecision] | None = ..., + failedDecisionId: builtins.str = ..., + failureMessage: builtins.str = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["decisionId", b"decisionId", "decisionKey", b"decisionKey", "decisionName", b"decisionName", "decisionOutput", b"decisionOutput", "decisionRequirementsId", b"decisionRequirementsId", "decisionRequirementsKey", b"decisionRequirementsKey", "decisionVersion", b"decisionVersion", "evaluatedDecisions", b"evaluatedDecisions", "failedDecisionId", b"failedDecisionId", "failureMessage", b"failureMessage", "tenantId", b"tenantId"]) -> None: ... + +global___EvaluateDecisionResponse = EvaluateDecisionResponse + +@typing.final +class EvaluatedDecision(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DECISIONKEY_FIELD_NUMBER: builtins.int + DECISIONID_FIELD_NUMBER: builtins.int + DECISIONNAME_FIELD_NUMBER: builtins.int + DECISIONVERSION_FIELD_NUMBER: builtins.int + DECISIONTYPE_FIELD_NUMBER: builtins.int + DECISIONOUTPUT_FIELD_NUMBER: builtins.int + MATCHEDRULES_FIELD_NUMBER: builtins.int + EVALUATEDINPUTS_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + decisionKey: builtins.int + """the unique key identifying the decision which was evaluated (e.g. returned + from a decision in the DeployResourceResponse message) + """ + decisionId: builtins.str + """the ID of the decision which was evaluated""" + decisionName: builtins.str + """the name of the decision which was evaluated""" + decisionVersion: builtins.int + """the version of the decision which was evaluated""" + decisionType: builtins.str + """the type of the decision which was evaluated""" + decisionOutput: builtins.str + """JSON document that will instantiate the result of the decision which was + evaluated; it will be a JSON object, as the result output will be mapped + in a key-value fashion, e.g. { "a": 1 }. 
+ """ + tenantId: builtins.str + """the tenant identifier of the evaluated decision""" + @property + def matchedRules(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MatchedDecisionRule]: + """the decision rules that matched within this decision evaluation""" + + @property + def evaluatedInputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EvaluatedDecisionInput]: + """the decision inputs that were evaluated within this decision evaluation""" + + def __init__( + self, + *, + decisionKey: builtins.int = ..., + decisionId: builtins.str = ..., + decisionName: builtins.str = ..., + decisionVersion: builtins.int = ..., + decisionType: builtins.str = ..., + decisionOutput: builtins.str = ..., + matchedRules: collections.abc.Iterable[global___MatchedDecisionRule] | None = ..., + evaluatedInputs: collections.abc.Iterable[global___EvaluatedDecisionInput] | None = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["decisionId", b"decisionId", "decisionKey", b"decisionKey", "decisionName", b"decisionName", "decisionOutput", b"decisionOutput", "decisionType", b"decisionType", "decisionVersion", b"decisionVersion", "evaluatedInputs", b"evaluatedInputs", "matchedRules", b"matchedRules", "tenantId", b"tenantId"]) -> None: ... + +global___EvaluatedDecision = EvaluatedDecision + +@typing.final +class EvaluatedDecisionInput(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INPUTID_FIELD_NUMBER: builtins.int + INPUTNAME_FIELD_NUMBER: builtins.int + INPUTVALUE_FIELD_NUMBER: builtins.int + inputId: builtins.str + """the id of the evaluated decision input""" + inputName: builtins.str + """the name of the evaluated decision input""" + inputValue: builtins.str + """the value of the evaluated decision input""" + def __init__( + self, + *, + inputId: builtins.str = ..., + inputName: builtins.str = ..., + inputValue: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["inputId", b"inputId", "inputName", b"inputName", "inputValue", b"inputValue"]) -> None: ... + +global___EvaluatedDecisionInput = EvaluatedDecisionInput + +@typing.final +class EvaluatedDecisionOutput(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + OUTPUTID_FIELD_NUMBER: builtins.int + OUTPUTNAME_FIELD_NUMBER: builtins.int + OUTPUTVALUE_FIELD_NUMBER: builtins.int + outputId: builtins.str + """the id of the evaluated decision output""" + outputName: builtins.str + """the name of the evaluated decision output""" + outputValue: builtins.str + """the value of the evaluated decision output""" + def __init__( + self, + *, + outputId: builtins.str = ..., + outputName: builtins.str = ..., + outputValue: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["outputId", b"outputId", "outputName", b"outputName", "outputValue", b"outputValue"]) -> None: ... 
+ +global___EvaluatedDecisionOutput = EvaluatedDecisionOutput + +@typing.final +class MatchedDecisionRule(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + RULEID_FIELD_NUMBER: builtins.int + RULEINDEX_FIELD_NUMBER: builtins.int + EVALUATEDOUTPUTS_FIELD_NUMBER: builtins.int + ruleId: builtins.str + """the id of the matched rule""" + ruleIndex: builtins.int + """the index of the matched rule""" + @property + def evaluatedOutputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EvaluatedDecisionOutput]: + """the evaluated decision outputs""" + + def __init__( + self, + *, + ruleId: builtins.str = ..., + ruleIndex: builtins.int = ..., + evaluatedOutputs: collections.abc.Iterable[global___EvaluatedDecisionOutput] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["evaluatedOutputs", b"evaluatedOutputs", "ruleId", b"ruleId", "ruleIndex", b"ruleIndex"]) -> None: ... + +global___MatchedDecisionRule = MatchedDecisionRule + +@typing.final +class DeployProcessRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROCESSES_FIELD_NUMBER: builtins.int + @property + def processes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProcessRequestObject]: + """List of process resources to deploy""" + + def __init__( + self, + *, + processes: collections.abc.Iterable[global___ProcessRequestObject] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["processes", b"processes"]) -> None: ... + +global___DeployProcessRequest = DeployProcessRequest + +@typing.final +class ProcessRequestObject(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + DEFINITION_FIELD_NUMBER: builtins.int + name: builtins.str + """the resource basename, e.g. myProcess.bpmn""" + definition: builtins.bytes + """the process definition as a UTF8-encoded string""" + def __init__( + self, + *, + name: builtins.str = ..., + definition: builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["definition", b"definition", "name", b"name"]) -> None: ... + +global___ProcessRequestObject = ProcessRequestObject + +@typing.final +class DeployProcessResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + PROCESSES_FIELD_NUMBER: builtins.int + key: builtins.int + """the unique key identifying the deployment""" + @property + def processes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProcessMetadata]: + """a list of deployed processes""" + + def __init__( + self, + *, + key: builtins.int = ..., + processes: collections.abc.Iterable[global___ProcessMetadata] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "processes", b"processes"]) -> None: ... 
+ +global___DeployProcessResponse = DeployProcessResponse + +@typing.final +class DeployResourceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + RESOURCES_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + tenantId: builtins.str + """the tenant id of the resources to deploy""" + @property + def resources(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Resource]: + """list of resources to deploy""" + + def __init__( + self, + *, + resources: collections.abc.Iterable[global___Resource] | None = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["resources", b"resources", "tenantId", b"tenantId"]) -> None: ... + +global___DeployResourceRequest = DeployResourceRequest + +@typing.final +class Resource(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + CONTENT_FIELD_NUMBER: builtins.int + name: builtins.str + """the resource name, e.g. myProcess.bpmn or myDecision.dmn""" + content: builtins.bytes + """the file content as a UTF8-encoded string""" + def __init__( + self, + *, + name: builtins.str = ..., + content: builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["content", b"content", "name", b"name"]) -> None: ... + +global___Resource = Resource + +@typing.final +class DeployResourceResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + DEPLOYMENTS_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + key: builtins.int + """the unique key identifying the deployment""" + tenantId: builtins.str + """the tenant id of the deployed resources""" + @property + def deployments(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Deployment]: + """a list of deployed resources, e.g. processes""" + + def __init__( + self, + *, + key: builtins.int = ..., + deployments: collections.abc.Iterable[global___Deployment] | None = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["deployments", b"deployments", "key", b"key", "tenantId", b"tenantId"]) -> None: ... + +global___DeployResourceResponse = DeployResourceResponse + +@typing.final +class Deployment(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROCESS_FIELD_NUMBER: builtins.int + DECISION_FIELD_NUMBER: builtins.int + DECISIONREQUIREMENTS_FIELD_NUMBER: builtins.int + FORM_FIELD_NUMBER: builtins.int + @property + def process(self) -> global___ProcessMetadata: + """metadata of a deployed process""" + + @property + def decision(self) -> global___DecisionMetadata: + """metadata of a deployed decision""" + + @property + def decisionRequirements(self) -> global___DecisionRequirementsMetadata: + """metadata of a deployed decision requirements""" + + @property + def form(self) -> global___FormMetadata: + """metadata of a deployed form""" + + def __init__( + self, + *, + process: global___ProcessMetadata | None = ..., + decision: global___DecisionMetadata | None = ..., + decisionRequirements: global___DecisionRequirementsMetadata | None = ..., + form: global___FormMetadata | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing.Literal["Metadata", b"Metadata", "decision", b"decision", "decisionRequirements", b"decisionRequirements", "form", b"form", "process", b"process"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["Metadata", b"Metadata", "decision", b"decision", "decisionRequirements", b"decisionRequirements", "form", b"form", "process", b"process"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["Metadata", b"Metadata"]) -> typing.Literal["process", "decision", "decisionRequirements", "form"] | None: ... + +global___Deployment = Deployment + +@typing.final +class ProcessMetadata(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BPMNPROCESSID_FIELD_NUMBER: builtins.int + VERSION_FIELD_NUMBER: builtins.int + PROCESSDEFINITIONKEY_FIELD_NUMBER: builtins.int + RESOURCENAME_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + bpmnProcessId: builtins.str + """the bpmn process ID, as parsed during deployment; together with the version forms a + unique identifier for a specific process definition + """ + version: builtins.int + """the assigned process version""" + processDefinitionKey: builtins.int + """the assigned key, which acts as a unique identifier for this process""" + resourceName: builtins.str + """the resource name (see: ProcessRequestObject.name) from which this process was + parsed + """ + tenantId: builtins.str + """the tenant id of the deployed process""" + def __init__( + self, + *, + bpmnProcessId: builtins.str = ..., + version: builtins.int = ..., + processDefinitionKey: builtins.int = ..., + resourceName: builtins.str = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["bpmnProcessId", b"bpmnProcessId", "processDefinitionKey", b"processDefinitionKey", "resourceName", b"resourceName", "tenantId", b"tenantId", "version", b"version"]) -> None: ... 
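As an aside for reviewers of these generated stubs: the deployment messages above can be exercised directly against the raw GatewayStub added in this patch. A minimal sketch, assuming a gateway reachable on localhost:26500 and a local order_process.bpmn file (both placeholders, not part of this change):

    import grpc
    from pyzeebe.proto import gateway_pb2, gateway_pb2_grpc

    # Plain insecure channel to a locally running Zeebe gateway (address is an assumption).
    channel = grpc.insecure_channel("localhost:26500")
    stub = gateway_pb2_grpc.GatewayStub(channel)

    # Read the BPMN file and wrap it in a Resource; content carries the raw file bytes.
    with open("order_process.bpmn", "rb") as f:
        resource = gateway_pb2.Resource(name="order_process.bpmn", content=f.read())

    # DeployResource supersedes the deprecated DeployProcess RPC (marked deprecated in the descriptor above).
    response = stub.DeployResource(gateway_pb2.DeployResourceRequest(resources=[resource]))
    for deployment in response.deployments:
        if deployment.HasField("process"):
            print(deployment.process.bpmnProcessId, deployment.process.processDefinitionKey)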
+ +global___ProcessMetadata = ProcessMetadata + +@typing.final +class DecisionMetadata(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DMNDECISIONID_FIELD_NUMBER: builtins.int + DMNDECISIONNAME_FIELD_NUMBER: builtins.int + VERSION_FIELD_NUMBER: builtins.int + DECISIONKEY_FIELD_NUMBER: builtins.int + DMNDECISIONREQUIREMENTSID_FIELD_NUMBER: builtins.int + DECISIONREQUIREMENTSKEY_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + dmnDecisionId: builtins.str + """the dmn decision ID, as parsed during deployment; together with the + versions forms a unique identifier for a specific decision + """ + dmnDecisionName: builtins.str + """the dmn name of the decision, as parsed during deployment""" + version: builtins.int + """the assigned decision version""" + decisionKey: builtins.int + """the assigned decision key, which acts as a unique identifier for this + decision + """ + dmnDecisionRequirementsId: builtins.str + """the dmn ID of the decision requirements graph that this decision is part + of, as parsed during deployment + """ + decisionRequirementsKey: builtins.int + """the assigned key of the decision requirements graph that this decision is + part of + """ + tenantId: builtins.str + """the tenant id of the deployed decision""" + def __init__( + self, + *, + dmnDecisionId: builtins.str = ..., + dmnDecisionName: builtins.str = ..., + version: builtins.int = ..., + decisionKey: builtins.int = ..., + dmnDecisionRequirementsId: builtins.str = ..., + decisionRequirementsKey: builtins.int = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["decisionKey", b"decisionKey", "decisionRequirementsKey", b"decisionRequirementsKey", "dmnDecisionId", b"dmnDecisionId", "dmnDecisionName", b"dmnDecisionName", "dmnDecisionRequirementsId", b"dmnDecisionRequirementsId", "tenantId", b"tenantId", "version", b"version"]) -> None: ... + +global___DecisionMetadata = DecisionMetadata + +@typing.final +class DecisionRequirementsMetadata(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DMNDECISIONREQUIREMENTSID_FIELD_NUMBER: builtins.int + DMNDECISIONREQUIREMENTSNAME_FIELD_NUMBER: builtins.int + VERSION_FIELD_NUMBER: builtins.int + DECISIONREQUIREMENTSKEY_FIELD_NUMBER: builtins.int + RESOURCENAME_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + dmnDecisionRequirementsId: builtins.str + """the dmn decision requirements ID, as parsed during deployment; together + with the versions forms a unique identifier for a specific decision + """ + dmnDecisionRequirementsName: builtins.str + """the dmn name of the decision requirements, as parsed during deployment""" + version: builtins.int + """the assigned decision requirements version""" + decisionRequirementsKey: builtins.int + """the assigned decision requirements key, which acts as a unique identifier + for this decision requirements + """ + resourceName: builtins.str + """the resource name (see: Resource.name) from which this decision + requirements was parsed + """ + tenantId: builtins.str + """the tenant id of the deployed decision requirements""" + def __init__( + self, + *, + dmnDecisionRequirementsId: builtins.str = ..., + dmnDecisionRequirementsName: builtins.str = ..., + version: builtins.int = ..., + decisionRequirementsKey: builtins.int = ..., + resourceName: builtins.str = ..., + tenantId: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["decisionRequirementsKey", b"decisionRequirementsKey", "dmnDecisionRequirementsId", b"dmnDecisionRequirementsId", "dmnDecisionRequirementsName", b"dmnDecisionRequirementsName", "resourceName", b"resourceName", "tenantId", b"tenantId", "version", b"version"]) -> None: ... + +global___DecisionRequirementsMetadata = DecisionRequirementsMetadata + +@typing.final +class FormMetadata(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FORMID_FIELD_NUMBER: builtins.int + VERSION_FIELD_NUMBER: builtins.int + FORMKEY_FIELD_NUMBER: builtins.int + RESOURCENAME_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + formId: builtins.str + """the form ID, as parsed during deployment; together with the + versions forms a unique identifier for a specific form + """ + version: builtins.int + """the assigned form version""" + formKey: builtins.int + """the assigned key, which acts as a unique identifier for this form""" + resourceName: builtins.str + """the resource name""" + tenantId: builtins.str + """the tenant id of the deployed form""" + def __init__( + self, + *, + formId: builtins.str = ..., + version: builtins.int = ..., + formKey: builtins.int = ..., + resourceName: builtins.str = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["formId", b"formId", "formKey", b"formKey", "resourceName", b"resourceName", "tenantId", b"tenantId", "version", b"version"]) -> None: ... + +global___FormMetadata = FormMetadata + +@typing.final +class FailJobRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + JOBKEY_FIELD_NUMBER: builtins.int + RETRIES_FIELD_NUMBER: builtins.int + ERRORMESSAGE_FIELD_NUMBER: builtins.int + RETRYBACKOFF_FIELD_NUMBER: builtins.int + VARIABLES_FIELD_NUMBER: builtins.int + jobKey: builtins.int + """the unique job identifier, as obtained when activating the job""" + retries: builtins.int + """the amount of retries the job should have left""" + errorMessage: builtins.str + """an optional message describing why the job failed + this is particularly useful if a job runs out of retries and an incident is raised, + as it this message can help explain why an incident was raised + """ + retryBackOff: builtins.int + """the backoff timeout (in ms) for the next retry""" + variables: builtins.str + """JSON document that will instantiate the variables at the local scope of the + job's associated task; it must be a JSON object, as variables will be mapped in a + key-value fashion. e.g. { "a": 1, "b": 2 } will create two variables, named "a" and + "b" respectively, with their associated values. [{ "a": 1, "b": 2 }] would not be a + valid argument, as the root of the JSON document is an array and not an object. + """ + def __init__( + self, + *, + jobKey: builtins.int = ..., + retries: builtins.int = ..., + errorMessage: builtins.str = ..., + retryBackOff: builtins.int = ..., + variables: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["errorMessage", b"errorMessage", "jobKey", b"jobKey", "retries", b"retries", "retryBackOff", b"retryBackOff", "variables", b"variables"]) -> None: ... + +global___FailJobRequest = FailJobRequest + +@typing.final +class FailJobResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
+ +global___FailJobResponse = FailJobResponse + +@typing.final +class ThrowErrorRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + JOBKEY_FIELD_NUMBER: builtins.int + ERRORCODE_FIELD_NUMBER: builtins.int + ERRORMESSAGE_FIELD_NUMBER: builtins.int + VARIABLES_FIELD_NUMBER: builtins.int + jobKey: builtins.int + """the unique job identifier, as obtained when activating the job""" + errorCode: builtins.str + """the error code that will be matched with an error catch event""" + errorMessage: builtins.str + """an optional error message that provides additional context""" + variables: builtins.str + """JSON document that will instantiate the variables at the local scope of the + error catch event that catches the thrown error; it must be a JSON object, as variables will be mapped in a + key-value fashion. e.g. { "a": 1, "b": 2 } will create two variables, named "a" and + "b" respectively, with their associated values. [{ "a": 1, "b": 2 }] would not be a + valid argument, as the root of the JSON document is an array and not an object. + """ + def __init__( + self, + *, + jobKey: builtins.int = ..., + errorCode: builtins.str = ..., + errorMessage: builtins.str = ..., + variables: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["errorCode", b"errorCode", "errorMessage", b"errorMessage", "jobKey", b"jobKey", "variables", b"variables"]) -> None: ... + +global___ThrowErrorRequest = ThrowErrorRequest + +@typing.final +class ThrowErrorResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___ThrowErrorResponse = ThrowErrorResponse + +@typing.final +class PublishMessageRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + CORRELATIONKEY_FIELD_NUMBER: builtins.int + TIMETOLIVE_FIELD_NUMBER: builtins.int + MESSAGEID_FIELD_NUMBER: builtins.int + VARIABLES_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + name: builtins.str + """the name of the message""" + correlationKey: builtins.str + """the correlation key of the message""" + timeToLive: builtins.int + """how long the message should be buffered on the broker, in milliseconds""" + messageId: builtins.str + """the unique ID of the message; can be omitted. only useful to ensure only one message + with the given ID will ever be published (during its lifetime) + """ + variables: builtins.str + """the message variables as a JSON document; to be valid, the root of the document must be an + object, e.g. { "a": "foo" }. [ "foo" ] would not be valid. + """ + tenantId: builtins.str + """the tenant id of the message""" + def __init__( + self, + *, + name: builtins.str = ..., + correlationKey: builtins.str = ..., + timeToLive: builtins.int = ..., + messageId: builtins.str = ..., + variables: builtins.str = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["correlationKey", b"correlationKey", "messageId", b"messageId", "name", b"name", "tenantId", b"tenantId", "timeToLive", b"timeToLive", "variables", b"variables"]) -> None: ... 
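A similarly hedged sketch for PublishMessageRequest; the message name, correlation key, message ID, and gateway address below are illustrative only:

    import grpc
    from pyzeebe.proto import gateway_pb2, gateway_pb2_grpc

    stub = gateway_pb2_grpc.GatewayStub(grpc.insecure_channel("localhost:26500"))

    response = stub.PublishMessage(gateway_pb2.PublishMessageRequest(
        name="order-paid",
        correlationKey="order-4711",
        timeToLive=60_000,            # buffer on the broker for up to 60 s
        messageId="order-4711-paid",  # optional; ensures the same message is published only once
        variables='{"amount": 100}',  # root of the JSON document must be an object
    ))
    print(response.key)               # unique key of the published message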
+ +global___PublishMessageRequest = PublishMessageRequest + +@typing.final +class PublishMessageResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + key: builtins.int + """the unique ID of the message that was published""" + tenantId: builtins.str + """the tenant id of the message""" + def __init__( + self, + *, + key: builtins.int = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "tenantId", b"tenantId"]) -> None: ... + +global___PublishMessageResponse = PublishMessageResponse + +@typing.final +class ResolveIncidentRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INCIDENTKEY_FIELD_NUMBER: builtins.int + incidentKey: builtins.int + """the unique ID of the incident to resolve""" + def __init__( + self, + *, + incidentKey: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["incidentKey", b"incidentKey"]) -> None: ... + +global___ResolveIncidentRequest = ResolveIncidentRequest + +@typing.final +class ResolveIncidentResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___ResolveIncidentResponse = ResolveIncidentResponse + +@typing.final +class TopologyRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___TopologyRequest = TopologyRequest + +@typing.final +class TopologyResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BROKERS_FIELD_NUMBER: builtins.int + CLUSTERSIZE_FIELD_NUMBER: builtins.int + PARTITIONSCOUNT_FIELD_NUMBER: builtins.int + REPLICATIONFACTOR_FIELD_NUMBER: builtins.int + GATEWAYVERSION_FIELD_NUMBER: builtins.int + clusterSize: builtins.int + """how many nodes are in the cluster""" + partitionsCount: builtins.int + """how many partitions are spread across the cluster""" + replicationFactor: builtins.int + """configured replication factor for this cluster""" + gatewayVersion: builtins.str + """gateway version""" + @property + def brokers(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BrokerInfo]: + """list of brokers part of this cluster""" + + def __init__( + self, + *, + brokers: collections.abc.Iterable[global___BrokerInfo] | None = ..., + clusterSize: builtins.int = ..., + partitionsCount: builtins.int = ..., + replicationFactor: builtins.int = ..., + gatewayVersion: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["brokers", b"brokers", "clusterSize", b"clusterSize", "gatewayVersion", b"gatewayVersion", "partitionsCount", b"partitionsCount", "replicationFactor", b"replicationFactor"]) -> None: ... 
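And for the Topology RPC (TopologyResponse above, BrokerInfo and Partition just below), a short sketch with the gateway address again assumed:

    import grpc
    from pyzeebe.proto import gateway_pb2, gateway_pb2_grpc

    stub = gateway_pb2_grpc.GatewayStub(grpc.insecure_channel("localhost:26500"))

    topology = stub.Topology(gateway_pb2.TopologyRequest())
    print(topology.clusterSize, topology.partitionsCount, topology.gatewayVersion)
    for broker in topology.brokers:
        for partition in broker.partitions:
            # role is an enum value; Name() maps it back to LEADER/FOLLOWER/INACTIVE.
            role = gateway_pb2.Partition.PartitionBrokerRole.Name(partition.role)
            print(broker.host, broker.port, partition.partitionId, role)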
+ +global___TopologyResponse = TopologyResponse + +@typing.final +class BrokerInfo(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NODEID_FIELD_NUMBER: builtins.int + HOST_FIELD_NUMBER: builtins.int + PORT_FIELD_NUMBER: builtins.int + PARTITIONS_FIELD_NUMBER: builtins.int + VERSION_FIELD_NUMBER: builtins.int + nodeId: builtins.int + """unique (within a cluster) node ID for the broker""" + host: builtins.str + """hostname of the broker""" + port: builtins.int + """port for the broker""" + version: builtins.str + """broker version""" + @property + def partitions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Partition]: + """list of partitions managed or replicated on this broker""" + + def __init__( + self, + *, + nodeId: builtins.int = ..., + host: builtins.str = ..., + port: builtins.int = ..., + partitions: collections.abc.Iterable[global___Partition] | None = ..., + version: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["host", b"host", "nodeId", b"nodeId", "partitions", b"partitions", "port", b"port", "version", b"version"]) -> None: ... + +global___BrokerInfo = BrokerInfo + +@typing.final +class Partition(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _PartitionBrokerRole: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _PartitionBrokerRoleEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Partition._PartitionBrokerRole.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + LEADER: Partition._PartitionBrokerRole.ValueType # 0 + FOLLOWER: Partition._PartitionBrokerRole.ValueType # 1 + INACTIVE: Partition._PartitionBrokerRole.ValueType # 2 + + class PartitionBrokerRole(_PartitionBrokerRole, metaclass=_PartitionBrokerRoleEnumTypeWrapper): + """Describes the Raft role of the broker for a given partition""" + + LEADER: Partition.PartitionBrokerRole.ValueType # 0 + FOLLOWER: Partition.PartitionBrokerRole.ValueType # 1 + INACTIVE: Partition.PartitionBrokerRole.ValueType # 2 + + class _PartitionBrokerHealth: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _PartitionBrokerHealthEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Partition._PartitionBrokerHealth.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + HEALTHY: Partition._PartitionBrokerHealth.ValueType # 0 + UNHEALTHY: Partition._PartitionBrokerHealth.ValueType # 1 + DEAD: Partition._PartitionBrokerHealth.ValueType # 2 + + class PartitionBrokerHealth(_PartitionBrokerHealth, metaclass=_PartitionBrokerHealthEnumTypeWrapper): + """Describes the current health of the partition""" + + HEALTHY: Partition.PartitionBrokerHealth.ValueType # 0 + UNHEALTHY: Partition.PartitionBrokerHealth.ValueType # 1 + DEAD: Partition.PartitionBrokerHealth.ValueType # 2 + + PARTITIONID_FIELD_NUMBER: builtins.int + ROLE_FIELD_NUMBER: builtins.int + HEALTH_FIELD_NUMBER: builtins.int + partitionId: builtins.int + """the unique ID of this partition""" + role: global___Partition.PartitionBrokerRole.ValueType + """the role of the broker for this partition""" + health: global___Partition.PartitionBrokerHealth.ValueType + """the health of this partition""" + def __init__( + self, + *, + partitionId: builtins.int = ..., + role: 
global___Partition.PartitionBrokerRole.ValueType = ..., + health: global___Partition.PartitionBrokerHealth.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["health", b"health", "partitionId", b"partitionId", "role", b"role"]) -> None: ... + +global___Partition = Partition + +@typing.final +class UpdateJobRetriesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + JOBKEY_FIELD_NUMBER: builtins.int + RETRIES_FIELD_NUMBER: builtins.int + jobKey: builtins.int + """the unique job identifier, as obtained through ActivateJobs""" + retries: builtins.int + """the new amount of retries for the job; must be positive""" + def __init__( + self, + *, + jobKey: builtins.int = ..., + retries: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["jobKey", b"jobKey", "retries", b"retries"]) -> None: ... + +global___UpdateJobRetriesRequest = UpdateJobRetriesRequest + +@typing.final +class UpdateJobRetriesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___UpdateJobRetriesResponse = UpdateJobRetriesResponse + +@typing.final +class UpdateJobTimeoutRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + JOBKEY_FIELD_NUMBER: builtins.int + TIMEOUT_FIELD_NUMBER: builtins.int + jobKey: builtins.int + """the unique job identifier, as obtained from ActivateJobsResponse""" + timeout: builtins.int + """the duration of the new timeout in ms, starting from the current moment""" + def __init__( + self, + *, + jobKey: builtins.int = ..., + timeout: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["jobKey", b"jobKey", "timeout", b"timeout"]) -> None: ... + +global___UpdateJobTimeoutRequest = UpdateJobTimeoutRequest + +@typing.final +class UpdateJobTimeoutResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___UpdateJobTimeoutResponse = UpdateJobTimeoutResponse + +@typing.final +class SetVariablesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ELEMENTINSTANCEKEY_FIELD_NUMBER: builtins.int + VARIABLES_FIELD_NUMBER: builtins.int + LOCAL_FIELD_NUMBER: builtins.int + elementInstanceKey: builtins.int + """the unique identifier of a particular element; can be the process instance key (as + obtained during instance creation), or a given element, such as a service task (see + elementInstanceKey on the job message) + """ + variables: builtins.str + """a JSON serialized document describing variables as key value pairs; the root of the document + must be an object + """ + local: builtins.bool + """if true, the variables will be merged strictly into the local scope (as indicated by + elementInstanceKey); this means the variables is not propagated to upper scopes. + for example, let's say we have two scopes, '1' and '2', with each having effective variables as: + 1 => `{ "foo" : 2 }`, and 2 => `{ "bar" : 1 }`. if we send an update request with + elementInstanceKey = 2, variables `{ "foo" : 5 }`, and local is true, then scope 1 will + be unchanged, and scope 2 will now be `{ "bar" : 1, "foo" 5 }`. if local was false, however, + then scope 1 would be `{ "foo": 5 }`, and scope 2 would be `{ "bar" : 1 }`. 
+ """ + def __init__( + self, + *, + elementInstanceKey: builtins.int = ..., + variables: builtins.str = ..., + local: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["elementInstanceKey", b"elementInstanceKey", "local", b"local", "variables", b"variables"]) -> None: ... + +global___SetVariablesRequest = SetVariablesRequest + +@typing.final +class SetVariablesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + key: builtins.int + """the unique key of the set variables command""" + def __init__( + self, + *, + key: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key"]) -> None: ... + +global___SetVariablesResponse = SetVariablesResponse + +@typing.final +class ModifyProcessInstanceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class ActivateInstruction(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ELEMENTID_FIELD_NUMBER: builtins.int + ANCESTORELEMENTINSTANCEKEY_FIELD_NUMBER: builtins.int + VARIABLEINSTRUCTIONS_FIELD_NUMBER: builtins.int + elementId: builtins.str + """the id of the element that should be activated""" + ancestorElementInstanceKey: builtins.int + """the key of the ancestor scope the element instance should be created in; + set to -1 to create the new element instance within an existing element + instance of the flow scope + """ + @property + def variableInstructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModifyProcessInstanceRequest.VariableInstruction]: + """instructions describing which variables should be created""" + + def __init__( + self, + *, + elementId: builtins.str = ..., + ancestorElementInstanceKey: builtins.int = ..., + variableInstructions: collections.abc.Iterable[global___ModifyProcessInstanceRequest.VariableInstruction] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["ancestorElementInstanceKey", b"ancestorElementInstanceKey", "elementId", b"elementId", "variableInstructions", b"variableInstructions"]) -> None: ... + + @typing.final + class VariableInstruction(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VARIABLES_FIELD_NUMBER: builtins.int + SCOPEID_FIELD_NUMBER: builtins.int + variables: builtins.str + """JSON document that will instantiate the variables for the root variable scope of the + process instance; it must be a JSON object, as variables will be mapped in a + key-value fashion. e.g. { "a": 1, "b": 2 } will create two variables, named "a" and + "b" respectively, with their associated values. [{ "a": 1, "b": 2 }] would not be a + valid argument, as the root of the JSON document is an array and not an object. + """ + scopeId: builtins.str + """the id of the element in which scope the variables should be created; + leave empty to create the variables in the global scope of the process instance + """ + def __init__( + self, + *, + variables: builtins.str = ..., + scopeId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["scopeId", b"scopeId", "variables", b"variables"]) -> None: ... 
+ + @typing.final + class TerminateInstruction(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ELEMENTINSTANCEKEY_FIELD_NUMBER: builtins.int + elementInstanceKey: builtins.int + """the id of the element that should be terminated""" + def __init__( + self, + *, + elementInstanceKey: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["elementInstanceKey", b"elementInstanceKey"]) -> None: ... + + PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int + ACTIVATEINSTRUCTIONS_FIELD_NUMBER: builtins.int + TERMINATEINSTRUCTIONS_FIELD_NUMBER: builtins.int + processInstanceKey: builtins.int + """the key of the process instance that should be modified""" + @property + def activateInstructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModifyProcessInstanceRequest.ActivateInstruction]: + """instructions describing which elements should be activated in which scopes, + and which variables should be created + """ + + @property + def terminateInstructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModifyProcessInstanceRequest.TerminateInstruction]: + """instructions describing which elements should be terminated""" + + def __init__( + self, + *, + processInstanceKey: builtins.int = ..., + activateInstructions: collections.abc.Iterable[global___ModifyProcessInstanceRequest.ActivateInstruction] | None = ..., + terminateInstructions: collections.abc.Iterable[global___ModifyProcessInstanceRequest.TerminateInstruction] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["activateInstructions", b"activateInstructions", "processInstanceKey", b"processInstanceKey", "terminateInstructions", b"terminateInstructions"]) -> None: ... + +global___ModifyProcessInstanceRequest = ModifyProcessInstanceRequest + +@typing.final +class ModifyProcessInstanceResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___ModifyProcessInstanceResponse = ModifyProcessInstanceResponse + +@typing.final +class MigrateProcessInstanceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class MigrationPlan(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TARGETPROCESSDEFINITIONKEY_FIELD_NUMBER: builtins.int + MAPPINGINSTRUCTIONS_FIELD_NUMBER: builtins.int + targetProcessDefinitionKey: builtins.int + """the key of process definition to migrate the process instance to""" + @property + def mappingInstructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MigrateProcessInstanceRequest.MappingInstruction]: + """the mapping instructions describe how to map elements from the source process definition to the target process definition""" + + def __init__( + self, + *, + targetProcessDefinitionKey: builtins.int = ..., + mappingInstructions: collections.abc.Iterable[global___MigrateProcessInstanceRequest.MappingInstruction] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["mappingInstructions", b"mappingInstructions", "targetProcessDefinitionKey", b"targetProcessDefinitionKey"]) -> None: ... 
+ + @typing.final + class MappingInstruction(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SOURCEELEMENTID_FIELD_NUMBER: builtins.int + TARGETELEMENTID_FIELD_NUMBER: builtins.int + sourceElementId: builtins.str + """the element id to migrate from""" + targetElementId: builtins.str + """the element id to migrate into""" + def __init__( + self, + *, + sourceElementId: builtins.str = ..., + targetElementId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["sourceElementId", b"sourceElementId", "targetElementId", b"targetElementId"]) -> None: ... + + PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int + MIGRATIONPLAN_FIELD_NUMBER: builtins.int + processInstanceKey: builtins.int + """key of the process instance to migrate""" + @property + def migrationPlan(self) -> global___MigrateProcessInstanceRequest.MigrationPlan: + """the migration plan that defines target process and element mappings""" + + def __init__( + self, + *, + processInstanceKey: builtins.int = ..., + migrationPlan: global___MigrateProcessInstanceRequest.MigrationPlan | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["migrationPlan", b"migrationPlan"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["migrationPlan", b"migrationPlan", "processInstanceKey", b"processInstanceKey"]) -> None: ... + +global___MigrateProcessInstanceRequest = MigrateProcessInstanceRequest + +@typing.final +class MigrateProcessInstanceResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___MigrateProcessInstanceResponse = MigrateProcessInstanceResponse + +@typing.final +class DeleteResourceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + RESOURCEKEY_FIELD_NUMBER: builtins.int + resourceKey: builtins.int + """The key of the resource that should be deleted. This can either be the key + of a process definition, the key of a decision requirements definition or the key of a form. + """ + def __init__( + self, + *, + resourceKey: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["resourceKey", b"resourceKey"]) -> None: ... + +global___DeleteResourceRequest = DeleteResourceRequest + +@typing.final +class DeleteResourceResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___DeleteResourceResponse = DeleteResourceResponse + +@typing.final +class BroadcastSignalRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SIGNALNAME_FIELD_NUMBER: builtins.int + VARIABLES_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + signalName: builtins.str + """The name of the signal""" + variables: builtins.str + """the signal variables as a JSON document; to be valid, the root of the document must be an + object, e.g. { "a": "foo" }. [ "foo" ] would not be valid. + """ + tenantId: builtins.str + """the id of the tenant that owns the signal.""" + def __init__( + self, + *, + signalName: builtins.str = ..., + variables: builtins.str = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["signalName", b"signalName", "tenantId", b"tenantId", "variables", b"variables"]) -> None: ... 
+ +global___BroadcastSignalRequest = BroadcastSignalRequest + +@typing.final +class BroadcastSignalResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + TENANTID_FIELD_NUMBER: builtins.int + key: builtins.int + """the unique ID of the signal that was broadcasted.""" + tenantId: builtins.str + """the tenant id of the signal that was broadcasted.""" + def __init__( + self, + *, + key: builtins.int = ..., + tenantId: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "tenantId", b"tenantId"]) -> None: ... + +global___BroadcastSignalResponse = BroadcastSignalResponse diff --git a/pyzeebe/proto/gateway_pb2_grpc.py b/pyzeebe/proto/gateway_pb2_grpc.py new file mode 100644 index 00000000..6bfd2aa2 --- /dev/null +++ b/pyzeebe/proto/gateway_pb2_grpc.py @@ -0,0 +1,1178 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +import warnings + +from pyzeebe.proto import gateway_pb2 as pyzeebe_dot_proto_dot_gateway__pb2 + +GRPC_GENERATED_VERSION = '1.68.1' +GRPC_VERSION = grpc.__version__ +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + raise RuntimeError( + f'The grpc package installed is at version {GRPC_VERSION},' + + f' but the generated code in pyzeebe/proto/gateway_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' + ) + + +class GatewayStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.ActivateJobs = channel.unary_stream( + '/gateway_protocol.Gateway/ActivateJobs', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsResponse.FromString, + _registered_method=True) + self.StreamActivatedJobs = channel.unary_stream( + '/gateway_protocol.Gateway/StreamActivatedJobs', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.StreamActivatedJobsRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ActivatedJob.FromString, + _registered_method=True) + self.CancelProcessInstance = channel.unary_unary( + '/gateway_protocol.Gateway/CancelProcessInstance', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CancelProcessInstanceRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CancelProcessInstanceResponse.FromString, + _registered_method=True) + self.CompleteJob = channel.unary_unary( + '/gateway_protocol.Gateway/CompleteJob', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CompleteJobRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CompleteJobResponse.FromString, + _registered_method=True) + self.CreateProcessInstance = channel.unary_unary( + '/gateway_protocol.Gateway/CreateProcessInstance', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceResponse.FromString, + _registered_method=True) + self.CreateProcessInstanceWithResult = channel.unary_unary( + '/gateway_protocol.Gateway/CreateProcessInstanceWithResult', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceWithResultRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceWithResultResponse.FromString, + _registered_method=True) + self.EvaluateDecision = channel.unary_unary( + '/gateway_protocol.Gateway/EvaluateDecision', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.EvaluateDecisionRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.EvaluateDecisionResponse.FromString, + _registered_method=True) + self.DeployProcess = channel.unary_unary( + '/gateway_protocol.Gateway/DeployProcess', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployProcessRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployProcessResponse.FromString, + _registered_method=True) + self.DeployResource = channel.unary_unary( + '/gateway_protocol.Gateway/DeployResource', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployResourceRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployResourceResponse.FromString, + _registered_method=True) + self.FailJob = channel.unary_unary( + '/gateway_protocol.Gateway/FailJob', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.FailJobRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.FailJobResponse.FromString, + _registered_method=True) + self.ThrowError = channel.unary_unary( + '/gateway_protocol.Gateway/ThrowError', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ThrowErrorRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ThrowErrorResponse.FromString, + _registered_method=True) + self.PublishMessage = channel.unary_unary( + 
'/gateway_protocol.Gateway/PublishMessage', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.PublishMessageRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.PublishMessageResponse.FromString, + _registered_method=True) + self.ResolveIncident = channel.unary_unary( + '/gateway_protocol.Gateway/ResolveIncident', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ResolveIncidentRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ResolveIncidentResponse.FromString, + _registered_method=True) + self.SetVariables = channel.unary_unary( + '/gateway_protocol.Gateway/SetVariables', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.SetVariablesRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.SetVariablesResponse.FromString, + _registered_method=True) + self.Topology = channel.unary_unary( + '/gateway_protocol.Gateway/Topology', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.TopologyRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.TopologyResponse.FromString, + _registered_method=True) + self.UpdateJobRetries = channel.unary_unary( + '/gateway_protocol.Gateway/UpdateJobRetries', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobRetriesRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobRetriesResponse.FromString, + _registered_method=True) + self.ModifyProcessInstance = channel.unary_unary( + '/gateway_protocol.Gateway/ModifyProcessInstance', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ModifyProcessInstanceRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ModifyProcessInstanceResponse.FromString, + _registered_method=True) + self.MigrateProcessInstance = channel.unary_unary( + '/gateway_protocol.Gateway/MigrateProcessInstance', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.MigrateProcessInstanceRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.MigrateProcessInstanceResponse.FromString, + _registered_method=True) + self.UpdateJobTimeout = channel.unary_unary( + '/gateway_protocol.Gateway/UpdateJobTimeout', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobTimeoutRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobTimeoutResponse.FromString, + _registered_method=True) + self.DeleteResource = channel.unary_unary( + '/gateway_protocol.Gateway/DeleteResource', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.DeleteResourceRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.DeleteResourceResponse.FromString, + _registered_method=True) + self.BroadcastSignal = channel.unary_unary( + '/gateway_protocol.Gateway/BroadcastSignal', + request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.BroadcastSignalRequest.SerializeToString, + response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.BroadcastSignalResponse.FromString, + _registered_method=True) + + +class GatewayServicer(object): + """Missing associated documentation comment in .proto file.""" + + def ActivateJobs(self, request, context): + """ + Iterates through all known partitions round-robin and activates up to the requested + maximum and streams them back to the client as they are activated. 
+ + Errors: + INVALID_ARGUMENT: + - type is blank (empty string, null) + - worker is blank (empty string, null) + - timeout less than 1 + - maxJobsToActivate is less than 1 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamActivatedJobs(self, request, context): + """ + Registers client to a job stream that will stream jobs back to the client as + they become activatable. + + Errors: + INVALID_ARGUMENT: + - type is blank (empty string, null) + - timeout less than 1 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CancelProcessInstance(self, request, context): + """ + Cancels a running process instance + + Errors: + NOT_FOUND: + - no process instance exists with the given key + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CompleteJob(self, request, context): + """ + Completes a job with the given variables, which allows completing the associated service task. + + Errors: + NOT_FOUND: + - no job exists with the given job key. Note that since jobs are removed once completed, + it could be that this job did exist at some point. + + FAILED_PRECONDITION: + - the job was marked as failed. In that case, the related incident must be resolved before + the job can be activated again and completed. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateProcessInstance(self, request, context): + """ + Creates and starts an instance of the specified process. The process definition to use to + create the instance can be specified either using its unique key (as returned by + DeployProcess), or using the BPMN process ID and a version. Pass -1 as the version to use the + latest deployed version. Note that only processes with none start events can be started through + this command. + + Errors: + NOT_FOUND: + - no process with the given key exists (if processDefinitionKey was given) + - no process with the given process ID exists (if bpmnProcessId was given but version was -1) + - no process with the given process ID and version exists (if both bpmnProcessId and version were given) + + FAILED_PRECONDITION: + - the process definition does not contain a none start event; only processes with none + start event can be started manually. + + INVALID_ARGUMENT: + - the given variables argument is not a valid JSON document; it is expected to be a valid + JSON document where the root node is an object. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateProcessInstanceWithResult(self, request, context): + """ + Behaves similarly to `rpc CreateProcessInstance`, except that a successful response is received when the process completes successfully. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def EvaluateDecision(self, request, context): + """ + Evaluates a decision. The decision to evaluate can be specified either by + using its unique key (as returned by DeployResource), or using the decision + ID. 
When using the decision ID, the latest deployed version of the decision + is used. + + Errors: + INVALID_ARGUMENT: + - no decision with the given key exists (if decisionKey was given) + - no decision with the given decision ID exists (if decisionId was given) + - both decision ID and decision KEY were provided, or are missing + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeployProcess(self, request, context): + """ + Deploys one or more processes to Zeebe. Note that this is an atomic call, + i.e. either all processes are deployed, or none of them are. + + Errors: + INVALID_ARGUMENT: + - no resources given. + - if at least one resource is invalid. A resource is considered invalid if: + - the resource data is not deserializable (e.g. detected as BPMN, but it's broken XML) + - the process is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task) + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeployResource(self, request, context): + """ + Deploys one or more resources (e.g. processes or decision models) to Zeebe. + Note that this is an atomic call, i.e. either all resources are deployed, or none of them are. + + Errors: + PERMISSION_DENIED: + - if a deployment to an unauthorized tenant is performed + INVALID_ARGUMENT: + - no resources given. + - if at least one resource is invalid. A resource is considered invalid if: + - the content is not deserializable (e.g. detected as BPMN, but it's broken XML) + - the content is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task) + - if multi-tenancy is enabled, and: + - a tenant id is not provided + - a tenant id with an invalid format is provided + - if multi-tenancy is disabled and a tenant id is provided + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def FailJob(self, request, context): + """ + Marks the job as failed; if the retries argument is positive, then the job will be immediately + activatable again, and a worker could try again to process it. If it is zero or negative however, + an incident will be raised, tagged with the given errorMessage, and the job will not be + activatable until the incident is resolved. + + Errors: + NOT_FOUND: + - no job was found with the given key + + FAILED_PRECONDITION: + - the job was not activated + - the job is already in a failed state, i.e. ran out of retries + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ThrowError(self, request, context): + """ + Reports a business error (i.e. non-technical) that occurs while processing a job. The error is handled in the process by an error catch event. If there is no error catch event with the specified errorCode then an incident will be raised instead. + + Errors: + NOT_FOUND: + - no job was found with the given key + + FAILED_PRECONDITION: + - the job is not in an activated state + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def PublishMessage(self, request, context): + """ + Publishes a single message. 
Messages are published to specific partitions computed from their + correlation keys. + + Errors: + ALREADY_EXISTS: + - a message with the same ID was previously published (and is still alive) + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ResolveIncident(self, request, context): + """ + Resolves a given incident. This simply marks the incident as resolved; most likely a call to + UpdateJobRetries or SetVariables will be necessary to actually resolve the + problem, following by this call. + + Errors: + NOT_FOUND: + - no incident with the given key exists + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetVariables(self, request, context): + """ + Updates all the variables of a particular scope (e.g. process instance, flow element instance) + from the given JSON document. + + Errors: + NOT_FOUND: + - no element with the given elementInstanceKey exists + INVALID_ARGUMENT: + - the given variables document is not a valid JSON document; valid documents are expected to + be JSON documents where the root node is an object. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Topology(self, request, context): + """ + Obtains the current topology of the cluster the gateway is part of. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateJobRetries(self, request, context): + """ + Updates the number of retries a job has left. This is mostly useful for jobs that have run out of + retries, should the underlying problem be solved. + + Errors: + NOT_FOUND: + - no job exists with the given key + + INVALID_ARGUMENT: + - retries is not greater than 0 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyProcessInstance(self, request, context): + """ + Modifies the process instance. This is done by activating and/or terminating specific elements of the instance. + + Errors: + NOT_FOUND: + - no process instance exists with the given key + + FAILED_PRECONDITION: + - trying to activate element inside of a multi-instance + + INVALID_ARGUMENT: + - activating or terminating unknown element + - ancestor of element for activation doesn't exist + - scope of variable is unknown + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def MigrateProcessInstance(self, request, context): + """ + Migrates the process instance to the specified process definition. + In simple terms, this is handled by updating the active element's process. + + Errors: + NOT_FOUND: + - no process instance exists with the given key, or it is not active + - no process definition exists with the given target definition key + - no process instance exists with the given key for the tenants the user is authorized to work with. 
+ + FAILED_PRECONDITION: + - not all active elements in the given process instance are mapped to the elements in the target process definition + - a mapping instruction changes the type of an element or event + - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on) + - a mapping instruction refers to element in unsupported scenarios. + (i.e. migration is not supported when process instance or target process elements contains event subscriptions) + + INVALID_ARGUMENT: + - A `sourceElementId` does not refer to an element in the process instance's process definition + - A `targetElementId` does not refer to an element in the target process definition + - A `sourceElementId` is mapped by multiple mapping instructions. + For example, the engine cannot determine how to migrate a process instance when the instructions are: [A->B, A->C]. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateJobTimeout(self, request, context): + """ + Updates the deadline of a job using the timeout (in ms) provided. This can be used + for extending or shortening the job deadline. + + Errors: + NOT_FOUND: + - no job exists with the given key + + INVALID_STATE: + - no deadline exists for the given job key + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteResource(self, request, context): + """ + Deletes a resource from the state. Once a resource has been deleted it cannot + be recovered. If the resource needs to be available again, a new deployment + of the resource is required. + + Deleting a process will cancel any running instances of this process + definition. New instances of a deleted process are created using + the lastest version that hasn't been deleted. Creating a new + process instance is impossible when all versions have been + deleted. + + Deleting a decision requirement definitions could cause incidents in process + instances referencing these decisions in a business rule task. A decision + will be evaluated with the latest version that hasn't been deleted. If all + versions of a decision have been deleted the evaluation is rejected. + + Errors: + NOT_FOUND: + - No resource exists with the given key + + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def BroadcastSignal(self, request, context): + """ + Broadcasts a signal. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_GatewayServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ActivateJobs': grpc.unary_stream_rpc_method_handler( + servicer.ActivateJobs, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsResponse.SerializeToString, + ), + 'StreamActivatedJobs': grpc.unary_stream_rpc_method_handler( + servicer.StreamActivatedJobs, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.StreamActivatedJobsRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ActivatedJob.SerializeToString, + ), + 'CancelProcessInstance': grpc.unary_unary_rpc_method_handler( + servicer.CancelProcessInstance, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CancelProcessInstanceRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CancelProcessInstanceResponse.SerializeToString, + ), + 'CompleteJob': grpc.unary_unary_rpc_method_handler( + servicer.CompleteJob, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CompleteJobRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CompleteJobResponse.SerializeToString, + ), + 'CreateProcessInstance': grpc.unary_unary_rpc_method_handler( + servicer.CreateProcessInstance, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceResponse.SerializeToString, + ), + 'CreateProcessInstanceWithResult': grpc.unary_unary_rpc_method_handler( + servicer.CreateProcessInstanceWithResult, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceWithResultRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceWithResultResponse.SerializeToString, + ), + 'EvaluateDecision': grpc.unary_unary_rpc_method_handler( + servicer.EvaluateDecision, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.EvaluateDecisionRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.EvaluateDecisionResponse.SerializeToString, + ), + 'DeployProcess': grpc.unary_unary_rpc_method_handler( + servicer.DeployProcess, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployProcessRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployProcessResponse.SerializeToString, + ), + 'DeployResource': grpc.unary_unary_rpc_method_handler( + servicer.DeployResource, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployResourceRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployResourceResponse.SerializeToString, + ), + 'FailJob': grpc.unary_unary_rpc_method_handler( + servicer.FailJob, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.FailJobRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.FailJobResponse.SerializeToString, + ), + 'ThrowError': grpc.unary_unary_rpc_method_handler( + servicer.ThrowError, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ThrowErrorRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ThrowErrorResponse.SerializeToString, + ), + 'PublishMessage': grpc.unary_unary_rpc_method_handler( + servicer.PublishMessage, + 
request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.PublishMessageRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.PublishMessageResponse.SerializeToString, + ), + 'ResolveIncident': grpc.unary_unary_rpc_method_handler( + servicer.ResolveIncident, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ResolveIncidentRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ResolveIncidentResponse.SerializeToString, + ), + 'SetVariables': grpc.unary_unary_rpc_method_handler( + servicer.SetVariables, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.SetVariablesRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.SetVariablesResponse.SerializeToString, + ), + 'Topology': grpc.unary_unary_rpc_method_handler( + servicer.Topology, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.TopologyRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.TopologyResponse.SerializeToString, + ), + 'UpdateJobRetries': grpc.unary_unary_rpc_method_handler( + servicer.UpdateJobRetries, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobRetriesRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobRetriesResponse.SerializeToString, + ), + 'ModifyProcessInstance': grpc.unary_unary_rpc_method_handler( + servicer.ModifyProcessInstance, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ModifyProcessInstanceRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ModifyProcessInstanceResponse.SerializeToString, + ), + 'MigrateProcessInstance': grpc.unary_unary_rpc_method_handler( + servicer.MigrateProcessInstance, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.MigrateProcessInstanceRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.MigrateProcessInstanceResponse.SerializeToString, + ), + 'UpdateJobTimeout': grpc.unary_unary_rpc_method_handler( + servicer.UpdateJobTimeout, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobTimeoutRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobTimeoutResponse.SerializeToString, + ), + 'DeleteResource': grpc.unary_unary_rpc_method_handler( + servicer.DeleteResource, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.DeleteResourceRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.DeleteResourceResponse.SerializeToString, + ), + 'BroadcastSignal': grpc.unary_unary_rpc_method_handler( + servicer.BroadcastSignal, + request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.BroadcastSignalRequest.FromString, + response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.BroadcastSignalResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'gateway_protocol.Gateway', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('gateway_protocol.Gateway', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
+class Gateway(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def ActivateJobs(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream( + request, + target, + '/gateway_protocol.Gateway/ActivateJobs', + pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def StreamActivatedJobs(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream( + request, + target, + '/gateway_protocol.Gateway/StreamActivatedJobs', + pyzeebe_dot_proto_dot_gateway__pb2.StreamActivatedJobsRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.ActivatedJob.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CancelProcessInstance(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/CancelProcessInstance', + pyzeebe_dot_proto_dot_gateway__pb2.CancelProcessInstanceRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.CancelProcessInstanceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CompleteJob(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/CompleteJob', + pyzeebe_dot_proto_dot_gateway__pb2.CompleteJobRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.CompleteJobResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateProcessInstance(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/CreateProcessInstance', + pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateProcessInstanceWithResult(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + 
target, + '/gateway_protocol.Gateway/CreateProcessInstanceWithResult', + pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceWithResultRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceWithResultResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def EvaluateDecision(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/EvaluateDecision', + pyzeebe_dot_proto_dot_gateway__pb2.EvaluateDecisionRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.EvaluateDecisionResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeployProcess(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/DeployProcess', + pyzeebe_dot_proto_dot_gateway__pb2.DeployProcessRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.DeployProcessResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeployResource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/DeployResource', + pyzeebe_dot_proto_dot_gateway__pb2.DeployResourceRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.DeployResourceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def FailJob(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/FailJob', + pyzeebe_dot_proto_dot_gateway__pb2.FailJobRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.FailJobResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ThrowError(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/ThrowError', + pyzeebe_dot_proto_dot_gateway__pb2.ThrowErrorRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.ThrowErrorResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def PublishMessage(request, + target, + 
options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/PublishMessage', + pyzeebe_dot_proto_dot_gateway__pb2.PublishMessageRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.PublishMessageResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ResolveIncident(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/ResolveIncident', + pyzeebe_dot_proto_dot_gateway__pb2.ResolveIncidentRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.ResolveIncidentResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def SetVariables(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/SetVariables', + pyzeebe_dot_proto_dot_gateway__pb2.SetVariablesRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.SetVariablesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def Topology(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/Topology', + pyzeebe_dot_proto_dot_gateway__pb2.TopologyRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.TopologyResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateJobRetries(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/UpdateJobRetries', + pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobRetriesRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobRetriesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ModifyProcessInstance(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/ModifyProcessInstance', + pyzeebe_dot_proto_dot_gateway__pb2.ModifyProcessInstanceRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.ModifyProcessInstanceResponse.FromString, 
+ options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def MigrateProcessInstance(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/MigrateProcessInstance', + pyzeebe_dot_proto_dot_gateway__pb2.MigrateProcessInstanceRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.MigrateProcessInstanceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateJobTimeout(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/UpdateJobTimeout', + pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobTimeoutRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobTimeoutResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteResource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/DeleteResource', + pyzeebe_dot_proto_dot_gateway__pb2.DeleteResourceRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.DeleteResourceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def BroadcastSignal(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/gateway_protocol.Gateway/BroadcastSignal', + pyzeebe_dot_proto_dot_gateway__pb2.BroadcastSignalRequest.SerializeToString, + pyzeebe_dot_proto_dot_gateway__pb2.BroadcastSignalResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/pyzeebe/proto/gateway_pb2_grpc.pyi b/pyzeebe/proto/gateway_pb2_grpc.pyi new file mode 100644 index 00000000..277c7961 --- /dev/null +++ b/pyzeebe/proto/gateway_pb2_grpc.pyi @@ -0,0 +1,1104 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" + +import abc +import collections.abc +import grpc +import grpc.aio +import pyzeebe.proto.gateway_pb2 +import typing + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... + +class GatewayStub: + def __init__(self, channel: typing.Union[grpc.Channel, grpc.aio.Channel]) -> None: ... 
+ ActivateJobs: grpc.UnaryStreamMultiCallable[ + pyzeebe.proto.gateway_pb2.ActivateJobsRequest, + pyzeebe.proto.gateway_pb2.ActivateJobsResponse, + ] + """ + Iterates through all known partitions round-robin and activates up to the requested + maximum and streams them back to the client as they are activated. + + Errors: + INVALID_ARGUMENT: + - type is blank (empty string, null) + - worker is blank (empty string, null) + - timeout less than 1 + - maxJobsToActivate is less than 1 + """ + + StreamActivatedJobs: grpc.UnaryStreamMultiCallable[ + pyzeebe.proto.gateway_pb2.StreamActivatedJobsRequest, + pyzeebe.proto.gateway_pb2.ActivatedJob, + ] + """ + Registers client to a job stream that will stream jobs back to the client as + they become activatable. + + Errors: + INVALID_ARGUMENT: + - type is blank (empty string, null) + - timeout less than 1 + """ + + CancelProcessInstance: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.CancelProcessInstanceRequest, + pyzeebe.proto.gateway_pb2.CancelProcessInstanceResponse, + ] + """ + Cancels a running process instance + + Errors: + NOT_FOUND: + - no process instance exists with the given key + """ + + CompleteJob: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.CompleteJobRequest, + pyzeebe.proto.gateway_pb2.CompleteJobResponse, + ] + """ + Completes a job with the given variables, which allows completing the associated service task. + + Errors: + NOT_FOUND: + - no job exists with the given job key. Note that since jobs are removed once completed, + it could be that this job did exist at some point. + + FAILED_PRECONDITION: + - the job was marked as failed. In that case, the related incident must be resolved before + the job can be activated again and completed. + """ + + CreateProcessInstance: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.CreateProcessInstanceRequest, + pyzeebe.proto.gateway_pb2.CreateProcessInstanceResponse, + ] + """ + Creates and starts an instance of the specified process. The process definition to use to + create the instance can be specified either using its unique key (as returned by + DeployProcess), or using the BPMN process ID and a version. Pass -1 as the version to use the + latest deployed version. Note that only processes with none start events can be started through + this command. + + Errors: + NOT_FOUND: + - no process with the given key exists (if processDefinitionKey was given) + - no process with the given process ID exists (if bpmnProcessId was given but version was -1) + - no process with the given process ID and version exists (if both bpmnProcessId and version were given) + + FAILED_PRECONDITION: + - the process definition does not contain a none start event; only processes with none + start event can be started manually. + + INVALID_ARGUMENT: + - the given variables argument is not a valid JSON document; it is expected to be a valid + JSON document where the root node is an object. + """ + + CreateProcessInstanceWithResult: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultRequest, + pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultResponse, + ] + """ + Behaves similarly to `rpc CreateProcessInstance`, except that a successful response is received when the process completes successfully. + """ + + EvaluateDecision: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.EvaluateDecisionRequest, + pyzeebe.proto.gateway_pb2.EvaluateDecisionResponse, + ] + """ + Evaluates a decision. 
The decision to evaluate can be specified either by + using its unique key (as returned by DeployResource), or using the decision + ID. When using the decision ID, the latest deployed version of the decision + is used. + + Errors: + INVALID_ARGUMENT: + - no decision with the given key exists (if decisionKey was given) + - no decision with the given decision ID exists (if decisionId was given) + - both decision ID and decision KEY were provided, or are missing + """ + + DeployProcess: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.DeployProcessRequest, + pyzeebe.proto.gateway_pb2.DeployProcessResponse, + ] + """ + Deploys one or more processes to Zeebe. Note that this is an atomic call, + i.e. either all processes are deployed, or none of them are. + + Errors: + INVALID_ARGUMENT: + - no resources given. + - if at least one resource is invalid. A resource is considered invalid if: + - the resource data is not deserializable (e.g. detected as BPMN, but it's broken XML) + - the process is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task) + """ + + DeployResource: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.DeployResourceRequest, + pyzeebe.proto.gateway_pb2.DeployResourceResponse, + ] + """ + Deploys one or more resources (e.g. processes or decision models) to Zeebe. + Note that this is an atomic call, i.e. either all resources are deployed, or none of them are. + + Errors: + PERMISSION_DENIED: + - if a deployment to an unauthorized tenant is performed + INVALID_ARGUMENT: + - no resources given. + - if at least one resource is invalid. A resource is considered invalid if: + - the content is not deserializable (e.g. detected as BPMN, but it's broken XML) + - the content is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task) + - if multi-tenancy is enabled, and: + - a tenant id is not provided + - a tenant id with an invalid format is provided + - if multi-tenancy is disabled and a tenant id is provided + """ + + FailJob: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.FailJobRequest, + pyzeebe.proto.gateway_pb2.FailJobResponse, + ] + """ + Marks the job as failed; if the retries argument is positive, then the job will be immediately + activatable again, and a worker could try again to process it. If it is zero or negative however, + an incident will be raised, tagged with the given errorMessage, and the job will not be + activatable until the incident is resolved. + + Errors: + NOT_FOUND: + - no job was found with the given key + + FAILED_PRECONDITION: + - the job was not activated + - the job is already in a failed state, i.e. ran out of retries + """ + + ThrowError: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.ThrowErrorRequest, + pyzeebe.proto.gateway_pb2.ThrowErrorResponse, + ] + """ + Reports a business error (i.e. non-technical) that occurs while processing a job. The error is handled in the process by an error catch event. If there is no error catch event with the specified errorCode then an incident will be raised instead. + + Errors: + NOT_FOUND: + - no job was found with the given key + + FAILED_PRECONDITION: + - the job is not in an activated state + """ + + PublishMessage: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.PublishMessageRequest, + pyzeebe.proto.gateway_pb2.PublishMessageResponse, + ] + """ + Publishes a single message. Messages are published to specific partitions computed from their + correlation keys. 
+ + Errors: + ALREADY_EXISTS: + - a message with the same ID was previously published (and is still alive) + """ + + ResolveIncident: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.ResolveIncidentRequest, + pyzeebe.proto.gateway_pb2.ResolveIncidentResponse, + ] + """ + Resolves a given incident. This simply marks the incident as resolved; most likely a call to + UpdateJobRetries or SetVariables will be necessary to actually resolve the + problem, following by this call. + + Errors: + NOT_FOUND: + - no incident with the given key exists + """ + + SetVariables: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.SetVariablesRequest, + pyzeebe.proto.gateway_pb2.SetVariablesResponse, + ] + """ + Updates all the variables of a particular scope (e.g. process instance, flow element instance) + from the given JSON document. + + Errors: + NOT_FOUND: + - no element with the given elementInstanceKey exists + INVALID_ARGUMENT: + - the given variables document is not a valid JSON document; valid documents are expected to + be JSON documents where the root node is an object. + """ + + Topology: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.TopologyRequest, + pyzeebe.proto.gateway_pb2.TopologyResponse, + ] + """ + Obtains the current topology of the cluster the gateway is part of. + """ + + UpdateJobRetries: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.UpdateJobRetriesRequest, + pyzeebe.proto.gateway_pb2.UpdateJobRetriesResponse, + ] + """ + Updates the number of retries a job has left. This is mostly useful for jobs that have run out of + retries, should the underlying problem be solved. + + Errors: + NOT_FOUND: + - no job exists with the given key + + INVALID_ARGUMENT: + - retries is not greater than 0 + """ + + ModifyProcessInstance: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.ModifyProcessInstanceRequest, + pyzeebe.proto.gateway_pb2.ModifyProcessInstanceResponse, + ] + """ + Modifies the process instance. This is done by activating and/or terminating specific elements of the instance. + + Errors: + NOT_FOUND: + - no process instance exists with the given key + + FAILED_PRECONDITION: + - trying to activate element inside of a multi-instance + + INVALID_ARGUMENT: + - activating or terminating unknown element + - ancestor of element for activation doesn't exist + - scope of variable is unknown + """ + + MigrateProcessInstance: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.MigrateProcessInstanceRequest, + pyzeebe.proto.gateway_pb2.MigrateProcessInstanceResponse, + ] + """ + Migrates the process instance to the specified process definition. + In simple terms, this is handled by updating the active element's process. + + Errors: + NOT_FOUND: + - no process instance exists with the given key, or it is not active + - no process definition exists with the given target definition key + - no process instance exists with the given key for the tenants the user is authorized to work with. + + FAILED_PRECONDITION: + - not all active elements in the given process instance are mapped to the elements in the target process definition + - a mapping instruction changes the type of an element or event + - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on) + - a mapping instruction refers to element in unsupported scenarios. + (i.e. 
migration is not supported when process instance or target process elements contains event subscriptions) + + INVALID_ARGUMENT: + - A `sourceElementId` does not refer to an element in the process instance's process definition + - A `targetElementId` does not refer to an element in the target process definition + - A `sourceElementId` is mapped by multiple mapping instructions. + For example, the engine cannot determine how to migrate a process instance when the instructions are: [A->B, A->C]. + """ + + UpdateJobTimeout: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.UpdateJobTimeoutRequest, + pyzeebe.proto.gateway_pb2.UpdateJobTimeoutResponse, + ] + """ + Updates the deadline of a job using the timeout (in ms) provided. This can be used + for extending or shortening the job deadline. + + Errors: + NOT_FOUND: + - no job exists with the given key + + INVALID_STATE: + - no deadline exists for the given job key + """ + + DeleteResource: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.DeleteResourceRequest, + pyzeebe.proto.gateway_pb2.DeleteResourceResponse, + ] + """ + Deletes a resource from the state. Once a resource has been deleted it cannot + be recovered. If the resource needs to be available again, a new deployment + of the resource is required. + + Deleting a process will cancel any running instances of this process + definition. New instances of a deleted process are created using + the lastest version that hasn't been deleted. Creating a new + process instance is impossible when all versions have been + deleted. + + Deleting a decision requirement definitions could cause incidents in process + instances referencing these decisions in a business rule task. A decision + will be evaluated with the latest version that hasn't been deleted. If all + versions of a decision have been deleted the evaluation is rejected. + + Errors: + NOT_FOUND: + - No resource exists with the given key + """ + + BroadcastSignal: grpc.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.BroadcastSignalRequest, + pyzeebe.proto.gateway_pb2.BroadcastSignalResponse, + ] + """ + Broadcasts a signal. + """ + +class GatewayAsyncStub: + ActivateJobs: grpc.aio.UnaryStreamMultiCallable[ + pyzeebe.proto.gateway_pb2.ActivateJobsRequest, + pyzeebe.proto.gateway_pb2.ActivateJobsResponse, + ] + """ + Iterates through all known partitions round-robin and activates up to the requested + maximum and streams them back to the client as they are activated. + + Errors: + INVALID_ARGUMENT: + - type is blank (empty string, null) + - worker is blank (empty string, null) + - timeout less than 1 + - maxJobsToActivate is less than 1 + """ + + StreamActivatedJobs: grpc.aio.UnaryStreamMultiCallable[ + pyzeebe.proto.gateway_pb2.StreamActivatedJobsRequest, + pyzeebe.proto.gateway_pb2.ActivatedJob, + ] + """ + Registers client to a job stream that will stream jobs back to the client as + they become activatable. 
+ + Errors: + INVALID_ARGUMENT: + - type is blank (empty string, null) + - timeout less than 1 + """ + + CancelProcessInstance: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.CancelProcessInstanceRequest, + pyzeebe.proto.gateway_pb2.CancelProcessInstanceResponse, + ] + """ + Cancels a running process instance + + Errors: + NOT_FOUND: + - no process instance exists with the given key + """ + + CompleteJob: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.CompleteJobRequest, + pyzeebe.proto.gateway_pb2.CompleteJobResponse, + ] + """ + Completes a job with the given variables, which allows completing the associated service task. + + Errors: + NOT_FOUND: + - no job exists with the given job key. Note that since jobs are removed once completed, + it could be that this job did exist at some point. + + FAILED_PRECONDITION: + - the job was marked as failed. In that case, the related incident must be resolved before + the job can be activated again and completed. + """ + + CreateProcessInstance: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.CreateProcessInstanceRequest, + pyzeebe.proto.gateway_pb2.CreateProcessInstanceResponse, + ] + """ + Creates and starts an instance of the specified process. The process definition to use to + create the instance can be specified either using its unique key (as returned by + DeployProcess), or using the BPMN process ID and a version. Pass -1 as the version to use the + latest deployed version. Note that only processes with none start events can be started through + this command. + + Errors: + NOT_FOUND: + - no process with the given key exists (if processDefinitionKey was given) + - no process with the given process ID exists (if bpmnProcessId was given but version was -1) + - no process with the given process ID and version exists (if both bpmnProcessId and version were given) + + FAILED_PRECONDITION: + - the process definition does not contain a none start event; only processes with none + start event can be started manually. + + INVALID_ARGUMENT: + - the given variables argument is not a valid JSON document; it is expected to be a valid + JSON document where the root node is an object. + """ + + CreateProcessInstanceWithResult: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultRequest, + pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultResponse, + ] + """ + Behaves similarly to `rpc CreateProcessInstance`, except that a successful response is received when the process completes successfully. + """ + + EvaluateDecision: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.EvaluateDecisionRequest, + pyzeebe.proto.gateway_pb2.EvaluateDecisionResponse, + ] + """ + Evaluates a decision. The decision to evaluate can be specified either by + using its unique key (as returned by DeployResource), or using the decision + ID. When using the decision ID, the latest deployed version of the decision + is used. + + Errors: + INVALID_ARGUMENT: + - no decision with the given key exists (if decisionKey was given) + - no decision with the given decision ID exists (if decisionId was given) + - both decision ID and decision KEY were provided, or are missing + """ + + DeployProcess: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.DeployProcessRequest, + pyzeebe.proto.gateway_pb2.DeployProcessResponse, + ] + """ + Deploys one or more processes to Zeebe. Note that this is an atomic call, + i.e. either all processes are deployed, or none of them are. 
+ + Errors: + INVALID_ARGUMENT: + - no resources given. + - if at least one resource is invalid. A resource is considered invalid if: + - the resource data is not deserializable (e.g. detected as BPMN, but it's broken XML) + - the process is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task) + """ + + DeployResource: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.DeployResourceRequest, + pyzeebe.proto.gateway_pb2.DeployResourceResponse, + ] + """ + Deploys one or more resources (e.g. processes or decision models) to Zeebe. + Note that this is an atomic call, i.e. either all resources are deployed, or none of them are. + + Errors: + PERMISSION_DENIED: + - if a deployment to an unauthorized tenant is performed + INVALID_ARGUMENT: + - no resources given. + - if at least one resource is invalid. A resource is considered invalid if: + - the content is not deserializable (e.g. detected as BPMN, but it's broken XML) + - the content is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task) + - if multi-tenancy is enabled, and: + - a tenant id is not provided + - a tenant id with an invalid format is provided + - if multi-tenancy is disabled and a tenant id is provided + """ + + FailJob: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.FailJobRequest, + pyzeebe.proto.gateway_pb2.FailJobResponse, + ] + """ + Marks the job as failed; if the retries argument is positive, then the job will be immediately + activatable again, and a worker could try again to process it. If it is zero or negative however, + an incident will be raised, tagged with the given errorMessage, and the job will not be + activatable until the incident is resolved. + + Errors: + NOT_FOUND: + - no job was found with the given key + + FAILED_PRECONDITION: + - the job was not activated + - the job is already in a failed state, i.e. ran out of retries + """ + + ThrowError: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.ThrowErrorRequest, + pyzeebe.proto.gateway_pb2.ThrowErrorResponse, + ] + """ + Reports a business error (i.e. non-technical) that occurs while processing a job. The error is handled in the process by an error catch event. If there is no error catch event with the specified errorCode then an incident will be raised instead. + + Errors: + NOT_FOUND: + - no job was found with the given key + + FAILED_PRECONDITION: + - the job is not in an activated state + """ + + PublishMessage: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.PublishMessageRequest, + pyzeebe.proto.gateway_pb2.PublishMessageResponse, + ] + """ + Publishes a single message. Messages are published to specific partitions computed from their + correlation keys. + + Errors: + ALREADY_EXISTS: + - a message with the same ID was previously published (and is still alive) + """ + + ResolveIncident: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.ResolveIncidentRequest, + pyzeebe.proto.gateway_pb2.ResolveIncidentResponse, + ] + """ + Resolves a given incident. This simply marks the incident as resolved; most likely a call to + UpdateJobRetries or SetVariables will be necessary to actually resolve the + problem, following by this call. + + Errors: + NOT_FOUND: + - no incident with the given key exists + """ + + SetVariables: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.SetVariablesRequest, + pyzeebe.proto.gateway_pb2.SetVariablesResponse, + ] + """ + Updates all the variables of a particular scope (e.g. 
process instance, flow element instance) + from the given JSON document. + + Errors: + NOT_FOUND: + - no element with the given elementInstanceKey exists + INVALID_ARGUMENT: + - the given variables document is not a valid JSON document; valid documents are expected to + be JSON documents where the root node is an object. + """ + + Topology: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.TopologyRequest, + pyzeebe.proto.gateway_pb2.TopologyResponse, + ] + """ + Obtains the current topology of the cluster the gateway is part of. + """ + + UpdateJobRetries: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.UpdateJobRetriesRequest, + pyzeebe.proto.gateway_pb2.UpdateJobRetriesResponse, + ] + """ + Updates the number of retries a job has left. This is mostly useful for jobs that have run out of + retries, should the underlying problem be solved. + + Errors: + NOT_FOUND: + - no job exists with the given key + + INVALID_ARGUMENT: + - retries is not greater than 0 + """ + + ModifyProcessInstance: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.ModifyProcessInstanceRequest, + pyzeebe.proto.gateway_pb2.ModifyProcessInstanceResponse, + ] + """ + Modifies the process instance. This is done by activating and/or terminating specific elements of the instance. + + Errors: + NOT_FOUND: + - no process instance exists with the given key + + FAILED_PRECONDITION: + - trying to activate element inside of a multi-instance + + INVALID_ARGUMENT: + - activating or terminating unknown element + - ancestor of element for activation doesn't exist + - scope of variable is unknown + """ + + MigrateProcessInstance: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.MigrateProcessInstanceRequest, + pyzeebe.proto.gateway_pb2.MigrateProcessInstanceResponse, + ] + """ + Migrates the process instance to the specified process definition. + In simple terms, this is handled by updating the active element's process. + + Errors: + NOT_FOUND: + - no process instance exists with the given key, or it is not active + - no process definition exists with the given target definition key + - no process instance exists with the given key for the tenants the user is authorized to work with. + + FAILED_PRECONDITION: + - not all active elements in the given process instance are mapped to the elements in the target process definition + - a mapping instruction changes the type of an element or event + - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on) + - a mapping instruction refers to element in unsupported scenarios. + (i.e. migration is not supported when process instance or target process elements contains event subscriptions) + + INVALID_ARGUMENT: + - A `sourceElementId` does not refer to an element in the process instance's process definition + - A `targetElementId` does not refer to an element in the target process definition + - A `sourceElementId` is mapped by multiple mapping instructions. + For example, the engine cannot determine how to migrate a process instance when the instructions are: [A->B, A->C]. + """ + + UpdateJobTimeout: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.UpdateJobTimeoutRequest, + pyzeebe.proto.gateway_pb2.UpdateJobTimeoutResponse, + ] + """ + Updates the deadline of a job using the timeout (in ms) provided. This can be used + for extending or shortening the job deadline. 
+ + Errors: + NOT_FOUND: + - no job exists with the given key + + INVALID_STATE: + - no deadline exists for the given job key + """ + + DeleteResource: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.DeleteResourceRequest, + pyzeebe.proto.gateway_pb2.DeleteResourceResponse, + ] + """ + Deletes a resource from the state. Once a resource has been deleted it cannot + be recovered. If the resource needs to be available again, a new deployment + of the resource is required. + + Deleting a process will cancel any running instances of this process + definition. New instances of a deleted process are created using + the lastest version that hasn't been deleted. Creating a new + process instance is impossible when all versions have been + deleted. + + Deleting a decision requirement definitions could cause incidents in process + instances referencing these decisions in a business rule task. A decision + will be evaluated with the latest version that hasn't been deleted. If all + versions of a decision have been deleted the evaluation is rejected. + + Errors: + NOT_FOUND: + - No resource exists with the given key + """ + + BroadcastSignal: grpc.aio.UnaryUnaryMultiCallable[ + pyzeebe.proto.gateway_pb2.BroadcastSignalRequest, + pyzeebe.proto.gateway_pb2.BroadcastSignalResponse, + ] + """ + Broadcasts a signal. + """ + +class GatewayServicer(metaclass=abc.ABCMeta): + @abc.abstractmethod + def ActivateJobs( + self, + request: pyzeebe.proto.gateway_pb2.ActivateJobsRequest, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[pyzeebe.proto.gateway_pb2.ActivateJobsResponse], collections.abc.AsyncIterator[pyzeebe.proto.gateway_pb2.ActivateJobsResponse]]: + """ + Iterates through all known partitions round-robin and activates up to the requested + maximum and streams them back to the client as they are activated. + + Errors: + INVALID_ARGUMENT: + - type is blank (empty string, null) + - worker is blank (empty string, null) + - timeout less than 1 + - maxJobsToActivate is less than 1 + """ + + @abc.abstractmethod + def StreamActivatedJobs( + self, + request: pyzeebe.proto.gateway_pb2.StreamActivatedJobsRequest, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[pyzeebe.proto.gateway_pb2.ActivatedJob], collections.abc.AsyncIterator[pyzeebe.proto.gateway_pb2.ActivatedJob]]: + """ + Registers client to a job stream that will stream jobs back to the client as + they become activatable. + + Errors: + INVALID_ARGUMENT: + - type is blank (empty string, null) + - timeout less than 1 + """ + + @abc.abstractmethod + def CancelProcessInstance( + self, + request: pyzeebe.proto.gateway_pb2.CancelProcessInstanceRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.CancelProcessInstanceResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.CancelProcessInstanceResponse]]: + """ + Cancels a running process instance + + Errors: + NOT_FOUND: + - no process instance exists with the given key + """ + + @abc.abstractmethod + def CompleteJob( + self, + request: pyzeebe.proto.gateway_pb2.CompleteJobRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.CompleteJobResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.CompleteJobResponse]]: + """ + Completes a job with the given variables, which allows completing the associated service task. + + Errors: + NOT_FOUND: + - no job exists with the given job key. 
Note that since jobs are removed once completed, + it could be that this job did exist at some point. + + FAILED_PRECONDITION: + - the job was marked as failed. In that case, the related incident must be resolved before + the job can be activated again and completed. + """ + + @abc.abstractmethod + def CreateProcessInstance( + self, + request: pyzeebe.proto.gateway_pb2.CreateProcessInstanceRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.CreateProcessInstanceResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.CreateProcessInstanceResponse]]: + """ + Creates and starts an instance of the specified process. The process definition to use to + create the instance can be specified either using its unique key (as returned by + DeployProcess), or using the BPMN process ID and a version. Pass -1 as the version to use the + latest deployed version. Note that only processes with none start events can be started through + this command. + + Errors: + NOT_FOUND: + - no process with the given key exists (if processDefinitionKey was given) + - no process with the given process ID exists (if bpmnProcessId was given but version was -1) + - no process with the given process ID and version exists (if both bpmnProcessId and version were given) + + FAILED_PRECONDITION: + - the process definition does not contain a none start event; only processes with none + start event can be started manually. + + INVALID_ARGUMENT: + - the given variables argument is not a valid JSON document; it is expected to be a valid + JSON document where the root node is an object. + """ + + @abc.abstractmethod + def CreateProcessInstanceWithResult( + self, + request: pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultResponse]]: + """ + Behaves similarly to `rpc CreateProcessInstance`, except that a successful response is received when the process completes successfully. + """ + + @abc.abstractmethod + def EvaluateDecision( + self, + request: pyzeebe.proto.gateway_pb2.EvaluateDecisionRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.EvaluateDecisionResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.EvaluateDecisionResponse]]: + """ + Evaluates a decision. The decision to evaluate can be specified either by + using its unique key (as returned by DeployResource), or using the decision + ID. When using the decision ID, the latest deployed version of the decision + is used. + + Errors: + INVALID_ARGUMENT: + - no decision with the given key exists (if decisionKey was given) + - no decision with the given decision ID exists (if decisionId was given) + - both decision ID and decision KEY were provided, or are missing + """ + + @abc.abstractmethod + def DeployProcess( + self, + request: pyzeebe.proto.gateway_pb2.DeployProcessRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.DeployProcessResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.DeployProcessResponse]]: + """ + Deploys one or more processes to Zeebe. Note that this is an atomic call, + i.e. either all processes are deployed, or none of them are. + + Errors: + INVALID_ARGUMENT: + - no resources given. + - if at least one resource is invalid. A resource is considered invalid if: + - the resource data is not deserializable (e.g. 
detected as BPMN, but it's broken XML) + - the process is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task) + """ + + @abc.abstractmethod + def DeployResource( + self, + request: pyzeebe.proto.gateway_pb2.DeployResourceRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.DeployResourceResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.DeployResourceResponse]]: + """ + Deploys one or more resources (e.g. processes or decision models) to Zeebe. + Note that this is an atomic call, i.e. either all resources are deployed, or none of them are. + + Errors: + PERMISSION_DENIED: + - if a deployment to an unauthorized tenant is performed + INVALID_ARGUMENT: + - no resources given. + - if at least one resource is invalid. A resource is considered invalid if: + - the content is not deserializable (e.g. detected as BPMN, but it's broken XML) + - the content is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task) + - if multi-tenancy is enabled, and: + - a tenant id is not provided + - a tenant id with an invalid format is provided + - if multi-tenancy is disabled and a tenant id is provided + """ + + @abc.abstractmethod + def FailJob( + self, + request: pyzeebe.proto.gateway_pb2.FailJobRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.FailJobResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.FailJobResponse]]: + """ + Marks the job as failed; if the retries argument is positive, then the job will be immediately + activatable again, and a worker could try again to process it. If it is zero or negative however, + an incident will be raised, tagged with the given errorMessage, and the job will not be + activatable until the incident is resolved. + + Errors: + NOT_FOUND: + - no job was found with the given key + + FAILED_PRECONDITION: + - the job was not activated + - the job is already in a failed state, i.e. ran out of retries + """ + + @abc.abstractmethod + def ThrowError( + self, + request: pyzeebe.proto.gateway_pb2.ThrowErrorRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.ThrowErrorResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.ThrowErrorResponse]]: + """ + Reports a business error (i.e. non-technical) that occurs while processing a job. The error is handled in the process by an error catch event. If there is no error catch event with the specified errorCode then an incident will be raised instead. + + Errors: + NOT_FOUND: + - no job was found with the given key + + FAILED_PRECONDITION: + - the job is not in an activated state + """ + + @abc.abstractmethod + def PublishMessage( + self, + request: pyzeebe.proto.gateway_pb2.PublishMessageRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.PublishMessageResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.PublishMessageResponse]]: + """ + Publishes a single message. Messages are published to specific partitions computed from their + correlation keys. + + Errors: + ALREADY_EXISTS: + - a message with the same ID was previously published (and is still alive) + """ + + @abc.abstractmethod + def ResolveIncident( + self, + request: pyzeebe.proto.gateway_pb2.ResolveIncidentRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.ResolveIncidentResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.ResolveIncidentResponse]]: + """ + Resolves a given incident. 
This simply marks the incident as resolved; most likely a call to + UpdateJobRetries or SetVariables will be necessary to actually resolve the + problem, following by this call. + + Errors: + NOT_FOUND: + - no incident with the given key exists + """ + + @abc.abstractmethod + def SetVariables( + self, + request: pyzeebe.proto.gateway_pb2.SetVariablesRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.SetVariablesResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.SetVariablesResponse]]: + """ + Updates all the variables of a particular scope (e.g. process instance, flow element instance) + from the given JSON document. + + Errors: + NOT_FOUND: + - no element with the given elementInstanceKey exists + INVALID_ARGUMENT: + - the given variables document is not a valid JSON document; valid documents are expected to + be JSON documents where the root node is an object. + """ + + @abc.abstractmethod + def Topology( + self, + request: pyzeebe.proto.gateway_pb2.TopologyRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.TopologyResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.TopologyResponse]]: + """ + Obtains the current topology of the cluster the gateway is part of. + """ + + @abc.abstractmethod + def UpdateJobRetries( + self, + request: pyzeebe.proto.gateway_pb2.UpdateJobRetriesRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.UpdateJobRetriesResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.UpdateJobRetriesResponse]]: + """ + Updates the number of retries a job has left. This is mostly useful for jobs that have run out of + retries, should the underlying problem be solved. + + Errors: + NOT_FOUND: + - no job exists with the given key + + INVALID_ARGUMENT: + - retries is not greater than 0 + """ + + @abc.abstractmethod + def ModifyProcessInstance( + self, + request: pyzeebe.proto.gateway_pb2.ModifyProcessInstanceRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.ModifyProcessInstanceResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.ModifyProcessInstanceResponse]]: + """ + Modifies the process instance. This is done by activating and/or terminating specific elements of the instance. + + Errors: + NOT_FOUND: + - no process instance exists with the given key + + FAILED_PRECONDITION: + - trying to activate element inside of a multi-instance + + INVALID_ARGUMENT: + - activating or terminating unknown element + - ancestor of element for activation doesn't exist + - scope of variable is unknown + """ + + @abc.abstractmethod + def MigrateProcessInstance( + self, + request: pyzeebe.proto.gateway_pb2.MigrateProcessInstanceRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.MigrateProcessInstanceResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.MigrateProcessInstanceResponse]]: + """ + Migrates the process instance to the specified process definition. + In simple terms, this is handled by updating the active element's process. + + Errors: + NOT_FOUND: + - no process instance exists with the given key, or it is not active + - no process definition exists with the given target definition key + - no process instance exists with the given key for the tenants the user is authorized to work with. 
+ + FAILED_PRECONDITION: + - not all active elements in the given process instance are mapped to the elements in the target process definition + - a mapping instruction changes the type of an element or event + - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on) + - a mapping instruction refers to element in unsupported scenarios. + (i.e. migration is not supported when process instance or target process elements contains event subscriptions) + + INVALID_ARGUMENT: + - A `sourceElementId` does not refer to an element in the process instance's process definition + - A `targetElementId` does not refer to an element in the target process definition + - A `sourceElementId` is mapped by multiple mapping instructions. + For example, the engine cannot determine how to migrate a process instance when the instructions are: [A->B, A->C]. + """ + + @abc.abstractmethod + def UpdateJobTimeout( + self, + request: pyzeebe.proto.gateway_pb2.UpdateJobTimeoutRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.UpdateJobTimeoutResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.UpdateJobTimeoutResponse]]: + """ + Updates the deadline of a job using the timeout (in ms) provided. This can be used + for extending or shortening the job deadline. + + Errors: + NOT_FOUND: + - no job exists with the given key + + INVALID_STATE: + - no deadline exists for the given job key + """ + + @abc.abstractmethod + def DeleteResource( + self, + request: pyzeebe.proto.gateway_pb2.DeleteResourceRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.DeleteResourceResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.DeleteResourceResponse]]: + """ + Deletes a resource from the state. Once a resource has been deleted it cannot + be recovered. If the resource needs to be available again, a new deployment + of the resource is required. + + Deleting a process will cancel any running instances of this process + definition. New instances of a deleted process are created using + the lastest version that hasn't been deleted. Creating a new + process instance is impossible when all versions have been + deleted. + + Deleting a decision requirement definitions could cause incidents in process + instances referencing these decisions in a business rule task. A decision + will be evaluated with the latest version that hasn't been deleted. If all + versions of a decision have been deleted the evaluation is rejected. + + Errors: + NOT_FOUND: + - No resource exists with the given key + """ + + @abc.abstractmethod + def BroadcastSignal( + self, + request: pyzeebe.proto.gateway_pb2.BroadcastSignalRequest, + context: _ServicerContext, + ) -> typing.Union[pyzeebe.proto.gateway_pb2.BroadcastSignalResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.BroadcastSignalResponse]]: + """ + Broadcasts a signal. + """ + +def add_GatewayServicer_to_server(servicer: GatewayServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... 
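
Note (editorial, not part of the patch): for orientation, a minimal sketch of how the regenerated stubs can be exercised once they ship under pyzeebe.proto. It assumes a Zeebe gateway reachable on localhost:26500 (the default port) over an insecure channel, which is an assumption for illustration only; as the type stub above declares, the generated GatewayStub accepts both synchronous and asyncio channels, while GatewayAsyncStub exists only in the .pyi as a typing-time view for grpc.aio usage.

    import asyncio

    import grpc

    from pyzeebe.proto.gateway_pb2 import TopologyRequest
    from pyzeebe.proto.gateway_pb2_grpc import GatewayStub


    async def print_topology() -> None:
        # Assumption: a gateway is listening on localhost:26500 with TLS disabled;
        # real deployments would pass channel credentials instead.
        async with grpc.aio.insecure_channel("localhost:26500") as channel:
            stub = GatewayStub(channel)  # the generated stub also works with grpc.aio channels
            topology = await stub.Topology(TopologyRequest())
            for broker in topology.brokers:
                print(f"broker {broker.nodeId} at {broker.host}:{broker.port}")


    if __name__ == "__main__":
        asyncio.run(print_topology())

The unit tests below take the server-side route instead: they register the servicer from tests/unit/utils/gateway_mock.py via add_GatewayServicer_to_server, so no live broker is required there.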
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 619161d1..b08c04c7 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -131,7 +131,7 @@ async def simple_exception_handler(e: Exception, job: Job, job_controller: JobCo @pytest.fixture(scope="module") def grpc_add_to_server(): - from zeebe_grpc.gateway_pb2_grpc import add_GatewayServicer_to_server + from pyzeebe.proto.gateway_pb2_grpc import add_GatewayServicer_to_server return add_GatewayServicer_to_server @@ -143,7 +143,7 @@ def grpc_servicer(): @pytest.fixture(scope="module") def grpc_stub_cls(grpc_channel): - from zeebe_grpc.gateway_pb2_grpc import GatewayStub + from pyzeebe.proto.gateway_pb2_grpc import GatewayStub return GatewayStub diff --git a/tests/unit/utils/gateway_mock.py b/tests/unit/utils/gateway_mock.py index 8f217917..ed76d092 100644 --- a/tests/unit/utils/gateway_mock.py +++ b/tests/unit/utils/gateway_mock.py @@ -4,7 +4,10 @@ from uuid import uuid4 import grpc -from zeebe_grpc.gateway_pb2 import ( + +from pyzeebe.job.job import Job +from pyzeebe.job.job_status import JobStatus +from pyzeebe.proto.gateway_pb2 import ( ActivatedJob, ActivateJobsResponse, CancelProcessInstanceResponse, @@ -19,10 +22,7 @@ ProcessMetadata, PublishMessageResponse, ) -from zeebe_grpc.gateway_pb2_grpc import GatewayServicer - -from pyzeebe.job.job import Job -from pyzeebe.job.job_status import JobStatus +from pyzeebe.proto.gateway_pb2_grpc import GatewayServicer from pyzeebe.task.task import Task from tests.unit.utils.random_utils import RANDOM_RANGE, random_job diff --git a/update_proto.py b/update_proto.py new file mode 100644 index 00000000..193bac18 --- /dev/null +++ b/update_proto.py @@ -0,0 +1,36 @@ +import os +import pathlib + +import requests +from grpc_tools.protoc import main as grpc_tools_protoc_main + +zeebe_proto_version = "8.4.13" + + +def generate_proto(): + proto_dir = pathlib.Path("pyzeebe/proto") + proto_file = proto_dir / "gateway.proto" + for path in proto_dir.glob("*pb2*"): + os.remove(path) + + proto_url = f"https://raw.githubusercontent.com/camunda/camunda/refs/tags/{zeebe_proto_version}/gateway-protocol/src/main/proto/gateway.proto" + proto_content = requests.get(proto_url, allow_redirects=True) + with proto_file.open("wb") as tmpfile: + tmpfile.write(proto_content.content) + + grpc_tools_protoc_main( + [ + "--proto_path=.", + "--python_out=.", + "--mypy_out=.", + "--grpc_python_out=.", + "--mypy_grpc_out=.", + os.path.relpath(tmpfile.name), + ] + ) + + proto_file.unlink() + + +if __name__ == "__main__": + generate_proto() From 18c14746330f79ec95cc8838ad7eac884be99ef4 Mon Sep 17 00:00:00 2001 From: Dmitriy Date: Fri, 27 Dec 2024 17:28:52 +0500 Subject: [PATCH 2/9] chore: bump zeebe proto to 8.6.6 --- pyzeebe/proto/gateway_pb2.py | 240 ++++++++++++++--------------- pyzeebe/proto/gateway_pb2.pyi | 79 ++++++++-- pyzeebe/proto/gateway_pb2_grpc.py | 9 ++ pyzeebe/proto/gateway_pb2_grpc.pyi | 27 ++++ update_proto.py | 4 +- 5 files changed, 226 insertions(+), 133 deletions(-) diff --git a/pyzeebe/proto/gateway_pb2.py b/pyzeebe/proto/gateway_pb2.py index e6f139fb..5c801a43 100644 --- a/pyzeebe/proto/gateway_pb2.py +++ b/pyzeebe/proto/gateway_pb2.py @@ -24,7 +24,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bpyzeebe/proto/gateway.proto\x12\x10gateway_protocol\"u\n\x1aStreamActivatedJobsRequest\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0e\n\x06worker\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\x03\x12\x15\n\rfetchVariable\x18\x05 \x03(\t\x12\x11\n\ttenantIds\x18\x06 
\x03(\t\"\xa1\x01\n\x13\x41\x63tivateJobsRequest\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0e\n\x06worker\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\x03\x12\x19\n\x11maxJobsToActivate\x18\x04 \x01(\x05\x12\x15\n\rfetchVariable\x18\x05 \x03(\t\x12\x16\n\x0erequestTimeout\x18\x06 \x01(\x03\x12\x11\n\ttenantIds\x18\x07 \x03(\t\"D\n\x14\x41\x63tivateJobsResponse\x12,\n\x04jobs\x18\x01 \x03(\x0b\x32\x1e.gateway_protocol.ActivatedJob\"\xba\x02\n\x0c\x41\x63tivatedJob\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x1a\n\x12processInstanceKey\x18\x03 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x04 \x01(\t\x12 \n\x18processDefinitionVersion\x18\x05 \x01(\x05\x12\x1c\n\x14processDefinitionKey\x18\x06 \x01(\x03\x12\x11\n\telementId\x18\x07 \x01(\t\x12\x1a\n\x12\x65lementInstanceKey\x18\x08 \x01(\x03\x12\x15\n\rcustomHeaders\x18\t \x01(\t\x12\x0e\n\x06worker\x18\n \x01(\t\x12\x0f\n\x07retries\x18\x0b \x01(\x05\x12\x10\n\x08\x64\x65\x61\x64line\x18\x0c \x01(\x03\x12\x11\n\tvariables\x18\r \x01(\t\x12\x10\n\x08tenantId\x18\x0e \x01(\t\":\n\x1c\x43\x61ncelProcessInstanceRequest\x12\x1a\n\x12processInstanceKey\x18\x01 \x01(\x03\"\x1f\n\x1d\x43\x61ncelProcessInstanceResponse\"7\n\x12\x43ompleteJobRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x11\n\tvariables\x18\x02 \x01(\t\"\x15\n\x13\x43ompleteJobResponse\"\xdf\x01\n\x1c\x43reateProcessInstanceRequest\x12\x1c\n\x14processDefinitionKey\x18\x01 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x11\n\tvariables\x18\x04 \x01(\t\x12T\n\x11startInstructions\x18\x05 \x03(\x0b\x32\x39.gateway_protocol.ProcessInstanceCreationStartInstruction\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"<\n\'ProcessInstanceCreationStartInstruction\x12\x11\n\telementId\x18\x01 \x01(\t\"\x93\x01\n\x1d\x43reateProcessInstanceResponse\x12\x1c\n\x14processDefinitionKey\x18\x01 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x1a\n\x12processInstanceKey\x18\x04 \x01(\x03\x12\x10\n\x08tenantId\x18\x05 \x01(\t\"\x99\x01\n&CreateProcessInstanceWithResultRequest\x12?\n\x07request\x18\x01 \x01(\x0b\x32..gateway_protocol.CreateProcessInstanceRequest\x12\x16\n\x0erequestTimeout\x18\x02 \x01(\x03\x12\x16\n\x0e\x66\x65tchVariables\x18\x03 \x03(\t\"\xb0\x01\n\'CreateProcessInstanceWithResultResponse\x12\x1c\n\x14processDefinitionKey\x18\x01 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x1a\n\x12processInstanceKey\x18\x04 \x01(\x03\x12\x11\n\tvariables\x18\x05 \x01(\t\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"g\n\x17\x45valuateDecisionRequest\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x01 \x01(\x03\x12\x12\n\ndecisionId\x18\x02 \x01(\t\x12\x11\n\tvariables\x18\x03 \x01(\t\x12\x10\n\x08tenantId\x18\x04 \x01(\t\"\xd0\x02\n\x18\x45valuateDecisionResponse\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x01 \x01(\x03\x12\x12\n\ndecisionId\x18\x02 \x01(\t\x12\x14\n\x0c\x64\x65\x63isionName\x18\x03 \x01(\t\x12\x17\n\x0f\x64\x65\x63isionVersion\x18\x04 \x01(\x05\x12\x1e\n\x16\x64\x65\x63isionRequirementsId\x18\x05 \x01(\t\x12\x1f\n\x17\x64\x65\x63isionRequirementsKey\x18\x06 \x01(\x03\x12\x16\n\x0e\x64\x65\x63isionOutput\x18\x07 \x01(\t\x12?\n\x12\x65valuatedDecisions\x18\x08 \x03(\x0b\x32#.gateway_protocol.EvaluatedDecision\x12\x18\n\x10\x66\x61iledDecisionId\x18\t \x01(\t\x12\x16\n\x0e\x66\x61ilureMessage\x18\n \x01(\t\x12\x10\n\x08tenantId\x18\x0b \x01(\t\"\xab\x02\n\x11\x45valuatedDecision\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x01 
\x01(\x03\x12\x12\n\ndecisionId\x18\x02 \x01(\t\x12\x14\n\x0c\x64\x65\x63isionName\x18\x03 \x01(\t\x12\x17\n\x0f\x64\x65\x63isionVersion\x18\x04 \x01(\x05\x12\x14\n\x0c\x64\x65\x63isionType\x18\x05 \x01(\t\x12\x16\n\x0e\x64\x65\x63isionOutput\x18\x06 \x01(\t\x12;\n\x0cmatchedRules\x18\x07 \x03(\x0b\x32%.gateway_protocol.MatchedDecisionRule\x12\x41\n\x0f\x65valuatedInputs\x18\x08 \x03(\x0b\x32(.gateway_protocol.EvaluatedDecisionInput\x12\x10\n\x08tenantId\x18\t \x01(\t\"P\n\x16\x45valuatedDecisionInput\x12\x0f\n\x07inputId\x18\x01 \x01(\t\x12\x11\n\tinputName\x18\x02 \x01(\t\x12\x12\n\ninputValue\x18\x03 \x01(\t\"T\n\x17\x45valuatedDecisionOutput\x12\x10\n\x08outputId\x18\x01 \x01(\t\x12\x12\n\noutputName\x18\x02 \x01(\t\x12\x13\n\x0boutputValue\x18\x03 \x01(\t\"}\n\x13MatchedDecisionRule\x12\x0e\n\x06ruleId\x18\x01 \x01(\t\x12\x11\n\truleIndex\x18\x02 \x01(\x05\x12\x43\n\x10\x65valuatedOutputs\x18\x03 \x03(\x0b\x32).gateway_protocol.EvaluatedDecisionOutput\"U\n\x14\x44\x65ployProcessRequest\x12\x39\n\tprocesses\x18\x01 \x03(\x0b\x32&.gateway_protocol.ProcessRequestObject:\x02\x18\x01\"<\n\x14ProcessRequestObject\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ndefinition\x18\x02 \x01(\x0c:\x02\x18\x01\"^\n\x15\x44\x65ployProcessResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x34\n\tprocesses\x18\x02 \x03(\x0b\x32!.gateway_protocol.ProcessMetadata:\x02\x18\x01\"X\n\x15\x44\x65ployResourceRequest\x12-\n\tresources\x18\x01 \x03(\x0b\x32\x1a.gateway_protocol.Resource\x12\x10\n\x08tenantId\x18\x02 \x01(\t\")\n\x08Resource\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\x0c\"j\n\x16\x44\x65ployResourceResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x31\n\x0b\x64\x65ployments\x18\x02 \x03(\x0b\x32\x1c.gateway_protocol.Deployment\x12\x10\n\x08tenantId\x18\x03 \x01(\t\"\x86\x02\n\nDeployment\x12\x34\n\x07process\x18\x01 \x01(\x0b\x32!.gateway_protocol.ProcessMetadataH\x00\x12\x36\n\x08\x64\x65\x63ision\x18\x02 \x01(\x0b\x32\".gateway_protocol.DecisionMetadataH\x00\x12N\n\x14\x64\x65\x63isionRequirements\x18\x03 \x01(\x0b\x32..gateway_protocol.DecisionRequirementsMetadataH\x00\x12.\n\x04\x66orm\x18\x04 \x01(\x0b\x32\x1e.gateway_protocol.FormMetadataH\x00\x42\n\n\x08Metadata\"\x7f\n\x0fProcessMetadata\x12\x15\n\rbpmnProcessId\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x1c\n\x14processDefinitionKey\x18\x03 \x01(\x03\x12\x14\n\x0cresourceName\x18\x04 \x01(\t\x12\x10\n\x08tenantId\x18\x05 \x01(\t\"\xbe\x01\n\x10\x44\x65\x63isionMetadata\x12\x15\n\rdmnDecisionId\x18\x01 \x01(\t\x12\x17\n\x0f\x64mnDecisionName\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x04 \x01(\x03\x12!\n\x19\x64mnDecisionRequirementsId\x18\x05 \x01(\t\x12\x1f\n\x17\x64\x65\x63isionRequirementsKey\x18\x06 \x01(\x03\x12\x10\n\x08tenantId\x18\x07 \x01(\t\"\xc0\x01\n\x1c\x44\x65\x63isionRequirementsMetadata\x12!\n\x19\x64mnDecisionRequirementsId\x18\x01 \x01(\t\x12#\n\x1b\x64mnDecisionRequirementsName\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x1f\n\x17\x64\x65\x63isionRequirementsKey\x18\x04 \x01(\x03\x12\x14\n\x0cresourceName\x18\x05 \x01(\t\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"h\n\x0c\x46ormMetadata\x12\x0e\n\x06\x66ormId\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x0f\n\x07\x66ormKey\x18\x03 \x01(\x03\x12\x14\n\x0cresourceName\x18\x04 \x01(\t\x12\x10\n\x08tenantId\x18\x05 \x01(\t\"p\n\x0e\x46\x61ilJobRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x0f\n\x07retries\x18\x02 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x03 
\x01(\t\x12\x14\n\x0cretryBackOff\x18\x04 \x01(\x03\x12\x11\n\tvariables\x18\x05 \x01(\t\"\x11\n\x0f\x46\x61ilJobResponse\"_\n\x11ThrowErrorRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x11\n\terrorCode\x18\x02 \x01(\t\x12\x14\n\x0c\x65rrorMessage\x18\x03 \x01(\t\x12\x11\n\tvariables\x18\x04 \x01(\t\"\x14\n\x12ThrowErrorResponse\"\x89\x01\n\x15PublishMessageRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0e\x63orrelationKey\x18\x02 \x01(\t\x12\x12\n\ntimeToLive\x18\x03 \x01(\x03\x12\x11\n\tmessageId\x18\x04 \x01(\t\x12\x11\n\tvariables\x18\x05 \x01(\t\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"7\n\x16PublishMessageResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x10\n\x08tenantId\x18\x02 \x01(\t\"-\n\x16ResolveIncidentRequest\x12\x13\n\x0bincidentKey\x18\x01 \x01(\x03\"\x19\n\x17ResolveIncidentResponse\"\x11\n\x0fTopologyRequest\"\xa2\x01\n\x10TopologyResponse\x12-\n\x07\x62rokers\x18\x01 \x03(\x0b\x32\x1c.gateway_protocol.BrokerInfo\x12\x13\n\x0b\x63lusterSize\x18\x02 \x01(\x05\x12\x17\n\x0fpartitionsCount\x18\x03 \x01(\x05\x12\x19\n\x11replicationFactor\x18\x04 \x01(\x05\x12\x16\n\x0egatewayVersion\x18\x05 \x01(\t\"z\n\nBrokerInfo\x12\x0e\n\x06nodeId\x18\x01 \x01(\x05\x12\x0c\n\x04host\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\x05\x12/\n\npartitions\x18\x04 \x03(\x0b\x32\x1b.gateway_protocol.Partition\x12\x0f\n\x07version\x18\x05 \x01(\t\"\xa0\x02\n\tPartition\x12\x13\n\x0bpartitionId\x18\x01 \x01(\x05\x12=\n\x04role\x18\x02 \x01(\x0e\x32/.gateway_protocol.Partition.PartitionBrokerRole\x12\x41\n\x06health\x18\x03 \x01(\x0e\x32\x31.gateway_protocol.Partition.PartitionBrokerHealth\"=\n\x13PartitionBrokerRole\x12\n\n\x06LEADER\x10\x00\x12\x0c\n\x08\x46OLLOWER\x10\x01\x12\x0c\n\x08INACTIVE\x10\x02\"=\n\x15PartitionBrokerHealth\x12\x0b\n\x07HEALTHY\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x08\n\x04\x44\x45\x41\x44\x10\x02\":\n\x17UpdateJobRetriesRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x0f\n\x07retries\x18\x02 \x01(\x05\"\x1a\n\x18UpdateJobRetriesResponse\":\n\x17UpdateJobTimeoutRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x0f\n\x07timeout\x18\x02 \x01(\x03\"\x1a\n\x18UpdateJobTimeoutResponse\"S\n\x13SetVariablesRequest\x12\x1a\n\x12\x65lementInstanceKey\x18\x01 \x01(\x03\x12\x11\n\tvariables\x18\x02 \x01(\t\x12\r\n\x05local\x18\x03 \x01(\x08\"#\n\x14SetVariablesResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\"\xa0\x04\n\x1cModifyProcessInstanceRequest\x12\x1a\n\x12processInstanceKey\x18\x01 \x01(\x03\x12`\n\x14\x61\x63tivateInstructions\x18\x02 \x03(\x0b\x32\x42.gateway_protocol.ModifyProcessInstanceRequest.ActivateInstruction\x12\x62\n\x15terminateInstructions\x18\x03 \x03(\x0b\x32\x43.gateway_protocol.ModifyProcessInstanceRequest.TerminateInstruction\x1a\xae\x01\n\x13\x41\x63tivateInstruction\x12\x11\n\telementId\x18\x01 \x01(\t\x12\"\n\x1a\x61ncestorElementInstanceKey\x18\x02 \x01(\x03\x12`\n\x14variableInstructions\x18\x03 \x03(\x0b\x32\x42.gateway_protocol.ModifyProcessInstanceRequest.VariableInstruction\x1a\x39\n\x13VariableInstruction\x12\x11\n\tvariables\x18\x01 \x01(\t\x12\x0f\n\x07scopeId\x18\x02 \x01(\t\x1a\x32\n\x14TerminateInstruction\x12\x1a\n\x12\x65lementInstanceKey\x18\x01 \x01(\x03\"\x1f\n\x1dModifyProcessInstanceResponse\"\xf0\x02\n\x1dMigrateProcessInstanceRequest\x12\x1a\n\x12processInstanceKey\x18\x01 \x01(\x03\x12T\n\rmigrationPlan\x18\x02 \x01(\x0b\x32=.gateway_protocol.MigrateProcessInstanceRequest.MigrationPlan\x1a\x94\x01\n\rMigrationPlan\x12\"\n\x1atargetProcessDefinitionKey\x18\x01 \x01(\x03\x12_\n\x13mappingInstructions\x18\x02 
\x03(\x0b\x32\x42.gateway_protocol.MigrateProcessInstanceRequest.MappingInstruction\x1a\x46\n\x12MappingInstruction\x12\x17\n\x0fsourceElementId\x18\x01 \x01(\t\x12\x17\n\x0ftargetElementId\x18\x02 \x01(\t\" \n\x1eMigrateProcessInstanceResponse\",\n\x15\x44\x65leteResourceRequest\x12\x13\n\x0bresourceKey\x18\x01 \x01(\x03\"\x18\n\x16\x44\x65leteResourceResponse\"Q\n\x16\x42roadcastSignalRequest\x12\x12\n\nsignalName\x18\x01 \x01(\t\x12\x11\n\tvariables\x18\x02 \x01(\t\x12\x10\n\x08tenantId\x18\x03 \x01(\t\"8\n\x17\x42roadcastSignalResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x10\n\x08tenantId\x18\x02 \x01(\t2\xdb\x11\n\x07Gateway\x12\x61\n\x0c\x41\x63tivateJobs\x12%.gateway_protocol.ActivateJobsRequest\x1a&.gateway_protocol.ActivateJobsResponse\"\x00\x30\x01\x12g\n\x13StreamActivatedJobs\x12,.gateway_protocol.StreamActivatedJobsRequest\x1a\x1e.gateway_protocol.ActivatedJob\"\x00\x30\x01\x12z\n\x15\x43\x61ncelProcessInstance\x12..gateway_protocol.CancelProcessInstanceRequest\x1a/.gateway_protocol.CancelProcessInstanceResponse\"\x00\x12\\\n\x0b\x43ompleteJob\x12$.gateway_protocol.CompleteJobRequest\x1a%.gateway_protocol.CompleteJobResponse\"\x00\x12z\n\x15\x43reateProcessInstance\x12..gateway_protocol.CreateProcessInstanceRequest\x1a/.gateway_protocol.CreateProcessInstanceResponse\"\x00\x12\x98\x01\n\x1f\x43reateProcessInstanceWithResult\x12\x38.gateway_protocol.CreateProcessInstanceWithResultRequest\x1a\x39.gateway_protocol.CreateProcessInstanceWithResultResponse\"\x00\x12k\n\x10\x45valuateDecision\x12).gateway_protocol.EvaluateDecisionRequest\x1a*.gateway_protocol.EvaluateDecisionResponse\"\x00\x12\x65\n\rDeployProcess\x12&.gateway_protocol.DeployProcessRequest\x1a\'.gateway_protocol.DeployProcessResponse\"\x03\x88\x02\x01\x12\x65\n\x0e\x44\x65ployResource\x12\'.gateway_protocol.DeployResourceRequest\x1a(.gateway_protocol.DeployResourceResponse\"\x00\x12P\n\x07\x46\x61ilJob\x12 .gateway_protocol.FailJobRequest\x1a!.gateway_protocol.FailJobResponse\"\x00\x12Y\n\nThrowError\x12#.gateway_protocol.ThrowErrorRequest\x1a$.gateway_protocol.ThrowErrorResponse\"\x00\x12\x65\n\x0ePublishMessage\x12\'.gateway_protocol.PublishMessageRequest\x1a(.gateway_protocol.PublishMessageResponse\"\x00\x12h\n\x0fResolveIncident\x12(.gateway_protocol.ResolveIncidentRequest\x1a).gateway_protocol.ResolveIncidentResponse\"\x00\x12_\n\x0cSetVariables\x12%.gateway_protocol.SetVariablesRequest\x1a&.gateway_protocol.SetVariablesResponse\"\x00\x12S\n\x08Topology\x12!.gateway_protocol.TopologyRequest\x1a\".gateway_protocol.TopologyResponse\"\x00\x12k\n\x10UpdateJobRetries\x12).gateway_protocol.UpdateJobRetriesRequest\x1a*.gateway_protocol.UpdateJobRetriesResponse\"\x00\x12z\n\x15ModifyProcessInstance\x12..gateway_protocol.ModifyProcessInstanceRequest\x1a/.gateway_protocol.ModifyProcessInstanceResponse\"\x00\x12}\n\x16MigrateProcessInstance\x12/.gateway_protocol.MigrateProcessInstanceRequest\x1a\x30.gateway_protocol.MigrateProcessInstanceResponse\"\x00\x12k\n\x10UpdateJobTimeout\x12).gateway_protocol.UpdateJobTimeoutRequest\x1a*.gateway_protocol.UpdateJobTimeoutResponse\"\x00\x12\x65\n\x0e\x44\x65leteResource\x12\'.gateway_protocol.DeleteResourceRequest\x1a(.gateway_protocol.DeleteResourceResponse\"\x00\x12h\n\x0f\x42roadcastSignal\x12(.gateway_protocol.BroadcastSignalRequest\x1a).gateway_protocol.BroadcastSignalResponse\"\x00\x42,\n!io.camunda.zeebe.gateway.protocolP\x00Z\x05./;pbb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1bpyzeebe/proto/gateway.proto\x12\x10gateway_protocol\"u\n\x1aStreamActivatedJobsRequest\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0e\n\x06worker\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\x03\x12\x15\n\rfetchVariable\x18\x05 \x03(\t\x12\x11\n\ttenantIds\x18\x06 \x03(\t\"\xa1\x01\n\x13\x41\x63tivateJobsRequest\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0e\n\x06worker\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\x03\x12\x19\n\x11maxJobsToActivate\x18\x04 \x01(\x05\x12\x15\n\rfetchVariable\x18\x05 \x03(\t\x12\x16\n\x0erequestTimeout\x18\x06 \x01(\x03\x12\x11\n\ttenantIds\x18\x07 \x03(\t\"D\n\x14\x41\x63tivateJobsResponse\x12,\n\x04jobs\x18\x01 \x03(\x0b\x32\x1e.gateway_protocol.ActivatedJob\"\xba\x02\n\x0c\x41\x63tivatedJob\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x1a\n\x12processInstanceKey\x18\x03 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x04 \x01(\t\x12 \n\x18processDefinitionVersion\x18\x05 \x01(\x05\x12\x1c\n\x14processDefinitionKey\x18\x06 \x01(\x03\x12\x11\n\telementId\x18\x07 \x01(\t\x12\x1a\n\x12\x65lementInstanceKey\x18\x08 \x01(\x03\x12\x15\n\rcustomHeaders\x18\t \x01(\t\x12\x0e\n\x06worker\x18\n \x01(\t\x12\x0f\n\x07retries\x18\x0b \x01(\x05\x12\x10\n\x08\x64\x65\x61\x64line\x18\x0c \x01(\x03\x12\x11\n\tvariables\x18\r \x01(\t\x12\x10\n\x08tenantId\x18\x0e \x01(\t\"r\n\x1c\x43\x61ncelProcessInstanceRequest\x12\x1a\n\x12processInstanceKey\x18\x01 \x01(\x03\x12\x1f\n\x12operationReference\x18\x02 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"\x1f\n\x1d\x43\x61ncelProcessInstanceResponse\"7\n\x12\x43ompleteJobRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x11\n\tvariables\x18\x02 \x01(\t\"\x15\n\x13\x43ompleteJobResponse\"\x97\x02\n\x1c\x43reateProcessInstanceRequest\x12\x1c\n\x14processDefinitionKey\x18\x01 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x11\n\tvariables\x18\x04 \x01(\t\x12T\n\x11startInstructions\x18\x05 \x03(\x0b\x32\x39.gateway_protocol.ProcessInstanceCreationStartInstruction\x12\x10\n\x08tenantId\x18\x06 \x01(\t\x12\x1f\n\x12operationReference\x18\x07 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"<\n\'ProcessInstanceCreationStartInstruction\x12\x11\n\telementId\x18\x01 \x01(\t\"\x93\x01\n\x1d\x43reateProcessInstanceResponse\x12\x1c\n\x14processDefinitionKey\x18\x01 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x1a\n\x12processInstanceKey\x18\x04 \x01(\x03\x12\x10\n\x08tenantId\x18\x05 \x01(\t\"\x99\x01\n&CreateProcessInstanceWithResultRequest\x12?\n\x07request\x18\x01 \x01(\x0b\x32..gateway_protocol.CreateProcessInstanceRequest\x12\x16\n\x0erequestTimeout\x18\x02 \x01(\x03\x12\x16\n\x0e\x66\x65tchVariables\x18\x03 \x03(\t\"\xb0\x01\n\'CreateProcessInstanceWithResultResponse\x12\x1c\n\x14processDefinitionKey\x18\x01 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x1a\n\x12processInstanceKey\x18\x04 \x01(\x03\x12\x11\n\tvariables\x18\x05 \x01(\t\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"g\n\x17\x45valuateDecisionRequest\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x01 \x01(\x03\x12\x12\n\ndecisionId\x18\x02 \x01(\t\x12\x11\n\tvariables\x18\x03 \x01(\t\x12\x10\n\x08tenantId\x18\x04 \x01(\t\"\xed\x02\n\x18\x45valuateDecisionResponse\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x01 \x01(\x03\x12\x12\n\ndecisionId\x18\x02 \x01(\t\x12\x14\n\x0c\x64\x65\x63isionName\x18\x03 \x01(\t\x12\x17\n\x0f\x64\x65\x63isionVersion\x18\x04 
\x01(\x05\x12\x1e\n\x16\x64\x65\x63isionRequirementsId\x18\x05 \x01(\t\x12\x1f\n\x17\x64\x65\x63isionRequirementsKey\x18\x06 \x01(\x03\x12\x16\n\x0e\x64\x65\x63isionOutput\x18\x07 \x01(\t\x12?\n\x12\x65valuatedDecisions\x18\x08 \x03(\x0b\x32#.gateway_protocol.EvaluatedDecision\x12\x18\n\x10\x66\x61iledDecisionId\x18\t \x01(\t\x12\x16\n\x0e\x66\x61ilureMessage\x18\n \x01(\t\x12\x10\n\x08tenantId\x18\x0b \x01(\t\x12\x1b\n\x13\x64\x65\x63isionInstanceKey\x18\x0c \x01(\x03\"\xab\x02\n\x11\x45valuatedDecision\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x01 \x01(\x03\x12\x12\n\ndecisionId\x18\x02 \x01(\t\x12\x14\n\x0c\x64\x65\x63isionName\x18\x03 \x01(\t\x12\x17\n\x0f\x64\x65\x63isionVersion\x18\x04 \x01(\x05\x12\x14\n\x0c\x64\x65\x63isionType\x18\x05 \x01(\t\x12\x16\n\x0e\x64\x65\x63isionOutput\x18\x06 \x01(\t\x12;\n\x0cmatchedRules\x18\x07 \x03(\x0b\x32%.gateway_protocol.MatchedDecisionRule\x12\x41\n\x0f\x65valuatedInputs\x18\x08 \x03(\x0b\x32(.gateway_protocol.EvaluatedDecisionInput\x12\x10\n\x08tenantId\x18\t \x01(\t\"P\n\x16\x45valuatedDecisionInput\x12\x0f\n\x07inputId\x18\x01 \x01(\t\x12\x11\n\tinputName\x18\x02 \x01(\t\x12\x12\n\ninputValue\x18\x03 \x01(\t\"T\n\x17\x45valuatedDecisionOutput\x12\x10\n\x08outputId\x18\x01 \x01(\t\x12\x12\n\noutputName\x18\x02 \x01(\t\x12\x13\n\x0boutputValue\x18\x03 \x01(\t\"}\n\x13MatchedDecisionRule\x12\x0e\n\x06ruleId\x18\x01 \x01(\t\x12\x11\n\truleIndex\x18\x02 \x01(\x05\x12\x43\n\x10\x65valuatedOutputs\x18\x03 \x03(\x0b\x32).gateway_protocol.EvaluatedDecisionOutput\"U\n\x14\x44\x65ployProcessRequest\x12\x39\n\tprocesses\x18\x01 \x03(\x0b\x32&.gateway_protocol.ProcessRequestObject:\x02\x18\x01\"<\n\x14ProcessRequestObject\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ndefinition\x18\x02 \x01(\x0c:\x02\x18\x01\"^\n\x15\x44\x65ployProcessResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x34\n\tprocesses\x18\x02 \x03(\x0b\x32!.gateway_protocol.ProcessMetadata:\x02\x18\x01\"X\n\x15\x44\x65ployResourceRequest\x12-\n\tresources\x18\x01 \x03(\x0b\x32\x1a.gateway_protocol.Resource\x12\x10\n\x08tenantId\x18\x02 \x01(\t\")\n\x08Resource\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\x0c\"j\n\x16\x44\x65ployResourceResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x31\n\x0b\x64\x65ployments\x18\x02 \x03(\x0b\x32\x1c.gateway_protocol.Deployment\x12\x10\n\x08tenantId\x18\x03 \x01(\t\"\x86\x02\n\nDeployment\x12\x34\n\x07process\x18\x01 \x01(\x0b\x32!.gateway_protocol.ProcessMetadataH\x00\x12\x36\n\x08\x64\x65\x63ision\x18\x02 \x01(\x0b\x32\".gateway_protocol.DecisionMetadataH\x00\x12N\n\x14\x64\x65\x63isionRequirements\x18\x03 \x01(\x0b\x32..gateway_protocol.DecisionRequirementsMetadataH\x00\x12.\n\x04\x66orm\x18\x04 \x01(\x0b\x32\x1e.gateway_protocol.FormMetadataH\x00\x42\n\n\x08Metadata\"\x7f\n\x0fProcessMetadata\x12\x15\n\rbpmnProcessId\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x1c\n\x14processDefinitionKey\x18\x03 \x01(\x03\x12\x14\n\x0cresourceName\x18\x04 \x01(\t\x12\x10\n\x08tenantId\x18\x05 \x01(\t\"\xbe\x01\n\x10\x44\x65\x63isionMetadata\x12\x15\n\rdmnDecisionId\x18\x01 \x01(\t\x12\x17\n\x0f\x64mnDecisionName\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x04 \x01(\x03\x12!\n\x19\x64mnDecisionRequirementsId\x18\x05 \x01(\t\x12\x1f\n\x17\x64\x65\x63isionRequirementsKey\x18\x06 \x01(\x03\x12\x10\n\x08tenantId\x18\x07 \x01(\t\"\xc0\x01\n\x1c\x44\x65\x63isionRequirementsMetadata\x12!\n\x19\x64mnDecisionRequirementsId\x18\x01 \x01(\t\x12#\n\x1b\x64mnDecisionRequirementsName\x18\x02 
\x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x1f\n\x17\x64\x65\x63isionRequirementsKey\x18\x04 \x01(\x03\x12\x14\n\x0cresourceName\x18\x05 \x01(\t\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"h\n\x0c\x46ormMetadata\x12\x0e\n\x06\x66ormId\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x0f\n\x07\x66ormKey\x18\x03 \x01(\x03\x12\x14\n\x0cresourceName\x18\x04 \x01(\t\x12\x10\n\x08tenantId\x18\x05 \x01(\t\"p\n\x0e\x46\x61ilJobRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x0f\n\x07retries\x18\x02 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x03 \x01(\t\x12\x14\n\x0cretryBackOff\x18\x04 \x01(\x03\x12\x11\n\tvariables\x18\x05 \x01(\t\"\x11\n\x0f\x46\x61ilJobResponse\"_\n\x11ThrowErrorRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x11\n\terrorCode\x18\x02 \x01(\t\x12\x14\n\x0c\x65rrorMessage\x18\x03 \x01(\t\x12\x11\n\tvariables\x18\x04 \x01(\t\"\x14\n\x12ThrowErrorResponse\"\x89\x01\n\x15PublishMessageRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0e\x63orrelationKey\x18\x02 \x01(\t\x12\x12\n\ntimeToLive\x18\x03 \x01(\x03\x12\x11\n\tmessageId\x18\x04 \x01(\t\x12\x11\n\tvariables\x18\x05 \x01(\t\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"7\n\x16PublishMessageResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x10\n\x08tenantId\x18\x02 \x01(\t\"e\n\x16ResolveIncidentRequest\x12\x13\n\x0bincidentKey\x18\x01 \x01(\x03\x12\x1f\n\x12operationReference\x18\x02 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"\x19\n\x17ResolveIncidentResponse\"\x11\n\x0fTopologyRequest\"\xa2\x01\n\x10TopologyResponse\x12-\n\x07\x62rokers\x18\x01 \x03(\x0b\x32\x1c.gateway_protocol.BrokerInfo\x12\x13\n\x0b\x63lusterSize\x18\x02 \x01(\x05\x12\x17\n\x0fpartitionsCount\x18\x03 \x01(\x05\x12\x19\n\x11replicationFactor\x18\x04 \x01(\x05\x12\x16\n\x0egatewayVersion\x18\x05 \x01(\t\"z\n\nBrokerInfo\x12\x0e\n\x06nodeId\x18\x01 \x01(\x05\x12\x0c\n\x04host\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\x05\x12/\n\npartitions\x18\x04 \x03(\x0b\x32\x1b.gateway_protocol.Partition\x12\x0f\n\x07version\x18\x05 \x01(\t\"\xa0\x02\n\tPartition\x12\x13\n\x0bpartitionId\x18\x01 \x01(\x05\x12=\n\x04role\x18\x02 \x01(\x0e\x32/.gateway_protocol.Partition.PartitionBrokerRole\x12\x41\n\x06health\x18\x03 \x01(\x0e\x32\x31.gateway_protocol.Partition.PartitionBrokerHealth\"=\n\x13PartitionBrokerRole\x12\n\n\x06LEADER\x10\x00\x12\x0c\n\x08\x46OLLOWER\x10\x01\x12\x0c\n\x08INACTIVE\x10\x02\"=\n\x15PartitionBrokerHealth\x12\x0b\n\x07HEALTHY\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x08\n\x04\x44\x45\x41\x44\x10\x02\"r\n\x17UpdateJobRetriesRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x0f\n\x07retries\x18\x02 \x01(\x05\x12\x1f\n\x12operationReference\x18\x03 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"\x1a\n\x18UpdateJobRetriesResponse\"r\n\x17UpdateJobTimeoutRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x0f\n\x07timeout\x18\x02 \x01(\x03\x12\x1f\n\x12operationReference\x18\x03 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"\x1a\n\x18UpdateJobTimeoutResponse\"\x8b\x01\n\x13SetVariablesRequest\x12\x1a\n\x12\x65lementInstanceKey\x18\x01 \x01(\x03\x12\x11\n\tvariables\x18\x02 \x01(\t\x12\r\n\x05local\x18\x03 \x01(\x08\x12\x1f\n\x12operationReference\x18\x04 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"#\n\x14SetVariablesResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\"\xd8\x04\n\x1cModifyProcessInstanceRequest\x12\x1a\n\x12processInstanceKey\x18\x01 \x01(\x03\x12`\n\x14\x61\x63tivateInstructions\x18\x02 
\x03(\x0b\x32\x42.gateway_protocol.ModifyProcessInstanceRequest.ActivateInstruction\x12\x62\n\x15terminateInstructions\x18\x03 \x03(\x0b\x32\x43.gateway_protocol.ModifyProcessInstanceRequest.TerminateInstruction\x12\x1f\n\x12operationReference\x18\x04 \x01(\x04H\x00\x88\x01\x01\x1a\xae\x01\n\x13\x41\x63tivateInstruction\x12\x11\n\telementId\x18\x01 \x01(\t\x12\"\n\x1a\x61ncestorElementInstanceKey\x18\x02 \x01(\x03\x12`\n\x14variableInstructions\x18\x03 \x03(\x0b\x32\x42.gateway_protocol.ModifyProcessInstanceRequest.VariableInstruction\x1a\x39\n\x13VariableInstruction\x12\x11\n\tvariables\x18\x01 \x01(\t\x12\x0f\n\x07scopeId\x18\x02 \x01(\t\x1a\x32\n\x14TerminateInstruction\x12\x1a\n\x12\x65lementInstanceKey\x18\x01 \x01(\x03\x42\x15\n\x13_operationReference\"\x1f\n\x1dModifyProcessInstanceResponse\"\xa8\x03\n\x1dMigrateProcessInstanceRequest\x12\x1a\n\x12processInstanceKey\x18\x01 \x01(\x03\x12T\n\rmigrationPlan\x18\x02 \x01(\x0b\x32=.gateway_protocol.MigrateProcessInstanceRequest.MigrationPlan\x12\x1f\n\x12operationReference\x18\x03 \x01(\x04H\x00\x88\x01\x01\x1a\x94\x01\n\rMigrationPlan\x12\"\n\x1atargetProcessDefinitionKey\x18\x01 \x01(\x03\x12_\n\x13mappingInstructions\x18\x02 \x03(\x0b\x32\x42.gateway_protocol.MigrateProcessInstanceRequest.MappingInstruction\x1a\x46\n\x12MappingInstruction\x12\x17\n\x0fsourceElementId\x18\x01 \x01(\t\x12\x17\n\x0ftargetElementId\x18\x02 \x01(\tB\x15\n\x13_operationReference\" \n\x1eMigrateProcessInstanceResponse\"d\n\x15\x44\x65leteResourceRequest\x12\x13\n\x0bresourceKey\x18\x01 \x01(\x03\x12\x1f\n\x12operationReference\x18\x02 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"\x18\n\x16\x44\x65leteResourceResponse\"Q\n\x16\x42roadcastSignalRequest\x12\x12\n\nsignalName\x18\x01 \x01(\t\x12\x11\n\tvariables\x18\x02 \x01(\t\x12\x10\n\x08tenantId\x18\x03 \x01(\t\"8\n\x17\x42roadcastSignalResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x10\n\x08tenantId\x18\x02 \x01(\t2\xdb\x11\n\x07Gateway\x12\x61\n\x0c\x41\x63tivateJobs\x12%.gateway_protocol.ActivateJobsRequest\x1a&.gateway_protocol.ActivateJobsResponse\"\x00\x30\x01\x12g\n\x13StreamActivatedJobs\x12,.gateway_protocol.StreamActivatedJobsRequest\x1a\x1e.gateway_protocol.ActivatedJob\"\x00\x30\x01\x12z\n\x15\x43\x61ncelProcessInstance\x12..gateway_protocol.CancelProcessInstanceRequest\x1a/.gateway_protocol.CancelProcessInstanceResponse\"\x00\x12\\\n\x0b\x43ompleteJob\x12$.gateway_protocol.CompleteJobRequest\x1a%.gateway_protocol.CompleteJobResponse\"\x00\x12z\n\x15\x43reateProcessInstance\x12..gateway_protocol.CreateProcessInstanceRequest\x1a/.gateway_protocol.CreateProcessInstanceResponse\"\x00\x12\x98\x01\n\x1f\x43reateProcessInstanceWithResult\x12\x38.gateway_protocol.CreateProcessInstanceWithResultRequest\x1a\x39.gateway_protocol.CreateProcessInstanceWithResultResponse\"\x00\x12k\n\x10\x45valuateDecision\x12).gateway_protocol.EvaluateDecisionRequest\x1a*.gateway_protocol.EvaluateDecisionResponse\"\x00\x12\x65\n\rDeployProcess\x12&.gateway_protocol.DeployProcessRequest\x1a\'.gateway_protocol.DeployProcessResponse\"\x03\x88\x02\x01\x12\x65\n\x0e\x44\x65ployResource\x12\'.gateway_protocol.DeployResourceRequest\x1a(.gateway_protocol.DeployResourceResponse\"\x00\x12P\n\x07\x46\x61ilJob\x12 
.gateway_protocol.FailJobRequest\x1a!.gateway_protocol.FailJobResponse\"\x00\x12Y\n\nThrowError\x12#.gateway_protocol.ThrowErrorRequest\x1a$.gateway_protocol.ThrowErrorResponse\"\x00\x12\x65\n\x0ePublishMessage\x12\'.gateway_protocol.PublishMessageRequest\x1a(.gateway_protocol.PublishMessageResponse\"\x00\x12h\n\x0fResolveIncident\x12(.gateway_protocol.ResolveIncidentRequest\x1a).gateway_protocol.ResolveIncidentResponse\"\x00\x12_\n\x0cSetVariables\x12%.gateway_protocol.SetVariablesRequest\x1a&.gateway_protocol.SetVariablesResponse\"\x00\x12S\n\x08Topology\x12!.gateway_protocol.TopologyRequest\x1a\".gateway_protocol.TopologyResponse\"\x00\x12k\n\x10UpdateJobRetries\x12).gateway_protocol.UpdateJobRetriesRequest\x1a*.gateway_protocol.UpdateJobRetriesResponse\"\x00\x12z\n\x15ModifyProcessInstance\x12..gateway_protocol.ModifyProcessInstanceRequest\x1a/.gateway_protocol.ModifyProcessInstanceResponse\"\x00\x12}\n\x16MigrateProcessInstance\x12/.gateway_protocol.MigrateProcessInstanceRequest\x1a\x30.gateway_protocol.MigrateProcessInstanceResponse\"\x00\x12k\n\x10UpdateJobTimeout\x12).gateway_protocol.UpdateJobTimeoutRequest\x1a*.gateway_protocol.UpdateJobTimeoutResponse\"\x00\x12\x65\n\x0e\x44\x65leteResource\x12\'.gateway_protocol.DeleteResourceRequest\x1a(.gateway_protocol.DeleteResourceResponse\"\x00\x12h\n\x0f\x42roadcastSignal\x12(.gateway_protocol.BroadcastSignalRequest\x1a).gateway_protocol.BroadcastSignalResponse\"\x00\x42,\n!io.camunda.zeebe.gateway.protocolP\x00Z\x05./;pbb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -49,123 +49,123 @@ _globals['_ACTIVATEDJOB']._serialized_start=403 _globals['_ACTIVATEDJOB']._serialized_end=717 _globals['_CANCELPROCESSINSTANCEREQUEST']._serialized_start=719 - _globals['_CANCELPROCESSINSTANCEREQUEST']._serialized_end=777 - _globals['_CANCELPROCESSINSTANCERESPONSE']._serialized_start=779 - _globals['_CANCELPROCESSINSTANCERESPONSE']._serialized_end=810 - _globals['_COMPLETEJOBREQUEST']._serialized_start=812 - _globals['_COMPLETEJOBREQUEST']._serialized_end=867 - _globals['_COMPLETEJOBRESPONSE']._serialized_start=869 - _globals['_COMPLETEJOBRESPONSE']._serialized_end=890 - _globals['_CREATEPROCESSINSTANCEREQUEST']._serialized_start=893 - _globals['_CREATEPROCESSINSTANCEREQUEST']._serialized_end=1116 - _globals['_PROCESSINSTANCECREATIONSTARTINSTRUCTION']._serialized_start=1118 - _globals['_PROCESSINSTANCECREATIONSTARTINSTRUCTION']._serialized_end=1178 - _globals['_CREATEPROCESSINSTANCERESPONSE']._serialized_start=1181 - _globals['_CREATEPROCESSINSTANCERESPONSE']._serialized_end=1328 - _globals['_CREATEPROCESSINSTANCEWITHRESULTREQUEST']._serialized_start=1331 - _globals['_CREATEPROCESSINSTANCEWITHRESULTREQUEST']._serialized_end=1484 - _globals['_CREATEPROCESSINSTANCEWITHRESULTRESPONSE']._serialized_start=1487 - _globals['_CREATEPROCESSINSTANCEWITHRESULTRESPONSE']._serialized_end=1663 - _globals['_EVALUATEDECISIONREQUEST']._serialized_start=1665 - _globals['_EVALUATEDECISIONREQUEST']._serialized_end=1768 - _globals['_EVALUATEDECISIONRESPONSE']._serialized_start=1771 - _globals['_EVALUATEDECISIONRESPONSE']._serialized_end=2107 - _globals['_EVALUATEDDECISION']._serialized_start=2110 - _globals['_EVALUATEDDECISION']._serialized_end=2409 - _globals['_EVALUATEDDECISIONINPUT']._serialized_start=2411 - _globals['_EVALUATEDDECISIONINPUT']._serialized_end=2491 - _globals['_EVALUATEDDECISIONOUTPUT']._serialized_start=2493 - _globals['_EVALUATEDDECISIONOUTPUT']._serialized_end=2577 - 
_globals['_MATCHEDDECISIONRULE']._serialized_start=2579 - _globals['_MATCHEDDECISIONRULE']._serialized_end=2704 - _globals['_DEPLOYPROCESSREQUEST']._serialized_start=2706 - _globals['_DEPLOYPROCESSREQUEST']._serialized_end=2791 - _globals['_PROCESSREQUESTOBJECT']._serialized_start=2793 - _globals['_PROCESSREQUESTOBJECT']._serialized_end=2853 - _globals['_DEPLOYPROCESSRESPONSE']._serialized_start=2855 - _globals['_DEPLOYPROCESSRESPONSE']._serialized_end=2949 - _globals['_DEPLOYRESOURCEREQUEST']._serialized_start=2951 - _globals['_DEPLOYRESOURCEREQUEST']._serialized_end=3039 - _globals['_RESOURCE']._serialized_start=3041 - _globals['_RESOURCE']._serialized_end=3082 - _globals['_DEPLOYRESOURCERESPONSE']._serialized_start=3084 - _globals['_DEPLOYRESOURCERESPONSE']._serialized_end=3190 - _globals['_DEPLOYMENT']._serialized_start=3193 - _globals['_DEPLOYMENT']._serialized_end=3455 - _globals['_PROCESSMETADATA']._serialized_start=3457 - _globals['_PROCESSMETADATA']._serialized_end=3584 - _globals['_DECISIONMETADATA']._serialized_start=3587 - _globals['_DECISIONMETADATA']._serialized_end=3777 - _globals['_DECISIONREQUIREMENTSMETADATA']._serialized_start=3780 - _globals['_DECISIONREQUIREMENTSMETADATA']._serialized_end=3972 - _globals['_FORMMETADATA']._serialized_start=3974 - _globals['_FORMMETADATA']._serialized_end=4078 - _globals['_FAILJOBREQUEST']._serialized_start=4080 - _globals['_FAILJOBREQUEST']._serialized_end=4192 - _globals['_FAILJOBRESPONSE']._serialized_start=4194 - _globals['_FAILJOBRESPONSE']._serialized_end=4211 - _globals['_THROWERRORREQUEST']._serialized_start=4213 - _globals['_THROWERRORREQUEST']._serialized_end=4308 - _globals['_THROWERRORRESPONSE']._serialized_start=4310 - _globals['_THROWERRORRESPONSE']._serialized_end=4330 - _globals['_PUBLISHMESSAGEREQUEST']._serialized_start=4333 - _globals['_PUBLISHMESSAGEREQUEST']._serialized_end=4470 - _globals['_PUBLISHMESSAGERESPONSE']._serialized_start=4472 - _globals['_PUBLISHMESSAGERESPONSE']._serialized_end=4527 - _globals['_RESOLVEINCIDENTREQUEST']._serialized_start=4529 - _globals['_RESOLVEINCIDENTREQUEST']._serialized_end=4574 - _globals['_RESOLVEINCIDENTRESPONSE']._serialized_start=4576 - _globals['_RESOLVEINCIDENTRESPONSE']._serialized_end=4601 - _globals['_TOPOLOGYREQUEST']._serialized_start=4603 - _globals['_TOPOLOGYREQUEST']._serialized_end=4620 - _globals['_TOPOLOGYRESPONSE']._serialized_start=4623 - _globals['_TOPOLOGYRESPONSE']._serialized_end=4785 - _globals['_BROKERINFO']._serialized_start=4787 - _globals['_BROKERINFO']._serialized_end=4909 - _globals['_PARTITION']._serialized_start=4912 - _globals['_PARTITION']._serialized_end=5200 - _globals['_PARTITION_PARTITIONBROKERROLE']._serialized_start=5076 - _globals['_PARTITION_PARTITIONBROKERROLE']._serialized_end=5137 - _globals['_PARTITION_PARTITIONBROKERHEALTH']._serialized_start=5139 - _globals['_PARTITION_PARTITIONBROKERHEALTH']._serialized_end=5200 - _globals['_UPDATEJOBRETRIESREQUEST']._serialized_start=5202 - _globals['_UPDATEJOBRETRIESREQUEST']._serialized_end=5260 - _globals['_UPDATEJOBRETRIESRESPONSE']._serialized_start=5262 - _globals['_UPDATEJOBRETRIESRESPONSE']._serialized_end=5288 - _globals['_UPDATEJOBTIMEOUTREQUEST']._serialized_start=5290 - _globals['_UPDATEJOBTIMEOUTREQUEST']._serialized_end=5348 - _globals['_UPDATEJOBTIMEOUTRESPONSE']._serialized_start=5350 - _globals['_UPDATEJOBTIMEOUTRESPONSE']._serialized_end=5376 - _globals['_SETVARIABLESREQUEST']._serialized_start=5378 - _globals['_SETVARIABLESREQUEST']._serialized_end=5461 - 
_globals['_SETVARIABLESRESPONSE']._serialized_start=5463 - _globals['_SETVARIABLESRESPONSE']._serialized_end=5498 - _globals['_MODIFYPROCESSINSTANCEREQUEST']._serialized_start=5501 - _globals['_MODIFYPROCESSINSTANCEREQUEST']._serialized_end=6045 - _globals['_MODIFYPROCESSINSTANCEREQUEST_ACTIVATEINSTRUCTION']._serialized_start=5760 - _globals['_MODIFYPROCESSINSTANCEREQUEST_ACTIVATEINSTRUCTION']._serialized_end=5934 - _globals['_MODIFYPROCESSINSTANCEREQUEST_VARIABLEINSTRUCTION']._serialized_start=5936 - _globals['_MODIFYPROCESSINSTANCEREQUEST_VARIABLEINSTRUCTION']._serialized_end=5993 - _globals['_MODIFYPROCESSINSTANCEREQUEST_TERMINATEINSTRUCTION']._serialized_start=5995 - _globals['_MODIFYPROCESSINSTANCEREQUEST_TERMINATEINSTRUCTION']._serialized_end=6045 - _globals['_MODIFYPROCESSINSTANCERESPONSE']._serialized_start=6047 - _globals['_MODIFYPROCESSINSTANCERESPONSE']._serialized_end=6078 - _globals['_MIGRATEPROCESSINSTANCEREQUEST']._serialized_start=6081 - _globals['_MIGRATEPROCESSINSTANCEREQUEST']._serialized_end=6449 - _globals['_MIGRATEPROCESSINSTANCEREQUEST_MIGRATIONPLAN']._serialized_start=6229 - _globals['_MIGRATEPROCESSINSTANCEREQUEST_MIGRATIONPLAN']._serialized_end=6377 - _globals['_MIGRATEPROCESSINSTANCEREQUEST_MAPPINGINSTRUCTION']._serialized_start=6379 - _globals['_MIGRATEPROCESSINSTANCEREQUEST_MAPPINGINSTRUCTION']._serialized_end=6449 - _globals['_MIGRATEPROCESSINSTANCERESPONSE']._serialized_start=6451 - _globals['_MIGRATEPROCESSINSTANCERESPONSE']._serialized_end=6483 - _globals['_DELETERESOURCEREQUEST']._serialized_start=6485 - _globals['_DELETERESOURCEREQUEST']._serialized_end=6529 - _globals['_DELETERESOURCERESPONSE']._serialized_start=6531 - _globals['_DELETERESOURCERESPONSE']._serialized_end=6555 - _globals['_BROADCASTSIGNALREQUEST']._serialized_start=6557 - _globals['_BROADCASTSIGNALREQUEST']._serialized_end=6638 - _globals['_BROADCASTSIGNALRESPONSE']._serialized_start=6640 - _globals['_BROADCASTSIGNALRESPONSE']._serialized_end=6696 - _globals['_GATEWAY']._serialized_start=6699 - _globals['_GATEWAY']._serialized_end=8966 + _globals['_CANCELPROCESSINSTANCEREQUEST']._serialized_end=833 + _globals['_CANCELPROCESSINSTANCERESPONSE']._serialized_start=835 + _globals['_CANCELPROCESSINSTANCERESPONSE']._serialized_end=866 + _globals['_COMPLETEJOBREQUEST']._serialized_start=868 + _globals['_COMPLETEJOBREQUEST']._serialized_end=923 + _globals['_COMPLETEJOBRESPONSE']._serialized_start=925 + _globals['_COMPLETEJOBRESPONSE']._serialized_end=946 + _globals['_CREATEPROCESSINSTANCEREQUEST']._serialized_start=949 + _globals['_CREATEPROCESSINSTANCEREQUEST']._serialized_end=1228 + _globals['_PROCESSINSTANCECREATIONSTARTINSTRUCTION']._serialized_start=1230 + _globals['_PROCESSINSTANCECREATIONSTARTINSTRUCTION']._serialized_end=1290 + _globals['_CREATEPROCESSINSTANCERESPONSE']._serialized_start=1293 + _globals['_CREATEPROCESSINSTANCERESPONSE']._serialized_end=1440 + _globals['_CREATEPROCESSINSTANCEWITHRESULTREQUEST']._serialized_start=1443 + _globals['_CREATEPROCESSINSTANCEWITHRESULTREQUEST']._serialized_end=1596 + _globals['_CREATEPROCESSINSTANCEWITHRESULTRESPONSE']._serialized_start=1599 + _globals['_CREATEPROCESSINSTANCEWITHRESULTRESPONSE']._serialized_end=1775 + _globals['_EVALUATEDECISIONREQUEST']._serialized_start=1777 + _globals['_EVALUATEDECISIONREQUEST']._serialized_end=1880 + _globals['_EVALUATEDECISIONRESPONSE']._serialized_start=1883 + _globals['_EVALUATEDECISIONRESPONSE']._serialized_end=2248 + _globals['_EVALUATEDDECISION']._serialized_start=2251 + 
_globals['_EVALUATEDDECISION']._serialized_end=2550 + _globals['_EVALUATEDDECISIONINPUT']._serialized_start=2552 + _globals['_EVALUATEDDECISIONINPUT']._serialized_end=2632 + _globals['_EVALUATEDDECISIONOUTPUT']._serialized_start=2634 + _globals['_EVALUATEDDECISIONOUTPUT']._serialized_end=2718 + _globals['_MATCHEDDECISIONRULE']._serialized_start=2720 + _globals['_MATCHEDDECISIONRULE']._serialized_end=2845 + _globals['_DEPLOYPROCESSREQUEST']._serialized_start=2847 + _globals['_DEPLOYPROCESSREQUEST']._serialized_end=2932 + _globals['_PROCESSREQUESTOBJECT']._serialized_start=2934 + _globals['_PROCESSREQUESTOBJECT']._serialized_end=2994 + _globals['_DEPLOYPROCESSRESPONSE']._serialized_start=2996 + _globals['_DEPLOYPROCESSRESPONSE']._serialized_end=3090 + _globals['_DEPLOYRESOURCEREQUEST']._serialized_start=3092 + _globals['_DEPLOYRESOURCEREQUEST']._serialized_end=3180 + _globals['_RESOURCE']._serialized_start=3182 + _globals['_RESOURCE']._serialized_end=3223 + _globals['_DEPLOYRESOURCERESPONSE']._serialized_start=3225 + _globals['_DEPLOYRESOURCERESPONSE']._serialized_end=3331 + _globals['_DEPLOYMENT']._serialized_start=3334 + _globals['_DEPLOYMENT']._serialized_end=3596 + _globals['_PROCESSMETADATA']._serialized_start=3598 + _globals['_PROCESSMETADATA']._serialized_end=3725 + _globals['_DECISIONMETADATA']._serialized_start=3728 + _globals['_DECISIONMETADATA']._serialized_end=3918 + _globals['_DECISIONREQUIREMENTSMETADATA']._serialized_start=3921 + _globals['_DECISIONREQUIREMENTSMETADATA']._serialized_end=4113 + _globals['_FORMMETADATA']._serialized_start=4115 + _globals['_FORMMETADATA']._serialized_end=4219 + _globals['_FAILJOBREQUEST']._serialized_start=4221 + _globals['_FAILJOBREQUEST']._serialized_end=4333 + _globals['_FAILJOBRESPONSE']._serialized_start=4335 + _globals['_FAILJOBRESPONSE']._serialized_end=4352 + _globals['_THROWERRORREQUEST']._serialized_start=4354 + _globals['_THROWERRORREQUEST']._serialized_end=4449 + _globals['_THROWERRORRESPONSE']._serialized_start=4451 + _globals['_THROWERRORRESPONSE']._serialized_end=4471 + _globals['_PUBLISHMESSAGEREQUEST']._serialized_start=4474 + _globals['_PUBLISHMESSAGEREQUEST']._serialized_end=4611 + _globals['_PUBLISHMESSAGERESPONSE']._serialized_start=4613 + _globals['_PUBLISHMESSAGERESPONSE']._serialized_end=4668 + _globals['_RESOLVEINCIDENTREQUEST']._serialized_start=4670 + _globals['_RESOLVEINCIDENTREQUEST']._serialized_end=4771 + _globals['_RESOLVEINCIDENTRESPONSE']._serialized_start=4773 + _globals['_RESOLVEINCIDENTRESPONSE']._serialized_end=4798 + _globals['_TOPOLOGYREQUEST']._serialized_start=4800 + _globals['_TOPOLOGYREQUEST']._serialized_end=4817 + _globals['_TOPOLOGYRESPONSE']._serialized_start=4820 + _globals['_TOPOLOGYRESPONSE']._serialized_end=4982 + _globals['_BROKERINFO']._serialized_start=4984 + _globals['_BROKERINFO']._serialized_end=5106 + _globals['_PARTITION']._serialized_start=5109 + _globals['_PARTITION']._serialized_end=5397 + _globals['_PARTITION_PARTITIONBROKERROLE']._serialized_start=5273 + _globals['_PARTITION_PARTITIONBROKERROLE']._serialized_end=5334 + _globals['_PARTITION_PARTITIONBROKERHEALTH']._serialized_start=5336 + _globals['_PARTITION_PARTITIONBROKERHEALTH']._serialized_end=5397 + _globals['_UPDATEJOBRETRIESREQUEST']._serialized_start=5399 + _globals['_UPDATEJOBRETRIESREQUEST']._serialized_end=5513 + _globals['_UPDATEJOBRETRIESRESPONSE']._serialized_start=5515 + _globals['_UPDATEJOBRETRIESRESPONSE']._serialized_end=5541 + _globals['_UPDATEJOBTIMEOUTREQUEST']._serialized_start=5543 + 
_globals['_UPDATEJOBTIMEOUTREQUEST']._serialized_end=5657 + _globals['_UPDATEJOBTIMEOUTRESPONSE']._serialized_start=5659 + _globals['_UPDATEJOBTIMEOUTRESPONSE']._serialized_end=5685 + _globals['_SETVARIABLESREQUEST']._serialized_start=5688 + _globals['_SETVARIABLESREQUEST']._serialized_end=5827 + _globals['_SETVARIABLESRESPONSE']._serialized_start=5829 + _globals['_SETVARIABLESRESPONSE']._serialized_end=5864 + _globals['_MODIFYPROCESSINSTANCEREQUEST']._serialized_start=5867 + _globals['_MODIFYPROCESSINSTANCEREQUEST']._serialized_end=6467 + _globals['_MODIFYPROCESSINSTANCEREQUEST_ACTIVATEINSTRUCTION']._serialized_start=6159 + _globals['_MODIFYPROCESSINSTANCEREQUEST_ACTIVATEINSTRUCTION']._serialized_end=6333 + _globals['_MODIFYPROCESSINSTANCEREQUEST_VARIABLEINSTRUCTION']._serialized_start=6335 + _globals['_MODIFYPROCESSINSTANCEREQUEST_VARIABLEINSTRUCTION']._serialized_end=6392 + _globals['_MODIFYPROCESSINSTANCEREQUEST_TERMINATEINSTRUCTION']._serialized_start=6394 + _globals['_MODIFYPROCESSINSTANCEREQUEST_TERMINATEINSTRUCTION']._serialized_end=6444 + _globals['_MODIFYPROCESSINSTANCERESPONSE']._serialized_start=6469 + _globals['_MODIFYPROCESSINSTANCERESPONSE']._serialized_end=6500 + _globals['_MIGRATEPROCESSINSTANCEREQUEST']._serialized_start=6503 + _globals['_MIGRATEPROCESSINSTANCEREQUEST']._serialized_end=6927 + _globals['_MIGRATEPROCESSINSTANCEREQUEST_MIGRATIONPLAN']._serialized_start=6684 + _globals['_MIGRATEPROCESSINSTANCEREQUEST_MIGRATIONPLAN']._serialized_end=6832 + _globals['_MIGRATEPROCESSINSTANCEREQUEST_MAPPINGINSTRUCTION']._serialized_start=6834 + _globals['_MIGRATEPROCESSINSTANCEREQUEST_MAPPINGINSTRUCTION']._serialized_end=6904 + _globals['_MIGRATEPROCESSINSTANCERESPONSE']._serialized_start=6929 + _globals['_MIGRATEPROCESSINSTANCERESPONSE']._serialized_end=6961 + _globals['_DELETERESOURCEREQUEST']._serialized_start=6963 + _globals['_DELETERESOURCEREQUEST']._serialized_end=7063 + _globals['_DELETERESOURCERESPONSE']._serialized_start=7065 + _globals['_DELETERESOURCERESPONSE']._serialized_end=7089 + _globals['_BROADCASTSIGNALREQUEST']._serialized_start=7091 + _globals['_BROADCASTSIGNALREQUEST']._serialized_end=7172 + _globals['_BROADCASTSIGNALRESPONSE']._serialized_start=7174 + _globals['_BROADCASTSIGNALRESPONSE']._serialized_end=7230 + _globals['_GATEWAY']._serialized_start=7233 + _globals['_GATEWAY']._serialized_end=9500 # @@protoc_insertion_point(module_scope) diff --git a/pyzeebe/proto/gateway_pb2.pyi b/pyzeebe/proto/gateway_pb2.pyi index 7f8e7dd3..ff3b3128 100644 --- a/pyzeebe/proto/gateway_pb2.pyi +++ b/pyzeebe/proto/gateway_pb2.pyi @@ -215,16 +215,22 @@ class CancelProcessInstanceRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int + OPERATIONREFERENCE_FIELD_NUMBER: builtins.int processInstanceKey: builtins.int """the process instance key (as, for example, obtained from CreateProcessInstanceResponse) """ + operationReference: builtins.int + """a reference key chosen by the user and will be part of all records resulted from this operation""" def __init__( self, *, processInstanceKey: builtins.int = ..., + operationReference: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["processInstanceKey", b"processInstanceKey"]) -> None: ... + def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference", "processInstanceKey", b"processInstanceKey"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ... global___CancelProcessInstanceRequest = CancelProcessInstanceRequest @@ -278,6 +284,7 @@ class CreateProcessInstanceRequest(google.protobuf.message.Message): VARIABLES_FIELD_NUMBER: builtins.int STARTINSTRUCTIONS_FIELD_NUMBER: builtins.int TENANTID_FIELD_NUMBER: builtins.int + OPERATIONREFERENCE_FIELD_NUMBER: builtins.int processDefinitionKey: builtins.int """the unique key identifying the process definition (e.g. returned from a process in the DeployProcessResponse message) @@ -295,6 +302,8 @@ class CreateProcessInstanceRequest(google.protobuf.message.Message): """ tenantId: builtins.str """the tenant id of the process definition""" + operationReference: builtins.int + """a reference key chosen by the user and will be part of all records resulted from this operation""" @property def startInstructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProcessInstanceCreationStartInstruction]: """List of start instructions. If empty (default) the process instance @@ -311,8 +320,11 @@ class CreateProcessInstanceRequest(google.protobuf.message.Message): variables: builtins.str = ..., startInstructions: collections.abc.Iterable[global___ProcessInstanceCreationStartInstruction] | None = ..., tenantId: builtins.str = ..., + operationReference: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bpmnProcessId", b"bpmnProcessId", "processDefinitionKey", b"processDefinitionKey", "startInstructions", b"startInstructions", "tenantId", b"tenantId", "variables", b"variables", "version", b"version"]) -> None: ... + def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "bpmnProcessId", b"bpmnProcessId", "operationReference", b"operationReference", "processDefinitionKey", b"processDefinitionKey", "startInstructions", b"startInstructions", "tenantId", b"tenantId", "variables", b"variables", "version", b"version"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ... global___CreateProcessInstanceRequest = CreateProcessInstanceRequest @@ -497,6 +509,7 @@ class EvaluateDecisionResponse(google.protobuf.message.Message): FAILEDDECISIONID_FIELD_NUMBER: builtins.int FAILUREMESSAGE_FIELD_NUMBER: builtins.int TENANTID_FIELD_NUMBER: builtins.int + DECISIONINSTANCEKEY_FIELD_NUMBER: builtins.int decisionKey: builtins.int """the unique key identifying the decision which was evaluated (e.g. 
returned from a decision in the DeployResourceResponse message) @@ -528,6 +541,8 @@ class EvaluateDecisionResponse(google.protobuf.message.Message): """an optional message describing why the decision which was evaluated failed""" tenantId: builtins.str """the tenant identifier of the evaluated decision""" + decisionInstanceKey: builtins.int + """the unique key identifying this decision evaluation""" @property def evaluatedDecisions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EvaluatedDecision]: """a list of decisions that were evaluated within the requested decision evaluation""" @@ -546,8 +561,9 @@ class EvaluateDecisionResponse(google.protobuf.message.Message): failedDecisionId: builtins.str = ..., failureMessage: builtins.str = ..., tenantId: builtins.str = ..., + decisionInstanceKey: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["decisionId", b"decisionId", "decisionKey", b"decisionKey", "decisionName", b"decisionName", "decisionOutput", b"decisionOutput", "decisionRequirementsId", b"decisionRequirementsId", "decisionRequirementsKey", b"decisionRequirementsKey", "decisionVersion", b"decisionVersion", "evaluatedDecisions", b"evaluatedDecisions", "failedDecisionId", b"failedDecisionId", "failureMessage", b"failureMessage", "tenantId", b"tenantId"]) -> None: ... + def ClearField(self, field_name: typing.Literal["decisionId", b"decisionId", "decisionInstanceKey", b"decisionInstanceKey", "decisionKey", b"decisionKey", "decisionName", b"decisionName", "decisionOutput", b"decisionOutput", "decisionRequirementsId", b"decisionRequirementsId", "decisionRequirementsKey", b"decisionRequirementsKey", "decisionVersion", b"decisionVersion", "evaluatedDecisions", b"evaluatedDecisions", "failedDecisionId", b"failedDecisionId", "failureMessage", b"failureMessage", "tenantId", b"tenantId"]) -> None: ... global___EvaluateDecisionResponse = EvaluateDecisionResponse @@ -1166,14 +1182,20 @@ class ResolveIncidentRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor INCIDENTKEY_FIELD_NUMBER: builtins.int + OPERATIONREFERENCE_FIELD_NUMBER: builtins.int incidentKey: builtins.int """the unique ID of the incident to resolve""" + operationReference: builtins.int + """a reference key chosen by the user and will be part of all records resulted from this operation""" def __init__( self, *, incidentKey: builtins.int = ..., + operationReference: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["incidentKey", b"incidentKey"]) -> None: ... + def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "incidentKey", b"incidentKey", "operationReference", b"operationReference"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ... 
global___ResolveIncidentRequest = ResolveIncidentRequest @@ -1329,17 +1351,23 @@ class UpdateJobRetriesRequest(google.protobuf.message.Message): JOBKEY_FIELD_NUMBER: builtins.int RETRIES_FIELD_NUMBER: builtins.int + OPERATIONREFERENCE_FIELD_NUMBER: builtins.int jobKey: builtins.int """the unique job identifier, as obtained through ActivateJobs""" retries: builtins.int """the new amount of retries for the job; must be positive""" + operationReference: builtins.int + """a reference key chosen by the user and will be part of all records resulted from this operation""" def __init__( self, *, jobKey: builtins.int = ..., retries: builtins.int = ..., + operationReference: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["jobKey", b"jobKey", "retries", b"retries"]) -> None: ... + def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "jobKey", b"jobKey", "operationReference", b"operationReference", "retries", b"retries"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ... global___UpdateJobRetriesRequest = UpdateJobRetriesRequest @@ -1359,17 +1387,23 @@ class UpdateJobTimeoutRequest(google.protobuf.message.Message): JOBKEY_FIELD_NUMBER: builtins.int TIMEOUT_FIELD_NUMBER: builtins.int + OPERATIONREFERENCE_FIELD_NUMBER: builtins.int jobKey: builtins.int """the unique job identifier, as obtained from ActivateJobsResponse""" timeout: builtins.int """the duration of the new timeout in ms, starting from the current moment""" + operationReference: builtins.int + """a reference key chosen by the user and will be part of all records resulted from this operation""" def __init__( self, *, jobKey: builtins.int = ..., timeout: builtins.int = ..., + operationReference: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["jobKey", b"jobKey", "timeout", b"timeout"]) -> None: ... + def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "jobKey", b"jobKey", "operationReference", b"operationReference", "timeout", b"timeout"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ... global___UpdateJobTimeoutRequest = UpdateJobTimeoutRequest @@ -1390,6 +1424,7 @@ class SetVariablesRequest(google.protobuf.message.Message): ELEMENTINSTANCEKEY_FIELD_NUMBER: builtins.int VARIABLES_FIELD_NUMBER: builtins.int LOCAL_FIELD_NUMBER: builtins.int + OPERATIONREFERENCE_FIELD_NUMBER: builtins.int elementInstanceKey: builtins.int """the unique identifier of a particular element; can be the process instance key (as obtained during instance creation), or a given element, such as a service task (see @@ -1408,14 +1443,19 @@ class SetVariablesRequest(google.protobuf.message.Message): be unchanged, and scope 2 will now be `{ "bar" : 1, "foo" 5 }`. if local was false, however, then scope 1 would be `{ "foo": 5 }`, and scope 2 would be `{ "bar" : 1 }`. 
""" + operationReference: builtins.int + """a reference key chosen by the user and will be part of all records resulted from this operation""" def __init__( self, *, elementInstanceKey: builtins.int = ..., variables: builtins.str = ..., local: builtins.bool = ..., + operationReference: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["elementInstanceKey", b"elementInstanceKey", "local", b"local", "variables", b"variables"]) -> None: ... + def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "elementInstanceKey", b"elementInstanceKey", "local", b"local", "operationReference", b"operationReference", "variables", b"variables"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ... global___SetVariablesRequest = SetVariablesRequest @@ -1508,8 +1548,11 @@ class ModifyProcessInstanceRequest(google.protobuf.message.Message): PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int ACTIVATEINSTRUCTIONS_FIELD_NUMBER: builtins.int TERMINATEINSTRUCTIONS_FIELD_NUMBER: builtins.int + OPERATIONREFERENCE_FIELD_NUMBER: builtins.int processInstanceKey: builtins.int """the key of the process instance that should be modified""" + operationReference: builtins.int + """a reference key chosen by the user and will be part of all records resulted from this operation""" @property def activateInstructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModifyProcessInstanceRequest.ActivateInstruction]: """instructions describing which elements should be activated in which scopes, @@ -1526,8 +1569,11 @@ class ModifyProcessInstanceRequest(google.protobuf.message.Message): processInstanceKey: builtins.int = ..., activateInstructions: collections.abc.Iterable[global___ModifyProcessInstanceRequest.ActivateInstruction] | None = ..., terminateInstructions: collections.abc.Iterable[global___ModifyProcessInstanceRequest.TerminateInstruction] | None = ..., + operationReference: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["activateInstructions", b"activateInstructions", "processInstanceKey", b"processInstanceKey", "terminateInstructions", b"terminateInstructions"]) -> None: ... + def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "activateInstructions", b"activateInstructions", "operationReference", b"operationReference", "processInstanceKey", b"processInstanceKey", "terminateInstructions", b"terminateInstructions"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ... 
global___ModifyProcessInstanceRequest = ModifyProcessInstanceRequest @@ -1585,8 +1631,11 @@ class MigrateProcessInstanceRequest(google.protobuf.message.Message): PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int MIGRATIONPLAN_FIELD_NUMBER: builtins.int + OPERATIONREFERENCE_FIELD_NUMBER: builtins.int processInstanceKey: builtins.int """key of the process instance to migrate""" + operationReference: builtins.int + """a reference key chosen by the user and will be part of all records resulted from this operation""" @property def migrationPlan(self) -> global___MigrateProcessInstanceRequest.MigrationPlan: """the migration plan that defines target process and element mappings""" @@ -1596,9 +1645,11 @@ class MigrateProcessInstanceRequest(google.protobuf.message.Message): *, processInstanceKey: builtins.int = ..., migrationPlan: global___MigrateProcessInstanceRequest.MigrationPlan | None = ..., + operationReference: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["migrationPlan", b"migrationPlan"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["migrationPlan", b"migrationPlan", "processInstanceKey", b"processInstanceKey"]) -> None: ... + def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "migrationPlan", b"migrationPlan", "operationReference", b"operationReference"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "migrationPlan", b"migrationPlan", "operationReference", b"operationReference", "processInstanceKey", b"processInstanceKey"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ... global___MigrateProcessInstanceRequest = MigrateProcessInstanceRequest @@ -1617,16 +1668,22 @@ class DeleteResourceRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor RESOURCEKEY_FIELD_NUMBER: builtins.int + OPERATIONREFERENCE_FIELD_NUMBER: builtins.int resourceKey: builtins.int """The key of the resource that should be deleted. This can either be the key of a process definition, the key of a decision requirements definition or the key of a form. """ + operationReference: builtins.int + """a reference key chosen by the user and will be part of all records resulted from this operation""" def __init__( self, *, resourceKey: builtins.int = ..., + operationReference: builtins.int | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["resourceKey", b"resourceKey"]) -> None: ... + def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference", "resourceKey", b"resourceKey"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ... 
global___DeleteResourceRequest = DeleteResourceRequest diff --git a/pyzeebe/proto/gateway_pb2_grpc.py b/pyzeebe/proto/gateway_pb2_grpc.py index 6bfd2aa2..9ab03c4f 100644 --- a/pyzeebe/proto/gateway_pb2_grpc.py +++ b/pyzeebe/proto/gateway_pb2_grpc.py @@ -169,6 +169,12 @@ def StreamActivatedJobs(self, request, context): INVALID_ARGUMENT: - type is blank (empty string, null) - timeout less than 1 + - If multi-tenancy is enabled, and tenantIds is empty (empty list) + - If multi-tenancy is enabled, and an invalid tenant ID is provided. A tenant ID is considered invalid if: + - The tenant ID is blank (empty string, null) + - The tenant ID is longer than 31 characters + - The tenant ID contains anything other than alphanumeric characters, dot (.), dash (-), or underscore (_) + - If multi-tenancy is disabled, and tenantIds is not empty (empty list), or has an ID other than """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -427,9 +433,12 @@ def MigrateProcessInstance(self, request, context): FAILED_PRECONDITION: - not all active elements in the given process instance are mapped to the elements in the target process definition - a mapping instruction changes the type of an element or event + - a mapping instruction changes the implementation of a task + - a mapping instruction detaches a boundary event from an active element - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on) - a mapping instruction refers to element in unsupported scenarios. (i.e. migration is not supported when process instance or target process elements contains event subscriptions) + - multiple mapping instructions target the same boundary event INVALID_ARGUMENT: - A `sourceElementId` does not refer to an element in the process instance's process definition diff --git a/pyzeebe/proto/gateway_pb2_grpc.pyi b/pyzeebe/proto/gateway_pb2_grpc.pyi index 277c7961..df1f1074 100644 --- a/pyzeebe/proto/gateway_pb2_grpc.pyi +++ b/pyzeebe/proto/gateway_pb2_grpc.pyi @@ -47,6 +47,12 @@ class GatewayStub: INVALID_ARGUMENT: - type is blank (empty string, null) - timeout less than 1 + - If multi-tenancy is enabled, and tenantIds is empty (empty list) + - If multi-tenancy is enabled, and an invalid tenant ID is provided. A tenant ID is considered invalid if: + - The tenant ID is blank (empty string, null) + - The tenant ID is longer than 31 characters + - The tenant ID contains anything other than alphanumeric characters, dot (.), dash (-), or underscore (_) + - If multi-tenancy is disabled, and tenantIds is not empty (empty list), or has an ID other than """ CancelProcessInstance: grpc.UnaryUnaryMultiCallable[ @@ -305,9 +311,12 @@ class GatewayStub: FAILED_PRECONDITION: - not all active elements in the given process instance are mapped to the elements in the target process definition - a mapping instruction changes the type of an element or event + - a mapping instruction changes the implementation of a task + - a mapping instruction detaches a boundary event from an active element - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on) - a mapping instruction refers to element in unsupported scenarios. (i.e. 
migration is not supported when process instance or target process elements contains event subscriptions) + - multiple mapping instructions target the same boundary event INVALID_ARGUMENT: - A `sourceElementId` does not refer to an element in the process instance's process definition @@ -394,6 +403,12 @@ class GatewayAsyncStub: INVALID_ARGUMENT: - type is blank (empty string, null) - timeout less than 1 + - If multi-tenancy is enabled, and tenantIds is empty (empty list) + - If multi-tenancy is enabled, and an invalid tenant ID is provided. A tenant ID is considered invalid if: + - The tenant ID is blank (empty string, null) + - The tenant ID is longer than 31 characters + - The tenant ID contains anything other than alphanumeric characters, dot (.), dash (-), or underscore (_) + - If multi-tenancy is disabled, and tenantIds is not empty (empty list), or has an ID other than """ CancelProcessInstance: grpc.aio.UnaryUnaryMultiCallable[ @@ -652,9 +667,12 @@ class GatewayAsyncStub: FAILED_PRECONDITION: - not all active elements in the given process instance are mapped to the elements in the target process definition - a mapping instruction changes the type of an element or event + - a mapping instruction changes the implementation of a task + - a mapping instruction detaches a boundary event from an active element - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on) - a mapping instruction refers to element in unsupported scenarios. (i.e. migration is not supported when process instance or target process elements contains event subscriptions) + - multiple mapping instructions target the same boundary event INVALID_ARGUMENT: - A `sourceElementId` does not refer to an element in the process instance's process definition @@ -745,6 +763,12 @@ class GatewayServicer(metaclass=abc.ABCMeta): INVALID_ARGUMENT: - type is blank (empty string, null) - timeout less than 1 + - If multi-tenancy is enabled, and tenantIds is empty (empty list) + - If multi-tenancy is enabled, and an invalid tenant ID is provided. A tenant ID is considered invalid if: + - The tenant ID is blank (empty string, null) + - The tenant ID is longer than 31 characters + - The tenant ID contains anything other than alphanumeric characters, dot (.), dash (-), or underscore (_) + - If multi-tenancy is disabled, and tenantIds is not empty (empty list), or has an ID other than """ @abc.abstractmethod @@ -1035,9 +1059,12 @@ class GatewayServicer(metaclass=abc.ABCMeta): FAILED_PRECONDITION: - not all active elements in the given process instance are mapped to the elements in the target process definition - a mapping instruction changes the type of an element or event + - a mapping instruction changes the implementation of a task + - a mapping instruction detaches a boundary event from an active element - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on) - a mapping instruction refers to element in unsupported scenarios. (i.e. 
migration is not supported when process instance or target process elements contains event subscriptions) + - multiple mapping instructions target the same boundary event INVALID_ARGUMENT: - A `sourceElementId` does not refer to an element in the process instance's process definition diff --git a/update_proto.py b/update_proto.py index 193bac18..c7d2a97e 100644 --- a/update_proto.py +++ b/update_proto.py @@ -4,7 +4,7 @@ import requests from grpc_tools.protoc import main as grpc_tools_protoc_main -zeebe_proto_version = "8.4.13" +zeebe_proto_version = "8.6.6" def generate_proto(): @@ -13,7 +13,7 @@ def generate_proto(): for path in proto_dir.glob("*pb2*"): os.remove(path) - proto_url = f"https://raw.githubusercontent.com/camunda/camunda/refs/tags/{zeebe_proto_version}/gateway-protocol/src/main/proto/gateway.proto" + proto_url = f"https://raw.githubusercontent.com/camunda/camunda/refs/tags/{zeebe_proto_version}/zeebe/gateway-protocol/src/main/proto/gateway.proto" proto_content = requests.get(proto_url, allow_redirects=True) with proto_file.open("wb") as tmpfile: tmpfile.write(proto_content.content) From aa3be43ad30cc1ab2ba07e53a619ba2aea452ee2 Mon Sep 17 00:00:00 2001 From: Dmitriy Date: Fri, 27 Dec 2024 17:29:43 +0500 Subject: [PATCH 3/9] update docs --- docs/index.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/index.rst b/docs/index.rst index 9958cf85..504763e2 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -48,8 +48,7 @@ Creating a client Dependencies ============ -* python 3.7+ -* zeebe-grpc +* python 3.9+ * grpcio * protobuf * oauthlib From a5fcd202d385c6c36d5f8a1bb40331dd6a2f527d Mon Sep 17 00:00:00 2001 From: Dmitriy Date: Fri, 27 Dec 2024 17:39:54 +0500 Subject: [PATCH 4/9] clean code --- .gitignore | 2 -- 1 file changed, 2 deletions(-) diff --git a/.gitignore b/.gitignore index 8da334a9..ac3aa17f 100644 --- a/.gitignore +++ b/.gitignore @@ -145,5 +145,3 @@ cython_debug/ # pycharm .idea - -zeebe_grpc/ From a14f8f6314e64c4c8722e6ed25a2493e28195a6c Mon Sep 17 00:00:00 2001 From: Dmitriy Date: Fri, 27 Dec 2024 17:40:26 +0500 Subject: [PATCH 5/9] clean code --- pyproject.toml | 32 +++++++++++++++++++------------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index ddadcd1d..e77616b7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,9 @@ name = "pyzeebe" version = "4.2.0" description = "Zeebe client api" authors = ["Jonatan Martens "] -maintainers = ["Dmitriy "] +maintainers = [ + "Dmitriy ", +] license = "MIT" readme = "README.md" homepage = "https://github.com/camunda-community-hub/pyzeebe" @@ -13,9 +15,11 @@ keywords = ["zeebe", "workflow", "workflow-engine"] classifiers = [ "Programming Language :: Python :: 3", "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", + "Operating System :: OS Independent" +] +packages = [ + { include = "pyzeebe" }, ] -packages = [{ include = "pyzeebe" }] [tool.poetry.dependencies] python = "^3.9" @@ -78,25 +82,27 @@ extend_skip_glob = ["*_pb2.py", "*_pb2_grpc.py", "*.pyi"] [tool.pytest.ini_options] asyncio_mode = "auto" -markers = ["e2e: end to end tests"] +markers = [ + "e2e: end to end tests", +] [tool.ruff] target-version = "py39" [tool.ruff.lint] select = [ - "E", # pycodestyle errors - "W", # pycodestyle warnings - "F", # pyflakes - "C", # flake8-comprehensions - "B", # flake8-bugbear - "TID", # flake8-tidy-imports - "T20", # flake8-print + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "C", # 
flake8-comprehensions
+    "B",      # flake8-bugbear
+    "TID",    # flake8-tidy-imports
+    "T20",    # flake8-print
     "ASYNC", # flake8-async
-    "FA", # flake8-future-annotations
+    "FA",     # flake8-future-annotations
 ]
 ignore = [
-    "E501", # line too long, handled by black
+    "E501", # line too long, handled by black
 ]
 
 [build-system]

From ec58e135d06a8daa1be22f168bbf68643b8b4f73 Mon Sep 17 00:00:00 2001
From: Dmitriy
Date: Fri, 27 Dec 2024 17:48:38 +0500
Subject: [PATCH 6/9] add pyzeebe/proto/* to coverage exclude

---
 pyproject.toml | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index e77616b7..904287ce 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -86,6 +86,11 @@ markers = [
     "e2e: end to end tests",
 ]
 
+[tool.coverage.run]
+omit = [
+    "pyzeebe/proto/*"
+]
+
 [tool.ruff]
 target-version = "py39"
 
From aebaccd6ea52f2d7f423ff9f146f39569028d8a2 Mon Sep 17 00:00:00 2001
From: Dmitriy
Date: Mon, 30 Dec 2024 10:34:49 +0500
Subject: [PATCH 7/9] Update update_proto.py

Co-authored-by: Felix Schneider <77450740+felicijus@users.noreply.github.com>
---
 update_proto.py | 63 ++++++++++++++++++++++++++++++++++++-------------
 1 file changed, 46 insertions(+), 17 deletions(-)

diff --git a/update_proto.py b/update_proto.py
index c7d2a97e..28cd3014 100644
--- a/update_proto.py
+++ b/update_proto.py
@@ -1,36 +1,65 @@
+import argparse
 import os
 import pathlib
 
 import requests
 from grpc_tools.protoc import main as grpc_tools_protoc_main
 
-zeebe_proto_version = "8.6.6"
+DEFAULT_PROTO_VERSION: str = "8.6.6"
 
 
 def main():
+    parser = argparse.ArgumentParser(description="Download Zeebe proto file and generate protocol buffers.")
+    parser.add_argument(
+        "-pv",
+        "--proto-version",
+        default=[DEFAULT_PROTO_VERSION],
+        nargs=1,
+        type=str,
+        help=f"zeebe proto version, default is {DEFAULT_PROTO_VERSION}",
+        required=False,
+        # NOTE: The default value is set to the latest version of Zeebe proto file.
+ ) + args = parser.parse_args() + + print(f"Zeebe Proto version: {args.proto_version[0]}") + proto_version = args.proto_version[0] + generate_proto(proto_version) + + +def generate_proto(zeebe_proto_version: str): proto_dir = pathlib.Path("pyzeebe/proto") proto_file = proto_dir / "gateway.proto" for path in proto_dir.glob("*pb2*"): os.remove(path) proto_url = f"https://raw.githubusercontent.com/camunda/camunda/refs/tags/{zeebe_proto_version}/zeebe/gateway-protocol/src/main/proto/gateway.proto" - proto_content = requests.get(proto_url, allow_redirects=True) - with proto_file.open("wb") as tmpfile: - tmpfile.write(proto_content.content) - grpc_tools_protoc_main( - [ - "--proto_path=.", - "--python_out=.", - "--mypy_out=.", - "--grpc_python_out=.", - "--mypy_grpc_out=.", - os.path.relpath(tmpfile.name), - ] - ) + try: + print(f"Downloading proto file from {proto_url}") + proto_content = requests.get(proto_url, timeout=5) + proto_content.raise_for_status() + + with proto_file.open("wb") as tmpfile: + tmpfile.write(proto_content.content) + + grpc_tools_protoc_main( + [ + "--proto_path=.", + "--python_out=.", + "--mypy_out=.", + "--grpc_python_out=.", + "--mypy_grpc_out=.", + os.path.relpath(tmpfile.name), + ] + ) + proto_file.unlink() - proto_file.unlink() + except requests.exceptions.HTTPError as err: + print(f"HTTP Error occurred: {err}") + except requests.exceptions.RequestException as err: + print(f"Error occurred: {err}") if __name__ == "__main__": - generate_proto() + main() From b6f97528a44955d9acd3eb4e75c431e0f08d3375 Mon Sep 17 00:00:00 2001 From: Dmitriy Date: Mon, 30 Dec 2024 10:42:43 +0500 Subject: [PATCH 8/9] chore: add update_proto.py to tool.ruff.lint.per-file-ignores --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 904287ce..4cd2ce94 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -110,6 +110,9 @@ ignore = [ "E501", # line too long, handled by black ] +[tool.ruff.lint.per-file-ignores] +"update_proto.py" = ["T201"] + [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" From 79bd5ce02fb05cd424b0cbdf763a434ea17316f3 Mon Sep 17 00:00:00 2001 From: Dmitriy Date: Mon, 30 Dec 2024 11:03:13 +0500 Subject: [PATCH 9/9] fix docstrings in grpc types --- pyzeebe/grpc_internals/types.py | 117 ++++++++++++++++++-------------- 1 file changed, 65 insertions(+), 52 deletions(-) diff --git a/pyzeebe/grpc_internals/types.py b/pyzeebe/grpc_internals/types.py index 55e9f8d7..494945cf 100644 --- a/pyzeebe/grpc_internals/types.py +++ b/pyzeebe/grpc_internals/types.py @@ -7,36 +7,40 @@ @dataclass(frozen=True) class CreateProcessInstanceResponse: - #: the key of the process definition which was used to create the process instance process_definition_key: int - #: the BPMN process ID of the process definition which was used to create the process - #: instance + """the key of the process definition which was used to create the process instance""" bpmn_process_id: str - #: the version of the process definition which was used to create the process instance + """the BPMN process ID of the process definition which was used to create the process + instance + """ version: int - #: the unique identifier of the created process instance; to be used wherever a request - #: needs a process instance key (e.g. 
CancelProcessInstanceRequest) + """the version of the process definition which was used to create the process instance""" process_instance_key: int - #: the tenant ID of the created process instance + """the unique identifier of the created process instance; to be used wherever a request + needs a process instance key (e.g. CancelProcessInstanceRequest) + """ tenant_id: str | None + """the tenant ID of the created process instance""" @dataclass(frozen=True) class CreateProcessInstanceWithResultResponse: - #: the key of the process definition which was used to create the process instance process_definition_key: int - #: the BPMN process ID of the process definition which was used to create the process - #: instance + """the key of the process definition which was used to create the process instance""" bpmn_process_id: str - #: the version of the process definition which was used to create the process instance + """the BPMN process ID of the process definition which was used to create the process + instance + """ version: int - #: the unique identifier of the created process instance; to be used wherever a request - #: needs a process instance key (e.g. CancelProcessInstanceRequest) + """the version of the process definition which was used to create the process instance""" process_instance_key: int - #: consisting of all visible variables to the root scope + """the unique identifier of the created process instance; to be used wherever a request + needs a process instance key (e.g. CancelProcessInstanceRequest) + """ variables: Variables - #: the tenant ID of the process definition + """consisting of all visible variables to the root scope""" tenant_id: str | None + """the tenant ID of the created process instance""" @dataclass(frozen=True) @@ -48,85 +52,94 @@ class CancelProcessInstanceResponse: class DeployResourceResponse: @dataclass(frozen=True) class ProcessMetadata: - #: the bpmn process ID, as parsed during deployment; together with the version forms a - #: unique identifier for a specific process definition bpmn_process_id: str - #: the assigned process version + """the bpmn process ID, as parsed during deployment; together with the version forms a + unique identifier for a specific process definition + """ version: int - #: the assigned key, which acts as a unique identifier for this process + """the assigned process version""" process_definition_key: int - #: the resource name from which this process was parsed + """the assigned key, which acts as a unique identifier for this process""" resource_name: str - #: the tenant ID of the deployed process + """the resource name from which this process was parsed""" tenant_id: str | None + """the tenant ID of the deployed process""" @dataclass(frozen=True) class DecisionMetadata: - #: the dmn decision ID, as parsed during deployment; together with the - #: versions forms a unique identifier for a specific decision dmn_decision_id: str - #: the dmn name of the decision, as parsed during deployment + """the dmn decision ID, as parsed during deployment; together with the + versions forms a unique identifier for a specific decision + """ dmn_decision_name: str - #: the assigned decision version + """the dmn name of the decision, as parsed during deployment""" version: int - #: the assigned decision key, which acts as a unique identifier for this - #: decision + """the assigned decision version""" decision_key: int - #: the dmn ID of the decision requirements graph that this decision is part - #: of, as parsed during deployment + """the assigned 
decision key, which acts as a unique identifier for this + decision + """ dmn_decision_requirements_id: str - #: the assigned key of the decision requirements graph that this decision is - #: part of + """the dmn ID of the decision requirements graph that this decision is part + of, as parsed during deployment + """ decision_requirements_key: int - #: the tenant ID of the deployed decision + """the assigned key of the decision requirements graph that this decision is + part of + """ tenant_id: str | None + """the tenant ID of the deployed decision""" @dataclass(frozen=True) class DecisionRequirementsMetadata: - #: the dmn decision requirements ID, as parsed during deployment; together - #: with the versions forms a unique identifier for a specific decision dmn_decision_requirements_id: str - #: the dmn name of the decision requirements, as parsed during deployment + """the dmn decision requirements ID, as parsed during deployment; together + with the versions forms a unique identifier for a specific decision + """ dmn_decision_requirements_name: str - #: the assigned decision requirements version + """the dmn name of the decision requirements, as parsed during deployment""" version: int - #: the assigned decision requirements key, which acts as a unique identifier - #: for this decision requirements + """the assigned decision requirements version""" decision_requirements_key: int - #: the resource name from which this decision - #: requirements was parsed + """the assigned decision requirements key, which acts as a unique identifier + for this decision requirements + """ resource_name: str - #: the tenant ID of the deployed decision requirements + """the resource name from which this decision + requirements was parsed + """ tenant_id: str | None + """the tenant ID of the deployed decision requirements""" @dataclass(frozen=True) class FormMetadata: - #: the form ID, as parsed during deployment; together with the - #: versions forms a unique identifier for a specific form form_id: str - #: the assigned form version + """the form ID, as parsed during deployment; together with the + versions forms a unique identifier for a specific form + """ version: int - #: the assigned key, which acts as a unique identifier for this form + """the assigned form version""" form_key: int - #: the resource name + """the assigned key, which acts as a unique identifier for this form""" resource_name: str - #: the tenant ID of the deployed form + """the resource name""" tenant_id: str | None + """the tenant ID of the deployed form""" - #: the unique key identifying the deployment key: int - #: a list of deployed resources, e.g. processes + """the unique key identifying the deployment""" deployments: list[ProcessMetadata | DecisionMetadata | DecisionRequirementsMetadata | FormMetadata] - #: the tenant ID of the deployed resources + """a list of deployed resources, e.g. processes""" tenant_id: str | None + """the tenant ID of the deployed resources""" @dataclass(frozen=True) class PublishMessageResponse: - #: the unique ID of the message that was published key: int - #: the tenant ID of the message + """the unique ID of the message that was published""" tenant_id: str | None + """the tenant ID of the message""" @dataclass(frozen=True)