diff --git a/poetry.lock b/poetry.lock
index 9b4a37e6..1b7a8b78 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -38,13 +38,13 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""}
[[package]]
name = "babel"
-version = "2.15.0"
+version = "2.16.0"
description = "Internationalization utilities"
optional = false
python-versions = ">=3.8"
files = [
- {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"},
- {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"},
+ {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"},
+ {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"},
]
[package.dependencies]
@@ -112,13 +112,13 @@ files = [
[[package]]
name = "certifi"
-version = "2024.7.4"
+version = "2024.8.30"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
- {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
+ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
+ {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
]
[[package]]
@@ -364,13 +364,13 @@ files = [
[[package]]
name = "exceptiongroup"
-version = "1.2.1"
+version = "1.2.2"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
files = [
- {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"},
- {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"},
+ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
+ {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
]
[package.extras]
@@ -378,71 +378,131 @@ test = ["pytest (>=6)"]
[[package]]
name = "grpcio"
-version = "1.64.1"
+version = "1.66.1"
description = "HTTP/2-based RPC framework"
optional = false
python-versions = ">=3.8"
files = [
- {file = "grpcio-1.64.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502"},
- {file = "grpcio-1.64.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff"},
- {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61"},
- {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90"},
- {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d"},
- {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9"},
- {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b"},
- {file = "grpcio-1.64.1-cp310-cp310-win32.whl", hash = "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d"},
- {file = "grpcio-1.64.1-cp310-cp310-win_amd64.whl", hash = "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33"},
- {file = "grpcio-1.64.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61"},
- {file = "grpcio-1.64.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca"},
- {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae"},
- {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e"},
- {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b"},
- {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9"},
- {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294"},
- {file = "grpcio-1.64.1-cp311-cp311-win32.whl", hash = "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367"},
- {file = "grpcio-1.64.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa"},
- {file = "grpcio-1.64.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59"},
- {file = "grpcio-1.64.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1"},
- {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22"},
- {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762"},
- {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1"},
- {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb"},
- {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb"},
- {file = "grpcio-1.64.1-cp312-cp312-win32.whl", hash = "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027"},
- {file = "grpcio-1.64.1-cp312-cp312-win_amd64.whl", hash = "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6"},
- {file = "grpcio-1.64.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d"},
- {file = "grpcio-1.64.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4"},
- {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2"},
- {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad"},
- {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650"},
- {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f"},
- {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a"},
- {file = "grpcio-1.64.1-cp38-cp38-win32.whl", hash = "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd"},
- {file = "grpcio-1.64.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122"},
- {file = "grpcio-1.64.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179"},
- {file = "grpcio-1.64.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62"},
- {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5"},
- {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602"},
- {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489"},
- {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309"},
- {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd"},
- {file = "grpcio-1.64.1-cp39-cp39-win32.whl", hash = "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040"},
- {file = "grpcio-1.64.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd"},
- {file = "grpcio-1.64.1.tar.gz", hash = "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a"},
+ {file = "grpcio-1.66.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492"},
+ {file = "grpcio-1.66.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac"},
+ {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a013c5fbb12bfb5f927444b477a26f1080755a931d5d362e6a9a720ca7dbae60"},
+ {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1b24c23d51a1e8790b25514157d43f0a4dce1ac12b3f0b8e9f66a5e2c4c132f"},
+ {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ffb8ea674d68de4cac6f57d2498fef477cef582f1fa849e9f844863af50083"},
+ {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:307b1d538140f19ccbd3aed7a93d8f71103c5d525f3c96f8616111614b14bf2a"},
+ {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c17ebcec157cfb8dd445890a03e20caf6209a5bd4ac5b040ae9dbc59eef091d"},
+ {file = "grpcio-1.66.1-cp310-cp310-win32.whl", hash = "sha256:ef82d361ed5849d34cf09105d00b94b6728d289d6b9235513cb2fcc79f7c432c"},
+ {file = "grpcio-1.66.1-cp310-cp310-win_amd64.whl", hash = "sha256:292a846b92cdcd40ecca46e694997dd6b9be6c4c01a94a0dfb3fcb75d20da858"},
+ {file = "grpcio-1.66.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:c30aeceeaff11cd5ddbc348f37c58bcb96da8d5aa93fed78ab329de5f37a0d7a"},
+ {file = "grpcio-1.66.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8a1e224ce6f740dbb6b24c58f885422deebd7eb724aff0671a847f8951857c26"},
+ {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a66fe4dc35d2330c185cfbb42959f57ad36f257e0cc4557d11d9f0a3f14311df"},
+ {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ba04659e4fce609de2658fe4dbf7d6ed21987a94460f5f92df7579fd5d0e22"},
+ {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4573608e23f7e091acfbe3e84ac2045680b69751d8d67685ffa193a4429fedb1"},
+ {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7e06aa1f764ec8265b19d8f00140b8c4b6ca179a6dc67aa9413867c47e1fb04e"},
+ {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3885f037eb11f1cacc41f207b705f38a44b69478086f40608959bf5ad85826dd"},
+ {file = "grpcio-1.66.1-cp311-cp311-win32.whl", hash = "sha256:97ae7edd3f3f91480e48ede5d3e7d431ad6005bfdbd65c1b56913799ec79e791"},
+ {file = "grpcio-1.66.1-cp311-cp311-win_amd64.whl", hash = "sha256:cfd349de4158d797db2bd82d2020554a121674e98fbe6b15328456b3bf2495bb"},
+ {file = "grpcio-1.66.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:a92c4f58c01c77205df6ff999faa008540475c39b835277fb8883b11cada127a"},
+ {file = "grpcio-1.66.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fdb14bad0835914f325349ed34a51940bc2ad965142eb3090081593c6e347be9"},
+ {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f03a5884c56256e08fd9e262e11b5cfacf1af96e2ce78dc095d2c41ccae2c80d"},
+ {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ca2559692d8e7e245d456877a85ee41525f3ed425aa97eb7a70fc9a79df91a0"},
+ {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ca1be089fb4446490dd1135828bd42a7c7f8421e74fa581611f7afdf7ab761"},
+ {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d639c939ad7c440c7b2819a28d559179a4508783f7e5b991166f8d7a34b52815"},
+ {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b9feb4e5ec8dc2d15709f4d5fc367794d69277f5d680baf1910fc9915c633524"},
+ {file = "grpcio-1.66.1-cp312-cp312-win32.whl", hash = "sha256:7101db1bd4cd9b880294dec41a93fcdce465bdbb602cd8dc5bd2d6362b618759"},
+ {file = "grpcio-1.66.1-cp312-cp312-win_amd64.whl", hash = "sha256:b0aa03d240b5539648d996cc60438f128c7f46050989e35b25f5c18286c86734"},
+ {file = "grpcio-1.66.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:ecfe735e7a59e5a98208447293ff8580e9db1e890e232b8b292dc8bd15afc0d2"},
+ {file = "grpcio-1.66.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4825a3aa5648010842e1c9d35a082187746aa0cdbf1b7a2a930595a94fb10fce"},
+ {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f517fd7259fe823ef3bd21e508b653d5492e706e9f0ef82c16ce3347a8a5620c"},
+ {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1fe60d0772831d96d263b53d83fb9a3d050a94b0e94b6d004a5ad111faa5b5b"},
+ {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31a049daa428f928f21090403e5d18ea02670e3d5d172581670be006100db9ef"},
+ {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f914386e52cbdeb5d2a7ce3bf1fdfacbe9d818dd81b6099a05b741aaf3848bb"},
+ {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bff2096bdba686019fb32d2dde45b95981f0d1490e054400f70fc9a8af34b49d"},
+ {file = "grpcio-1.66.1-cp38-cp38-win32.whl", hash = "sha256:aa8ba945c96e73de29d25331b26f3e416e0c0f621e984a3ebdb2d0d0b596a3b3"},
+ {file = "grpcio-1.66.1-cp38-cp38-win_amd64.whl", hash = "sha256:161d5c535c2bdf61b95080e7f0f017a1dfcb812bf54093e71e5562b16225b4ce"},
+ {file = "grpcio-1.66.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:d0cd7050397b3609ea51727b1811e663ffda8bda39c6a5bb69525ef12414b503"},
+ {file = "grpcio-1.66.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0e6c9b42ded5d02b6b1fea3a25f036a2236eeb75d0579bfd43c0018c88bf0a3e"},
+ {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c9f80f9fad93a8cf71c7f161778ba47fd730d13a343a46258065c4deb4b550c0"},
+ {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dd67ed9da78e5121efc5c510f0122a972216808d6de70953a740560c572eb44"},
+ {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48b0d92d45ce3be2084b92fb5bae2f64c208fea8ceed7fccf6a7b524d3c4942e"},
+ {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d813316d1a752be6f5c4360c49f55b06d4fe212d7df03253dfdae90c8a402bb"},
+ {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c9bebc6627873ec27a70fc800f6083a13c70b23a5564788754b9ee52c5aef6c"},
+ {file = "grpcio-1.66.1-cp39-cp39-win32.whl", hash = "sha256:30a1c2cf9390c894c90bbc70147f2372130ad189cffef161f0432d0157973f45"},
+ {file = "grpcio-1.66.1-cp39-cp39-win_amd64.whl", hash = "sha256:17663598aadbedc3cacd7bbde432f541c8e07d2496564e22b214b22c7523dac8"},
+ {file = "grpcio-1.66.1.tar.gz", hash = "sha256:35334f9c9745add3e357e3372756fd32d925bd52c41da97f4dfdafbde0bf0ee2"},
]
[package.extras]
-protobuf = ["grpcio-tools (>=1.64.1)"]
+protobuf = ["grpcio-tools (>=1.66.1)"]
+
+[[package]]
+name = "grpcio-tools"
+version = "1.66.1"
+description = "Protobuf code generator for gRPC"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "grpcio_tools-1.66.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:e0c71405399ef59782600b1f0bdebc69ba12d7c9527cd268162a86273971d294"},
+ {file = "grpcio_tools-1.66.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:df1a174a6f9d3b4c380f005f33352d2e95464f33f021fb08084735a2eb6e23b1"},
+ {file = "grpcio_tools-1.66.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:7d789bfe53fce9e87aa80c3694a366258ce4c41b706258e9228ed4994832b780"},
+ {file = "grpcio_tools-1.66.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95c44a265ff01fd05166edae9350bc2e7d1d9a95e8f53b8cd04d2ae0a588c583"},
+ {file = "grpcio_tools-1.66.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b962a8767c3c0f9afe92e0dd6bb0b2305d35195a1053f84d4d31f585b87557ed"},
+ {file = "grpcio_tools-1.66.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d8616773126ec3cdf747b06a12e957b43ac15c34e4728def91fa67249a7c689a"},
+ {file = "grpcio_tools-1.66.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0067e79b6001560ac6acc78cca11fd3504fa27f8af46e3cdbac2f4998505e597"},
+ {file = "grpcio_tools-1.66.1-cp310-cp310-win32.whl", hash = "sha256:fa4f95a79a34afc3b5464895d091cd1911227fc3ab0441b9a37cd1817cf7db86"},
+ {file = "grpcio_tools-1.66.1-cp310-cp310-win_amd64.whl", hash = "sha256:3acce426f5e643de63019311171f4d31131da8149de518716a95c29a2c12dd38"},
+ {file = "grpcio_tools-1.66.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:9a07e24feb7472419cf70ebbb38dd4299aea696f91f191b62a99b3ee9ff03f89"},
+ {file = "grpcio_tools-1.66.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:097a069e7c640043921ecaf3e88d7af78ccd40c25dbddc91db2a4a2adbd0393d"},
+ {file = "grpcio_tools-1.66.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:016fa273dc696c9d8045091ac50e000bce766183a6b150801f51c2946e33dbe3"},
+ {file = "grpcio_tools-1.66.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ec9f4f964f8e8ed5e9cc13deb678c83d5597074c256805373220627833bc5ad"},
+ {file = "grpcio_tools-1.66.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3198815814cdd12bdb69b7580d7770a4ad4c8b2093e0bd6b987bc817618e3eec"},
+ {file = "grpcio_tools-1.66.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:796620fc41d3fbb566d9614ef22bc55df67fac1f1e19c1e0fb6ec48bc9b6a44b"},
+ {file = "grpcio_tools-1.66.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:222d8dc218560698e1abf652fb47e4015994ec7a265ef46e012fd9c9e77a4d6b"},
+ {file = "grpcio_tools-1.66.1-cp311-cp311-win32.whl", hash = "sha256:56e17a11f34df252b4c6fb8aa8cd7b44d162dba9f3333be87ddf7c8bf496622a"},
+ {file = "grpcio_tools-1.66.1-cp311-cp311-win_amd64.whl", hash = "sha256:edd52d667f2aa3c73233be0a821596937f24536647c12d96bfc54aa4cb04747d"},
+ {file = "grpcio_tools-1.66.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:869b6960d5daffda0dac1a474b44144f0dace0d4336394e499c4f400c5e2f8d9"},
+ {file = "grpcio_tools-1.66.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:68d9390bf9ba863ac147fc722d6548caa587235e887cac1bc2438212e89d1de7"},
+ {file = "grpcio_tools-1.66.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b8660401beca7e3af28722439e07b0bcdca80b4a68f5a5a1138ae7b7780a6abf"},
+ {file = "grpcio_tools-1.66.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb67b9aa9cd69468bceb933e8e0f89fd13695746c018c4d2e6b3b84e73f3ad97"},
+ {file = "grpcio_tools-1.66.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5daceb9716e31edc0e1ba0f93303785211438c43502edddad7a919fc4cb3d664"},
+ {file = "grpcio_tools-1.66.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0a86398a4cd0665bc7f09fa90b89bac592c959d2c895bf3cf5d47a98c0f2d24c"},
+ {file = "grpcio_tools-1.66.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1b4acb53338072ab3023e418a5c7059cb15686abd1607516fa1453406dd5f69d"},
+ {file = "grpcio_tools-1.66.1-cp312-cp312-win32.whl", hash = "sha256:88e04b7546101bc79c868c941777efd5088063a9e4f03b4d7263dde796fbabf7"},
+ {file = "grpcio_tools-1.66.1-cp312-cp312-win_amd64.whl", hash = "sha256:5b4fc56abeafae74140f5da29af1093e88ce64811d77f1a81c3146e9e996fb6a"},
+ {file = "grpcio_tools-1.66.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:d4dd2ff982c1aa328ef47ce34f07af82f1f13599912fb1618ebc5fe1e14dddb8"},
+ {file = "grpcio_tools-1.66.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:066648543f786cb74b1fef5652359952455dbba37e832642026fd9fd8a219b5f"},
+ {file = "grpcio_tools-1.66.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d19d47744c30e6bafa76b3113740e71f382d75ebb2918c1efd62ebe6ba7e20f9"},
+ {file = "grpcio_tools-1.66.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:739c53571130b359b738ac7d6d0a1f772e15779b66df7e6764bee4071cd38689"},
+ {file = "grpcio_tools-1.66.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2226ff8d3ecba83b7622946df19d6e8e15cb52f761b8d9e2f807b228db5f1b1e"},
+ {file = "grpcio_tools-1.66.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f4b1498cb8b422fbae32a491c9154e8d47650caf5852fbe6b3b34253e824343"},
+ {file = "grpcio_tools-1.66.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:93d2d9e14e81affdc63d67c42eb16a8da1b6fecc16442a703ca60eb0e7591691"},
+ {file = "grpcio_tools-1.66.1-cp38-cp38-win32.whl", hash = "sha256:d761dfd97a10e4aae73628b5120c64e56f0cded88651d0003d2d80e678c3e7c9"},
+ {file = "grpcio_tools-1.66.1-cp38-cp38-win_amd64.whl", hash = "sha256:e1c2ac0955f5fb87b8444316e475242d194c3f3cd0b7b6e54b889a7b6f05156f"},
+ {file = "grpcio_tools-1.66.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:5f1f04578b72c281e39274348a61d240c48d5321ba8d7a8838e194099ecbc322"},
+ {file = "grpcio_tools-1.66.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:da9b0c08dbbf07535ee1b75a22d0acc5675a808a3a3df9f9b21e0e73ddfbb3a9"},
+ {file = "grpcio_tools-1.66.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:e302b4e1fa856d74ff65c65888b3a37153287ce6ad5bad80b2fdf95130accec2"},
+ {file = "grpcio_tools-1.66.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7fc3f62494f238774755ff90f0e66a93ac7972ea1eb7180c45acf4fd53b25cca"},
+ {file = "grpcio_tools-1.66.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cad65ff22459aa387f543d293f54834c9aac8f76fb7416a7046556df75b567"},
+ {file = "grpcio_tools-1.66.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3d17a27c567a5e4d18f487368215cb51b43e2499059fd6113b92f7ae1fee48be"},
+ {file = "grpcio_tools-1.66.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4df167e67b083f96bc277032a526f6186e98662aaa49baea1dfb8ecfe26ce117"},
+ {file = "grpcio_tools-1.66.1-cp39-cp39-win32.whl", hash = "sha256:f94d5193b2f2a9595795b83e7978b2bee1c0399da66f2f24d179c388f81fb99c"},
+ {file = "grpcio_tools-1.66.1-cp39-cp39-win_amd64.whl", hash = "sha256:66f527a1e3f063065e29cf6f3e55892434d13a5a51e3b22402e09da9521e98a3"},
+ {file = "grpcio_tools-1.66.1.tar.gz", hash = "sha256:5055ffe840ea8f505c30378be02afb4dbecb33480e554debe10b63d6b2f641c3"},
+]
+
+[package.dependencies]
+grpcio = ">=1.66.1"
+protobuf = ">=5.26.1,<6.0dev"
+setuptools = "*"
[[package]]
name = "idna"
-version = "3.7"
+version = "3.8"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.6"
files = [
- {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
- {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
+ {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"},
+ {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"},
]
[[package]]
@@ -659,6 +719,21 @@ files = [
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
+[[package]]
+name = "mypy-protobuf"
+version = "3.6.0"
+description = "Generate mypy stub files from protobuf specs"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "mypy-protobuf-3.6.0.tar.gz", hash = "sha256:02f242eb3409f66889f2b1a3aa58356ec4d909cdd0f93115622e9e70366eca3c"},
+ {file = "mypy_protobuf-3.6.0-py3-none-any.whl", hash = "sha256:56176e4d569070e7350ea620262478b49b7efceba4103d468448f1d21492fd6c"},
+]
+
+[package.dependencies]
+protobuf = ">=4.25.3"
+types-protobuf = ">=4.24"
+
[[package]]
name = "oauthlib"
version = "3.2.2"
@@ -677,13 +752,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
[[package]]
name = "packaging"
-version = "24.0"
+version = "24.1"
description = "Core utilities for Python packages"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
- {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
+ {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
+ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
]
[[package]]
@@ -699,19 +774,19 @@ files = [
[[package]]
name = "platformdirs"
-version = "4.2.2"
+version = "4.3.2"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.8"
files = [
- {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"},
- {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"},
+ {file = "platformdirs-4.3.2-py3-none-any.whl", hash = "sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617"},
+ {file = "platformdirs-4.3.2.tar.gz", hash = "sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c"},
]
[package.extras]
-docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
-type = ["mypy (>=1.8)"]
+docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
+type = ["mypy (>=1.11.2)"]
[[package]]
name = "pluggy"
@@ -730,22 +805,22 @@ testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "protobuf"
-version = "4.25.3"
+version = "5.27.4"
description = ""
optional = false
python-versions = ">=3.8"
files = [
- {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"},
- {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"},
- {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"},
- {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"},
- {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"},
- {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"},
- {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"},
- {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"},
- {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"},
- {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"},
- {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"},
+ {file = "protobuf-5.27.4-cp310-abi3-win32.whl", hash = "sha256:10319748764b917a9a7cddef1582a0a9cd0f8f6d04e545c6236f7ccaf9b624d9"},
+ {file = "protobuf-5.27.4-cp310-abi3-win_amd64.whl", hash = "sha256:f0c24374aaaf103f33662e4de7666a4a4280abebdb8a9f3f0f9b1d71b61174ec"},
+ {file = "protobuf-5.27.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e85fed07013e5a0121efbaf1b14355fdc66f6e545f12fc5985b2882370410006"},
+ {file = "protobuf-5.27.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:d5a0e229061600842e57af4ff6a8522ede5280bcfa4fe7f3a1c20589377859a6"},
+ {file = "protobuf-5.27.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:25ba1f0633f73c3939f3b84e1636f3eb3bab7196952ebb83906d56945edd6aa8"},
+ {file = "protobuf-5.27.4-cp38-cp38-win32.whl", hash = "sha256:565b051249a2f8270af04206dd4f3b73a02343e7d9e072aed57441b369b3467d"},
+ {file = "protobuf-5.27.4-cp38-cp38-win_amd64.whl", hash = "sha256:e673f173cbac4e59c7817ed358e471e4c77aa9166986edf3e731156379a556c7"},
+ {file = "protobuf-5.27.4-cp39-cp39-win32.whl", hash = "sha256:25169c7624d5a9e669fa6faff5a6e818f854346d51ee347b2284676beb9e85dd"},
+ {file = "protobuf-5.27.4-cp39-cp39-win_amd64.whl", hash = "sha256:1fe7735902e84ce35c4152cf07981c176713935a8efad78cea547aae5f4f75cb"},
+ {file = "protobuf-5.27.4-py3-none-any.whl", hash = "sha256:b97259641e8d38738eef34a173e51d2d53a453baab01a32477a64752d9ce59a3"},
+ {file = "protobuf-5.27.4.tar.gz", hash = "sha256:eaa1016e353d8fc5bf08c8087e96eed15f5297aa52bb7ee1f533278bb3f3aad7"},
]
[[package]]
@@ -876,62 +951,64 @@ files = [
[[package]]
name = "pyyaml"
-version = "6.0.1"
+version = "6.0.2"
description = "YAML parser and emitter for Python"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
files = [
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
- {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
- {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
- {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
- {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
- {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
- {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
- {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
- {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
- {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
- {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
- {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
- {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
- {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
- {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
- {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
- {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
- {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
- {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
- {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
- {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
- {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
- {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
- {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
- {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
- {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
- {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
- {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
- {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
- {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
- {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
- {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
- {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
- {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
- {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
+ {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
+ {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
+ {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
+ {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
+ {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
+ {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
+ {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
+ {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
+ {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
+ {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
+ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
]
[[package]]
@@ -992,6 +1069,26 @@ urllib3 = ">=1.25.10,<3.0"
[package.extras]
tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"]
+[[package]]
+name = "setuptools"
+version = "74.1.2"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "setuptools-74.1.2-py3-none-any.whl", hash = "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308"},
+ {file = "setuptools-74.1.2.tar.gz", hash = "sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6"},
+]
+
+[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"]
+core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
+type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"]
+
[[package]]
name = "snowballstemmer"
version = "2.2.0"
@@ -1173,13 +1270,13 @@ files = [
[[package]]
name = "tomlkit"
-version = "0.12.5"
+version = "0.13.2"
description = "Style preserving TOML library"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"},
- {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"},
+ {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"},
+ {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"},
]
[[package]]
@@ -1204,15 +1301,26 @@ files = [
{file = "types_oauthlib-3.2.0.20240806-py3-none-any.whl", hash = "sha256:581bb8e194700d16ae1f0b62a6039261ed1afd0b88e78782e1c48f6507c52f34"},
]
+[[package]]
+name = "types-protobuf"
+version = "5.27.0.20240907"
+description = "Typing stubs for protobuf"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "types-protobuf-5.27.0.20240907.tar.gz", hash = "sha256:bb6f90f66b18d4d1c75667b6586334b0573a6fcee5eb0142a7348a765a7cbadc"},
+ {file = "types_protobuf-5.27.0.20240907-py3-none-any.whl", hash = "sha256:5443270534cc8072909ef7ad9e1421ccff924ca658749a6396c0c43d64c32676"},
+]
+
[[package]]
name = "types-requests"
-version = "2.32.0.20240602"
+version = "2.32.0.20240907"
description = "Typing stubs for requests"
optional = false
python-versions = ">=3.8"
files = [
- {file = "types-requests-2.32.0.20240602.tar.gz", hash = "sha256:3f98d7bbd0dd94ebd10ff43a7fbe20c3b8528acace6d8efafef0b6a184793f06"},
- {file = "types_requests-2.32.0.20240602-py3-none-any.whl", hash = "sha256:ed3946063ea9fbc6b5fc0c44fa279188bae42d582cb63760be6cb4b9d06c3de8"},
+ {file = "types-requests-2.32.0.20240907.tar.gz", hash = "sha256:ff33935f061b5e81ec87997e91050f7b4af4f82027a7a7a9d9aaea04a963fdf8"},
+ {file = "types_requests-2.32.0.20240907-py3-none-any.whl", hash = "sha256:1d1e79faeaf9d42def77f3c304893dea17a97cae98168ac69f3cb465516ee8da"},
]
[package.dependencies]
@@ -1261,21 +1369,6 @@ h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
-[[package]]
-name = "zeebe-grpc"
-version = "8.4.0"
-description = "zeebe Python gRPC Gateway"
-optional = false
-python-versions = "*"
-files = [
- {file = "zeebe_grpc-8.4.0-py3-none-any.whl", hash = "sha256:d83e2f76a3eafa10298200b0c1c4f37ee9eca84902a973516c6293d88bd16a23"},
- {file = "zeebe_grpc-8.4.0.tar.gz", hash = "sha256:ff84f11df4c519937d877a9b253b8b9e7492b5d1661f2dd7b9a62f0dc131ac7c"},
-]
-
-[package.dependencies]
-grpcio = ">=1.49,<2.0"
-protobuf = ">=4.21,<5.0"
-
[[package]]
name = "zipp"
version = "3.20.1"
@@ -1298,4 +1391,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = "^3.8"
-content-hash = "9b66928380ec80986801da379d196a04f14c4c299d688010538a1e37c88a44c5"
+content-hash = "70b43d17c8e4ee71dbeddf3ddea6ad42ecaeaf38ae3faf55b22d6ca4638d8673"
diff --git a/pyproject.toml b/pyproject.toml
index d326fe6b..94270939 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,7 +23,9 @@ python = "^3.8"
oauthlib = "^3.1.0"
requests-oauthlib = ">=1.3.0,<3.0.0"
aiofiles = ">=0.7,<25"
-zeebe-grpc = "^8.4.0"
+grpcio = ">=1.66.0"
+grpcio-tools = ">=1.66.0"
+protobuf = ">=5.27.2,<5.28.0"
typing-extensions = "^4.11.0"
[tool.poetry.group.dev.dependencies]
@@ -45,19 +47,24 @@ importlib-metadata = ">=6.8,<9.0"
types-aiofiles = ">=0.7,<25"
types-oauthlib = "^3.1.0"
types-requests-oauthlib = ">=1.3.0,<3.0.0"
+types-protobuf = "*"
+mypy-protobuf = "*"
[tool.mypy]
python_version = "3.8"
packages = ["pyzeebe"]
strict = true
+[[tool.mypy.overrides]]
+module = "grpc"
+ignore_missing_imports = true
+
[[tool.mypy.overrides]]
module = [
- "grpc",
- "zeebe_grpc.gateway_pb2",
- "zeebe_grpc.gateway_pb2_grpc",
+ "pyzeebe.proto.*"
]
-ignore_missing_imports = true
+disable_error_code = ["import-untyped", "unused-ignore"]
[tool.pylint.master]
max-line-length = 120
@@ -65,13 +72,25 @@ disable = ["C0114", "C0115", "C0116"]
[tool.black]
line-length = 120
+extend-exclude = '''
+(
+  .*_pb2\.py  # exclude autogenerated Protocol Buffer files anywhere in the project
+  | .*_pb2_grpc\.py
+)
+'''
[tool.isort]
profile = "black"
+extend_skip_glob = ["*_pb2.py", "*_pb2_grpc.py", "*.pyi"]
[tool.pytest.ini_options]
asyncio_mode = "auto"
+[tool.coverage.run]
+omit = [
+ "pyzeebe/proto/*"
+]
+
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
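Since this change drops the pre-generated zeebe-grpc package in favor of a vendored gateway.proto, the gateway_pb2 modules under pyzeebe/proto have to be regenerated from it. The diff itself does not include the generation command; the following is a minimal sketch using the grpcio-tools and mypy-protobuf dependencies added above (the script name, paths, and output layout are assumptions, not part of this PR):

    # regenerate_proto.py -- hypothetical helper, not part of this PR
    from grpc_tools import protoc

    # Running protoc from the repo root with --proto_path=. keeps the generated
    # modules importable as pyzeebe.proto.gateway_pb2 / gateway_pb2_grpc.
    # --mypy_out / --mypy_grpc_out are provided by the mypy-protobuf plugin,
    # which must be installed so protoc can find it on PATH.
    exit_code = protoc.main(
        [
            "grpc_tools.protoc",
            "--proto_path=.",
            "--python_out=.",
            "--grpc_python_out=.",
            "--mypy_out=.",
            "--mypy_grpc_out=.",
            "pyzeebe/proto/gateway.proto",
        ]
    )
    raise SystemExit(exit_code)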
diff --git a/pyzeebe/grpc_internals/zeebe_adapter_base.py b/pyzeebe/grpc_internals/zeebe_adapter_base.py
index c375a391..dc50830b 100644
--- a/pyzeebe/grpc_internals/zeebe_adapter_base.py
+++ b/pyzeebe/grpc_internals/zeebe_adapter_base.py
@@ -2,7 +2,6 @@
from typing import NoReturn
import grpc
-from zeebe_grpc.gateway_pb2_grpc import GatewayStub
from pyzeebe.errors import (
UnknownGrpcStatusCodeError,
@@ -12,6 +11,7 @@
)
from pyzeebe.errors.pyzeebe_errors import PyZeebeError
from pyzeebe.grpc_internals.grpc_utils import is_error_status
+from pyzeebe.proto.gateway_pb2_grpc import GatewayStub
logger = logging.getLogger(__name__)
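For context, the relocated import is consumed the same way as before: the generated GatewayStub wraps a gRPC channel. A minimal sketch (the address is a placeholder, not taken from this PR):

    import grpc

    from pyzeebe.proto.gateway_pb2_grpc import GatewayStub

    # pyzeebe uses the asyncio gRPC API; any grpc.aio channel works here.
    channel = grpc.aio.insecure_channel("localhost:26500")
    stub = GatewayStub(channel)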
diff --git a/pyzeebe/grpc_internals/zeebe_job_adapter.py b/pyzeebe/grpc_internals/zeebe_job_adapter.py
index 5fd328b8..692fd462 100644
--- a/pyzeebe/grpc_internals/zeebe_job_adapter.py
+++ b/pyzeebe/grpc_internals/zeebe_job_adapter.py
@@ -4,13 +4,6 @@
from typing import AsyncGenerator, Iterable, Optional
import grpc
-from zeebe_grpc.gateway_pb2 import (
- ActivatedJob,
- ActivateJobsRequest,
- CompleteJobRequest,
- FailJobRequest,
- ThrowErrorRequest,
-)
from pyzeebe.errors import (
ActivateJobsRequestInvalidError,
@@ -20,6 +13,13 @@
from pyzeebe.grpc_internals.grpc_utils import is_error_status
from pyzeebe.grpc_internals.zeebe_adapter_base import ZeebeAdapterBase
from pyzeebe.job.job import Job
+from pyzeebe.proto.gateway_pb2 import (
+ ActivatedJob,
+ ActivateJobsRequest,
+ CompleteJobRequest,
+ FailJobRequest,
+ ThrowErrorRequest,
+)
from pyzeebe.types import Variables
from .types import CompleteJobResponse, FailJobResponse, ThrowErrorResponse
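The request classes moved here are plain generated messages; their fields match the vendored gateway.proto added below. A sketch of the activation flow they implement (field values are examples, error handling omitted):

    from pyzeebe.proto.gateway_pb2 import ActivateJobsRequest
    from pyzeebe.proto.gateway_pb2_grpc import GatewayStub


    async def activate_once(stub: GatewayStub) -> None:
        request = ActivateJobsRequest(
            type="payment-service",  # job type from the BPMN task definition
            worker="example-worker",
            timeout=30_000,  # job lock timeout, in ms
            maxJobsToActivate=32,
            requestTimeout=10_000,  # long-polling window, in ms
        )
        # ActivateJobs is a server-streaming RPC: each response carries a batch of jobs.
        async for response in stub.ActivateJobs(request):
            for job in response.jobs:
                print(job.key, job.type, job.variables)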
diff --git a/pyzeebe/grpc_internals/zeebe_message_adapter.py b/pyzeebe/grpc_internals/zeebe_message_adapter.py
index 6cd5018d..8096ea9d 100644
--- a/pyzeebe/grpc_internals/zeebe_message_adapter.py
+++ b/pyzeebe/grpc_internals/zeebe_message_adapter.py
@@ -2,11 +2,11 @@
from typing import Optional
import grpc
-from zeebe_grpc.gateway_pb2 import PublishMessageRequest
from pyzeebe.errors import MessageAlreadyExistsError
from pyzeebe.grpc_internals.grpc_utils import is_error_status
from pyzeebe.grpc_internals.zeebe_adapter_base import ZeebeAdapterBase
+from pyzeebe.proto.gateway_pb2 import PublishMessageRequest
from pyzeebe.types import Variables
from .types import PublishMessageResponse
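Likewise for message publication; a minimal sketch, with the caveat that PublishMessageRequest's fields are not shown in this excerpt of gateway.proto, so the names below follow the upstream Zeebe definition and should be treated as assumptions:

    import json

    from pyzeebe.proto.gateway_pb2 import PublishMessageRequest
    from pyzeebe.proto.gateway_pb2_grpc import GatewayStub


    async def publish(stub: GatewayStub) -> None:
        await stub.PublishMessage(
            PublishMessageRequest(
                name="payment-received",
                correlationKey="order-42",  # matched against waiting process instances
                timeToLive=60_000,  # ms the message stays buffered for correlation
                variables=json.dumps({"amount": 100}),
            )
        )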
diff --git a/pyzeebe/grpc_internals/zeebe_process_adapter.py b/pyzeebe/grpc_internals/zeebe_process_adapter.py
index 427ba731..916e122f 100644
--- a/pyzeebe/grpc_internals/zeebe_process_adapter.py
+++ b/pyzeebe/grpc_internals/zeebe_process_adapter.py
@@ -4,17 +4,6 @@
import aiofiles
import grpc
-from zeebe_grpc.gateway_pb2 import (
- CancelProcessInstanceRequest,
- CreateProcessInstanceRequest,
- CreateProcessInstanceWithResultRequest,
- DecisionMetadata,
- DecisionRequirementsMetadata,
- DeployResourceRequest,
- FormMetadata,
- ProcessMetadata,
- Resource,
-)
from pyzeebe.errors import (
InvalidJSONError,
@@ -26,6 +15,17 @@
)
from pyzeebe.grpc_internals.grpc_utils import is_error_status
from pyzeebe.grpc_internals.zeebe_adapter_base import ZeebeAdapterBase
+from pyzeebe.proto.gateway_pb2 import (
+ CancelProcessInstanceRequest,
+ CreateProcessInstanceRequest,
+ CreateProcessInstanceWithResultRequest,
+ DecisionMetadata,
+ DecisionRequirementsMetadata,
+ DeployResourceRequest,
+ FormMetadata,
+ ProcessMetadata,
+ Resource,
+)
from pyzeebe.types import Variables
from .types import (
diff --git a/pyzeebe/proto/__init__.py b/pyzeebe/proto/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyzeebe/proto/gateway.proto b/pyzeebe/proto/gateway.proto
new file mode 100644
index 00000000..fd9bf17e
--- /dev/null
+++ b/pyzeebe/proto/gateway.proto
@@ -0,0 +1,1018 @@
+syntax = 'proto3';
+package gateway_protocol;
+
+option java_multiple_files = false;
+option java_package = "io.camunda.zeebe.gateway.protocol";
+option go_package = "./;pb";
+
+// For a more complete documentation, refer to Zeebe documentation at:
+// https://docs.camunda.io/docs/reference/grpc
+
+message StreamActivatedJobsRequest {
+ // the job type, as defined in the BPMN process (e.g. <zeebe:taskDefinition
+ // type="payment-service" />)
+ string type = 1;
+ // the name of the worker activating the jobs, mostly used for logging purposes
+ string worker = 2;
+ // a job returned after this call will not be activated by another call until the
+ // timeout (in ms) has been reached
+ int64 timeout = 3;
+ // a list of variables to fetch as the job variables; if empty, all visible variables at
+ // the time of activation for the scope of the job will be returned
+ repeated string fetchVariable = 5;
+ // a list of identifiers of tenants for which to stream jobs
+ repeated string tenantIds = 6;
+}
+
+message ActivateJobsRequest {
+ // the job type, as defined in the BPMN process (e.g. <zeebe:taskDefinition type="payment-service" />)
+ string type = 1;
+ // the name of the worker activating the jobs, mostly used for logging purposes
+ string worker = 2;
+ // a job returned after this call will not be activated by another call until the
+ // timeout (in ms) has been reached
+ int64 timeout = 3;
+ // the maximum jobs to activate by this request
+ int32 maxJobsToActivate = 4;
+ // a list of variables to fetch as the job variables; if empty, all visible variables at
+ // the time of activation for the scope of the job will be returned
+ repeated string fetchVariable = 5;
+ // The request will be completed when at least one job is activated or after the requestTimeout (in ms).
+ // if the requestTimeout = 0, a default timeout is used.
+ // if the requestTimeout < 0, long polling is disabled and the request is completed immediately, even when no job is activated.
+ int64 requestTimeout = 6;
+ // a list of IDs of tenants for which to activate jobs
+ repeated string tenantIds = 7;
+}
+
+message ActivateJobsResponse {
+ // list of activated jobs
+ repeated ActivatedJob jobs = 1;
+}
+
+message ActivatedJob {
+ // the key, a unique identifier for the job
+ int64 key = 1;
+ // the type of the job (should match what was requested)
+ string type = 2;
+ // the job's process instance key
+ int64 processInstanceKey = 3;
+ // the bpmn process ID of the job process definition
+ string bpmnProcessId = 4;
+ // the version of the job process definition
+ int32 processDefinitionVersion = 5;
+ // the key of the job process definition
+ int64 processDefinitionKey = 6;
+ // the associated task element ID
+ string elementId = 7;
+ // the unique key identifying the associated task, unique within the scope of the
+ // process instance
+ int64 elementInstanceKey = 8;
+ // a set of custom headers defined during modelling; returned as a serialized
+ // JSON document
+ string customHeaders = 9;
+ // the name of the worker which activated this job
+ string worker = 10;
+ // the amount of retries left to this job (should always be positive)
+ int32 retries = 11;
+ // when the job can be activated again, sent as a UNIX epoch timestamp
+ int64 deadline = 12;
+ // JSON document, computed at activation time, consisting of all visible variables to
+ // the task scope
+ string variables = 13;
+ // the id of the tenant that owns the job
+ string tenantId = 14;
+}
+
+message CancelProcessInstanceRequest {
+ // the process instance key (as, for example, obtained from
+ // CreateProcessInstanceResponse)
+ int64 processInstanceKey = 1;
+ // a reference key chosen by the user; it will be part of all records resulting from this operation
+ optional uint64 operationReference = 2;
+}
+
+message CancelProcessInstanceResponse {
+}
+
+message CompleteJobRequest {
+ // the unique job identifier, as obtained from ActivateJobsResponse
+ int64 jobKey = 1;
+ // a JSON document representing the variables in the current task scope
+ string variables = 2;
+}
+
+message CompleteJobResponse {
+}
+
+message CreateProcessInstanceRequest {
+ // the unique key identifying the process definition (e.g. returned from a process
+ // in the DeployProcessResponse message)
+ int64 processDefinitionKey = 1;
+ // the BPMN process ID of the process definition
+ string bpmnProcessId = 2;
+ // the version of the process; set to -1 to use the latest version
+ int32 version = 3;
+ // JSON document that will instantiate the variables for the root variable scope of the
+ // process instance; it must be a JSON object, as variables will be mapped in a
+ // key-value fashion. e.g. { "a": 1, "b": 2 } will create two variables, named "a" and
+ // "b" respectively, with their associated values. [{ "a": 1, "b": 2 }] would not be a
+ // valid argument, as the root of the JSON document is an array and not an object.
+ string variables = 4;
+ // List of start instructions. If empty (default) the process instance
+ // will start at the start event. If non-empty the process instance will apply start
+ // instructions after it has been created
+ repeated ProcessInstanceCreationStartInstruction startInstructions = 5;
+ // the tenant id of the process definition
+ string tenantId = 6;
+
+ // a reference key chosen by the user; it will be part of all records resulting from this operation
+ optional uint64 operationReference = 7;
+}
+
+message ProcessInstanceCreationStartInstruction {
+
+ // future extensions might include
+ // - different types of start instructions
+ // - ability to set local variables for different flow scopes
+
+ // for now, however, the start instruction is implicitly a
+ // "startBeforeElement" instruction
+
+ // element ID
+ string elementId = 1;
+}
+
+message CreateProcessInstanceResponse {
+ // the key of the process definition which was used to create the process instance
+ int64 processDefinitionKey = 1;
+ // the BPMN process ID of the process definition which was used to create the process
+ // instance
+ string bpmnProcessId = 2;
+ // the version of the process definition which was used to create the process instance
+ int32 version = 3;
+ // the unique identifier of the created process instance; to be used wherever a request
+ // needs a process instance key (e.g. CancelProcessInstanceRequest)
+ int64 processInstanceKey = 4;
+ // the tenant identifier of the created process instance
+ string tenantId = 5;
+}
+
+message CreateProcessInstanceWithResultRequest {
+ CreateProcessInstanceRequest request = 1;
+ // timeout (in ms). the request will be closed if the process is not completed
+ // before the requestTimeout.
+ // if requestTimeout = 0, uses the generic requestTimeout configured in the gateway.
+ int64 requestTimeout = 2;
+ // list of names of variables to be included in `CreateProcessInstanceWithResultResponse.variables`
+ // if empty, all visible variables in the root scope will be returned.
+ repeated string fetchVariables = 3;
+}
+
+message CreateProcessInstanceWithResultResponse {
+ // the key of the process definition which was used to create the process instance
+ int64 processDefinitionKey = 1;
+ // the BPMN process ID of the process definition which was used to create the process
+ // instance
+ string bpmnProcessId = 2;
+ // the version of the process definition which was used to create the process instance
+ int32 version = 3;
+ // the unique identifier of the created process instance; to be used wherever a request
+ // needs a process instance key (e.g. CancelProcessInstanceRequest)
+ int64 processInstanceKey = 4;
+ // JSON document
+ // consists of visible variables in the root scope
+ string variables = 5;
+ // the tenant identifier of the process definition
+ string tenantId = 6;
+}
+
+message EvaluateDecisionRequest {
+ // the unique key identifying the decision to be evaluated (e.g. returned
+ // from a decision in the DeployResourceResponse message)
+ int64 decisionKey = 1;
+ // the ID of the decision to be evaluated
+ string decisionId = 2;
+ // JSON document that will instantiate the variables for the decision to be
+ // evaluated; it must be a JSON object, as variables will be mapped in a
+ // key-value fashion, e.g. { "a": 1, "b": 2 } will create two variables,
+ // named "a" and "b" respectively, with their associated values.
+ // [{ "a": 1, "b": 2 }] would not be a valid argument, as the root of the
+ // JSON document is an array and not an object.
+ string variables = 3;
+ // the tenant identifier of the decision
+ string tenantId = 4;
+}
+
+message EvaluateDecisionResponse {
+ // the unique key identifying the decision which was evaluated (e.g. returned
+ // from a decision in the DeployResourceResponse message)
+ int64 decisionKey = 1;
+ // the ID of the decision which was evaluated
+ string decisionId = 2;
+ // the name of the decision which was evaluated
+ string decisionName = 3;
+ // the version of the decision which was evaluated
+ int32 decisionVersion = 4;
+ // the ID of the decision requirements graph that the decision which was
+ // evaluated is part of.
+ string decisionRequirementsId = 5;
+ // the unique key identifying the decision requirements graph that the
+ // decision which was evaluated is part of.
+ int64 decisionRequirementsKey = 6;
+ // JSON document that will instantiate the result of the decision which was
+ // evaluated; it will be a JSON object, as the result output will be mapped
+ // in a key-value fashion, e.g. { "a": 1 }.
+ string decisionOutput = 7;
+ // a list of decisions that were evaluated within the requested decision evaluation
+ repeated EvaluatedDecision evaluatedDecisions = 8;
+ // an optional string indicating the ID of the decision which
+ // failed during evaluation
+ string failedDecisionId = 9;
+ // an optional message describing why the decision which was evaluated failed
+ string failureMessage = 10;
+ // the tenant identifier of the evaluated decision
+ string tenantId = 11;
+ // the unique key identifying this decision evaluation
+ int64 decisionInstanceKey = 12;
+}
+
+message EvaluatedDecision {
+ // the unique key identifying the decision which was evaluated (e.g. returned
+ // from a decision in the DeployResourceResponse message)
+ int64 decisionKey = 1;
+ // the ID of the decision which was evaluated
+ string decisionId = 2;
+ // the name of the decision which was evaluated
+ string decisionName = 3;
+ // the version of the decision which was evaluated
+ int32 decisionVersion = 4;
+ // the type of the decision which was evaluated
+ string decisionType = 5;
+ // JSON document that will instantiate the result of the decision which was
+ // evaluated; it will be a JSON object, as the result output will be mapped
+ // in a key-value fashion, e.g. { "a": 1 }.
+ string decisionOutput = 6;
+ // the decision rules that matched within this decision evaluation
+ repeated MatchedDecisionRule matchedRules = 7;
+ // the decision inputs that were evaluated within this decision evaluation
+ repeated EvaluatedDecisionInput evaluatedInputs = 8;
+ // the tenant identifier of the evaluated decision
+ string tenantId = 9;
+}
+
+message EvaluatedDecisionInput {
+ // the id of the evaluated decision input
+ string inputId = 1;
+ // the name of the evaluated decision input
+ string inputName = 2;
+ // the value of the evaluated decision input
+ string inputValue = 3;
+}
+
+message EvaluatedDecisionOutput {
+ // the id of the evaluated decision output
+ string outputId = 1;
+ // the name of the evaluated decision output
+ string outputName = 2;
+ // the value of the evaluated decision output
+ string outputValue = 3;
+}
+
+message MatchedDecisionRule {
+ // the id of the matched rule
+ string ruleId = 1;
+ // the index of the matched rule
+ int32 ruleIndex = 2;
+ // the evaluated decision outputs
+ repeated EvaluatedDecisionOutput evaluatedOutputs = 3;
+}
+
+message DeployProcessRequest {
+ // since 8, replaced by DeployResourceRequest
+ option deprecated = true;
+ // List of process resources to deploy
+ repeated ProcessRequestObject processes = 1;
+}
+
+message ProcessRequestObject {
+ // since 8, replaced by Resource
+ option deprecated = true;
+ // the resource basename, e.g. myProcess.bpmn
+ string name = 1;
+ // the process definition as a UTF8-encoded string
+ bytes definition = 2;
+}
+
+message DeployProcessResponse {
+ // since 8, replaced by DeployResourceResponse
+ option deprecated = true;
+ // the unique key identifying the deployment
+ int64 key = 1;
+ // a list of deployed processes
+ repeated ProcessMetadata processes = 2;
+}
+
+message DeployResourceRequest {
+ // list of resources to deploy
+ repeated Resource resources = 1;
+ // the tenant id of the resources to deploy
+ string tenantId = 2;
+}
+
+message Resource {
+ // the resource name, e.g. myProcess.bpmn or myDecision.dmn
+ string name = 1;
+ // the file content as a UTF8-encoded string
+ bytes content = 2;
+}
+
+message DeployResourceResponse {
+ // the unique key identifying the deployment
+ int64 key = 1;
+ // a list of deployed resources, e.g. processes
+ repeated Deployment deployments = 2;
+ // the tenant id of the deployed resources
+ string tenantId = 3;
+}
+
+message Deployment {
+ // each deployment has only one metadata
+ oneof Metadata {
+ // metadata of a deployed process
+ ProcessMetadata process = 1;
+ // metadata of a deployed decision
+ DecisionMetadata decision = 2;
+ // metadata of a deployed decision requirements
+ DecisionRequirementsMetadata decisionRequirements = 3;
+ // metadata of a deployed form
+ FormMetadata form = 4;
+ }
+}
+
+message ProcessMetadata {
+ // the bpmn process ID, as parsed during deployment; together with the version forms a
+ // unique identifier for a specific process definition
+ string bpmnProcessId = 1;
+ // the assigned process version
+ int32 version = 2;
+ // the assigned key, which acts as a unique identifier for this process
+ int64 processDefinitionKey = 3;
+ // the resource name (see: ProcessRequestObject.name) from which this process was
+ // parsed
+ string resourceName = 4;
+ // the tenant id of the deployed process
+ string tenantId = 5;
+}
+
+message DecisionMetadata {
+ // the dmn decision ID, as parsed during deployment; together with the
+ // versions forms a unique identifier for a specific decision
+ string dmnDecisionId = 1;
+ // the dmn name of the decision, as parsed during deployment
+ string dmnDecisionName = 2;
+ // the assigned decision version
+ int32 version = 3;
+ // the assigned decision key, which acts as a unique identifier for this
+ // decision
+ int64 decisionKey = 4;
+ // the dmn ID of the decision requirements graph that this decision is part
+ // of, as parsed during deployment
+ string dmnDecisionRequirementsId = 5;
+ // the assigned key of the decision requirements graph that this decision is
+ // part of
+ int64 decisionRequirementsKey = 6;
+ // the tenant id of the deployed decision
+ string tenantId = 7;
+}
+
+message DecisionRequirementsMetadata {
+ // the dmn decision requirements ID, as parsed during deployment; together
+ // with the versions forms a unique identifier for a specific decision
+ string dmnDecisionRequirementsId = 1;
+ // the dmn name of the decision requirements, as parsed during deployment
+ string dmnDecisionRequirementsName = 2;
+ // the assigned decision requirements version
+ int32 version = 3;
+ // the assigned decision requirements key, which acts as a unique identifier
+ // for this decision requirements
+ int64 decisionRequirementsKey = 4;
+ // the resource name (see: Resource.name) from which this decision
+ // requirements was parsed
+ string resourceName = 5;
+ // the tenant id of the deployed decision requirements
+ string tenantId = 6;
+}
+
+message FormMetadata {
+ // the form ID, as parsed during deployment; together with the
+ // versions forms a unique identifier for a specific form
+ string formId = 1;
+ // the assigned form version
+ int32 version = 2;
+ // the assigned key, which acts as a unique identifier for this form
+ int64 formKey = 3;
+ // the resource name
+ string resourceName = 4;
+ // the tenant id of the deployed form
+ string tenantId = 5;
+}
+
+message FailJobRequest {
+ // the unique job identifier, as obtained when activating the job
+ int64 jobKey = 1;
+ // the amount of retries the job should have left
+ int32 retries = 2;
+ // an optional message describing why the job failed
+ // this is particularly useful if a job runs out of retries and an incident is raised,
+ // as this message can help explain why an incident was raised
+ string errorMessage = 3;
+ // the backoff timeout (in ms) for the next retry
+ int64 retryBackOff = 4;
+ // JSON document that will instantiate the variables at the local scope of the
+ // job's associated task; it must be a JSON object, as variables will be mapped in a
+ // key-value fashion. e.g. { "a": 1, "b": 2 } will create two variables, named "a" and
+ // "b" respectively, with their associated values. [{ "a": 1, "b": 2 }] would not be a
+ // valid argument, as the root of the JSON document is an array and not an object.
+ string variables = 5;
+}
+
+message FailJobResponse {
+}
+
+message ThrowErrorRequest {
+ // the unique job identifier, as obtained when activating the job
+ int64 jobKey = 1;
+ // the error code that will be matched with an error catch event
+ string errorCode = 2;
+ // an optional error message that provides additional context
+ string errorMessage = 3;
+ // JSON document that will instantiate the variables at the local scope of the
+ // error catch event that catches the thrown error; it must be a JSON object, as variables will be mapped in a
+ // key-value fashion. e.g. { "a": 1, "b": 2 } will create two variables, named "a" and
+ // "b" respectively, with their associated values. [{ "a": 1, "b": 2 }] would not be a
+ // valid argument, as the root of the JSON document is an array and not an object.
+ string variables = 4;
+}
+
+message ThrowErrorResponse {
+}
+
+message PublishMessageRequest {
+ // the name of the message
+ string name = 1;
+ // the correlation key of the message
+ string correlationKey = 2;
+ // how long the message should be buffered on the broker, in milliseconds
+ int64 timeToLive = 3;
+ // the unique ID of the message; can be omitted. only useful to ensure only one message
+ // with the given ID will ever be published (during its lifetime)
+ string messageId = 4;
+ // the message variables as a JSON document; to be valid, the root of the document must be an
+ // object, e.g. { "a": "foo" }. [ "foo" ] would not be valid.
+ string variables = 5;
+ // the tenant id of the message
+ string tenantId = 6;
+}
+
+message PublishMessageResponse {
+ // the unique ID of the message that was published
+ int64 key = 1;
+ // the tenant id of the message
+ string tenantId = 2;
+}
+
+message ResolveIncidentRequest {
+ // the unique ID of the incident to resolve
+ int64 incidentKey = 1;
+ // a reference key chosen by the user; it will be part of all records resulting from this operation
+ optional uint64 operationReference = 2;
+}
+
+message ResolveIncidentResponse {
+}
+
+message TopologyRequest {
+}
+
+message TopologyResponse {
+ // list of brokers part of this cluster
+ repeated BrokerInfo brokers = 1;
+ // how many nodes are in the cluster
+ int32 clusterSize = 2;
+ // how many partitions are spread across the cluster
+ int32 partitionsCount = 3;
+ // configured replication factor for this cluster
+ int32 replicationFactor = 4;
+ // gateway version
+ string gatewayVersion = 5;
+}
+
+message BrokerInfo {
+ // unique (within a cluster) node ID for the broker
+ int32 nodeId = 1;
+ // hostname of the broker
+ string host = 2;
+ // port for the broker
+ int32 port = 3;
+ // list of partitions managed or replicated on this broker
+ repeated Partition partitions = 4;
+ // broker version
+ string version = 5;
+}
+
+message Partition {
+ // Describes the Raft role of the broker for a given partition
+ enum PartitionBrokerRole {
+ LEADER = 0;
+ FOLLOWER = 1;
+ INACTIVE = 2;
+ }
+
+ // Describes the current health of the partition
+ enum PartitionBrokerHealth {
+ HEALTHY = 0;
+ UNHEALTHY = 1;
+ DEAD = 2;
+ }
+
+ // the unique ID of this partition
+ int32 partitionId = 1;
+ // the role of the broker for this partition
+ PartitionBrokerRole role = 2;
+ // the health of this partition
+ PartitionBrokerHealth health = 3;
+}
+
+message UpdateJobRetriesRequest {
+ // the unique job identifier, as obtained through ActivateJobs
+ int64 jobKey = 1;
+ // the new amount of retries for the job; must be positive
+ int32 retries = 2;
+ // a reference key chosen by the user; it will be part of all records resulting from this operation
+ optional uint64 operationReference = 3;
+}
+
+message UpdateJobRetriesResponse {
+}
+
+message UpdateJobTimeoutRequest {
+ // the unique job identifier, as obtained from ActivateJobsResponse
+ int64 jobKey = 1;
+ // the duration of the new timeout in ms, starting from the current moment
+ int64 timeout = 2;
+ // a reference key chosen by the user; it will be part of all records resulting from this operation
+ optional uint64 operationReference = 3;
+}
+
+message UpdateJobTimeoutResponse {
+}
+
+message SetVariablesRequest {
+ // the unique identifier of a particular element; can be the process instance key (as
+ // obtained during instance creation), or a given element, such as a service task (see
+ // elementInstanceKey on the job message)
+ int64 elementInstanceKey = 1;
+ // a JSON serialized document describing variables as key value pairs; the root of the document
+ // must be an object
+ string variables = 2;
+ // if true, the variables will be merged strictly into the local scope (as indicated by
+ // elementInstanceKey); this means the variables is not propagated to upper scopes.
+ // for example, let's say we have two scopes, '1' and '2', with each having effective variables as:
+ // 1 => `{ "foo" : 2 }`, and 2 => `{ "bar" : 1 }`. if we send an update request with
+ // elementInstanceKey = 2, variables `{ "foo" : 5 }`, and local is true, then scope 1 will
+ // be unchanged, and scope 2 will now be `{ "bar" : 1, "foo" : 5 }`. if local was false, however,
+ // then scope 1 would be `{ "foo": 5 }`, and scope 2 would be `{ "bar" : 1 }`.
+ bool local = 3;
+ // a reference key chosen by the user; it will be part of all records resulting from this operation
+ optional uint64 operationReference = 4;
+}
+
+message SetVariablesResponse {
+ // the unique key of the set variables command
+ int64 key = 1;
+}
+
+message ModifyProcessInstanceRequest {
+ // the key of the process instance that should be modified
+ int64 processInstanceKey = 1;
+ // instructions describing which elements should be activated in which scopes,
+ // and which variables should be created
+ repeated ActivateInstruction activateInstructions = 2;
+ // instructions describing which elements should be terminated
+ repeated TerminateInstruction terminateInstructions = 3;
+ // a reference key chosen by the user; it will be part of all records resulting from this operation
+ optional uint64 operationReference = 4;
+
+ message ActivateInstruction {
+ // the id of the element that should be activated
+ string elementId = 1;
+ // the key of the ancestor scope the element instance should be created in;
+ // set to -1 to create the new element instance within an existing element
+ // instance of the flow scope
+ int64 ancestorElementInstanceKey = 2;
+ // instructions describing which variables should be created
+ repeated VariableInstruction variableInstructions = 3;
+ }
+
+ message VariableInstruction {
+ // JSON document that will instantiate the variables for the root variable scope of the
+ // process instance; it must be a JSON object, as variables will be mapped in a
+ // key-value fashion. e.g. { "a": 1, "b": 2 } will create two variables, named "a" and
+ // "b" respectively, with their associated values. [{ "a": 1, "b": 2 }] would not be a
+ // valid argument, as the root of the JSON document is an array and not an object.
+ string variables = 1;
+ // the id of the element in which scope the variables should be created;
+ // leave empty to create the variables in the global scope of the process instance
+ string scopeId = 2;
+ }
+
+ message TerminateInstruction {
+ // the id of the element that should be terminated
+ int64 elementInstanceKey = 1;
+ }
+}
+
+message ModifyProcessInstanceResponse {
+
+}
+
+message MigrateProcessInstanceRequest {
+ // key of the process instance to migrate
+ int64 processInstanceKey = 1;
+ // the migration plan that defines target process and element mappings
+ MigrationPlan migrationPlan = 2;
+ // a reference key chosen by the user; it will be part of all records resulting from this operation
+ optional uint64 operationReference = 3;
+
+ message MigrationPlan {
+ // the key of process definition to migrate the process instance to
+ int64 targetProcessDefinitionKey = 1;
+ // the mapping instructions describe how to map elements from the source process definition to the target process definition
+ repeated MappingInstruction mappingInstructions = 2;
+ }
+
+ message MappingInstruction {
+ // the element id to migrate from
+ string sourceElementId = 1;
+ // the element id to migrate into
+ string targetElementId = 2;
+ }
+}
+
+message MigrateProcessInstanceResponse {
+
+}
+
+message DeleteResourceRequest {
+ // The key of the resource that should be deleted. This can either be the key
+ // of a process definition, the key of a decision requirements definition or the key of a form.
+ int64 resourceKey = 1;
+ // a reference key chosen by the user; it will be part of all records resulting from this operation
+ optional uint64 operationReference = 2;
+}
+
+message DeleteResourceResponse {
+
+}
+
+message BroadcastSignalRequest {
+ // The name of the signal
+ string signalName = 1;
+ // the signal variables as a JSON document; to be valid, the root of the document must be an
+ // object, e.g. { "a": "foo" }. [ "foo" ] would not be valid.
+ string variables = 2;
+ // the id of the tenant that owns the signal.
+ string tenantId = 3;
+}
+
+message BroadcastSignalResponse {
+ // the unique ID of the signal that was broadcasted.
+ int64 key = 1;
+ // the tenant id of the signal that was broadcasted.
+ string tenantId = 2;
+}
+
+service Gateway {
+ /*
+ Iterates through all known partitions round-robin, activates up to the requested
+ maximum number of jobs, and streams them back to the client as they are activated.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - type is blank (empty string, null)
+ - worker is blank (empty string, null)
+ - timeout less than 1
+ - maxJobsToActivate is less than 1
+ */
+ rpc ActivateJobs (ActivateJobsRequest) returns (stream ActivateJobsResponse) {
+ }
+
+ /*
+ Registers the client to a job stream that will stream jobs back to the client as
+ they become activatable.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - type is blank (empty string, null)
+ - timeout less than 1
+ - If multi-tenancy is enabled, and tenantIds is empty (empty list)
+ - If multi-tenancy is enabled, and an invalid tenant ID is provided. A tenant ID is considered invalid if:
+ - The tenant ID is blank (empty string, null)
+ - The tenant ID is longer than 31 characters
+ - The tenant ID contains anything other than alphanumeric characters, dot (.), dash (-), or underscore (_)
+ - If multi-tenancy is disabled, and tenantIds is not empty (empty list), or has an ID other than <default>
+ */
+ rpc StreamActivatedJobs (StreamActivatedJobsRequest) returns (stream ActivatedJob) {
+ }
+
+ /*
+ Cancels a running process instance
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key
+ */
+ rpc CancelProcessInstance (CancelProcessInstanceRequest) returns (CancelProcessInstanceResponse) {
+ }
+
+ /*
+ Completes a job with the given variables, which allows completing the associated service task.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given job key. Note that since jobs are removed once completed,
+ it could be that this job did exist at some point.
+
+ FAILED_PRECONDITION:
+ - the job was marked as failed. In that case, the related incident must be resolved before
+ the job can be activated again and completed.
+ */
+ rpc CompleteJob (CompleteJobRequest) returns (CompleteJobResponse) {
+ }
+
+ /*
+ Creates and starts an instance of the specified process. The process definition to use to
+ create the instance can be specified either using its unique key (as returned by
+ DeployProcess), or using the BPMN process ID and a version. Pass -1 as the version to use the
+ latest deployed version. Note that only processes with none start events can be started through
+ this command.
+
+ Errors:
+ NOT_FOUND:
+ - no process with the given key exists (if processDefinitionKey was given)
+ - no process with the given process ID exists (if bpmnProcessId was given but version was -1)
+ - no process with the given process ID and version exists (if both bpmnProcessId and version were given)
+
+ FAILED_PRECONDITION:
+ - the process definition does not contain a none start event; only processes with none
+ start event can be started manually.
+
+ INVALID_ARGUMENT:
+ - the given variables argument is not a valid JSON document; it is expected to be a valid
+ JSON document where the root node is an object.
+ */
+ rpc CreateProcessInstance (CreateProcessInstanceRequest) returns (CreateProcessInstanceResponse) {
+ }
+
+ /*
+ Behaves similarly to `rpc CreateProcessInstance`, except that a successful response is received when the process completes successfully.
+ */
+ rpc CreateProcessInstanceWithResult (CreateProcessInstanceWithResultRequest) returns (CreateProcessInstanceWithResultResponse) {
+ }
+
+ /*
+ Evaluates a decision. The decision to evaluate can be specified either by
+ using its unique key (as returned by DeployResource), or using the decision
+ ID. When using the decision ID, the latest deployed version of the decision
+ is used.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - no decision with the given key exists (if decisionKey was given)
+ - no decision with the given decision ID exists (if decisionId was given)
+ - both decision ID and decision KEY were provided, or are missing
+ */
+ rpc EvaluateDecision (EvaluateDecisionRequest) returns (EvaluateDecisionResponse) {
+ }
+
+ /*
+ Deploys one or more processes to Zeebe. Note that this is an atomic call,
+ i.e. either all processes are deployed, or none of them are.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - no resources given.
+ - if at least one resource is invalid. A resource is considered invalid if:
+ - the resource data is not deserializable (e.g. detected as BPMN, but it's broken XML)
+ - the process is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task)
+ */
+ rpc DeployProcess (DeployProcessRequest) returns (DeployProcessResponse) {
+ // since 8, replaced by DeployResource
+ option deprecated = true;
+ }
+
+ /*
+ Deploys one or more resources (e.g. processes or decision models) to Zeebe.
+ Note that this is an atomic call, i.e. either all resources are deployed, or none of them are.
+
+ Errors:
+ PERMISSION_DENIED:
+ - if a deployment to an unauthorized tenant is performed
+ INVALID_ARGUMENT:
+ - no resources given.
+ - if at least one resource is invalid. A resource is considered invalid if:
+ - the content is not deserializable (e.g. detected as BPMN, but it's broken XML)
+ - the content is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task)
+ - if multi-tenancy is enabled, and:
+ - a tenant id is not provided
+ - a tenant id with an invalid format is provided
+ - if multi-tenancy is disabled and a tenant id is provided
+ */
+ rpc DeployResource (DeployResourceRequest) returns (DeployResourceResponse) {
+ }
+
+
+ /*
+ Marks the job as failed; if the retries argument is positive, then the job will be immediately
+ activatable again, and a worker could try again to process it. If it is zero or negative however,
+ an incident will be raised, tagged with the given errorMessage, and the job will not be
+ activatable until the incident is resolved.
+
+ Errors:
+ NOT_FOUND:
+ - no job was found with the given key
+
+ FAILED_PRECONDITION:
+ - the job was not activated
+ - the job is already in a failed state, i.e. ran out of retries
+ */
+ rpc FailJob (FailJobRequest) returns (FailJobResponse) {
+ }
+
+ /*
+ Reports a business error (i.e. non-technical) that occurs while processing a job. The error is handled in the process by an error catch event. If there is no error catch event with the specified errorCode then an incident will be raised instead.
+
+ Errors:
+ NOT_FOUND:
+ - no job was found with the given key
+
+ FAILED_PRECONDITION:
+ - the job is not in an activated state
+ */
+ rpc ThrowError (ThrowErrorRequest) returns (ThrowErrorResponse) {
+ }
+
+ /*
+ Publishes a single message. Messages are published to specific partitions computed from their
+ correlation keys.
+
+ Errors:
+ ALREADY_EXISTS:
+ - a message with the same ID was previously published (and is still alive)
+ */
+ rpc PublishMessage (PublishMessageRequest) returns (PublishMessageResponse) {
+ }
+
+ /*
+ Resolves a given incident. This simply marks the incident as resolved; most likely a call to
+ UpdateJobRetries or SetVariables will be necessary to actually resolve the
+ problem, followed by this call.
+
+ Errors:
+ NOT_FOUND:
+ - no incident with the given key exists
+ */
+ rpc ResolveIncident (ResolveIncidentRequest) returns (ResolveIncidentResponse) {
+ }
+
+ /*
+ Updates all the variables of a particular scope (e.g. process instance, flow element instance)
+ from the given JSON document.
+
+ Errors:
+ NOT_FOUND:
+ - no element with the given elementInstanceKey exists
+ INVALID_ARGUMENT:
+ - the given variables document is not a valid JSON document; valid documents are expected to
+ be JSON documents where the root node is an object.
+ */
+ rpc SetVariables (SetVariablesRequest) returns (SetVariablesResponse) {
+ }
+
+ /*
+ Obtains the current topology of the cluster the gateway is part of.
+ */
+ rpc Topology (TopologyRequest) returns (TopologyResponse) {
+ }
+
+ /*
+ Updates the number of retries a job has left. This is mostly useful for jobs that have run out of
+ retries, should the underlying problem be solved.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given key
+
+ INVALID_ARGUMENT:
+ - retries is not greater than 0
+ */
+ rpc UpdateJobRetries (UpdateJobRetriesRequest) returns (UpdateJobRetriesResponse) {
+ }
+
+ /*
+ Modifies the process instance. This is done by activating and/or terminating specific elements of the instance.
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key
+
+ FAILED_PRECONDITION:
+ - trying to activate an element inside a multi-instance
+
+ INVALID_ARGUMENT:
+ - activating or terminating an unknown element
+ - the ancestor of the element to activate doesn't exist
+ - the scope of a variable is unknown
+ */
+ rpc ModifyProcessInstance (ModifyProcessInstanceRequest) returns (ModifyProcessInstanceResponse) {
+
+ }
+
+ /*
+ Migrates the process instance to the specified process definition.
+ In simple terms, this is handled by updating the active element's process.
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key, or it is not active
+ - no process definition exists with the given target definition key
+ - no process instance exists with the given key for the tenants the user is authorized to work with.
+
+ FAILED_PRECONDITION:
+ - not all active elements in the given process instance are mapped to the elements in the target process definition
+ - a mapping instruction changes the type of an element or event
+ - a mapping instruction changes the implementation of a task
+ - a mapping instruction detaches a boundary event from an active element
+ - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on)
+ - a mapping instruction refers to an element in unsupported scenarios.
+ (i.e. migration is not supported when the process instance or target process elements contain event subscriptions)
+ - multiple mapping instructions target the same boundary event
+
+ INVALID_ARGUMENT:
+ - A `sourceElementId` does not refer to an element in the process instance's process definition
+ - A `targetElementId` does not refer to an element in the target process definition
+ - A `sourceElementId` is mapped by multiple mapping instructions.
+ For example, the engine cannot determine how to migrate a process instance when the instructions are: [A->B, A->C].
+ */
+ rpc MigrateProcessInstance (MigrateProcessInstanceRequest) returns (MigrateProcessInstanceResponse) {
+
+ }
+
+ /*
+ Updates the deadline of a job using the timeout (in ms) provided. This can be used
+ for extending or shortening the job deadline.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given key
+
+ INVALID_STATE:
+ - no deadline exists for the given job key
+ */
+ rpc UpdateJobTimeout (UpdateJobTimeoutRequest) returns (UpdateJobTimeoutResponse) {
+ }
+
+ /*
+ Deletes a resource from the state. Once a resource has been deleted it cannot
+ be recovered. If the resource needs to be available again, a new deployment
+ of the resource is required.
+
+ Deleting a process will cancel any running instances of this process
+ definition. New instances of a deleted process are created using
+ the latest version that hasn't been deleted. Creating a new
+ process instance is impossible when all versions have been
+ deleted.
+
+ Deleting a decision requirements definition could cause incidents in process
+ instances referencing these decisions in a business rule task. A decision
+ will be evaluated with the latest version that hasn't been deleted. If all
+ versions of a decision have been deleted the evaluation is rejected.
+
+ Errors:
+ NOT_FOUND:
+ - No resource exists with the given key
+
+ */
+ rpc DeleteResource (DeleteResourceRequest) returns (DeleteResourceResponse) {
+
+ }
+
+ /*
+ Broadcasts a signal.
+ */
+ rpc BroadcastSignal (BroadcastSignalRequest) returns (BroadcastSignalResponse) {
+
+ }
+}
\ No newline at end of file
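
A minimal sketch of calling the Gateway service defined above, assuming a matching gateway_pb2_grpc module is generated alongside gateway_pb2 (the address is the default local Zeebe gateway port):

import grpc

from pyzeebe.proto import gateway_pb2
from pyzeebe.proto import gateway_pb2_grpc  # assumed generated service stubs

# Query the cluster topology over an insecure local connection.
with grpc.insecure_channel("localhost:26500") as channel:
    stub = gateway_pb2_grpc.GatewayStub(channel)
    topology = stub.Topology(gateway_pb2.TopologyRequest())
    for broker in topology.brokers:
        print(broker.host, broker.port, [p.partitionId for p in broker.partitions])
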
diff --git a/pyzeebe/proto/gateway_pb2.py b/pyzeebe/proto/gateway_pb2.py
new file mode 100644
index 00000000..5d98e56d
--- /dev/null
+++ b/pyzeebe/proto/gateway_pb2.py
@@ -0,0 +1,171 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# NO CHECKED-IN PROTOBUF GENCODE
+# source: pyzeebe/proto/gateway.proto
+# Protobuf Python Version: 5.27.2
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import runtime_version as _runtime_version
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+_runtime_version.ValidateProtobufRuntimeVersion(
+ _runtime_version.Domain.PUBLIC,
+ 5,
+ 27,
+ 2,
+ '',
+ 'pyzeebe/proto/gateway.proto'
+)
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bpyzeebe/proto/gateway.proto\x12\x10gateway_protocol\"u\n\x1aStreamActivatedJobsRequest\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0e\n\x06worker\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\x03\x12\x15\n\rfetchVariable\x18\x05 \x03(\t\x12\x11\n\ttenantIds\x18\x06 \x03(\t\"\xa1\x01\n\x13\x41\x63tivateJobsRequest\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0e\n\x06worker\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\x03\x12\x19\n\x11maxJobsToActivate\x18\x04 \x01(\x05\x12\x15\n\rfetchVariable\x18\x05 \x03(\t\x12\x16\n\x0erequestTimeout\x18\x06 \x01(\x03\x12\x11\n\ttenantIds\x18\x07 \x03(\t\"D\n\x14\x41\x63tivateJobsResponse\x12,\n\x04jobs\x18\x01 \x03(\x0b\x32\x1e.gateway_protocol.ActivatedJob\"\xba\x02\n\x0c\x41\x63tivatedJob\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x1a\n\x12processInstanceKey\x18\x03 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x04 \x01(\t\x12 \n\x18processDefinitionVersion\x18\x05 \x01(\x05\x12\x1c\n\x14processDefinitionKey\x18\x06 \x01(\x03\x12\x11\n\telementId\x18\x07 \x01(\t\x12\x1a\n\x12\x65lementInstanceKey\x18\x08 \x01(\x03\x12\x15\n\rcustomHeaders\x18\t \x01(\t\x12\x0e\n\x06worker\x18\n \x01(\t\x12\x0f\n\x07retries\x18\x0b \x01(\x05\x12\x10\n\x08\x64\x65\x61\x64line\x18\x0c \x01(\x03\x12\x11\n\tvariables\x18\r \x01(\t\x12\x10\n\x08tenantId\x18\x0e \x01(\t\"r\n\x1c\x43\x61ncelProcessInstanceRequest\x12\x1a\n\x12processInstanceKey\x18\x01 \x01(\x03\x12\x1f\n\x12operationReference\x18\x02 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"\x1f\n\x1d\x43\x61ncelProcessInstanceResponse\"7\n\x12\x43ompleteJobRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x11\n\tvariables\x18\x02 \x01(\t\"\x15\n\x13\x43ompleteJobResponse\"\x97\x02\n\x1c\x43reateProcessInstanceRequest\x12\x1c\n\x14processDefinitionKey\x18\x01 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x11\n\tvariables\x18\x04 \x01(\t\x12T\n\x11startInstructions\x18\x05 \x03(\x0b\x32\x39.gateway_protocol.ProcessInstanceCreationStartInstruction\x12\x10\n\x08tenantId\x18\x06 \x01(\t\x12\x1f\n\x12operationReference\x18\x07 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"<\n\'ProcessInstanceCreationStartInstruction\x12\x11\n\telementId\x18\x01 \x01(\t\"\x93\x01\n\x1d\x43reateProcessInstanceResponse\x12\x1c\n\x14processDefinitionKey\x18\x01 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x1a\n\x12processInstanceKey\x18\x04 \x01(\x03\x12\x10\n\x08tenantId\x18\x05 \x01(\t\"\x99\x01\n&CreateProcessInstanceWithResultRequest\x12?\n\x07request\x18\x01 \x01(\x0b\x32..gateway_protocol.CreateProcessInstanceRequest\x12\x16\n\x0erequestTimeout\x18\x02 \x01(\x03\x12\x16\n\x0e\x66\x65tchVariables\x18\x03 \x03(\t\"\xb0\x01\n\'CreateProcessInstanceWithResultResponse\x12\x1c\n\x14processDefinitionKey\x18\x01 \x01(\x03\x12\x15\n\rbpmnProcessId\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x1a\n\x12processInstanceKey\x18\x04 \x01(\x03\x12\x11\n\tvariables\x18\x05 \x01(\t\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"g\n\x17\x45valuateDecisionRequest\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x01 \x01(\x03\x12\x12\n\ndecisionId\x18\x02 \x01(\t\x12\x11\n\tvariables\x18\x03 \x01(\t\x12\x10\n\x08tenantId\x18\x04 \x01(\t\"\xed\x02\n\x18\x45valuateDecisionResponse\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x01 \x01(\x03\x12\x12\n\ndecisionId\x18\x02 \x01(\t\x12\x14\n\x0c\x64\x65\x63isionName\x18\x03 \x01(\t\x12\x17\n\x0f\x64\x65\x63isionVersion\x18\x04 \x01(\x05\x12\x1e\n\x16\x64\x65\x63isionRequirementsId\x18\x05 \x01(\t\x12\x1f\n\x17\x64\x65\x63isionRequirementsKey\x18\x06 \x01(\x03\x12\x16\n\x0e\x64\x65\x63isionOutput\x18\x07 \x01(\t\x12?\n\x12\x65valuatedDecisions\x18\x08 \x03(\x0b\x32#.gateway_protocol.EvaluatedDecision\x12\x18\n\x10\x66\x61iledDecisionId\x18\t \x01(\t\x12\x16\n\x0e\x66\x61ilureMessage\x18\n \x01(\t\x12\x10\n\x08tenantId\x18\x0b \x01(\t\x12\x1b\n\x13\x64\x65\x63isionInstanceKey\x18\x0c \x01(\x03\"\xab\x02\n\x11\x45valuatedDecision\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x01 \x01(\x03\x12\x12\n\ndecisionId\x18\x02 \x01(\t\x12\x14\n\x0c\x64\x65\x63isionName\x18\x03 \x01(\t\x12\x17\n\x0f\x64\x65\x63isionVersion\x18\x04 \x01(\x05\x12\x14\n\x0c\x64\x65\x63isionType\x18\x05 \x01(\t\x12\x16\n\x0e\x64\x65\x63isionOutput\x18\x06 \x01(\t\x12;\n\x0cmatchedRules\x18\x07 \x03(\x0b\x32%.gateway_protocol.MatchedDecisionRule\x12\x41\n\x0f\x65valuatedInputs\x18\x08 \x03(\x0b\x32(.gateway_protocol.EvaluatedDecisionInput\x12\x10\n\x08tenantId\x18\t \x01(\t\"P\n\x16\x45valuatedDecisionInput\x12\x0f\n\x07inputId\x18\x01 \x01(\t\x12\x11\n\tinputName\x18\x02 \x01(\t\x12\x12\n\ninputValue\x18\x03 \x01(\t\"T\n\x17\x45valuatedDecisionOutput\x12\x10\n\x08outputId\x18\x01 \x01(\t\x12\x12\n\noutputName\x18\x02 \x01(\t\x12\x13\n\x0boutputValue\x18\x03 \x01(\t\"}\n\x13MatchedDecisionRule\x12\x0e\n\x06ruleId\x18\x01 \x01(\t\x12\x11\n\truleIndex\x18\x02 \x01(\x05\x12\x43\n\x10\x65valuatedOutputs\x18\x03 \x03(\x0b\x32).gateway_protocol.EvaluatedDecisionOutput\"U\n\x14\x44\x65ployProcessRequest\x12\x39\n\tprocesses\x18\x01 \x03(\x0b\x32&.gateway_protocol.ProcessRequestObject:\x02\x18\x01\"<\n\x14ProcessRequestObject\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ndefinition\x18\x02 \x01(\x0c:\x02\x18\x01\"^\n\x15\x44\x65ployProcessResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x34\n\tprocesses\x18\x02 \x03(\x0b\x32!.gateway_protocol.ProcessMetadata:\x02\x18\x01\"X\n\x15\x44\x65ployResourceRequest\x12-\n\tresources\x18\x01 \x03(\x0b\x32\x1a.gateway_protocol.Resource\x12\x10\n\x08tenantId\x18\x02 \x01(\t\")\n\x08Resource\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\x0c\"j\n\x16\x44\x65ployResourceResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x31\n\x0b\x64\x65ployments\x18\x02 \x03(\x0b\x32\x1c.gateway_protocol.Deployment\x12\x10\n\x08tenantId\x18\x03 \x01(\t\"\x86\x02\n\nDeployment\x12\x34\n\x07process\x18\x01 \x01(\x0b\x32!.gateway_protocol.ProcessMetadataH\x00\x12\x36\n\x08\x64\x65\x63ision\x18\x02 \x01(\x0b\x32\".gateway_protocol.DecisionMetadataH\x00\x12N\n\x14\x64\x65\x63isionRequirements\x18\x03 \x01(\x0b\x32..gateway_protocol.DecisionRequirementsMetadataH\x00\x12.\n\x04\x66orm\x18\x04 \x01(\x0b\x32\x1e.gateway_protocol.FormMetadataH\x00\x42\n\n\x08Metadata\"\x7f\n\x0fProcessMetadata\x12\x15\n\rbpmnProcessId\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x1c\n\x14processDefinitionKey\x18\x03 \x01(\x03\x12\x14\n\x0cresourceName\x18\x04 \x01(\t\x12\x10\n\x08tenantId\x18\x05 \x01(\t\"\xbe\x01\n\x10\x44\x65\x63isionMetadata\x12\x15\n\rdmnDecisionId\x18\x01 \x01(\t\x12\x17\n\x0f\x64mnDecisionName\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x13\n\x0b\x64\x65\x63isionKey\x18\x04 \x01(\x03\x12!\n\x19\x64mnDecisionRequirementsId\x18\x05 \x01(\t\x12\x1f\n\x17\x64\x65\x63isionRequirementsKey\x18\x06 \x01(\x03\x12\x10\n\x08tenantId\x18\x07 \x01(\t\"\xc0\x01\n\x1c\x44\x65\x63isionRequirementsMetadata\x12!\n\x19\x64mnDecisionRequirementsId\x18\x01 \x01(\t\x12#\n\x1b\x64mnDecisionRequirementsName\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x1f\n\x17\x64\x65\x63isionRequirementsKey\x18\x04 \x01(\x03\x12\x14\n\x0cresourceName\x18\x05 \x01(\t\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"h\n\x0c\x46ormMetadata\x12\x0e\n\x06\x66ormId\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x0f\n\x07\x66ormKey\x18\x03 \x01(\x03\x12\x14\n\x0cresourceName\x18\x04 \x01(\t\x12\x10\n\x08tenantId\x18\x05 \x01(\t\"p\n\x0e\x46\x61ilJobRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x0f\n\x07retries\x18\x02 \x01(\x05\x12\x14\n\x0c\x65rrorMessage\x18\x03 \x01(\t\x12\x14\n\x0cretryBackOff\x18\x04 \x01(\x03\x12\x11\n\tvariables\x18\x05 \x01(\t\"\x11\n\x0f\x46\x61ilJobResponse\"_\n\x11ThrowErrorRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x11\n\terrorCode\x18\x02 \x01(\t\x12\x14\n\x0c\x65rrorMessage\x18\x03 \x01(\t\x12\x11\n\tvariables\x18\x04 \x01(\t\"\x14\n\x12ThrowErrorResponse\"\x89\x01\n\x15PublishMessageRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0e\x63orrelationKey\x18\x02 \x01(\t\x12\x12\n\ntimeToLive\x18\x03 \x01(\x03\x12\x11\n\tmessageId\x18\x04 \x01(\t\x12\x11\n\tvariables\x18\x05 \x01(\t\x12\x10\n\x08tenantId\x18\x06 \x01(\t\"7\n\x16PublishMessageResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x10\n\x08tenantId\x18\x02 \x01(\t\"e\n\x16ResolveIncidentRequest\x12\x13\n\x0bincidentKey\x18\x01 \x01(\x03\x12\x1f\n\x12operationReference\x18\x02 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"\x19\n\x17ResolveIncidentResponse\"\x11\n\x0fTopologyRequest\"\xa2\x01\n\x10TopologyResponse\x12-\n\x07\x62rokers\x18\x01 \x03(\x0b\x32\x1c.gateway_protocol.BrokerInfo\x12\x13\n\x0b\x63lusterSize\x18\x02 \x01(\x05\x12\x17\n\x0fpartitionsCount\x18\x03 \x01(\x05\x12\x19\n\x11replicationFactor\x18\x04 \x01(\x05\x12\x16\n\x0egatewayVersion\x18\x05 \x01(\t\"z\n\nBrokerInfo\x12\x0e\n\x06nodeId\x18\x01 \x01(\x05\x12\x0c\n\x04host\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\x05\x12/\n\npartitions\x18\x04 \x03(\x0b\x32\x1b.gateway_protocol.Partition\x12\x0f\n\x07version\x18\x05 \x01(\t\"\xa0\x02\n\tPartition\x12\x13\n\x0bpartitionId\x18\x01 \x01(\x05\x12=\n\x04role\x18\x02 \x01(\x0e\x32/.gateway_protocol.Partition.PartitionBrokerRole\x12\x41\n\x06health\x18\x03 \x01(\x0e\x32\x31.gateway_protocol.Partition.PartitionBrokerHealth\"=\n\x13PartitionBrokerRole\x12\n\n\x06LEADER\x10\x00\x12\x0c\n\x08\x46OLLOWER\x10\x01\x12\x0c\n\x08INACTIVE\x10\x02\"=\n\x15PartitionBrokerHealth\x12\x0b\n\x07HEALTHY\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x08\n\x04\x44\x45\x41\x44\x10\x02\"r\n\x17UpdateJobRetriesRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x0f\n\x07retries\x18\x02 \x01(\x05\x12\x1f\n\x12operationReference\x18\x03 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"\x1a\n\x18UpdateJobRetriesResponse\"r\n\x17UpdateJobTimeoutRequest\x12\x0e\n\x06jobKey\x18\x01 \x01(\x03\x12\x0f\n\x07timeout\x18\x02 \x01(\x03\x12\x1f\n\x12operationReference\x18\x03 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"\x1a\n\x18UpdateJobTimeoutResponse\"\x8b\x01\n\x13SetVariablesRequest\x12\x1a\n\x12\x65lementInstanceKey\x18\x01 \x01(\x03\x12\x11\n\tvariables\x18\x02 \x01(\t\x12\r\n\x05local\x18\x03 \x01(\x08\x12\x1f\n\x12operationReference\x18\x04 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"#\n\x14SetVariablesResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\"\xd8\x04\n\x1cModifyProcessInstanceRequest\x12\x1a\n\x12processInstanceKey\x18\x01 \x01(\x03\x12`\n\x14\x61\x63tivateInstructions\x18\x02 \x03(\x0b\x32\x42.gateway_protocol.ModifyProcessInstanceRequest.ActivateInstruction\x12\x62\n\x15terminateInstructions\x18\x03 \x03(\x0b\x32\x43.gateway_protocol.ModifyProcessInstanceRequest.TerminateInstruction\x12\x1f\n\x12operationReference\x18\x04 \x01(\x04H\x00\x88\x01\x01\x1a\xae\x01\n\x13\x41\x63tivateInstruction\x12\x11\n\telementId\x18\x01 \x01(\t\x12\"\n\x1a\x61ncestorElementInstanceKey\x18\x02 \x01(\x03\x12`\n\x14variableInstructions\x18\x03 \x03(\x0b\x32\x42.gateway_protocol.ModifyProcessInstanceRequest.VariableInstruction\x1a\x39\n\x13VariableInstruction\x12\x11\n\tvariables\x18\x01 \x01(\t\x12\x0f\n\x07scopeId\x18\x02 \x01(\t\x1a\x32\n\x14TerminateInstruction\x12\x1a\n\x12\x65lementInstanceKey\x18\x01 \x01(\x03\x42\x15\n\x13_operationReference\"\x1f\n\x1dModifyProcessInstanceResponse\"\xa8\x03\n\x1dMigrateProcessInstanceRequest\x12\x1a\n\x12processInstanceKey\x18\x01 \x01(\x03\x12T\n\rmigrationPlan\x18\x02 \x01(\x0b\x32=.gateway_protocol.MigrateProcessInstanceRequest.MigrationPlan\x12\x1f\n\x12operationReference\x18\x03 \x01(\x04H\x00\x88\x01\x01\x1a\x94\x01\n\rMigrationPlan\x12\"\n\x1atargetProcessDefinitionKey\x18\x01 \x01(\x03\x12_\n\x13mappingInstructions\x18\x02 \x03(\x0b\x32\x42.gateway_protocol.MigrateProcessInstanceRequest.MappingInstruction\x1a\x46\n\x12MappingInstruction\x12\x17\n\x0fsourceElementId\x18\x01 \x01(\t\x12\x17\n\x0ftargetElementId\x18\x02 \x01(\tB\x15\n\x13_operationReference\" \n\x1eMigrateProcessInstanceResponse\"d\n\x15\x44\x65leteResourceRequest\x12\x13\n\x0bresourceKey\x18\x01 \x01(\x03\x12\x1f\n\x12operationReference\x18\x02 \x01(\x04H\x00\x88\x01\x01\x42\x15\n\x13_operationReference\"\x18\n\x16\x44\x65leteResourceResponse\"Q\n\x16\x42roadcastSignalRequest\x12\x12\n\nsignalName\x18\x01 \x01(\t\x12\x11\n\tvariables\x18\x02 \x01(\t\x12\x10\n\x08tenantId\x18\x03 \x01(\t\"8\n\x17\x42roadcastSignalResponse\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x10\n\x08tenantId\x18\x02 \x01(\t2\xdb\x11\n\x07Gateway\x12\x61\n\x0c\x41\x63tivateJobs\x12%.gateway_protocol.ActivateJobsRequest\x1a&.gateway_protocol.ActivateJobsResponse\"\x00\x30\x01\x12g\n\x13StreamActivatedJobs\x12,.gateway_protocol.StreamActivatedJobsRequest\x1a\x1e.gateway_protocol.ActivatedJob\"\x00\x30\x01\x12z\n\x15\x43\x61ncelProcessInstance\x12..gateway_protocol.CancelProcessInstanceRequest\x1a/.gateway_protocol.CancelProcessInstanceResponse\"\x00\x12\\\n\x0b\x43ompleteJob\x12$.gateway_protocol.CompleteJobRequest\x1a%.gateway_protocol.CompleteJobResponse\"\x00\x12z\n\x15\x43reateProcessInstance\x12..gateway_protocol.CreateProcessInstanceRequest\x1a/.gateway_protocol.CreateProcessInstanceResponse\"\x00\x12\x98\x01\n\x1f\x43reateProcessInstanceWithResult\x12\x38.gateway_protocol.CreateProcessInstanceWithResultRequest\x1a\x39.gateway_protocol.CreateProcessInstanceWithResultResponse\"\x00\x12k\n\x10\x45valuateDecision\x12).gateway_protocol.EvaluateDecisionRequest\x1a*.gateway_protocol.EvaluateDecisionResponse\"\x00\x12\x65\n\rDeployProcess\x12&.gateway_protocol.DeployProcessRequest\x1a\'.gateway_protocol.DeployProcessResponse\"\x03\x88\x02\x01\x12\x65\n\x0e\x44\x65ployResource\x12\'.gateway_protocol.DeployResourceRequest\x1a(.gateway_protocol.DeployResourceResponse\"\x00\x12P\n\x07\x46\x61ilJob\x12 .gateway_protocol.FailJobRequest\x1a!.gateway_protocol.FailJobResponse\"\x00\x12Y\n\nThrowError\x12#.gateway_protocol.ThrowErrorRequest\x1a$.gateway_protocol.ThrowErrorResponse\"\x00\x12\x65\n\x0ePublishMessage\x12\'.gateway_protocol.PublishMessageRequest\x1a(.gateway_protocol.PublishMessageResponse\"\x00\x12h\n\x0fResolveIncident\x12(.gateway_protocol.ResolveIncidentRequest\x1a).gateway_protocol.ResolveIncidentResponse\"\x00\x12_\n\x0cSetVariables\x12%.gateway_protocol.SetVariablesRequest\x1a&.gateway_protocol.SetVariablesResponse\"\x00\x12S\n\x08Topology\x12!.gateway_protocol.TopologyRequest\x1a\".gateway_protocol.TopologyResponse\"\x00\x12k\n\x10UpdateJobRetries\x12).gateway_protocol.UpdateJobRetriesRequest\x1a*.gateway_protocol.UpdateJobRetriesResponse\"\x00\x12z\n\x15ModifyProcessInstance\x12..gateway_protocol.ModifyProcessInstanceRequest\x1a/.gateway_protocol.ModifyProcessInstanceResponse\"\x00\x12}\n\x16MigrateProcessInstance\x12/.gateway_protocol.MigrateProcessInstanceRequest\x1a\x30.gateway_protocol.MigrateProcessInstanceResponse\"\x00\x12k\n\x10UpdateJobTimeout\x12).gateway_protocol.UpdateJobTimeoutRequest\x1a*.gateway_protocol.UpdateJobTimeoutResponse\"\x00\x12\x65\n\x0e\x44\x65leteResource\x12\'.gateway_protocol.DeleteResourceRequest\x1a(.gateway_protocol.DeleteResourceResponse\"\x00\x12h\n\x0f\x42roadcastSignal\x12(.gateway_protocol.BroadcastSignalRequest\x1a).gateway_protocol.BroadcastSignalResponse\"\x00\x42,\n!io.camunda.zeebe.gateway.protocolP\x00Z\x05./;pbb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'pyzeebe.proto.gateway_pb2', _globals)
+if not _descriptor._USE_C_DESCRIPTORS:
+ _globals['DESCRIPTOR']._loaded_options = None
+ _globals['DESCRIPTOR']._serialized_options = b'\n!io.camunda.zeebe.gateway.protocolP\000Z\005./;pb'
+ _globals['_DEPLOYPROCESSREQUEST']._loaded_options = None
+ _globals['_DEPLOYPROCESSREQUEST']._serialized_options = b'\030\001'
+ _globals['_PROCESSREQUESTOBJECT']._loaded_options = None
+ _globals['_PROCESSREQUESTOBJECT']._serialized_options = b'\030\001'
+ _globals['_DEPLOYPROCESSRESPONSE']._loaded_options = None
+ _globals['_DEPLOYPROCESSRESPONSE']._serialized_options = b'\030\001'
+ _globals['_GATEWAY'].methods_by_name['DeployProcess']._loaded_options = None
+ _globals['_GATEWAY'].methods_by_name['DeployProcess']._serialized_options = b'\210\002\001'
+ _globals['_STREAMACTIVATEDJOBSREQUEST']._serialized_start=49
+ _globals['_STREAMACTIVATEDJOBSREQUEST']._serialized_end=166
+ _globals['_ACTIVATEJOBSREQUEST']._serialized_start=169
+ _globals['_ACTIVATEJOBSREQUEST']._serialized_end=330
+ _globals['_ACTIVATEJOBSRESPONSE']._serialized_start=332
+ _globals['_ACTIVATEJOBSRESPONSE']._serialized_end=400
+ _globals['_ACTIVATEDJOB']._serialized_start=403
+ _globals['_ACTIVATEDJOB']._serialized_end=717
+ _globals['_CANCELPROCESSINSTANCEREQUEST']._serialized_start=719
+ _globals['_CANCELPROCESSINSTANCEREQUEST']._serialized_end=833
+ _globals['_CANCELPROCESSINSTANCERESPONSE']._serialized_start=835
+ _globals['_CANCELPROCESSINSTANCERESPONSE']._serialized_end=866
+ _globals['_COMPLETEJOBREQUEST']._serialized_start=868
+ _globals['_COMPLETEJOBREQUEST']._serialized_end=923
+ _globals['_COMPLETEJOBRESPONSE']._serialized_start=925
+ _globals['_COMPLETEJOBRESPONSE']._serialized_end=946
+ _globals['_CREATEPROCESSINSTANCEREQUEST']._serialized_start=949
+ _globals['_CREATEPROCESSINSTANCEREQUEST']._serialized_end=1228
+ _globals['_PROCESSINSTANCECREATIONSTARTINSTRUCTION']._serialized_start=1230
+ _globals['_PROCESSINSTANCECREATIONSTARTINSTRUCTION']._serialized_end=1290
+ _globals['_CREATEPROCESSINSTANCERESPONSE']._serialized_start=1293
+ _globals['_CREATEPROCESSINSTANCERESPONSE']._serialized_end=1440
+ _globals['_CREATEPROCESSINSTANCEWITHRESULTREQUEST']._serialized_start=1443
+ _globals['_CREATEPROCESSINSTANCEWITHRESULTREQUEST']._serialized_end=1596
+ _globals['_CREATEPROCESSINSTANCEWITHRESULTRESPONSE']._serialized_start=1599
+ _globals['_CREATEPROCESSINSTANCEWITHRESULTRESPONSE']._serialized_end=1775
+ _globals['_EVALUATEDECISIONREQUEST']._serialized_start=1777
+ _globals['_EVALUATEDECISIONREQUEST']._serialized_end=1880
+ _globals['_EVALUATEDECISIONRESPONSE']._serialized_start=1883
+ _globals['_EVALUATEDECISIONRESPONSE']._serialized_end=2248
+ _globals['_EVALUATEDDECISION']._serialized_start=2251
+ _globals['_EVALUATEDDECISION']._serialized_end=2550
+ _globals['_EVALUATEDDECISIONINPUT']._serialized_start=2552
+ _globals['_EVALUATEDDECISIONINPUT']._serialized_end=2632
+ _globals['_EVALUATEDDECISIONOUTPUT']._serialized_start=2634
+ _globals['_EVALUATEDDECISIONOUTPUT']._serialized_end=2718
+ _globals['_MATCHEDDECISIONRULE']._serialized_start=2720
+ _globals['_MATCHEDDECISIONRULE']._serialized_end=2845
+ _globals['_DEPLOYPROCESSREQUEST']._serialized_start=2847
+ _globals['_DEPLOYPROCESSREQUEST']._serialized_end=2932
+ _globals['_PROCESSREQUESTOBJECT']._serialized_start=2934
+ _globals['_PROCESSREQUESTOBJECT']._serialized_end=2994
+ _globals['_DEPLOYPROCESSRESPONSE']._serialized_start=2996
+ _globals['_DEPLOYPROCESSRESPONSE']._serialized_end=3090
+ _globals['_DEPLOYRESOURCEREQUEST']._serialized_start=3092
+ _globals['_DEPLOYRESOURCEREQUEST']._serialized_end=3180
+ _globals['_RESOURCE']._serialized_start=3182
+ _globals['_RESOURCE']._serialized_end=3223
+ _globals['_DEPLOYRESOURCERESPONSE']._serialized_start=3225
+ _globals['_DEPLOYRESOURCERESPONSE']._serialized_end=3331
+ _globals['_DEPLOYMENT']._serialized_start=3334
+ _globals['_DEPLOYMENT']._serialized_end=3596
+ _globals['_PROCESSMETADATA']._serialized_start=3598
+ _globals['_PROCESSMETADATA']._serialized_end=3725
+ _globals['_DECISIONMETADATA']._serialized_start=3728
+ _globals['_DECISIONMETADATA']._serialized_end=3918
+ _globals['_DECISIONREQUIREMENTSMETADATA']._serialized_start=3921
+ _globals['_DECISIONREQUIREMENTSMETADATA']._serialized_end=4113
+ _globals['_FORMMETADATA']._serialized_start=4115
+ _globals['_FORMMETADATA']._serialized_end=4219
+ _globals['_FAILJOBREQUEST']._serialized_start=4221
+ _globals['_FAILJOBREQUEST']._serialized_end=4333
+ _globals['_FAILJOBRESPONSE']._serialized_start=4335
+ _globals['_FAILJOBRESPONSE']._serialized_end=4352
+ _globals['_THROWERRORREQUEST']._serialized_start=4354
+ _globals['_THROWERRORREQUEST']._serialized_end=4449
+ _globals['_THROWERRORRESPONSE']._serialized_start=4451
+ _globals['_THROWERRORRESPONSE']._serialized_end=4471
+ _globals['_PUBLISHMESSAGEREQUEST']._serialized_start=4474
+ _globals['_PUBLISHMESSAGEREQUEST']._serialized_end=4611
+ _globals['_PUBLISHMESSAGERESPONSE']._serialized_start=4613
+ _globals['_PUBLISHMESSAGERESPONSE']._serialized_end=4668
+ _globals['_RESOLVEINCIDENTREQUEST']._serialized_start=4670
+ _globals['_RESOLVEINCIDENTREQUEST']._serialized_end=4771
+ _globals['_RESOLVEINCIDENTRESPONSE']._serialized_start=4773
+ _globals['_RESOLVEINCIDENTRESPONSE']._serialized_end=4798
+ _globals['_TOPOLOGYREQUEST']._serialized_start=4800
+ _globals['_TOPOLOGYREQUEST']._serialized_end=4817
+ _globals['_TOPOLOGYRESPONSE']._serialized_start=4820
+ _globals['_TOPOLOGYRESPONSE']._serialized_end=4982
+ _globals['_BROKERINFO']._serialized_start=4984
+ _globals['_BROKERINFO']._serialized_end=5106
+ _globals['_PARTITION']._serialized_start=5109
+ _globals['_PARTITION']._serialized_end=5397
+ _globals['_PARTITION_PARTITIONBROKERROLE']._serialized_start=5273
+ _globals['_PARTITION_PARTITIONBROKERROLE']._serialized_end=5334
+ _globals['_PARTITION_PARTITIONBROKERHEALTH']._serialized_start=5336
+ _globals['_PARTITION_PARTITIONBROKERHEALTH']._serialized_end=5397
+ _globals['_UPDATEJOBRETRIESREQUEST']._serialized_start=5399
+ _globals['_UPDATEJOBRETRIESREQUEST']._serialized_end=5513
+ _globals['_UPDATEJOBRETRIESRESPONSE']._serialized_start=5515
+ _globals['_UPDATEJOBRETRIESRESPONSE']._serialized_end=5541
+ _globals['_UPDATEJOBTIMEOUTREQUEST']._serialized_start=5543
+ _globals['_UPDATEJOBTIMEOUTREQUEST']._serialized_end=5657
+ _globals['_UPDATEJOBTIMEOUTRESPONSE']._serialized_start=5659
+ _globals['_UPDATEJOBTIMEOUTRESPONSE']._serialized_end=5685
+ _globals['_SETVARIABLESREQUEST']._serialized_start=5688
+ _globals['_SETVARIABLESREQUEST']._serialized_end=5827
+ _globals['_SETVARIABLESRESPONSE']._serialized_start=5829
+ _globals['_SETVARIABLESRESPONSE']._serialized_end=5864
+ _globals['_MODIFYPROCESSINSTANCEREQUEST']._serialized_start=5867
+ _globals['_MODIFYPROCESSINSTANCEREQUEST']._serialized_end=6467
+ _globals['_MODIFYPROCESSINSTANCEREQUEST_ACTIVATEINSTRUCTION']._serialized_start=6159
+ _globals['_MODIFYPROCESSINSTANCEREQUEST_ACTIVATEINSTRUCTION']._serialized_end=6333
+ _globals['_MODIFYPROCESSINSTANCEREQUEST_VARIABLEINSTRUCTION']._serialized_start=6335
+ _globals['_MODIFYPROCESSINSTANCEREQUEST_VARIABLEINSTRUCTION']._serialized_end=6392
+ _globals['_MODIFYPROCESSINSTANCEREQUEST_TERMINATEINSTRUCTION']._serialized_start=6394
+ _globals['_MODIFYPROCESSINSTANCEREQUEST_TERMINATEINSTRUCTION']._serialized_end=6444
+ _globals['_MODIFYPROCESSINSTANCERESPONSE']._serialized_start=6469
+ _globals['_MODIFYPROCESSINSTANCERESPONSE']._serialized_end=6500
+ _globals['_MIGRATEPROCESSINSTANCEREQUEST']._serialized_start=6503
+ _globals['_MIGRATEPROCESSINSTANCEREQUEST']._serialized_end=6927
+ _globals['_MIGRATEPROCESSINSTANCEREQUEST_MIGRATIONPLAN']._serialized_start=6684
+ _globals['_MIGRATEPROCESSINSTANCEREQUEST_MIGRATIONPLAN']._serialized_end=6832
+ _globals['_MIGRATEPROCESSINSTANCEREQUEST_MAPPINGINSTRUCTION']._serialized_start=6834
+ _globals['_MIGRATEPROCESSINSTANCEREQUEST_MAPPINGINSTRUCTION']._serialized_end=6904
+ _globals['_MIGRATEPROCESSINSTANCERESPONSE']._serialized_start=6929
+ _globals['_MIGRATEPROCESSINSTANCERESPONSE']._serialized_end=6961
+ _globals['_DELETERESOURCEREQUEST']._serialized_start=6963
+ _globals['_DELETERESOURCEREQUEST']._serialized_end=7063
+ _globals['_DELETERESOURCERESPONSE']._serialized_start=7065
+ _globals['_DELETERESOURCERESPONSE']._serialized_end=7089
+ _globals['_BROADCASTSIGNALREQUEST']._serialized_start=7091
+ _globals['_BROADCASTSIGNALREQUEST']._serialized_end=7172
+ _globals['_BROADCASTSIGNALRESPONSE']._serialized_start=7174
+ _globals['_BROADCASTSIGNALRESPONSE']._serialized_end=7230
+ _globals['_GATEWAY']._serialized_start=7233
+ _globals['_GATEWAY']._serialized_end=9500
+# @@protoc_insertion_point(module_scope)
diff --git a/pyzeebe/proto/gateway_pb2.pyi b/pyzeebe/proto/gateway_pb2.pyi
new file mode 100644
index 00000000..ff3b3128
--- /dev/null
+++ b/pyzeebe/proto/gateway_pb2.pyi
@@ -0,0 +1,1744 @@
+"""
+@generated by mypy-protobuf. Do not edit manually!
+isort:skip_file
+"""
+
+import builtins
+import collections.abc
+import google.protobuf.descriptor
+import google.protobuf.internal.containers
+import google.protobuf.internal.enum_type_wrapper
+import google.protobuf.message
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ import typing as typing_extensions
+else:
+ import typing_extensions
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+@typing.final
+class StreamActivatedJobsRequest(google.protobuf.message.Message):
+ """For a more complete documentation, refer to Zeebe documentation at:
+ https://docs.camunda.io/docs/reference/grpc
+ """
+
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ TYPE_FIELD_NUMBER: builtins.int
+ WORKER_FIELD_NUMBER: builtins.int
+ TIMEOUT_FIELD_NUMBER: builtins.int
+ FETCHVARIABLE_FIELD_NUMBER: builtins.int
+ TENANTIDS_FIELD_NUMBER: builtins.int
+ type: builtins.str
+ """the job type, as defined in the BPMN process (e.g. )
+ """
+ worker: builtins.str
+ """the name of the worker activating the jobs, mostly used for logging purposes"""
+ timeout: builtins.int
+ """a job returned after this call will not be activated by another call until the
+ timeout (in ms) has been reached
+ """
+ @property
+ def fetchVariable(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+ """a list of variables to fetch as the job variables; if empty, all visible variables at
+ the time of activation for the scope of the job will be returned
+ """
+
+ @property
+ def tenantIds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+ """a list of identifiers of tenants for which to stream jobs"""
+
+ def __init__(
+ self,
+ *,
+ type: builtins.str = ...,
+ worker: builtins.str = ...,
+ timeout: builtins.int = ...,
+ fetchVariable: collections.abc.Iterable[builtins.str] | None = ...,
+ tenantIds: collections.abc.Iterable[builtins.str] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["fetchVariable", b"fetchVariable", "tenantIds", b"tenantIds", "timeout", b"timeout", "type", b"type", "worker", b"worker"]) -> None: ...
+
+global___StreamActivatedJobsRequest = StreamActivatedJobsRequest
+
+@typing.final
+class ActivateJobsRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ TYPE_FIELD_NUMBER: builtins.int
+ WORKER_FIELD_NUMBER: builtins.int
+ TIMEOUT_FIELD_NUMBER: builtins.int
+ MAXJOBSTOACTIVATE_FIELD_NUMBER: builtins.int
+ FETCHVARIABLE_FIELD_NUMBER: builtins.int
+ REQUESTTIMEOUT_FIELD_NUMBER: builtins.int
+ TENANTIDS_FIELD_NUMBER: builtins.int
+ type: builtins.str
+ """the job type, as defined in the BPMN process (e.g. )
+ """
+ worker: builtins.str
+ """the name of the worker activating the jobs, mostly used for logging purposes"""
+ timeout: builtins.int
+ """a job returned after this call will not be activated by another call until the
+ timeout (in ms) has been reached
+ """
+ maxJobsToActivate: builtins.int
+ """the maximum jobs to activate by this request"""
+ requestTimeout: builtins.int
+ """The request will be completed when at least one job is activated or after the requestTimeout (in ms).
+    If the requestTimeout = 0, a default timeout is used.
+    If the requestTimeout < 0, long polling is disabled and the request is completed immediately, even when no job is activated.
+ """
+ @property
+ def fetchVariable(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+ """a list of variables to fetch as the job variables; if empty, all visible variables at
+ the time of activation for the scope of the job will be returned
+ """
+
+ @property
+ def tenantIds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+ """a list of IDs of tenants for which to activate jobs"""
+
+ def __init__(
+ self,
+ *,
+ type: builtins.str = ...,
+ worker: builtins.str = ...,
+ timeout: builtins.int = ...,
+ maxJobsToActivate: builtins.int = ...,
+ fetchVariable: collections.abc.Iterable[builtins.str] | None = ...,
+ requestTimeout: builtins.int = ...,
+ tenantIds: collections.abc.Iterable[builtins.str] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["fetchVariable", b"fetchVariable", "maxJobsToActivate", b"maxJobsToActivate", "requestTimeout", b"requestTimeout", "tenantIds", b"tenantIds", "timeout", b"timeout", "type", b"type", "worker", b"worker"]) -> None: ...
+
+global___ActivateJobsRequest = ActivateJobsRequest
+
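+# A minimal construction sketch; the job type, worker name, and timeout values
+# below are assumptions, and long polling follows the requestTimeout semantics
+# documented above:
+#
+#     from pyzeebe.proto import gateway_pb2
+#
+#     request = gateway_pb2.ActivateJobsRequest(
+#         type="payment-service",  # job type from the BPMN task definition
+#         worker="invoice-worker",
+#         timeout=30_000,          # job lock timeout, in ms
+#         maxJobsToActivate=32,
+#         requestTimeout=10_000,   # long-poll for up to 10 s; 0 uses the default
+#     )
+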
+@typing.final
+class ActivateJobsResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ JOBS_FIELD_NUMBER: builtins.int
+ @property
+ def jobs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ActivatedJob]:
+ """list of activated jobs"""
+
+ def __init__(
+ self,
+ *,
+ jobs: collections.abc.Iterable[global___ActivatedJob] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["jobs", b"jobs"]) -> None: ...
+
+global___ActivateJobsResponse = ActivateJobsResponse
+
+@typing.final
+class ActivatedJob(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ KEY_FIELD_NUMBER: builtins.int
+ TYPE_FIELD_NUMBER: builtins.int
+ PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int
+ BPMNPROCESSID_FIELD_NUMBER: builtins.int
+ PROCESSDEFINITIONVERSION_FIELD_NUMBER: builtins.int
+ PROCESSDEFINITIONKEY_FIELD_NUMBER: builtins.int
+ ELEMENTID_FIELD_NUMBER: builtins.int
+ ELEMENTINSTANCEKEY_FIELD_NUMBER: builtins.int
+ CUSTOMHEADERS_FIELD_NUMBER: builtins.int
+ WORKER_FIELD_NUMBER: builtins.int
+ RETRIES_FIELD_NUMBER: builtins.int
+ DEADLINE_FIELD_NUMBER: builtins.int
+ VARIABLES_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ key: builtins.int
+ """the key, a unique identifier for the job"""
+ type: builtins.str
+ """the type of the job (should match what was requested)"""
+ processInstanceKey: builtins.int
+ """the job's process instance key"""
+ bpmnProcessId: builtins.str
+ """the bpmn process ID of the job process definition"""
+ processDefinitionVersion: builtins.int
+ """the version of the job process definition"""
+ processDefinitionKey: builtins.int
+ """the key of the job process definition"""
+ elementId: builtins.str
+ """the associated task element ID"""
+ elementInstanceKey: builtins.int
+ """the unique key identifying the associated task, unique within the scope of the
+ process instance
+ """
+ customHeaders: builtins.str
+ """a set of custom headers defined during modelling; returned as a serialized
+ JSON document
+ """
+ worker: builtins.str
+ """the name of the worker which activated this job"""
+ retries: builtins.int
+ """the amount of retries left to this job (should always be positive)"""
+ deadline: builtins.int
+ """when the job can be activated again, sent as a UNIX epoch timestamp"""
+ variables: builtins.str
+ """JSON document, computed at activation time, consisting of all visible variables to
+ the task scope
+ """
+ tenantId: builtins.str
+ """the id of the tenant that owns the job"""
+ def __init__(
+ self,
+ *,
+ key: builtins.int = ...,
+ type: builtins.str = ...,
+ processInstanceKey: builtins.int = ...,
+ bpmnProcessId: builtins.str = ...,
+ processDefinitionVersion: builtins.int = ...,
+ processDefinitionKey: builtins.int = ...,
+ elementId: builtins.str = ...,
+ elementInstanceKey: builtins.int = ...,
+ customHeaders: builtins.str = ...,
+ worker: builtins.str = ...,
+ retries: builtins.int = ...,
+ deadline: builtins.int = ...,
+ variables: builtins.str = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["bpmnProcessId", b"bpmnProcessId", "customHeaders", b"customHeaders", "deadline", b"deadline", "elementId", b"elementId", "elementInstanceKey", b"elementInstanceKey", "key", b"key", "processDefinitionKey", b"processDefinitionKey", "processDefinitionVersion", b"processDefinitionVersion", "processInstanceKey", b"processInstanceKey", "retries", b"retries", "tenantId", b"tenantId", "type", b"type", "variables", b"variables", "worker", b"worker"]) -> None: ...
+
+global___ActivatedJob = ActivatedJob
+
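+# A minimal handling sketch, assuming `job` is an ActivatedJob taken from an
+# ActivateJobsResponse; `variables` and `customHeaders` are JSON documents
+# serialized as strings:
+#
+#     import json
+#
+#     variables = json.loads(job.variables)    # dict of visible variables
+#     headers = json.loads(job.customHeaders)  # dict of modelled custom headers
+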
+@typing.final
+class CancelProcessInstanceRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int
+ OPERATIONREFERENCE_FIELD_NUMBER: builtins.int
+ processInstanceKey: builtins.int
+ """the process instance key (as, for example, obtained from
+ CreateProcessInstanceResponse)
+ """
+ operationReference: builtins.int
+ """a reference key chosen by the user and will be part of all records resulted from this operation"""
+ def __init__(
+ self,
+ *,
+ processInstanceKey: builtins.int = ...,
+ operationReference: builtins.int | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference", "processInstanceKey", b"processInstanceKey"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ...
+
+global___CancelProcessInstanceRequest = CancelProcessInstanceRequest
+
+@typing.final
+class CancelProcessInstanceResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ def __init__(
+ self,
+ ) -> None: ...
+
+global___CancelProcessInstanceResponse = CancelProcessInstanceResponse
+
+@typing.final
+class CompleteJobRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ JOBKEY_FIELD_NUMBER: builtins.int
+ VARIABLES_FIELD_NUMBER: builtins.int
+ jobKey: builtins.int
+ """the unique job identifier, as obtained from ActivateJobsResponse"""
+ variables: builtins.str
+ """a JSON document representing the variables in the current task scope"""
+ def __init__(
+ self,
+ *,
+ jobKey: builtins.int = ...,
+ variables: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["jobKey", b"jobKey", "variables", b"variables"]) -> None: ...
+
+global___CompleteJobRequest = CompleteJobRequest
+
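+# A minimal sketch, assuming `job` came from a previous activation and the
+# worker's result should be merged back into the process instance:
+#
+#     import json
+#     from pyzeebe.proto import gateway_pb2
+#
+#     request = gateway_pb2.CompleteJobRequest(
+#         jobKey=job.key,
+#         variables=json.dumps({"paymentId": "1234"}),  # must be a JSON object
+#     )
+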
+@typing.final
+class CompleteJobResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ def __init__(
+ self,
+ ) -> None: ...
+
+global___CompleteJobResponse = CompleteJobResponse
+
+@typing.final
+class CreateProcessInstanceRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ PROCESSDEFINITIONKEY_FIELD_NUMBER: builtins.int
+ BPMNPROCESSID_FIELD_NUMBER: builtins.int
+ VERSION_FIELD_NUMBER: builtins.int
+ VARIABLES_FIELD_NUMBER: builtins.int
+ STARTINSTRUCTIONS_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ OPERATIONREFERENCE_FIELD_NUMBER: builtins.int
+ processDefinitionKey: builtins.int
+ """the unique key identifying the process definition (e.g. returned from a process
+ in the DeployProcessResponse message)
+ """
+ bpmnProcessId: builtins.str
+ """the BPMN process ID of the process definition"""
+ version: builtins.int
+ """the version of the process; set to -1 to use the latest version"""
+ variables: builtins.str
+ """JSON document that will instantiate the variables for the root variable scope of the
+ process instance; it must be a JSON object, as variables will be mapped in a
+    key-value fashion, e.g. { "a": 1, "b": 2 } will create two variables, named "a" and
+ "b" respectively, with their associated values. [{ "a": 1, "b": 2 }] would not be a
+ valid argument, as the root of the JSON document is an array and not an object.
+ """
+ tenantId: builtins.str
+ """the tenant id of the process definition"""
+ operationReference: builtins.int
+ """a reference key chosen by the user and will be part of all records resulted from this operation"""
+ @property
+ def startInstructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProcessInstanceCreationStartInstruction]:
+ """List of start instructions. If empty (default) the process instance
+ will start at the start event. If non-empty the process instance will apply start
+ instructions after it has been created
+ """
+
+ def __init__(
+ self,
+ *,
+ processDefinitionKey: builtins.int = ...,
+ bpmnProcessId: builtins.str = ...,
+ version: builtins.int = ...,
+ variables: builtins.str = ...,
+ startInstructions: collections.abc.Iterable[global___ProcessInstanceCreationStartInstruction] | None = ...,
+ tenantId: builtins.str = ...,
+ operationReference: builtins.int | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "bpmnProcessId", b"bpmnProcessId", "operationReference", b"operationReference", "processDefinitionKey", b"processDefinitionKey", "startInstructions", b"startInstructions", "tenantId", b"tenantId", "variables", b"variables", "version", b"version"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ...
+
+global___CreateProcessInstanceRequest = CreateProcessInstanceRequest
+
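+# A minimal sketch: starting the latest version of a process by its BPMN ID
+# (the process ID and variables are assumptions):
+#
+#     import json
+#     from pyzeebe.proto import gateway_pb2
+#
+#     request = gateway_pb2.CreateProcessInstanceRequest(
+#         bpmnProcessId="order-process",
+#         version=-1,                             # -1 selects the latest version
+#         variables=json.dumps({"orderId": 42}),  # root must be a JSON object
+#     )
+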
+@typing.final
+class ProcessInstanceCreationStartInstruction(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ ELEMENTID_FIELD_NUMBER: builtins.int
+ elementId: builtins.str
+ """for now, however, the start instruction is implicitly a
+ "startBeforeElement" instruction
+
+ element ID
+ """
+ def __init__(
+ self,
+ *,
+ elementId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["elementId", b"elementId"]) -> None: ...
+
+global___ProcessInstanceCreationStartInstruction = ProcessInstanceCreationStartInstruction
+
+@typing.final
+class CreateProcessInstanceResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ PROCESSDEFINITIONKEY_FIELD_NUMBER: builtins.int
+ BPMNPROCESSID_FIELD_NUMBER: builtins.int
+ VERSION_FIELD_NUMBER: builtins.int
+ PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ processDefinitionKey: builtins.int
+ """the key of the process definition which was used to create the process instance"""
+ bpmnProcessId: builtins.str
+ """the BPMN process ID of the process definition which was used to create the process
+ instance
+ """
+ version: builtins.int
+ """the version of the process definition which was used to create the process instance"""
+ processInstanceKey: builtins.int
+ """the unique identifier of the created process instance; to be used wherever a request
+ needs a process instance key (e.g. CancelProcessInstanceRequest)
+ """
+ tenantId: builtins.str
+ """the tenant identifier of the created process instance"""
+ def __init__(
+ self,
+ *,
+ processDefinitionKey: builtins.int = ...,
+ bpmnProcessId: builtins.str = ...,
+ version: builtins.int = ...,
+ processInstanceKey: builtins.int = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["bpmnProcessId", b"bpmnProcessId", "processDefinitionKey", b"processDefinitionKey", "processInstanceKey", b"processInstanceKey", "tenantId", b"tenantId", "version", b"version"]) -> None: ...
+
+global___CreateProcessInstanceResponse = CreateProcessInstanceResponse
+
+@typing.final
+class CreateProcessInstanceWithResultRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ REQUEST_FIELD_NUMBER: builtins.int
+ REQUESTTIMEOUT_FIELD_NUMBER: builtins.int
+ FETCHVARIABLES_FIELD_NUMBER: builtins.int
+ requestTimeout: builtins.int
+ """timeout (in ms). the request will be closed if the process is not completed
+ before the requestTimeout.
+ if requestTimeout = 0, uses the generic requestTimeout configured in the gateway.
+ """
+ @property
+ def request(self) -> global___CreateProcessInstanceRequest: ...
+ @property
+ def fetchVariables(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+ """list of names of variables to be included in `CreateProcessInstanceWithResultResponse.variables`
+ if empty, all visible variables in the root scope will be returned.
+ """
+
+ def __init__(
+ self,
+ *,
+ request: global___CreateProcessInstanceRequest | None = ...,
+ requestTimeout: builtins.int = ...,
+ fetchVariables: collections.abc.Iterable[builtins.str] | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["request", b"request"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["fetchVariables", b"fetchVariables", "request", b"request", "requestTimeout", b"requestTimeout"]) -> None: ...
+
+global___CreateProcessInstanceWithResultRequest = CreateProcessInstanceWithResultRequest
+
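+# A minimal sketch: the synchronous variant wraps a CreateProcessInstanceRequest
+# and blocks until the instance completes (values assumed):
+#
+#     from pyzeebe.proto import gateway_pb2
+#
+#     request = gateway_pb2.CreateProcessInstanceWithResultRequest(
+#         request=gateway_pb2.CreateProcessInstanceRequest(
+#             bpmnProcessId="order-process",
+#             version=-1,
+#         ),
+#         requestTimeout=60_000,           # give up if not completed within 60 s
+#         fetchVariables=["orderStatus"],  # limit which variables are returned
+#     )
+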
+@typing.final
+class CreateProcessInstanceWithResultResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ PROCESSDEFINITIONKEY_FIELD_NUMBER: builtins.int
+ BPMNPROCESSID_FIELD_NUMBER: builtins.int
+ VERSION_FIELD_NUMBER: builtins.int
+ PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int
+ VARIABLES_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ processDefinitionKey: builtins.int
+ """the key of the process definition which was used to create the process instance"""
+ bpmnProcessId: builtins.str
+ """the BPMN process ID of the process definition which was used to create the process
+ instance
+ """
+ version: builtins.int
+ """the version of the process definition which was used to create the process instance"""
+ processInstanceKey: builtins.int
+ """the unique identifier of the created process instance; to be used wherever a request
+ needs a process instance key (e.g. CancelProcessInstanceRequest)
+ """
+ variables: builtins.str
+ """JSON document
+ consists of visible variables in the root scope
+ """
+ tenantId: builtins.str
+ """the tenant identifier of the process definition"""
+ def __init__(
+ self,
+ *,
+ processDefinitionKey: builtins.int = ...,
+ bpmnProcessId: builtins.str = ...,
+ version: builtins.int = ...,
+ processInstanceKey: builtins.int = ...,
+ variables: builtins.str = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["bpmnProcessId", b"bpmnProcessId", "processDefinitionKey", b"processDefinitionKey", "processInstanceKey", b"processInstanceKey", "tenantId", b"tenantId", "variables", b"variables", "version", b"version"]) -> None: ...
+
+global___CreateProcessInstanceWithResultResponse = CreateProcessInstanceWithResultResponse
+
+@typing.final
+class EvaluateDecisionRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ DECISIONKEY_FIELD_NUMBER: builtins.int
+ DECISIONID_FIELD_NUMBER: builtins.int
+ VARIABLES_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ decisionKey: builtins.int
+ """the unique key identifying the decision to be evaluated (e.g. returned
+ from a decision in the DeployResourceResponse message)
+ """
+ decisionId: builtins.str
+ """the ID of the decision to be evaluated"""
+ variables: builtins.str
+ """JSON document that will instantiate the variables for the decision to be
+ evaluated; it must be a JSON object, as variables will be mapped in a
+ key-value fashion, e.g. { "a": 1, "b": 2 } will create two variables,
+ named "a" and "b" respectively, with their associated values.
+ [{ "a": 1, "b": 2 }] would not be a valid argument, as the root of the
+ JSON document is an array and not an object.
+ """
+ tenantId: builtins.str
+ """the tenant identifier of the decision"""
+ def __init__(
+ self,
+ *,
+ decisionKey: builtins.int = ...,
+ decisionId: builtins.str = ...,
+ variables: builtins.str = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["decisionId", b"decisionId", "decisionKey", b"decisionKey", "tenantId", b"tenantId", "variables", b"variables"]) -> None: ...
+
+global___EvaluateDecisionRequest = EvaluateDecisionRequest
+
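+# A minimal sketch: evaluating a deployed DMN decision by its ID (the decision
+# ID and variables are assumptions):
+#
+#     import json
+#     from pyzeebe.proto import gateway_pb2
+#
+#     request = gateway_pb2.EvaluateDecisionRequest(
+#         decisionId="credit-check",
+#         variables=json.dumps({"amount": 10000}),  # root must be a JSON object
+#     )
+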
+@typing.final
+class EvaluateDecisionResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ DECISIONKEY_FIELD_NUMBER: builtins.int
+ DECISIONID_FIELD_NUMBER: builtins.int
+ DECISIONNAME_FIELD_NUMBER: builtins.int
+ DECISIONVERSION_FIELD_NUMBER: builtins.int
+ DECISIONREQUIREMENTSID_FIELD_NUMBER: builtins.int
+ DECISIONREQUIREMENTSKEY_FIELD_NUMBER: builtins.int
+ DECISIONOUTPUT_FIELD_NUMBER: builtins.int
+ EVALUATEDDECISIONS_FIELD_NUMBER: builtins.int
+ FAILEDDECISIONID_FIELD_NUMBER: builtins.int
+ FAILUREMESSAGE_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ DECISIONINSTANCEKEY_FIELD_NUMBER: builtins.int
+ decisionKey: builtins.int
+ """the unique key identifying the decision which was evaluated (e.g. returned
+ from a decision in the DeployResourceResponse message)
+ """
+ decisionId: builtins.str
+ """the ID of the decision which was evaluated"""
+ decisionName: builtins.str
+ """the name of the decision which was evaluated"""
+ decisionVersion: builtins.int
+ """the version of the decision which was evaluated"""
+ decisionRequirementsId: builtins.str
+ """the ID of the decision requirements graph that the decision which was
+ evaluated is part of.
+ """
+ decisionRequirementsKey: builtins.int
+ """the unique key identifying the decision requirements graph that the
+ decision which was evaluated is part of.
+ """
+ decisionOutput: builtins.str
+ """JSON document that will instantiate the result of the decision which was
+ evaluated; it will be a JSON object, as the result output will be mapped
+ in a key-value fashion, e.g. { "a": 1 }.
+ """
+ failedDecisionId: builtins.str
+ """an optional string indicating the ID of the decision which
+ failed during evaluation
+ """
+ failureMessage: builtins.str
+ """an optional message describing why the decision which was evaluated failed"""
+ tenantId: builtins.str
+ """the tenant identifier of the evaluated decision"""
+ decisionInstanceKey: builtins.int
+ """the unique key identifying this decision evaluation"""
+ @property
+ def evaluatedDecisions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EvaluatedDecision]:
+ """a list of decisions that were evaluated within the requested decision evaluation"""
+
+ def __init__(
+ self,
+ *,
+ decisionKey: builtins.int = ...,
+ decisionId: builtins.str = ...,
+ decisionName: builtins.str = ...,
+ decisionVersion: builtins.int = ...,
+ decisionRequirementsId: builtins.str = ...,
+ decisionRequirementsKey: builtins.int = ...,
+ decisionOutput: builtins.str = ...,
+ evaluatedDecisions: collections.abc.Iterable[global___EvaluatedDecision] | None = ...,
+ failedDecisionId: builtins.str = ...,
+ failureMessage: builtins.str = ...,
+ tenantId: builtins.str = ...,
+ decisionInstanceKey: builtins.int = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["decisionId", b"decisionId", "decisionInstanceKey", b"decisionInstanceKey", "decisionKey", b"decisionKey", "decisionName", b"decisionName", "decisionOutput", b"decisionOutput", "decisionRequirementsId", b"decisionRequirementsId", "decisionRequirementsKey", b"decisionRequirementsKey", "decisionVersion", b"decisionVersion", "evaluatedDecisions", b"evaluatedDecisions", "failedDecisionId", b"failedDecisionId", "failureMessage", b"failureMessage", "tenantId", b"tenantId"]) -> None: ...
+
+global___EvaluateDecisionResponse = EvaluateDecisionResponse
+
+@typing.final
+class EvaluatedDecision(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ DECISIONKEY_FIELD_NUMBER: builtins.int
+ DECISIONID_FIELD_NUMBER: builtins.int
+ DECISIONNAME_FIELD_NUMBER: builtins.int
+ DECISIONVERSION_FIELD_NUMBER: builtins.int
+ DECISIONTYPE_FIELD_NUMBER: builtins.int
+ DECISIONOUTPUT_FIELD_NUMBER: builtins.int
+ MATCHEDRULES_FIELD_NUMBER: builtins.int
+ EVALUATEDINPUTS_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ decisionKey: builtins.int
+ """the unique key identifying the decision which was evaluated (e.g. returned
+ from a decision in the DeployResourceResponse message)
+ """
+ decisionId: builtins.str
+ """the ID of the decision which was evaluated"""
+ decisionName: builtins.str
+ """the name of the decision which was evaluated"""
+ decisionVersion: builtins.int
+ """the version of the decision which was evaluated"""
+ decisionType: builtins.str
+ """the type of the decision which was evaluated"""
+ decisionOutput: builtins.str
+ """JSON document that will instantiate the result of the decision which was
+ evaluated; it will be a JSON object, as the result output will be mapped
+ in a key-value fashion, e.g. { "a": 1 }.
+ """
+ tenantId: builtins.str
+ """the tenant identifier of the evaluated decision"""
+ @property
+ def matchedRules(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MatchedDecisionRule]:
+ """the decision rules that matched within this decision evaluation"""
+
+ @property
+ def evaluatedInputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EvaluatedDecisionInput]:
+ """the decision inputs that were evaluated within this decision evaluation"""
+
+ def __init__(
+ self,
+ *,
+ decisionKey: builtins.int = ...,
+ decisionId: builtins.str = ...,
+ decisionName: builtins.str = ...,
+ decisionVersion: builtins.int = ...,
+ decisionType: builtins.str = ...,
+ decisionOutput: builtins.str = ...,
+ matchedRules: collections.abc.Iterable[global___MatchedDecisionRule] | None = ...,
+ evaluatedInputs: collections.abc.Iterable[global___EvaluatedDecisionInput] | None = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["decisionId", b"decisionId", "decisionKey", b"decisionKey", "decisionName", b"decisionName", "decisionOutput", b"decisionOutput", "decisionType", b"decisionType", "decisionVersion", b"decisionVersion", "evaluatedInputs", b"evaluatedInputs", "matchedRules", b"matchedRules", "tenantId", b"tenantId"]) -> None: ...
+
+global___EvaluatedDecision = EvaluatedDecision
+
+@typing.final
+class EvaluatedDecisionInput(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ INPUTID_FIELD_NUMBER: builtins.int
+ INPUTNAME_FIELD_NUMBER: builtins.int
+ INPUTVALUE_FIELD_NUMBER: builtins.int
+ inputId: builtins.str
+ """the id of the evaluated decision input"""
+ inputName: builtins.str
+ """the name of the evaluated decision input"""
+ inputValue: builtins.str
+ """the value of the evaluated decision input"""
+ def __init__(
+ self,
+ *,
+ inputId: builtins.str = ...,
+ inputName: builtins.str = ...,
+ inputValue: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["inputId", b"inputId", "inputName", b"inputName", "inputValue", b"inputValue"]) -> None: ...
+
+global___EvaluatedDecisionInput = EvaluatedDecisionInput
+
+@typing.final
+class EvaluatedDecisionOutput(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ OUTPUTID_FIELD_NUMBER: builtins.int
+ OUTPUTNAME_FIELD_NUMBER: builtins.int
+ OUTPUTVALUE_FIELD_NUMBER: builtins.int
+ outputId: builtins.str
+ """the id of the evaluated decision output"""
+ outputName: builtins.str
+ """the name of the evaluated decision output"""
+ outputValue: builtins.str
+ """the value of the evaluated decision output"""
+ def __init__(
+ self,
+ *,
+ outputId: builtins.str = ...,
+ outputName: builtins.str = ...,
+ outputValue: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["outputId", b"outputId", "outputName", b"outputName", "outputValue", b"outputValue"]) -> None: ...
+
+global___EvaluatedDecisionOutput = EvaluatedDecisionOutput
+
+@typing.final
+class MatchedDecisionRule(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ RULEID_FIELD_NUMBER: builtins.int
+ RULEINDEX_FIELD_NUMBER: builtins.int
+ EVALUATEDOUTPUTS_FIELD_NUMBER: builtins.int
+ ruleId: builtins.str
+ """the id of the matched rule"""
+ ruleIndex: builtins.int
+ """the index of the matched rule"""
+ @property
+ def evaluatedOutputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EvaluatedDecisionOutput]:
+ """the evaluated decision outputs"""
+
+ def __init__(
+ self,
+ *,
+ ruleId: builtins.str = ...,
+ ruleIndex: builtins.int = ...,
+ evaluatedOutputs: collections.abc.Iterable[global___EvaluatedDecisionOutput] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["evaluatedOutputs", b"evaluatedOutputs", "ruleId", b"ruleId", "ruleIndex", b"ruleIndex"]) -> None: ...
+
+global___MatchedDecisionRule = MatchedDecisionRule
+
+@typing.final
+class DeployProcessRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ PROCESSES_FIELD_NUMBER: builtins.int
+ @property
+ def processes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProcessRequestObject]:
+ """List of process resources to deploy"""
+
+ def __init__(
+ self,
+ *,
+ processes: collections.abc.Iterable[global___ProcessRequestObject] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["processes", b"processes"]) -> None: ...
+
+global___DeployProcessRequest = DeployProcessRequest
+
+@typing.final
+class ProcessRequestObject(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ NAME_FIELD_NUMBER: builtins.int
+ DEFINITION_FIELD_NUMBER: builtins.int
+ name: builtins.str
+ """the resource basename, e.g. myProcess.bpmn"""
+ definition: builtins.bytes
+ """the process definition as a UTF8-encoded string"""
+ def __init__(
+ self,
+ *,
+ name: builtins.str = ...,
+ definition: builtins.bytes = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["definition", b"definition", "name", b"name"]) -> None: ...
+
+global___ProcessRequestObject = ProcessRequestObject
+
+@typing.final
+class DeployProcessResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ KEY_FIELD_NUMBER: builtins.int
+ PROCESSES_FIELD_NUMBER: builtins.int
+ key: builtins.int
+ """the unique key identifying the deployment"""
+ @property
+ def processes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProcessMetadata]:
+ """a list of deployed processes"""
+
+ def __init__(
+ self,
+ *,
+ key: builtins.int = ...,
+ processes: collections.abc.Iterable[global___ProcessMetadata] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["key", b"key", "processes", b"processes"]) -> None: ...
+
+global___DeployProcessResponse = DeployProcessResponse
+
+@typing.final
+class DeployResourceRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ RESOURCES_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ tenantId: builtins.str
+ """the tenant id of the resources to deploy"""
+ @property
+ def resources(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Resource]:
+ """list of resources to deploy"""
+
+ def __init__(
+ self,
+ *,
+ resources: collections.abc.Iterable[global___Resource] | None = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["resources", b"resources", "tenantId", b"tenantId"]) -> None: ...
+
+global___DeployResourceRequest = DeployResourceRequest
+
+@typing.final
+class Resource(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ NAME_FIELD_NUMBER: builtins.int
+ CONTENT_FIELD_NUMBER: builtins.int
+ name: builtins.str
+ """the resource name, e.g. myProcess.bpmn or myDecision.dmn"""
+ content: builtins.bytes
+ """the file content as a UTF8-encoded string"""
+ def __init__(
+ self,
+ *,
+ name: builtins.str = ...,
+ content: builtins.bytes = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["content", b"content", "name", b"name"]) -> None: ...
+
+global___Resource = Resource
+
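+# A minimal sketch: packaging a local BPMN file into a DeployResourceRequest
+# (the filename is an assumption):
+#
+#     from pyzeebe.proto import gateway_pb2
+#
+#     with open("order-process.bpmn", "rb") as f:
+#         resource = gateway_pb2.Resource(name="order-process.bpmn", content=f.read())
+#
+#     request = gateway_pb2.DeployResourceRequest(resources=[resource])
+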
+@typing.final
+class DeployResourceResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ KEY_FIELD_NUMBER: builtins.int
+ DEPLOYMENTS_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ key: builtins.int
+ """the unique key identifying the deployment"""
+ tenantId: builtins.str
+ """the tenant id of the deployed resources"""
+ @property
+ def deployments(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Deployment]:
+ """a list of deployed resources, e.g. processes"""
+
+ def __init__(
+ self,
+ *,
+ key: builtins.int = ...,
+ deployments: collections.abc.Iterable[global___Deployment] | None = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["deployments", b"deployments", "key", b"key", "tenantId", b"tenantId"]) -> None: ...
+
+global___DeployResourceResponse = DeployResourceResponse
+
+@typing.final
+class Deployment(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ PROCESS_FIELD_NUMBER: builtins.int
+ DECISION_FIELD_NUMBER: builtins.int
+ DECISIONREQUIREMENTS_FIELD_NUMBER: builtins.int
+ FORM_FIELD_NUMBER: builtins.int
+ @property
+ def process(self) -> global___ProcessMetadata:
+ """metadata of a deployed process"""
+
+ @property
+ def decision(self) -> global___DecisionMetadata:
+ """metadata of a deployed decision"""
+
+ @property
+ def decisionRequirements(self) -> global___DecisionRequirementsMetadata:
+ """metadata of a deployed decision requirements"""
+
+ @property
+ def form(self) -> global___FormMetadata:
+ """metadata of a deployed form"""
+
+ def __init__(
+ self,
+ *,
+ process: global___ProcessMetadata | None = ...,
+ decision: global___DecisionMetadata | None = ...,
+ decisionRequirements: global___DecisionRequirementsMetadata | None = ...,
+ form: global___FormMetadata | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["Metadata", b"Metadata", "decision", b"decision", "decisionRequirements", b"decisionRequirements", "form", b"form", "process", b"process"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["Metadata", b"Metadata", "decision", b"decision", "decisionRequirements", b"decisionRequirements", "form", b"form", "process", b"process"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing.Literal["Metadata", b"Metadata"]) -> typing.Literal["process", "decision", "decisionRequirements", "form"] | None: ...
+
+global___Deployment = Deployment
+
+@typing.final
+class ProcessMetadata(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ BPMNPROCESSID_FIELD_NUMBER: builtins.int
+ VERSION_FIELD_NUMBER: builtins.int
+ PROCESSDEFINITIONKEY_FIELD_NUMBER: builtins.int
+ RESOURCENAME_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ bpmnProcessId: builtins.str
+ """the bpmn process ID, as parsed during deployment; together with the version forms a
+ unique identifier for a specific process definition
+ """
+ version: builtins.int
+ """the assigned process version"""
+ processDefinitionKey: builtins.int
+ """the assigned key, which acts as a unique identifier for this process"""
+ resourceName: builtins.str
+ """the resource name (see: ProcessRequestObject.name) from which this process was
+ parsed
+ """
+ tenantId: builtins.str
+ """the tenant id of the deployed process"""
+ def __init__(
+ self,
+ *,
+ bpmnProcessId: builtins.str = ...,
+ version: builtins.int = ...,
+ processDefinitionKey: builtins.int = ...,
+ resourceName: builtins.str = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["bpmnProcessId", b"bpmnProcessId", "processDefinitionKey", b"processDefinitionKey", "resourceName", b"resourceName", "tenantId", b"tenantId", "version", b"version"]) -> None: ...
+
+global___ProcessMetadata = ProcessMetadata
+
+@typing.final
+class DecisionMetadata(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ DMNDECISIONID_FIELD_NUMBER: builtins.int
+ DMNDECISIONNAME_FIELD_NUMBER: builtins.int
+ VERSION_FIELD_NUMBER: builtins.int
+ DECISIONKEY_FIELD_NUMBER: builtins.int
+ DMNDECISIONREQUIREMENTSID_FIELD_NUMBER: builtins.int
+ DECISIONREQUIREMENTSKEY_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ dmnDecisionId: builtins.str
+ """the dmn decision ID, as parsed during deployment; together with the
+    version forms a unique identifier for a specific decision
+ """
+ dmnDecisionName: builtins.str
+ """the dmn name of the decision, as parsed during deployment"""
+ version: builtins.int
+ """the assigned decision version"""
+ decisionKey: builtins.int
+ """the assigned decision key, which acts as a unique identifier for this
+ decision
+ """
+ dmnDecisionRequirementsId: builtins.str
+ """the dmn ID of the decision requirements graph that this decision is part
+ of, as parsed during deployment
+ """
+ decisionRequirementsKey: builtins.int
+ """the assigned key of the decision requirements graph that this decision is
+ part of
+ """
+ tenantId: builtins.str
+ """the tenant id of the deployed decision"""
+ def __init__(
+ self,
+ *,
+ dmnDecisionId: builtins.str = ...,
+ dmnDecisionName: builtins.str = ...,
+ version: builtins.int = ...,
+ decisionKey: builtins.int = ...,
+ dmnDecisionRequirementsId: builtins.str = ...,
+ decisionRequirementsKey: builtins.int = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["decisionKey", b"decisionKey", "decisionRequirementsKey", b"decisionRequirementsKey", "dmnDecisionId", b"dmnDecisionId", "dmnDecisionName", b"dmnDecisionName", "dmnDecisionRequirementsId", b"dmnDecisionRequirementsId", "tenantId", b"tenantId", "version", b"version"]) -> None: ...
+
+global___DecisionMetadata = DecisionMetadata
+
+@typing.final
+class DecisionRequirementsMetadata(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ DMNDECISIONREQUIREMENTSID_FIELD_NUMBER: builtins.int
+ DMNDECISIONREQUIREMENTSNAME_FIELD_NUMBER: builtins.int
+ VERSION_FIELD_NUMBER: builtins.int
+ DECISIONREQUIREMENTSKEY_FIELD_NUMBER: builtins.int
+ RESOURCENAME_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ dmnDecisionRequirementsId: builtins.str
+ """the dmn decision requirements ID, as parsed during deployment; together
+    with the version forms a unique identifier for a specific set of decision requirements
+ """
+ dmnDecisionRequirementsName: builtins.str
+ """the dmn name of the decision requirements, as parsed during deployment"""
+ version: builtins.int
+ """the assigned decision requirements version"""
+ decisionRequirementsKey: builtins.int
+ """the assigned decision requirements key, which acts as a unique identifier
+ for this decision requirements
+ """
+ resourceName: builtins.str
+ """the resource name (see: Resource.name) from which this decision
+ requirements was parsed
+ """
+ tenantId: builtins.str
+ """the tenant id of the deployed decision requirements"""
+ def __init__(
+ self,
+ *,
+ dmnDecisionRequirementsId: builtins.str = ...,
+ dmnDecisionRequirementsName: builtins.str = ...,
+ version: builtins.int = ...,
+ decisionRequirementsKey: builtins.int = ...,
+ resourceName: builtins.str = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["decisionRequirementsKey", b"decisionRequirementsKey", "dmnDecisionRequirementsId", b"dmnDecisionRequirementsId", "dmnDecisionRequirementsName", b"dmnDecisionRequirementsName", "resourceName", b"resourceName", "tenantId", b"tenantId", "version", b"version"]) -> None: ...
+
+global___DecisionRequirementsMetadata = DecisionRequirementsMetadata
+
+@typing.final
+class FormMetadata(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ FORMID_FIELD_NUMBER: builtins.int
+ VERSION_FIELD_NUMBER: builtins.int
+ FORMKEY_FIELD_NUMBER: builtins.int
+ RESOURCENAME_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ formId: builtins.str
+ """the form ID, as parsed during deployment; together with the
+    version forms a unique identifier for a specific form
+ """
+ version: builtins.int
+ """the assigned form version"""
+ formKey: builtins.int
+ """the assigned key, which acts as a unique identifier for this form"""
+ resourceName: builtins.str
+ """the resource name"""
+ tenantId: builtins.str
+ """the tenant id of the deployed form"""
+ def __init__(
+ self,
+ *,
+ formId: builtins.str = ...,
+ version: builtins.int = ...,
+ formKey: builtins.int = ...,
+ resourceName: builtins.str = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["formId", b"formId", "formKey", b"formKey", "resourceName", b"resourceName", "tenantId", b"tenantId", "version", b"version"]) -> None: ...
+
+global___FormMetadata = FormMetadata
+
+@typing.final
+class FailJobRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ JOBKEY_FIELD_NUMBER: builtins.int
+ RETRIES_FIELD_NUMBER: builtins.int
+ ERRORMESSAGE_FIELD_NUMBER: builtins.int
+ RETRYBACKOFF_FIELD_NUMBER: builtins.int
+ VARIABLES_FIELD_NUMBER: builtins.int
+ jobKey: builtins.int
+ """the unique job identifier, as obtained when activating the job"""
+ retries: builtins.int
+ """the amount of retries the job should have left"""
+ errorMessage: builtins.str
+ """an optional message describing why the job failed
+ this is particularly useful if a job runs out of retries and an incident is raised,
+ as it this message can help explain why an incident was raised
+ """
+ retryBackOff: builtins.int
+ """the backoff timeout (in ms) for the next retry"""
+ variables: builtins.str
+ """JSON document that will instantiate the variables at the local scope of the
+ job's associated task; it must be a JSON object, as variables will be mapped in a
+    key-value fashion, e.g. { "a": 1, "b": 2 } will create two variables, named "a" and
+ "b" respectively, with their associated values. [{ "a": 1, "b": 2 }] would not be a
+ valid argument, as the root of the JSON document is an array and not an object.
+ """
+ def __init__(
+ self,
+ *,
+ jobKey: builtins.int = ...,
+ retries: builtins.int = ...,
+ errorMessage: builtins.str = ...,
+ retryBackOff: builtins.int = ...,
+ variables: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["errorMessage", b"errorMessage", "jobKey", b"jobKey", "retries", b"retries", "retryBackOff", b"retryBackOff", "variables", b"variables"]) -> None: ...
+
+global___FailJobRequest = FailJobRequest
+
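+# A minimal sketch: failing a job while decrementing its retries and applying a
+# backoff, so the broker re-activates it later; reaching 0 retries raises an
+# incident (assumes `job` came from a previous activation):
+#
+#     from pyzeebe.proto import gateway_pb2
+#
+#     request = gateway_pb2.FailJobRequest(
+#         jobKey=job.key,
+#         retries=job.retries - 1,
+#         errorMessage="payment gateway unreachable",
+#         retryBackOff=5_000,  # wait 5 s before the next activation
+#     )
+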
+@typing.final
+class FailJobResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ def __init__(
+ self,
+ ) -> None: ...
+
+global___FailJobResponse = FailJobResponse
+
+@typing.final
+class ThrowErrorRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ JOBKEY_FIELD_NUMBER: builtins.int
+ ERRORCODE_FIELD_NUMBER: builtins.int
+ ERRORMESSAGE_FIELD_NUMBER: builtins.int
+ VARIABLES_FIELD_NUMBER: builtins.int
+ jobKey: builtins.int
+ """the unique job identifier, as obtained when activating the job"""
+ errorCode: builtins.str
+ """the error code that will be matched with an error catch event"""
+ errorMessage: builtins.str
+ """an optional error message that provides additional context"""
+ variables: builtins.str
+ """JSON document that will instantiate the variables at the local scope of the
+ error catch event that catches the thrown error; it must be a JSON object, as variables will be mapped in a
+    key-value fashion, e.g. { "a": 1, "b": 2 } will create two variables, named "a" and
+ "b" respectively, with their associated values. [{ "a": 1, "b": 2 }] would not be a
+ valid argument, as the root of the JSON document is an array and not an object.
+ """
+ def __init__(
+ self,
+ *,
+ jobKey: builtins.int = ...,
+ errorCode: builtins.str = ...,
+ errorMessage: builtins.str = ...,
+ variables: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["errorCode", b"errorCode", "errorMessage", b"errorMessage", "jobKey", b"jobKey", "variables", b"variables"]) -> None: ...
+
+global___ThrowErrorRequest = ThrowErrorRequest
+
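+# A minimal sketch: raising a business error to be caught by a matching error
+# catch event (the error code is an assumption; assumes `job` was activated
+# earlier):
+#
+#     from pyzeebe.proto import gateway_pb2
+#
+#     request = gateway_pb2.ThrowErrorRequest(
+#         jobKey=job.key,
+#         errorCode="INSUFFICIENT_FUNDS",  # matched against error catch events
+#         errorMessage="balance below requested amount",
+#     )
+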
+@typing.final
+class ThrowErrorResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ def __init__(
+ self,
+ ) -> None: ...
+
+global___ThrowErrorResponse = ThrowErrorResponse
+
+@typing.final
+class PublishMessageRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ NAME_FIELD_NUMBER: builtins.int
+ CORRELATIONKEY_FIELD_NUMBER: builtins.int
+ TIMETOLIVE_FIELD_NUMBER: builtins.int
+ MESSAGEID_FIELD_NUMBER: builtins.int
+ VARIABLES_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ name: builtins.str
+ """the name of the message"""
+ correlationKey: builtins.str
+ """the correlation key of the message"""
+ timeToLive: builtins.int
+ """how long the message should be buffered on the broker, in milliseconds"""
+ messageId: builtins.str
+ """the unique ID of the message; can be omitted. only useful to ensure only one message
+ with the given ID will ever be published (during its lifetime)
+ """
+ variables: builtins.str
+ """the message variables as a JSON document; to be valid, the root of the document must be an
+ object, e.g. { "a": "foo" }. [ "foo" ] would not be valid.
+ """
+ tenantId: builtins.str
+ """the tenant id of the message"""
+ def __init__(
+ self,
+ *,
+ name: builtins.str = ...,
+ correlationKey: builtins.str = ...,
+ timeToLive: builtins.int = ...,
+ messageId: builtins.str = ...,
+ variables: builtins.str = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["correlationKey", b"correlationKey", "messageId", b"messageId", "name", b"name", "tenantId", b"tenantId", "timeToLive", b"timeToLive", "variables", b"variables"]) -> None: ...
+
+global___PublishMessageRequest = PublishMessageRequest
+
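+# A minimal sketch: publishing a message correlated by key, with a messageId so
+# repeated publishes are deduplicated while the message is buffered (all values
+# are assumptions):
+#
+#     import json
+#     from pyzeebe.proto import gateway_pb2
+#
+#     request = gateway_pb2.PublishMessageRequest(
+#         name="payment-received",
+#         correlationKey="order-42",        # matched against waiting instances
+#         timeToLive=60_000,                # buffer on the broker for 60 s
+#         messageId="payment-received-42",  # dedupe within the TTL
+#         variables=json.dumps({"amount": 10000}),
+#     )
+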
+@typing.final
+class PublishMessageResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ KEY_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ key: builtins.int
+ """the unique ID of the message that was published"""
+ tenantId: builtins.str
+ """the tenant id of the message"""
+ def __init__(
+ self,
+ *,
+ key: builtins.int = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["key", b"key", "tenantId", b"tenantId"]) -> None: ...
+
+global___PublishMessageResponse = PublishMessageResponse
+
+@typing.final
+class ResolveIncidentRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ INCIDENTKEY_FIELD_NUMBER: builtins.int
+ OPERATIONREFERENCE_FIELD_NUMBER: builtins.int
+ incidentKey: builtins.int
+ """the unique ID of the incident to resolve"""
+ operationReference: builtins.int
+ """a reference key chosen by the user and will be part of all records resulted from this operation"""
+ def __init__(
+ self,
+ *,
+ incidentKey: builtins.int = ...,
+ operationReference: builtins.int | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "incidentKey", b"incidentKey", "operationReference", b"operationReference"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ...
+
+global___ResolveIncidentRequest = ResolveIncidentRequest
+
+@typing.final
+class ResolveIncidentResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ def __init__(
+ self,
+ ) -> None: ...
+
+global___ResolveIncidentResponse = ResolveIncidentResponse
+
+@typing.final
+class TopologyRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ def __init__(
+ self,
+ ) -> None: ...
+
+global___TopologyRequest = TopologyRequest
+
+@typing.final
+class TopologyResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ BROKERS_FIELD_NUMBER: builtins.int
+ CLUSTERSIZE_FIELD_NUMBER: builtins.int
+ PARTITIONSCOUNT_FIELD_NUMBER: builtins.int
+ REPLICATIONFACTOR_FIELD_NUMBER: builtins.int
+ GATEWAYVERSION_FIELD_NUMBER: builtins.int
+ clusterSize: builtins.int
+ """how many nodes are in the cluster"""
+ partitionsCount: builtins.int
+ """how many partitions are spread across the cluster"""
+ replicationFactor: builtins.int
+ """configured replication factor for this cluster"""
+ gatewayVersion: builtins.str
+ """gateway version"""
+ @property
+ def brokers(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BrokerInfo]:
+ """list of brokers part of this cluster"""
+
+ def __init__(
+ self,
+ *,
+ brokers: collections.abc.Iterable[global___BrokerInfo] | None = ...,
+ clusterSize: builtins.int = ...,
+ partitionsCount: builtins.int = ...,
+ replicationFactor: builtins.int = ...,
+ gatewayVersion: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["brokers", b"brokers", "clusterSize", b"clusterSize", "gatewayVersion", b"gatewayVersion", "partitionsCount", b"partitionsCount", "replicationFactor", b"replicationFactor"]) -> None: ...
+
+global___TopologyResponse = TopologyResponse
+
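+# A minimal inspection sketch, assuming `topology` is a TopologyResponse
+# obtained from the Topology RPC:
+#
+#     print(f"cluster of {topology.clusterSize}, "
+#           f"{topology.partitionsCount} partitions, "
+#           f"replication factor {topology.replicationFactor}")
+#     for broker in topology.brokers:
+#         print(f"  {broker.host}:{broker.port} ({broker.version})")
+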
+@typing.final
+class BrokerInfo(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ NODEID_FIELD_NUMBER: builtins.int
+ HOST_FIELD_NUMBER: builtins.int
+ PORT_FIELD_NUMBER: builtins.int
+ PARTITIONS_FIELD_NUMBER: builtins.int
+ VERSION_FIELD_NUMBER: builtins.int
+ nodeId: builtins.int
+ """unique (within a cluster) node ID for the broker"""
+ host: builtins.str
+ """hostname of the broker"""
+ port: builtins.int
+ """port for the broker"""
+ version: builtins.str
+ """broker version"""
+ @property
+ def partitions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Partition]:
+ """list of partitions managed or replicated on this broker"""
+
+ def __init__(
+ self,
+ *,
+ nodeId: builtins.int = ...,
+ host: builtins.str = ...,
+ port: builtins.int = ...,
+ partitions: collections.abc.Iterable[global___Partition] | None = ...,
+ version: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["host", b"host", "nodeId", b"nodeId", "partitions", b"partitions", "port", b"port", "version", b"version"]) -> None: ...
+
+global___BrokerInfo = BrokerInfo
+
+@typing.final
+class Partition(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ class _PartitionBrokerRole:
+ ValueType = typing.NewType("ValueType", builtins.int)
+ V: typing_extensions.TypeAlias = ValueType
+
+ class _PartitionBrokerRoleEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Partition._PartitionBrokerRole.ValueType], builtins.type):
+ DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
+ LEADER: Partition._PartitionBrokerRole.ValueType # 0
+ FOLLOWER: Partition._PartitionBrokerRole.ValueType # 1
+ INACTIVE: Partition._PartitionBrokerRole.ValueType # 2
+
+ class PartitionBrokerRole(_PartitionBrokerRole, metaclass=_PartitionBrokerRoleEnumTypeWrapper):
+ """Describes the Raft role of the broker for a given partition"""
+
+ LEADER: Partition.PartitionBrokerRole.ValueType # 0
+ FOLLOWER: Partition.PartitionBrokerRole.ValueType # 1
+ INACTIVE: Partition.PartitionBrokerRole.ValueType # 2
+
+ class _PartitionBrokerHealth:
+ ValueType = typing.NewType("ValueType", builtins.int)
+ V: typing_extensions.TypeAlias = ValueType
+
+ class _PartitionBrokerHealthEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Partition._PartitionBrokerHealth.ValueType], builtins.type):
+ DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
+ HEALTHY: Partition._PartitionBrokerHealth.ValueType # 0
+ UNHEALTHY: Partition._PartitionBrokerHealth.ValueType # 1
+ DEAD: Partition._PartitionBrokerHealth.ValueType # 2
+
+ class PartitionBrokerHealth(_PartitionBrokerHealth, metaclass=_PartitionBrokerHealthEnumTypeWrapper):
+ """Describes the current health of the partition"""
+
+ HEALTHY: Partition.PartitionBrokerHealth.ValueType # 0
+ UNHEALTHY: Partition.PartitionBrokerHealth.ValueType # 1
+ DEAD: Partition.PartitionBrokerHealth.ValueType # 2
+
+ PARTITIONID_FIELD_NUMBER: builtins.int
+ ROLE_FIELD_NUMBER: builtins.int
+ HEALTH_FIELD_NUMBER: builtins.int
+ partitionId: builtins.int
+ """the unique ID of this partition"""
+ role: global___Partition.PartitionBrokerRole.ValueType
+ """the role of the broker for this partition"""
+ health: global___Partition.PartitionBrokerHealth.ValueType
+ """the health of this partition"""
+ def __init__(
+ self,
+ *,
+ partitionId: builtins.int = ...,
+ role: global___Partition.PartitionBrokerRole.ValueType = ...,
+ health: global___Partition.PartitionBrokerHealth.ValueType = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["health", b"health", "partitionId", b"partitionId", "role", b"role"]) -> None: ...
+
+global___Partition = Partition
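+
+# Illustrative sketch (not generated): walking a TopologyResponse. At runtime
+# the role/health fields are plain ints; the enum wrappers map them back to
+# names. Assumes `topology` is a TopologyResponse returned by the Topology RPC.
+#
+#     for broker in topology.brokers:
+#         for p in broker.partitions:
+#             print(broker.nodeId, p.partitionId,
+#                   Partition.PartitionBrokerRole.Name(p.role),
+#                   Partition.PartitionBrokerHealth.Name(p.health))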
+
+@typing.final
+class UpdateJobRetriesRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ JOBKEY_FIELD_NUMBER: builtins.int
+ RETRIES_FIELD_NUMBER: builtins.int
+ OPERATIONREFERENCE_FIELD_NUMBER: builtins.int
+ jobKey: builtins.int
+ """the unique job identifier, as obtained through ActivateJobs"""
+ retries: builtins.int
+ """the new amount of retries for the job; must be positive"""
+ operationReference: builtins.int
+ """a reference key chosen by the user and will be part of all records resulted from this operation"""
+ def __init__(
+ self,
+ *,
+ jobKey: builtins.int = ...,
+ retries: builtins.int = ...,
+ operationReference: builtins.int | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "jobKey", b"jobKey", "operationReference", b"operationReference", "retries", b"retries"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ...
+
+global___UpdateJobRetriesRequest = UpdateJobRetriesRequest
+
+@typing.final
+class UpdateJobRetriesResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ def __init__(
+ self,
+ ) -> None: ...
+
+global___UpdateJobRetriesResponse = UpdateJobRetriesResponse
+
+@typing.final
+class UpdateJobTimeoutRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ JOBKEY_FIELD_NUMBER: builtins.int
+ TIMEOUT_FIELD_NUMBER: builtins.int
+ OPERATIONREFERENCE_FIELD_NUMBER: builtins.int
+ jobKey: builtins.int
+ """the unique job identifier, as obtained from ActivateJobsResponse"""
+ timeout: builtins.int
+ """the duration of the new timeout in ms, starting from the current moment"""
+ operationReference: builtins.int
+ """a reference key chosen by the user and will be part of all records resulted from this operation"""
+ def __init__(
+ self,
+ *,
+ jobKey: builtins.int = ...,
+ timeout: builtins.int = ...,
+ operationReference: builtins.int | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "jobKey", b"jobKey", "operationReference", b"operationReference", "timeout", b"timeout"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ...
+
+global___UpdateJobTimeoutRequest = UpdateJobTimeoutRequest
+
+@typing.final
+class UpdateJobTimeoutResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ def __init__(
+ self,
+ ) -> None: ...
+
+global___UpdateJobTimeoutResponse = UpdateJobTimeoutResponse
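+
+# Illustrative sketch (not generated): the two job-update requests side by side.
+# Assumes the runtime module is imported as `gateway_pb2`; the job key is
+# hypothetical and the timeout is in milliseconds.
+#
+#     gateway_pb2.UpdateJobRetriesRequest(jobKey=123, retries=3)
+#     gateway_pb2.UpdateJobTimeoutRequest(jobKey=123, timeout=60_000)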
+
+@typing.final
+class SetVariablesRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ ELEMENTINSTANCEKEY_FIELD_NUMBER: builtins.int
+ VARIABLES_FIELD_NUMBER: builtins.int
+ LOCAL_FIELD_NUMBER: builtins.int
+ OPERATIONREFERENCE_FIELD_NUMBER: builtins.int
+ elementInstanceKey: builtins.int
+ """the unique identifier of a particular element; can be the process instance key (as
+ obtained during instance creation), or a given element, such as a service task (see
+ elementInstanceKey on the job message)
+ """
+ variables: builtins.str
+ """a JSON serialized document describing variables as key value pairs; the root of the document
+ must be an object
+ """
+ local: builtins.bool
+ """if true, the variables will be merged strictly into the local scope (as indicated by
+ elementInstanceKey); this means the variables are not propagated to upper scopes.
+ For example, say we have two scopes, '1' and '2', with effective variables
+ 1 => `{ "foo" : 2 }` and 2 => `{ "bar" : 1 }`. If we send an update request with
+ elementInstanceKey = 2, variables `{ "foo" : 5 }`, and local set to true, then scope 1 will
+ be unchanged, and scope 2 will now be `{ "bar" : 1, "foo" : 5 }`. If local were false, however,
+ then scope 1 would be `{ "foo" : 5 }`, and scope 2 would be `{ "bar" : 1 }`.
+ """
+ operationReference: builtins.int
+ """a reference key chosen by the user and will be part of all records resulted from this operation"""
+ def __init__(
+ self,
+ *,
+ elementInstanceKey: builtins.int = ...,
+ variables: builtins.str = ...,
+ local: builtins.bool = ...,
+ operationReference: builtins.int | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "elementInstanceKey", b"elementInstanceKey", "local", b"local", "operationReference", b"operationReference", "variables", b"variables"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ...
+
+global___SetVariablesRequest = SetVariablesRequest
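+
+# Illustrative sketch (not generated): the strictly-local update described
+# above. With local=True the variables stay in the scope of elementInstanceKey
+# instead of propagating upward. Assumes the runtime module is imported as
+# `gateway_pb2`; the key is hypothetical.
+#
+#     gateway_pb2.SetVariablesRequest(
+#         elementInstanceKey=2, variables='{ "foo" : 5 }', local=True)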
+
+@typing.final
+class SetVariablesResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ KEY_FIELD_NUMBER: builtins.int
+ key: builtins.int
+ """the unique key of the set variables command"""
+ def __init__(
+ self,
+ *,
+ key: builtins.int = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["key", b"key"]) -> None: ...
+
+global___SetVariablesResponse = SetVariablesResponse
+
+@typing.final
+class ModifyProcessInstanceRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ @typing.final
+ class ActivateInstruction(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ ELEMENTID_FIELD_NUMBER: builtins.int
+ ANCESTORELEMENTINSTANCEKEY_FIELD_NUMBER: builtins.int
+ VARIABLEINSTRUCTIONS_FIELD_NUMBER: builtins.int
+ elementId: builtins.str
+ """the id of the element that should be activated"""
+ ancestorElementInstanceKey: builtins.int
+ """the key of the ancestor scope the element instance should be created in;
+ set to -1 to create the new element instance within an existing element
+ instance of the flow scope
+ """
+ @property
+ def variableInstructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModifyProcessInstanceRequest.VariableInstruction]:
+ """instructions describing which variables should be created"""
+
+ def __init__(
+ self,
+ *,
+ elementId: builtins.str = ...,
+ ancestorElementInstanceKey: builtins.int = ...,
+ variableInstructions: collections.abc.Iterable[global___ModifyProcessInstanceRequest.VariableInstruction] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["ancestorElementInstanceKey", b"ancestorElementInstanceKey", "elementId", b"elementId", "variableInstructions", b"variableInstructions"]) -> None: ...
+
+ @typing.final
+ class VariableInstruction(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ VARIABLES_FIELD_NUMBER: builtins.int
+ SCOPEID_FIELD_NUMBER: builtins.int
+ variables: builtins.str
+ """JSON document that will instantiate the variables for the root variable scope of the
+ process instance; it must be a JSON object, as variables will be mapped in a
+ key-value fashion. e.g. { "a": 1, "b": 2 } will create two variables, named "a" and
+ "b" respectively, with their associated values. [{ "a": 1, "b": 2 }] would not be a
+ valid argument, as the root of the JSON document is an array and not an object.
+ """
+ scopeId: builtins.str
+ """the id of the element in which scope the variables should be created;
+ leave empty to create the variables in the global scope of the process instance
+ """
+ def __init__(
+ self,
+ *,
+ variables: builtins.str = ...,
+ scopeId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["scopeId", b"scopeId", "variables", b"variables"]) -> None: ...
+
+ @typing.final
+ class TerminateInstruction(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ ELEMENTINSTANCEKEY_FIELD_NUMBER: builtins.int
+ elementInstanceKey: builtins.int
+ """the id of the element that should be terminated"""
+ def __init__(
+ self,
+ *,
+ elementInstanceKey: builtins.int = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["elementInstanceKey", b"elementInstanceKey"]) -> None: ...
+
+ PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int
+ ACTIVATEINSTRUCTIONS_FIELD_NUMBER: builtins.int
+ TERMINATEINSTRUCTIONS_FIELD_NUMBER: builtins.int
+ OPERATIONREFERENCE_FIELD_NUMBER: builtins.int
+ processInstanceKey: builtins.int
+ """the key of the process instance that should be modified"""
+ operationReference: builtins.int
+ """a reference key chosen by the user and will be part of all records resulted from this operation"""
+ @property
+ def activateInstructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModifyProcessInstanceRequest.ActivateInstruction]:
+ """instructions describing which elements should be activated in which scopes,
+ and which variables should be created
+ """
+
+ @property
+ def terminateInstructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModifyProcessInstanceRequest.TerminateInstruction]:
+ """instructions describing which elements should be terminated"""
+
+ def __init__(
+ self,
+ *,
+ processInstanceKey: builtins.int = ...,
+ activateInstructions: collections.abc.Iterable[global___ModifyProcessInstanceRequest.ActivateInstruction] | None = ...,
+ terminateInstructions: collections.abc.Iterable[global___ModifyProcessInstanceRequest.TerminateInstruction] | None = ...,
+ operationReference: builtins.int | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "activateInstructions", b"activateInstructions", "operationReference", b"operationReference", "processInstanceKey", b"processInstanceKey", "terminateInstructions", b"terminateInstructions"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ...
+
+global___ModifyProcessInstanceRequest = ModifyProcessInstanceRequest
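+
+# Illustrative sketch (not generated): one modification that activates an
+# element (with a variable) and terminates another. Assumes the runtime module
+# is imported as `gateway_pb2`; all ids and keys are hypothetical.
+#
+#     gateway_pb2.ModifyProcessInstanceRequest(
+#         processInstanceKey=1001,
+#         activateInstructions=[
+#             gateway_pb2.ModifyProcessInstanceRequest.ActivateInstruction(
+#                 elementId="task_b",
+#                 ancestorElementInstanceKey=-1,  # reuse the existing flow scope
+#                 variableInstructions=[
+#                     gateway_pb2.ModifyProcessInstanceRequest.VariableInstruction(
+#                         variables='{ "a": 1 }', scopeId="")])],
+#         terminateInstructions=[
+#             gateway_pb2.ModifyProcessInstanceRequest.TerminateInstruction(
+#                 elementInstanceKey=1002)])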
+
+@typing.final
+class ModifyProcessInstanceResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ def __init__(
+ self,
+ ) -> None: ...
+
+global___ModifyProcessInstanceResponse = ModifyProcessInstanceResponse
+
+@typing.final
+class MigrateProcessInstanceRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ @typing.final
+ class MigrationPlan(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ TARGETPROCESSDEFINITIONKEY_FIELD_NUMBER: builtins.int
+ MAPPINGINSTRUCTIONS_FIELD_NUMBER: builtins.int
+ targetProcessDefinitionKey: builtins.int
+ """the key of process definition to migrate the process instance to"""
+ @property
+ def mappingInstructions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MigrateProcessInstanceRequest.MappingInstruction]:
+ """the mapping instructions describe how to map elements from the source process definition to the target process definition"""
+
+ def __init__(
+ self,
+ *,
+ targetProcessDefinitionKey: builtins.int = ...,
+ mappingInstructions: collections.abc.Iterable[global___MigrateProcessInstanceRequest.MappingInstruction] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["mappingInstructions", b"mappingInstructions", "targetProcessDefinitionKey", b"targetProcessDefinitionKey"]) -> None: ...
+
+ @typing.final
+ class MappingInstruction(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ SOURCEELEMENTID_FIELD_NUMBER: builtins.int
+ TARGETELEMENTID_FIELD_NUMBER: builtins.int
+ sourceElementId: builtins.str
+ """the element id to migrate from"""
+ targetElementId: builtins.str
+ """the element id to migrate into"""
+ def __init__(
+ self,
+ *,
+ sourceElementId: builtins.str = ...,
+ targetElementId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["sourceElementId", b"sourceElementId", "targetElementId", b"targetElementId"]) -> None: ...
+
+ PROCESSINSTANCEKEY_FIELD_NUMBER: builtins.int
+ MIGRATIONPLAN_FIELD_NUMBER: builtins.int
+ OPERATIONREFERENCE_FIELD_NUMBER: builtins.int
+ processInstanceKey: builtins.int
+ """key of the process instance to migrate"""
+ operationReference: builtins.int
+ """a reference key chosen by the user and will be part of all records resulted from this operation"""
+ @property
+ def migrationPlan(self) -> global___MigrateProcessInstanceRequest.MigrationPlan:
+ """the migration plan that defines target process and element mappings"""
+
+ def __init__(
+ self,
+ *,
+ processInstanceKey: builtins.int = ...,
+ migrationPlan: global___MigrateProcessInstanceRequest.MigrationPlan | None = ...,
+ operationReference: builtins.int | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "migrationPlan", b"migrationPlan", "operationReference", b"operationReference"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "migrationPlan", b"migrationPlan", "operationReference", b"operationReference", "processInstanceKey", b"processInstanceKey"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ...
+
+global___MigrateProcessInstanceRequest = MigrateProcessInstanceRequest
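+
+# Illustrative sketch (not generated): a migration plan mapping one active
+# element onto the target definition. Assumes the runtime module is imported as
+# `gateway_pb2`; keys and ids are hypothetical.
+#
+#     gateway_pb2.MigrateProcessInstanceRequest(
+#         processInstanceKey=1001,
+#         migrationPlan=gateway_pb2.MigrateProcessInstanceRequest.MigrationPlan(
+#             targetProcessDefinitionKey=2002,
+#             mappingInstructions=[
+#                 gateway_pb2.MigrateProcessInstanceRequest.MappingInstruction(
+#                     sourceElementId="task_a", targetElementId="task_a_v2")]))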
+
+@typing.final
+class MigrateProcessInstanceResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ def __init__(
+ self,
+ ) -> None: ...
+
+global___MigrateProcessInstanceResponse = MigrateProcessInstanceResponse
+
+@typing.final
+class DeleteResourceRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ RESOURCEKEY_FIELD_NUMBER: builtins.int
+ OPERATIONREFERENCE_FIELD_NUMBER: builtins.int
+ resourceKey: builtins.int
+ """The key of the resource that should be deleted. This can either be the key
+ of a process definition, the key of a decision requirements definition or the key of a form.
+ """
+ operationReference: builtins.int
+ """a reference key chosen by the user and will be part of all records resulted from this operation"""
+ def __init__(
+ self,
+ *,
+ resourceKey: builtins.int = ...,
+ operationReference: builtins.int | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["_operationReference", b"_operationReference", "operationReference", b"operationReference", "resourceKey", b"resourceKey"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing.Literal["_operationReference", b"_operationReference"]) -> typing.Literal["operationReference"] | None: ...
+
+global___DeleteResourceRequest = DeleteResourceRequest
+
+@typing.final
+class DeleteResourceResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ def __init__(
+ self,
+ ) -> None: ...
+
+global___DeleteResourceResponse = DeleteResourceResponse
+
+@typing.final
+class BroadcastSignalRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ SIGNALNAME_FIELD_NUMBER: builtins.int
+ VARIABLES_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ signalName: builtins.str
+ """The name of the signal"""
+ variables: builtins.str
+ """the signal variables as a JSON document; to be valid, the root of the document must be an
+ object, e.g. { "a": "foo" }. [ "foo" ] would not be valid.
+ """
+ tenantId: builtins.str
+ """the id of the tenant that owns the signal."""
+ def __init__(
+ self,
+ *,
+ signalName: builtins.str = ...,
+ variables: builtins.str = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["signalName", b"signalName", "tenantId", b"tenantId", "variables", b"variables"]) -> None: ...
+
+global___BroadcastSignalRequest = BroadcastSignalRequest
+
+@typing.final
+class BroadcastSignalResponse(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ KEY_FIELD_NUMBER: builtins.int
+ TENANTID_FIELD_NUMBER: builtins.int
+ key: builtins.int
+ """the unique ID of the signal that was broadcasted."""
+ tenantId: builtins.str
+ """the tenant id of the signal that was broadcasted."""
+ def __init__(
+ self,
+ *,
+ key: builtins.int = ...,
+ tenantId: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["key", b"key", "tenantId", b"tenantId"]) -> None: ...
+
+global___BroadcastSignalResponse = BroadcastSignalResponse
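+
+# Illustrative sketch (not generated): broadcasting a signal whose variables
+# are a JSON object, as required above. Assumes the runtime module is imported
+# as `gateway_pb2`.
+#
+#     gateway_pb2.BroadcastSignalRequest(
+#         signalName="order_cancelled", variables='{ "a": "foo" }')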
diff --git a/pyzeebe/proto/gateway_pb2_grpc.py b/pyzeebe/proto/gateway_pb2_grpc.py
new file mode 100644
index 00000000..6d126429
--- /dev/null
+++ b/pyzeebe/proto/gateway_pb2_grpc.py
@@ -0,0 +1,1187 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+import warnings
+
+from pyzeebe.proto import gateway_pb2 as pyzeebe_dot_proto_dot_gateway__pb2
+
+GRPC_GENERATED_VERSION = '1.66.1'
+GRPC_VERSION = grpc.__version__
+_version_not_supported = False
+
+try:
+ from grpc._utilities import first_version_is_lower
+ _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
+except ImportError:
+ _version_not_supported = True
+
+if _version_not_supported:
+ raise RuntimeError(
+ f'The grpc package installed is at version {GRPC_VERSION},'
+ + f' but the generated code in pyzeebe/proto/gateway_pb2_grpc.py depends on'
+ + f' grpcio>={GRPC_GENERATED_VERSION}.'
+ + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
+ + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
+ )
+
+
+class GatewayStub(object):
+ """Missing associated documentation comment in .proto file."""
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.ActivateJobs = channel.unary_stream(
+ '/gateway_protocol.Gateway/ActivateJobs',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsResponse.FromString,
+ _registered_method=True)
+ self.StreamActivatedJobs = channel.unary_stream(
+ '/gateway_protocol.Gateway/StreamActivatedJobs',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.StreamActivatedJobsRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ActivatedJob.FromString,
+ _registered_method=True)
+ self.CancelProcessInstance = channel.unary_unary(
+ '/gateway_protocol.Gateway/CancelProcessInstance',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CancelProcessInstanceRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CancelProcessInstanceResponse.FromString,
+ _registered_method=True)
+ self.CompleteJob = channel.unary_unary(
+ '/gateway_protocol.Gateway/CompleteJob',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CompleteJobRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CompleteJobResponse.FromString,
+ _registered_method=True)
+ self.CreateProcessInstance = channel.unary_unary(
+ '/gateway_protocol.Gateway/CreateProcessInstance',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceResponse.FromString,
+ _registered_method=True)
+ self.CreateProcessInstanceWithResult = channel.unary_unary(
+ '/gateway_protocol.Gateway/CreateProcessInstanceWithResult',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceWithResultRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceWithResultResponse.FromString,
+ _registered_method=True)
+ self.EvaluateDecision = channel.unary_unary(
+ '/gateway_protocol.Gateway/EvaluateDecision',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.EvaluateDecisionRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.EvaluateDecisionResponse.FromString,
+ _registered_method=True)
+ self.DeployProcess = channel.unary_unary(
+ '/gateway_protocol.Gateway/DeployProcess',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployProcessRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployProcessResponse.FromString,
+ _registered_method=True)
+ self.DeployResource = channel.unary_unary(
+ '/gateway_protocol.Gateway/DeployResource',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployResourceRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployResourceResponse.FromString,
+ _registered_method=True)
+ self.FailJob = channel.unary_unary(
+ '/gateway_protocol.Gateway/FailJob',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.FailJobRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.FailJobResponse.FromString,
+ _registered_method=True)
+ self.ThrowError = channel.unary_unary(
+ '/gateway_protocol.Gateway/ThrowError',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ThrowErrorRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ThrowErrorResponse.FromString,
+ _registered_method=True)
+ self.PublishMessage = channel.unary_unary(
+ '/gateway_protocol.Gateway/PublishMessage',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.PublishMessageRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.PublishMessageResponse.FromString,
+ _registered_method=True)
+ self.ResolveIncident = channel.unary_unary(
+ '/gateway_protocol.Gateway/ResolveIncident',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ResolveIncidentRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ResolveIncidentResponse.FromString,
+ _registered_method=True)
+ self.SetVariables = channel.unary_unary(
+ '/gateway_protocol.Gateway/SetVariables',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.SetVariablesRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.SetVariablesResponse.FromString,
+ _registered_method=True)
+ self.Topology = channel.unary_unary(
+ '/gateway_protocol.Gateway/Topology',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.TopologyRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.TopologyResponse.FromString,
+ _registered_method=True)
+ self.UpdateJobRetries = channel.unary_unary(
+ '/gateway_protocol.Gateway/UpdateJobRetries',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobRetriesRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobRetriesResponse.FromString,
+ _registered_method=True)
+ self.ModifyProcessInstance = channel.unary_unary(
+ '/gateway_protocol.Gateway/ModifyProcessInstance',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ModifyProcessInstanceRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ModifyProcessInstanceResponse.FromString,
+ _registered_method=True)
+ self.MigrateProcessInstance = channel.unary_unary(
+ '/gateway_protocol.Gateway/MigrateProcessInstance',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.MigrateProcessInstanceRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.MigrateProcessInstanceResponse.FromString,
+ _registered_method=True)
+ self.UpdateJobTimeout = channel.unary_unary(
+ '/gateway_protocol.Gateway/UpdateJobTimeout',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobTimeoutRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobTimeoutResponse.FromString,
+ _registered_method=True)
+ self.DeleteResource = channel.unary_unary(
+ '/gateway_protocol.Gateway/DeleteResource',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.DeleteResourceRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.DeleteResourceResponse.FromString,
+ _registered_method=True)
+ self.BroadcastSignal = channel.unary_unary(
+ '/gateway_protocol.Gateway/BroadcastSignal',
+ request_serializer=pyzeebe_dot_proto_dot_gateway__pb2.BroadcastSignalRequest.SerializeToString,
+ response_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.BroadcastSignalResponse.FromString,
+ _registered_method=True)
+
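+# Illustrative sketch (not generated): a minimal synchronous client. Assumes a
+# Zeebe gateway reachable at localhost:26500 over an insecure channel.
+#
+#     import grpc
+#     from pyzeebe.proto import gateway_pb2, gateway_pb2_grpc
+#
+#     with grpc.insecure_channel("localhost:26500") as channel:
+#         stub = gateway_pb2_grpc.GatewayStub(channel)
+#         topology = stub.Topology(gateway_pb2.TopologyRequest())
+#         print(topology.gatewayVersion)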
+
+class GatewayServicer(object):
+ """Missing associated documentation comment in .proto file."""
+
+ def ActivateJobs(self, request, context):
+ """
+ Iterates through all known partitions round-robin and activates up to the requested
+ maximum number of jobs, streaming them back to the client as they are activated.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - type is blank (empty string, null)
+ - worker is blank (empty string, null)
+ - timeout less than 1
+ - maxJobsToActivate is less than 1
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def StreamActivatedJobs(self, request, context):
+ """
+ Registers client to a job stream that will stream jobs back to the client as
+ they become activatable.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - type is blank (empty string, null)
+ - timeout less than 1
+ - If multi-tenancy is enabled, and tenantIds is empty (empty list)
+ - If multi-tenancy is enabled, and an invalid tenant ID is provided. A tenant ID is considered invalid if:
+ - The tenant ID is blank (empty string, null)
+ - The tenant ID is longer than 31 characters
+ - The tenant ID contains anything other than alphanumeric characters, dot (.), dash (-), or underscore (_)
+ - If multi-tenancy is disabled, and tenantIds is not empty (empty list), or has an ID other than the default tenant ID
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def CancelProcessInstance(self, request, context):
+ """
+ Cancels a running process instance
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def CompleteJob(self, request, context):
+ """
+ Completes a job with the given variables, which allows completing the associated service task.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given job key. Note that since jobs are removed once completed,
+ it could be that this job did exist at some point.
+
+ FAILED_PRECONDITION:
+ - the job was marked as failed. In that case, the related incident must be resolved before
+ the job can be activated again and completed.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def CreateProcessInstance(self, request, context):
+ """
+ Creates and starts an instance of the specified process. The process definition to use to
+ create the instance can be specified either using its unique key (as returned by
+ DeployProcess), or using the BPMN process ID and a version. Pass -1 as the version to use the
+ latest deployed version. Note that only processes with none start events can be started through
+ this command.
+
+ Errors:
+ NOT_FOUND:
+ - no process with the given key exists (if processDefinitionKey was given)
+ - no process with the given process ID exists (if bpmnProcessId was given but version was -1)
+ - no process with the given process ID and version exists (if both bpmnProcessId and version were given)
+
+ FAILED_PRECONDITION:
+ - the process definition does not contain a none start event; only processes with none
+ start event can be started manually.
+
+ INVALID_ARGUMENT:
+ - the given variables argument is not a valid JSON document; it is expected to be a valid
+ JSON document where the root node is an object.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def CreateProcessInstanceWithResult(self, request, context):
+ """
+ Behaves similarly to `rpc CreateProcessInstance`, except that a successful response is received when the process completes successfully.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def EvaluateDecision(self, request, context):
+ """
+ Evaluates a decision. The decision to evaluate can be specified either by
+ using its unique key (as returned by DeployResource), or using the decision
+ ID. When using the decision ID, the latest deployed version of the decision
+ is used.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - no decision with the given key exists (if decisionKey was given)
+ - no decision with the given decision ID exists (if decisionId was given)
+ - both the decision ID and decision key were provided, or both are missing
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def DeployProcess(self, request, context):
+ """
+ Deploys one or more processes to Zeebe. Note that this is an atomic call,
+ i.e. either all processes are deployed, or none of them are.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - no resources given.
+ - if at least one resource is invalid. A resource is considered invalid if:
+ - the resource data is not deserializable (e.g. detected as BPMN, but it's broken XML)
+ - the process is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task)
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def DeployResource(self, request, context):
+ """
+ Deploys one or more resources (e.g. processes or decision models) to Zeebe.
+ Note that this is an atomic call, i.e. either all resources are deployed, or none of them are.
+
+ Errors:
+ PERMISSION_DENIED:
+ - if a deployment to an unauthorized tenant is performed
+ INVALID_ARGUMENT:
+ - no resources given.
+ - if at least one resource is invalid. A resource is considered invalid if:
+ - the content is not deserializable (e.g. detected as BPMN, but it's broken XML)
+ - the content is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task)
+ - if multi-tenancy is enabled, and:
+ - a tenant id is not provided
+ - a tenant id with an invalid format is provided
+ - if multi-tenancy is disabled and a tenant id is provided
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def FailJob(self, request, context):
+ """
+ Marks the job as failed; if the retries argument is positive, then the job will be immediately
+ activatable again, and a worker could try again to process it. If it is zero or negative however,
+ an incident will be raised, tagged with the given errorMessage, and the job will not be
+ activatable until the incident is resolved.
+
+ Errors:
+ NOT_FOUND:
+ - no job was found with the given key
+
+ FAILED_PRECONDITION:
+ - the job was not activated
+ - the job is already in a failed state, i.e. ran out of retries
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def ThrowError(self, request, context):
+ """
+ Reports a business error (i.e. non-technical) that occurs while processing a job. The error is handled in the process by an error catch event. If there is no error catch event with the specified errorCode then an incident will be raised instead.
+
+ Errors:
+ NOT_FOUND:
+ - no job was found with the given key
+
+ FAILED_PRECONDITION:
+ - the job is not in an activated state
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def PublishMessage(self, request, context):
+ """
+ Publishes a single message. Messages are published to specific partitions computed from their
+ correlation keys.
+
+ Errors:
+ ALREADY_EXISTS:
+ - a message with the same ID was previously published (and is still alive)
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def ResolveIncident(self, request, context):
+ """
+ Resolves a given incident. This simply marks the incident as resolved; most likely a call to
+ UpdateJobRetries or SetVariables will be necessary to actually resolve the
+ problem, followed by this call.
+
+ Errors:
+ NOT_FOUND:
+ - no incident with the given key exists
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def SetVariables(self, request, context):
+ """
+ Updates all the variables of a particular scope (e.g. process instance, flow element instance)
+ from the given JSON document.
+
+ Errors:
+ NOT_FOUND:
+ - no element with the given elementInstanceKey exists
+ INVALID_ARGUMENT:
+ - the given variables document is not a valid JSON document; valid documents are expected to
+ be JSON documents where the root node is an object.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def Topology(self, request, context):
+ """
+ Obtains the current topology of the cluster the gateway is part of.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def UpdateJobRetries(self, request, context):
+ """
+ Updates the number of retries a job has left. This is mostly useful for jobs that have run out of
+ retries, should the underlying problem be solved.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given key
+
+ INVALID_ARGUMENT:
+ - retries is not greater than 0
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def ModifyProcessInstance(self, request, context):
+ """
+ Modifies the process instance. This is done by activating and/or terminating specific elements of the instance.
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key
+
+ FAILED_PRECONDITION:
+ - trying to activate an element inside a multi-instance
+
+ INVALID_ARGUMENT:
+ - activating or terminating an unknown element
+ - the ancestor of an element to activate doesn't exist
+ - the scope of a variable is unknown
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def MigrateProcessInstance(self, request, context):
+ """
+ Migrates the process instance to the specified process definition.
+ In simple terms, this is handled by updating the active element's process.
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key, or it is not active
+ - no process definition exists with the given target definition key
+ - no process instance exists with the given key for the tenants the user is authorized to work with.
+
+ FAILED_PRECONDITION:
+ - not all active elements in the given process instance are mapped to the elements in the target process definition
+ - a mapping instruction changes the type of an element or event
+ - a mapping instruction changes the implementation of a task
+ - a mapping instruction detaches a boundary event from an active element
+ - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on)
+ - a mapping instruction refers to an element in unsupported scenarios.
+ (i.e. migration is not supported when the process instance or target process elements contain event subscriptions)
+ - multiple mapping instructions target the same boundary event
+
+ INVALID_ARGUMENT:
+ - A `sourceElementId` does not refer to an element in the process instance's process definition
+ - A `targetElementId` does not refer to an element in the target process definition
+ - A `sourceElementId` is mapped by multiple mapping instructions.
+ For example, the engine cannot determine how to migrate a process instance when the instructions are: [A->B, A->C].
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def UpdateJobTimeout(self, request, context):
+ """
+ Updates the deadline of a job using the timeout (in ms) provided. This can be used
+ for extending or shortening the job deadline.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given key
+
+ INVALID_STATE:
+ - no deadline exists for the given job key
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def DeleteResource(self, request, context):
+ """
+ Deletes a resource from the state. Once a resource has been deleted it cannot
+ be recovered. If the resource needs to be available again, a new deployment
+ of the resource is required.
+
+ Deleting a process will cancel any running instances of this process
+ definition. New instances of a deleted process are created using
+ the latest version that hasn't been deleted. Creating a new
+ process instance is impossible when all versions have been
+ deleted.
+
+ Deleting a decision requirements definition could cause incidents in process
+ instances referencing these decisions in a business rule task. A decision
+ will be evaluated with the latest version that hasn't been deleted. If all
+ versions of a decision have been deleted the evaluation is rejected.
+
+ Errors:
+ NOT_FOUND:
+ - No resource exists with the given key
+
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def BroadcastSignal(self, request, context):
+ """
+ Broadcasts a signal.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+
+def add_GatewayServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ 'ActivateJobs': grpc.unary_stream_rpc_method_handler(
+ servicer.ActivateJobs,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsResponse.SerializeToString,
+ ),
+ 'StreamActivatedJobs': grpc.unary_stream_rpc_method_handler(
+ servicer.StreamActivatedJobs,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.StreamActivatedJobsRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ActivatedJob.SerializeToString,
+ ),
+ 'CancelProcessInstance': grpc.unary_unary_rpc_method_handler(
+ servicer.CancelProcessInstance,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CancelProcessInstanceRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CancelProcessInstanceResponse.SerializeToString,
+ ),
+ 'CompleteJob': grpc.unary_unary_rpc_method_handler(
+ servicer.CompleteJob,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CompleteJobRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CompleteJobResponse.SerializeToString,
+ ),
+ 'CreateProcessInstance': grpc.unary_unary_rpc_method_handler(
+ servicer.CreateProcessInstance,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceResponse.SerializeToString,
+ ),
+ 'CreateProcessInstanceWithResult': grpc.unary_unary_rpc_method_handler(
+ servicer.CreateProcessInstanceWithResult,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceWithResultRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceWithResultResponse.SerializeToString,
+ ),
+ 'EvaluateDecision': grpc.unary_unary_rpc_method_handler(
+ servicer.EvaluateDecision,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.EvaluateDecisionRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.EvaluateDecisionResponse.SerializeToString,
+ ),
+ 'DeployProcess': grpc.unary_unary_rpc_method_handler(
+ servicer.DeployProcess,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployProcessRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployProcessResponse.SerializeToString,
+ ),
+ 'DeployResource': grpc.unary_unary_rpc_method_handler(
+ servicer.DeployResource,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployResourceRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.DeployResourceResponse.SerializeToString,
+ ),
+ 'FailJob': grpc.unary_unary_rpc_method_handler(
+ servicer.FailJob,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.FailJobRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.FailJobResponse.SerializeToString,
+ ),
+ 'ThrowError': grpc.unary_unary_rpc_method_handler(
+ servicer.ThrowError,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ThrowErrorRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ThrowErrorResponse.SerializeToString,
+ ),
+ 'PublishMessage': grpc.unary_unary_rpc_method_handler(
+ servicer.PublishMessage,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.PublishMessageRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.PublishMessageResponse.SerializeToString,
+ ),
+ 'ResolveIncident': grpc.unary_unary_rpc_method_handler(
+ servicer.ResolveIncident,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ResolveIncidentRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ResolveIncidentResponse.SerializeToString,
+ ),
+ 'SetVariables': grpc.unary_unary_rpc_method_handler(
+ servicer.SetVariables,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.SetVariablesRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.SetVariablesResponse.SerializeToString,
+ ),
+ 'Topology': grpc.unary_unary_rpc_method_handler(
+ servicer.Topology,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.TopologyRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.TopologyResponse.SerializeToString,
+ ),
+ 'UpdateJobRetries': grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateJobRetries,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobRetriesRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobRetriesResponse.SerializeToString,
+ ),
+ 'ModifyProcessInstance': grpc.unary_unary_rpc_method_handler(
+ servicer.ModifyProcessInstance,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.ModifyProcessInstanceRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.ModifyProcessInstanceResponse.SerializeToString,
+ ),
+ 'MigrateProcessInstance': grpc.unary_unary_rpc_method_handler(
+ servicer.MigrateProcessInstance,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.MigrateProcessInstanceRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.MigrateProcessInstanceResponse.SerializeToString,
+ ),
+ 'UpdateJobTimeout': grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateJobTimeout,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobTimeoutRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobTimeoutResponse.SerializeToString,
+ ),
+ 'DeleteResource': grpc.unary_unary_rpc_method_handler(
+ servicer.DeleteResource,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.DeleteResourceRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.DeleteResourceResponse.SerializeToString,
+ ),
+ 'BroadcastSignal': grpc.unary_unary_rpc_method_handler(
+ servicer.BroadcastSignal,
+ request_deserializer=pyzeebe_dot_proto_dot_gateway__pb2.BroadcastSignalRequest.FromString,
+ response_serializer=pyzeebe_dot_proto_dot_gateway__pb2.BroadcastSignalResponse.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'gateway_protocol.Gateway', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+ server.add_registered_method_handlers('gateway_protocol.Gateway', rpc_method_handlers)
+
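+# Illustrative sketch (not generated): serving a partial implementation. Only
+# Topology is overridden; every other RPC keeps the UNIMPLEMENTED default above.
+#
+#     from concurrent import futures
+#
+#     class MyGateway(GatewayServicer):
+#         def Topology(self, request, context):
+#             return pyzeebe_dot_proto_dot_gateway__pb2.TopologyResponse(clusterSize=1)
+#
+#     server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
+#     add_GatewayServicer_to_server(MyGateway(), server)
+#     server.add_insecure_port("[::]:26500")
+#     server.start()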
+
+ # This class is part of an EXPERIMENTAL API.
+class Gateway(object):
+ """Missing associated documentation comment in .proto file."""
+
+ @staticmethod
+ def ActivateJobs(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_stream(
+ request,
+ target,
+ '/gateway_protocol.Gateway/ActivateJobs',
+ pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
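+ # Illustrative sketch (not generated): the experimental one-shot form creates
+ # a channel per call, so there is no stub to manage; insecure=True skips
+ # channel credentials. The target and request values below are hypothetical.
+ #
+ #     jobs = Gateway.ActivateJobs(
+ #         pyzeebe_dot_proto_dot_gateway__pb2.ActivateJobsRequest(
+ #             type="payment", worker="w1", timeout=30000, maxJobsToActivate=8),
+ #         target="localhost:26500", insecure=True)
+ #     for response in jobs:
+ #         ...  # each response carries a batch of activated jobs
+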
+ @staticmethod
+ def StreamActivatedJobs(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_stream(
+ request,
+ target,
+ '/gateway_protocol.Gateway/StreamActivatedJobs',
+ pyzeebe_dot_proto_dot_gateway__pb2.StreamActivatedJobsRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.ActivatedJob.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def CancelProcessInstance(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/CancelProcessInstance',
+ pyzeebe_dot_proto_dot_gateway__pb2.CancelProcessInstanceRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.CancelProcessInstanceResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def CompleteJob(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/CompleteJob',
+ pyzeebe_dot_proto_dot_gateway__pb2.CompleteJobRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.CompleteJobResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def CreateProcessInstance(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/CreateProcessInstance',
+ pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def CreateProcessInstanceWithResult(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/CreateProcessInstanceWithResult',
+ pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceWithResultRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.CreateProcessInstanceWithResultResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def EvaluateDecision(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/EvaluateDecision',
+ pyzeebe_dot_proto_dot_gateway__pb2.EvaluateDecisionRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.EvaluateDecisionResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def DeployProcess(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/DeployProcess',
+ pyzeebe_dot_proto_dot_gateway__pb2.DeployProcessRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.DeployProcessResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def DeployResource(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/DeployResource',
+ pyzeebe_dot_proto_dot_gateway__pb2.DeployResourceRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.DeployResourceResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def FailJob(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/FailJob',
+ pyzeebe_dot_proto_dot_gateway__pb2.FailJobRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.FailJobResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def ThrowError(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/ThrowError',
+ pyzeebe_dot_proto_dot_gateway__pb2.ThrowErrorRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.ThrowErrorResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def PublishMessage(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/PublishMessage',
+ pyzeebe_dot_proto_dot_gateway__pb2.PublishMessageRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.PublishMessageResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def ResolveIncident(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/ResolveIncident',
+ pyzeebe_dot_proto_dot_gateway__pb2.ResolveIncidentRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.ResolveIncidentResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def SetVariables(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/SetVariables',
+ pyzeebe_dot_proto_dot_gateway__pb2.SetVariablesRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.SetVariablesResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def Topology(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/Topology',
+ pyzeebe_dot_proto_dot_gateway__pb2.TopologyRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.TopologyResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def UpdateJobRetries(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/UpdateJobRetries',
+ pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobRetriesRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobRetriesResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def ModifyProcessInstance(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/ModifyProcessInstance',
+ pyzeebe_dot_proto_dot_gateway__pb2.ModifyProcessInstanceRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.ModifyProcessInstanceResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def MigrateProcessInstance(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/MigrateProcessInstance',
+ pyzeebe_dot_proto_dot_gateway__pb2.MigrateProcessInstanceRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.MigrateProcessInstanceResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def UpdateJobTimeout(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/UpdateJobTimeout',
+ pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobTimeoutRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.UpdateJobTimeoutResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def DeleteResource(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/DeleteResource',
+ pyzeebe_dot_proto_dot_gateway__pb2.DeleteResourceRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.DeleteResourceResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def BroadcastSignal(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/gateway_protocol.Gateway/BroadcastSignal',
+ pyzeebe_dot_proto_dot_gateway__pb2.BroadcastSignalRequest.SerializeToString,
+ pyzeebe_dot_proto_dot_gateway__pb2.BroadcastSignalResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
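(Not part of the patch: a minimal usage sketch for the regenerated convenience
methods above. It assumes the enclosing generated class follows grpc's usual
service naming, i.e. `Gateway`, and a Zeebe gateway reachable on the default
localhost:26500 with TLS disabled; `insecure` and `timeout` map directly onto
the parameters visible in the generated signatures.)

    from pyzeebe.proto import gateway_pb2, gateway_pb2_grpc

    # One-shot unary RPC without manually managing a channel. grpc caches a
    # channel per target internally and may emit an ExperimentalApiWarning,
    # since these static methods are backed by grpc.experimental.unary_unary.
    topology = gateway_pb2_grpc.Gateway.Topology(
        gateway_pb2.TopologyRequest(),
        "localhost:26500",
        insecure=True,  # plaintext channel; pass channel_credentials=... for TLS
        timeout=10,     # seconds until the RPC is cancelled
    )
    print(topology.gatewayVersion)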
diff --git a/pyzeebe/proto/gateway_pb2_grpc.pyi b/pyzeebe/proto/gateway_pb2_grpc.pyi
new file mode 100644
index 00000000..df1f1074
--- /dev/null
+++ b/pyzeebe/proto/gateway_pb2_grpc.pyi
@@ -0,0 +1,1131 @@
+"""
+@generated by mypy-protobuf. Do not edit manually!
+isort:skip_file
+"""
+
+import abc
+import collections.abc
+import grpc
+import grpc.aio
+import pyzeebe.proto.gateway_pb2
+import typing
+
+_T = typing.TypeVar("_T")
+
+class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ...
+
+class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg]
+ ...
+
+class GatewayStub:
+ def __init__(self, channel: typing.Union[grpc.Channel, grpc.aio.Channel]) -> None: ...
+ ActivateJobs: grpc.UnaryStreamMultiCallable[
+ pyzeebe.proto.gateway_pb2.ActivateJobsRequest,
+ pyzeebe.proto.gateway_pb2.ActivateJobsResponse,
+ ]
+ """
+ Iterates through all known partitions round-robin and activates up to the requested
+ maximum number of jobs and streams them back to the client as they are activated.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - type is blank (empty string, null)
+ - worker is blank (empty string, null)
+ - timeout less than 1
+ - maxJobsToActivate is less than 1
+ """
+
+ StreamActivatedJobs: grpc.UnaryStreamMultiCallable[
+ pyzeebe.proto.gateway_pb2.StreamActivatedJobsRequest,
+ pyzeebe.proto.gateway_pb2.ActivatedJob,
+ ]
+ """
+ Registers a client to a job stream that will stream jobs back to the client as
+ they become activatable.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - type is blank (empty string, null)
+ - timeout less than 1
+ - If multi-tenancy is enabled, and tenantIds is empty (empty list)
+ - If multi-tenancy is enabled, and an invalid tenant ID is provided. A tenant ID is considered invalid if:
+ - The tenant ID is blank (empty string, null)
+ - The tenant ID is longer than 31 characters
+ - The tenant ID contains anything other than alphanumeric characters, dot (.), dash (-), or underscore (_)
+ - If multi-tenancy is disabled, and tenantIds is not empty (empty list), or has an ID other than <default>
+ """
+
+ CancelProcessInstance: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.CancelProcessInstanceRequest,
+ pyzeebe.proto.gateway_pb2.CancelProcessInstanceResponse,
+ ]
+ """
+ Cancels a running process instance
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key
+ """
+
+ CompleteJob: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.CompleteJobRequest,
+ pyzeebe.proto.gateway_pb2.CompleteJobResponse,
+ ]
+ """
+ Completes a job with the given variables, which allows completing the associated service task.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given job key. Note that since jobs are removed once completed,
+ it could be that this job did exist at some point.
+
+ FAILED_PRECONDITION:
+ - the job was marked as failed. In that case, the related incident must be resolved before
+ the job can be activated again and completed.
+ """
+
+ CreateProcessInstance: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.CreateProcessInstanceRequest,
+ pyzeebe.proto.gateway_pb2.CreateProcessInstanceResponse,
+ ]
+ """
+ Creates and starts an instance of the specified process. The process definition to use to
+ create the instance can be specified either using its unique key (as returned by
+ DeployProcess), or using the BPMN process ID and a version. Pass -1 as the version to use the
+ latest deployed version. Note that only processes with none start events can be started through
+ this command.
+
+ Errors:
+ NOT_FOUND:
+ - no process with the given key exists (if processDefinitionKey was given)
+ - no process with the given process ID exists (if bpmnProcessId was given but version was -1)
+ - no process with the given process ID and version exists (if both bpmnProcessId and version were given)
+
+ FAILED_PRECONDITION:
+ - the process definition does not contain a none start event; only processes with none
+ start event can be started manually.
+
+ INVALID_ARGUMENT:
+ - the given variables argument is not a valid JSON document; it is expected to be a valid
+ JSON document where the root node is an object.
+ """
+
+ CreateProcessInstanceWithResult: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultRequest,
+ pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultResponse,
+ ]
+ """
+ Behaves similarly to `rpc CreateProcessInstance`, except that a successful response is received when the process completes successfully.
+ """
+
+ EvaluateDecision: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.EvaluateDecisionRequest,
+ pyzeebe.proto.gateway_pb2.EvaluateDecisionResponse,
+ ]
+ """
+ Evaluates a decision. The decision to evaluate can be specified either by
+ using its unique key (as returned by DeployResource), or using the decision
+ ID. When using the decision ID, the latest deployed version of the decision
+ is used.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - no decision with the given key exists (if decisionKey was given)
+ - no decision with the given decision ID exists (if decisionId was given)
+ - both decision ID and decision KEY were provided, or are missing
+ """
+
+ DeployProcess: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.DeployProcessRequest,
+ pyzeebe.proto.gateway_pb2.DeployProcessResponse,
+ ]
+ """
+ Deploys one or more processes to Zeebe. Note that this is an atomic call,
+ i.e. either all processes are deployed, or none of them are.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - no resources given.
+ - if at least one resource is invalid. A resource is considered invalid if:
+ - the resource data is not deserializable (e.g. detected as BPMN, but it's broken XML)
+ - the process is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task)
+ """
+
+ DeployResource: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.DeployResourceRequest,
+ pyzeebe.proto.gateway_pb2.DeployResourceResponse,
+ ]
+ """
+ Deploys one or more resources (e.g. processes or decision models) to Zeebe.
+ Note that this is an atomic call, i.e. either all resources are deployed, or none of them are.
+
+ Errors:
+ PERMISSION_DENIED:
+ - if a deployment to an unauthorized tenant is performed
+ INVALID_ARGUMENT:
+ - no resources given.
+ - if at least one resource is invalid. A resource is considered invalid if:
+ - the content is not deserializable (e.g. detected as BPMN, but it's broken XML)
+ - the content is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task)
+ - if multi-tenancy is enabled, and:
+ - a tenant id is not provided
+ - a tenant id with an invalid format is provided
+ - if multi-tenancy is disabled and a tenant id is provided
+ """
+
+ FailJob: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.FailJobRequest,
+ pyzeebe.proto.gateway_pb2.FailJobResponse,
+ ]
+ """
+ Marks the job as failed; if the retries argument is positive, then the job will be immediately
+ activatable again, and a worker could try again to process it. If it is zero or negative however,
+ an incident will be raised, tagged with the given errorMessage, and the job will not be
+ activatable until the incident is resolved.
+
+ Errors:
+ NOT_FOUND:
+ - no job was found with the given key
+
+ FAILED_PRECONDITION:
+ - the job was not activated
+ - the job is already in a failed state, i.e. ran out of retries
+ """
+
+ ThrowError: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.ThrowErrorRequest,
+ pyzeebe.proto.gateway_pb2.ThrowErrorResponse,
+ ]
+ """
+ Reports a business error (i.e. non-technical) that occurs while processing a job. The error is handled in the process by an error catch event. If there is no error catch event with the specified errorCode then an incident will be raised instead.
+
+ Errors:
+ NOT_FOUND:
+ - no job was found with the given key
+
+ FAILED_PRECONDITION:
+ - the job is not in an activated state
+ """
+
+ PublishMessage: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.PublishMessageRequest,
+ pyzeebe.proto.gateway_pb2.PublishMessageResponse,
+ ]
+ """
+ Publishes a single message. Messages are published to specific partitions computed from their
+ correlation keys.
+
+ Errors:
+ ALREADY_EXISTS:
+ - a message with the same ID was previously published (and is still alive)
+ """
+
+ ResolveIncident: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.ResolveIncidentRequest,
+ pyzeebe.proto.gateway_pb2.ResolveIncidentResponse,
+ ]
+ """
+ Resolves a given incident. This simply marks the incident as resolved; most likely a call to
+ UpdateJobRetries or SetVariables will be necessary to actually resolve the
+ problem, followed by this call.
+
+ Errors:
+ NOT_FOUND:
+ - no incident with the given key exists
+ """
+
+ SetVariables: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.SetVariablesRequest,
+ pyzeebe.proto.gateway_pb2.SetVariablesResponse,
+ ]
+ """
+ Updates all the variables of a particular scope (e.g. process instance, flow element instance)
+ from the given JSON document.
+
+ Errors:
+ NOT_FOUND:
+ - no element with the given elementInstanceKey exists
+ INVALID_ARGUMENT:
+ - the given variables document is not a valid JSON document; valid documents are expected to
+ be JSON documents where the root node is an object.
+ """
+
+ Topology: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.TopologyRequest,
+ pyzeebe.proto.gateway_pb2.TopologyResponse,
+ ]
+ """
+ Obtains the current topology of the cluster the gateway is part of.
+ """
+
+ UpdateJobRetries: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.UpdateJobRetriesRequest,
+ pyzeebe.proto.gateway_pb2.UpdateJobRetriesResponse,
+ ]
+ """
+ Updates the number of retries a job has left. This is mostly useful for jobs that have run out of
+ retries, should the underlying problem be solved.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given key
+
+ INVALID_ARGUMENT:
+ - retries is not greater than 0
+ """
+
+ ModifyProcessInstance: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.ModifyProcessInstanceRequest,
+ pyzeebe.proto.gateway_pb2.ModifyProcessInstanceResponse,
+ ]
+ """
+ Modifies the process instance. This is done by activating and/or terminating specific elements of the instance.
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key
+
+ FAILED_PRECONDITION:
+ - trying to activate element inside of a multi-instance
+
+ INVALID_ARGUMENT:
+ - activating or terminating unknown element
+ - ancestor of element for activation doesn't exist
+ - scope of variable is unknown
+ """
+
+ MigrateProcessInstance: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.MigrateProcessInstanceRequest,
+ pyzeebe.proto.gateway_pb2.MigrateProcessInstanceResponse,
+ ]
+ """
+ Migrates the process instance to the specified process definition.
+ In simple terms, this is handled by updating the active element's process.
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key, or it is not active
+ - no process definition exists with the given target definition key
+ - no process instance exists with the given key for the tenants the user is authorized to work with.
+
+ FAILED_PRECONDITION:
+ - not all active elements in the given process instance are mapped to the elements in the target process definition
+ - a mapping instruction changes the type of an element or event
+ - a mapping instruction changes the implementation of a task
+ - a mapping instruction detaches a boundary event from an active element
+ - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on)
+ - a mapping instruction refers to an element in unsupported scenarios.
+ (i.e. migration is not supported when the process instance or target process elements contain event subscriptions)
+ - multiple mapping instructions target the same boundary event
+
+ INVALID_ARGUMENT:
+ - A `sourceElementId` does not refer to an element in the process instance's process definition
+ - A `targetElementId` does not refer to an element in the target process definition
+ - A `sourceElementId` is mapped by multiple mapping instructions.
+ For example, the engine cannot determine how to migrate a process instance when the instructions are: [A->B, A->C].
+ """
+
+ UpdateJobTimeout: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.UpdateJobTimeoutRequest,
+ pyzeebe.proto.gateway_pb2.UpdateJobTimeoutResponse,
+ ]
+ """
+ Updates the deadline of a job using the timeout (in ms) provided. This can be used
+ for extending or shortening the job deadline.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given key
+
+ INVALID_STATE:
+ - no deadline exists for the given job key
+ """
+
+ DeleteResource: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.DeleteResourceRequest,
+ pyzeebe.proto.gateway_pb2.DeleteResourceResponse,
+ ]
+ """
+ Deletes a resource from the state. Once a resource has been deleted it cannot
+ be recovered. If the resource needs to be available again, a new deployment
+ of the resource is required.
+
+ Deleting a process will cancel any running instances of this process
+ definition. New instances of a deleted process are created using
+ the latest version that hasn't been deleted. Creating a new
+ process instance is impossible when all versions have been
+ deleted.
+
+ Deleting a decision requirements definition could cause incidents in process
+ instances referencing these decisions in a business rule task. A decision
+ will be evaluated with the latest version that hasn't been deleted. If all
+ versions of a decision have been deleted the evaluation is rejected.
+
+ Errors:
+ NOT_FOUND:
+ - No resource exists with the given key
+ """
+
+ BroadcastSignal: grpc.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.BroadcastSignalRequest,
+ pyzeebe.proto.gateway_pb2.BroadcastSignalResponse,
+ ]
+ """
+ Broadcasts a signal.
+ """
+
+class GatewayAsyncStub:
+ ActivateJobs: grpc.aio.UnaryStreamMultiCallable[
+ pyzeebe.proto.gateway_pb2.ActivateJobsRequest,
+ pyzeebe.proto.gateway_pb2.ActivateJobsResponse,
+ ]
+ """
+ Iterates through all known partitions round-robin and activates up to the requested
+ maximum number of jobs and streams them back to the client as they are activated.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - type is blank (empty string, null)
+ - worker is blank (empty string, null)
+ - timeout less than 1
+ - maxJobsToActivate is less than 1
+ """
+
+ StreamActivatedJobs: grpc.aio.UnaryStreamMultiCallable[
+ pyzeebe.proto.gateway_pb2.StreamActivatedJobsRequest,
+ pyzeebe.proto.gateway_pb2.ActivatedJob,
+ ]
+ """
+ Registers a client to a job stream that will stream jobs back to the client as
+ they become activatable.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - type is blank (empty string, null)
+ - timeout less than 1
+ - If multi-tenancy is enabled, and tenantIds is empty (empty list)
+ - If multi-tenancy is enabled, and an invalid tenant ID is provided. A tenant ID is considered invalid if:
+ - The tenant ID is blank (empty string, null)
+ - The tenant ID is longer than 31 characters
+ - The tenant ID contains anything other than alphanumeric characters, dot (.), dash (-), or underscore (_)
+ - If multi-tenancy is disabled, and tenantIds is not empty (empty list), or has an ID other than <default>
+ """
+
+ CancelProcessInstance: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.CancelProcessInstanceRequest,
+ pyzeebe.proto.gateway_pb2.CancelProcessInstanceResponse,
+ ]
+ """
+ Cancels a running process instance
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key
+ """
+
+ CompleteJob: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.CompleteJobRequest,
+ pyzeebe.proto.gateway_pb2.CompleteJobResponse,
+ ]
+ """
+ Completes a job with the given variables, which allows completing the associated service task.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given job key. Note that since jobs are removed once completed,
+ it could be that this job did exist at some point.
+
+ FAILED_PRECONDITION:
+ - the job was marked as failed. In that case, the related incident must be resolved before
+ the job can be activated again and completed.
+ """
+
+ CreateProcessInstance: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.CreateProcessInstanceRequest,
+ pyzeebe.proto.gateway_pb2.CreateProcessInstanceResponse,
+ ]
+ """
+ Creates and starts an instance of the specified process. The process definition to use to
+ create the instance can be specified either using its unique key (as returned by
+ DeployProcess), or using the BPMN process ID and a version. Pass -1 as the version to use the
+ latest deployed version. Note that only processes with none start events can be started through
+ this command.
+
+ Errors:
+ NOT_FOUND:
+ - no process with the given key exists (if processDefinitionKey was given)
+ - no process with the given process ID exists (if bpmnProcessId was given but version was -1)
+ - no process with the given process ID and version exists (if both bpmnProcessId and version were given)
+
+ FAILED_PRECONDITION:
+ - the process definition does not contain a none start event; only processes with none
+ start event can be started manually.
+
+ INVALID_ARGUMENT:
+ - the given variables argument is not a valid JSON document; it is expected to be a valid
+ JSON document where the root node is an object.
+ """
+
+ CreateProcessInstanceWithResult: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultRequest,
+ pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultResponse,
+ ]
+ """
+ Behaves similarly to `rpc CreateProcessInstance`, except that a successful response is received when the process completes successfully.
+ """
+
+ EvaluateDecision: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.EvaluateDecisionRequest,
+ pyzeebe.proto.gateway_pb2.EvaluateDecisionResponse,
+ ]
+ """
+ Evaluates a decision. The decision to evaluate can be specified either by
+ using its unique key (as returned by DeployResource), or using the decision
+ ID. When using the decision ID, the latest deployed version of the decision
+ is used.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - no decision with the given key exists (if decisionKey was given)
+ - no decision with the given decision ID exists (if decisionId was given)
+ - both decision ID and decision KEY were provided, or are missing
+ """
+
+ DeployProcess: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.DeployProcessRequest,
+ pyzeebe.proto.gateway_pb2.DeployProcessResponse,
+ ]
+ """
+ Deploys one or more processes to Zeebe. Note that this is an atomic call,
+ i.e. either all processes are deployed, or none of them are.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - no resources given.
+ - if at least one resource is invalid. A resource is considered invalid if:
+ - the resource data is not deserializable (e.g. detected as BPMN, but it's broken XML)
+ - the process is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task)
+ """
+
+ DeployResource: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.DeployResourceRequest,
+ pyzeebe.proto.gateway_pb2.DeployResourceResponse,
+ ]
+ """
+ Deploys one or more resources (e.g. processes or decision models) to Zeebe.
+ Note that this is an atomic call, i.e. either all resources are deployed, or none of them are.
+
+ Errors:
+ PERMISSION_DENIED:
+ - if a deployment to an unauthorized tenant is performed
+ INVALID_ARGUMENT:
+ - no resources given.
+ - if at least one resource is invalid. A resource is considered invalid if:
+ - the content is not deserializable (e.g. detected as BPMN, but it's broken XML)
+ - the content is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task)
+ - if multi-tenancy is enabled, and:
+ - a tenant id is not provided
+ - a tenant id with an invalid format is provided
+ - if multi-tenancy is disabled and a tenant id is provided
+ """
+
+ FailJob: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.FailJobRequest,
+ pyzeebe.proto.gateway_pb2.FailJobResponse,
+ ]
+ """
+ Marks the job as failed; if the retries argument is positive, then the job will be immediately
+ activatable again, and a worker could try again to process it. If it is zero or negative however,
+ an incident will be raised, tagged with the given errorMessage, and the job will not be
+ activatable until the incident is resolved.
+
+ Errors:
+ NOT_FOUND:
+ - no job was found with the given key
+
+ FAILED_PRECONDITION:
+ - the job was not activated
+ - the job is already in a failed state, i.e. ran out of retries
+ """
+
+ ThrowError: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.ThrowErrorRequest,
+ pyzeebe.proto.gateway_pb2.ThrowErrorResponse,
+ ]
+ """
+ Reports a business error (i.e. non-technical) that occurs while processing a job. The error is handled in the process by an error catch event. If there is no error catch event with the specified errorCode then an incident will be raised instead.
+
+ Errors:
+ NOT_FOUND:
+ - no job was found with the given key
+
+ FAILED_PRECONDITION:
+ - the job is not in an activated state
+ """
+
+ PublishMessage: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.PublishMessageRequest,
+ pyzeebe.proto.gateway_pb2.PublishMessageResponse,
+ ]
+ """
+ Publishes a single message. Messages are published to specific partitions computed from their
+ correlation keys.
+
+ Errors:
+ ALREADY_EXISTS:
+ - a message with the same ID was previously published (and is still alive)
+ """
+
+ ResolveIncident: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.ResolveIncidentRequest,
+ pyzeebe.proto.gateway_pb2.ResolveIncidentResponse,
+ ]
+ """
+ Resolves a given incident. This simply marks the incident as resolved; most likely a call to
+ UpdateJobRetries or SetVariables will be necessary to actually resolve the
+ problem, followed by this call.
+
+ Errors:
+ NOT_FOUND:
+ - no incident with the given key exists
+ """
+
+ SetVariables: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.SetVariablesRequest,
+ pyzeebe.proto.gateway_pb2.SetVariablesResponse,
+ ]
+ """
+ Updates all the variables of a particular scope (e.g. process instance, flow element instance)
+ from the given JSON document.
+
+ Errors:
+ NOT_FOUND:
+ - no element with the given elementInstanceKey exists
+ INVALID_ARGUMENT:
+ - the given variables document is not a valid JSON document; valid documents are expected to
+ be JSON documents where the root node is an object.
+ """
+
+ Topology: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.TopologyRequest,
+ pyzeebe.proto.gateway_pb2.TopologyResponse,
+ ]
+ """
+ Obtains the current topology of the cluster the gateway is part of.
+ """
+
+ UpdateJobRetries: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.UpdateJobRetriesRequest,
+ pyzeebe.proto.gateway_pb2.UpdateJobRetriesResponse,
+ ]
+ """
+ Updates the number of retries a job has left. This is mostly useful for jobs that have run out of
+ retries, should the underlying problem be solved.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given key
+
+ INVALID_ARGUMENT:
+ - retries is not greater than 0
+ """
+
+ ModifyProcessInstance: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.ModifyProcessInstanceRequest,
+ pyzeebe.proto.gateway_pb2.ModifyProcessInstanceResponse,
+ ]
+ """
+ Modifies the process instance. This is done by activating and/or terminating specific elements of the instance.
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key
+
+ FAILED_PRECONDITION:
+ - trying to activate element inside of a multi-instance
+
+ INVALID_ARGUMENT:
+ - activating or terminating unknown element
+ - ancestor of element for activation doesn't exist
+ - scope of variable is unknown
+ """
+
+ MigrateProcessInstance: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.MigrateProcessInstanceRequest,
+ pyzeebe.proto.gateway_pb2.MigrateProcessInstanceResponse,
+ ]
+ """
+ Migrates the process instance to the specified process definition.
+ In simple terms, this is handled by updating the active element's process.
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key, or it is not active
+ - no process definition exists with the given target definition key
+ - no process instance exists with the given key for the tenants the user is authorized to work with.
+
+ FAILED_PRECONDITION:
+ - not all active elements in the given process instance are mapped to the elements in the target process definition
+ - a mapping instruction changes the type of an element or event
+ - a mapping instruction changes the implementation of a task
+ - a mapping instruction detaches a boundary event from an active element
+ - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on)
+ - a mapping instruction refers to an element in unsupported scenarios.
+ (i.e. migration is not supported when the process instance or target process elements contain event subscriptions)
+ - multiple mapping instructions target the same boundary event
+
+ INVALID_ARGUMENT:
+ - A `sourceElementId` does not refer to an element in the process instance's process definition
+ - A `targetElementId` does not refer to an element in the target process definition
+ - A `sourceElementId` is mapped by multiple mapping instructions.
+ For example, the engine cannot determine how to migrate a process instance when the instructions are: [A->B, A->C].
+ """
+
+ UpdateJobTimeout: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.UpdateJobTimeoutRequest,
+ pyzeebe.proto.gateway_pb2.UpdateJobTimeoutResponse,
+ ]
+ """
+ Updates the deadline of a job using the timeout (in ms) provided. This can be used
+ for extending or shortening the job deadline.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given key
+
+ INVALID_STATE:
+ - no deadline exists for the given job key
+ """
+
+ DeleteResource: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.DeleteResourceRequest,
+ pyzeebe.proto.gateway_pb2.DeleteResourceResponse,
+ ]
+ """
+ Deletes a resource from the state. Once a resource has been deleted it cannot
+ be recovered. If the resource needs to be available again, a new deployment
+ of the resource is required.
+
+ Deleting a process will cancel any running instances of this process
+ definition. New instances of a deleted process are created using
+ the latest version that hasn't been deleted. Creating a new
+ process instance is impossible when all versions have been
+ deleted.
+
+ Deleting a decision requirements definition could cause incidents in process
+ instances referencing these decisions in a business rule task. A decision
+ will be evaluated with the latest version that hasn't been deleted. If all
+ versions of a decision have been deleted the evaluation is rejected.
+
+ Errors:
+ NOT_FOUND:
+ - No resource exists with the given key
+ """
+
+ BroadcastSignal: grpc.aio.UnaryUnaryMultiCallable[
+ pyzeebe.proto.gateway_pb2.BroadcastSignalRequest,
+ pyzeebe.proto.gateway_pb2.BroadcastSignalResponse,
+ ]
+ """
+ Broadcasts a signal.
+ """
+
+class GatewayServicer(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def ActivateJobs(
+ self,
+ request: pyzeebe.proto.gateway_pb2.ActivateJobsRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[collections.abc.Iterator[pyzeebe.proto.gateway_pb2.ActivateJobsResponse], collections.abc.AsyncIterator[pyzeebe.proto.gateway_pb2.ActivateJobsResponse]]:
+ """
+ Iterates through all known partitions round-robin and activates up to the requested
+ maximum number of jobs and streams them back to the client as they are activated.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - type is blank (empty string, null)
+ - worker is blank (empty string, null)
+ - timeout less than 1
+ - maxJobsToActivate is less than 1
+ """
+
+ @abc.abstractmethod
+ def StreamActivatedJobs(
+ self,
+ request: pyzeebe.proto.gateway_pb2.StreamActivatedJobsRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[collections.abc.Iterator[pyzeebe.proto.gateway_pb2.ActivatedJob], collections.abc.AsyncIterator[pyzeebe.proto.gateway_pb2.ActivatedJob]]:
+ """
+ Registers a client to a job stream that will stream jobs back to the client as
+ they become activatable.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - type is blank (empty string, null)
+ - timeout less than 1
+ - If multi-tenancy is enabled, and tenantIds is empty (empty list)
+ - If multi-tenancy is enabled, and an invalid tenant ID is provided. A tenant ID is considered invalid if:
+ - The tenant ID is blank (empty string, null)
+ - The tenant ID is longer than 31 characters
+ - The tenant ID contains anything other than alphanumeric characters, dot (.), dash (-), or underscore (_)
+ - If multi-tenancy is disabled, and tenantIds is not empty (empty list), or has an ID other than <default>
+ """
+
+ @abc.abstractmethod
+ def CancelProcessInstance(
+ self,
+ request: pyzeebe.proto.gateway_pb2.CancelProcessInstanceRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.CancelProcessInstanceResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.CancelProcessInstanceResponse]]:
+ """
+ Cancels a running process instance
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key
+ """
+
+ @abc.abstractmethod
+ def CompleteJob(
+ self,
+ request: pyzeebe.proto.gateway_pb2.CompleteJobRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.CompleteJobResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.CompleteJobResponse]]:
+ """
+ Completes a job with the given variables, which allows completing the associated service task.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given job key. Note that since jobs are removed once completed,
+ it could be that this job did exist at some point.
+
+ FAILED_PRECONDITION:
+ - the job was marked as failed. In that case, the related incident must be resolved before
+ the job can be activated again and completed.
+ """
+
+ @abc.abstractmethod
+ def CreateProcessInstance(
+ self,
+ request: pyzeebe.proto.gateway_pb2.CreateProcessInstanceRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.CreateProcessInstanceResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.CreateProcessInstanceResponse]]:
+ """
+ Creates and starts an instance of the specified process. The process definition to use to
+ create the instance can be specified either using its unique key (as returned by
+ DeployProcess), or using the BPMN process ID and a version. Pass -1 as the version to use the
+ latest deployed version. Note that only processes with none start events can be started through
+ this command.
+
+ Errors:
+ NOT_FOUND:
+ - no process with the given key exists (if processDefinitionKey was given)
+ - no process with the given process ID exists (if bpmnProcessId was given but version was -1)
+ - no process with the given process ID and version exists (if both bpmnProcessId and version were given)
+
+ FAILED_PRECONDITION:
+ - the process definition does not contain a none start event; only processes with none
+ start event can be started manually.
+
+ INVALID_ARGUMENT:
+ - the given variables argument is not a valid JSON document; it is expected to be a valid
+ JSON document where the root node is an object.
+ """
+
+ @abc.abstractmethod
+ def CreateProcessInstanceWithResult(
+ self,
+ request: pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.CreateProcessInstanceWithResultResponse]]:
+ """
+ Behaves similarly to `rpc CreateProcessInstance`, except that a successful response is received when the process completes successfully.
+ """
+
+ @abc.abstractmethod
+ def EvaluateDecision(
+ self,
+ request: pyzeebe.proto.gateway_pb2.EvaluateDecisionRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.EvaluateDecisionResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.EvaluateDecisionResponse]]:
+ """
+ Evaluates a decision. The decision to evaluate can be specified either by
+ using its unique key (as returned by DeployResource), or using the decision
+ ID. When using the decision ID, the latest deployed version of the decision
+ is used.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - no decision with the given key exists (if decisionKey was given)
+ - no decision with the given decision ID exists (if decisionId was given)
+ - both decision ID and decision KEY were provided, or are missing
+ """
+
+ @abc.abstractmethod
+ def DeployProcess(
+ self,
+ request: pyzeebe.proto.gateway_pb2.DeployProcessRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.DeployProcessResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.DeployProcessResponse]]:
+ """
+ Deploys one or more processes to Zeebe. Note that this is an atomic call,
+ i.e. either all processes are deployed, or none of them are.
+
+ Errors:
+ INVALID_ARGUMENT:
+ - no resources given.
+ - if at least one resource is invalid. A resource is considered invalid if:
+ - the resource data is not deserializable (e.g. detected as BPMN, but it's broken XML)
+ - the process is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task)
+ """
+
+ @abc.abstractmethod
+ def DeployResource(
+ self,
+ request: pyzeebe.proto.gateway_pb2.DeployResourceRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.DeployResourceResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.DeployResourceResponse]]:
+ """
+ Deploys one or more resources (e.g. processes or decision models) to Zeebe.
+ Note that this is an atomic call, i.e. either all resources are deployed, or none of them are.
+
+ Errors:
+ PERMISSION_DENIED:
+ - if a deployment to an unauthorized tenant is performed
+ INVALID_ARGUMENT:
+ - no resources given.
+ - if at least one resource is invalid. A resource is considered invalid if:
+ - the content is not deserializable (e.g. detected as BPMN, but it's broken XML)
+ - the content is invalid (e.g. an event-based gateway has an outgoing sequence flow to a task)
+ - if multi-tenancy is enabled, and:
+ - a tenant id is not provided
+ - a tenant id with an invalid format is provided
+ - if multi-tenancy is disabled and a tenant id is provided
+ """
+
+ @abc.abstractmethod
+ def FailJob(
+ self,
+ request: pyzeebe.proto.gateway_pb2.FailJobRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.FailJobResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.FailJobResponse]]:
+ """
+ Marks the job as failed; if the retries argument is positive, then the job will be immediately
+ activatable again, and a worker could try again to process it. If it is zero or negative however,
+ an incident will be raised, tagged with the given errorMessage, and the job will not be
+ activatable until the incident is resolved.
+
+ Errors:
+ NOT_FOUND:
+ - no job was found with the given key
+
+ FAILED_PRECONDITION:
+ - the job was not activated
+ - the job is already in a failed state, i.e. ran out of retries
+ """
+
+ @abc.abstractmethod
+ def ThrowError(
+ self,
+ request: pyzeebe.proto.gateway_pb2.ThrowErrorRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.ThrowErrorResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.ThrowErrorResponse]]:
+ """
+ Reports a business error (i.e. non-technical) that occurs while processing a job. The error is handled in the process by an error catch event. If there is no error catch event with the specified errorCode then an incident will be raised instead.
+
+ Errors:
+ NOT_FOUND:
+ - no job was found with the given key
+
+ FAILED_PRECONDITION:
+ - the job is not in an activated state
+ """
+
+ @abc.abstractmethod
+ def PublishMessage(
+ self,
+ request: pyzeebe.proto.gateway_pb2.PublishMessageRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.PublishMessageResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.PublishMessageResponse]]:
+ """
+ Publishes a single message. Messages are published to specific partitions computed from their
+ correlation keys.
+
+ Errors:
+ ALREADY_EXISTS:
+ - a message with the same ID was previously published (and is still alive)
+ """
+
+ @abc.abstractmethod
+ def ResolveIncident(
+ self,
+ request: pyzeebe.proto.gateway_pb2.ResolveIncidentRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.ResolveIncidentResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.ResolveIncidentResponse]]:
+ """
+ Resolves a given incident. This simply marks the incident as resolved; most likely a call to
+ UpdateJobRetries or SetVariables will be necessary to actually resolve the
+ problem, followed by this call.
+
+ Errors:
+ NOT_FOUND:
+ - no incident with the given key exists
+ """
+
+ @abc.abstractmethod
+ def SetVariables(
+ self,
+ request: pyzeebe.proto.gateway_pb2.SetVariablesRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.SetVariablesResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.SetVariablesResponse]]:
+ """
+ Updates all the variables of a particular scope (e.g. process instance, flow element instance)
+ from the given JSON document.
+
+ Errors:
+ NOT_FOUND:
+ - no element with the given elementInstanceKey exists
+ INVALID_ARGUMENT:
+ - the given variables document is not a valid JSON document; valid documents are expected to
+ be JSON documents where the root node is an object.
+ """
+
+ @abc.abstractmethod
+ def Topology(
+ self,
+ request: pyzeebe.proto.gateway_pb2.TopologyRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.TopologyResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.TopologyResponse]]:
+ """
+ Obtains the current topology of the cluster the gateway is part of.
+ """
+
+ @abc.abstractmethod
+ def UpdateJobRetries(
+ self,
+ request: pyzeebe.proto.gateway_pb2.UpdateJobRetriesRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.UpdateJobRetriesResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.UpdateJobRetriesResponse]]:
+ """
+ Updates the number of retries a job has left. This is mostly useful for jobs that have run out of
+ retries, should the underlying problem be solved.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given key
+
+ INVALID_ARGUMENT:
+ - retries is not greater than 0
+ """
+
+ @abc.abstractmethod
+ def ModifyProcessInstance(
+ self,
+ request: pyzeebe.proto.gateway_pb2.ModifyProcessInstanceRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.ModifyProcessInstanceResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.ModifyProcessInstanceResponse]]:
+ """
+ Modifies the process instance. This is done by activating and/or terminating specific elements of the instance.
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key
+
+ FAILED_PRECONDITION:
+ - trying to activate element inside of a multi-instance
+
+ INVALID_ARGUMENT:
+ - activating or terminating unknown element
+ - ancestor of element for activation doesn't exist
+ - scope of variable is unknown
+ """
+
+ @abc.abstractmethod
+ def MigrateProcessInstance(
+ self,
+ request: pyzeebe.proto.gateway_pb2.MigrateProcessInstanceRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.MigrateProcessInstanceResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.MigrateProcessInstanceResponse]]:
+ """
+ Migrates the process instance to the specified process definition.
+ In simple terms, this is handled by updating the active element's process.
+
+ Errors:
+ NOT_FOUND:
+ - no process instance exists with the given key, or it is not active
+ - no process definition exists with the given target definition key
+ - no process instance exists with the given key for the tenants the user is authorized to work with.
+
+ FAILED_PRECONDITION:
+ - not all active elements in the given process instance are mapped to the elements in the target process definition
+ - a mapping instruction changes the type of an element or event
+ - a mapping instruction changes the implementation of a task
+ - a mapping instruction detaches a boundary event from an active element
+ - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on)
+ - a mapping instruction refers to an element in unsupported scenarios.
+ (i.e. migration is not supported when the process instance or target process elements contain event subscriptions)
+ - multiple mapping instructions target the same boundary event
+
+ INVALID_ARGUMENT:
+ - A `sourceElementId` does not refer to an element in the process instance's process definition
+ - A `targetElementId` does not refer to an element in the target process definition
+ - A `sourceElementId` is mapped by multiple mapping instructions.
+ For example, the engine cannot determine how to migrate a process instance when the instructions are: [A->B, A->C].
+ """
+
+ @abc.abstractmethod
+ def UpdateJobTimeout(
+ self,
+ request: pyzeebe.proto.gateway_pb2.UpdateJobTimeoutRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.UpdateJobTimeoutResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.UpdateJobTimeoutResponse]]:
+ """
+ Updates the deadline of a job using the timeout (in ms) provided. This can be used
+ for extending or shortening the job deadline.
+
+ Errors:
+ NOT_FOUND:
+ - no job exists with the given key
+
+ INVALID_STATE:
+ - no deadline exists for the given job key
+ """
+
+ @abc.abstractmethod
+ def DeleteResource(
+ self,
+ request: pyzeebe.proto.gateway_pb2.DeleteResourceRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.DeleteResourceResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.DeleteResourceResponse]]:
+ """
+ Deletes a resource from the state. Once a resource has been deleted it cannot
+ be recovered. If the resource needs to be available again, a new deployment
+ of the resource is required.
+
+ Deleting a process will cancel any running instances of this process
+ definition. New instances of a deleted process are created using
+ the latest version that hasn't been deleted. Creating a new
+ process instance is impossible when all versions have been
+ deleted.
+
+ Deleting a decision requirements definition could cause incidents in process
+ instances referencing these decisions in a business rule task. A decision
+ will be evaluated with the latest version that hasn't been deleted. If all
+ versions of a decision have been deleted the evaluation is rejected.
+
+ Errors:
+ NOT_FOUND:
+ - No resource exists with the given key
+ """
+
+ @abc.abstractmethod
+ def BroadcastSignal(
+ self,
+ request: pyzeebe.proto.gateway_pb2.BroadcastSignalRequest,
+ context: _ServicerContext,
+ ) -> typing.Union[pyzeebe.proto.gateway_pb2.BroadcastSignalResponse, collections.abc.Awaitable[pyzeebe.proto.gateway_pb2.BroadcastSignalResponse]]:
+ """
+ Broadcasts a signal.
+ """
+
+def add_GatewayServicer_to_server(servicer: GatewayServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ...
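(Not part of the patch: a rough sketch of what the new stub file buys for type
checking. `GatewayAsyncStub` exists only in the .pyi, so a common pattern is to
cast the runtime `GatewayStub` when it wraps a grpc.aio channel; `fetch_topology`
is an illustrative name, not pyzeebe API.)

    import typing

    import grpc.aio

    from pyzeebe.proto import gateway_pb2
    from pyzeebe.proto.gateway_pb2_grpc import GatewayStub

    if typing.TYPE_CHECKING:
        from pyzeebe.proto.gateway_pb2_grpc import GatewayAsyncStub

    async def fetch_topology(address: str) -> gateway_pb2.TopologyResponse:
        async with grpc.aio.insecure_channel(address) as channel:
            # Only GatewayStub exists at runtime; the cast makes mypy use the
            # aio-typed multicallables declared above instead of the sync ones.
            stub = typing.cast("GatewayAsyncStub", GatewayStub(channel))
            return await stub.Topology(gateway_pb2.TopologyRequest())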
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
index 6396cb2d..0e08ce5c 100644
--- a/tests/unit/conftest.py
+++ b/tests/unit/conftest.py
@@ -157,7 +157,7 @@ async def simple_exception_handler(e: Exception, job: Job, job_controller: JobCo
@pytest.fixture(scope="module")
def grpc_add_to_server():
- from zeebe_grpc.gateway_pb2_grpc import add_GatewayServicer_to_server
+ from pyzeebe.proto.gateway_pb2_grpc import add_GatewayServicer_to_server
return add_GatewayServicer_to_server
@@ -169,7 +169,7 @@ def grpc_servicer():
@pytest.fixture(scope="module")
def grpc_stub_cls(grpc_channel):
- from zeebe_grpc.gateway_pb2_grpc import GatewayStub
+ from pyzeebe.proto.gateway_pb2_grpc import GatewayStub
return GatewayStub
diff --git a/tests/unit/utils/gateway_mock.py b/tests/unit/utils/gateway_mock.py
index 116a3a92..c6faa2cf 100644
--- a/tests/unit/utils/gateway_mock.py
+++ b/tests/unit/utils/gateway_mock.py
@@ -5,11 +5,11 @@
from uuid import uuid4
import grpc
-from zeebe_grpc.gateway_pb2 import *
-from zeebe_grpc.gateway_pb2_grpc import GatewayServicer
from pyzeebe.job.job import Job
from pyzeebe.job.job_status import JobStatus
+from pyzeebe.proto.gateway_pb2 import *
+from pyzeebe.proto.gateway_pb2_grpc import GatewayServicer
from pyzeebe.task.task import Task
from tests.unit.utils.random_utils import RANDOM_RANGE, random_job
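(Not part of the patch: `grpc_add_to_server`, `grpc_servicer`, and
`grpc_stub_cls` are fixture names consumed by the pytest-grpc plugin, which
wires the servicer into an in-process server and hands tests a connected stub.
A hypothetical test exercising the relocated imports, assuming the mock
servicer implements Topology, could look like:)

    from pyzeebe.proto.gateway_pb2 import TopologyRequest, TopologyResponse

    def test_topology_roundtrip(grpc_stub):
        # grpc_stub is assembled by pytest-grpc from grpc_stub_cls and
        # grpc_channel, i.e. a pyzeebe.proto GatewayStub bound to GatewayMock.
        response = grpc_stub.Topology(TopologyRequest())
        assert isinstance(response, TopologyResponse)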