diff --git a/Dockerfile b/Dockerfile index 763c58314..87b71e1a0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -39,13 +39,13 @@ COPY . . RUN apt-get clean && find /var/lib/apt/lists/ -type f -delete && chown -R www-data /app/ ENV PROMETHEUS_PORT 9000 -ENV PULSE_SERVER_PORT 9010 +ENV HEALTHCHECK_SERVER_PORT 9010 EXPOSE $PROMETHEUS_PORT USER www-data HEALTHCHECK --interval=10s --timeout=3s \ - CMD curl -f http://localhost:$PULSE_SERVER_PORT/healthcheck || exit 1 + CMD curl -f http://localhost:$HEALTHCHECK_SERVER_PORT/healthcheck || exit 1 WORKDIR /app/ diff --git a/README.md b/README.md index c7b825b9a..0ae748770 100644 --- a/README.md +++ b/README.md @@ -26,18 +26,24 @@ You can build it locally using the following command: docker build -t lidofinance/oracle . ``` -## Run -1. Use `.env.example` file content to create your own `.env` file. - There are two options to run Oracle: - - `dry mode` (by default) - - `production mode` -\ -\ - To run Oracle in `production mode` set `MEMBER_PRIV_KEY` environment variable in `.env` file: +## Checks before running +1. Use the [.env.example](.env.example) file content to create your own `.env` file. + Set the required URI values; this is enough to run the oracle in *check mode*. +2. Check that your environment is ready to run the oracle using the following command: ```bash - MEMBER_PRIV_KEY={value} + docker run --env-file .env --rm lidofinance/oracle:{tag} check ``` - Where `{value}` is a private key of the Oracle member account. + If everything is OK, you will see that all required checks have passed + and your environment is ready to run the oracle. + +## Run the oracle +1. By default, the oracle runs in *dry mode*, which means it will not send any transactions to the Ethereum network. + This lets you check that the oracle works correctly before running it in *production mode*. + To run the Oracle in *production mode*, set the `MEMBER_PRIV_KEY` environment variable: + ``` + MEMBER_PRIV_KEY={value} + ``` + Where `{value}` is the private key of the Oracle member account. 2. Run the container using the following command: ```bash docker run --env-file .env lidofinance/oracle:{tag} {type} @@ -47,17 +53,32 @@ docker build -t lidofinance/oracle . - `{type}` is a type of the Oracle. There are two types of oracles: - `accounting` - `ejector` + There is also an additional type from the [previous checks](#checks-before-running): + - `check` - checks that the environment is ready to run the oracle + +> **Note**: you can also pass env variables without using the `.env` file.
+> For example, you can run the container using the following command: +> ```bash +> docker run --env EXECUTION_CLIENT_URI={value} --env CONSENSUS_CLIENT_URI={value} --env KEYS_API_URI={value} --env LIDO_LOCATOR_ADDRESS={value} lidofinance/oracle:{tag} {type} +> ``` ## Env variables -| Name | Description | Required | Example value | -|-----------------------------------|--------------------------------------------------------------------|----------|-------------------------| -| `EXECUTION_CLIENT_URI` | URI of the Execution Layer client | True | `http://localhost:8545` | -| `CONSENSUS_CLIENT_URI` | URI of the Consensus Layer client | True | `http://localhost:5052` | -| `KEYS_API_URI` | URI of the Keys API | True | `http://localhost:8080` | -| `LIDO_LOCATOR_ADDRESS` | Address of the Lido contract | True | `0x1...` | -| `ALLOW_NEGATIVE_REBASE_REPORTING` | If 'False', a report with negative cl rebase would not be reported | False | `True` | -| `MEMBER_PRIV_KEY` | Private key of the Oracle member account | False | `0x1...` | +| Name | Description | Required | Example value | +|----------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------|----------|-------------------------| +| `EXECUTION_CLIENT_URI` | URI of the Execution Layer client | True | `http://localhost:8545` | +| `CONSENSUS_CLIENT_URI` | URI of the Consensus Layer client | True | `http://localhost:5052` | +| `KEYS_API_URI` | URI of the Keys API | True | `http://localhost:8080` | +| `LIDO_LOCATOR_ADDRESS` | Address of the Lido contract | True | `0x1...` | +| `MEMBER_PRIV_KEY` | Private key of the Oracle member account | False | `0x1...` | +| `FINALIZATION_BATCH_MAX_REQUEST_COUNT` | The size of the batch to be finalized per request (the larger the batch, the more contract memory is used, but the fewer requests are needed) | False | `1000` | +| `ALLOW_REPORTING_IN_BUNKER_MODE` | Allow the Oracle to submit a report if bunker mode is active | False | `True` | +| `TX_GAS_ADDITION` | Used to modify the gas parameter used in the transaction (gas = estimated_gas + TX_GAS_ADDITION) | False | `1.75` | +| `CYCLE_SLEEP_IN_SECONDS` | The time between cycles of the oracle's activity | False | `12` | +| `SUBMIT_DATA_DELAY_IN_SLOTS` | The difference in slots between submit-data transactions from Oracles. It is used to prevent Oracles from sending transactions simultaneously, which would cause transaction reverts. 
| False | `6` | +| `HTTP_REQUEST_RETRY_COUNT` | Total number of retries to fetch data from endpoint | False | `5` | +| `HTTP_REQUEST_SLEEP_BEFORE_RETRY_IN_SECONDS` | The delay http provider sleeps if API is stuck | False | `12` | +| `HTTP_REQUEST_TIMEOUT` | Timeout for HTTP requests | False | `300` | ## Monitoring TBD @@ -100,6 +121,7 @@ poetry run python -m src.main {module} Where `{module}` is one of: - `accounting` - `ejector` +- `check` ## Tests diff --git a/fixtures/tests/modules/accounting/test_withdrawal_unit.py/test_returns_empty_batch_if_paused.cl.json b/fixtures/tests/modules/accounting/test_withdrawal_unit.py/test_returns_empty_batch_if_paused.cl.json new file mode 100644 index 000000000..41c955535 --- /dev/null +++ b/fixtures/tests/modules/accounting/test_withdrawal_unit.py/test_returns_empty_batch_if_paused.cl.json @@ -0,0 +1,439 @@ +[ + { + "url": "eth/v1/beacon/blocks/finalized/root", + "params": null, + "response": [ + { + "root": "0x057699a839b2d4eda1197b59f2a5873acc969a916c0cbad36541dc422acfaf2c" + }, + { + "execution_optimistic": false, + "finalized": true + } + ] + }, + { + "url": "eth/v2/beacon/blocks/0x057699a839b2d4eda1197b59f2a5873acc969a916c0cbad36541dc422acfaf2c", + "params": null, + "response": [ + { + "message": { + "slot": "285984", + "proposer_index": "60401", + "parent_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "state_root": "0x8772f61f34c723567244a977986890f98269b08c8a592276d486dcf7228800a8", + "body": { + "randao_reveal": "0xae64d9c9fd4baaa8469e38de42d0fdae768c22e7fb101d23abda4fc3e813126eadafd926f5d8ece911e2e4da1fb3f722178ef00e1ffccf758489bb62164bfbd9e9b9f00cf46974817b3f79ea1d30f841aba9ada02a706e5c671f4fba52404fb1", + "eth1_data": { + "deposit_root": "0xdf9cad85080cec3d471a763fe8d899dab0636b57b8500efc2733fb2acefadfae", + "deposit_count": "3949", + "block_hash": "0x5d30cea9aaa726312ee1105d395ebcae3266d3fbf1d38b17cdfe337ae4a9d220" + }, + "graffiti": "0x4c69676874686f7573652f76332e342e302d3930623661653600000000000000", + "proposer_slashings": [], + "attester_slashings": [], + "attestations": [ + { + "aggregation_bits": "0xffbfffff7ffffffffffbfff7fffffffe3f", + "data": { + "slot": "285983", + "index": "6", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0xa1ffb7d51d7e4b12e4c90d0aa75d1331bb1e1cfaff76f7395119c6a7659b8756b1109fdc5cd672836c4ab4f1fe3243080ddecad693220da72512fdc60ea0d6d19f5abf5747bc8f3e2a6adfb056cde3d19bb5177a14cf4f2216755a74fbc0a97a" + }, + { + "aggregation_bits": "0xffef7fdffffffefffffffffffffffffe67", + "data": { + "slot": "285983", + "index": "8", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0x9575e5c8c45893411acd48e335e2ec0a809022c265f205bcfe0e340f7e2f98a3014612a51614558253bffbcf5fe6f1560d6108a68cc57d517e70a292532b964c79e0399a68fd5185fec8047d7db15e6ef62d1782e9f2589f9528c4dcb63bfa3d" + }, + { + "aggregation_bits": "0xfffb7ffef7fffffffffdffffffedffef3f", + "data": { + "slot": "285983", + "index": "10", + 
"beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0x870a6bf0ada8e990cb53cff0f57944624d9f3ecb267c3f81478928813b922a37fe7f166eb2968530739ca1345227237a1266d0fdd6c4bf36b2ffa1ff52d89f4ac766c39aa562b57e50e82b22df973ae05fa85f0370650b06a4b2fbccfb06516e" + }, + { + "aggregation_bits": "0xffffff7bbdfffeffffffd7ffffffbffe7f", + "data": { + "slot": "285983", + "index": "14", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0xa8c7b8ee8431737659d07a529e2adb07ec7772d577dfd81ad76d6669d9742e2876d65e690d1fcc5b5eb83ebb4345657f107681688cf7cc3e0aa0c2b0cce39e09064af78ec229ca1eaa2b03e2e6e317818dc6d34d5c159ef498654ab459e78609" + }, + { + "aggregation_bits": "0xf7ffffffffffdffffdeefffeffef77ff3f", + "data": { + "slot": "285983", + "index": "2", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0xaa6ee1217f477aa63418fffba248aafedc56f13b4c1ca3a340b3eb1edde5b529e3ff9a78aca2a8188a2f496d13c95d430f19c9ed82e17886e3667f86c6f7f050f2482ad3c38c4868c0edf04ea17f8fdc86be43076b36c43f611256674b895630" + }, + { + "aggregation_bits": "0xdefffffffffb7ffb6fffff77ffffffff3f", + "data": { + "slot": "285983", + "index": "0", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0x8d92b15d7c989f603dcd5fda35ad045afca7a64d18037832d0f6d6175bba040fd1b9c818f7de7fb326c9c3c78e1d2080102c3274c14ac4b2ab2762cc519034a4de20130cd72e9d40b9f5cb7102e6d88c085fe2a8e4c582c0690c414312cb55bc" + }, + { + "aggregation_bits": "0xbffdff7dffffff6fdfffdfffffffbfef37", + "data": { + "slot": "285983", + "index": "7", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0x97167ac9f11b841d8d6f72bd206cb32981b4d023c11060fd03228d865d3121f728c6f7022f06d9629bcd589c33b8949702c0f0be1d15ae68d934f3f2e6c2d02ad0e4f2a67c86c8362cefb76ab3552ce599a9c03251072e39b706ea64b8111a0f" + }, + { + "aggregation_bits": "0xf73fffffbdffffffef3fffefffeffffb7d", + "data": { + "slot": "285983", + "index": "11", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + 
"root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0x99a9bb7ddb51ac8b4bc2f2822e3cfbebeced361f726b7b5d2deb87a9371e6550bc9dfdffa47821906ec55e8603255ab514240b1b26f7f84a15bf4c6982dae9187196fc2b95a3eec4fdefda82946c5112992f3459073c59a6562a46aad27c84b8" + }, + { + "aggregation_bits": "0xffb77fbfff7fdffff7bfffeff7dfffff3f", + "data": { + "slot": "285983", + "index": "5", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0x93f69dd56b518e6cb254c7c4463134f9b70345090ef597d8a6cc000cc831859a3528aab6887354769f117ae835e6514c16df118d412e2efc692c1522a180a31895d2f15f3ea363c812171eacdc1e3480f369e959fb36e234f684a157036f273e" + }, + { + "aggregation_bits": "0xff7ffbfbfdfffbeffffbb77fffffdff73f", + "data": { + "slot": "285983", + "index": "13", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0x8b62d2eccf83f1c792ae97aae7c9caabc042f48ad46a2f4a68267f8000bb2ec37637e95b70363b10cc197cab8f5cefc70316204eb4ebcca84a51eda382f9ac0101d986c1d9ffa12dc36097d7df9a4d9df79406954fa63c868fbdfb6e4e1b13ad" + }, + { + "aggregation_bits": "0xdffbffdff5ffdffffff3ffffdefff3fd7f", + "data": { + "slot": "285983", + "index": "4", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0xb65a543def4be1387ecc67707d3c371f67a038270fb807f01e282dc6fd2bae4bcfaf7edf97e020d11fa9132dd14ae83809f014701c97d81838ebc54acd2bad45874c89ac3be0d75c7d2fb1117a2823b4ab3c5e4fccf5f4dbc5f9abffb480bb4a" + }, + { + "aggregation_bits": "0x77fdfbffff7efffffddf7f7ffffbeffb3f", + "data": { + "slot": "285983", + "index": "9", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0xa009de98e03086761ea941ff9d33e62e13b2e038be1ad89c4a2e39fe41859b31a745b7f1013c478ce8e3de76cbcbb62a0d21b22a49330a848a51e49d979370885fdf5fff5fc6cfd33a781cc306d321449855429a4ff0ee7da5a9af3571e8c429" + }, + { + "aggregation_bits": "0xffeff7bf7ffffffe3fbffff3ebeffdff3d", + "data": { + "slot": "285983", + "index": "12", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": 
"0xb76b82ab63e81a05f7031af56d2fadd7c6469fb0770352b319900a738daf756603b40b122c372bbade379de8e41cee9d04440d69d27692ba5f0fec0032fdc2ba33b5c3b75573b9b1d61f8d6d179090cb0e0b7505a03edcc0c69e7598070797ec" + }, + { + "aggregation_bits": "0xfffebfff7e7febf7fffffff7c7fdff3b7b", + "data": { + "slot": "285983", + "index": "1", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0x80c0189c9bface238ea26d87d3b745b3f5df25545264df85d2430340c4fb3ef4ebd76ae7ce079d3df399100723043f2f038e5f3ebe166ddde3dc64524211de205a3e3b49e983e5758328e0c8ab6c092d7d2a4802cd9f4880e0068d23823c6fb8" + }, + { + "aggregation_bits": "0xffbffffdffbbfbf9bff7fffdbffd7ef93e", + "data": { + "slot": "285983", + "index": "3", + "beacon_block_root": "0x7a4d698ac3fbd804a574699824c8f1fab9d24cd889d05116a741355321c9e645", + "source": { + "epoch": "8935", + "root": "0xbff64a0f15bd5f6456c8ae36e42fbaca23605307f5946f61a54e9787b0234c97" + }, + "target": { + "epoch": "8936", + "root": "0x84aa74d43320a488eaec2418a8c350362c67b9db4948140fad61600834d987b0" + } + }, + "signature": "0x89fa52a810fcf9c5f983d11d7d06b64606066950947cbf72260cd977b49a3dc076d450e4d56a683313931a29e3c0232b061f1b32bded7fb2ee07a3f5a0163f515e584364f973a5a1cbf12230e14876d51c40f2ae88c435552fc0495e675aa487" + } + ], + "deposits": [], + "voluntary_exits": [], + "sync_aggregate": { + "sync_committee_bits": "0xfffbffd7fbefeff3ffffefffeefbefffffdf7ffbbdfff7eefefffffedffffffdffd7fff7fffffffffff7ffffff7effffffffffbfddfffa7ffbff3fbfffbfbbff", + "sync_committee_signature": "0xa3a7cda457f9dca727cbd8f44471786de75854e764477d4fa867dcac62c7a59018f637e96147ac61e4ae6f8f940e721a06be1f74ccfa139e9124672ef32a2ced77593c3e50f38c84bb97ae98dca5854432d8c21b6fdd437b22a9c51712e3488f" + }, + "execution_payload": { + "parent_hash": "0x9e1c27caaa7067433df2ab24abea7a15fa2443cdd2ffafe2cccfe399f26cf729", + "fee_recipient": "0x5eb53b983c4dd15a326f86ee11132c82979ec0c5", + "state_root": "0xa7824cda291cdd5a8815ef8397aa1e4de6ffb184b2cb98af4ee8fb0b7fd173b8", + "receipts_root": "0xf09237343cad2613afb76736605bf3a51439ae52d70dc3dec4281583a5140f5f", + "logs_bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "prev_randao": "0xf82dd28ac6dc0456643f180e8f301aad1608a9644156f66c2b385dde62d1e603", + "block_number": "273993", + "gas_limit": "30000000", + "gas_used": "294000", + "timestamp": "1678695408", + "extra_data": "0xd683010b00846765746886676f312e3230856c696e7578", + "base_fee_per_gas": "7", + "block_hash": "0x90659c38ec3b064b0196dee00903424a75185ca21c37bdeca79254042a372777", + "transactions": [ + "0x02f87a831469cb8305a9f0850ba43b7400850ba43b740e825208948aa2b8622c1223a62fbc1119fa2b483ab41b250b88016345785d8a000080c080a06d6557bb8ab3f4aae8aa634fce9bfde0eddf54c92685bf6d5e6c00196254f126a01e85f32e329b60acd208e1420d09d9d46177665c73824bcd34df17da435145a8", + 
"0x02f87a831469cb8305a9f1850ba43b7400850ba43b740e82520894388979a907fee4c8083aaffb9f66bc33001c1d4d88016345785d8a000080c080a0f47524cc57cdb41f480b747cfa5c723001bc71af5b8e09aa05f79a4610053c24a0497783fcd828da668d4fd75732073df46697d81ef3963863c8bef7149cd8b85f", + "0x02f87a831469cb8305a9f1850ba43b7400850ba43b740e825208948aa2b8622c1223a62fbc1119fa2b483ab41b250b88016345785d8a000080c080a0d5b8982d4d12df9fabb275597194730340199e6832c770560aa978ab80088cfba02d159352f3a5196bfc4901a781ab86309ac3a41b64e26e0b21e6854c2ab86880", + "0x02f87a831469cb8305a9f2850ba43b7400850ba43b740e82520894388979a907fee4c8083aaffb9f66bc33001c1d4d88016345785d8a000080c080a03e665778d6119573f9d73f254ea1b1b1cd5aead91af2362f5c0979f7db12130ea03996af735c9438864fe07f030120127bec84cdbbbaa47e0cdbdbc82781e6c7fb", + "0x02f87a831469cb8305a9f2850ba43b7400850ba43b740e825208948aa2b8622c1223a62fbc1119fa2b483ab41b250b88016345785d8a000080c001a0c3184207ae715b919e8ef9905270abd35a252f9cec952a0c87678c5b9694b07aa001db5fa63a38a21cee7d20ecf86a565a59f1bff1d703b31b2477725f25ee0d67", + "0x02f87a831469cb8305a9f3850ba43b7400850ba43b740e82520894388979a907fee4c8083aaffb9f66bc33001c1d4d88016345785d8a000080c001a0c5542e77e67bb7f6d83fbabb4a3b7595a84eefb87bb424a655ce2411666e3174a01fada2822c62e881d64d51ccec6725e0ec2c696fa00328e5e26c388e8f5c0024", + "0x02f87a831469cb8305a9f3850ba43b7400850ba43b740e825208948aa2b8622c1223a62fbc1119fa2b483ab41b250b88016345785d8a000080c080a062b365903b13132ba2e770f10a29bd3cdc040d1a80755b39877da18127818ee8a04d69280f0e74e0218e933740048a5b027b2f2dae616e1d702b58b6fdaf6717a3", + "0x02f87a831469cb8305a9f4850ba43b7400850ba43b740e82520894388979a907fee4c8083aaffb9f66bc33001c1d4d88016345785d8a000080c080a0807ef5bad11e86311b47de95ff460d18069dc1c3563598f5a9ef90797977ec51a0686b2f859e98e36acf1043b1cb6f8ca473161ad069ea271afa95dfc82f9a5a99", + "0x02f87a831469cb8305a9f4850ba43b7400850ba43b740e825208948aa2b8622c1223a62fbc1119fa2b483ab41b250b88016345785d8a000080c080a0e578708934c91ce611c8b1a567a80157c9ed33851de29c2e8f4729514aa7e5f9a07f548cda88eb85b157e95fa27235a0a62077060c6461ae5b61577f2dc0e8df4b", + "0x02f87a831469cb8305a9f5850ba43b7400850ba43b740e82520894388979a907fee4c8083aaffb9f66bc33001c1d4d88016345785d8a000080c080a08e3b7361fd716f7eacfe24dd735c95220765e6b2096124abe7e6fa1722a0c950a016014241cb4fac9b8356cd6db1cf6465b25d8aa0303405b7fd9ae308cf27658f", + "0x02f87a831469cb8305a9f5850ba43b7400850ba43b740e825208948aa2b8622c1223a62fbc1119fa2b483ab41b250b88016345785d8a000080c001a074c49661866b0d0535e2e8f87f4701b8a08713c4f624a826f29de959050a218ba01d80d4bd1e0fa39345cd73b49c3cad45f1e27cf1dbc317bd479c52f53e84fb72", + "0x02f87a831469cb8305a9f6850ba43b7400850ba43b740e82520894388979a907fee4c8083aaffb9f66bc33001c1d4d88016345785d8a000080c080a07de2e24e47dbc7dcb53df1723e211117b2976a2989c5a6ad9c4680f071ee0a14a01a0aa3e1767b31d43fdb0a40fbb43a45ff8b2b148394f7c5aee2f98297aa934b", + "0x02f87a831469cb8305a9f6850ba43b7400850ba43b740e825208948aa2b8622c1223a62fbc1119fa2b483ab41b250b88016345785d8a000080c080a08c9e45618b311ecd8459b552ca8e00288b2a3fe31a597b47d15bedbbee2b4e8fa02723b2b9704bff44e14f7980b21345fd4faaf7f8a90638fe61ad920881d76aaf", + "0x02f87a831469cb8305a9f7850ba43b7400850ba43b740e82520894388979a907fee4c8083aaffb9f66bc33001c1d4d88016345785d8a000080c001a0bf1807c055dcefb9038d693f23c2309d1d6e50f7cf7de83457750303b3cbf00ea02ccbf7913b8d8bd39eeaf7392b91963a889d9c6f6ca08fa5378609e76cd8b24d" + ], + "withdrawals": [ + { + "index": "3670321", + "validator_index": "49760", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "306139" + }, + { + "index": "3670322", + 
"validator_index": "49761", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "306139" + }, + { + "index": "3670323", + "validator_index": "49762", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "290306" + }, + { + "index": "3670324", + "validator_index": "49763", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "297942" + }, + { + "index": "3670325", + "validator_index": "49764", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "306139" + }, + { + "index": "3670326", + "validator_index": "49765", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "306139" + }, + { + "index": "3670327", + "validator_index": "49766", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "306139" + }, + { + "index": "3670328", + "validator_index": "49767", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "306139" + }, + { + "index": "3670329", + "validator_index": "49768", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "306139" + }, + { + "index": "3670330", + "validator_index": "49769", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "289749" + }, + { + "index": "3670331", + "validator_index": "49770", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "290592" + }, + { + "index": "3670332", + "validator_index": "49771", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "289746" + }, + { + "index": "3670333", + "validator_index": "49772", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "297662" + }, + { + "index": "3670334", + "validator_index": "49773", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "306139" + }, + { + "index": "3670335", + "validator_index": "49774", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "298505" + }, + { + "index": "3670336", + "validator_index": "49775", + "address": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "amount": "306139" + } + ] + }, + "bls_to_execution_changes": [] + } + }, + "signature": "0x98c35de19dda861e7456c0674afcbff1fa0daad8e97b07507d92224611646cb33e9389a2500dfd8615ed4eb8856d619a01a35de6c8d290ef7de0e6bc0a5584925e000c6d609a747671160fd4c2ba049a2397976531dd697eba182ef420f8a8a2" + }, + { + "version": "capella", + "execution_optimistic": false, + "finalized": true + } + ] + } +] diff --git a/fixtures/tests/modules/accounting/test_withdrawal_unit.py/test_returns_empty_batch_if_paused.json b/fixtures/tests/modules/accounting/test_withdrawal_unit.py/test_returns_empty_batch_if_paused.json new file mode 100644 index 000000000..30ed5bfa3 --- /dev/null +++ b/fixtures/tests/modules/accounting/test_withdrawal_unit.py/test_returns_empty_batch_if_paused.json @@ -0,0 +1,47 @@ +[ + { + "method": "eth_call", + "params": [ + { + "to": "0x8D49f1b4AF30598679D4D37Be4B094da1b459b82", + "data": "0xa3a3fd5d" + }, + "0x90659c38ec3b064b0196dee00903424a75185ca21c37bdeca79254042a372777" + ], + "response": { + "jsonrpc": "2.0", + "id": 0, + "result": 
"0x00000000000000000000000000000000000000000000000000000000000005dc00000000000000000000000000000000000000000000000000000000000001f400000000000000000000000000000000000000000000000000000000000003e800000000000000000000000000000000000000000000000000000000000000fa00000000000000000000000000000000000000000000000000000000000007d000000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000064000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000004c4b40" + } + }, + { + "method": "eth_call", + "params": [ + { + "to": "0x53Fdb8445af417103E2f9e04bD935D7af0692Fc3", + "data": "0x693ec85e0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002c46494e414c495a4154494f4e5f4d41585f4e454741544956455f5245424153455f45504f43485f53484946540000000000000000000000000000000000000000" + }, + "0x90659c38ec3b064b0196dee00903424a75185ca21c37bdeca79254042a372777" + ], + "response": { + "jsonrpc": "2.0", + "id": 1, + "result": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000080" + } + }, + { + "method": "eth_call", + "params": [ + { + "to": "0x4c1F6cA213abdbc19b27f2562d7b1A645A019bD9", + "data": "0xb187bd26" + }, + "0x90659c38ec3b064b0196dee00903424a75185ca21c37bdeca79254042a372777" + ], + "response": { + "jsonrpc": "2.0", + "id": 2, + "result": "0x0000000000000000000000000000000000000000000000000000000000000000" + } + } +] diff --git a/fixtures/tests/modules/ejector/test_ejector.py/test_ejector_build_report.json b/fixtures/tests/modules/ejector/test_ejector.py/test_ejector_build_report.json new file mode 100644 index 000000000..58d71f4bc --- /dev/null +++ b/fixtures/tests/modules/ejector/test_ejector.py/test_ejector_build_report.json @@ -0,0 +1,17 @@ +[ + { + "method": "eth_call", + "params": [ + { + "to": "0x64E79C2E3A112e9CDc055e64afba9f1a8f0aB1Ef", + "data": "0x3584d59c" + }, + "0x0d339fdfa3018561311a39bf00568ed08048055082448d17091d5a4dc2fa035b" + ], + "response": { + "jsonrpc": "2.0", + "id": 0, + "result": "0x0000000000000000000000000000000000000000000000000000000000000000" + } + } +] \ No newline at end of file diff --git a/fixtures/tests/web3_extentions/test_lido_validators.py/test_get_lido_validators.json b/fixtures/tests/web3_extentions/test_lido_validators.py/test_get_lido_validators.json new file mode 100644 index 000000000..226072565 --- /dev/null +++ b/fixtures/tests/web3_extentions/test_lido_validators.py/test_get_lido_validators.json @@ -0,0 +1,32 @@ +[ + { + "method": "eth_call", + "params": [ + { + "to": "0x0Ed4aCd69f6e00a2Ca0d141f8A900aC6BFaF70F0", + "data": "0x6183214d" + }, + "0x0d339fdfa3018561311a39bf00568ed08048055082448d17091d5a4dc2fa035b" + ], + "response": { + "jsonrpc": "2.0", + "id": 0, + "result": 
"0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000b099ec462e42ac2570fb298b42083d7a499045d800000000000000000000000000000000000000000000000000000000000001f400000000000000000000000000000000000000000000000000000000000001f400000000000000000000000000000000000000000000000000000000000027100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000014000000000000000000000000000000000000000000000000000000000640f6e3c000000000000000000000000000000000000000000000000000000000004394b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000154e6f64654f70657261746f727352656769737472790000000000000000000000" + } + }, + { + "method": "eth_call", + "params": [ + { + "to": "0x0Ed4aCd69f6e00a2Ca0d141f8A900aC6BFaF70F0", + "data": "0x3240a3220000000000000000000000000000000000000000000000000000000000000001" + }, + "0x0d339fdfa3018561311a39bf00568ed08048055082448d17091d5a4dc2fa035b" + ], + "response": { + "jsonrpc": "2.0", + "id": 1, + "result": "0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000003" + } + } +] \ No newline at end of file diff --git a/fixtures/tests/web3_extentions/test_lido_validators.py/test_get_lido_validators_by_node_operator.cl.json b/fixtures/tests/web3_extentions/test_lido_validators.py/test_get_lido_validators_by_node_operator.cl.json index 8aa1cfd46..1831c9711 100644 --- a/fixtures/tests/web3_extentions/test_lido_validators.py/test_get_lido_validators_by_node_operator.cl.json +++ b/fixtures/tests/web3_extentions/test_lido_validators.py/test_get_lido_validators_by_node_operator.cl.json @@ -971528,8 +971528,7 @@ } ], { - "execution_optimistic": false, - "finalized": false + "execution_optimistic": false } ] } diff --git a/fixtures/tests/web3_extentions/test_lido_validators.py/test_get_lido_validators_by_node_operator.ka.json 
b/fixtures/tests/web3_extentions/test_lido_validators.py/test_get_lido_validators_by_node_operator.ka.json index 51fd947af..bf645b2f1 100644 --- a/fixtures/tests/web3_extentions/test_lido_validators.py/test_get_lido_validators_by_node_operator.ka.json +++ b/fixtures/tests/web3_extentions/test_lido_validators.py/test_get_lido_validators_by_node_operator.ka.json @@ -1,40 +1,40 @@ [ { - "url": "v1/keys", + "url": "v1/keys?used=true", "params": null, "response": [ [ { - "key": "0x892b41853e62c0e6de7e27bc08ed2883a45db1139589e71c6d1859bed2e54615e16431bb60f4e991738fc30a83f22bc9", - "depositSignature": "0x975f64aa7491c5c17a781ce50049597c636cbb84d72ff5580efc3f86b229e6ad0b3a078f09e9382f193e2bc4090615e115448f9707d63a48c48ad6bec01cac3f431cfda2bba65d35fcd19556a496ea13ba7a5576902dddb6e80264b842eac213", + "key": "0xa9b549dcc075c73500bcb6b6a5438b5f682635f224da6fd22154f5a7a482ecc3dc1641a4749f75e40a423bb2d41e8a18", + "depositSignature": "0x8a212ddf1fc2cecbcde5740948f4a801003b10f8b873f9a96cf055eb7e4e05c6012c4919f8f3186096296f4f577ad0f00cf2cb2582011eb8cf82156487f7d77faac29d7fb2dd8053c288d356ec5f6ab22cc340b421fe57965750edfce5b0d0b8", "operatorIndex": 0, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0xa4105d83e3e7f38dba1c1f115c976266d333de1cbb22d94ebe66aabe1218d7d403a6c0b584c1c9d3fde29fe970648737", - "depositSignature": "0xb9c559b1aa6c42613ef3bcec0fd7827aad50c63965c653a8a4187d0df4563fdb353a7620a2e26aed5ca72ae0dad6133f0006160b6c53db5833adb9985ac6b58cab2dcd8fb4e9b1962baadc3e0aa82feea0217a404dc99a16962866a3da578b2f", + "key": "0x841bfb37af697c934db56038f14faa75d55bb53cd32f0f6602852982861535d634bf29f26dd51ddbfd7da78d5ff4f6e6", + "depositSignature": "0xab44618f3bafe2b06dd397e3261d9bfaf503fb16c76ea35b2f74438361ab4989b9f144ed00b4fc0e696981f78a10b957044a718b3d0dfa1a5285835fbc1e62a8a1fc78da1266b5da6d1e50116f7c6f1d144e054071c286e57878931566fdc58e", "operatorIndex": 0, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0x84da91680276928029f425fd4938deec2ab96fd78a7c75d46f522acdae9c0765d098afa7522cda148d4c5684bddfdd0e", - "depositSignature": "0xb85307ebbff247a6f8e41e666cec13c5093ce1e0529f8d92492cc3c393b802e4b66986da36b260b23f61cca55e175b5c0a48355abc03befec149fd131aa4dfaf8104acc96d1789707eae16582741a4cb0c1cc6983ac952a39e9ad47a4d7ce255", + "key": "0x900d42cc82ffbb7b9c3c474899bae0015675182e3b3175157c951578f14fe518d614e5271fabc36fe122ee5e60472b2f", + "depositSignature": "0xa7870f6adc3b1fee4a5c5894eed9e6338e10ec587cdd0258f82578263d92ac4e52d27c0377e0db509bebf61972fadbc204de23c03171a6b1ca23528d8cf7318f199eadc21b4e56721b6ede51ee7e6778f3472c91e81c72c34d319278a067739c", "operatorIndex": 0, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0xb392be0ff2cbf283a4f38d1d4d772bc330890f9e3785b1c36fc103528c75e16ff2e4c425949c6de65e9e56f85bbad78b", - "depositSignature": "0x8e8a40bcc34b1ef31c03950888175c6f71081fe59a1aacbc03d7ab254b27863779e7cadb8894f4ba2eea905f72517fbc0f6d3d46adc7006f9983a1ebb990871598abdb3be09442cf197d4f43fbb7836b7c6b914d136a75c89ec8aa2027c9f66b", + "key": "0xa70cee82a5430597fbfcfbb7bea97c6e9651a4ec6a5541358548bf18b24b798bd0df9fbbb204ae977b7bb78e642760b1", + "depositSignature": "0x8020ccf3af4ec6c62bb0f96ffc8e270e631b2fc5d24a0e5c294c92f918e560349bb6ab0bb37b2a1d5ebf0cda69c3e118073c8ad9e82a54c8b3df44866e018663d0490fb5743affa4b447d3c1ff16015a0d0791f2da569dbb9e5f37c6add2109d", "operatorIndex": 0, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": 
"0xa873550fea1f2ccbbd3c71f5beb3554b2e05533cdf5264e9c13118085943157d629f5287765d57f21b5984e8ecdd0f1c", - "depositSignature": "0xb2a98bfb34da2fa834d62089c6f8f4b1d234714a6109823c9844d3df09ddc8e4791e0a05a55aff0cc14d13ce25ba3ff50490e7a372f6ab3920dcf1b4432218127c83cd8ff6cda9ac29bd2637ff2afd2d607e97d2b32ba5b1f05975f88671ffb2", + "key": "0xaf4298bc48bd43697e65007bfbe3eec2ba7c1622512730ade3692b490021d0421fa9ccbdc5f6ec66f9f8835e7bbee91b", + "depositSignature": "0x876a7554a0d56b0caf7fea05b30c0a0a8d055f5be7de2c61f37a8ff3fc0d2f60bd3e2a285cb69da54dd01cfa8604b24f13ea6a623c36527467aa5f93723891b49512a61d408b28a8d124698aa2ebfd0b9999b8337e1befea217560312f14e195", "operatorIndex": 0, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" @@ -47,43 +47,43 @@ "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0xb843f510d9d68a18159226dad197398e701923aeb5448ca234a2280b8ba7fe895c246c518e101401c8a4190377246306", - "depositSignature": "0x9146886b30c66a6191768eedda96e01dff3474b68231574745955b234554bfa11a013ef2b72c395cef152f5a1c6d08061159a0a801921c892a456b49cb6f5b882706f1e70160e434c93ce9a1f7253295d83660083ede776d05d7c8722e333085", - "operatorIndex": 1, + "key": "0x886aafdb2a0845ca2c7a6fc4c3cbbfaacb07ba62944e1068800ad471133eed4ba30f467b248137e66471e622be6f8798", + "depositSignature": "0x902b3a76787d10037aac4d904b0f8aeb1a17773c99885530e665f6042ab3b0f1ff9efda5a358e8e31d58da4a1f774eb1178fcc75af92b3427a72bfa310c3450bd9969f42a8aab6ff6093f7b85f8ab33affd8b2ed278c28a017617878faa811d4", + "operatorIndex": 0, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0x8d72954c07cb58d2b260b36b5236ef057198e88f3e73eb79f3e316219d44dd251032ab2b8875834a9797f0e0d8df8e7b", - "depositSignature": "0xb364868844a766087243589b54c8d544dc52c482a8d8d96def7a576000f7dc00edbeb55663bc9babf28b9f7c69b09c471485d8d4eea534a9dcead9a69513b84988fd047b5ca8f7ae689d18dd9e2bb351d58630d575ea7aabba631fc404daceab", - "operatorIndex": 1, + "key": "0xa214fa45c32f20218eb1b1f6a7365d174e1224cb757a0e33c30013db364413073fdd5671c377a1dcc7886536205b9ff8", + "depositSignature": "0x81f38ba546bb2736830be219b88c4542a4fdded56810ee14c705c7e4e20676cf0dd4057187d98b455a56813a3444dac902c7782bb08ce5c8fd128495a4b05987638fa159af4216be5984d01037070da6089a134f84a5fbe4c46a0171833a5694", + "operatorIndex": 0, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0x886aafdb2a0845ca2c7a6fc4c3cbbfaacb07ba62944e1068800ad471133eed4ba30f467b248137e66471e622be6f8798", - "depositSignature": "0x902b3a76787d10037aac4d904b0f8aeb1a17773c99885530e665f6042ab3b0f1ff9efda5a358e8e31d58da4a1f774eb1178fcc75af92b3427a72bfa310c3450bd9969f42a8aab6ff6093f7b85f8ab33affd8b2ed278c28a017617878faa811d4", + "key": "0x88cb023551f6c6b2357aff3df7cdc94e46094f941346cc6c8e6ebaa66fbdc126d58003853cf747ba847db16a13054071", + "depositSignature": "0x808ba2d9b54b847377103e2023f0198a527e1dffac695c973cf49084529b1d10c7c9a04ed7be908e9cc69c2a77ce92da1981a3573ac1f94285f2bbfc8a5e7ee59266b12bd2f996cb712827ce63493396d1061c19a5452460778cdf0dc64c1a01", "operatorIndex": 0, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0xa214fa45c32f20218eb1b1f6a7365d174e1224cb757a0e33c30013db364413073fdd5671c377a1dcc7886536205b9ff8", - "depositSignature": "0x81f38ba546bb2736830be219b88c4542a4fdded56810ee14c705c7e4e20676cf0dd4057187d98b455a56813a3444dac902c7782bb08ce5c8fd128495a4b05987638fa159af4216be5984d01037070da6089a134f84a5fbe4c46a0171833a5694", + "key": 
"0x951a3f1cd4c479182b415b0b018bdd280ac99c9e503aee8f05a3687991c29b88804a92b49a83eb2537e4b4c0251c12df", + "depositSignature": "0x807dec6e591f9d9fc010be5c3523641eb65409e7cb2e0870e23c7e92aaad0f719ff7937f158a8fd42b3f956d6bc69f8705ac0bd3a3bba35284bbde0e37c58a7a5e981a609579b9b5db1421e30b297ffb82fbf2ec74db5aff7f1391150b9cfc3d", "operatorIndex": 0, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0x88cb023551f6c6b2357aff3df7cdc94e46094f941346cc6c8e6ebaa66fbdc126d58003853cf747ba847db16a13054071", - "depositSignature": "0x808ba2d9b54b847377103e2023f0198a527e1dffac695c973cf49084529b1d10c7c9a04ed7be908e9cc69c2a77ce92da1981a3573ac1f94285f2bbfc8a5e7ee59266b12bd2f996cb712827ce63493396d1061c19a5452460778cdf0dc64c1a01", - "operatorIndex": 0, + "key": "0xb843f510d9d68a18159226dad197398e701923aeb5448ca234a2280b8ba7fe895c246c518e101401c8a4190377246306", + "depositSignature": "0x9146886b30c66a6191768eedda96e01dff3474b68231574745955b234554bfa11a013ef2b72c395cef152f5a1c6d08061159a0a801921c892a456b49cb6f5b882706f1e70160e434c93ce9a1f7253295d83660083ede776d05d7c8722e333085", + "operatorIndex": 1, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0xa51c74e9cfcc7d1cb2936a45934498f1db272bcba44018310041650a3b208ec880212f4c92831ce3addede128511c4d2", - "depositSignature": "0x94f9440c54532d79871bfaddebf350a2a75641d28f8b3fd8c837c41fb00cf5f08cf328a44359f78a1c2c989c84af0f4b0e59d3ac279a877d763d3a95a8516a966f577152578465b22c23d9e1f0752f74bb919e2deaadc2a7fd1f5154e9638f30", + "key": "0x8d72954c07cb58d2b260b36b5236ef057198e88f3e73eb79f3e316219d44dd251032ab2b8875834a9797f0e0d8df8e7b", + "depositSignature": "0xb364868844a766087243589b54c8d544dc52c482a8d8d96def7a576000f7dc00edbeb55663bc9babf28b9f7c69b09c471485d8d4eea534a9dcead9a69513b84988fd047b5ca8f7ae689d18dd9e2bb351d58630d575ea7aabba631fc404daceab", "operatorIndex": 1, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" @@ -110,47 +110,257 @@ "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0x951a3f1cd4c479182b415b0b018bdd280ac99c9e503aee8f05a3687991c29b88804a92b49a83eb2537e4b4c0251c12df", - "depositSignature": "0x807dec6e591f9d9fc010be5c3523641eb65409e7cb2e0870e23c7e92aaad0f719ff7937f158a8fd42b3f956d6bc69f8705ac0bd3a3bba35284bbde0e37c58a7a5e981a609579b9b5db1421e30b297ffb82fbf2ec74db5aff7f1391150b9cfc3d", - "operatorIndex": 0, + "key": "0x8a9eab6c8603370c80ba6b5cf41f04ac6804a00198e4d04491afda106feb3014dffc890b60dce13cce26b069a274e8bf", + "depositSignature": "0x983fa385a93d27d68ebba242235f0fd2b10987a4cc71c59b276db55c69bf709d8b9fbb7f656c63a3d4ef1848ee707e0e170c0af2ada666b92b9995d9a714d04af670b698c3c8eca712e6d1521c6b586d96e5203bc24c4473a15ab94140a9858a", + "operatorIndex": 1, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0xb9b4dbde35ae401558a646a89daf12c19d77a5f2c07777399074f357929cf13b4eb042591608ae9b4c670cdef81abf8f", - "depositSignature": "0x8fffea8b0663388023065b4cf3609834d24b9fe49b548c6eb4e5e54e38192619ae607ae4238e02f6c521eec6d966a4fd1050813772788570c666364c9e05bf1f5aecd21652463cbeb39c35de55046984d9b5389f4bf6dc04737b8aacc1db0d58", + "key": "0xa51c74e9cfcc7d1cb2936a45934498f1db272bcba44018310041650a3b208ec880212f4c92831ce3addede128511c4d2", + "depositSignature": "0x94f9440c54532d79871bfaddebf350a2a75641d28f8b3fd8c837c41fb00cf5f08cf328a44359f78a1c2c989c84af0f4b0e59d3ac279a877d763d3a95a8516a966f577152578465b22c23d9e1f0752f74bb919e2deaadc2a7fd1f5154e9638f30", "operatorIndex": 1, - "used": false, + "used": 
true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0x8a9eab6c8603370c80ba6b5cf41f04ac6804a00198e4d04491afda106feb3014dffc890b60dce13cce26b069a274e8bf", - "depositSignature": "0x983fa385a93d27d68ebba242235f0fd2b10987a4cc71c59b276db55c69bf709d8b9fbb7f656c63a3d4ef1848ee707e0e170c0af2ada666b92b9995d9a714d04af670b698c3c8eca712e6d1521c6b586d96e5203bc24c4473a15ab94140a9858a", + "key": "0x8e5ec389388b5191a818d5d0104251ad4837dbff3ce9c2fd8ec5f8dcaa07f55f8fed4b87bb6d66a12b52b2276cdb219d", + "depositSignature": "0xb4b7e1956a2b73af6e791c5a97e969d3be7cdc4c7675f39c9f8560be374864180b6da69cc6d223864eafadc1531426e517b6e93d72a523f1c57da2d092f14bb1581a22f8210ddd62f3c46ce49882b327fb51849f40fd0d728ad283aca72178b7", "operatorIndex": 1, "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { - "key": "0x8e5ec389388b5191a818d5d0104251ad4837dbff3ce9c2fd8ec5f8dcaa07f55f8fed4b87bb6d66a12b52b2276cdb219d", - "depositSignature": "0xb4b7e1956a2b73af6e791c5a97e969d3be7cdc4c7675f39c9f8560be374864180b6da69cc6d223864eafadc1531426e517b6e93d72a523f1c57da2d092f14bb1581a22f8210ddd62f3c46ce49882b327fb51849f40fd0d728ad283aca72178b7", + "key": "0xb9b4dbde35ae401558a646a89daf12c19d77a5f2c07777399074f357929cf13b4eb042591608ae9b4c670cdef81abf8f", + "depositSignature": "0x8fffea8b0663388023065b4cf3609834d24b9fe49b548c6eb4e5e54e38192619ae607ae4238e02f6c521eec6d966a4fd1050813772788570c666364c9e05bf1f5aecd21652463cbeb39c35de55046984d9b5389f4bf6dc04737b8aacc1db0d58", "operatorIndex": 1, - "used": false, + "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" }, { "key": "0xacbef54f8565d16e5075d91c7643889adc15ffaa2396c57bd2074e44d0e6c22f3cc6ed93a4e031df26a43054ef6a99ff", "depositSignature": "0xb0461e2da3f7123e43a123d2766eec2a2f0e8aabd398ad8289757b1ed7b77d8a74ba6368f3d27c1c4aa6ac7bc406188d0c500a38af2de69e7e0f88e02b84d2b72bbd6717af3fef687bcc3aef68b66ae07f8f9bbf43689ccf5ecd522c4629a4ac", "operatorIndex": 1, - "used": false, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x8d3de9906f9ad334c263334bff84a4e066d14b8e4fd40982054736320f379af4c7b9c4daa001cafde29355de4137cfe3", + "depositSignature": "0x95d7520090d0cad858306a1e9dec1a7cc321f75485b6ba174571b686e6c1fd4e6ef69cb1aa0ebee99a067d645aa47670122285f651b7689f511641e69ba2c4c25c3e7cbec8cf513308fc7a5c30444267eac3e635aeab6d1d1a6aa862c4f1f276", + "operatorIndex": 2, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x81b3f2692be37ad80b004806189fcb1f4440704ca6fd61a69149a91c19b0305585247399912678ff0c58289178146276", + "depositSignature": "0x8cdd274431c390d1f3487200196ba7f04c81aaaee8b382d155dbd43da9217973a21f31f725e1c255c19c6120ef20f03215285386817779a39590aa1fc558ae677cccbcd27548cfb0214c6ea77d602d59c3cb3ed5ed06a6391c3f8df09028ea98", + "operatorIndex": 2, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xa12f7939567824576b6ba6f6fe1b45a6c0f902a221624dec8a3e72d1618f79b31607a92b508b652f3fc48dac6289ea95", + "depositSignature": "0x92e7a15a324c42b09ee033d119f5a8bcb79f647ed9f0bffdc9bad8fa39d961461e5cb7f5bb47423397ce7faa93982c0416bd42cec7eb7d5c775f69bcabd7531445f7dc09c5164908c1f52a42b9fc38a1b24459c9cd0ce89e3cbd3fb5e5f82f45", + "operatorIndex": 2, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x85ff235eac70c7886fe9f2396cb43c04a1c200912df5ae40a2292086fc743e6e9dac11a0411caaad02b325853c2b95de", + "depositSignature": 
"0x9682620588fc97ef086c6bb9ea535b9effc5487c8b95884ffd6cfc341adc57ac479c42ea00fdb697d9924843b7fdd41e0f9031f1dc0340e51b093784a16eae2b68cb5c8e9ed23ee7f873f0d0da1bd95ea78827ccc25804020c06edbde2dbf5eb", + "operatorIndex": 2, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x8a4f74ade93fbd8bba3fc5a21a9750d9855ff741523850a076930fd4f3cc15304374f8e497c61f7bb232dea003852e45", + "depositSignature": "0x8b1a111eb331ce78e67713ef159731a92b5af9448ba31964f776bf80ae8acad13966ac72f741fd52b4320780348125dc111b9988abf025d624b84178b21611665798b3404f2477e5ff9e8201b61796d34a2448883758f6ff08381e41ca4f395b", + "operatorIndex": 2, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x89cdc0e0ba2732a389c395aa79ebdf72be08d6b442d72de835794e4bafb8f87fdf8b3203046940c7047f532c81af6a43", + "depositSignature": "0xb1c6f2789b749590a14a0a3e6e77a8f9e46eb7fb1b49dc1e54d0264c90351b1317d0ba08c95c20e55a2eb47d2c3e60a815854a3ee906e87f2dcc3cbfc2830fd1fb80575da3f15282c222b1458893032a25c7be9dbeb3d2a3a4561e9e6f1a1780", + "operatorIndex": 2, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x8218936940149f75c111105d3f8371ecc47a3f73b42f3f494ed09bbc74a198ca8afe56b712887b304363457620bcfbd1", + "depositSignature": "0x83a18e0250cf7676b432cccbdbae71d593eca9ab3743e2e76347a2f1ef73ebe618d7cc4c48599cf9550826749c310e5306aed12bb84bcf344041b643183ef3742aee826e2ff36db138879d859b8a15cca2b15a19fe3eeff2cff12f15d11d6b0b", + "operatorIndex": 2, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x8df3aaad69a73e46b74fe7f4df004fd4f8b381e7d042f6a9dc4097e07504cd24170655748661fc753aa3d64f1d028e18", + "depositSignature": "0xa954b694a9b8eba05ef8c1d43954888bd6a8784d0d81cf689e4fbf5f7048bb3add90729351fc394c1ef4ea01ae920c6900a82fe677b71327f5332b3fb2b75339d790b421a287d6bf28950cc5a2c942f5dcfc091ec495d22e5d39640e83d127a5", + "operatorIndex": 2, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xa31ae20208f01331f01e51b92237a2c5e92fabd226a7a8ce1f550b3c953e04b6f41bb745edbe9f0e74796cbd386002c2", + "depositSignature": "0x8e9133cb508247a2acffa1e8685fbe4a222edd02590abfe78d74108f0954331b089d380476aeb7f32d634578769761bd0a1324aad3bbcd95e1e6d28c9838f47f602a2af173e3f7d8b8612c2076d39696d9e51042297455c2b048415987414afe", + "operatorIndex": 2, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xaaf55527deb715d86750d10d4a6a2d5260a51a6c0c97fd8c1ceb2871482904466bd752cd0961fa5a2d6c7e8b3f7496c8", + "depositSignature": "0xb84b9e1860d290171590460ad893255f1b3ea98f496f20e7238fd6f616277dd426832f6a0ba910702f4f0b90606dce2919763f6b74289b4a49538a323409bb5e76d25a57adf69bc691fe1ade5e8d6fa739fc56a56689a781289494320da41907", + "operatorIndex": 2, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x839ff5416119f3e1b10d90be0cbac9fc1d8bd5d3f5c1603058d4365eb8c01800ab5d6b0c56229ffa622993162a338581", + "depositSignature": "0xb4dafd96f405775e5c725c4fa1145114089cd6505ef198153fbcb5014921ac295f7401cc8824f706e93cca88dab9fba5054f29071d9dcdbd30ade1eb2627d0e6034c03388861d02ab1cd33617eb86df1ad74341e84f35f796cdd2f95f6e2f6c0", + "operatorIndex": 3, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x84723159675f611109a20593eaf5870eb3eb150a54bdbdfc59b3e26e430bda4fafdeef39ba798b22ef1dc119c971f790", + "depositSignature": 
"0x9833c19b809af2135817711ab5b5e8c4fd00803aa255b5522c19aaaa1eb99e5588be7bae5fb65a9763db644ccabcea09070ce66a0e7b192786d28a3ef8de4cb28dd648071104831296c6efd8f0d1033042bc9fade7c3a7a28d97c97fc808091e", + "operatorIndex": 3, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xb74894beb2c95e7a185a47c872e4b22de745779ed57ffcc986ea73596d27797ee8a82500bfb287038c7a0fb715cd49a5", + "depositSignature": "0xa4e6ca84c65ff0a1f1599de3d79473665b8117d7fcc56d469e9edd293e346c4f63c12acdc0e12f1ea678908b17bed4ce0de5fc196e6801ee1c141da732445a25880a1a370ecc5d4b5a1d1568a9348e947fa618a7ac4fda9f4fddb06c944cb5fa", + "operatorIndex": 3, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xad45158d28abde17f0819c1710d1b0af09e8ce51f8f30be876bfb9c0a5c0501f06484b57f9fb1f46376e66d9a0b54d41", + "depositSignature": "0xa643ebebb64a5d03e0727475159044fb3254fb0717dc4c125b1ea27ac3bef884b7b26a888bec66c91ade13510195a375176d2d30e124afff001213dcf19aa02ca9c3eff532ead0886e082bd6d07ad781fbf4bd6df14650f253fecb99ca07747b", + "operatorIndex": 3, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xa5e03ca5173735eee5eabe6a733353944038d2793862d1aa279c09523e8c11d796ee7a8ecdf6d229db98f378c605d4ca", + "depositSignature": "0xb157eb775985b5d914f1cbd9a1fe70c75fe13af4b5154e94f4474e74fbc872071ca49cac9c6519354420dcf729902ed514d9240b7c59debab21bb097e3d37f523da53029bb2146a00e7e1813801c53d6f2500a8bc868c39ec96f7cfb854c154e", + "operatorIndex": 3, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xa9a1be5c4da73bb7ae5bc9a92aa6a13e21c90f3a7bc154eca73919b726987cad485a1575c05db10ced4eb5188693a711", + "depositSignature": "0x819fb3d2fd1b1266445251259f8a0fac237951bb94ca6a7b6c209d4441c8db11df9f43a5d1f8fb610ed13257a891944d001b4bbee813bfe04a3392d22687b4f997ef4b65cc4ab5f77567034c9fe877e70f25087f389a6a3a5b6089debb302427", + "operatorIndex": 3, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x8db879ec824cc88aa013ac16cd91dcd18bba7dc543c83093a1d142bc52402b86c00e5bbbb7e622cb731ad0c3a0280e27", + "depositSignature": "0xa5bfe83774f872e16575d95fd44bbef27a6c9a7d6edd98d32933ca8763b448279a908a59123574310b477e962a764bd1125a9d96f1768f7c0055af31b22392d94e70e0fc087e5a264ec8151d10cb68b376767c3c115376010b31498b3486ccf3", + "operatorIndex": 3, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xa19cb4313d0b479a0e465c4907d33e71a341e8df3d601108d250b0249f2f33de90cba9b84f31893cf5515f3a435a074c", + "depositSignature": "0x9243c764ad25c1a158b8f7de805fed5344a2668964e9c38cd9ed1e829de505c0381380efe77a1228903f8879fa697f92025f6f86b283f31fba87654d99b93881b246bc96fbd76e37770e6716f7a7d34b55dafbf2afa8530848a3099ab73ecaae", + "operatorIndex": 4, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xa3a9bf7bc18348536fcaac1ea6a99ab34debd728404fe7c3ba8011816980cd66560316bc508c3689140d807a8520414e", + "depositSignature": "0xb61ea428597af060919fec73b98d4d204ebc05f5211ec49538d7d197104b9a73ba5cce0905ff8c4528a7c998ff3d1b5d0044b9800f9a3312f1a4b1bb0bd01d7f51637eec0aaea0cc48aec985718f5d24da60d94a8f2263112cd9d65f6702ccf2", + "operatorIndex": 4, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x874f277436f010cbfbd80b7b11f9c25c344b4d2fcf09c535594a9b54cdd5db99d90a5bb7d1481ff172949b60705aeb0c", + "depositSignature": 
"0x8fa66794a2e8749f02c2ee3e05c1d03bce4d3b429edcba7ec7a91a7f49c1d08271daa865b3f712468e713a561739ff591682e3065c11d2002c804d6d14b15e0832ed814a843e4f8a78ef0a7125118ab4418b184a6cf73cb65f8f8972f790ec6d", + "operatorIndex": 4, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x8aa0c1122df8e3ef30aa0e8e0427fc9a9a7316a095bb6123fa334880553a3dc562db9720ed15dc266a87e6b6531704ec", + "depositSignature": "0x8d7cfd64bf0f3fb5de0cf4a0fecf9273859a89ca463bfc86ad76ed5f90f71b732b5ded4397316d5f9e03b3195a0cdc91045e26be3b5830e967e4c4cdd1e7bc4891d579f416846646f323ed612789b90a8bfe586203c7d09b2b4c5562267ef693", + "operatorIndex": 4, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xa093445fc46f6ee19bc9e91a92644a54106317e71e460c21b4b79b936accbca8b611587ff9c9f35a2e962b7a3ebbfd83", + "depositSignature": "0xa642479b5289ec38f03cac82840867512a8f0ec5e700be390e06922816d778bdf53d8e48229a1a249afa5f5c39c8d0af01286f1c7913bd723921676f8a53a4c43232de7c4b8133574e01d4cfded4c27f7a5cb9e89a16e54d9f7cc5fe73db27b5", + "operatorIndex": 4, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x83a86c4c223117de1730e3880ab313064cdc1d13f231f9e4a7574e5280307db9843db6fbed3d84cd9663c7a9deecfe07", + "depositSignature": "0xb28a0c9448131ae437dfb149f7237f88b6ff68ac2db9437888a029eb448bf66d50947632ae17a9e1d44197b8a80ce414176468be530317c0f5a74cd4b61f0aa0715eb7d13333b1d4c1874a24d741b358a5d90fa911a101772a257a900e5ed25a", + "operatorIndex": 4, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xafb573a00afa0d9affd1ee44b15439335a2f2936480e297f19b788c315027852a3d83cb18a627a0aa7b57d260ba505be", + "depositSignature": "0xa2d8f5a219fa1b046576c3304ce15c2bbec3fcaea2f9cffa667f9d5faef3b50471d589a81b149c6717c3fd6d46264cc613991b723cc2f7c03554378c83a340074cba5c8dc543b7a716112b0b3c396f3d5b63ee6e2308ba8a2b0bec73d18e02ab", + "operatorIndex": 3, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x89c2a0f2f31a056ce25669aa155852836381fbca8e1db81aeb657af595f1953a9517af3a1f11197907bf411b880881a9", + "depositSignature": "0x967bed6f4dc17905037c3b539705c3aabd889807c02721dfbbe22d2a6a8bc9b3a497837c825f066058f4725725ca5aaf04723f9420e6922acf5ff60a0c1bec79bd4c724a14e18c4df3577a5fb32a0a4a79e30e90a8cd3dc784989193a953d680", + "operatorIndex": 3, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x8508fe3cd55d041a4673bca77733c51a54292533364d4be0d46b4810373c0e0f6647c1d08fc45825f2caec0398e9209a", + "depositSignature": "0xb58e6e54f95093075d5eba0e49dadebce1244d1755befaad356a55740c12368c5949d7d17f580d35765a50606545147a037645e24271952cc317ff45c2f61fbbcbda7bbdadb87b4d9a6570a639c4ca0525c9f38652196538b325e2a4210fdc49", + "operatorIndex": 4, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x92c103b8c3aa1f1467bff486510e2fdd9937f272f18173f69599f2fa0038e7e481476484452db06d48dc89ac1b410993", + "depositSignature": "0x97e414739e31ca7127d45bf60892e3b5112dd2496a2a0af2a7d2a6579fbc5a11f06274904a1ad311ff2b9222426471b6119b8609d7d0ab5202669ca13bf22882f87436257b26600ce7b57fdc011a8300d0755320ec90c7b87e55754066f8ecc2", + "operatorIndex": 4, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xb709b550cb5e02bb856813115ef5f79ba5abd0b5a677f5a5417cdbdc3bcb8429449c7eb4577bbf3a211dfaecab5060c9", + "depositSignature": 
"0xa0147bbf64cf1b28c0eb0a63a7ff4084ec344a567a58632e2fc83d7aad1e6e16a60ed2d59421de7ba6a1dac64d3303e6153c2378c3edf2252709fd5373cc69b81193279678766d4893584d62762dfe3658720929ff375b12ea0d4757aa82fd9c", + "operatorIndex": 4, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0x96ac04d5db591b4c1107abf93591222a73451a278f94837b9dfa72066f7016b4246a0b4afc66b43f2e3700100beb56fb", + "depositSignature": "0xae9ca7ff35514d32e8faa612ab25f81851c135af3bc08c65557181c75cf3be1c9eb6480e6b0b1dd3de5c224af195be420bec98b36c23a132fc30ca80ee2833137b76d145f9b41561fbec34b37edfd9ad135ee774f1e594857b5beca675559334", + "operatorIndex": 3, + "used": true, + "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" + }, + { + "key": "0xafe4a2f15b357ca4aa860514f85b8fcd9819294555799bbced3e931f0111126a2a1abee34f46d4a5ca4482c71c8d45c6", + "depositSignature": "0x8740c4fb6bccedf3ac6a9fd7b02b9568674e764ed3d89e89ae8f29c8890aabfa03a32d925f483b9078a676648915cb4105fc1c5bcf8dd2044423b554eb9fe0d47ffa42851fa34c4a8ce27725ff8bd2f833c5115a7efa34dc6ccbef771ccc1cc1", + "operatorIndex": 4, + "used": true, "moduleAddress": "0xB099EC462e42Ac2570fB298B42083D7A499045D8" } ], { "meta": { "elBlockSnapshot": { - "blockNumber": 281731, - "blockHash": "0xa177e936e5341bacae0059e90529114101000e0a63f6bcbdc89608d02fbf19e7", - "timestamp": 1678798236 + "blockNumber": 333865, + "blockHash": "0x23c02d0bada85f217dd505c505665a6409bd64942d796ceb5eaca068554097d1", + "timestamp": 1679481528 } } } diff --git a/fixtures/tests/web3_extentions/test_lido_validators.py/test_kapi_has_lesser_keys_than_deposited_validators_count.json b/fixtures/tests/web3_extentions/test_lido_validators.py/test_kapi_has_lesser_keys_than_deposited_validators_count.json new file mode 100644 index 000000000..226072565 --- /dev/null +++ b/fixtures/tests/web3_extentions/test_lido_validators.py/test_kapi_has_lesser_keys_than_deposited_validators_count.json @@ -0,0 +1,32 @@ +[ + { + "method": "eth_call", + "params": [ + { + "to": "0x0Ed4aCd69f6e00a2Ca0d141f8A900aC6BFaF70F0", + "data": "0x6183214d" + }, + "0x0d339fdfa3018561311a39bf00568ed08048055082448d17091d5a4dc2fa035b" + ], + "response": { + "jsonrpc": "2.0", + "id": 0, + "result": "0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000b099ec462e42ac2570fb298b42083d7a499045d800000000000000000000000000000000000000000000000000000000000001f400000000000000000000000000000000000000000000000000000000000001f400000000000000000000000000000000000000000000000000000000000027100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000014000000000000000000000000000000000000000000000000000000000640f6e3c000000000000000000000000000000000000000000000000000000000004394b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000154e6f64654f70657261746f727352656769737472790000000000000000000000" + } + }, + { + "method": "eth_call", + "params": [ + { + "to": "0x0Ed4aCd69f6e00a2Ca0d141f8A900aC6BFaF70F0", + "data": "0x3240a3220000000000000000000000000000000000000000000000000000000000000001" + }, + "0x0d339fdfa3018561311a39bf00568ed08048055082448d17091d5a4dc2fa035b" + ], + "response": { + "jsonrpc": "2.0", + "id": 1, + "result": 
"0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000003" + } + } +] \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index f47156cee..b7a6896b8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -511,7 +511,7 @@ rapidfuzz = ">=2.2.0,<3.0.0" name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "dev" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -1050,6 +1050,21 @@ doc = ["Sphinx (>=1.6.5,<2)", "jinja2 (>=3.0.0,<3.0.1)", "sphinx-rtd-theme (>=0. 
lint = ["black (>=22)", "flake8 (==3.7.9)", "isort (>=4.2.15,<5)", "mypy (==0.910)", "pydocstyle (>=5.0.0,<6)", "pytest (>=6.2.5,<7)", "types-setuptools"] test = ["hypothesis (>=4.43.0,<5.0.0)", "pytest (>=6.2.5,<7)", "pytest-xdist", "tox (==3.14.6)", "types-setuptools"] +[[package]] +name = "execnet" +version = "1.9.0" +description = "execnet: rapid multi-Python deployment" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, + {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, +] + +[package.extras] +testing = ["pre-commit"] + [[package]] name = "faker" version = "17.6.0" @@ -1273,7 +1288,7 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1847,7 +1862,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest- name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2165,7 +2180,7 @@ files = [ name = "pytest" version = "7.2.1" description = "pytest: simple powerful testing with Python" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2202,6 +2217,27 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "pytest-xdist" +version = "3.2.1" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-xdist-3.2.1.tar.gz", hash = "sha256:1849bd98d8b242b948e472db7478e090bf3361912a8fed87992ed94085f54727"}, + {file = "pytest_xdist-3.2.1-py3-none-any.whl", hash = "sha256:37290d161638a20b672401deef1cba812d110ac27e35d213f091d15b8beb40c9"}, +] + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.2.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -2748,14 +2784,14 @@ tester = ["eth-tester[py-evm] (==v0.8.0-b.3)", "py-geth (>=3.10.0)"] [[package]] name = "web3-multi-provider" -version = "0.5.0" +version = "0.6.0" description = "Web3py provider that makes it easy to switch between different blockchain nodes to make sure application will be be online if main blockchain node will be unavailable." 
category = "main" optional = false python-versions = ">=3.7.10,<4" files = [ - {file = "web3_multi_provider-0.5.0-py3-none-any.whl", hash = "sha256:58cbb61edae55c2e36d53c40455dfa6002805021be967dd38652925abddb1c68"}, - {file = "web3_multi_provider-0.5.0.tar.gz", hash = "sha256:fab888c2043f4ebe385c1f8b804fbc6e04c92f35fce66f873470b1b06d3b2e1a"}, + {file = "web3_multi_provider-0.6.0-py3-none-any.whl", hash = "sha256:edf787d77d9503928ae5e51146b1fb511e6a00b479d86b4fbdb4eaabf057e59d"}, + {file = "web3_multi_provider-0.6.0.tar.gz", hash = "sha256:16cc0c826eec8690e5dda41ded51f1ea9d1117409c76cca8af0342a7874474fc"}, ] [package.dependencies] @@ -3118,4 +3154,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "5d354f440a72d3ca8c3abbbac2203b924cddfd45227dd2cab0df58a54cd556e0" +content-hash = "f86ecbc3229db1ffcb8ba9e7d4935b49000c5544dd31eac4ffc3f8a792f011c1" diff --git a/pyproject.toml b/pyproject.toml index 2a8a65c6b..0a0f30a45 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,12 +19,13 @@ license = "GPL 3.0" python = "^3.11" web3 = "6.0.0b10" prometheus-client = "^0.16.0" -web3-multi-provider = "^0.5.0" timeout-decorator = "^0.5.0" +pytest = "^7.2.1" poetry = "^1.3.2" +pytest-xdist = "^3.2.1" +web3-multi-provider = "^0.6.0" [tool.poetry.group.dev.dependencies] -pytest = "^7.2.1" base58 = "^2.1.1" ipfshttpclient = "^0.7.0" pylint = "^2.16.2" diff --git a/src/main.py b/src/main.py index f491b81a9..760b9f378 100644 --- a/src/main.py +++ b/src/main.py @@ -2,7 +2,7 @@ from typing import cast from prometheus_client import start_http_server -from web3_multi_provider import MultiProvider # type: ignore[import] +from web3_multi_provider import FallbackProvider from web3.middleware import simple_cache_middleware from src import variables @@ -11,6 +11,7 @@ from src.metrics.prometheus.basic import ENV_VARIABLES_INFO, BUILD_INFO from src.modules.accounting.accounting import Accounting from src.modules.ejector.ejector import Ejector +from src.modules.checks.checks_module import ChecksModule from src.typings import OracleModule from src.utils.build import get_build_info from src.web3py.extensions import ( @@ -29,13 +30,13 @@ logger = logging.getLogger() -def main(module_name: OracleModule): +def main(module: OracleModule): build_info = get_build_info() logger.info({ 'msg': 'Oracle startup.', 'variables': { **build_info, - 'module': module_name, + 'module': module, 'ACCOUNT': variables.ACCOUNT.address if variables.ACCOUNT else 'Dry', 'LIDO_LOCATOR_ADDRESS': variables.LIDO_LOCATOR_ADDRESS, 'MAX_CYCLE_LIFETIME_IN_SECONDS': variables.MAX_CYCLE_LIFETIME_IN_SECONDS, @@ -56,7 +57,7 @@ def main(module_name: OracleModule): start_http_server(variables.PROMETHEUS_PORT) logger.info({'msg': 'Initialize multi web3 provider.'}) - web3 = Web3(MultiProvider(variables.EXECUTION_CLIENT_URI)) + web3 = Web3(FallbackProvider(variables.EXECUTION_CLIENT_URI)) logger.info({'msg': 'Modify web3 with custom contract function call.'}) tweak_w3_contracts(web3) @@ -82,37 +83,31 @@ def main(module_name: OracleModule): logger.info({'msg': 'Sanity checks.'}) check_providers_chain_ids(web3) - if module_name == OracleModule.ACCOUNTING: + if module == OracleModule.ACCOUNTING: logger.info({'msg': 'Initialize Accounting module.'}) accounting = Accounting(web3) accounting.check_contract_configs() accounting.run_as_daemon() - elif module_name == OracleModule.EJECTOR: + elif module == OracleModule.EJECTOR: logger.info({'msg': 'Initialize Ejector 
module.'}) ejector = Ejector(web3) ejector.check_contract_configs() ejector.run_as_daemon() -def check_required_variables(): - errors = [] - if '' in variables.EXECUTION_CLIENT_URI: - errors.append('EXECUTION_CLIENT_URI') - if variables.CONSENSUS_CLIENT_URI == '': - errors.append('CONSENSUS_CLIENT_URI') - if variables.KEYS_API_URI == '': - errors.append('KEYS_API_URI') - if variables.LIDO_LOCATOR_ADDRESS in (None, ''): - errors.append('LIDO_LOCATOR_ADDRESS') - if errors: - raise ValueError("The following variables are required: " + ", ".join(errors)) +def check(): + logger.info({'msg': 'Check oracle is ready to work in the current environment.'}) + + return ChecksModule().execute_module() def check_providers_chain_ids(web3: Web3): execution_chain_id = web3.eth.chain_id consensus_chain_id = int(web3.cc.get_config_spec().DEPOSIT_CHAIN_ID) - chain_ids = [Web3.to_int(hexstr=provider.make_request("eth_chainId", []).get('result')) - for provider in cast(MultiProvider, web3.provider)._providers] # type: ignore[attr-defined] # pylint: disable=protected-access + chain_ids = [ + Web3.to_int(hexstr=provider.make_request("eth_chainId", []).get('result')) + for provider in cast(FallbackProvider, web3.provider)._providers # type: ignore[attr-defined] # pylint: disable=protected-access + ] keys_api_chain_id = web3.kac.get_status().chainId if any(execution_chain_id != chain_id for chain_id in [*chain_ids, consensus_chain_id, keys_api_chain_id]): raise ValueError('Different chain ids detected:\n' @@ -122,11 +117,18 @@ def check_providers_chain_ids(web3: Web3): if __name__ == '__main__': - last_arg = sys.argv[-1] - if last_arg not in iter(OracleModule): - msg = f'Last arg should be one of {[str(item) for item in OracleModule]}, received {last_arg}.' + module_name = sys.argv[-1] + if module_name not in iter(OracleModule): + msg = f'Last arg should be one of {[str(item) for item in OracleModule]}, received {module_name}.' 
logger.error({'msg': msg}) raise ValueError(msg) + module = OracleModule(module_name) + if module == OracleModule.CHECK: + errors = variables.check_uri_required_variables() + variables.raise_from_errors(errors) + + sys.exit(check()) - check_required_variables() - main(OracleModule(last_arg)) + errors = variables.check_all_required_variables() + variables.raise_from_errors(errors) + main(module) diff --git a/src/metrics/prometheus/accounting.py b/src/metrics/prometheus/accounting.py new file mode 100644 index 000000000..8c62d3d37 --- /dev/null +++ b/src/metrics/prometheus/accounting.py @@ -0,0 +1,49 @@ +from prometheus_client import Gauge + +from src.variables import PROMETHEUS_PREFIX + + +ACCOUNTING_IS_BUNKER = Gauge( + "accounting_is_bunker", + "Is bunker mode enabled", + namespace=PROMETHEUS_PREFIX, +) + +ACCOUNTING_CL_BALANCE_GWEI = Gauge( + "accounting_cl_balance_gwei", + "Reported CL balance in gwei", + namespace=PROMETHEUS_PREFIX, +) + +ACCOUNTING_EL_REWARDS_VAULT_BALANCE_WEI = Gauge( + "accounting_el_rewards_vault_wei", + "Reported EL rewards", + namespace=PROMETHEUS_PREFIX, +) + +ACCOUNTING_WITHDRAWAL_VAULT_BALANCE_WEI = Gauge( + "accounting_withdrawal_vault_balance_wei", + "Reported withdrawal vault balance", + namespace=PROMETHEUS_PREFIX, +) + +ACCOUNTING_EXITED_VALIDATORS = Gauge( + "accounting_exited_validators", + "Reported exited validators count", + ["module_id", "no_id"], + namespace=PROMETHEUS_PREFIX, +) + +ACCOUNTING_STUCK_VALIDATORS = Gauge( + "accounting_stuck_validators", + "Reported stuck validators count", + ["module_id", "no_id"], + namespace=PROMETHEUS_PREFIX, +) + +ACCOUNTING_DELAYED_VALIDATORS = Gauge( + "accounting_delayed_validators", + "Reported delayed validators count", + ["module_id", "no_id"], + namespace=PROMETHEUS_PREFIX, +) diff --git a/src/metrics/prometheus/basic.py b/src/metrics/prometheus/basic.py index c40b6aea0..80456302d 100644 --- a/src/metrics/prometheus/basic.py +++ b/src/metrics/prometheus/basic.py @@ -22,6 +22,11 @@ class Status(Enum): namespace=PROMETHEUS_PREFIX, ) +GENESIS_TIME = Gauge( + 'genesis_time', + 'Genesis time', + namespace=PROMETHEUS_PREFIX, +) ACCOUNT_BALANCE = Gauge( 'account_balance', @@ -30,6 +35,20 @@ class Status(Enum): namespace=PROMETHEUS_PREFIX, ) +ORACLE_SLOT_NUMBER = Gauge( + "slot_number", + "Oracle head or finalized slot number", + ["state"], # "head" or "finalized" + namespace=PROMETHEUS_PREFIX, +) + +ORACLE_BLOCK_NUMBER = Gauge( + "block_number", + "Oracle head or finalized block number", + ["state"], # "head" or "finalized" + namespace=PROMETHEUS_PREFIX, +) + FUNCTIONS_DURATION = Histogram( 'functions_duration', 'Duration of oracle daemon tasks', diff --git a/src/metrics/prometheus/business.py b/src/metrics/prometheus/business.py index 4c9d71370..39b8f4538 100644 --- a/src/metrics/prometheus/business.py +++ b/src/metrics/prometheus/business.py @@ -15,13 +15,6 @@ namespace=PROMETHEUS_PREFIX, ) -ORACLE_SLOT_NUMBER = Gauge( - "slot_number", - "Oracle head slot number", - ["state"], # "head" or "finalized" - namespace=PROMETHEUS_PREFIX, -) - FRAME_CURRENT_REF_SLOT = Gauge( "frame_current_ref_slot", "Oracle frame current ref slot", @@ -34,8 +27,14 @@ namespace=PROMETHEUS_PREFIX, ) -FRAME_LAST_REPORT_REF_SLOT = Gauge( - "frame_last_report_ref_slot", - "Oracle frame last report ref slot", +FRAME_PREV_REPORT_REF_SLOT = Gauge( + "frame_prev_report_ref_slot", + "Oracle frame previous report ref slot", + namespace=PROMETHEUS_PREFIX, +) + +CONTRACT_ON_PAUSE = Gauge( + "contract_on_pause", + "Contract on pause", 
namespace=PROMETHEUS_PREFIX, ) diff --git a/src/metrics/prometheus/duration_meter.py b/src/metrics/prometheus/duration_meter.py index bcc13d5a4..b1fdefc72 100644 --- a/src/metrics/prometheus/duration_meter.py +++ b/src/metrics/prometheus/duration_meter.py @@ -1,18 +1,20 @@ import logging from functools import wraps from time import perf_counter -from types import FunctionType +from typing import Callable, TypeVar from src.metrics.prometheus.basic import FUNCTIONS_DURATION, Status - logger = logging.getLogger(__name__) +T = TypeVar("T") + + def duration_meter(): - def decorator(func: FunctionType): + def decorator(func: Callable[..., T]) -> Callable[..., T]: @wraps(func) - def wrapper(*args, **kwargs) -> FunctionType: + def wrapper(*args, **kwargs) -> T: full_name = f"{func.__module__}.{func.__name__}" with FUNCTIONS_DURATION.time() as t: try: @@ -25,9 +27,13 @@ def wrapper(*args, **kwargs) -> FunctionType: raise e finally: stop = perf_counter() - logger.debug({ - "msg": f"Task '{full_name}' finished", "duration (sec)": stop - t._start # pylint: disable=protected-access - }) + logger.debug( + { + "msg": f"Task '{full_name}' finished", + "duration (sec)": stop + - t._start, # pylint: disable=protected-access + } + ) return wrapper diff --git a/src/metrics/prometheus/ejector.py b/src/metrics/prometheus/ejector.py new file mode 100644 index 000000000..d61c1159f --- /dev/null +++ b/src/metrics/prometheus/ejector.py @@ -0,0 +1,22 @@ +from prometheus_client import Gauge + +from src.variables import PROMETHEUS_PREFIX + + +EJECTOR_TO_WITHDRAW_WEI_AMOUNT = Gauge( + "ejector_withdrawal_wei_amount", + "Withdrawal wei amount", + namespace=PROMETHEUS_PREFIX, +) + +EJECTOR_MAX_EXIT_EPOCH = Gauge( + "ejector_max_exit_epoch", + "The max exit epoch", + namespace=PROMETHEUS_PREFIX, +) + +EJECTOR_VALIDATORS_COUNT_TO_EJECT = Gauge( + "ejector_validators_count_to_eject", + "Reported validators count to eject", + namespace=PROMETHEUS_PREFIX, +) diff --git a/src/metrics/prometheus/validators.py b/src/metrics/prometheus/validators.py new file mode 100644 index 000000000..5bd38acb1 --- /dev/null +++ b/src/metrics/prometheus/validators.py @@ -0,0 +1,28 @@ +from prometheus_client import Gauge + +from src.variables import PROMETHEUS_PREFIX + + +ALL_VALIDATORS = Gauge( + "all_validators", + "All validators", + namespace=PROMETHEUS_PREFIX, +) + +LIDO_VALIDATORS = Gauge( + "lido_validators", + "Lido validators", + namespace=PROMETHEUS_PREFIX, +) + +ALL_SLASHED_VALIDATORS = Gauge( + "all_slashed_validators", + "All slashed validators", + namespace=PROMETHEUS_PREFIX, +) + +LIDO_SLASHED_VALIDATORS = Gauge( + "lido_slashed_validators", + "Lido slashed validators", + namespace=PROMETHEUS_PREFIX, +) diff --git a/src/modules/accounting/accounting.py b/src/modules/accounting/accounting.py index ff817e5c9..b3a12bd64 100644 --- a/src/modules/accounting/accounting.py +++ b/src/modules/accounting/accounting.py @@ -13,6 +13,12 @@ LidoReportRebase, SharesRequestedToBurn, ) +from src.metrics.prometheus.accounting import ( + ACCOUNTING_IS_BUNKER, + ACCOUNTING_CL_BALANCE_GWEI, + ACCOUNTING_EL_REWARDS_VAULT_BALANCE_WEI, + ACCOUNTING_WITHDRAWAL_VAULT_BALANCE_WEI +) from src.metrics.prometheus.duration_meter import duration_meter from src.services.validator_state import LidoValidatorStateService from src.modules.submodules.consensus import ConsensusModule @@ -21,7 +27,8 @@ from src.services.bunker import BunkerService from src.typings import BlockStamp, Gwei, ReferenceBlockStamp from src.utils.abi import named_tuple_to_dataclass -from 
src.variables import ALLOW_NEGATIVE_REBASE_REPORTING +from src.utils.cache import clear_object_lru_cache +from src.variables import ALLOW_REPORTING_IN_BUNKER_MODE from src.web3py.typings import Web3 from src.web3py.extensions.lido_validators import StakingModule, NodeOperatorGlobalIndex, StakingModuleId @@ -30,6 +37,16 @@ class Accounting(BaseModule, ConsensusModule): + """ + Accounting module updates the protocol TVL, distributes node-operator rewards, and processes user withdrawal requests. + + Report goes in three phases: + - Send report hash + - Send report data (extra data hash inside) + Contains information about lido states, last withdrawal request to finalize and exited validators count by module. + - Send extra data + Contains stuck and exited validators count by each node operator. + """ CONSENSUS_VERSION = 1 CONTRACT_VERSION = 1 @@ -40,11 +57,20 @@ def __init__(self, w3: Web3): self.lido_validator_state_service = LidoValidatorStateService(self.w3) self.bunker_service = BunkerService(self.w3) + def refresh_contracts(self): + self.report_contract = self.w3.lido_contracts.accounting_oracle + + def clear_cache(self): + clear_object_lru_cache(self) + clear_object_lru_cache(self.lido_validator_state_service) + clear_object_lru_cache(self.bunker_service) + def execute_module(self, last_finalized_blockstamp: BlockStamp) -> ModuleExecuteDelay: report_blockstamp = self.get_blockstamp_for_report(last_finalized_blockstamp) if report_blockstamp: self.process_report(report_blockstamp) + # Third phase of report. Specific for accounting. self.process_extra_data(report_blockstamp) return ModuleExecuteDelay.NEXT_SLOT @@ -69,10 +95,6 @@ def process_extra_data(self, blockstamp: ReferenceBlockStamp): self._submit_extra_data(blockstamp) def _submit_extra_data(self, blockstamp: ReferenceBlockStamp) -> None: - if not variables.ACCOUNT: - logger.info({'msg': 'Dry mode. 
No account provided to submit extra data.'}) - return - extra_data = self.lido_validator_state_service.get_extra_data(blockstamp, self.get_chain_config(blockstamp)) if extra_data.extra_data: @@ -82,7 +104,6 @@ def _submit_extra_data(self, blockstamp: ReferenceBlockStamp) -> None: self.w3.transaction.check_and_send_transaction(tx, variables.ACCOUNT) - # Consensus module: main build report method @lru_cache(maxsize=1) @duration_meter() def build_report(self, blockstamp: ReferenceBlockStamp) -> tuple: @@ -90,32 +111,30 @@ def build_report(self, blockstamp: ReferenceBlockStamp) -> tuple: logger.info({'msg': 'Calculate report for accounting module.', 'value': report_data}) return report_data.as_tuple() - # # Consensus module: if contract got report data def is_main_data_submitted(self, blockstamp: BlockStamp) -> bool: + # Consensus module: if contract got report data (second phase) processing_state = self._get_processing_state(blockstamp) logger.info({'msg': 'Check if main data was submitted.', 'value': processing_state.main_data_submitted}) return processing_state.main_data_submitted - # Consensus module: if contract could accept any sort of report + def is_extra_data_submitted(self, blockstamp: BlockStamp) -> bool: + processing_state = self._get_processing_state(blockstamp) + return processing_state.extra_data_submitted + def is_contract_reportable(self, blockstamp: BlockStamp) -> bool: + # Consensus module: if contract can accept the report (in any phase) is_reportable = not self.is_main_data_submitted(blockstamp) or not self.is_extra_data_submitted(blockstamp) logger.info({'msg': 'Check if contract could accept report.', 'value': is_reportable}) return is_reportable - def is_extra_data_submitted(self, blockstamp: BlockStamp) -> bool: - processing_state = self._get_processing_state(blockstamp) - return processing_state.extra_data_submitted - def is_reporting_allowed(self, blockstamp: ReferenceBlockStamp) -> bool: - cl_rebase_report = self.simulate_cl_rebase(blockstamp) - frame_cl_rebase = self.bunker_service.get_cl_rebase_for_current_report(blockstamp, cl_rebase_report) - if frame_cl_rebase >= 0: + if not self._is_bunker(blockstamp): return True logger.warning({'msg': '!' * 50}) - logger.warning({'msg': 'CL rebase is negative.', 'value': frame_cl_rebase}) + logger.warning({'msg': 'Bunker mode is active'}) logger.warning({'msg': '!' 
* 50}) - return ALLOW_NEGATIVE_REBASE_REPORTING + return ALLOW_REPORTING_IN_BUNKER_MODE @lru_cache(maxsize=1) def _get_processing_state(self, blockstamp: BlockStamp) -> AccountingProcessingState: @@ -126,12 +145,14 @@ def _get_processing_state(self, blockstamp: BlockStamp) -> AccountingProcessingS logger.info({'msg': 'Fetch processing state.', 'value': ps}) return ps + # ---------------------------------------- Build report ---------------------------------------- def _calculate_report(self, blockstamp: ReferenceBlockStamp) -> ReportData: validators_count, cl_balance = self._get_consensus_lido_state(blockstamp) staking_module_ids_list, exit_validators_count_list = self._get_newly_exited_validators_by_modules(blockstamp) extra_data = self.lido_validator_state_service.get_extra_data(blockstamp, self.get_chain_config(blockstamp)) + finalization_share_rate, finalization_batches = self._get_finalization_data(blockstamp) report_data = ReportData( consensus_version=self.CONSENSUS_VERSION, @@ -143,14 +164,19 @@ def _calculate_report(self, blockstamp: ReferenceBlockStamp) -> ReportData: withdrawal_vault_balance=self.w3.lido_contracts.get_withdrawal_balance(blockstamp), el_rewards_vault_balance=self.w3.lido_contracts.get_el_vault_balance(blockstamp), shares_requested_to_burn=self.get_shares_to_burn(blockstamp), - withdrawal_finalization_batches=self._get_withdrawal_batches(blockstamp), - finalization_share_rate=self._get_finalization_shares_rate(blockstamp), + withdrawal_finalization_batches=finalization_batches, + finalization_share_rate=finalization_share_rate, is_bunker=self._is_bunker(blockstamp), extra_data_format=extra_data.format, extra_data_hash=extra_data.data_hash, extra_data_items_count=extra_data.items_count, ) + ACCOUNTING_IS_BUNKER.set(report_data.is_bunker) + ACCOUNTING_CL_BALANCE_GWEI.set(report_data.cl_balance_gwei) + ACCOUNTING_EL_REWARDS_VAULT_BALANCE_WEI.set(report_data.el_rewards_vault_balance) + ACCOUNTING_WITHDRAWAL_VAULT_BALANCE_WEI.set(report_data.withdrawal_vault_balance) + return report_data def _get_newly_exited_validators_by_modules( @@ -193,34 +219,34 @@ def _get_consensus_lido_state(self, blockstamp: ReferenceBlockStamp) -> tuple[in logger.info({'msg': 'Calculate consensus lido state.', 'value': (count, total_balance)}) return count, total_balance - def _get_withdrawal_batches(self, blockstamp: ReferenceBlockStamp) -> list[int]: + def _get_finalization_data(self, blockstamp: ReferenceBlockStamp) -> tuple[int, list[int]]: + simulation = self.simulate_full_rebase(blockstamp) chain_config = self.get_chain_config(blockstamp) frame_config = self.get_frame_config(blockstamp) - is_bunker = self._is_bunker(blockstamp) - withdrawal_vault_balance = self.w3.lido_contracts.get_withdrawal_balance(blockstamp) - el_rewards_vault_balance = self.w3.lido_contracts.get_el_vault_balance(blockstamp) - finalization_share_rate = self._get_finalization_shares_rate(blockstamp) + + share_rate = simulation.post_total_pooled_ether * SHARE_RATE_PRECISION_E27 // simulation.post_total_shares + logger.info({'msg': 'Calculate shares rate.', 'value': share_rate}) withdrawal_service = Withdrawal(self.w3, blockstamp, chain_config, frame_config) - withdrawal_batches = withdrawal_service.get_finalization_batches( + batches = withdrawal_service.get_finalization_batches( is_bunker, - finalization_share_rate, - withdrawal_vault_balance, - el_rewards_vault_balance, + share_rate, + simulation.withdrawals, + simulation.el_reward, ) - logger.info({'msg': 'Calculate last withdrawal id to finalize.', 'value': 
withdrawal_batches}) - return withdrawal_batches - @lru_cache(maxsize=1) - def _get_finalization_shares_rate(self, blockstamp: ReferenceBlockStamp) -> int: - simulation = self.simulate_full_rebase(blockstamp) - shares_rate = simulation.post_total_pooled_ether * SHARE_RATE_PRECISION_E27 // simulation.post_total_shares - logger.info({'msg': 'Calculate shares rate.', 'value': shares_rate}) - return shares_rate + logger.info({'msg': 'Calculate last withdrawal id to finalize.', 'value': batches}) + + return share_rate, batches + @lru_cache(maxsize=1) def simulate_cl_rebase(self, blockstamp: ReferenceBlockStamp) -> LidoReportRebase: - return self.simulate_rebase_after_report(blockstamp) + """ + Simulate rebase excluding any execution rewards. + This used to check worst scenarios in bunker service. + """ + return self.simulate_rebase_after_report(blockstamp, Wei(0)) def simulate_full_rebase(self, blockstamp: ReferenceBlockStamp) -> LidoReportRebase: el_rewards = self.w3.lido_contracts.get_el_vault_balance(blockstamp) @@ -229,20 +255,20 @@ def simulate_full_rebase(self, blockstamp: ReferenceBlockStamp) -> LidoReportReb def simulate_rebase_after_report( self, blockstamp: ReferenceBlockStamp, - el_rewards: Wei = Wei(0), + el_rewards: Wei, ) -> LidoReportRebase: """ To calculate how much withdrawal request protocol can finalize - needs finalization share rate after this report """ validators_count, cl_balance = self._get_consensus_lido_state(blockstamp) - timestamp = self.get_ref_slot_timestamp(blockstamp) - chain_conf = self.get_chain_config(blockstamp) simulated_tx = self.w3.lido_contracts.lido.functions.handleOracleReport( - # Oracle timings - timestamp, # _reportTimestamp + # We use block timestamp, instead of slot timestamp, + # because missed slot will break simulation contract logics + # Details: https://github.com/lidofinance/lido-oracle/issues/291 + blockstamp.block_timestamp, # _reportTimestamp self._get_slots_elapsed_from_last_report(blockstamp) * chain_conf.seconds_per_slot, # _timeElapsed # CL values validators_count, # _clValidators @@ -267,6 +293,7 @@ def simulate_rebase_after_report( return LidoReportRebase(*result) + @lru_cache(maxsize=1) def get_shares_to_burn(self, blockstamp: BlockStamp) -> int: shares_data = named_tuple_to_dataclass( self.w3.lido_contracts.burner.functions.getSharesRequestedToBurn().call( @@ -277,12 +304,7 @@ def get_shares_to_burn(self, blockstamp: BlockStamp) -> int: return shares_data.cover_shares + shares_data.non_cover_shares - def get_ref_slot_timestamp(self, blockstamp: ReferenceBlockStamp): - chain_conf = self.get_chain_config(blockstamp) - return chain_conf.genesis_time + blockstamp.ref_slot * chain_conf.seconds_per_slot - def _get_slots_elapsed_from_last_report(self, blockstamp: ReferenceBlockStamp): - """If no report was finalized return slots elapsed from initial epoch from contract""" chain_conf = self.get_chain_config(blockstamp) frame_config = self.get_frame_config(blockstamp) diff --git a/src/modules/accounting/extra_data.py b/src/modules/accounting/extra_data.py index f8f1bd68b..00040bc50 100644 --- a/src/modules/accounting/extra_data.py +++ b/src/modules/accounting/extra_data.py @@ -43,13 +43,18 @@ class ExtraData: class ExtraDataService: - # Extra data is an array of items, each item being encoded as follows: - # | 3 bytes | 2 bytes | X bytes | - # | itemIndex | itemType | itemPayload | - # - # itemPayload format: - # | 3 bytes | 8 bytes | nodeOpsCount * 8 bytes | nodeOpsCount * 16 bytes | - # | moduleId | nodeOpsCount | nodeOperatorIds | 
stuckOrExitedValsCount | + """ + Service that encodes extra data into bytes in correct order. + + Extra data is an array of items, each item being encoded as follows: + | 3 bytes | 2 bytes | X bytes | + | itemIndex | itemType | itemPayload | + + itemPayload format: + | 3 bytes | 8 bytes | nodeOpsCount * 8 bytes | nodeOpsCount * 16 bytes | + | moduleId | nodeOpsCount | nodeOperatorIds | stuckOrExitedValsCount | + """ + class Lengths: ITEM_INDEX = 3 ITEM_TYPE = 2 diff --git a/src/modules/accounting/typings.py b/src/modules/accounting/typings.py index a99d58c9b..24080fb09 100644 --- a/src/modules/accounting/typings.py +++ b/src/modules/accounting/typings.py @@ -77,8 +77,8 @@ class OracleReportLimits: class LidoReportRebase: post_total_pooled_ether: int post_total_shares: int - withdrawals: int - el_reward: int + withdrawals: Wei + el_reward: Wei @dataclass diff --git a/src/modules/checks/__init__.py b/src/modules/checks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/modules/checks/checks_module.py b/src/modules/checks/checks_module.py new file mode 100644 index 000000000..1cebf23f4 --- /dev/null +++ b/src/modules/checks/checks_module.py @@ -0,0 +1,19 @@ +import pytest + + +class ChecksModule: + """ + Module that executes all tests to figure out that environment is ready for Oracle. + + Checks: + - Consensus Layer node + - Execution Layer node + - Keys API service + if LIDO_LOCATOR address provided + - Checks configs in Accounting module and Ejector module + """ + def execute_module(self): + return pytest.main([ + 'src/modules/checks/suites', + '-c', 'src/modules/checks/pytest.ini', + ]) diff --git a/src/modules/checks/pytest.ini b/src/modules/checks/pytest.ini new file mode 100644 index 000000000..d5efe8a16 --- /dev/null +++ b/src/modules/checks/pytest.ini @@ -0,0 +1,7 @@ +[pytest] +addopts = --verbosity=0 --no-header --tb=short --show-capture=no --assert=plain --color=yes -n auto +python_files = *.py +python_classes = Check +python_functions = check_* +filterwarnings = + ignore::pytest.PytestAssertRewriteWarning diff --git a/src/modules/checks/suites/__init__.py b/src/modules/checks/suites/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/modules/checks/suites/common.py b/src/modules/checks/suites/common.py new file mode 100644 index 000000000..a52c2d858 --- /dev/null +++ b/src/modules/checks/suites/common.py @@ -0,0 +1,38 @@ +"""Common checks""" +import pytest + +from src.main import check_providers_chain_ids as chain_ids_check # rename to not conflict with test +from src.modules.accounting.accounting import Accounting +from src.modules.ejector.ejector import Ejector + + +@pytest.fixture() +def web3(web3): + if not hasattr(web3, 'lido_contracts'): + pytest.skip('LIDO_LOCATOR_ADDRESS is not set') + return web3 + + +@pytest.fixture() +def accounting(web3): + return Accounting(web3) + + +@pytest.fixture() +def ejector(web3): + return Ejector(web3) + + +def check_providers_chain_ids(web3): + """Make sure all providers are on the same chain""" + chain_ids_check(web3) + + +def check_accounting_contract_configs(accounting): + """Make sure accounting contract configs are valid""" + accounting.check_contract_configs() + + +def check_ejector_contract_configs(ejector): + """Make sure ejector contract configs are valid""" + ejector.check_contract_configs() diff --git a/src/modules/checks/suites/conftest.py b/src/modules/checks/suites/conftest.py new file mode 100644 index 000000000..06468749a --- /dev/null +++ 
b/src/modules/checks/suites/conftest.py @@ -0,0 +1,144 @@ +import pytest +from _pytest._io import TerminalWriter +from web3_multi_provider import MultiProvider +from xdist import is_xdist_controller # type: ignore[import] +from xdist.dsession import TerminalDistReporter # type: ignore[import] + +from src import variables +from src.typings import EpochNumber, SlotNumber, BlockRoot +from src.utils.blockstamp import build_blockstamp +from src.utils.slot import get_reference_blockstamp +from src.web3py.contract_tweak import tweak_w3_contracts +from src.web3py.extensions import ( + ConsensusClientModule, KeysAPIClientModule, LidoValidatorsProvider, TransactionUtils, + LidoContracts, +) +from src.web3py.typings import Web3 + + +TITLE_PROPERTY_NAME = "test_title" + + +@pytest.fixture() +def web3(): + web3 = Web3(MultiProvider(variables.EXECUTION_CLIENT_URI)) + tweak_w3_contracts(web3) + cc = ConsensusClientModule(variables.CONSENSUS_CLIENT_URI, web3) + kac = KeysAPIClientModule(variables.KEYS_API_URI, web3) + + web3.attach_modules({ + 'lido_validators': LidoValidatorsProvider, + 'transaction': TransactionUtils, + 'cc': lambda: cc, # type: ignore[dict-item] + 'kac': lambda: kac, # type: ignore[dict-item] + }) + if variables.LIDO_LOCATOR_ADDRESS: + web3.attach_modules({'lido_contracts': LidoContracts}) + + return web3 + + +@pytest.fixture(params=[pytest.param("finalized_blockstamp", id="Finalized blockstamp"), + pytest.param("blockstamp_frame_ago", id="Blockstamp frame ago")]) +def blockstamp(request): + return request.getfixturevalue(request.param) + + +@pytest.fixture +def finalized_blockstamp(web3): + block_root = BlockRoot(web3.cc.get_block_root('finalized').root) + block_details = web3.cc.get_block_details(block_root) + bs = build_blockstamp(block_details) + cc_config = web3.cc.get_config_spec() + return get_reference_blockstamp( + web3.cc, + bs.slot_number, + ref_epoch=EpochNumber(bs.slot_number // int(cc_config.SLOTS_PER_EPOCH)), + last_finalized_slot_number=bs.slot_number + ) + + +@pytest.fixture +def blockstamp_frame_ago(web3, finalized_blockstamp): + epochs_per_frame = 270 + cc_config = web3.cc.get_config_spec() + slots_per_frame = epochs_per_frame * int(cc_config.SLOTS_PER_EPOCH) + last_report_ref_slot = SlotNumber(finalized_blockstamp.slot_number - slots_per_frame) + + return get_reference_blockstamp( + web3.cc, + last_report_ref_slot, + ref_epoch=EpochNumber(last_report_ref_slot // int(cc_config.SLOTS_PER_EPOCH)), + last_finalized_slot_number=finalized_blockstamp.slot_number + ) + + +def pytest_collection_modifyitems(items): + """Sort tests by finalized blockstamp first.""" + items.sort(key=lambda x: "Finalized blockstamp" in x.nodeid, reverse=True) + + +class CustomTerminal(TerminalDistReporter): + def ensure_show_status(self): + pass + + +@pytest.hookimpl(trylast=True) +def pytest_configure(config): + class SessionLike: + config = None + session_like = SessionLike() + session_like.config = config + if is_xdist_controller(session_like): + dsession = config.pluginmanager.getplugin("dsession") + config.pluginmanager.unregister(dsession.trdist, "terminaldistreporter") + + custom_terminal = CustomTerminal(config) + dsession.trdist = custom_terminal + config.pluginmanager.register(custom_terminal) + + +def pytest_report_teststatus(report, config): + if report.when == "setup": + if report.skipped: + reason = report.longrepr[-1] + return "skipped", reason, "Skipped" + if report.when == "call": + if report.passed: + return "passed", "✅ Checked", "✅ Checked" + if report.failed: + return 
"failed", "❌ Failed", "❌ Failed" + return None + + +@pytest.hookimpl(tryfirst=True) +def pytest_runtest_logreport(report) -> None: + title = [prop for name, prop in report.user_properties if name == TITLE_PROPERTY_NAME][0] + if report.when == 'setup' and not report.passed: + print(title, end="") + if report.when == 'call': + print(title, end="") + if report.when == 'teardown': + print() + + +def pytest_runtest_setup(item: pytest.Item): + tw: TerminalWriter = item.config.pluginmanager.get_plugin("terminalreporter")._tw # pylint: disable=protected-access + + obj = getattr(item, "obj", None) + parent = getattr(item.parent, "obj", None) + + module_doc = parent.__doc__ + if not module_doc or not obj: + module_doc = f"Placeholder doc for parent of {item.nodeid}" + + check_doc = obj.__doc__ + if not check_doc or not parent: + check_doc = f"Placeholder doc for {item.nodeid}" + + check_params = f"[{item.callspec.id}]" if hasattr(item, "callspec") else "" + + check_params_colorized = tw.markup(check_params, cyan=True) + module_doc_colorized = tw.markup(f"[{module_doc}]", blue=True) + message = f"{module_doc_colorized}{check_params_colorized} {check_doc}" + item.user_properties.append((TITLE_PROPERTY_NAME, f">> {message}... ")) diff --git a/src/modules/checks/suites/consensus_node.py b/src/modules/checks/suites/consensus_node.py new file mode 100644 index 000000000..ab351bba7 --- /dev/null +++ b/src/modules/checks/suites/consensus_node.py @@ -0,0 +1,23 @@ +"""Consensus node""" +from src.web3py.typings import Web3 + + +def check_validators_provided(web3: Web3, blockstamp): + """Check that consensus-client able to provide validators""" + result = web3.cc.get_validators_no_cache(blockstamp) + assert len(result) > 0, "consensus-client provide no validators" + + +def check_block_details_provided(web3: Web3, blockstamp): + """Check that consensus-client able to provide block details""" + web3.cc.get_block_details(blockstamp.slot_number) + + +def check_block_root_provided(web3: Web3, blockstamp): + """Check that consensus-client able to provide block root""" + web3.cc.get_block_root(blockstamp.slot_number) + + +def check_block_header_provided(web3: Web3, blockstamp): + """Check that consensus-client able to provide block header""" + web3.cc.get_block_header(blockstamp.slot_number) diff --git a/src/modules/checks/suites/execution_node.py b/src/modules/checks/suites/execution_node.py new file mode 100644 index 000000000..5d90ce9b2 --- /dev/null +++ b/src/modules/checks/suites/execution_node.py @@ -0,0 +1,54 @@ +"""Execution node""" +import pytest + +get_deposit_count_abi = { + "inputs": [], + "name": "get_deposit_count", + "outputs": [ + {'internalType': "bytes", 'name': "", 'type': "bytes"} + ], + "stateMutability": "view", + "type": "function" +} + +deposit_event_abi = {'anonymous': False, 'inputs': [ + {'indexed': False, 'internalType': "bytes", 'name': "pubkey", 'type': "bytes"}, + {'indexed': False, 'internalType': "bytes", 'name': "withdrawal_credentials", 'type': "bytes"}, + {'indexed': False, 'internalType': "bytes", 'name': "amount", 'type': "bytes"}, + {'indexed': False, 'internalType': "bytes", 'name': "signature", 'type': "bytes"}, + {'indexed': False, 'internalType': "bytes", 'name': "index", 'type': "bytes"} +], 'name': "DepositEvent", 'type': "event"} + + +@pytest.fixture +def deposit_contract(web3): + cc_config = web3.cc.get_config_spec() + return web3.eth.contract( + address=web3.to_checksum_address(cc_config.DEPOSIT_CONTRACT_ADDRESS), + abi=[get_deposit_count_abi, deposit_event_abi], + ) + + 
+def check_eth_call_availability(blockstamp, deposit_contract): + """Check that execution-client able to make eth_call on the provided blockstamp""" + deposit_contract.functions.get_deposit_count().call(block_identifier=blockstamp.block_hash) + + +def check_balance_availability(web3, blockstamp, deposit_contract): + """Check that execution-client able to get balance on the provided blockstamp""" + web3.eth.get_balance(deposit_contract.address, block_identifier=blockstamp.block_hash) + + +def check_events_range_availability(web3, blockstamp, deposit_contract): + """Check that execution-client able to get event logs on the blockstamp state""" + latest_block = web3.eth.get_block('latest') + deposit_contract.events.DepositEvent.get_logs(fromBlock=blockstamp.block_number, toBlock=latest_block.number) + + +def check_events_week_range_availability(web3, deposit_contract): + """Check that execution-client able to get event logs a week ago""" + latest_block = web3.eth.get_block('latest') + deposit_contract.events.DepositEvent.get_logs( + fromBlock=latest_block.number - 8 * 225 * 32, # 8 days + toBlock=latest_block.number, + ) diff --git a/src/modules/checks/suites/keys_api.py b/src/modules/checks/suites/keys_api.py new file mode 100644 index 000000000..34434be4d --- /dev/null +++ b/src/modules/checks/suites/keys_api.py @@ -0,0 +1,7 @@ +"""Keys api""" + + +def check_keys_api_provide_keys(web3, blockstamp): + """Check that keys-api able to provide keys""" + result = web3.kac.get_used_lido_keys(blockstamp) + assert len(result) > 0, "keys-api service provide no keys" diff --git a/src/modules/ejector/data_encode.py b/src/modules/ejector/data_encode.py index c88939cd2..27d5f6497 100644 --- a/src/modules/ejector/data_encode.py +++ b/src/modules/ejector/data_encode.py @@ -3,6 +3,7 @@ from src.utils.types import hex_str_to_bytes from src.web3py.extensions.lido_validators import LidoValidator, NodeOperatorGlobalIndex + DATA_FORMAT_LIST = 1 MODULE_ID_LENGTH = 3 @@ -12,9 +13,13 @@ def encode_data(validators: list[tuple[NodeOperatorGlobalIndex, LidoValidator]]): - # MSB <------------------------------------------------------- LSB - # | 3 bytes | 5 bytes | 8 bytes | 48 bytes | - # | moduleId | nodeOpId | validatorIndex | validatorPubkey | + """ + Encodes report data for Exit Bus Contract into bytes. 
+ + MSB <------------------------------------------------------- LSB + | 3 bytes | 5 bytes | 8 bytes | 48 bytes | + | moduleId | nodeOpId | validatorIndex | validatorPubkey | + """ result = b'' diff --git a/src/modules/ejector/ejector.py b/src/modules/ejector/ejector.py index 2cc56c4bd..31b4ae624 100644 --- a/src/modules/ejector/ejector.py +++ b/src/modules/ejector/ejector.py @@ -12,6 +12,12 @@ MIN_PER_EPOCH_CHURN_LIMIT, MIN_VALIDATOR_WITHDRAWABILITY_DELAY, ) +from src.metrics.prometheus.business import CONTRACT_ON_PAUSE, FRAME_PREV_REPORT_REF_SLOT +from src.metrics.prometheus.ejector import ( + EJECTOR_VALIDATORS_COUNT_TO_EJECT, + EJECTOR_TO_WITHDRAW_WEI_AMOUNT, + EJECTOR_MAX_EXIT_EPOCH +) from src.metrics.prometheus.duration_meter import duration_meter from src.modules.ejector.data_encode import encode_data from src.modules.ejector.typings import EjectorProcessingState, ReportData @@ -23,6 +29,7 @@ from src.services.validator_state import LidoValidatorStateService from src.typings import BlockStamp, EpochNumber, ReferenceBlockStamp from src.utils.abi import named_tuple_to_dataclass +from src.utils.cache import clear_object_lru_cache from src.utils.validator_state import ( is_active_validator, is_fully_withdrawable_validator, @@ -36,20 +43,20 @@ class Ejector(BaseModule, ConsensusModule): """ - Module that ejects lido validators depends on withdrawal requests stETH value. + Module that ejects lido validators depends on total value of unfinalized withdrawal requests. Flow: 1. Calculate withdrawals amount to cover with ETH. 2. Calculate ETH rewards prediction per epoch. - 3. Calculate withdrawn epoch for next validator Loop: - a. Calculate predicted rewards we get until we reach withdrawn epoch - b. Check if validators to eject + predicted rewards + current balance is enough to finalize withdrawal requests + 1. Calculate withdrawn epoch for last validator in "to eject" list. + 2. Calculate predicted rewards we get until last validator will be withdrawn. + 3. Check if validators to eject + predicted rewards and withdrawals + current balance is enough to finalize all withdrawal requests. - If True - eject all validators in list. End. - c. Get next validator to eject. - d. Recalculate withdraw epoch + 4. Add new validator to "to eject" list. + 5. Recalculate withdrawn epoch. - 4. Decode lido validators into bytes and send report transaction + 3. Decode lido validators into bytes and send report transaction """ CONSENSUS_VERSION = 1 CONTRACT_VERSION = 1 @@ -63,21 +70,27 @@ def __init__(self, w3: Web3): self.prediction_service = RewardsPredictionService(w3) self.validators_state_service = LidoValidatorStateService(w3) + def refresh_contracts(self): + self.report_contract = self.w3.lido_contracts.validators_exit_bus_oracle + + def clear_cache(self): + clear_object_lru_cache(self) + clear_object_lru_cache(self.prediction_service) + clear_object_lru_cache(self.validators_state_service) + def execute_module(self, last_finalized_blockstamp: BlockStamp) -> ModuleExecuteDelay: report_blockstamp = self.get_blockstamp_for_report(last_finalized_blockstamp) if not report_blockstamp: return ModuleExecuteDelay.NEXT_FINALIZED_EPOCH - if self._is_paused(report_blockstamp): - logger.info({'msg': 'Ejector is paused. 
Skip report.'}) - return ModuleExecuteDelay.NEXT_FINALIZED_EPOCH - self.process_report(report_blockstamp) return ModuleExecuteDelay.NEXT_SLOT @lru_cache(maxsize=1) @duration_meter() def build_report(self, blockstamp: ReferenceBlockStamp) -> tuple: + last_report_ref_slot = self.w3.lido_contracts.get_ejector_last_processing_ref_slot(blockstamp) + FRAME_PREV_REPORT_REF_SLOT.set(last_report_ref_slot) validators: list[tuple[NodeOperatorGlobalIndex, LidoValidator]] = self.get_validators_to_eject(blockstamp) logger.info({ 'msg': f'Calculate validators to eject. Count: {len(validators)}', @@ -86,18 +99,24 @@ def build_report(self, blockstamp: ReferenceBlockStamp) -> tuple: data, data_format = encode_data(validators) - return ReportData( + report_data = ReportData( self.CONSENSUS_VERSION, blockstamp.ref_slot, len(validators), data_format, data, - ).as_tuple() + ) + + EJECTOR_VALIDATORS_COUNT_TO_EJECT.set(report_data.requests_count) + + return report_data.as_tuple() def get_validators_to_eject(self, blockstamp: ReferenceBlockStamp) -> list[tuple[NodeOperatorGlobalIndex, LidoValidator]]: to_withdraw_amount = self.get_total_unfinalized_withdrawal_requests_amount(blockstamp) logger.info({'msg': 'Calculate to withdraw amount.', 'value': to_withdraw_amount}) + EJECTOR_TO_WITHDRAW_WEI_AMOUNT.set(to_withdraw_amount) + if to_withdraw_amount == Wei(0): return [] @@ -110,13 +129,14 @@ def get_validators_to_eject(self, blockstamp: ReferenceBlockStamp) -> list[tuple logger.info({'msg': 'Calculate epochs to sweep.', 'value': epochs_to_sweep}) total_available_balance = self._get_total_el_balance(blockstamp) - logger.info({'msg': 'Calculate available balance.', 'value': total_available_balance}) + logger.info({'msg': 'Calculate el balance.', 'value': total_available_balance}) validators_going_to_exit = self.validators_state_service.get_recently_requested_but_not_exited_validators(blockstamp, chain_config) going_to_withdraw_balance = sum(map( self._get_predicted_withdrawable_balance, validators_going_to_exit, )) + logger.info({'msg': 'Calculate going to exit validators balance.', 'value': going_to_withdraw_balance}) validators_to_eject: list[tuple[NodeOperatorGlobalIndex, LidoValidator]] = [] validator_to_eject_balance_sum = 0 @@ -141,6 +161,23 @@ def get_validators_to_eject(self, blockstamp: ReferenceBlockStamp) -> list[tuple going_to_withdraw_balance # validators_to_eject balance ) if expected_balance >= to_withdraw_amount: + logger.info({ + 'msg': f'Expected withdrawal epoch: {withdrawal_epoch=}, ' + f'will be reached in {withdrawal_epoch - blockstamp.ref_epoch} epochs. ' + f'Validators with withdrawal_epoch before expected: {future_withdrawals=}. ' + f'Future rewards from skimming and EL rewards: {future_rewards=}. ' + f'Currently available balance: {total_available_balance=}. ' + f'Validators expecting to start exit balance: {validator_to_eject_balance_sum=}. ' + f'Validators going to eject balance: {going_to_withdraw_balance=}. 
', + 'withdrawal_epoch': withdrawal_epoch, + 'ref_epoch': blockstamp.ref_epoch, + 'future_withdrawals': future_withdrawals, + 'future_rewards': future_rewards, + 'total_available_balance': total_available_balance, + 'validator_to_eject_balance_sum': validator_to_eject_balance_sum, + 'going_to_withdraw_balance': going_to_withdraw_balance, + }) + return validators_to_eject validators_to_eject.append(validator_container) @@ -152,6 +189,12 @@ def get_validators_to_eject(self, blockstamp: ReferenceBlockStamp) -> list[tuple def _is_paused(self, blockstamp: ReferenceBlockStamp) -> bool: return self.report_contract.functions.isPaused().call(block_identifier=blockstamp.block_hash) + def is_reporting_allowed(self, blockstamp: ReferenceBlockStamp) -> bool: + on_pause = self._is_paused(blockstamp) + CONTRACT_ON_PAUSE.set(on_pause) + logger.info({'msg': 'Fetch isPaused from ejector bus contract.', 'value': on_pause}) + return not on_pause + @lru_cache(maxsize=1) def _get_withdrawable_lido_validators_balance(self, blockstamp: BlockStamp, on_epoch: EpochNumber) -> Wei: lido_validators = self.w3.lido_validators.get_lido_validators(blockstamp=blockstamp) @@ -181,7 +224,6 @@ def _get_total_el_balance(self, blockstamp: BlockStamp) -> Wei: self.w3.lido_contracts.get_withdrawal_balance(blockstamp) + self._get_buffer_ether(blockstamp) ) - logger.info({'msg': 'Calculate total el balance.', 'value': total_el_balance}) return total_el_balance def _get_buffer_ether(self, blockstamp: BlockStamp) -> Wei: @@ -201,7 +243,6 @@ def get_total_unfinalized_withdrawal_requests_amount(self, blockstamp: BlockStam unfinalized_steth = self.w3.lido_contracts.withdrawal_queue_nft.functions.unfinalizedStETH().call( block_identifier=blockstamp.block_hash, ) - logger.info({'msg': 'Wei to finalize.', 'value': unfinalized_steth}) return unfinalized_steth def _get_predicted_withdrawable_epoch( @@ -219,6 +260,8 @@ def _get_predicted_withdrawable_epoch( self.compute_activation_exit_epoch(blockstamp), ) + EJECTOR_MAX_EXIT_EPOCH.set(max_exit_epoch_number) + churn_limit = self._get_churn_limit(blockstamp) remain_exits_capacity_for_epoch = churn_limit - latest_to_exit_validators_count @@ -258,7 +301,8 @@ def _get_latest_exit_epoch(self, blockstamp: BlockStamp) -> tuple[EpochNumber, i return max_exit_epoch_number, latest_to_exit_validators_count - def _get_sweep_delay_in_epochs(self, blockstamp: ReferenceBlockStamp): + def _get_sweep_delay_in_epochs(self, blockstamp: ReferenceBlockStamp) -> int: + """Returns amount of epochs that will take to sweep all validators in chain.""" validators = self.w3.cc.get_validators(blockstamp) total_withdrawable_validators = len(list(filter(lambda validator: ( @@ -267,7 +311,8 @@ def _get_sweep_delay_in_epochs(self, blockstamp: ReferenceBlockStamp): ), validators))) chain_config = self.get_chain_config(blockstamp) - return int(total_withdrawable_validators * self.AVG_EXPECTING_WITHDRAWALS_SWEEP_DURATION_MULTIPLIER / MAX_WITHDRAWALS_PER_PAYLOAD / chain_config.slots_per_epoch) + full_sweep_in_epochs = total_withdrawable_validators / MAX_WITHDRAWALS_PER_PAYLOAD / chain_config.slots_per_epoch + return int(full_sweep_in_epochs * self.AVG_EXPECTING_WITHDRAWALS_SWEEP_DURATION_MULTIPLIER) @lru_cache(maxsize=1) def _get_churn_limit(self, blockstamp: ReferenceBlockStamp) -> int: @@ -292,7 +337,3 @@ def is_main_data_submitted(self, blockstamp: BlockStamp) -> bool: def is_contract_reportable(self, blockstamp: BlockStamp) -> bool: return not self.is_main_data_submitted(blockstamp) - - def is_reporting_allowed(self, 
blockstamp: BlockStamp) -> bool: - """At this point we can't check anything, so just return True.""" - return True diff --git a/src/modules/submodules/consensus.py b/src/modules/submodules/consensus.py index 6f5fafbab..f0ed85749 100644 --- a/src/modules/submodules/consensus.py +++ b/src/modules/submodules/consensus.py @@ -10,17 +10,19 @@ from web3.contract import AsyncContract, Contract from src import variables -from src.typings import BlockStamp, EpochNumber, ReferenceBlockStamp, SlotNumber +from src.metrics.prometheus.basic import ORACLE_SLOT_NUMBER, ORACLE_BLOCK_NUMBER, GENESIS_TIME +from src.typings import BlockStamp, ReferenceBlockStamp, SlotNumber from src.metrics.prometheus.business import ( ORACLE_MEMBER_LAST_REPORT_REF_SLOT, FRAME_CURRENT_REF_SLOT, FRAME_DEADLINE_SLOT, - ORACLE_SLOT_NUMBER, ORACLE_MEMBER_INFO + ORACLE_MEMBER_INFO ) from src.modules.submodules.exceptions import IsNotMemberException, IncompatibleContractVersion from src.modules.submodules.typings import ChainConfig, MemberInfo, ZERO_HASH, CurrentFrame, FrameConfig from src.utils.abi import named_tuple_to_dataclass from src.utils.blockstamp import build_blockstamp +from src.utils.web3converter import Web3Converter from src.utils.slot import get_reference_blockstamp from src.web3py.typings import Web3 @@ -28,15 +30,21 @@ class ConsensusModule(ABC): + """ + Module that works with Hash Consensus Contract. + + Do next things: + - Calculate report blockstamp if contract is reportable + - Calculates and sends report hash + - Decides in what order Oracles should report + + report_contract should contain getConsensusContract method. + """ report_contract: Contract CONTRACT_VERSION: int CONSENSUS_VERSION: int - # Default delay for default Oracle members. Member with submit data role should submit data first. 
- # If contract is reportable each member in order will submit data with difference with this amount of slots - SUBMIT_DATA_DELAY_IN_SLOTS = 6 - def __init__(self, w3: Web3): self.w3 = w3 @@ -53,8 +61,9 @@ def check_contract_configs(self): config = self.get_chain_config(bs) cc_config = self.w3.cc.get_config_spec() - genesis_time = self.w3.cc.get_genesis().genesis_time - if any((config.genesis_time != int(genesis_time), + genesis_time = int(self.w3.cc.get_genesis().genesis_time) + GENESIS_TIME.set(genesis_time) + if any((config.genesis_time != genesis_time, config.seconds_per_slot != int(cc_config.SECONDS_PER_SLOT), config.slots_per_epoch != int(cc_config.SLOTS_PER_EPOCH))): raise ValueError('Contract chain config is not compatible with Beacon chain.\n' @@ -139,15 +148,7 @@ def get_member_info(self, blockstamp: BlockStamp) -> MemberInfo: variables.ACCOUNT.address, ).call(block_identifier=blockstamp.block_hash) - submit_role = self.report_contract.functions.SUBMIT_DATA_ROLE().call( - block_identifier=blockstamp.block_hash, - ) - is_submit_member = self.report_contract.functions.hasRole( - submit_role, - variables.ACCOUNT.address, - ).call( - block_identifier=blockstamp.block_hash, - ) + is_submit_member = self._is_submit_member(blockstamp) if not is_member and not is_submit_member: raise IsNotMemberException( @@ -170,6 +171,22 @@ def get_member_info(self, blockstamp: BlockStamp) -> MemberInfo: return mi + def _is_submit_member(self, blockstamp: BlockStamp) -> bool: + if not variables.ACCOUNT: + return True + + submit_role = self.report_contract.functions.SUBMIT_DATA_ROLE().call( + block_identifier=blockstamp.block_hash, + ) + is_submit_member = self.report_contract.functions.hasRole( + submit_role, + variables.ACCOUNT.address, + ).call( + block_identifier=blockstamp.block_hash, + ) + + return is_submit_member + # ----- Calculation reference slot for report ----- def get_blockstamp_for_report(self, last_finalized_blockstamp: BlockStamp) -> Optional[ReferenceBlockStamp]: """ @@ -199,10 +216,14 @@ def get_blockstamp_for_report(self, last_finalized_blockstamp: BlockStamp) -> Op return None chain_config = self.get_chain_config(last_finalized_blockstamp) + frame_config = self.get_frame_config(last_finalized_blockstamp) + + converter = Web3Converter(chain_config, frame_config) + bs = get_reference_blockstamp( cc=self.w3.cc, ref_slot=member_info.current_frame_ref_slot, - ref_epoch=EpochNumber(member_info.current_frame_ref_slot // chain_config.slots_per_epoch), + ref_epoch=converter.get_epoch_by_slot(member_info.current_frame_ref_slot), last_finalized_slot_number=last_finalized_blockstamp.slot_number, ) logger.info({'msg': 'Calculate blockstamp for report.', 'value': bs}) @@ -278,7 +299,11 @@ def _process_report_data(self, blockstamp: ReferenceBlockStamp, report_data: tup return # Fast lane offchain implementation for report data - slots_to_sleep = self._get_slot_delay_before_data_submit(blockstamp) + # If the member was added in the current frame, + # the result of _get_slot_delay_before_data_submit may be inconsistent for different latest blocks, but it's ok. 
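The fast-lane comment above and the reworked `_get_slot_delay_before_data_submit` below stagger report-data submissions: members further from the fast lane wait more slots. A worked example with assumed inputs (5 committee members; `SUBMIT_DATA_DELAY_IN_SLOTS = 6`, the old hard-coded constant that is now read from `variables`):

```python
# Worked example (assumed inputs): how submit delays are staggered between members.
SUBMIT_DATA_DELAY_IN_SLOTS = 6   # former class constant, now a variables.* setting
member_count = 5

current_frame_number = 1234      # derived from the latest slot via Web3Converter
mem_position = 3                 # this oracle's position in the member list

current_position = current_frame_number % member_count   # 1234 % 5 == 4
sleep_count = mem_position - current_position             # 3 - 4 == -1
if sleep_count < 0:
    sleep_count += member_count                            # wrap around -> 4

# One extra delay is always applied for non submit-role members.
total_delay = (1 + sleep_count) * SUBMIT_DATA_DELAY_IN_SLOTS
print(total_delay)  # (1 + 4) * 6 == 30 slots
```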
+ # We can't use ref blockstamp here because new oracle member will fail is_member check, + # because it wasn't in quorum on ref_slot + slots_to_sleep = self._get_slot_delay_before_data_submit(latest_blockstamp) if slots_to_sleep: chain_configs = self.get_chain_config(blockstamp) @@ -358,6 +383,7 @@ def _get_latest_blockstamp(self) -> BlockStamp: bs = build_blockstamp(block_details) logger.debug({'msg': 'Fetch latest blockstamp.', 'value': bs}) ORACLE_SLOT_NUMBER.labels('head').set(bs.slot_number) + ORACLE_BLOCK_NUMBER.labels('head').set(bs.block_number) return bs @lru_cache(maxsize=1) @@ -374,7 +400,9 @@ def _get_slot_delay_before_data_submit(self, blockstamp: BlockStamp) -> int: frame_config = self.get_frame_config(blockstamp) chain_config = self.get_chain_config(blockstamp) - current_frame_number = int(blockstamp.slot_number / chain_config.slots_per_epoch / frame_config.epochs_per_frame) + converter = Web3Converter(chain_config, frame_config) + + current_frame_number = converter.get_frame_by_slot(blockstamp.slot_number) current_position = current_frame_number % len(members) sleep_count = mem_position - current_position @@ -382,7 +410,7 @@ def _get_slot_delay_before_data_submit(self, blockstamp: BlockStamp) -> int: sleep_count += len(members) # 1 - is default delay for non submit members. - total_delay = (1 + sleep_count) * self.SUBMIT_DATA_DELAY_IN_SLOTS + total_delay = (1 + sleep_count) * variables.SUBMIT_DATA_DELAY_IN_SLOTS logger.info({'msg': 'Calculate slots delay.', 'value': total_delay}) return total_delay diff --git a/src/modules/submodules/oracle_module.py b/src/modules/submodules/oracle_module.py index 12d0484de..b9a2b3b87 100644 --- a/src/modules/submodules/oracle_module.py +++ b/src/modules/submodules/oracle_module.py @@ -6,10 +6,11 @@ from timeout_decorator import timeout -from src.metrics.prometheus.business import ORACLE_SLOT_NUMBER +from src.metrics.prometheus.basic import ORACLE_BLOCK_NUMBER, ORACLE_SLOT_NUMBER from src.modules.submodules.exceptions import IsNotMemberException, IncompatibleContractVersion from src.providers.http_provider import NotOkResponse from src.providers.keys.client import KeysOutdatedException +from src.web3py.extensions.lido_validators import CountOfKeysDiffersException from src.utils.blockstamp import build_blockstamp from src.utils.slot import NoSlotsAvailable, SlotNotFinalized, InconsistentData from src.web3py.typings import Web3 @@ -37,7 +38,7 @@ class BaseModule(ABC): - Raise exceptions that could not be proceeded automatically. - Check Module didn't stick inside cycle forever. """ - DEFAULT_SLEEP = 12 + # This is reference mark for long sleep. Sleep until new finalized slot found. _slot_threshold = SlotNumber(0) @@ -55,14 +56,16 @@ def _cycle_handler(self): blockstamp = self._receive_last_finalized_slot() if blockstamp.slot_number > self._slot_threshold: + if self.w3.lido_contracts.has_contract_address_changed(): + self.clear_cache() + self.refresh_contracts() result = self.run_cycle(blockstamp) if result is ModuleExecuteDelay.NEXT_FINALIZED_EPOCH: - self.w3.lido_contracts.reload_contracts() self._slot_threshold = blockstamp.slot_number - logger.info({'msg': f'Cycle end. Sleep for {self.DEFAULT_SLEEP} seconds.'}) - time.sleep(self.DEFAULT_SLEEP) + logger.info({'msg': f'Cycle end. 
Sleep for {variables.CYCLE_SLEEP_IN_SECONDS} seconds.'}) + time.sleep(variables.CYCLE_SLEEP_IN_SECONDS) def _receive_last_finalized_slot(self) -> BlockStamp: block_root = BlockRoot(self.w3.cc.get_block_root('finalized').root) @@ -70,6 +73,7 @@ def _receive_last_finalized_slot(self) -> BlockStamp: bs = build_blockstamp(block_details) logger.info({'msg': 'Fetch last finalized BlockStamp.', 'value': asdict(bs)}) ORACLE_SLOT_NUMBER.labels('finalized').set(bs.slot_number) + ORACLE_BLOCK_NUMBER.labels('finalized').set(bs.block_number) return bs def run_cycle(self, blockstamp: BlockStamp) -> ModuleExecuteDelay: @@ -79,10 +83,10 @@ def run_cycle(self, blockstamp: BlockStamp) -> ModuleExecuteDelay: return self.execute_module(blockstamp) except IsNotMemberException as exception: logger.error({'msg': 'Provided account is not part of Oracle`s committee.'}) - raise exception from exception + raise exception except IncompatibleContractVersion as exception: logger.error({'msg': 'Incompatible Contract version. Please update Oracle Daemon.'}) - raise exception from exception + raise exception except TimeoutError as exception: logger.error({'msg': 'Oracle module do not respond.', 'error': str(exception)}) except NoActiveProviderError as exception: @@ -95,6 +99,8 @@ def run_cycle(self, blockstamp: BlockStamp) -> ModuleExecuteDelay: logger.error({'msg': 'Inconsistent response from consensus layer node.', 'error': str(error)}) except KeysOutdatedException as error: logger.error({'msg': 'Keys API service returns outdated data.', 'error': str(error)}) + except CountOfKeysDiffersException as error: + logger.error({'msg': 'Keys API service returned incorrect number of keys.', 'error': str(error)}) return ModuleExecuteDelay.NEXT_SLOT @@ -106,4 +112,14 @@ def execute_module(self, last_finalized_blockstamp: BlockStamp) -> ModuleExecute ModuleExecuteDelay.NEXT_FINALIZED_EPOCH - to sleep until new finalized epoch ModuleExecuteDelay.NEXT_SLOT - to sleep for a slot """ - raise NotImplementedError('Module should implement this method.') + raise NotImplementedError('Module should implement this method.') # pragma: no cover + + @abstractmethod + def refresh_contracts(self): + """This method called if contracts addresses were changed""" + raise NotImplementedError('Module should implement this method.') # pragma: no cover + + @abstractmethod + def clear_cache(self): + """Clear cache for module and all submodules""" + raise NotImplementedError('Module should implement this method.') # pragma: no cover diff --git a/src/providers/consensus/client.py b/src/providers/consensus/client.py index 5a18654af..6e879021d 100644 --- a/src/providers/consensus/client.py +++ b/src/providers/consensus/client.py @@ -1,4 +1,5 @@ from functools import lru_cache +from http import HTTPStatus from typing import Literal, Optional, Union from src.metrics.logging import logging @@ -40,9 +41,7 @@ class ConsensusClient(HTTPProvider): API_GET_GENESIS = 'eth/v1/beacon/genesis' def get_config_spec(self): - """ - Spec: https://ethereum.github.io/beacon-APIs/#/Config/getSpec - """ + """Spec: https://ethereum.github.io/beacon-APIs/#/Config/getSpec""" data, _ = self._get(self.API_GET_SPEC) if not isinstance(data, dict): raise ValueError("Expected mapping response from getSpec") @@ -52,7 +51,7 @@ def get_genesis(self): """ Spec: https://ethereum.github.io/beacon-APIs/#/Beacon/getGenesis """ - data, _ = self._get('eth/v1/beacon/genesis') + data, _ = self._get(self.API_GET_GENESIS) if not isinstance(data, dict): raise ValueError("Expected mapping response from 
getGenesis") return GenesisResponse.from_response(**data) @@ -61,19 +60,25 @@ def get_block_root(self, state_id: Union[SlotNumber, BlockRoot, LiteralState]) - """ Spec: https://ethereum.github.io/beacon-APIs/#/Beacon/getBlockRoot - No cache because this method is using to get finalized and head block, and they could not be cached by args. + There is no cache because this method is used to get finalized and head blocks. """ - data, _ = self._get(self.API_GET_BLOCK_ROOT, (state_id,)) + data, _ = self._get( + self.API_GET_BLOCK_ROOT, + path_params=(state_id,), + force_raise=self.__raise_last_missed_slot_error, + ) if not isinstance(data, dict): raise ValueError("Expected mapping response from getBlockRoot") return BlockRootResponse.from_response(**data) @lru_cache(maxsize=1) def get_block_header(self, state_id: Union[SlotNumber, BlockRoot]) -> BlockHeaderFullResponse: - """ - Spec: https://ethereum.github.io/beacon-APIs/#/Beacon/getBlockHeader - """ - data, meta_data = self._get(self.API_GET_BLOCK_HEADER, (state_id,)) + """Spec: https://ethereum.github.io/beacon-APIs/#/Beacon/getBlockHeader""" + data, meta_data = self._get( + self.API_GET_BLOCK_HEADER, + path_params=(state_id,), + force_raise=self.__raise_last_missed_slot_error, + ) if not isinstance(data, dict): raise ValueError("Expected mapping response from getBlockHeader") resp = BlockHeaderFullResponse.from_response(data=BlockHeaderResponseData.from_response(**data), **meta_data) @@ -82,7 +87,11 @@ def get_block_header(self, state_id: Union[SlotNumber, BlockRoot]) -> BlockHeade @lru_cache(maxsize=1) def get_block_details(self, state_id: Union[SlotNumber, BlockRoot]) -> BlockDetailsResponse: """Spec: https://ethereum.github.io/beacon-APIs/#/Beacon/getBlockV2""" - data, _ = self._get(self.API_GET_BLOCK_DETAILS, (state_id,)) + data, _ = self._get( + self.API_GET_BLOCK_DETAILS, + path_params=(state_id,), + force_raise=self.__raise_last_missed_slot_error, + ) if not isinstance(data, dict): raise ValueError("Expected mapping response from getBlockV2") return BlockDetailsResponse.from_response(**data) @@ -96,16 +105,54 @@ def get_validators(self, blockstamp: BlockStamp) -> list[Validator]: def get_validators_no_cache(self, blockstamp: BlockStamp, pub_keys: Optional[str | tuple] = None) -> list[dict]: """Spec: https://ethereum.github.io/beacon-APIs/#/Beacon/getStateValidators""" try: - data, _ = self._get(self.API_GET_VALIDATORS, (blockstamp.state_root,), query_params={'id': pub_keys}) + data, _ = self._get( + self.API_GET_VALIDATORS, + path_params=(blockstamp.state_root,), + query_params={'id': pub_keys}, + force_raise=self.__raise_on_prysm_error + ) if not isinstance(data, list): raise ValueError("Expected list response from getStateValidators") return data except NotOkResponse as error: - # Avoid Prysm issue with state root - https://github.com/prysmaticlabs/prysm/issues/12053 - # Trying to get validators by slot number - if 'State not found: state not found in the last' in error.text: - data, _ = self._get(self.API_GET_VALIDATORS, (blockstamp.slot_number,), query_params={'id': pub_keys}) - if not isinstance(data, list): - raise ValueError("Expected list response from getStateValidators") # pylint: disable=raise-missing-from - return data - raise error from error + if self.PRYSM_STATE_NOT_FOUND_ERROR in error.text: + return self._get_validators_with_prysm(blockstamp, pub_keys) + + raise error + + PRYSM_STATE_NOT_FOUND_ERROR = 'State not found: state not found in the last' + + def __raise_on_prysm_error(self, errors: list[Exception]) -> 
Exception | None: + """ + Prysm can't return validators by state root if it is old enough, but it can return them via slot number. + + raise error immediately if this is prysm specific exception + """ + last_error = errors[-1] + if isinstance(last_error, NotOkResponse) and self.PRYSM_STATE_NOT_FOUND_ERROR in last_error.text: + return last_error + return None + + def _get_validators_with_prysm(self, blockstamp: BlockStamp, pub_keys: Optional[str | tuple] = None) -> list[dict]: + # Avoid Prysm issue with state root - https://github.com/prysmaticlabs/prysm/issues/12053 + # Trying to get validators by slot number + data, _ = self._get( + self.API_GET_VALIDATORS, + path_params=(blockstamp.slot_number,), + query_params={'id': pub_keys} + ) + if not isinstance(data, list): + raise ValueError("Expected list response from getStateValidators") # pylint: disable=raise-missing-from + return data + + def __raise_last_missed_slot_error(self, errors: list[Exception]) -> Exception | None: + """ + Prioritize NotOkResponse before other exceptions (ConnectionError, TimeoutError). + If status is 404 slot is missed and this should be handled correctly. + """ + if len(errors) == len(self.hosts): + for error in errors: + if isinstance(error, NotOkResponse) and error.status == HTTPStatus.NOT_FOUND: + return error + + return None diff --git a/src/providers/consensus/typings.py b/src/providers/consensus/typings.py index 5e35b7379..f5022e56b 100644 --- a/src/providers/consensus/typings.py +++ b/src/providers/consensus/typings.py @@ -11,6 +11,7 @@ class BeaconSpecResponse(FromResponse): DEPOSIT_CHAIN_ID: str SLOTS_PER_EPOCH: str SECONDS_PER_SLOT: str + DEPOSIT_CONTRACT_ADDRESS: str @dataclass diff --git a/src/providers/http_provider.py b/src/providers/http_provider.py index 7c8c44c41..6d3f5ab7f 100644 --- a/src/providers/http_provider.py +++ b/src/providers/http_provider.py @@ -1,17 +1,23 @@ import logging from abc import ABC from http import HTTPStatus -from typing import Optional, Tuple, Sequence +from typing import Optional, Tuple, Sequence, Callable from urllib.parse import urljoin, urlparse from prometheus_client import Histogram -from requests import Session, JSONDecodeError +from requests import Session, JSONDecodeError, Timeout from requests.adapters import HTTPAdapter from urllib3 import Retry +from src.variables import HTTP_REQUEST_RETRY_COUNT, HTTP_REQUEST_SLEEP_BEFORE_RETRY_IN_SECONDS, HTTP_REQUEST_TIMEOUT + logger = logging.getLogger(__name__) +class NoHostsProvided(Exception): + pass + + class NotOkResponse(Exception): status: int text: str @@ -23,17 +29,21 @@ def __init__(self, *args, status: int, text: str): class HTTPProvider(ABC): - REQUEST_TIMEOUT = 300 - + """ + Base HTTP Provider with metrics and retry strategy integrated inside. 
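The `HTTPProvider` rework in the surrounding hunks replaces the single `host` with a `hosts` list: `_get` now walks the hosts in order, collects the exceptions it saw, and lets an optional `force_raise` callback decide that one of those errors should be raised immediately instead of falling through to the next host. A minimal standalone sketch of that control flow, with hypothetical names and no real HTTP, might look like this:

```python
from http import HTTPStatus
from typing import Callable, Optional


class NotOkResponse(Exception):
    """Simplified stand-in for the provider's non-200 error."""
    def __init__(self, msg: str, status: int):
        super().__init__(msg)
        self.status = status


def get_with_fallbacks(
    hosts: list[str],
    fetch: Callable[[str], dict],
    force_raise: Callable[[list[Exception]], Optional[Exception]] = lambda _: None,
) -> dict:
    """Try hosts in order; let force_raise short-circuit, otherwise raise the last error."""
    if not hosts:
        raise ValueError('No hosts provided')

    errors: list[Exception] = []
    for host in hosts:
        try:
            return fetch(host)
        except Exception as error:  # broad catch mirrors the fallback loop in the diff
            errors.append(error)
            if (to_raise := force_raise(errors)) is not None:
                raise to_raise from error
    raise errors[-1]


def make_missed_slot_callback(total_hosts: int) -> Callable[[list[Exception]], Optional[Exception]]:
    """Hypothetical callback: once every host has failed, prefer a 404 (missed slot) over other errors."""
    def callback(errors: list[Exception]) -> Optional[Exception]:
        if len(errors) == total_hosts:
            for error in errors:
                if isinstance(error, NotOkResponse) and error.status == HTTPStatus.NOT_FOUND:
                    return error
        return None
    return callback
```

This only sketches the ordering logic; the real provider additionally records request metrics and logs each failed host before moving on.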
+ """ PROMETHEUS_HISTOGRAM: Histogram - def __init__(self, host: str): - self.host = host + def __init__(self, hosts: list[str]): + if not hosts: + raise NoHostsProvided(f"No hosts provided for {self.__class__.__name__}") + + self.hosts = hosts retry_strategy = Retry( - total=5, + total=HTTP_REQUEST_RETRY_COUNT, status_forcelist=[418, 429, 500, 502, 503, 504], - backoff_factor=5, + backoff_factor=HTTP_REQUEST_SLEEP_BEFORE_RETRY_IN_SECONDS, ) adapter = HTTPAdapter(max_retries=retry_strategy) @@ -48,34 +58,88 @@ def _urljoin(host, url): return urljoin(host, url) def _get( - self, endpoint: str, path_params: Optional[Sequence[str | int]] = None, query_params: Optional[dict] = None + self, + endpoint: str, + path_params: Optional[Sequence[str | int]] = None, + query_params: Optional[dict] = None, + force_raise: Callable[..., Exception | None] = lambda _: None, ) -> Tuple[dict | list, dict]: """ - Returns (data, meta) + Get request with fallbacks + Returns (data, meta) or raises exception + + force_raise - function that returns an Exception if it should be thrown immediately. + Sometimes NotOk response from first provider is the response that we are expecting. """ - with self.PROMETHEUS_HISTOGRAM.time() as t: - response = self.session.get( - self._urljoin(self.host, endpoint.format(*path_params) if path_params else endpoint), - params=query_params, - timeout=self.REQUEST_TIMEOUT, - ) + errors: list[Exception] = [] + + for host in self.hosts: + try: + return self._get_without_fallbacks(host, endpoint, path_params, query_params) + except Exception as e: # pylint: disable=W0703 + errors.append(e) + + # Check if exception should be raised immediately + if to_force_raise := force_raise(errors): + raise to_force_raise from e + + logger.warning( + { + 'msg': f'[{self.__class__.__name__}] Host [{urlparse(host).netloc}] responded with error', + 'error': str(e), + 'provider': urlparse(host).netloc, + } + ) + + # Raise error from last provider. + raise errors[-1] + + def _get_without_fallbacks( + self, + host: str, + endpoint: str, + path_params: Optional[Sequence[str | int]] = None, + query_params: Optional[dict] = None + ) -> Tuple[dict | list, dict]: + """ + Simple get request without fallbacks + Returns (data, meta) or raises exception + """ + complete_endpoint = endpoint.format(*path_params) if path_params else endpoint + with self.PROMETHEUS_HISTOGRAM.time() as t: try: - if response.status_code != HTTPStatus.OK: - msg = f'Response [{response.status_code}] with text: "{str(response.text)}" returned.' - logger.debug({'msg': msg}) - raise NotOkResponse(msg, status=response.status_code, text=response.text) + response = self.session.get( + self._urljoin(host, complete_endpoint if path_params else endpoint), + params=query_params, + timeout=HTTP_REQUEST_TIMEOUT, + ) + except Timeout as error: + msg = f'Timeout error from {complete_endpoint}.' + logger.debug({'msg': msg}) + t.labels( + endpoint=endpoint, + code=0, + domain=urlparse(host).netloc, + ) + raise TimeoutError(msg) from error + + response_fail_msg = f'Response from {complete_endpoint} [{response.status_code}] with text: "{str(response.text)}" returned.' + + if response.status_code != HTTPStatus.OK: + logger.debug({'msg': response_fail_msg}) + raise NotOkResponse(response_fail_msg, status=response.status_code, text=response.text) + try: json_response = response.json() except JSONDecodeError as error: - msg = f'Response [{response.status_code}] with text: "{str(response.text)}" returned.' 
- logger.debug({'msg': msg}) - raise error from error + logger.debug({'msg': response_fail_msg}) + raise error finally: t.labels( endpoint=endpoint, code=response.status_code, - domain=urlparse(self.host).netloc, + domain=urlparse(host).netloc, ) if 'data' in json_response: @@ -85,4 +149,5 @@ def _get( else: data = json_response meta = {} + return data, meta diff --git a/src/providers/keys/client.py b/src/providers/keys/client.py index 40fbfe1ca..508acd6fa 100644 --- a/src/providers/keys/client.py +++ b/src/providers/keys/client.py @@ -7,6 +7,7 @@ from src.providers.keys.typings import LidoKey, KeysApiStatus from src.typings import BlockStamp from src.utils.dataclass import list_of_dataclasses +from src import variables class KeysOutdatedException(Exception): @@ -14,37 +15,41 @@ class KeysOutdatedException(Exception): class KeysAPIClient(HTTPProvider): - - RETRY_COUNT = 5 - REQUEST_TIMEOUT = 10 - SLEEP_SECONDS = 12 - + """ + Lido Keys are stored in different modules in on-chain and off-chain format. + Keys API service fetches all lido keys and provide them in convenient format. + Keys could not be deleted, so the amount of them always increasing. + One thing to check before use data from Keys API service is that latest fetched block in meta field is greater + than the block we are fetching on. + + Keys API specification can be found here https://keys-api.lido.fi/api/static/index.html + """ PROMETHEUS_HISTOGRAM = KEYS_API_REQUESTS_DURATION - ALL_KEYS = 'v1/keys' + USED_KEYS = 'v1/keys?used=true' STATUS = 'v1/status' def _get_with_blockstamp(self, url: str, blockstamp: BlockStamp, params: Optional[dict] = None) -> dict | list: """ Returns response if blockstamp < blockNumber from response """ - for i in range(self.RETRY_COUNT): + for i in range(variables.HTTP_REQUEST_RETRY_COUNT): data, meta = self._get(url, query_params=params) blocknumber_meta = meta['meta']['elBlockSnapshot']['blockNumber'] KEYS_API_LATEST_BLOCKNUMBER.set(blocknumber_meta) if blocknumber_meta >= blockstamp.block_number: return data - if i != self.RETRY_COUNT - 1: - sleep(self.SLEEP_SECONDS) + if i != variables.HTTP_REQUEST_RETRY_COUNT - 1: + sleep(variables.HTTP_REQUEST_SLEEP_BEFORE_RETRY_IN_SECONDS) - raise KeysOutdatedException(f'Keys API Service stuck, no updates for {self.SLEEP_SECONDS * self.RETRY_COUNT} seconds.') + raise KeysOutdatedException(f'Keys API Service stuck, no updates for {variables.HTTP_REQUEST_SLEEP_BEFORE_RETRY_IN_SECONDS * variables.HTTP_REQUEST_RETRY_COUNT} seconds.') @lru_cache(maxsize=1) @list_of_dataclasses(LidoKey.from_response) - def get_all_lido_keys(self, blockstamp: BlockStamp) -> list[dict]: - """Docs: https://keys-api.lido.fi/api/static/index.html#/sr-module-keys/SRModulesKeysController_getGroupedByModuleKeys""" - return cast(list[dict], self._get_with_blockstamp(self.ALL_KEYS, blockstamp)) + def get_used_lido_keys(self, blockstamp: BlockStamp) -> list[dict]: + """Docs: https://keys-api.lido.fi/api/static/index.html#/keys/KeysController_get""" + return cast(list[dict], self._get_with_blockstamp(self.USED_KEYS, blockstamp)) def get_status(self) -> KeysApiStatus: """Docs: https://keys-api.lido.fi/api/static/index.html#/status/StatusController_get""" diff --git a/src/services/bunker.py b/src/services/bunker.py index 4da34c3b1..f19d01066 100644 --- a/src/services/bunker.py +++ b/src/services/bunker.py @@ -1,6 +1,12 @@ import logging from src.constants import TOTAL_BASIS_POINTS, GWEI_TO_WEI +from src.metrics.prometheus.validators import ( + ALL_VALIDATORS, + LIDO_VALIDATORS, + 
ALL_SLASHED_VALIDATORS, + LIDO_SLASHED_VALIDATORS, +) from src.metrics.prometheus.duration_meter import duration_meter from src.services.bunker_cases.abnormal_cl_rebase import AbnormalClRebase from src.services.bunker_cases.midterm_slashing_penalty import MidtermSlashingPenalty @@ -8,6 +14,7 @@ from src.modules.accounting.typings import LidoReportRebase from src.modules.submodules.consensus import FrameConfig, ChainConfig from src.services.bunker_cases.typings import BunkerConfig +from src.services.safe_border import filter_slashed_validators from src.typings import BlockStamp, ReferenceBlockStamp, Gwei from src.web3py.typings import Web3 @@ -17,6 +24,15 @@ class BunkerService: """ + The "bunker mode" is triggered by one of three cases: + - Negative CL rebase + - High midterm slashing penalty + - Abnormal CL rebase + + Its purpose is to maintain the socialization of all problems across the Lido validator pool and to prevent sophisticated attacks. + To achieve this, "bunker mode" limits operations in the Lido protocol (withdrawal request finalization). + + For more info about bunker mode see: https://research.lido.fi/t/withdrawals-for-lido-on-ethereum-bunker-mode-design-and-implementation/ """ def __init__(self, w3: Web3): @@ -30,11 +46,19 @@ def is_bunker_mode( chain_config: ChainConfig, simulated_cl_rebase: LidoReportRebase, ) -> bool: + """If any of the cases is True, then bunker mode is ON""" bunker_config = self._get_config(blockstamp) all_validators = self.w3.cc.get_validators(blockstamp) lido_validators = self.w3.lido_validators.get_lido_validators(blockstamp) + # Set metrics + ALL_VALIDATORS.set(len(all_validators)) + LIDO_VALIDATORS.set(len(lido_validators)) + ALL_SLASHED_VALIDATORS.set(len(filter_slashed_validators(all_validators))) + LIDO_SLASHED_VALIDATORS.set(len(filter_slashed_validators(lido_validators))) + last_report_ref_slot = self.w3.lido_contracts.get_accounting_last_processing_ref_slot(blockstamp) + # If it is the very first run, we don't check bunker mode if not last_report_ref_slot: logger.info({"msg": "No one report yet.
Bunker status will not be checked"}) return False @@ -64,7 +88,7 @@ def is_bunker_mode( def get_cl_rebase_for_current_report(self, blockstamp: BlockStamp, simulated_cl_rebase: LidoReportRebase) -> Gwei: """ - Get CL rebase from Accounting contract + Get simulated CL rebase and subtract the total supply before the report """ logger.info({"msg": "Getting CL rebase for frame"}) before_report_total_pooled_ether = self._get_total_supply(blockstamp) @@ -78,9 +102,7 @@ def _get_total_supply(self, blockstamp: BlockStamp) -> Gwei: return self.w3.lido_contracts.lido.functions.totalSupply().call(block_identifier=blockstamp.block_hash) def _get_config(self, blockstamp: BlockStamp) -> BunkerConfig: - """ - Get config values from OracleDaemonConfig contract - """ + """Get config values from OracleDaemonConfig contract""" config = self.w3.lido_contracts.oracle_daemon_config return BunkerConfig( Web3.to_int( diff --git a/src/services/bunker_cases/abnormal_cl_rebase.py b/src/services/bunker_cases/abnormal_cl_rebase.py index 281276bbb..6a58db035 100644 --- a/src/services/bunker_cases/abnormal_cl_rebase.py +++ b/src/services/bunker_cases/abnormal_cl_rebase.py @@ -1,12 +1,11 @@ import logging import math -from statistics import mean from typing import Sequence from web3.types import EventData -from src.constants import MAX_EFFECTIVE_BALANCE +from src.constants import MAX_EFFECTIVE_BALANCE, EFFECTIVE_BALANCE_INCREMENT from src.modules.submodules.typings import ChainConfig from src.providers.consensus.typings import Validator from src.providers.keys.typings import LidoKey @@ -39,23 +38,34 @@ def is_abnormal_cl_rebase( lido_validators: list[LidoValidator], current_report_cl_rebase: Gwei ) -> bool: + """ + First of all, we should calculate the normal CL rebase for this report. + If the diff between the current CL rebase and the normal CL rebase is more than `normalized_cl_reward_mistake_rate`, + then we should check the intraframe sampled CL rebase: we consider two points (nearest and distant slots) + in the frame and calculate the CL rebase for each of them; if one of them is negative, the CL rebase is abnormal. + + `normalized_cl_reward_mistake_rate`, `rebase_check_nearest_epoch_distance` and `rebase_check_distant_epoch_distance` + are configurable parameters, which can change the behavior of this check. + For example, the intraframe sampled CL rebase check can be disabled so that only the normal CL rebase is considered. + """ self.all_validators = all_validators self.lido_validators = lido_validators - self.lido_keys = self.w3.kac.get_all_lido_keys(blockstamp) + self.lido_keys = self.w3.kac.get_used_lido_keys(blockstamp) logger.info({"msg": "Checking abnormal CL rebase"}) normal_report_cl_rebase = self._calculate_lido_normal_cl_rebase(blockstamp) + diff_current_with_normal = 1 - current_report_cl_rebase / normal_report_cl_rebase - if normal_report_cl_rebase > current_report_cl_rebase: + if diff_current_with_normal > self.b_conf.normalized_cl_reward_mistake_rate: logger.info({"msg": "CL rebase in frame is abnormal"}) - no_need_specific_cl_rebase_check = ( + no_need_intraframe_sampled_cl_rebase_check = ( self.b_conf.rebase_check_nearest_epoch_distance == 0 and self.b_conf.rebase_check_distant_epoch_distance == 0 ) - if no_need_specific_cl_rebase_check: - logger.info({"msg": "Specific CL rebase calculation are disabled. Cl rebase is abnormal"}) + if no_need_intraframe_sampled_cl_rebase_check: + logger.info({"msg": "Intraframe sampled CL rebase calculation is disabled.
Cl rebase is abnormal"}) return True if self._is_negative_specific_cl_rebase(blockstamp): @@ -77,15 +87,18 @@ def _calculate_lido_normal_cl_rebase(self, blockstamp: ReferenceBlockStamp) -> G self.lido_keys, last_report_all_validators ) - mean_all_effective_balance = AbnormalClRebase.get_mean_effective_balance_sum( + mean_sum_of_all_effective_balance = AbnormalClRebase.get_mean_sum_of_effective_balance( last_report_blockstamp, blockstamp, last_report_all_validators, self.all_validators ) - mean_lido_effective_balance = AbnormalClRebase.get_mean_effective_balance_sum( + mean_sum_of_lido_effective_balance = AbnormalClRebase.get_mean_sum_of_effective_balance( last_report_blockstamp, blockstamp, last_report_lido_validators, self.lido_validators ) normal_cl_rebase = AbnormalClRebase.calculate_normal_cl_rebase( - self.b_conf, mean_all_effective_balance, mean_lido_effective_balance, epochs_passed_since_last_report + self.b_conf, + mean_sum_of_all_effective_balance, + mean_sum_of_lido_effective_balance, + epochs_passed_since_last_report ) logger.info({"msg": f"Normal CL rebase: {normal_cl_rebase} Gwei"}) @@ -101,19 +114,19 @@ def _is_negative_specific_cl_rebase(self, blockstamp: ReferenceBlockStamp) -> bo if nearest_blockstamp.block_number == distant_blockstamp.block_number: logger.info( - {"msg": "Nearest and distant blocks are the same. Specific CL rebase will be calculated once"} + {"msg": "Nearest and distant blocks are the same. Intraframe sampled CL rebase will be calculated once"} ) specific_cl_rebase = self._calculate_cl_rebase_between_blocks(nearest_blockstamp, blockstamp) - logger.info({"msg": f"Specific CL rebase: {specific_cl_rebase} Gwei"}) + logger.info({"msg": f"Intraframe sampled CL rebase: {specific_cl_rebase} Gwei"}) return specific_cl_rebase < 0 nearest_cl_rebase = self._calculate_cl_rebase_between_blocks(nearest_blockstamp, blockstamp) - logger.info({"msg": f"Nearest specific CL rebase {nearest_cl_rebase} Gwei"}) + logger.info({"msg": f"Nearest intraframe sampled CL rebase {nearest_cl_rebase} Gwei"}) if nearest_cl_rebase < 0: return True distant_cl_rebase = self._calculate_cl_rebase_between_blocks(distant_blockstamp, blockstamp) - logger.info({"msg": f"Distant specific CL rebase {distant_cl_rebase} Gwei"}) + logger.info({"msg": f"Distant intraframe sampled CL rebase {distant_cl_rebase} Gwei"}) if distant_cl_rebase < 0: return True @@ -153,8 +166,12 @@ def _calculate_cl_rebase_between_blocks( To account for all changes in validators' balances, we must account withdrawn events from WithdrawalVault contract. Check for these events is enough to account for all withdrawals since the protocol assumes that - the vault can only be withdrawn at the time of the Oracle report + the vault can only be withdrawn at the time of the Oracle report between reference slots. 
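To make the correction described in this docstring concrete: the raw balance difference has to be reduced by the deposits of validators that appeared in the interval and increased by the ETH that already left the WithdrawalVault. The combining line itself is outside this hunk, so the sign convention below is an assumption for illustration, and all numbers are made up:

```python
GWEI_PER_ETH = 10**9
MAX_EFFECTIVE_BALANCE = 32 * GWEI_PER_ETH  # per-validator deposit size, in Gwei

# Hypothetical balances for one inter-block interval (all in Gwei).
prev_lido_balance_with_vault = 5_000_000 * GWEI_PER_ETH
ref_lido_balance_with_vault = 5_000_210 * GWEI_PER_ETH
raw_cl_rebase = ref_lido_balance_with_vault - prev_lido_balance_with_vault        # +210 ETH observed

new_validators_appeared = 6
validators_count_diff_in_gwei = new_validators_appeared * MAX_EFFECTIVE_BALANCE   # 192 ETH of deposits, not rewards

withdrawn_from_vault = 50 * GWEI_PER_ETH  # ETH already withdrawn from the vault during the interval

# Assumed combination: strip the fresh deposits out of the raw diff, add back what was withdrawn.
corrected_cl_rebase = raw_cl_rebase - validators_count_diff_in_gwei + withdrawn_from_vault
assert corrected_cl_rebase == 68 * GWEI_PER_ETH
```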
""" + if prev_blockstamp.block_number == ref_blockstamp.block_number: + # Can't calculate rebase between the same block + return Gwei(0) + prev_lido_validators = LidoValidatorsProvider.merge_validators_with_keys( self.lido_keys, self.w3.cc.get_validators_no_cache(prev_blockstamp), @@ -168,14 +185,15 @@ def _calculate_cl_rebase_between_blocks( prev_blockstamp, prev_lido_validators ) - # Raw CL rebase are calculated as difference between reference and previous Lido validators' balances + # Raw CL rebase is calculated as difference between reference and previous Lido validators' balances + # Without accounting withdrawals from WithdrawalVault raw_cl_rebase = ref_lido_balance_with_vault - prev_lido_balance_with_vault - # We should account validators who have been activated between blocks - # And withdrawals from WithdrawalVault + # We should account validators who have been appeared between blocks validators_count_diff_in_gwei = AbnormalClRebase.calculate_validators_count_diff_in_gwei( prev_lido_validators, self.lido_validators ) + # And withdrawals from WithdrawalVault withdrawn_from_vault = self._get_withdrawn_from_vault_between_blocks(prev_blockstamp, ref_blockstamp) # Finally, we can calculate corrected CL rebase @@ -206,6 +224,7 @@ def _get_withdrawn_from_vault_between_blocks( Lookup for ETHDistributed event and expect no one or only one event, from which we'll get withdrawalsWithdrawn value """ + logger.info( {"msg": f"Get withdrawn from vault between {prev_blockstamp.block_number,ref_blockstamp.block_number} blocks"} ) @@ -252,7 +271,7 @@ def calculate_validators_count_diff_in_gwei( ref_validators: Sequence[Validator], ) -> Gwei: """ - Handle 32 ETH balances of freshly baked validators, who was activated between epochs + Handle 32 ETH balances of freshly baked validators, who was appeared between epochs Lido validators are counted by public keys that the protocol deposited with 32 ETH, so we can safely count the differences in the number of validators when they occur by deposit size. 
Any predeposits to Lido keys will not be counted until the key is deposited through the protocol @@ -264,7 +283,7 @@ def calculate_validators_count_diff_in_gwei( return Gwei(validators_diff * MAX_EFFECTIVE_BALANCE) @staticmethod - def get_mean_effective_balance_sum( + def get_mean_sum_of_effective_balance( last_report_blockstamp: ReferenceBlockStamp, ref_blockstamp: ReferenceBlockStamp, last_report_validators: Sequence[Validator], @@ -279,7 +298,7 @@ def get_mean_effective_balance_sum( ref_effective_balance_sum = calculate_active_effective_balance_sum( ref_validators, ref_blockstamp.ref_epoch ) - return Gwei(int(mean((ref_effective_balance_sum, last_report_effective_balance_sum)))) + return Gwei((ref_effective_balance_sum + last_report_effective_balance_sum) // 2) @staticmethod def validate_slot_distance(distant_slot: SlotNumber, nearest_slot: SlotNumber, ref_slot: SlotNumber): @@ -296,8 +315,8 @@ def calculate_validators_balance_sum(validators: Sequence[Validator]) -> Gwei: @staticmethod def calculate_normal_cl_rebase( bunker_config: BunkerConfig, - mean_all_effective_balance_sum: Gwei, - mean_lido_effective_balance_sum: Gwei, + mean_sum_of_all_effective_balance: Gwei, + mean_sum_of_lido_effective_balance: Gwei, epochs_passed: int, ) -> Gwei: """ @@ -311,12 +330,14 @@ def calculate_normal_cl_rebase( active validators between current oracle report epoch and last one - Randomness within measurement algorithm * Not absolutely ideal performance of Lido Validators and network as a whole - Randomness of real world If the difference between observed real CL rewards and its theoretical value (normal_cl_rebase) couldn't be explained by - those 4 factors that means there is an additional factor leading to lower rewards - incidents within Protocol. + those 4 factors that means there is an additional factor leading to lower rewards - incidents within Lido or BeaconChain. 
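As a rough sanity check of the normal CL rebase formula in the hunk just below (which also drops the old `(1 - normalized_cl_reward_mistake_rate)` factor), here is a back-of-the-envelope calculation with made-up but realistic-looking inputs:

```python
import math

GWEI_PER_ETH = 10**9

# Made-up but realistic-looking inputs (balances in Gwei).
normalized_cl_reward_per_epoch = 64            # tied to the spec's BASE_REWARD_FACTOR, per the docstring
epochs_passed = 225                            # roughly one day's worth of epochs
mean_sum_of_all_effective_balance = 18_000_000 * GWEI_PER_ETH
mean_sum_of_lido_effective_balance = 5_500_000 * GWEI_PER_ETH

normal_cl_rebase = int(
    normalized_cl_reward_per_epoch * mean_sum_of_lido_effective_balance * epochs_passed
    / math.sqrt(mean_sum_of_all_effective_balance)
)
print(normal_cl_rebase // GWEI_PER_ETH)  # ~590 ETH expected for Lido over the period
```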
To formalize “high enough” difference we’re suggesting `normalized_cl_reward_per_epoch` constant represent ethereum specification and equals to `BASE_REWARD_FACTOR` constant """ + # It should be at least 1 ETH to avoid division by zero + mean_sum_of_all_effective_balance = max(Gwei(EFFECTIVE_BALANCE_INCREMENT), mean_sum_of_all_effective_balance) normal_cl_rebase = int( - (bunker_config.normalized_cl_reward_per_epoch * mean_lido_effective_balance_sum * epochs_passed) - / math.sqrt(mean_all_effective_balance_sum) * (1 - bunker_config.normalized_cl_reward_mistake_rate) + (bunker_config.normalized_cl_reward_per_epoch * mean_sum_of_lido_effective_balance * epochs_passed) + / math.sqrt(mean_sum_of_all_effective_balance) ) return Gwei(normal_cl_rebase) diff --git a/src/services/bunker_cases/midterm_slashing_penalty.py b/src/services/bunker_cases/midterm_slashing_penalty.py index d64d8ea6b..69b75980e 100644 --- a/src/services/bunker_cases/midterm_slashing_penalty.py +++ b/src/services/bunker_cases/midterm_slashing_penalty.py @@ -10,7 +10,7 @@ from src.modules.submodules.typings import FrameConfig, ChainConfig from src.providers.consensus.typings import Validator from src.typings import EpochNumber, Gwei, ReferenceBlockStamp, FrameNumber, SlotNumber -from src.utils.validator_state import calculate_active_effective_balance_sum +from src.utils.validator_state import calculate_total_active_effective_balance from src.web3py.extensions.lido_validators import LidoValidator @@ -29,6 +29,14 @@ def is_high_midterm_slashing_penalty( current_report_cl_rebase: Gwei, last_report_ref_slot: SlotNumber ) -> bool: + """ + Check if there is a high midterm slashing penalty in the future frames. + + If the current report CL rebase contains more than one frame, we should calculate the CL rebase for only one frame + and compare the max midterm penalty with the CL rebase calculated for one frame + because we assume that reports in the future can be "per-frame" as normal reports.
+ So we need to understand can we avoid negative CL rebase because of slashings in the future or not + """ logger.info({"msg": "Detecting high midterm slashing penalty"}) all_slashed_validators = MidtermSlashingPenalty.get_slashed_validators_with_impact_on_midterm_penalties( all_validators, blockstamp.ref_epoch @@ -47,7 +55,7 @@ def is_high_midterm_slashing_penalty( # We should calculate total balance for each midterm penalty epoch and # make projection based on the current state of the chain - total_balance = calculate_active_effective_balance_sum(all_validators, blockstamp.ref_epoch) + total_balance = calculate_total_active_effective_balance(all_validators, blockstamp.ref_epoch) # Calculate sum of Lido midterm penalties in each future frame frames_lido_midterm_penalties = MidtermSlashingPenalty.get_future_midterm_penalty_sum_in_frames( @@ -66,6 +74,24 @@ def is_high_midterm_slashing_penalty( return False + @staticmethod + def get_slashed_validators_with_impact_on_midterm_penalties( + validators: list[Validator], + ref_epoch: EpochNumber + ) -> list[Validator | LidoValidator]: + """ + Get slashed validators which have impact on midterm penalties + We can detect such slashings by this condition: + `ref_epoch - EPOCHS_PER_SLASHINGS_VECTOR > possible_slashed_epoch > ref_epoch` + But if we look at: + https://github.com/ethereum/consensus-specs/blob/dev/specs/phase0/beacon-chain.md#slash_validator + it can be simplified to the condition above for our purposes + """ + def is_have_impact(v: Validator) -> bool: + return v.validator.slashed and int(v.validator.withdrawable_epoch) > ref_epoch + + return list(filter(is_have_impact, validators)) + @staticmethod def get_possible_slashed_epochs(validator: Validator, ref_epoch: EpochNumber) -> list[EpochNumber]: """ @@ -162,7 +188,7 @@ def get_validator_midterm_penalty( Calculate midterm penalty for particular validator https://github.com/ethereum/consensus-specs/blob/dev/specs/phase0/beacon-chain.md#slashings """ - # We don't know which balance was at slashing epoch, so we make an optimistic assumption that it was 32 ETH + # We don't know which balance was at slashing epoch, so we make a pessimistic assumption that it was 32 ETH slashings = Gwei(bound_slashed_validators_count * MAX_EFFECTIVE_BALANCE) adjusted_total_slashing_balance = min( slashings * PROPORTIONAL_SLASHING_MULTIPLIER_BELLATRIX, total_balance @@ -191,24 +217,6 @@ def is_bound(v: Validator) -> bool: return list(filter(is_bound, slashed_validators)) - @staticmethod - def get_slashed_validators_with_impact_on_midterm_penalties( - validators: list[Validator], - ref_epoch: EpochNumber - ) -> list[Validator | LidoValidator]: - """ - Get slashed validators which have impact on midterm penalties - We can detect such slashings by this condition: - `ref_epoch - EPOCHS_PER_SLASHINGS_VECTOR > possible_slashed_epoch > ref_epoch` - But if we look at: - https://github.com/ethereum/consensus-specs/blob/dev/specs/phase0/beacon-chain.md#slash_validator - it can be simplified to the condition above for our purposes - """ - def is_have_impact(v: Validator) -> bool: - return v.validator.slashed and int(v.validator.withdrawable_epoch) > ref_epoch - - return list(filter(is_have_impact, validators)) - @staticmethod def get_frame_cl_rebase_from_report_cl_rebase( frame_config: FrameConfig, diff --git a/src/services/prediction.py b/src/services/prediction.py index bad04cafa..9f89c16a4 100644 --- a/src/services/prediction.py +++ b/src/services/prediction.py @@ -16,15 +16,11 @@ def __init__(self, w3: Web3): self.w3 
= w3 def get_rewards_per_epoch( - self, - blockstamp: ReferenceBlockStamp, - chain_configs: ChainConfig, + self, + blockstamp: ReferenceBlockStamp, + chain_configs: ChainConfig, ) -> Wei: - prediction_duration_in_slots = Web3.to_int( - self.w3.lido_contracts.oracle_daemon_config.functions.get('PREDICTION_DURATION_IN_SLOTS').call( - block_identifier=blockstamp.block_hash, - ) - ) + prediction_duration_in_slots = self._get_prediction_duration_in_slots(blockstamp) logger.info({'msg': 'Fetch prediction frame in slots.', 'value': prediction_duration_in_slots}) token_rebase_events = get_events_in_past( @@ -72,7 +68,16 @@ def _group_events_by_transaction_hash(event_type_1: list[EventData], event_type_ }) break - if len(event_type_1) != len(event_type_2) != len(result_event_data): - raise ValueError('Events are inconsistent.') + if len(event_type_1) == len(event_type_2) == len(result_event_data): + return result_event_data + + raise ValueError( + f"Events are inconsistent: {len(event_type_1)=}, {len(event_type_2)=}, {len(result_event_data)=}" + ) - return result_event_data + def _get_prediction_duration_in_slots(self, blockstamp: ReferenceBlockStamp) -> int: + return Web3.to_int( + self.w3.lido_contracts.oracle_daemon_config.functions.get('PREDICTION_DURATION_IN_SLOTS').call( + block_identifier=blockstamp.block_hash, + ) + ) diff --git a/src/services/safe_border.py b/src/services/safe_border.py index 8514a0aef..2f0a97ece 100644 --- a/src/services/safe_border.py +++ b/src/services/safe_border.py @@ -7,6 +7,7 @@ from src.metrics.prometheus.duration_meter import duration_meter from src.modules.submodules.consensus import ChainConfig, FrameConfig from src.modules.accounting.typings import OracleReportLimits +from src.utils.web3converter import Web3Converter from src.utils.abi import named_tuple_to_dataclass from src.typings import EpochNumber, FrameNumber, ReferenceBlockStamp, SlotNumber from src.web3py.extensions.lido_validators import Validator @@ -18,18 +19,36 @@ class WrongExitPeriod(Exception): pass -class SafeBorder: +class SafeBorder(Web3Converter): + """ + Safe border service calculates the range in which withdrawal requests can't be finalized. + + In Turbo mode, there is only one border that does not allow to finalize requests created close to the reference + slot to which the oracle report is performed. + + In Bunker mode there are more safe borders. The protocol takes into account the impact of negative factors + that occurred in a certain period and finalizes requests on which the negative effects have already been socialized. + + There are 3 types of the border: + 1. Default border + 2. Negative rebase border + 3. Associated slashing border + """ + chain_config: ChainConfig frame_config: FrameConfig blockstamp: ReferenceBlockStamp + converter: Web3Converter def __init__( self, w3: Web3, blockstamp: ReferenceBlockStamp, chain_config: ChainConfig, - frame_config: FrameConfig, + frame_config: FrameConfig ) -> None: + super().__init__(chain_config, frame_config) + self.w3 = w3 self.lido_contracts = w3.lido_contracts @@ -37,6 +56,8 @@ def __init__( self.chain_config = chain_config self.frame_config = frame_config + self.converter = Web3Converter(chain_config, frame_config) + self._retrieve_constants() @duration_meter() @@ -56,9 +77,17 @@ def get_safe_border_epoch( ) def _get_default_requests_border_epoch(self) -> EpochNumber: + """ + The default border is a few epochs before report reference epoch. 
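A small numeric illustration of the default border described just above. The shift is derived from the sanity checker's `request_timestamp_margin` converted into whole epochs (the margin value below is hypothetical; the real one is read from the contract, as shown further down in this file's hunks):

```python
import math

# Mainnet-like chain parameters and a hypothetical sanity-checker margin.
slots_per_epoch = 32
seconds_per_slot = 12
request_timestamp_margin = 7_680        # seconds; assumed value for illustration

seconds_per_epoch = slots_per_epoch * seconds_per_slot                                 # 384
finalization_default_shift = math.ceil(request_timestamp_margin / seconds_per_epoch)   # 20 epochs

ref_epoch = 200_000                     # arbitrary report reference epoch
default_border_epoch = ref_epoch - finalization_default_shift
print(default_border_epoch)             # 199980: requests created after this epoch are not finalized yet
```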
+ """ return EpochNumber(self.blockstamp.ref_epoch - self.finalization_default_shift) def _get_negative_rebase_border_epoch(self) -> EpochNumber: + """ + Bunker mode can be enabled by a negative rebase in case of mass validator penalties. + In this case the border is considered the reference slot of the previous successful oracle report before the + moment the Bunker mode was activated - default border + """ bunker_start_or_last_successful_report_epoch = self._get_bunker_start_or_last_successful_report_epoch() latest_allowable_epoch = bunker_start_or_last_successful_report_epoch - self.finalization_default_shift @@ -79,6 +108,14 @@ def _get_bunker_start_or_last_successful_report_epoch(self) -> EpochNumber: return EpochNumber(self.frame_config.initial_epoch) def _get_associated_slashings_border_epoch(self) -> EpochNumber: + """ + The border represents the latest epoch before associated slashings started. + + It is calculated as the earliest slashed_epoch among all incompleted slashings at + the point of reference_epoch rounded to the start of the previous oracle report frame - default border. + + See detailed research here: https://hackmd.io/@lido/r1Qkkiv3j + """ earliest_slashed_epoch = self._get_earliest_slashed_epoch_among_incomplete_slashings() if earliest_slashed_epoch: @@ -155,14 +192,14 @@ def _find_earliest_slashed_epoch_rounded_to_frame(self, validators: list[Validat start_frame = self.get_frame_by_epoch(start_epoch) end_frame = self.get_frame_by_epoch(end_epoch) - validators_set = set(validators) + slashed_pubkeys = set(v.validator.pubkey for v in validators) # Since the border will be rounded to the frame, we are iterating over the frames # to avoid unnecessary queries while start_frame < end_frame: mid_frame = FrameNumber((end_frame + start_frame) // 2) - if self._slashings_in_frame(mid_frame, validators_set): + if self._slashings_in_frame(mid_frame, slashed_pubkeys): end_frame = mid_frame else: start_frame = FrameNumber(mid_frame + 1) @@ -171,20 +208,18 @@ def _find_earliest_slashed_epoch_rounded_to_frame(self, validators: list[Validat epoch_number = self.get_epoch_by_slot(slot_number) return epoch_number - def _slashings_in_frame(self, frame: FrameNumber, validators: set[Validator]) -> bool: + def _slashings_in_frame(self, frame: FrameNumber, slashed_pubkeys: set[str]) -> bool: """ Returns number of slashed validators for the frame for the given validators Slashed flag can't be undone, so we can only look at the last slot """ last_slot_in_frame = self.get_frame_last_slot(frame) - last_slot_in_frame_blockstamp = get_blockstamp( - self.w3.cc, - last_slot_in_frame, - self.blockstamp.ref_slot, - ) + last_slot_in_frame_blockstamp = self._get_blockstamp(last_slot_in_frame) lido_validators = self.w3.lido_validators.get_lido_validators(last_slot_in_frame_blockstamp) - slashed_validators = filter_slashed_validators(list(filter(lambda x: x in validators, lido_validators))) + slashed_validators = filter_slashed_validators( + list(filter(lambda x: x.validator.pubkey in slashed_pubkeys, lido_validators)) + ) return len(slashed_validators) > 0 @@ -222,66 +257,56 @@ def _get_last_finalized_withdrawal_request_slot(self) -> SlotNumber: return self.get_epoch_first_slot(self.get_epoch_by_timestamp(last_finalized_request_data.timestamp)) - def _get_bunker_start_timestamp(self) -> int: - # If bunker mode is off returns max(uint256) - return self.w3.lido_contracts.withdrawal_queue_nft.functions.bunkerModeSinceTimestamp().call( - block_identifier=self.blockstamp.block_hash) + def 
_get_blockstamp(self, last_slot_in_frame: SlotNumber): + return get_blockstamp(self.w3.cc, last_slot_in_frame, self.blockstamp.ref_slot) - def _get_last_finalized_request_id(self) -> int: - return self.w3.lido_contracts.withdrawal_queue_nft.functions.getLastFinalizedRequestId().call( - block_identifier=self.blockstamp.block_hash) + def _retrieve_constants(self): + limits_list = self._fetch_oracle_report_limits_list() + self.finalization_default_shift = math.ceil( + limits_list.request_timestamp_margin / ( + self.chain_config.slots_per_epoch * self.chain_config.seconds_per_slot) + ) - def _get_withdrawal_request_status(self, request_id: int) -> Any: - return self.w3.lido_contracts.withdrawal_queue_nft.functions.getWithdrawalRequestStatus(request_id).call( - block_identifier=self.blockstamp.block_hash) + self.finalization_max_negative_rebase_shift = self._fetch_finalization_max_negative_rebase_epoch_shift() - def _retrieve_constants(self): - limits_list = named_tuple_to_dataclass( + def _fetch_oracle_report_limits_list(self): + return named_tuple_to_dataclass( self.w3.lido_contracts.oracle_report_sanity_checker.functions.getOracleReportLimits().call( block_identifier=self.blockstamp.block_hash ), OracleReportLimits ) - self.finalization_default_shift = math.ceil( - limits_list.request_timestamp_margin / (self.chain_config.slots_per_epoch * self.chain_config.seconds_per_slot) - ) - self.finalization_max_negative_rebase_shift = self.w3.to_int( + def _fetch_finalization_max_negative_rebase_epoch_shift(self): + return self.w3.to_int( primitive=self.w3.lido_contracts.oracle_daemon_config.functions.get( 'FINALIZATION_MAX_NEGATIVE_REBASE_EPOCH_SHIFT', ).call(block_identifier=self.blockstamp.block_hash) ) - def get_epoch_first_slot(self, epoch: EpochNumber) -> SlotNumber: - return SlotNumber(epoch * self.chain_config.slots_per_epoch) - - def get_frame_last_slot(self, frame: FrameNumber) -> SlotNumber: - return SlotNumber(self.get_frame_first_slot(FrameNumber(frame + 1)) - 1) - - def get_frame_first_slot(self, frame: FrameNumber) -> SlotNumber: - return SlotNumber(frame * self.frame_config.epochs_per_frame * self.chain_config.slots_per_epoch) - - def get_epoch_by_slot(self, ref_slot: SlotNumber) -> EpochNumber: - return EpochNumber(ref_slot // self.chain_config.slots_per_epoch) + def _get_bunker_start_timestamp(self) -> int: + # If bunker mode is off returns max(uint256) + return self.w3.lido_contracts.withdrawal_queue_nft.functions.bunkerModeSinceTimestamp().call( + block_identifier=self.blockstamp.block_hash + ) - def get_epoch_by_timestamp(self, timestamp: int) -> EpochNumber: - return EpochNumber(self.get_slot_by_timestamp(timestamp) // self.chain_config.slots_per_epoch) + def _get_last_finalized_request_id(self) -> int: + return self.w3.lido_contracts.withdrawal_queue_nft.functions.getLastFinalizedRequestId().call( + block_identifier=self.blockstamp.block_hash + ) - def get_slot_by_timestamp(self, timestamp: int) -> SlotNumber: - return SlotNumber((timestamp - self.chain_config.genesis_time) // self.chain_config.seconds_per_slot) + def _get_withdrawal_request_status(self, request_id: int) -> Any: + return self.w3.lido_contracts.withdrawal_queue_nft.functions.getWithdrawalRequestStatus(request_id).call( + block_identifier=self.blockstamp.block_hash + ) def round_slot_by_frame(self, slot: SlotNumber) -> SlotNumber: rounded_epoch = self.round_epoch_by_frame(self.get_epoch_by_slot(slot)) return self.get_epoch_first_slot(rounded_epoch) def round_epoch_by_frame(self, epoch: EpochNumber) -> 
EpochNumber: - return EpochNumber(self.get_frame_by_epoch(epoch) * self.frame_config.epochs_per_frame + self.frame_config.initial_epoch) - - def get_frame_by_slot(self, slot: SlotNumber) -> FrameNumber: - return self.get_frame_by_epoch(self.get_epoch_by_slot(slot)) - - def get_frame_by_epoch(self, epoch: EpochNumber) -> FrameNumber: - return FrameNumber((epoch - self.frame_config.initial_epoch) // self.frame_config.epochs_per_frame) + return EpochNumber( + self.get_frame_by_epoch(epoch) * self.frame_config.epochs_per_frame + self.frame_config.initial_epoch) def filter_slashed_validators(validators: Sequence[Validator]) -> list[Validator]: diff --git a/src/services/validator_state.py b/src/services/validator_state.py index df445a30b..647188dde 100644 --- a/src/services/validator_state.py +++ b/src/services/validator_state.py @@ -6,6 +6,11 @@ from eth_typing import HexStr from src.constants import FAR_FUTURE_EPOCH, SHARD_COMMITTEE_PERIOD +from src.metrics.prometheus.accounting import ( + ACCOUNTING_STUCK_VALIDATORS, + ACCOUNTING_EXITED_VALIDATORS, + ACCOUNTING_DELAYED_VALIDATORS +) from src.modules.accounting.extra_data import ExtraDataService, ExtraData from src.modules.accounting.typings import OracleReportLimits from src.modules.submodules.typings import ChainConfig @@ -88,9 +93,11 @@ def sum_stuck_validators(total: int, validator: LidoValidator) -> int: node_operators = self.w3.lido_validators.get_lido_node_operators(blockstamp) for operator in node_operators: + global_index = (operator.staking_module.id, operator.id) + ACCOUNTING_STUCK_VALIDATORS.labels(*global_index).set(result[global_index]) # If amount of exited validators weren't changed skip report for operator - if result[(operator.staking_module.id, operator.id)] == operator.stuck_validators_count: - del result[(operator.staking_module.id, operator.id)] + if result[global_index] == operator.stuck_validators_count: + del result[global_index] return result @@ -138,9 +145,11 @@ def get_lido_newly_exited_validators(self, blockstamp: ReferenceBlockStamp) -> d node_operators = self.w3.lido_validators.get_lido_node_operators(blockstamp) for operator in node_operators: + global_index = (operator.staking_module.id, operator.id) + ACCOUNTING_EXITED_VALIDATORS.labels(*global_index).set(lido_validators[global_index]) # If amount of exited validators weren't changed skip report for operator - if lido_validators[(operator.staking_module.id, operator.id)] == operator.total_exited_validators: - del lido_validators[(operator.staking_module.id, operator.id)] + if lido_validators[global_index] == operator.total_exited_validators: + del lido_validators[global_index] logger.info({'msg': 'Fetch new lido exited validators by node operator.', 'value': lido_validators}) return lido_validators @@ -196,13 +205,13 @@ def get_recently_requested_but_not_exited_validators( validators_recently_requested_to_exit = [] - for global_no_index, validators in lido_validators_by_operator.items(): + for global_index, validators in lido_validators_by_operator.items(): def validator_requested_to_exit(validator: LidoValidator) -> bool: - return int(validator.index) <= ejected_indexes[global_no_index] + return int(validator.index) <= ejected_indexes[global_index] def validator_recently_requested_to_exit(validator: LidoValidator) -> bool: - return int(validator.index) in recent_indexes[global_no_index] + return int(validator.index) in recent_indexes[global_index] def validator_eligible_to_exit(validator: LidoValidator) -> bool: delayed_timeout_in_epoch = 
self.get_validator_delayed_timeout_in_slot(blockstamp) // chain_config.slots_per_epoch @@ -223,9 +232,22 @@ def is_validator_recently_requested_but_not_exited(validator: LidoValidator) -> return False + def is_validator_delayed(validator: LidoValidator) -> bool: + if ( + validator_requested_to_exit(validator) and + not is_on_exit(validator) and + not validator_recently_requested_to_exit(validator) + ): + return True + + return False + validators_recently_requested_to_exit.extend( list(filter(is_validator_recently_requested_but_not_exited, validators)) ) + delayed_validators_count = len(list(filter(is_validator_delayed, validators))) + + ACCOUNTING_DELAYED_VALIDATORS.labels(*global_index).set(delayed_validators_count) return validators_recently_requested_to_exit diff --git a/src/services/withdrawal.py b/src/services/withdrawal.py index 125ba8608..34c2ebe77 100644 --- a/src/services/withdrawal.py +++ b/src/services/withdrawal.py @@ -1,5 +1,6 @@ from web3.types import Wei +from src.metrics.prometheus.business import CONTRACT_ON_PAUSE from src.variables import FINALIZATION_BATCH_MAX_REQUEST_COUNT from src.utils.abi import named_tuple_to_dataclass from src.web3py.typings import Web3 @@ -10,6 +11,12 @@ class Withdrawal: + """ + Service calculates which withdrawal requests should be finalized using next factors: + + 1. Safe border epoch for the current reference slot. + 2. The amount of available ETH is determined from the Withdrawal Vault, EL Vault, and buffered ETH. + """ def __init__( self, w3: Web3, @@ -31,7 +38,9 @@ def get_finalization_batches( withdrawal_vault_balance: Wei, el_rewards_vault_balance: Wei ) -> list[int]: - if self._is_requests_finalization_paused(): + on_pause = self._is_requests_finalization_paused() + CONTRACT_ON_PAUSE.set(on_pause) + if on_pause: return [] if not self._has_unfinalized_requests(): diff --git a/src/typings.py b/src/typings.py index caee1e636..38ce331c1 100644 --- a/src/typings.py +++ b/src/typings.py @@ -9,6 +9,7 @@ class OracleModule(StrEnum): ACCOUNTING = 'accounting' EJECTOR = 'ejector' + CHECK = 'check' EpochNumber = NewType('EpochNumber', int) diff --git a/src/utils/cache.py b/src/utils/cache.py new file mode 100644 index 000000000..13703fba0 --- /dev/null +++ b/src/utils/cache.py @@ -0,0 +1,5 @@ + +def clear_object_lru_cache(obj: object): + wrappers = [a for a in dir(obj) if hasattr(getattr(obj, a), 'cache_clear')] + for wrapper in wrappers: + getattr(obj, wrapper).cache_clear() diff --git a/src/utils/dataclass.py b/src/utils/dataclass.py index 9aaa3cc7b..dd61d853e 100644 --- a/src/utils/dataclass.py +++ b/src/utils/dataclass.py @@ -36,8 +36,10 @@ def __get_dataclass_factory(field_type): return field_type.from_response return field_type + T = TypeVar('T') + @dataclass class FromResponse: """ diff --git a/src/utils/slot.py b/src/utils/slot.py index 5a2ded42b..adfdb48ac 100644 --- a/src/utils/slot.py +++ b/src/utils/slot.py @@ -76,7 +76,7 @@ def get_first_non_missed_slot( except NotOkResponse as error: if error.status != HTTPStatus.NOT_FOUND: # Not expected status - raise exception - raise error from error + raise error ref_slot_is_missed = True diff --git a/src/utils/validator_state.py b/src/utils/validator_state.py index c981f5c5a..c34bb0c9b 100644 --- a/src/utils/validator_state.py +++ b/src/utils/validator_state.py @@ -5,6 +5,7 @@ ETH1_ADDRESS_WITHDRAWAL_PREFIX, SHARD_COMMITTEE_PERIOD, FAR_FUTURE_EPOCH, + EFFECTIVE_BALANCE_INCREMENT, ) from src.providers.consensus.typings import Validator from src.typings import EpochNumber, Gwei @@ -76,15 +77,24 @@ 
def is_validator_eligible_to_exit(validator: Validator, epoch: EpochNumber) -> b return active_long_enough and not is_on_exit(validator) -def calculate_active_effective_balance_sum(validators: Sequence[Validator], ref_epoch: EpochNumber) -> Gwei: +def calculate_total_active_effective_balance(all_validators: Sequence[Validator], ref_epoch: EpochNumber) -> Gwei: """ Return the combined effective balance of the active validators. + Note: returns ``EFFECTIVE_BALANCE_INCREMENT`` Gwei minimum to avoid divisions by zero. https://github.com/ethereum/consensus-specs/blob/dev/specs/phase0/beacon-chain.md#get_total_active_balance """ - total_effective_balance = 0 + total_effective_balance = calculate_active_effective_balance_sum(all_validators, ref_epoch) + return Gwei(max(EFFECTIVE_BALANCE_INCREMENT, total_effective_balance)) + + +def calculate_active_effective_balance_sum(validators: Sequence[Validator], ref_epoch: EpochNumber) -> Gwei: + """ + Return the combined effective balance of the active validators from the given list + """ + effective_balance_sum = 0 - for v in validators: - if is_active_validator(v, ref_epoch): - total_effective_balance += int(v.validator.effective_balance) + for validator in validators: + if is_active_validator(validator, ref_epoch): + effective_balance_sum += int(validator.validator.effective_balance) - return Gwei(total_effective_balance) + return Gwei(effective_balance_sum) diff --git a/src/utils/web3converter.py b/src/utils/web3converter.py new file mode 100644 index 000000000..043df356a --- /dev/null +++ b/src/utils/web3converter.py @@ -0,0 +1,42 @@ +from src.typings import SlotNumber, EpochNumber, FrameNumber +from src.modules.submodules.typings import ChainConfig, FrameConfig + + +class Web3Converter: + """ + The Web3Converter class contains methods for converting between slot, epoch, and frame numbers using chain and + frame settings passed as arguments when the class instance is created. + + Frame is the distance between two oracle reports. 
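The converter's methods appear in the next hunk; as a quick usage sketch, here is the same slot/epoch/frame arithmetic spelled out with mainnet-like parameters (the `FrameConfig` numbers are assumptions for illustration, not necessarily the deployed configuration):

```python
from dataclasses import dataclass


@dataclass
class ChainConfig:   # minimal stand-in with only the fields the converter uses
    slots_per_epoch: int
    seconds_per_slot: int
    genesis_time: int


@dataclass
class FrameConfig:   # minimal stand-in; initial_epoch/epochs_per_frame are assumed values
    initial_epoch: int
    epochs_per_frame: int


chain = ChainConfig(slots_per_epoch=32, seconds_per_slot=12, genesis_time=1_606_824_023)
frame = FrameConfig(initial_epoch=201_600, epochs_per_frame=225)

slot = 6_500_000
epoch = slot // chain.slots_per_epoch                                    # get_epoch_by_slot -> 203125
frame_number = (epoch - frame.initial_epoch) // frame.epochs_per_frame   # get_frame_by_epoch -> 6
slot_start_ts = chain.genesis_time + slot * chain.seconds_per_slot       # inverse of get_slot_by_timestamp

print(epoch, frame_number, slot_start_ts)  # 203125 6 1684824023
```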
+ """ + + chain_config: ChainConfig + frame_config: FrameConfig + + def __init__(self, chain_config: ChainConfig, frame_config: FrameConfig): + self.chain_config = chain_config + self.frame_config = frame_config + + def get_epoch_first_slot(self, epoch: EpochNumber) -> SlotNumber: + return SlotNumber(epoch * self.chain_config.slots_per_epoch) + + def get_frame_last_slot(self, frame: FrameNumber) -> SlotNumber: + return SlotNumber(self.get_frame_first_slot(FrameNumber(frame + 1)) - 1) + + def get_frame_first_slot(self, frame: FrameNumber) -> SlotNumber: + return SlotNumber(frame * self.frame_config.epochs_per_frame * self.chain_config.slots_per_epoch) + + def get_epoch_by_slot(self, ref_slot: SlotNumber) -> EpochNumber: + return EpochNumber(ref_slot // self.chain_config.slots_per_epoch) + + def get_epoch_by_timestamp(self, timestamp: int) -> EpochNumber: + return EpochNumber(self.get_slot_by_timestamp(timestamp) // self.chain_config.slots_per_epoch) + + def get_slot_by_timestamp(self, timestamp: int) -> SlotNumber: + return SlotNumber((timestamp - self.chain_config.genesis_time) // self.chain_config.seconds_per_slot) + + def get_frame_by_slot(self, slot: SlotNumber) -> FrameNumber: + return self.get_frame_by_epoch(self.get_epoch_by_slot(slot)) + + def get_frame_by_epoch(self, epoch: EpochNumber) -> FrameNumber: + return FrameNumber((epoch - self.frame_config.initial_epoch) // self.frame_config.epochs_per_frame) diff --git a/src/variables.py b/src/variables.py index 811285467..28fef2550 100644 --- a/src/variables.py +++ b/src/variables.py @@ -4,8 +4,8 @@ # - Providers- EXECUTION_CLIENT_URI = os.getenv('EXECUTION_CLIENT_URI', '').split(',') -CONSENSUS_CLIENT_URI = os.getenv('CONSENSUS_CLIENT_URI', '') -KEYS_API_URI = os.getenv('KEYS_API_URI', '') +CONSENSUS_CLIENT_URI = os.getenv('CONSENSUS_CLIENT_URI', '').split(',') +KEYS_API_URI = os.getenv('KEYS_API_URI', '').split(',') # - Account - ACCOUNT = None @@ -16,7 +16,20 @@ # - App specific - LIDO_LOCATOR_ADDRESS = os.getenv('LIDO_LOCATOR_ADDRESS') FINALIZATION_BATCH_MAX_REQUEST_COUNT = os.getenv('FINALIZATION_BATCH_MAX_REQUEST_COUNT', 1000) -ALLOW_NEGATIVE_REBASE_REPORTING = os.getenv('ALLOW_NEGATIVE_REBASE_REPORTING', 'False').lower() == 'true' +ALLOW_REPORTING_IN_BUNKER_MODE = os.getenv('ALLOW_REPORTING_IN_BUNKER_MODE', 'False').lower() == 'true' +# We add some gas to the transaction to be sure that we have enough gas to execute corner cases +# eg when we tried to submit a few reports in a single block +# In this case the second report will force report finalization and will consume more gas +TX_GAS_ADDITION = int(os.getenv('TX_GAS_ADDITION', 100_000)) + + +# Default delay for default Oracle members. Member with submit data role should submit data first. 
+# If the contract is reportable, each member in order submits data, offset by this number of slots
+SUBMIT_DATA_DELAY_IN_SLOTS = int(os.getenv('SUBMIT_DATA_DELAY_IN_SLOTS', 6))
+CYCLE_SLEEP_IN_SECONDS = int(os.getenv('CYCLE_SLEEP_IN_SECONDS', 12))
+HTTP_REQUEST_RETRY_COUNT = int(os.getenv('HTTP_REQUEST_RETRY_COUNT', 5))
+HTTP_REQUEST_SLEEP_BEFORE_RETRY_IN_SECONDS = int(os.getenv('HTTP_REQUEST_SLEEP_BEFORE_RETRY_IN_SECONDS', 5))
+HTTP_REQUEST_TIMEOUT = int(os.getenv('HTTP_REQUEST_TIMEOUT', 5 * 60))
 
 # - Metrics -
 PROMETHEUS_PORT = int(os.getenv('PROMETHEUS_PORT', 9000))
@@ -25,3 +38,26 @@
 HEALTHCHECK_SERVER_PORT = int(os.getenv('HEALTHCHECK_SERVER_PORT', 9010))
 
 MAX_CYCLE_LIFETIME_IN_SECONDS = int(os.getenv("MAX_CYCLE_LIFETIME_IN_SECONDS", 3000))
+
+
+def check_all_required_variables():
+    errors = check_uri_required_variables()
+    if LIDO_LOCATOR_ADDRESS in (None, ''):
+        errors.append('LIDO_LOCATOR_ADDRESS')
+    return errors
+
+
+def check_uri_required_variables():
+    errors = []
+    if '' in EXECUTION_CLIENT_URI:
+        errors.append('EXECUTION_CLIENT_URI')
+    if '' in CONSENSUS_CLIENT_URI:
+        errors.append('CONSENSUS_CLIENT_URI')
+    if '' in KEYS_API_URI:
+        errors.append('KEYS_API_URI')
+    return errors
+
+
+def raise_from_errors(errors):
+    if errors:
+        raise ValueError("The following variables are required: " + ", ".join(errors))
diff --git a/src/web3py/extensions/consensus.py b/src/web3py/extensions/consensus.py
index d273dc016..d53ea2aa8 100644
--- a/src/web3py/extensions/consensus.py
+++ b/src/web3py/extensions/consensus.py
@@ -5,8 +5,8 @@
 
 
 class ConsensusClientModule(ConsensusClient, Module):
-    def __init__(self, host: str, w3: Web3):
+    def __init__(self, hosts: list[str], w3: Web3):
         self.w3 = w3
 
-        super(ConsensusClient, self).__init__(host)
+        super(ConsensusClient, self).__init__(hosts)
         super(Module, self).__init__()
diff --git a/src/web3py/extensions/contracts.py b/src/web3py/extensions/contracts.py
index b8b3aca1f..fd310cca0 100644
--- a/src/web3py/extensions/contracts.py
+++ b/src/web3py/extensions/contracts.py
@@ -1,14 +1,16 @@
 import json
 import logging
 from functools import lru_cache
+from time import sleep
 
 from web3 import Web3
 from web3.contract import Contract
+from web3.exceptions import BadFunctionCallOutput
 from web3.module import Module
 from web3.types import Wei
 
 from src import variables
-from src.metrics.prometheus.business import FRAME_LAST_REPORT_REF_SLOT
+from src.metrics.prometheus.business import FRAME_PREV_REPORT_REF_SLOT
 from src.typings import BlockStamp, SlotNumber
 
 logger = logging.getLogger()
@@ -36,8 +38,27 @@ def __setattr__(self, key, value):
             logger.info({'msg': f'Contract {key} has been changed to {value.address}'})
         super().__setattr__(key, value)
 
-    def reload_contracts(self):
+    def has_contract_address_changed(self) -> bool:
+        addresses = [contract.address for contract in self.__dict__.values() if isinstance(contract, Contract)]
         self._load_contracts()
+        new_addresses = [contract.address for contract in self.__dict__.values() if isinstance(contract, Contract)]
+        return addresses != new_addresses
+
+    def _check_contracts(self):
+        """This is a startup check that verifies the contracts are deployed and have a valid implementation."""
+        try:
+            self.accounting_oracle.functions.getContractVersion().call()
+            self.validators_exit_bus_oracle.functions.getContractVersion().call()
+        except BadFunctionCallOutput:
+            logger.info({
+                'msg': 'getContractVersion method from accounting_oracle and validators_exit_bus_oracle '
+                       'doesn\'t return any data. Probably addresses from Lido Locator refer to the wrong '
+                       'implementation or contracts don\'t exist. Sleep for 1 minute.'
+            })
+            sleep(60)
+            self._load_contracts()
+        else:
+            return
 
     def _load_contracts(self):
         # Contract that stores all lido contract addresses
@@ -95,11 +116,14 @@ def _load_contracts(self):
             decode_tuples=True,
         )
 
+        self._check_contracts()
+
     @staticmethod
     def load_abi(abi_name: str, abi_path: str = './assets/'):
         with open(f'{abi_path}{abi_name}.json') as f:
             return json.load(f)
 
+    # --- Contract methods ---
     @lru_cache(maxsize=1)
     def get_withdrawal_balance(self, blockstamp: BlockStamp) -> Wei:
         return self.get_withdrawal_balance_no_cache(blockstamp)
@@ -124,5 +148,14 @@ def get_el_vault_balance(self, blockstamp: BlockStamp) -> Wei:
     @lru_cache(maxsize=1)
     def get_accounting_last_processing_ref_slot(self, blockstamp: BlockStamp) -> SlotNumber:
         result = self.accounting_oracle.functions.getLastProcessingRefSlot().call(block_identifier=blockstamp.block_hash)
-        FRAME_LAST_REPORT_REF_SLOT.set(result)
+        logger.info({'msg': f'Accounting last processing ref slot {result}'})
+        FRAME_PREV_REPORT_REF_SLOT.set(result)
+        return result
+
+    def get_ejector_last_processing_ref_slot(self, blockstamp: BlockStamp) -> SlotNumber:
+        result = self.validators_exit_bus_oracle.functions.getLastProcessingRefSlot().call(
+            block_identifier=blockstamp.block_hash
+        )
+        logger.info({'msg': f'Ejector last processing ref slot {result}'})
+        FRAME_PREV_REPORT_REF_SLOT.set(result)
         return result
diff --git a/src/web3py/extensions/keys_api.py b/src/web3py/extensions/keys_api.py
index e10ddcc46..1cc22ba63 100644
--- a/src/web3py/extensions/keys_api.py
+++ b/src/web3py/extensions/keys_api.py
@@ -5,8 +5,8 @@
 
 
 class KeysAPIClientModule(KeysAPIClient, Module):
-    def __init__(self, host: str, w3: Web3):
+    def __init__(self, hosts: list[str], w3: Web3):
         self.w3 = w3
 
-        super(KeysAPIClient, self).__init__(host)
+        super(KeysAPIClient, self).__init__(hosts)
         super(Module, self).__init__()
diff --git a/src/web3py/extensions/lido_validators.py b/src/web3py/extensions/lido_validators.py
index e08d326d8..0a8aeaea9 100644
--- a/src/web3py/extensions/lido_validators.py
+++ b/src/web3py/extensions/lido_validators.py
@@ -16,7 +16,7 @@
 
 
 if TYPE_CHECKING:
-    from src.web3py.typings import Web3
+    from src.web3py.typings import Web3  # pragma: no cover
 
 
 StakingModuleId = NewType('StakingModuleId', int)
@@ -96,6 +96,10 @@ class LidoValidator(Validator):
     lido_id: LidoKey
 
 
+class CountOfKeysDiffersException(Exception):
+    pass
+
+
 ValidatorsByNodeOperator = dict[NodeOperatorGlobalIndex, list[LidoValidator]]
 
 
@@ -104,9 +108,20 @@ class LidoValidatorsProvider(Module):
 
     @lru_cache(maxsize=1)
     def get_lido_validators(self, blockstamp: BlockStamp) -> list[LidoValidator]:
-        lido_keys = self.w3.kac.get_all_lido_keys(blockstamp)
+        lido_keys = self.w3.kac.get_used_lido_keys(blockstamp)
         validators = self.w3.cc.get_validators(blockstamp)
+        no_operators = self.get_lido_node_operators(blockstamp)
+
+        # Make sure the number of used keys fetched from the Keys API is >= the total number of deposited validators from the Staking Router
+        total_deposited_validators = 0
+        for deposited_validators in no_operators:
+            total_deposited_validators += deposited_validators.total_deposited_validators
+
+        if len(lido_keys) < total_deposited_validators:
+            raise CountOfKeysDiffersException(f'Keys API Service returned fewer keys ({len(lido_keys)}) '
+                                              f'than the number of deposited validators ({total_deposited_validators}) returned from the Staking Router')
+
         return self.merge_validators_with_keys(lido_keys,
validators) @staticmethod diff --git a/src/web3py/extensions/tx_utils.py b/src/web3py/extensions/tx_utils.py index 29515e403..3797ea1f1 100644 --- a/src/web3py/extensions/tx_utils.py +++ b/src/web3py/extensions/tx_utils.py @@ -5,16 +5,15 @@ from web3.contract.contract import ContractFunction from web3.exceptions import ContractLogicError from web3.module import Module -from web3.types import TxReceipt, Wei +from web3.types import TxReceipt, Wei, TxParams, BlockData +from src import variables from src.metrics.prometheus.basic import TRANSACTIONS_COUNT, Status, ACCOUNT_BALANCE logger = logging.getLogger(__name__) class TransactionUtils(Module): - GAS_MULTIPLIER = 1.15 - def check_and_send_transaction(self, transaction, account: Optional[LocalAccount] = None) -> Optional[TxReceipt]: if not account: logger.info({'msg': 'No account provided to submit extra data. Dry mode'}) @@ -22,13 +21,15 @@ def check_and_send_transaction(self, transaction, account: Optional[LocalAccount ACCOUNT_BALANCE.labels(str(account.address)).set(self.w3.eth.get_balance(account.address)) - if self.check_transaction(transaction, account.address): - return self.sign_and_send_transaction(transaction, account) + params = self.get_transaction_params(transaction, account) + + if self.check_transaction(transaction, params): + return self.sign_and_send_transaction(transaction, params, account) return None @staticmethod - def check_transaction(transaction, from_address: str) -> bool: + def check_transaction(transaction, params: Optional[TxParams]) -> bool: """ Returns: True - transaction succeed. @@ -37,7 +38,7 @@ def check_transaction(transaction, from_address: str) -> bool: logger.info({"msg": "Check transaction. Make static call.", "value": transaction.args}) try: - result = transaction.call({"from": from_address}) + result = transaction.call(params) except ContractLogicError as error: logger.warning({"msg": "Transaction reverted.", "error": str(error)}) return False @@ -45,29 +46,40 @@ def check_transaction(transaction, from_address: str) -> bool: logger.info({"msg": "Transaction executed successfully.", "value": result}) return True + def get_transaction_params(self, transaction: ContractFunction, account: Optional[LocalAccount] = None): + if not account: + logger.info({"msg": "No account provided. Dry mode."}) + return None + + # get pending block doesn't work on erigon node in specific cases + latest_block: BlockData = self.w3.eth.get_block("latest") + + params: Optional[TxParams] = { + "from": account.address, + "gas": min( + latest_block["gasLimit"], + int(transaction.estimate_gas({'from': account.address}) + variables.TX_GAS_ADDITION) + ), + "maxFeePerGas": Wei( + latest_block["baseFeePerGas"] * 2 + self.w3.eth.max_priority_fee + ), + "maxPriorityFeePerGas": self.w3.eth.max_priority_fee, + "nonce": self.w3.eth.get_transaction_count(account.address), + } + + return params + def sign_and_send_transaction( self, transaction: ContractFunction, + params: Optional[TxParams], account: Optional[LocalAccount] = None, ) -> Optional[TxReceipt]: if not account: logger.info({"msg": "No account provided. 
Dry mode."}) return None - pending_block = self.w3.eth.get_block("pending") - - tx = transaction.build_transaction( - { - "from": account.address, - "gas": int(transaction.estimate_gas({'from': account.address}) * self.GAS_MULTIPLIER), - "maxFeePerGas": Wei( - pending_block["baseFeePerGas"] * 2 + self.w3.eth.max_priority_fee - ), - "maxPriorityFeePerGas": self.w3.eth.max_priority_fee, - "nonce": self.w3.eth.get_transaction_count(account.address), - } - ) - + tx = transaction.build_transaction(params) signed_tx = self.w3.eth.account.sign_transaction(tx, account.key) tx_hash = self.w3.eth.send_raw_transaction(signed_tx.rawTransaction) diff --git a/src/web3py/middleware.py b/src/web3py/middleware.py index 4f2f8e569..c0c0de5d1 100644 --- a/src/web3py/middleware.py +++ b/src/web3py/middleware.py @@ -67,7 +67,7 @@ def middleware(method: RPCEndpoint, params: Any) -> RPCResponse: code=failed.status_code, domain=domain, ) - raise + raise ex # https://www.jsonrpc.org/specification#error_object error = response.get("error") diff --git a/stubs/web3_multi_provider/__init__.pyi b/stubs/web3_multi_provider/__init__.pyi index 966e694eb..b3a1dca7f 100644 --- a/stubs/web3_multi_provider/__init__.pyi +++ b/stubs/web3_multi_provider/__init__.pyi @@ -1,4 +1,5 @@ from .multi_http_provider import MultiHTTPProvider as MultiHTTPProvider +from .multi_http_provider import FallbackProvider as FallbackProvider from .multi_http_provider import MultiProvider as MultiProvider from .multi_http_provider import NoActiveProviderError as NoActiveProviderError from .multi_http_provider import ProtocolNotSupported as ProtocolNotSupported diff --git a/stubs/web3_multi_provider/multi_http_provider.pyi b/stubs/web3_multi_provider/multi_http_provider.pyi index cbd2f6c6a..9fdddce81 100644 --- a/stubs/web3_multi_provider/multi_http_provider.pyi +++ b/stubs/web3_multi_provider/multi_http_provider.pyi @@ -30,3 +30,11 @@ class MultiHTTPProvider(MultiProvider): request_kwargs: Optional[Any] = ..., session: Optional[Any] = ..., ) -> None: ... + +class FallbackProvider(MultiProvider): + def __init__( + self, + endpoint_urls: List[Union[URI, str]], + request_kwargs: Optional[Any] = ..., + session: Optional[Any] = ..., + ) -> None: ... 
diff --git a/tests/conftest.py b/tests/conftest.py index dace5c24e..c62efe0de 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,5 @@ from pathlib import Path +from unittest.mock import Mock import pytest from _pytest.fixtures import FixtureRequest @@ -110,6 +111,7 @@ def keys_api_client(request, responses_path, web3): @pytest.fixture() def contracts(web3, provider): src.variables.LIDO_LOCATOR_ADDRESS = "0x548C1ED5C83Bdf19e567F4cd7Dd9AC4097088589" + LidoContracts._check_contracts = Mock() # pylint: disable=protected-access with provider.use_mock(Path('common/contracts.json')): # First contracts deployment web3.attach_modules({ diff --git a/tests/factory/configs.py b/tests/factory/configs.py index 8bb78bef9..0241938ab 100644 --- a/tests/factory/configs.py +++ b/tests/factory/configs.py @@ -1,4 +1,6 @@ +from src.modules.accounting.typings import OracleReportLimits from src.modules.submodules.typings import ChainConfig, FrameConfig +from src.services.bunker_cases.typings import BunkerConfig from tests.factory.web3_factory import Web3Factory @@ -15,3 +17,21 @@ class FrameConfigFactory(Web3Factory): initial_epoch = 0 epochs_per_frame = 10 + + +class OracleReportLimitsFactory(Web3Factory): + __model__ = OracleReportLimits + + churn_validators_per_day_limit = 0 + one_off_cl_balance_decrease_bp_limit = 0 + annual_balance_increase_bp_limit = 0 + simulated_share_rate_deviation_bp_limit = 0 + max_validator_exit_requests_per_report = 0 + max_accounting_extra_data_list_items_count = 0 + max_node_operators_per_extra_data_item_count = 0 + request_timestamp_margin = 0 + max_positive_token_rebase = 0 + + +class BunkerConfigFactory(Web3Factory): + __model__ = BunkerConfig diff --git a/tests/factory/no_registry.py b/tests/factory/no_registry.py index 22d70e4a4..f05a085db 100644 --- a/tests/factory/no_registry.py +++ b/tests/factory/no_registry.py @@ -4,7 +4,7 @@ from faker import Faker from pydantic_factories import Use -from src.providers.consensus.typings import Validator +from src.providers.consensus.typings import Validator, ValidatorState from src.providers.keys.typings import LidoKey from tests.factory.web3_factory import Web3Factory from src.web3py.extensions.lido_validators import StakingModule, LidoValidator, NodeOperator @@ -13,6 +13,10 @@ faker = Faker() +class ValidatorStateFactory(Web3Factory): + __model__ = ValidatorState + + class ValidatorFactory(Web3Factory): __model__ = Validator @@ -20,6 +24,8 @@ class ValidatorFactory(Web3Factory): class LidoKeyFactory(Web3Factory): __model__ = LidoKey + used: bool = True + @classmethod def generate_for_validators(cls, validators: list[Validator], **kwargs): return cls.butch_with('key', [v.validator.pubkey for v in validators], **kwargs) diff --git a/tests/modules/accounting/bunker/conftest.py b/tests/modules/accounting/bunker/conftest.py index 08318fdd4..8923a1365 100644 --- a/tests/modules/accounting/bunker/conftest.py +++ b/tests/modules/accounting/bunker/conftest.py @@ -25,7 +25,7 @@ def simple_key(pubkey: str) -> LidoKey: return key -def simple_validator(index, pubkey, balance, slashed=False, withdrawable_epoch='') -> Validator: +def simple_validator(index, pubkey, balance, slashed=False, withdrawable_epoch='', exit_epoch='100500') -> Validator: return Validator( index=str(index), balance=str(balance), @@ -37,7 +37,7 @@ def simple_validator(index, pubkey, balance, slashed=False, withdrawable_epoch=' slashed=slashed, activation_eligibility_epoch='', activation_epoch='0', - exit_epoch='100500', + exit_epoch=exit_epoch, 
withdrawable_epoch=withdrawable_epoch, ) ) @@ -54,16 +54,16 @@ def _get_accounting_last_processing_ref_slot(blockstamp: ReferenceBlockStamp): @pytest.fixture -def mock_get_all_lido_keys(abnormal_case): +def mock_get_used_lido_keys(abnormal_case): - def _get_all_lido_keys(blockstamp: ReferenceBlockStamp): + def _get_used_lido_keys(blockstamp: ReferenceBlockStamp): return [ simple_key('0x03'), simple_key('0x04'), simple_key('0x05'), ] - abnormal_case.w3.kac.get_all_lido_keys = Mock(side_effect=_get_all_lido_keys) + abnormal_case.w3.kac.get_used_lido_keys = Mock(side_effect=_get_used_lido_keys) @pytest.fixture @@ -75,6 +75,7 @@ def _get_blockstamp(_, ref_slot, last_finalized_slot_number): 10: simple_blockstamp(10), 20: simple_blockstamp(20), 30: simple_blockstamp(30), + 31: simple_blockstamp(31), 444424: simple_blockstamp(444420), 444434: simple_blockstamp(444431), 444444: simple_blockstamp(444444), @@ -87,6 +88,7 @@ def _get_reference_blockstamp(_, ref_slot, last_finalized_slot_number, ref_epoch 10: simple_ref_blockstamp(10), 20: simple_ref_blockstamp(20), 30: simple_ref_blockstamp(30), + 33: simple_ref_blockstamp(33), 444424: simple_ref_blockstamp(444420), 444434: simple_ref_blockstamp(444431), 444444: simple_ref_blockstamp(444444), @@ -112,6 +114,8 @@ def _get_eth_distributed_events(from_block: BlockNumber, to_block: BlockNumber): (21, 30): [{'args': {'withdrawalsWithdrawn': 7 * 10 ** 18}}, {'args': {'withdrawalsWithdrawn': 5 * 10 ** 18}}], (21, 40): [{'args': {'withdrawalsWithdrawn': 12 * 10 ** 18}}], (31, 40): [], + (32, 33): [], + (1, 33): [], } return events[(from_block, to_block)] @@ -140,6 +144,8 @@ def _get_withdrawal_vault_balance(blockstamp: BlockStamp): 10: 14 * 10 ** 18, 20: 14 * 10 ** 18, 30: 2 * 10 ** 18, + 31: 2 * 10 ** 18, + 33: 2 * 10 ** 18, 40: 2 * 10 ** 18, } return balance[blockstamp.block_number] @@ -183,6 +189,22 @@ def _get_validators(state: ReferenceBlockStamp, _=None): simple_validator(3, '0x03', 32 * 10 ** 9), simple_validator(4, '0x04', 32 * 10 ** 9), ], + 31: [ + simple_validator(0, '0x00', 32 * 10 ** 9), + simple_validator(1, '0x01', 32 * 10 ** 9), + simple_validator(2, '0x02', 32 * 10 ** 9), + simple_validator(3, '0x03', 32 * 10 ** 9), + simple_validator(4, '0x04', 32 * 10 ** 9), + simple_validator(5, '0x05', 32 * 10 ** 9), + ], + 33: [ + simple_validator(0, '0x00', 32 * 10 ** 9), + simple_validator(1, '0x01', 32 * 10 ** 9), + simple_validator(2, '0x02', 32 * 10 ** 9), + simple_validator(3, '0x03', 32 * 10 ** 9), + simple_validator(4, '0x04', 32 * 10 ** 9), + simple_validator(5, '0x05', 32 * 10 ** 9), + ], 40: [ simple_validator(0, '0x00', 32 * 10 ** 9), simple_validator(1, '0x01', 32 * 10 ** 9), @@ -199,7 +221,15 @@ def _get_validators(state: ReferenceBlockStamp, _=None): simple_validator(4, '0x04', 32 * 10 ** 9, slashed=True, withdrawable_epoch='10001'), simple_validator(5, '0x05', 32 * 10 ** 9, slashed=True, withdrawable_epoch='10001'), *[simple_validator(i, f'0x0{i}', 32 * 10 ** 9) for i in range(6, 200)], - ] + ], + 123: [ + simple_validator(0, '0x00', 32 * 10 ** 9, exit_epoch='1'), + simple_validator(1, '0x01', 32 * 10 ** 9, exit_epoch='1'), + simple_validator(2, '0x02', 32 * 10 ** 9, exit_epoch='1'), + simple_validator(3, '0x03', 32 * 10 ** 9, exit_epoch='1'), + simple_validator(4, '0x04', 32 * 10 ** 9, exit_epoch='1'), + simple_validator(5, '0x05', 32 * 10 ** 9, exit_epoch='1'), + ], } return validators[state.slot_number] diff --git a/tests/modules/accounting/bunker/test_bunker.py b/tests/modules/accounting/bunker/test_bunker.py index 
73bc191da..7eed6dda3 100644 --- a/tests/modules/accounting/bunker/test_bunker.py +++ b/tests/modules/accounting/bunker/test_bunker.py @@ -1,17 +1,215 @@ +from typing import Iterable, Sequence +from unittest.mock import Mock + import pytest from src.modules.accounting.typings import LidoReportRebase +from src.services.bunker import BunkerService +from src.typings import ReferenceBlockStamp +from src.web3py.extensions.lido_validators import LidoValidator +from tests.factory.blockstamp import ReferenceBlockStampFactory +from tests.factory.configs import BunkerConfigFactory, ChainConfigFactory, FrameConfigFactory +from tests.factory.contract_responses import LidoReportRebaseFactory +from tests.factory.no_registry import LidoValidatorFactory from tests.modules.accounting.bunker.conftest import simple_ref_blockstamp +class TestIsBunkerMode: + @pytest.mark.unit + @pytest.mark.usefixtures( + "contracts", + "mock_get_config", + "mock_validators", + ) + def test_false_when_no_prev_report( + self, + bunker: BunkerService, + ref_blockstamp: ReferenceBlockStamp, + ) -> None: + bunker.w3.lido_contracts.get_accounting_last_processing_ref_slot = Mock( + return_value=None + ) + bunker.get_cl_rebase_for_current_report = Mock() + result = bunker.is_bunker_mode( + ref_blockstamp, + FrameConfigFactory.build(), + ChainConfigFactory.build(), + LidoReportRebaseFactory.build(), + ) + assert result is False + bunker.w3.lido_contracts.get_accounting_last_processing_ref_slot.assert_called_once() + bunker.get_cl_rebase_for_current_report.assert_not_called() + + @pytest.mark.unit + @pytest.mark.usefixtures( + "contracts", + "mock_get_config", + "mock_validators", + ) + def test_true_when_cl_rebase_is_negative( + self, + bunker: BunkerService, + ref_blockstamp: ReferenceBlockStamp, + is_high_midterm_slashing_penalty: Mock, + ) -> None: + bunker.w3.lido_contracts.get_accounting_last_processing_ref_slot = Mock( + return_value=ref_blockstamp + ) + bunker.get_cl_rebase_for_current_report = Mock(return_value=-1) + + result = bunker.is_bunker_mode( + ref_blockstamp, + FrameConfigFactory.build(), + ChainConfigFactory.build(), + LidoReportRebaseFactory.build(), + ) + assert result is True + + bunker.w3.lido_contracts.get_accounting_last_processing_ref_slot.assert_called_once() + bunker.get_cl_rebase_for_current_report.assert_called_once() + is_high_midterm_slashing_penalty.assert_not_called() + + @pytest.mark.unit + @pytest.mark.usefixtures( + "contracts", + "mock_get_config", + "mock_validators", + ) + def test_true_when_high_midterm_slashing_penalty( + self, + bunker: BunkerService, + ref_blockstamp: ReferenceBlockStamp, + is_high_midterm_slashing_penalty: Mock, + is_abnormal_cl_rebase: Mock, + ) -> None: + bunker.w3.lido_contracts.get_accounting_last_processing_ref_slot = Mock( + return_value=ref_blockstamp + ) + bunker.get_cl_rebase_for_current_report = Mock(return_value=0) + is_high_midterm_slashing_penalty.return_value = True + result = bunker.is_bunker_mode( + ref_blockstamp, + FrameConfigFactory.build(), + ChainConfigFactory.build(), + LidoReportRebaseFactory.build(), + ) + assert result is True + is_high_midterm_slashing_penalty.assert_called_once() + is_abnormal_cl_rebase.assert_not_called() + + @pytest.mark.unit + @pytest.mark.usefixtures( + "contracts", + "mock_get_config", + "mock_validators", + ) + def test_true_when_abnormal_cl_rebase( + self, + bunker: BunkerService, + ref_blockstamp: ReferenceBlockStamp, + is_high_midterm_slashing_penalty: Mock, + is_abnormal_cl_rebase: Mock, + ) -> None: + 
bunker.w3.lido_contracts.get_accounting_last_processing_ref_slot = Mock( + return_value=ref_blockstamp + ) + bunker.get_cl_rebase_for_current_report = Mock(return_value=0) + is_high_midterm_slashing_penalty.return_value = False + is_abnormal_cl_rebase.return_value = True + result = bunker.is_bunker_mode( + ref_blockstamp, + FrameConfigFactory.build(), + ChainConfigFactory.build(), + LidoReportRebaseFactory.build(), + ) + assert result is True + is_high_midterm_slashing_penalty.assert_called_once() + is_abnormal_cl_rebase.assert_called_once() + + @pytest.mark.unit + @pytest.mark.usefixtures( + "contracts", + "mock_get_config", + "mock_validators", + "mock_total_supply", + ) + def test_no_bunker_mode_by_default( + self, + bunker: BunkerService, + ref_blockstamp: ReferenceBlockStamp, + is_high_midterm_slashing_penalty: Mock, + is_abnormal_cl_rebase: Mock, + ) -> None: + bunker.w3.lido_contracts.get_accounting_last_processing_ref_slot = Mock( + return_value=ref_blockstamp + ) + bunker.get_cl_rebase_for_current_report = Mock(return_value=0) + is_high_midterm_slashing_penalty.return_value = False + is_abnormal_cl_rebase.return_value = False + result = bunker.is_bunker_mode( + ref_blockstamp, + FrameConfigFactory.build(), + ChainConfigFactory.build(), + LidoReportRebaseFactory.build(), + ) + assert result is False + is_high_midterm_slashing_penalty.assert_called_once() + is_abnormal_cl_rebase.assert_called_once() + + # === fixtures === # + + @pytest.fixture + def ref_blockstamp(self) -> ReferenceBlockStamp: + return ReferenceBlockStampFactory.build() + + @pytest.fixture + def mock_get_config(self, bunker: BunkerService) -> None: + bunker._get_config = Mock(return_value=BunkerConfigFactory.build()) + + @pytest.fixture + def mock_validators(self, bunker: BunkerService) -> Sequence[LidoValidator]: + validators = LidoValidatorFactory.batch(5) + bunker.w3.cc.get_validators = Mock(return_value=validators) + bunker.w3.lido_validators.get_lido_validators = Mock( + return_value=validators[:2] + ) + return validators + + @pytest.fixture + def mock_total_supply(self, bunker: BunkerService) -> None: + bunker._get_total_supply = Mock(return_value=15 * 10**18) + + @pytest.fixture + def is_high_midterm_slashing_penalty( + self, monkeypatch: pytest.MonkeyPatch + ) -> Iterable[Mock]: + mock = Mock() + with monkeypatch.context() as m: + m.setattr( + "src.services.bunker.MidtermSlashingPenalty.is_high_midterm_slashing_penalty", + mock, + ) + yield mock + + @pytest.fixture + def is_abnormal_cl_rebase(self, monkeypatch: pytest.MonkeyPatch) -> Iterable[Mock]: + mock = Mock() + with monkeypatch.context() as m: + m.setattr( + "src.services.bunker.AbnormalClRebase.is_abnormal_cl_rebase", + mock, + ) + yield mock + + @pytest.mark.unit @pytest.mark.parametrize( ("simulated_post_total_pooled_ether", "expected_rebase"), [ - (15 * 10 ** 18, 0), - (12 * 10 ** 18, -3 * 10 ** 9), - (18 * 10 ** 18, 3 * 10 ** 9), - ] + (15 * 10**18, 0), + (12 * 10**18, -3 * 10**9), + (18 * 10**18, 3 * 10**9), + ], ) def test_get_cl_rebase_for_frame( bunker, diff --git a/tests/modules/accounting/bunker/test_bunker_abnormal_cl_rebase.py b/tests/modules/accounting/bunker/test_bunker_abnormal_cl_rebase.py index eac49bc85..a47132781 100644 --- a/tests/modules/accounting/bunker/test_bunker_abnormal_cl_rebase.py +++ b/tests/modules/accounting/bunker/test_bunker_abnormal_cl_rebase.py @@ -35,23 +35,23 @@ def simple_validators( @pytest.mark.parametrize( ("blockstamp", "frame_cl_rebase", "nearest_epoch_distance", "far_epoch_distance", 
"expected_is_abnormal"), [ - (simple_ref_blockstamp(40), 378585832, 0, 0, False), # > normal cl rebase - (simple_ref_blockstamp(40), 378585831, 0, 0, False), # == normal cl rebase and no check specific rebase - (simple_ref_blockstamp(40), 378585830, 10, 20, False), # < normal cl rebase but specific rebase is positive - (simple_ref_blockstamp(40), 378585830, 10, 10, False), # < normal cl rebase but specific rebase is positive - (simple_ref_blockstamp(40), 378585830, 0, 0, True), # < normal cl rebase and no check specific rebase - (simple_ref_blockstamp(20), 126195276, 10, 20, True), # < normal cl rebase and specific rebase is negative - (simple_ref_blockstamp(20), 126195276, 10, 10, True), # < normal cl rebase and specific rebase is negative + (simple_ref_blockstamp(40), 378585832, 0, 0, False), # < mistake rate + (simple_ref_blockstamp(40), 378585831.6, 0, 0, False), # == mistake rate and no check specific rebase + (simple_ref_blockstamp(40), 378585830, 10, 20, False), # > mistake rate but specific rebase is positive + (simple_ref_blockstamp(40), 378585830, 10, 10, False), # > mistake rate but specific rebase is positive + (simple_ref_blockstamp(40), 378585830, 0, 0, True), # > mistake rate and no check specific rebase + (simple_ref_blockstamp(20), 126195276, 10, 20, True), # > mistake rate and specific rebase is negative + (simple_ref_blockstamp(20), 126195276, 10, 10, True), # > mistake rate and specific rebase is negative ], ) def test_is_abnormal_cl_rebase( blockstamp, abnormal_case, mock_get_accounting_last_processing_ref_slot, - mock_get_all_lido_keys, + mock_get_used_lido_keys, mock_get_eth_distributed_events, mock_get_withdrawal_vault_balance, - mock_get_blockstamp, + mock_get_blockstamp, frame_cl_rebase, nearest_epoch_distance, far_epoch_distance, @@ -74,23 +74,24 @@ def test_is_abnormal_cl_rebase( @pytest.mark.parametrize( ("blockstamp", "expected_rebase"), [ - (simple_ref_blockstamp(40), 378585831), - (simple_ref_blockstamp(20), 126195277), + (simple_ref_blockstamp(40), 420650924), + (simple_ref_blockstamp(20), 140216974), + (simple_ref_blockstamp(123), 1120376622), ] ) def test_calculate_lido_normal_cl_rebase( abnormal_case, - mock_get_all_lido_keys, + mock_get_used_lido_keys, mock_get_accounting_last_processing_ref_slot, mock_get_eth_distributed_events, mock_get_withdrawal_vault_balance, - mock_get_blockstamp, + mock_get_blockstamp, blockstamp, expected_rebase ): abnormal_case.all_validators = abnormal_case.w3.cc.get_validators(blockstamp) abnormal_case.lido_validators = abnormal_case.w3.cc.get_validators(blockstamp)[3:6] - abnormal_case.lido_keys = abnormal_case.w3.kac.get_all_lido_keys(blockstamp) + abnormal_case.lido_keys = abnormal_case.w3.kac.get_used_lido_keys(blockstamp) result = abnormal_case._calculate_lido_normal_cl_rebase(blockstamp) @@ -104,6 +105,7 @@ def test_calculate_lido_normal_cl_rebase( (simple_ref_blockstamp(40), 10, 20, False), (simple_ref_blockstamp(20), 10, 20, True), (simple_ref_blockstamp(20), 10, 10, True), + (simple_ref_blockstamp(33), 2, 33, True), ( simple_ref_blockstamp(20), 20, @@ -122,7 +124,7 @@ def test_is_negative_specific_cl_rebase( abnormal_case, mock_get_eth_distributed_events, mock_get_withdrawal_vault_balance, - mock_get_blockstamp, + mock_get_blockstamp, blockstamp, nearest_epoch_distance, far_epoch_distance, @@ -315,7 +317,7 @@ def test_get_validators_diff_in_gwei(prev_validators, curr_validators, expected_ ) def test_get_mean_effective_balance_sum(curr_validators, last_report_validators, expected_result): - result = 
AbnormalClRebase.get_mean_effective_balance_sum( + result = AbnormalClRebase.get_mean_sum_of_effective_balance( simple_ref_blockstamp(0), simple_ref_blockstamp(0), curr_validators, @@ -376,9 +378,9 @@ def test_calculate_real_balance(validators, expected_balance): ("epoch_passed", "mean_lido", "mean_total", "expected"), [ (0, 32 * 152261 * 10 ** 9, 32 * 517310 * 10 ** 9, 0), - (1, 32 * 152261 * 10 ** 9, 32 * 517310 * 10 ** 9, 2181276464), - (225, 32 * 152261 * 10 ** 9, 32 * 517310 * 10 ** 9, 490787204556), - (450, 32 * 152261 * 10 ** 9, 32 * 517310 * 10 ** 9, 981574409112) + (1, 32 * 152261 * 10 ** 9, 32 * 517310 * 10 ** 9, 2423640516), + (225, 32 * 152261 * 10 ** 9, 32 * 517310 * 10 ** 9, 545319116173), + (450, 32 * 152261 * 10 ** 9, 32 * 517310 * 10 ** 9, 1090638232347) ] ) def test_calculate_normal_cl_rebase(epoch_passed, mean_lido, mean_total, expected): diff --git a/tests/modules/accounting/bunker/test_bunker_medterm_penalty.py b/tests/modules/accounting/bunker/test_bunker_medterm_penalty.py index 9ef66fe14..9b4bcbbbe 100644 --- a/tests/modules/accounting/bunker/test_bunker_medterm_penalty.py +++ b/tests/modules/accounting/bunker/test_bunker_medterm_penalty.py @@ -14,13 +14,13 @@ def simple_blockstamp(block_number: int,) -> ReferenceBlockStamp: def simple_validators( - from_index: int, to_index: int, slashed=False, withdrawable_epoch="8192", exit_epoch="7892" + from_index: int, to_index: int, slashed=False, withdrawable_epoch="8192", exit_epoch="7892", effective_balance=str(32 * 10 ** 9) ) -> list[Validator]: validators = [] for index in range(from_index, to_index + 1): validator = Validator( index=str(index), - balance=str(32 * 10 ** 9), + balance=effective_balance, status=ValidatorStatus.ACTIVE_ONGOING, validator=ValidatorState( pubkey=f"0x{index}", diff --git a/tests/modules/accounting/test_accounting_module.py b/tests/modules/accounting/test_accounting_module.py index f50628096..458406e18 100644 --- a/tests/modules/accounting/test_accounting_module.py +++ b/tests/modules/accounting/test_accounting_module.py @@ -1,9 +1,10 @@ -from unittest.mock import Mock +from unittest.mock import Mock, patch import pytest from src.modules.accounting import accounting from src.modules.accounting.accounting import Accounting +from src.services.withdrawal import Withdrawal from tests.factory.blockstamp import ReferenceBlockStampFactory from tests.factory.configs import ChainConfigFactory, FrameConfigFactory from tests.factory.contract_responses import LidoReportRebaseFactory @@ -50,7 +51,6 @@ def test_get_consensus_lido_state(accounting_module, lido_validators): assert count == 10 assert balance == sum((int(val.balance) for val in validators)) - @pytest.mark.unit @pytest.mark.parametrize( ("post_total_pooled_ether", "post_total_shares", "expected_share_rate"), @@ -60,16 +60,25 @@ def test_get_consensus_lido_state(accounting_module, lido_validators): (18 * 10 ** 18, 14 * 10 ** 18, 1285714285714285714285714285), ] ) -def test_get_finalization_shares_rate(accounting_module, post_total_pooled_ether, post_total_shares, expected_share_rate): +def test_get_finalization_data(accounting_module, post_total_pooled_ether, post_total_shares, expected_share_rate): lido_rebase = LidoReportRebaseFactory.build( post_total_pooled_ether=post_total_pooled_ether, post_total_shares=post_total_shares, ) + accounting_module.get_chain_config = Mock(return_value=ChainConfigFactory.build()) + accounting_module.get_frame_config = Mock( + return_value=FrameConfigFactory.build(initial_epoch=2, epochs_per_frame=1) + ) 
accounting_module.simulate_full_rebase = Mock(return_value=lido_rebase) + accounting_module._is_bunker = Mock(return_value=False) bs = ReferenceBlockStampFactory.build() - share_rate = accounting_module._get_finalization_shares_rate(bs) + with patch.object(Withdrawal, '__init__', return_value=None), \ + patch.object(Withdrawal, 'get_finalization_batches', return_value=[]): + share_rate, batches = accounting_module._get_finalization_data(bs) + + assert batches == [] assert share_rate == expected_share_rate if post_total_pooled_ether > post_total_shares: @@ -77,7 +86,6 @@ def test_get_finalization_shares_rate(accounting_module, post_total_pooled_ether else: assert share_rate <= 10 ** 27 - @pytest.mark.unit def test_get_slots_elapsed_from_initialize(accounting_module, contracts): accounting_module.get_chain_config = Mock(return_value=ChainConfigFactory.build()) @@ -111,19 +119,16 @@ def bs(self): yield ReferenceBlockStampFactory.build() def test_env_toggle(self, accounting_module, monkeypatch, bs, caplog): - accounting_module.bunker_service._get_total_supply = Mock(return_value=100) - accounting_module.simulate_cl_rebase = Mock(return_value=LidoReportRebaseFactory.build(post_total_pooled_ether=90)) + accounting_module._is_bunker = Mock(return_value=True) with monkeypatch.context() as ctx: - ctx.setattr(accounting, 'ALLOW_NEGATIVE_REBASE_REPORTING', True) + ctx.setattr(accounting, 'ALLOW_REPORTING_IN_BUNKER_MODE', True) assert accounting_module.is_reporting_allowed(bs) - assert "CL rebase is negative" in caplog.text + assert "Bunker mode is active" in caplog.text - def test_no_negative_rebase(self, accounting_module, bs): - accounting_module.bunker_service._get_total_supply = Mock(return_value=90) - accounting_module.simulate_cl_rebase = Mock(return_value=LidoReportRebaseFactory.build(post_total_pooled_ether=100)) + def test_no_bunker_mode(self, accounting_module, bs): + accounting_module._is_bunker = Mock(return_value=False) assert accounting_module.is_reporting_allowed(bs) - def test_negative_rebase(self, accounting_module, bs): - accounting_module.bunker_service._get_total_supply = Mock(return_value=100) - accounting_module.simulate_cl_rebase = Mock(return_value=LidoReportRebaseFactory.build(post_total_pooled_ether=90)) + def test_bunker_mode_active(self, accounting_module, bs): + accounting_module._is_bunker = Mock(return_value=True) assert accounting_module.is_reporting_allowed(bs) is False diff --git a/tests/modules/accounting/test_extra_data.py b/tests/modules/accounting/test_extra_data.py index 00ec015f0..0d083e59a 100644 --- a/tests/modules/accounting/test_extra_data.py +++ b/tests/modules/accounting/test_extra_data.py @@ -49,6 +49,20 @@ def test_payload(self, extra_data_service): assert payload[0].node_operator_ids == b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01' assert payload[0].vals_counts == b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02' + def test_collect_stuck_vals_in_cap(self, extra_data_service): + vals_stuck_non_zero = { + node_operator(1, 0): 1, + node_operator(1, 1): 1, + } + vals_exited_non_zero = { + node_operator(1, 0): 2, + } + extra_data = extra_data_service.collect(vals_stuck_non_zero, vals_exited_non_zero, 2, 2) + assert isinstance(extra_data, ExtraData) + assert extra_data.format == FormatList.EXTRA_DATA_FORMAT_LIST_NON_EMPTY.value + assert extra_data.extra_data == 
b'\x00\x00\x00\x00\x01\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01' + assert extra_data.data_hash == HexBytes(b'\xc7\x98\xd9\xa9\xe1A\xfe\x19\xc6"\xa0\xa0\xa3\x89N\xe3r\xfc\xff\xe6L\x08+K\x9doa\xabF\xc3\x0cs') + def test_order(self, extra_data_service, monkeypatch): vals_order = { node_operator(2, 0): 1, @@ -67,6 +81,8 @@ def test_order(self, extra_data_service, monkeypatch): assert payloads[0].node_operator_ids == b'\x00\x00\x00\x00\x00\x00\x00\x03' assert payloads[1].node_operator_ids == b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x02' + + def test_max_items_count(self, extra_data_service): """ nodeOpsCount must not be greater than maxAccountingExtraDataListItemsCount specified diff --git a/tests/modules/accounting/test_safe_border_integration.py b/tests/modules/accounting/test_safe_border_integration.py index f0321dd83..1f289cbb8 100644 --- a/tests/modules/accounting/test_safe_border_integration.py +++ b/tests/modules/accounting/test_safe_border_integration.py @@ -1,6 +1,14 @@ +from unittest.mock import MagicMock, patch + import pytest +from src.typings import ReferenceBlockStamp +from src.services.safe_border import SafeBorder from tests.factory.blockstamp import ReferenceBlockStampFactory +from tests.factory.configs import ChainConfigFactory, FrameConfigFactory, OracleReportLimitsFactory +from tests.factory.no_registry import LidoValidatorFactory, ValidatorStateFactory + +from src.constants import EPOCHS_PER_SLASHINGS_VECTOR, MIN_VALIDATOR_WITHDRAWABILITY_DELAY @pytest.fixture() @@ -8,27 +16,109 @@ def past_blockstamp(): yield ReferenceBlockStampFactory.build() +@pytest.fixture +def finalization_max_negative_rebase_epoch_shift(): + return 128 + + @pytest.fixture() -def subject(web3, contracts, keys_api_client, consensus_client): - # return SafeBorder(web3) - pass +def subject( + past_blockstamp, + web3, + contracts, + keys_api_client, + consensus_client, + lido_validators, + finalization_max_negative_rebase_epoch_shift +): + + with patch.object(SafeBorder, '_fetch_oracle_report_limits_list', return_value=OracleReportLimitsFactory.build(request_timestamp_margin=8 * 12 * 32)), \ + patch.object(SafeBorder, '_fetch_finalization_max_negative_rebase_epoch_shift', return_value=finalization_max_negative_rebase_epoch_shift): + safe_border = SafeBorder(web3, past_blockstamp, ChainConfigFactory.build(), FrameConfigFactory.build()) + + return safe_border + + +@pytest.mark.integration +def test_happy_path(subject, past_blockstamp: ReferenceBlockStamp): + is_bunker_mode = False + + assert subject.get_safe_border_epoch(is_bunker_mode) == past_blockstamp.ref_epoch - subject.finalization_default_shift + + +@pytest.mark.integration +def test_bunker_mode_negative_rebase(subject, past_blockstamp: ReferenceBlockStamp): + is_bunker_mode = True + + subject._get_bunker_start_or_last_successful_report_epoch = MagicMock( + return_value=past_blockstamp.ref_epoch + ) + subject._get_earliest_slashed_epoch_among_incomplete_slashings = MagicMock( + return_value=None + ) + + assert subject.get_safe_border_epoch(is_bunker_mode) == past_blockstamp.ref_epoch - subject.finalization_default_shift -@pytest.mark.skip(reason="waiting for testnet deployment") -def test_no_bunker_mode(subject, past_blockstamp): - pass +@pytest.mark.integration +def 
test_bunker_mode_associated_slashing_predicted( + subject: SafeBorder, + past_blockstamp: ReferenceBlockStamp, + finalization_max_negative_rebase_epoch_shift: int +): + is_bunker_mode = True + withdrawable_epoch = past_blockstamp.ref_epoch + 128 + exit_epoch = past_blockstamp.ref_epoch - MIN_VALIDATOR_WITHDRAWABILITY_DELAY + subject._get_bunker_start_or_last_successful_report_epoch = MagicMock( + return_value=past_blockstamp.ref_epoch - finalization_max_negative_rebase_epoch_shift - 1 + ) + subject.w3.lido_validators.get_lido_validators = MagicMock( + return_value=[LidoValidatorFactory.build( + validator=ValidatorStateFactory.build( + slashed=True, + withdrawable_epoch=withdrawable_epoch, + exit_epoch=exit_epoch + ) + )] + ) -@pytest.mark.skip(reason="waiting for testnet deployment") -def test_bunker_mode_associated_slashing(subject, past_blockstamp): - pass + assert subject.get_safe_border_epoch(is_bunker_mode) == ( + subject.round_epoch_by_frame(withdrawable_epoch - EPOCHS_PER_SLASHINGS_VECTOR) - subject.finalization_default_shift + ) -@pytest.mark.skip(reason="waiting for testnet deployment") -def test_bunker_mode_associated_slashing(subject, past_blockstamp): - pass +@pytest.mark.integration +def test_bunker_mode_associated_slashing_unpredicted( + subject: SafeBorder, + past_blockstamp: ReferenceBlockStamp, + finalization_max_negative_rebase_epoch_shift: int +): + is_bunker_mode = True + withdrawable_epoch = past_blockstamp.ref_epoch + 128 + exit_epoch = withdrawable_epoch - MIN_VALIDATOR_WITHDRAWABILITY_DELAY + activation_epoch = withdrawable_epoch - EPOCHS_PER_SLASHINGS_VECTOR - 2 + subject._get_blockstamp = MagicMock(return_value=past_blockstamp) + subject._get_bunker_start_or_last_successful_report_epoch = MagicMock( + return_value=past_blockstamp.ref_epoch - finalization_max_negative_rebase_epoch_shift - 1 + ) + subject._get_last_finalized_withdrawal_request_slot = MagicMock( + return_value=withdrawable_epoch - EPOCHS_PER_SLASHINGS_VECTOR - 2 + ) + subject.w3.lido_validators.get_lido_validators = MagicMock( + return_value=[ + LidoValidatorFactory.build( + validator=ValidatorStateFactory.build( + slashed=True, + withdrawable_epoch=withdrawable_epoch, + exit_epoch=exit_epoch, + activation_epoch=activation_epoch + ) + ) + ] + ) -@pytest.mark.skip(reason="waiting for testnet deployment") -def test_bunker_mode_negative_rebase(subject, past_blockstamp): - pass + assert subject.get_safe_border_epoch(is_bunker_mode) == ( + subject.round_epoch_by_frame(activation_epoch) - subject.finalization_default_shift + ) diff --git a/tests/modules/accounting/test_withdrawal_unit.py b/tests/modules/accounting/test_withdrawal_unit.py index 8ed149dad..d88946b07 100644 --- a/tests/modules/accounting/test_withdrawal_unit.py +++ b/tests/modules/accounting/test_withdrawal_unit.py @@ -45,6 +45,14 @@ def test_returns_empty_batch_if_there_is_no_requests(subject: Withdrawal): assert result == [] +@pytest.mark.unit +def test_returns_empty_batch_if_paused(subject: Withdrawal): + subject._is_requests_finalization_paused = Mock(return_value=True) + result = subject.get_finalization_batches(True, 100, 0, 0) + + assert result == [] + + @pytest.mark.unit def test_returns_batch_if_there_are_finalizable_requests(subject: Withdrawal): subject._has_unfinalized_requests = Mock(return_value=True) diff --git a/tests/modules/ejector/test_data_encode.py b/tests/modules/ejector/test_data_encode.py index 3a10077e5..7beadc1e0 100644 --- a/tests/modules/ejector/test_data_encode.py +++ b/tests/modules/ejector/test_data_encode.py @@ 
-220,6 +220,10 @@ def _max_num_fits_bytes(num_bytes: int) -> int: """ >>> _max_num_fits_bytes(1) 255 + >>> _max_num_fits_bytes(-255) + Traceback (most recent call last): + ... + ValueError: _max_num_fits_bytes: num_bytes must be positive """ if num_bytes < 0: raise ValueError("_max_num_fits_bytes: num_bytes must be positive") diff --git a/tests/modules/ejector/test_ejector.py b/tests/modules/ejector/test_ejector.py index ccf58d917..5f1cf7974 100644 --- a/tests/modules/ejector/test_ejector.py +++ b/tests/modules/ejector/test_ejector.py @@ -47,7 +47,6 @@ def ejector(web3: Web3, contracts: LidoContracts) -> Ejector: @pytest.mark.unit def test_ejector_execute_module(ejector: Ejector, blockstamp: BlockStamp) -> None: ejector.get_blockstamp_for_report = Mock(return_value=None) - ejector._is_paused = Mock(return_value=False) assert ( ejector.execute_module(last_finalized_blockstamp=blockstamp) is ModuleExecuteDelay.NEXT_FINALIZED_EPOCH @@ -56,7 +55,6 @@ def test_ejector_execute_module(ejector: Ejector, blockstamp: BlockStamp) -> Non ejector.get_blockstamp_for_report = Mock(return_value=blockstamp) ejector.process_report = Mock(return_value=None) - ejector._is_paused = Mock(return_value=False) assert ( ejector.execute_module(last_finalized_blockstamp=blockstamp) is ModuleExecuteDelay.NEXT_SLOT @@ -69,12 +67,13 @@ def test_ejector_execute_module(ejector: Ejector, blockstamp: BlockStamp) -> Non def test_ejector_execute_module_on_pause( ejector: Ejector, blockstamp: BlockStamp ) -> None: - ejector.get_blockstamp_for_report = Mock(return_value=None) + ejector.get_blockstamp_for_report = Mock(return_value=blockstamp) + ejector.build_report = Mock(return_value=(1, 294271, 0, 1, b'')) ejector._is_paused = Mock(return_value=True) assert ( ejector.execute_module(last_finalized_blockstamp=blockstamp) - is ModuleExecuteDelay.NEXT_FINALIZED_EPOCH - ), "execute_module should wait for the next finalized epoch" + is ModuleExecuteDelay.NEXT_SLOT + ), "execute_module should wait for the next slot" @pytest.mark.unit diff --git a/tests/modules/ejector/test_prediction.py b/tests/modules/ejector/test_prediction.py index 278ee8d2a..f90befe92 100644 --- a/tests/modules/ejector/test_prediction.py +++ b/tests/modules/ejector/test_prediction.py @@ -1,12 +1,13 @@ -import pytest -from unittest.mock import MagicMock, ANY +from unittest.mock import MagicMock +import pytest from hexbytes import HexBytes from web3.types import Wei -from src.services.prediction import RewardsPredictionService +import src.services.prediction as prediction_module from src.modules.submodules.typings import ChainConfig -from src.typings import SlotNumber, BlockNumber, ReferenceBlockStamp +from src.services.prediction import RewardsPredictionService +from src.typings import BlockNumber, SlotNumber from tests.factory.blockstamp import ReferenceBlockStampFactory @@ -228,47 +229,6 @@ def token_rebased_logs(tr_hashes): ] -def test_group_by_tx_hash(): - events_1 = [ - {'transactionHash': HexBytes('0x123'), 'args': {'name': 'first'}}, - {'transactionHash': HexBytes('0x456'), 'args': {'name': 'second'}}, - ] - - events_2 = [ - {'transactionHash': HexBytes('0x456'), 'args': {'value': 2}}, - {'transactionHash': HexBytes('0x123'), 'args': {'value': 1}}, - ] - - result = RewardsPredictionService._group_events_by_transaction_hash(events_1, events_2) - - assert len(result) == 2 - - for event_data in result: - if event_data['name'] == 'first': - assert event_data['value'] == 1 - elif event_data['name'] == 'second': - assert event_data['value'] == 2 - else: - # No 
other events should be here - assert False - - -@pytest.mark.unit -def get_rewards_per_slot(web3, contracts, eth_distributed_logs): - web3.lido_contracts = MagicMock() - web3.lido_contracts.events.ETHDistributed.get_logs.return_value = eth_distributed_logs - - p = RewardsPredictionService(web3) - got = p.get_ETHDistributed_events(ANY, ANY, 1675441508) - - expected = { - eth_distributed_logs[11]['transactionHash']: eth_distributed_logs[11]['args'], - eth_distributed_logs[12]['transactionHash']: eth_distributed_logs[12]['args'] - } - - assert got == expected - - @pytest.mark.unit def test_get_rewards_no_matching_events(web3, contracts): bp = ReferenceBlockStampFactory.build( @@ -297,3 +257,178 @@ def test_get_rewards_no_matching_events(web3, contracts): rewards = p.get_rewards_per_epoch(bp, cc) assert rewards == Wei(0) + + +@pytest.mark.unit +def test_get_rewards_prediction(web3, contracts, monkeypatch: pytest.MonkeyPatch): + bp = ReferenceBlockStampFactory.build( + block_number=BlockNumber(14), + block_timestamp=1675441520, + ref_slot=SlotNumber(100000), + slot_number=14, + block_hash=None, + ) + + cc = ChainConfig( + slots_per_epoch=32, + seconds_per_slot=12, + genesis_time=0, + ) + + SOME_EVENTS = object() + with monkeypatch.context() as m: + m.setattr( + RewardsPredictionService, + "_get_prediction_duration_in_slots", + MagicMock(return_value=12), + ) + m.setattr( + prediction_module, + "get_events_in_past", + MagicMock(return_value=SOME_EVENTS), + ) + + m.setattr( + RewardsPredictionService, + "_group_events_by_transaction_hash", + MagicMock( + return_value=[ + { + "postCLBalance": Wei(24), + "withdrawalsWithdrawn": Wei(0), + "preCLBalance": Wei(0), + "executionLayerRewardsWithdrawn": Wei(0), + "timeElapsed": 12, + }, + { + "postCLBalance": Wei(0), + "withdrawalsWithdrawn": Wei(0), + "preCLBalance": Wei(0), + "executionLayerRewardsWithdrawn": Wei(12), + "timeElapsed": 12, + }, + ] + ), + ) + + p = RewardsPredictionService(web3) + rewards = p.get_rewards_per_epoch(bp, cc) + assert rewards == Wei(576) + + +@pytest.mark.unit +@pytest.mark.parametrize( + "events_1, events_2", + [ + ( + [ + {"transactionHash": HexBytes("0x456"), "args": {"value": 2}}, + {"transactionHash": HexBytes("0x123"), "args": {"value": 1}}, + ], + [ + {"transactionHash": HexBytes("0x456"), "args": {"value": 2}}, + {"transactionHash": HexBytes("0x123"), "args": {"value": 1}}, + {"transactionHash": HexBytes("0x123"), "args": {"value": 3}}, + ], + ), + ( + [ + {"transactionHash": HexBytes("0x456"), "args": {"value": 2}}, + {"transactionHash": HexBytes("0x123"), "args": {"value": 1}}, + ], + [ + {"transactionHash": HexBytes("0x456"), "args": {"value": 2}}, + {"transactionHash": HexBytes("0x123"), "args": {"value": 1}}, + {"transactionHash": HexBytes("0x567"), "args": {"value": 3}}, + ], + ), + ( + [ + {"transactionHash": HexBytes("0x456"), "args": {"value": 2}}, + {"transactionHash": HexBytes("0x123"), "args": {"value": 1}}, + {"transactionHash": HexBytes("0x567"), "args": {"value": 3}}, + ], + [ + {"transactionHash": HexBytes("0x456"), "args": {"value": 2}}, + {"transactionHash": HexBytes("0x123"), "args": {"value": 1}}, + ], + ), + ( + [ + {"transactionHash": HexBytes("0x456"), "args": {"value": 2}}, + {"transactionHash": HexBytes("0x123"), "args": {"value": 1}}, + {"transactionHash": HexBytes("0x123"), "args": {"value": 3}}, + ], + [ + {"transactionHash": HexBytes("0x456"), "args": {"value": 2}}, + {"transactionHash": HexBytes("0x123"), "args": {"value": 1}}, + ], + ), + ( + [ + {"transactionHash": HexBytes("0x456"), 
"args": {"value": 2}}, + {"transactionHash": HexBytes("0x123"), "args": {"value": 1}}, + ], + [ + {"transactionHash": HexBytes("0x345"), "args": {"value": 2}}, + {"transactionHash": HexBytes("0x567"), "args": {"value": 1}}, + ], + ), + ], +) +def test_group_events_incosistent(events_1, events_2): + with pytest.raises(ValueError, match="Events are inconsistent"): + RewardsPredictionService._group_events_by_transaction_hash(events_1, events_2) + + +@pytest.mark.unit +@pytest.mark.parametrize( + "events_1, events_2, expected", + [ + ( + [ + {"transactionHash": HexBytes("0x456"), "args": {"a": 1}}, + {"transactionHash": HexBytes("0x123"), "args": {"a": 2}}, + ], + [ + {"transactionHash": HexBytes("0x123"), "args": {"a": 3}}, + {"transactionHash": HexBytes("0x456"), "args": {"a": 4}}, + ], + [ + {"a": 4}, + {"a": 3}, + ], + ), + ( + [ + {"transactionHash": HexBytes("0x456"), "args": {"a": 1}}, + {"transactionHash": HexBytes("0x123"), "args": {"a": 2}}, + ], + [ + {"transactionHash": HexBytes("0x123"), "args": {"b": 3}}, + {"transactionHash": HexBytes("0x456"), "args": {"b": 4}}, + ], + [ + {"a": 1, "b": 4}, + {"a": 2, "b": 3}, + ], + ), + ( + [ + {"transactionHash": HexBytes("0x456"), "args": {"a": 1}}, + {"transactionHash": HexBytes("0x123"), "args": {"a": 2}}, + ], + [ + {"transactionHash": HexBytes("0x123"), "args": {}}, + {"transactionHash": HexBytes("0x456"), "args": {"b": 4}}, + ], + [ + {"a": 1, "b": 4}, + {"a": 2}, + ], + ), + ] +) +def test_group_events(events_1, events_2, expected): + actual = RewardsPredictionService._group_events_by_transaction_hash(events_1, events_2) + assert actual == expected, "Unexpected merged events array" diff --git a/tests/modules/submodules/test_oracle_module.py b/tests/modules/submodules/test_oracle_module.py index 6846581d2..f12a38cf4 100644 --- a/tests/modules/submodules/test_oracle_module.py +++ b/tests/modules/submodules/test_oracle_module.py @@ -1,10 +1,16 @@ from unittest.mock import Mock +from typing import Type import pytest +from web3_multi_provider.multi_http_provider import NoActiveProviderError +from src.modules.submodules.exceptions import IsNotMemberException, IncompatibleContractVersion from src.modules.submodules.oracle_module import BaseModule, ModuleExecuteDelay +from src.providers.http_provider import NotOkResponse +from src.providers.keys.client import KeysOutdatedException from src.typings import BlockStamp -from src.web3py.extensions import LidoContracts +from src.utils.slot import InconsistentData, NoSlotsAvailable, SlotNotFinalized +from src import variables from tests.factory.blockstamp import ReferenceBlockStampFactory @@ -15,11 +21,17 @@ def execute_module(self, blockstamp): self.call_count += 1 return ModuleExecuteDelay.NEXT_FINALIZED_EPOCH + def refresh_contracts(self): + pass + + def clear_cache(self): + pass + @pytest.fixture(autouse=True) def set_default_sleep(monkeypatch): with monkeypatch.context(): - monkeypatch.setattr(BaseModule, "DEFAULT_SLEEP", 1) + monkeypatch.setattr(variables, "CYCLE_SLEEP_IN_SECONDS", 1) yield @@ -42,17 +54,79 @@ def test_receive_last_finalized_slot(oracle): @pytest.mark.unit def test_cycle_handler_run_once_per_slot(oracle, contracts, web3): - web3.lido_contracts.reload_contracts = Mock() + web3.lido_contracts.has_contract_address_changed = Mock() oracle._receive_last_finalized_slot = Mock(return_value=ReferenceBlockStampFactory.build(slot_number=1)) oracle._cycle_handler() assert oracle.call_count == 1 - assert web3.lido_contracts.reload_contracts.call_count == 1 + assert 
web3.lido_contracts.has_contract_address_changed.call_count == 1 oracle._cycle_handler() assert oracle.call_count == 1 - assert web3.lido_contracts.reload_contracts.call_count == 1 + assert web3.lido_contracts.has_contract_address_changed.call_count == 1 oracle._receive_last_finalized_slot = Mock(return_value=ReferenceBlockStampFactory.build(slot_number=2)) oracle._cycle_handler() assert oracle.call_count == 2 - assert web3.lido_contracts.reload_contracts.call_count == 2 + assert web3.lido_contracts.has_contract_address_changed.call_count == 2 + + +@pytest.mark.unit +def test_run_as_daemon(oracle): + times = 0 + + def _throw_on_third_call(): + nonlocal times + times += 1 + if times == 3: + raise Exception("Cycle failed") + + oracle._cycle_handler = Mock(side_effect=_throw_on_third_call) + + with pytest.raises(Exception, match="Cycle failed"): + oracle.run_as_daemon() + + assert oracle._cycle_handler.call_count == 3 + + +@pytest.mark.unit +@pytest.mark.parametrize( + "ex", + [ + TimeoutError, + NoActiveProviderError, + ConnectionError, + NotOkResponse, + NoSlotsAvailable, + SlotNotFinalized, + InconsistentData, + KeysOutdatedException, + ], +) +def test_run_cycle_no_fail_on_retryable_error(oracle: BaseModule, ex: Type[Exception]): + def _throw_with(*args): + if ex is NotOkResponse: + raise ex(status=500, text="Fake exception") # type: ignore + raise ex("Fake exception") + + oracle.execute_module = Mock(side_effect=_throw_with) + + ret = oracle.run_cycle(ReferenceBlockStampFactory.build()) + assert ret is ModuleExecuteDelay.NEXT_SLOT + + +@pytest.mark.unit +@pytest.mark.parametrize( + "ex", + [ + IsNotMemberException, + IncompatibleContractVersion, + ], +) +def test_run_cycle_fails_on_critical_exceptions(oracle: BaseModule, ex: Type[Exception]): + def _throw_with(*args): + raise ex("Fake exception") + + oracle.execute_module = Mock(side_effect=_throw_with) + + with pytest.raises(ex, match="Fake exception"): + oracle.run_cycle(ReferenceBlockStampFactory.build()) diff --git a/tests/providers.py b/tests/providers.py index ca7e8821b..88e86b67f 100644 --- a/tests/providers.py +++ b/tests/providers.py @@ -2,7 +2,7 @@ import os from contextlib import contextmanager from pathlib import Path -from typing import Any, Optional, Sequence +from typing import Any, Optional, Sequence, Callable from web3 import Web3 from web3.module import Module @@ -108,11 +108,17 @@ def use_mock(self, mock_path: Path): class ResponseFromFileHTTPProvider(HTTPProvider, Module, FromFile): def __init__(self, mock_path: Path, w3: Web3): self.w3 = w3 - HTTPProvider.__init__(self, host="") + HTTPProvider.__init__(self, hosts=[""]) Module.__init__(self, w3) FromFile.__init__(self, mock_path) - def _get(self, endpoint: str, path_params: Optional[Sequence[str | int]] = None, query_params: Optional[dict] = None) -> dict | list: + def _get( + self, + endpoint: str, + path_params: Optional[Sequence[str | int]] = None, + query_params: Optional[dict] = None, + force_raise: Callable[..., Exception | None] = lambda _: None, + ) -> dict | list: for response in self.responses: url = endpoint.format(*path_params) if path_params else endpoint if response.get('url') == url and json.dumps(response["params"]) == json.dumps(query_params): @@ -124,12 +130,18 @@ class UpdateResponsesHTTPProvider(HTTPProvider, Module, UpdateResponses): def __init__(self, mock_path: Path, host: str, w3: Web3): self.w3 = w3 - super().__init__(host) + super().__init__([host]) super(Module, self).__init__() self.responses = [] self.from_file = 
ResponseFromFileHTTPProvider(mock_path, w3) - def _get(self, endpoint: str, path_params: Optional[Sequence[str | int]] = None, query_params: Optional[dict] = None) -> dict | list: + def _get( + self, + endpoint: str, + path_params: Optional[Sequence[str | int]] = None, + query_params: Optional[dict] = None, + force_raise: Callable[..., Exception | None] = lambda _: None, + ) -> dict | list: url = endpoint.format(*path_params) if path_params else endpoint try: response = self.from_file._get(url, query_params=query_params) # pylint: disable=protected-access diff --git a/tests/providers_clients/test_http_provider.py b/tests/providers_clients/test_http_provider.py index 8893dcf43..9eeebaa05 100644 --- a/tests/providers_clients/test_http_provider.py +++ b/tests/providers_clients/test_http_provider.py @@ -1,3 +1,8 @@ +# pylint: disable=protected-access +from unittest.mock import Mock + +import pytest + from src.providers.http_provider import HTTPProvider @@ -11,3 +16,43 @@ def test_urljoin(): assert join('http://localhost/', 'api/') == 'http://localhost/api/' assert join('http://localhost/token', 'api') == 'http://localhost/token/api' assert join('http://localhost/token/', 'api') == 'http://localhost/token/api' + + +def test_all_fallbacks_ok(): + provider = HTTPProvider(['http://localhost:1', 'http://localhost:2']) + provider._get_without_fallbacks = lambda host, endpoint, path_params, query_params: (host, endpoint) + assert provider._get('test') == ('http://localhost:1', 'test') + + +def test_all_fallbacks_bad(): + provider = HTTPProvider(['http://localhost:1', 'http://localhost:2']) + with pytest.raises(Exception): + provider._get('test') + + +def test_first_fallback_bad(): + def _simple_get(host, endpoint, *_): + if host == 'http://localhost:1': + raise Exception('Bad host') # pylint: disable=broad-exception-raised + return host, endpoint + + provider = HTTPProvider(['http://localhost:1', 'http://localhost:2']) + provider._get_without_fallbacks = _simple_get + assert provider._get('test') == ('http://localhost:2', 'test') + + +def test_force_raise(): + + class CustomError(Exception): + pass + + def _simple_get(host, endpoint, *_): + if host == 'http://localhost:1': + raise Exception('Bad host') # pylint: disable=broad-exception-raised + return host, endpoint + + provider = HTTPProvider(['http://localhost:1', 'http://localhost:2']) + provider._get_without_fallbacks = Mock(side_effect=_simple_get) + with pytest.raises(CustomError): + provider._get('test', force_raise=lambda errors: CustomError) + provider._get_without_fallbacks.assert_called_once_with('http://localhost:1', 'test', None, None) diff --git a/tests/providers_clients/test_keys_api_client.py b/tests/providers_clients/test_keys_api_client.py index be30e3bec..641d95e6e 100644 --- a/tests/providers_clients/test_keys_api_client.py +++ b/tests/providers_clients/test_keys_api_client.py @@ -1,14 +1,15 @@ """Simple tests for the keys api client responses validity.""" +from unittest.mock import Mock + import pytest -from src.providers.keys.client import KeysAPIClient +import src.providers.keys.client as keys_api_client_module +from src import variables +from src.providers.keys.client import KeysAPIClient, KeysOutdatedException from src.variables import KEYS_API_URI from tests.factory.blockstamp import ReferenceBlockStampFactory -pytestmark = pytest.mark.integration - - @pytest.fixture() def keys_api_client(): return KeysAPIClient(KEYS_API_URI) @@ -17,7 +18,40 @@ def keys_api_client(): empty_blockstamp = 
ReferenceBlockStampFactory.build(block_number=0) -def test_get_all_lido_keys(keys_api_client): - lido_keys = keys_api_client.get_all_lido_keys(empty_blockstamp) +@pytest.mark.integration +def test_get_used_lido_keys(keys_api_client): + lido_keys = keys_api_client.get_used_lido_keys(empty_blockstamp) assert lido_keys + +@pytest.mark.integration +def test_get_status(keys_api_client): + status = keys_api_client.get_status() + assert status + + +@pytest.mark.unit +def test_get_with_blockstamp_retries_exhausted(keys_api_client, monkeypatch): + variables.HTTP_REQUEST_SLEEP_BEFORE_RETRY_IN_SECONDS = 1 + keys_api_client._get = Mock( + return_value=( + None, + { + "meta": { + "elBlockSnapshot": { + "blockNumber": empty_blockstamp.block_number - 1 + } + } + }, + ) + ) + + sleep_mock = Mock() + + with pytest.raises(KeysOutdatedException): + with monkeypatch.context() as m: + m.setattr(keys_api_client_module, "sleep", sleep_mock) + keys_api_client.get_used_lido_keys(empty_blockstamp) + + assert sleep_mock.call_count == variables.HTTP_REQUEST_RETRY_COUNT - 1 + sleep_mock.assert_called_with(variables.HTTP_REQUEST_SLEEP_BEFORE_RETRY_IN_SECONDS) diff --git a/tests/utils/test_cache.py b/tests/utils/test_cache.py new file mode 100644 index 000000000..4b31156d5 --- /dev/null +++ b/tests/utils/test_cache.py @@ -0,0 +1,19 @@ +from functools import lru_cache + +from src.utils.cache import clear_object_lru_cache + + +class Calc: + @lru_cache + def get(self, a, b): + return a + b + + +def test_clear_object_lru_cache(): + calc = Calc() + calc.get(1, 2) + assert calc.get.cache_info().currsize == 1 + + clear_object_lru_cache(calc) + + assert calc.get.cache_info().currsize == 0 diff --git a/tests/utils/test_dataclass.py b/tests/utils/test_dataclass.py index a65bce6ae..f72329380 100644 --- a/tests/utils/test_dataclass.py +++ b/tests/utils/test_dataclass.py @@ -1,8 +1,9 @@ from dataclasses import dataclass, is_dataclass +from typing import Any import pytest -from src.utils.dataclass import list_of_dataclasses, Nested, FromResponse +from src.utils.dataclass import DecodeToDataclassException, list_of_dataclasses, Nested, FromResponse pytestmark = pytest.mark.unit @@ -64,6 +65,44 @@ def get_cars() -> list[Car]: assert is_dataclass(wheel) +def test_list_of_dataclasses_with_wrong_type(): + @list_of_dataclasses(Car) + def get_cars_with_already_as_cars() -> list[Car]: + return [ + Car(**{ + 'wheel_count': 4, + 'wheels': [{'size': 2}, {'size': 4}], + 'wheels_immutable': ({'size': 2}, {'size': 4}), + 'state': {'condition': 'good'}, + }) + ] + + with pytest.raises(DecodeToDataclassException): + get_cars_with_already_as_cars() + + +def test_list_of_dataclasses_with_mixed_types(): + @list_of_dataclasses(Car) + def get_cars_inconsistent() -> list[Any]: + return [ + { + 'wheel_count': 2, + 'wheels': [{'size': 1}], + 'wheels_immutable': ({'size': 1},), + 'state': {'condition': 'bad'}, + }, + Car(**{ + 'wheel_count': 4, + 'wheels': [{'size': 2}, {'size': 4}], + 'wheels_immutable': ({'size': 2}, {'size': 4}), + 'state': {'condition': 'good'}, + }) + ] + + with pytest.raises(TypeError): + get_cars_inconsistent() + + def test_dataclasses_utils_fail_on_unexpected_key(): with pytest.raises(TypeError): Car( diff --git a/tests/utils/test_types.py b/tests/utils/test_types.py new file mode 100644 index 000000000..78bdcefd3 --- /dev/null +++ b/tests/utils/test_types.py @@ -0,0 +1,17 @@ +import pytest + +from src.utils.types import bytes_to_hex_str, hex_str_to_bytes + + +@pytest.mark.unit +def test_bytes_to_hex_str(): + assert 
bytes_to_hex_str(b"") == "0x"
+    assert bytes_to_hex_str(b"\x00") == "0x00"
+    assert bytes_to_hex_str(b"\x00\x01\x02") == "0x000102"
+
+
+@pytest.mark.unit
+def test_hex_str_to_bytes():
+    assert hex_str_to_bytes("0x") == b""
+    assert hex_str_to_bytes("0x00") == b"\x00"
+    assert hex_str_to_bytes("0x000102") == b"\x00\x01\x02"
diff --git a/tests/utils/test_validator_state_utils.py b/tests/utils/test_validator_state_utils.py
index a2020a81b..7c2db5104 100644
--- a/tests/utils/test_validator_state_utils.py
+++ b/tests/utils/test_validator_state_utils.py
@@ -1,48 +1,52 @@
 import pytest
 
-from src.constants import FAR_FUTURE_EPOCH
+from src.constants import FAR_FUTURE_EPOCH, EFFECTIVE_BALANCE_INCREMENT
 from src.providers.consensus.typings import Validator, ValidatorStatus, ValidatorState
-from src.typings import EpochNumber
-from src.utils.validator_state import calculate_active_effective_balance_sum, is_on_exit, get_validator_age
-
-test_data_calculate_total_effective_balance = [
-    (
-        {'0x0': Validator('0', '1', ValidatorStatus.ACTIVE_ONGOING,
-                          ValidatorState('0x0', '', '2', False, '', '1', '100500', '')),
-         '0x1': Validator('1', '1', ValidatorStatus.ACTIVE_EXITING,
-                          ValidatorState('0x1', '', '3', False, '', '1', '100500', '')),
-         '0x2': Validator('2', '1', ValidatorStatus.ACTIVE_SLASHED,
-                          ValidatorState('0x2', '', '4', True, '', '1', '100500', ''))},
-        9,
-    ),
-    (
-        {'0x0': Validator('0', '1', ValidatorStatus.ACTIVE_ONGOING,
-                          ValidatorState('0x0', '', '2', False, '', '1', '100500', '')),
-         '0x1': Validator('1', '1', ValidatorStatus.EXITED_SLASHED,
-                          ValidatorState('0x1', '', '2', True, '', '1', '200', ''))},
-        2,
-    ),
-]
-
-
-@pytest.mark.unit
-@pytest.mark.parametrize(("validators", "expected_balance"), test_data_calculate_total_effective_balance)
-def test_calculate_active_effective_balance_sum(validators, expected_balance):
-    total_effective_balance = calculate_active_effective_balance_sum(validators.values(), EpochNumber(15000))
-    assert total_effective_balance == expected_balance
+from src.typings import EpochNumber, Gwei
+from src.utils.validator_state import (
+    calculate_total_active_effective_balance,
+    is_on_exit,
+    get_validator_age,
+    calculate_active_effective_balance_sum,
+    is_validator_eligible_to_exit,
+    is_fully_withdrawable_validator,
+    is_partially_withdrawable_validator,
+    has_eth1_withdrawal_credential,
+    is_exited_validator,
+    is_active_validator
+)
+from tests.factory.no_registry import ValidatorFactory
+from tests.modules.accounting.bunker.test_bunker_abnormal_cl_rebase import simple_validators
 
 
 @pytest.mark.unit
 @pytest.mark.parametrize(
-    ('exit_epoch', 'expected'),
-    [(100500, True),
-     (FAR_FUTURE_EPOCH, False)]
+    ("validators", "expected_balance"),
+    [
+        ([], 0),
+        (
+            [Validator('0', '1', ValidatorStatus.ACTIVE_ONGOING,
+                       ValidatorState('0x0', '', str(32 * 10 ** 9), False, '', '15000', '15001', '')),
+             Validator('1', '1', ValidatorStatus.ACTIVE_EXITING,
+                       ValidatorState('0x1', '', str(31 * 10 ** 9), False, '', '14999', '15000', '')),
+             Validator('2', '1', ValidatorStatus.ACTIVE_SLASHED,
+                       ValidatorState('0x2', '', str(31 * 10 ** 9), True, '', '15000', '15001', ''))],
+            63 * 10 ** 9,
+        ),
+        (
+            [
+                Validator('0', '1', ValidatorStatus.ACTIVE_ONGOING,
+                          ValidatorState('0x0', '', str(32 * 10 ** 9), False, '', '14000', '14999', '')),
+                Validator('1', '1', ValidatorStatus.EXITED_SLASHED,
+                          ValidatorState('0x1', '', str(32 * 10 ** 9), True, '', '15000', '15000', ''))
+            ],
+            0,
+        ),
+    ]
 )
-def test_is_on_exit(exit_epoch, 
expected): - validator = object.__new__(Validator) - validator.validator = object.__new__(ValidatorState) - validator.validator.exit_epoch = exit_epoch - assert is_on_exit(validator) == expected +def test_calculate_active_effective_balance_sum(validators, expected_balance): + total_effective_balance = calculate_active_effective_balance_sum(validators, EpochNumber(15000)) + assert total_effective_balance == expected_balance @pytest.mark.unit @@ -59,3 +64,196 @@ def test_get_validator_age(validator_activation_epoch, ref_epoch, expected_resul validator.validator = object.__new__(ValidatorState) validator.validator.activation_epoch = validator_activation_epoch assert get_validator_age(validator, ref_epoch) == expected_result + +@pytest.mark.unit +@pytest.mark.parametrize("activation_epoch, epoch, exit_epoch, expected", [ + (176720, 176720, 176722, True), + (176720, 176721, 176722, True), + (176900, 176900, 2 ** 64 - 1, True), + (176901, 176900, 2 ** 64 - 1, False), + (176720, 176720, 176720, False), + (176900, 176720, 176720, False), + (176900, 176720, 176750, False), +]) +def test_is_active_validator(activation_epoch, epoch, exit_epoch, expected): + validator = ValidatorFactory.build() + validator.validator.activation_epoch = activation_epoch + validator.validator.exit_epoch = exit_epoch + + actual = is_active_validator(validator, EpochNumber(epoch)) + assert actual == expected + + +@pytest.mark.unit +@pytest.mark.parametrize("exit_epoch, epoch, expected", [ + (176720, 176722, True), + (176730, 176722, False), + (2 ** 64 - 1, 176722, False), +]) +def test_is_exited_validator(exit_epoch, epoch, expected): + validator = ValidatorFactory.build() + validator.validator.exit_epoch = exit_epoch + + actual = is_exited_validator(validator, EpochNumber(epoch)) + assert actual == expected + + +@pytest.mark.unit +@pytest.mark.parametrize("exit_epoch, expected", [ + (176720, True), + (FAR_FUTURE_EPOCH, False), +]) +def test_is_on_exit(exit_epoch, expected): + validator = ValidatorFactory.build() + validator.validator.exit_epoch = exit_epoch + + actual = is_on_exit(validator) + assert actual == expected + + +@pytest.mark.unit +@pytest.mark.parametrize("withdrawal_credentials, expected", [ + ('0x01ba', True), + ('01ab', False), + ('0x00ba', False), + ('00ba', False), +]) +def test_has_eth1_withdrawal_credential(withdrawal_credentials, expected): + validator = ValidatorFactory.build() + validator.validator.withdrawal_credentials = withdrawal_credentials + + actual = has_eth1_withdrawal_credential(validator) + assert actual == expected + + +@pytest.mark.unit +@pytest.mark.parametrize("withdrawable_epoch, balance, epoch, expected", [ + (176720, 32 * (10 ** 10), 176722, True), + (176722, 32 * (10 ** 10), 176722, True), + (176723, 32 * (10 ** 10), 176722, False), + (176722, 0, 176722, False), +]) +def test_is_fully_withdrawable_validator(withdrawable_epoch, balance, epoch, expected): + validator = ValidatorFactory.build() + validator.validator.withdrawable_epoch = withdrawable_epoch + validator.validator.withdrawal_credentials = '0x01ba' + validator.balance = balance + + actual = is_fully_withdrawable_validator(validator, EpochNumber(epoch)) + assert actual == expected + + +@pytest.mark.unit +@pytest.mark.parametrize("effective_balance, add_balance, withdrawal_credentials, expected", [ + (32 * 10**9, 1, '0x01ba', True), + (32 * 10**9, 1, '0x0', False), + (32 * 10**8, 0, '0x01ba', False), + (32 * 10**9, 0, '0x', False), + (0, 0, '0x01ba', False), +]) +def test_is_partially_withdrawable(effective_balance, 
add_balance, withdrawal_credentials, expected): + validator = ValidatorFactory.build() + validator.validator.withdrawal_credentials = withdrawal_credentials + validator.validator.effective_balance = effective_balance + validator.balance = effective_balance + add_balance + + actual = is_partially_withdrawable_validator(validator) + assert actual == expected + + +@pytest.mark.unit +@pytest.mark.parametrize("activation_epoch, exit_epoch, epoch, expected", [ + (170000, 2 ** 64 - 1, 170256, True), + (170000, 170200, 170256, False), + (170000, 2 ** 64 - 1, 170255, False), +]) +def test_is_validator_eligible_to_exit(activation_epoch, exit_epoch, epoch, expected): + validator = ValidatorFactory.build() + validator.validator.activation_epoch = activation_epoch + validator.validator.exit_epoch = exit_epoch + + actual = is_validator_eligible_to_exit(validator, EpochNumber(epoch)) + assert actual == expected + + +class TestCalculateTotalEffectiveBalance: + @pytest.fixture + def validators(self): + validators = ValidatorFactory.batch(2) + + validators[0].validator.activation_epoch = 170000 + validators[0].validator.exit_epoch = 2 ** 64 - 1 + validators[0].validator.effective_balance = 1000000000 + validators[0].validator.withdrawal_credentials = '0x01ba' + + validators[1].validator.activation_epoch = 170001 + validators[1].validator.exit_epoch = 2 ** 64 - 1 + validators[1].validator.effective_balance = 2000000000 + validators[1].validator.withdrawal_credentials = '0x01ba' + + return validators + + @pytest.mark.unit + def test_no_validators(self): + actual = calculate_total_active_effective_balance([], EpochNumber(170256)) + assert actual == Gwei(1 * 10**9) + + @pytest.mark.unit + def test_all_active(self, validators: list[Validator]): + actual = calculate_total_active_effective_balance( + validators, EpochNumber(170256) + ) + assert actual == Gwei(3000000000) + + @pytest.mark.unit + def test_no_balance_validators(self): + actual = calculate_total_active_effective_balance( + simple_validators(0, 9, effective_balance="0"), EpochNumber(170256) + ) + assert actual == EFFECTIVE_BALANCE_INCREMENT + + @pytest.mark.unit + def test_skip_exiting(self, validators: list[Validator]): + validators[0].validator.exit_epoch = "170256" + + actual = calculate_total_active_effective_balance( + validators, EpochNumber(170256) + ) + assert actual == Gwei(2000000000) + + @pytest.mark.unit + def test_skip_exited(self, validators: list[Validator]): + validators[0].validator.exit_epoch = "170000" + + actual = calculate_total_active_effective_balance( + validators, EpochNumber(170256) + ) + assert actual == Gwei(2000000000) + + @pytest.mark.unit + def test_skip_exited_slashed(self, validators: list[Validator]): + validators[0].validator.exit_epoch = "170256" + validators[0].validator.slashed = True + + actual = calculate_total_active_effective_balance( + validators, EpochNumber(170256) + ) + assert actual == Gwei(2000000000) + + @pytest.mark.unit + def test_include_slashed(self, validators: list[Validator]): + validators[0].validator.slashed = True + + actual = calculate_total_active_effective_balance( + validators, EpochNumber(170256) + ) + assert actual == Gwei(3000000000) + + @pytest.mark.unit + def test_skip_ongoing(self, validators: list[Validator]): + validators[0].validator.activation_epoch = "170257" + + actual = calculate_total_active_effective_balance( + validators, EpochNumber(170256) + ) + assert actual == Gwei(2000000000) diff --git a/tests/web3_extentions/test_lido_validators.py 
b/tests/web3_extentions/test_lido_validators.py index ad496f3a9..326bfa761 100644 --- a/tests/web3_extentions/test_lido_validators.py +++ b/tests/web3_extentions/test_lido_validators.py @@ -4,18 +4,19 @@ from tests.factory.blockstamp import ReferenceBlockStampFactory from tests.factory.no_registry import ValidatorFactory, LidoKeyFactory +from src.web3py.extensions.lido_validators import CountOfKeysDiffersException blockstamp = ReferenceBlockStampFactory.build() @pytest.mark.unit -def test_get_lido_validators(web3, lido_validators): +def test_get_lido_validators(web3, lido_validators, contracts): validators = ValidatorFactory.batch(30) lido_keys = LidoKeyFactory.generate_for_validators(validators[:10]) - lido_keys.extend(LidoKeyFactory.batch(5)) + lido_keys.extend(LidoKeyFactory.batch(10)) web3.cc.get_validators = Mock(return_value=validators) - web3.kac.get_all_lido_keys = Mock(return_value=lido_keys) + web3.kac.get_used_lido_keys = Mock(return_value=lido_keys) lido_validators = web3.lido_validators.get_lido_validators(blockstamp) @@ -27,6 +28,17 @@ def test_get_lido_validators(web3, lido_validators): assert v.lido_id.key == v.validator.pubkey +@pytest.mark.unit +def test_kapi_has_lesser_keys_than_deposited_validators_count(web3, lido_validators, contracts): + validators = ValidatorFactory.batch(10) + lido_keys = [] + + web3.cc.get_validators = Mock(return_value=validators) + web3.kac.get_used_lido_keys = Mock(return_value=lido_keys) + + with pytest.raises(CountOfKeysDiffersException): + web3.lido_validators.get_lido_validators(blockstamp) + @pytest.mark.unit def test_get_node_operators(web3, lido_validators, contracts): node_operators = web3.lido_validators.get_lido_node_operators(blockstamp)