From 45120192347b41b045bbec96f3adea6fb5775ff1 Mon Sep 17 00:00:00 2001 From: Lev Gorodetskii Date: Mon, 9 Sep 2024 20:27:18 -0300 Subject: [PATCH] Bump version 8.0.0 (#1087) Co-authored-by: Vladimir Bobrikov Co-authored-by: Lev Gorodetskiy Co-authored-by: Wizard1209 <34334729+Wizard1209@users.noreply.github.com> --- CHANGELOG.md | 1195 +---------------- Makefile | 11 +- benchmarks/Makefile | 12 +- benchmarks/README.md | 36 +- docs/0.quickstart-evm.md | 10 +- docs/0.quickstart-starknet.md | 10 +- docs/0.quickstart-tezos.md | 10 +- docs/1.getting-started/1.installation.md | 2 +- .../{9.hooks.md => 10.hooks.md} | 71 +- docs/1.getting-started/2.core-concepts.md | 5 +- docs/1.getting-started/3.config.md | 51 +- .../5.database.md} | 65 +- .../{5.models.md => 6.models.md} | 0 .../{6.datasources.md => 7.datasources.md} | 20 +- .../{7.indexes.md => 8.indexes.md} | 2 +- .../{8.handlers.md => 9.handlers.md} | 0 docs/10.supported-networks/0.overview.md | 12 +- docs/12.faq.md | 12 + docs/13.troubleshooting.md | 4 +- docs/15.glossary.md | 141 ++ docs/{15.thanks.md => 16.thanks.md} | 0 docs/2.indexes/7.tezos_operations.md | 10 +- docs/2.indexes/_evm.md | 2 +- docs/2.indexes/_starknet.md | 2 +- .../{4.evm_subsquid.md => 1.evm_subsquid.md} | 0 docs/3.datasources/{5.http.md => 10.http.md} | 0 .../{3.evm_node.md => 2.evm_node.md} | 0 ...{1.abi_etherscan.md => 3.abi_etherscan.md} | 0 ...net_subsquid.md => 4.starknet_subsquid.md} | 0 ...{7.starknet_node.md => 5.starknet_node.md} | 0 .../{9.tezos_tzkt.md => 6.tezos_tzkt.md} | 0 ...10.tzip_metadata.md => 7.tzip_metadata.md} | 4 +- .../{2.coinbase.md => 8.coinbase.md} | 0 docs/3.datasources/{6.ipfs.md => 9.ipfs.md} | 0 docs/4.graphql/_dir.yml | 2 +- .../2.docker.md => 5.advanced/1.docker.md} | 27 +- docs/5.advanced/1.reindexing.md | 36 - ...-interface.md => 11.metadata-interface.md} | 7 +- docs/5.advanced/2.environment-variables.md | 26 + docs/5.advanced/2.feature-flags.md | 39 - docs/5.advanced/3.monitoring.md | 123 ++ 
docs/5.advanced/3.sql.md | 65 - .../{5.performance.md => 4.performance.md} | 0 .../6.backups.md => 5.advanced/5.backups.md} | 0 docs/5.advanced/6.sqd-cloud.md | 65 + docs/6.deployment/3.sentry.md | 17 - docs/6.deployment/4.prometheus.md | 36 - docs/6.deployment/5.logging.md | 26 - docs/6.deployment/7.monitoring.md | 63 - docs/6.deployment/_dir.yml | 1 - docs/7.references/1.cli.md | 12 + docs/7.references/2.config.md | 33 +- docs/7.references/3.context.md | 4 +- docs/8.examples/2.in-production.md | 2 +- docs/8.examples/_demos_table.md | 38 +- docs/9.release-notes/1.v8.0.md | 70 +- docs/9.release-notes/2.v7.5.md | 2 +- docs/9.release-notes/3.v7.4.md | 2 +- docs/9.release-notes/4.v7.3.md | 2 +- docs/9.release-notes/5.v7.2.md | 2 +- docs/9.release-notes/6.v7.1.md | 6 +- docs/9.release-notes/7.v7.0.md | 12 +- docs/9.release-notes/_8.0_changelog.md | 16 + docs/9.release-notes/_dir.yml | 2 +- docs/_curl-spell.md | 2 +- docs/context.rst | 2 +- docs/{assets => public}/dipdup-flow.png | Bin docs/{assets => public}/dipdup-flow.svg | 0 docs/{assets => public}/dipdup-new.png | Bin docs/{assets => public}/dipdup.png | Bin docs/{assets => public}/dipdup.svg | 0 .../{assets => public}/metadata_interface.svg | 0 docs/{assets => public}/operation-bcd.png | Bin docs/{assets => public}/operation-config.png | Bin .../troubleshooting-bcd.png | Bin docs/public/vscode-autocomplete.png | Bin 0 -> 49502 bytes pdm.lock | 872 +++++------- pyproject.toml | 77 +- requirements.txt | 63 +- schemas/dipdup-3.0.json | 69 +- scripts/demos.py | 18 +- scripts/docs.py | 53 +- src/demo_blank/Makefile | 11 +- src/demo_blank/deploy/compose.sqlite.yaml | 1 - src/demo_blank/deploy/compose.swarm.yaml | 3 +- src/demo_blank/deploy/compose.yaml | 5 +- src/demo_blank/pyproject.toml | 20 +- src/demo_evm_events/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - src/demo_evm_events/deploy/compose.swarm.yaml | 3 +- src/demo_evm_events/deploy/compose.yaml | 5 +- src/demo_evm_events/pyproject.toml | 20 +- 
.../types/eth_usdt/evm_events/transfer.py | 2 +- src/demo_evm_transactions/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- src/demo_evm_transactions/deploy/compose.yaml | 5 +- src/demo_evm_transactions/pyproject.toml | 20 +- .../eth_usdt/evm_transactions/transfer.py | 2 +- src/demo_evm_uniswap/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- src/demo_evm_uniswap/deploy/compose.yaml | 5 +- src/demo_evm_uniswap/pyproject.toml | 20 +- .../types/factory/evm_events/pool_created.py | 2 +- .../types/pool/evm_events/burn.py | 2 +- .../types/pool/evm_events/collect.py | 2 +- .../types/pool/evm_events/flash.py | 2 +- .../types/pool/evm_events/initialize.py | 2 +- .../types/pool/evm_events/mint.py | 2 +- .../types/pool/evm_events/swap.py | 2 +- .../position_manager/evm_events/collect.py | 2 +- .../evm_events/decrease_liquidity.py | 2 +- .../evm_events/increase_liquidity.py | 2 +- .../position_manager/evm_events/transfer.py | 2 +- src/demo_starknet_events/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- src/demo_starknet_events/deploy/compose.yaml | 5 +- src/demo_starknet_events/pyproject.toml | 20 +- .../stark_usdt/starknet_events/transfer.py | 2 +- src/demo_tezos_auction/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- src/demo_tezos_auction/deploy/compose.yaml | 5 +- src/demo_tezos_auction/pyproject.toml | 20 +- .../tzcolors_auction/tezos_parameters/bid.py | 2 +- .../tezos_parameters/create_auction.py | 2 +- .../tezos_parameters/withdraw.py | 2 +- .../types/tzcolors_auction/tezos_storage.py | 2 +- src/demo_tezos_big_maps/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- src/demo_tezos_big_maps/deploy/compose.yaml | 5 +- .../handlers/on_update_records.py | 2 +- src/demo_tezos_big_maps/pyproject.toml | 20 +- .../tezos_big_maps/store_expiry_map_key.py | 2 +- 
.../tezos_big_maps/store_expiry_map_value.py | 2 +- .../tezos_big_maps/store_records_key.py | 2 +- .../tezos_big_maps/store_records_value.py | 2 +- src/demo_tezos_dao/Makefile | 11 +- src/demo_tezos_dao/deploy/compose.sqlite.yaml | 1 - src/demo_tezos_dao/deploy/compose.swarm.yaml | 3 +- src/demo_tezos_dao/deploy/compose.yaml | 5 +- src/demo_tezos_dao/pyproject.toml | 20 +- .../registry/tezos_parameters/propose.py | 2 +- .../types/registry/tezos_storage.py | 2 +- src/demo_tezos_dex/Makefile | 11 +- src/demo_tezos_dex/deploy/compose.sqlite.yaml | 1 - src/demo_tezos_dex/deploy/compose.swarm.yaml | 3 +- src/demo_tezos_dex/deploy/compose.yaml | 5 +- src/demo_tezos_dex/pyproject.toml | 20 +- .../fa12_token/tezos_parameters/transfer.py | 2 +- .../types/fa12_token/tezos_storage.py | 2 +- .../fa2_token/tezos_parameters/transfer.py | 2 +- .../types/fa2_token/tezos_storage.py | 2 +- .../tezos_parameters/divest_liquidity.py | 2 +- .../tezos_parameters/invest_liquidity.py | 2 +- .../tezos_parameters/tez_to_token_payment.py | 2 +- .../tezos_parameters/token_to_tez_payment.py | 2 +- .../quipu_fa12/tezos_parameters/transfer.py | 2 +- .../tezos_parameters/withdraw_profit.py | 2 +- .../types/quipu_fa12/tezos_storage.py | 2 +- .../tezos_parameters/divest_liquidity.py | 2 +- .../tezos_parameters/invest_liquidity.py | 2 +- .../tezos_parameters/tez_to_token_payment.py | 2 +- .../tezos_parameters/token_to_tez_payment.py | 2 +- .../quipu_fa2/tezos_parameters/transfer.py | 2 +- .../tezos_parameters/withdraw_profit.py | 2 +- .../types/quipu_fa2/tezos_storage.py | 2 +- src/demo_tezos_domains/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- src/demo_tezos_domains/deploy/compose.yaml | 5 +- .../handlers/on_update_records.py | 2 +- src/demo_tezos_domains/pyproject.toml | 20 +- .../tezos_big_maps/store_expiry_map_key.py | 2 +- .../tezos_big_maps/store_expiry_map_value.py | 2 +- .../tezos_big_maps/store_records_key.py | 2 +- 
.../tezos_big_maps/store_records_value.py | 2 +- src/demo_tezos_etherlink/Makefile | 11 +- src/demo_tezos_etherlink/deploy/.env.default | 2 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- src/demo_tezos_etherlink/deploy/compose.yaml | 5 +- .../deploy/sqlite.env.default | 2 +- .../deploy/swarm.env.default | 2 +- src/demo_tezos_etherlink/dipdup.yaml | 8 +- src/demo_tezos_etherlink/handlers/batch.py | 12 + src/demo_tezos_etherlink/pyproject.toml | 20 +- .../types/rollup/tezos_parameters/default.py | 6 +- .../types/rollup/tezos_storage.py | 2 +- .../ticket_helper/tezos_parameters/default.py | 6 +- .../types/ticket_helper/tezos_storage.py | 5 +- .../ticketer/tezos_parameters/deposit.py | 2 +- .../ticketer/tezos_parameters/withdraw.py | 6 +- .../types/ticketer/tezos_storage.py | 21 +- src/demo_tezos_events/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- src/demo_tezos_events/deploy/compose.yaml | 5 +- src/demo_tezos_events/pyproject.toml | 20 +- .../events_contract/tezos_events/move.py | 2 +- .../events_contract/tezos_events/roll.py | 2 +- src/demo_tezos_factories/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- src/demo_tezos_factories/deploy/compose.yaml | 5 +- src/demo_tezos_factories/pyproject.toml | 20 +- .../types/factory/tezos_storage.py | 2 +- .../types/token/tezos_parameters/transfer.py | 2 +- .../types/token/tezos_storage.py | 2 +- src/demo_tezos_head/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - src/demo_tezos_head/deploy/compose.swarm.yaml | 3 +- src/demo_tezos_head/deploy/compose.yaml | 5 +- src/demo_tezos_head/pyproject.toml | 20 +- src/demo_tezos_nft_marketplace/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- .../deploy/compose.yaml | 5 +- src/demo_tezos_nft_marketplace/pyproject.toml | 20 +- .../tezos_parameters/cancel_swap.py | 2 +- .../hen_minter/tezos_parameters/collect.py | 2 +- 
.../hen_minter/tezos_parameters/mint_objkt.py | 2 +- .../types/hen_minter/tezos_parameters/swap.py | 2 +- .../types/hen_minter/tezos_storage.py | 2 +- .../types/hen_objkts/tezos_parameters/mint.py | 2 +- .../types/hen_objkts/tezos_storage.py | 2 +- src/demo_tezos_raw/Makefile | 11 +- src/demo_tezos_raw/deploy/compose.sqlite.yaml | 1 - src/demo_tezos_raw/deploy/compose.swarm.yaml | 3 +- src/demo_tezos_raw/deploy/compose.yaml | 5 +- src/demo_tezos_raw/pyproject.toml | 20 +- src/demo_tezos_token/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- src/demo_tezos_token/deploy/compose.yaml | 5 +- src/demo_tezos_token/pyproject.toml | 20 +- .../types/tzbtc/tezos_parameters/mint.py | 2 +- .../types/tzbtc/tezos_parameters/transfer.py | 2 +- .../types/tzbtc/tezos_storage.py | 2 +- src/demo_tezos_token_balances/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- .../deploy/compose.yaml | 5 +- src/demo_tezos_token_balances/pyproject.toml | 20 +- src/demo_tezos_token_transfers/Makefile | 11 +- .../deploy/compose.sqlite.yaml | 1 - .../deploy/compose.swarm.yaml | 3 +- .../deploy/compose.yaml | 5 +- src/demo_tezos_token_transfers/pyproject.toml | 20 +- src/dipdup/abi/evm.py | 2 +- src/dipdup/api.py | 1 + src/dipdup/cli.py | 138 +- src/dipdup/codegen/__init__.py | 12 +- src/dipdup/config/__init__.py | 15 +- src/dipdup/config/evm.py | 2 +- src/dipdup/config/starknet.py | 2 +- src/dipdup/config/tezos.py | 2 +- src/dipdup/context.py | 10 +- src/dipdup/database.py | 31 +- src/dipdup/datasources/_subsquid.py | 18 +- src/dipdup/datasources/_web3.py | 18 +- src/dipdup/datasources/tezos_tzkt.py | 3 +- src/dipdup/dipdup.py | 21 +- src/dipdup/env.py | 2 +- src/dipdup/fetcher.py | 31 +- src/dipdup/fields.py | 3 +- src/dipdup/hasura.py | 10 +- src/dipdup/http.py | 5 +- src/dipdup/index.py | 26 +- src/dipdup/indexes/evm.py | 3 +- src/dipdup/indexes/evm_events/fetcher.py | 23 +- src/dipdup/indexes/evm_events/index.py | 
2 + src/dipdup/indexes/evm_node.py | 14 + src/dipdup/indexes/evm_subsquid.py | 17 +- .../indexes/evm_transactions/fetcher.py | 15 +- src/dipdup/indexes/evm_transactions/index.py | 2 + src/dipdup/indexes/starknet_events/fetcher.py | 19 +- src/dipdup/indexes/starknet_events/index.py | 2 + src/dipdup/indexes/starknet_node.py | 18 +- src/dipdup/indexes/starknet_subsquid.py | 17 +- src/dipdup/indexes/tezos_big_maps/fetcher.py | 21 +- src/dipdup/indexes/tezos_big_maps/index.py | 28 +- src/dipdup/indexes/tezos_events/fetcher.py | 7 +- src/dipdup/indexes/tezos_events/index.py | 1 + .../indexes/tezos_operations/fetcher.py | 14 +- .../indexes/tezos_token_transfers/fetcher.py | 14 +- .../indexes/tezos_token_transfers/index.py | 1 + src/dipdup/indexes/tezos_tzkt.py | 15 +- src/dipdup/install.py | 40 +- src/dipdup/package.py | 18 +- src/dipdup/performance.py | 27 +- src/dipdup/project.py | 2 +- src/dipdup/projects/base/Makefile.j2 | 24 +- .../base/deploy/compose.sqlite.yaml.j2 | 1 - .../base/deploy/compose.swarm.yaml.j2 | 3 +- .../projects/base/deploy/compose.yaml.j2 | 5 +- src/dipdup/projects/base/pyproject.toml.j2 | 18 +- .../handlers/on_update_records.py.j2 | 2 +- .../handlers/on_update_records.py.j2 | 2 +- .../demo_tezos_etherlink/dipdup.yaml.j2 | 8 +- src/dipdup/sql/dipdup_head_status.sql | 9 - src/dipdup/sql/dipdup_status.sql | 39 + src/dipdup/test.py | 3 +- tests/configs/demo_tezos_etherlink.yaml | 8 +- tests/test_demos.py | 9 +- tests/test_index/test_tzkt_operations.py | 2 +- tests/test_rollback.py | 4 +- 310 files changed, 2275 insertions(+), 3073 deletions(-) rename docs/1.getting-started/{9.hooks.md => 10.hooks.md} (93%) rename docs/{6.deployment/1.database.md => 1.getting-started/5.database.md} (50%) rename docs/1.getting-started/{5.models.md => 6.models.md} (100%) rename docs/1.getting-started/{6.datasources.md => 7.datasources.md} (82%) rename docs/1.getting-started/{7.indexes.md => 8.indexes.md} (98%) rename docs/1.getting-started/{8.handlers.md => 9.handlers.md} 
(100%) create mode 100644 docs/15.glossary.md rename docs/{15.thanks.md => 16.thanks.md} (100%) rename docs/3.datasources/{4.evm_subsquid.md => 1.evm_subsquid.md} (100%) rename docs/3.datasources/{5.http.md => 10.http.md} (100%) rename docs/3.datasources/{3.evm_node.md => 2.evm_node.md} (100%) rename docs/3.datasources/{1.abi_etherscan.md => 3.abi_etherscan.md} (100%) rename docs/3.datasources/{8.starknet_subsquid.md => 4.starknet_subsquid.md} (100%) rename docs/3.datasources/{7.starknet_node.md => 5.starknet_node.md} (100%) rename docs/3.datasources/{9.tezos_tzkt.md => 6.tezos_tzkt.md} (100%) rename docs/3.datasources/{10.tzip_metadata.md => 7.tzip_metadata.md} (87%) rename docs/3.datasources/{2.coinbase.md => 8.coinbase.md} (100%) rename docs/3.datasources/{6.ipfs.md => 9.ipfs.md} (100%) rename docs/{6.deployment/2.docker.md => 5.advanced/1.docker.md} (81%) delete mode 100644 docs/5.advanced/1.reindexing.md rename docs/5.advanced/{4.metadata-interface.md => 11.metadata-interface.md} (96%) create mode 100644 docs/5.advanced/2.environment-variables.md delete mode 100644 docs/5.advanced/2.feature-flags.md create mode 100644 docs/5.advanced/3.monitoring.md delete mode 100644 docs/5.advanced/3.sql.md rename docs/5.advanced/{5.performance.md => 4.performance.md} (100%) rename docs/{6.deployment/6.backups.md => 5.advanced/5.backups.md} (100%) create mode 100644 docs/5.advanced/6.sqd-cloud.md delete mode 100644 docs/6.deployment/3.sentry.md delete mode 100644 docs/6.deployment/4.prometheus.md delete mode 100644 docs/6.deployment/5.logging.md delete mode 100644 docs/6.deployment/7.monitoring.md delete mode 100644 docs/6.deployment/_dir.yml rename docs/{assets => public}/dipdup-flow.png (100%) rename docs/{assets => public}/dipdup-flow.svg (100%) rename docs/{assets => public}/dipdup-new.png (100%) rename docs/{assets => public}/dipdup.png (100%) rename docs/{assets => public}/dipdup.svg (100%) rename docs/{assets => public}/metadata_interface.svg (100%) rename 
docs/{assets => public}/operation-bcd.png (100%) rename docs/{assets => public}/operation-config.png (100%) rename docs/{assets => public}/troubleshooting-bcd.png (100%) create mode 100644 docs/public/vscode-autocomplete.png create mode 100644 src/demo_tezos_etherlink/handlers/batch.py delete mode 100644 src/dipdup/sql/dipdup_head_status.sql create mode 100644 src/dipdup/sql/dipdup_status.sql diff --git a/CHANGELOG.md b/CHANGELOG.md index 1ed71a29a..511eba6fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,35 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog], and this project adheres to [Semantic Versioning]. +Releases prior to 7.0 have been removed from this file to declutter search results; see the [archived copy](https://github.com/dipdup-io/dipdup/blob/8.0.0b5/CHANGELOG.md) for the full list. + +## [8.0.0] - 2024-09-10 + +### Added + +- cli: Added `-C` option, a shorthand for `-c . -c configs/dipdup..yaml`. +- database: Added `dipdup_status` view to the schema. + +### Fixed + +- cli: Don't update existing installation in `self install` command unless asked to. +- cli: Fixed env files not being loaded in some commands. +- install: Fixed reinstalling package when `--force` flag is used. +- package: Create package in-place if cwd equals package name. +- performance: Add index name to fetcher and realtime queues. +- subsquid: Fixed missing entry in `dipdup_head` internal table. +- tezos.big_maps: Fixed logging status message in `skip_history` mode. +- tezos.big_maps: Respect order of handlers in `skip_history` mode. + +### Removed + +- config: Removed `advanced.skip_version_check` flag; use `DIPDUP_NO_VERSION_CHECK` environment variable. +- database: Removed `dipdup_head_status` view; use `dipdup_status` view instead. + +### Performance + +- database: Set `synchronous=NORMAL` and `journal_mode=WAL` pragmas for on-disk SQLite databases. 
+ ## [8.0.0b5] - 2024-08-09 ### Added @@ -220,19 +249,6 @@ The format is based on [Keep a Changelog], and this project adheres to [Semantic - performance: Decrease main loop and node polling intervals. - performance: Drop caches when all indexes have reached realtime. -## [6.5.16] - 2024-03-07 - -This is the last release in the 6.5 branch. Please update to 7.x to get the latest features and bug fixes. - -### Fixed - -- tzkt: Don't use deprecated `/events` WebSockets endpoint. - -### Other - -- deps: Updated pytezos to 3.11.3. -- metadata: Added `oxfordnet` to supported networks. - ## [7.4.0] - 2024-02-20 ### Added @@ -300,12 +316,6 @@ This is the last release in the 6.5 branch. Please update to 7.x to get the late - evm.node: Fixed crash on anonymous event logs during the last mile indexing. - evm.node: Raise an exception when no realtime messages have been received in `http.connection_timeout` seconds. -## [6.5.15] - 2023-12-01 - -### Other - -- deps: Updated pytezos to 3.10.3. - ## [7.2.0] - 2023-11-30 ### Added @@ -355,12 +365,6 @@ This is the last release in the 6.5 branch. Please update to 7.x to get the late - cli: Fixed `DIPDUP_DEBUG` not being applied to the package logger. - tezos.tzkt.token_transfers: Fixed filtering transfers by token_id. -## [6.5.14] - 2023-10-20 - -### Fixed - -- token_transfer: Fixed filtering transfers by token_id. - ## [7.0.2] - 2023-10-10 ### Added @@ -372,12 +376,6 @@ This is the last release in the 6.5 branch. Please update to 7.x to get the late - cli: Fixed `schema wipe` command for SQLite databases. - tezos.tzkt: Fixed regression in `get_transactions` method pagination. -## [6.5.13] - 2023-10-10 - -### Fixed - -- tzkt: Fixed regression in `get_transactions` method pagination. - ## [7.0.1] - 2023-09-30 ### Added @@ -406,14 +404,6 @@ This is the last release in the 6.5 branch. Please update to 7.x to get the late - tezos.tzkt.events: Fixed parsing contract event data. 
- tezos.tzkt.operations: Fixed parsing operations with empty parameters. -## [6.5.12] - 2023-09-15 - -### Fixed - -- tzkt: Fixed issue with processing rollbacks while sync is in progress. -- tzkt: Fixed operation matching when contract code hash specified as a string. -- tzkt: Fixed parsing contract event data. - ## [7.0.0rc5] - 2023-09-06 ### Fixed @@ -422,14 +412,6 @@ This is the last release in the 6.5 branch. Please update to 7.x to get the late - evm.subsquid.events: Sync to `last_level` if specified in config. - evm.node: Set `timestamp` field to the block timestamp. -## [6.5.11] - 2023-09-02 - -### Fixed - -- index: Fixed crash when parsing typed transactions with empty parameter. -- tzkt: Fixed pagination when requesting transactions. -- tzkt: Use cursor iteration where possible. - ## [7.0.0rc4] - 2023-08-23 ### Added @@ -468,12 +450,6 @@ This is the last release in the 6.5 branch. Please update to 7.x to get the late - package: Don't create empty pyproject.toml during init. - package: Fixed discovery of the package when workdir is project root. -## [6.5.10] - 2023-08-02 - -### Fixed - -- index: Remove Python limitation on large int<->str conversions. - ## [7.0.0rc2] - 2023-07-26 ### Fixed @@ -537,1037 +513,13 @@ This is the last release in the 6.5 branch. Please update to 7.x to get the late - tzkt: Request plain values instead of mappings from TzKT when possible. -## [6.5.9] - 2023-07-11 - -### Fixed - -- tzkt: Optimized queries for `operation_unfiltered` index. - -## [6.5.8] - 2023-06-28 - -### Fixed - -- cli: Fixed `init` crash when package name is equal to one of the project typenames. - -## [6.5.7] - 2023-05-30 - -### Added - -- config: Added `advanced.decimal_precision` option to adjust decimal context precision. - -### Fixed - -- database: Fixed `OperationalError` raised in some cases after calling `bulk_create`. -- database: Allow running project scripts and queries on SQLite. -- database: Don't cleanup model updates on every loop. 
-- http: Mark `asyncio.TimeoutError` exception as safe to retry. - -### Other - -- http: Deserialize JSON responses with `orjson`. - -## [6.5.6] - 2023-05-02 - -### Fixed - -- config: Fixed crash due to incorrect parsing of `event` index definitions. -- http: Fixed waiting for response indefinitely when IPFS hash is not available. - -### Other - -- ci: Slim Docker image updated to Alpine 3.17. -- metadata: Added `nairobinet` to supported networks. - -## [6.5.5] - 2023-04-17 - -### Fixed - -- config: Enable early realtime mode when config contains bigmap indexes with `skip_history`. -- http: Fixed crash when using custom datasources. -- index: Allow mixing `source` and `entrypoint` filters in `operation` index pattern. - -### Other - -- ci: Default git branch switched to `next`. - -## [6.5.4] - 2023-03-31 - -### Fixed - -- config: Fixed incorrest parsing of `token_transfer` index filters. - -### Other - -- deps: Updated pytezos to 3.9.0. - -## [6.5.3] - 2023-03-28 - -### Fixed - -- cli: Don't enforce logging `DeprecationWarning` warnings. -- cli: Fixed `BrokenPipeError` messages when interrupting with DipDup with SIGINT. -- config: Fixed crash when `token_transfer` index has `from` or `to` filter. - -### Security - -- hasura: Forbid using Hasura instances affected by [GHSA-c9rw-rw2f-mj4x](https://github.com/hasura/graphql-engine/security/advisories/GHSA-c9rw-rw2f-mj4x). - -## [6.5.2] - 2023-03-09 - -### Fixed - -- codegen: Fixed type generation for contracts with "default" entrypoint. -- metadata: Add "mumbainet" to available networks. -- sentry: Fixed bug leading to crash reports not being sent in some cases. -- sentry: Fixed crash report grouping. - -### Deprecated - -- ci: `-slim` images will be based on Ubuntu instead of Alpine in the next major release. - -## [6.5.1] - 2023-02-21 - -### Fixed - -- codegen: Fixed bug leading to incorrect imports in generated callbacks in some cases. -- codegen: Fixed validation of created package after `dipdup init`. 
-- config: Allow using empty string as default env (`{DEFAULT_EMPTY:-}`). - -### Other - -- deps: Updated pydantic to 1.10.5 -- deps: Updated datamodel-code-generator to 0.17.1 -- deps: Updated tortoise-orm to 0.19.3 -- deps: Updated pytezos to 3.8.0 - -## [6.5.0] - 2023-01-28 - -### Added - -- hasura: Apply arbitrary metadata from `hasura` project directory. -- config: Added `allow_inconsistent_metadata` option to `hasura` section. - -### Fixed - -- config: Do not include coinbase datasource credentials in config repr. -- database: Fixed crash when schema generation should fail with `schema_modified`. -- hasura: Stop using deprecated schema/metadata API. -- index: Fixed unnecessary prefetching of migration originations in `operation` index. -- index: Remove disabled indexes from the dispatcher queue. -- sentry: Flush and reopen session daily. -- tzkt: Fixed `OperationData.type` field value for migration originations. -- tzkt: Added missing `last_level` argument to migration origination fetching methods. - -### Other - -- tzkt: Updated current testnet protocol (`limanet`). -- deps: Updated asyncpg to 0.27.0 -- deps: Updated hasura to 2.17.0 - -## [6.4.3] - 2023-01-05 - -### Fixed - -- context: Fixed order of `add_contract` method arguments. -- index: Fixed matching operations when both `address` and `code_hash` filters are specified. -- sentry: Fixed sending crash reports when DSN is not set implicitly. -- sentry: Increase event length limit. - -## [6.4.2] - 2022-12-31 - -### Added - -- config: Added `http.ratelimit_sleep` option to set fixed sleep time on 429 responses. -- context: Allow adding contracts by code hash in runtime. - -### Fixed - -- http: Fixed merging user-defined HTTP settings and datasource defaults. -- tzkt: Fixed iterating over big map keys. - -## [6.4.1] - 2022-12-22 - -### Fixed - -- models: Fixed package model detection. - -## [6.4.0] - 2022-12-20 - -### Fixed - -- cli: `update` and `uninstall` commands no longer require a valid config. 
-- cli: Fixed a regression in `new` command leading to crash with `TypeError`. -- config: Fixed `jobs` section deserialization. -- database: Ignore abstract models during module validation. - -## [6.4.0rc1] - 2022-12-09 - -### Added - -- config: Added optional `code_hash` field to contract config. -- context: Added `first_level` and `last_level` arguments to `ctx.add_index` methods. -- index: Filtering by `code_hash` is available for `operation` index. -- tzkt: Added datasource methods `get_contract_address` and `get_contract_hashes`. -- tzkt: Originations and operations now can be fetched by contract code hashes. -- tzkt: Added `sender_code_hash` and `target_code_hash` fields to `OperationData` model. - -### Fixed - -- codegen: Unresolved index templates are now correctly processed during types generation. -- demos: Fixed outdated `demo_dao` project. -- http: Fixed a crash when datasource URL contains trailing slash. -- metadata: Add `limanet` to supported networks. -- projects: Do not scaffold an outdated `poetry.lock`. - -### Changed - -- demos: Demos were renamed to better indicate their purpose. -- exceptions: `FrameworkException` is raised instead of plain `RuntimeError` when a framework error occurs. -- exceptions: Known exceptions are inherited from `FrameworkError`. -- tzkt: Some datasource methods have changed their signatures. - -### Deprecated - -- config: `similar_to.address` filter is an alias for `originated_contract.code_hash` and will be removed in the next major release. -- config: `DipDupError` is an alias for `FrameworkError` and will be removed in the next major release. - -## [6.3.1] - 2022-11-25 - -### Fixed - -- cli: Do not apply cli hacks on module import. -- codegen: Include PEP 561 marker in generated packages. -- codegen: Untyped originations are now correctly handled. -- codegen: Fixed `alias` config field having no effect on originations. -- codegen: Fixed optional arguments in generated callbacks. 
-- config: Suggest snake_case for package name. -- config: Fixed crash with `RuntimeError` when index has no subscriptions. -- http: Limit aiohttp sessions to specific base URL. -- index: Do not deserialize originations matched by the `source` filter. -- index: Wrap storage deserialization exceptions with `InvalidDataError`. -- projects: Fixed Hasura environment in docker-compose examples. - -### Security - -- hasura: Forbid using Hasura instances running vulnerable versions ([GHSA-g7mj-g7f4-hgrg](https://github.com/hasura/graphql-engine/security/advisories/GHSA-g7mj-g7f4-hgrg)) - -### Other - -- ci: `mypy --strict` is now enforced on a codebase. -- ci: Finished migration to `pytest`. - -## [6.3.0] - 2022-11-15 - -### Added - -- context: Added `execute_sql_query` method to run queries from `sql` project directory. -- context: `execute_sql` method now accepts arbitrary arguments to format SQL script (unsafe, use with caution). -- index: New filters for `token_transfer` index. - -### Fixed - -- cli: Fixed missing log messages from `ctx.logger`. -- codegen: Better PEP 8 compatibility of generated callbacks. -- context: Fixed SQL scripts executed in the wrong order. -- context: Fixed `execute_sql` method crashes when the path is not a directory. -- database: Fixed crash with `CannotConnectNowError` before establishing the database connection. -- database: Fixed crash when using F expressions inside versioned transactions. -- http: Fixed caching datasource responses when `replay_path` contains tilde. -- http: Adjusted per-datasource default config values. -- project: Use the latest stable version instead of hardcoded values. -- tzkt: Fixed deserializing of `EventData` and `OperationData` models. -- tzkt: Fixed matching migration originations by address. - -### Deprecated - -- ci: `pytezos` extra and corresponding Docker image are deprecated. - -## [6.2.0] - 2022-10-12 - -### Added - -- cli: `new` command to create a new project interactively. 
-- cli: `install/update/uninstall` commands to manage local DipDup installation. -- index: New index kind `event` to process contract events. -- install: New interactive installer based on pipx (`install.py` or `dipdup-install`). - -### Fixed - -- cli: Fixed commands that don't require a valid config yet crash with `ConfigurationError`. -- codegen: Fail on demand when `datamodel-codegen` is not available. -- codegen: Fixed Jinja2 template caching. -- config: Allow `sentry.dsn` field to be empty. -- config: Fixed greedy environment variable regex. -- hooks: Raise a `FeatureAvailabilityHook` instead of a warning when trying to execute hooks on SQLite. - -### Improved - -- cli: Detect `src/` layout when guessing package path. -- codegen: Improved cross-platform compatibility. -- config: `sentry.user_id` option to set user ID for Sentry (affects release adoption data). -- sentry: Detect environment when not set in config (docker/gha/tests/local) -- sentry: Expose more tags under the `dipdup` namespace. - -### Performance - -- cli: Up to 5x faster startup for some commands. - -### Security - -- sentry: Prevent Sentry from leaking hostname if `server_name` is not set. -- sentry: Notify about using Sentry when DSN is set or crash reporting is enabled. - -### Other - -- ci: A significantly faster execution of GitHub Actions. -- docs: Updated "Contributing Guide" page. - -## [6.1.3] - 2022-09-21 - -### Added - -- sentry: Enable crash-free session reporting. - -### Fixed - -- metadata: Updated protocol aliases. -- sentry: Unwrap `CallbackError` traceback to fix event grouping. -- sentry: Hide "attempting to send..." message on shutdown. - -### Other - -- ci: Do not build default and `-pytezos` nightly images. - -## [6.1.2] - 2022-09-16 - -### Added - -- config: Added `alias` field to operation pattern items. -- tzkt: Added quote field `gbp`. - -### Fixed - -- config: Require aliases for multiple operations with the same entrypoint. 
-- http: Raise `InvalidRequestError` on 204 No Content responses. -- tzkt: Verify API version on datasource initialization. -- tzkt: Remove deprecated block field `priority`. - -## [6.1.1] - 2022-09-01 - -### Fixed - -- ci: Lock Pydantic to 1.9.2 to avoid breaking changes in dataclasses. - -## [6.1.0] - 2022-08-30 - -### Added - -- ci: Build `arm64` images for M1/M2 silicon. -- ci: Build `-slim` images based on Alpine Linux. -- ci: Introduced official MacOS support. -- ci: Introduced interactive installer (dipdup.io/install.py). - -## [6.0.1] - 2022-08-19 - -### Fixed - -- codegen: Fixed invalid `models.py` template. -- context: Do not wrap known exceptions with `CallbackError`. -- database: Raise `DatabaseConfigurationError` when backward relation name equals table name. -- database: Wrap schema wiping in a transaction to avoid orphaned tables in the immune schema. -- hasura: Fixed processing M2M relations. -- sentry: Fixed "invalid value `environment`" error. -- sentry: Ignore events from project callbacks when `crash_reporting` is enabled. - -## [6.0.0] - 2022-08-08 - -This release contains no changes except for the version number. - -## [6.0.0rc2] - 2022-08-06 - -### Added - -- config: Added `advanced.crash_reporting` flag to enable reporting crashes to Baking Bad. -- dipdup: Save Sentry crashdump in `/tmp/dipdup/crashdumps/XXXXXXX.json` on a crash. - -### Fixed - -- config: Do not perform env variable substitution in commented-out lines. - -### Removed - -- cli: `--logging-config` option is removed. -- cli: All `run` command flags are removed. Use the `advanced` section of the config. -- cli: `cache show` and `cache clear` commands are removed. -- config: `http.cache` flag is removed. - -## [6.0.0-rc1] - 2022-07-26 - -### Added - -- cli: Added `config export --full` flag to resolve templates before printing config. -- config: Added `advanced.rollback_depth` field, a number of levels to keep in a database for rollback. 
-- context: Added `rollback` method to perform database rollback. -- database: Added an internal `ModelUpdate` model to store the latest database changes. - -### Fixed - -- prometheus: Fixed updating `dipdup_index_handlers_matched_total` metric. - -### Changed - -- codegen: `on_index_rollback` hook calls `ctx.rollback` by default. -- database: Project models must be subclassed from `dipdup.models.Model` -- database: `bulk_create` and `bulk_update` model methods are no longer supported. - -### Removed - -- hooks: Removed deprecated `on_rollback` hook. -- index: Do not try to avoid single-level rollbacks by comparing operation hashes. - -## [5.2.5] - 2022-07-26 - -### Fixed - -- index: Fixed crash when adding an index with new subscriptions in runtime. - -## [5.2.4] - 2022-07-17 - -### Fixed - -- cli: Fixed logs being printed to stderr instead of stdout. -- config: Fixed job scheduler not starting when config contains no indexes. - -## [5.2.3] - 2022-07-07 - -### Added - -- sentry: Allow customizing `server_name` and `release` tags with corresponding fields in Sentry config. - -### Fixed - -- cli: Fixed `hasura configure` command crash when models have empty `Meta.table`. -- config: Removed secrets from config `__repr__`. - -## [5.2.2] - 2022-07-03 - -### Fixed - -- hasura: Fixed metadata generation. - -## [5.2.1] - 2022-07-02 - -### Fixed - -- cli: Fixed setting default logging level. -- hasura: Fixed metadata generation for relations with a custom field name. -- hasura: Fixed configuring existing instances after changing `camel_case` field in config. - -## [5.2.0] - 2022-06-28 - -### Added - -- config: Added `logging` config field. -- config: Added `hasura.create_source` flag to create PostgreSQL source if missing. - -### Fixed - -- hasura: Do not apply table customizations to tables from other sources. - -### Deprecated - -- cli: `--logging-config` option is deprecated. -- cli: All `run` command flags are deprecated. Use the `advanced` section of the config. 
-- cli: `cache show` and `cache clear` commands are deprecated. -- config: `http.cache` flag is deprecated. - -## [5.1.7] - 2022-06-15 - -### Fixed - -- index: Fixed `token_transfer` index not receiving realtime updates. - -## [5.1.6] - 2022-06-08 - -### Fixed - -- cli: Commands with `--help` option no longer require a working DipDup config. -- index: Fixed crash with `RuntimeError` after continuous realtime connection loss. - -### Performance - -- cli: Lazy import dependencies to speed up startup. - -### Other - -- docs: Migrate docs from GitBook to mdbook. - -## [5.1.5] - 2022-06-05 - -### Fixed - -- config: Fixed crash when rollback hook is about to be called. - -## [5.1.4] - 2022-06-02 - -### Fixed - -- config: Fixed `OperationIndexConfig.types` field being partially ignored. -- index: Allow mixing oneshot and regular indexes in a single config. -- index: Call rollback hook instead of triggering reindex when single-level rollback has failed. -- index: Fixed crash with `RuntimeError` after continuous realtime connection loss. -- tzkt: Fixed `origination` subscription missing when `merge_subscriptions` flag is set. - -### Performance - -- ci: Decrease the size of generic and `-pytezos` Docker images by 11% and 16%, respectively. - -## [5.1.3] - 2022-05-26 - -### Fixed - -- database: Fixed special characters in password not being URL encoded. - -### Performance - -- context: Do not reinitialize config when adding a single index. - -## [5.1.2] - 2022-05-24 - -### Added - -- tzkt: Added `originated_contract_tzips` field to `OperationData`. - -### Fixed - -- jobs: Fixed jobs with `daemon` schedule never start. -- jobs: Fixed failed jobs not throwing exceptions into the main loop. - -### Other - -- database: Tortoise ORM updated to `0.19.1`. - -## [5.1.1] - 2022-05-13 - -### Fixed - -- index: Ignore indexes with different message types on rollback. -- metadata: Add `ithacanet` to available networks. 
- -## [5.1.0] - 2022-05-12 - -### Added - -- ci: Push `X` and `X.Y` tags to the Docker Hub on release. -- cli: Added `config env` command to export env-file with default values. -- cli: Show warning when running an outdated version of DipDup. -- hooks: Added a new hook `on_index_rollback` to perform per-index rollbacks. - -### Fixed - -- index: Fixed fetching `migration` operations. -- tzkt: Fixed possible data corruption when using the `buffer_size` option. -- tzkt: Fixed reconnection due to `websockets` message size limit. - -### Deprecated - -- hooks: The `on_rollback` default hook is superseded by `on_index_rollback` and will be removed later. - -## [5.0.4] - 2022-05-05 - -### Fixed - -- exceptions: Fixed incorrect formatting and broken links in help messages. -- index: Fixed crash when the only index in config is `head`. -- index: Fixed fetching originations during the initial sync. - -## [5.0.3] - 2022-05-04 - -### Fixed - -- index: Fixed crash when no block with the same level arrived after a single-level rollback. -- index: Fixed setting initial index level when `IndexConfig.first_level` is set. -- tzkt: Fixed delayed emitting of buffered realtime messages. -- tzkt: Fixed inconsistent behavior of `first_level`/`last_level` arguments in different getter methods. - -## [5.0.2] - 2022-04-21 - -### Fixed - -- context: Fixed reporting incorrect reindexing reason. -- exceptions: Fixed crash with `FrozenInstanceError` when an exception is raised from a callback. -- jobs: Fixed graceful shutdown of daemon jobs. - -### Improved - -- codegen: Refined `on_rollback` hook template. -- exceptions: Updated help messages for known exceptions. -- tzkt: Do not request reindexing if missing subgroups have matched no handlers. - -## [5.0.1] - 2022-04-12 - -### Fixed - -- cli: Fixed `schema init` command crash with SQLite databases. -- index: Fixed spawning datasources in oneshot mode. -- tzkt: Fixed processing realtime messages. 
- -## [5.0.0] - 2022-04-08 - -This release contains no changes except for the version number. - -## [5.0.0-rc4] - 2022-04-04 - -### Added - -- tzkt: Added ability to process realtime messages with lag. - -## [4.2.7] - 2022-04-02 - -### Fixed - -- config: Fixed `jobs` config section validation. -- hasura: Fixed metadata generation for v2.3.0 and above. -- tzkt: Fixed `get_originated_contracts` and `get_similar_contracts` methods response. - -## [5.0.0-rc3] - 2022-03-28 - -### Added - -- config: Added `custom` section to store arbitrary user data. - -### Fixed - -- config: Fixed default SQLite path (`:memory:`). -- tzkt: Fixed pagination in several getter methods. -- tzkt: Fixed data loss when `skip_history` option is enabled. - -### Removed - -- config: Removed dummy `advanced.oneshot` flag. -- cli: Removed `docker init` command. -- cli: Removed dummy `schema approve --hashes` flag. - -## [5.0.0-rc2] - 2022-03-13 - -### Fixed - -- tzkt: Fixed crash in methods that do not support cursor pagination. -- prometheus: Fixed invalid metric labels. - -## [5.0.0-rc1] - 2022-03-02 - -### Added - -- metadata: Added `metadata_interface` feature flag to expose metadata in TzKT format. -- prometheus: Added ability to expose Prometheus metrics. -- tzkt: Added missing fields to the `HeadBlockData` model. -- tzkt: Added `iter_...` methods to iterate over item batches. - -### Fixed - -- tzkt: Fixed possible OOM while calling methods that support pagination. -- tzkt: Fixed possible data loss in `get_originations` and `get_quotes` methods. - -### Changed - -- tzkt: Added `offset` and `limit` arguments to all methods that support pagination. - -### Removed - -- bcd: Removed `bcd` datasource and config section. - -### Performance - -- dipdup: Use fast `orjson` library instead of built-in `json` where possible. - -## [4.2.6] - 2022-02-25 - -### Fixed - -- database: Fixed generating table names from uppercase model names. 
-- http: Fixed bug that leads to caching invalid responses on the disk. -- tzkt: Fixed processing realtime messages with data from multiple levels. - -## [4.2.5] - 2022-02-21 - -### Fixed - -- database: Do not add the `schema` argument to the PostgreSQL connection string when not needed. -- hasura: Wait for Hasura to be configured before starting indexing. - -## [4.2.4] - 2022-02-14 - -### Added - -- config: Added `http` datasource to making arbitrary http requests. - -### Fixed - -- context: Fixed crash when calling `fire_hook` method. -- context: Fixed `HookConfig.atomic` flag, which was ignored in `fire_hook` method. -- database: Create missing tables even if `Schema` model is present. -- database: Fixed excess increasing of `decimal` context precision. -- index: Fixed loading handler callbacks from nested packages ([@veqtor](https://github.com/veqtor)). - -### Other - -- ci: Added GitHub Action to build and publish Docker images for each PR opened. - -## [4.2.3] - 2022-02-08 - -### Fixed - -- ci: Removed `black 21.12b0` dependency since bug in `datamodel-codegen-generator` is fixed. -- cli: Fixed `config export` command crash when `advanced.reindex` dictionary is present. -- cli: Removed optionals from `config export` output so the result can be loaded again. -- config: Verify `advanced.scheduler` config for the correctness and unsupported features. -- context: Fixed ignored `wait` argument of `fire_hook` method. -- hasura: Fixed processing relation fields with missing `related_name`. -- jobs: Fixed default `apscheduler` config. -- tzkt: Fixed crash occurring when reorg message is the first one received by the datasource. - -## [4.2.2] - 2022-02-01 - -### Fixed - -- config: Fixed `ipfs` datasource config. - -## [4.2.1] - 2022-01-31 - -### Fixed - -- ci: Added `black 21.12b0` dependency to avoid possible conflict with `datamodel-codegen-generator`. 
- -## [4.2.0] - 2022-01-31 - -### Added - -- context: Added `wait` argument to `fire_hook` method to escape current transaction context. -- context: Added `ctx.get__datasource` helpers to avoid type casting. -- hooks: Added ability to configure `apscheduler` with `AdvancedConfig.scheduler` field. -- http: Added `request` method to send arbitrary requests (affects all datasources). -- ipfs: Added `ipfs` datasource to download JSON and binary data from IPFS. - -### Fixed - -- http: Removed dangerous method `close_session`. -- context: Fixed help message of `IndexAlreadyExistsError` exception. - -### Deprecated - -- bcd: Added deprecation notice. - -### Other - -- dipdup: Removed unused internal methods. - -## [4.1.2] - 2022-01-27 - -### Added - -- cli: Added `schema wipe --force` argument to skip confirmation prompt. - -### Fixed - -- cli: Show warning about deprecated `--hashes` argument -- cli: Ignore `SIGINT` signal when shutdown is in progress. -- sentry: Ignore exceptions when shutdown is in progress. - -## [4.1.1] - 2022-01-25 - -### Fixed - -- cli: Fixed stacktraces missing on exception. -- cli: Fixed wrapping `OSError` with `ConfigurationError` during config loading. -- hasura: Fixed printing help messages on `HasuraError`. -- hasura: Preserve a list of sources in Hasura Cloud environments. -- hasura: Fixed `HasuraConfig.source` config option. - -### Changed - -- cli: Unknown exceptions are no longer wrapped with `DipDupError`. - -### Performance - -- hasura: Removed some useless requests. - -## [4.1.0] - 2022-01-24 - -### Added - -- cli: Added `schema init` command to initialize database schema. -- cli: Added `--force` flag to `hasura configure` command. -- codegen: Added support for subpackages inside callback directories. -- hasura: Added `dipdup_head_status` view and REST endpoint. -- index: Added an ability to skip historical data while synchronizing `big_map` indexes. -- metadata: Added `metadata` datasource. 
-- tzkt: Added `get_big_map` and `get_contract_big_maps` datasource methods. - -## [4.0.5] - 2022-01-20 - -### Fixed - -- index: Fixed deserializing manually modified typeclasses. - -## [4.0.4] - 2022-01-17 - -### Added - -- cli: Added `--keep-schemas` flag to `init` command to preserve JSONSchemas along with generated types. - -### Fixed - -- demos: Tezos Domains and Homebase DAO demos were updated from edo2net to mainnet contracts. -- hasura: Fixed missing relations for models with `ManyToManyField` fields. -- tzkt: Fixed parsing storage with nested structures. - -### Performance - -- dipdup: Minor overall performance improvements. - -### Other - -- ci: Cache virtual environment in GitHub Actions. -- ci: Detect CI environment and skip tests that fail in GitHub Actions. -- ci: Execute tests in parallel with `pytest-xdist` when possible. -- ci: More strict linting rules of `flake8`. - -## [4.0.3] - 2022-01-09 - -### Fixed - -- tzkt: Fixed parsing parameter with an optional value. - -## [4.0.2] - 2022-01-06 - -### Added - -- tzkt: Added optional `delegate_address` and `delegate_alias` fields to `OperationData`. - -### Fixed - -- tzkt: Fixed crash due to unprocessed pysignalr exception. -- tzkt: Fixed parsing `OperationData.amount` field. -- tzkt: Fixed parsing storage with top-level boolean fields. - -## [4.0.1] - 2021-12-30 - -### Fixed - -- codegen: Fixed generating storage typeclasses with `Union` fields. -- codegen: Fixed preprocessing contract JSONSchema. -- index: Fixed processing reindexing reason saved in the database. -- tzkt: Fixed processing operations with default entrypoint and empty parameter. -- tzkt: Fixed crash while recursively applying bigmap diffs to the storage. - -### Performance - -- tzkt: Increased speed of applying bigmap diffs to operation storage. - -## [4.0.0] - 2021-12-24 - -This release contains no changes except for the version number. 
- -## [4.0.0-rc3] - 2021-12-20 - -### Fixed - -- cli: Fixed missing `schema approve --hashes` argument. -- codegen: Fixed contract address used instead of an alias when typename is not set. -- tzkt: Fixed processing operations with entrypoint `default`. -- tzkt: Fixed regression in processing migration originations. -- tzkt: Fixed filtering of big map diffs by the path. - -### Removed - -- cli: Removed deprecated `run --oneshot` argument and `clear-cache` command. - -## [4.0.0-rc2] - 2021-12-11 - -### Migration - -- Run `dipdup init` command to generate `on_synchronized` hook stubs. - -### Added - -- hooks: Added `on_synchronized` hook, which fires each time all indexes reach realtime state. - -### Fixed - -- cli: Fixed config not being verified when invoking some commands. -- codegen: Fixed generating callback arguments for untyped operations. -- index: Fixed incorrect log messages, remove duplicate ones. -- index: Fixed crash while processing storage of some contracts. -- index: Fixed matching of untyped operations filtered by `source` field ([@pravin-d](https://github.com/pravin-d)). - -### Performance - -- index: Checks performed on each iteration of the main DipDup loop are slightly faster now. - -## [4.0.0-rc1] - 2021-12-02 - -### Migration - -- Run `dipdup schema approve` command on every database you want to use with 4.0.0-rc1. Running `dipdup migrate` is not necessary since `spec_version` hasn't changed in this release. - -### Added - -- cli: Added `run --early-realtime` flag to establish a realtime connection before all indexes are synchronized. -- cli: Added `run --merge-subscriptions` flag to subscribe to all operations/big map diffs during realtime indexing. -- cli: Added `status` command to print the current status of indexes from the database. -- cli: Added `config export [--unsafe]` command to print config after resolving all links and variables. -- cli: Added `cache show` command to get information about file caches used by DipDup. 
-- config: Added `first_level` and `last_level` optional fields to `TemplateIndexConfig`. These limits are applied after ones from the template itself. -- config: Added `daemon` boolean field to `JobConfig` to run a single callback indefinitely. Conflicts with `crontab` and `interval` fields. -- config: Added `advanced` top-level section. - -### Fixed - -- cli: Fixed crashes and output inconsistency when piping DipDup commands. -- cli: Fixed `schema wipe --immune` flag being ignored. -- codegen: Fixed missing imports in handlers generated during init. -- coinbase: Fixed possible data inconsistency caused by caching enabled for method `get_candles`. -- http: Fixed increasing sleep time between failed request attempts. -- index: Fixed invocation of head index callback. -- index: Fixed `CallbackError` raised instead of `ReindexingRequiredError` in some cases. -- tzkt: Fixed resubscribing when realtime connectivity is lost for a long time. -- tzkt: Fixed sending useless subscription requests when adding indexes in runtime. -- tzkt: Fixed `get_originated_contracts` and `get_similar_contracts` methods whose output was limited to `HTTPConfig.batch_size` field. -- tzkt: Fixed lots of SignalR bugs by replacing `aiosignalrcore` library with `pysignalr`. - -### Changed - -- cli: `dipdup schema wipe` command now requires confirmation when invoked in the interactive shell. -- cli: `dipdup schema approve` command now also causes a recalculation of schema and index config hashes. -- index: DipDup will recalculate respective hashes if reindexing is triggered with `config_modified: ignore` or `schema_modified: ignore` in advanced config. - -### Deprecated - -- cli: `run --oneshot` option is deprecated and will be removed in the next major release. The oneshot mode applies automatically when `last_level` field is set in the index config. -- cli: `clear-cache` command is deprecated and will be removed in the next major release. Use `cache clear` command instead. 
- -### Performance - -- config: Configuration files are loaded 10x times faster. -- index: Number of operations processed by matcher reduced by 40%-95% depending on the number of addresses and entrypoints used. -- tzkt: Rate limit was increased. Try to set `connection_timeout` to a higher value if requests fail with `ConnectionTimeout` exception. -- tzkt: Improved performance of response deserialization. - -## [3.1.3] - 2021-11-15 - -### Fixed - -- codegen: Fixed missing imports in operation handlers. -- codegen: Fixed invalid imports and arguments in big_map handlers. - -## [3.1.2] - 2021-11-02 - -### Fixed - -- Fixed crash occurred during synchronization of big map indexes. - -## [3.1.1] - 2021-10-18 - -### Fixed - -- Fixed loss of realtime subscriptions occurred after TzKT API outage. -- Fixed updating schema hash in `schema approve` command. -- Fixed possible crash occurred while Hasura is not ready. - -## [3.1.0] - 2021-10-12 - -### Added - -- New index class `HeadIndex` (configuration: [`dipdup.config.HeadIndexConfig`](https://github.com/dipdup-io/dipdup/blob/master/src/dipdup/config.py#L778)). Use this index type to handle head (limited block header content) updates. This index type is realtime-only: historical data won't be indexed during the synchronization stage. -- Added three new commands: `schema approve`, `schema wipe`, and `schema export`. Run `dipdup schema --help` command for details. - -### Changed - -- Triggering reindexing won't lead to dropping the database automatically anymore. `ReindexingRequiredError` is raised instead. `--forbid-reindexing` option has become default. -- `--reindex` option is removed. Use `dipdup schema wipe` instead. -- Values of `dipdup_schema.reindex` field updated to simplify querying database. See [`dipdup.enums.ReindexingReason`](https://github.com/dipdup-io/dipdup/blob/master/src/dipdup/enums.py) class for possible values. 
- -### Fixed - -- Fixed `ReindexRequiredError` not being raised when running DipDup after reindexing was triggered. -- Fixed index config hash calculation. Hashes of existing indexes in a database will be updated during the first run. -- Fixed issue in `BigMapIndex` causing the partial loss of big map diffs. -- Fixed printing help for CLI commands. -- Fixed merging storage which contains specific nested structures. - -### Improved - -- Raise `DatabaseConfigurationError` exception when project models are not compatible with GraphQL. -- Another bunch of performance optimizations. Reduced DB pressure, speeded up parallel processing lots of indexes. -- Added initial set of performance benchmarks (run: `./scripts/run_benchmarks.sh`) - -## [3.0.4] - 2021-10-04 - -### Improved - -- A significant increase in indexing speed. - -### Fixed - -- Fixed unexpected reindexing caused by the bug in processing zero- and single-level rollbacks. -- Removed unnecessary file IO calls that could cause `PermissionError` exception in Docker environments. -- Fixed possible violation of block-level atomicity during realtime indexing. - -### Changes - -- Public methods of `TzktDatasource` now return immutable sequences. - -## [3.0.3] - 2021-10-01 - -### Fixed - -- Fixed processing of single-level rollbacks emitted before rolled back head. - -## [3.0.2] - 2021-09-30 - -### Added - -- Human-readable `CHANGELOG.md` 🕺 -- Two new options added to `dipdup run` command: - - `--forbid-reindexing` – raise `ReindexingRequiredError` instead of truncating database when reindexing is triggered for any reason. To continue indexing with existing database run `UPDATE dipdup_schema SET reindex = NULL;` - - `--postpone-jobs` – job scheduler won't start until all indexes are synchronized. - -### Changed - -- Migration to this version requires reindexing. -- `dipdup_index.head_id` foreign key removed. `dipdup_head` table still contains the latest blocks from Websocket received by each datasource. 
- -### Fixed - -- Removed unnecessary calls to TzKT API. -- Fixed removal of PostgreSQL extensions (`timescaledb`, `pgcrypto`) by function `truncate_database` triggered on reindex. -- Fixed creation of missing project package on `init`. -- Fixed invalid handler callbacks generated on `init`. -- Fixed detection of existing types in the project. -- Fixed race condition caused by event emitter concurrency. -- Capture unknown exceptions with Sentry before wrapping to `DipDupError`. -- Fixed job scheduler start delay. -- Fixed processing of reorg messages. - -## [3.0.1] - 2021-09-24 - -### Added - -- Added `get_quote` and `get_quotes` methods to `TzKTDatasource`. - -### Fixed - -- Defer spawning index datasources until initial sync is complete. It helps to mitigate some WebSocket-related crashes, but initial sync is a bit slower now. -- Fixed possible race conditions in `TzKTDatasource`. -- Start `jobs` scheduler after all indexes sync with a current head to speed up indexing. - [keep a changelog]: https://keepachangelog.com/en/1.0.0/ [semantic versioning]: https://semver.org/spec/v2.0.0.html -[Unreleased]: https://github.com/dipdup-io/dipdup/compare/8.0.0b5...HEAD +[Unreleased]: https://github.com/dipdup-io/dipdup/compare/8.0.0...HEAD +[8.0.0]: https://github.com/dipdup-io/dipdup/compare/8.0.0b5...8.0.0 [8.0.0b5]: https://github.com/dipdup-io/dipdup/compare/8.0.0b4...8.0.0b5 [8.0.0b4]: https://github.com/dipdup-io/dipdup/compare/8.0.0b3...8.0.0b4 [8.0.0b3]: https://github.com/dipdup-io/dipdup/compare/8.0.0b2...8.0.0b3 @@ -1583,105 +535,20 @@ This release contains no changes except for the version number. 
[7.5.2]: https://github.com/dipdup-io/dipdup/compare/7.5.1...7.5.2 [7.5.1]: https://github.com/dipdup-io/dipdup/compare/7.5.0...7.5.1 [7.5.0]: https://github.com/dipdup-io/dipdup/compare/7.4.0...7.5.0 -[6.5.16]: https://github.com/dipdup-io/dipdup/compare/6.5.15...6.5.16 [7.4.0]: https://github.com/dipdup-io/dipdup/compare/7.3.2...7.4.0 [7.3.2]: https://github.com/dipdup-io/dipdup/compare/7.3.1...7.3.2 [7.3.1]: https://github.com/dipdup-io/dipdup/compare/7.3.0...7.3.1 [7.3.0]: https://github.com/dipdup-io/dipdup/compare/7.2.2...7.3.0 [7.2.2]: https://github.com/dipdup-io/dipdup/compare/7.2.1...7.2.2 [7.2.1]: https://github.com/dipdup-io/dipdup/compare/7.2.0...7.2.1 -[6.5.15]: https://github.com/dipdup-io/dipdup/compare/6.5.14...6.5.15 [7.2.0]: https://github.com/dipdup-io/dipdup/compare/7.1.1...7.2.0 [7.1.1]: https://github.com/dipdup-io/dipdup/compare/7.1.0...7.1.1 [7.1.0]: https://github.com/dipdup-io/dipdup/compare/7.0.2...7.1.0 -[6.5.14]: https://github.com/dipdup-io/dipdup/compare/6.5.13...6.5.14 [7.0.2]: https://github.com/dipdup-io/dipdup/compare/7.0.1...7.0.2 -[6.5.13]: https://github.com/dipdup-io/dipdup/compare/6.5.12...6.5.13 [7.0.1]: https://github.com/dipdup-io/dipdup/compare/7.0.0...7.0.1 [7.0.0]: https://github.com/dipdup-io/dipdup/compare/7.0.0rc5...7.0.0 -[6.5.12]: https://github.com/dipdup-io/dipdup/compare/6.5.11...6.5.12 [7.0.0rc5]: https://github.com/dipdup-io/dipdup/compare/7.0.0rc4...7.0.0rc5 -[6.5.11]: https://github.com/dipdup-io/dipdup/compare/6.5.10...6.5.11 [7.0.0rc4]: https://github.com/dipdup-io/dipdup/compare/7.0.0rc3...7.0.0rc4 [7.0.0rc3]: https://github.com/dipdup-io/dipdup/compare/7.0.0rc2...7.0.0rc3 -[6.5.10]: https://github.com/dipdup-io/dipdup/compare/6.5.9...6.5.10 [7.0.0rc2]: https://github.com/dipdup-io/dipdup/compare/7.0.0rc1...7.0.0rc2 [7.0.0rc1]: https://github.com/dipdup-io/dipdup/compare/6.5.9...7.0.0rc1 -[6.5.9]: https://github.com/dipdup-io/dipdup/compare/6.5.8...6.5.9 -[6.5.8]: 
https://github.com/dipdup-io/dipdup/compare/6.5.7...6.5.8 -[6.5.7]: https://github.com/dipdup-io/dipdup/compare/6.5.6...6.5.7 -[6.5.6]: https://github.com/dipdup-io/dipdup/compare/6.5.5...6.5.6 -[6.5.5]: https://github.com/dipdup-io/dipdup/compare/6.5.4...6.5.5 -[6.5.4]: https://github.com/dipdup-io/dipdup/compare/6.5.3...6.5.4 -[6.5.3]: https://github.com/dipdup-io/dipdup/compare/6.5.2...6.5.3 -[6.5.2]: https://github.com/dipdup-io/dipdup/compare/6.5.1...6.5.2 -[6.5.1]: https://github.com/dipdup-io/dipdup/compare/6.5.0...6.5.1 -[6.5.0]: https://github.com/dipdup-io/dipdup/compare/6.4.3...6.5.0 -[6.4.3]: https://github.com/dipdup-io/dipdup/compare/6.4.2...6.4.3 -[6.4.2]: https://github.com/dipdup-io/dipdup/compare/6.4.1...6.4.2 -[6.4.1]: https://github.com/dipdup-io/dipdup/compare/6.4.0...6.4.1 -[6.4.0]: https://github.com/dipdup-io/dipdup/compare/6.4.0rc1...6.4.0 -[6.4.0rc1]: https://github.com/dipdup-io/dipdup/compare/6.3.1...6.4.0rc1 -[6.3.1]: https://github.com/dipdup-io/dipdup/compare/6.3.0...6.3.1 -[6.3.0]: https://github.com/dipdup-io/dipdup/compare/6.2.0...6.3.0 -[6.2.0]: https://github.com/dipdup-io/dipdup/compare/6.1.3...6.2.0 -[6.1.3]: https://github.com/dipdup-io/dipdup/compare/6.1.2...6.1.3 -[6.1.2]: https://github.com/dipdup-io/dipdup/compare/6.1.1...6.1.2 -[6.1.1]: https://github.com/dipdup-io/dipdup/compare/6.1.0...6.1.1 -[6.1.0]: https://github.com/dipdup-io/dipdup/compare/6.0.1...6.1.0 -[6.0.1]: https://github.com/dipdup-io/dipdup/compare/6.0.0...6.0.1 -[6.0.0]: https://github.com/dipdup-io/dipdup/compare/6.0.0rc2...6.0.0 -[6.0.0rc2]: https://github.com/dipdup-io/dipdup/compare/6.0.0-rc1...6.0.0rc2 -[6.0.0-rc1]: https://github.com/dipdup-io/dipdup/compare/5.2.5...6.0.0-rc1 -[5.2.5]: https://github.com/dipdup-io/dipdup/compare/5.2.4...5.2.5 -[5.2.4]: https://github.com/dipdup-io/dipdup/compare/5.2.3...5.2.4 -[5.2.3]: https://github.com/dipdup-io/dipdup/compare/5.2.2...5.2.3 -[5.2.2]: https://github.com/dipdup-io/dipdup/compare/5.2.1...5.2.2 
-[5.2.1]: https://github.com/dipdup-io/dipdup/compare/5.2.0...5.2.1 -[5.2.0]: https://github.com/dipdup-io/dipdup/compare/5.1.7...5.2.0 -[5.1.7]: https://github.com/dipdup-io/dipdup/compare/5.1.6...5.1.7 -[5.1.6]: https://github.com/dipdup-io/dipdup/compare/5.1.5...5.1.6 -[5.1.5]: https://github.com/dipdup-io/dipdup/compare/5.1.4...5.1.5 -[5.1.4]: https://github.com/dipdup-io/dipdup/compare/5.1.3...5.1.4 -[5.1.3]: https://github.com/dipdup-io/dipdup/compare/5.1.2...5.1.3 -[5.1.2]: https://github.com/dipdup-io/dipdup/compare/5.1.1...5.1.2 -[5.1.1]: https://github.com/dipdup-io/dipdup/compare/5.1.0...5.1.1 -[5.1.0]: https://github.com/dipdup-io/dipdup/compare/5.0.4...5.1.0 -[5.0.4]: https://github.com/dipdup-io/dipdup/compare/5.0.3...5.0.4 -[5.0.3]: https://github.com/dipdup-io/dipdup/compare/5.0.2...5.0.3 -[5.0.2]: https://github.com/dipdup-io/dipdup/compare/5.0.1...5.0.2 -[5.0.1]: https://github.com/dipdup-io/dipdup/compare/5.0.0...5.0.1 -[5.0.0]: https://github.com/dipdup-io/dipdup/compare/5.0.0-rc4...5.0.0 -[5.0.0-rc4]: https://github.com/dipdup-io/dipdup/compare/5.0.0-rc3...5.0.0-rc4 -[4.2.7]: https://github.com/dipdup-io/dipdup/compare/4.2.6...4.2.7 -[5.0.0-rc3]: https://github.com/dipdup-io/dipdup/compare/5.0.0-rc2...5.0.0-rc3 -[5.0.0-rc2]: https://github.com/dipdup-io/dipdup/compare/5.0.0-rc1...5.0.0-rc2 -[5.0.0-rc1]: https://github.com/dipdup-io/dipdup/compare/4.2.6...5.0.0-rc1 -[4.2.6]: https://github.com/dipdup-io/dipdup/compare/4.2.5...4.2.6 -[4.2.5]: https://github.com/dipdup-io/dipdup/compare/4.2.4...4.2.5 -[4.2.4]: https://github.com/dipdup-io/dipdup/compare/4.2.3...4.2.4 -[4.2.3]: https://github.com/dipdup-io/dipdup/compare/4.2.2...4.2.3 -[4.2.2]: https://github.com/dipdup-io/dipdup/compare/4.2.1...4.2.2 -[4.2.1]: https://github.com/dipdup-io/dipdup/compare/4.2.0...4.2.1 -[4.2.0]: https://github.com/dipdup-io/dipdup/compare/4.1.2...4.2.0 -[4.1.2]: https://github.com/dipdup-io/dipdup/compare/4.1.1...4.1.2 -[4.1.1]: 
https://github.com/dipdup-io/dipdup/compare/4.1.0...4.1.1 -[4.1.0]: https://github.com/dipdup-io/dipdup/compare/4.0.5...4.1.0 -[4.0.5]: https://github.com/dipdup-io/dipdup/compare/4.0.4...4.0.5 -[4.0.4]: https://github.com/dipdup-io/dipdup/compare/4.0.3...4.0.4 -[4.0.3]: https://github.com/dipdup-io/dipdup/compare/4.0.2...4.0.3 -[4.0.2]: https://github.com/dipdup-io/dipdup/compare/4.0.1...4.0.2 -[4.0.1]: https://github.com/dipdup-io/dipdup/compare/4.0.0...4.0.1 -[4.0.0]: https://github.com/dipdup-io/dipdup/compare/4.0.0-rc3...4.0.0 -[4.0.0-rc3]: https://github.com/dipdup-io/dipdup/compare/4.0.0-rc2...4.0.0-rc3 -[4.0.0-rc2]: https://github.com/dipdup-io/dipdup/compare/4.0.0-rc1...4.0.0-rc2 -[4.0.0-rc1]: https://github.com/dipdup-io/dipdup/compare/3.1.3...4.0.0-rc1 -[3.1.3]: https://github.com/dipdup-io/dipdup/compare/3.1.2...3.1.3 -[3.1.2]: https://github.com/dipdup-io/dipdup/compare/3.1.1...3.1.2 -[3.1.1]: https://github.com/dipdup-io/dipdup/compare/3.1.0...3.1.1 -[3.1.0]: https://github.com/dipdup-io/dipdup/compare/3.0.4...3.1.0 -[3.0.4]: https://github.com/dipdup-io/dipdup/compare/3.0.3...3.0.4 -[3.0.3]: https://github.com/dipdup-io/dipdup/compare/3.0.2...3.0.3 -[3.0.2]: https://github.com/dipdup-io/dipdup/compare/3.0.1...3.0.2 -[3.0.1]: https://github.com/dipdup-io/dipdup/compare/3.0.0...3.0.1 -[3.0.0]: https://github.com/dipdup-io/dipdup/releases/tag/3.0.0 diff --git a/Makefile b/Makefile index c3a1325aa..06ddbfcb8 100644 --- a/Makefile +++ b/Makefile @@ -88,13 +88,12 @@ demos: ## Recreate demo projects from templates python scripts/demos.py init ${DEMO} make format lint +demos_refresh: + for demo in `ls src | grep demo | grep -v etherlink`; do cd src/$$demo && dipdup init -b -f && cd ../..; done + make format lint + before_release: ## Prepare for a new release after updating version in pyproject.toml - make format - make lint - make update - make demos - make test - make docs + make format lint update demos test docs jsonschemas: ## Dump config JSON schemas 
python scripts/docs.py dump-jsonschema diff --git a/benchmarks/Makefile b/benchmarks/Makefile index 63b7a79ad..5bd01d480 100644 --- a/benchmarks/Makefile +++ b/benchmarks/Makefile @@ -1,20 +1,20 @@ -SHELL=/bin/bash +SHELL=/usr/bin/zsh DEMO=demo_evm_events run_in_memory: time dipdup -c ../src/${DEMO} -c ./oneshot_${DEMO}.yaml run run_in_postgres: - touch ../src/${DEMO}/deploy/test.env && \ - echo "HASURA_SECRET=test" > ../src/${DEMO}/deploy/test.env && \ - echo "POSTGRES_PASSWORD=test" >> ../src/${DEMO}/deploy/test.env && \ - cd ../src/${DEMO}/deploy && docker-compose --env-file test.env up -d db + touch ../src/${DEMO}/deploy/.env && \ + echo "HASURA_SECRET=test" > ../src/${DEMO}/deploy/.env && \ + echo "POSTGRES_PASSWORD=test" >> ../src/${DEMO}/deploy/.env && \ + cd ../src/${DEMO}/deploy && docker-compose --env-file .env up -d db export POSTGRES_PORT=`docker port ${DEMO}-db-1 5432 | cut -d: -f2` && \ time dipdup -c ../src/${DEMO} -c ./oneshot_${DEMO}.yaml -c ./local_postgres.yaml run down: - cd ../src/${DEMO}/deploy && docker-compose down && rm test.env + cd ../src/${DEMO}/deploy && docker-compose down && rm .env docker volume rm -f ${DEMO}_db cpu_up: diff --git a/benchmarks/README.md b/benchmarks/README.md index a3b69076c..4d6149fbc 100644 --- a/benchmarks/README.md +++ b/benchmarks/README.md @@ -44,12 +44,12 @@ See the Makefile for details. - interval: 10,000,000 to 10,100,000 (100,000 levels, 93,745 non-empty) - database: in-memory sqlite -| run | time | bps | vs. asyncio | vs. 
7.5 | -| ---------------- | ---------------------------------------------------- | --- | ----------- | ------- | -| 7.5.9, asyncio | 1044,56s user 258,07s system 102% cpu 21:06,02 total | 79 | | | -| 7.5.10, uvloop | 924,94s user 182,33s system 102% cpu 18:04,67 total | 92 | 1.15 | | -| 8.0.0b4, asyncio | 832,32s user 163,20s system 101% cpu 16:19,93 total | 102 | | 1.29 | -| 8.0.0b5, uvloop | 730,58s user 88,67s system 98% cpu 13:48,46 total | 121 | 1.18 | 1.31 | +| run | time | bps | vs. asyncio | vs. 7.5 | +| ---------------- | ---------------------------------------------------- | --------- | ----------- | ------- | +| 7.5.9, asyncio | 1044,56s user 258,07s system 102% cpu 21:06,02 total | 79 | | | +| 7.5.10, uvloop | 924,94s user 182,33s system 102% cpu 18:04,67 total | 92 | 1.15 | | +| 8.0.0b4, asyncio | 832,32s user 163,20s system 101% cpu 16:19,93 total | 102 | | 1.29 | +| 8.0.0, uvloop | 721,13s user 84,17s system 98% cpu 13:33,88 total | 123 (116) | 1.18 | 1.31 | #### Without CPU boost @@ -57,18 +57,18 @@ The same tests run without frequency boost, which increases frequency from 2.9 G Run `echo 0 | sudo tee /sys/devices/system/cpu/cpufreq/boost`. -| run | time | bps | vs. boost | -| ------------------------- | ---------------------------------------------------- | --- | --------- | -| 7.5.10, uvloop, no boost | 1329,36s user 231,93s system 101% cpu 25:31,69 total | 65 | 0.82 | -| 8.0.0b5, uvloop, no boost | 1048,85s user 115,34s system 99% cpu 19:35,61 total | 85 | 0.70 | +| run | time | bps | vs. boost | +| ------------------------ | ---------------------------------------------------- | --- | --------- | +| 7.5.10, uvloop, no boost | 1329,36s user 231,93s system 101% cpu 25:31,69 total | 65 | 0.82 | +| 8.0.0, uvloop, no boost | 1048,85s user 115,34s system 99% cpu 19:35,61 total | 85 | 0.70 | In the subsequent runs, we will skip the 7.5 branch; speedup vs 8.0 is pretty stable. #### With PostgreSQL -| run | time | bps | vs. 
in-memory | -| --------------- | --------------------------------------------- | --- | ------------- | -| 8.0.0b5, uvloop | real 36m30,878s user 17m23,406s sys 3m38,196s | 46 | 0.38 | +| run | time | bps | vs. in-memory | +| ------------- | --------------------------------------------------- | ------- | ------------- | +| 8.0.0, uvloop | 1083,66s user 214,23s system 57% cpu 37:33,04 total | 46 (42) | 0.36 | ### starknet.events @@ -79,13 +79,13 @@ In the subsequent runs, we will skip the 7.5 branch; speedup vs 8.0 is pretty st | run | time | bps | speedup | | ---------------- | ------------------------------------------------- | --- | ------- | | 8.0.0b4, asyncio | 246,94s user 61,67s system 100% cpu 5:07,54 total | 326 | 1 | -| 8.0.0b5, uvloop | 213,01s user 33,22s system 96% cpu 4:14,32 total | 394 | 1.20 | +| 8.0.0, uvloop | 213,01s user 33,22s system 96% cpu 4:14,32 total | 394 | 1.20 | #### With PostgreSQL -| run | time | bps | vs. in-memory | -| --------------- | ------------------------------------------- | --- | ------------- | -| 8.0.0b5, uvloop | real 12m6,394s user 5m24,683s sys 1m14,761s | 138 | 0.35 | +| run | time | bps | vs. in-memory | +| ------------- | ------------------------------------------- | --- | ------------- | +| 8.0.0, uvloop | real 12m6,394s user 5m24,683s sys 1m14,761s | 138 | 0.35 | ### tezos.big_maps @@ -98,4 +98,4 @@ Only our code. And only 7% of blocks are non-empty. 
| run | time | bps | speedup | | ---------------- | ------------------------------------------------ | ---------- | ------- | | 8.0.0b4, asyncio | 136,63s user 17,91s system 98% cpu 2:37,40 total | 3185 (221) | 1 | -| 8.0.0b5, uvloop | 124,44s user 9,75s system 98% cpu 2:16,80 total | 3650 (254) | 1.15 | +| 8.0.0, uvloop | 124,44s user 9,75s system 98% cpu 2:16,80 total | 3650 (254) | 1.15 | diff --git a/docs/0.quickstart-evm.md b/docs/0.quickstart-evm.md index 4a76ed44e..8e279b310 100644 --- a/docs/0.quickstart-evm.md +++ b/docs/0.quickstart-evm.md @@ -15,11 +15,11 @@ Let's create an indexer for the [USDt token contract](https://etherscan.io/addre A modern Linux/macOS distribution with Python 3.12 installed is required to run DipDup. -The easiest way to install DipDup as a CLI application [pipx](https://pipx.pypa.io/stable/). We have a convenient wrapper script that installs DipDup for the current user. Run the following command in your terminal: +The recommended way to install DipDup CLI is [pipx](https://pipx.pypa.io/stable/). We also provide a convenient helper script that installs all necessary tools. Run the following command in your terminal: {{ #include _curl-spell.md }} -See the [Installation](https://dipdup.io/docs/installation) page for all options. +See the [Installation](../docs/1.getting-started/1.installation.md) page for all options. ## Create a project @@ -32,14 +32,14 @@ dipdup new Choose `EVM` network and `demo_evm_events` template. ::banner{type="note"} -Want to skip a tutorial and start from scratch? Choose `[none]` and `demo_blank` instead and proceed to the [Config](1.getting-started/3.config.md) section. +Want to skip a tutorial and start from scratch? Choose `[none]` and `demo_blank` instead and proceed to the [Config](../docs/1.getting-started/3.config.md) section. :: Follow the instructions; the project will be created in the new directory. ## Write a configuration file -In the project root, you'll find a file named `dipdup.yaml`. 
It's the main configuration file of your indexer. We will discuss it in detail in the [Config](1.getting-started/3.config.md) section; now it has the following content: +In the project root, you'll find a file named `dipdup.yaml`. It's the main configuration file of your indexer. We will discuss it in detail in the [Config](../docs/1.getting-started/3.config.md) section; now it has the following content: ```yaml [dipdup.yaml] {{ #include ../src/demo_evm_events/dipdup.yaml }} @@ -157,6 +157,6 @@ If you use SQLite, run this query to check the data: sqlite3 demo_evm_events.sqlite 'SELECT * FROM holder LIMIT 10' ``` -If you run a Compose stack, check open `http://127.0.0.1:8080` in your browser to see the Hasura console (an exposed port may differ). You can use it to explore the database and build GraphQL queries. +If you run a Compose stack, open `http://127.0.0.1:8080` in your browser to see the Hasura console (an exposed port may differ). You can use it to explore the database and build GraphQL queries. Congratulations! You've just created your first DipDup indexer. Proceed to the Getting Started section to learn more about DipDup configuration and features. diff --git a/docs/0.quickstart-starknet.md b/docs/0.quickstart-starknet.md index f96eed317..aff037e03 100644 --- a/docs/0.quickstart-starknet.md +++ b/docs/0.quickstart-starknet.md @@ -15,11 +15,11 @@ Let's create an indexer for the [USDt token contract](https://starkscan.co/contr A modern Linux/macOS distribution with Python 3.12 installed is required to run DipDup. -The easiest way to install DipDup as a CLI application [pipx](https://pipx.pypa.io/stable/). We have a convenient wrapper script that installs DipDup for the current user. Run the following command in your terminal: +The recommended way to install DipDup CLI is [pipx](https://pipx.pypa.io/stable/). We also provide a convenient helper script that installs all necessary tools. 
Run the following command in your terminal: {{ #include _curl-spell.md }} -See the [Installation](https://dipdup.io/docs/installation) page for all options. +See the [Installation](../docs/1.getting-started/1.installation.md) page for all options. ## Create a project @@ -32,14 +32,14 @@ dipdup new Choose `Starknet` network and `demo_starknet_events` template. ::banner{type="note"} -Want to skip a tutorial and start from scratch? Choose `[none]` and `demo_blank` instead and proceed to the [Config](1.getting-started/3.config.md) section. +Want to skip a tutorial and start from scratch? Choose `[none]` and `demo_blank` instead and proceed to the [Config](../docs/1.getting-started/3.config.md) section. :: Follow the instructions; the project will be created in the new directory. ## Write a configuration file -In the project root, you'll find a file named `dipdup.yaml`. It's the main configuration file of your indexer. We will discuss it in detail in the [Config](1.getting-started/3.config.md) section; now it has the following content: +In the project root, you'll find a file named `dipdup.yaml`. It's the main configuration file of your indexer. We will discuss it in detail in the [Config](../docs/1.getting-started/3.config.md) section; now it has the following content: ```yaml [dipdup.yaml] {{ #include ../src/demo_starknet_events/dipdup.yaml }} @@ -157,6 +157,6 @@ If you use SQLite, run this query to check the data: sqlite3 demo_starknet_events.sqlite 'SELECT * FROM holder LIMIT 10' ``` -If you run a Compose stack, check open `http://127.0.0.1:8080` in your browser to see the Hasura console (an exposed port may differ). You can use it to explore the database and build GraphQL queries. +If you run a Compose stack, open `http://127.0.0.1:8080` in your browser to see the Hasura console (an exposed port may differ). You can use it to explore the database and build GraphQL queries. Congratulations! You've just created your first DipDup indexer. 
Proceed to the Getting Started section to learn more about DipDup configuration and features. diff --git a/docs/0.quickstart-tezos.md b/docs/0.quickstart-tezos.md index bab4cda41..f2ec5dfd3 100644 --- a/docs/0.quickstart-tezos.md +++ b/docs/0.quickstart-tezos.md @@ -15,11 +15,11 @@ Let's create an indexer for the [tzBTC FA1.2 token contract](https://tzkt.io/KT1 A modern Linux/macOS distribution with Python 3.12 installed is required to run DipDup. -The easiest way to install DipDup as a CLI application [pipx](https://pipx.pypa.io/stable/). We have a convenient wrapper script that installs DipDup for the current user. Run the following command in your terminal: +The recommended way to install DipDup CLI is [pipx](https://pipx.pypa.io/stable/). We also provide a convenient helper script that installs all necessary tools. Run the following command in your terminal: {{ #include _curl-spell.md }} -See the [Installation](https://dipdup.io/docs/installation) page for all options. +See the [Installation](../docs/1.getting-started/1.installation.md) page for all options. ## Create a project @@ -32,14 +32,14 @@ dipdup new Choose `Tezos` network and `demo_tezos_token` template. ::banner{type="note"} -Want to skip a tutorial and start from scratch? Choose `[none]` and `demo_blank` instead and proceed to the [Config](1.getting-started/3.config.md) section. +Want to skip a tutorial and start from scratch? Choose `[none]` and `demo_blank` instead and proceed to the [Config](./1.getting-started/3.config.md) section. :: Follow the instructions; the project will be created in the new directory. ## Write a configuration file -In the project root, you'll find a file named `dipdup.yaml`. It's the main configuration file of your indexer. We will discuss it in detail in the [Config](1.getting-started/3.config.md) section; now it has the following content: +In the project root, you'll find a file named `dipdup.yaml`. It's the main configuration file of your indexer. 
We will discuss it in detail in the [Config](../docs/1.getting-started/3.config.md) section; now it has the following content: ```yaml [dipdup.yaml] {{ #include ../src/demo_tezos_token/dipdup.yaml }} @@ -170,6 +170,6 @@ If you use SQLite, run this query to check the data: sqlite3 demo_tezos_token.sqlite 'SELECT * FROM holder LIMIT 10' ``` -If you run a Compose stack, check open `http://127.0.0.1:8080` in your browser to see the Hasura console (an exposed port may differ). You can use it to explore the database and build GraphQL queries. +If you run a Compose stack, open `http://127.0.0.1:8080` in your browser to see the Hasura console (an exposed port may differ). You can use it to explore the database and build GraphQL queries. Congratulations! You've just created your first DipDup indexer. Proceed to the Getting Started section to learn more about DipDup configuration and features. diff --git a/docs/1.getting-started/1.installation.md b/docs/1.getting-started/1.installation.md index ac54c7d67..6f3ca15b2 100644 --- a/docs/1.getting-started/1.installation.md +++ b/docs/1.getting-started/1.installation.md @@ -52,4 +52,4 @@ pip install -r requirements.txt -e . ## Docker -For Docker installation, please refer to the [Docker](../6.deployment/2.docker.md) page. +For Docker installation, please refer to the [Docker](../5.advanced/1.docker.md) page. diff --git a/docs/1.getting-started/9.hooks.md b/docs/1.getting-started/10.hooks.md similarity index 93% rename from docs/1.getting-started/9.hooks.md rename to docs/1.getting-started/10.hooks.md index 14835717d..cf88e840a 100644 --- a/docs/1.getting-started/9.hooks.md +++ b/docs/1.getting-started/10.hooks.md @@ -5,9 +5,40 @@ description: "Hooks are user-defined callbacks called either from the `ctx.fire_ # Hooks -Hooks are user-defined callbacks called either from the `ctx.fire_hook` method or by the job scheduler. +Hooks are user-defined callbacks not linked to any index. 
There are two types of hooks: -## Definition +- System hooks are called on system-wide events like process restart. +- User hooks are called either with the `ctx.fire_hook` method or by the job scheduler. + +## System hooks + +Every DipDup project has multiple system hooks; they fire on system-wide events and, like regular hooks, are not linked to any index. Names of those hooks are reserved; you can't use them in config. System hooks are not atomic and can't be fired manually or with a job scheduler. + +You can also put SQL scripts in corresponding `sql/on_*` directories to execute them like with regular hooks. + +### on_restart + +This hook executes right before starting indexing. It allows configuring DipDup in runtime based on data from external sources. Datasources are already initialized at execution and available at `ctx.datasources`. You can, for example, configure logging here or add contracts and indexes in runtime instead of from static config. + +SQL scripts in `sql/on_restart` directory may contain `CREATE OR REPLACE VIEW` or similar non-destructive operations. + +### on_reindex + +This hook fires after the database is re-initialized after reindexing (wipe) before starting indexing. + +Helpful in modifying schema with arbitrary SQL scripts before indexing. For example, you can change the database schema in ways that are not supported by the DipDup ORM, e.g., to create a composite primary key. + +### on_synchronized + +This hook fires when every active index reaches a realtime state. Here you can clear internal caches or do other cleanups. + +### on_index_rollback + +Fires when one of the index datasources received a chain reorg message. + +Since version 6.0 this hook performs a database-level rollback by default. If you want to process rollbacks manually, remove the `ctx.rollback` call and implement custom logic in this callback. 
+ +## User hooks Let's assume we want to calculate some statistics on-demand to avoid blocking an indexer with heavy computations. Add the following lines to the DipDup config: @@ -23,7 +54,7 @@ hooks: Values of `args` mapping are used as type hints in a signature of a generated callback. The following callback stub will be created on init: -```python[hooks/calculate_stats.py] +```python [hooks/calculate_stats.py] from dipdup.context import HookContext async def calculate_stats( @@ -36,15 +67,13 @@ async def calculate_stats( By default, hooks execute SQL scripts from the corresponding subdirectory of `sql/`. Remove or comment out the `ctx.execute_sql` call to prevent it. -## Usage - To trigger the hook, call the `ctx.fire_hook` method from any callback: ```python await ctx.fire_hook('calculate_stats', major=True, depth=10) ``` -## Atomicity +### Atomicity and blocking The `atomic` option defines whether the hook callback will be wrapped in a single SQL transaction or not. If this option is set to true main indexing loop will be blocked until hook execution is complete. Some statements, like `REFRESH MATERIALIZED VIEW`, do not require to be wrapped in transactions, so choosing a value of the `atomic` option could decrease the time needed to perform initial indexing. @@ -57,35 +86,7 @@ async def handler(ctx: HandlerContext, ...) -> None: This hook will be executed when the current transaction is committed. -## System hooks - -Every DipDup project has multiple system hooks; they fire on system-wide events and, like regular hooks, are not linked to any index. Names of those hooks are reserved; you can't use them in config. System hooks are not atomic and can't be fired manually or with a job scheduler. - -You can also put SQL scripts in corresponding `sql/on_*` directories to execute them like with regular hooks. - -### on_restart - -This hook executes right before starting indexing. It allows configuring DipDup in runtime based on data from external sources. 
Datasources are already initialized at execution and available at `ctx.datasources`. You can, for example, configure logging here or add contracts and indexes in runtime instead of from static config. - -SQL scripts in `sql/on_restart` directory may contain `CREATE OR REPLACE VIEW` or similar non-destructive operations. - -### on_reindex - -This hook fires after the database are re-initialized after reindexing (wipe) before starting indexing. - -Helpful in modifying schema with arbitrary SQL scripts before indexing. For example, you can to change the database schema in ways that are not supported by the DipDup ORM, e.g., to create a composite primary key. - -### on_synchronized - -This hook fires when every active index reaches a realtime state. Here you can clear caches internal caches or do other cleanups. - -### on_index_rollback - -Fires when one of the index datasources received a chain reorg message. - -Since version 6.0 this hook performs a database-level rollback by default. If you want to process rollbacks manually, remove `ctx.rollback` call and implement custom logic in this callback. - -## Arguments typechecking +### Arguments typechecking DipDup will ensure that arguments passed to the hooks have the correct types when possible. `CallbackTypeError` exception will be raised otherwise. Values of an `args` mapping in a hook config should be either built-in types or `__qualname__` of external type like `decimal.Decimal`. Generic types are not supported: hints like `Optional[int] = None` will be correctly parsed during codegen but ignored on type checking. 
diff --git a/docs/1.getting-started/2.core-concepts.md b/docs/1.getting-started/2.core-concepts.md index 988f96fab..7a541612a 100644 --- a/docs/1.getting-started/2.core-concepts.md +++ b/docs/1.getting-started/2.core-concepts.md @@ -19,9 +19,8 @@ The Python package contains ORM models, callbacks, typeclasses, scripts and quer As a result, you get a service responsible for filling the database with indexed data. Then you can use it to build a custom API backend or integrate with existing ones. DipDup provides _Hasura GraphQL Engine_ integration to expose indexed data via REST and GraphQL with zero configuration, but you can use other API engines like PostgREST or develop one in-house. - - -![Generic DipDup setup and data flow](../assets/dipdup.svg) + + ## Storage layer diff --git a/docs/1.getting-started/3.config.md b/docs/1.getting-started/3.config.md index 6be2de2b0..f8f9b3c6a 100644 --- a/docs/1.getting-started/3.config.md +++ b/docs/1.getting-started/3.config.md @@ -3,7 +3,7 @@ title: "Config" description: "Developing a DipDup indexer begins with creating a YAML config file. You can find a minimal example to start indexing on the Quickstart page." --- -# Creating config +# Config Developing a DipDup indexer begins with creating a YAML config file of a specific format. DipDup expects it to be located in the project root and named `dipdup.yaml`. However, you can provide any path with a `-c` CLI option. 
@@ -24,7 +24,8 @@ See [Config reference guide](../7.references/2.config.md) for the full list of a | | `templates` | Also index definitions, but with placeholders that make them reusable | | **Hooks** | `hooks` | Callbacks to run manually or by schedule | | | `jobs` | Schedules for hooks | -| **Integrations** | `hasura` | Hasura GraphQL Engine configuration | +| **Integrations** | `api` | Internal API configuration | +| | `hasura` | Hasura GraphQL Engine configuration | | | `sentry` | Sentry configuration | | | `prometheus` | Prometheus configuration | | **Miscellaneous** | `advanced` | Tunables that affect framework behavior | @@ -43,6 +44,8 @@ Use `config export`{lang="sh"} and `config env`{lang="sh"} commands to check the ## Environment variables +_For the list of variables to configure DipDup see [this page](../5.advanced/2.environment-variables.md)._ + DipDup supports compose-style variable expansion with an optional default value. Use this feature to store sensitive data outside of the configuration file and make your app fully declarative. If a required variable is not set, DipDup will fail with an error. You can use these placeholders anywhere throughout the configuration file. ```yaml [dipdup.yaml] @@ -83,3 +86,47 @@ contracts: ``` If multiple contracts you index have the same interface but different code, see [F.A.Q.](../12.faq.md) to learn how to avoid conflicts. + +## Reindexing + +In some cases, DipDup can't proceed with indexing without a full wipe. Several reasons trigger reindexing: + +| reason | description | +| ----------------- | ------------------------------------------------------------------------------------------------------------------------------------------ | +| `manual` | Reindexing triggered manually from callback with `ctx.reindex`. | +| `migration` | Applied migration requires reindexing. Check release notes before switching between major DipDup versions to be prepared. 
| +| `rollback` | Reorg message received from datasource and can not be processed. | +| `config_modified` | One of the index configs has been modified. | +| `schema_modified` | Database schema has been modified. Try to avoid manual schema modifications in favor of [SQL scripts](../1.getting-started/5.database.md). | + +It is possible to configure desirable action on reindexing triggered by a specific reason. + +| action | description | +| --------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `exception` (default) | Raise `ReindexingRequiredError` and quit with error code. The safest option since you can trigger reindexing accidentally, e.g., by a typo in config. Don't forget to set up the correct restart policy when using it with containers. | +| `wipe` | Drop the whole database and start indexing from scratch. Be careful with this option! | +| `ignore` | Ignore the event and continue indexing as usual. It can lead to unexpected side-effects up to data corruption; make sure you know what you are doing. | + +To configure actions for each reason, add the following section to the DipDup config: + +```yaml +advanced: + reindex: + manual: wipe + migration: exception + rollback: ignore + config_modified: exception + schema_modified: exception +``` + +## Advanced options + +Flags related to the project are set in the `advanced` section of the config (most likely in `dipdup.yaml`). + +| flag | description | +| -------------------- | ---------------------------------------------------------------------------------------------------------------------- | +| `early_realtime` | Establish realtime connection and start collecting messages while sync is in progress (faster, but consumes more RAM). 
| +| `decimal_precision` | Overwrite precision if it's not guessed correctly based on project models. | +| `postpone_jobs` | Do not start job scheduler until all indexes reach the realtime state. | +| `rollback_depth` | A number of levels to keep for rollback. | +| `unsafe_sqlite` | Disable journaling and data integrity checks. Use only for testing. | diff --git a/docs/6.deployment/1.database.md b/docs/1.getting-started/5.database.md similarity index 50% rename from docs/6.deployment/1.database.md rename to docs/1.getting-started/5.database.md index ea3ef7a5e..b47a236f9 100644 --- a/docs/6.deployment/1.database.md +++ b/docs/1.getting-started/5.database.md @@ -1,9 +1,11 @@ --- -title: "Database engines" +title: "Database" description: "DipDup officially supports SQLite, PostgreSQL and TimescaleDB as a database engine. This table will help you choose a database engine that mostly suits your needs." --- -# Database engines +# Database + +## Database engines DipDup officially supports SQLite, PostgreSQL and TimescaleDB as a database engine. This table will help you choose a database engine that mostly suits your needs. @@ -25,6 +27,65 @@ The latest PostgreSQL and TimescaleDB versions are recommended due to significan Usually, it's okay to use different database engines for development and production, but be careful with SQL scripts and column types that can behave differently. +## Internal tables + +Several tables having `dipdup_` prefix are created by DipDup automatically and are not intended to be modified by the user. However, they can be useful for external monitoring and debugging. + +| table | description | +|:-------------------------- |:----------------------------------------------------------------------------------------------------------------------------------------- | +| `dipdup_schema` | Information about database schema in use including hash to detect changes that require reindexing. 
| +| `dipdup_head` | The latest block received by index datasources in realtime state. Indicates that underlying datasource is ok. | +| `dipdup_index` | Everything about specific indexes from config: status, current level, template and its values if applicable. | +| `dipdup_contract` | Info about contracts used by all indexes, including ones added in runtime. | +| `dipdup_model_update` | Service table to store model diffs for database rollback. Configured by `advanced.rollback_depth` | +| `dipdup_meta` | Arbitrary key-value storage for DipDup internal use. Survives reindexing. You can use it too, but don't touch keys with `dipdup_` prefix. | +| `dipdup_contract_metadata` | See [Metadata interface](../5.advanced/11.metadata-interface.md). | +| `dipdup_token_metadata` | See [Metadata interface](../5.advanced/11.metadata-interface.md) | + +See [`dipdup.models` module](https://github.com/dipdup-io/dipdup/blob/next/src/dipdup/models/__init__.py) for exact table definitions. + +If you want to use these tables in monitoring, here are some SQL snippets to help you get started: + +```sql +-- Time since last block received by index datasources +SELECT name, NOW() - timestamp FROM dipdup_head; + +-- Index statuses +SELECT name, status FROM dipdup_index; + +-- Get last reindex time +SELECT created_at FROM dipdup_schema WHERE name = 'public'; +``` + +## SQL scripts + +Put your `*.sql` scripts to `{{ project.package }}/sql`. You can run these scripts from any callback with `ctx.execute_sql('name')`. If `name` is a directory, each script it contains will be executed. + +Scripts are executed without being wrapped with SQL transactions. It's generally a good idea to avoid touching table data in scripts. + +By default, an empty `sql/` directory is generated for every hook in config during init. Remove `ctx.execute_sql` call from hook callback to avoid executing them. 
+ +```python +# Execute all scripts in sql/my_hook directory +await ctx.execute_sql('my_hook') + +# Execute a single script +await ctx.execute_sql('my_hook/my_script.sql') +``` + +## Helper functions + +When using PostgreSQL or Timescale as database engine you can use `dipdup_approve` and `dipdup_wipe` functions to manage schema state from SQL console if needed: + +```sql +SELECT dipdup_approve('public'); + +-- WARNING: This action is irreversible! All indexed data will be lost! +SELECT dipdup_wipe('public'); +``` + +Please note that `dipdup_wipe` function doesn't support preserving immune tables. + ## Immune tables You might want to keep several tables during schema wipe if the data in them is not dependent on index states yet heavy. A typical example is indexing IPFS data — changes in your code won't affect off-chain storage, so you can safely reuse this data. diff --git a/docs/1.getting-started/5.models.md b/docs/1.getting-started/6.models.md similarity index 100% rename from docs/1.getting-started/5.models.md rename to docs/1.getting-started/6.models.md diff --git a/docs/1.getting-started/6.datasources.md b/docs/1.getting-started/7.datasources.md similarity index 82% rename from docs/1.getting-started/6.datasources.md rename to docs/1.getting-started/7.datasources.md index 81da962a2..db00d97d5 100644 --- a/docs/1.getting-started/6.datasources.md +++ b/docs/1.getting-started/7.datasources.md @@ -11,16 +11,16 @@ Index datasources, ones that can be attached to a specific index, are prefixed w | kind | blockchain | description | | ------------------------------------------------------------ | ---------------- | ------------------------------- | -| [abi.etherscan](../3.datasources/1.abi_etherscan.md) | ⟠ EVM-compatible | Provides ABIs for EVM contracts | -| [coinbase](../3.datasources/2.coinbase.md) | any | Coinbase price feed | -| [evm.node](../3.datasources/3.evm_node.md) | ⟠ EVM-compatible | Ethereum node | -| [evm.subsquid](../3.datasources/4.evm_subsquid.md) | 
⟠ EVM-compatible | Subsquid Network API | -| [http](../3.datasources/5.http.md) | any | Generic HTTP API | -| [ipfs](../3.datasources/6.ipfs.md) | any | IPFS gateway | -| [starknet.node](../3.datasources/7.starknet_node.md) | 🐺 Starknet | Starknet node | -| [starknet.subsquid](../3.datasources/8.starknet_subsquid.md) | 🐺 Starknet | Subsquid Network API | -| [tezos.tzkt](../3.datasources/9.tezos_tzkt.md) | ꜩ Tezos | TzKT API | -| [tzip_metadata](../3.datasources/10.tzip_metadata.md) | ꜩ Tezos | TZIP-16 metadata | +| [evm.subsquid](../3.datasources/1.evm_subsquid.md) | ⟠ EVM-compatible | Subsquid Network API | +| [evm.node](../3.datasources/2.evm_node.md) | ⟠ EVM-compatible | Ethereum node | +| [abi.etherscan](../3.datasources/3.abi_etherscan.md) | ⟠ EVM-compatible | Provides ABIs for EVM contracts | +| [starknet.subsquid](../3.datasources/4.starknet_subsquid.md) | 🐺 Starknet | Subsquid Network API | +| [starknet.node](../3.datasources/5.starknet_node.md) | 🐺 Starknet | Starknet node | +| [tezos.tzkt](../3.datasources/6.tezos_tzkt.md) | ꜩ Tezos | TzKT API | +| [tzip_metadata](../3.datasources/7.tzip_metadata.md) | ꜩ Tezos | TZIP-16 metadata | +| [coinbase](../3.datasources/8.coinbase.md) | any | Coinbase price feed | +| [ipfs](../3.datasources/9.ipfs.md) | any | IPFS gateway | +| [http](../3.datasources/10.http.md) | any | Generic HTTP API | ## Connection settings diff --git a/docs/1.getting-started/7.indexes.md b/docs/1.getting-started/8.indexes.md similarity index 98% rename from docs/1.getting-started/7.indexes.md rename to docs/1.getting-started/8.indexes.md index 39e9153a3..20db53b18 100644 --- a/docs/1.getting-started/7.indexes.md +++ b/docs/1.getting-started/8.indexes.md @@ -46,7 +46,7 @@ indexes: - tzkt_mainnet ``` -You can also spawn indexes from templates in runtime; see [Spawning in runtime](../1.getting-started/7.indexes.md#spawning-in-runtime). 
+You can also spawn indexes from templates in runtime; see [Spawning in runtime](../1.getting-started/8.indexes.md#spawning-in-runtime). ## Limiting scope diff --git a/docs/1.getting-started/8.handlers.md b/docs/1.getting-started/9.handlers.md similarity index 100% rename from docs/1.getting-started/8.handlers.md rename to docs/1.getting-started/9.handlers.md diff --git a/docs/10.supported-networks/0.overview.md b/docs/10.supported-networks/0.overview.md index 162ab9c88..07e0cfcf9 100644 --- a/docs/10.supported-networks/0.overview.md +++ b/docs/10.supported-networks/0.overview.md @@ -6,10 +6,12 @@ network: "ethereum" -_Updated 2024-07-18: Found and marked/fixed broken external links._ - # Supported networks +::banner{type="note"} +This page is about EVM. Looking for [Starknet](../0.quickstart-starknet.md) or [Tezos](../0.quickstart-tezos.md)? +:: + DipDup can index any EVM-compatible network as long as there's enough historical data. This page contains a list of supported networks and instructions on how to configure your indexer for them. We aim to support all networks available in [Subsquid Network](https://docs.subsquid.io/subsquid-network/reference/evm-networks/#raw-urls), and several others in node-only mode. @@ -37,8 +39,8 @@ datasources: To configure datasources for other networks, you need to change URLs and API keys. You can do it in the config file directly, but it's better to use environment variables. Check the `deploy/.env.default` file in your project directory; it contains all the variables used in config. -[evm.subsquid](../3.datasources/4.evm_subsquid.md) - Subsquid Network is the main source of historical data for EVM-compatible networks. It's free and available for many networks. +[evm.subsquid](../3.datasources/1.evm_subsquid.md) - Subsquid Network is the main source of historical data for EVM-compatible networks. It's free and available for many networks. 
-[abi.etherscan](../3.datasources/1.abi_etherscan.md) - Etherscan is a source of contract ABIs, which are used to generate types for the indexer. Many explorers have Etherscan-like API which could be used to retrieve ABIs. Some of them require an API key, which you can get on their website. If there's no Etherscan-like API available, you need to obtain contract ABI JSON somewhere and put it to the `abi//abi.json` path. Don't forget to run `dipdup init` after that to generate all necessary types. +[abi.etherscan](../3.datasources/3.abi_etherscan.md) - Etherscan is a source of contract ABIs, which are used to generate types for the indexer. Many explorers have Etherscan-like API which could be used to retrieve ABIs. Some of them require an API key, which you can get on their website. If there's no Etherscan-like API available, you need to obtain contract ABI JSON somewhere and put it to the `abi//abi.json` path. Don't forget to run `dipdup init` after that to generate all necessary types. -[evm.node](../3.datasources/3.evm_node.md) - EVM node datasource can be used to fetch recent data not yet in Subsquid Network. API methods could vary a lot across different networks, but DipDup only uses a few of them, so most of the nodes will work. WebSocket URL can be specified to get real-time updates. This option can save you some requests to the node, but otherwise, it's not required. If Subsquid for your network is not available yet, you can use this datasource to fetch historical data, but it's significantly slower. +[evm.node](../3.datasources/2.evm_node.md) - EVM node datasource can be used to fetch recent data not yet in Subsquid Network. API methods could vary a lot across different networks, but DipDup only uses a few of them, so most of the nodes will work. WebSocket URL can be specified to get real-time updates. This option can save you some requests to the node, but otherwise, it's not required. 
If Subsquid for your network is not available yet, you can use this datasource to fetch historical data, but it's significantly slower. diff --git a/docs/12.faq.md b/docs/12.faq.md index 0f2799ffe..df61849ff 100644 --- a/docs/12.faq.md +++ b/docs/12.faq.md @@ -148,6 +148,18 @@ Don't forget to reindex after this change. When decimal context precision is adj WARNING dipdup.database Decimal context precision has been updated: 28 -> 128 ``` +### How to modify schema manually? + +Drop an idempotent SQL script into `sql/on_reindex/` directory. For example, here's how to create a Timescale hypertable: + +```sql [sql/on_reindex/00_prepare_db.sql] +CREATE EXTENSION IF NOT EXISTS timescaledb CASCADE; + +ALTER TABLE swap DROP CONSTRAINT swap_pkey; +ALTER TABLE swap ADD PRIMARY KEY (id, timestamp); +SELECT create_hypertable('swap', 'timestamp', chunk_time_interval => 7776000); +``` + ## Package ### What is the symlink in the project root for? diff --git a/docs/13.troubleshooting.md b/docs/13.troubleshooting.md index 774cc5d6b..0e55583c5 100644 --- a/docs/13.troubleshooting.md +++ b/docs/13.troubleshooting.md @@ -4,6 +4,8 @@ description: "DipDup troubleshooting guide" nested: Resources --- + + # Troubleshooting This page contains tips for troubleshooting DipDup issues. Start with "General" section and then proceed to one that matches your problem. @@ -96,7 +98,7 @@ See the `pyproject.toml` file in your project root for more details. [Better Call Dev](https://better-call.dev) is a blockchain explorer for Tezos smart contracts. It provides a more human-friendly interface than TzKT to explore exact contract calls and parameter/storage structures. -![BCD](assets/troubleshooting-bcd.png) +
BCD interface
Try it out when writing index definitions. diff --git a/docs/15.glossary.md b/docs/15.glossary.md new file mode 100644 index 000000000..720599db0 --- /dev/null +++ b/docs/15.glossary.md @@ -0,0 +1,141 @@ +--- +title: Glossary +description: "Our sponsors, contributors and other acknowledgments" +nested: Resources +--- + + + +# Glossary + +## General + +### ABI + +Application Binary Interface. It's a JSON-formatted description of how to interact with a smart contract on a blockchain, typically Ethereum. The ABI defines the contract's functions, their inputs and outputs, allowing other programs or users to call the contract's functions correctly. + +### block number + +A unique identifier for a block in the blockchain. It's a number that increases sequentially as new blocks are added to the blockchain. Each block contains a number of transactions, and the block number is used to identify the block that contains a specific transaction. + +### callback + +A function with a specific signature used in event handling. In DipDup there are two types of callbacks, [handlers](#handler) and [hooks](#hook). + +### config + +A configuration file which defines a project's structure, settings, environment configurations, and other metadata. Examples include `pyproject.toml` in Python, `compose.yaml` in Docker Compose. DipDup projects start with `dipdup.yaml` file in the project's root. See the [full reference](7.references/2.config.md). + +### context + +In DipDup, an object passed as a first argument to all callbacks. Provides access to the current state of the indexer and various methods to interact with it. + +### datasource + +### DipDup + +An open source framework for building smart contract indexes for the Tezos network. + +### Docker + +An open-source platform for creating, deploying, and managing containerized applications, improving consistency and reducing infrastructure overhead. 
+ +### Docker Compose + +A tool for defining and managing multi-container Docker applications, using a YAML file to configure services, networks, and volumes, simplifying application deployment and scaling. + +### environment variables + +### GraphQL + +A query language and runtime for APIs that enables clients to request only the data they need, offering more flexibility and efficiency compared to traditional REST APIs. + +### handler + +### Hasura + +An open-source engine that connects to databases and microservices, providing real-time GraphQL APIs for faster and efficient data access. + +### head + +The latest block on the blockchain. In DipDup terminology, this term applies to [Datasources](1.getting-started/4.package.md). + +### hook + +A user-defined function that is executed at specific points in the lifecycle of a DipDup project. Unlike handlers, hooks are not tied to specific indexes and can be called from anywhere in the code. + +### index + +### indexer + +A program that reads data from a blockchain and stores it in a database for quick and easy querying. + +### job + +### JSONSchema + +### level + +In DipDup, [block number](#block-number). + +### model + +A Python class representing a database table, defined using the Tortoise ORM library. + +### package + +A directory containing all the files needed to run a DipDup project. DipDup projects must be a valid Python package. See the [Package](1.getting-started/4.package.md) page. + +### PostgreSQL + +A powerful, open-source object-relational database system known for its reliability, robustness, and performance, widely used for managing structured data. + +### Prometheus + +An open-source monitoring and alerting toolkit designed for reliability and scalability, used to collect and process metrics from various systems, providing valuable insights into application and infrastructure performance. + +### RPC API + +RPC stands for Remote Procedure Call. 
A protocol used to communicate with Tezos nodes and interact with the blockchain. DipDup receives a minimal amount of data from the RPC API due to its slow performance relative to TzKT and other APIs.
+ Operation group in BCD explorer + `operation` index config +
# `tezos.operations` index diff --git a/docs/2.indexes/_evm.md b/docs/2.indexes/_evm.md index 061cba980..55c17ef6a 100644 --- a/docs/2.indexes/_evm.md +++ b/docs/2.indexes/_evm.md @@ -1,6 +1,6 @@ ## Datasources -DipDup indexes for EVM networks use [Subsquid Network](https://docs.subsquid.io/subsquid-network/overview/) as a main source of historical data. EVM nodes are not required for DipDup to operate, but they can be used to get the latest data (not yet in Subsquid Network) and realtime updates. See [evm.subsquid](../3.datasources/4.evm_subsquid.md) and [evm.node](../3.datasources/3.evm_node.md) pages for more info on how to configure both datasources. +DipDup indexes for EVM networks use [Subsquid Network](https://docs.subsquid.io/subsquid-network/overview/) as a main source of historical data. EVM nodes are not required for DipDup to operate, but they can be used to get the latest data (not yet in Subsquid Network) and realtime updates. See [evm.subsquid](../3.datasources/4.evm_subsquid.md) and [evm.node](../3.datasources/2.evm_node.md) pages for more info on how to configure both datasources. For testing purposes, you can use EVM node as a single datasource, but indexing will be significantly slower. For production, it's recommended to use Subsquid Network as the main datasource and EVM node(s) as a secondary one. If there are multiple `evm.node` datasources attached to index, DipDup will use random one for each request. diff --git a/docs/2.indexes/_starknet.md b/docs/2.indexes/_starknet.md index 47c607f8b..f5f1a1f3c 100644 --- a/docs/2.indexes/_starknet.md +++ b/docs/2.indexes/_starknet.md @@ -1,4 +1,4 @@ ## Datasources -DipDup indexes for Starknet use [Subsquid Network](https://docs.subsquid.io/subsquid-network/overview/) as a main source of historical data. Starknet nodes are not required for DipDup to operate, but in future updates, it will be possible to use them to get the latest data (not yet in Subsquid Network) and realtime updates. 
See [starknet.subsquid](../3.datasources/4.evm_subsquid.md) page for more info on how to configure the datasource. +DipDup indexes for Starknet use [Subsquid Network](https://docs.subsquid.io/subsquid-network/overview/) as a main source of historical data. Starknet nodes are not required for DipDup to operate, but in future updates, it will be possible to use them to get the latest data (not yet in Subsquid Network) and realtime updates. See [starknet.subsquid](../3.datasources/4.starknet_subsquid.md) page for more info on how to configure the datasource. diff --git a/docs/3.datasources/4.evm_subsquid.md b/docs/3.datasources/1.evm_subsquid.md similarity index 100% rename from docs/3.datasources/4.evm_subsquid.md rename to docs/3.datasources/1.evm_subsquid.md diff --git a/docs/3.datasources/5.http.md b/docs/3.datasources/10.http.md similarity index 100% rename from docs/3.datasources/5.http.md rename to docs/3.datasources/10.http.md diff --git a/docs/3.datasources/3.evm_node.md b/docs/3.datasources/2.evm_node.md similarity index 100% rename from docs/3.datasources/3.evm_node.md rename to docs/3.datasources/2.evm_node.md diff --git a/docs/3.datasources/1.abi_etherscan.md b/docs/3.datasources/3.abi_etherscan.md similarity index 100% rename from docs/3.datasources/1.abi_etherscan.md rename to docs/3.datasources/3.abi_etherscan.md diff --git a/docs/3.datasources/8.starknet_subsquid.md b/docs/3.datasources/4.starknet_subsquid.md similarity index 100% rename from docs/3.datasources/8.starknet_subsquid.md rename to docs/3.datasources/4.starknet_subsquid.md diff --git a/docs/3.datasources/7.starknet_node.md b/docs/3.datasources/5.starknet_node.md similarity index 100% rename from docs/3.datasources/7.starknet_node.md rename to docs/3.datasources/5.starknet_node.md diff --git a/docs/3.datasources/9.tezos_tzkt.md b/docs/3.datasources/6.tezos_tzkt.md similarity index 100% rename from docs/3.datasources/9.tezos_tzkt.md rename to docs/3.datasources/6.tezos_tzkt.md diff --git 
a/docs/3.datasources/10.tzip_metadata.md b/docs/3.datasources/7.tzip_metadata.md similarity index 87% rename from docs/3.datasources/10.tzip_metadata.md rename to docs/3.datasources/7.tzip_metadata.md index 11c1805db..443bc6fee 100644 --- a/docs/3.datasources/10.tzip_metadata.md +++ b/docs/3.datasources/7.tzip_metadata.md @@ -1,5 +1,5 @@ --- -title: "TZIP" +title: "TZIP metadata" description: "dipdup-metadata is a standalone companion indexer for DipDup written in Go. Configure datasource in the following way" network: "tezos" --- @@ -19,6 +19,6 @@ datasources: Then, in your hook or handler code: ```python -datasource = ctx.get_metadata_datasource('metadata') +datasource = ctx.get_tzip_metadata_datasource('metadata') token_metadata = await datasource.get_token_metadata('KT1...', '0') ``` diff --git a/docs/3.datasources/2.coinbase.md b/docs/3.datasources/8.coinbase.md similarity index 100% rename from docs/3.datasources/2.coinbase.md rename to docs/3.datasources/8.coinbase.md diff --git a/docs/3.datasources/6.ipfs.md b/docs/3.datasources/9.ipfs.md similarity index 100% rename from docs/3.datasources/6.ipfs.md rename to docs/3.datasources/9.ipfs.md diff --git a/docs/4.graphql/_dir.yml b/docs/4.graphql/_dir.yml index 593c5f696..87f484748 100644 --- a/docs/4.graphql/_dir.yml +++ b/docs/4.graphql/_dir.yml @@ -1 +1 @@ -navigation.icon: "grid" +navigation.icon: "graphql" diff --git a/docs/6.deployment/2.docker.md b/docs/5.advanced/1.docker.md similarity index 81% rename from docs/6.deployment/2.docker.md rename to docs/5.advanced/1.docker.md index b7ab4c449..22cbf2ad0 100644 --- a/docs/6.deployment/2.docker.md +++ b/docs/5.advanced/1.docker.md @@ -7,7 +7,7 @@ description: "DipDup provides prebuilt Docker images hosted on Docker Hub. You c DipDup provides prebuilt Docker images hosted on [Docker Hub](https://hub.docker.com/r/dipdup/dipdup). You can use them as is or build custom images based on them. 
-| link | latest tag | +| link | latest tag | | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------- | | [Docker Hub](https://hub.docker.com/r/dipdup/dipdup) | `dipdup/dipdup:{{ project.dipdup_version }}` | | [GitHub Container Registry](https://github.com/dipdup-io/dipdup/pkgs/container/dipdup) | `ghcr.io/dipdup-io/dipdup:{{ project.dipdup_version }}` | @@ -40,25 +40,15 @@ docker run \ -c {{ project.package }} run ``` -## Building custom image +## Extending the base image -Start with creating .dockerignore file for your project if it's missing. - -```shell [.dockerignore] -{{ #include ../src/dipdup/projects/base/.dockerignore.j2 }} -``` - -Then copy your code and config file to the image: +In `deploy` project directory, you can find default `Dockerfile` with the following contents: ```docker [deploy/Dockerfile] {{ #include ../src/dipdup/projects/base/deploy/Dockerfile.j2 }} ``` -If you need to install additional Python dependencies, just call pip directly during the build stage: - -```docker [deploy/Dockerfile] -RUN pip install --no-cache -r requirements.txt -``` +To change the base image or install additional Python dependencies from `pyproject.toml`, uncomment the corresponding lines in the `Dockerfile`. ## Deploying with Docker Compose @@ -70,23 +60,20 @@ Here's an example `compose.yaml` file: Environment variables are expanded in the DipDup config file; PostgreSQL password and Hasura secret are forwarded from host environment in this example. -You can create a separate `dipdup..yaml` file for this stack to apply environment-specific config overrides: +`configs/dipdup.compose.yaml` file contains environment-specific config overrides. 
```yaml [configs/dipdup.compose.yaml] {{ #include ../src/dipdup/projects/base/configs/dipdup.compose.yaml.j2 }} ``` -Then modify command in `compose.yaml`: +Note the command string in `compose.yaml`: ```yaml [deploy/compose.yaml] services: dipdup: - command: ["dipdup", "-c", "dipdup.yaml", "-c", "dipdup.prod.yaml", "run"] - ... + command: ["dipdup", "-c", ".", "-c", "configs/dipdup.compose.yaml", "run"] ``` -Note the hostnames (resolved in the docker network) and environment variables (expanded by DipDup). - Build and run the containers: ```shell [Terminal] diff --git a/docs/5.advanced/1.reindexing.md b/docs/5.advanced/1.reindexing.md deleted file mode 100644 index 741964f5f..000000000 --- a/docs/5.advanced/1.reindexing.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -title: "Reindexing" -description: "In some cases, DipDup can't proceed with indexing without a full wipe. Several reasons trigger reindexing:" ---- - -# Reindexing - -In some cases, DipDup can't proceed with indexing without a full wipe. Several reasons trigger reindexing: - -| reason | description | -| ----------------- | ------------------------------------------------------------------------------------------------------------------------------ | -| `manual` | Reindexing triggered manually from callback with `ctx.reindex`. | -| `migration` | Applied migration requires reindexing. Check release notes before switching between major DipDup versions to be prepared. | -| `rollback` | Reorg message received from datasource and can not be processed. | -| `config_modified` | One of the index configs has been modified. | -| `schema_modified` | Database schema has been modified. Try to avoid manual schema modifications in favor of [SQL scripts](../5.advanced/3.sql.md). | - -It is possible to configure desirable action on reindexing triggered by a specific reason. 
- -| action | description | -| --------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `exception` (default) | Raise `ReindexingRequiredError` and quit with error code. The safest option since you can trigger reindexing accidentally, e.g., by a typo in config. Don't forget to set up the correct restart policy when using it with containers. | -| `wipe` | Drop the whole database and start indexing from scratch. Be careful with this option! | -| `ignore` | Ignore the event and continue indexing as usual. It can lead to unexpected side-effects up to data corruption; make sure you know what you are doing. | - -To configure actions for each reason, add the following section to the DipDup config: - -```yaml -advanced: - reindex: - manual: wipe - migration: exception - rollback: ignore - config_modified: exception - schema_modified: exception -``` diff --git a/docs/5.advanced/4.metadata-interface.md b/docs/5.advanced/11.metadata-interface.md similarity index 96% rename from docs/5.advanced/4.metadata-interface.md rename to docs/5.advanced/11.metadata-interface.md index e2aeec4be..746b6177b 100644 --- a/docs/5.advanced/4.metadata-interface.md +++ b/docs/5.advanced/11.metadata-interface.md @@ -1,14 +1,11 @@ --- title: "Metadata interface" description: "When issuing a token on Tezos blockchain, there is an important yet not enough covered aspect related to how various ecosystem applications (wallets, explorers, marketplaces, and others) will display and interact with it." +network: "tezos" --- # Metadata interface -::banner{type="warning"} -This page currently describes the metadata interface only for Tezos blockchain. For EVM-based chains the process is roughly the same. We will add the EVM-specific details later. 
-:: - When issuing a token on Tezos blockchain, there is an important yet not enough covered aspect related to how various ecosystem applications (wallets, explorers, marketplaces, and others) will display and interact with it. It's about token metadata, stored wholly or partially on-chain but intended for off-chain use only. ## Token metadata standards @@ -124,4 +121,4 @@ TzKT can be configured to subscribe to one or multiple DipDup metadata sources, * Tezos Domains metadata indexer [GitHub](https://github.com/dipdup-io/tezos-domains) | [Playground](https://play.dipdup.io/) * Ubisoft Quartz metadata indexer [GitHub](https://github.com/dipdup-io/quartz-metadata) | [Playground](https://play.dipdup.io/) -![TzKT token metadata flow](../assets/metadata_interface.svg) +![TzKT token metadata flow](../../public/metadata_interface.svg) diff --git a/docs/5.advanced/2.environment-variables.md b/docs/5.advanced/2.environment-variables.md new file mode 100644 index 000000000..8060e6c91 --- /dev/null +++ b/docs/5.advanced/2.environment-variables.md @@ -0,0 +1,26 @@ +--- +title: "Environment variables" +description: "Feature flags allow users to modify parameters that affect the behavior of the whole framework. Choosing the right combination of flags for an indexer project can improve performance, reduce RAM consumption, or enable useful features." +--- + +# Environment variables + +Feature flags allow users to modify parameters that affect the behavior of the whole framework. Choosing the right combination of flags for an indexer project can improve performance, reduce RAM consumption, or enable useful features. + +DipDup uses multiple environment variables internally. They read once on process start and usually do not change during runtime. You can either set variables in active shell or create an env file and pass it with `-e` CLI option. See [Config](../1.getting-started/3.config.md#environment-variables) for more details. 
+ +| env variable | description | +| ------------------------- | ------------------------------------------------------------------------------------ | +| `DIPDUP_CI` | Running in GitHub Actions | +| `DIPDUP_DEBUG` | Enable debug logging and additional checks | +| `DIPDUP_DOCKER` | Running in Docker | +| `DIPDUP_JSON_LOG` | Log in JSON format | +| `DIPDUP_LOW_MEMORY` | Reduce the size of caches and buffers for low-memory environments (target is 256 MB) | +| `DIPDUP_NEXT` | Enable experimental features from the next major release that require schema changes | +| `DIPDUP_NO_SYMLINK` | Don't create magic symlink in the package root even when used as cwd | +| `DIPDUP_NO_VERSION_CHECK` | Disable warning about running unstable or out-of-date DipDup version | +| `DIPDUP_PACKAGE_PATH` | Disable package discovery and use the specified path | +| `DIPDUP_REPLAY_PATH` | Path to datasource replay files; used in tests (dev only) | +| `DIPDUP_TEST` | Running in pytest | + +You can also access these values as `dipdup.env` module attributes. diff --git a/docs/5.advanced/2.feature-flags.md b/docs/5.advanced/2.feature-flags.md deleted file mode 100644 index ed5de4f16..000000000 --- a/docs/5.advanced/2.feature-flags.md +++ /dev/null @@ -1,39 +0,0 @@ ---- -title: "Feature flags" -description: "Feature flags set in the `advanced` config section allow users to modify parameters that affect the behavior of the whole framework." ---- - -# Feature flags - -Feature flags allow users to modify parameters that affect the behavior of the whole framework. Choosing the right combination of flags for an indexer project can improve performance, reduce RAM consumption, or enable useful features. - -## Project config - -Flags related to the project are set in the `advanced` section of the config (most likely in `dipdup.yaml`). 
- -| flag | description | -| -------------------- | ---------------------------------------------------------------------------------------------------------------------- | -| `early_realtime` | Establish realtime connection and start collecting messages while sync is in progress (faster, but consumes more RAM). | -| `decimal_precision` | Overwrite precision if it's not guessed correctly based on project models. | -| `postpone_jobs` | Do not start job scheduler until all indexes reach the realtime state. | -| `skip_version_check` | Disable warning about running unstable or out-of-date DipDup version. | -| `rollback_depth` | A number of levels to keep for rollback. | -| `unsafe_sqlite` | Disable journaling and data integrity checks. Use only for testing. | - -## Environment variables - -DipDup uses multiple environment variables internally. They read once on process start and usually do not change during runtime. You can access them in the `dipdup.env` module to perform additional checks or modify the behavior of your project. 
- -| env variable | module path | description | -| ------------------------- | ----------------------------- | ------------------------------------------------------------------------------------ | -| `DIPDUP_CI` | `dipdup.env.CI` | Running in GitHub Actions | -| `DIPDUP_DEBUG` | `dipdup.env.DEBUG` | Enable debug logging and additional checks | -| `DIPDUP_DOCKER` | `dipdup.env.DOCKER` | Running in Docker | -| `DIPDUP_JSON_LOG` | `dipdup.env.JSON_LOG` | Log in JSON format | -| `DIPDUP_LOW_MEMORY` | `dipdup.env.LOW_MEMORY` | Reduce the size of caches and buffers for low-memory environments (target is 256 MB) | -| `DIPDUP_NEXT` | `dipdup.env.NEXT` | Enable experimental features from the next major release that require schema changes | -| `DIPDUP_NO_VERSION_CHECK` | `dipdup.env.NO_VERSION_CHECK` | Disable warning about running unstable or out-of-date DipDup version | -| `DIPDUP_NO_SYMLINK` | `dipdup.env.NO_SYMLINK` | Don't create magic symlink in the package root even when used as cwd | -| `DIPDUP_PACKAGE_PATH` | `dipdup.env.PACKAGE_PATH` | Disable package discovery and use the specified path | -| `DIPDUP_REPLAY_PATH` | `dipdup.env.REPLAY_PATH` | Path to datasource replay files; used in tests | -| `DIPDUP_TEST` | `dipdup.env.TEST` | Running in pytest | diff --git a/docs/5.advanced/3.monitoring.md b/docs/5.advanced/3.monitoring.md new file mode 100644 index 000000000..8a6bf28f0 --- /dev/null +++ b/docs/5.advanced/3.monitoring.md @@ -0,0 +1,123 @@ +--- +title: "Monitoring" +description: "To perform up-to-date and freshness checks, DipDup provides a standard REST endpoint you can use together with Betteruptime or similar services that can search for a keyword in the response." +--- + + + +# Monitoring + +It's important to configure your monitoring to know that your indexer is working properly. 
DipDup provides multiple ways to monitor your indexer: + +- Prometheus integration +- Internal database tables and views +- Monitoring API endpoint +- Sentry integration + +## Monitoring endpoint + +You can use API endpoint to get the various indexing stats. First, enable internal API in config: + +```yaml +api: {} +``` + +By default, DipDup will listen on `0.0.0.0:46339`. Query the `/performance` endpoint: + +```sh +curl http://0.0.0.0:46339/performance +``` + +## Internal tables + +DipDup uses a set of internal tables to keep track of indexing stats. See the [Internal tables](../1.getting-started/5.database.md#internal-tables) page for details. + +Additionally, metrics returned by the monitoring endpoint are stored in the `dipdup_meta` table in JSON format. You can use `dipdup_status` database view to get a nice summary. + +Via SQL: + +```sh +sqlite> select * from dipdup_status; +type name level size updated_at +---------- ------------------------- -------- ------ -------------------------------- +index eth_usdt_events 8211425 0 2024-09-09 20:33:07.482867+00:00 +datasource subsquid 20714000 0 2024-09-09 20:22:38.486122+00:00 +queue eth_usdt_events:realtime 0 0 2024-09-09 20:33:04.493736+00:00 +queue eth_usdt_events:readahead 0 9171 2024-09-09 20:33:04.493736+00:00 +cache model:Holder 0 262144 2024-09-09 20:33:04.493736+00:00 +``` + +Via Hasura: + +```sh +curl http://0.0.0.0:8000/api/rest/dipdup_head_status?name=eth_events +``` + +You can also create your custom alert endpoints using SQL views and functions; see the [SQL scripts](../1.getting-started/5.database.md#sql-scripts) page for details. + +## Sentry + +Sentry is an error tracking software that can be used either as a service or on-premise. It dramatically improves the troubleshooting experience and requires nearly zero configuration. 
To start catching exceptions with Sentry in your project, add the following section in `dipdup.yaml` config: + +```yaml [dipdup.yaml] +sentry: + dsn: https://example.com + environment: dev + debug: False +``` + +You can obtain Sentry DSN from the web interface at _Settings -> Projects -> -> Client Keys (DSN)_. The cool thing is that if you catch an exception and suspect there's a bug in DipDup, you can share this event with us using a public link (created at _Share_ menu). + +## Prometheus + +DipDup provides integration with the Prometheus monitoring system. To enable the integration and listen on `0.0.0.0:9000`, add the following section to the config: + +```yaml +prometheus: {} +``` + +The following metrics are exposed under `dipdup` namespace: + +| metric name | description | +| ---------------------------------------------- | ---------------------------------------------- | +| `dipdup_indexes_total` | Number of indexes in operation by status | +| `dipdup_index_total_sync_duration_seconds` | Duration of the last synchronization | +| `dipdup_index_total_realtime_duration_seconds` | Duration of the last realtime queue processing | +| `dipdup_index_levels_to_sync_total` | Number of levels to reach synced state | +| `dipdup_index_levels_to_realtime_total` | Number of levels to reach realtime state | +| `dipdup_index_handlers_matched_total` | Index total hits | +| `dipdup_datasource_head_updated_timestamp` | Timestamp of the last head update | +| `dipdup_datasource_rollbacks_total` | Number of rollbacks | +| `dipdup_http_errors_total` | Number of HTTP errors | + +Also, DipDup exposes the following metrics for compatibility with [Subsquid Cloud](https://app.subsquid.io/): + +| metric name | description | +| ------------------------------------------ | --------------------------------------------------------- | +| `sqd_processor_last_block` | Level of the last processed block from Subsquid Network | +| `sqd_processor_chain_height` | Current chain height as reported by 
Subsquid Network | +| `sqd_processor_archive_http_errors_in_row` | Number of consecutive failed requests to Subsquid Network | + +## Logging + +To control the number of logs DipDup produces, set the `logging` field in config. It can be either a string or a mapping from logger names to logging levels. + +```yaml [dipdup.yaml] +# Configure dipdup and package loggers +logging: WARN + +# Increase verbosity of some loggers +logging: + dipdup.database: DEBUG + aiosqlite: DEBUG + +# Enable ALL logs +logging: + '': DEBUG +``` + +By default only `dipdup` and `dipdup_indexer` namespace loggers are configured. DipDup loggers are usually named after the corresponding module, e.g., `dipdup.database` or `dipdup.index`. + +Keep in mind, that excessive logging can affect indexing performance. + +If you need your logs in JSON format, use `DIPDUP_JSON_LOG=1` environment variable. diff --git a/docs/5.advanced/3.sql.md b/docs/5.advanced/3.sql.md deleted file mode 100644 index e33a62dac..000000000 --- a/docs/5.advanced/3.sql.md +++ /dev/null @@ -1,65 +0,0 @@ ---- -title: "Advanced SQL" -description: "Put your *.sql scripts to dipdup_indexer/sql. You can run these scripts from any callback with ctx.execute_sql('name'). If name is a directory, each script it contains will be executed." ---- - -# Advanced SQL - -## Internal tables - -Several tables having `dipdup_` prefix are created by DipDup automatically and are not intended to be modified by the user. However, they can be useful for external monitoring and debugging. - -| table | description | -|:-------------------------- |:----------------------------------------------------------------------------------------------------------------------------------------- | -| `dipdup_schema` | Information about database schema in use including hash to detect changes that require reindexing. | -| `dipdup_head` | The latest block received by index datasources in realtime state. Indicates that underlying datasource is ok. 
| -| `dipdup_index` | Everything about specific indexes from config: status, current level, template and its values if applicable. | -| `dipdup_contract` | Info about contracts used by all indexes, including ones added in runtime. | -| `dipdup_model_update` | Service table to store model diffs for database rollback. Configured by `advanced.rollback_depth` | -| `dipdup_meta` | Arbitrary key-value storage for DipDup internal use. Survives reindexing. You can use it too, but don't touch keys with `dipdup_` prefix. | -| `dipdup_contract_metadata` | See [Metadata interface](4.metadata-interface.md) | -| `dipdup_token_metadata` | See [Metadata interface](4.metadata-interface.md) | - -See [`dipdup.models` module](https://github.com/dipdup-io/dipdup/blob/next/src/dipdup/models/__init__.py) for exact table definitions. - -If you want to use these tables in monitoring, here are some SQL snippets to help you get started: - -```sql --- Time since last block received by index datasources -SELECT name, NOW() - timestamp FROM dipdup_head; - --- Index statuses -SELECT name, status FROM dipdup_index; - --- Get last reindex time -SELECT created_at FROM dipdup_schema WHERE name = 'public'; -``` - -## Scripts - -Put your `*.sql` scripts to `{{ project.package }}/sql`. You can run these scripts from any callback with `ctx.execute_sql('name')`. If `name` is a directory, each script it contains will be executed. - -Scripts are executed without being wrapped with SQL transactions. It's generally a good idea to avoid touching table data in scripts. - -By default, an empty `sql/` directory is generated for every hook in config during init. Remove `ctx.execute_sql` call from hook callback to avoid executing them. 
- -```python -# Execute all scripts in sql/my_hook directory -await ctx.execute_sql('my_hook') - -# Execute a single script -await ctx.execute_sql('my_hook/my_script.sql') -``` - -## Managing schema - -When using PostgreSQL or Timescale as database engine you can use `dipdup_approve` and `dipdup_wipe` functions to manage schema state from SQL console if needed: - -```sql -SELECT dipdup_approve('public'); - --- WARNING: This action is irreversible! All indexed data will be lost! -SELECT dipdup_wipe('public'); -``` - -Please note that `dipdup_wipe` function doesn't support preserving immune tables. diff --git a/docs/5.advanced/5.performance.md b/docs/5.advanced/4.performance.md similarity index 100% rename from docs/5.advanced/5.performance.md rename to docs/5.advanced/4.performance.md diff --git a/docs/6.deployment/6.backups.md b/docs/5.advanced/5.backups.md similarity index 100% rename from docs/6.deployment/6.backups.md rename to docs/5.advanced/5.backups.md diff --git a/docs/5.advanced/6.sqd-cloud.md b/docs/5.advanced/6.sqd-cloud.md new file mode 100644 index 000000000..97c99047a --- /dev/null +++ b/docs/5.advanced/6.sqd-cloud.md @@ -0,0 +1,65 @@ +--- +title: "Squid Cloud" +description: "Deploy DipDup indexer to Subsquid Cloud" +--- + +# Deploying to Subsquid Cloud + +To deploy DipDup indexer to Subsquid Cloud, you need to create two files, `squid.yaml` project manifest for `sqd` tool and a separate config `configs/dipdup.squid-cloud.yaml`. 
+ +```yaml [squid.yaml] +manifestVersion: subsquid.io/v0.1 +name: {{ project.package }} +version: 1 +description: {{ project.description }} +build: +deploy: + env: + HASURA_GRAPHQL_ADMIN_SECRET: "${{ secrets.HASURA_SECRET }}" + HASURA_GRAPHQL_UNAUTHORIZED_ROLE: user + HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES: "true" + # SENTRY_DSN: "${{ secrets.SENTRY_DSN }}" + # NODE_API_KEY: "${{ secrets.NODE_API_KEY }}" + # + # Only for free tier: + DIPDUP_LOW_MEMORY: "1" + addons: + postgres: + hasura: + processor: + cmd: ["dipdup", "-c", "dipdup.yaml", "-c", "configs/dipdup.squid-cloud.yaml", "run"] + init: + cmd: ["echo", "dipdup"] +``` + +```yaml [configs/dipdup.squid-cloud.yaml] +database: + kind: postgres + host: ${DB_HOST:-db} + port: ${DB_PORT} + user: ${DB_USER:-dipdup} + password: ${DB_PASS} + database: ${DB_NAME:-dipdup} + +hasura: + url: http://${HASURA_HOST:-hasura}:8080 + admin_secret: ${HASURA_GRAPHQL_ADMIN_SECRET} + allow_aggregations: ${HASURA_ALLOW_AGGREGATIONS:-true} + select_limit: ${HASURA_SELECT_LIMIT:-10000} + camel_case: ${HASURA_CAMEL_CASE:-true} + +sentry: + dsn: ${SENTRY_DSN:-''} + environment: ${SENTRY_ENVIRONMENT:-''} + +prometheus: + host: 0.0.0.0 + port: 3000 + +api: + host: 0.0.0.0 +``` + +Pay attention to paths and environment variables. Run `dipdup init` to create default env file in `deploy` directory. + +Proceed to [Deployment workflow](https://docs.sqd.dev/cloud/overview) guide in Subsquid docs skipping the "Edit the squid.yaml file" section. diff --git a/docs/6.deployment/3.sentry.md b/docs/6.deployment/3.sentry.md deleted file mode 100644 index 6fabeb85d..000000000 --- a/docs/6.deployment/3.sentry.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: "Sentry" -description: "Sentry is an error tracking software that can be used either as a service or on-premise. It dramatically improves the troubleshooting experience and requires nearly zero configuration." 
---- - -# Sentry integration - -Sentry is an error tracking software that can be used either as a service or on-premise. It dramatically improves the troubleshooting experience and requires nearly zero configuration. To start catching exceptions with Sentry in your project, add the following section in `dipdup.yaml` config: - -```yaml [dipdup.yaml] -sentry: - dsn: https://... - environment: dev - debug: False -``` - -You can obtain Sentry DSN from the web interface at _Settings -> Projects -> -> Client Keys (DSN)_. The cool thing is that if you catch an exception and suspect there's a bug in DipDup, you can share this event with us using a public link (created at _Share_ menu). diff --git a/docs/6.deployment/4.prometheus.md b/docs/6.deployment/4.prometheus.md deleted file mode 100644 index 382d3a25a..000000000 --- a/docs/6.deployment/4.prometheus.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -title: "Prometheus" -description: "DipDup provides basic integration with the Prometheus monitoring system by exposing some metrics." ---- - -# Prometheus integration - -DipDup provides basic integration with the Prometheus monitoring system by exposing some metrics. - -When running DipDup in Docker make sure that the Prometheus instance is in the same network. 
- -## Available metrics - -The following metrics are exposed under `dipdup` namespace: - -| metric name | description | -| ---------------------------------------------- | ---------------------------------------------- | -| `dipdup_indexes_total` | Number of indexes in operation by status | -| `dipdup_index_total_sync_duration_seconds` | Duration of the last synchronization | -| `dipdup_index_total_realtime_duration_seconds` | Duration of the last realtime queue processing | -| `dipdup_index_levels_to_sync_total` | Number of levels to reach synced state | -| `dipdup_index_levels_to_realtime_total` | Number of levels to reach realtime state | -| `dipdup_index_handlers_matched_total` | Index total hits | -| `dipdup_datasource_head_updated_timestamp` | Timestamp of the last head update | -| `dipdup_datasource_rollbacks_total` | Number of rollbacks | -| `dipdup_http_errors_total` | Number of HTTP errors | - -Also, DipDup exposes the following metrics for compatibility with Subsquid Cloud: - -| metric name | description | -| ------------------------------------------ | --------------------------------------------------------- | -| `sqd_processor_last_block` | Level of the last processed block from Subsquid Network | -| `sqd_processor_chain_height` | Current chain height as reported by Subsquid Network | -| `sqd_processor_archive_http_errors_in_row` | Number of consecutive failed requests to Subsquid Network | - -If you need more complex metrics, consider querying [Internal tables](../5.advanced/3.sql.md#internal-tables). diff --git a/docs/6.deployment/5.logging.md b/docs/6.deployment/5.logging.md deleted file mode 100644 index 5b8408a9b..000000000 --- a/docs/6.deployment/5.logging.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -title: "Logging" -description: "To control the number of logs DipDup produces, set the `logging` field in config. It can be either a string or a mapping from logger names to logging levels." 
---- - -# Logging - -To control the number of logs DipDup produces, set the `logging` field in config. It can be either a string or a mapping from logger names to logging levels. - -```yaml [dipdup.yaml] -# Configure dipdup and package loggers -logging: WARN - -# Increase verbosity of some loggers -logging: - dipdup.database: DEBUG - aiosqlite: DEBUG - -# Enable ALL logs -logging: - '': DEBUG -``` - -By default only `dipdup` and `{{ project.package }}` namespace loggers are configured. DipDup loggers are usually named after the corresponding module, e.g., `dipdup.database` or `dipdup.index`. - -Keep in mind, that excessive logging can affect indexing performance. diff --git a/docs/6.deployment/7.monitoring.md b/docs/6.deployment/7.monitoring.md deleted file mode 100644 index 452e7ec97..000000000 --- a/docs/6.deployment/7.monitoring.md +++ /dev/null @@ -1,63 +0,0 @@ ---- -title: "Monitoring" -description: "To perform up-to-date and freshness checks, DipDup provides a standard REST endpoint you can use together with Betteruptime or similar services that can search for a keyword in the response." ---- - -# Monitoring - -To perform up-to-date and freshness checks, DipDup provides a standard REST endpoint you can use together with Betteruptime or similar services that can search for a keyword in the response. - -This check says that DipDup is not stuck and keeps receiving new data (the last known block timestamp is not older than **three minutes** from now). Note that this is not enough to ensure everything works as expected. But it can at least cover the cases when datasource API is down or your indexer has crashed. 
- -## URI format - -```text -https:///api/rest/dipdup_head_status?name= -``` - -If you have camel case enabled in the Hasura config: - -```text -https:///api/rest/dipdupHeadStatus?name= -``` - -For example: - -* [https://domains.dipdup.net/api/rest/dipdup_head_status?name=mainnet](https://domains.dipdup.net/api/rest/dipdup_head_status?name=mainnet) -* [https://juster.dipdup.net/api/rest/dipdupHeadStatus?name=mainnet](https://domains.dipdup.net/api/rest/dipdup_head_status?name=mainnet) - -### Response - -If the (latest block) head subscription state was updated less than **three minutes** ago, everything is **OK**: - -```json -{ - "dipdup_head_status": [ - { - "status": "OK" - } - ] -} -``` - -Otherwise, the state is considered **OUTDATED**: - -```json -{ - "dipdup_head_status": [ - { - "status": "OUTDATED" - } - ] -} -``` - -### Custom checks - -The default check looks like the following: - -```sql -{{ #include ../src/dipdup/sql/dipdup_head_status.sql }} -``` - -You can also create your custom alert endpoints using SQL views and functions and then convert them to Hasura REST endpoints. diff --git a/docs/6.deployment/_dir.yml b/docs/6.deployment/_dir.yml deleted file mode 100644 index 1d6b256a4..000000000 --- a/docs/6.deployment/_dir.yml +++ /dev/null @@ -1 +0,0 @@ -navigation.icon: "upload" diff --git a/docs/7.references/1.cli.md b/docs/7.references/1.cli.md index f17c2eab1..fb320104b 100644 --- a/docs/7.references/1.cli.md +++ b/docs/7.references/1.cli.md @@ -47,6 +47,12 @@ description: "Command-line interface reference"

A path to .env file containing KEY=value strings.

+
+
+-C <NAME>
+

A shorthand for -c . -c configs/dipdup.<name>.yaml

+
+

Environment variables

@@ -454,6 +460,12 @@ Discord: +
+-e, --editable
+

Install DipDup in editable mode.

+
+ diff --git a/docs/7.references/2.config.md b/docs/7.references/2.config.md index 0d93208a9..4cab7c06e 100644 --- a/docs/7.references/2.config.md +++ b/docs/7.references/2.config.md @@ -15,7 +15,7 @@ description: "Config file reference"
Parameters:
    -
  • spec_version (Annotated[str | float, BeforeValidator(func=~dipdup.config.<lambda>)]) – Version of config specification, currently always 3.0

  • +
  • spec_version (ToStr) – Version of config specification, currently always 3.0

  • package (str) – Name of indexer’s Python package, existing or not

  • datasources (dict[str, CoinbaseDatasourceConfig | AbiEtherscanDatasourceConfig | HttpDatasourceConfig | IpfsDatasourceConfig | EvmSubsquidDatasourceConfig | EvmNodeDatasourceConfig | TzipMetadataDatasourceConfig | TezosTzktDatasourceConfig | StarknetSubsquidDatasourceConfig | StarknetNodeDatasourceConfig]) – Mapping of datasource aliases and datasource configs

  • database (SqliteDatabaseConfig | PostgresDatabaseConfig) – Database config

  • @@ -70,7 +70,7 @@ description: "Config file reference" ## dipdup.config.AdvancedConfig -class dipdup.config.AdvancedConfig(reindex=None, scheduler=None, postpone_jobs=False, early_realtime=False, skip_version_check=False, rollback_depth=None, decimal_precision=None, unsafe_sqlite=False, alt_operation_matcher=False, **kwargs) +class dipdup.config.AdvancedConfig(reindex=None, scheduler=None, postpone_jobs=False, early_realtime=False, rollback_depth=None, decimal_precision=None, unsafe_sqlite=False, alt_operation_matcher=False, **kwargs)

    This section allows users to tune some system-wide options, either experimental or unsuitable for generic configurations.

    Parameters:
    @@ -79,7 +79,6 @@ description: "Config file reference"
  • scheduler (dict[str, Any] | None) – apscheduler scheduler config.

  • postpone_jobs (bool) – Do not start job scheduler until all indexes reach the realtime state.

  • early_realtime (bool) – Establish realtime connection and start collecting messages while sync is in progress (faster, but consumes more RAM).

  • -
  • skip_version_check (bool) – Disable warning about running unstable or out-of-date DipDup version.

  • rollback_depth (int | None) – A number of levels to keep for rollback.

  • decimal_precision (int | None) – Overwrite precision if it’s not guessed correctly based on project models.

  • unsafe_sqlite (bool) – Disable journaling and data integrity checks. Use only for testing.

  • @@ -171,8 +170,8 @@ description: "Config file reference"
    Parameters:
    • kind (Literal['evm']) – Always evm

    • -
    • address (Annotated[str, BeforeValidator(func=~dipdup.config.<lambda>), AfterValidator(func=~dipdup.config.evm._validate_evm_address)] | None) – Contract address

    • -
    • abi (Annotated[str, BeforeValidator(func=~dipdup.config.<lambda>), AfterValidator(func=~dipdup.config.evm._validate_evm_address)] | None) – Contract ABI

    • +
    • address (EvmAddress | None) – Contract address

    • +
    • abi (EvmAddress | None) – Contract ABI

    • typename (str | None) – Alias for the contract script

    @@ -190,8 +189,8 @@ description: "Config file reference"
    Parameters:
    • kind (Literal['evm.node']) – Always ‘evm.node’

    • -
    • url (Annotated[str, BeforeValidator(func=~dipdup.config.<lambda>)]) – EVM node URL

    • -
    • ws_url (Annotated[str, BeforeValidator(func=~dipdup.config.<lambda>)] | None) – EVM node WebSocket URL

    • +
    • url (Url) – EVM node URL

    • +
    • ws_url (WsUrl | None) – EVM node WebSocket URL

    • http (HttpConfig | None) – HTTP client configuration

    • rollback_depth (int) – A number of blocks to store in database for rollback

    • @@ -248,7 +247,7 @@ description: "Config file reference"
      Parameters:
      • kind (Literal['evm.subsquid']) – always ‘evm.subsquid’

      • -
      • url (Annotated[str, BeforeValidator(func=~dipdup.config.<lambda>)]) – URL of Subsquid Network API

      • +
      • url (Url) – URL of Subsquid Network API

      • http (HttpConfig | None) – HTTP client configuration

      @@ -339,7 +338,7 @@ description: "Config file reference"
      Parameters:
        -
      • url (Annotated[str, BeforeValidator(func=~dipdup.config.<lambda>)]) – URL of the Hasura instance.

      • +
      • url (Url) – URL of the Hasura instance.

      • admin_secret (str | None) – Admin secret of the Hasura instance.

      • create_source (bool) – Whether source should be added to Hasura if missing.

      • source (str) – Hasura source for DipDup to configure, others will be left untouched.

      • @@ -638,8 +637,8 @@ description: "Config file reference"
        Parameters:
        • kind (Literal['tezos']) – Always tezos

        • -
        • address (Annotated[str, AfterValidator(func=~dipdup.config.tezos._validate_tezos_address)] | None) – Contract address

        • -
        • code_hash (int | Annotated[str, AfterValidator(func=~dipdup.config.tezos._validate_tezos_address)] | None) – Contract code hash or address to fetch it from

        • +
        • address (TezosAddress | None) – Contract address

        • +
        • code_hash (int | TezosAddress | None) – Contract code hash or address to fetch it from

        • typename (str | None) – Alias for the contract script

        @@ -971,7 +970,7 @@ description: "Config file reference"
        Parameters:
        • kind (Literal['tezos.tzkt']) – always ‘tezos.tzkt’

        • -
        • url (Annotated[str, BeforeValidator(func=~dipdup.config.<lambda>)]) – Base API URL, e.g. https://api.tzkt.io/

        • +
        • url (Url) – Base API URL, e.g. https://api.tzkt.io/

        • http (HttpConfig | None) – HTTP client configuration

        • buffer_size (int) – Number of levels to keep in FIFO buffer before processing

        • merge_subscriptions (bool) – Whether to merge realtime subscriptions

        • @@ -1071,8 +1070,8 @@ description: "Config file reference"
          Parameters:
          • kind (Literal['starknet']) – Always starknet

          • -
          • address (Annotated[str, BeforeValidator(func=~dipdup.config.<lambda>), AfterValidator(func=~dipdup.config.starknet._validate_starknet_address)] | None) – Contract address

          • -
          • abi (Annotated[str, BeforeValidator(func=~dipdup.config.<lambda>), AfterValidator(func=~dipdup.config.starknet._validate_starknet_address)] | None) – Contract ABI

          • +
          • address (StarknetAddress | None) – Contract address

          • +
          • abi (StarknetAddress | None) – Contract ABI

          • typename (str | None) – Alias for the contract script

          @@ -1147,8 +1146,8 @@ description: "Config file reference"
          Parameters:
          • kind (Literal['starknet.node']) – Always ‘starknet.node’

          • -
          • url (Annotated[str, BeforeValidator(func=~dipdup.config.<lambda>)]) – Starknet node URL

          • -
          • ws_url (Annotated[str, BeforeValidator(func=~dipdup.config.<lambda>)] | None) – Starknet node WebSocket URL

          • +
          • url (Url) – Starknet node URL

          • +
          • ws_url (WsUrl | None) – Starknet node WebSocket URL

          • http (HttpConfig | None) – HTTP client configuration

          • rollback_depth (int) – A number of blocks to store in database for rollback

          • @@ -1167,7 +1166,7 @@ description: "Config file reference"
            Parameters:
            • kind (Literal['starknet.subsquid']) – always ‘starknet.subsquid’

            • -
            • url (Annotated[str, BeforeValidator(func=~dipdup.config.<lambda>)]) – URL of Subsquid Network API

            • +
            • url (Url) – URL of Subsquid Network API

            • http (HttpConfig | None) – HTTP client configuration

            diff --git a/docs/7.references/3.context.md b/docs/7.references/3.context.md index ed15cf9fc..84ba2c739 100644 --- a/docs/7.references/3.context.md +++ b/docs/7.references/3.context.md @@ -268,9 +268,9 @@ description: "Context reference"
            -## dipdup.context.DipDupContext.get_metadata_datasource +## dipdup.context.DipDupContext.get_tzip_metadata_datasource -DipDupContext.get_metadata_datasource(name) +DipDupContext.get_tzip_metadata_datasource(name)

            Get metadata datasource by name

            Parameters:
            diff --git a/docs/8.examples/2.in-production.md b/docs/8.examples/2.in-production.md index 6a5a8f307..b5fab3618 100644 --- a/docs/8.examples/2.in-production.md +++ b/docs/8.examples/2.in-production.md @@ -23,7 +23,7 @@ Rarible is a multichain community-centric NFT marketplace, that also allows to t Ubisoft Quartz is a new platform for players to get Digits, the first NFTs playable in AAA games. Under the hood, Quartz uses Aleph as persistent token metadata storage and a non-standard token metadata signaling schema. To enable wallets and other TzKT API consumers with Quartz NFTs data we created a custom indexing plugin. -* [Metadata interface](../5.advanced/4.metadata-interface.md) +* [Metadata interface](../5.advanced/11.metadata-interface.md) ## Youves diff --git a/docs/8.examples/_demos_table.md b/docs/8.examples/_demos_table.md index 5a9f43c92..5838d123d 100644 --- a/docs/8.examples/_demos_table.md +++ b/docs/8.examples/_demos_table.md @@ -1,22 +1,22 @@ | name | network | description | source | |-|-|-|-| -| demo_blank | | Empty config for a fresh start | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_blank) | -| demo_evm_events | EVM | ERC-20 token transfers (from event logs) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_evm_events) | -| demo_evm_transactions | EVM | ERC-20 token transfers (from transactions) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_evm_transactions) | -| demo_evm_uniswap | EVM | Uniswap V3 pools, positions, etc. 
(advanced, uses TimescaleDB) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_evm_uniswap) | -| demo_starknet_events | Starknet | ERC-20 token transfers (from events) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_starknet_events) | -| demo_tezos_auction | Tezos | NFT marketplace (TzColors) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_auction) | -| demo_tezos_big_maps | Tezos | Indexing specific big maps | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_big_maps) | -| demo_tezos_dao | Tezos | DAO registry (Homebase DAO) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_dao) | -| demo_tezos_dex | Tezos | DEX balances and liquidity (Quipuswap) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_dex) | -| demo_tezos_domains | Tezos | Domain name service (Tezos Domains) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_domains) | -| demo_tezos_etherlink | Tezos | Etherlink smart rollup transactions | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_etherlink) | -| demo_tezos_events | Tezos | Processing contract events | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_events) | -| demo_tezos_factories | Tezos | Example of spawning indexes in runtime | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_factories) | -| demo_tezos_head | Tezos | Processing head block metadata (realtime only) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_head) | -| demo_tezos_nft_marketplace | Tezos | NFT marketplace (hic at nunc) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_nft_marketplace) | -| demo_tezos_raw | Tezos | Process raw operations without filtering and typed payloads | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_raw) | -| demo_tezos_token | Tezos | FA1.2 token contract operations 
| [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_token) | -| demo_tezos_token_balances | Tezos | FA1.2 token balances | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_token_balances) | -| demo_tezos_token_transfers | Tezos | FA1.2 token transfers | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0b5/src/demo_tezos_token_transfers) | +| demo_blank | | Empty config for a fresh start | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_blank) | +| demo_evm_events | EVM | ERC-20 token transfers (from event logs) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_evm_events) | +| demo_evm_transactions | EVM | ERC-20 token transfers (from transactions) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_evm_transactions) | +| demo_evm_uniswap | EVM | Uniswap V3 pools, positions, etc. (advanced, uses TimescaleDB) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_evm_uniswap) | +| demo_starknet_events | Starknet | ERC-20 token transfers (from events) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_starknet_events) | +| demo_tezos_auction | Tezos | NFT marketplace (TzColors) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_auction) | +| demo_tezos_big_maps | Tezos | Indexing specific big maps | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_big_maps) | +| demo_tezos_dao | Tezos | DAO registry (Homebase DAO) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_dao) | +| demo_tezos_dex | Tezos | DEX balances and liquidity (Quipuswap) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_dex) | +| demo_tezos_domains | Tezos | Domain name service (Tezos Domains) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_domains) | +| demo_tezos_etherlink | Tezos | Etherlink smart rollup transactions | 
[link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_etherlink) | +| demo_tezos_events | Tezos | Processing contract events | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_events) | +| demo_tezos_factories | Tezos | Example of spawning indexes in runtime | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_factories) | +| demo_tezos_head | Tezos | Processing head block metadata (realtime only) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_head) | +| demo_tezos_nft_marketplace | Tezos | NFT marketplace (hic at nunc) | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_nft_marketplace) | +| demo_tezos_raw | Tezos | Process raw operations without filtering and typed payloads | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_raw) | +| demo_tezos_token | Tezos | FA1.2 token contract operations | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_token) | +| demo_tezos_token_balances | Tezos | FA1.2 token balances | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_token_balances) | +| demo_tezos_token_transfers | Tezos | FA1.2 token transfers | [link](https://github.com/dipdup-io/dipdup/tree/8.0.0/src/demo_tezos_token_transfers) | diff --git a/docs/9.release-notes/1.v8.0.md b/docs/9.release-notes/1.v8.0.md index bfac7890b..7836185fb 100644 --- a/docs/9.release-notes/1.v8.0.md +++ b/docs/9.release-notes/1.v8.0.md @@ -1,48 +1,44 @@ --- -title: 8.0.0b3 +title: "8.0" description: DipDup 8.0 release notes --- + + # Release Notes: 8.0 -::banner{type="warning"} -This page describes pre-release version of DipDup. API and features are subject to change. -:: +Welcome to DipDup 8.0 release notes! -Welcome to DipDup 8.0 release notes! This major release expands DipDup's functionality and improves indexing performance and developer experience. 
Key highlights of this release are: +This major release contains lots of new features and improvements both for existing users and newcomers. Key highlights include Starknet support, updated Python 3.12 environment, improved performance, handler batching, new CLI commands. As always, developer experience is at the top of our list, so coding with DipDup 8.0 is more enjoyable than ever. After three months in beta stage, we consider the 8.0 branch polished enough for a stable release. This article will guide you through the most significant changes and show you how to upgrade your projects. -- Starknet support 🐺 -- Updated Python 3.12 environment -- New config specification -- Useful CLI commands, tunables and other quality-of-life improvements +## Starknet support -## GM, Starknet users +GM, Starknet! 🐺 -[Starknet](https://docs.starknet.io/) is a permissionless zero-knowledge (ZK) rollup that operates on top of Ethereum, enabling dApps to achieve massive computational scale without compromising on Ethereum’s composability and security. +[Starknet](https://docs.starknet.io/) is a permissionless zero-knowledge (ZK) rollup on Ethereum, allowing developers to scale their dapps without compromising on security and composability of the Ethereum ecosystem. -We welcome Starknet to the large family of DipDup-supported networks! DipDup 8.0 introduces a new index kind `starknet.events` and a new datasource `starknet.subsquid` to work with Starknet events. Support for optional node datasources and more index kinds is coming soon. +We welcome Starknet to the large family of supported networks! DipDup 8.0 introduces a new index kind [`starknet.events`](../2.indexes/3.starknet_events.md) and new datasources [`starknet.subsquid`](../3.datasources/4.starknet_subsquid.md), [`starknet.node`](../3.datasources/5.starknet_node.md) to work with Starknet events. -Starknet contracts are written in [Cairo](https://github.com/starkware-libs/cairo), a specialized programming language. 
It's not EVM-compatible, but many concepts are similar. As a DipDup user, you don't need to know the difference in detail to start working with Starknet. Just add the index definition to the project config, place the contract ABI to `abi//cairo_abi.json` and DipDup will generate Python types and handler stubs for you. You can use [Starkscan](https://starkscan.co/contract/0x068f5c6a61780768455de69077e07e89787839bf8166decfbf92b645209c0fb8#class-code-history) explorer to get the ABI and other information about the contract. +Starknet smart contracts are written in [Cairo](https://github.com/starkware-libs/cairo) language. It's not EVM-compatible, but many concepts are similar. To start indexing Starknet events, you need to add a new index definition to the project config, then place the contract ABI to `abi//cairo_abi.json` file and run `dipdup init` command to generate Python types and handler stubs. You can use [Starkscan](https://starkscan.co/contract/0x068f5c6a61780768455de69077e07e89787839bf8166decfbf92b645209c0fb8#class-code-history) explorer to get the ABI and other information about the contract. We are going to add support for automatic fetching ABIs from node in the future. -To start building your indexer, run `dipdup new` and select the `demo_starknet_events` project as a template. +Follow the [Starknet Quickstart](../0.quickstart-starknet.md) guide to get started or run `dipdup new` and choose `demo_starknet_events` template. ## Updated Python 3.12 environment -One of the DipDup features is a unified Python environment across all projects to simplify deployment and maintenance. DipDup indexers now run on Python 3.12, enhancing both language features and performance. We've also updated our core dependencies to their latest versions, including: +DipDup indexers run in unified Python environment kept stable between releases to simplify deployment and maintenance. 
DipDup 8.0 introduces a major update including the following notable changes: -- pydantic 2.7 with significantly faster (de)serialization and powerful validators. -- tortoise-orm 0.21.2 with better Pydantic integration and a bunch of bugfixes and optimizations. -- web3 6.19 with the latest EIP and RPC changes. +- [Python 3.12](https://docs.python.org/release/3.12.0/whatsnew/3.12.html) with the latest language features and performance improvements. +- [pydantic 2.9](https://docs.pydantic.dev/dev/migration/#migration-guide) with significantly faster (de)serialization and powerful validators. +- [tortoise-orm 0.21.6](https://tortoise.github.io/CHANGELOG.html) with better Pydantic integration and a bunch of bugfixes and optimizations. +- [web3-py 7.2](https://web3py.readthedocs.io/en/stable/migration.html#migrating-v6-to-v7) with the latest EIP and RPC changes. -For more details, refer to the changelog. The update has also led to higher and more consistent indexing throughput; our tests show an average performance improvement of 10-20%. We are going to continue working on performance optimization in the future. +Make sure to visit the docs of corresponding libraries to learn about the important changes. ## New config specification DipDup 8.0 introduces an updated configuration specification for better flexibility and extensibility. Previously, every index definition was linked to a single "index datasource", which in turn could be linked to one or more complementary ones. This approach appeared to be limiting, and also confusing, since Subsquid and node RPC datasources could be used interchangeably despite the difference in indexing speed. -In the new spec version 3.0, an index can have any number of attached datasources. DipDup will choose the most suitable one for each stage of the process. For load balancing purposes, if multiple node datasources are attached, a random one will be chosen for each request. 
When applicable, DipDup will consider the order of datasources in the config file. - -The naming convention for index kinds has been updated to reflect these changes. They now consist of two parts: network and data type, without the datasource one. +In the new spec version 3.0, an index can have any number of attached datasources. DipDup will choose the most suitable one for each stage of the process. For load balancing purposes, if multiple node datasources are attached, a random one will be chosen for each request. When applicable, DipDup will consider the order of datasources in the config file. The naming convention for index kinds has been updated to reflect these changes. They now consist of two parts: network and data type, without the datasource one. ```yaml spec_version: 3.0 # <- was `2.0` @@ -58,20 +54,34 @@ indexes: - another_evm_node ``` +JSONSchema of DipDup config specification was uploaded to [SchemaStore](https://github.com/SchemaStore/schemastore) catalog. That means config file validation and auto-completion are available in major IDEs without additional configuration. + + + +## Handler batching + +DipDup 8.0 introduces a new `batch` handler to modify higher-level indexing logic. Examples could be skipping whole blocks by condition or recalculating some data between fixed intervals. Currently, a single batch contains the handlers matched within one block, but the size of the handler batch (and therefore the database transaction) is going to be configurable in the future. + +```python [handlers/batch.py] +async def batch( + ctx: HandlerContext, + handlers: Iterable[MatchedHandler], +) -> None: + for handler in handlers: + await ctx.fire_matched_handler(handler) +``` + ## Migration from 7.5 -Existing projects require semi-automatic migration. Please follow the steps below to update your project to DipDup 8.0. +Existing projects require manual migration, but some steps are automated. Please follow the steps below to upgrade to 8.0. - -1. 
Make sure you have Python 3.12 installed. -2. Update the current DipDup installation. Run `dipdup self uninstall`, then `curl -Lsf https://dev.dipdup.io/install.py | python3.12 - --pre`. +1. Make sure you have Python 3.12 installed; `which python3.12` command will help you to check that. +2. Update the current DipDup installation. Run `dipdup self uninstall`, then `curl -Lsf https://dipdup.io/install.py | python3.12`. 3. Enter the project directory, but do not activate the virtual environment. Run the `dipdup migrate` command. It will update your config files and generate a new package structure. Modules requiring manual migration will be moved to `.old` path; leave them as is for now. Review and commit the changes. 4. Run `dipdup init --base --force` command to update pyproject.toml and other metadata files. Recreate and enter the virtual environment. For PDM run `rm -rf .venv pdm.lock && pdm venv create python3.12 && pdm install && $(pdm venv activate)`. For Poetry run `rm -rf .venv poetry.lock && poetry install && poetry shell`. Review and commit the changes. -5. Move the callback function bodies from `<module>.old` to `<module>` files. Review and commit the changes. -6. Run `make all` to check if everything works as expected. Fix any errors and commit the changes. +5. Move the callback function bodies from `<module>.old` to `<module>` files. Run `make all` to ensure that everything works as expected. Fix errors if any, review and commit the changes for the last time. - -DipDup 7.5 release is going to be supported for 6 months after the stable release of 8.0. During this period, we will provide bug fixes and security updates. +DipDup 7.5 release is going to be supported for several months after the stable release of 8.0. During this period, we will provide bug fixes and security updates, but no new features will be added. End-of-Life date is going to be announced in advance. 
{{ #include 9.release-notes/_8.0_changelog.md }} {{ #include 9.release-notes/_footer.md }} diff --git a/docs/9.release-notes/2.v7.5.md b/docs/9.release-notes/2.v7.5.md index 821b1b01a..f3b8faaa2 100644 --- a/docs/9.release-notes/2.v7.5.md +++ b/docs/9.release-notes/2.v7.5.md @@ -1,5 +1,5 @@ --- -title: 7.5.0 +title: "7.5" description: DipDup 7.5 release notes --- diff --git a/docs/9.release-notes/3.v7.4.md b/docs/9.release-notes/3.v7.4.md index f51322524..5f83bf29e 100644 --- a/docs/9.release-notes/3.v7.4.md +++ b/docs/9.release-notes/3.v7.4.md @@ -1,5 +1,5 @@ --- -title: 7.4.0 +title: "7.4" description: DipDup 7.4 release notes --- diff --git a/docs/9.release-notes/4.v7.3.md b/docs/9.release-notes/4.v7.3.md index a3d229177..1b456066d 100644 --- a/docs/9.release-notes/4.v7.3.md +++ b/docs/9.release-notes/4.v7.3.md @@ -1,5 +1,5 @@ --- -title: 7.3.0 +title: "7.3" description: DipDup 7.3 release notes --- diff --git a/docs/9.release-notes/5.v7.2.md b/docs/9.release-notes/5.v7.2.md index 7f57ce323..f70093018 100644 --- a/docs/9.release-notes/5.v7.2.md +++ b/docs/9.release-notes/5.v7.2.md @@ -1,5 +1,5 @@ --- -title: 7.2.0 +title: "7.2" description: DipDup 7.2 release notes --- diff --git a/docs/9.release-notes/6.v7.1.md b/docs/9.release-notes/6.v7.1.md index 33a1e65f1..7c98da85c 100644 --- a/docs/9.release-notes/6.v7.1.md +++ b/docs/9.release-notes/6.v7.1.md @@ -1,5 +1,5 @@ --- -title: 7.1.0 +title: "7.1" description: DipDup 7.1 release notes --- @@ -51,7 +51,7 @@ SELECT dipdup_wipe('public'); Please note that `dipdup_wipe` function doesn't support preserving immune tables. -DipDup has lots of tools to work with SQL like parameterized scripts, queries, and more. Visit the [Advanced SQL](../5.advanced/3.sql.md) page to learn more. +DipDup has lots of tools to work with SQL like parameterized scripts, queries, and more. Visit the [Advanced SQL](../1.getting-started/5.database.md) page to learn more. 
## More `config env` command options @@ -68,7 +68,7 @@ services: - API_URL=${API_URL:-https://api.example.com} ``` -`--internal, -i` flag allows including internal variables (with `DIPDUP_` prefix) in the command output. All of them are optional and rarely needed, but handy for debugging. See the [Feature Flags](../5.advanced/2.feature-flags.md) page for the full list of internal variables. +`--internal, -i` flag allows including internal variables (with `DIPDUP_` prefix) in the command output. All of them are optional and rarely needed, but handy for debugging. See the [Feature Flags](../5.advanced/2.environment-variables.md) page for the full list of internal variables. ```shell [Terminal] $ dipdup config env --internal diff --git a/docs/9.release-notes/7.v7.0.md b/docs/9.release-notes/7.v7.0.md index 7c5739670..24b16f949 100644 --- a/docs/9.release-notes/7.v7.0.md +++ b/docs/9.release-notes/7.v7.0.md @@ -1,5 +1,5 @@ --- -title: 7.0.0 +title: "7.0" description: DipDup 7.0 release notes --- @@ -54,7 +54,7 @@ class User(Model): name = fields.TextField(primary_key=True) ``` -See the [Models](../1.getting-started/5.models.md) page in the docs. +See the [Models](../1.getting-started/6.models.md) page in the docs. ## Developer experience @@ -96,17 +96,17 @@ We have improved pre-fetching and caching data during indexing to increase the i Docker images are now based on Debian 12. They are simple, secure and easy to extend - just run pip as a default user. Alpine images are no longer published due to the lack of support in one of the libraries we depend on, but migration should be seamless. -See the [Docker](../6.deployment/2.docker.md) page in the docs. +See the [Docker](../5.advanced/1.docker.md) page in the docs. ## Miscellaneous Here are some other notable changes not covered above: -- `dipdup_meta` internal table was added to the schema. You can store arbitrary JSON there and use it in your code, but don't touch records with `dipdup_` prefix. Survives reindexing. 
See the [Internal tables](../5.advanced/3.sql.md#internal-tables) page. -- Multiple feature flags were added for experimental and rarely used features. See the [Feature flags](../5.advanced/2.feature-flags.md) page. `metadata_interface` flag was removed, now it's always enabled. +- `dipdup_meta` internal table was added to the schema. You can store arbitrary JSON there and use it in your code, but don't touch records with `dipdup_` prefix. Survives reindexing. See the [Internal tables](../1.getting-started/5.database.md#internal-tables) page. +- Multiple feature flags were added for experimental and rarely used features. See the [Feature flags](../5.advanced/2.environment-variables.md) page. `metadata_interface` flag was removed, now it's always enabled. - We no longer accept crash reports. Enabling them required adding `crash_reporting: True` to the config. Set up your own Sentry instance; it's easy! - Saved crash- and performance reports in the home directory can be viewed with new `report ls` and `report show` commands. If you want to open an issue and share this report, just drop us a message on GitHub or Discord. -- You can use long syntax to configure logging, a mapping of logger name and loglevel. See the [Logging](../6.deployment/5.logging.md) page. +- You can use long syntax to configure logging, a mapping of logger name and loglevel. See the [Logging](../5.advanced/3.monitoring.md) page. - YAML files in templates and examples use `.yaml` extension instead of `.yml` as recommended by the YAML and Compose specs, but you can use any. - `report` command has been renamed to `report ls` for consistency with other command groups. diff --git a/docs/9.release-notes/_8.0_changelog.md b/docs/9.release-notes/_8.0_changelog.md index 4f5a9a8b7..f20cb17da 100644 --- a/docs/9.release-notes/_8.0_changelog.md +++ b/docs/9.release-notes/_8.0_changelog.md @@ -5,10 +5,12 @@ - cli: Added `--pre` flag to `self` group commands to install pre-release versions. 
- cli: Added `--raw` option to `config export` command to dump config preserving the original structure. +- cli: Added `-C` option, a shorthand for `-c . -c configs/dipdup..yaml`. - cli: Added `package verify` command to check the package consistency. - cli: Added full project migration support for 3.0 spec. - cli: Added spec_version 3.0 support to `migrate` command. - config: Publish JSON schemas for config validation and autocompletion. +- database: Added `dipdup_status` view to the schema. - env: Added `DIPDUP_JSON_LOG` environment variable to enable JSON logging. - env: Added `DIPDUP_LOW_MEMORY` variable to reduce the size of caches and buffers. - env: Added `DIPDUP_PACKAGE_PATH` environment variable to override discovered package path. @@ -21,7 +23,9 @@ ### Fixed - cli: Don't save reports for successful test runs. +- cli: Don't update existing installation in `self install` command unless asked to. - cli: Fixed `--pre` installer flag. +- cli: Fixed env files not being loaded in some commands. - cli: Fixed errors raised when the project package is invalid. - cli: Fixed progress estimation when there are indexes with `last_level` option set. - cli: Import some dependencies on demand to reduce memory footprint. @@ -34,10 +38,16 @@ - evm.events: Improve fetching event batches from node. - evm.subsquid: Fixed typo in `iter_events` method name. - evm: Fixed crash when contract ABI contains overloaded methods. +- install: Fixed reinstalling package when `--force` flag is used. - models: Fixed `CachedModel` preloading. - models: Fixed setting default value for `Meta.maxsize`. +- package: Create package in-place if cwd equals package name. +- performance: Add index name to fetcher and realtime queues. - performance: Fixed estimation indexing speed in levels per second. - starknet.events: Fixed filtering events by key. +- subsquid: Fixed missing entry in `dipdup_head` internal table. +- tezos.big_maps: Fixed logging status message in `skip_history` mode. 
+- tezos.big_maps: Respect order of handlers in `skip_history` mode. - tezos.operations: Fixed `sr_cement` operation index subscription. - yaml: Fixed indentation and formatting of generated YAML files. @@ -56,8 +66,14 @@ ### Removed +- config: Removed `advanced.skip_version_check` flag; use `DIPDUP_NO_VERSION_CHECK` environment variable. - config: `abi` index config field has been removed; add `abi.etherscan` datasource(s) to the `datasources` list instead. - config: `node_only` index config flag has been removed; add `evm.node` datasource(s) to the `datasources` list instead. +- database: Removed `dipdup_head_status` view; use `dipdup_status` view instead. + +### Performance + +- database: Set `synchronous=NORMAL` and `journal_mode=WAL` pragmas for on-disk SQLite databases. ### Other diff --git a/docs/9.release-notes/_dir.yml b/docs/9.release-notes/_dir.yml index 9972ed83c..992289d7a 100644 --- a/docs/9.release-notes/_dir.yml +++ b/docs/9.release-notes/_dir.yml @@ -1 +1 @@ -navigation.icon: "info" +navigation.icon: "idea" diff --git a/docs/_curl-spell.md b/docs/_curl-spell.md index 604edceae..d825bf09e 100644 --- a/docs/_curl-spell.md +++ b/docs/_curl-spell.md @@ -1,4 +1,4 @@ ```shell [Terminal] -curl -Lsf https://dev.dipdup.io/install.py | python3.12 - --pre +curl -Lsf https://dipdup.io/install.py | python3.12 ``` diff --git a/docs/context.rst b/docs/context.rst index e91b35e52..d0183170e 100644 --- a/docs/context.rst +++ b/docs/context.rst @@ -15,7 +15,7 @@ .. automethod:: dipdup.context.DipDupContext.get_evm_subsquid_datasource .. automethod:: dipdup.context.DipDupContext.get_http_datasource .. automethod:: dipdup.context.DipDupContext.get_ipfs_datasource -.. automethod:: dipdup.context.DipDupContext.get_metadata_datasource +.. automethod:: dipdup.context.DipDupContext.get_tzip_metadata_datasource .. automethod:: dipdup.context.DipDupContext.get_tezos_tzkt_datasource .. automethod:: dipdup.context.DipDupContext.reindex .. 
automethod:: dipdup.context.DipDupContext.restart diff --git a/docs/assets/dipdup-flow.png b/docs/public/dipdup-flow.png similarity index 100% rename from docs/assets/dipdup-flow.png rename to docs/public/dipdup-flow.png diff --git a/docs/assets/dipdup-flow.svg b/docs/public/dipdup-flow.svg similarity index 100% rename from docs/assets/dipdup-flow.svg rename to docs/public/dipdup-flow.svg diff --git a/docs/assets/dipdup-new.png b/docs/public/dipdup-new.png similarity index 100% rename from docs/assets/dipdup-new.png rename to docs/public/dipdup-new.png diff --git a/docs/assets/dipdup.png b/docs/public/dipdup.png similarity index 100% rename from docs/assets/dipdup.png rename to docs/public/dipdup.png diff --git a/docs/assets/dipdup.svg b/docs/public/dipdup.svg similarity index 100% rename from docs/assets/dipdup.svg rename to docs/public/dipdup.svg diff --git a/docs/assets/metadata_interface.svg b/docs/public/metadata_interface.svg similarity index 100% rename from docs/assets/metadata_interface.svg rename to docs/public/metadata_interface.svg diff --git a/docs/assets/operation-bcd.png b/docs/public/operation-bcd.png similarity index 100% rename from docs/assets/operation-bcd.png rename to docs/public/operation-bcd.png diff --git a/docs/assets/operation-config.png b/docs/public/operation-config.png similarity index 100% rename from docs/assets/operation-config.png rename to docs/public/operation-config.png diff --git a/docs/assets/troubleshooting-bcd.png b/docs/public/troubleshooting-bcd.png similarity index 100% rename from docs/assets/troubleshooting-bcd.png rename to docs/public/troubleshooting-bcd.png diff --git a/docs/public/vscode-autocomplete.png b/docs/public/vscode-autocomplete.png new file mode 100644 index 0000000000000000000000000000000000000000..dc74aede001dac5985257d10fae3337d82dd9a81 GIT binary patch literal 49502 zcmbTdWpo@{3?^u{V|L7pu^lrrvty>1nVFfHIc8>x8QaXv%*@OT{1Ox;pE+!-o1O#dZ1O#jY1@U=h=qY65^9#&gKwJ^( z^WyI&RbOHep0Er9nE4rkgu9@3l 
z8@p}3cxz7{LSO*_Ba(=P0}*%O-VzD^WI6-jTG z4Da=fdL*YH?5vfFJ2}SZZwnh1G#0FBX9Y0A6*7Jw)K zMHsNi=2(PH@b?%|kJ?EgADjic4C})NGlOow#2E60Y6huOcd>-a;`KqDG}Ub41JvVEqyH<( zS!2C#su|KTJ|P->HpS#U-%_WJL?4!s4S}Du|Me@=k{(E)?rdR_s*(8S6s)L~aF(edWZ{ksPgxbpw+4)gzf?6Um zP?LmOMGZbUJ=Hjcf33Zm;zlKFzC|s?bJI!_UQmy#fzHRj;IXnq_xBM2i!9ve#elJK z7m+n%2UiUM1Jks*1Uu5d6J(SOfH0fPQq%#D!B){27ph_K03{U?BmcK*oC`ByW?P4e z63{j;ifD_=U4*(87G(Iq)4BGZ;hAOsRe!@`;!jUS5OE0o`-BhkcIhZdIN12rdM3y* zRRYG2!30cGEnklYAsvDMZU+Q4+gP<3!5dMiCA1$c{Pc`6f>%rBt-*3`jCHF)KMNu$k)QocpVUqftLP22Yg{Wsowa1!hro5|y< zwf^Pr6|8@ewXI@bgk7f!i?`AeJs=2yPTM+`wd>r>{YTbqtD67CM*&?rBR^e(QbZcb zz8`k!Lg)db6p=2~Y2_^8NcW1%ZANmM5|nQnGh}YccJ`#m0b3WTll)QRIgvU>F{V(E zLN8TdgSrttqu|=IV9i9+yHPoWblJ;^>KhugWobD3d2;U<1ZFD#v~12E-GK}CFzK>a zd_J^c(YVHfy_>%!YBZo(@Cmv85qT1}|CNL3M3S`<38REV-wPBpS|d;T+-SRK&vJ#v z9xnL$C-_U)k7;YYGOdHv7WF#(KUT3FkiXyfI859toZUpUE&ID6y!Uzqy}-6^v(w~D zus-O`L*)_M-u{fb4U|y4hyxR4;25t(Zehb=P=UA2FbZ`(pt<(WlCihb1S+(*Ve6~i z@y~yi_+HU4qCq$|f-iIR!|*H==5kaJ;VNHsvyWAVo;zkcWs0A4tiUquTU3TuH1F_l zSU9Onw@su>c;K+8Glit(J&iGk2AI~z{g>7n(p>2?j$YOQKMbFpJ=d$TcpT#HnTPX( zl6Rm6kWIT++b8TDw3DZgyf+^q*-6^}EZ!z}%cDurN(PC-8qCUk-G{pz z%1J7ti&O$?=2wD^Rx)WxUpwZB7DGsfctw)#*r~VP)Nkz^nwqhi3hb>9h}!sYx^=jP zwXWlAh>^YQv#j0r2uq6>tz#0D8_2>v_n3?CP|+LS$<abO?Zcr#D1M?o;DLnhk4s@st=ISsngF0OtgnMOOAq+j5q6BK}s zQC!d@7VFSqg~niE9Jsdir#u*?Xjy63j%$?L)5N=lB!fgL=)19RS}dfLIs$+q!Mrag z-*E2y6~^paWeZMA77NFG+u5Qwkh_cD0?;cGKvv&KI<9VwLrXh}U%tE@cu#DkC&>QU zAhxaQBbD&2eQ8D5iuqe4t7TV?<}F}|>BcV#w|h~NN?|lMMOucdAJ(^{uO(QY!-nD?xV!D&kTTakgaGv1-kH4FDz6{k zvJRfS;z8Zdb*WCTpO2iMc1BR)Hy&tR+B>RAWpCKtugqVMzdPU2Npg4JAn=av__E2s zL%Qr~INpuVLQP0I`PJE3g;{&pT$w@0PqDMHiCNe>%>j~|J>n|0AbufE;ClTc1H~ho zXd7W=Sh&CyBNbsghBQNenom<`t-d~T^BK_RS#KI=lVYUjGhv4*>K7yNAo#l<_iCQ7 zkbh)vUWD%`cTyUDUqgMUpa>Rqj+OVFX7X-j_za0k>@4v{!|JtTlLX@6>8nh%`^xFX zP8rJMntIJ}A}%2V{%a3z*Q@Tz{!Bhbp^SIq<}2;xAxBE`Q;0JMGM8r~1*ijuaq663 zs`o0gC=bvc;hfHp(dOihek16wM56 zF}h}g*eQ}hNe!x_l?c$K!sAT3qfj-h`GSU)y(e;7%AGxV9^XJ686j(ViA 
z*=)Bw+nG;n_SNR(6*K5Zh)SLJqa&$>jYX2|av}Amq%VJt+IfU8*d>qCGBk1uO=E4H zM5p4UrR-&)*QA?R*rZ$Sp?KfmxA$EY3sYz1c_qPKp#Bkb?am{6Kb!=rbh5(myOsQX z-obZL|MziT`#U(iIz1$HXtiPKq~iRwLxU37gk)X5=wGFS)T0$xq-@IL3=4a%PBt;d zq)s;~7Q*~7Ck$e#86z#L#oDZC^_c0~s_kz%!xx1jV&L0ct}^Ll7gyv-H#k{sgCn)B zh1Mt@uFTh}L-~KAX^0lWEJ~WqB8ZwI`b8my%g7Sp1?hbF3Lmu5Q+)#k=lz#oYx{6X zSabqA9tV?P-^cW=wm@jd_EJh&i(TlNPk`Tb*d{$&_I50+Mcsz=&EA=`htdypxVk33 zZR13UVAk$%dC8;?1kql1b1L9;xYOwUjTSI&9I^^$U=0W+H1fr(y8=3mwGD#pUmlZc zwXugv0l(6Gq6?l3%)9aXHrSK+D%AVSjZ3&#SVVq+lJWMju4`*T^)>DLxzVr>fGWb~ zDNK8-M>U_>=;;O_!!Y`c@9KnYv^j@VZ<0?$6tH{UlO`4X1t87^cr>KjWH@j+Akw`~$aLTq(p(9Jf<2{)%gOTGi z@_yJ8%^XE#-%gJJ8976@Exb8v#{uQQT_|G=4cSbQ>$eS8+IZj2WWrwsN1=JNEUl(tEv`_o9h@BPyE`QfS@5cSY`hAAn8lu8 zJ9+d&T84MoaWjF5n^(|+W+&?){Zl1H7Qd@t5B2!IF~F)JFpCTk~16>%OH zCyv@04-j>4@*F$u-oB&2vZl->cE{bf_RAZY%aM+|6)t)wf80Mu45&kdgRJP|# ztz^8H{);+)P5nWg$G(vqPTDANCJ79yQh9hu3HAye-Ymm6f~2D7+@s!@!*;o+K6pCT z+P~;~^p`!-DmBP8hhBf(7`xIVA3O;K6ybA>t~&%V!jt6r*cPAFY9%<eDl*Vb}Z{1Z}H5oPX_V5A4lFujgt=U7aox z=x>2V2PWZ}&9Sp{j!D!OFtfhH5Ab-vq`z@7qU;S0vWX_*ml(MmUZ50+o)x}o;^|HX$SY(gXtodE=5-_ zE@=vOeu|}~3ypJ-jpxI!{pZ0hUgW)6Fyz12ojBfjybq->)!jwezJ}P4U>$V$Lqn^pzVEih|04U`w5YnLwxC3lwDZsC z8`*kB6ck$RG9-=OZcoyw@{=Qg0<*5xO~3qQ+2=;X?=SdgOKf6Ojy7v8OtR|YhNJNG z#|pNqSjH5NcC2BIivHr9Vv7EDZN0(F&LiQb7$QEPz`Q{V1e#@xZC08M>K>kBB!=oj zs7Ve5-Lqi}jZ(-fC5+A)n*F1Q?$o$F_5hXd^leimZmC@cbYhB*;tOVytQDR#aAO3h zuwUo8Y_anxl?qu?ky!L1&YqNTozLNf2I`v_w>l3Y$#T|{bexE!&wGQzX-cl3ORn5dp7r<$)qHDhM2 z5Z_lA9<(hP$^=T3DRt1)#{eY^r0aEg9fE4 z;d)UM@#RlSp$XaIUj}1mztSWPiU2bQi#&-|QdhxKsLH|v-f9-wjDoVnr50k0fVW*z z6*WVC`RtL>s%J-lcQBXiAm)Bia;kQk;$4GB8G-tM(xPji-d+VqN z!C8}Z0UJa&j2Wd}>W_M0!qvIENHT%y)!kTuo^BD7gWU^@dwW+xoi!mdN@vr|#lr3t zVtw{S*9BeyrZ(NwB^xs$C{GeT;wF7*cXS0kMXkOT%-iPBE9x1zsP_aw>8@3%gS@yN&1TIVDn%=CPt~b zc|taBr^nuTjc`2{OvWC!I#7E=MZ25i5Apv-B`UN>Z=idw8J+i~+C58zFnvrjJuVr! 
z&>7h#@w*210atuW)Y(F=9&7==11~)kFUks-7sYZxSSv5GGSfPxgKaX1}(V6q)7CXd3%3G1i<~9 z(zIPUX;Wo7g9sc^Fo1@ewCo0TUu3=l${WIHW}GeMd zD6t#=l&?BAY?y1a%$>t9Map2NK=3mO(y|68oQvMshl>lVZ3DM@smf*M@4x$=lHMRT zug-6mDEYB`W(F@Cm!9YAP}$y=qf<1YN3ZeUlOGt=8z}!MygJg2CJ!Q8Rqiy~cYCh* z7gn(^RH6(&JtPKB(C8`-^Ot#+KwZ(L6VO(W3$z7eyFv*=yMPdIOL+r!fcT4}Mx#Xk zZ^%+yH}G{$2X?Flc83Ywc62~*wL{QO_*_{J#{wM)()}@COLuViN*biY#0RzabGL0S zf0BHAkO$S@R1EBA(yydT6BeOwQXL$vkXF_Vg#56}EB8&@q73=>m{w}u6zIcUBlipm z#HVqdw?r$^KvfG>GX@-a`N5lZ4j?#M?T)$iUdUW2tm1x1$NZ4KOjkcJ_@wDarcntC z0fIsNlEBOGfDDYvqhEs;gKTnt|GJVJrK|!K(Hp%(ffO@mElNIcoh>CMgCk^rfA!m! z+#K+I=s4acrLrK#237nJ0CS;41`#@|D*FEY9M$Fagje^-&tC&yzra5Xu%j@f;9wXt z(D&-&cl5B*C|4SR6@Zoei9Cx2ZK%a1z^^>nXPZpTV7lc*b86U}`J1fN~b9!v&oSDS^hBgp#Y3!F^kRQq~*UZ1A-O$v5&qPJhF-V3{z zZ3zDfC?0m7|HIYr_>M&f@$^Lh>{_9enU5_Sh^Bs6lM&I{9Du1c#^%#C)%HE=`6sUh z1`=L0c!lOw^-+v9aG3D$dHG1mss{AzZ$9J9wKx@*bYqCIbL{V=(b3U2$$)niy{^vb z?RB_eO7v^1eb>BQV2)tt$QU&wi=~{CQ*+f8{)#PN3jM7~DW!#o_f_lbJzDOrgO9z+ zOLOga)O$-KQ|Z4()o-YS(fUGy8L_1NUiri!=Yxeo3NSbINUr5G^0GCpOBS>x^yg;q z(T%2Oz{=4mnKq*|5lPk|_o?73^GMMUW-v?Z*_o>?V*d)r8aPF2_aYfRfeF`e=xnfn z@6RJi_^NN+G4wF9#xil8Cz+A=3D6O6_Bm6i&^1vx5%;rcdm1+YokO^8W#hZoyCT6M zcT$)2pkC|bja7nQ@Fc*#FW^irh%%SNif67McP+Ms7ul;y|1$rjj#Vuik@rSuLBPt} zQ>}E;H|M(dN)^q^CQw7$=Zpt^W!3=liN8USOqy-hzCq$dO&Gayq!O@S6KvcRvCBQs z!KQ&CMeAR*4~%G1zMjBF!fL!B28&>XcOTu7aq0d+SGmR(XDZ*vy6ZqrMD72}SSuIZ z)t%UnYT1R5#s&O_LSnRYZ29?FHo#f(5HAF_Bqi&?Z@Zzfe>mM4dPfuu62MXQD$YEZ z3qNq#OZ_7GS?0L7@wALH{t65a2hLV^D+W+Bn)Qjt)&O&_O}J5b9qI#`xCo2E!XaaZ zYEp{I@=Ur=7JXa?^*^amR$#oi0YT>C<`ARc=~a0>){j!yzDu!xeR4?DNb>Z2`89m8 z!Pu+f5<}L*x3aZc@bEM$f zz3@mV1}{H3vk{*BO@6N;{d#-gU9K(pE_}1Y=#fE{sy<-5%Gu80(b|FUbK2bO(Qe2 zXdSyq<1diR&4iyl7vaSHFGcT^`70E;ZH#yD22uoLO^mtMDM48#V>_YZ2Jpw<1DZTP z!-!j&Sq$3*nk-7D`jYO{q+&5(9cS|)Eo{qr6BXWy;s{MV_nZh|cQx>yXb3=<3h4TUp z8f$aTzdOC+XDz}uO-?C8hb^q)$>}#7G<5U6Aj0G2Q#r1A@IR9DX8ZuDU&$O;e?fCS zHp!GZE2*K}-d0u$GvrMg4}2V0*lB$Y5;rPJ`_3{Z4QXobxIDdjWb{{=awM`Xk%0r( zukV%G+a-$nW5m>7jpBv8Q-a|qu2*W`UY8I=o@zM_m=9G|kbEHEf)l*;j_^ 
zPBWoDc~c`cxHpOPsF}i$(f1$t;h!~0sxPxv3Za6GuYJ}NH4Cj*_HTkPuj9%ilJA>% zUKd%fzu`_AW)?TSyquYS{O0OQk}+&Lx+_V5npo|~8hGQI);T9DVmr6w7i^h=czVuj z!F0TC^|)plaM~0arV}LWrKaDBh=$kszWJn{-dZJQUbIq&It!&=;w9PmV}G0K2^U(d z{E;Ro&+FHk;*{IEMW}(asBYUe7ph7QF8l1-V`aK_y}Ps}z&u z@41XOp6Dwx#%-PVWan=#QV)1NT=n^vQQJ8>9#4!P-K==EUvv(_JeRwm;v2r)$t(sr z#i#H1Y6lhy0@?s)Ei}U4Odv2A)k&Pm*-K2P8|^qJ{p28syc8>?Y1+ijuog=6?qVZS zi?t#H9en$pv?dxsiG^x-L*^^u(@yO0P~#VM@DrDFQ^#U9hO~-TghvXNAS8SfPii#Z zG*+bLCSiFKr}`JWp67WeLo7|1%;gQ#p=@s&_9`ZDUZ`Z=r;)kXR)-*F*Pkvqd{g^+ zkvoPuo>tk=d=&D6`X(!E?{QzLAib|a-)|_3imgR^+~wo7J>7zWL9%odv=fTFK2dl( zOSgIBnN1P}ZTkxr88O(lX}?oT8E2SG;oK$Ni(UXTLYa~-#mpJ#L7q3;5FcOP zkk;n4rW3Vw2(1_uUe5xJLWa75Cr`I18y<0P!(al5>`Kf&(E+a&;&o$5;)%s>&5dX< z#_>(wuLHKErOnDpGy@y9s5A4pw9IebjSp4OguP@{pEWDyZekga&;fkRF8MA;KGup3 z@xvTiM?WUxpoRBi$k#JSb`6JIgjeW}`vPKE&wcOxp_`A)j_0K{_JGchO`Hzf4Qg0L z%=820K*iAidI7v&9O4$2uL7YwZ&C#J7V4p`mufbn8m(OfyG0y&Ol1|ln&Cz43KpeK16Vci zUp-x7^EhC#JDQ__uU+asS`nGaGCA}_>mHJ0uS!mNVtC;2=>gt=BBYnajro z-K{)FbG=);fvKHt0=XJ~4euVly}p}3=j8pKjX>9<~M@*j!JGw|lZoJfDMR^yIy`I&pO^PLuVP z7`g6g=;{Z5#W{%ewhfRoa(T(}8g($#Pg|q<$Rv2-#nqu}e}!2nq*pdR#vd6Stvr2D z4DqBH2K|nDAs?Fc-Vu4%cWW8%|FK+%kW|a7v8xCCThJ57RmJcgUxE#?JHMYIh=D5s8b*&~3X65LqM__v_!B1~LvG=bd!awT8`~tw4U^KK2 zS+rz&!Vu`Gj`y$! 
z=of?2NM%pYi?z-`WwCps4uPAS1pc1|lNo5I;m9ni@1!9U2>u1Qqxmpro*8&j*+l8ZI4*FJDY|g4O&cq(x++b z(8`k})m+QXG>O#l@X2|H>Ozg*P>Y0qI_0uoT4oNb{iITCJ%YAGiR-3m2lC_NHeMAq zM%cLSIxz;?h$EHS+)*c9kUrdXN!2`o)1#P1ZEQUf3p;lY;N(2rp#CKKEI%8?Ms7@4 zm|=N;<9h#Ic{{}lLssQ^a$%wb-#0pFZa_Lr4>m{_(>bd~9W+Ewre*nNPu3epjr**^ zd&k=9a7@Z_sh+WpF;TEi9X5C{=#D6_PqN)vFlKsSDQFoNV$pp9mP(?SycCeMOjWY= z^0X|JuF^7GhM%E^nh(r`8w}u(@J-D!SnRdJ##lHCWOVRP&at<7MGWpiA~$rolgA}S zJTRg9zY-ZWln(`VpFeAV7CJliX|lG2#)z@PZgve?YdcQf{Q@3mKd!Bx^h)E6IGSMI zcd|Z`U&%Nq7xp_?M~l3>dMg`BMrViimJe;ZaOy)qllel3!0LNGcgglHl^ zyf;NG=eGq6aVkucSm6a(mkK|AU7z#x<(c`i&K#04GknHo5@@W_xq@<{&y;Fozp-R) z!Re{alpPL730O&V22N3}DN4{D@V%TP(keE>EH)j(cvAjql_Ri^LaL7dqb3wKLKhRA z-LtxmbWpYmVju;BPVUNP_}efX0yQELy;adOG;jS@7R%}gHT0yzWQoG)H!?J@YA^xa~2`i07l6?L+2{8Vdsay0VR4SdX1 ze*BW0{fAbdg@r@q>~E!oJ2T9u@3!~gAhH{S?H;}T_jFkESLx6&M@03BVMrm9`1B@d z(~=XrC5}hIGcO{Gjg)>ergF6C zVR4hs8;m%uBUO`0)WwJ^z;|S*r17ywVy<975k880aDgm$abYC|ix{bc15Gv>{;ND; zN6HZC%5M8a%A->BZJDdIN0rA(uFV#sTol(f@tr;C(Z|o0>8B$p)?`}sh-C;+edt>J z^4BZHqm2_maI_+<6KIy=0*cCyDuL<}2CSolPCCU;?(-b2$u-jZ97$Il672)AB*it# zDbONKs!70t*A?-AJf@Y4f|tF4Fl#^Rh<$^Bp_9K6y=+xwv&F?&Q1GCv+MWX)gA=B} zmbP~FyXmW#%|s^H{Pf!UCSSzu)8`O?+!|7I=-jC7`R!`LUHAU5jSiRJMBSGo7^a`? 
z;9a4F}N-|6CD^s`7x29{maFBh_TCLa00@K=%P zJr>h(JBE57`+ z6c`t7uK3MXo9{JRL|J8j6s$_&}uX#@?Q%*W7moy`KoHV}KB z7|@jypPBU{ZqthE$R{!nP6hOjRws17^)IEtS{%z{;9(o(PXJYRb@#!HqExo$gy|w9 zGz@C(EGxrhJg=&b(^8?^49PEL=rQH%2ZP9JxN5DS7>{4JHyTMNqZt@nu)TSu3609o zthKpCA<>bKXYP48xoWc?yI<(CRe5RuaPcuv3I3UqQcLOAVG$MAlBD>7=-x_-KmJR( zQTRgNfz40G_PYkhz!N1d67>8go;)aUoM1Kch61$bn`bgK6T0y{_L;1g|UfWLevMdPpq`VW^{v3s{SCK7#y{y(N9jb)m+HGYPt+1$ep01umXUDAIaFRcJTjUhYYoHc zmQHitl-yllt;pKSwsE6qs5*Es4%TOTC28-fYvT&3t1*TntYY!-oCIWcb@*Cibsqtb z%Iar-d^(ID6d4EUS{rj~$@Ym>b}LO%K+)ZS1vTr^Ei(!g-?{G%r#aR7T=yyQOmrb2 zwqd_nC~90EX(SOELpxFVmH2%2DJZL5dDW%uPP-mk%7P;0vet49Bu*Ym^j3c6eTzv$ zeZ?+rff`OmwbRw>i|t#~G_k%GSMTpcqlF?rfNBMfB-NDFLVY3^4=+t5cxoKtTu`e} zz|(SA+7e4-pyWZna&4&ad=_e>ylOLqS{)Zq(ht)k=2n(+mh6g>yX6W68rroX8r~u; zfL(-D*$jwOTO?0xm=w`5qz2jy%x7zg3KpIQqyy2^W~u$wML43OZ$FX!Qh%wv_6T(i z;0>Jg%@!NZ<~j+Wp@Az^m%#w@w9Jg`Qoel}MgZ4Y@bjWIsIcthV7nY{8b}S*P69o9 zi?xE?yHY7#6X`SHm&`R^jKHcq0#&NzR~@%!OBHD3e0l696fk!(64DP~&W_u=E)_mr zo@KyME}q8>+RqjuiB;Hb8iOO^l{O8~SdpNYV`745N~^&!*lP9zdS^fYbc3`f!H ze#bxga~aSnpR&AXm7q{I)!S>VhVd&YsEh*aw=~LRsZz0(lsUJYU?L8005}nU8O*GCBt63 z^5^`Al{<4OTnGdlm4q*4Pma3cuo4%i&pc~4k7uk#yHLAI7eJ84kdQ?^8r*C%^NsLh zLA$HtecKqb<>H3RR{P+h{s{j@`9tskMh2Jgcz@91K0j;4Z70b34V?lLRGmPUOujx4NOLuHbxUGuu^aY(i}EX)WbZCW)V%g_X8B9j8ct-a^+36U3Ha`WJ(6Xt5VF|Bt;nWZQ+9Z-@H7c(<1g- z{Yu4^Zd`{8>%annXN`7Pze60adpRU&as72G6FB5KHW8-8RZEX+yREA^gBi9nHXJI* zrAihxkXkqm+_GY+dDaDk`@A7h1#%XhtPJZV6|a8}G!xe7x@B3gbs1WCAC|r@_9}G2 zb9vTDWMcTnkNPJ}qgP_sz_$!N_yX|4nZ~PB8;OHi^Nmp?2axK>Ru_a;LvFLaC-cSy zZQe7|dNt(KG1SIa<;JB%gVAAxRW$9*u(=6tWKfmTV1-c|cI|>G2U~8sotDKmCqT6N zB#mM65I9CSIA#jQz;IFtU=!vZDH>x_DMbeTr?bZI)|i#a^J`aIY)1{Q<1yw7>6HRD zXWw`jukKhnBu=407Wy-CaZUjX+r(&EG&d4r+TQV#AwD>uN}>JJ?+ha!(KuT$!H_ZLzTwCdnNb{m7-;HE_jhxh=I$Z!d}nuX`I>#Gmc@=ivglWD z{#J+Ta*b4Kz6(V&gEIl>Fs}l;ycO!5hP74>81(U~Y%>$(eO1d`r?heIY`uh`p@XGZ4(ZJVZ_))2j5Dn-frtmI$J)ZD( zp_pWWqGzY?Ba&{JuggPqmgo!MubaEZY#HtSSnLaHmi~Gyg7j&?bD{<|0aNclA{zvP zPax<80ab{dXK?~hg)ZCPZ-)2aU2U(?uW#zk*2xfjG~E}T&JkJblDe_YW7@ApZLdqW 
ziS@_Gm!EZg&-X(+R8e}@(@uXb{fpgiM+};0ubYefX^tcB-z_Y*%=LYSCb+xMc^-g5 zrJ^6Wpl>o-<~Boo9c$ALPDaNXyDp4%Gzz@C`TB@GmjMb!EvxIxaAx)68OfM*3&uJg zM#eNUE&K%y&rz07Kg~Snjj(wX*3mxWvveL~oA4Df(*k~qUaAW8q}628UQ`fEeZfQr zR@Ks>vRvEp;qEL|_~tSx1u3usIG5R6TGqlWr4>z2lR+OT>nO>5X+}4-8Ib#$cxxhB zeF5Qds%Z_)K)!{YZ(hMu4mm{5f*)ror@H-J&eSSz@2oTJR#WR&1YtHQc!!Bwf)+f zy?F%kDeI~y={->uke6>;lC|;C)j1cj<7oPNyT|?7g5Z92=EVMf^+jtGkS*%+?EA*m zp3vdT`q?~a3T)w?T$D@cMTO!&pwN=iy2ULN0@-nkn(4KT{`iXAF<$$z52YD&u@Sc& z>=;H(fVtV}|D1|YEnw3vv%yTv7QHkh#f9?qVHp;VQr42reaopWuyv;HI}MsCh#L90 zPyMyu3>1P;HOl2N_%$WvP+bz}H`Jbv>jv>Ie0&nG86?kWn~_~gyzCj)ruOdLP95zc zJPU>;2R;@>am@P@#v%kaz99@~A@;009I8 zKHi+Yr#POGgQk#4R^_AVW#N<`@KRyU7{PehnM}`HAu6R_h-^>rH*el z+VI%N4-;zh;pou;w`a1YVyNkNqV_4+LKhpc?MLd)&&y!J4Cw-9??$~ZUr*L`I_Z+o z*dJ^(hqz&?EhJi~Z#}hl7reY45;9b##;MEdFT^w7uF-*P7zdszc|DBhWJT<4cp(vZ zM*Sf#jH!-6Qw@07`n5mqcZnHdWJ^ymtKRB+l3yzU!-xM;>R_g@;DeiX*@ zPK<&Hnla_EI6jR;uXeE@jnF`A<86O5A&XjWBC?s5k*hDCa2Yp4)lZ@7FbjkB%;@5bH*?%bmk1 zepfoKo1_Qfx9AWl`)&8U8n$I(z#f+2?S`3l)br(F|}eXKSnzKBCgw%3ayw#(r+H9q|FN&6UIMwt@msqZ7Vbiwcy*1@yw80Q5i)#f` znl?Vwo6{YU2=uz2B<%7?dn-IE%r{>DxtFum=@sAQV!Byi7X;t?!6{}#T+^DocsOMt zkDHP9EOiyc`}+Mm1b*sn2=_3)sE(T@h-uy+- zaqDZ>OE3KC!BFy0&Xz4aur%}7!$ulm!-sC>aRbH+gLTk@^yuZ_M0ECy!F@&}t0ykA ziGqiI>_REa11`%EHVY=M*1C7vFaX+E+rEN3Fn3w!UOw&J7ThHEU1h%fh??dp{Gbz8 zmmG@#M+Q;LEq{*O&ebSUGD&T>{%K@XHIKxSH?|iDsnh(|kVbNWL3I4_#guCjtNeuB zy+*iZ59lqpWVh$Yo+Ni_0*-ys%r1+qS3!J7uOela7=y?nK7k&U=jIBYG`px;BO>+ji+edAUZ^t=50;OcFK%CP_7u8FH8h<4xAI@`Fn!-BRwZwoGeox z_rcyAZ69w|G;{(?G4Mi=s=IrC8a;C-4eKQ?Em5IjGJoqu{nG{8RuSIog@;LQ`<#c# z;)z*`hkkJZNzhVNm2>cdpT2+5&LN}c6rlaqk4RQYl^}o6*x?nofj9Hm^5GEku~UAx z;`OW$4RSfNu)Z2mxM6F-*b6KXY!rwv0*{@w@*5cokMKv-NRmXFTASH*yTAE+?xxHk zm?bzCUP6R~KIAsd%Epo@V3l2hOy*(MU~nZVcc=07k;#P9u{4q+dv-?q@r&pHz6uh;crX`Ud<48`gYh%_{n3CMo{NctH^SJ_)1?$8PWU+x5ol^W zQO*-kTJSktQRXy)a-?uFac$Ypw>RrK<;G&9Nhj-OL&jyQ7j2fQ$=$G|B(e*}Yhste zdMfmk9O6<%PEV~pCh8gV^O1%0kuL$Ps#QytDpCgLEk-rlHpR(P;-uRo<1QO0b3yDW zn-EL6v{{CvZdzSM>ZnZu 
z$%>1GKS3PsYq|K>omWpJg@9NhB&kRGmBMF`VbcdxO!zcsF7Lv=rQP?eSaw?<6!e@J?kDrfuRr7 zRP@Et@abAEA=79;!9y-0>tQMwsyXuUlZ$8mJ#cDnI7d*5++Eb4TFE(@+X%f&(2W zu9X%$i)uOY4s}Y1uI-P18nN{}Fx1e=(poT7zc3Q^3N@Sb7vDK|MW}|mKYAme-87O? ze$gb%nYoz|+Rl&8;!+vCnHsXn$DqAZ2~boUKKj!}u5n1?BW8F?^J`S?#Mgu8ZtwL| zTpB3N49lh(4~0UQe6XlLjZZrlxnso{c3lSZy~2VE3IQwXp~9Tv1@Rd1bP9>S{1h1^ zXlEp=H1?o>`bZ*JLuCt-W2GIq@eFcLZe$d0QuwE4zr=W`MgdoT+m>UZ=6~jzQ28T) zWKcMd>`JU;=7T1FVDgXcP_V6WmM@ckx?RS*gmH_-V@P`+i#b#lUhJD{;k2V7Qc(U8 zi47h741@WzRUgAU)J2iO^sktkxv;Y&V#q{{Ow{yCQhyqAng|Sjgk)EEx)-eOQpBhwe+R6B|=JD4Tg zl=|pqpJXqmv#l;Q`PVZA=IT?3(eS4t9%My(JUUr5z2l42k6n0wh3*t#Y5X|tz3vwFKX-d0GTmGdTh z#g%ux8RAu&H+Proq>f>snLh?*pz-e$nl{;D^sCc?2z6;9O&wev^z7kwB>80H>F|UMC&GL;@U?J(l~UgQPr(A5 zI}tLTG9Q1=6HeMbL|uh?pY=_Ubuxj^4o9J)?T?Oyzjq`^{f&dwN-Tu6NC5 z)4Lj=W6ZqcmBy?za!IW=d)QjDBCP&^JKln<>aj)FtPOE zt)-S0F^)c8XRA#y7PGCFMHedpL3**EXXI=t)1l9CRJ+~p9S0RID=MbK_`Bq>?|T*8 zp#A07iRIv%x`0$EZ~J%$TNds=Hz%@3s*-aRY#?!C$7Mhaa^^qYnAuNmbxYcxL;g&q zB|uC~ESxFLmMWJ5-2MELS&ByTB4rnQU0Mj($m+cHyPb?7*;FAo*#H6oxQy zTY*_G_r@pV8WkOFid2wrwd9+I!}@vsLUzJ^Cjt5C+re=k|E#CY;njK9#_Xd0Y494W434@uk?!}fQ;Mm2QeHt8xPG3dV&(a=apHi-L03N=AwXS=GbylHP>Cmy*X=Cnqip&jGAI8P*ta!Uan>PDm-f zW1U@j>>$v3$;ODbOgHz1(&7Z}=sR8lNKA{=)~^77yd^UgEf4SnZ1#%L;l)Fi!NMFt zk)B7t4CG~4QOo5Hxhx3+UCR2N`0@mKm5r|tt?xY>VUd`3gWHt`F5IItW#Hy6B=L@2-u-u83f+t&Son`k~MIQKPq?l-4>@u}%uE%cr( zO>qSlghGzaLoOIZ6~`A|zzuE77~=1{k>AcQ-i)QQr@QGkm$=GxFQ#j(g@N+hVwGb#KoNR+uRvqkE|)@_|1U+^I1Vi6MSMnek);awmBUl z+QP0!$1Hy(i^Dr}ReU-?T6LR_61Yk)l%|RoxF70m(ghhc-py8KeNtOWOV{!A<3hb% z7QFc_6Xnft1hvTkwea-^m^nQ!&e3E>gg;j%vF#7nP=EQ)95DFwnLkG8sMhPV4GmKS zw-vJFm8CoV`q|G)p15Xu>oSi>(KLP7hfuaRu5drn0KPWJcT3ed*7$65yMGCd7P zt&T)eD&`kC8}-sed0{X2p`Bq9$03htF7JcsUWiv&B8lG}l-b#{Ck)p$DC~*oL~nL+ zPw+4_a$e;=`38rnDG*L=_Hi2bxm;0A;%5?O`O%U=e-C`}B@U&3$IipQ>MjYo@BZ2? 
z?ZJ!J)wVXVM*<3BO-K>IU^RbLHVHOL#r&YA*uVD^ih5J~*~yuQeREpT3KO5u)C^bW>6n=-Qi)_bq&2MF6hCNS%S+QzbvVljG zq~F8Hs*hs>%zvV1p6Ax{~IhkKl@4&!(cfuQ%?_dUHzyS%+$ z9K@BiSUd(+>GXWV(}+|qIcmbjT3+>YSywpB{nz66tG=x>B;JO9(_#h$B||f5LJdt# zyu`76Zo)Qf!{neJAGvrZa^)~u=E$|}Wv}R}^Lk^@f5fN8ucsaFTRgwB9;*HMK_m*Y z?tdyyv;<;A!Ep-s^xgbei1^(;rix?x0d*75{P~qU&U;>*5u9(mH6cwr~`j2=}#cnlEQW$AfQZKE0W zO(veeh?3LZ`H!#Y*}G56OL;XN);Ncb8BK{0oAdz5TVFLnFGQp~96Gtp%F8Asy-a4UI53!>Az2QzoXt}GP+={7SK{>{^7+rM5>Xy)gyj1vqy_3Rg$Ok zRqM}WkGI3Qr;&E6sx1b@4g(JaNZ@&QmIZ6$73Z8Ukt~-xkUN_Hgua0%(-U=d?NheV zVqhpGbmjZ`yG^EpmzkvVcTRJ8Q2Qj7tp@sNEeg14D9mr1m@5;%3}2aRzM;z1ftK

            #SXiW-aL%s!u^DQARvInt3UG=f9m(nq@Fw?$SHd*kFg1-P`oo zUJ~#V|586(wYK z%AsizBvzf3p!f*!wSN_93#B#x7y1_x{ZtkBg=J2VG3OBJwZ=1dk)Fw)TxQ}kH`Jh} zC$vmm(p1}-e#5XD zH9zgC_~iOY!M2#DKzy>8_ns@g=LwFQzj2J#JPtc8{o3JVse4r}V@;#`hKbQw*@O4+ z*8|vb0I`Hrsn3$&ed%6@9w(GL+lWJb_^9rR&c5?gFg9dQ6$i9U{l4ngTT9%snsc!C z--ehs>-NrW4f+j_j!dbYRc@J?mF;6$&$pccw!+`d@K2hbCCtWB;C~wZB^1)_JptUI z0fAAy#C9|>FPk5t=ZJ|<@25=Z-S;?8sJLShe^wmM5PGKX0aoN>WiNhS!M1^gDm>;} zwQ8Js)k_zrB?1ZxB=o^&A1m8yi1a$&m2$rN0hO6RM9Xl&pYz#2Kjhbv^G$|y^7Ik7 zuSb%it(SwCI5LWGP?+HVA`Pf2ezwy4`N3>7ZW^jbS9Hv?MBbl(`jIpV7pt}h*?H#7 zel&Fj-Q{|2w#I2OzYIiYQl9HC??b0{pi1PE4t%C8yTPi?ZVKWmkG#^NssnxU)x{;A z2vP>7fm#?a^RzwA?PJ;P#KdeQ7naqBB>d2=oEWTmzn`WqIUZ~Y4z)pwX>=4&uMk`# zG;Fr6=w+H23@K;tKUJzr4+DoAcD>AkOBtM*z;)sRqK9D+MGewQ=!q`uYi?ab!)3+v z%mT~bhQwrs^F#X~BJ-Qo3n%0y;MKMbljp#z$+Lor&y&$@YIB%}`1h+Kk-)9L;bn|2T=Ok@gkk zt2kfzu>cgZVJ$n8QEHZNVK7$_;2N}mtkbYgT}!PG}Pa0hR8{-NaZx-}2VZ z?HM2O<8Ta2)7=xv=6Rxq{d$*>@55zf$O>rR`P?64H5zQaQEVU#0%^EiGI@<(ai2xk z&)aVS8_(DMUaH1jIXMyU?)|)*$E~R1d#GQ|vQpL5LC@Sb-@Nh)#?O0tEOaj_4p~q$ zLx|h5dO1Tp8(W`xNzA+RBbqE?zccFA1(uarf5|3Ydx7}Cl8|GXh zNk}kG+rMG>V$Klp_w9cESauf;;A)Y%y|3wk9aXDur9a+B)gEW%i#d&Il=JRr3w1O{ z=N4jLWxSW`(Yw>okOZz^J8lY-+c=yxEQDLefUSAkk9P(RzVUUXW>4@-o-EnG?b=yQfBU(p*WjlJ7mzH2SJD2-OOdPU z#stnVo2@fre4I9~Pi-udlF?fDBk?hDKkw~=OlFjo1g`Rvbx6+oY5u2a;jK5XxBZi(5>}9TmPdGS@V)5#=*an-_mCq(XAg?y zu%+YAqtkAN7Ah*_oeP&Lt8N#F!^||>1?hG>udmxPPuKQ_01X4M0$=7n())kV(t z0ze@20!8Ls=%`J%gnd4wuLzyTzY1B1516v#nsp~?NOW2ChGzb0uGV6+P>G!*Hc_O@ z!p0#vEbwxNNHsN%`QAab*zkPreFEDMrVAdDOA)4#mSSv@7{B`|k*d7Jojz)OD5 z@U<~$P7$=q-;RpQH+az|4m|$4!>-Xe)VM0ZS00xbAL1`r*vMN);W!4ccin?wS4(V< zRWhee(5qrebspraj<0cQS7$-H11vrTj9gwc`>6~3zX=O+fR}Dg^VF--U2k>k1G@s& zc4OF}s#$m22M{agem5W-auDj?mDlug{nUDQa!V^u$L$>lptcWN`0k<4WUVKY2n$Ey z`<_+hX|nK>(ah4C{nkUOzU~)US@v|li|wjI39=6v3b;%+>lo;|f?BQi?x&GDe3`pl z&pw0;ofu!0HJjXN8+}h-t!Oq|!fEYeG+MO8{2Eb>4Qc){`yW{1PdXHqAlV-1k@{d! 
zoS!@1UW2CS`+@1OIGq873%*NKU6dCcj#U29gz*^nRG{3}fYwd%Vj^>!O?g7EJpJ6l z>8_>oVb8=|aING7PYL9!`mNmB_FZk!jF|K=8N`jFnmuO=gj_Xe_nA$zSn8)HoNxu& z=@lDu1YOd_4}E%m{w;AiI#Vz#PBWvp!qHy5TDkdKX&m+Bannq=y69YiW%q+O2S1S`q&08>W`1>hFEO&2)G1tv#?)PAq*vvw< zhFwA~^`zQ41a2&7KwX~M;{Iv&z);^-C=s1>r8{ZMMy$i{x@QUkIrE@yoX!0JyhXf^lYw!=e+AfhedZ8$9w%rJ}2nrD2ncf_6LhGATqg4z>tM?vC6 zm{yRg*1gvR@C^)toaCJ!=5(`<+7L#W%Pzn{U{1sJB zv^?*5KSoBxx!lSkLy;dE(t38@O-N#YobU~1M!x@HaDm%xZ1L`Wp~f({!{hB zpE}MXU8D8NmX?-aX_#ogDr2E;sjj=;@2RZ1`{16mnGw~Fj5K@iV7Jw5rU_w<=BWzcK*1&@MzG2i)E-Kd1r#b2p~=!_Vh ztpp7;^4ZP4edCKi_FD_C$ntekRikV)N7ZYp-6|V=6>0A4Q{Dv`xNb=B8>_08W_mCo zmH_?;ea!dh(^HejY7)RIrIMm9Itic3=&la6O%L%-W+jbZF3V zAY@26};m_L&sz`4$%`hyUq1!xZ5rimh`wIoN7^U>RrdQc)4!u z++CPl6slag-e}dV`$0STBVpdH1A5jgC~Nx_&U+z6u_@O;?%BF%z#wDuP2g1^Xpq*Y+@v>4{`8l1^e zEo?3g*fpwA*`odZu9ltk{HnfAVph{!WB$tw5mKg)Agg5nFrG`Tt{(pGE*FZw>74v= z9vEA7gvv~z8Yn=1UKx1Yoj8B(DQkrwn1SfbcgrAb_xsM_K3;AYg~Ct1U0mqtm@u`& z;>u_1`46ov2)@O>cemq6ZqD|1CG<^>z}fc7`B|c!)Iz^mYAHA=%6NfG;hS1W3qetv ziI9eVeS=$S#i44a%g?x!Fi6$ZOj^_GjVn!9aYt7ep&{(c%=oRu~rLDtac-oyI%>5^3&I+dZ z&l~86R3c5FvelTo8^_25^LTBicN_yWqj-Y%;cVIeg#qLL0|O4BW1i)K%;zGRWV{U1 zd&v3o;C{YdgNgVXz%P7w_!y8&>&|wewiSqHXHGx<>ObCnmXIJ;{s%8ZM}+mi66Nps{|&~$mQD9cx%&V_IeOe( zpv{^cFEbDtqO%`KF%e>&h#4~<)4S1S$>v{?-Os;ojqzK`w4tA@)*C=bCy4VUF5)k~ z|Ez@nX!IJR(W2%**e_{J?mu!6R5H77IOE~$L}`VR!`~R<;n8T?pcC2(1MG?98ISYs zXEQijBWQ-kMUR_fxKWj`2ugYbmJV~%81+UC~pZBYA#ShNvuez;*sUh%(zG4coU|(K@l|4ToqUZiM+5lye@SJ zn5dan9eOsf!k;>*%b~*)^ifQhK>2IxI{6&cS`}6A>hgD#pX5aLVs}^AdtIK{L zLDM7pQ3&Dx|75XOA5(~Tx6iaUEhDAoyM;c9|CbA}&(BVdU(nu+{}4$=IA^UJjbJ<} zSN}*>m_k!k+H{2l48w{;;>ojArmaqUN4XhTpc$E?ttL~31EsTUk$VY4_tuXnD(Bvw z@;xrZjQ_s?BT70D#CY=Ze)Y(j*k{`>wBH!h^$2Cy<-~(|Izo2gz9*}g_ww3=`&EuE zg#({3H%S7;4fVW5l#Cd$l7In+0Xc7cESj=c9N|zXIXXMP%ZT%PLkjHvGf%~sf-Oy{ zp-%_KOR?9INy2>e{OMK;DbR;4#no1;D&dM)qwLuYBcljw;zw^WKso$adh#-`Poaa03KMjrt0CH3hIcHmW8uZP0&3uZy4c zJ~ylyXRE5RUu1wVfC{G^1~e$jl`*Y1o4-qfr#M;Os1U(&fyxcbZ!Yg<>IjVv--Fjw zY?N5DwPEEL(rSONjS1BUgFuZ2J38GCk=GJVnKnV*K4%PciFo7f&FS(1;!dNZNe*ru 
zQBqdeS%S_E=2oidBRQ0#FiLWmxTN|&b`4rWMMz^b^ua=4`PZpPr4}3!h6*x)G>1tS zPD(BZO-6#>#!Ac=TBdCZm|j;IZIIax*ZiU6b3Yaj5wIzHU^o11L3X;1| zg;BH=)uxD4E8EuR{wBkU_`v5PsA6;wykD_y@aXdO;{~PGFWk!Pcb6X+k&_Wy7;|4UuW%nM`EcC?3x0 zGt?;wk4#LPw#2Hcovh-ciujb>~nafMgX7q9iCWtiC z+V=<+DVl<{Is8%)oDORc%Z3~8Dm9&-t++t*^6%@}mY+(S3O^_bSnGB~5@hhH)qCA) zTbU7yM?Uinf)}T{Ju72=XTa=8_NZo73v<$E?YN3q?F2QT>}^Ox&*V^4=7y!!7@bPR zjWkQ4kjJqpxYNeVzp=FRzU*Z(#wR>ypOVHLHKcVf!eD@BYkcOW<0%T(9|%3~mua^L zYnhsAmKW0hYT)2Ldj&B{;Ox+U=r}mgk~AO2w|BlA3}>l0{k56Xe`1=`_hEFEyg@wf zFZbKH8GmsV-}Bl@D{Zy}@=IGH5kK51T3B1dbnUoa+~&mL;J^sB)@ z0caRokCzbllcwgwY= zy-+fK4qTHTnfdt- zs13hhDf8Kfpg07mmFx60KzMhe`=Dd5kn)G6K=k+?b?2Yh$Hy;biHT0Y)(SDj!Z7ik zrWN8%a>Bk=@BSTKzut$5D~=eUb9cK~ghIA(eFk-utot$HS2$<2y>;><64Yf%SnSK$ zHeb0CKc0}FFPyyY^HysfjKwCuq0b2O_#nsI6@E0y0Cc(E|M(Yi?X!o3E92Cx^kJA8 z3?VwAnOc5rZl=`sr9tRiJhg5zYcEn`<{Tcoq`NfH8?j`Ds4JWsR?fQin7&iz%^E>69Yq9d&Wci zAb4tOqY}V#CRizvaJ1H5AIEUQj!_5$8D!py(b#? z$|EctbLx26IkM}qXW}cwL@Aqe)}bRLuCaZzLXFFyJH|-Tgto%s{TuOM!4ZY%drOp-_6Zi6?)E;y z%*rnF=CYd8?=^V73|X~#MQk!FAu#K*H@;C&cO(3A7t_{!6IBa%)!lN)PHPg8S3bi^ zh|T{+Y_ly+Ga$vsl13PW^>#TW5}Fhn43pxr@dh8?!eWseCNuEsp>AW<=6W^v2g~|@ z(5t&h8*rL^I2|wFP9o&rfv=@uPU#;|%_vbq(K$F+n0gT_D!Ik(ns_G<*w@;$=i{CR*5CG|$upMT?Fm!F%m?{|AZm-5=UNF~ZGC|S{aKOGPmU}IsM zlizG{#D7Aa38m`XFov7pA+#F!MFmA9#U^L@&H0Q~q{srZ8$Aw^V~jE;=W9x>ZX?*t zAp*9oPYPJ@zIO{$*C%V-NB7X(=XK%!*|^unobQ=)^!hS9m&n*)I8Bvx>+Nqf6hL*EP8DicOTK3ie5ch7&yEv%K= z>sHmx4L$GadA;|%U?uQ#)8ihMvPes02>dSoDff&HfT!LJ-rCs zEUl83mmI)kPs9{g^7C!qvzv`=_PW`2nXp~VR1%wy1Lns)BcJuSF~g>{MB=AzP@rCf z2}Uydl4uxd1r}h0x4pf+Y0qPV3_@F*o4kVk4k|SOOfpZ%!<`DP~CH zjHsu+uH8}Hr2-3Lq-FumY*bD?dU*BroMUmnd@8*VWEiinEnXeL=2c+nv9`m13JfOE zDXw(@{v|8PI#z1(OAlifP6v0y@asPlFg2#vTU8o?mSv#%264qgUgy80O9q~C2aN-j zDM8Sz&!XE4Tm%j?6!eU?Ac=}Xrt;ih%+W!S~Q7pwPCk_+dyP z&H5Gixw7b;xnitq4!Og|O;b9Bc3walhv9Y25PwokfDrF2zGNQ%CuKw(juD2Gq#s+u zD_~&DXt(qSrN3L7>bfmpL$)km79u;yJAwqUK+i9!6^- z(~Rr?6YQ1W%_74K?#^m_WWMovdO4k`Br$(U_ma+68B67liSgN-ZhL`ij2d-NogFC@ 
z_Tny8ugorI9Ew-8-h}#loVB|-AGm=5v_0#s_4{d;OrYY^N4@#tHWS!UW*y#^;H)5H zz9;aMOTa6VT-nhP-O`f#t>l%A!V`gNHp9?#KJw1p2Co{EgPSXcA>IrnR=E6qh!**j zW8mEB3>qBMyJGc*2Fo%UB`rO5&_O%mgDRt{8ne}kbVG1X0HTR>R0D^hgA#;`H77zq zlx!Fv0^I0RBmfFlK0u)ycE3=%$nZ=T%DW$oSJqKCg1DWdV8Q0bxWD|gUNupfMwv2| zK|9kCG#F3<0cJ##U@zez7P%-*cEGf_bQ0o~IKnOk|KMR+J$E&`{t%zr&#Qhj8LZ5b zBvCp;KtHvHTW6t$_q@O#r6GI$PHvzcJsPohaVx=gM3&+f@95Hfd}quPuPVc*V@CVz z60TL3W0x{5FEn~4gMLsZKaXpj>(~Q=W}68VKMCqHW+i?ee)2(1;L(FPE#81tHAvRo zI-j7vz?+`C(@zkZNrDScid19xM42dKRITZLy>EcA_@oRq<$Ln@Y7t<9np~HaShonb z*H9(|zv*`z+0-}L!D4=~{;muyHrX0sR`g;01i?EhxT8VE-Y^!Vo{jik`W|WJ`|uAacJ910GBe z_CY%R% zdC|FtH&|GDBTo%D*8?u9vp|7LP?F%of;UJk4|K*yK-6Z>25Q+SR^Nm5Bv{fZgoq^! z>Z`c_c(~1@=49+9@qJ;S*DwewDZ#>$Bj8Y)9;Xp8W<&EkYHNdh`OFB6`VLsX*KKQi zvfD;DZ{8f?^u7hfTx^l}WbN(_)Y|d9sh6}&(PK@nIS*%rw`^HmInH@mc-70?G=|z; zo;B6W(}6Zc4KZL2orMg~=Qj?O=ZT#ho%+)HgTf%%D_}Y|aj(&zY+&vY5ij*{H-!u(+zzWtTt~i7sEWBv0 z6>*(0*P_r4kL(3AbWIfTb!3r*pxo)38ZI<&Wfkq}xMC8>I?*6uDLWpdgrOr7;)KSC zHBURWfKJtAQGj3bv1tC(H&$H{ZgI?_EpsA_R(xy(DlZ5A7;?HA4_G|u*5BD2a$I7@E z7{p6&oR4b28M4OIY>v0BTgmp9ts3dxR?^Bkcqd7VEg)@ z^8)hOQ|nmpk?usu8&v2RjJ1SuVrJeR@4-Z1NjI<|)n+bKhrarj@iP^m%@VlG{pwu?B!#!wS*kXj|Eil@3-5{GfTPqK`lPsb)xxG zJ3ZzylOH3LlkvJAgpr{RzW(%sP|NWUPXoeK?`4AgzSvqq4Ci|S1<`H}+qfJjJb~wkEGScA6$4h-7%fkrc~>P?b7BqM zD|ByP;c{w)bs(sqtn3%b)B4iZN4Zzx0$7>N)f>dPh$u#e=yvQ;nU=d&3+#DxK{ERfO>qP`T1JPu% z*1Dw@P`dBzbY4LHSVg1Nsv+QEWx9xqAR?Jzs|GNJC*rfhDxjt1?COk&pRE*|W#^gl zI&#yUT3Y%nw63$QtH*IgUn!$!4%_E0PvmoyhoiNv;R6*I3VQn1n;RSEXH!-G#o~*5 zFuCvslsWP>JUqO4i`?z(FZoM=kX)?5M@4~a_Ox^Y=$7MFWw>FC!=Q^Q!tu#;DZgM!V(p7UdzRG`%Mk%yX|qSNE;dOdsYWN~sPak(>} zysQ%@BS(~{j=U_u$wmHkxAvy;PfJ-ezVbd!q>|9C65wz_`4AnUB#TXgmWq%?P(cEm z@EX|qN1P44hsKYzAIgl!@K$qsD}r<`PXC$FSmO99sPfh{P$$! zuc&Les!{(0C!fjb5mvfihKI?7#MmeX6hhzl?hXYI?;5iZ4<*b0emR7Ya^? 
z(Dz~fLlF*vvc_0bk=GwDI>(^rki1Um=k!=L3|0lND;{SpWc$8 zv=VHVUot|3oF4|MTakPo&(E?HTrvOM5fcrxd+HuhXJIUwX>C(K{WyNXr`r67w7ltJ zY1^<@@W1a)9^%{|NH?4{x)u2}Bg?DVWV2mbjcahGV;dY*k^qC4f{ z?w|%A%gV;Of^R4Z#YCv~EU$#ixEPC8zV|3rm>K$Yf$zU3Zt&B>m+~hwG$N*3_6I7^ z;gb{F=Cl7c9D1;-g=?ITnTDnSxB^~N0AbF7p_GzZ(>ijrw#msdjYON@<#sloRWFI1omDR?~kQV-vzo2(=Q=&p#{q@b8 z2^+qPg+-}qk&tKES73uLO?y%Ls(Dhq$_zW6WN5(hU{yl#kfsD}cBA_kNV)8o68~Xe z-Q_I2#$vow)q|+cr9qk8;UQtm(R>nnpCVHxyMHTwh44QX{|3$SmNvf2lI;%-dDp;& z8CJ-G#a>bv+SX=W1~7K~uAIgXI(jtgF?KSX%yrKAkw^ZK(gubK5g>xZtHmX7*H5uo zA4sX*>Yl%KgencgqLwOWCZHws@$844>wg&EkqQHE>P>VQqiDIEUH)VqZ2f?;fDe%t zSFqd}c?9&wh|2T48f7r8%Cm(Ms4|7Vn|nr{hIy(=@@lDbD-@>Mn}7N&AzNRRpeY~t z(cz0Z!BW91ieJ1_N zGdJT}{y}d6*`xplZi>b4&R+-H(j!SmHybv_*y4Uf#Yv&p<_z~HzWDOWUN~D4uZ&z& zMjPAb4+qlE`vY?nhaUb}YB;&Oo7|$#evq`1fMV0(w58j)J7*%3lmC0`r6FrdcAOln zI{a5T@os#QXTJL)aS14uk(JCA6YD_+oC4FsSFkXyeJ}KE? zV8w&VRlRd0UV@dtOy0a%@_03*LfNG)+>X$KoWoESWi(>1CYwVPugwQBr= zR5zcY-4VW@qw3*=S!JHH1}HJA#haNpkIk05J~OXL?Ri8|DXcTvea{{MgE?)RF`gEr zKE+x8cpWy;^=CSLkY89sj#%&LPMXs$^X<5rQ}Z*CEBebdOZ85z7T77-5 za^hhG&2<{-WPf*)M3CNb=RB)}9$$PtH|US|;{L?}-&EgdXT@$;b;q8X_aX9XyDH_A zD`$+b<1g6Hxs{=ZovIR_jr1QXxfi^Q%LWhBy$|VD+h_VFsSS2b+Y{(3nG4z$=AJ@0 zQvdiK`U3o@W(tWsxS93_)X=*kfh#Kz{6_um;B>|EY^TfNJK_!e!|Ee5gI=G8xu?>kg{_sTo!sEw@J8@R24oBEA@#~0j40fH zwX@m4TcjJO22>RIe z*+S>CwhaS9YGiN+d+b!K4eUXCJ1y2ijXrtrlWD*HHN?CE(ppbD(lRq|>b-3DJonCf zJT2^BSe8JxU(8Qd1D0AKVrzcY)&BBrS%D%`q+gK=Xk>6q>3eiO+MifvPp9Sy zcdom)_F7+C6a@t6>f+_;spw6QxuS*WP@Yc^3)S?aoKe^j(~0s7H5{Xpb=Jhg1yr7H zp3(1=fIse26ounA0)t9K7JK-Q8qxjJ!hx%AMO^5{9m#`WSW;PZR+`dJx{Bu}RK!fU4)8b}^cKYg zz3)LVz6I`AM~B{A^|u(_W`D8#csMRF>Vw9Y*MJagV&S6(2eNCYXGfaB%l5M+0iSVx zj}aSFuZaLoPKW%E;^L@6eqGICM=IAyVUJdxTKVioAKa|P&E95R)9|d_uEYwA*+!(~ zH<;WP`Yo;iM8G>^jS*DuJuG1%wk1DysdWvv^IPWDg$wskf9B}ebVf(p41c*$;+S;c z3f*1|&}YGDCpwd9Oyu0;z%f;8*ZJ38=Oq{I%Ec67$(9SK$u?-!mIYx?qf zX=LuSg?`*!K@#ik?7w{-4hgvWsYHXt?>&v7DgBvI{w(XQ^6|l8kaaPD=rh-(MCG`m zbBN4KT=QOjwMQc^wiKbPK9f_s3vefTvFTP(cngtvC#8Z_;a-t=U61Hf9FabcZ+;e+ 
z&tI%@BIDJG!P|(z3ckz9YVI$M1XAed%LV{))|*i%x2a|6i|#*;!)8PME{cA7TH4uy zoD%bIAcOE^56(e0yC?4PF_AkXAGlA4Vyb_KpQDeFlJM{D^h)}v%5MGh{N2$4FsA3? z``#{8eo7@{SaE(!bx*i7G?;5L7bgF$0RbHdpX|p3w}9srL;L`#r0*wka~O#*6iNN9 zDfy0MCTzuz)vy6b`uuEev6-R=Yn!N8UCSS!f<2%D%is&UzdvOT6}T_daqT_8i`gQa zVn78+; zCh-iTjdRGJbxGd7u<{#Kveu`IEP>_#XE(a(hevN{gS*2LEA%J#mkcN{E6yh5bIj4| zsRH{eZF$|QMU&Lk-SJFG@zonGbIn0pL1x@?bAD%?rBy_Y)4K*NzfBal^A0wa z9%NZ^uv`2xWM}a3_gfiqC-+F9m-f5#kqn+2K0jW*dO9Tjk@|z2tt6|c9^{~7Vnmp` z<+tXQ7hzos^ah)i!Vgd(dvjsni?7Gu1NaK*RA-NmKZheyzqT9vg_nr`X-q!B_Y(hG zAkO$>m8}f`{nAX+?H-)><+z6@KqEc4(UORVF3x6pUWiDR8PY%HL?HpwFG|Dq()LkU z@i#(U1rFEvuwKqF`lPYHrFGy!TXo^Q!F<_)YGM#vm5UG8@QCAHv*Uj4{(d00|GSfi z=~j>V-p|@^&X1{~1AolXY|W0LqOY*$x24DHH~PIlrRF(skU+(x+@iaz=(I${^Cpv{ zKzE_*zg&R%Rb$lw;nl@4AppcSdj)mp68@P>S$C@}Jc^UGH+{E>M8;lm9FxHOvRAL*;yGKzS-gy)OS zJ08^omG6-h-&8BA8*}s(;z0>8?5>TtqiV9DXyshpwbgIQU8?SLA4RP&xv9<9qni3X z=~0Vjah~xK22k>X9p%h$-=a%|T@)fiC$!$`{zeQsb2>jZjlARYHiGn&O29!5Nx&C2 zNj4#AzS`3^4PBo~O%Ue2PzMe|SLYgUP&=R^?Xm_<0!6auZ>nR6FPr|1LQw zZPq>cDenf(>vkc_x!MDa6#p7GLmjASOj@{LU7}ed_9fQv#@lT``afw+ zyM~I^4CGsTx65}_LPS^951`h&Jl}-zf;o0gn)s~ZRST_`s%ln(MLB^~{%fkWhh!@q z9-H$khi{!nr1s`NLzB#!s6|Qaed3OT4<*61x<*r8)69WlFR2Y%?FUx_Yy|a?tBi-XFh} z5kp{jQR9)GIHbUBHiFtFGJ=O8!l;s-V=enE6M#7d!eRIu9$eCdsG?+WO58n1qYe(!wj;#!$M- zj@iNaDK#ZQ0T&JKm>-ApX&q;&@Ku@RgqOf(crGmuik4JBX+=H6c;4^%t)k%l`_9Y1 zgYLV%4;6wbE!t=eHhCo?k|-zMT@C962HmaGibK_^`)qrP+eO(Q!PDJy3fPYE87Bt$ zpM8mR+%Jc@$jZ^bwDNfz6|=}bdBlf=NU8s1BqHRgVu4XUJwczW*;W$He9lt*_BH=v zl^=|w_L*PdWq)AL<-3a>b*|iJD;Q*d^cyuehVs&YCq&(3FQrOr`>iZ#x}3dHv^W2n zo-L!R9~+fzMo%0~vM3L-w7y%NTz9S4rxf-)8)^BW&dn4;BoF@5rv~j_sZ{K5>|p4& zpp4BTLmnwF_3dCu#wUCaltIzv#T~=XeRkG5lBd|m!?%ZCcN0zg950y)NdHkYW&5Ve z;U$wFtPRH!N)0;Oxr|zANTNn?x2;JvZ<8lnXvSUpOJhz?rvsP1P>7$0cFK%fM(ew% zQ<McZvx;d76{wB~Xr1aqETkz_f1@lMS0!^clP+*m@7(*A zvA@G1c6eh)+ZQnE#_j<^+pgN(`8?@QtY|*W{^IiwvOXJXh7_a@ubsz5|fHISQ z%9ln>i`9lOka}WIm{7-azDe(S(0B5_wUa_84U|lg>m8#_S;5KX?v{!eg`**!ht@Am 
ze*3mUcvA04*p*h8$aGNepCK;yX1F_ZzXWax^o~xu1u^8=>|K9ti=)k2lZ#wTBm7S$yroCzmhC@x0|YQ{EZkdna5!QG4{hG+CV|z@>*M!m1zFTnzu)2j<6T3 zEsIhBEr8z#`Q_S{_=dZ>wLV})q?u?3)AY=)SI#w6X1^L1Q$wZKJVm z8;$LzvEA5q(^xmQZTq{)^Xm70zJDd>&Yg2+=FC24?X~vaP8g$FsaMsq#49k_Oy`eO zWA>a}(rfEvx6Z|S`+Yjt=1qRr%mLecgj<>mbOf*UtQxtr*P^CuDiT?3e=Wyj=<(T& z#oVv7*fO}#vimf}$dk?I;b)yQU_#`s+WmSedGSc=sC$kTU1yF-Ppe=CQ|>Bx$8zY~ zy8&C8^UW(#*P!&1!(t47Gb0?gv&QT_&dJz5ia_HAmw4`Rv6{>qGUjQp(3pVdsTL~I z8Cs3}TvhhhL-GUM-&?E9mH%DXHfWUzlJu{AxJ(DHtrYE_SqzS0?(J<(<77U81d>}I zgs=Oyqiwl*w$AG}RPt0z99p?e2>4@f_ffte>+N!0W*4F&{e;fBvL+H_YB=>&?8G+D zyx`%S2X6j7r5SS?vn)c>P|oZVBj_-LC<}m1=LNc2%X>}yrW1MU!~76fv{`F>@b|sS z@1!U$#CDy$QcUC5Aa%Bf5*gJgMjVV> zq^x`--6V{1Lv1FfjO}K0(EyPQCuuPVj(2Zqr@BpbP~L}=1h{>_CI6@NJn(<3Z~rim z?+ze{|CD_GOLMJ#Eif|tCvzIYKT+e$nfYS>9+}RHb)bN{i7{m-VE#UEDEp;20+x@z zhyv&JNdKc;FTCh~6x#i0#uc?4;PG-k`AE(BS5iuFf*cY23*ij3C z?raT8h9q2)v^fS?>b*WsEY{e8=&?+n0(oP zT)+Rbe5KMrph&I0l!T^v?MzJ~5DmIY$iH zD;NMQr9;w;MM=}jcWcm7#QW^nB%-7b@7LzfDhq!N<4xjT6>>DjL{rb6LzbZh_7XC3 z{h094o$hq(yxDyaGA^4Kd{h;OlrVI-1hht+vVt%d^r=U*gQQn~^dy(jaP+OkA?=u> z^5J}>*;G3!h=)rwl9?p*+EUfUa+n>-6e&yYu{m>dVk`eP-hT03#BCbzPLuvhto)nH zp^7jGqGh9gb7kzwvTZd$Zi>f}N?e*3@ekm?d0QLq2(U_ z!Xi-KeJ<9I+-D3rglZHq92N6EuJV)^S6BNk!;I4n#-Tn@dONd0zS^WGu3Wt3Z4!${ zodfhT+(;u7?mN}UJy=*`2uG%@#U+tSFbXDHN6$dHGu!4isb;${zM}|wEEsou=h2^5 zFz45^vINjgc4HFL3G{_-&J<&{y{M7Q>;qM7JxrQQ&1=RPYP%Cbj$ZCw77Bg_@Ge&~ z6(DHz7pCO9jcbQTn{O+tkt%ZM3tO1`ty)<_TU@6*=HnyHdyw}D2||(V8uv!7iJvM` zERFK-pRzKyAA)%1O)z6@54P6}K{gg_qguMfT@r;pVY<2Gwz0=DF1dSIV#--xZm^BF zsZw4pE-`etQO=xeWY4=BNhKDsfUyvl-F zX$lvQv{v^&ZOS=R^IpQ`5XoeJux85iY=}2+CQq%mwp!EH_sHFKvXt^|V(F5a@hnXCh4pVt5TZ_iaw9?mSq~qFh)~y_BR&dwjxU|)!_;zYxTe5q) zZFp<*f~p3;WZ>sj%J*AbRnOtBU*LPrU+*%!S{YgRsDz_;&cL7#Z|0CmNR;4&H9#W+qYZ36#k4Zh;N^m{fL>{n6flwXKvh^INnTIUBPoIOT2-r{CXw*D|Ma#s;bfv^Q1xPS^o{|z#W~% z{=9;RdrK)tal3E-bDm0-#!=In+1p7>)i7vP46TgsFs3gv z!0ZFmg~Hq7gR!8ewL|+IDrW{G4wMTlv!Y^g)HJ7PGSc!pcEJZB|6wd(u`F*&K|4J= zPDPlKgnr$0v59lwPorp@^^^qFuw&)f=JIAFF_x^{7tO0gTHUgy+?#)I!rww)i 
zN57)8bc88`fPsgEh~^YdbPwXz`w>;brv_1XQs;2O)pNX2s>)2h!7?&J-{-Lvyec>L zrt%a5GPm14?95U2`+M(J_dv!6cSKQTgyjV${Ct_HOQ0TuTero5K;IVkE=_a~(wzTn zH}l{NE!*FsN4g2NR_&#}qO${i-KGdrQ6(=IgE1ESy30_tAdhI%n78!G(U)Io${uOn zu^PnUpQ=L4GOu3EZEo`z`(VT}KQfV6QCN0Fr$eSQFK#pQ9fQ5kV{zK$kHEc zkD0Q!+Od)JrnGgF$iSQsV9ss;WaJ9t2np907Fg&HIWqY?^K zO;N*o{+1=wq@CWg1J0ah`kl*s`wdt2u=tkc-tS{Y`_zs0U5A?`t!o0F#NE_`3_7E1 zzuso$Qw_&o(TjAi4Jq>nC2o=5SH|U@Svq;9@Ag4U_L|)k2no{3X3RIz&XCpA^7I9R zqbZO^iX#yOH4&PTGvRBW0mJ!~7i5bE^L^8rtd3o_ol{3Ok`+Z24!2;w?y4Jzxz@rb zpah*vU0bd=+gGSo0+dV{N|J*#2%j|9O zhP~s`T3$Qde7&U`@`y$qB+85-XK(p&Hwq9=HGVXC-y{>+KwF{}m;4LP$62{fv0|cp z$kAZZzxzyQ%eYizYayeGXcH5hBNv~g!5Y&vZ&tz7Rnx+GKXOVdJPJXw6~N8KC}1U7 zq{qJ<-Fm;J`Xpnyc{68>B$c^^YWV3IBGo;cgTHcItd*&Y#1(>l0%xzba=^`5wn!N} z06mJAm9nhrHBF$SB|b5fNhs=wIa6-2tF2*He-bstB20>dB3TLz!)j*Nm8RFf=>nS@eO`YE(NAUa)alEk{oxTtKV$SMAvEZYVFc% z_ekDbLVl9tg%rM3l!2rnSqM975uj(PRj&yvyFOfT)4o#SVDgCGGiAgLKDl*om)WZY zAM%^o(0}T2NzoF{fYC!5HrfTZPxiHS;w7`OIMd`z%g{NKzNrQVz%uT51*^k<^PE3v zzNq}=VL0-u|umR=L0w5)U zVoOe;T@P6x*&Lgv{+H&FQl-$=$TaQ#3g z$_BV|Gkv}FX*Kk)nWl+|5&OYpoM;>jdC270#k}VfAMsV&S6fyt&{RYFY8d2~cHL4T zpUK0}GjDC&#b@$T{%JCVU|={KJ6eiPX*OqfPBl1cZ7I`t%5oQCN51Y^5W=-(FQrfQ z4nDsKcj3OV*sqUK+|f7r_?R-qxM4Vrs12AbTJ5}+M+~c%6A)Z1VKz?VtRIZY2xESA zQCT=lA8gg793PkDsPtlsc^R|4n5XCK^L)J;c({QSqUL$0$pu5;*WrspXur9 z=&5ohGgaC$Z%?z&>v?%tiV?xw_hgQ)0nc2#iS-Nd;5^g>t<+NedSDjwW#Jh3V&nUb zCDf}BHdWDw$9@ct)9dP1OGMrCsHx;iJ!gof&V_&kDpR^Y_ZvUP#W<2TrJYfq04ABM zGcr$ZpsC!)D*)*xMv+W1j2Il%dEA3d1=DG#T^VH$E{}U<*hD7#R};6Ao54c2(^1&t z75st$>O9jHj;g^_WZQV!t~uQ@ zSvBZ=N7c=&mjWdf6|djp@-}}OAp~o@d9fh;p8w&7kI@xq(4eP^`}~UPZ~OMjP$gAtluG)T#Lc$4M*4n$bPOwBC{a940Rt z;T@h)1PaoJTlI@ev<5Gt(f*c%6#{Z=IzrDM0xq5>iA>4&X>5-i6%Y%1_VUAh4+I^4IihhU7F5DU&%_)rJUVQo#OM^$7@Ct#47jFYcYL$v3UU>@5GDZ|7GIbm)PA$vV_g>38&eN+#h04+wy^gl$50fPY zMXVUG>W#}n08@X9)H&Tp)3HdYUmvNBG!x!E4n}Yr5GYU(pyE+6YpTw)x$0q@_*ROg zk-Lt4s&;(VENsZmi{~k&kr*(2RBAsMO_fl`E&1y}JS4!8XvstN!2J2LBsx!oN0$u^>Z1&{r;f zgt`L#t(m~^Rp9x7om6)NYnK1h8xn5hdsHjDcz0)Gf-BEL_imR*qsaSV1}7y)DXLbV&oMe)H4VreAWd& 
zg_^_SO3G_lJ^5Hv)eZmeZV8i;%xbD;;mARetqi!x(&Ywd`_?ita%}Mm*MEdbvje5z z7D-_daB3Oyf8{gr?UgGP#{NjKa6o%1ThSrzj?AOukvS zp+q(iQMC`3igKWvUccdd1faR4XO|X}71QFqQuLKmpc%bAH(>u5SPm%ZI7mVE<(_yzE)fHf34nL(c1TCCd?cs@ z0tZvg9@3Ha^>ImC__DX{JOwLk;Smk;iMKgqHMYB}5#eg;q8Yw=@+qwSID&ukDa3ul z&iS=rZEd)t0sKY|?&@W{Uz`rrjl)IEeLMl*3BqKz0mJdSlbK)Oa`F8&*~F-)YlcFr zb$?9OfEW6~Y|mvE;&vV5ao{4vl2XX0%Fl+zoRucdEPj?i2LMpFIT^^-qWD!IO7DX(8jw znifo|X72{;1Px6t2MeCfL|u?3+_hEM@$r!hIjufA70m8eI#9ocytIIE;dAP2#2Cj5 zL1-@>PQuOz5tWbEbeVXTNM5OWAq}KhcIUfCU(yc?MC9`p^O|1_Gt~P95{w7eNjZsv zR!Gt9$So`U4zYW$mv~{#wpOhH`!=u_sB8~WmUG1&&zx(IvsPy*g2Rw!TSAa{_k_zs z4TOYgs7{o^Yb(LJ{yi6L_kWTTuBKMXHoi>C`@e=gs1&z0*7znvi257q-edD|usI@N z;BxtR(j%U=r05cHzUcBuz8&+qo^9XxG#WqXN!eHL4IX?cx4VSzxG6xcVUhv3IN6`O zazRN{rhb440ZSiPLko*PhD-wO;;gmH+xY0MzNoUJkrE7f7P9FbTxQmtpPn3I=7Q8A zQ@sn1iWTnEs`-!4x1E1~azQU)r}s8JMG_0>iwNkW_Uo5&dRqP>*}crX(jc0n;%t=u znos~&W3vUy!f&94ki?kJRh4Ri%d*4h| zk32#e!#6cH*?p4E50eTOf4AMsqW>Lp{)DQtP!3NK5h6a?kx-;0jIM&M?G;L(6q6uk zsgAUu6zlZ@#xqL7dd5{K(fom3lXts*K^ENj;>n8yWO?CWdd&^^aJT0>iv}x1Ae8bX ziTGa1cGLL;XkIGG-MgC;LI|(7PQ$A@se63#Bsp0n-+bX5p563)h>>f9dpTY#i8_&c zneCcPAw+j?f(Mm8Y_pBZ|AZYYNd7VoeJypy#HdA&z?Q5@PgVa@KWMi(IK%%8Z)=#IbH1%Hh|Bf! z0-p26nT4UR8Z1{#M|WdKeEzqhzdLd1xGYfd`V$|EsZ6;*cA){Hb-{u)IFE<}yT4B_ zn|C}uzm+_C#`YxpV4l6a^7EbZ#eB%pd85nudM`9YM_TJ6gH2%WW|iDqsp-%Co?%QZ zb~M9vc{pa=NphFVb5Q!z_LXD4`0averl*Eu2P|GMZ`)@&2lSNV;^uFMw zN%U+WaQ*aR%%yuKrJ8#{>pv(cz|NL*sIseX7obOeIu)VY1!aCNcXR*-sZ5m|=)NG& z<=)a*T;bKH_?at&B=^X?%nl;#6!nT_g_!L1w4k{czViT~nzK}^e~N*q9@?Gduz)nI z5j9a2Nh{^I&ktH!cACNm+z~o2UQDuVjbBERyG3E}!bgXCZ~hD5IeRuh=JI>s_wnSt zaZu_5=^2pzU{4za_lT%dL>H9i#UdsoQk;y|1i|e|1Y(RYe4@Uc47Qs4-4vcK%;GL< z$31zVSZKR4`O{$og`Obw`1r?AqYK-xmlbz57U`U8#koh*&K^At?$+mo`-N-{Nmnfp zGmcw*5q|x0XF`qv65wz2&MSbHkLfi#RSNH|yo{X6gF|+RWrEqwr4`{?sx}CmK#m+w zc`0PJP({<>!y7{MMk!Qk-B!K3PWmiD?eSzuLsEals_o4L2%e+pT<+I1L&&h%xda?? 
zgAeN_Ys!MeK_;uMVakzLFVa2AOZBC>->_(Rs^0gilh4u zG^=JcANe3cdE77+in2cW)+PTmL`9=PAZ``9Poj#-x=9S}{O*SU`TD7y+XS0f0bsU@ zOxA)OPWcH8euxsmg?mP#$gz-c>?9T8a|Y2ZpJCc52a{x9hi~-j0$j`kX=Qu5;Z+`; zVCN+QFA4$iZG-r3zy)jJ%9BxtYzHNh`cO-NnjUHc=cUmO>@y z&1lpHw;bltP0-Hwp;lMZvj=|+D-(F`Sn1!+l+v-Z?%i^R=J~PT#z4CL0?so)i8V8d zd+lFOhWGw1V-sysqP`6-!HuRY77A<3&smE3wr63k)lAlQ29>Yy_7K5_>k!`tB(&Y< zabNM00f8wVgIy4J;F5M=!+jXAH=JriA#(d?FnXUIFd^7CZja3FZq9Bxdfu!ky`q>| z7u@>Bv8(Yn9sMIZ%tPg$wQsDlE@KDibUWWT1B6)@&TXM5sz)Q{er?~+$vM6X z9Up*>A&fx7$U#pQzO2GNXE6Df6t78l1P91Lezjsc8rmP&e5W!EG{iFu0^?^$2YMJ zDh@v7tbpCBU=vUtC1QMfZJzF!+>tuu>uQ?8e9%_cOV7@wAFRlG&2N)(ZQTrYs`NS8 zg3fJS^HpP|WERegW(QR{s!mgwKtj}j<0BJMs7g%(td4F1XT3w%Qqz_6ax{=t{U!T& z=&7TxJg6g{(7wuf^%ccbt%E?zIhWtS0Ju@wWrVB2B&%$WdORn+*s!E=YQOx7Qe->T z;`Xts9!Wdo{zLfvH|7)9Z9fmmKuAgkiA%q`Jdt%>asJ~6jjYhX(y^KI4wk0Y^ zi~kit+PwHuFnHK~a$_+4ySR{Z#vx46Cf!P)sG|iQ)1X(;TZgl;NzX_tv+kc^7pk7| z%2jVbyRJK`Gk^k)01Wz*R`cmTTFp|VqU0s4kxkO-%O`Y3sBXo$IB9oD)oLEnkZ3|Re8pGa_-EQJxp_^RuyWn!(pqKs@00eaeS00+~b=O(FMd^kOZ znaKf2tl|CSMgTR(C6Jh{Mq-P>sM@H)LQ~{?)MzthL2n!fbI-8y0r$_$U57INMU5Vg zs!}?Kic3Wm9UE-2Z$kqyp*N45Vwqerr$ zx6MUoi{+yzkGe=1dw+o_N3!|32fnuj47aSjiZUu-7s1!+a%PKC@X9>MJZKviYJHQxXutjS$Oaa$($cXeX{n$9)-@XdI&%~AAvNv`-bIq`;9(B zoy-ghV&U!;xI{HiBpxs>Of$UhPzR8P!{5#U;Y>}M=iQjs7)Us{0e#li#MKrWhUyIZ z4oT-s_$NQAYFoVB35lq`s65otop#jwW@{GdhZ4OFvjmR~7_k8cmSrWNgP&X8>9T4qCrc4JPrykkDw zQ7Vx>iZ0@*ZiRh%-1Rn z8KYIa?I^a&^z@eYG(>L$SmZ3h*B-3uX>$}?OiGpV`-aDZxgUJ zuNer;PVswxLIG79-N50WWc$|hF(4KHRWd)RMFkdv1iNnd|Xfi(&k(alR zvImDWe`DkWQ(;}}{`9nL#)^xN=!{a2-hBQ@5di&ZoQ$iYpd#np8uj6P56QQuV)aA? 
znJMAE5)(+NvY1|?^1z;15dE@0V8Q?@U-^?l8X+!~Vu*&II;Xg`#qAyx(t=@B?)BC= zy5Dp)2LrM_Ykt7L0s(7wq(}+IR!Kp6e=83gBG*UB+!Sb0MmPY3mXB&;bdZy_yLVS{A$ahW|J*~3 z?^ocJGBTrXR%FGPgC-ca6~7clrv*|BZ1cVz4`>X>59j~gkphgergxXueYToTAq|>G zQf_E(1&56F*;!5kijozx&me`2K;JSkG;DUs>CpEIIDmYq3p*b^+JZ`5^OJ?ZY4aTD ze%nJ&(1XyUB1zjbAqPK+N2TvV5nD8^4_>uT$K4F_6fieM_IMh1sh&+L@1*EJX>{bY zZ%v(>P<9YcD+_jZh2<`C3r%G{Ta;G@U{~ zBaN7(fJP~k7#{qG`a;N(i`4-%h1(jJum)uOo?0eck}C14hUzMV{`u!WGyN6`fW3m`Z1ZXRsu=3pUKN1pE=juAscpAWdvt4`k~+{2>ymQcSwC>(4m zeA9RTMHYE))?*M8zNfb(?_D%Dgw4o| zfDb1cZWj)?1y%F}1{gxpBLIse3@s^`LG{ET#801Dou*u;Py<{UwR8cDMkhcf^n0=o zP_6waW+@-XXsVR~IwNAcLY3l%5Xl{Ma|q5l{?Pts(<-7KWsp6Vw%V#fr9HeOp8wAT z_Wc+`$8&DLsHbL=qgd>y0_H0)m$++v)zv<(*Aexg`_&)7!jT)TfBee0KVM7QYOJ6$ zd-uj!bSxH~!Y?pqV3A6%I%6`!f)DFcZVV4pWq6uVkJ?c0Q^xm(8QWZ2C{wHOi`1)s z#|urNLyTfRyqwpxLV7>}dQVlb?#IU64z4}e%&IL!vob4Hq#WPG}h{a!3s1TKXLa@`Qre{&M2OZ$zT0mC(lC|F7$-%-m87lTi z^n)>(tsfc`{;$|JcRoAMimqKb8{t>lu{ws0vXwe5hzgZ)Gc+o0ATYS%bN1xaPD_Ol zz~l$Y)n%)8XSYN=r``q9Lc=acVBx`6gSGa*{oH=Roz6a`(SkHgJlj(TMYWE`A=e@J z({gE|jHH)pOCO23H)pHEU9Eu6O;u1Q%7ap3^W<|u88FsDR6kx2egeqfINEB;sJ96m z!^mfTp&!N#gU=Iwi#vb|c!g|Pz|=mkt}zonz1h+^oHj|MG=k?Rn%L0EusQXL??C0u zu09E)BR&OR^47_1wVnMU5Ht~>aC>YP%T&D(B=+eW1xEOni=rH^QCb0O#b~AH>8i) zY}c?H#=#{~VxTw6-IUHM?VNF}j-q`T8Va9(F3CrKwm96&)pq>0{YhAk3>&M&z=bRH z7!Jh&w=aWltC^5ZQYROci*f9=0_jw@B@Gb*rEdUExiR7|MnFfWXGX_%RKtYc`qN8# z^leQZOHa3)d$Swks5(YnY{VG8&$49n9Pb+w0^%6uEY7ZWCrTe`gUlNj+HRMvM=_34 zUk4${ECs{RPg?HB9NA|{$UC@bCZyqb${!f#Q_WnVYhW{R?B3)U(bhDRcQFjCZvvQc zvj_;U<6kabfl7WDpy%{22CfCGz>RMPF>8Vgqc^j_ramTZ2ULs?SajB3g&92I+XiIo z3WbSo{k!QW8%*2IL``lsxpzxr5TNm)*EOBxWYTX~#NnH)*fVFI`JIaWMIdn34>OIt zZ-kaOI8)!iDs(W(X%xEpXCz8drxV&sm<@r~7jZM!z=`7q1OEtm#LHLjI_%^L{qf8a9A_IutQ15H8Y$X1jy88L89-(X{8mi^s&>T6b8K&SZGS_@g!x_bNP>W%pokyu=^<} z2X5xZ-$_7ctb5l?gp}Z}<7fr}jNr6XrwD8Wqx`yVyt2491;VVYPWJC;Bs9vz5YnR4 z&t`7@0F&z9Xsk_$a;3-jWE~$nQz@Vh^kd*qM66iv=-ReJuIFaTAJpRjRfznu;ABeu!jkB%LSI=8)ulnu(lv*OP@w*1rfch8! 
zP>0I{WV@nl)z}ER_CLz7>LWEa){O`rO^;(te0?duE!(7N_4G+wK2P#%<19lTX= zXancX(9p&H2CZ#&f#%P(@_bxjnbrSMEZg#BUXdA*pwjWdaE@uegn4JiqV=C!BltT{ z1r|V)$?AnbeN)0*QjL76q;g2cq^gFS6N?!^Em);&M9RdsT)Gp2HH9ma!IZVTsj5Xj9<-jUiIw&!pXW;1m&Gb+4D6RT5(tX;PJtx3at_XAr zEt&ea$+C9-7&O2@ckKf62-*}*Lf1x+>BW~XHg<-gE;0Q15AtJj9@P5C!MmKRY9Fof zKao`B`K}|gk*$NDXPBFxsa79g>{pZp&rbE;0tCP+Qgk*fQUJKn2z-3fjBKWA{eMp1 z@Hg@99rZzm!qOvAe30{X21Yr2M1{-}XjMtv>u1}q^DM29q6aB%2 z{5XEo!)#wNy$iA69q1a~W4{hy4iXhe+%*_b;&QXAJaWJfa zi4NPR!#j|(H-J0$^?kxs`dbus@ zbJ9)()&Mh@K$6bEICh>dxPFS@bK=TrRzJG1LV-;9Ti^A2*zxU_h7)&?cqL)-u%z`j zA&Xlc+j;}nh51{58<(eubUw~*BbBe`dTeK&E#Pze?0aL0@`scVj<%Z}lhZY3?8`W> z8JZmTx}MENTh4Kn^XZxFKgjaNQm=;@Mz@3LT7v3-OslI49`^Rn4mR;gU zOV9g>fgfhD!xXF@Z{x1#=k0WLsj{1WNZug>gREk1*WR=dcUc9^^KXOR+|Pc5t-KP) zcIh1?Tcq%ilVWGPLWHoi+8^E=pbZbXex_o5a#9^QRQ53z!lLzAMqmP5J-?l-XiIC} zVGmqXo&?}q5gJ?*dR2pT@_Ty zI^LIGpLm;$Ni$su!*0+LH3X9#=xPj2pc*YU_y9wknx$C&GR{>|*f1o`WM^(ao)&#}Rne8{_tZn9vyGYJU4=?Iks)YYc*TjD2zm zQJ(wQrz4>OL_OeB+r=SPb&z-Dm~7^o1a;ghFiCm0eYB3c*Wo6|#9+7VCkIN|c8_q> z0N5o9uj3~LMBN2oMW=A?+7K~K_~UA7yw}kQ0~h+`zDAu@Au(G_Ij?Wt1I@y~Pw7gF zDF81Gr=-l8-#*$XciF2Xp6C2?uN$7c_-O3ZZ?O{2?~9$wewd+Ic-nANAjfboVtM&~ z>#GpNQoi;HN|Dka!GjGpclcsXCs5Ohu+p+jUgg)3j(N}~G$3ie!DF```8|Bvi;NA~ z{c`JF-ucZ5HmOh=#+69(sv%NcxThkn*y4K^<4p))hn6$(3T1k*$2!L4;ctNa* zH|?(WkI>dO`F|8haa%zL%VgKmR!M-Guju1{};i%&bAY zcA}zJPA_kMt`QEIE?BB8OsVdMdxT0uYOi;y)no9Y6B)-y7;P1Fi;-a@G-WqGMc9Ne zc`OX|)gU(GK@s7=wfr7O$dybYcOcSd20OL?d3Q51ijGg-KWwLCmS2*Z;`$ySa1Fez$$OF( zZdh*c*?Q|G;6EcNon2D*6UgFbk;3}b@e+GLKlyJLJnu%L`BS%ee;TFf3U3S7z4o zfao$&hvX7F*N~r4$>%UXG&EE^Nh5@RvUEYNBX@&6|+j*q%Ac72p;lhq{W}C*^yiUb{F_fXQg0 z^XaTSasegNkHL3CLSy;$yC%nPymkA{8!jo=7FaHQeNa)}KVd@Na1=PGz{m6?9260T z<%YWy25|36-F&RKG%gZM+9Q&;1Dbb~(5Bz;fq8QO`PR-YZB+qz&=YW+M_)$bgZ>wcE7g-GZ%!9_HKvp3e`&~qFWWze$R!dVnPW+Ie zS(zzeW18(RSobRHZ6oc|cWj$f&%$}T=j&UVOX-HQ*3MPNQb^!o z-3Qz>`FUBujCv$mu(@8skh?y(wGHR*4IoyZ7(`YVM-5+3Eu)iKVPM^5Y@HbH5OAVl zaiNkgt?*w75p|htOGief$bI@d;p&!%&u2|?+0XNw?eoLG8_wO=&pJjp)a{G6OYGg) 
zDn~oRV#P>TsQ)b|w+I!H$@Psus)Ek6otfynVao!s!)V`q{j$TvZdB$KeF62Fu%rDWV|dQ~i zjJCu0m6q}qm~EpPc)w~G%58g(RHznP2IJxXHiX5Hg{~dhD??i{_V;aD+)~66L?xRn zs~WHh)%n7EY=yl~8$%WR;;_Fsc8~LS2AJCZNdN?@Z?86lXWqpjr0>t*E*^{}?nwte zXBG)Dknbe}B;8q|(76uy)6m}};1KN~S|RuH2FvE{ldhZpcbhy5`PjN7g|& literal 0 HcmV?d00001 diff --git a/pdm.lock b/pdm.lock index e719f58d4..21d6fafb3 100644 --- a/pdm.lock +++ b/pdm.lock @@ -5,20 +5,20 @@ groups = ["default", "docs", "lint", "perf", "test"] strategy = ["inherit_metadata"] lock_version = "4.5.0" -content_hash = "sha256:02796e8f5b77793e493b9187c064cd5bb05c0ae048f20841a55b2b43dba34b82" +content_hash = "sha256:fadfbe86974a56d4e949d3736ce5f1aa5cd2baa5b4ab0a0b4ed950fdd2f9615b" [[metadata.targets]] requires_python = ">=3.12,<3.13" [[package]] name = "aiohappyeyeballs" -version = "2.3.5" +version = "2.4.0" requires_python = ">=3.8" summary = "Happy Eyeballs for asyncio" groups = ["default", "test"] files = [ - {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, - {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, + {file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"}, + {file = "aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"}, ] [[package]] @@ -99,13 +99,13 @@ files = [ [[package]] name = "alabaster" -version = "0.7.16" -requires_python = ">=3.9" +version = "1.0.0" +requires_python = ">=3.10" summary = "A light, configurable Sphinx theme" groups = ["docs"] files = [ - {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, - {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, + {file = "alabaster-1.0.0-py3-none-any.whl", hash = 
"sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b"}, + {file = "alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e"}, ] [[package]] @@ -158,13 +158,13 @@ files = [ [[package]] name = "argcomplete" -version = "3.4.0" +version = "3.5.0" requires_python = ">=3.8" summary = "Bash tab completion for argparse" groups = ["default"] files = [ - {file = "argcomplete-3.4.0-py3-none-any.whl", hash = "sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5"}, - {file = "argcomplete-3.4.0.tar.gz", hash = "sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f"}, + {file = "argcomplete-3.5.0-py3-none-any.whl", hash = "sha256:d4bcf3ff544f51e16e54228a7ac7f486ed70ebf2ecfe49a63a91171c76bf029b"}, + {file = "argcomplete-3.5.0.tar.gz", hash = "sha256:4349400469dccfb7950bb60334a680c58d88699bff6159df61251878dc6bf74b"}, ] [[package]] @@ -218,7 +218,7 @@ files = [ [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" requires_python = ">=3.7" summary = "Classes Without Boilerplate" groups = ["default", "test"] @@ -226,13 +226,13 @@ dependencies = [ "importlib-metadata; python_version < \"3.8\"", ] files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [[package]] name = "babel" -version = "2.15.0" +version = "2.16.0" requires_python = ">=3.8" summary = "Internationalization utilities" groups = ["docs"] @@ -240,8 +240,8 @@ dependencies = [ "pytz>=2015.7; python_version < \"3.9\"", ] files = [ - {file = "Babel-2.15.0-py3-none-any.whl", 
hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, - {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [[package]] @@ -265,26 +265,6 @@ files = [ {file = "bitarray-2.9.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b558ce85579b51a2e38703877d1e93b7728a7af664dd45a34e833534f0b755d"}, {file = "bitarray-2.9.2-cp312-cp312-win32.whl", hash = "sha256:dac2399ee2889fbdd3472bfc2ede74c34cceb1ccf29a339964281a16eb1d3188"}, {file = "bitarray-2.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:48a30d718d1a6dfc22a49547450107abe8f4afdf2abdcbe76eb9ed88edc49498"}, - {file = "bitarray-2.9.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:43847799461d8ba71deb4d97b47250c2c2fb66d82cd3cb8b4caf52bb97c03034"}, - {file = "bitarray-2.9.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f44381b0a4bdf64416082f4f0e7140377ae962c0ced6f983c6d7bbfc034040"}, - {file = "bitarray-2.9.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a484061616fb4b158b80789bd3cb511f399d2116525a8b29b6334c68abc2310f"}, - {file = "bitarray-2.9.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ff9e38356cc803e06134cf8ae9758e836ccd1b793135ef3db53c7c5d71e93bc"}, - {file = "bitarray-2.9.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b44105792fbdcfbda3e26ee88786790fda409da4c71f6c2b73888108cf8f062f"}, - {file = "bitarray-2.9.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7e913098de169c7fc890638ce5e171387363eb812579e637c44261460ac00aa2"}, - {file = 
"bitarray-2.9.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6fe315355cdfe3ed22ef355b8bdc81a805ca4d0949d921576560e5b227a1112"}, - {file = "bitarray-2.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f708e91fdbe443f3bec2df394ed42328fb9b0446dff5cb4199023ac6499e09fd"}, - {file = "bitarray-2.9.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b7b09489b71f9f1f64c0fa0977e250ec24500767dab7383ba9912495849cadf"}, - {file = "bitarray-2.9.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:128cc3488176145b9b137fdcf54c1c201809bbb8dd30b260ee40afe915843b43"}, - {file = "bitarray-2.9.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:21f21e7f56206be346bdbda2a6bdb2165a5e6a11821f88fd4911c5a6bbbdc7e2"}, - {file = "bitarray-2.9.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f4dd3af86dd8a617eb6464622fb64ca86e61ce99b59b5c35d8cd33f9c30603d"}, - {file = "bitarray-2.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6465de861aff7a2559f226b37982007417eab8c3557543879987f58b453519bd"}, - {file = "bitarray-2.9.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbaf2bb71d6027152d603f1d5f31e0dfd5e50173d06f877bec484e5396d4594b"}, - {file = "bitarray-2.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2f32948c86e0d230a296686db28191b67ed229756f84728847daa0c7ab7406e3"}, - {file = "bitarray-2.9.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be94e5a685e60f9d24532af8fe5c268002e9016fa80272a94727f435de3d1003"}, - {file = "bitarray-2.9.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5cc9381fd54f3c23ae1039f977bfd6d041a5c3c1518104f616643c3a5a73b15"}, - {file = "bitarray-2.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cd926e8ae4d1ed1ac4a8f37212a62886292f692bc1739fde98013bf210c2d175"}, - {file = "bitarray-2.9.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:461a3dafb9d5fda0bb3385dc507d78b1984b49da3fe4c6d56c869a54373b7008"}, - {file = "bitarray-2.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:393cb27fd859af5fd9c16eb26b1c59b17b390ff66b3ae5d0dd258270191baf13"}, {file = "bitarray-2.9.2.tar.gz", hash = "sha256:a8f286a51a32323715d77755ed959f94bef13972e9a2fe71b609e40e6d27957e"}, ] @@ -314,13 +294,13 @@ files = [ [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." groups = ["default", "docs", "test"] files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -351,43 +331,20 @@ files = [ [[package]] name = "ckzg" -version = "1.0.2" +version = "2.0.0" summary = "Python bindings for C-KZG-4844" groups = ["default"] files = [ - {file = "ckzg-1.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e3cb2f8c767aee57e88944f90848e8689ce43993b9ff21589cfb97a562208fe7"}, - {file = "ckzg-1.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b29889f5bc5db530f766871c0ff4133e7270ecf63aaa3ca756d3b2731980802"}, - {file = "ckzg-1.0.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfcc70fb76b3d36125d646110d5001f2aa89c1c09ff5537a4550cdb7951f44d4"}, - {file = 
"ckzg-1.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ca8a256cdd56d06bc5ef24caac64845240dbabca402c5a1966d519b2514b4ec"}, - {file = "ckzg-1.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ea91b0236384f93ad1df01d530672f09e254bd8c3cf097ebf486aebb97f6c8c"}, - {file = "ckzg-1.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:65311e72780105f239d1d66512629a9f468b7c9f2609b8567fc68963ac638ef9"}, - {file = "ckzg-1.0.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0d7600ce7a73ac41d348712d0c1fe5e4cb6caa329377064cfa3a6fd8fbffb410"}, - {file = "ckzg-1.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19893ee7bd7da8688382cb134cb9ee7bce5c38e3a9386e3ed99bb010487d2d17"}, - {file = "ckzg-1.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:c3e1a9a72695e777497e95bb2213316a1138f82d1bb5d67b9c029a522d24908e"}, - {file = "ckzg-1.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f37be0054ebb4b8ac6e6d5267290b239b09e7ddc611776051b4c3c4032d161ba"}, - {file = "ckzg-1.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:611c03a170f0f746180eeb0cc28cdc6f954561b8eb9013605a046de86520ee6b"}, - {file = "ckzg-1.0.2-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75b2f0ab341f3c33702ce64e1c101116c7462a25686d0b1a0193ca654ad4f96e"}, - {file = "ckzg-1.0.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab29fc61fbd32096b82b02e6b18ae0d7423048d3540b7b90805b16ae10bdb769"}, - {file = "ckzg-1.0.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e43741e7453262aa3ba1754623d7864250b33751bd850dd548e3ed6bd1911093"}, - {file = "ckzg-1.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:155eacc237cb28c9eafda1c47a89e6e4550f1c2e711f2eee21e0bb2f4df75546"}, - {file = "ckzg-1.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:d31d7fbe396a51f43375e38c31bc3a96c7996882582f95f3fcfd54acfa7b3ce6"}, - {file = "ckzg-1.0.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d3d049186c9966e9140de39a9979d7adcfe22f8b02d2852c94d3c363235cc18"}, - {file = "ckzg-1.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88728fbd410d61bd5d655ac50b842714c38bc34ff717f73592132d28911fc88e"}, - {file = "ckzg-1.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:052d302058d72431acc9dd4a9c76854c8dfce10c698deef5252884e32a1ac7bf"}, - {file = "ckzg-1.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:633110a9431231664be2ad32baf10971547f18289d33967654581b9ae9c94a7e"}, - {file = "ckzg-1.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f439c9e5297ae29a700f6d55de1525e2e295dbbb7366f0974c8702fca9e536b9"}, - {file = "ckzg-1.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:94f7eb080c00c0ccbd4fafad69f0b35b624a6a229a28e11d365b60b58a072832"}, - {file = "ckzg-1.0.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f876783ec654b7b9525503c2a0a1b086e5d4f52ff65cac7e8747769b0c2e5468"}, - {file = "ckzg-1.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7e039800e50592580171830e788ef4a1d6bb54300d074ae9f9119e92aefc568"}, - {file = "ckzg-1.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a8cccf0070a29bc01493179db2e61220ee1a6cb17f8ea41c68a2f043ace87f"}, - {file = "ckzg-1.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4f86cef801d7b0838e17b6ee2f2c9e747447d91ad1220a701baccdf7ef11a3c8"}, - {file = "ckzg-1.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2433a89af4158beddebbdd66fae95b34d40f2467bee8dc40df0333de5e616b5f"}, - {file = "ckzg-1.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:c49d5dc0918ad912777720035f9820bdbb6c7e7d1898e12506d44ab3c938d525"}, - {file = "ckzg-1.0.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:331d49bc72430a3f85ea6ecb55a0d0d65f66a21d61af5783b465906a741366d5"}, - {file = "ckzg-1.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86627bc33bc63b8de869d7d5bfa9868619a4f3e4e7082103935c52f56c66b5"}, - {file = "ckzg-1.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab6a2ba2706b5eaa1ce6bc7c4e72970bf9587e2e0e482e5fb4df1996bccb7a40"}, - {file = "ckzg-1.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8bca5e7c38d913fabc24ad09545f78ba23cfc13e1ac8250644231729ca908549"}, - {file = "ckzg-1.0.2.tar.gz", hash = "sha256:4295acc380f8d42ebea4a4a0a68c424a322bb335a33bad05c72ead8cbb28d118"}, + {file = "ckzg-2.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5fb8a7ed9f430e1102f7d25df015e555c255c512c372373bd1b52fa65b2c32b2"}, + {file = "ckzg-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a93ef601f87960f881b6a2519d6689ee829cc35e0847ed3dff38c6afff383b41"}, + {file = "ckzg-2.0.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d0ca9e939b7b0dfd5a91cd981a595512000f42739b6262824c886b3a06960fe"}, + {file = "ckzg-2.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187a0fc230f3993fa8cb2c17d589f8b3ea6b74e1f5ac9927d4f37c19e153afd1"}, + {file = "ckzg-2.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a3c4aec3ffef2a20f67f6d4a13e9980560aa25d89bbc553aff1e4144f3239a"}, + {file = "ckzg-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb48fd7d110fda65a5b9f34f921d15d468354662752d252a0de02797e9510c50"}, + {file = "ckzg-2.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:de94dd1615e6aa003a6c864d5c8e8771d98ef912e32f12c555e7703134e77717"}, + {file = 
"ckzg-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:805d3a11bf6c50badaf02464340dcfb52363b1889b7f75b04a7179959285bac7"}, + {file = "ckzg-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea02a706d42e9c273554192949439742267b0031054d859c5c63db064b768a79"}, + {file = "ckzg-2.0.0.tar.gz", hash = "sha256:cd115a39cbc301b8465f6e19191cbb375b3589f3458cc995122595649a6f193f"}, ] [[package]] @@ -430,49 +387,47 @@ files = [ [[package]] name = "coverage" -version = "7.5.4" +version = "7.6.1" requires_python = ">=3.8" summary = "Code coverage measurement for Python" groups = ["test"] files = [ - {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"}, - {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"}, - {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = 
"sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"}, - {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"}, - {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"}, - {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [[package]] name = "coverage" -version = "7.5.4" +version = "7.6.1" extras = ["toml"] requires_python = ">=3.8" summary = "Code coverage measurement for Python" groups = ["test"] dependencies = [ - "coverage==7.5.4", + "coverage==7.6.1", "tomli; python_full_version <= \"3.11.0a6\"", ] files = [ - {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"}, - {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"}, - {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"}, - {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash 
= "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"}, - {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"}, - {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1.tar.gz", hash = 
"sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [[package]] @@ -528,26 +483,6 @@ files = [ {file = "cytoolz-0.12.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:71b6eb97f6695f7ba8ce69c49b707a351c5f46fd97f5aeb5f6f2fb0d6e72b887"}, {file = "cytoolz-0.12.3-cp312-cp312-win32.whl", hash = "sha256:cee3de65584e915053412cd178729ff510ad5f8f585c21c5890e91028283518f"}, {file = "cytoolz-0.12.3-cp312-cp312-win_amd64.whl", hash = "sha256:9eef0d23035fa4dcfa21e570961e86c375153a7ee605cdd11a8b088c24f707f6"}, - {file = "cytoolz-0.12.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:55f9bd1ae6c2a27eda5abe2a0b65a83029d2385c5a1da7b8ef47af5905d7e905"}, - {file = "cytoolz-0.12.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2d271393c378282727f1231d40391ae93b93ddc0997448acc21dd0cb6a1e56d"}, - {file = "cytoolz-0.12.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee98968d6a66ee83a8ceabf31182189ab5d8598998c8ce69b6d5843daeb2db60"}, - {file = "cytoolz-0.12.3-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01cfb8518828c1189200c02a5010ea404407fb18fd5589e29c126e84bbeadd36"}, - {file = "cytoolz-0.12.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:456395d7aec01db32bf9e6db191d667347c78d8d48e77234521fa1078f60dabb"}, - {file = "cytoolz-0.12.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:cd88028bb897fba99ddd84f253ca6bef73ecb7bdf3f3cf25bc493f8f97d3c7c5"}, - {file = "cytoolz-0.12.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59b19223e7f7bd7a73ec3aa6fdfb73b579ff09c2bc0b7d26857eec2d01a58c76"}, - {file = "cytoolz-0.12.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a79d72b08048a0980a59457c239555f111ac0c8bdc140c91a025f124104dbb4"}, - {file = 
"cytoolz-0.12.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dd70141b32b717696a72b8876e86bc9c6f8eff995c1808e299db3541213ff82"}, - {file = "cytoolz-0.12.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a1445c91009eb775d479e88954c51d0b4cf9a1e8ce3c503c2672d17252882647"}, - {file = "cytoolz-0.12.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ca6a9a9300d5bda417d9090107c6d2b007683efc59d63cc09aca0e7930a08a85"}, - {file = "cytoolz-0.12.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be6feb903d2a08a4ba2e70e950e862fd3be9be9a588b7c38cee4728150a52918"}, - {file = "cytoolz-0.12.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b6f43f086e5a965d33d62a145ae121b4ccb6e0789ac0acc895ce084fec8c65"}, - {file = "cytoolz-0.12.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:534fa66db8564d9b13872d81d54b6b09ae592c585eb826aac235bd6f1830f8ad"}, - {file = "cytoolz-0.12.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:fea649f979def23150680de1bd1d09682da3b54932800a0f90f29fc2a6c98ba8"}, - {file = "cytoolz-0.12.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a447247ed312dd64e3a8d9483841ecc5338ee26d6e6fbd29cd373ed030db0240"}, - {file = "cytoolz-0.12.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba3f843aa89f35467b38c398ae5b980a824fdbdb94065adc6ec7c47a0a22f4c7"}, - {file = "cytoolz-0.12.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:582c22f97a380211fb36a7b65b1beeb84ea11d82015fa84b054be78580390082"}, - {file = "cytoolz-0.12.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47feb089506fc66e1593cd9ade3945693a9d089a445fbe9a11385cab200b9f22"}, - {file = 
"cytoolz-0.12.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ba9002d2f043943744a9dc8e50a47362bcb6e6f360dc0a1abcb19642584d87bb"}, {file = "cytoolz-0.12.3.tar.gz", hash = "sha256:4503dc59f4ced53a54643272c61dc305d1dbbfbd7d6bdf296948de9f34c3a282"}, ] @@ -583,6 +518,7 @@ version = "2.6.1" requires_python = ">=3.8" summary = "DNS toolkit" groups = ["default"] +marker = "python_version ~= \"3.11\"" files = [ {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, @@ -635,6 +571,7 @@ version = "2.2.0" requires_python = ">=3.8" summary = "A robust email address syntax and deliverability validation library." groups = ["default"] +marker = "python_version ~= \"3.11\"" dependencies = [ "dnspython>=2.0.0", "idna>=2.0.0", @@ -662,24 +599,25 @@ files = [ [[package]] name = "eth-account" -version = "0.11.2" +version = "0.13.3" requires_python = "<4,>=3.8" summary = "eth-account: Sign Ethereum transactions and messages with local private keys" groups = ["default"] dependencies = [ "bitarray>=2.4.0", - "ckzg>=0.4.3", + "ckzg>=2.0.0", "eth-abi>=4.0.0-b.2", - "eth-keyfile>=0.6.0", + "eth-keyfile>=0.7.0", "eth-keys>=0.4.0", - "eth-rlp>=0.3.0", + "eth-rlp>=2.1.0", "eth-utils>=2.0.0", - "hexbytes<0.4.0,>=0.1.0", + "hexbytes>=1.2.0", + "pydantic>=2.0.0", "rlp>=1.0.0", ] files = [ - {file = "eth-account-0.11.2.tar.gz", hash = "sha256:b43daf2c0ae43f2a24ba754d66889f043fae4d3511559cb26eb0122bae9afbbd"}, - {file = "eth_account-0.11.2-py3-none-any.whl", hash = "sha256:95157c262a9823c1e08be826d4bc304bf32f0c32e80afb38c126a325a64f651a"}, + {file = "eth_account-0.13.3-py3-none-any.whl", hash = "sha256:c8f3dae3403b8647f386fcc081fb8c2a0970991cf3e00af7e7ebd73f95d6a319"}, + {file = "eth_account-0.13.3.tar.gz", hash = "sha256:03d6af5d314e64b3dd53283e15b24736c5caa24542e5edac0455d6ff87d8b1e0"}, ] [[package]] @@ 
-742,24 +680,24 @@ files = [ [[package]] name = "eth-rlp" -version = "1.0.1" +version = "2.1.0" requires_python = ">=3.8, <4" summary = "eth-rlp: RLP definitions for common Ethereum objects in Python" groups = ["default"] dependencies = [ "eth-utils>=2.0.0", - "hexbytes<1,>=0.1.0", + "hexbytes>=1.2.0", "rlp>=0.6.0", - "typing-extensions>=4.0.1; python_version <= \"3.11\"", + "typing-extensions>=4.0.1; python_version <= \"3.10\"", ] files = [ - {file = "eth-rlp-1.0.1.tar.gz", hash = "sha256:d61dbda892ee1220f28fb3663c08f6383c305db9f1f5624dc585c9cd05115027"}, - {file = "eth_rlp-1.0.1-py3-none-any.whl", hash = "sha256:dd76515d71654277377d48876b88e839d61553aaf56952e580bb7cebef2b1517"}, + {file = "eth-rlp-2.1.0.tar.gz", hash = "sha256:d5b408a8cd20ed496e8e66d0559560d29bc21cee482f893936a1f05d0dddc4a0"}, + {file = "eth_rlp-2.1.0-py3-none-any.whl", hash = "sha256:6f476eb7e37d81feaba5d98aed887e467be92648778c44b19fe594aea209cde1"}, ] [[package]] name = "eth-typing" -version = "4.3.1" +version = "5.0.0" requires_python = "<4,>=3.8" summary = "eth-typing: Common type annotations for ethereum python packages" groups = ["default"] @@ -767,25 +705,26 @@ dependencies = [ "typing-extensions>=4.5.0", ] files = [ - {file = "eth_typing-4.3.1-py3-none-any.whl", hash = "sha256:b4d7cee912c7779da75da4b42fa61475c1089d35a4df5081a786eaa29d5f6865"}, - {file = "eth_typing-4.3.1.tar.gz", hash = "sha256:4504559c87a9f71f4b99aa5a1e0549adaa7f192cbf8e37a295acfcddb1b5412d"}, + {file = "eth_typing-5.0.0-py3-none-any.whl", hash = "sha256:c7ebc8595e7b65175bb4b4176c2b548ab21b13329f2058e84d4f8c289ba9f577"}, + {file = "eth_typing-5.0.0.tar.gz", hash = "sha256:87ce7cee75665c09d2dcff8de1b496609d5e32fcd2e2b1d8fc0370c29eedcdc0"}, ] [[package]] name = "eth-utils" -version = "4.1.1" +version = "5.0.0" requires_python = "<4,>=3.8" summary = "eth-utils: Common utility functions for python code that interacts with Ethereum" groups = ["default"] dependencies = [ "cytoolz>=0.10.1; implementation_name == \"cpython\"", 
"eth-hash>=0.3.1", - "eth-typing>=3.0.0", + "eth-typing>=5.0.0", + "hexbytes>=1.0.0", "toolz>0.8.2; implementation_name == \"pypy\"", ] files = [ - {file = "eth_utils-4.1.1-py3-none-any.whl", hash = "sha256:ccbbac68a6d65cb6e294c5bcb6c6a5cec79a241c56dc5d9c345ed788c30f8534"}, - {file = "eth_utils-4.1.1.tar.gz", hash = "sha256:71c8d10dec7494aeed20fa7a4d52ec2ce4a2e52fdce80aab4f5c3c19f3648b25"}, + {file = "eth_utils-5.0.0-py3-none-any.whl", hash = "sha256:99c44eca11db74dbb881a1d70b24cd80436fc62fe527d2f5c3e3cf7932aba7b2"}, + {file = "eth_utils-5.0.0.tar.gz", hash = "sha256:a5eb9555f43f4579eb83cb84f9dda9f3d6663bbd4a5a6b693f8d35045f305a1f"}, ] [[package]] @@ -837,24 +776,24 @@ files = [ [[package]] name = "hexbytes" -version = "0.3.1" -requires_python = ">=3.7, <4" +version = "1.2.1" +requires_python = "<4,>=3.8" summary = "hexbytes: Python `bytes` subclass that decodes hex, with a readable console output" groups = ["default"] files = [ - {file = "hexbytes-0.3.1-py3-none-any.whl", hash = "sha256:383595ad75026cf00abd570f44b368c6cdac0c6becfae5c39ff88829877f8a59"}, - {file = "hexbytes-0.3.1.tar.gz", hash = "sha256:a3fe35c6831ee8fafd048c4c086b986075fc14fd46258fa24ecb8d65745f9a9d"}, + {file = "hexbytes-1.2.1-py3-none-any.whl", hash = "sha256:e64890b203a31f4a23ef11470ecfcca565beaee9198df623047df322b757471a"}, + {file = "hexbytes-1.2.1.tar.gz", hash = "sha256:515f00dddf31053db4d0d7636dd16061c1d896c3109b8e751005db4ca46bcca7"}, ] [[package]] name = "idna" -version = "3.7" -requires_python = ">=3.5" +version = "3.8" +requires_python = ">=3.6" summary = "Internationalized Domain Names in Applications (IDNA)" groups = ["default", "docs", "test"] files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = 
"sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] @@ -926,49 +865,15 @@ files = [ {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] -[[package]] -name = "jsonschema" -version = "4.23.0" -requires_python = ">=3.8" -summary = "An implementation of JSON Schema validation for Python" -groups = ["default"] -dependencies = [ - "attrs>=22.2.0", - "importlib-resources>=1.4.0; python_version < \"3.9\"", - "jsonschema-specifications>=2023.03.6", - "pkgutil-resolve-name>=1.3.10; python_version < \"3.9\"", - "referencing>=0.28.4", - "rpds-py>=0.7.1", -] -files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, -] - -[[package]] -name = "jsonschema-specifications" -version = "2023.12.1" -requires_python = ">=3.8" -summary = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -groups = ["default"] -dependencies = [ - "importlib-resources>=1.4.0; python_version < \"3.9\"", - "referencing>=0.31.0", -] -files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, -] - [[package]] name = "lark" -version = "1.1.9" -requires_python = ">=3.6" +version = "1.2.2" +requires_python = ">=3.8" summary = "a modern parsing library" groups = ["default"] files = [ - {file = "lark-1.1.9-py3-none-any.whl", hash = "sha256:a0dd3a87289f8ccbb325901e4222e723e7d745dbfc1803eaf5f3d2ace19cf2db"}, - {file = "lark-1.1.9.tar.gz", hash = 
"sha256:15fa5236490824c2c4aba0e22d2d6d823575dcaf4cdd1848e34b6ad836240fba"}, + {file = "lark-1.2.2-py3-none-any.whl", hash = "sha256:c2276486b02f0f1b90be155f2c8ba4a8e194d42775786db622faccd652d8e80c"}, + {file = "lark-1.2.2.tar.gz", hash = "sha256:ca807d0162cd16cef15a8feecb862d7319e7a09bdb13aef927968e45040fed80"}, ] [[package]] @@ -992,21 +897,6 @@ files = [ {file = "lru_dict-1.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0e88dba16695f17f41701269fa046197a3fd7b34a8dba744c8749303ddaa18df"}, {file = "lru_dict-1.3.0-cp312-cp312-win32.whl", hash = "sha256:6ffaf595e625b388babc8e7d79b40f26c7485f61f16efe76764e32dce9ea17fc"}, {file = "lru_dict-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf9da32ef2582434842ab6ba6e67290debfae72771255a8e8ab16f3e006de0aa"}, - {file = "lru_dict-1.3.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f8f7824db5a64581180ab9d09842e6dd9fcdc46aac9cb592a0807cd37ea55680"}, - {file = "lru_dict-1.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acd04b7e7b0c0c192d738df9c317093335e7282c64c9d1bb6b7ebb54674b4e24"}, - {file = "lru_dict-1.3.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5c20f236f27551e3f0adbf1a987673fb1e9c38d6d284502cd38f5a3845ef681"}, - {file = "lru_dict-1.3.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca3703ff03b03a1848c563bc2663d0ad813c1cd42c4d9cf75b623716d4415d9a"}, - {file = "lru_dict-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a9fb71ba262c6058a0017ce83d343370d0a0dbe2ae62c2eef38241ec13219330"}, - {file = "lru_dict-1.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f5b88a7c39e307739a3701194993455968fcffe437d1facab93546b1b8a334c1"}, - {file = "lru_dict-1.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2682bfca24656fb7a643621520d57b7fe684ed5fa7be008704c1235d38e16a32"}, 
- {file = "lru_dict-1.3.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96fc87ddf569181827458ec5ad8fa446c4690cffacda66667de780f9fcefd44d"}, - {file = "lru_dict-1.3.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcec98e2c7da7631f0811730303abc4bdfe70d013f7a11e174a2ccd5612a7c59"}, - {file = "lru_dict-1.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6bba2863060caeaedd8386b0c8ee9a7ce4d57a7cb80ceeddf440b4eff2d013ba"}, - {file = "lru_dict-1.3.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c497fb60279f1e1d7dfbe150b1b069eaa43f7e172dab03f206282f4994676c5"}, - {file = "lru_dict-1.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d9509d817a47597988615c1a322580c10100acad10c98dfcf3abb41e0e5877f"}, - {file = "lru_dict-1.3.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0213ab4e3d9a8d386c18e485ad7b14b615cb6f05df6ef44fb2a0746c6ea9278b"}, - {file = "lru_dict-1.3.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50fbd69cd3287196796ab4d50e4cc741eb5b5a01f89d8e930df08da3010c385"}, - {file = "lru_dict-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5247d1f011f92666010942434020ddc5a60951fefd5d12a594f0e5d9f43e3b3b"}, ] [[package]] @@ -1045,7 +935,7 @@ files = [ [[package]] name = "marshmallow" -version = "3.21.3" +version = "3.22.0" requires_python = ">=3.8" summary = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
groups = ["default"] @@ -1053,27 +943,25 @@ dependencies = [ "packaging>=17.0", ] files = [ - {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"}, - {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"}, + {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, + {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, ] [[package]] name = "marshmallow-dataclass" -version = "8.6.1" -requires_python = ">=3.6" +version = "8.7.0" +requires_python = ">=3.8" summary = "Python library to convert dataclasses into marshmallow schemas." groups = ["default"] dependencies = [ - "dataclasses; python_version == \"3.6\"", - "marshmallow<4.0,>=3.13.0", - "types-dataclasses<0.6.4; python_version == \"3.6\"", - "typing-extensions>=3.7.2; python_version < \"3.8\"", - "typing-extensions>=4.2.0; python_version < \"3.11\" and python_version >= \"3.7\"", - "typing-inspect<1.0,>=0.8.0", + "marshmallow>=3.18.0", + "typeguard~=4.0.0", + "typing-extensions>=4.2.0; python_version < \"3.11\"", + "typing-inspect~=0.9.0", ] files = [ - {file = "marshmallow_dataclass-8.6.1-py3-none-any.whl", hash = "sha256:d941137bcb480729a14810e5f10d9bb79a498fb5b57c875aad6be37e97b98a5f"}, - {file = "marshmallow_dataclass-8.6.1.tar.gz", hash = "sha256:dca312c841f73f8f665b4434d23b3204e8cfbf50b8cbb57bb76f41a6ee8184c8"}, + {file = "marshmallow_dataclass-8.7.0-py3-none-any.whl", hash = "sha256:9e528d72b83f2b6b0f60cb29fd38781a6f7ce2155295adb1ed33289826a93c4b"}, + {file = "marshmallow_dataclass-8.7.0.tar.gz", hash = "sha256:0218008fec3fd4b5f739b2a0c6d7593bcc403308f6da953e341e4e359e268849"}, ] [[package]] @@ -1160,7 +1048,7 @@ files = [ [[package]] name = "mypy" -version = "1.11.1" +version = "1.11.2" requires_python = ">=3.8" summary = 
"Optional static typing for Python" groups = ["lint"] @@ -1170,13 +1058,13 @@ dependencies = [ "typing-extensions>=4.6.0", ] files = [ - {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, - {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, - {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, - {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, - {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, - {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, + {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, + {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, + {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, + {file = "mypy-1.11.2-py3-none-any.whl", hash = 
"sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, + {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, ] [[package]] @@ -1210,13 +1098,13 @@ files = [ [[package]] name = "nvidia-ml-py" -version = "12.555.43" +version = "12.560.30" summary = "Python Bindings for the NVIDIA Management Library" groups = ["perf"] marker = "platform_system != \"Darwin\"" files = [ - {file = "nvidia-ml-py-12.555.43.tar.gz", hash = "sha256:e9e7f12ef1ec234bb0dc22d2bdc762ffafab394bdc472a07a4377c95bbf93afe"}, - {file = "nvidia_ml_py-12.555.43-py3-none-any.whl", hash = "sha256:b89ba66e8ba8032fdbbaa907323f248be0ed001e106f814a1e1137e58eba2a80"}, + {file = "nvidia-ml-py-12.560.30.tar.gz", hash = "sha256:f0254dc7400647680a072ee02509bfd46102b60bdfeca321576d4d4817e7fe97"}, + {file = "nvidia_ml_py-12.560.30-py3-none-any.whl", hash = "sha256:fea371c94d63e38a611c17bbb85fe400e9c8ddb9e8684a9cd0e47786a4bc3c73"}, ] [[package]] @@ -1298,25 +1186,25 @@ files = [ [[package]] name = "poseidon-py" -version = "0.1.4" +version = "0.1.5" requires_python = ">=3.8" summary = "Python implementation of Poseidon hash" groups = ["default"] files = [ - {file = "poseidon_py-0.1.4-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:0eef537ac31cbf195a170e696c9f5b6d2d13ce3a0da98f37cf03f0d855ae2be1"}, - {file = "poseidon_py-0.1.4-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:446048c88e8ee97c569032171c7cb057b9c739d2985109c55eb234ce66cd2d16"}, - {file = "poseidon_py-0.1.4-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:91c259f85cd1b3a5f2d450a481d90bdd4b911d05bd6bcc51fd290d2fd7d66613"}, - {file = "poseidon_py-0.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b53d6eaecac2c7a37a66f7a55a451fe5abed0e317ebb26022e9937b43a54f8c9"}, - {file = "poseidon_py-0.1.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8ac18854c6305d80f0ccb270b261eb278bbc21f055b43737a8b55726d4caee3a"}, - {file = "poseidon_py-0.1.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53edc8142cca4fe56eb2e2451eb77165925e2d7df25186fd5ab5b7d813555381"}, - {file = "poseidon_py-0.1.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6880dc2461a2c7336abb7f4774971a053bc04169dbfce5e08feecefad4fd49c2"}, - {file = "poseidon_py-0.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:514bc3df634a4ca5621bc0ea60e253257f8cc7773e1d3701e5e8f36a35f4d0f3"}, - {file = "poseidon_py-0.1.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:76d7cdb3c4a744573627f8170f659236737218245d04a7ead00e74bfb48ab28a"}, - {file = "poseidon_py-0.1.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8fa27f2797abdec641a26abb05fee82194ebb40fd4ded3fa385b8b143bef4d7f"}, - {file = "poseidon_py-0.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:53a78d0fcd1a7742f1d72ce9a2871790c0ae862dec62b18f58982c0ca07a3e79"}, - {file = "poseidon_py-0.1.4-cp312-cp312-win32.whl", hash = "sha256:1176e308c18ff878cc8763cb0dc03541d56775a75b8128f5da6d55ad041a6bd8"}, - {file = "poseidon_py-0.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:9efac87f378ceb64c2b2bad5a2bf8678459ce7ae7947c12326effc4a700fe9e5"}, - {file = "poseidon_py-0.1.4.tar.gz", hash = "sha256:416139d80931c85fd117cbc5b47d2eb1daf25e52acd77b6dd889c4820592a479"}, + {file = "poseidon_py-0.1.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:4f2d28448e5b0bea2622ca23667de8a27b1a6056068181c5a41e78be65f0d980"}, + {file = "poseidon_py-0.1.5-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:311291b8caa39f7ebeb3c837f239a46e8214d30722f9e01ded62d79c0b061b8a"}, + {file = "poseidon_py-0.1.5-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:06c07b1e2ad273d50b243121b1e94b28bf893bd4942fe1808f98d33cd2de2790"}, + {file = 
"poseidon_py-0.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55501c7c881eaf406568db70a16c56c5f92c325eda65f9f741e75ec5cced7ad6"}, + {file = "poseidon_py-0.1.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cbd1ed7d8567e057cc181542aff18dc0b11cf2c67593243830cc2fedb73b112f"}, + {file = "poseidon_py-0.1.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ae4ddd2b1d05d1ed2e3b38e58c3ac5940a46001b080d5112ce346be4d09681"}, + {file = "poseidon_py-0.1.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1ba4956c6b886457d43e43a4465aed18f9bd1f23e7bd2c5388c9a74ae8f0842"}, + {file = "poseidon_py-0.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e4e76745181fb50c77813b1c7c845f92f2d3b4620bf784f43e01be5713e1c14"}, + {file = "poseidon_py-0.1.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:28c0f134533b827eb0bc883347ec38dca2a5559fb277aeeb4ac9cce7f6d30a16"}, + {file = "poseidon_py-0.1.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:7bc3995f771b7d98f05b200c2c19c1ae6238bb9f625733042a6df9633219e762"}, + {file = "poseidon_py-0.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f09faa440ff5f10099a3e667bb5f8781d27a7bc719f45119d08d12574a4d9281"}, + {file = "poseidon_py-0.1.5-cp312-cp312-win32.whl", hash = "sha256:4a7fad5110fc64125ef8fbbdc1d49798159b568e239dbef18ada8fd87eed1fc9"}, + {file = "poseidon_py-0.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:0261221c68c36fd11cfcb91e5074953bfbe7a33031d806d3ab2dc5c7c1e70a2b"}, + {file = "poseidon_py-0.1.5.tar.gz", hash = "sha256:acfa0f79176505226dc79c27e1a6a55e1184753920463826101a2f1c2dd2fbf6"}, ] [[package]] @@ -1330,22 +1218,6 @@ files = [ {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, ] -[[package]] -name = "protobuf" -version = "5.27.2" -requires_python = ">=3.8" -summary = "" 
-groups = ["default"] -files = [ - {file = "protobuf-5.27.2-cp310-abi3-win32.whl", hash = "sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38"}, - {file = "protobuf-5.27.2-cp310-abi3-win_amd64.whl", hash = "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505"}, - {file = "protobuf-5.27.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5"}, - {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b"}, - {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e"}, - {file = "protobuf-5.27.2-py3-none-any.whl", hash = "sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470"}, - {file = "protobuf-5.27.2.tar.gz", hash = "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"}, -] - [[package]] name = "psutil" version = "6.0.0" @@ -1385,24 +1257,24 @@ files = [ [[package]] name = "pydantic" -version = "2.8.2" +version = "2.9.1" requires_python = ">=3.8" summary = "Data validation using Python type hints" groups = ["default"] dependencies = [ - "annotated-types>=0.4.0", - "pydantic-core==2.20.1", + "annotated-types>=0.6.0", + "pydantic-core==2.23.3", "typing-extensions>=4.12.2; python_version >= \"3.13\"", "typing-extensions>=4.6.1; python_version < \"3.13\"", ] files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"}, + {file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"}, ] 
[[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.23.3" requires_python = ">=3.8" summary = "Core functionality for Pydantic validation and serialization" groups = ["default"] @@ -1410,40 +1282,24 @@ dependencies = [ "typing-extensions!=4.7.0,>=4.6.0", ] files = [ - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = 
"pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = 
"pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801"}, + {file = 
"pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab"}, + {file = "pydantic_core-2.23.3-cp312-none-win32.whl", hash = "sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c"}, + {file = "pydantic_core-2.23.3-cp312-none-win_amd64.whl", hash = "sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b"}, + {file = "pydantic_core-2.23.3.tar.gz", hash = "sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690"}, ] [[package]] name = "pydantic" -version = "2.8.2" +version = "2.9.1" extras = ["email"] requires_python = ">=3.8" summary = "Data validation using Python type hints" @@ -1451,11 +1307,11 @@ groups = ["default"] marker = "python_version ~= \"3.11\"" dependencies = [ "email-validator>=2.0.0", - "pydantic==2.8.2", + "pydantic==2.9.1", ] files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"}, + {file = "pydantic-2.9.1.tar.gz", hash = 
"sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"}, ] [[package]] @@ -1509,22 +1365,21 @@ files = [ [[package]] name = "pytest" -version = "7.4.4" -requires_python = ">=3.7" +version = "8.3.2" +requires_python = ">=3.8" summary = "pytest: simple powerful testing with Python" groups = ["test"] dependencies = [ "colorama; sys_platform == \"win32\"", "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", - "importlib-metadata>=0.12; python_version < \"3.8\"", "iniconfig", "packaging", - "pluggy<2.0,>=0.12", - "tomli>=1.0.0; python_version < \"3.11\"", + "pluggy<2,>=1.5", + "tomli>=1; python_version < \"3.11\"", ] files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, + {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, + {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, ] [[package]] @@ -1545,16 +1400,16 @@ files = [ [[package]] name = "pytest-asyncio" -version = "0.23.8" +version = "0.24.0" requires_python = ">=3.8" summary = "Pytest support for asyncio" groups = ["test"] dependencies = [ - "pytest<9,>=7.0.0", + "pytest<9,>=8.2", ] files = [ - {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, - {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, + {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, + {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, ] [[package]] @@ -1643,59 +1498,46 @@ files = [ [[package]] name 
= "pyyaml" -version = "6.0.1" -requires_python = ">=3.6" -summary = "YAML parser and emitter for Python" -groups = ["default"] -files = [ - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "referencing" -version = "0.35.1" +version = "6.0.2" requires_python = ">=3.8" -summary = "JSON Referencing + Python" +summary = "YAML parser and emitter for Python" groups = ["default"] -dependencies = [ - "attrs>=22.2.0", - "rpds-py>=0.7.0", -] files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "regex" -version = "2024.5.15" +version = "2024.7.24" requires_python = ">=3.8" summary = "Alternative regular expression module, to replace re." 
groups = ["default"] files = [ - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, - {file = 
"regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, - {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, - {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, - {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4ebef608553aff8deb845c7f4f1d0740ff76fa672c011cc0bacb2a00fbde86"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:74007a5b25b7a678459f06559504f1eec2f0f17bca218c9d56f6a0a12bfffdad"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7df9ea48641da022c2a3c9c641650cd09f0cd15e8908bf931ad538f5ca7919c9"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1141a1dcc32904c47f6846b040275c6e5de0bf73f17d7a409035d55b76f289"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80c811cfcb5c331237d9bad3bea2c391114588cf4131707e84d9493064d267f9"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7214477bf9bd195894cf24005b1e7b496f46833337b5dedb7b2a6e33f66d962c"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d55588cba7553f0b6ec33130bc3e114b355570b45785cebdc9daed8c637dd440"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:558a57cfc32adcf19d3f791f62b5ff564922942e389e3cfdb538a23d65a6b610"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:a512eed9dfd4117110b1881ba9a59b31433caed0c4101b361f768e7bcbaf93c5"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:86b17ba823ea76256b1885652e3a141a99a5c4422f4a869189db328321b73799"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5eefee9bfe23f6df09ffb6dfb23809f4d74a78acef004aa904dc7c88b9944b05"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:731fcd76bbdbf225e2eb85b7c38da9633ad3073822f5ab32379381e8c3c12e94"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eaef80eac3b4cfbdd6de53c6e108b4c534c21ae055d1dbea2de6b3b8ff3def38"}, + {file = "regex-2024.7.24-cp312-cp312-win32.whl", hash = "sha256:185e029368d6f89f36e526764cf12bf8d6f0e3a2a7737da625a76f594bdfcbfc"}, + {file = "regex-2024.7.24-cp312-cp312-win_amd64.whl", hash = "sha256:2f1baff13cc2521bea83ab2528e7a80cbe0ebb2c6f0bfad15be7da3aed443908"}, + {file = "regex-2024.7.24.tar.gz", hash = "sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506"}, ] [[package]] @@ -1717,7 +1559,7 @@ files = [ [[package]] name = "rich" -version = "13.7.1" +version = "13.8.0" requires_python = ">=3.7.0" summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" groups = ["perf"] @@ -1727,8 +1569,8 @@ dependencies = [ "typing-extensions<5.0,>=4.0.0; python_version < \"3.9\"", ] files = [ - {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, - {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, + {file = "rich-13.8.0-py3-none-any.whl", hash = "sha256:2e85306a063b9492dffc86278197a60cbece75bcb766022f3436f567cae11bdc"}, + {file = "rich-13.8.0.tar.gz", hash = "sha256:a5ac1f1cd448ade0d59cc3356f7db7a7ccda2c8cbae9c7a90c28ff463d3e91f4"}, ] [[package]] @@ -1745,62 +1587,6 @@ files = [ {file = "rlp-4.0.1.tar.gz", hash = 
"sha256:bcefb11013dfadf8902642337923bd0c786dc8a27cb4c21da6e154e52869ecb1"}, ] -[[package]] -name = "rpds-py" -version = "0.19.0" -requires_python = ">=3.8" -summary = "Python bindings to Rust's persistent data structures (rpds)" -groups = ["default"] -files = [ - {file = "rpds_py-0.19.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5095a7c838a8647c32aa37c3a460d2c48debff7fc26e1136aee60100a8cd8f68"}, - {file = "rpds_py-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f2f78ef14077e08856e788fa482107aa602636c16c25bdf59c22ea525a785e9"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7cc6cb44f8636fbf4a934ca72f3e786ba3c9f9ba4f4d74611e7da80684e48d2"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf902878b4af334a09de7a45badbff0389e7cf8dc2e4dcf5f07125d0b7c2656d"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:688aa6b8aa724db1596514751ffb767766e02e5c4a87486ab36b8e1ebc1aedac"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57dbc9167d48e355e2569346b5aa4077f29bf86389c924df25c0a8b9124461fb"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4cf5a9497874822341c2ebe0d5850fed392034caadc0bad134ab6822c0925b"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a790d235b9d39c70a466200d506bb33a98e2ee374a9b4eec7a8ac64c2c261fa"}, - {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1d16089dfa58719c98a1c06f2daceba6d8e3fb9b5d7931af4a990a3c486241cb"}, - {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bc9128e74fe94650367fe23f37074f121b9f796cabbd2f928f13e9661837296d"}, - {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:c8f77e661ffd96ff104bebf7d0f3255b02aa5d5b28326f5408d6284c4a8b3248"}, - {file = "rpds_py-0.19.0-cp312-none-win32.whl", hash = "sha256:5f83689a38e76969327e9b682be5521d87a0c9e5a2e187d2bc6be4765f0d4600"}, - {file = "rpds_py-0.19.0-cp312-none-win_amd64.whl", hash = "sha256:06925c50f86da0596b9c3c64c3837b2481337b83ef3519e5db2701df695453a4"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:75969cf900d7be665ccb1622a9aba225cf386bbc9c3bcfeeab9f62b5048f4a07"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8445f23f13339da640d1be8e44e5baf4af97e396882ebbf1692aecd67f67c479"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5a7c1062ef8aea3eda149f08120f10795835fc1c8bc6ad948fb9652a113ca55"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:462b0c18fbb48fdbf980914a02ee38c423a25fcc4cf40f66bacc95a2d2d73bc8"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3208f9aea18991ac7f2b39721e947bbd752a1abbe79ad90d9b6a84a74d44409b"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3444fe52b82f122d8a99bf66777aed6b858d392b12f4c317da19f8234db4533"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb4bac7185a9f0168d38c01d7a00addece9822a52870eee26b8d5b61409213"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b130bd4163c93798a6b9bb96be64a7c43e1cec81126ffa7ffaa106e1fc5cef5"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a707b158b4410aefb6b054715545bbb21aaa5d5d0080217290131c49c2124a6e"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:dc9ac4659456bde7c567107556ab065801622396b435a3ff213daef27b495388"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81ea573aa46d3b6b3d890cd3c0ad82105985e6058a4baed03cf92518081eec8c"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f148c3f47f7f29a79c38cc5d020edcb5ca780020fab94dbc21f9af95c463581"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0906357f90784a66e89ae3eadc2654f36c580a7d65cf63e6a616e4aec3a81be"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f629ecc2db6a4736b5ba95a8347b0089240d69ad14ac364f557d52ad68cf94b0"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6feacd1d178c30e5bc37184526e56740342fd2aa6371a28367bad7908d454fc"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae8b6068ee374fdfab63689be0963333aa83b0815ead5d8648389a8ded593378"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d57546bad81e0da13263e4c9ce30e96dcbe720dbff5ada08d2600a3502e526"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b6683a37338818646af718c9ca2a07f89787551057fae57c4ec0446dc6224b"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8481b946792415adc07410420d6fc65a352b45d347b78fec45d8f8f0d7496f0"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bec35eb20792ea64c3c57891bc3ca0bedb2884fbac2c8249d9b731447ecde4fa"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:aa5476c3e3a402c37779e95f7b4048db2cb5b0ed0b9d006983965e93f40fe05a"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:19d02c45f2507b489fd4df7b827940f1420480b3e2e471e952af4d44a1ea8e34"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3e2fd14c5d49ee1da322672375963f19f32b3d5953f0615b175ff7b9d38daed"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:93a91c2640645303e874eada51f4f33351b84b351a689d470f8108d0e0694210"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b9fc03bf76a94065299d4a2ecd8dfbae4ae8e2e8098bbfa6ab6413ca267709"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a4b07cdf3f84310c08c1de2c12ddadbb7a77568bcb16e95489f9c81074322ed"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba0ed0dc6763d8bd6e5de5cf0d746d28e706a10b615ea382ac0ab17bb7388633"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:474bc83233abdcf2124ed3f66230a1c8435896046caa4b0b5ab6013c640803cc"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329c719d31362355a96b435f4653e3b4b061fcc9eba9f91dd40804ca637d914e"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef9101f3f7b59043a34f1dccbb385ca760467590951952d6701df0da9893ca0c"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0121803b0f424ee2109d6e1f27db45b166ebaa4b32ff47d6aa225642636cd834"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8344127403dea42f5970adccf6c5957a71a47f522171fafaf4c6ddb41b61703a"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:443cec402ddd650bb2b885113e1dcedb22b1175c6be223b14246a714b61cd521"}, - {file = "rpds_py-0.19.0.tar.gz", hash = "sha256:4fdc9afadbeb393b4bbbad75481e0ea78e4469f2e1d713a90811700830b553a9"}, 
-] - [[package]] name = "ruamel-yaml" version = "0.18.6" @@ -1821,7 +1607,7 @@ version = "0.2.8" requires_python = ">=3.6" summary = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" groups = ["default"] -marker = "platform_python_implementation == \"CPython\"" +marker = "platform_python_implementation == \"CPython\" and python_version < \"3.13\"" files = [ {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, @@ -1836,34 +1622,34 @@ files = [ [[package]] name = "ruff" -version = "0.5.7" +version = "0.6.4" requires_python = ">=3.7" summary = "An extremely fast Python linter and code formatter, written in Rust." groups = ["lint"] files = [ - {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, - {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, - {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, - {file = 
"ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, - {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, - {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, - {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, - {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, + {file = "ruff-0.6.4-py3-none-linux_armv6l.whl", hash = "sha256:c4b153fc152af51855458e79e835fb6b933032921756cec9af7d0ba2aa01a258"}, + {file = "ruff-0.6.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:bedff9e4f004dad5f7f76a9d39c4ca98af526c9b1695068198b3bda8c085ef60"}, + {file = "ruff-0.6.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d02a4127a86de23002e694d7ff19f905c51e338c72d8e09b56bfb60e1681724f"}, + {file = 
"ruff-0.6.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7862f42fc1a4aca1ea3ffe8a11f67819d183a5693b228f0bb3a531f5e40336fc"}, + {file = "ruff-0.6.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eebe4ff1967c838a1a9618a5a59a3b0a00406f8d7eefee97c70411fefc353617"}, + {file = "ruff-0.6.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:932063a03bac394866683e15710c25b8690ccdca1cf192b9a98260332ca93408"}, + {file = "ruff-0.6.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:50e30b437cebef547bd5c3edf9ce81343e5dd7c737cb36ccb4fe83573f3d392e"}, + {file = "ruff-0.6.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c44536df7b93a587de690e124b89bd47306fddd59398a0fb12afd6133c7b3818"}, + {file = "ruff-0.6.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ea086601b22dc5e7693a78f3fcfc460cceabfdf3bdc36dc898792aba48fbad6"}, + {file = "ruff-0.6.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b52387d3289ccd227b62102c24714ed75fbba0b16ecc69a923a37e3b5e0aaaa"}, + {file = "ruff-0.6.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0308610470fcc82969082fc83c76c0d362f562e2f0cdab0586516f03a4e06ec6"}, + {file = "ruff-0.6.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:803b96dea21795a6c9d5bfa9e96127cc9c31a1987802ca68f35e5c95aed3fc0d"}, + {file = "ruff-0.6.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:66dbfea86b663baab8fcae56c59f190caba9398df1488164e2df53e216248baa"}, + {file = "ruff-0.6.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:34d5efad480193c046c86608dbba2bccdc1c5fd11950fb271f8086e0c763a5d1"}, + {file = "ruff-0.6.4-py3-none-win32.whl", hash = "sha256:f0f8968feea5ce3777c0d8365653d5e91c40c31a81d95824ba61d871a11b8523"}, + {file = "ruff-0.6.4-py3-none-win_amd64.whl", hash = "sha256:549daccee5227282289390b0222d0fbee0275d1db6d514550d65420053021a58"}, + {file = "ruff-0.6.4-py3-none-win_arm64.whl", hash = 
"sha256:ac4b75e898ed189b3708c9ab3fc70b79a433219e1e87193b4f2b77251d058d14"}, + {file = "ruff-0.6.4.tar.gz", hash = "sha256:ac3b5bfbee99973f80aa1b7cbd1c9cbce200883bdd067300c22a6cc1c7fba212"}, ] [[package]] name = "scalene" -version = "1.5.43.2" +version = "1.5.44.1" requires_python = "!=3.11.0,>=3.8" summary = "Scalene: A high-resolution, low-overhead CPU, GPU, and memory profiler for Python with AI-powered optimization suggestions" groups = ["perf"] @@ -1878,11 +1664,11 @@ dependencies = [ "wheel>=0.36.1", ] files = [ - {file = "scalene-1.5.43.2-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:320d4b4855545f345b51d690a30ed8a2459a6f95054906e857c13a193a1dd2e7"}, - {file = "scalene-1.5.43.2-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:b4ebfad0cf484eb4d2b09580599fc1ebdb08ba155f8ca2dd7cdd9581cc1a60d4"}, - {file = "scalene-1.5.43.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74d0d6de7b171a2784404e6ee2a3550c705cbd397d25412b7bc52a05a23788c2"}, - {file = "scalene-1.5.43.2-cp312-cp312-win_amd64.whl", hash = "sha256:fc6e9cffca7a0bf2d8512a31de9e97cbfd7f4090a5792b0dd8137a0ce263d1ec"}, - {file = "scalene-1.5.43.2.tar.gz", hash = "sha256:2ed0fbbf7a4bcf85029e1eb196190f75c123caede6b7e6103d86749cd08bb83c"}, + {file = "scalene-1.5.44.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:fdcd10f4e34b00fd50714f07cc2fe5294da64a6a478c212e42da717a931f19df"}, + {file = "scalene-1.5.44.1-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:5b00e5a774ec25613282e0da48a7af9d294df8277595b9e3104a1c2794af8189"}, + {file = "scalene-1.5.44.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:803dfb2085fe7f9fba31008472599b8b42aba8029f6f44dd8bc003078bc9e325"}, + {file = "scalene-1.5.44.1-cp312-cp312-win_amd64.whl", hash = "sha256:f89261a40ca690b0e1d865ed2eeea2448357db5db0d6ff5bfc3158208e1578d8"}, + {file = "scalene-1.5.44.1.tar.gz", hash = "sha256:ce9a79b23a94fdc28b37522bdc3f22b964467583606cf4d49169bd3fe68e286b"}, ] 
[[package]] @@ -1980,7 +1766,7 @@ files = [ [[package]] name = "sphinx-markdown-builder" -version = "0.6.6" +version = "0.6.7" requires_python = ">=3.7" summary = "A Sphinx extension to add markdown generation support." groups = ["docs"] @@ -1990,41 +1776,41 @@ dependencies = [ "tabulate", ] files = [ - {file = "sphinx-markdown-builder-0.6.6.tar.gz", hash = "sha256:febd8e03e20e357e624c52efdc7ef9d5ab70d58549784158033c9657d040f44e"}, - {file = "sphinx_markdown_builder-0.6.6-py3-none-any.whl", hash = "sha256:e6fd4626c6daf1c25a464fd7d6d64e4a97e69abca1684fb2a12fba44cb6db363"}, + {file = "sphinx_markdown_builder-0.6.7-py3-none-any.whl", hash = "sha256:6d52b63d2b7b3504ca664773e805b0ee8957239f2ca86103e793d96103970839"}, + {file = "sphinx_markdown_builder-0.6.7.tar.gz", hash = "sha256:9623c8d5963e18b3733ec8335a48b58c3e556a96529b73e4c65113cabd8e8591"}, ] [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.8" +version = "2.0.0" requires_python = ">=3.9" summary = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" groups = ["docs"] files = [ - {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, - {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, ] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.6" +version = "2.0.0" requires_python = ">=3.9" summary = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" groups = ["docs"] files = [ - {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = 
"sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, - {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, ] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.5" +version = "2.1.0" requires_python = ">=3.9" summary = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" groups = ["docs"] files = [ - {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, - {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, ] [[package]] @@ -2040,24 +1826,24 @@ files = [ [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.7" +version = "2.0.0" requires_python = ">=3.9" summary = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" groups = ["docs"] files = [ - {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, - {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = 
"sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.10" +version = "2.0.0" requires_python = ">=3.9" summary = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" groups = ["docs"] files = [ - {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, - {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] [[package]] @@ -2073,25 +1859,27 @@ files = [ [[package]] name = "starknet-py" -version = "0.23.0" +version = "0.24.1" requires_python = "<3.13,>=3.8" summary = "A python SDK for Starknet" groups = ["default"] dependencies = [ "aiohttp<4.0.0,>=3.8.4", "asgiref<4.0.0,>=3.4.1", + "bip-utils<3.0.0,>=2.9.3", "crypto-cpp-py==1.4.4", - "eth-keyfile<1.0.0,>=0.8.1", + "eth-keyfile<0.9.0,>=0.8.1", "lark<2.0.0,>=1.1.5", + "ledgerwallet<0.6.0,>=0.5.0", "marshmallow-dataclass<8.8.0", "marshmallow-oneofschema==3.1.1", "marshmallow<4.0.0,>=3.15.0", - "poseidon-py==0.1.4", + "poseidon-py==0.1.5", "pycryptodome<4.0,>=3.17", "typing-extensions<5.0.0,>=4.3.0", ] files = [ - {file = "starknet_py-0.23.0.tar.gz", hash = "sha256:3dbab1a608e4bc67e646f3d096bcf346383da26472cfdb60409b5478c3432e39"}, + {file = "starknet_py-0.24.1.tar.gz", hash = "sha256:5f0971f6b9a46f695a334171d9bdd7cb35e3a735d57808b058324f45e102b130"}, ] [[package]] @@ -2153,7 +1941,7 @@ files = [ [[package]] name = "tortoise-orm" -version = "0.21.5" +version = "0.21.6" requires_python = "<4.0,>=3.8" summary = "Easy 
async ORM for python, built with relations in mind" groups = ["default"] @@ -2165,8 +1953,23 @@ dependencies = [ "pytz", ] files = [ - {file = "tortoise_orm-0.21.5-py3-none-any.whl", hash = "sha256:a9657568b31c5ee24c0596d531fd51210c75855551c4c18b376e8a24f33b3e1d"}, - {file = "tortoise_orm-0.21.5.tar.gz", hash = "sha256:cccd23178380a325890e10742c74250722e92e4aa088fd7ebf863c3475a4f1ef"}, + {file = "tortoise_orm-0.21.6-py3-none-any.whl", hash = "sha256:98fcf07dce3396075eac36b0d2b14d2267ff875d32455e03ee15e38de2f138df"}, + {file = "tortoise_orm-0.21.6.tar.gz", hash = "sha256:0fbc718001647bf282c01eaaa360f94f1432c9281701244180703d48d58a88ec"}, +] + +[[package]] +name = "typeguard" +version = "4.0.1" +requires_python = ">=3.7.4" +summary = "Run-time type checker for Python" +groups = ["default"] +dependencies = [ + "importlib-metadata>=3.6; python_version < \"3.10\"", + "typing-extensions>=4.7.0; python_version < \"3.12\"", +] +files = [ + {file = "typeguard-4.0.1-py3-none-any.whl", hash = "sha256:43f55cc9953f26dae362adb973b6c9ad6b97bfffcc6757277912eddd5cfa345b"}, + {file = "typeguard-4.0.1.tar.gz", hash = "sha256:db35142d1f92fc8c1b954e5cc03b57810428f9cd4e4604647bdf5764fc5bbba9"}, ] [[package]] @@ -2180,6 +1983,20 @@ files = [ {file = "types_pytz-2024.1.0.20240417-py3-none-any.whl", hash = "sha256:8335d443310e2db7b74e007414e74c4f53b67452c0cb0d228ca359ccfba59659"}, ] +[[package]] +name = "types-requests" +version = "2.32.0.20240712" +requires_python = ">=3.8" +summary = "Typing stubs for requests" +groups = ["default"] +dependencies = [ + "urllib3>=2", +] +files = [ + {file = "types-requests-2.32.0.20240712.tar.gz", hash = "sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358"}, + {file = "types_requests-2.32.0.20240712-py3-none-any.whl", hash = "sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3"}, +] + [[package]] name = "types-tabulate" version = "0.9.0.20240106" @@ -2257,69 +2074,68 @@ files = [ [[package]] name = "uvloop" 
-version = "0.19.0" +version = "0.20.0" requires_python = ">=3.8.0" summary = "Fast implementation of asyncio event loop on top of libuv" groups = ["default"] files = [ - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, - {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, + {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d"}, + {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e"}, + {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9"}, + {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab"}, + {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5"}, + 
{file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00"}, + {file = "uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469"}, ] [[package]] name = "watchdog" -version = "4.0.1" -requires_python = ">=3.8" +version = "5.0.2" +requires_python = ">=3.9" summary = "Filesystem events monitoring" groups = ["docs"] files = [ - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, - {file = "watchdog-4.0.1-py3-none-win32.whl", hash = 
"sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, - {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, - {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, - {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, + {file = "watchdog-5.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:aa9cd6e24126d4afb3752a3e70fce39f92d0e1a58a236ddf6ee823ff7dba28ee"}, + {file = "watchdog-5.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f627c5bf5759fdd90195b0c0431f99cff4867d212a67b384442c51136a098ed7"}, + {file = "watchdog-5.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d7594a6d32cda2b49df3fd9abf9b37c8d2f3eab5df45c24056b4a671ac661619"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5597c051587f8757798216f2485e85eac583c3b343e9aa09127a3a6f82c65ee8"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:53ed1bf71fcb8475dd0ef4912ab139c294c87b903724b6f4a8bd98e026862e6d"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:29e4a2607bd407d9552c502d38b45a05ec26a8e40cc7e94db9bb48f861fa5abc"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:b6dc8f1d770a8280997e4beae7b9a75a33b268c59e033e72c8a10990097e5fde"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:d2ab34adc9bf1489452965cdb16a924e97d4452fcf88a50b21859068b50b5c3b"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:7d1aa7e4bb0f0c65a1a91ba37c10e19dabf7eaaa282c5787e51371f090748f4b"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:726eef8f8c634ac6584f86c9c53353a010d9f311f6c15a034f3800a7a891d941"}, + {file = "watchdog-5.0.2-py3-none-win32.whl", hash = 
"sha256:bda40c57115684d0216556671875e008279dea2dc00fcd3dde126ac8e0d7a2fb"}, + {file = "watchdog-5.0.2-py3-none-win_amd64.whl", hash = "sha256:d010be060c996db725fbce7e3ef14687cdcc76f4ca0e4339a68cc4532c382a73"}, + {file = "watchdog-5.0.2-py3-none-win_ia64.whl", hash = "sha256:3960136b2b619510569b90f0cd96408591d6c251a75c97690f4553ca88889769"}, + {file = "watchdog-5.0.2.tar.gz", hash = "sha256:dcebf7e475001d2cdeb020be630dc5b687e9acdd60d16fea6bb4508e7b94cf76"}, ] [[package]] name = "web3" -version = "6.20.1" -requires_python = ">=3.7.2" -summary = "web3.py" +version = "7.2.0" +requires_python = "<4,>=3.8" +summary = "web3: A Python library for interacting with Ethereum" groups = ["default"] dependencies = [ "aiohttp>=3.7.4.post0", - "eth-abi>=4.0.0", - "eth-account<0.13,>=0.8.0", + "eth-abi>=5.0.1", + "eth-account>=0.13.1", "eth-hash[pycryptodome]>=0.5.1", - "eth-typing!=4.2.0,>=3.0.0", - "eth-utils>=2.1.0", - "hexbytes<0.4.0,>=0.1.0", - "jsonschema>=4.0.0", - "lru-dict<1.3.0,>=1.1.6", - "protobuf>=4.21.6", + "eth-typing>=5.0.0", + "eth-utils>=5.0.0", + "hexbytes>=1.2.0", + "pydantic>=2.4.0", "pyunormalize>=15.0.0", "pywin32>=223; platform_system == \"Windows\"", - "requests>=2.16.0", + "requests>=2.23.0", + "types-requests>=2.0.0", "typing-extensions>=4.0.1", "websockets>=10.0.0", ] files = [ - {file = "web3-6.20.1-py3-none-any.whl", hash = "sha256:16fe72aeb48bbd5f7e7e64b323a0d3a16522a28eb4f19ef9f9dd6ce7ee813c82"}, - {file = "web3-6.20.1.tar.gz", hash = "sha256:a29bc1863734e1c05f128ddbc56878f299ea71776806e667b581a83b5d5be0ed"}, + {file = "web3-7.2.0-py3-none-any.whl", hash = "sha256:35def004dd652a7ee5b2321431797c4aa26697faec4e34196aa2a158e63005ff"}, + {file = "web3-7.2.0.tar.gz", hash = "sha256:98bbee7e73dcdfa567633c694a80e62ce78a0a7b16a9c52027764db06b194be0"}, ] [[package]] @@ -2340,63 +2156,47 @@ files = [ {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, {file = 
"websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, - {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, - {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, - {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, - {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, - {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, - {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, ] [[package]] name = "wheel" -version = "0.43.0" +version = "0.44.0" requires_python = ">=3.8" summary = "A built-package format for Python" groups = ["perf"] files = [ - {file = "wheel-0.43.0-py3-none-any.whl", hash = "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81"}, - {file = "wheel-0.43.0.tar.gz", hash = "sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"}, + {file = "wheel-0.44.0-py3-none-any.whl", hash = "sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f"}, + {file = "wheel-0.44.0.tar.gz", hash = "sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49"}, ] [[package]] name = "yarl" -version = "1.9.4" -requires_python = ">=3.7" +version = "1.9.5" +requires_python = ">=3.8" summary = 
"Yet another URL library" groups = ["default", "test"] dependencies = [ "idna>=2.0", "multidict>=4.0", - "typing-extensions>=3.7.4; python_version < \"3.8\"", -] -files = [ - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] +files = [ + {file = "yarl-1.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8f5a8f396313da1d011a4d7abe179f2e1248710a873bb744138370335912966b"}, + {file = "yarl-1.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0e87e493bf5ee802d271491988701fe09ea34ae6e8b6f5b7319bbd336bed4211"}, + {file = "yarl-1.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bf4edd995dfd9e2f16edf542e917bccae48d1bcf995d95aa2252646c6ee90e4e"}, + {file = "yarl-1.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57030cf990297e1d648c51555805a3892c49311ca3edee99774dfaa40955aa41"}, + {file = "yarl-1.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aabc452eddad8abc7e94ea47162e89d24445066504d26a2f8aa4e1936aedf39f"}, + {file = "yarl-1.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b2bc7ce7bd40ee5ad55291676414062cdfa3a89e11a17619d30b7d18f388e6e"}, + {file = "yarl-1.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd02efd9f252da151db11142256ed9e577fefb25f7eb0ab6826ebee2aaa5d251"}, + {file = "yarl-1.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ec06b1ef69af30ddbc3931d3e3756c83e90abc2bb5ac796c2f0845ce75274386"}, + {file = "yarl-1.9.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cdc3cf183fdbd924b6705d2317719e81f14e4be6603b590bb04a963abb8ca7cd"}, + {file = "yarl-1.9.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:608704b4d2a6da4bfb95cac97a7be9f40d047fbb23b1597579cd0a38aed10753"}, + {file = "yarl-1.9.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f4edf7cd1e763f2c126bd37d69f56ffb495724319a7807e6ff216b9ea5b57c64"}, + {file = "yarl-1.9.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8c468aea27c0fdee3d55a026bd8ccb1b17306837cd50829dc81fbbf0024aa750"}, + {file = "yarl-1.9.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a40d014c18754415655fc0516d9a59c74db0ecd023e9139c99354362db7cf480"}, + {file = "yarl-1.9.5-cp312-cp312-win32.whl", hash = "sha256:d217b391dd96ee523a17920987752d9c0871f8d82312a1db6862013d9f0a3dce"}, + {file = "yarl-1.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:a419be4f256861492c9ee7e51751afdd5967d64b6c98176db605eae4d75ced06"}, + {file = "yarl-1.9.5-py3-none-any.whl", hash = "sha256:f072c95c97badd8b05250b50d8ba072c212c6bf16f0b61e92299a28302a3a021"}, + {file = "yarl-1.9.5.tar.gz", hash = "sha256:5c9b12dbb3d4607dd4d8877d44c8ac410a6eb2cde3792be30aa0b371d88260a2"}, ] diff --git a/pyproject.toml b/pyproject.toml index 7465f07c3..d28304aaa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "dipdup" description = "Modular framework for creating selective indexers and featureful backends for dapps" -version = "8.0.0b5" +version = "8.0.0" license = { text = "MIT" } authors = [ { name = "Lev Gorodetskii", email = "dipdup@drsr.io" }, @@ -23,18 +23,15 @@ keywords = [ "crypto", "cryptocurrencies", "dapp", - "declarative", "ethereum", "evm", "framework", "indexer", "indexers", - "michelson", - "scheduler", + "starknet", "sdk", "smart-contracts", "tezos", - "tzkt", "web3", ] classifiers = [ @@ -42,7 +39,8 @@ classifiers = [ "Environment 
:: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", + "Operating System :: POSIX :: Linux", + "Operating System :: MacOS :: MacOS X", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", @@ -51,42 +49,44 @@ classifiers = [ ] dependencies = [ - # NOTE: Core dependencies; keep pinned between major versions. - "datamodel-code-generator~=0.25.7", - "pydantic~=2.7", - "tortoise-orm==0.21.5", - "web3~=6.19", - # - "aiohttp~=3.9", - "aiolimiter~=1.0", - "anyio~=4.1", - "APScheduler~=3.8", + # FIXME: KeyError in _make_or_get_ssl_context + "aiohttp==3.10.2", + "aiolimiter~=1.1", + "anyio~=4.4", + "APScheduler~=3.10", "async-lru~=2.0", "asyncpg~=0.29", - "click~=8.0", - "eth-abi>=5.0.1,<6", - "orjson~=3.9", - "prometheus-client~=0.17", - "pycryptodome~=3.17", - "pyhumps~=3.0", + "click~=8.1", + "datamodel-code-generator~=0.25.9", + "eth-abi~=5.0", + "lru-dict~=1.3", + "orjson~=3.10", + "prometheus-client~=0.20", + "pycryptodome~=3.20", + "pydantic~=2.9", + "pyhumps~=3.8", "pysignalr~=1.0", "python-dotenv~=1.0", - "python-json-logger>=2.0.7", - "ruamel.yaml~=0.17", - "sentry-sdk~=2.5", - "sqlparse~=0.4", - "starknet-py~=0.22", + "python-json-logger~=2.0", + "ruamel.yaml~=0.18.6", + # FIXME: Argument 1 to "serialize" has incompatible type "Event"; + "sentry-sdk==2.12.0", + "sqlparse~=0.5", + "starknet-py~=0.24", "strict-rfc3339~=0.7", - "survey~=5.3", + "survey~=5.4", "tabulate~=0.9", - "uvloop>=0.19.0", + # NOTE: Heavily patched; don't update without testing. 
+ "tortoise-orm==0.21.6", + "uvloop~=0.20", + "web3~=7.2", ] -[tool.pdm.resolution.overrides] -# NOTE: Update blocked by web3; safe to override (see https://github.com/ethereum/web3.py/pull/3144) -lru-dict = "1.3.0" +[tool.pdm.resolution] +# NOTE: Introduced by starknetpy 0.24; depends on half of the PyPI +excludes = ["bip-utils", "ledgerwallet"] # NOTE: tortoise-orm -aiosqlite = "0.20.0" +overrides = { aiosqlite = "0.20.0" } [project.urls] Homepage = "https://dipdup.io/" @@ -106,7 +106,7 @@ lint = [ ] test = [ "docker", - "pytest~=7.4", + "pytest", "pytest-aiohttp", "pytest-asyncio", "pytest-cov", @@ -122,8 +122,8 @@ perf = [ "scalene", ] -[tool.pdm.build.targets.wheel] -packages = ["src/dipdup"] +[tool.pdm.build] +includes = ["src/dipdup"] [tool.black] line-length = 120 @@ -150,9 +150,14 @@ python_version = "3.12" plugins = ["pydantic.mypy"] strict = true +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true + [tool.pytest.ini_options] addopts="--cov-report=term-missing --cov=dipdup --cov-report=xml -n auto -s -v" asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" log_cli_level = "WARNING" filterwarnings = [ 'ignore:RemovedInMarshmallow4Warning', diff --git a/requirements.txt b/requirements.txt index bd6cbab58..920c9dc0e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ # This file is @generated by PDM. # Please do not edit it manually. 
-aiohappyeyeballs==2.3.5 +aiohappyeyeballs==2.4.0 aiohttp==3.10.2 aiolimiter==1.1.0 aiosignal==1.3.1 @@ -9,48 +9,46 @@ aiosqlite==0.20.0 annotated-types==0.7.0 anyio==4.4.0 apscheduler==3.10.4 -argcomplete==3.4.0 +argcomplete==3.5.0 asgiref==3.8.1 async-lru==2.0.4 asyncpg==0.29.0 -attrs==23.2.0 +attrs==24.2.0 bitarray==2.9.2 black==24.8.0 -certifi==2024.7.4 +certifi==2024.8.30 charset-normalizer==3.3.2 -ckzg==1.0.2 +ckzg==2.0.0 click==8.1.7 colorama==0.4.6; platform_system == "Windows" or sys_platform == "win32" crypto-cpp-py==1.4.4 cytoolz==0.12.3; implementation_name == "cpython" datamodel-code-generator==0.25.9 -dnspython==2.6.1 +dnspython==2.6.1; python_version ~= "3.11" ecdsa==0.18.0 -email-validator==2.2.0 +email-validator==2.2.0; python_version ~= "3.11" eth-abi==5.1.0 -eth-account==0.11.2 +eth-account==0.13.3 eth-hash==0.7.0 eth-hash[pycryptodome]==0.7.0 eth-keyfile==0.8.1 eth-keys==0.5.1 -eth-rlp==1.0.1 -eth-typing==4.3.1 -eth-utils==4.1.1 +eth-rlp==2.1.0 +eth-typing==5.0.0 +eth-utils==5.0.0 frozenlist==1.4.1 genson==1.3.0 -hexbytes==0.3.1 -idna==3.7 +hexbytes==1.2.1 +idna==3.8 inflect==5.6.2 iso8601==1.1.0 isort==5.13.2 jinja2==3.1.4 -jsonschema==4.23.0 -jsonschema-specifications==2023.12.1 -lark==1.1.9 +lark==1.2.2 lru-dict==1.3.0 markupsafe==2.1.5 -marshmallow==3.21.3 -marshmallow-dataclass==8.6.1 +marshmallow==3.22.0 +marshmallow-dataclass==8.7.0 marshmallow-oneofschema==3.1.1 mpmath==1.3.0 msgpack==1.0.8 @@ -61,13 +59,12 @@ packaging==24.1 parsimonious==0.10.0 pathspec==0.12.1 platformdirs==4.2.2 -poseidon-py==0.1.4 +poseidon-py==0.1.5 prometheus-client==0.20.0 -protobuf==5.27.2 pycryptodome==3.20.0 -pydantic==2.8.2 -pydantic-core==2.20.1 -pydantic[email]==2.8.2; python_version ~= "3.11" +pydantic==2.9.1 +pydantic-core==2.23.3 +pydantic[email]==2.9.1; python_version ~= "3.11" pyhumps==3.8.0 pypika-tortoise==0.1.6 pysignalr==1.0.0 @@ -76,31 +73,31 @@ python-json-logger==2.0.7 pytz==2024.1 pyunormalize==15.1.0 pywin32==306; platform_system == "Windows" 
or sys_platform == "win32" or os_name == "nt" -pyyaml==6.0.1 -referencing==0.35.1 -regex==2024.5.15 +pyyaml==6.0.2 +regex==2024.7.24 requests==2.32.3 rlp==4.0.1 -rpds-py==0.19.0 ruamel-yaml==0.18.6 -ruamel-yaml-clib==0.2.8; platform_python_implementation == "CPython" +ruamel-yaml-clib==0.2.8; platform_python_implementation == "CPython" and python_version < "3.13" sentry-sdk==2.12.0 six==1.16.0 sniffio==1.3.1 sqlparse==0.5.1 -starknet-py==0.23.0 +starknet-py==0.24.1 strict-rfc3339==0.7 survey==5.4.0 sympy==1.11.1 tabulate==0.9.0 toolz==0.12.1; implementation_name == "pypy" or implementation_name == "cpython" -tortoise-orm==0.21.5 +tortoise-orm==0.21.6 +typeguard==4.0.1 +types-requests==2.32.0.20240712 typing-extensions==4.12.2 typing-inspect==0.9.0 tzdata==2024.1; platform_system == "Windows" tzlocal==5.2 urllib3==2.2.2 -uvloop==0.19.0 -web3==6.20.1 +uvloop==0.20.0 +web3==7.2.0 websockets==12.0 -yarl==1.9.4 +yarl==1.9.5 diff --git a/schemas/dipdup-3.0.json b/schemas/dipdup-3.0.json index b06fd4bee..f26031272 100644 --- a/schemas/dipdup-3.0.json +++ b/schemas/dipdup-3.0.json @@ -86,12 +86,6 @@ "type": "boolean", "description": "Establish realtime connection and start collecting messages while sync is in progress (faster, but consumes more RAM)." }, - "skip_version_check": { - "default": false, - "title": "skip_version_check", - "type": "boolean", - "description": "Disable warning about running unstable or out-of-date DipDup version." 
- }, "rollback_depth": { "anyOf": [ { @@ -223,6 +217,9 @@ "title": "CoinbaseDatasourceConfig", "type": "object" }, + "EvmAddress": { + "$ref": "#/$defs/Hex" + }, "EvmContractConfig": { "additionalProperties": false, "description": "EVM contract config", @@ -391,14 +388,14 @@ "description": "Always 'evm.node'" }, "url": { + "$ref": "#/$defs/Url", "title": "url", - "type": "string", "description": "EVM node URL" }, "ws_url": { "anyOf": [ { - "type": "string" + "$ref": "#/$defs/WsUrl" }, { "type": "null" @@ -446,8 +443,8 @@ "description": "always 'evm.subsquid'" }, "url": { + "$ref": "#/$defs/Url", "title": "url", - "type": "string", "description": "URL of Subsquid Network API" }, "http": { @@ -610,8 +607,8 @@ "description": "Config for the Hasura integration.", "properties": { "url": { + "$ref": "#/$defs/Url", "title": "url", - "type": "string", "description": "URL of the Hasura instance." }, "admin_secret": { @@ -689,6 +686,9 @@ "title": "HasuraConfig", "type": "object" }, + "Hex": { + "type": "string" + }, "HookConfig": { "additionalProperties": false, "description": "Hook config", @@ -1289,6 +1289,9 @@ "title": "SqliteDatabaseConfig", "type": "object" }, + "StarknetAddress": { + "$ref": "#/$defs/Hex" + }, "StarknetContractConfig": { "additionalProperties": false, "description": "Starknet contract config", @@ -1454,14 +1457,14 @@ "description": "Always 'starknet.node'" }, "url": { + "$ref": "#/$defs/Url", "title": "url", - "type": "string", "description": "Starknet node URL" }, "ws_url": { "anyOf": [ { - "type": "string" + "$ref": "#/$defs/WsUrl" }, { "type": "null" @@ -1509,8 +1512,8 @@ "description": "always 'starknet.subsquid'" }, "url": { + "$ref": "#/$defs/Url", "title": "url", - "type": "string", "description": "URL of Subsquid Network API" }, "http": { @@ -1534,6 +1537,9 @@ "title": "StarknetSubsquidDatasourceConfig", "type": "object" }, + "TezosAddress": { + "type": "string" + }, "TezosBigMapsHandlerConfig": { "additionalProperties": false, "description": 
"Big map handler config", @@ -1603,11 +1609,7 @@ "description": "Mapping of big map diff handlers" }, "skip_history": { - "allOf": [ - { - "$ref": "#/$defs/SkipHistory" - } - ], + "$ref": "#/$defs/SkipHistory", "default": "never", "title": "skip_history", "description": "Fetch only current big map keys ignoring historical changes" @@ -1646,7 +1648,7 @@ "address": { "anyOf": [ { - "type": "string" + "$ref": "#/$defs/TezosAddress" }, { "type": "null" @@ -1662,7 +1664,7 @@ "type": "integer" }, { - "type": "string" + "$ref": "#/$defs/TezosAddress" }, { "type": "null" @@ -2572,9 +2574,9 @@ "description": "always 'tezos.tzkt'" }, "url": { + "$ref": "#/$defs/Url", "default": "https://api.tzkt.io", "title": "url", - "type": "string", "description": "Base API URL, e.g. https://api.tzkt.io/" }, "http": { @@ -2615,6 +2617,16 @@ "title": "TezosTzktDatasourceConfig", "type": "object" }, + "ToStr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + } + ] + }, "TzipMetadataDatasourceConfig": { "additionalProperties": false, "description": "DipDup Metadata datasource config", @@ -2668,6 +2680,12 @@ ], "title": "TzipMetadataNetwork", "type": "string" + }, + "Url": { + "type": "string" + }, + "WsUrl": { + "type": "string" } }, "additionalProperties": false, @@ -2850,14 +2868,7 @@ "description": "User-defined configuration to use in callbacks" }, "spec_version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "number" - } - ], + "$ref": "#/$defs/ToStr", "title": "spec_version", "description": "Version of config specification, currently always `3.0`" }, diff --git a/scripts/demos.py b/scripts/demos.py index f2802500d..7d717bd73 100644 --- a/scripts/demos.py +++ b/scripts/demos.py @@ -21,23 +21,11 @@ def _get_demos() -> dict[str, Path]: - return { - p.name: p - for p in SRC_PATH.iterdir() - if p.is_dir() and p.name.startswith(DEMO_PREFIX) - # FIXME: nairobinet is dead - if p.name != 'demo_tezos_etherlink' - } + return {p.name: p for p in SRC_PATH.iterdir() if 
p.is_dir() and p.name.startswith(DEMO_PREFIX)} def _get_projects() -> dict[str, Path]: - return { - p.name: p - for p in PROJECTS_PATH.iterdir() - if p.is_dir() and p.name.startswith(DEMO_PREFIX) - # FIXME: nairobinet is dead - if p.name != 'demo_tezos_etherlink' - } + return {p.name: p for p in PROJECTS_PATH.iterdir() if p.is_dir() and p.name.startswith(DEMO_PREFIX)} def _render_demo(path: Path) -> None: @@ -62,7 +50,7 @@ def _init_demo(path: Path) -> None: ) # NOTE: We don't need magic symlinks in demo projects. - Path(package_path).joinpath(package).unlink() + Path(package_path).joinpath(package).unlink(missing_ok=True) def _rm_demo(path: Path) -> None: diff --git a/scripts/docs.py b/scripts/docs.py index 635ef390f..ca95ae501 100755 --- a/scripts/docs.py +++ b/scripts/docs.py @@ -190,6 +190,14 @@ class ReferencePage(TypedDict): 'single-title', 'single-h1', ) +MARKDOWNLINT_CMD = ( + 'markdownlint', + '-f', + '--disable', + *MARKDOWNLINT_IGNORE, + '--', + 'docs', +) # NOTE: As in Keep a Changelog spec @@ -204,9 +212,6 @@ class ReferencePage(TypedDict): 'Other', ) -# NOTE: Don't process older versions -CHANGELOG_FIRST_VERSION = 7 - class ScriptObserver(FileSystemEventHandler): def on_modified(self, event: FileSystemEvent) -> None: @@ -233,21 +238,21 @@ def on_rst_modified(self) -> None: check=True, ) - def on_modified(self, event: FileSystemEvent, with_rst: bool = True) -> None: - src_file = Path(event.src_path).relative_to(self._source) - if src_file.is_dir(): + def on_modified( + self, + event: FileSystemEvent, + skip_rst: bool = False, + ) -> None: + src_file = Path(event.src_path).relative_to(self._source) # type: ignore[arg-type] + if src_file.is_dir() or 'html' in src_file.parts: return # NOTE: Sphinx autodoc reference; rebuild HTML if src_file.name.endswith('.rst'): - if with_rst: + if not skip_rst: self.on_rst_modified() return - # FIXME: Frontend dies otherwise - if not (src_file.name[0] == '_' or src_file.name[0].isdigit()): - return - if event.event_type 
== EVENT_TYPE_DELETED: dst_file = (self._destination / src_file.relative_to(self._source)).resolve() dst_file.unlink(True) @@ -256,8 +261,12 @@ def on_modified(self, event: FileSystemEvent, with_rst: bool = True) -> None: if event.event_type not in (EVENT_TYPE_CREATED, EVENT_TYPE_MODIFIED, EVENT_TYPE_MOVED): return + # NOTE: Vite doesn't like images in content directory; add '../public' + destination = self._destination.parent.parent / 'public' if 'public' in src_file.parts else self._destination + src_file = self._source / src_file - dst_file = (self._destination / src_file.relative_to(self._source)).resolve() + dst_file = (destination / src_file.relative_to(self._source)).resolve() + # NOTE: Make sure the destination directory exists dst_file.parent.mkdir(parents=True, exist_ok=True) @@ -314,12 +323,12 @@ def callback(data: str) -> str: @contextmanager def observer(path: Path, handler: Any) -> Iterator[BaseObserver]: observer = Observer() - observer.schedule(handler, path=path, recursive=True) # type: ignore[no-untyped-call] - observer.start() # type: ignore[no-untyped-call] + observer.schedule(handler, path=str(path), recursive=True) + observer.start() yield observer - observer.stop() # type: ignore[no-untyped-call] + observer.stop() observer.join() @@ -378,7 +387,10 @@ def build(source: Path, destination: Path, watch: bool, serve: bool) -> None: ) event_handler.on_rst_modified() for path in source.glob('**/*'): - event_handler.on_modified(FileModifiedEvent(str(path)), with_rst=False) + event_handler.on_modified( + FileModifiedEvent(str(path)), + skip_rst=True, + ) if not (watch or serve): return @@ -424,6 +436,10 @@ def check_links(source: Path, http: bool) -> None: link, anchor = link.split('#') if '#' in link else (link, None) + # NOTE: Vite doesn't like images in content directory; revert path hack + if 'public' in link: + link = link.replace('../../public', '../public') + full_path = path.parent.joinpath(link) if not full_path.exists(): 
logging.error('broken link: `%s`', full_path) @@ -604,7 +620,7 @@ def markdownlint() -> None: green_echo('=> Running markdownlint') try: subprocess.run( - ('markdownlint', '-f', '--disable', *MARKDOWNLINT_IGNORE, '--', 'docs'), + MARKDOWNLINT_CMD, check=True, ) except subprocess.CalledProcessError: @@ -642,9 +658,6 @@ def merge_changelog() -> None: major = int(version.split('.')[0]) minor = int(version.split('.')[1]) - if major < CHANGELOG_FIRST_VERSION: - continue - version_path = Path(f'docs/9.release-notes/_{version}_changelog.md') lines: list[str] = [''] diff --git a/src/demo_blank/Makefile b/src/demo_blank/Makefile index a61c566b6..3125aaf14 100644 --- a/src/demo_blank/Makefile +++ b/src/demo_blank/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . 
## diff --git a/src/demo_blank/deploy/compose.sqlite.yaml b/src/demo_blank/deploy/compose.sqlite.yaml index acbe19272..c4506a42e 100644 --- a/src/demo_blank/deploy/compose.sqlite.yaml +++ b/src/demo_blank/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_blank services: diff --git a/src/demo_blank/deploy/compose.swarm.yaml b/src/demo_blank/deploy/compose.swarm.yaml index 8455c8052..1fd34e002 100644 --- a/src/demo_blank/deploy/compose.swarm.yaml +++ b/src/demo_blank/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_blank services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_blank/deploy/compose.yaml b/src/demo_blank/deploy/compose.yaml index b9dfcae6e..2c6b74e5b 100644 --- a/src/demo_blank/deploy/compose.yaml +++ b/src/demo_blank/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_blank services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_blank/pyproject.toml b/src/demo_blank/pyproject.toml index 6b12d3ed9..820b2a3f1 100644 --- a/src/demo_blank/pyproject.toml +++ b/src/demo_blank/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_blank" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} 
-black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_blank" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_blank" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_evm_events/Makefile b/src/demo_evm_events/Makefile index 010085850..715c3c407 100644 --- a/src/demo_evm_events/Makefile +++ b/src/demo_evm_events/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . 
## diff --git a/src/demo_evm_events/deploy/compose.sqlite.yaml b/src/demo_evm_events/deploy/compose.sqlite.yaml index 0fcc631d2..697b0266c 100644 --- a/src/demo_evm_events/deploy/compose.sqlite.yaml +++ b/src/demo_evm_events/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_evm_events services: diff --git a/src/demo_evm_events/deploy/compose.swarm.yaml b/src/demo_evm_events/deploy/compose.swarm.yaml index dae26314b..42a0c1efd 100644 --- a/src/demo_evm_events/deploy/compose.swarm.yaml +++ b/src/demo_evm_events/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_evm_events services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_evm_events/deploy/compose.yaml b/src/demo_evm_events/deploy/compose.yaml index 238422777..c1f60cd28 100644 --- a/src/demo_evm_events/deploy/compose.yaml +++ b/src/demo_evm_events/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_evm_events services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_evm_events/pyproject.toml b/src/demo_evm_events/pyproject.toml index a04b92bd4..be6008466 100644 --- a/src/demo_evm_events/pyproject.toml +++ b/src/demo_evm_events/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_evm_events" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make 
format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_evm_events" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_evm_events" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_evm_events/types/eth_usdt/evm_events/transfer.py b/src/demo_evm_events/types/eth_usdt/evm_events/transfer.py index e498099a5..199eb8e3b 100644 --- a/src/demo_evm_events/types/eth_usdt/evm_events/transfer.py +++ b/src/demo_evm_events/types/eth_usdt/evm_events/transfer.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_evm_transactions/Makefile b/src/demo_evm_transactions/Makefile index 8702d8d72..092eb3a8f 100644 --- a/src/demo_evm_transactions/Makefile +++ b/src/demo_evm_transactions/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . 
## diff --git a/src/demo_evm_transactions/deploy/compose.sqlite.yaml b/src/demo_evm_transactions/deploy/compose.sqlite.yaml index 6cd82b582..251751cb2 100644 --- a/src/demo_evm_transactions/deploy/compose.sqlite.yaml +++ b/src/demo_evm_transactions/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_evm_transactions services: diff --git a/src/demo_evm_transactions/deploy/compose.swarm.yaml b/src/demo_evm_transactions/deploy/compose.swarm.yaml index bdb67ab79..087ef170d 100644 --- a/src/demo_evm_transactions/deploy/compose.swarm.yaml +++ b/src/demo_evm_transactions/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_evm_transactions services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_evm_transactions/deploy/compose.yaml b/src/demo_evm_transactions/deploy/compose.yaml index 27178d8dd..19c56a231 100644 --- a/src/demo_evm_transactions/deploy/compose.yaml +++ b/src/demo_evm_transactions/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_evm_transactions services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_evm_transactions/pyproject.toml b/src/demo_evm_transactions/pyproject.toml index b04f33a0e..e96819e7c 100644 --- a/src/demo_evm_transactions/pyproject.toml +++ b/src/demo_evm_transactions/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_evm_transactions" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", 
help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_evm_transactions" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_evm_transactions" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_evm_transactions/types/eth_usdt/evm_transactions/transfer.py b/src/demo_evm_transactions/types/eth_usdt/evm_transactions/transfer.py index b680f3296..f1bb36e15 100644 --- a/src/demo_evm_transactions/types/eth_usdt/evm_transactions/transfer.py +++ b/src/demo_evm_transactions/types/eth_usdt/evm_transactions/transfer.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_evm_uniswap/Makefile b/src/demo_evm_uniswap/Makefile index 0abd9cab8..47a23e586 100644 --- a/src/demo_evm_uniswap/Makefile +++ b/src/demo_evm_uniswap/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . 
mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . ## diff --git a/src/demo_evm_uniswap/deploy/compose.sqlite.yaml b/src/demo_evm_uniswap/deploy/compose.sqlite.yaml index 3b9e8fe75..9cae453a1 100644 --- a/src/demo_evm_uniswap/deploy/compose.sqlite.yaml +++ b/src/demo_evm_uniswap/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_evm_uniswap services: diff --git a/src/demo_evm_uniswap/deploy/compose.swarm.yaml b/src/demo_evm_uniswap/deploy/compose.swarm.yaml index 7239bd8c3..99a27f92c 100644 --- a/src/demo_evm_uniswap/deploy/compose.swarm.yaml +++ b/src/demo_evm_uniswap/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_evm_uniswap services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_evm_uniswap/deploy/compose.yaml b/src/demo_evm_uniswap/deploy/compose.yaml index 5ff082540..30ad0d24b 100644 --- a/src/demo_evm_uniswap/deploy/compose.yaml +++ b/src/demo_evm_uniswap/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_evm_uniswap services: @@ -19,7 +18,7 @@ services: db: image: timescale/timescaledb-ha:pg15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/home/postgres/pgdata/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_evm_uniswap/pyproject.toml b/src/demo_evm_uniswap/pyproject.toml index 027bbf2a5..226647777 100644 --- a/src/demo_evm_uniswap/pyproject.toml +++ b/src/demo_evm_uniswap/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_evm_uniswap" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", 
help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_evm_uniswap" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_evm_uniswap" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_evm_uniswap/types/factory/evm_events/pool_created.py b/src/demo_evm_uniswap/types/factory/evm_events/pool_created.py index 75a16689c..dcdbdd40b 100644 --- a/src/demo_evm_uniswap/types/factory/evm_events/pool_created.py +++ b/src/demo_evm_uniswap/types/factory/evm_events/pool_created.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_evm_uniswap/types/pool/evm_events/burn.py b/src/demo_evm_uniswap/types/pool/evm_events/burn.py index 2bc1f7fa6..a34fb42b6 100644 --- a/src/demo_evm_uniswap/types/pool/evm_events/burn.py +++ b/src/demo_evm_uniswap/types/pool/evm_events/burn.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_evm_uniswap/types/pool/evm_events/collect.py b/src/demo_evm_uniswap/types/pool/evm_events/collect.py index c86831c6f..ba4ee114b 100644 --- a/src/demo_evm_uniswap/types/pool/evm_events/collect.py 
+++ b/src/demo_evm_uniswap/types/pool/evm_events/collect.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_evm_uniswap/types/pool/evm_events/flash.py b/src/demo_evm_uniswap/types/pool/evm_events/flash.py index c81888eb4..c037f3d4d 100644 --- a/src/demo_evm_uniswap/types/pool/evm_events/flash.py +++ b/src/demo_evm_uniswap/types/pool/evm_events/flash.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_evm_uniswap/types/pool/evm_events/initialize.py b/src/demo_evm_uniswap/types/pool/evm_events/initialize.py index c752c5119..1624d90fb 100644 --- a/src/demo_evm_uniswap/types/pool/evm_events/initialize.py +++ b/src/demo_evm_uniswap/types/pool/evm_events/initialize.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_evm_uniswap/types/pool/evm_events/mint.py b/src/demo_evm_uniswap/types/pool/evm_events/mint.py index 7e4efff50..4f54cc1cf 100644 --- a/src/demo_evm_uniswap/types/pool/evm_events/mint.py +++ b/src/demo_evm_uniswap/types/pool/evm_events/mint.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_evm_uniswap/types/pool/evm_events/swap.py b/src/demo_evm_uniswap/types/pool/evm_events/swap.py index 86a8f627c..6b56f59be 100644 --- a/src/demo_evm_uniswap/types/pool/evm_events/swap.py +++ b/src/demo_evm_uniswap/types/pool/evm_events/swap.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_evm_uniswap/types/position_manager/evm_events/collect.py b/src/demo_evm_uniswap/types/position_manager/evm_events/collect.py index ee0aa469f..cc1c101c1 100644 --- a/src/demo_evm_uniswap/types/position_manager/evm_events/collect.py +++ 
b/src/demo_evm_uniswap/types/position_manager/evm_events/collect.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_evm_uniswap/types/position_manager/evm_events/decrease_liquidity.py b/src/demo_evm_uniswap/types/position_manager/evm_events/decrease_liquidity.py index 43202de92..5887cdadf 100644 --- a/src/demo_evm_uniswap/types/position_manager/evm_events/decrease_liquidity.py +++ b/src/demo_evm_uniswap/types/position_manager/evm_events/decrease_liquidity.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_evm_uniswap/types/position_manager/evm_events/increase_liquidity.py b/src/demo_evm_uniswap/types/position_manager/evm_events/increase_liquidity.py index 6454d262d..551004ffc 100644 --- a/src/demo_evm_uniswap/types/position_manager/evm_events/increase_liquidity.py +++ b/src/demo_evm_uniswap/types/position_manager/evm_events/increase_liquidity.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_evm_uniswap/types/position_manager/evm_events/transfer.py b/src/demo_evm_uniswap/types/position_manager/evm_events/transfer.py index 985054330..965449e2c 100644 --- a/src/demo_evm_uniswap/types/position_manager/evm_events/transfer.py +++ b/src/demo_evm_uniswap/types/position_manager/evm_events/transfer.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_starknet_events/Makefile b/src/demo_starknet_events/Makefile index 4a99f03c5..36be0402a 100644 --- a/src/demo_starknet_events/Makefile +++ b/src/demo_starknet_events/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self 
update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . ## diff --git a/src/demo_starknet_events/deploy/compose.sqlite.yaml b/src/demo_starknet_events/deploy/compose.sqlite.yaml index ed33d4d06..0711af314 100644 --- a/src/demo_starknet_events/deploy/compose.sqlite.yaml +++ b/src/demo_starknet_events/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_starknet_events services: diff --git a/src/demo_starknet_events/deploy/compose.swarm.yaml b/src/demo_starknet_events/deploy/compose.swarm.yaml index 8bc9b5cb6..96befe249 100644 --- a/src/demo_starknet_events/deploy/compose.swarm.yaml +++ b/src/demo_starknet_events/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_starknet_events services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_starknet_events/deploy/compose.yaml b/src/demo_starknet_events/deploy/compose.yaml index 648f0040a..f7b9107a5 100644 --- a/src/demo_starknet_events/deploy/compose.yaml +++ b/src/demo_starknet_events/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_starknet_events services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_starknet_events/pyproject.toml b/src/demo_starknet_events/pyproject.toml index 3c5e71320..48e47f988 100644 --- a/src/demo_starknet_events/pyproject.toml +++ b/src/demo_starknet_events/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 
8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_starknet_events" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_starknet_events" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_starknet_events" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_starknet_events/types/stark_usdt/starknet_events/transfer.py b/src/demo_starknet_events/types/stark_usdt/starknet_events/transfer.py index db5707d59..0f59827ef 100644 --- a/src/demo_starknet_events/types/stark_usdt/starknet_events/transfer.py +++ b/src/demo_starknet_events/types/stark_usdt/starknet_events/transfer.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_auction/Makefile b/src/demo_tezos_auction/Makefile index 24889ed90..225b46de1 100644 --- a/src/demo_tezos_auction/Makefile +++ b/src/demo_tezos_auction/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update 
dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . ## diff --git a/src/demo_tezos_auction/deploy/compose.sqlite.yaml b/src/demo_tezos_auction/deploy/compose.sqlite.yaml index 29d2c4320..5f14ef88c 100644 --- a/src/demo_tezos_auction/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_auction/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_auction services: diff --git a/src/demo_tezos_auction/deploy/compose.swarm.yaml b/src/demo_tezos_auction/deploy/compose.swarm.yaml index c2d9046e0..f610a0859 100644 --- a/src/demo_tezos_auction/deploy/compose.swarm.yaml +++ b/src/demo_tezos_auction/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_auction services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_auction/deploy/compose.yaml b/src/demo_tezos_auction/deploy/compose.yaml index bcc4562c8..d0bb18e27 100644 --- a/src/demo_tezos_auction/deploy/compose.yaml +++ b/src/demo_tezos_auction/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_auction services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_auction/pyproject.toml b/src/demo_tezos_auction/pyproject.toml index 397f3427e..3176cbd12 100644 --- a/src/demo_tezos_auction/pyproject.toml +++ b/src/demo_tezos_auction/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 
8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_auction" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_auction" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_auction" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_auction/types/tzcolors_auction/tezos_parameters/bid.py b/src/demo_tezos_auction/types/tzcolors_auction/tezos_parameters/bid.py index 03ab35e69..913b0b0b5 100644 --- a/src/demo_tezos_auction/types/tzcolors_auction/tezos_parameters/bid.py +++ b/src/demo_tezos_auction/types/tzcolors_auction/tezos_parameters/bid.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_auction/types/tzcolors_auction/tezos_parameters/create_auction.py b/src/demo_tezos_auction/types/tzcolors_auction/tezos_parameters/create_auction.py index 47b958ff0..c59690d43 100644 --- a/src/demo_tezos_auction/types/tzcolors_auction/tezos_parameters/create_auction.py +++ 
b/src/demo_tezos_auction/types/tzcolors_auction/tezos_parameters/create_auction.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_auction/types/tzcolors_auction/tezos_parameters/withdraw.py b/src/demo_tezos_auction/types/tzcolors_auction/tezos_parameters/withdraw.py index ab50f051a..b28457c23 100644 --- a/src/demo_tezos_auction/types/tzcolors_auction/tezos_parameters/withdraw.py +++ b/src/demo_tezos_auction/types/tzcolors_auction/tezos_parameters/withdraw.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_auction/types/tzcolors_auction/tezos_storage.py b/src/demo_tezos_auction/types/tzcolors_auction/tezos_storage.py index a12579e12..82e34659c 100644 --- a/src/demo_tezos_auction/types/tzcolors_auction/tezos_storage.py +++ b/src/demo_tezos_auction/types/tzcolors_auction/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_big_maps/Makefile b/src/demo_tezos_big_maps/Makefile index 0b7a204da..d0afb9e5e 100644 --- a/src/demo_tezos_big_maps/Makefile +++ b/src/demo_tezos_big_maps/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . 
## diff --git a/src/demo_tezos_big_maps/deploy/compose.sqlite.yaml b/src/demo_tezos_big_maps/deploy/compose.sqlite.yaml index f41c49631..162deba88 100644 --- a/src/demo_tezos_big_maps/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_big_maps/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_big_maps services: diff --git a/src/demo_tezos_big_maps/deploy/compose.swarm.yaml b/src/demo_tezos_big_maps/deploy/compose.swarm.yaml index 0853a14de..817734f3a 100644 --- a/src/demo_tezos_big_maps/deploy/compose.swarm.yaml +++ b/src/demo_tezos_big_maps/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_big_maps services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_big_maps/deploy/compose.yaml b/src/demo_tezos_big_maps/deploy/compose.yaml index 5fa187f94..96dfa394a 100644 --- a/src/demo_tezos_big_maps/deploy/compose.yaml +++ b/src/demo_tezos_big_maps/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_big_maps services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_big_maps/handlers/on_update_records.py b/src/demo_tezos_big_maps/handlers/on_update_records.py index f259a81fe..707522a37 100644 --- a/src/demo_tezos_big_maps/handlers/on_update_records.py +++ b/src/demo_tezos_big_maps/handlers/on_update_records.py @@ -16,7 +16,7 @@ async def on_update_records( record_name = bytes.fromhex(store_records.key.root).decode() record_path = record_name.split('.') - ctx.logger.info('Processing `%s`', record_name) + 
ctx.logger.debug('Processing `%s`', record_name) level = store_records.value.level if len(record_path) != int(level): diff --git a/src/demo_tezos_big_maps/pyproject.toml b/src/demo_tezos_big_maps/pyproject.toml index 0aee872de..833df370b 100644 --- a/src/demo_tezos_big_maps/pyproject.toml +++ b/src/demo_tezos_big_maps/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_big_maps" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_big_maps" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_big_maps" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_expiry_map_key.py b/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_expiry_map_key.py index b4077a6fa..2aaec3f55 100644 --- a/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_expiry_map_key.py +++ b/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_expiry_map_key.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by 
DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_expiry_map_value.py b/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_expiry_map_value.py index cfa0e7bab..b5509791f 100644 --- a/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_expiry_map_value.py +++ b/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_expiry_map_value.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_records_key.py b/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_records_key.py index 6789a85c8..f91fa91ec 100644 --- a/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_records_key.py +++ b/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_records_key.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_records_value.py b/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_records_value.py index f5aec2ebc..4f853abad 100644 --- a/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_records_value.py +++ b/src/demo_tezos_big_maps/types/name_registry/tezos_big_maps/store_records_value.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dao/Makefile b/src/demo_tezos_dao/Makefile index b2cac6949..91437d585 100644 --- a/src/demo_tezos_dao/Makefile +++ b/src/demo_tezos_dao/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black 
@@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . ## diff --git a/src/demo_tezos_dao/deploy/compose.sqlite.yaml b/src/demo_tezos_dao/deploy/compose.sqlite.yaml index 1167e7da5..5eef6451c 100644 --- a/src/demo_tezos_dao/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_dao/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_dao services: diff --git a/src/demo_tezos_dao/deploy/compose.swarm.yaml b/src/demo_tezos_dao/deploy/compose.swarm.yaml index 427064d1e..4432c6c79 100644 --- a/src/demo_tezos_dao/deploy/compose.swarm.yaml +++ b/src/demo_tezos_dao/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_dao services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_dao/deploy/compose.yaml b/src/demo_tezos_dao/deploy/compose.yaml index d6ee7270f..e147d292c 100644 --- a/src/demo_tezos_dao/deploy/compose.yaml +++ b/src/demo_tezos_dao/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_dao services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_dao/pyproject.toml b/src/demo_tezos_dao/pyproject.toml index 3672d606a..92fec878f 100644 --- a/src/demo_tezos_dao/pyproject.toml +++ b/src/demo_tezos_dao/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_dao" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", 
help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_dao" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_dao" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_dao/types/registry/tezos_parameters/propose.py b/src/demo_tezos_dao/types/registry/tezos_parameters/propose.py index 1a1813431..f0248c0ca 100644 --- a/src/demo_tezos_dao/types/registry/tezos_parameters/propose.py +++ b/src/demo_tezos_dao/types/registry/tezos_parameters/propose.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dao/types/registry/tezos_storage.py b/src/demo_tezos_dao/types/registry/tezos_storage.py index edc1bcf5a..9b45c24cf 100644 --- a/src/demo_tezos_dao/types/registry/tezos_storage.py +++ b/src/demo_tezos_dao/types/registry/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/Makefile b/src/demo_tezos_dex/Makefile index a1880393c..ea08d0868 100644 --- a/src/demo_tezos_dex/Makefile +++ b/src/demo_tezos_dex/Makefile @@ -13,6 +13,15 @@ help: ## Show this help 
(default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . ## diff --git a/src/demo_tezos_dex/deploy/compose.sqlite.yaml b/src/demo_tezos_dex/deploy/compose.sqlite.yaml index 976b690ad..45de223bd 100644 --- a/src/demo_tezos_dex/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_dex/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_dex services: diff --git a/src/demo_tezos_dex/deploy/compose.swarm.yaml b/src/demo_tezos_dex/deploy/compose.swarm.yaml index 416161492..2208b78b3 100644 --- a/src/demo_tezos_dex/deploy/compose.swarm.yaml +++ b/src/demo_tezos_dex/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_dex services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_dex/deploy/compose.yaml b/src/demo_tezos_dex/deploy/compose.yaml index 6dd154485..dc2262c6c 100644 --- a/src/demo_tezos_dex/deploy/compose.yaml +++ b/src/demo_tezos_dex/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_dex services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_dex/pyproject.toml b/src/demo_tezos_dex/pyproject.toml index bb52da211..04c49a2b4 100644 --- a/src/demo_tezos_dex/pyproject.toml +++ 
b/src/demo_tezos_dex/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_dex" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_dex" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_dex" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_dex/types/fa12_token/tezos_parameters/transfer.py b/src/demo_tezos_dex/types/fa12_token/tezos_parameters/transfer.py index b1c6ed4e7..fc96d1a65 100644 --- a/src/demo_tezos_dex/types/fa12_token/tezos_parameters/transfer.py +++ b/src/demo_tezos_dex/types/fa12_token/tezos_parameters/transfer.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/fa12_token/tezos_storage.py b/src/demo_tezos_dex/types/fa12_token/tezos_storage.py index 5e9185c3b..4d304ab01 100644 --- a/src/demo_tezos_dex/types/fa12_token/tezos_storage.py +++ b/src/demo_tezos_dex/types/fa12_token/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by 
DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/fa2_token/tezos_parameters/transfer.py b/src/demo_tezos_dex/types/fa2_token/tezos_parameters/transfer.py index 03adbf083..7af7a6449 100644 --- a/src/demo_tezos_dex/types/fa2_token/tezos_parameters/transfer.py +++ b/src/demo_tezos_dex/types/fa2_token/tezos_parameters/transfer.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/fa2_token/tezos_storage.py b/src/demo_tezos_dex/types/fa2_token/tezos_storage.py index a74c735b3..7ea4ee7e6 100644 --- a/src/demo_tezos_dex/types/fa2_token/tezos_storage.py +++ b/src/demo_tezos_dex/types/fa2_token/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/divest_liquidity.py b/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/divest_liquidity.py index 7639d9d89..5e6fd36df 100644 --- a/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/divest_liquidity.py +++ b/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/divest_liquidity.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/invest_liquidity.py b/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/invest_liquidity.py index 1b0e23066..c272829b6 100644 --- a/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/invest_liquidity.py +++ b/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/invest_liquidity.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/tez_to_token_payment.py b/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/tez_to_token_payment.py index 1b1b13888..221b68ecc 100644 --- 
a/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/tez_to_token_payment.py +++ b/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/tez_to_token_payment.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/token_to_tez_payment.py b/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/token_to_tez_payment.py index 878b586f1..9d1c155ad 100644 --- a/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/token_to_tez_payment.py +++ b/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/token_to_tez_payment.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/transfer.py b/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/transfer.py index b1c6ed4e7..fc96d1a65 100644 --- a/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/transfer.py +++ b/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/transfer.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/withdraw_profit.py b/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/withdraw_profit.py index 6cc88f67b..649b75098 100644 --- a/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/withdraw_profit.py +++ b/src/demo_tezos_dex/types/quipu_fa12/tezos_parameters/withdraw_profit.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa12/tezos_storage.py b/src/demo_tezos_dex/types/quipu_fa12/tezos_storage.py index b9080b3be..ed602091d 100644 --- a/src/demo_tezos_dex/types/quipu_fa12/tezos_storage.py +++ b/src/demo_tezos_dex/types/quipu_fa12/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from 
__future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/divest_liquidity.py b/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/divest_liquidity.py index 7639d9d89..5e6fd36df 100644 --- a/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/divest_liquidity.py +++ b/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/divest_liquidity.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/invest_liquidity.py b/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/invest_liquidity.py index 1b0e23066..c272829b6 100644 --- a/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/invest_liquidity.py +++ b/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/invest_liquidity.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/tez_to_token_payment.py b/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/tez_to_token_payment.py index 1b1b13888..221b68ecc 100644 --- a/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/tez_to_token_payment.py +++ b/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/tez_to_token_payment.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/token_to_tez_payment.py b/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/token_to_tez_payment.py index 878b586f1..9d1c155ad 100644 --- a/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/token_to_tez_payment.py +++ b/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/token_to_tez_payment.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/transfer.py 
b/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/transfer.py index 03adbf083..7af7a6449 100644 --- a/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/transfer.py +++ b/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/transfer.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/withdraw_profit.py b/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/withdraw_profit.py index 6cc88f67b..649b75098 100644 --- a/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/withdraw_profit.py +++ b/src/demo_tezos_dex/types/quipu_fa2/tezos_parameters/withdraw_profit.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_dex/types/quipu_fa2/tezos_storage.py b/src/demo_tezos_dex/types/quipu_fa2/tezos_storage.py index 2945343d6..2e29f4a40 100644 --- a/src/demo_tezos_dex/types/quipu_fa2/tezos_storage.py +++ b/src/demo_tezos_dex/types/quipu_fa2/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_domains/Makefile b/src/demo_tezos_domains/Makefile index 035f7d7b2..a9fc034bd 100644 --- a/src/demo_tezos_domains/Makefile +++ b/src/demo_tezos_domains/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . 
## diff --git a/src/demo_tezos_domains/deploy/compose.sqlite.yaml b/src/demo_tezos_domains/deploy/compose.sqlite.yaml index 1df796b73..22519d470 100644 --- a/src/demo_tezos_domains/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_domains/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_domains services: diff --git a/src/demo_tezos_domains/deploy/compose.swarm.yaml b/src/demo_tezos_domains/deploy/compose.swarm.yaml index 5204005bc..9a468f051 100644 --- a/src/demo_tezos_domains/deploy/compose.swarm.yaml +++ b/src/demo_tezos_domains/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_domains services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_domains/deploy/compose.yaml b/src/demo_tezos_domains/deploy/compose.yaml index d676bbf0c..12bac33c8 100644 --- a/src/demo_tezos_domains/deploy/compose.yaml +++ b/src/demo_tezos_domains/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_domains services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_domains/handlers/on_update_records.py b/src/demo_tezos_domains/handlers/on_update_records.py index fcd746447..3d2425e87 100644 --- a/src/demo_tezos_domains/handlers/on_update_records.py +++ b/src/demo_tezos_domains/handlers/on_update_records.py @@ -29,7 +29,7 @@ async def on_update_records( record_name = bytes.fromhex(store_records.key.root).decode() record_path = record_name.split('.') domain_data = decode_domain_data(store_records.value.data) - 
ctx.logger.info('Processing `%s`', record_name) + ctx.logger.debug('Processing `%s`', record_name) if len(record_path) != int(store_records.value.level): ctx.logger.warning( diff --git a/src/demo_tezos_domains/pyproject.toml b/src/demo_tezos_domains/pyproject.toml index 82e84d062..258ffaf89 100644 --- a/src/demo_tezos_domains/pyproject.toml +++ b/src/demo_tezos_domains/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_domains" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_domains" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_domains" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_expiry_map_key.py b/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_expiry_map_key.py index b4077a6fa..2aaec3f55 100644 --- a/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_expiry_map_key.py +++ b/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_expiry_map_key.py @@ -1,4 +1,4 @@ 
-# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_expiry_map_value.py b/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_expiry_map_value.py index cfa0e7bab..b5509791f 100644 --- a/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_expiry_map_value.py +++ b/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_expiry_map_value.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_records_key.py b/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_records_key.py index 6789a85c8..f91fa91ec 100644 --- a/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_records_key.py +++ b/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_records_key.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_records_value.py b/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_records_value.py index f5aec2ebc..4f853abad 100644 --- a/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_records_value.py +++ b/src/demo_tezos_domains/types/name_registry/tezos_big_maps/store_records_value.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_etherlink/Makefile b/src/demo_tezos_etherlink/Makefile index 85f69cb3c..c38e75050 100644 --- a/src/demo_tezos_etherlink/Makefile +++ b/src/demo_tezos_etherlink/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup 
self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . ## diff --git a/src/demo_tezos_etherlink/deploy/.env.default b/src/demo_tezos_etherlink/deploy/.env.default index 94e440099..e916c0e80 100644 --- a/src/demo_tezos_etherlink/deploy/.env.default +++ b/src/demo_tezos_etherlink/deploy/.env.default @@ -12,4 +12,4 @@ POSTGRES_PASSWORD= POSTGRES_USER=dipdup SENTRY_DSN='' SENTRY_ENVIRONMENT='' -TZKT_URL=https://api.nairobinet.tzkt.io +TZKT_URL=https://api.parisnet.tzkt.io diff --git a/src/demo_tezos_etherlink/deploy/compose.sqlite.yaml b/src/demo_tezos_etherlink/deploy/compose.sqlite.yaml index b3c6d24f0..4f98f020c 100644 --- a/src/demo_tezos_etherlink/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_etherlink/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_etherlink services: diff --git a/src/demo_tezos_etherlink/deploy/compose.swarm.yaml b/src/demo_tezos_etherlink/deploy/compose.swarm.yaml index 6db27f1d3..1fadcd252 100644 --- a/src/demo_tezos_etherlink/deploy/compose.swarm.yaml +++ b/src/demo_tezos_etherlink/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_etherlink services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_etherlink/deploy/compose.yaml b/src/demo_tezos_etherlink/deploy/compose.yaml index 29b1015f1..f342e95e8 100644 --- a/src/demo_tezos_etherlink/deploy/compose.yaml +++ b/src/demo_tezos_etherlink/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_etherlink services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ 
-37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_etherlink/deploy/sqlite.env.default b/src/demo_tezos_etherlink/deploy/sqlite.env.default index a92b3e5f7..341383b2f 100644 --- a/src/demo_tezos_etherlink/deploy/sqlite.env.default +++ b/src/demo_tezos_etherlink/deploy/sqlite.env.default @@ -2,4 +2,4 @@ # Create a copy with .env extension, fill it with your values and run DipDup with `--env-file` option. # SQLITE_PATH=/tmp/demo_tezos_etherlink.sqlite -TZKT_URL=https://api.nairobinet.tzkt.io +TZKT_URL=https://api.parisnet.tzkt.io diff --git a/src/demo_tezos_etherlink/deploy/swarm.env.default b/src/demo_tezos_etherlink/deploy/swarm.env.default index 55481ceac..aad3fab70 100644 --- a/src/demo_tezos_etherlink/deploy/swarm.env.default +++ b/src/demo_tezos_etherlink/deploy/swarm.env.default @@ -12,4 +12,4 @@ POSTGRES_PASSWORD= POSTGRES_USER=dipdup SENTRY_DSN='' SENTRY_ENVIRONMENT='' -TZKT_URL=https://api.nairobinet.tzkt.io +TZKT_URL=https://api.parisnet.tzkt.io diff --git a/src/demo_tezos_etherlink/dipdup.yaml b/src/demo_tezos_etherlink/dipdup.yaml index 3bb1f2acf..ce4e84c7e 100644 --- a/src/demo_tezos_etherlink/dipdup.yaml +++ b/src/demo_tezos_etherlink/dipdup.yaml @@ -4,20 +4,20 @@ package: demo_tezos_etherlink datasources: tzkt: kind: tezos.tzkt - url: ${TZKT_URL:-https://api.nairobinet.tzkt.io} + url: ${TZKT_URL:-https://api.parisnet.tzkt.io} contracts: ticketer: kind: tezos - address: KT1PmYUomF3HDxsGWYQUCbLi2X8WvT7ZHv8o + address: KT1AAi4DCQiTUv5MYoXtdiFwUrPH3t3Yhkjo typename: ticketer ticket_helper: kind: tezos - address: KT1TZg9EwGHKbfWvsHGsqBjm3J5NhJBtHPKX + address: KT1FcXb4oFBWtUVbEa96Do4DfQZXn6878yu1 typename: ticket_helper rollup: kind: tezos - address: sr1QgYF6ARMSLcWyAX4wFDrWFaZTyy4twbqe + address: sr1GBHEgzZmpWH4URqshZEZFCxBpqzi6ahvL typename: rollup indexes: diff --git a/src/demo_tezos_etherlink/handlers/batch.py 
b/src/demo_tezos_etherlink/handlers/batch.py new file mode 100644 index 000000000..c2c321ae0 --- /dev/null +++ b/src/demo_tezos_etherlink/handlers/batch.py @@ -0,0 +1,12 @@ +from collections.abc import Iterable + +from dipdup.context import HandlerContext +from dipdup.index import MatchedHandler + + +async def batch( + ctx: HandlerContext, + handlers: Iterable[MatchedHandler], +) -> None: + for handler in handlers: + await ctx.fire_matched_handler(handler) diff --git a/src/demo_tezos_etherlink/pyproject.toml b/src/demo_tezos_etherlink/pyproject.toml index 03908a72f..fe638a66b 100644 --- a/src/demo_tezos_etherlink/pyproject.toml +++ b/src/demo_tezos_etherlink/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b2 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_etherlink" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_etherlink" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_etherlink" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_etherlink/types/rollup/tezos_parameters/default.py 
b/src/demo_tezos_etherlink/types/rollup/tezos_parameters/default.py index f4b23ce20..d5201c3a5 100644 --- a/src/demo_tezos_etherlink/types/rollup/tezos_parameters/default.py +++ b/src/demo_tezos_etherlink/types/rollup/tezos_parameters/default.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b2 +# generated by DipDup 8.0.0 from __future__ import annotations @@ -7,7 +7,7 @@ from pydantic import RootModel -class Data(BaseModel): +class Content(BaseModel): model_config = ConfigDict( extra='forbid', ) @@ -20,7 +20,7 @@ class Ticket(BaseModel): extra='forbid', ) address: str - data: Data + content: Content amount: str diff --git a/src/demo_tezos_etherlink/types/rollup/tezos_storage.py b/src/demo_tezos_etherlink/types/rollup/tezos_storage.py index 978c7d363..2f20e6e87 100644 --- a/src/demo_tezos_etherlink/types/rollup/tezos_storage.py +++ b/src/demo_tezos_etherlink/types/rollup/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b2 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_etherlink/types/ticket_helper/tezos_parameters/default.py b/src/demo_tezos_etherlink/types/ticket_helper/tezos_parameters/default.py index 697663801..3d60c5f4d 100644 --- a/src/demo_tezos_etherlink/types/ticket_helper/tezos_parameters/default.py +++ b/src/demo_tezos_etherlink/types/ticket_helper/tezos_parameters/default.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b2 +# generated by DipDup 8.0.0 from __future__ import annotations @@ -6,7 +6,7 @@ from pydantic import ConfigDict -class Data(BaseModel): +class Content(BaseModel): model_config = ConfigDict( extra='forbid', ) @@ -19,5 +19,5 @@ class DefaultParameter(BaseModel): extra='forbid', ) address: str - data: Data + content: Content amount: str diff --git a/src/demo_tezos_etherlink/types/ticket_helper/tezos_storage.py b/src/demo_tezos_etherlink/types/ticket_helper/tezos_storage.py index 84e94a61a..e93127277 100644 --- a/src/demo_tezos_etherlink/types/ticket_helper/tezos_storage.py +++ 
b/src/demo_tezos_etherlink/types/ticket_helper/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b2 +# generated by DipDup 8.0.0 from __future__ import annotations @@ -32,7 +32,7 @@ class Context(BaseModel): model_config = ConfigDict( extra='forbid', ) - routing_info: str + receiver: str rollup: str @@ -42,5 +42,6 @@ class TicketHelperStorage(BaseModel): ) token: Token | Token1 ticketer: str + erc_proxy: str context: Context | None = None metadata: dict[str, str] diff --git a/src/demo_tezos_etherlink/types/ticketer/tezos_parameters/deposit.py b/src/demo_tezos_etherlink/types/ticketer/tezos_parameters/deposit.py index 5e295b9a6..82103d4c4 100644 --- a/src/demo_tezos_etherlink/types/ticketer/tezos_parameters/deposit.py +++ b/src/demo_tezos_etherlink/types/ticketer/tezos_parameters/deposit.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b2 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_etherlink/types/ticketer/tezos_parameters/withdraw.py b/src/demo_tezos_etherlink/types/ticketer/tezos_parameters/withdraw.py index 61d122e09..c931eb408 100644 --- a/src/demo_tezos_etherlink/types/ticketer/tezos_parameters/withdraw.py +++ b/src/demo_tezos_etherlink/types/ticketer/tezos_parameters/withdraw.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b2 +# generated by DipDup 8.0.0 from __future__ import annotations @@ -6,7 +6,7 @@ from pydantic import ConfigDict -class Data(BaseModel): +class Content(BaseModel): model_config = ConfigDict( extra='forbid', ) @@ -19,7 +19,7 @@ class Ticket(BaseModel): extra='forbid', ) address: str - data: Data + content: Content amount: str diff --git a/src/demo_tezos_etherlink/types/ticketer/tezos_storage.py b/src/demo_tezos_etherlink/types/ticketer/tezos_storage.py index 18ef96185..98dd02f27 100644 --- a/src/demo_tezos_etherlink/types/ticketer/tezos_storage.py +++ b/src/demo_tezos_etherlink/types/ticketer/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b2 +# generated by DipDup 8.0.0 
from __future__ import annotations @@ -6,14 +6,6 @@ from pydantic import ConfigDict -class Content(BaseModel): - model_config = ConfigDict( - extra='forbid', - ) - nat: str - bytes: str | None = None - - class Token(BaseModel): model_config = ConfigDict( extra='forbid', @@ -36,10 +28,19 @@ class Token1(BaseModel): fa2: Fa2 +class Content(BaseModel): + model_config = ConfigDict( + extra='forbid', + ) + nat: str + bytes: str | None = None + + class TicketerStorage(BaseModel): model_config = ConfigDict( extra='forbid', ) - content: Content metadata: dict[str, str] token: Token | Token1 + content: Content + total_supply: str diff --git a/src/demo_tezos_events/Makefile b/src/demo_tezos_events/Makefile index 55d8f30a6..8af63b565 100644 --- a/src/demo_tezos_events/Makefile +++ b/src/demo_tezos_events/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . 
## diff --git a/src/demo_tezos_events/deploy/compose.sqlite.yaml b/src/demo_tezos_events/deploy/compose.sqlite.yaml index 5b79fca0f..2b176fb69 100644 --- a/src/demo_tezos_events/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_events/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_events services: diff --git a/src/demo_tezos_events/deploy/compose.swarm.yaml b/src/demo_tezos_events/deploy/compose.swarm.yaml index 18f4bcdcc..56ed4adad 100644 --- a/src/demo_tezos_events/deploy/compose.swarm.yaml +++ b/src/demo_tezos_events/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_events services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_events/deploy/compose.yaml b/src/demo_tezos_events/deploy/compose.yaml index e06291403..761278504 100644 --- a/src/demo_tezos_events/deploy/compose.yaml +++ b/src/demo_tezos_events/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_events services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_events/pyproject.toml b/src/demo_tezos_events/pyproject.toml index 2089ab459..b4765adc1 100644 --- a/src/demo_tezos_events/pyproject.toml +++ b/src/demo_tezos_events/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_events" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an 
entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_events" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_events" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_events/types/events_contract/tezos_events/move.py b/src/demo_tezos_events/types/events_contract/tezos_events/move.py index 6afa1c09b..fe390214b 100644 --- a/src/demo_tezos_events/types/events_contract/tezos_events/move.py +++ b/src/demo_tezos_events/types/events_contract/tezos_events/move.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_events/types/events_contract/tezos_events/roll.py b/src/demo_tezos_events/types/events_contract/tezos_events/roll.py index 6f4c2d24f..cfe47ad7b 100644 --- a/src/demo_tezos_events/types/events_contract/tezos_events/roll.py +++ b/src/demo_tezos_events/types/events_contract/tezos_events/roll.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_factories/Makefile b/src/demo_tezos_factories/Makefile index 0a9eaed32..a3e01c1f8 100644 --- a/src/demo_tezos_factories/Makefile +++ b/src/demo_tezos_factories/Makefile @@ -13,6 +13,15 @@ help: ## 
Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . ## diff --git a/src/demo_tezos_factories/deploy/compose.sqlite.yaml b/src/demo_tezos_factories/deploy/compose.sqlite.yaml index 7e2293586..2be1d6d2c 100644 --- a/src/demo_tezos_factories/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_factories/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_factories services: diff --git a/src/demo_tezos_factories/deploy/compose.swarm.yaml b/src/demo_tezos_factories/deploy/compose.swarm.yaml index 603c2c672..e7942b590 100644 --- a/src/demo_tezos_factories/deploy/compose.swarm.yaml +++ b/src/demo_tezos_factories/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_factories services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_factories/deploy/compose.yaml b/src/demo_tezos_factories/deploy/compose.yaml index d4fdfedf7..786f57c30 100644 --- a/src/demo_tezos_factories/deploy/compose.yaml +++ b/src/demo_tezos_factories/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_factories services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_factories/pyproject.toml 
b/src/demo_tezos_factories/pyproject.toml index 379d86c86..78ca245ac 100644 --- a/src/demo_tezos_factories/pyproject.toml +++ b/src/demo_tezos_factories/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_factories" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_factories" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_factories" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_factories/types/factory/tezos_storage.py b/src/demo_tezos_factories/types/factory/tezos_storage.py index c9b962343..362eb32fa 100644 --- a/src/demo_tezos_factories/types/factory/tezos_storage.py +++ b/src/demo_tezos_factories/types/factory/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_factories/types/token/tezos_parameters/transfer.py b/src/demo_tezos_factories/types/token/tezos_parameters/transfer.py index 03adbf083..7af7a6449 100644 --- 
a/src/demo_tezos_factories/types/token/tezos_parameters/transfer.py +++ b/src/demo_tezos_factories/types/token/tezos_parameters/transfer.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_factories/types/token/tezos_storage.py b/src/demo_tezos_factories/types/token/tezos_storage.py index f35447488..755cb2523 100644 --- a/src/demo_tezos_factories/types/token/tezos_storage.py +++ b/src/demo_tezos_factories/types/token/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_head/Makefile b/src/demo_tezos_head/Makefile index 5da90fe7a..81838ec03 100644 --- a/src/demo_tezos_head/Makefile +++ b/src/demo_tezos_head/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . 
## diff --git a/src/demo_tezos_head/deploy/compose.sqlite.yaml b/src/demo_tezos_head/deploy/compose.sqlite.yaml index 13d1949e5..c25c253ba 100644 --- a/src/demo_tezos_head/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_head/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_head services: diff --git a/src/demo_tezos_head/deploy/compose.swarm.yaml b/src/demo_tezos_head/deploy/compose.swarm.yaml index 0566f5d9c..6683b4cfa 100644 --- a/src/demo_tezos_head/deploy/compose.swarm.yaml +++ b/src/demo_tezos_head/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_head services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_head/deploy/compose.yaml b/src/demo_tezos_head/deploy/compose.yaml index fc6595cad..b853b9491 100644 --- a/src/demo_tezos_head/deploy/compose.yaml +++ b/src/demo_tezos_head/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_head services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_head/pyproject.toml b/src/demo_tezos_head/pyproject.toml index bd2fd91c0..8a4d16701 100644 --- a/src/demo_tezos_head/pyproject.toml +++ b/src/demo_tezos_head/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_head" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make 
format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_head" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_head" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_nft_marketplace/Makefile b/src/demo_tezos_nft_marketplace/Makefile index 26f580fb4..99ffe533f 100644 --- a/src/demo_tezos_nft_marketplace/Makefile +++ b/src/demo_tezos_nft_marketplace/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . 
## diff --git a/src/demo_tezos_nft_marketplace/deploy/compose.sqlite.yaml b/src/demo_tezos_nft_marketplace/deploy/compose.sqlite.yaml index 0e77ebcfb..df0fabd22 100644 --- a/src/demo_tezos_nft_marketplace/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_nft_marketplace/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_nft_marketplace services: diff --git a/src/demo_tezos_nft_marketplace/deploy/compose.swarm.yaml b/src/demo_tezos_nft_marketplace/deploy/compose.swarm.yaml index 02888c062..1c0de4265 100644 --- a/src/demo_tezos_nft_marketplace/deploy/compose.swarm.yaml +++ b/src/demo_tezos_nft_marketplace/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_nft_marketplace services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_nft_marketplace/deploy/compose.yaml b/src/demo_tezos_nft_marketplace/deploy/compose.yaml index 424b3b42d..77c059f16 100644 --- a/src/demo_tezos_nft_marketplace/deploy/compose.yaml +++ b/src/demo_tezos_nft_marketplace/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_nft_marketplace services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_nft_marketplace/pyproject.toml b/src/demo_tezos_nft_marketplace/pyproject.toml index 47a6b70ee..66d1db1c6 100644 --- a/src/demo_tezos_nft_marketplace/pyproject.toml +++ b/src/demo_tezos_nft_marketplace/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_nft_marketplace" 
version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_nft_marketplace" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_nft_marketplace" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/cancel_swap.py b/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/cancel_swap.py index abf32b025..0d3fb1388 100644 --- a/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/cancel_swap.py +++ b/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/cancel_swap.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/collect.py b/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/collect.py index 0fc516d9e..adcf10c3a 100644 --- a/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/collect.py +++ b/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/collect.py @@ -1,4 +1,4 @@ -# generated by 
DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/mint_objkt.py b/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/mint_objkt.py index 8c2280b28..a1efa64cb 100644 --- a/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/mint_objkt.py +++ b/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/mint_objkt.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/swap.py b/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/swap.py index f33b76aaf..8c95b7bfc 100644 --- a/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/swap.py +++ b/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_parameters/swap.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_storage.py b/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_storage.py index e90795ed6..8ad1f5831 100644 --- a/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_storage.py +++ b/src/demo_tezos_nft_marketplace/types/hen_minter/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_nft_marketplace/types/hen_objkts/tezos_parameters/mint.py b/src/demo_tezos_nft_marketplace/types/hen_objkts/tezos_parameters/mint.py index 1b11e6aa6..6bac35298 100644 --- a/src/demo_tezos_nft_marketplace/types/hen_objkts/tezos_parameters/mint.py +++ b/src/demo_tezos_nft_marketplace/types/hen_objkts/tezos_parameters/mint.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git 
a/src/demo_tezos_nft_marketplace/types/hen_objkts/tezos_storage.py b/src/demo_tezos_nft_marketplace/types/hen_objkts/tezos_storage.py index 7e662c431..011360c9d 100644 --- a/src/demo_tezos_nft_marketplace/types/hen_objkts/tezos_storage.py +++ b/src/demo_tezos_nft_marketplace/types/hen_objkts/tezos_storage.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_raw/Makefile b/src/demo_tezos_raw/Makefile index 08eddc4dd..ae4a0644d 100644 --- a/src/demo_tezos_raw/Makefile +++ b/src/demo_tezos_raw/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . 
## diff --git a/src/demo_tezos_raw/deploy/compose.sqlite.yaml b/src/demo_tezos_raw/deploy/compose.sqlite.yaml index 31fcdfdf5..087a2f4b1 100644 --- a/src/demo_tezos_raw/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_raw/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_raw services: diff --git a/src/demo_tezos_raw/deploy/compose.swarm.yaml b/src/demo_tezos_raw/deploy/compose.swarm.yaml index 796079517..c38dc0edf 100644 --- a/src/demo_tezos_raw/deploy/compose.swarm.yaml +++ b/src/demo_tezos_raw/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_raw services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_raw/deploy/compose.yaml b/src/demo_tezos_raw/deploy/compose.yaml index b84178109..56d85aa3e 100644 --- a/src/demo_tezos_raw/deploy/compose.yaml +++ b/src/demo_tezos_raw/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_raw services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_raw/pyproject.toml b/src/demo_tezos_raw/pyproject.toml index 5189a502f..a7ffddb03 100644 --- a/src/demo_tezos_raw/pyproject.toml +++ b/src/demo_tezos_raw/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_raw" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = 
"Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_raw" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_raw" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_token/Makefile b/src/demo_tezos_token/Makefile index 39a5639da..cbe1e5d65 100644 --- a/src/demo_tezos_token/Makefile +++ b/src/demo_tezos_token/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . 
## diff --git a/src/demo_tezos_token/deploy/compose.sqlite.yaml b/src/demo_tezos_token/deploy/compose.sqlite.yaml index 039321ead..2e9dad75a 100644 --- a/src/demo_tezos_token/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_token/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_token services: diff --git a/src/demo_tezos_token/deploy/compose.swarm.yaml b/src/demo_tezos_token/deploy/compose.swarm.yaml index 85cd2f533..7641f1275 100644 --- a/src/demo_tezos_token/deploy/compose.swarm.yaml +++ b/src/demo_tezos_token/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_token services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_token/deploy/compose.yaml b/src/demo_tezos_token/deploy/compose.yaml index 856e0d784..c0919eb35 100644 --- a/src/demo_tezos_token/deploy/compose.yaml +++ b/src/demo_tezos_token/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_token services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_token/pyproject.toml b/src/demo_tezos_token/pyproject.toml index 4e45de7e5..a00b3e8fa 100644 --- a/src/demo_tezos_token/pyproject.toml +++ b/src/demo_tezos_token/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_token" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} 
-format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_token" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_token" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_token/types/tzbtc/tezos_parameters/mint.py b/src/demo_tezos_token/types/tzbtc/tezos_parameters/mint.py index 075473511..9f3c9b495 100644 --- a/src/demo_tezos_token/types/tzbtc/tezos_parameters/mint.py +++ b/src/demo_tezos_token/types/tzbtc/tezos_parameters/mint.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_token/types/tzbtc/tezos_parameters/transfer.py b/src/demo_tezos_token/types/tzbtc/tezos_parameters/transfer.py index b1c6ed4e7..fc96d1a65 100644 --- a/src/demo_tezos_token/types/tzbtc/tezos_parameters/transfer.py +++ b/src/demo_tezos_token/types/tzbtc/tezos_parameters/transfer.py @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_token/types/tzbtc/tezos_storage.py b/src/demo_tezos_token/types/tzbtc/tezos_storage.py index 26812dadf..65bc30c0c 100644 --- a/src/demo_tezos_token/types/tzbtc/tezos_storage.py +++ b/src/demo_tezos_token/types/tzbtc/tezos_storage.py @@ -1,4 +1,4 @@ -# 
generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 from __future__ import annotations diff --git a/src/demo_tezos_token_balances/Makefile b/src/demo_tezos_token_balances/Makefile index ecdbca7cc..fdff18d25 100644 --- a/src/demo_tezos_token_balances/Makefile +++ b/src/demo_tezos_token_balances/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . ## diff --git a/src/demo_tezos_token_balances/deploy/compose.sqlite.yaml b/src/demo_tezos_token_balances/deploy/compose.sqlite.yaml index 52ebc9d62..7b635d9bc 100644 --- a/src/demo_tezos_token_balances/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_token_balances/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_token_balances services: diff --git a/src/demo_tezos_token_balances/deploy/compose.swarm.yaml b/src/demo_tezos_token_balances/deploy/compose.swarm.yaml index 45588b09f..9dd27099f 100644 --- a/src/demo_tezos_token_balances/deploy/compose.swarm.yaml +++ b/src/demo_tezos_token_balances/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_token_balances services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_token_balances/deploy/compose.yaml b/src/demo_tezos_token_balances/deploy/compose.yaml index e7c8d4bfc..97970e77e 100644 --- a/src/demo_tezos_token_balances/deploy/compose.yaml +++ b/src/demo_tezos_token_balances/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: 
demo_tezos_token_balances services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_token_balances/pyproject.toml b/src/demo_tezos_token_balances/pyproject.toml index 582c1e62c..8c441a637 100644 --- a/src/demo_tezos_token_balances/pyproject.toml +++ b/src/demo_tezos_token_balances/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_token_balances" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_token_balances" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_token_balances" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/demo_tezos_token_transfers/Makefile b/src/demo_tezos_token_transfers/Makefile index 6a8d8efa1..b492a6bf8 100644 --- a/src/demo_tezos_token_transfers/Makefile 
+++ b/src/demo_tezos_token_transfers/Makefile @@ -13,6 +13,15 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies + pdm install + +update: ## Update dependencies + pdm update + dipdup self update -q + format: ## Format with all tools make black @@ -28,7 +37,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . ## diff --git a/src/demo_tezos_token_transfers/deploy/compose.sqlite.yaml b/src/demo_tezos_token_transfers/deploy/compose.sqlite.yaml index bf0d2c553..9f6ac570a 100644 --- a/src/demo_tezos_token_transfers/deploy/compose.sqlite.yaml +++ b/src/demo_tezos_token_transfers/deploy/compose.sqlite.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_token_transfers services: diff --git a/src/demo_tezos_token_transfers/deploy/compose.swarm.yaml b/src/demo_tezos_token_transfers/deploy/compose.swarm.yaml index 007ab4215..e2d395282 100644 --- a/src/demo_tezos_token_transfers/deploy/compose.swarm.yaml +++ b/src/demo_tezos_token_transfers/deploy/compose.swarm.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_token_transfers services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/demo_tezos_token_transfers/deploy/compose.yaml b/src/demo_tezos_token_transfers/deploy/compose.yaml index e993cdf94..8f057db2a 100644 --- a/src/demo_tezos_token_transfers/deploy/compose.yaml +++ b/src/demo_tezos_token_transfers/deploy/compose.yaml @@ -1,4 +1,3 @@ -version: "3.8" name: demo_tezos_token_transfers services: @@ -19,7 +18,7 @@ services: db: image: postgres:15 ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:/var/lib/postgresql/data restart: always @@ -37,7 +36,7 @@ services: hasura: image: 
hasura/graphql-engine:latest ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db restart: always diff --git a/src/demo_tezos_token_transfers/pyproject.toml b/src/demo_tezos_token_transfers/pyproject.toml index 03e1b2035..925e1b2b9 100644 --- a/src/demo_tezos_token_transfers/pyproject.toml +++ b/src/demo_tezos_token_transfers/pyproject.toml @@ -1,4 +1,4 @@ -# generated by DipDup 8.0.0b5 +# generated by DipDup 8.0.0 [project] name = "demo_tezos_token_transfers" version = "0.0.1" @@ -20,22 +20,11 @@ dev = [ "mypy", ] -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} - [tool.black] line-length = 120 target-version = ['py312'] skip-string-normalization = true +extend-exclude = "demo_tezos_token_transfers" [tool.ruff] line-length = 120 @@ -50,6 +39,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "demo_tezos_token_transfers" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true [build-system] requires = ["pdm-backend"] diff --git a/src/dipdup/abi/evm.py b/src/dipdup/abi/evm.py index c1d63e0ad..ef703a432 100644 --- a/src/dipdup/abi/evm.py +++ b/src/dipdup/abi/evm.py @@ -169,7 +169,7 @@ def sighash_from_abi(abi_item: dict[str, Any]) -> str: raise FrameworkException(f"`{abi_item['name']}` is not a function; can't get sighash") signature = f'{abi_item["name"]}({",".join([i["type"] for i in abi_item["inputs"]])})' - return 
Web3.keccak(text=signature).hex()[:10] + return '0x' + Web3.keccak(text=signature).hex()[:8] def topic0_from_abi(event: dict[str, Any]) -> str: diff --git a/src/dipdup/api.py b/src/dipdup/api.py index 1703eef0c..95fe5d5c2 100644 --- a/src/dipdup/api.py +++ b/src/dipdup/api.py @@ -51,6 +51,7 @@ async def _performance(ctx: 'DipDupContext', request: web.Request) -> web.Respon async def create_api(ctx: DipDupContext) -> web.Application: routes = web.RouteTableDef() + routes.get('/')(_method_wrapper(ctx, _performance)) routes.get('/performance')(_method_wrapper(ctx, _performance)) routes.post('/add_index')(_method_wrapper(ctx, _add_index)) routes.post('/add_contract')(_method_wrapper(ctx, _add_contract)) diff --git a/src/dipdup/cli.py b/src/dipdup/cli.py index dc10a2177..c43742aa4 100644 --- a/src/dipdup/cli.py +++ b/src/dipdup/cli.py @@ -32,7 +32,18 @@ if TYPE_CHECKING: from dipdup.config import DipDupConfig +ROOT_CONFIG = 'dipdup.yaml' +CONFIG_RE = r'dipdup.*\.ya?ml' + +# NOTE: Do not try to load config for these commands as they don't need it +NO_CONFIG_CMDS = { + 'new', + 'migrate', + 'config', +} + +_logger = logging.getLogger(__name__) _click_wrap_text = click.formatting.wrap_text @@ -47,21 +58,51 @@ def _wrap_text(text: str, *a: Any, **kw: Any) -> str: click.formatting.wrap_text = _wrap_text -ROOT_CONFIG = 'dipdup.yaml' -CONFIG_RE = r'dipdup.*\.ya?ml' -# NOTE: Do not try to load config for these commands as they don't need it -NO_CONFIG_CMDS = { - 'new', - 'install', - 'uninstall', - 'update', - 'migrate', - 'config', -} +def _get_paths( + params: dict[str, Any], +) -> tuple[list[Path], list[Path]]: + from dipdup.exceptions import ConfigurationError + config_args: list[str] = params.pop('config', []) + env_file_args: list[str] = params.pop('env_file', []) + config_alias_args: list[str] = params.pop('c', []) -_logger = logging.getLogger(__name__) + config_paths: list[Path] = [] + env_file_paths: list[Path] = [] + + if config_alias_args: + if config_args: + raise 
ConfigurationError('Cannot use both `-c` and `-C` options at the same time') + config_args = [ + ROOT_CONFIG, + *[f'configs/dipdup.{name}.yaml' for name in config_alias_args], + ] + config_args = config_args or [ROOT_CONFIG] + + for arg in config_args: + path = Path(arg) + if path.is_dir(): + path = path / ROOT_CONFIG + if not path.is_file(): + raise ConfigurationError(f'Config file not found: {path}') + config_paths.append(path) + + for arg in env_file_args: + path = Path(arg) + if not path.is_file(): + raise ConfigurationError(f'Env file not found: {path}') + env_file_paths.append(path) + + return config_paths, env_file_paths + + +def _load_env_files(env_file_paths: list[Path]) -> None: + for path in env_file_paths: + from dotenv import load_dotenv + + _logger.info('Applying env_file `%s`', path) + load_dotenv(path, override=True) def echo(message: str, err: bool = False, **styles: Any) -> None: @@ -133,11 +174,13 @@ def _cli_unwrapper(cmd: click.Command) -> Callable[..., Coroutine[Any, Any, None async def _check_version() -> None: if '+editable' in __version__: return + + _skip_msg = 'Set `DIPDUP_NO_VERSION_CHECK` variable to hide this message.' if not all(c.isdigit() or c == '.' for c in __version__): _logger.warning( 'You are running a pre-release version of DipDup. Please, report any issues to the GitHub repository.' ) - _logger.info('Set `advanced.skip_version_check` flag in config to hide this message.') + _logger.info(_skip_msg) return import aiohttp @@ -150,8 +193,12 @@ async def _check_version() -> None: latest_version = response_json['tag_name'] if __version__ != latest_version: - _logger.warning('You are running an outdated version of DipDup. Please run `dipdup update`.') - _logger.info('Set `skip_version_check` flag in config to hide this message.') + _logger.warning( + 'You are running DipDup %s, while %s is available. 
Please run `dipdup update` to upgrade.', + __version__, + latest_version, + ) + _logger.info(_skip_msg) def _skip_cli_group() -> bool: @@ -161,15 +208,15 @@ def _skip_cli_group() -> bool: is_empty_group = args in ( ['config'], ['hasura'], + ['package'], ['schema'], ) # NOTE: Simple helpers that don't use any of our cli boilerplate - is_script = args[0] in ( - 'self', + is_script_group = args[0] in ( 'report', + 'self', ) - if not (is_help or is_empty_group or is_script): - _logger.debug('Skipping cli group') + if not (is_help or is_empty_group or is_script_group): return False return True @@ -186,7 +233,7 @@ def _skip_cli_group() -> bool: type=str, multiple=True, help='A path to DipDup project config.', - default=[ROOT_CONFIG], + default=[], metavar='PATH', envvar='DIPDUP_CONFIG', ) @@ -200,9 +247,17 @@ def _skip_cli_group() -> bool: metavar='PATH', envvar='DIPDUP_ENV_FILE', ) +@click.option( + '-C', + type=str, + multiple=True, + help='A shorthand for `-c . -c configs/dipdup..yaml`', + default=[], + metavar='NAME', +) @click.pass_context @_cli_wrapper -async def cli(ctx: click.Context, config: list[str], env_file: list[str]) -> None: +async def cli(ctx: click.Context, config: list[str], env_file: list[str], c: list[str]) -> None: set_up_process() if _skip_cli_group(): @@ -215,23 +270,10 @@ async def cli(ctx: click.Context, config: list[str], env_file: list[str]) -> Non except AttributeError: _logger.warning("You're running an outdated Python 3.12 release; consider upgrading") - from dotenv import load_dotenv - - from dipdup.exceptions import ConfigurationError from dipdup.sys import set_up_logging set_up_logging() - env_file_paths = [Path(file) for file in env_file] - config_paths = [Path(file) for file in config] - - # NOTE: Apply env files before loading the config - for env_path in env_file_paths: - if not env_path.is_file(): - raise ConfigurationError(f'env file `{env_path}` does not exist') - _logger.info('Applying env_file `%s`', env_path) - 
load_dotenv(env_path, override=True) - # NOTE: These commands need no other preparations if ctx.invoked_subcommand in NO_CONFIG_CMDS: logging.getLogger('dipdup').setLevel(logging.INFO) @@ -241,6 +283,11 @@ async def cli(ctx: click.Context, config: list[str], env_file: list[str]) -> Non from dipdup.exceptions import InitializationRequiredError from dipdup.package import DipDupPackage + # NOTE: Early config loading; some commands do it later + config_paths, env_file_paths = _get_paths(ctx.params) + # NOTE: Apply env files before loading the config + _load_env_files(env_file_paths) + _config = DipDupConfig.load( paths=config_paths, environment=True, @@ -258,13 +305,13 @@ async def cli(ctx: click.Context, config: list[str], env_file: list[str]) -> Non _config.initialize() # NOTE: Fire and forget, do not block instant commands - if not any((_config.advanced.skip_version_check, env.TEST, env.CI, env.NO_VERSION_CHECK)): + if not (env.TEST or env.CI or env.NO_VERSION_CHECK): fire_and_forget(_check_version()) try: # NOTE: Avoid early import errors if project package is incomplete. # NOTE: `ConfigurationError` will be raised later with more details. 
- DipDupPackage(_config.package_path).initialize() + DipDupPackage(_config.package_path, quiet=True).initialize() except Exception as e: if ctx.invoked_subcommand != 'init': raise InitializationRequiredError(f'Failed to create a project package: {e}') from e @@ -338,9 +385,10 @@ async def migrate(ctx: click.Context, dry_run: bool) -> None: from dipdup.config import DipDupConfig from dipdup.migrations.three_zero import ThreeZeroProjectMigration - # NOTE: Extract paths from arguments since we can't load config with old spec version + # NOTE: Late loading: can't load config with old spec version assert ctx.parent - config_paths: list[Path] = [Path(file) for file in ctx.parent.params['config']] + config_paths, env_file_paths = _get_paths(ctx.parent.params) + _load_env_files(env_file_paths) migration = ThreeZeroProjectMigration(tuple(config_paths), dry_run) migration.migrate() @@ -360,6 +408,7 @@ async def migrate(ctx: click.Context, dry_run: bool) -> None: ctx=ctx, base=True, force=True, + include=[], ) @@ -391,7 +440,10 @@ async def config_export( from dipdup.config import DipDupConfig from dipdup.yaml import DipDupYAMLConfig - config_paths = [Path(c) for c in ctx.parent.parent.params['config']] # type: ignore[union-attr] + # NOTE: Late loading; cli() was skipped. + config_paths, env_file_paths = _get_paths(ctx.parent.parent.params) # type: ignore[union-attr] + _load_env_files(env_file_paths) + if raw: raw_config, _ = DipDupYAMLConfig.load( paths=config_paths, @@ -433,7 +485,9 @@ async def config_env( """ from dipdup.yaml import DipDupYAMLConfig - config_paths = [Path(c) for c in ctx.parent.parent.params['config']] # type: ignore[union-attr] + # NOTE: Late loading; cli() was skipped. 
+ config_paths, env_file_paths = _get_paths(ctx.parent.parent.params) # type: ignore[union-attr] + _load_env_files(env_file_paths) _, environment = DipDupYAMLConfig.load( paths=config_paths, @@ -766,6 +820,7 @@ async def self(ctx: click.Context) -> None: @click.option('--ref', '-r', default=None, help='Install DipDup from specific git ref.') @click.option('--path', '-p', default=None, help='Install DipDup from local path.') @click.option('--pre', is_flag=True, help='Include pre-release versions.') +@click.option('--editable', '-e', is_flag=True, help='Install DipDup in editable mode.') @_cli_wrapper async def self_install( ctx: click.Context, @@ -775,6 +830,7 @@ async def self_install( ref: str | None, path: str | None, pre: bool, + editable: bool, ) -> None: """Install DipDup for the current user.""" import dipdup.install @@ -788,6 +844,7 @@ async def self_install( ref=ref, path=path, pre=pre, + editable=editable, with_pdm=replay is not None and replay['package_manager'] == 'pdm', with_poetry=replay is not None and replay['package_manager'] == 'poetry', ) @@ -831,6 +888,7 @@ async def self_update( ref=None, path=None, pre=pre, + update=True, with_pdm=replay is not None and replay['package_manager'] == 'pdm', with_poetry=replay is not None and replay['package_manager'] == 'poetry', ) diff --git a/src/dipdup/codegen/__init__.py b/src/dipdup/codegen/__init__.py index f1a9cbac9..18bece4d6 100644 --- a/src/dipdup/codegen/__init__.py +++ b/src/dipdup/codegen/__init__.py @@ -18,7 +18,6 @@ from dipdup.config import IndexTemplateConfig from dipdup.config._mixin import CallbackMixin from dipdup.datasources import Datasource -from dipdup.exceptions import FrameworkException from dipdup.package import DEFAULT_ENV from dipdup.package import KEEP_MARKER from dipdup.package import PACKAGE_MARKER @@ -84,12 +83,13 @@ async def init( self._package.initialize() # NOTE: Common files - replay = self._package.replay if base or self._include: - if not replay: - raise 
FrameworkException('`--base` option passed but `configs/replay.yaml` file is missing') _logger.info('Recreating base template with replay.yaml') - render_base(replay, force, self._include) + render_base( + answers=self._package.replay, + force=force, + include=self._include, + ) if self._include: force = any(str(path).startswith('types') for path in self._include) @@ -252,7 +252,7 @@ async def _generate_models(self) -> None: def _cleanup_schemas(self) -> None: rmtree(self._package.schemas) - self._package.schemas.mkdir() + self._package.schemas.mkdir(parents=True, exist_ok=True) class CommonCodeGenerator(_BaseCodeGenerator): diff --git a/src/dipdup/config/__init__.py b/src/dipdup/config/__init__.py index 64bd63164..51c176bc6 100644 --- a/src/dipdup/config/__init__.py +++ b/src/dipdup/config/__init__.py @@ -15,7 +15,6 @@ from __future__ import annotations -import hashlib import importlib import inspect import logging.config @@ -82,10 +81,10 @@ def _valid_url(v: str, ws: bool) -> str: _T = TypeVar('_T') Alias = Annotated[_T, NoneType] -Hex = Annotated[str, BeforeValidator(lambda v: hex(v) if isinstance(v, int) else v)] -ToStr = Annotated[str | float, BeforeValidator(lambda v: str(v))] -Url = Annotated[str, BeforeValidator(lambda v: _valid_url(v, ws=False))] -WsUrl = Annotated[str, BeforeValidator(lambda v: _valid_url(v, ws=True))] +type Hex = Annotated[str, BeforeValidator(lambda v: hex(v) if isinstance(v, int) else v)] # type: ignore +type ToStr = Annotated[str | float, BeforeValidator(lambda v: str(v))] # type: ignore +type Url = Annotated[str, BeforeValidator(lambda v: _valid_url(v, ws=False))] # type: ignore +type WsUrl = Annotated[str, BeforeValidator(lambda v: _valid_url(v, ws=True))] # type: ignore _logger = logging.getLogger(__name__) @@ -343,6 +342,8 @@ def get_subscriptions(self) -> set[Subscription]: ... 
def hash(self) -> str: """Calculate hash to ensure config has not changed since last run.""" + import hashlib + # FIXME: How to convert pydantic dataclass into dict without json.dumps? asdict is not recursive. config_json = orjson.dumps(self, default=to_jsonable_python) config_dict = orjson.loads(config_json) @@ -536,7 +537,6 @@ class AdvancedConfig: :param scheduler: `apscheduler` scheduler config. :param postpone_jobs: Do not start job scheduler until all indexes reach the realtime state. :param early_realtime: Establish realtime connection and start collecting messages while sync is in progress (faster, but consumes more RAM). - :param skip_version_check: Disable warning about running unstable or out-of-date DipDup version. :param rollback_depth: A number of levels to keep for rollback. :param decimal_precision: Overwrite precision if it's not guessed correctly based on project models. :param unsafe_sqlite: Disable journaling and data integrity checks. Use only for testing. @@ -547,7 +547,6 @@ class AdvancedConfig: scheduler: dict[str, Any] | None = None postpone_jobs: bool = False early_realtime: bool = False - skip_version_check: bool = False rollback_depth: int | None = None decimal_precision: int | None = None unsafe_sqlite: bool = False @@ -928,7 +927,7 @@ def _resolve_template(self, template_config: IndexTemplateConfig) -> None: if missing_value := re.search(r'<*>', raw_template): raise ConfigurationError( - f'`{template_config.name}` index config is missing required template value `{missing_value}`' + f'`{template_config.name}` index config is missing required template value `{missing_value.group()}`' ) json_template = orjson.loads(raw_template) diff --git a/src/dipdup/config/evm.py b/src/dipdup/config/evm.py index b8ece47de..7229f9fa4 100644 --- a/src/dipdup/config/evm.py +++ b/src/dipdup/config/evm.py @@ -42,7 +42,7 @@ def _validate_evm_address(v: str) -> str: return to_normalized_address(v) -EvmAddress = Annotated[Hex, 
AfterValidator(_validate_evm_address)] +type EvmAddress = Annotated[Hex, AfterValidator(_validate_evm_address)] # type: ignore @dataclass(config=ConfigDict(extra='forbid'), kw_only=True) diff --git a/src/dipdup/config/starknet.py b/src/dipdup/config/starknet.py index ed1f413d9..742c466fb 100644 --- a/src/dipdup/config/starknet.py +++ b/src/dipdup/config/starknet.py @@ -42,7 +42,7 @@ def _validate_starknet_address(v: str) -> str: return v -StarknetAddress = Annotated[Hex, AfterValidator(_validate_starknet_address)] +type StarknetAddress = Annotated[Hex, AfterValidator(_validate_starknet_address)] # type: ignore @dataclass(config=ConfigDict(extra='forbid'), kw_only=True) diff --git a/src/dipdup/config/tezos.py b/src/dipdup/config/tezos.py index a759a670d..02669ecbc 100644 --- a/src/dipdup/config/tezos.py +++ b/src/dipdup/config/tezos.py @@ -46,7 +46,7 @@ def _validate_tezos_address(v: str) -> str: return v -TezosAddress = Annotated[str, AfterValidator(_validate_tezos_address)] +type TezosAddress = Annotated[str, AfterValidator(_validate_tezos_address)] # type: ignore @dataclass(config=ConfigDict(extra='forbid'), kw_only=True) diff --git a/src/dipdup/context.py b/src/dipdup/context.py index 712d5bb9e..5ade61827 100644 --- a/src/dipdup/context.py +++ b/src/dipdup/context.py @@ -335,10 +335,12 @@ async def _spawn_index( raise NotImplementedError for handler_config in index_config.handlers: + handler_config.parent = index_config self.register_handler(handler_config) - batch_handler = BatchHandlerConfig() - batch_handler.parent = index_config - self.register_handler(batch_handler) + + batch_handler = BatchHandlerConfig() + batch_handler.parent = index_config + self.register_handler(batch_handler) await index.initialize_state(state) @@ -503,7 +505,7 @@ def get_coinbase_datasource(self, name: str) -> CoinbaseDatasource: """ return self._get_datasource(name, CoinbaseDatasource) - def get_metadata_datasource(self, name: str) -> TzipMetadataDatasource: + def 
get_tzip_metadata_datasource(self, name: str) -> TzipMetadataDatasource: """Get `metadata` datasource by name :param name: Name of the datasource diff --git a/src/dipdup/database.py b/src/dipdup/database.py index c3e5d6ee3..2fa53cd9b 100644 --- a/src/dipdup/database.py +++ b/src/dipdup/database.py @@ -94,11 +94,17 @@ async def tortoise_wrapper( except asyncpg.exceptions.InvalidPasswordError as e: raise ConfigurationError(f'{e.__class__.__name__}: {e}') from e - if unsafe_sqlite: + if not isinstance(conn, SqliteClient): + pass + elif unsafe_sqlite: _logger.warning('Unsafe SQLite mode enabled; database integrity is not guaranteed!') await conn.execute_script('PRAGMA foreign_keys = OFF') await conn.execute_script('PRAGMA synchronous = OFF') await conn.execute_script('PRAGMA journal_mode = OFF') + else: + await conn.execute_script('PRAGMA foreign_keys = ON') + await conn.execute_script('PRAGMA synchronous = NORMAL') + await conn.execute_script('PRAGMA journal_mode = WAL') # FIXME: Poor logging except (OSError, asyncpg.exceptions.CannotConnectNowError): @@ -194,21 +200,20 @@ async def generate_schema( conn: SupportedClient, name: str, ) -> None: - if isinstance(conn, SqliteClient): - await Tortoise.generate_schemas() - elif isinstance(conn, AsyncpgClient): + if isinstance(conn, AsyncpgClient): await _pg_create_schema(conn, name) - await Tortoise.generate_schemas() - await _pg_create_functions(conn) - await _pg_create_views(conn) - else: - raise NotImplementedError + await Tortoise.generate_schemas() + + if isinstance(conn, AsyncpgClient): + await _pg_run_scripts(conn) -async def _pg_create_functions(conn: AsyncpgClient) -> None: + +async def _pg_run_scripts(conn: AsyncpgClient) -> None: for fn in ( 'dipdup_approve.sql', 'dipdup_wipe.sql', + 'dipdup_status.sql', ): sql_path = Path(__file__).parent / 'sql' / fn await execute_sql(conn, sql_path) @@ -228,12 +233,6 @@ async def get_tables() -> set[str]: raise NotImplementedError -async def _pg_create_views(conn: 
AsyncpgClient) -> None: - sql_path = Path(__file__).parent / 'sql' / 'dipdup_head_status.sql' - # TODO: Configurable interval - await execute_sql(conn, sql_path, HEAD_STATUS_TIMEOUT) - - # FIXME: Private but used in dipdup.hasura async def _pg_get_views(conn: AsyncpgClient, schema_name: str) -> list[str]: return [ diff --git a/src/dipdup/datasources/_subsquid.py b/src/dipdup/datasources/_subsquid.py index 5a5966124..b33c552f2 100644 --- a/src/dipdup/datasources/_subsquid.py +++ b/src/dipdup/datasources/_subsquid.py @@ -12,7 +12,9 @@ from dipdup.exceptions import DatasourceError from dipdup.exceptions import FrameworkException from dipdup.http import safe_exceptions +from dipdup.models import Head from dipdup.models._subsquid import AbstractSubsquidQuery +from dipdup.sys import fire_and_forget QueryT = TypeVar('QueryT', bound=AbstractSubsquidQuery) @@ -41,6 +43,7 @@ class AbstractSubsquidDatasource( def __init__(self, config: Any) -> None: self._started = asyncio.Event() + self._last_level: int = 0 super().__init__(config, False) async def run(self) -> None: @@ -81,12 +84,25 @@ async def query_worker(self, query: QueryT, current_level: int) -> list[dict[str retry_sleep *= self._http_config.retry_multiplier async def initialize(self) -> None: - level = await self.get_head_level() + curr_level = self._last_level + level = self._last_level = await self.get_head_level() if not level: raise DatasourceError('Subsquid is not ready yet', self.name) + if level == curr_level: + return self.set_sync_level(None, level) + fire_and_forget( + Head.update_or_create( + name=self.name, + defaults={ + 'level': level, + 'hash': '', + 'timestamp': 0, + }, + ), + ) async def get_head_level(self) -> int: response = await self.request('get', 'height') diff --git a/src/dipdup/datasources/_web3.py b/src/dipdup/datasources/_web3.py index a495241f2..bb887ea68 100644 --- a/src/dipdup/datasources/_web3.py +++ b/src/dipdup/datasources/_web3.py @@ -12,14 +12,7 @@ async def 
create_web3_client(datasource: 'EvmNodeDatasource') -> 'AsyncWeb3': from web3 import AsyncWeb3 - from web3.middleware.async_cache import async_construct_simple_cache_middleware from web3.providers.async_base import AsyncJSONBaseProvider - from web3.utils.caching import SimpleCache - - from dipdup.performance import caches - - web3_cache = SimpleCache(WEB3_CACHE_SIZE) - caches.add_plain(web3_cache._data, f'{datasource.name}:web3_cache') class ProxyProvider(AsyncJSONBaseProvider): async def make_request(_, method: str, params: list[Any]) -> Any: @@ -30,11 +23,8 @@ async def make_request(_, method: str, params: list[Any]) -> Any: ws=False, ) - web3_client = AsyncWeb3( - provider=ProxyProvider(), - ) - web3_client.middleware_onion.add( - await async_construct_simple_cache_middleware(web3_cache), - 'cache', + return AsyncWeb3( + provider=ProxyProvider( + cache_allowed_requests=True, + ), ) - return web3_client diff --git a/src/dipdup/datasources/tezos_tzkt.py b/src/dipdup/datasources/tezos_tzkt.py index af754c4cb..cc76db0dc 100644 --- a/src/dipdup/datasources/tezos_tzkt.py +++ b/src/dipdup/datasources/tezos_tzkt.py @@ -175,7 +175,7 @@ class MessageBuffer: """ def __init__(self, size: int) -> None: - self._logger = logging.getLogger('dipdup.tzkt') + self._logger = logging.getLogger(__name__) self._size = size self._messages: dict[int, list[BufferedMessage]] = {} @@ -1215,6 +1215,7 @@ def _get_signalr_client(self) -> SignalRClient: self._signalr_client = SignalRClient( url=f'{self._http._url}/v1/ws', max_size=None, + connection_timeout=30, ) self._signalr_client.on_open(self._on_connected) diff --git a/src/dipdup/dipdup.py b/src/dipdup/dipdup.py index d90f7df45..362f19aff 100644 --- a/src/dipdup/dipdup.py +++ b/src/dipdup/dipdup.py @@ -61,6 +61,7 @@ from dipdup.models import Index as IndexState from dipdup.models import IndexStatus from dipdup.models import MessageType +from dipdup.models import Meta from dipdup.models import ReindexingReason from dipdup.models import 
RollbackMessage from dipdup.models import Schema @@ -75,6 +76,7 @@ from dipdup.models.tezos import TezosTokenTransferData from dipdup.package import DipDupPackage from dipdup.performance import caches +from dipdup.performance import get_stats from dipdup.performance import metrics from dipdup.prometheus import Metrics from dipdup.scheduler import SchedulerManager @@ -287,6 +289,13 @@ async def _update_metrics(self) -> None: self._last_levels_nonempty = metrics.levels_nonempty self._last_objects_indexed = metrics.objects_indexed + fire_and_forget( + Meta.update_or_create( + key='dipdup_metrics', + defaults={'value': get_stats()}, + ) + ) + async def _status_loop(self, update_interval: float) -> None: while True: await asyncio.sleep(update_interval) @@ -299,9 +308,17 @@ def _log_status(self) -> None: return progress, left = metrics.progress * 100, int(total - indexed) + scanned_levels = int(metrics.levels_indexed) or int(metrics.levels_nonempty) if not progress: - scanned_levels = int(metrics.levels_indexed) or int(metrics.levels_nonempty) - msg = f'indexing: {scanned_levels:6} levels, estimating...' + if self._indexes: + if scanned_levels: + msg = f'indexing: {scanned_levels:6} levels, estimating...' + elif metrics.objects_indexed: + msg = f'indexing: {metrics.objects_indexed:6} objects, estimating...' + else: + msg = 'indexing: warming up...' 
+ else: + msg = 'no indexes, idling' _logger.info(msg) return diff --git a/src/dipdup/env.py b/src/dipdup/env.py index c274c2ec8..f2bcebd90 100644 --- a/src/dipdup/env.py +++ b/src/dipdup/env.py @@ -50,7 +50,7 @@ def get_package_path(package: str) -> Path: return Path.cwd() / package # NOTE: If cwd is a package, use it - if get_pyproject_name() == package: + if package in {get_pyproject_name(), Path.cwd().name}: return Path.cwd() # NOTE: Detect existing package in current environment diff --git a/src/dipdup/fetcher.py b/src/dipdup/fetcher.py index 339ad5c88..fce0c0678 100644 --- a/src/dipdup/fetcher.py +++ b/src/dipdup/fetcher.py @@ -16,6 +16,7 @@ from dipdup import env from dipdup.exceptions import FrameworkException from dipdup.performance import queues +from dipdup.utils import FormattedLogger if TYPE_CHECKING: from collections.abc import AsyncGenerator @@ -65,17 +66,18 @@ async def yield_by_level( yield items[0].level, items -async def readahead_by_level( +async def _readahead_by_level( fetcher_iter: AsyncIterator[tuple[BufferT, ...]], limit: int, + name: str, ) -> AsyncIterator[tuple[int, tuple[BufferT, ...]]]: if env.LOW_MEMORY: limit = min(limit, 1000) - queue_name = f'fetcher_readahead:{id(fetcher_iter)}' + name = f'{name}:readahead' queue: deque[tuple[int, tuple[BufferT, ...]]] = deque() queues.add_queue( queue, - name=queue_name, + name=name, limit=limit, ) has_more = asyncio.Event() @@ -92,7 +94,7 @@ async def _readahead() -> None: task = asyncio.create_task( _readahead(), - name=f'fetcher:{id(fetcher_iter)}', + name=name, ) while True: @@ -107,7 +109,7 @@ async def _readahead() -> None: with suppress(asyncio.TimeoutError): await asyncio.wait_for(has_more.wait(), timeout=10) - queues.remove_queue(queue_name) + queues.remove_queue(name) class FetcherChannel(ABC, Generic[BufferT, DatasourceT, FilterT]): @@ -155,18 +157,23 @@ class DataFetcher(ABC, Generic[BufferT, DatasourceT]): def __init__( self, + name: str, datasources: tuple[DatasourceT, ...], 
first_level: int, last_level: int, + readahead_limit: int, ) -> None: + self._name = name self._datasources = datasources self._first_level = first_level self._last_level = last_level + self._readahead_limit = readahead_limit + self._logger = FormattedLogger(__name__, fmt=f'{self._name}: ' + '{}') self._buffer: defaultdict[Level, deque[BufferT]] = defaultdict(deque) self._head = 0 def __repr__(self) -> str: - return f'<{self.__class__.__name__} head={self._head} buffer={len(self._buffer)}>' + return f'<{self.__class__.__name__} name={self._name} head={self._head} buffer={len(self._buffer)}>' @property def random_datasource(self) -> DatasourceT: @@ -180,6 +187,18 @@ def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[BufferT, ...]]]: """ ... + async def readahead_by_level( + self, + fetcher_iter: AsyncIterator[tuple[BufferT, ...]], + limit: int | None = None, + ) -> AsyncIterator[tuple[int, tuple[BufferT, ...]]]: + async for level, batch in _readahead_by_level( + fetcher_iter=fetcher_iter, + limit=limit or self._readahead_limit, + name=self._name, + ): + yield level, batch + async def _merged_iter( self, channels: set[FetcherChannel[Any, Any, Any]], diff --git a/src/dipdup/fields.py b/src/dipdup/fields.py index 0c5a8b562..bf3583e9e 100644 --- a/src/dipdup/fields.py +++ b/src/dipdup/fields.py @@ -43,7 +43,6 @@ from tortoise.fields.relational import OneToOneRelation as OneToOneRelation from tortoise.fields.relational import ReverseRelation as ReverseRelation -from dipdup import fields from dipdup.exceptions import FrameworkException if TYPE_CHECKING: @@ -56,7 +55,7 @@ _EnumFieldT = TypeVar('_EnumFieldT', bound=Enum) -class EnumField(fields.Field[_EnumFieldT]): +class EnumField(Field[_EnumFieldT]): """Like CharEnumField but without max_size and additional validation""" indexable = True diff --git a/src/dipdup/hasura.py b/src/dipdup/hasura.py index 14d2f135c..0c084eb03 100644 --- a/src/dipdup/hasura.py +++ b/src/dipdup/hasura.py @@ -441,7 +441,7 @@ async def 
_generate_query_collections_metadata(self) -> list[dict[str, Any]]: queries.append({'name': query_name, 'query': query}) # NOTE: This is the only view we add by ourselves and thus know all params. Won't work for any view. - queries.append(self._format_rest_head_status_query()) + queries.append(self._format_rest_status_query()) return queries @@ -566,14 +566,16 @@ def _format_rest_query(self, name: str, table: str, filter: str, fields: Iterabl ), } - def _format_rest_head_status_query(self) -> dict[str, Any]: - name = 'dipdup_head_status' + def _format_rest_status_query(self) -> dict[str, Any]: + name = 'dipdup_status' + fields = '{type name level size updated_at}' if self._hasura_config.camel_case: name = humps.camelize(name) + fields = fields.replace('updated_at', 'updatedAt') return { 'name': name, - 'query': 'query ' + name + ' ($name: String!) {' + name + '(where: {name: {_eq: $name}}) {status}}', + 'query': 'query ' + name + ' ($name: String!) {' + name + '(where: {name: {_eq: $name}}) ' + fields + '}', } def _format_rest_endpoint(self, query_name: str) -> dict[str, Any]: diff --git a/src/dipdup/http.py b/src/dipdup/http.py index e0bf13fce..c4c5fa9b7 100644 --- a/src/dipdup/http.py +++ b/src/dipdup/http.py @@ -178,8 +178,9 @@ async def _retry_request( if attempt == last_attempt: raise e - self._logger.info('Waiting %s seconds before retry', ratelimit_sleep or retry_sleep) - await asyncio.sleep(ratelimit_sleep or retry_sleep) + to_sleep = float(f'{ratelimit_sleep or retry_sleep:.1f}') + self._logger.info('Waiting %s seconds before retry', to_sleep) + await asyncio.sleep(to_sleep) attempt += 1 if not ratelimit_sleep: diff --git a/src/dipdup/index.py b/src/dipdup/index.py index 722a902d1..ad708f8f1 100644 --- a/src/dipdup/index.py +++ b/src/dipdup/index.py @@ -62,17 +62,23 @@ def __init__( self._ctx = ctx self._config = config self._datasources = datasources - self._queue: deque[IndexQueueItemT] = deque() - queues.add_queue(self._queue, 
f'index_realtime:{config.name}:{id(self)})') + self._queue: deque[IndexQueueItemT] | None = None self._logger = FormattedLogger(__name__, fmt=f'{config.name}: ' + '{}') self._state: models.Index | None = None + @property + def queue(self) -> deque[IndexQueueItemT]: + if self._queue is None: + self._queue = deque() + queues.add_queue(self._queue, f'{self._config.name}:realtime') + return self._queue + def push_realtime_message(self, message: IndexQueueItemT) -> None: """Push message to the queue""" - self._queue.append(message) + self.queue.append(message) - Metrics.set_levels_to_realtime(self._config.name, len(self._queue)) + Metrics.set_levels_to_realtime(self._config.name, len(self.queue)) @abstractmethod async def _synchronize(self, sync_level: int) -> None: @@ -88,10 +94,10 @@ def _match_level_data( async def _process_queue(self) -> None: """Process WebSocket queue""" - if self._queue: + if self.queue: self._logger.debug('Processing websocket queue') - while self._queue: - message = self._queue.popleft() + while self.queue: + message = self.queue.popleft() if not message: raise FrameworkException('Empty message in the queue') @@ -186,7 +192,7 @@ def synchronized(self) -> bool: @property def realtime(self) -> bool: - return self.state.status == IndexStatus.realtime and not self._queue + return self.state.status == IndexStatus.realtime and not self.queue def get_sync_level(self) -> int: """Get level index needs to be synchronized to depending on its subscription status""" @@ -252,13 +258,13 @@ async def process(self) -> bool: if index_level < sync_level: self._logger.info('Index is behind the datasource level, syncing: %s -> %s', index_level, sync_level) - self._queue.clear() + self.queue.clear() with Metrics.measure_total_sync_duration(): await self._synchronize(sync_level) return True - if self._queue: + if self.queue: with Metrics.measure_total_realtime_duration(): await self._process_queue() return True diff --git a/src/dipdup/indexes/evm.py 
b/src/dipdup/indexes/evm.py index 36f8052ea..c33de436e 100644 --- a/src/dipdup/indexes/evm.py +++ b/src/dipdup/indexes/evm.py @@ -20,7 +20,6 @@ if TYPE_CHECKING: from dipdup.context import DipDupContext -EVM_SUBSQUID_READAHEAD_LIMIT = 10000 IndexConfigT = TypeVar('IndexConfigT', bound=Any) DatasourceT = TypeVar('DatasourceT', bound=Any) @@ -45,7 +44,7 @@ def get_sighash( if (not to) and signature: from web3 import Web3 - return Web3.keccak(text=signature).hex()[:10] + return '0x' + Web3.keccak(text=signature).hex()[:8] raise ConfigurationError('Either `to` or `signature` filters are expected') diff --git a/src/dipdup/indexes/evm_events/fetcher.py b/src/dipdup/indexes/evm_events/fetcher.py index 9e4b9036e..e4c16c4ec 100644 --- a/src/dipdup/indexes/evm_events/fetcher.py +++ b/src/dipdup/indexes/evm_events/fetcher.py @@ -3,9 +3,6 @@ from dipdup.datasources.evm_node import EvmNodeDatasource from dipdup.datasources.evm_subsquid import EvmSubsquidDatasource -from dipdup.fetcher import readahead_by_level -from dipdup.indexes.evm import EVM_SUBSQUID_READAHEAD_LIMIT -from dipdup.indexes.evm_node import EVM_NODE_READAHEAD_LIMIT from dipdup.indexes.evm_node import MIN_BATCH_SIZE from dipdup.indexes.evm_node import EvmNodeFetcher from dipdup.indexes.evm_subsquid import EvmSubsquidFetcher @@ -15,12 +12,18 @@ class EvmSubsquidEventFetcher(EvmSubsquidFetcher[EvmEventData]): def __init__( self, + name: str, datasources: tuple[EvmSubsquidDatasource, ...], first_level: int, last_level: int, topics: tuple[tuple[str | None, str], ...], ) -> None: - super().__init__(datasources, first_level, last_level) + super().__init__( + name=name, + datasources=datasources, + first_level=first_level, + last_level=last_level, + ) self._topics = topics async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[EvmEventData, ...]]]: @@ -29,7 +32,7 @@ async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[EvmEventData, . 
self._first_level, self._last_level, ) - async for level, batch in readahead_by_level(event_iter, limit=EVM_SUBSQUID_READAHEAD_LIMIT): + async for level, batch in self.readahead_by_level(event_iter): yield level, batch @@ -38,17 +41,23 @@ class EvmNodeEventFetcher(EvmNodeFetcher[EvmEventData]): def __init__( self, + name: str, datasources: tuple[EvmNodeDatasource, ...], first_level: int, last_level: int, addresses: set[str], ) -> None: - super().__init__(datasources, first_level, last_level) + super().__init__( + name=name, + datasources=datasources, + first_level=first_level, + last_level=last_level, + ) self._addresses = addresses async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[EvmEventData, ...]]]: event_iter = self._fetch_by_level() - async for level, batch in readahead_by_level(event_iter, limit=EVM_NODE_READAHEAD_LIMIT): + async for level, batch in self.readahead_by_level(event_iter): yield level, batch async def _fetch_by_level(self) -> AsyncIterator[tuple[EvmEventData, ...]]: diff --git a/src/dipdup/indexes/evm_events/index.py b/src/dipdup/indexes/evm_events/index.py index 5bee6e98d..58405932d 100644 --- a/src/dipdup/indexes/evm_events/index.py +++ b/src/dipdup/indexes/evm_events/index.py @@ -62,6 +62,7 @@ def _create_subsquid_fetcher(self, first_level: int, last_level: int) -> EvmSubs raise FrameworkException('Creating EvmSubsquidEventFetcher, but no `evm.subsquid` datasources available') return EvmSubsquidEventFetcher( + name=self.name, datasources=self.subsquid_datasources, first_level=first_level, last_level=last_level, @@ -81,6 +82,7 @@ def _create_node_fetcher(self, first_level: int, last_level: int) -> EvmNodeEven break return EvmNodeEventFetcher( + name=self.name, datasources=self.node_datasources, first_level=first_level, last_level=last_level, diff --git a/src/dipdup/indexes/evm_node.py b/src/dipdup/indexes/evm_node.py index 27b27a88e..e55da16a6 100644 --- a/src/dipdup/indexes/evm_node.py +++ b/src/dipdup/indexes/evm_node.py @@ 
-23,6 +23,20 @@ class EvmNodeFetcher(Generic[BufferT], DataFetcher[BufferT, EvmNodeDatasource], ABC): + def __init__( + self, + name: str, + datasources: tuple[EvmNodeDatasource, ...], + first_level: int, + last_level: int, + ) -> None: + super().__init__( + name=name, + datasources=datasources, + first_level=first_level, + last_level=last_level, + readahead_limit=EVM_NODE_READAHEAD_LIMIT, + ) def get_next_batch_size(self, batch_size: int, ratelimited: bool) -> int: old_batch_size = batch_size diff --git a/src/dipdup/indexes/evm_subsquid.py b/src/dipdup/indexes/evm_subsquid.py index 44669cdcf..461af089e 100644 --- a/src/dipdup/indexes/evm_subsquid.py +++ b/src/dipdup/indexes/evm_subsquid.py @@ -5,6 +5,21 @@ from dipdup.fetcher import BufferT from dipdup.fetcher import DataFetcher +EVM_SUBSQUID_READAHEAD_LIMIT = 10000 + class EvmSubsquidFetcher(Generic[BufferT], DataFetcher[BufferT, EvmSubsquidDatasource], ABC): - pass + def __init__( + self, + name: str, + datasources: tuple[EvmSubsquidDatasource, ...], + first_level: int, + last_level: int, + ) -> None: + super().__init__( + name=name, + datasources=datasources, + first_level=first_level, + last_level=last_level, + readahead_limit=EVM_SUBSQUID_READAHEAD_LIMIT, + ) diff --git a/src/dipdup/indexes/evm_transactions/fetcher.py b/src/dipdup/indexes/evm_transactions/fetcher.py index 357b67fde..e514d203c 100644 --- a/src/dipdup/indexes/evm_transactions/fetcher.py +++ b/src/dipdup/indexes/evm_transactions/fetcher.py @@ -3,9 +3,6 @@ from collections.abc import AsyncIterator from dipdup.datasources.evm_subsquid import EvmSubsquidDatasource -from dipdup.fetcher import readahead_by_level -from dipdup.indexes.evm import EVM_SUBSQUID_READAHEAD_LIMIT -from dipdup.indexes.evm_node import EVM_NODE_READAHEAD_LIMIT from dipdup.indexes.evm_node import MIN_BATCH_SIZE from dipdup.indexes.evm_node import EvmNodeFetcher from dipdup.indexes.evm_subsquid import EvmSubsquidFetcher @@ -18,12 +15,18 @@ class 
EvmSubsquidTransactionFetcher(EvmSubsquidFetcher[EvmTransactionData]): def __init__( self, + name: str, datasources: tuple[EvmSubsquidDatasource, ...], first_level: int, last_level: int, filters: tuple[TransactionRequest, ...], ) -> None: - super().__init__(datasources, first_level, last_level) + super().__init__( + name=name, + datasources=datasources, + first_level=first_level, + last_level=last_level, + ) self._filters = filters async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[EvmTransactionData, ...]]]: @@ -32,7 +35,7 @@ async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[EvmTransactionD self._last_level, self._filters, ) - async for level, batch in readahead_by_level(transaction_iter, limit=EVM_SUBSQUID_READAHEAD_LIMIT): + async for level, batch in self.readahead_by_level(transaction_iter): yield level, batch @@ -40,7 +43,7 @@ class EvmNodeTransactionFetcher(EvmNodeFetcher[EvmTransactionData]): async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[EvmTransactionData, ...]]]: transaction_iter = self._fetch_by_level() - async for level, batch in readahead_by_level(transaction_iter, limit=EVM_NODE_READAHEAD_LIMIT): + async for level, batch in self.readahead_by_level(transaction_iter): yield level, batch async def _fetch_by_level(self) -> AsyncIterator[tuple[EvmTransactionData, ...]]: diff --git a/src/dipdup/indexes/evm_transactions/index.py b/src/dipdup/indexes/evm_transactions/index.py index f9771c2ae..1b93ecbb2 100644 --- a/src/dipdup/indexes/evm_transactions/index.py +++ b/src/dipdup/indexes/evm_transactions/index.py @@ -68,6 +68,7 @@ def _create_subsquid_fetcher(self, first_level: int, last_level: int) -> EvmSubs filters.append(query) return EvmSubsquidTransactionFetcher( + name=self.name, datasources=self.subsquid_datasources, first_level=first_level, last_level=last_level, @@ -76,6 +77,7 @@ def _create_subsquid_fetcher(self, first_level: int, last_level: int) -> EvmSubs def _create_node_fetcher(self, first_level: int, 
last_level: int) -> EvmNodeTransactionFetcher: return EvmNodeTransactionFetcher( + name=self.name, datasources=self.node_datasources, first_level=first_level, last_level=last_level, diff --git a/src/dipdup/indexes/starknet_events/fetcher.py b/src/dipdup/indexes/starknet_events/fetcher.py index f98fbd96c..9061856d9 100644 --- a/src/dipdup/indexes/starknet_events/fetcher.py +++ b/src/dipdup/indexes/starknet_events/fetcher.py @@ -6,25 +6,27 @@ from dipdup.datasources.starknet_subsquid import StarknetSubsquidDatasource from dipdup.exceptions import FrameworkException from dipdup.fetcher import FetcherChannel -from dipdup.fetcher import readahead_by_level from dipdup.indexes.starknet_node import StarknetNodeFetcher from dipdup.indexes.starknet_subsquid import StarknetSubsquidFetcher from dipdup.models.starknet import StarknetEventData from dipdup.models.starknet_subsquid import EventRequest -STARKNET_NODE_READAHEAD_LIMIT = 100 -STARKNET_SUBSQUID_READAHEAD_LIMIT = 10000 - class StarknetSubsquidEventFetcher(StarknetSubsquidFetcher[StarknetEventData]): def __init__( self, + name: str, datasources: tuple[StarknetSubsquidDatasource, ...], first_level: int, last_level: int, event_ids: dict[str, set[str]], ) -> None: - super().__init__(datasources, first_level, last_level) + super().__init__( + name=name, + datasources=datasources, + first_level=first_level, + last_level=last_level, + ) self._event_ids = event_ids async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[StarknetEventData, ...]]]: @@ -40,7 +42,7 @@ async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[StarknetEventDa last_level=self._last_level, filters=filters, ) - async for level, batch in readahead_by_level(event_iter, limit=STARKNET_SUBSQUID_READAHEAD_LIMIT): + async for level, batch in self.readahead_by_level(event_iter): yield level, batch @@ -80,12 +82,13 @@ async def fetch(self) -> None: class StarknetNodeEventFetcher(StarknetNodeFetcher[StarknetEventData]): def __init__( self, + 
name: str, datasources: tuple[StarknetNodeDatasource, ...], first_level: int, last_level: int, event_ids: dict[str, set[str]], ) -> None: - super().__init__(datasources, first_level, last_level) + super().__init__(name, datasources, first_level, last_level) self._event_ids = event_ids async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[StarknetEventData, ...]]]: @@ -106,7 +109,7 @@ async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[StarknetEventDa events_iter = self._merged_iter( channels, lambda i: tuple(sorted(i, key=lambda x: f'{x.block_number}_{x.transaction_index}')) ) - async for level, batch in readahead_by_level(events_iter, limit=STARKNET_NODE_READAHEAD_LIMIT): + async for level, batch in self.readahead_by_level(events_iter): yield level, batch def get_random_node(self) -> StarknetNodeDatasource: diff --git a/src/dipdup/indexes/starknet_events/index.py b/src/dipdup/indexes/starknet_events/index.py index 107cf4fae..f024613ba 100644 --- a/src/dipdup/indexes/starknet_events/index.py +++ b/src/dipdup/indexes/starknet_events/index.py @@ -83,6 +83,7 @@ def _create_subsquid_fetcher(self, first_level: int, last_level: int) -> Starkne event_ids[handler_config.contract.address].add(event_abi['event_identifier']) return StarknetSubsquidEventFetcher( + name=self.name, datasources=self.subsquid_datasources, first_level=first_level, last_level=last_level, @@ -108,6 +109,7 @@ def _create_node_fetcher(self, first_level: int, last_level: int) -> StarknetNod event_ids[handler_config.contract.address].add(event_abi['event_identifier']) return StarknetNodeEventFetcher( + name=self.name, datasources=self.node_datasources, first_level=first_level, last_level=last_level, diff --git a/src/dipdup/indexes/starknet_node.py b/src/dipdup/indexes/starknet_node.py index b0100fcaf..179f1c023 100644 --- a/src/dipdup/indexes/starknet_node.py +++ b/src/dipdup/indexes/starknet_node.py @@ -5,5 +5,21 @@ from dipdup.fetcher import BufferT from dipdup.fetcher import 
DataFetcher +STARKNET_NODE_READAHEAD_LIMIT = 100 -class StarknetNodeFetcher(Generic[BufferT], DataFetcher[BufferT, StarknetNodeDatasource], ABC): ... + +class StarknetNodeFetcher(Generic[BufferT], DataFetcher[BufferT, StarknetNodeDatasource], ABC): + def __init__( + self, + name: str, + datasources: tuple[StarknetNodeDatasource, ...], + first_level: int, + last_level: int, + ) -> None: + super().__init__( + name=name, + datasources=datasources, + first_level=first_level, + last_level=last_level, + readahead_limit=STARKNET_NODE_READAHEAD_LIMIT, + ) diff --git a/src/dipdup/indexes/starknet_subsquid.py b/src/dipdup/indexes/starknet_subsquid.py index cefa66df9..2c8953ae7 100644 --- a/src/dipdup/indexes/starknet_subsquid.py +++ b/src/dipdup/indexes/starknet_subsquid.py @@ -5,6 +5,21 @@ from dipdup.fetcher import BufferT from dipdup.fetcher import DataFetcher +STARKNET_SUBSQUID_READAHEAD_LIMIT = 10000 + class StarknetSubsquidFetcher(Generic[BufferT], DataFetcher[BufferT, StarknetSubsquidDatasource], ABC): - pass + def __init__( + self, + name: str, + datasources: tuple[StarknetSubsquidDatasource, ...], + first_level: int, + last_level: int, + ) -> None: + super().__init__( + name=name, + datasources=datasources, + first_level=first_level, + last_level=last_level, + readahead_limit=STARKNET_SUBSQUID_READAHEAD_LIMIT, + ) diff --git a/src/dipdup/indexes/tezos_big_maps/fetcher.py b/src/dipdup/indexes/tezos_big_maps/fetcher.py index b80b9a2f6..e5dde19f0 100644 --- a/src/dipdup/indexes/tezos_big_maps/fetcher.py +++ b/src/dipdup/indexes/tezos_big_maps/fetcher.py @@ -1,11 +1,8 @@ from __future__ import annotations -import logging from typing import TYPE_CHECKING from dipdup.config.tezos_big_maps import TezosBigMapsHandlerConfig -from dipdup.fetcher import readahead_by_level -from dipdup.indexes.tezos_tzkt import TZKT_READAHEAD_LIMIT from dipdup.indexes.tezos_tzkt import TezosTzktFetcher from dipdup.models.tezos import TezosBigMapData @@ -33,11 +30,11 @@ def 
get_big_map_paths(handlers: Iterable[TezosBigMapsHandlerConfig]) -> set[str] return paths -def get_big_map_pairs(handlers: Iterable[TezosBigMapsHandlerConfig]) -> set[tuple[str, str]]: +def get_big_map_pairs(handlers: Iterable[TezosBigMapsHandlerConfig]) -> list[tuple[str, str]]: """Get address-path pairs for fetch big map diffs during sync with `skip_history`""" - pairs = set() + pairs = [] for handler_config in handlers: - pairs.add( + pairs.append( ( handler_config.contract.get_address(), handler_config.path, @@ -51,14 +48,19 @@ class BigMapFetcher(TezosTzktFetcher[TezosBigMapData]): def __init__( self, + name: str, datasources: tuple[TezosTzktDatasource, ...], first_level: int, last_level: int, big_map_addresses: set[str], big_map_paths: set[str], ) -> None: - super().__init__(datasources, first_level, last_level) - self._logger = logging.getLogger('dipdup.fetcher') + super().__init__( + name=name, + datasources=datasources, + first_level=first_level, + last_level=last_level, + ) self._big_map_addresses = big_map_addresses self._big_map_paths = big_map_paths @@ -74,6 +76,7 @@ def create( big_map_paths = get_big_map_paths(config.handlers) return BigMapFetcher( + name=config.name, datasources=datasources, first_level=first_level, last_level=last_level, @@ -88,5 +91,5 @@ async def fetch_by_level(self) -> AsyncGenerator[tuple[int, tuple[TezosBigMapDat self._first_level, self._last_level, ) - async for level, batch in readahead_by_level(big_map_iter, limit=TZKT_READAHEAD_LIMIT): + async for level, batch in self.readahead_by_level(big_map_iter): yield level, batch diff --git a/src/dipdup/indexes/tezos_big_maps/index.py b/src/dipdup/indexes/tezos_big_maps/index.py index acea4180d..6d410f2c0 100644 --- a/src/dipdup/indexes/tezos_big_maps/index.py +++ b/src/dipdup/indexes/tezos_big_maps/index.py @@ -1,9 +1,10 @@ +import time from collections import deque from datetime import datetime from typing import Any from dipdup.config.tezos_big_maps import TezosBigMapsIndexConfig 
-from dipdup.exceptions import ConfigurationError +from dipdup.exceptions import FrameworkException from dipdup.indexes.tezos_big_maps.fetcher import BigMapFetcher from dipdup.indexes.tezos_big_maps.fetcher import get_big_map_pairs from dipdup.indexes.tezos_big_maps.matcher import match_big_maps @@ -14,6 +15,7 @@ from dipdup.models.tezos import TezosBigMapData from dipdup.models.tezos_tzkt import TezosTzktMessageType from dipdup.performance import metrics +from dipdup.prometheus import Metrics QueueItem = tuple[TezosBigMapData, ...] | RollbackMessage @@ -52,22 +54,24 @@ async def _synchronize_full(self, index_level: int, sync_level: int) -> None: await self._process_level_data(big_maps, sync_level) async def _synchronize_level(self, head_level: int) -> None: - # NOTE: Checking late because feature flags could be modified after loading config if not self._ctx.config.advanced.early_realtime: - raise ConfigurationError('`skip_history` requires `early_realtime` feature flag to be enabled') + raise FrameworkException('`skip_history` requires `early_realtime` feature flag to be enabled') big_map_pairs = get_big_map_pairs(self._config.handlers) - big_map_ids: set[tuple[int, str, str]] = set() + big_map_ids: list[tuple[int, str, str]] = [] for address, path in big_map_pairs: async for contract_big_maps in self.random_datasource.iter_contract_big_maps(address): for contract_big_map in contract_big_maps: if contract_big_map['path'] == path: - big_map_ids.add((int(contract_big_map['ptr']), address, path)) + big_map_ids.append((int(contract_big_map['ptr']), address, path)) # NOTE: Do not use `_process_level_data` here; we want to maintain transaction manually. 
async with self._ctx.transactions.in_transaction(head_level, head_level, self.name): for big_map_id, address, path in big_map_ids: + total_keys = (await self.random_datasource.request('get', f'v1/bigmaps/{big_map_id}'))['activeKeys'] + self._logger.info('Processing %s keys of big map %s; this may take a while', total_keys, big_map_id) + async for big_map_keys in self.random_datasource.iter_big_map(big_map_id, head_level): big_map_data = tuple( TezosBigMapData( @@ -85,9 +89,18 @@ async def _synchronize_level(self, head_level: int) -> None: ) for big_map_key in big_map_keys ) - metrics.objects_indexed += len(big_map_data) + + started_at = time.time() matched_handlers = match_big_maps(self._ctx.package, self._config.handlers, big_map_data) + + total_matched = len(matched_handlers) + Metrics.set_index_handlers_matched(total_matched) + metrics.handlers_matched[self.name] += total_matched + metrics.time_in_matcher[self.name] += time.time() - started_at + + started_at = time.time() + for handler_config, big_map_diff in matched_handlers: await self._ctx.fire_handler( name=handler_config.callback, @@ -95,6 +108,9 @@ async def _synchronize_level(self, head_level: int) -> None: args=(big_map_diff,), ) + metrics.objects_indexed += len(big_map_data) + metrics.time_in_callbacks[self.name] += time.time() - started_at + await self._update_state(level=head_level) def _match_level_data(self, handlers: Any, level_data: Any) -> deque[Any]: diff --git a/src/dipdup/indexes/tezos_events/fetcher.py b/src/dipdup/indexes/tezos_events/fetcher.py index 51405e334..701a44eb6 100644 --- a/src/dipdup/indexes/tezos_events/fetcher.py +++ b/src/dipdup/indexes/tezos_events/fetcher.py @@ -2,8 +2,6 @@ from typing import TYPE_CHECKING -from dipdup.fetcher import readahead_by_level -from dipdup.indexes.tezos_tzkt import TZKT_READAHEAD_LIMIT from dipdup.indexes.tezos_tzkt import TezosTzktFetcher from dipdup.models.tezos import TezosEventData @@ -16,13 +14,14 @@ class 
EventFetcher(TezosTzktFetcher[TezosEventData]): def __init__( self, + name: str, datasources: tuple[TezosTzktDatasource, ...], first_level: int, last_level: int, event_addresses: set[str], event_tags: set[str], ) -> None: - super().__init__(datasources, first_level, last_level) + super().__init__(name, datasources, first_level, last_level) self._event_addresses = event_addresses self._event_tags = event_tags @@ -33,5 +32,5 @@ async def fetch_by_level(self) -> AsyncGenerator[tuple[int, tuple[TezosEventData self._first_level, self._last_level, ) - async for level, batch in readahead_by_level(event_iter, limit=TZKT_READAHEAD_LIMIT): + async for level, batch in self.readahead_by_level(event_iter): yield level, batch diff --git a/src/dipdup/indexes/tezos_events/index.py b/src/dipdup/indexes/tezos_events/index.py index 5f4325df2..e70a4305c 100644 --- a/src/dipdup/indexes/tezos_events/index.py +++ b/src/dipdup/indexes/tezos_events/index.py @@ -21,6 +21,7 @@ def _create_fetcher(self, first_level: int, last_level: int) -> EventFetcher: event_addresses = self._get_event_addresses() event_tags = self._get_event_tags() return EventFetcher( + name=self.name, datasources=self._datasources, first_level=first_level, last_level=last_level, diff --git a/src/dipdup/indexes/tezos_operations/fetcher.py b/src/dipdup/indexes/tezos_operations/fetcher.py index 393b6a869..ab8f81e7d 100644 --- a/src/dipdup/indexes/tezos_operations/fetcher.py +++ b/src/dipdup/indexes/tezos_operations/fetcher.py @@ -22,8 +22,6 @@ from dipdup.exceptions import FrameworkException from dipdup.fetcher import FetcherChannel from dipdup.fetcher import FilterT -from dipdup.fetcher import readahead_by_level -from dipdup.indexes.tezos_tzkt import TZKT_READAHEAD_LIMIT from dipdup.indexes.tezos_tzkt import TezosTzktFetcher from dipdup.models.tezos import TezosOperationData from dipdup.models.tezos import TezosOperationType @@ -462,6 +460,7 @@ class OperationsFetcher(TezosTzktFetcher[TezosOperationData]): def __init__( 
self, + name: str, datasources: tuple[TezosTzktDatasource, ...], first_level: int, last_level: int, @@ -472,7 +471,7 @@ def __init__( sr_execute_addresses: set[str], migration_originations: bool = False, ) -> None: - super().__init__(datasources, first_level, last_level) + super().__init__(name, datasources, first_level, last_level) self._transaction_addresses = transaction_addresses self._transaction_hashes = transaction_hashes self._origination_addresses = origination_addresses @@ -493,6 +492,7 @@ async def create( sr_execute_addresses = await get_sr_execute_filters(config) return OperationsFetcher( + name=config.name, datasources=datasources, first_level=first_level, last_level=last_level, @@ -560,13 +560,14 @@ async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[TezosOperationD channels=set(channels), sort_fn=dedup_operations, ) - async for level, operations in readahead_by_level(operations_iter, limit=TZKT_READAHEAD_LIMIT): + async for level, operations in self.readahead_by_level(operations_iter): yield level, operations class OperationsUnfilteredFetcher(TezosTzktFetcher[TezosOperationData]): def __init__( self, + name: str, datasources: tuple[TezosTzktDatasource, ...], first_level: int, last_level: int, @@ -574,7 +575,7 @@ def __init__( originations: bool, migration_originations: bool, ) -> None: - super().__init__(datasources, first_level, last_level) + super().__init__(name, datasources, first_level, last_level) self._transactions = transactions self._originations = originations self._migration_originations = migration_originations @@ -588,6 +589,7 @@ async def create( last_level: int, ) -> OperationsUnfilteredFetcher: return OperationsUnfilteredFetcher( + name=config.name, datasources=datasources, first_level=first_level, last_level=last_level, @@ -634,5 +636,5 @@ async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[TezosOperationD channels=set(channels), sort_fn=dedup_operations, ) - async for level, operations in 
readahead_by_level(operations_iter, limit=TZKT_READAHEAD_LIMIT): + async for level, operations in self.readahead_by_level(operations_iter): yield level, operations diff --git a/src/dipdup/indexes/tezos_token_transfers/fetcher.py b/src/dipdup/indexes/tezos_token_transfers/fetcher.py index 76962f4ad..45ce1ef63 100644 --- a/src/dipdup/indexes/tezos_token_transfers/fetcher.py +++ b/src/dipdup/indexes/tezos_token_transfers/fetcher.py @@ -1,10 +1,7 @@ from __future__ import annotations -import logging from typing import TYPE_CHECKING -from dipdup.fetcher import readahead_by_level -from dipdup.indexes.tezos_tzkt import TZKT_READAHEAD_LIMIT from dipdup.indexes.tezos_tzkt import TezosTzktFetcher from dipdup.models.tezos import TezosTokenTransferData @@ -18,6 +15,7 @@ class TokenTransferFetcher(TezosTzktFetcher[TezosTokenTransferData]): def __init__( self, + name: str, datasources: tuple[TezosTzktDatasource, ...], token_addresses: set[str], token_ids: set[int], @@ -26,8 +24,12 @@ def __init__( first_level: int, last_level: int, ) -> None: - super().__init__(datasources, first_level, last_level) - self._logger = logging.getLogger('dipdup.fetcher') + super().__init__( + name=name, + datasources=datasources, + first_level=first_level, + last_level=last_level, + ) self._token_addresses = token_addresses self._token_ids = token_ids self._from_addresses = from_addresses @@ -42,5 +44,5 @@ async def fetch_by_level(self) -> AsyncIterator[tuple[int, tuple[TezosTokenTrans self._first_level, self._last_level, ) - async for level, batch in readahead_by_level(token_transfer_iter, limit=TZKT_READAHEAD_LIMIT): + async for level, batch in self.readahead_by_level(token_transfer_iter): yield level, batch diff --git a/src/dipdup/indexes/tezos_token_transfers/index.py b/src/dipdup/indexes/tezos_token_transfers/index.py index 9ffc7bf0d..d21041b03 100644 --- a/src/dipdup/indexes/tezos_token_transfers/index.py +++ b/src/dipdup/indexes/tezos_token_transfers/index.py @@ -32,6 +32,7 @@ def 
_create_fetcher(self, first_level: int, last_level: int) -> TokenTransferFet to_addresses.add(handler_config.to.get_address()) return TokenTransferFetcher( + name=self.name, datasources=self._datasources, token_addresses=token_addresses, token_ids=token_ids, diff --git a/src/dipdup/indexes/tezos_tzkt.py b/src/dipdup/indexes/tezos_tzkt.py index b8021804b..e04ea92c5 100644 --- a/src/dipdup/indexes/tezos_tzkt.py +++ b/src/dipdup/indexes/tezos_tzkt.py @@ -25,4 +25,17 @@ class TezosTzktFetcher( DataFetcher[BufferT, TezosTzktDatasource], ABC, ): - pass + def __init__( + self, + name: str, + datasources: tuple[TezosTzktDatasource, ...], + first_level: int, + last_level: int, + ) -> None: + super().__init__( + name=name, + datasources=datasources, + first_level=first_level, + last_level=last_level, + readahead_limit=TZKT_READAHEAD_LIMIT, + ) diff --git a/src/dipdup/install.py b/src/dipdup/install.py index caf44ba8c..ed3ec548b 100755 --- a/src/dipdup/install.py +++ b/src/dipdup/install.py @@ -179,6 +179,8 @@ def install( ref: str | None, path: str | None, pre: bool = False, + editable: bool = False, + update: bool = False, with_pdm: bool = False, with_poetry: bool = False, ) -> None: @@ -191,12 +193,6 @@ def install( if not quiet: env.print() - pipx_args = [] - if force: - pipx_args.append('--force') - if pre: - pipx_args.append('--pip-args="--pre"') - pipx_packages = env._pipx_packages python_inter_pipx = cast(str, which('python3.12')) @@ -211,9 +207,21 @@ def install( .split('\n')[0] ) - if 'dipdup' in pipx_packages and not force: - echo('Updating DipDup') - env.run_cmd('pipx', 'upgrade', 'dipdup', *pipx_args) + pipx_args = [] + if force: + pipx_args.append('--force') + if pre: + pipx_args.append('--pip-args="--pre"') + if editable: + pipx_args.append('--editable') + + if 'dipdup' in pipx_packages and force: + env.run_cmd('pipx', 'uninstall', 'dipdup') + pipx_packages.remove('dipdup') + + if 'dipdup' in pipx_packages: + if update: + env.run_cmd('pipx', 'upgrade', 
'--python', python_inter_pipx, 'dipdup', *pipx_args) elif path: echo(f'Installing DipDup from `{path}`') env.run_cmd('pipx', 'install', '--python', python_inter_pipx, path, *pipx_args) @@ -231,16 +239,16 @@ def install( ('poetry', with_poetry), ): if pm in pipx_packages: - echo(f'Updating `{pm}`') - env.run_cmd('pipx', 'upgrade', pm, *pipx_args) + if update: + env.run_cmd('pipx', 'upgrade', '--python', python_inter_pipx, pm, *pipx_args) elif with_pm or force or quiet or ask(f'Install `{pm}`?', False): echo(f'Installing `{pm}`') - env.run_cmd('pipx', 'install', '--python', python_inter_pipx, pm, *pipx_args) + env.run_cmd('pipx', 'install', '--python', python_inter_pipx, *pipx_args, pm) env._commands[pm] = which(pm) done( - 'Done! DipDup is ready to use.\nRun `dipdup new` to create a new project or `dipdup` to see all available' - ' commands.' + 'Done! DipDup is ready to use.\n' + 'Run `dipdup new` to create a new project or `dipdup` to see all available commands.' ) @@ -281,7 +289,9 @@ def cli() -> None: parser.add_argument('-r', '--ref', help='Install DipDup from a specific git ref') parser.add_argument('-p', '--path', help='Install DipDup from a local path') parser.add_argument('-u', '--uninstall', action='store_true', help='Uninstall DipDup') + parser.add_argument('-U', '--update', action='store_true', help='Update DipDup') parser.add_argument('--pre', action='store_true', help='Include pre-release versions') + parser.add_argument('-e', '--editable', action='store_true', help='Install DipDup in editable mode') parser.add_argument('--with-pdm', action='store_true', help='Install PDM') parser.add_argument('--with-poetry', action='store_true', help='Install Poetry') args = parser.parse_args() @@ -299,6 +309,8 @@ def cli() -> None: ref=args.ref.strip() if args.ref else None, path=args.path.strip() if args.path else None, pre=args.pre, + editable=args.editable, + update=args.update, with_pdm=args.with_pdm, with_poetry=args.with_poetry, ) diff --git 
a/src/dipdup/package.py b/src/dipdup/package.py index d6e2fffa9..724efd49a 100644 --- a/src/dipdup/package.py +++ b/src/dipdup/package.py @@ -15,6 +15,7 @@ from dipdup.exceptions import ProjectPackageError from dipdup.project import Answers from dipdup.project import answers_from_replay +from dipdup.project import get_default_answers from dipdup.utils import import_from from dipdup.utils import import_submodules from dipdup.utils import pascal_to_snake @@ -53,8 +54,9 @@ def draw_package_tree(root: Path, project_tree: dict[str, tuple[Path, ...]]) -> class DipDupPackage: - def __init__(self, root: Path) -> None: - _logger.info('Loading package `%s` from `%s`', root.name, root) + def __init__(self, root: Path, quiet: bool = False) -> None: + _log = _logger.debug if quiet else _logger.info + _log('Loading package `%s` from `%s`', root.name, root) self.root = root self.name = root.name @@ -91,10 +93,14 @@ def evm_abi_paths(self) -> Generator[Any, None, None]: return self.abi.glob(f'**/{EVM_ABI_JSON}') @property - def replay(self) -> Answers | None: - if not self._replay and (self.root / 'configs' / 'replay.yaml').exists(): - self._replay = answers_from_replay(self.root / 'configs' / 'replay.yaml') - return self._replay + def replay_path(self) -> Path: + return self.root / 'configs' / 'replay.yaml' + + @property + def replay(self) -> Answers: + if self.replay_path.exists(): + return answers_from_replay(self.replay_path) + return get_default_answers() @property def skel(self) -> dict[Path, str | None]: diff --git a/src/dipdup/performance.py b/src/dipdup/performance.py index e214ad5da..ca32b39c2 100644 --- a/src/dipdup/performance.py +++ b/src/dipdup/performance.py @@ -134,9 +134,12 @@ def __init__(self) -> None: self._queues: dict[str, deque[Any]] = {} self._limits: dict[str, int] = {} - def add_queue(self, queue: deque[Any], name: str | None = None, limit: int = 0) -> None: - if name is None: - name = f'{queue.__module__}:{id(queue)}' + def add_queue( + self, + queue: 
deque[Any], + name: str, + limit: int = 0, + ) -> None: if name in self._queues: raise FrameworkException(f'Queue `{name}` already exists') self._queues[name] = queue @@ -209,16 +212,14 @@ class _MetricManager: progress: float = 0.0 def stats(self) -> dict[str, Any]: - result = {} - for k, v in self.__dict__.items(): - if k.startswith('_'): - continue - if isinstance(v, defaultdict): - for kk, vv in v.items(): - result[f'{k}:{kk}'] = vv - else: - result[k] = v - return result + def _round(value: Any) -> Any: + if isinstance(value, dict): + return {k: _round(v) for k, v in value.items()} + if isinstance(value, float): + return round(value, 2) + return value + + return {k: _round(v) for k, v in self.__dict__.items() if not k.startswith('_')} caches = _CacheManager() diff --git a/src/dipdup/project.py b/src/dipdup/project.py index 76796ada5..fefb4874c 100644 --- a/src/dipdup/project.py +++ b/src/dipdup/project.py @@ -10,7 +10,6 @@ from pydantic import ConfigDict from pydantic import TypeAdapter from pydantic.dataclasses import dataclass -from tabulate import tabulate from typing_extensions import TypedDict from dipdup import __version__ @@ -125,6 +124,7 @@ def prompt_anyof( ) -> tuple[int, str]: """Ask user to choose one of options; returns index and value""" import survey # type: ignore[import-untyped] + from tabulate import tabulate table = tabulate( zip(options, comments, strict=True), diff --git a/src/dipdup/projects/base/Makefile.j2 b/src/dipdup/projects/base/Makefile.j2 index 3c785f986..acb7e5f84 100644 --- a/src/dipdup/projects/base/Makefile.j2 +++ b/src/dipdup/projects/base/Makefile.j2 @@ -13,6 +13,28 @@ help: ## Show this help (default) all: ## Run an entire CI pipeline make format lint +## + +install: ## Install dependencies +{%- if project.package_manager == 'none' %} + true +{%- elif project.package_manager == 'pdm' %} + pdm install +{%- elif project.package_manager == 'poetry' %} + poetry install +{% endif %} + +update: ## Update dependencies +{%- if 
project.package_manager == 'none' %} + true +{%- elif project.package_manager == 'pdm' %} + pdm update + dipdup self update -q +{%- elif project.package_manager == 'poetry' %} + poetry update + dipdup self update -q +{% endif %} + format: ## Format with all tools make black @@ -28,7 +50,7 @@ ruff: ## Lint with ruff ruff check --fix . mypy: ## Lint with mypy - mypy --no-incremental --exclude ${PACKAGE} . + mypy . ## diff --git a/src/dipdup/projects/base/deploy/compose.sqlite.yaml.j2 b/src/dipdup/projects/base/deploy/compose.sqlite.yaml.j2 index 7fb4e4d3c..8be07744f 100644 --- a/src/dipdup/projects/base/deploy/compose.sqlite.yaml.j2 +++ b/src/dipdup/projects/base/deploy/compose.sqlite.yaml.j2 @@ -1,4 +1,3 @@ -version: "3.8" name: {{ project.package }} services: diff --git a/src/dipdup/projects/base/deploy/compose.swarm.yaml.j2 b/src/dipdup/projects/base/deploy/compose.swarm.yaml.j2 index f2c986a54..f9878cdcf 100644 --- a/src/dipdup/projects/base/deploy/compose.swarm.yaml.j2 +++ b/src/dipdup/projects/base/deploy/compose.swarm.yaml.j2 @@ -1,4 +1,3 @@ -version: "3.8" name: {{ project.package }} services: @@ -38,7 +37,7 @@ services: - POSTGRES_DB=dipdup - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ["CMD-SHELL", "pg_isready -U dipdup"] interval: 10s timeout: 5s retries: 5 diff --git a/src/dipdup/projects/base/deploy/compose.yaml.j2 b/src/dipdup/projects/base/deploy/compose.yaml.j2 index 76f6b63c5..3d16ccf56 100644 --- a/src/dipdup/projects/base/deploy/compose.yaml.j2 +++ b/src/dipdup/projects/base/deploy/compose.yaml.j2 @@ -1,4 +1,3 @@ -version: "3.8" name: {{ project.package }} services: @@ -19,7 +18,7 @@ services: db: image: {{ project.postgres_image }} ports: - - 5432 + - "${POSTGRES_HOST_PORT:-5432}:5432" volumes: - db:{{ project.postgres_data_path }} restart: always @@ -37,7 +36,7 @@ services: hasura: image: {{ project.hasura_image }} ports: - - 8080 + - "${HASURA_HOST_PORT:-8080}:8080" depends_on: - db 
restart: always diff --git a/src/dipdup/projects/base/pyproject.toml.j2 b/src/dipdup/projects/base/pyproject.toml.j2 index 42f4dd76a..4a7a106ac 100644 --- a/src/dipdup/projects/base/pyproject.toml.j2 +++ b/src/dipdup/projects/base/pyproject.toml.j2 @@ -34,18 +34,6 @@ dev = [ "ruff", "mypy", ] - -[tool.pdm.scripts] -help = {cmd = "make help", help = "Show this help (default)"} -all = {cmd = "make all", help = "Run an entire CI pipeline"} -format = {cmd = "make format", help = "Format with all tools"} -lint = {cmd = "make lint", help = "Lint with all tools"} -black = {cmd = "make black", help = "Format with black"} -ruff = {cmd = "make ruff", help = "Lint with ruff"} -mypy = {cmd = "make mypy", help = "Lint with mypy"} -image = {cmd = "make image", help = "Build Docker image"} -up = {cmd = "make up", help = "Start Compose stack"} -down = {cmd = "make down", help = "Stop Compose stack"} {% elif project.package_manager == 'poetry' %} [tool.poetry] name = "{{ project.package }}" @@ -70,6 +58,7 @@ mypy = "*" line-length = {{ project.line_length }} target-version = ['py312'] skip-string-normalization = true +extend-exclude = "{{ project.package }}" [tool.ruff] line-length = {{ project.line_length }} @@ -84,6 +73,11 @@ isort = { force-single-line = true} python_version = "3.12" plugins = ["pydantic.mypy"] strict = false +exclude = "{{ project.package }}" + +[[tool.mypy.overrides]] +module = "ruamel" +ignore_missing_imports = true {% if project.package_manager == 'pdm' -%} [build-system] diff --git a/src/dipdup/projects/demo_tezos_big_maps/handlers/on_update_records.py.j2 b/src/dipdup/projects/demo_tezos_big_maps/handlers/on_update_records.py.j2 index c8c643a40..bee98502c 100644 --- a/src/dipdup/projects/demo_tezos_big_maps/handlers/on_update_records.py.j2 +++ b/src/dipdup/projects/demo_tezos_big_maps/handlers/on_update_records.py.j2 @@ -16,7 +16,7 @@ async def on_update_records( record_name = bytes.fromhex(store_records.key.root).decode() record_path = 
record_name.split('.') - ctx.logger.info('Processing `%s`', record_name) + ctx.logger.debug('Processing `%s`', record_name) level = store_records.value.level if len(record_path) != int(level): diff --git a/src/dipdup/projects/demo_tezos_domains/handlers/on_update_records.py.j2 b/src/dipdup/projects/demo_tezos_domains/handlers/on_update_records.py.j2 index 470c8d4f6..c175246f0 100644 --- a/src/dipdup/projects/demo_tezos_domains/handlers/on_update_records.py.j2 +++ b/src/dipdup/projects/demo_tezos_domains/handlers/on_update_records.py.j2 @@ -32,7 +32,7 @@ async def on_update_records( record_name = bytes.fromhex(store_records.key.root).decode() record_path = record_name.split('.') domain_data = decode_domain_data(store_records.value.data) - ctx.logger.info('Processing `%s`', record_name) + ctx.logger.debug('Processing `%s`', record_name) if len(record_path) != int(store_records.value.level): ctx.logger.warning( diff --git a/src/dipdup/projects/demo_tezos_etherlink/dipdup.yaml.j2 b/src/dipdup/projects/demo_tezos_etherlink/dipdup.yaml.j2 index 3bb1f2acf..ce4e84c7e 100644 --- a/src/dipdup/projects/demo_tezos_etherlink/dipdup.yaml.j2 +++ b/src/dipdup/projects/demo_tezos_etherlink/dipdup.yaml.j2 @@ -4,20 +4,20 @@ package: demo_tezos_etherlink datasources: tzkt: kind: tezos.tzkt - url: ${TZKT_URL:-https://api.nairobinet.tzkt.io} + url: ${TZKT_URL:-https://api.parisnet.tzkt.io} contracts: ticketer: kind: tezos - address: KT1PmYUomF3HDxsGWYQUCbLi2X8WvT7ZHv8o + address: KT1AAi4DCQiTUv5MYoXtdiFwUrPH3t3Yhkjo typename: ticketer ticket_helper: kind: tezos - address: KT1TZg9EwGHKbfWvsHGsqBjm3J5NhJBtHPKX + address: KT1FcXb4oFBWtUVbEa96Do4DfQZXn6878yu1 typename: ticket_helper rollup: kind: tezos - address: sr1QgYF6ARMSLcWyAX4wFDrWFaZTyy4twbqe + address: sr1GBHEgzZmpWH4URqshZEZFCxBpqzi6ahvL typename: rollup indexes: diff --git a/src/dipdup/sql/dipdup_head_status.sql b/src/dipdup/sql/dipdup_head_status.sql deleted file mode 100644 index 46af4ba63..000000000 --- 
a/src/dipdup/sql/dipdup_head_status.sql +++ /dev/null @@ -1,9 +0,0 @@ -CREATE OR REPLACE VIEW dipdup_head_status AS -SELECT - name, - CASE - WHEN timestamp < NOW() - interval '{} seconds' THEN 'OUTDATED' - ELSE 'OK' - END AS status -FROM - dipdup_head; \ No newline at end of file diff --git a/src/dipdup/sql/dipdup_status.sql b/src/dipdup/sql/dipdup_status.sql new file mode 100644 index 000000000..e16c9c543 --- /dev/null +++ b/src/dipdup/sql/dipdup_status.sql @@ -0,0 +1,39 @@ +DROP VIEW IF EXISTS dipdup_head_status; + +CREATE OR REPLACE VIEW dipdup_status AS +SELECT * +FROM ( + SELECT 'index' as type, name, level, 0 as size, updated_at + FROM dipdup_index + + UNION ALL + + SELECT 'datasource' as type, name, level, 0 as size, updated_at + FROM dipdup_head + + UNION ALL + + SELECT 'queue' as type, queue_key as name, 0 as level, queue_size as size, updated_at + FROM ( + SELECT + queue_key, + (value::jsonb -> 'queues' -> queue_key ->> 'size')::numeric as queue_size, + updated_at + FROM dipdup_meta, + jsonb_object_keys(value::jsonb -> 'queues') as queue_key + WHERE dipdup_meta.key = 'dipdup_metrics' + ) as queue_subquery + + UNION ALL + + SELECT 'cache' as type, cache_key as name, 0 as level, cache_size as size, updated_at + FROM ( + SELECT + cache_key, + (value::jsonb -> 'caches' -> cache_key ->> 'size')::numeric as cache_size, + updated_at + FROM dipdup_meta, + jsonb_object_keys(value::jsonb -> 'caches') as cache_key + WHERE dipdup_meta.key = 'dipdup_metrics' + ) as cache_subquery +) as combined_data; diff --git a/src/dipdup/test.py b/src/dipdup/test.py index c6009bcd4..6f8b266db 100644 --- a/src/dipdup/test.py +++ b/src/dipdup/test.py @@ -121,7 +121,7 @@ async def run_hasura_container(postgres_ip: str) -> HasuraConfig: 'HASURA_GRAPHQL_DATABASE_URL': f'postgres://test:test@{postgres_ip}:5432', }, detach=True, - remove=True, + # remove=True, ) atexit.register(hasura_container.stop) hasura_container.reload() @@ -180,6 +180,7 @@ async def tmp_project( 'PYTHONPATH': 
str(tmp_package_path), 'DIPDUP_TEST': '1', 'DIPDUP_DEBUG': '1', + 'DIPDUP_NO_VERSION_CHECK': '1', } yield Path(tmp_package_path), env diff --git a/tests/configs/demo_tezos_etherlink.yaml b/tests/configs/demo_tezos_etherlink.yaml index f777dde7f..4f474dfe3 100644 --- a/tests/configs/demo_tezos_etherlink.yaml +++ b/tests/configs/demo_tezos_etherlink.yaml @@ -4,20 +4,20 @@ package: demo_tezos_etherlink datasources: tzkt: kind: tezos.tzkt - url: ${TZKT_URL:-https://api.nairobinet.tzkt.io} + url: ${TZKT_URL:-https://api.parisnet.tzkt.io} contracts: ticketer: kind: tezos - address: KT1PmYUomF3HDxsGWYQUCbLi2X8WvT7ZHv8o + address: KT1AAi4DCQiTUv5MYoXtdiFwUrPH3t3Yhkjo typename: ticketer ticket_helper: kind: tezos - address: KT1TZg9EwGHKbfWvsHGsqBjm3J5NhJBtHPKX + address: KT1FcXb4oFBWtUVbEa96Do4DfQZXn6878yu1 typename: ticket_helper rollup: kind: tezos - address: sr1QgYF6ARMSLcWyAX4wFDrWFaZTyy4twbqe + address: sr1GBHEgzZmpWH4URqshZEZFCxBpqzi6ahvL typename: rollup indexes: diff --git a/tests/test_demos.py b/tests/test_demos.py index 70bc7c1c7..b8159aaa5 100644 --- a/tests/test_demos.py +++ b/tests/test_demos.py @@ -93,9 +93,10 @@ async def assert_init(package: str) -> None: async def assert_run_dex() -> None: - import demo_tezos_dex.models from tortoise.transactions import in_transaction + import demo_tezos_dex.models + trades = await demo_tezos_dex.models.Trade.filter().count() positions = await demo_tezos_dex.models.Position.filter().count() async with in_transaction() as conn: @@ -121,7 +122,6 @@ async def assert_run_events() -> None: async def assert_run_factories() -> None: import demo_tezos_factories.models - from dipdup import models indexes = await models.Index.filter().count() @@ -223,9 +223,8 @@ async def assert_run_dao() -> None: ('demo_evm_events.yml:test_evm.yml', 'demo_evm_events', 'init', None), ('demo_evm_transactions.yml:test_evm.yml', 'demo_evm_transactions', 'run', assert_run_evm_transactions), ('demo_evm_transactions.yml:test_evm.yml', 
'demo_evm_transactions', 'init', None), - # FIXME: nairobinet is dead - # ('demo_tezos_etherlink.yml', 'demo_tezos_etherlink', 'run', None), - # ('demo_tezos_etherlink.yml', 'demo_tezos_etherlink', 'init', None), + ('demo_tezos_etherlink.yml', 'demo_tezos_etherlink', 'run', None), + ('demo_tezos_etherlink.yml', 'demo_tezos_etherlink', 'init', None), # NOTE: Indexes with `evm.node` as index datasource ('demo_evm_events_node.yml:test_evm.yml', 'demo_evm_events', 'run', assert_run_evm_events), ('demo_evm_transactions_node.yml:test_evm.yml', 'demo_evm_transactions', 'run', assert_run_evm_transactions), diff --git a/tests/test_index/test_tzkt_operations.py b/tests/test_index/test_tzkt_operations.py index b85584533..fd67d0780 100644 --- a/tests/test_index/test_tzkt_operations.py +++ b/tests/test_index/test_tzkt_operations.py @@ -176,7 +176,7 @@ async def test_realtime() -> None: operations=operations, ) - assert len(index._queue) == 4 + assert len(index.queue) == 4 assert await models.Holder.filter().count() == 0 # NOTE: We don't want index with `last_level` to be disabled diff --git a/tests/test_rollback.py b/tests/test_rollback.py index a0baece0c..c539e5902 100644 --- a/tests/test_rollback.py +++ b/tests/test_rollback.py @@ -1,10 +1,10 @@ from contextlib import AsyncExitStack from datetime import datetime -import demo_tezos_domains.models as domains_models -import demo_tezos_nft_marketplace.models as hen_models from tortoise.expressions import F +import demo_tezos_domains.models as domains_models +import demo_tezos_nft_marketplace.models as hen_models from dipdup.config import DipDupConfig from dipdup.context import HookContext from dipdup.models import Index