diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6f9d24ee7..1f34dd53f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,7 +22,7 @@ jobs: name: Test mpt_trie runs-on: ubuntu-latest timeout-minutes: 30 - if: "! contains(toJSON(github.event.commits.*.message), '[skip-ci]')" + if: ${{ ! contains(toJSON(github.event.commits.*.message), '[skip-ci]') }} steps: - name: Checkout sources uses: actions/checkout@v4 @@ -47,7 +47,12 @@ jobs: name: Test trace_decoder runs-on: zero-ci timeout-minutes: 30 - if: "! contains(toJSON(github.event.commits.*.message), '[skip-ci]')" + env: + RUST_LOG: info + CARGO_INCREMENTAL: 1 + RUST_BACKTRACE: 1 + + if: ${{ ! contains(toJSON(github.event.commits.*.message), '[skip-ci]') }} steps: - name: Checkout sources uses: actions/checkout@v4 @@ -60,18 +65,17 @@ jobs: with: cache-on-failure: true - - name: Test in trace_decoder subdirectory + - name: build # build separately so test logs are actually nice + run: cargo build --tests --manifest-path trace_decoder/Cargo.toml + + - name: test run: cargo test --release --manifest-path trace_decoder/Cargo.toml -- --nocapture - env: - RUST_LOG: info - CARGO_INCREMENTAL: 1 - RUST_BACKTRACE: 1 test_proof_gen: name: Test proof_gen runs-on: ubuntu-latest timeout-minutes: 30 - if: "! contains(toJSON(github.event.commits.*.message), '[skip-ci]')" + if: ${{ ! contains(toJSON(github.event.commits.*.message), '[skip-ci]') }} steps: - name: Checkout sources uses: actions/checkout@v4 @@ -96,7 +100,7 @@ jobs: name: Test evm_arithmetization runs-on: ubuntu-latest timeout-minutes: 30 - if: "! contains(toJSON(github.event.commits.*.message), '[skip-ci]')" + if: ${{ ! contains(toJSON(github.event.commits.*.message), '[skip-ci]') }} steps: - name: Checkout sources uses: actions/checkout@v4 @@ -121,7 +125,7 @@ jobs: name: Test zero_bin runs-on: ubuntu-latest timeout-minutes: 30 - if: "! contains(toJSON(github.event.commits.*.message), '[skip-ci]')" + if: ${{ ! contains(toJSON(github.event.commits.*.message), '[skip-ci]') }} steps: - name: Checkout sources uses: actions/checkout@v4 @@ -132,7 +136,7 @@ jobs: - name: Set up rust cache uses: Swatinem/rust-cache@v2 with: - cache-on-failure: true + cache-on-failure: true - name: Test in zero_bin subdirectory run: | @@ -154,7 +158,7 @@ jobs: name: Test zk_evm_proc_macro runs-on: ubuntu-latest timeout-minutes: 30 - if: "! contains(toJSON(github.event.commits.*.message), '[skip-ci]')" + if: ${{ ! contains(toJSON(github.event.commits.*.message), '[skip-ci]') }} steps: - name: Checkout sources uses: actions/checkout@v4 @@ -218,7 +222,7 @@ jobs: name: Rustdoc, Formatting and Clippy runs-on: ubuntu-latest timeout-minutes: 10 - if: "! contains(toJSON(github.event.commits.*.message), '[skip-ci]')" + if: ${{ ! 
contains(toJSON(github.event.commits.*.message), '[skip-ci]') }} steps: - name: Checkout sources uses: actions/checkout@v4 diff --git a/.gitignore b/.gitignore index 667ccfc9c..f035f7002 100644 --- a/.gitignore +++ b/.gitignore @@ -4,5 +4,5 @@ *.iml .idea/ .vscode +/**/*.ignoreme **/output.log - diff --git a/Cargo.lock b/Cargo.lock index dcd859ede..6e83cdd32 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,18 +4,18 @@ version = 3 [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" dependencies = [ "gimli", ] [[package]] -name = "adler" -version = "1.0.2" +name = "adler2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "aes" @@ -58,14 +58,15 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3eadd758805fe353ea8a520da4531efb9739382312c354d3e90b7a16353b0315" +checksum = "0f13f1940c81e269e84ddb58f3b611be9660fbbfe39d4338aa2984dc3df0c402" dependencies = [ "alloy-consensus", "alloy-core", "alloy-eips", "alloy-json-rpc", + "alloy-network", "alloy-provider", "alloy-rpc-client", "alloy-rpc-types", @@ -75,9 +76,9 @@ dependencies = [ [[package]] name = "alloy-chains" -version = "0.1.29" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb07629a5d0645d29f68d2fb6f4d0cf15c89ec0965be915f303967180929743f" +checksum = "2b4f201b0ac8f81315fbdc55269965a8ddadbc04ab47fa65a1a468f9a40f7a5f" dependencies = [ "num_enum", "strum", @@ -95,9 +96,9 @@ dependencies = [ [[package]] name = "alloy-consensus" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7198a527b4c4762cb88d54bcaeb0428f4298b72552c9c8ec4af614b4a4990c59" +checksum = "4177d135789e282e925092be8939d421b701c6d92c0a16679faa659d9166289d" dependencies = [ "alloy-eips", "alloy-primitives", @@ -109,9 +110,9 @@ dependencies = [ [[package]] name = "alloy-core" -version = "0.8.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e6dbb79f4e3285cc87f50c0d4be9a3a812643623b2e3558d425b41cbd795ceb" +checksum = "4f5aeeac2715738ff43076b65ca27bc0a2025ce0ee69f537c11c632027360bff" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -141,9 +142,9 @@ dependencies = [ [[package]] name = "alloy-eips" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "159eab0e4e15b88571f55673af37314f4b8f17630dc1b393c3d70f2128a1d494" +checksum = "499ee14d296a133d142efd215eb36bf96124829fe91cf8f5d4e5ccdd381eae00" dependencies = [ "alloy-eip2930", "alloy-eip7702", @@ -151,6 +152,7 @@ dependencies = [ "alloy-rlp", "alloy-serde", "c-kzg", + "derive_more", "once_cell", "serde", "sha2", @@ -158,9 +160,9 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7733446dd531f8eb877331fea02f6c40bdbb47444a17dc3464bf75319cc073a" +checksum = 
"4207166c79cfdf7f3bed24bbc84f5c7c5d4db1970f8c82e3fcc76257f16d2166" dependencies = [ "alloy-primitives", "alloy-sol-types", @@ -172,9 +174,9 @@ dependencies = [ [[package]] name = "alloy-network" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b80851d1697fc4fa2827998e3ee010a3d1fc59c7d25e87070840169fcf465832" +checksum = "dfbe2802d5b8c632f18d68c352073378f02a3407c1b6a4487194e7d21ab0f002" dependencies = [ "alloy-consensus", "alloy-eips", @@ -193,9 +195,9 @@ dependencies = [ [[package]] name = "alloy-network-primitives" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d76a2336889f3d0624b18213239d27f4f34eb476eb35bef22f6a8cc24e0c0078" +checksum = "396c07726030fa0f9dab5da8c71ccd69d5eb74a7fe1072b7ae453a67e4fe553e" dependencies = [ "alloy-primitives", "alloy-serde", @@ -204,9 +206,9 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "0.8.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a767e59c86900dd7c3ce3ecef04f3ace5ac9631ee150beb8b7d22f7fa3bbb2d7" +checksum = "ccb865df835f851b367ae439d6c82b117ded971628c8888b24fed411a290e38a" dependencies = [ "alloy-rlp", "bytes", @@ -226,9 +228,9 @@ dependencies = [ [[package]] name = "alloy-provider" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2d2a195caa6707f5ce13905794865765afc6d9ea92c3a56e3a973c168d703bc" +checksum = "1376948df782ffee83a54cac4b2aba14134edd997229a3db97da0a606586eb5c" dependencies = [ "alloy-chains", "alloy-consensus", @@ -245,7 +247,7 @@ dependencies = [ "async-stream", "async-trait", "auto_impl", - "dashmap 6.0.1", + "dashmap 6.1.0", "futures", "futures-utils-wasm", "lru", @@ -278,14 +280,14 @@ checksum = "4d0f2d905ebd295e7effec65e5f6868d153936130ae718352771de3e7d03c75c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] name = "alloy-rpc-client" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed31cdba2b23d71c555505b06674f8e7459496abfd7f4875d268434ef5a99ee6" +checksum = "02378418a429f8a14a0ad8ffaa15b2d25ff34914fc4a1e366513c6a3800e03b3" dependencies = [ "alloy-json-rpc", "alloy-transport", @@ -304,9 +306,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d758f65aa648491c6358335c578de45cd7de6fdf2877c3cef61f2c9bebea21" +checksum = "d9ae4c4fbd37d9996f501fbc7176405aab97ae3a5772789be06ef0e7c4dad6dd" dependencies = [ "alloy-rpc-types-eth", "alloy-rpc-types-trace", @@ -316,9 +318,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-eth" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ba05d6ee4db0d89113294a614137940f79abfc2c40a9a3bee2995660358776" +checksum = "15bb3506ab1cf415d4752778c93e102050399fb8de97b7da405a5bf3e31f5f3b" dependencies = [ "alloy-consensus", "alloy-eips", @@ -335,9 +337,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-trace" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd2af822ed58f2b6dd7cfccf88bf69f42c9a8cbf4663316227646a8a3e5a591f" +checksum = "16cca915e0aab3b2657b4f9efe02eb88e5483905fb6d244749652aae14e5f92e" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -349,9 
+351,9 @@ dependencies = [ [[package]] name = "alloy-serde" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfd260ede54f0b53761fdd04133acc10ae70427f66a69aa9590529bbd066cd58" +checksum = "ae417978015f573b4a8c02af17f88558fb22e3fccd12e8a910cf6a2ff331cfcb" dependencies = [ "alloy-primitives", "serde", @@ -360,9 +362,9 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5193ee6b370b89db154d7dc40c6a8e6ce11213865baaf2b418a9f2006be762" +checksum = "b750c9b61ac0646f8f4a61231c2732a337b2c829866fc9a191b96b7eedf80ffe" dependencies = [ "alloy-primitives", "async-trait", @@ -374,56 +376,56 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.8.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "183bcfc0f3291d9c41a3774172ee582fb2ce6eb6569085471d8f225de7bb86fc" +checksum = "e2dc5201ca0018afb7a3e0cd8bd15f7ca6aca924333b5f3bb87463b41d0c4ef2" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", - "proc-macro-error", + "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] name = "alloy-sol-macro-expander" -version = "0.8.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71c4d842beb7a6686d04125603bc57614d5ed78bf95e4753274db3db4ba95214" +checksum = "155f63dc6945885aa4532601800201fddfaa3b20901fda8e8c2570327242fe0e" dependencies = [ "alloy-sol-macro-input", "const-hex", "heck 0.5.0", "indexmap", - "proc-macro-error", + "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", "syn-solidity", "tiny-keccak", ] [[package]] name = "alloy-sol-macro-input" -version = "0.8.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1306e8d3c9e6e6ecf7a39ffaf7291e73a5f655a2defd366ee92c2efebcdf7fee" +checksum = "847700aa9cb59d3c7b290b2d05976cd8d76b64d73bb63116a9533132d995586b" dependencies = [ "const-hex", "dunce", "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", "syn-solidity", ] [[package]] name = "alloy-sol-types" -version = "0.8.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "577e262966e92112edbd15b1b2c0947cc434d6e8311df96d3329793fe8047da9" +checksum = "83665e5607725a7a1aab3cb0dea708f4a05e70776954ec7f0a9461439175c957" dependencies = [ "alloy-primitives", "alloy-sol-macro", @@ -432,9 +434,9 @@ dependencies = [ [[package]] name = "alloy-transport" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "454220c714857cf68af87d788d1f0638ad8766268b94f6a49fed96cbc2ab382c" +checksum = "2799749ca692ae145f54968778877afd7c95e788488f176cfdfcf2a8abeb2062" dependencies = [ "alloy-json-rpc", "base64", @@ -451,9 +453,9 @@ dependencies = [ [[package]] name = "alloy-transport-http" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "377f2353d7fea03a2dba6b9ffbb7d610402c040dd5700d1fae8b9ec2673eed9b" +checksum = "bc10c4dd932f66e0db6cc5735241e0c17a6a18564b430bbc1839f7db18587a93" dependencies = [ "alloy-json-rpc", "alloy-transport", @@ -569,9 +571,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "10f00e1f6e58a40e807377c75c6a7f97bf9044fab57816f2414e6f5f4499d7b8" dependencies = [ "backtrace", ] @@ -730,7 +732,7 @@ checksum = "965c2d33e53cb6b267e148a4cb0760bc01f4904c1cd4bb4002a085bb016d1490" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", "synstructure", ] @@ -742,7 +744,7 @@ checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -754,7 +756,7 @@ dependencies = [ "assert2-macros", "diff", "is-terminal", - "yansi", + "yansi 1.0.1", ] [[package]] @@ -766,7 +768,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version 0.4.1", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -783,9 +785,9 @@ dependencies = [ [[package]] name = "async-executor" -version = "1.13.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7ebdfa2ebdab6b1760375fa7d6f382b9f486eac35fc994625a00e89280bdbb7" +checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" dependencies = [ "async-task", "concurrent-queue", @@ -853,7 +855,7 @@ dependencies = [ "futures-lite 2.3.0", "parking", "polling 3.7.3", - "rustix 0.38.35", + "rustix 0.38.36", "slab", "tracing", "windows-sys 0.59.0", @@ -910,7 +912,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -921,13 +923,13 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-trait" -version = "0.1.81" +version = "0.1.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" +checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -953,7 +955,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -1033,17 +1035,17 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -1237,9 +1239,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.15" +version = "1.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57b6a275aa2903740dc87da01c62040406b8812552e97129a63ea8850a17c6e6" +checksum = "b62ac837cdb5cb22e10a256099b4fc502b1dfe560cb282963a974d7abd80e476" dependencies = [ "shlex", ] @@ -1289,9 +1291,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.16" +version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019" +checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac" dependencies = [ "clap_builder", "clap_derive", @@ -1299,9 +1301,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.15" +version = "4.5.17" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6" +checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73" dependencies = [ "anstream", "anstyle", @@ -1318,7 +1320,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -1413,12 +1415,6 @@ dependencies = [ "tiny-keccak", ] -[[package]] -name = "convert_case" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" - [[package]] name = "cookie-factory" version = "0.3.3" @@ -1427,9 +1423,12 @@ checksum = "9885fa71e26b8ab7855e2ec7cae6e9b380edff76cd052e07c683a0319d51b3a2" [[package]] name = "copyvec" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d540a4ee3e6ea3547a6b492c5e76a4b9086e6b41178061e03ca82fc385912b1f" +checksum = "8aba112395a3627b61476b950f4c015964642a1d32a44112ad97c4a781dab81f" +dependencies = [ + "quickcheck", +] [[package]] name = "core-foundation" @@ -1449,9 +1448,9 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51e852e6dc9a5bed1fae92dd2375037bf2b768725bf3be87811edee3249d09ad" +checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" dependencies = [ "libc", ] @@ -1613,7 +1612,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -1624,7 +1623,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -1642,9 +1641,9 @@ dependencies = [ [[package]] name = "dashmap" -version = "6.0.1" +version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "804c8821570c3f8b70230c2ba75ffa5c0f9a4189b9a432b6656c536712acae28" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" dependencies = [ "cfg-if", "crossbeam-utils", @@ -1695,7 +1694,7 @@ checksum = "8034092389675178f570469e6c3b0465d3d30b4505c294a6550db47f3c17ad18" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -1718,48 +1717,68 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "derive-quickcheck-arbitrary" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "697d85c38ac8f4dad3129d38d0d40060a98fd2557bfaf0bc8c071ecfce884ce5" +dependencies = [ + "proc-macro2", + "quote", + "structmeta", + "syn 2.0.77", +] + [[package]] name = "derive_builder" -version = "0.20.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0350b5cb0331628a5916d6c5c0b72e97393b8b6b03b47a9284f4e7f5a405ffd7" +checksum = "cd33f37ee6a119146a1781d3356a7c26028f83d779b2e04ecd45fdc75c76877b" dependencies = [ "derive_builder_macro", ] [[package]] name = "derive_builder_core" -version = "0.20.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d48cda787f839151732d396ac69e3473923d54312c070ee21e9effcaa8ca0b1d" +checksum = "7431fa049613920234f22c47fdc33e6cf3ee83067091ea4277a3f8c4587aae38" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] name = 
"derive_builder_macro" -version = "0.20.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "206868b8242f27cecce124c19fd88157fbd0dd334df2587f36417bafbc85097b" +checksum = "4abae7035bf79b9877b779505d8cf3749285b80c43941eda66604841889451dc" dependencies = [ "derive_builder_core", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] name = "derive_more" -version = "0.99.18" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ - "convert_case", "proc-macro2", "quote", - "rustc_version 0.4.1", - "syn 2.0.76", + "syn 2.0.77", + "unicode-xid", ] [[package]] @@ -1827,7 +1846,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -1908,7 +1927,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -1921,6 +1940,16 @@ dependencies = [ "regex", ] +[[package]] +name = "env_logger" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" +dependencies = [ + "log", + "regex", +] + [[package]] name = "env_logger" version = "0.10.2" @@ -2281,7 +2310,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -2358,9 +2387,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.29.0" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" [[package]] name = "glob" @@ -2647,9 +2676,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c" +checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" dependencies = [ "equivalent", "hashbrown", @@ -2696,9 +2725,9 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.9.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +checksum = "187674a687eed5fe42285b40c6291f9a01517d415fad1c3cbc6a9f778af7fcd4" [[package]] name = "is-terminal" @@ -2929,7 +2958,7 @@ checksum = "cb26336e6dc7cc76e7927d2c9e7e3bb376d7af65a6f56a0b16c47d18a9b1abc5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -3005,11 +3034,11 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +checksum = 
"e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" dependencies = [ - "adler", + "adler2", ] [[package]] @@ -3198,7 +3227,7 @@ checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -3221,9 +3250,9 @@ dependencies = [ [[package]] name = "object" -version = "0.36.3" +version = "0.36.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9" +checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" dependencies = [ "memchr", ] @@ -3272,7 +3301,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -3381,7 +3410,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af25dcb10b7c0ce99abee8694e2e79e4787d7f778b9339dc5a50ba6fc45e5cc9" dependencies = [ "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -3412,9 +3441,9 @@ dependencies = [ [[package]] name = "parking" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" @@ -3472,9 +3501,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.11" +version = "2.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95" +checksum = "9c73c26c01b8c87956cea613c907c9d6ecffd8d18a2a5908e5de0adfaa185cea" dependencies = [ "memchr", "thiserror", @@ -3483,9 +3512,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.7.11" +version = "2.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a548d2beca6773b1c244554d36fcf8548a8a58e74156968211567250e48e49a" +checksum = "664d22978e2815783adbdd2c588b455b1bd625299ce36b2a99881ac9627e6d8d" dependencies = [ "pest", "pest_generator", @@ -3493,22 +3522,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.11" +version = "2.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c93a82e8d145725dcbaf44e5ea887c8a869efdcc28706df2d08c69e17077183" +checksum = "a2d5487022d5d33f4c30d91c22afa240ce2a644e87fe08caad974d4eab6badbe" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] name = "pest_meta" -version = "2.7.11" +version = "2.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a941429fea7e08bedec25e4f6785b6ffaacc6b755da98df5ef3e7dcf4a124c4f" +checksum = "0091754bbd0ea592c4deb3a122ce8ecbb0753b738aa82bc055fcc2eccc8d8174" dependencies = [ "once_cell", "pest", @@ -3532,7 +3561,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -3679,9 +3708,9 @@ source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=dc77c77f2b06500e1 [[package]] name = "plotters" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15b6eccb8484002195a3e44fe65a4ce8e93a625797a063735536fd59cb01cf3" +checksum = 
"5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" dependencies = [ "num-traits", "plotters-backend", @@ -3692,15 +3721,15 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "414cec62c6634ae900ea1c56128dfe87cf63e7caece0852ec76aba307cebadb7" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" [[package]] name = "plotters-svg" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81b30686a7d9c3e010b84284bdd26a29f2138574f52f5eb6f794fc0ad924e705" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" dependencies = [ "plotters-backend", ] @@ -3731,7 +3760,7 @@ dependencies = [ "concurrent-queue", "hermit-abi 0.4.0", "pin-project-lite", - "rustix 0.38.35", + "rustix 0.38.36", "tracing", "windows-sys 0.59.0", ] @@ -3764,6 +3793,16 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "pretty_assertions" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +dependencies = [ + "diff", + "yansi 0.5.1", +] + [[package]] name = "pretty_env_logger" version = "0.5.0" @@ -3830,6 +3869,28 @@ dependencies = [ "version_check", ] +[[package]] +name = "proc-macro-error-attr2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "proc-macro-error2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" +dependencies = [ + "proc-macro-error-attr2", + "proc-macro2", + "quote", + "syn 2.0.77", +] + [[package]] name = "proc-macro2" version = "1.0.86" @@ -3901,6 +3962,17 @@ version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" +[[package]] +name = "quickcheck" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" +dependencies = [ + "env_logger 0.8.4", + "log", + "rand", +] + [[package]] name = "quote" version = "1.0.37" @@ -4158,6 +4230,7 @@ name = "rpc" version = "0.1.0" dependencies = [ "alloy", + "alloy-compat", "anyhow", "cargo_metadata", "clap", @@ -4266,9 +4339,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.35" +version = "0.38.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a85d50532239da68e9addb745ba38ff4612a242c1c7ceea689c4bc7c2f43c36f" +checksum = "3f55e80d50763938498dd5ebb18647174e0c76dc38c5505294bb224624f30f36" dependencies = [ "bitflags 2.6.0", "errno", @@ -4388,11 +4461,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +checksum = "e9aaafd5a2b6e3d657ff009d82fbd630b6bd54dd4eb06f21693925cdf80f9b8b" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -4478,9 +4551,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.209" +version = "1.0.210" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" dependencies = [ "serde_derive", ] @@ -4496,20 +4569,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.209" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] name = "serde_json" -version = "1.0.127" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad" +checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ "itoa", "memchr", @@ -4719,6 +4792,29 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "structmeta" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ad9e09554f0456d67a69c1584c9798ba733a5b50349a6c0d0948710523922d" +dependencies = [ + "proc-macro2", + "quote", + "structmeta-derive", + "syn 2.0.77", +] + +[[package]] +name = "structmeta-derive" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a60bcaff7397072dca0017d1db428e30d5002e00b6847703e2e42005c95fbe00" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", +] + [[package]] name = "strum" version = "0.26.3" @@ -4738,7 +4834,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -4760,9 +4856,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.76" +version = "2.0.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578e081a14e0cefc3279b0472138c513f37b41a08d5a3cca9b6e4e8ceb6cd525" +checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" dependencies = [ "proc-macro2", "quote", @@ -4771,14 +4867,14 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "284c41c2919303438fcf8dede4036fd1e82d4fc0fbb2b279bd2a1442c909ca92" +checksum = "f1e1355d44af21638c8e05d45097db6cb5ec2aa3e970c51cb2901605cf3344fa" dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -4804,7 +4900,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -4834,7 +4930,7 @@ dependencies = [ "cfg-if", "fastrand 2.1.1", "once_cell", - "rustix 0.38.35", + "rustix 0.38.36", "windows-sys 0.59.0", ] @@ -4864,7 +4960,7 @@ checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -4955,9 +5051,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.39.3" +version = "1.40.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9babc99b9923bfa4804bd74722ff02c0381021eafa4db9949217e3be8e84fff5" +checksum = 
"e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" dependencies = [ "backtrace", "bytes", @@ -4990,7 +5086,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -5019,9 +5115,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" dependencies = [ "futures-core", "pin-project-lite", @@ -5031,9 +5127,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" dependencies = [ "bytes", "futures-core", @@ -5118,6 +5214,7 @@ dependencies = [ "camino", "ciborium", "ciborium-io", + "clap", "copyvec", "criterion", "either", @@ -5135,8 +5232,10 @@ dependencies = [ "nunny", "plonky2", "plonky2_maybe_rayon 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pretty_assertions", "pretty_env_logger", "prover", + "quickcheck", "rlp", "serde", "serde_json", @@ -5170,7 +5269,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -5240,11 +5339,13 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "u4" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "068a8b5939889cb4b24fd99201f2c7bc142a25986ae0eb903cb05537484192d5" +checksum = "390884f06a6f4eee34a2082a3eb17844cf3605a69f033f79bf0e6f460279112c" dependencies = [ "const-default", + "derive-quickcheck-arbitrary", + "quickcheck", "serde", ] @@ -5293,6 +5394,12 @@ dependencies = [ "tinyvec", ] +[[package]] +name = "unicode-xid" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "229730647fbc343e3a80e463c1db7f78f3855d3f3739bee0dda773c9a037c90a" + [[package]] name = "unroll" version = "0.1.5" @@ -5460,7 +5567,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", "wasm-bindgen-shared", ] @@ -5494,7 +5601,7 @@ checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5797,6 +5904,12 @@ dependencies = [ "time", ] +[[package]] +name = "yansi" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" + [[package]] name = "yansi" version = "1.0.1" @@ -5846,7 +5959,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -5866,7 +5979,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", ] [[package]] @@ -5885,6 +5998,6 @@ version = "0.1.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.76", + "syn 2.0.77", "trybuild", ] diff --git 
a/evm_arithmetization/src/cpu/kernel/constants/mod.rs b/evm_arithmetization/src/cpu/kernel/constants/mod.rs index 853e5500c..348c5b2c6 100644 --- a/evm_arithmetization/src/cpu/kernel/constants/mod.rs +++ b/evm_arithmetization/src/cpu/kernel/constants/mod.rs @@ -18,6 +18,13 @@ pub(crate) mod journal_entry; pub(crate) mod trie_type; pub(crate) mod txn_fields; +/// A named constant. +/// Prefer this over `(name, value)` tuples. +pub struct Named<'a, T> { + pub name: &'a str, + pub value: T, +} + /// Constants that are accessible to our kernel assembly code. pub(crate) fn evm_constants() -> HashMap<String, U256> { let mut c = HashMap::new(); @@ -71,8 +78,8 @@ pub(crate) fn evm_constants() -> HashMap<String, U256> { U256::from_big_endian(&cancun_constants::BEACON_ROOTS_CONTRACT_STATE_KEY.1), ); c.insert( - cancun_constants::HISTORY_BUFFER_LENGTH.0.into(), - cancun_constants::HISTORY_BUFFER_LENGTH.1.into(), + cancun_constants::HISTORY_BUFFER_LENGTH.name.into(), + cancun_constants::HISTORY_BUFFER_LENGTH.value, ); c.insert( @@ -424,7 +431,10 @@ pub mod cancun_constants { // Beacon constants /////////////////// - pub const HISTORY_BUFFER_LENGTH: (&str, u64) = ("HISTORY_BUFFER_LENGTH", 8191); + pub const HISTORY_BUFFER_LENGTH: Named<U256> = Named { + name: "HISTORY_BUFFER_LENGTH", + value: U256([8191, 0, 0, 0]), + }; pub const BEACON_ROOTS_CONTRACT_ADDRESS: Address = H160(hex!("000F3df6D732807Ef1319fB7B8bB8522d0Beac02")); diff --git a/evm_arithmetization/src/testing_utils.rs b/evm_arithmetization/src/testing_utils.rs index 4152da5b6..d8556916b 100644 --- a/evm_arithmetization/src/testing_utils.rs +++ b/evm_arithmetization/src/testing_utils.rs @@ -64,8 +64,8 @@ pub fn update_beacon_roots_account_storage( timestamp: U256, parent_root: H256, ) -> anyhow::Result<()> { - let timestamp_idx = timestamp % HISTORY_BUFFER_LENGTH.1; - let root_idx = timestamp_idx + HISTORY_BUFFER_LENGTH.1; + let timestamp_idx = timestamp % HISTORY_BUFFER_LENGTH.value; + let root_idx = timestamp_idx + HISTORY_BUFFER_LENGTH.value; insert_storage(storage_trie, timestamp_idx, timestamp)?; insert_storage(storage_trie, root_idx, h2u(parent_root)) diff --git a/trace_decoder/Cargo.toml b/trace_decoder/Cargo.toml index 4febd85f2..b56d229e6 100644 --- a/trace_decoder/Cargo.toml +++ b/trace_decoder/Cargo.toml @@ -10,19 +10,21 @@ homepage.workspace = true keywords.workspace = true [dependencies] -anyhow.workspace = true +alloy = { workspace = true } +alloy-compat = "0.1.0" +anyhow = { workspace = true } bitflags = { workspace = true } bitvec = { workspace = true } bytes = { workspace = true } ciborium = { workspace = true } ciborium-io = { workspace = true } -copyvec = "0.2.0" +copyvec = { version = "0.2.0", features = ["quickcheck"] } either = { workspace = true } enum-as-inner = { workspace = true } ethereum-types = { workspace = true } hex = { workspace = true } hex-literal = { workspace = true } -itertools.workspace = true +itertools = { workspace = true } keccak-hash = { workspace = true } log = { workspace = true } nunny = { workspace = true, features = ["serde"] } @@ -32,7 +34,7 @@ serde = { workspace = true } stackstack = "0.3.0" strum = { version = "0.26.3", features = ["derive"] } thiserror = { workspace = true } -u4 = { workspace = true } +u4 = { workspace = true, features = ["quickcheck"] } winnow = { workspace = true } # Local dependencies @@ -46,26 +48,23 @@ alloy = { workspace = true } alloy-compat = "0.1.0" assert2 = "0.3.15" camino = "1.1.9" +clap = { workspace = true } criterion = { workspace = true } glob = "0.3.1" libtest-mimic = "0.7.3"
plonky2_maybe_rayon = { workspace = true } +pretty_assertions = "1.4.0" pretty_env_logger = { workspace = true } prover = { workspace = true } +quickcheck = "1.0.3" serde_json = { workspace = true } serde_path_to_error = { workspace = true } [features] default = ["eth_mainnet"] -eth_mainnet = [ - "evm_arithmetization/eth_mainnet", - "prover/eth_mainnet", -] -cdk_erigon = [ - "evm_arithmetization/cdk_erigon", - "prover/cdk_erigon", -] +eth_mainnet = ["evm_arithmetization/eth_mainnet", "prover/eth_mainnet"] +cdk_erigon = ["evm_arithmetization/cdk_erigon", "prover/cdk_erigon"] [[bench]] name = "block_processing" harness = false @@ -78,3 +77,7 @@ harness = false [[test]] name = "simulate-execution" harness = false + +[[test]] +name = "check-subsets" +harness = false diff --git a/trace_decoder/src/core.rs b/trace_decoder/src/core.rs new file mode 100644 index 000000000..666ea5caf --- /dev/null +++ b/trace_decoder/src/core.rs @@ -0,0 +1,644 @@ +use std::{ + cmp, + collections::{BTreeMap, BTreeSet, HashMap}, + mem, +}; + +use alloy::primitives::address; +use alloy_compat::Compat as _; +use anyhow::{anyhow, bail, ensure, Context as _}; +use ethereum_types::{Address, U256}; +use evm_arithmetization::{ + generation::{mpt::AccountRlp, TrieInputs}, + proof::TrieRoots, + testing_utils::{BEACON_ROOTS_CONTRACT_ADDRESS, HISTORY_BUFFER_LENGTH}, + GenerationInputs, +}; +use itertools::Itertools as _; +use keccak_hash::H256; +use mpt_trie::partial_trie::PartialTrie as _; +use nunny::NonEmpty; + +use crate::{ + typed_mpt::{ReceiptTrie, StateMpt, StateTrie, StorageTrie, TransactionTrie, TrieKey}, + BlockLevelData, BlockTrace, BlockTraceTriePreImages, CombinedPreImages, ContractCodeUsage, + OtherBlockData, SeparateStorageTriesPreImage, SeparateTriePreImage, SeparateTriePreImages, + TxnInfo, TxnMeta, TxnTrace, +}; + +/// TODO(0xaatif): doc +pub fn entrypoint( + trace: BlockTrace, + other: OtherBlockData, + batch_size_hint: usize, + use_burn_addr: bool, +) -> anyhow::Result<Vec<GenerationInputs>> { + ensure!(batch_size_hint != 0); + + let BlockTrace { + trie_pre_images, + code_db, + txn_info, + } = trace; + let (state, storage, mut code) = start(trie_pre_images)?; + code.extend(code_db); + + let OtherBlockData { + b_data: + BlockLevelData { + b_meta, + b_hashes, + mut withdrawals, + }, + checkpoint_state_trie_root, + } = other; + + // TODO(0xaatif): docs for the RPC field say this is gwei already: + // https://docs.rs/alloy/0.3.1/alloy/eips/eip4895/struct.Withdrawal.html#structfield.amount + // in any case, this shouldn't be our problem.
+ for (_, amt) in &mut withdrawals { + *amt = eth_to_gwei(*amt) + } + + let batches = middle( + state, + storage, + batch(txn_info, batch_size_hint), + &mut code, + b_meta.block_timestamp, + b_meta.parent_beacon_block_root, + withdrawals, + )?; + + let mut running_gas_used = 0; + Ok(batches + .into_iter() + .map( + |Batch { + first_txn_ix, + gas_used, + contract_code, + byte_code, + before: + IntraBlockTries { + state, + storage, + transaction, + receipt, + }, + after, + withdrawals, + }| GenerationInputs { + txn_number_before: first_txn_ix.into(), + gas_used_before: running_gas_used.into(), + gas_used_after: { + running_gas_used += gas_used; + running_gas_used.into() + }, + signed_txns: byte_code.into_iter().map(Into::into).collect(), + withdrawals, + ger_data: None, + tries: TrieInputs { + state_trie: state.into(), + transactions_trie: transaction.into(), + receipts_trie: receipt.into(), + storage_tries: storage.into_iter().map(|(k, v)| (k, v.into())).collect(), + }, + trie_roots_after: after, + checkpoint_state_trie_root, + contract_code: contract_code + .into_iter() + .map(|it| (keccak_hash::keccak(&it), it)) + .collect(), + block_metadata: b_meta.clone(), + block_hashes: b_hashes.clone(), + burn_addr: use_burn_addr.then_some(Address::zero()), + }, + ) + .collect()) +} + +/// The user has either provided us with a [`serde`]-ed +/// [`HashedPartialTrie`](mpt_trie::partial_trie::HashedPartialTrie), +/// or a [`wire`](crate::wire)-encoded representation of one. +/// +/// Turn either of those into our [`typed_mpt`](crate::typed_mpt) +/// representations. +fn start( + pre_images: BlockTraceTriePreImages, +) -> anyhow::Result<(StateMpt, BTreeMap<H256, StorageTrie>, Hash2Code)> { + Ok(match pre_images { + // TODO(0xaatif): refactor our convoluted input types + BlockTraceTriePreImages::Separate(SeparateTriePreImages { + state: SeparateTriePreImage::Direct(state), + storage: SeparateStorageTriesPreImage::MultipleTries(storage), + }) => { + let state = state.items().try_fold( + StateMpt::default(), + |mut acc, (nibbles, hash_or_val)| { + let path = TrieKey::from_nibbles(nibbles); + match hash_or_val { + mpt_trie::trie_ops::ValOrHash::Val(bytes) => { + #[expect(deprecated)] // this is MPT specific + acc.insert_by_hashed_address( + path.into_hash() + .context("invalid path length in direct state trie")?, + rlp::decode(&bytes) + .context("invalid AccountRlp in direct state trie")?, + )?; + } + mpt_trie::trie_ops::ValOrHash::Hash(h) => { + acc.insert_hash_by_key(path, h)?; + } + }; + anyhow::Ok(acc) + }, + )?; + let storage = storage + .into_iter() + .map(|(k, SeparateTriePreImage::Direct(v))| { + v.items() + .try_fold(StorageTrie::default(), |mut acc, (nibbles, hash_or_val)| { + let path = TrieKey::from_nibbles(nibbles); + match hash_or_val { + mpt_trie::trie_ops::ValOrHash::Val(value) => { + acc.insert(path, value)?; + } + mpt_trie::trie_ops::ValOrHash::Hash(h) => { + acc.insert_hash(path, h)?; + } + }; + anyhow::Ok(acc) + }) + .map(|v| (k, v)) + }) + .collect::<Result<_, _>>()?; + (state, storage, Hash2Code::new()) + } + BlockTraceTriePreImages::Combined(CombinedPreImages { compact }) => { + let instructions = crate::wire::parse(&compact) + .context("couldn't parse instructions from binary format")?; + let crate::type1::Frontend { + state, + storage, + code, + } = crate::type1::frontend(instructions)?; + (state, storage, code.into_iter().map(Into::into).collect()) + } + }) +} + +/// Break `txns` into batches of length `batch_size_hint`, prioritising creating +/// at least two batches.
+/// +/// [`None`] represents a dummy transaction that should not increment the +/// transaction index. +fn batch(txns: Vec<TxnInfo>, batch_size_hint: usize) -> Vec<Vec<Option<TxnInfo>>> { + let hint = cmp::max(batch_size_hint, 1); + let mut txns = txns.into_iter().map(Some).collect::<Vec<_>>(); + let n_batches = txns.iter().chunks(hint).into_iter().count(); + match (txns.len(), n_batches) { + // enough + (_, 2..) => txns + .into_iter() + .chunks(hint) + .into_iter() + .map(FromIterator::from_iter) + .collect(), + // not enough batches at `hint`, but enough real transactions + (2.., ..2) => { + let second = txns.split_off(txns.len() / 2); + vec![txns, second] + } + // add padding + (0 | 1, _) => txns + .into_iter() + .pad_using(2, |_ix| None) + .map(|it| vec![it]) + .collect(), + } +} + +#[test] +fn test_batch() { + #[track_caller] + fn do_test(n: usize, hint: usize, exp: impl IntoIterator<Item = usize>) { + itertools::assert_equal( + exp, + batch(vec![TxnInfo::default(); n], hint) + .iter() + .map(Vec::len), + ) + } + + do_test(0, 0, [1, 1]); // pad2 + do_test(1, 0, [1, 1]); // pad1 + do_test(2, 0, [1, 1]); // exact + do_test(3, 0, [1, 1, 1]); + do_test(3, 1, [1, 1, 1]); + do_test(3, 2, [2, 1]); // leftover after hint + do_test(3, 3, [1, 2]); // big hint +} + +#[derive(Debug)] +struct Batch<StateTrieT> { + pub first_txn_ix: usize, + pub gas_used: u64, + /// See [`GenerationInputs::contract_code`]. + pub contract_code: BTreeSet<Vec<u8>>, + /// For each transaction in batch, in order. + pub byte_code: Vec<NonEmpty<Vec<u8>>>, + + pub before: IntraBlockTries<StateTrieT>, + pub after: TrieRoots, + + /// Empty for all but the final batch + pub withdrawals: Vec<(Address, U256)>, +} + +/// [`evm_arithmetization::generation::TrieInputs`], +/// generic over state trie representation. +#[derive(Debug)] +struct IntraBlockTries<StateTrieT> { + pub state: StateTrieT, + pub storage: BTreeMap<H256, StorageTrie>, + pub transaction: TransactionTrie, + pub receipt: ReceiptTrie, +} + +/// Does the main work mentioned in the [module documentation](mod@self). +fn middle<StateTrieT: StateTrie + Clone>( + // state at the beginning of the block + mut state_trie: StateTrieT, + // storage at the beginning of the block + mut storage_tries: BTreeMap<H256, StorageTrie>, + // None represents a dummy transaction that should not increment the transaction index + // batches SHOULD all be non-empty + batches: Vec<Vec<Option<TxnInfo>>>, + code: &mut Hash2Code, + block_timestamp: U256, + parent_beacon_block_root: H256, + // added to final batch + mut withdrawals: Vec<(Address, U256)>, +) -> anyhow::Result<Vec<Batch<StateTrieT>>> { + // Initialise the storage tries. + for (haddr, acct) in state_trie.iter() { + let storage = storage_tries.entry(haddr).or_insert({ + let mut it = StorageTrie::default(); + it.insert_hash(TrieKey::default(), acct.storage_root) + .expect("empty trie insert cannot fail"); + it + }); + ensure!( + storage.root() == acct.storage_root, + "inconsistent initial storage for hashed address {haddr:x}" + ) + } + + // These are the per-block tries.
+ let mut transaction_trie = TransactionTrie::new(); + let mut receipt_trie = ReceiptTrie::new(); + + let mut out = vec![]; + + let mut txn_ix = 0; // incremented for non-dummy transactions + let mut loop_ix = 0; // always incremented + let loop_len = batches.iter().flatten().count(); + for batch in batches { + let batch_first_txn_ix = txn_ix; // GOTCHA: if there are no transactions in this batch + let mut batch_gas_used = 0; + let mut batch_byte_code = vec![]; + let mut batch_contract_code = BTreeSet::from([vec![]]); // always include empty code + + let mut before = IntraBlockTries { + state: state_trie.clone(), + transaction: transaction_trie.clone(), + receipt: receipt_trie.clone(), + storage: storage_tries.clone(), + }; + + // We want to trim the TrieInputs above, + // but won't know the bounds until after the loop below, + // so store that information here. + let mut storage_masks = BTreeMap::<_, BTreeSet<TrieKey>>::new(); + let mut state_mask = BTreeSet::new(); + + if txn_ix == 0 { + do_beacon_hook( + block_timestamp, + &mut storage_tries, + &mut storage_masks, + parent_beacon_block_root, + &mut state_mask, + &mut state_trie, + )?; + } + + for txn in batch { + let do_increment_txn_ix = txn.is_some(); + let TxnInfo { + traces, + meta: + TxnMeta { + byte_code, + new_receipt_trie_node_byte, + gas_used: txn_gas_used, + }, + } = txn.unwrap_or_default(); + + if let Ok(nonempty) = nunny::Vec::new(byte_code) { + batch_byte_code.push(nonempty.clone()); + transaction_trie.insert(txn_ix, nonempty.into())?; + receipt_trie.insert( + txn_ix, + map_receipt_bytes(new_receipt_trie_node_byte.clone())?, + )?; + } + + batch_gas_used += txn_gas_used; + + for ( + addr, + just_access, + TxnTrace { + balance, + nonce, + storage_read, + storage_written, + code_usage, + self_destructed, + }, + ) in traces + .into_iter() + .map(|(addr, trc)| (addr, trc == TxnTrace::default(), trc)) + { + let (_, _, receipt) = evm_arithmetization::generation::mpt::decode_receipt( + &map_receipt_bytes(new_receipt_trie_node_byte.clone())?, + ) + .map_err(|e| anyhow!("{e:?}")) + .context("couldn't decode receipt")?; + + let (mut acct, born) = state_trie + .get_by_address(addr) + .map(|acct| (acct, false)) + .unwrap_or((AccountRlp::default(), true)); + + if born || just_access { + state_trie + .clone() + .insert_by_address(addr, acct) + .context(format!( + "couldn't reach state of {} address {addr:x}", + match born { + true => "created", + false => "accessed", + } + ))?; + } + + let do_writes = !just_access + && match born { + // if txn failed, don't commit changes to trie + true => receipt.status, + false => true, + }; + + let trim_storage = storage_masks.entry(addr).or_default(); + + trim_storage.extend( + storage_written + .keys() + .chain(&storage_read) + .map(|it| TrieKey::from_hash(keccak_hash::keccak(it))), + ); + + if do_writes { + acct.balance = balance.unwrap_or(acct.balance); + acct.nonce = nonce.unwrap_or(acct.nonce); + acct.code_hash = code_usage + .map(|it| match it { + ContractCodeUsage::Read(hash) => { + batch_contract_code.insert(code.get(hash)?); + anyhow::Ok(hash) + } + ContractCodeUsage::Write(bytes) => { + code.insert(bytes.clone()); + let hash = keccak_hash::keccak(&bytes); + batch_contract_code.insert(bytes); + Ok(hash) + } + }) + .transpose()?
+ .unwrap_or(acct.code_hash); + + if !storage_written.is_empty() { + let storage = match born { + true => storage_tries.entry(keccak_hash::keccak(addr)).or_default(), + false => storage_tries + .get_mut(&keccak_hash::keccak(addr)) + .context(format!("missing storage trie for address {addr:x}"))?, + }; + + for (k, v) in storage_written { + let slot = TrieKey::from_hash(keccak_hash::keccak(k)); + match v.is_zero() { + // this is actually a delete + true => trim_storage.extend(storage.reporting_remove(slot)?), + false => { + storage.insert(slot, rlp::encode(&v).to_vec())?; + } + } + } + acct.storage_root = storage.root(); + } + + state_trie.insert_by_address(addr, acct)?; + } + + if self_destructed { + storage_tries.remove(&keccak_hash::keccak(addr)); + state_mask.extend(state_trie.reporting_remove(addr)?) + } + + let precompiled_addresses = address!("0000000000000000000000000000000000000001") + ..address!("000000000000000000000000000000000000000a"); + + if !precompiled_addresses.contains(&addr.compat()) { + // TODO(0xaatif): this looks like an optimization, + // but if it's omitted, the tests fail... + if std::env::var_os("SKIP_MASK_PRECOMPILED").is_some_and(|it| it == "true") { + } else { + state_mask.insert(TrieKey::from_address(addr)); + } + } + } + + if do_increment_txn_ix { + txn_ix += 1; + } + loop_ix += 1; + } // txn in batch + + out.push(Batch { + first_txn_ix: batch_first_txn_ix, + gas_used: batch_gas_used, + contract_code: batch_contract_code, + byte_code: batch_byte_code, + withdrawals: match loop_ix == loop_len { + true => { + for (addr, amt) in &withdrawals { + state_mask.insert(TrieKey::from_address(*addr)); + let mut acct = state_trie + .get_by_address(*addr) + .context("missing address for withdrawal")?; + acct.balance += *amt; + state_trie + .insert_by_address(*addr, acct) + // TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/275 + // Add an entry API + .expect("insert must succeed with the same key as a successful `get`"); + } + mem::take(&mut withdrawals) + } + false => vec![], + }, + before: { + before.state.mask(state_mask)?; + before.receipt.mask(batch_first_txn_ix..txn_ix)?; + before.transaction.mask(batch_first_txn_ix..txn_ix)?; + + let keep = storage_masks + .keys() + .map(keccak_hash::keccak) + .collect::<BTreeSet<_>>(); + before.storage.retain(|haddr, _| keep.contains(haddr)); + + for (addr, mask) in storage_masks { + if let Some(it) = before.storage.get_mut(&keccak_hash::keccak(addr)) { + it.mask(mask)? + } // else self_destructed + } + before + }, + after: TrieRoots { + state_root: state_trie.root(), + transactions_root: transaction_trie.root(), + receipts_root: receipt_trie.root(), + }, + }); + } // batch in batches + + Ok(out) +} + +/// Updates the storage of the beacon block root contract, +/// according to <https://eips.ethereum.org/EIPS/eip-4788> +/// +/// This is cancun-specific, and runs at the start of the block, +/// before any transactions (as per the EIP).
+fn do_beacon_hook<StateTrieT: StateTrie>( + block_timestamp: U256, + storage: &mut BTreeMap<H256, StorageTrie>, + trim_storage: &mut BTreeMap<Address, BTreeSet<TrieKey>>, + parent_beacon_block_root: H256, + trim_state: &mut BTreeSet<TrieKey>, + state_trie: &mut StateTrieT, +) -> anyhow::Result<()> { + let history_timestamp = block_timestamp % HISTORY_BUFFER_LENGTH.value; + let history_timestamp_next = history_timestamp + HISTORY_BUFFER_LENGTH.value; + let beacon_storage = storage + .get_mut(&keccak_hash::keccak(BEACON_ROOTS_CONTRACT_ADDRESS)) + .context("missing beacon contract storage trie")?; + let beacon_trim = trim_storage + .entry(BEACON_ROOTS_CONTRACT_ADDRESS) + .or_default(); + for (ix, u) in [ + (history_timestamp, block_timestamp), + ( + history_timestamp_next, + U256::from_big_endian(parent_beacon_block_root.as_bytes()), + ), + ] { + let mut h = [0; 32]; + ix.to_big_endian(&mut h); + let slot = TrieKey::from_hash(keccak_hash::keccak(H256::from_slice(&h))); + beacon_trim.insert(slot); + + match u.is_zero() { + true => beacon_trim.extend(beacon_storage.reporting_remove(slot)?), + false => { + beacon_storage.insert(slot, alloy::rlp::encode(u.compat()))?; + beacon_trim.insert(slot); + } + } + } + trim_state.insert(TrieKey::from_address(BEACON_ROOTS_CONTRACT_ADDRESS)); + let mut beacon_acct = state_trie + .get_by_address(BEACON_ROOTS_CONTRACT_ADDRESS) + .context("missing beacon contract address")?; + beacon_acct.storage_root = beacon_storage.root(); + state_trie + .insert_by_address(BEACON_ROOTS_CONTRACT_ADDRESS, beacon_acct) + // TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/275 + // Add an entry API + .expect("insert must succeed with the same key as a successful `get`"); + Ok(()) +} + +fn eth_to_gwei(eth: U256) -> U256 { + // 1 ether = 10^9 gwei. + eth * U256::from(10).pow(9.into()) +} + +fn map_receipt_bytes(bytes: Vec<u8>) -> anyhow::Result<Vec<u8>> { + match rlp::decode::<evm_arithmetization::generation::mpt::LegacyReceiptRlp>(&bytes) { + Ok(_) => Ok(bytes), + Err(_) => { + rlp::decode(&bytes).context("couldn't decode receipt as a legacy receipt or raw bytes") + } + } +} + +/// Code hash mappings that we have constructed from parsing the block +/// trace. +/// If there are any txns that create contracts, then they will also +/// get added here as we process the deltas. +struct Hash2Code { + /// Key must always be [`hash`] of value.
+ inner: HashMap>, +} + +impl Hash2Code { + pub fn new() -> Self { + let mut this = Self { + inner: HashMap::new(), + }; + this.insert(vec![]); + this + } + pub fn get(&mut self, hash: H256) -> anyhow::Result> { + match self.inner.get(&hash) { + Some(code) => Ok(code.clone()), + None => bail!("no code for hash {}", hash), + } + } + pub fn insert(&mut self, code: Vec) { + self.inner.insert(keccak_hash::keccak(&code), code); + } +} + +impl Extend> for Hash2Code { + fn extend>>(&mut self, iter: II) { + for it in iter { + self.insert(it) + } + } +} + +impl FromIterator> for Hash2Code { + fn from_iter>>(iter: II) -> Self { + let mut this = Self::new(); + this.extend(iter); + this + } +} diff --git a/trace_decoder/src/decoding.rs b/trace_decoder/src/decoding.rs deleted file mode 100644 index b0f909374..000000000 --- a/trace_decoder/src/decoding.rs +++ /dev/null @@ -1,710 +0,0 @@ -use std::{cmp::min, collections::HashMap, ops::Range}; - -use anyhow::{anyhow, Context as _}; -use ethereum_types::H160; -use ethereum_types::{Address, BigEndianHash, H256, U256, U512}; -use evm_arithmetization::{ - generation::{ - mpt::{decode_receipt, AccountRlp}, - GenerationInputs, TrieInputs, - }, - proof::{BlockMetadata, ExtraBlockData, TrieRoots}, - testing_utils::{ - BEACON_ROOTS_CONTRACT_ADDRESS, BEACON_ROOTS_CONTRACT_ADDRESS_HASHED, HISTORY_BUFFER_LENGTH, - }, -}; -use mpt_trie::{ - nibbles::Nibbles, - partial_trie::{HashedPartialTrie, PartialTrie as _}, - special_query::path_for_query, - trie_ops::TrieOpError, - utils::{IntoTrieKey as _, TriePath}, -}; - -use crate::{ - hash, - processed_block_trace::{ - NodesUsedByTxnBatch, ProcessedBlockTrace, ProcessedTxnBatchInfo, StateWrite, TxnMetaState, - }, - typed_mpt::{ReceiptTrie, StateTrie, StorageTrie, TransactionTrie, TrieKey}, - OtherBlockData, PartialTriePreImages, TryIntoExt as TryIntoBounds, -}; - -/// The current state of all tries as we process txn deltas. These are mutated -/// after every txn we process in the trace. -#[derive(Clone, Debug, Default)] -struct PartialTrieState { - state: StateTrieT, - storage: HashMap, - txn: TransactionTrie, - receipt: ReceiptTrie, -} - -/// Additional information discovered during delta application. -#[derive(Debug, Default)] -struct TrieDeltaApplicationOutput { - // During delta application, if a delete occurs, we may have to make sure additional nodes - // that are not accessed by the txn remain unhashed. 
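A small usage sketch of the `Hash2Code` cache defined above (it is crate-private, so picture this sitting in the same module): code is stored keyed by its keccak hash, and a later `ContractCodeUsage::Read` hash is resolved through `get`. The bytecode bytes here are made up.

```rust
fn hash2code_demo() -> anyhow::Result<()> {
    // `new()` pre-populates the hash of the empty code, as above.
    let mut hash2code = Hash2Code::new();

    // Pretend this bytecode came from a `ContractCodeUsage::Write`.
    let deployed = vec![0x60, 0x00, 0x60, 0x00, 0xf3];
    hash2code.insert(deployed.clone());

    // A later `ContractCodeUsage::Read(hash)` resolves to the same bytes.
    let fetched = hash2code.get(keccak_hash::keccak(&deployed))?;
    assert_eq!(fetched, deployed);

    // Unknown hashes surface as an error instead of a panic.
    assert!(hash2code.get(keccak_hash::keccak(b"unknown")).is_err());
    Ok(())
}
```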
- additional_state_trie_paths_to_not_hash: Vec, - additional_storage_trie_paths_to_not_hash: HashMap>, -} - -pub fn into_txn_proof_gen_ir( - ProcessedBlockTrace { - tries: PartialTriePreImages { state, storage }, - txn_info, - withdrawals, - }: ProcessedBlockTrace, - other_data: OtherBlockData, - use_burn_addr: bool, - batch_size: usize, -) -> anyhow::Result> { - let mut curr_block_tries = PartialTrieState { - state: state.clone(), - storage: storage.iter().map(|(k, v)| (*k, v.clone())).collect(), - ..Default::default() - }; - - let mut extra_data = ExtraBlockData { - checkpoint_state_trie_root: other_data.checkpoint_state_trie_root, - txn_number_before: U256::zero(), - txn_number_after: U256::zero(), - gas_used_before: U256::zero(), - gas_used_after: U256::zero(), - }; - - let num_txs = txn_info - .iter() - .map(|tx_info| tx_info.meta.len()) - .sum::(); - - let mut txn_gen_inputs = txn_info - .into_iter() - .enumerate() - .map(|(txn_idx, txn_info)| { - let txn_range = - min(txn_idx * batch_size, num_txs)..min(txn_idx * batch_size + batch_size, num_txs); - let is_initial_payload = txn_range.start == 0; - - process_txn_info( - txn_range.clone(), - is_initial_payload, - txn_info, - &mut curr_block_tries, - &mut extra_data, - &other_data, - use_burn_addr, - ) - .context(format!( - "at transaction range {}..{}", - txn_range.start, txn_range.end - )) - }) - .collect::>>() - .context(format!( - "at block num {} with chain id {}", - other_data.b_data.b_meta.block_number, other_data.b_data.b_meta.block_chain_id - ))?; - - if !withdrawals.is_empty() { - add_withdrawals_to_txns(&mut txn_gen_inputs, &mut curr_block_tries, withdrawals)?; - } - - Ok(txn_gen_inputs) -} - -/// Cancun HF specific: At the start of a block, prior txn execution, we -/// need to update the storage of the beacon block root contract. -// See . -fn update_beacon_block_root_contract_storage( - trie_state: &mut PartialTrieState, - delta_out: &mut TrieDeltaApplicationOutput, - nodes_used: &mut NodesUsedByTxnBatch, - block_data: &BlockMetadata, -) -> anyhow::Result<()> { - const HISTORY_BUFFER_LENGTH_MOD: U256 = U256([HISTORY_BUFFER_LENGTH.1, 0, 0, 0]); - - let timestamp_idx = block_data.block_timestamp % HISTORY_BUFFER_LENGTH_MOD; - let timestamp = rlp::encode(&block_data.block_timestamp).to_vec(); - - let root_idx = timestamp_idx + HISTORY_BUFFER_LENGTH_MOD; - let calldata = rlp::encode(&U256::from_big_endian( - &block_data.parent_beacon_block_root.0, - )) - .to_vec(); - - let storage_trie = trie_state - .storage - .get_mut(&BEACON_ROOTS_CONTRACT_ADDRESS_HASHED) - .context(format!( - "missing account storage trie for address {:x}", - BEACON_ROOTS_CONTRACT_ADDRESS - ))?; - - let slots_nibbles = nodes_used - .storage_accesses - .entry(BEACON_ROOTS_CONTRACT_ADDRESS_HASHED) - .or_default(); - - for (ix, val) in [(timestamp_idx, timestamp), (root_idx, calldata)] { - // TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/275 - // document this - let slot = TrieKey::from_nibbles(Nibbles::from_h256_be(hash( - Nibbles::from_h256_be(H256::from_uint(&ix)).bytes_be(), - ))); - - slots_nibbles.push(slot); - - // If we are writing a zero, then we actually need to perform a delete. 
- match val == ZERO_STORAGE_SLOT_VAL_RLPED { - false => { - storage_trie.insert(slot, val.clone()).context(format!( - "at slot {:?} with value {}", - slot, - U512::from_big_endian(val.as_slice()) - ))?; - - delta_out - .additional_storage_trie_paths_to_not_hash - .entry(BEACON_ROOTS_CONTRACT_ADDRESS_HASHED) - .or_default() - .push(slot); - } - true => { - if let Ok(Some(remaining_slot_key)) = - delete_node_and_report_remaining_key_if_branch_collapsed( - storage_trie.as_mut_hashed_partial_trie_unchecked(), - &slot, - ) - { - delta_out - .additional_storage_trie_paths_to_not_hash - .entry(BEACON_ROOTS_CONTRACT_ADDRESS_HASHED) - .or_default() - .push(remaining_slot_key); - } - } - } - } - - delta_out - .additional_state_trie_paths_to_not_hash - .push(TrieKey::from_hash(BEACON_ROOTS_CONTRACT_ADDRESS_HASHED)); - let mut account = trie_state - .state - .get_by_address(BEACON_ROOTS_CONTRACT_ADDRESS) - .context(format!( - "missing account storage trie for address {:x}", - BEACON_ROOTS_CONTRACT_ADDRESS - ))?; - - account.storage_root = storage_trie.root(); - - trie_state - .state - .insert_by_address(BEACON_ROOTS_CONTRACT_ADDRESS, account) - // TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/275 - // Add an entry API - .expect("insert must succeed with the same key as a successful `get`"); - - Ok(()) -} - -fn update_txn_and_receipt_tries( - trie_state: &mut PartialTrieState, - meta: &TxnMetaState, - txn_idx: usize, -) -> anyhow::Result<()> { - if let Some(bytes) = &meta.txn_bytes { - trie_state.txn.insert(txn_idx, bytes.clone())?; - trie_state - .receipt - .insert(txn_idx, meta.receipt_node_bytes.clone())?; - } // else it's just a dummy - Ok(()) -} - -/// If the account does not have a storage trie or does but is not -/// accessed by any txns, then we still need to manually create an entry for -/// them. 
-fn init_any_needed_empty_storage_tries<'a>( - storage_tries: &mut HashMap, - accounts_with_storage: impl Iterator, - accts_with_unaccessed_storage: &HashMap, -) { - for h_addr in accounts_with_storage { - if !storage_tries.contains_key(h_addr) { - let trie = accts_with_unaccessed_storage - .get(h_addr) - .map(|s_root| { - let mut it = StorageTrie::default(); - it.insert_hash(TrieKey::default(), *s_root) - .expect("empty trie insert cannot fail"); - it - }) - .unwrap_or_default(); - - storage_tries.insert(*h_addr, trie); - }; - } -} - -fn create_minimal_partial_tries_needed_by_txn( - curr_block_tries: &PartialTrieState>, - nodes_used_by_txn: &NodesUsedByTxnBatch, - txn_range: Range, - delta_application_out: TrieDeltaApplicationOutput, -) -> anyhow::Result { - let mut state_trie = curr_block_tries.state.clone(); - state_trie.trim_to( - nodes_used_by_txn - .state_accesses - .iter() - .map(|it| TrieKey::from_address(*it)) - .chain(delta_application_out.additional_state_trie_paths_to_not_hash), - )?; - - let txn_keys = txn_range.map(TrieKey::from_txn_ix); - - let transactions_trie = create_trie_subset_wrapped( - curr_block_tries.txn.as_hashed_partial_trie(), - txn_keys.clone(), - TrieType::Txn, - )?; - - let receipts_trie = create_trie_subset_wrapped( - curr_block_tries.receipt.as_hashed_partial_trie(), - txn_keys, - TrieType::Receipt, - )?; - - let storage_tries = create_minimal_storage_partial_tries( - &curr_block_tries.storage, - &nodes_used_by_txn.storage_accesses, - &delta_application_out.additional_storage_trie_paths_to_not_hash, - )?; - - Ok(TrieInputs { - state_trie: state_trie.try_into()?, - transactions_trie, - receipts_trie, - storage_tries, - }) -} - -fn apply_deltas_to_trie_state( - trie_state: &mut PartialTrieState, - deltas: &NodesUsedByTxnBatch, - meta: &[TxnMetaState], -) -> anyhow::Result { - let mut out = TrieDeltaApplicationOutput::default(); - - for (hashed_acc_addr, storage_writes) in deltas.storage_writes.iter() { - let storage_trie = trie_state - .storage - .get_mut(hashed_acc_addr) - .context(format!( - "missing account storage trie {:x}", - hashed_acc_addr - ))?; - - for (key, val) in storage_writes { - let slot = TrieKey::from_hash(hash(key.into_nibbles().bytes_be())); - // If we are writing a zero, then we actually need to perform a delete. - match val == &ZERO_STORAGE_SLOT_VAL_RLPED { - false => { - storage_trie.insert(slot, val.clone()).context(format!( - "at slot {:?} with value {}", - slot, - U512::from_big_endian(val.as_slice()) - ))?; - } - true => { - if let Some(remaining_slot_key) = - delete_node_and_report_remaining_key_if_branch_collapsed( - storage_trie.as_mut_hashed_partial_trie_unchecked(), - &slot, - )? - { - out.additional_storage_trie_paths_to_not_hash - .entry(*hashed_acc_addr) - .or_default() - .push(remaining_slot_key); - } - } - }; - } - } - - for (addr, state_write) in &deltas.state_writes { - // If the account was created, then it will not exist in the trie yet. - let is_created = !trie_state.state.contains_address(*addr); - let mut account = trie_state.state.get_by_address(*addr).unwrap_or_default(); - - state_write.apply_writes_to_state_node(&mut account, &hash(addr), &trie_state.storage)?; - - trie_state.state.insert_by_address(*addr, account)?; - - if is_created { - // If the account did not exist prior this transaction, we - // need to make sure the transaction didn't revert. - - // We will check the status of the last receipt that attempted to create the - // account in this batch. 
- let last_creation_receipt = &meta - .iter() - .rev() - .find(|tx| tx.created_accounts.contains(addr)) - .expect("We should have found a matching transaction") - .receipt_node_bytes; - - let (_, _, receipt) = decode_receipt(last_creation_receipt) - .map_err(|_| anyhow!("couldn't RLP-decode receipt node bytes"))?; - - if !receipt.status { - // The transaction failed, hence any created account should be removed. - if let Some(remaining_account_key) = trie_state.state.reporting_remove(*addr)? { - out.additional_state_trie_paths_to_not_hash - .push(remaining_account_key); - trie_state.storage.remove(&hash(addr)); - continue; - } - } - } - } - - // Remove any accounts that self-destructed. - for addr in deltas.self_destructed_accounts.iter() { - trie_state.storage.remove(&hash(addr)); - - if let Some(remaining_account_key) = trie_state.state.reporting_remove(*addr)? { - out.additional_state_trie_paths_to_not_hash - .push(remaining_account_key); - } - } - - Ok(out) -} - -fn get_trie_trace(trie: &HashedPartialTrie, k: &Nibbles) -> TriePath { - path_for_query(trie, *k, true).collect() -} - -/// If a branch collapse occurred after a delete, then we must ensure that -/// the other single child that remains also is not hashed when passed into -/// plonky2. Returns the key to the remaining child if a collapse occurred. -pub fn delete_node_and_report_remaining_key_if_branch_collapsed( - trie: &mut HashedPartialTrie, - key: &TrieKey, -) -> Result, TrieOpError> { - let key = key.into_nibbles(); - let old_trace = get_trie_trace(trie, &key); - trie.delete(key)?; - let new_trace = get_trie_trace(trie, &key); - Ok( - node_deletion_resulted_in_a_branch_collapse(&old_trace, &new_trace) - .map(TrieKey::from_nibbles), - ) -} - -/// Comparing the path of the deleted key before and after the deletion, -/// determine if the deletion resulted in a branch collapsing into a leaf or -/// extension node, and return the path to the remaining child if this -/// occurred. -fn node_deletion_resulted_in_a_branch_collapse( - old_path: &TriePath, - new_path: &TriePath, -) -> Option { - // Collapse requires at least 2 nodes. - if old_path.0.len() < 2 { - return None; - } - - // If the node path length decreased after the delete, then a collapse occurred. - // As an aside, note that while it's true that the branch could have collapsed - // into an extension node with multiple nodes below it, the query logic will - // always stop at most one node after the keys diverge, which guarantees that - // the new trie path will always be shorter if a collapse occurred. - let branch_collapse_occurred = old_path.0.len() > new_path.0.len(); - - // Now we need to determine the key of the only remaining node after the - // collapse. - branch_collapse_occurred.then(|| new_path.iter().into_key()) -} - -/// The withdrawals are always in the final ir payload. -fn add_withdrawals_to_txns( - txn_ir: &mut [GenerationInputs], - final_trie_state: &mut PartialTrieState< - impl StateTrie + Clone + TryIntoBounds, - >, - mut withdrawals: Vec<(Address, U256)>, -) -> anyhow::Result<()> { - // Scale withdrawals amounts. 
- for (_addr, amt) in withdrawals.iter_mut() { - *amt = eth_to_gwei(*amt) - } - - let withdrawals_with_hashed_addrs_iter = || { - withdrawals - .iter() - .map(|(addr, v)| (*addr, hash(addr.as_bytes()), *v)) - }; - - let last_inputs = txn_ir - .last_mut() - .expect("We cannot have an empty list of payloads."); - - if last_inputs.signed_txns.is_empty() { - let mut state_trie = final_trie_state.state.clone(); - state_trie.trim_to( - // This is a dummy payload, hence it does not contain yet - // state accesses to the withdrawal addresses. - withdrawals - .iter() - .map(|(addr, _)| *addr) - .chain(match last_inputs.txn_number_before == 0.into() { - // We need to include the beacon roots contract as this payload is at the - // start of the block execution. - true => Some(BEACON_ROOTS_CONTRACT_ADDRESS), - false => None, - }) - .map(TrieKey::from_address), - )?; - last_inputs.tries.state_trie = state_trie.try_into()?; - } - - update_trie_state_from_withdrawals( - withdrawals_with_hashed_addrs_iter(), - &mut final_trie_state.state, - )?; - - last_inputs.withdrawals = withdrawals; - last_inputs.trie_roots_after.state_root = final_trie_state.state.clone().try_into()?.hash(); - - Ok(()) -} - -/// Withdrawals update balances in the account trie, so we need to update -/// our local trie state. -fn update_trie_state_from_withdrawals<'a>( - withdrawals: impl IntoIterator + 'a, - state: &mut impl StateTrie, -) -> anyhow::Result<()> { - for (addr, h_addr, amt) in withdrawals { - let mut acc_data = state.get_by_address(addr).context(format!( - "No account present at {addr:x} (hashed: {h_addr:x}) to withdraw {amt} Gwei from!" - ))?; - - acc_data.balance += amt; - - state - .insert_by_address(addr, acc_data) - // TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/275 - // Add an entry API - .expect("insert must succeed with the same key as a successful `get`"); - } - - Ok(()) -} - -/// Processes a single transaction in the trace. -fn process_txn_info( - txn_range: Range, - is_initial_payload: bool, - txn_info: ProcessedTxnBatchInfo, - curr_block_tries: &mut PartialTrieState< - impl StateTrie + Clone + TryIntoBounds, - >, - extra_data: &mut ExtraBlockData, - other_data: &OtherBlockData, - use_burn_target: bool, -) -> anyhow::Result { - log::trace!( - "Generating proof IR for txn {} through {}...", - txn_range.start, - txn_range.end - 1 - ); - - init_any_needed_empty_storage_tries( - &mut curr_block_tries.storage, - txn_info.nodes_used_by_txn.storage_accesses.keys(), - &txn_info.nodes_used_by_txn.accts_with_unaccessed_storage, - ); - - // For each non-dummy txn, we increment `txn_number_after` and - // update `gas_used_after` accordingly. - extra_data.txn_number_after += txn_info.meta.len().into(); - extra_data.gas_used_after += txn_info.meta.iter().map(|i| i.gas_used).sum::().into(); - - // Because we need to run delta application before creating the minimal - // sub-tries (we need to detect if deletes collapsed any branches), we need to - // do this clone every iteration. 
- let tries_at_start_of_txn = curr_block_tries.clone(); - - for (i, meta) in txn_info.meta.iter().enumerate() { - update_txn_and_receipt_tries( - curr_block_tries, - meta, - extra_data.txn_number_before.as_usize() + i, - )?; - } - - let mut delta_out = apply_deltas_to_trie_state( - curr_block_tries, - &txn_info.nodes_used_by_txn, - &txn_info.meta, - )?; - - let nodes_used_by_txn = if is_initial_payload { - let mut nodes_used = txn_info.nodes_used_by_txn; - update_beacon_block_root_contract_storage( - curr_block_tries, - &mut delta_out, - &mut nodes_used, - &other_data.b_data.b_meta, - )?; - - nodes_used - } else { - txn_info.nodes_used_by_txn - }; - - let tries = create_minimal_partial_tries_needed_by_txn( - &tries_at_start_of_txn, - &nodes_used_by_txn, - txn_range, - delta_out, - )?; - - let burn_addr = match use_burn_target { - // TODO: https://github.com/0xPolygonZero/zk_evm/issues/565 - // Retrieve the actual burn address from `cdk-erigon`. - true => Some(H160::zero()), - false => None, - }; - let gen_inputs = GenerationInputs { - txn_number_before: extra_data.txn_number_before, - burn_addr, - gas_used_before: extra_data.gas_used_before, - gas_used_after: extra_data.gas_used_after, - signed_txns: txn_info - .meta - .iter() - .filter_map(|t| t.txn_bytes.clone()) - .collect::>(), - withdrawals: Vec::default(), /* Only ever set in a dummy txn at the end of - * the block (see `[add_withdrawals_to_txns]` - * for more info). */ - tries, - trie_roots_after: TrieRoots { - state_root: curr_block_tries.state.clone().try_into()?.hash(), - transactions_root: curr_block_tries.txn.root(), - receipts_root: curr_block_tries.receipt.root(), - }, - checkpoint_state_trie_root: extra_data.checkpoint_state_trie_root, - contract_code: txn_info - .contract_code_accessed - .into_iter() - .map(|code| (hash(&code), code)) - .collect(), - block_metadata: other_data.b_data.b_meta.clone(), - block_hashes: other_data.b_data.b_hashes.clone(), - ger_data: None, - }; - - // After processing a transaction, we update the remaining accumulators - // for the next transaction. - extra_data.txn_number_before = extra_data.txn_number_after; - extra_data.gas_used_before = extra_data.gas_used_after; - - Ok(gen_inputs) -} - -impl StateWrite { - fn apply_writes_to_state_node( - &self, - state_node: &mut AccountRlp, - h_addr: &H256, - acc_storage_tries: &HashMap, - ) -> anyhow::Result<()> { - let storage_root_hash_change = match self.storage_trie_change { - false => None, - true => { - let storage_trie = acc_storage_tries - .get(h_addr) - .context(format!("missing account storage trie {:x}", h_addr))?; - - Some(storage_trie.root()) - } - }; - - state_node.balance = self.balance.unwrap_or(state_node.balance); - state_node.nonce = self.nonce.unwrap_or(state_node.nonce); - state_node.storage_root = storage_root_hash_change.unwrap_or(state_node.storage_root); - state_node.code_hash = self.code_hash.unwrap_or(state_node.code_hash); - - Ok(()) - } -} - -// TODO!!!: We really need to be appending the empty storage tries to the base -// trie somewhere else! This is a big hack! -fn create_minimal_storage_partial_tries<'a>( - storage_tries: &HashMap, - accesses_per_account: impl IntoIterator)>, - additional_storage_trie_paths_to_not_hash: &HashMap>, -) -> anyhow::Result> { - accesses_per_account - .into_iter() - .map(|(h_addr, mem_accesses)| { - // Guaranteed to exist due to calling `init_any_needed_empty_storage_tries` - // earlier on. 
- let base_storage_trie = &storage_tries[h_addr]; - - let storage_slots_to_not_hash = mem_accesses.iter().cloned().chain( - additional_storage_trie_paths_to_not_hash - .get(h_addr) - .into_iter() - .flat_map(|slots| slots.iter().cloned()), - ); - - let partial_storage_trie = create_trie_subset_wrapped( - base_storage_trie.as_hashed_partial_trie(), - storage_slots_to_not_hash, - TrieType::Storage, - )?; - - Ok((*h_addr, partial_storage_trie)) - }) - .collect() -} - -fn create_trie_subset_wrapped( - trie: &HashedPartialTrie, - accesses: impl IntoIterator, - trie_type: TrieType, -) -> anyhow::Result { - mpt_trie::trie_subsets::create_trie_subset( - trie, - accesses.into_iter().map(TrieKey::into_nibbles), - ) - .context(format!("missing keys when creating {}", trie_type)) -} - -fn eth_to_gwei(eth: U256) -> U256 { - // 1 ether = 10^9 gwei. - eth * U256::from(10).pow(9.into()) -} - -// This is just `rlp(0)`. -const ZERO_STORAGE_SLOT_VAL_RLPED: [u8; 1] = [128]; - -/// Aid for error context. -#[derive(Debug, strum::Display)] -#[allow(missing_docs)] -enum TrieType { - Storage, - Receipt, - Txn, -} diff --git a/trace_decoder/src/interface.rs b/trace_decoder/src/interface.rs new file mode 100644 index 000000000..5f406580a --- /dev/null +++ b/trace_decoder/src/interface.rs @@ -0,0 +1,185 @@ +//! Public types for this crate. +//! +//! These are all in one place because they're about to be heavily refactored in [#401](https://github.com/0xPolygonZero/zk_evm/issues/401). + +use std::collections::{BTreeMap, BTreeSet, HashMap}; + +use ethereum_types::{Address, U256}; +use evm_arithmetization::proof::{BlockHashes, BlockMetadata}; +use keccak_hash::H256; +use mpt_trie::partial_trie::HashedPartialTrie; +use serde::{Deserialize, Serialize}; + +/// Core payload needed to generate proof for a block. +/// Additional data retrievable from the blockchain node (using standard ETH RPC +/// API) may be needed for proof generation. +/// +/// The trie preimages are the hashed partial tries at the +/// start of the block. A [TxnInfo] contains all the transaction data +/// necessary to generate an IR. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct BlockTrace { + /// The state and storage trie pre-images (i.e. the tries before + /// the execution of the current block) in multiple possible formats. + pub trie_pre_images: BlockTraceTriePreImages, + + /// A collection of contract code. + /// This will be accessed by its hash internally. + #[serde(default)] + pub code_db: BTreeSet>, + + /// Traces and other info per transaction. The index of the transaction + /// within the block corresponds to the slot in this vec. + pub txn_info: Vec, +} + +/// Minimal hashed out tries needed by all txns in the block. +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(rename_all = "snake_case")] +pub enum BlockTraceTriePreImages { + /// The trie pre-image with separate state/storage tries. + Separate(SeparateTriePreImages), + /// The trie pre-image with combined state/storage tries. + Combined(CombinedPreImages), +} + +/// State/Storage trie pre-images that are separate. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct SeparateTriePreImages { + /// State trie. + pub state: SeparateTriePreImage, + /// Storage trie. + pub storage: SeparateStorageTriesPreImage, +} + +/// A trie pre-image where state & storage are separate. 
+#[derive(Clone, Debug, Deserialize, Serialize)]
+#[serde(rename_all = "snake_case")]
+pub enum SeparateTriePreImage {
+    /// Storage or state trie format that can be processed as is, as it
+    /// corresponds to the internal format.
+    Direct(HashedPartialTrie),
+}
+
+/// A trie pre-image where both state & storage are combined into one payload.
+#[derive(Clone, Debug, Deserialize, Serialize)]
+#[serde(rename_all = "snake_case")]
+pub struct CombinedPreImages {
+    /// Compact combined state and storage tries.
+    #[serde(with = "crate::hex")]
+    pub compact: Vec<u8>,
+}
+
+/// A trie pre-image where state and storage are separate.
+#[derive(Clone, Debug, Deserialize, Serialize)]
+#[serde(rename_all = "snake_case")]
+pub enum SeparateStorageTriesPreImage {
+    /// Each storage trie is sent over in a hashmap with the hashed account
+    /// address as a key.
+    MultipleTries(HashMap<H256, SeparateTriePreImage>),
+}
+
+/// Info specific to txns in the block.
+#[derive(Clone, Debug, Deserialize, Serialize, Default)]
+pub struct TxnInfo {
+    /// Trace data for the txn. This is used by the protocol to:
+    /// - Mutate its own trie state between txns to arrive at the correct trie
+    ///   state for the start of each txn.
+    /// - Create minimal partial tries needed for proof gen based on what state
+    ///   the txn accesses. (e.g. what trie nodes are accessed).
+    pub traces: BTreeMap<Address, TxnTrace>,
+
+    /// Data that is specific to the txn as a whole.
+    pub meta: TxnMeta,
+}
+
+/// Structure holding metadata for one transaction.
+#[derive(Clone, Debug, Deserialize, Serialize, Default)]
+pub struct TxnMeta {
+    /// Txn byte code. This is also the raw RLP bytestring inserted into the txn
+    /// trie by this txn. Note that the key is not included and this is only
+    /// the rlped value of the node!
+    #[serde(with = "crate::hex")]
+    pub byte_code: Vec<u8>,
+
+    /// Rlped bytes of the new receipt value inserted into the receipt trie by
+    /// this txn. Note that the key is not included and this is only the rlped
+    /// value of the node!
+    #[serde(with = "crate::hex")]
+    pub new_receipt_trie_node_byte: Vec<u8>,
+
+    /// Gas used by this txn (Note: not cumulative gas used).
+    pub gas_used: u64,
+}
+
+/// A "trace" specific to an account for a txn.
+///
+/// Specifically, since we cannot execute the txn before proof generation, we
+/// rely on a separate EVM to run the txn and supply this data for us.
+#[derive(Clone, Debug, Deserialize, Serialize, Default, PartialEq)]
+pub struct TxnTrace {
+    /// If the balance changed, then the new balance will appear here. Will be
+    /// `None` if no change.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub balance: Option<U256>,
+
+    /// If the nonce changed, then the new nonce will appear here. Will be
+    /// `None` if no change.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub nonce: Option<U256>,
+
+    /// [hash](keccak_hash)([Address]) of storages read by the
+    /// transaction.
+    #[serde(default, skip_serializing_if = "BTreeSet::is_empty")]
+    pub storage_read: BTreeSet<H256>,
+
+    /// [hash](keccak_hash)([Address]) of storages written by the
+    /// transaction, with their new value.
+    #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
+    pub storage_written: BTreeMap<H256, U256>,
+
+    /// Contract code that this account has accessed or created
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub code_usage: Option<ContractCodeUsage>,
+
+    /// True if the account got self-destructed at the end of this txn.
+    #[serde(default, skip_serializing_if = "is_false")]
+    pub self_destructed: bool,
+}
+
+fn is_false(b: &bool) -> bool {
+    !b
+}
+
+/// Contract code access type.
Used by txn traces. +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] +#[serde(rename_all = "snake_case")] +pub enum ContractCodeUsage { + /// Contract was read. + Read(H256), + + /// Contract was created (and these are the bytes). Note that this new + /// contract code will not appear in the [`BlockTrace`] map. + Write(#[serde(with = "crate::hex")] Vec), +} + +/// Other data that is needed for proof gen. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct OtherBlockData { + /// Data that is specific to the block. + pub b_data: BlockLevelData, + /// State trie root hash at the checkpoint. + pub checkpoint_state_trie_root: H256, +} + +/// Data that is specific to a block and is constant for all txns in a given +/// block. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct BlockLevelData { + /// All block data excluding block hashes and withdrawals. + pub b_meta: BlockMetadata, + /// Block hashes: the previous 256 block hashes and the current block hash. + pub b_hashes: BlockHashes, + /// Block withdrawal addresses and values. + pub withdrawals: Vec<(Address, U256)>, +} diff --git a/trace_decoder/src/lib.rs b/trace_decoder/src/lib.rs index 2202d58c5..4a0bdf7b4 100644 --- a/trace_decoder/src/lib.rs +++ b/trace_decoder/src/lib.rs @@ -1,92 +1,65 @@ -//!
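To make the shape of these interface types concrete, here is a hypothetical serde round-trip of `TxnTrace` (assuming `serde_json` is available as a dev-dependency); the `skip_serializing_if` attributes above mean untouched fields simply disappear from the JSON.

```rust
use ethereum_types::U256;

fn txn_trace_roundtrip() -> anyhow::Result<()> {
    // Only the balance changed in this (made-up) trace.
    let trace = TxnTrace {
        balance: Some(U256::from(100u64)),
        ..Default::default()
    };

    let json = serde_json::to_string(&trace)?;
    // The unchanged fields are skipped, so the payload stays minimal,
    // e.g. something like {"balance":"0x64"}.
    assert!(json.contains("balance"));
    assert!(!json.contains("storage_written"));

    let back: TxnTrace = serde_json::from_str(&json)?;
    assert_eq!(back, trace);
    Ok(())
}
```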
-//! This library is undergoing major refactoring as part of (#275)(https://github.com/0xPolygonZero/zk_evm/issues/275). -//! Consider all TODOs to be tracked under that issue. -//!
+//! An _Ethereum Node_[^1] executes _transactions_ in _blocks_.
 //!
-//! Your neighborhood zk-ready [ethereum](https://github.com/0xPolygonZero/erigon)
-//! [node](https://github.com/0xPolygonHermez/cdk-erigon/) emits binary "witnesses"[^1].
+//! Execution mutates two key data structures:
+//! - [The state trie](https://ethereum.org/en/developers/docs/data-structures-and-encoding/patricia-merkle-trie/#state-trie).
+//! - [The storage tries](https://ethereum.org/en/developers/docs/data-structures-and-encoding/patricia-merkle-trie/#storage-trie).
 //!
-//! But [`plonky2`], your prover, wants [`GenerationInputs`].
+//! Ethereum nodes expose information about the transactions over RPC, e.g.:
+//! - [The specific changes to the storage tries](TxnTrace::storage_written).
+//! - [Changes to account balance in the state trie](TxnTrace::balance).
 //!
-//! This library helps you get there.
+//! The state execution correctness is then asserted by the zkEVM prover in
+//! [`evm_arithmetization`], relying on `starky` and [`plonky2`].
 //!
-//! [^1]: A witness is an attestation of the state of the world, which can be
-//! proven by a prover.
+//! **Prover performance is a high priority.**
 //!
-//! # Non-Goals
-//! - Performance - this won't be the bottleneck in any proving system.
-//! - Robustness - malicious or malformed input may crash this library.
+//! The aforementioned trie structures may have subtries _indirected_.
+//! That is, any node (and its children!) may be replaced by its hash,
+//! while maintaining provability of its contents:
+//! ```text
+//!     A               A
+//!    / \             / \
+//!   B   C     ->    H   C
+//!  / \   \               \
+//! D   E   F               F
+//! ```
+//! (where `H` is the hash of the `D/B\E` subtrie).
 //!
-//! TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/275
-//!                refactor all the docs below
+//! The principal concern of this library is to step through the transactions,
+//! and reproduce the _intermediate tries_,
+//! while indirecting all possible subtries to minimise prover load
+//! (since prover performance is sensitive to the size of the trie).
+//! The prover can therefore prove each batch of transactions independently.
 //!
-//! It might not be obvious why we need traces for each txn in order to generate
-//! proofs. While it's true that we could just run all the txns of a block in an
-//! EVM to generate the traces ourselves, there are a few major downsides:
-//! - The client is likely a full node and already has to run the txns in an EVM
-//!   anyways.
-//! - We want this protocol to be as agnostic as possible to the underlying
-//!   chain that we're generating proofs for, and running our own EVM would
-//!   likely cause us to loose this genericness.
+//! [^1]: In our stack, this is [a fork of erigon](https://github.com/0xPolygonZero/erigon),
+//!       which exposes more information over RPC.
 //!
-//! While it's also true that we run our own zk-EVM (plonky2) to generate
-//! proofs, it's critical that we are able to generate txn proofs in parallel.
-//! Since generating proofs with plonky2 is very slow, this would force us to
-//! sequentialize the entire proof generation process. So in the end, it's ideal
-//! if we can get this information sent to us instead.
-//!
-//! This library generates an Intermediary Representation (IR) of
-//! a block's transactions, given a [BlockTrace] and some additional
-//! data represented by [OtherBlockData].
-//!
-//! It first preprocesses the [BlockTrace] to provide transaction,
-//! withdrawals and tries data that can be directly used to generate an IR.
-//!
For each transaction, this library extracts the -//! necessary data from the processed transaction information to -//! return the IR. -//! -//! The IR is used to generate root proofs, then aggregation proofs and finally -//! block proofs. Because aggregation proofs require at least two entries, we -//! pad the vector of IRs thanks to additional dummy payload intermediary -//! representations whenever necessary. -//! -//! ### [Withdrawals](https://ethereum.org/staking/withdrawals) and Padding -//! -//! Withdrawals are all proven together in a dummy payload. A dummy payload -//! corresponds to the IR of a proof with no transaction. They must, however, be -//! proven last. The padding is therefore carried out as follows: If there are -//! no transactions in the block, we add two dummy transactions. The withdrawals -//! -- if any -- are added to the second dummy transaction. If there is only one -//! transaction in the block, we add one dummy transaction. If -//! there are withdrawals, the dummy transaction is at the end. Otherwise, it is -//! added at the start. If there are two or more transactions: -//! - if there are no withdrawals, no dummy transactions are added -//! - if there are withdrawals, one dummy transaction is added at the end, with -//! all the withdrawals in it. +//! # Non-goals +//! - Performance - this will never be the bottleneck in any proving stack. +//! - Robustness - this library depends on other libraries that are not robust, +//! so may panic at any time. #![deny(rustdoc::broken_intra_doc_links)] #![warn(missing_debug_implementations)] #![warn(missing_docs)] -/// The broad overview is as follows: +/// Over RPC, ethereum nodes expose their tries as a series of binary +/// [`wire::Instruction`]s in a node-dependant format. /// -/// 1. Ethereum nodes emit a bunch of binary [`wire::Instruction`]s, which are -/// parsed in [`wire`]. -/// 2. They are passed to one of two "frontends", depending on the node +/// These are parsed into the relevant trie depending on the node: /// - [`type2`], which contains an [`smt_trie`]. /// - [`type1`], which contains an [`mpt_trie`]. -/// 3. The frontend ([`type1::Frontend`] or [`type2::Frontend`]) is passed to -/// the "backend", which lowers to [`evm_arithmetization::GenerationInputs`]. /// -/// Deviations from the specification are signalled with `BUG(spec)` in the -/// code. +/// After getting the tries, +/// we can continue to do the main work of "executing" the transactions. const _DEVELOPER_DOCS: () = (); -/// Defines the main functions used to generate the IR. -mod decoding; -/// Defines functions that processes a [BlockTrace] so that it is easier to turn -/// the block transactions into IRs. -mod processed_block_trace; +mod interface; + +pub use interface::*; +pub use type1::frontend; +pub use wire::parse; + mod type1; // TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/275 // add backend/prod support for type 2 @@ -96,364 +69,9 @@ mod type2; mod typed_mpt; mod wire; -use std::collections::{BTreeMap, BTreeSet, HashMap}; - -use ethereum_types::{Address, U256}; -use evm_arithmetization::proof::{BlockHashes, BlockMetadata}; -use evm_arithmetization::GenerationInputs; -use keccak_hash::keccak as hash; -use keccak_hash::H256; -use mpt_trie::partial_trie::{HashedPartialTrie, OnOrphanedHashNode}; -use processed_block_trace::ProcessedTxnBatchInfo; -use serde::{Deserialize, Serialize}; -use typed_mpt::{StateMpt, StateTrie as _, StorageTrie, TrieKey}; - -/// Core payload needed to generate proof for a block. 
-/// Additional data retrievable from the blockchain node (using standard ETH RPC -/// API) may be needed for proof generation. -/// -/// The trie preimages are the hashed partial tries at the -/// start of the block. A [TxnInfo] contains all the transaction data -/// necessary to generate an IR. -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct BlockTrace { - /// The state and storage trie pre-images (i.e. the tries before - /// the execution of the current block) in multiple possible formats. - pub trie_pre_images: BlockTraceTriePreImages, - - /// A collection of contract code. - /// This will be accessed by its hash internally. - #[serde(default)] - pub code_db: BTreeSet>, - - /// Traces and other info per transaction. The index of the transaction - /// within the block corresponds to the slot in this vec. - pub txn_info: Vec, -} - -/// Minimal hashed out tries needed by all txns in the block. -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "snake_case")] -pub enum BlockTraceTriePreImages { - /// The trie pre-image with separate state/storage tries. - Separate(SeparateTriePreImages), - /// The trie pre-image with combined state/storage tries. - Combined(CombinedPreImages), -} - -/// State/Storage trie pre-images that are separate. -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct SeparateTriePreImages { - /// State trie. - pub state: SeparateTriePreImage, - /// Storage trie. - pub storage: SeparateStorageTriesPreImage, -} - -/// A trie pre-image where state & storage are separate. -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "snake_case")] -pub enum SeparateTriePreImage { - /// Storage or state trie format that can be processed as is, as it - /// corresponds to the internal format. - Direct(HashedPartialTrie), -} - -/// A trie pre-image where both state & storage are combined into one payload. -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "snake_case")] -pub struct CombinedPreImages { - /// Compact combined state and storage tries. - #[serde(with = "crate::hex")] - pub compact: Vec, -} - -/// A trie pre-image where state and storage are separate. -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "snake_case")] -pub enum SeparateStorageTriesPreImage { - /// Each storage trie is sent over in a hashmap with the hashed account - /// address as a key. - MultipleTries(HashMap), -} - -/// Info specific to txns in the block. -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct TxnInfo { - /// Trace data for the txn. This is used by the protocol to: - /// - Mutate it's own trie state between txns to arrive at the correct trie - /// state for the start of each txn. - /// - Create minimal partial tries needed for proof gen based on what state - /// the txn accesses. (eg. What trie nodes are accessed). - pub traces: BTreeMap, - - /// Data that is specific to the txn as a whole. - pub meta: TxnMeta, -} - -/// Structure holding metadata for one transaction. -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct TxnMeta { - /// Txn byte code. This is also the raw RLP bytestring inserted into the txn - /// trie by this txn. Note that the key is not included and this is only - /// the rlped value of the node! - #[serde(with = "crate::hex")] - pub byte_code: Vec, - - /// Rlped bytes of the new receipt value inserted into the receipt trie by - /// this txn. Note that the key is not included and this is only the rlped - /// value of the node! 
- #[serde(with = "crate::hex")] - pub new_receipt_trie_node_byte: Vec, - - /// Gas used by this txn (Note: not cumulative gas used). - pub gas_used: u64, -} - -/// A "trace" specific to an account for a txn. -/// -/// Specifically, since we can not execute the txn before proof generation, we -/// rely on a separate EVM to run the txn and supply this data for us. -#[derive(Clone, Debug, Deserialize, Serialize, Default)] -pub struct TxnTrace { - /// If the balance changed, then the new balance will appear here. Will be - /// `None` if no change. - #[serde(skip_serializing_if = "Option::is_none")] - pub balance: Option, - - /// If the nonce changed, then the new nonce will appear here. Will be - /// `None` if no change. - #[serde(skip_serializing_if = "Option::is_none")] - pub nonce: Option, - - /// [hash](hash)([Address]) of storages read by the - /// transaction. - #[serde(default, skip_serializing_if = "BTreeSet::is_empty")] - pub storage_read: BTreeSet, - - /// [hash](hash)([Address]) of storages written by the - /// transaction, with their new value. - #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] - pub storage_written: BTreeMap, +pub use core::entrypoint; - /// Contract code that this account has accessed or created - #[serde(skip_serializing_if = "Option::is_none")] - pub code_usage: Option, - - /// True if the account got self-destructed at the end of this txn. - #[serde(default, skip_serializing_if = "is_false")] - pub self_destructed: bool, -} - -fn is_false(b: &bool) -> bool { - !b -} - -/// Contract code access type. Used by txn traces. -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "snake_case")] -pub enum ContractCodeUsage { - /// Contract was read. - Read(H256), - - /// Contract was created (and these are the bytes). Note that this new - /// contract code will not appear in the [`BlockTrace`] map. - Write(#[serde(with = "crate::hex")] Vec), -} - -/// Other data that is needed for proof gen. -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct OtherBlockData { - /// Data that is specific to the block. - pub b_data: BlockLevelData, - /// State trie root hash at the checkpoint. - pub checkpoint_state_trie_root: H256, -} - -/// Data that is specific to a block and is constant for all txns in a given -/// block. -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct BlockLevelData { - /// All block data excluding block hashes and withdrawals. - pub b_meta: BlockMetadata, - /// Block hashes: the previous 256 block hashes and the current block hash. - pub b_hashes: BlockHashes, - /// Block withdrawal addresses and values. 
- pub withdrawals: Vec<(Address, U256)>, -} - -/// TODO(0xaatif): -/// document this once we have the API finalized -pub fn entrypoint( - trace: BlockTrace, - other: OtherBlockData, - mut batch_size: usize, - use_burn_addr: bool, -) -> anyhow::Result> { - use anyhow::Context as _; - use mpt_trie::partial_trie::PartialTrie as _; - - use crate::processed_block_trace::{ - Hash2Code, ProcessedBlockTrace, ProcessedBlockTracePreImages, - }; - use crate::PartialTriePreImages; - use crate::{ - BlockTraceTriePreImages, CombinedPreImages, SeparateStorageTriesPreImage, - SeparateTriePreImage, SeparateTriePreImages, - }; - - let BlockTrace { - trie_pre_images, - code_db, - txn_info, - } = trace; - - let pre_images = match trie_pre_images { - BlockTraceTriePreImages::Separate(SeparateTriePreImages { - state: SeparateTriePreImage::Direct(state), - storage: SeparateStorageTriesPreImage::MultipleTries(storage), - }) => ProcessedBlockTracePreImages { - tries: PartialTriePreImages { - state: state.items().try_fold( - StateMpt::new(OnOrphanedHashNode::Reject), - |mut acc, (nibbles, hash_or_val)| { - let path = TrieKey::from_nibbles(nibbles); - match hash_or_val { - mpt_trie::trie_ops::ValOrHash::Val(bytes) => { - #[expect(deprecated)] // this is MPT specific - acc.insert_by_hashed_address( - path.into_hash() - .context("invalid path length in direct state trie")?, - rlp::decode(&bytes) - .context("invalid AccountRlp in direct state trie")?, - )?; - } - mpt_trie::trie_ops::ValOrHash::Hash(h) => { - acc.insert_hash_by_key(path, h)?; - } - }; - anyhow::Ok(acc) - }, - )?, - storage: storage - .into_iter() - .map(|(k, SeparateTriePreImage::Direct(v))| { - v.items() - .try_fold( - StorageTrie::new(OnOrphanedHashNode::Reject), - |mut acc, (nibbles, hash_or_val)| { - let path = TrieKey::from_nibbles(nibbles); - match hash_or_val { - mpt_trie::trie_ops::ValOrHash::Val(value) => { - acc.insert(path, value)?; - } - mpt_trie::trie_ops::ValOrHash::Hash(h) => { - acc.insert_hash(path, h)?; - } - }; - anyhow::Ok(acc) - }, - ) - .map(|v| (k, v)) - }) - .collect::>()?, - }, - extra_code_hash_mappings: None, - }, - BlockTraceTriePreImages::Combined(CombinedPreImages { compact }) => { - let instructions = - wire::parse(&compact).context("couldn't parse instructions from binary format")?; - let type1::Frontend { - state, - code, - storage, - } = type1::frontend(instructions)?; - ProcessedBlockTracePreImages { - tries: PartialTriePreImages { - state, - storage: storage.into_iter().collect(), - }, - extra_code_hash_mappings: match code.is_empty() { - true => None, - false => Some( - code.into_iter() - .map(|it| (crate::hash(&it), it.into_vec())) - .collect(), - ), - }, - } - } - }; - - let all_accounts_in_pre_images = pre_images.tries.state.iter().collect::>(); - - // Note we discard any user-provided hashes. - let mut hash2code = code_db - .into_iter() - .chain( - pre_images - .extra_code_hash_mappings - .unwrap_or_default() - .into_values(), - ) - .collect::(); - - // Make sure the batch size is smaller than the total number of transactions, - // or we would need to generate dummy proofs for the aggregation layers. - if batch_size > txn_info.len() { - batch_size = txn_info.len() / 2 + 1; - } - - let last_tx_idx = txn_info.len().saturating_sub(1) / batch_size; - - let mut txn_info = txn_info - .chunks(batch_size) - .enumerate() - .map(|(i, t)| { - let extra_state_accesses = if last_tx_idx == i { - // If this is the last transaction, we mark the withdrawal addresses - // as accessed in the state trie. 
- other - .b_data - .withdrawals - .iter() - .map(|(addr, _)| *addr) - .collect::>() - } else { - Vec::new() - }; - - TxnInfo::into_processed_txn_info( - t, - &pre_images.tries, - &all_accounts_in_pre_images, - &extra_state_accesses, - &mut hash2code, - ) - }) - .collect::, _>>()?; - - while txn_info.len() < 2 { - txn_info.push(ProcessedTxnBatchInfo::default()); - } - - decoding::into_txn_proof_gen_ir( - ProcessedBlockTrace { - tries: pre_images.tries, - txn_info, - withdrawals: other.b_data.withdrawals.clone(), - }, - other, - use_burn_addr, - batch_size, - ) -} - -#[derive(Debug, Default)] -struct PartialTriePreImages { - pub state: StateMpt, - pub storage: HashMap, -} +mod core; /// Like `#[serde(with = "hex")`, but tolerates and emits leading `0x` prefixes mod hex { @@ -481,23 +99,6 @@ mod hex { } } -trait TryIntoExt { - type Error: std::error::Error + Send + Sync + 'static; - fn try_into(self) -> Result; -} - -impl TryIntoExt for ThisT -where - ThisT: TryInto, - E: std::error::Error + Send + Sync + 'static, -{ - type Error = ThisT::Error; - - fn try_into(self) -> Result { - TryInto::try_into(self) - } -} - #[cfg(test)] #[derive(serde::Deserialize)] struct Case { diff --git a/trace_decoder/src/processed_block_trace.rs b/trace_decoder/src/processed_block_trace.rs deleted file mode 100644 index f2e4fcb5e..000000000 --- a/trace_decoder/src/processed_block_trace.rs +++ /dev/null @@ -1,298 +0,0 @@ -use std::collections::{BTreeSet, HashMap, HashSet}; - -use anyhow::{bail, Context as _}; -use ethereum_types::{Address, H256, U256}; -use evm_arithmetization::generation::mpt::{AccountRlp, LegacyReceiptRlp}; -use itertools::Itertools; -use zk_evm_common::EMPTY_TRIE_HASH; - -use crate::typed_mpt::{StateTrie as _, TrieKey}; -use crate::PartialTriePreImages; -use crate::{hash, TxnTrace}; -use crate::{ContractCodeUsage, TxnInfo}; - -const FIRST_PRECOMPILE_ADDRESS: U256 = U256([1, 0, 0, 0]); -const LAST_PRECOMPILE_ADDRESS: U256 = U256([10, 0, 0, 0]); - -/// A processed block trace, ready to be used to generate prover input payloads. -#[derive(Debug)] -pub(crate) struct ProcessedBlockTrace { - pub tries: PartialTriePreImages, - pub txn_info: Vec, - pub withdrawals: Vec<(Address, U256)>, -} - -#[derive(Debug)] -pub(crate) struct ProcessedBlockTracePreImages { - pub tries: PartialTriePreImages, - pub extra_code_hash_mappings: Option>>, -} - -/// A processed transaction batch, containing all information necessary to -/// reproduce the state transition incurred by its set of transactions. -#[derive(Debug, Default)] -pub(crate) struct ProcessedTxnBatchInfo { - pub nodes_used_by_txn: NodesUsedByTxnBatch, - pub contract_code_accessed: HashSet>, - pub meta: Vec, -} - -/// Code hash mappings that we have constructed from parsing the block -/// trace. -/// If there are any txns that create contracts, then they will also -/// get added here as we process the deltas. -pub(crate) struct Hash2Code { - /// Key must always be [`hash`] of value. 
- inner: HashMap>, -} - -impl Hash2Code { - pub fn new() -> Self { - Self { - inner: HashMap::new(), - } - } - fn get(&mut self, hash: H256) -> anyhow::Result> { - match self.inner.get(&hash) { - Some(code) => Ok(code.clone()), - None => bail!("no code for hash {}", hash), - } - } - fn insert(&mut self, code: Vec) { - self.inner.insert(hash(&code), code); - } -} - -impl FromIterator> for Hash2Code { - fn from_iter>>(iter: II) -> Self { - let mut this = Self::new(); - for code in iter { - this.insert(code) - } - this - } -} - -impl TxnInfo { - pub(crate) fn into_processed_txn_info( - tx_infos: &[Self], - tries: &PartialTriePreImages, - all_accounts_in_pre_image: &[(H256, AccountRlp)], - extra_state_accesses: &[Address], - hash2code: &mut Hash2Code, - ) -> anyhow::Result { - let mut nodes_used_by_txn = NodesUsedByTxnBatch::default(); - let mut contract_code_accessed = HashSet::from([vec![]]); // we always "access" empty code - let mut meta = Vec::with_capacity(tx_infos.len()); - - let all_accounts: BTreeSet = - all_accounts_in_pre_image.iter().map(|(h, _)| *h).collect(); - - for txn in tx_infos { - let mut created_accounts = BTreeSet::new(); - - for ( - addr, - TxnTrace { - balance, - nonce, - storage_read, - storage_written, - code_usage, - self_destructed, - }, - ) in &txn.traces - { - // record storage changes - let storage_written = storage_written.clone(); - - let storage_read_keys = storage_read.clone().into_iter(); - - let storage_written_keys = storage_written.keys(); - let storage_access_keys = storage_read_keys.chain(storage_written_keys.copied()); - - if let Some(storage) = nodes_used_by_txn.storage_accesses.get_mut(&hash(addr)) { - storage.extend( - storage_access_keys - .map(|H256(bytes)| TrieKey::from_hash(hash(bytes))) - .collect_vec(), - ) - } else { - nodes_used_by_txn.storage_accesses.insert( - hash(addr), - storage_access_keys - .map(|H256(bytes)| TrieKey::from_hash(hash(bytes))) - .collect(), - ); - }; - - // record state changes - let state_write = StateWrite { - balance: *balance, - nonce: *nonce, - storage_trie_change: !storage_written.is_empty(), - code_hash: code_usage.as_ref().map(|it| match it { - ContractCodeUsage::Read(hash) => *hash, - ContractCodeUsage::Write(bytes) => hash(bytes), - }), - }; - - if state_write != StateWrite::default() { - // a write occurred - - // Account creations are flagged to handle reverts. - if !all_accounts.contains(&hash(addr)) { - created_accounts.insert(*addr); - } - - // Some edge case may see a contract creation followed by a `SELFDESTRUCT`, with - // then a follow-up transaction within the same batch updating the state of the - // account. If that happens, we should not delete the account after processing - // this batch. - nodes_used_by_txn.self_destructed_accounts.remove(addr); - - if let Some(existing_state_write) = nodes_used_by_txn.state_writes.get_mut(addr) - { - // The entry already exists, so we update only the relevant fields. 
- if state_write.balance.is_some() { - existing_state_write.balance = state_write.balance; - } - if state_write.nonce.is_some() { - existing_state_write.nonce = state_write.nonce; - } - if state_write.storage_trie_change { - existing_state_write.storage_trie_change = - state_write.storage_trie_change; - } - if state_write.code_hash.is_some() { - existing_state_write.code_hash = state_write.code_hash; - } - } else { - nodes_used_by_txn.state_writes.insert(*addr, state_write); - } - } - - for (k, v) in storage_written.into_iter() { - if let Some(storage) = nodes_used_by_txn.storage_writes.get_mut(&hash(addr)) { - storage.insert(TrieKey::from_hash(k), rlp::encode(&v).to_vec()); - } else { - nodes_used_by_txn.storage_writes.insert( - hash(addr), - HashMap::from_iter([(TrieKey::from_hash(k), rlp::encode(&v).to_vec())]), - ); - } - } - - let is_precompile = (FIRST_PRECOMPILE_ADDRESS..LAST_PRECOMPILE_ADDRESS) - .contains(&U256::from_big_endian(&addr.0)); - - // Trie witnesses will only include accessed precompile accounts as hash - // nodes if the transaction calling them reverted. If this is the case, we - // shouldn't include them in this transaction's `state_accesses` to allow the - // decoder to build a minimal state trie without hitting any hash node. - if !is_precompile || tries.state.get_by_address(*addr).is_some() { - nodes_used_by_txn.state_accesses.insert(*addr); - } - - match code_usage { - Some(ContractCodeUsage::Read(hash)) => { - contract_code_accessed.insert(hash2code.get(*hash)?); - } - Some(ContractCodeUsage::Write(code)) => { - contract_code_accessed.insert(code.clone()); - hash2code.insert(code.to_vec()); - } - None => {} - } - - if *self_destructed { - nodes_used_by_txn.self_destructed_accounts.insert(*addr); - } - } - - for &addr in extra_state_accesses { - nodes_used_by_txn.state_accesses.insert(addr); - } - - let accounts_with_storage_accesses = nodes_used_by_txn - .storage_accesses - .iter() - .filter(|(_, slots)| !slots.is_empty()) - .map(|(addr, _)| *addr) - .collect::>(); - - let all_accounts_with_non_empty_storage = all_accounts_in_pre_image - .iter() - .filter(|(_, data)| data.storage_root != EMPTY_TRIE_HASH); - - let accounts_with_storage_but_no_storage_accesses = all_accounts_with_non_empty_storage - .filter(|&(addr, _data)| !accounts_with_storage_accesses.contains(addr)) - .map(|(addr, data)| (*addr, data.storage_root)); - - nodes_used_by_txn - .accts_with_unaccessed_storage - .extend(accounts_with_storage_but_no_storage_accesses); - - meta.push(TxnMetaState { - txn_bytes: match txn.meta.byte_code.is_empty() { - false => Some(txn.meta.byte_code.clone()), - true => None, - }, - receipt_node_bytes: check_receipt_bytes( - txn.meta.new_receipt_trie_node_byte.clone(), - )?, - gas_used: txn.meta.gas_used, - created_accounts, - }); - } - - Ok(ProcessedTxnBatchInfo { - nodes_used_by_txn, - contract_code_accessed, - meta, - }) - } -} - -fn check_receipt_bytes(bytes: Vec) -> anyhow::Result> { - match rlp::decode::(&bytes) { - Ok(_) => Ok(bytes), - Err(_) => { - rlp::decode(&bytes).context("couldn't decode receipt as a legacy receipt or raw bytes") - } - } -} - -/// A collection of all the state and storage accesses performed by a batch of -/// transaction. -/// -/// Note that "*_accesses" fields include writes. -#[derive(Debug, Default)] -pub(crate) struct NodesUsedByTxnBatch { - pub state_accesses: HashSet
, - pub state_writes: HashMap, - - pub storage_accesses: HashMap>, - pub storage_writes: HashMap>>, - - /// Hashed address -> storage root. - pub accts_with_unaccessed_storage: HashMap, - pub self_destructed_accounts: HashSet
, -} - -#[derive(Debug, Default, PartialEq)] -pub(crate) struct StateWrite { - pub balance: Option, - pub nonce: Option, - pub storage_trie_change: bool, - pub code_hash: Option, -} - -#[derive(Debug, Default)] -pub(crate) struct TxnMetaState { - /// [`None`] if this is a dummy transaction inserted for padding. - pub txn_bytes: Option>, - pub receipt_node_bytes: Vec, - pub gas_used: u64, - pub created_accounts: BTreeSet
, -} diff --git a/trace_decoder/src/type1.rs b/trace_decoder/src/type1.rs index 019a75c95..aeea0dbb6 100644 --- a/trace_decoder/src/type1.rs +++ b/trace_decoder/src/type1.rs @@ -97,11 +97,11 @@ fn visit( match code { Some(Either::Left(Hash { raw_hash })) => raw_hash.into(), Some(Either::Right(Code { code })) => { - let hash = crate::hash(&code); + let hash = keccak_hash::keccak(&code); frontend.code.insert(code); hash } - None => crate::hash([]), + None => keccak_hash::keccak([]), } }, }; diff --git a/trace_decoder/src/typed_mpt.rs b/trace_decoder/src/typed_mpt.rs index 5a49966a6..9c0d4110f 100644 --- a/trace_decoder/src/typed_mpt.rs +++ b/trace_decoder/src/typed_mpt.rs @@ -9,12 +9,9 @@ use evm_arithmetization::generation::mpt::AccountRlp; use mpt_trie::partial_trie::{HashedPartialTrie, Node, OnOrphanedHashNode, PartialTrie as _}; use u4::{AsNibbles, U4}; -/// Map where keys are [up to 64 nibbles](TrieKey), -/// and values are [`rlp::Encodable`]/[`rlp::Decodable`]. -/// /// See . /// -/// Portions of the trie may be deferred: see [`Self::insert_hash`]. +/// Portions of the trie may be indirected: see [`Self::insert_hash`]. #[derive(Debug, Clone, PartialEq, Eq)] struct TypedMpt { inner: HashedPartialTrie, @@ -186,6 +183,9 @@ pub struct TransactionTrie { } impl TransactionTrie { + pub fn new() -> Self { + Self::default() + } pub fn insert(&mut self, txn_ix: usize, val: Vec) -> anyhow::Result>> { let prev = self .untyped @@ -201,6 +201,22 @@ impl TransactionTrie { pub const fn as_hashed_partial_trie(&self) -> &mpt_trie::partial_trie::HashedPartialTrie { &self.untyped } + /// Indirect (hash) parts of the trie that aren't in `txn_ixs`. + pub fn mask(&mut self, txn_ixs: impl IntoIterator) -> anyhow::Result<()> { + self.untyped = mpt_trie::trie_subsets::create_trie_subset( + &self.untyped, + txn_ixs + .into_iter() + .map(|it| TrieKey::from_txn_ix(it).into_nibbles()), + )?; + Ok(()) + } +} + +impl From for HashedPartialTrie { + fn from(value: TransactionTrie) -> Self { + value.untyped + } } /// Per-block, `txn_ix -> [u8]`. @@ -212,6 +228,9 @@ pub struct ReceiptTrie { } impl ReceiptTrie { + pub fn new() -> Self { + Self::default() + } pub fn insert(&mut self, txn_ix: usize, val: Vec) -> anyhow::Result>> { let prev = self .untyped @@ -227,6 +246,36 @@ impl ReceiptTrie { pub const fn as_hashed_partial_trie(&self) -> &mpt_trie::partial_trie::HashedPartialTrie { &self.untyped } + /// Indirect (hash) parts of the trie that aren't in `txn_ixs`. + pub fn mask(&mut self, txn_ixs: impl IntoIterator) -> anyhow::Result<()> { + self.untyped = mpt_trie::trie_subsets::create_trie_subset( + &self.untyped, + txn_ixs + .into_iter() + .map(|it| TrieKey::from_txn_ix(it).into_nibbles()), + )?; + Ok(()) + } +} + +impl From for HashedPartialTrie { + fn from(value: ReceiptTrie) -> Self { + value.untyped + } +} + +pub trait StateTrie { + fn insert_by_address( + &mut self, + address: Address, + account: AccountRlp, + ) -> anyhow::Result>; + fn insert_hash_by_key(&mut self, key: TrieKey, hash: H256) -> anyhow::Result<()>; + fn get_by_address(&self, address: Address) -> Option; + fn reporting_remove(&mut self, address: Address) -> anyhow::Result>; + fn mask(&mut self, address: impl IntoIterator) -> anyhow::Result<()>; + fn iter(&self) -> impl Iterator + '_; + fn root(&self) -> H256; } /// Global, [`Address`] `->` [`AccountRlp`]. 
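The new `StateTrie` trait above lets the rest of the crate stay generic over the MPT and SMT backends. Below is a sketch of a generic consumer, mirroring the withdrawal-crediting loop in `core.rs` earlier in this diff; the `credit` helper itself is hypothetical, not part of the crate.

```rust
use anyhow::Context as _;
use ethereum_types::{Address, U256};

/// Credit `amount` to the account at `addr` in any `StateTrie` implementation.
fn credit(state: &mut impl StateTrie, addr: Address, amount: U256) -> anyhow::Result<()> {
    let mut acct = state
        .get_by_address(addr)
        .context(format!("no account at {addr:x}"))?;
    acct.balance += amount;
    state
        .insert_by_address(addr, acct)
        .expect("insert must succeed with the same key as a successful `get`");
    Ok(())
}
```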
@@ -259,12 +308,9 @@ impl StateMpt {
             .iter()
             .map(|(key, rlp)| (key.into_hash().expect("key is always H256"), rlp))
     }
-    pub const fn as_hashed_partial_trie(&self) -> &mpt_trie::partial_trie::HashedPartialTrie {
+    pub fn as_hashed_partial_trie(&self) -> &mpt_trie::partial_trie::HashedPartialTrie {
         self.typed.as_hashed_partial_trie()
     }
-    pub fn root(&self) -> H256 {
-        self.typed.root()
-    }
 }
 
 impl StateTrie for StateMpt {
@@ -274,9 +320,9 @@ impl StateTrie for StateMpt {
         account: AccountRlp,
     ) -> anyhow::Result<Option<AccountRlp>> {
         #[expect(deprecated)]
-        self.insert_by_hashed_address(crate::hash(address), account)
+        self.insert_by_hashed_address(keccak_hash::keccak(address), account)
     }
-    /// Insert a deferred part of the trie
+    /// Insert an indirected part of the trie
     fn insert_hash_by_key(&mut self, key: TrieKey, hash: H256) -> anyhow::Result<()> {
         self.typed.insert_hash(key, hash)
     }
@@ -287,19 +333,12 @@ impl StateTrie for StateMpt {
     /// Delete the account at `address`, returning any remaining branch on
     /// collapse
     fn reporting_remove(&mut self, address: Address) -> anyhow::Result<Option<TrieKey>> {
-        Ok(
-            crate::decoding::delete_node_and_report_remaining_key_if_branch_collapsed(
-                self.typed.as_mut_hashed_partial_trie_unchecked(),
-                &TrieKey::from_address(address),
-            )?,
+        delete_node_and_report_remaining_key_if_branch_collapsed(
+            self.typed.as_mut_hashed_partial_trie_unchecked(),
+            TrieKey::from_address(address),
         )
     }
-    fn contains_address(&self, address: Address) -> bool {
-        self.typed
-            .as_hashed_partial_trie()
-            .contains(TrieKey::from_address(address).into_nibbles())
-    }
-    fn trim_to(&mut self, addresses: impl IntoIterator<Item = TrieKey>) -> anyhow::Result<()> {
+    fn mask(&mut self, addresses: impl IntoIterator<Item = TrieKey>) -> anyhow::Result<()> {
         let inner = mpt_trie::trie_subsets::create_trie_subset(
             self.typed.as_hashed_partial_trie(),
             addresses.into_iter().map(TrieKey::into_nibbles),
@@ -310,6 +349,14 @@ impl StateTrie for StateMpt {
         };
         Ok(())
     }
+    fn iter(&self) -> impl Iterator<Item = (H256, AccountRlp)> + '_ {
+        self.typed
+            .iter()
+            .map(|(key, rlp)| (key.into_hash().expect("key is always H256"), rlp))
+    }
+    fn root(&self) -> H256 {
+        self.typed.root()
+    }
 }
 
 impl From<StateMpt> for HashedPartialTrie {
@@ -323,20 +370,7 @@ impl From<StateMpt> for HashedPartialTrie {
 
 pub struct StateSmt {
     address2state: BTreeMap<Address, AccountRlp>,
-    deferred: BTreeMap<TrieKey, H256>,
-}
-
-pub trait StateTrie {
-    fn insert_by_address(
-        &mut self,
-        address: Address,
-        account: AccountRlp,
-    ) -> anyhow::Result<Option<AccountRlp>>;
-    fn insert_hash_by_key(&mut self, key: TrieKey, hash: H256) -> anyhow::Result<()>;
-    fn get_by_address(&self, address: Address) -> Option<AccountRlp>;
-    fn reporting_remove(&mut self, address: Address) -> anyhow::Result<Option<TrieKey>>;
-    fn contains_address(&self, address: Address) -> bool;
-    fn trim_to(&mut self, address: impl IntoIterator<Item = TrieKey>) -> anyhow::Result<()>;
+    indirected: BTreeMap<TrieKey, H256>,
 }
 
 impl StateTrie for StateSmt {
@@ -348,7 +382,7 @@ impl StateTrie for StateSmt {
         Ok(self.address2state.insert(address, account))
     }
     fn insert_hash_by_key(&mut self, key: TrieKey, hash: H256) -> anyhow::Result<()> {
-        self.deferred.insert(key, hash);
+        self.indirected.insert(key, hash);
         Ok(())
     }
     fn get_by_address(&self, address: Address) -> Option<AccountRlp> {
@@ -358,13 +392,18 @@ impl StateTrie for StateSmt {
         self.address2state.remove(&address);
         Ok(None)
     }
-    fn contains_address(&self, address: Address) -> bool {
-        self.address2state.contains_key(&address)
-    }
-    fn trim_to(&mut self, address: impl IntoIterator<Item = TrieKey>) -> anyhow::Result<()> {
+    fn mask(&mut self, address: impl IntoIterator<Item = TrieKey>) -> anyhow::Result<()> {
         let _ = address;
         Ok(())
     }
+    fn iter(&self) -> impl Iterator<Item = (H256, AccountRlp)> + '_ {
+        self.address2state
+            .iter()
+            .map(|(addr, acct)| (keccak_hash::keccak(addr), *acct))
+    }
+    fn root(&self) -> H256 {
+        todo!()
+    }
 }
 
 /// Global, per-account.
@@ -395,8 +434,148 @@ impl StorageTrie {
     pub const fn as_hashed_partial_trie(&self) -> &HashedPartialTrie {
         &self.untyped
     }
-
+    pub fn reporting_remove(&mut self, key: TrieKey) -> anyhow::Result<Option<TrieKey>> {
+        delete_node_and_report_remaining_key_if_branch_collapsed(&mut self.untyped, key)
+    }
     pub fn as_mut_hashed_partial_trie_unchecked(&mut self) -> &mut HashedPartialTrie {
         &mut self.untyped
     }
+    /// Indirect (hash) the parts of the trie that aren't in `paths`.
+    pub fn mask(&mut self, paths: impl IntoIterator<Item = TrieKey>) -> anyhow::Result<()> {
+        self.untyped = mpt_trie::trie_subsets::create_trie_subset(
+            &self.untyped,
+            paths.into_iter().map(TrieKey::into_nibbles),
+        )?;
+        Ok(())
+    }
+}
+
+impl From<StorageTrie> for HashedPartialTrie {
+    fn from(value: StorageTrie) -> Self {
+        value.untyped
+    }
+}
+
+/// If a branch collapse occurred after a delete, then we must ensure that
+/// the other single child that remains also is not hashed when passed into
+/// plonky2. Returns the key to the remaining child if a collapse occurred.
+fn delete_node_and_report_remaining_key_if_branch_collapsed(
+    trie: &mut HashedPartialTrie,
+    key: TrieKey,
+) -> anyhow::Result<Option<TrieKey>> {
+    let old_trace = get_trie_trace(trie, key);
+    trie.delete(key.into_nibbles())?;
+    let new_trace = get_trie_trace(trie, key);
+    Ok(
+        node_deletion_resulted_in_a_branch_collapse(&old_trace, &new_trace)
+            .map(TrieKey::from_nibbles),
+    )
+}
+
+fn get_trie_trace(trie: &HashedPartialTrie, k: TrieKey) -> mpt_trie::utils::TriePath {
+    mpt_trie::special_query::path_for_query(trie, k.into_nibbles(), true).collect()
+}
+
+/// Comparing the path of the deleted key before and after the deletion,
+/// determine if the deletion resulted in a branch collapsing into a leaf or
+/// extension node, and return the path to the remaining child if this
+/// occurred.
+fn node_deletion_resulted_in_a_branch_collapse(
+    old_path: &mpt_trie::utils::TriePath,
+    new_path: &mpt_trie::utils::TriePath,
+) -> Option<Nibbles> {
+    // Collapse requires at least 2 nodes.
+    if old_path.0.len() < 2 {
+        return None;
+    }
+
+    // If the node path length decreased after the delete, then a collapse occurred.
+    // As an aside, note that while it's true that the branch could have collapsed
+    // into an extension node with multiple nodes below it, the query logic will
+    // always stop at most one node after the keys diverge, which guarantees that
+    // the new trie path will always be shorter if a collapse occurred.
+    let branch_collapse_occurred = old_path.0.len() > new_path.0.len();
+
+    // Now we need to determine the key of the only remaining node after the
+    // collapse.
+    branch_collapse_occurred.then(|| mpt_trie::utils::IntoTrieKey::into_key(new_path.iter()))
+}
+
+#[cfg(test)]
+mod tests {
+    use std::array;
+
+    use itertools::Itertools as _;
+    use quickcheck::Arbitrary;
+
+    use super::*;
+
+    quickcheck::quickcheck! {
+        fn quickcheck(
+            kvs: Vec<(TrieKey, Vec<u8>)>,
+            mask_kvs: Vec<(TrieKey, Vec<u8>)>,
+            khs: Vec<(TrieKey, ArbitraryHash)>
+        ) -> () {
+            do_quickcheck(kvs, mask_kvs, khs)
+        }
+    }
+
+    fn do_quickcheck(
+        kvs: Vec<(TrieKey, Vec<u8>)>,
+        mask_kvs: Vec<(TrieKey, Vec<u8>)>,
+        khs: Vec<(TrieKey, ArbitraryHash)>,
+    ) {
+        let mut mpt = HashedPartialTrie::default();
+        let mask = mask_kvs
+            .iter()
+            .map(|(k, _)| k.into_nibbles())
+            .collect::<Vec<_>>();
+        for (k, v) in kvs.into_iter().chain(mask_kvs) {
+            let _ = mpt.insert(k.into_nibbles(), v);
+        }
+        for (k, ArbitraryHash(h)) in khs {
+            let _ = mpt.insert(k.into_nibbles(), h);
+        }
+        let root = mpt.hash();
+        if let Ok(sub) = mpt_trie::trie_subsets::create_trie_subset(&mpt, mask) {
+            assert_eq!(sub.hash(), root)
+        }
+    }
+
+    impl Arbitrary for TrieKey {
+        fn arbitrary(g: &mut quickcheck::Gen) -> Self {
+            Self(Arbitrary::arbitrary(g))
+        }
+
+        fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {
+            let Self(comps) = *self;
+            Box::new(comps.shrink().map(Self))
+        }
+    }
+
+    #[derive(Debug, Clone, Copy)]
+    struct ArbitraryHash(H256);
+    impl ArbitraryHash {
+        pub fn new((a, b, c, d): (u64, u64, u64, u64)) -> Self {
+            let mut iter = [a, b, c, d].into_iter().flat_map(u64::to_ne_bytes);
+            let h = H256(array::from_fn(|_| iter.next().unwrap()));
+            assert_eq!(iter.count(), 0);
+            Self(h)
+        }
+    }
+    impl Arbitrary for ArbitraryHash {
+        fn arbitrary(g: &mut quickcheck::Gen) -> Self {
+            Self::new(Arbitrary::arbitrary(g))
+        }
+
+        fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {
+            let Self(H256(bytes)) = self;
+            let (a, b, c, d) = bytes
+                .chunks_exact(8)
+                .map(|it| u64::from_ne_bytes(it.try_into().unwrap()))
+                .collect_tuple()
+                .unwrap();
+            Box::new((a, b, c, d).shrink().map(Self::new))
+        }
+    }
 }
diff --git a/trace_decoder/src/wire.rs b/trace_decoder/src/wire.rs
index 355d30827..6f56f1e44 100644
--- a/trace_decoder/src/wire.rs
+++ b/trace_decoder/src/wire.rs
@@ -4,7 +4,7 @@
 //!
 //! Fortunately, their opcodes don't conflict, so we can have a single
 //! [`Instruction`] type, with shared parsing logic in this module, and bail on
-//! unsupported instructions later on in the frontend.
+//! unsupported instructions later on.
 //!
 //! This is fine because we don't care about failing fast when parsing.
@@ -68,7 +68,7 @@ pub enum Instruction {
     AccountLeaf {
         key: NonEmpty<Vec<U4>>,
         nonce: Option<u64>,
-        /// BUG(spec): see decode site [`account_leaf`].
+        /// BUG(spec): see parse site [`account_leaf`].
         balance: Option<U256>,
         has_code: bool,
         has_storage: bool,
diff --git a/trace_decoder/tests/check-subsets.rs b/trace_decoder/tests/check-subsets.rs
new file mode 100644
index 000000000..694a7df40
--- /dev/null
+++ b/trace_decoder/tests/check-subsets.rs
@@ -0,0 +1,32 @@
+use common::{cases, Case};
+use itertools::Itertools;
+use mpt_trie::partial_trie::PartialTrie;
+use trace_decoder::{BlockTraceTriePreImages, CombinedPreImages};
+
+mod common;
+
+fn main() -> anyhow::Result<()> {
+    for Case { name, trace, .. } in cases().unwrap() {
+        let BlockTraceTriePreImages::Combined(CombinedPreImages { compact }) =
+            trace.trie_pre_images
+        else {
+            panic!()
+        };
+        let whole = trace_decoder::frontend(trace_decoder::parse(&compact).unwrap())
+            .unwrap()
+            .state
+            .as_hashed_partial_trie()
+            .clone();
+        let all_keys = whole.keys().collect::<Vec<_>>();
+        let len = all_keys.len();
+        for n in 0..len {
+            println!("{name}\t{n}\t{len}");
+            for comb in all_keys.iter().copied().combinations(n) {
+                if let Ok(sub) = mpt_trie::trie_subsets::create_trie_subset(&whole, comb.clone()) {
+                    assert_eq!(sub.hash(), whole.hash(), "{comb:?}")
+                }
+            }
+        }
+    }
+    Ok(())
+}
diff --git a/zero_bin/leader/Cargo.toml b/zero_bin/leader/Cargo.toml
index 5b9a67f34..71d1d9497 100644
--- a/zero_bin/leader/Cargo.toml
+++ b/zero_bin/leader/Cargo.toml
@@ -21,7 +21,7 @@ tokio = { workspace = true }
 serde_json = { workspace = true }
 serde_path_to_error = { workspace = true }
 futures = { workspace = true }
-alloy.workspace = true
+alloy = { workspace = true }
 axum = { workspace = true }
 toml = { workspace = true }
 
@@ -41,7 +41,7 @@ eth_mainnet = [
   "proof_gen/eth_mainnet",
   "prover/eth_mainnet",
   "rpc/eth_mainnet",
-  "zero_bin_common/eth_mainnet"
+  "zero_bin_common/eth_mainnet",
 ]
 cdk_erigon = [
   "evm_arithmetization/cdk_erigon",
@@ -49,7 +49,7 @@ cdk_erigon = [
   "proof_gen/cdk_erigon",
   "prover/cdk_erigon",
   "rpc/cdk_erigon",
-  "zero_bin_common/cdk_erigon"
+  "zero_bin_common/cdk_erigon",
 ]
 
 [build-dependencies]
diff --git a/zero_bin/prover/Cargo.toml b/zero_bin/prover/Cargo.toml
index b4e54b19a..fea054b88 100644
--- a/zero_bin/prover/Cargo.toml
+++ b/zero_bin/prover/Cargo.toml
@@ -9,9 +9,9 @@ keywords.workspace = true
 categories.workspace = true
 
 [dependencies]
-alloy.workspace = true
+alloy = { workspace = true }
 anyhow = { workspace = true }
-clap = {workspace = true, features = ["derive", "string"] }
+clap = { workspace = true, features = ["derive", "string"] }
 futures = { workspace = true }
 num-traits = { workspace = true }
 ops = { workspace = true }
diff --git a/zero_bin/rpc/Cargo.toml b/zero_bin/rpc/Cargo.toml
index 8d4444bb6..d91b15755 100644
--- a/zero_bin/rpc/Cargo.toml
+++ b/zero_bin/rpc/Cargo.toml
@@ -11,7 +11,8 @@ build = "../common/build.rs"
 
 [dependencies]
 __compat_primitive_types = { workspace = true }
-alloy.workspace = true
+alloy = { workspace = true }
+alloy-compat = "0.1.0"
 anyhow = { workspace = true }
 clap = { workspace = true }
 futures = { workspace = true }
@@ -51,4 +52,4 @@ cdk_erigon = [
   "prover/cdk_erigon",
   "trace_decoder/cdk_erigon",
   "zero_bin_common/cdk_erigon",
-]
\ No newline at end of file
+]
diff --git a/zero_bin/rpc/src/native/state.rs b/zero_bin/rpc/src/native/state.rs
index b46eabc9d..24c7b1214 100644
--- a/zero_bin/rpc/src/native/state.rs
+++ b/zero_bin/rpc/src/native/state.rs
@@ -1,12 +1,14 @@
 use std::collections::{HashMap, HashSet};
 use std::sync::Arc;
 
+use alloy::primitives::Bytes;
 use alloy::{
     primitives::{keccak256, Address, StorageKey, B256, U256},
     providers::Provider,
     rpc::types::eth::{Block, BlockTransactionsKind, EIP1186AccountProofResponse},
     transports::Transport,
 };
+use alloy_compat::Compat;
 use anyhow::Context as _;
 use evm_arithmetization::testing_utils::{BEACON_ROOTS_CONTRACT_STATE_KEY, HISTORY_BUFFER_LENGTH};
 use futures::future::{try_join, try_join_all};
@@ -17,8 +19,6 @@ use trace_decoder::{
 };
 use zero_bin_common::provider::CachedProvider;
 
-use crate::Compat;
-
 /// Processes the state witness for the given block.
 pub async fn process_state_witness(
     cached_provider: Arc<CachedProvider<ProviderT, TransportT>>,
@@ -93,13 +93,12 @@ fn insert_beacon_roots_update(
     state_access: &mut HashMap>,
     block: &Block,
 ) -> anyhow::Result<()> {
-    let timestamp = block.header.timestamp;
-
-    const MODULUS: u64 = HISTORY_BUFFER_LENGTH.1;
+    let timestamp = U256::from(block.header.timestamp);
 
-    let keys = HashSet::from_iter([
-        U256::from(timestamp % MODULUS).into(),             // timestamp_idx
-        U256::from((timestamp % MODULUS) + MODULUS).into(), // root_idx
+    let chunk = HISTORY_BUFFER_LENGTH.value.compat();
+    let keys = HashSet::from([
+        (timestamp % chunk).into(),           // timestamp_idx
+        ((timestamp % chunk) + chunk).into(), // root_idx
     ]);
 
     state_access.insert(BEACON_ROOTS_CONTRACT_STATE_KEY.1.into(), keys);
@@ -128,7 +127,7 @@ where
     // Insert account proofs
     for (address, proof) in account_proofs.into_iter() {
-        state.insert_proof(proof.account_proof.compat());
+        state.insert_proof(conv_vec_bytes(proof.account_proof));
 
         let storage_mpt = storage_proofs
@@ -138,17 +137,17 @@ where
                 Default::default(),
             ));
 
         for proof in proof.storage_proof {
-            storage_mpt.insert_proof(proof.proof.compat());
+            storage_mpt.insert_proof(conv_vec_bytes(proof.proof));
         }
     }
 
     // Insert short node variants from next proofs
     for (address, proof) in next_account_proofs.into_iter() {
-        state.insert_short_node_variants_from_proof(proof.account_proof.compat());
+        state.insert_short_node_variants_from_proof(conv_vec_bytes(proof.account_proof));
 
         if let Some(storage_mpt) = storage_proofs.get_mut(&keccak256(address)) {
             for proof in proof.storage_proof {
-                storage_mpt.insert_short_node_variants_from_proof(proof.proof.compat());
+                storage_mpt.insert_short_node_variants_from_proof(conv_vec_bytes(proof.proof));
             }
         }
     }
@@ -156,6 +155,10 @@ where
     Ok((state, storage_proofs))
 }
 
+fn conv_vec_bytes(bytes: Vec<Bytes>) -> Vec<Vec<u8>> {
+    bytes.into_iter().map(|bytes| bytes.to_vec()).collect()
+}
+
 /// Fetches the proof data for the given accounts and associated storage keys.
 async fn fetch_proof_data(
     accounts_state: HashMap<Address, HashSet<StorageKey>>,
diff --git a/zero_bin/tools/prove_stdio.sh b/zero_bin/tools/prove_stdio.sh
index 35e39d400..815a7048d 100755
--- a/zero_bin/tools/prove_stdio.sh
+++ b/zero_bin/tools/prove_stdio.sh
@@ -95,7 +95,7 @@ fi
 # proof. This is useful for quickly testing decoding and all of the
 # other non-proving code.
 if [[ $TEST_ONLY == "test_only" ]]; then
-    cargo run --release --bin leader -- --test-only --runtime in-memory --load-strategy on-demand --block-batch-size $BLOCK_BATCH_SIZE --proof-output-dir $PROOF_OUTPUT_DIR stdio < $INPUT_FILE &> $TEST_OUT_PATH
+    cargo run --quiet --release --bin leader -- --test-only --runtime in-memory --load-strategy on-demand --block-batch-size $BLOCK_BATCH_SIZE --proof-output-dir $PROOF_OUTPUT_DIR stdio < $INPUT_FILE &> $TEST_OUT_PATH
     if grep -q 'All proof witnesses have been generated successfully.' $TEST_OUT_PATH; then
         echo -e "\n\nSuccess - Note this was just a test, not a proof"
         rm $TEST_OUT_PATH
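Side note on the insert_beacon_roots_update hunk above (illustrative, not part of the diff): EIP-4788 keeps parent beacon block roots in a ring buffer of HISTORY_BUFFER_LENGTH = 8191 storage slots, with the timestamp stored at slot `timestamp % 8191` and the root at that index plus 8191; those are the two storage keys the hunk marks as accessed. A minimal sketch of the arithmetic with plain integers, assuming the `HISTORY_BUFFER_LENGTH` constant in evm_arithmetization matches the EIP value (the real code performs the same computation on `U256` via `HISTORY_BUFFER_LENGTH.value.compat()`):

// EIP-4788 ring-buffer indexing, with u64 stand-ins for the U256 math in the hunk above.
fn beacon_roots_slots(timestamp: u64) -> (u64, u64) {
    const HISTORY_BUFFER_LENGTH: u64 = 8191; // per EIP-4788
    let timestamp_idx = timestamp % HISTORY_BUFFER_LENGTH;
    let root_idx = timestamp_idx + HISTORY_BUFFER_LENGTH;
    (timestamp_idx, root_idx)
}

fn main() {
    // Any block timestamp works; this one is arbitrary.
    let (t_idx, r_idx) = beacon_roots_slots(1_712_582_400);
    assert!(t_idx < 8191);
    assert_eq!(r_idx, t_idx + 8191);
    println!("timestamp_idx = {t_idx}, root_idx = {r_idx}");
}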