From e1def1bfaa304ffa461428ffd36eb8fbdc5e2aa9 Mon Sep 17 00:00:00 2001 From: Robin Salen <30937548+Nashtare@users.noreply.github.com> Date: Mon, 16 Sep 2024 10:52:16 -0400 Subject: [PATCH 1/5] Small fixes to `rpc` and `common` modules (#629) * fix: do not abort on witness fetching * fix: properly deserialize proofs * Remove unrelated code * Apply comment --- zero_bin/common/src/fs.rs | 10 ++++++++-- zero_bin/rpc/src/main.rs | 2 -- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/zero_bin/common/src/fs.rs b/zero_bin/common/src/fs.rs index 6d274903e..7576cf20f 100644 --- a/zero_bin/common/src/fs.rs +++ b/zero_bin/common/src/fs.rs @@ -1,6 +1,7 @@ use std::fs::File; use std::path::PathBuf; +use anyhow::anyhow; use proof_gen::proof_types::GeneratedBlockProof; pub fn generate_block_proof_file_name(directory: &Option<&str>, block_height: u64) -> PathBuf { @@ -17,6 +18,11 @@ pub fn get_previous_proof(path: Option) -> anyhow::Result = serde_path_to_error::deserialize(des)?; + // Individual proofs are serialized as vector to match other output formats. + if proof.len() != 1 { + return Err(anyhow!("Invalid proof format, expected vector of generated block proofs with a single element.")); + } + + Ok(Some(proof[0].to_owned())) } diff --git a/zero_bin/rpc/src/main.rs b/zero_bin/rpc/src/main.rs index 12594877a..308ea8cd0 100644 --- a/zero_bin/rpc/src/main.rs +++ b/zero_bin/rpc/src/main.rs @@ -14,7 +14,6 @@ use rpc::{retry::build_http_retry_provider, RpcType}; use tracing_subscriber::{prelude::*, EnvFilter}; use url::Url; use zero_bin_common::block_interval::BlockIntervalStream; -use zero_bin_common::pre_checks::check_previous_proof_and_checkpoint; use zero_bin_common::provider::CachedProvider; use zero_bin_common::version; use zero_bin_common::{block_interval::BlockInterval, prover_state::persistence::CIRCUIT_VERSION}; @@ -88,7 +87,6 @@ where let checkpoint_block_number = params .checkpoint_block_number .unwrap_or(params.start_block - 1); - check_previous_proof_and_checkpoint(checkpoint_block_number, &None, params.start_block)?; let block_interval = BlockInterval::Range(params.start_block..params.end_block + 1); let mut block_prover_inputs = Vec::new(); From 6932ab3d2ae91d747d2c02e261564f31e555601c Mon Sep 17 00:00:00 2001 From: 0xaatif <169152398+0xaatif@users.noreply.github.com> Date: Mon, 16 Sep 2024 19:35:48 +0100 Subject: [PATCH 2/5] refactor: one zero package (#625) --- .github/CODEOWNERS | 2 +- .github/labeler.yml | 4 +- .github/workflows/ci.yml | 22 +-- .github/workflows/jerigon-native.yml | 6 +- .github/workflows/jerigon-zero.yml | 6 +- .gitignore | 29 +++- Cargo.lock | 154 +++--------------- Cargo.toml | 15 +- .../witness_b19807080.json | 0 .../witness_b3_b6.json | 0 proof_gen/src/lib.rs | 1 - {zero_bin/tools => scripts}/prove_rpc.sh | 11 +- {zero_bin/tools => scripts}/prove_stdio.sh | 14 +- trace_decoder/Cargo.toml | 18 +- trace_decoder/src/lib.rs | 2 - trace_decoder/tests/common/mod.rs | 2 +- {zero_bin/common => zero}/Cargo.toml | 44 +++-- {zero_bin => zero}/README.md | 4 +- {zero_bin/common => zero}/build.rs | 0 .../src/main.rs => zero/src/bin/leader.rs | 27 +-- .../leader/src => zero/src/bin/leader}/cli.rs | 6 +- .../src => zero/src/bin/leader}/client.rs | 14 +- .../src => zero/src/bin/leader}/http.rs | 2 +- .../src => zero/src/bin/leader}/stdio.rs | 5 +- .../rpc/src/main.rs => zero/src/bin/rpc.rs | 13 +- .../src/main.rs => zero/src/bin/verifier.rs | 9 +- .../src => zero/src/bin/verifier}/cli.rs | 2 +- .../src => zero/src/bin/verifier}/init.rs | 0 .../src/main.rs => 
zero/src/bin/worker.rs | 5 +- .../common => zero}/src/block_interval.rs | 2 +- {zero_bin/common => zero}/src/debug_utils.rs | 0 {zero_bin/common => zero}/src/env.rs | 0 {zero_bin/common => zero}/src/fs.rs | 0 {zero_bin/common => zero}/src/lib.rs | 3 + zero_bin/ops/src/lib.rs => zero/src/ops.rs | 7 +- {zero_bin/common => zero}/src/parsing.rs | 0 {zero_bin/common => zero}/src/pre_checks.rs | 0 .../prover/src/lib.rs => zero/src/prover.rs | 4 +- .../prover/src => zero/src/prover}/cli.rs | 2 +- .../src/prover_state/circuit.rs | 0 .../common => zero}/src/prover_state/cli.rs | 0 .../common => zero}/src/prover_state/mod.rs | 0 .../src/prover_state/persistence.rs | 0 {zero_bin/common => zero}/src/provider.rs | 0 {zero_bin/rpc/src => zero/src/rpc}/jerigon.rs | 4 +- .../rpc/src/lib.rs => zero/src/rpc/mod.rs | 5 +- .../rpc/src => zero/src/rpc}/native/mod.rs | 7 +- .../rpc/src => zero/src/rpc}/native/state.rs | 3 +- .../rpc/src => zero/src/rpc}/native/txn.rs | 2 +- {zero_bin/rpc/src => zero/src/rpc}/retry.rs | 0 {zero_bin/common => zero}/src/tracing.rs | 0 {zero_bin/common => zero}/src/version.rs | 0 zero_bin/.gitignore | 15 -- zero_bin/leader/Cargo.toml | 69 -------- zero_bin/ops/Cargo.toml | 39 ----- zero_bin/prover/Cargo.toml | 60 ------- zero_bin/rpc/Cargo.toml | 65 -------- zero_bin/verifier/Cargo.toml | 41 ----- zero_bin/worker/Cargo.toml | 45 ----- 59 files changed, 169 insertions(+), 621 deletions(-) rename {zero_bin/tools/artifacts => artifacts}/witness_b19807080.json (100%) rename {zero_bin/tools/artifacts => artifacts}/witness_b3_b6.json (100%) rename {zero_bin/tools => scripts}/prove_rpc.sh (86%) rename {zero_bin/tools => scripts}/prove_stdio.sh (89%) rename {zero_bin/common => zero}/Cargo.toml (52%) rename {zero_bin => zero}/README.md (98%) rename {zero_bin/common => zero}/build.rs (100%) rename zero_bin/leader/src/main.rs => zero/src/bin/leader.rs (89%) rename {zero_bin/leader/src => zero/src/bin/leader}/cli.rs (95%) rename {zero_bin/leader/src => zero/src/bin/leader}/client.rs (88%) rename {zero_bin/leader/src => zero/src/bin/leader}/http.rs (98%) rename {zero_bin/leader/src => zero/src/bin/leader}/stdio.rs (91%) rename zero_bin/rpc/src/main.rs => zero/src/bin/rpc.rs (96%) rename zero_bin/verifier/src/main.rs => zero/src/bin/verifier.rs (94%) rename {zero_bin/verifier/src => zero/src/bin/verifier}/cli.rs (87%) rename {zero_bin/verifier/src => zero/src/bin/verifier}/init.rs (100%) rename zero_bin/worker/src/main.rs => zero/src/bin/worker.rs (92%) rename {zero_bin/common => zero}/src/block_interval.rs (99%) rename {zero_bin/common => zero}/src/debug_utils.rs (100%) rename {zero_bin/common => zero}/src/env.rs (100%) rename {zero_bin/common => zero}/src/fs.rs (100%) rename {zero_bin/common => zero}/src/lib.rs (92%) rename zero_bin/ops/src/lib.rs => zero/src/ops.rs (98%) rename {zero_bin/common => zero}/src/parsing.rs (100%) rename {zero_bin/common => zero}/src/pre_checks.rs (100%) rename zero_bin/prover/src/lib.rs => zero/src/prover.rs (99%) rename {zero_bin/prover/src => zero/src/prover}/cli.rs (97%) rename {zero_bin/common => zero}/src/prover_state/circuit.rs (100%) rename {zero_bin/common => zero}/src/prover_state/cli.rs (100%) rename {zero_bin/common => zero}/src/prover_state/mod.rs (100%) rename {zero_bin/common => zero}/src/prover_state/persistence.rs (100%) rename {zero_bin/common => zero}/src/provider.rs (100%) rename {zero_bin/rpc/src => zero/src/rpc}/jerigon.rs (96%) rename zero_bin/rpc/src/lib.rs => zero/src/rpc/mod.rs (99%) rename {zero_bin/rpc/src => zero/src/rpc}/native/mod.rs 
(90%) rename {zero_bin/rpc/src => zero/src/rpc}/native/state.rs (99%) rename {zero_bin/rpc/src => zero/src/rpc}/native/txn.rs (99%) rename {zero_bin/rpc/src => zero/src/rpc}/retry.rs (100%) rename {zero_bin/common => zero}/src/tracing.rs (100%) rename {zero_bin/common => zero}/src/version.rs (100%) delete mode 100644 zero_bin/.gitignore delete mode 100644 zero_bin/leader/Cargo.toml delete mode 100644 zero_bin/ops/Cargo.toml delete mode 100644 zero_bin/prover/Cargo.toml delete mode 100644 zero_bin/rpc/Cargo.toml delete mode 100644 zero_bin/verifier/Cargo.toml delete mode 100644 zero_bin/worker/Cargo.toml diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 3872f92ac..a3608580d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,6 +1,6 @@ * @muursh @Nashtare /evm_arithmetization/ @wborgeaud @muursh @Nashtare @LindaGuiga -/zero_bin/ @muursh @Nashtare @atanmarko +/zero/ @muursh @Nashtare @atanmarko /smt_trie/ @0xaatif @muursh @Nashtare /mpt_trie/ @0xaatif @Nashtare @muursh /trace_decoder/ @0xaatif @muursh @Nashtare diff --git a/.github/labeler.yml b/.github/labeler.yml index 72647b77b..d64cda1ec 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -18,10 +18,10 @@ - changed-files: - any-glob-to-any-file: proof_gen/** -# Add 'crate: zero_bin' label to any changes within 'zero_bin' folder. +# Add 'crate: zero_bin' label to any changes within 'zero' folder. 'crate: zero_bin': - changed-files: - - any-glob-to-any-file: zero_bin/** + - any-glob-to-any-file: zero/** # Add 'specs' label to any changes within 'docs' or `book` folder. 'specs': diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1f34dd53f..066789eea 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -140,13 +140,7 @@ jobs: - name: Test in zero_bin subdirectory run: | - cargo test --manifest-path zero_bin/common/Cargo.toml - cargo test --manifest-path zero_bin/leader/Cargo.toml - cargo test --manifest-path zero_bin/ops/Cargo.toml - cargo test --manifest-path zero_bin/prover/Cargo.toml - cargo test --manifest-path zero_bin/rpc/Cargo.toml - cargo test --manifest-path zero_bin/verifier/Cargo.toml - cargo test --manifest-path zero_bin/worker/Cargo.toml + cargo test --manifest-path zero/Cargo.toml env: RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0 @@ -188,9 +182,7 @@ jobs: uses: actions/checkout@v3 - name: Run the script - run: | - pushd zero_bin/tools - ./prove_stdio.sh artifacts/witness_b19807080.json + run: ./scripts/prove_stdio.sh artifacts/witness_b19807080.json simple_proof_witness_only: name: Execute bash script to generate the proof witness for a small block. @@ -201,9 +193,7 @@ jobs: uses: actions/checkout@v3 - name: Run the script - run: | - pushd zero_bin/tools - ./prove_stdio.sh artifacts/witness_b19807080.json test_only + run: ./scripts/prove_stdio.sh artifacts/witness_b19807080.json test_only multi_blocks_proof_regular: name: Execute bash script to generate and verify a proof for multiple blocks using parallel proving. 
@@ -214,9 +204,7 @@ jobs: uses: actions/checkout@v3 - name: Run the script - run: | - pushd zero_bin/tools - ./prove_stdio.sh artifacts/witness_b3_b6.json + run: ./scripts/prove_stdio.sh artifacts/witness_b3_b6.json lints: name: Rustdoc, Formatting and Clippy @@ -244,7 +232,7 @@ jobs: run: cargo clippy --all-targets -- -D warnings -A incomplete-features - name: Run cargo clippy (with `cdk_erigon` flag) - run: cargo clippy --all-targets --no-default-features --features cdk_erigon -- -D warnings -A incomplete-features + run: cargo clippy --package zero --all-targets --no-default-features --features cdk_erigon -- -D warnings -A incomplete-features - name: Rustdoc run: cargo doc --all diff --git a/.github/workflows/jerigon-native.yml b/.github/workflows/jerigon-native.yml index 6c5967278..7c79454a1 100644 --- a/.github/workflows/jerigon-native.yml +++ b/.github/workflows/jerigon-native.yml @@ -74,17 +74,15 @@ jobs: - name: Run prove blocks with native tracer in test_only mode run: | ETH_RPC_URL="$(kurtosis port print cancun-testnet el-2-erigon-lighthouse ws-rpc)" - cd zero_bin/tools ulimit -n 8192 - OUTPUT_TO_TERMINAL=true ./prove_rpc.sh 0x1 0xf $ETH_RPC_URL native true 3000 100 test_only + OUTPUT_TO_TERMINAL=true ./scripts/prove_rpc.sh 0x1 0xf $ETH_RPC_URL native true 3000 100 test_only echo "Proving blocks in test_only mode finished" - name: Run prove blocks with native tracer in real mode run: | ETH_RPC_URL="$(kurtosis port print cancun-testnet el-2-erigon-lighthouse ws-rpc)" - cd zero_bin/tools rm -rf proofs/* circuits/* ./proofs.json test.out verify.out leader.out - OUTPUT_TO_TERMINAL=true RUN_VERIFICATION=true ./prove_rpc.sh 0x4 0x7 $ETH_RPC_URL native true 3000 100 + OUTPUT_TO_TERMINAL=true RUN_VERIFICATION=true ./scripts/prove_rpc.sh 0x4 0x7 $ETH_RPC_URL native true 3000 100 echo "Proving blocks in real mode finished" - name: Shut down network diff --git a/.github/workflows/jerigon-zero.yml b/.github/workflows/jerigon-zero.yml index a7e6fcb3e..ca14085dc 100644 --- a/.github/workflows/jerigon-zero.yml +++ b/.github/workflows/jerigon-zero.yml @@ -75,18 +75,16 @@ jobs: - name: Run prove blocks with zero tracer in test_only mode run: | ETH_RPC_URL="$(kurtosis port print cancun-testnet el-2-erigon-lighthouse ws-rpc)" - cd zero_bin/tools ulimit -n 8192 - OUTPUT_TO_TERMINAL=true ./prove_rpc.sh 0x1 0xf $ETH_RPC_URL jerigon true 3000 100 test_only + OUTPUT_TO_TERMINAL=true ./scripts/prove_rpc.sh 0x1 0xf $ETH_RPC_URL jerigon true 3000 100 test_only echo "Proving blocks in test_only mode finished" - name: Run prove blocks with zero tracer in real mode run: | ETH_RPC_URL="$(kurtosis port print cancun-testnet el-2-erigon-lighthouse ws-rpc)" - cd zero_bin/tools rm -rf proofs/* circuits/* ./proofs.json test.out verify.out leader.out - OUTPUT_TO_TERMINAL=true RUN_VERIFICATION=true ./prove_rpc.sh 0x2 0x5 $ETH_RPC_URL jerigon true 3000 100 + OUTPUT_TO_TERMINAL=true RUN_VERIFICATION=true ./scripts/prove_rpc.sh 0x2 0x5 $ETH_RPC_URL jerigon true 3000 100 echo "Proving blocks in real mode finished" - name: Shut down network diff --git a/.gitignore b/.gitignore index f035f7002..0016ff3b4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,25 @@ -/**/target +# Alphabetically, by-category + +# Build artifacts +################# /target -**/*.rs.bk -*.iml -.idea/ -.vscode + +# Editors +######### +/.idea/ +/.vscode +/*.iml + +# Misc +###### /**/*.ignoreme -**/output.log +/**/*.ipynb +/**/*.log +/**/*.out +# Hardcoded in https://github.com/0xPolygonZero/zk_evm/blob/v0.6.0/zero_bin/common/src/debug_utils.rs#L9 
+/debug + +# Proof artifacts +################# +/proofs +/**/*.zkproof diff --git a/Cargo.lock b/Cargo.lock index 1791c16ed..16fda46f7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2850,34 +2850,6 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" -[[package]] -name = "leader" -version = "0.1.0" -dependencies = [ - "alloy", - "anyhow", - "axum", - "cargo_metadata", - "clap", - "evm_arithmetization", - "futures", - "ops", - "paladin-core", - "proof_gen", - "prover", - "rpc", - "serde", - "serde_json", - "serde_path_to_error", - "tokio", - "toml", - "tracing", - "tracing-subscriber", - "vergen", - "zero_bin_common", - "zk_evm_common", -] - [[package]] name = "libc" version = "0.2.158" @@ -3293,20 +3265,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "ops" -version = "0.1.0" -dependencies = [ - "evm_arithmetization", - "keccak-hash 0.10.0", - "paladin-core", - "proof_gen", - "serde", - "tracing", - "zero_bin_common", - "zk_evm_common", -] - [[package]] name = "option-ext" version = "0.2.0" @@ -3880,31 +3838,6 @@ dependencies = [ "unarray", ] -[[package]] -name = "prover" -version = "0.1.0" -dependencies = [ - "alloy", - "anyhow", - "clap", - "evm_arithmetization", - "futures", - "num-traits", - "ops", - "paladin-core", - "plonky2", - "plonky2_maybe_rayon 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "proof_gen", - "ruint", - "serde", - "serde_json", - "tokio", - "trace_decoder", - "tracing", - "zero_bin_common", - "zk_evm_common", -] - [[package]] name = "quick-error" version = "1.2.3" @@ -4163,37 +4096,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "rpc" -version = "0.1.0" -dependencies = [ - "alloy", - "alloy-compat", - "anyhow", - "cargo_metadata", - "clap", - "compat", - "evm_arithmetization", - "futures", - "hex", - "itertools 0.13.0", - "mpt_trie", - "primitive-types 0.12.2", - "proof_gen", - "prover", - "serde", - "serde_json", - "tokio", - "tower 0.4.13", - "trace_decoder", - "tracing", - "tracing-subscriber", - "url", - "vergen", - "zero_bin_common", - "zk_evm_common", -] - [[package]] name = "ruint" version = "1.12.3" @@ -5165,7 +5067,6 @@ dependencies = [ "plonky2_maybe_rayon 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "pretty_assertions", "pretty_env_logger", - "prover", "rlp", "serde", "serde_json", @@ -5176,6 +5077,7 @@ dependencies = [ "thiserror", "u4", "winnow", + "zero", "zk_evm_common", ] @@ -5410,24 +5312,6 @@ dependencies = [ "rustversion", ] -[[package]] -name = "verifier" -version = "0.1.0" -dependencies = [ - "anyhow", - "cargo_metadata", - "clap", - "dotenvy", - "proof_gen", - "serde_json", - "serde_path_to_error", - "tracing", - "tracing-subscriber", - "vergen", - "zero_bin_common", - "zk_evm_common", -] - [[package]] name = "version_check" version = "0.9.5" @@ -5779,23 +5663,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "worker" -version = "0.1.0" -dependencies = [ - "anyhow", - "cargo_metadata", - "clap", - "dotenvy", - "jemallocator", - "ops", - "paladin-core", - "tokio", - "tracing-subscriber", - "vergen", - "zero_bin_common", -] - [[package]] name = "wyz" version = "0.5.1" @@ -5846,29 +5713,46 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] -name = "zero_bin_common" +name = "zero" version = "0.1.0" dependencies = [ "alloy", + "alloy-compat", "anyhow", "async-stream", + "axum", 
"cargo_metadata", "clap", + "compat", "directories", "dotenvy", "evm_arithmetization", "futures", + "hex", + "itertools 0.13.0", + "jemallocator", + "keccak-hash 0.10.0", "lru", + "mpt_trie", + "num-traits", "once_cell", + "paladin-core", "plonky2", + "plonky2_maybe_rayon 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "primitive-types 0.12.2", "proof_gen", + "ruint", "serde", "serde_json", "serde_path_to_error", "thiserror", "tokio", + "toml", + "tower 0.4.13", + "trace_decoder", "tracing", "tracing-subscriber", + "url", "vergen", "zk_evm_common", ] diff --git a/Cargo.toml b/Cargo.toml index 9628a345c..5bbffbb10 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,13 +8,7 @@ members = [ "proof_gen", "smt_trie", "trace_decoder", - "zero_bin/common", - "zero_bin/leader", - "zero_bin/ops", - "zero_bin/prover", - "zero_bin/rpc", - "zero_bin/verifier", - "zero_bin/worker", + "zero", ] resolver = "2" @@ -118,12 +112,7 @@ smt_trie = { path = "smt_trie", version = "0.1.1" } trace_decoder = { path = "trace_decoder", version = "0.6.0", default-features = false } zk_evm_common = { path = "common", version = "0.1.0" } zk_evm_proc_macro = { path = "proc_macro", version = "0.1.0" } - -# zero-bin related dependencies -ops = { path = "zero_bin/ops", default-features = false } -prover = { path = "zero_bin/prover", default-features = false } -rpc = { path = "zero_bin/rpc", default-features = false } -zero_bin_common = { path = "zero_bin/common", default-features = false } +zero = { path = "zero", default-features = false } # plonky2-related dependencies plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "dc77c77f2b06500e16ad4d7f1c2b057903602eed" } diff --git a/zero_bin/tools/artifacts/witness_b19807080.json b/artifacts/witness_b19807080.json similarity index 100% rename from zero_bin/tools/artifacts/witness_b19807080.json rename to artifacts/witness_b19807080.json diff --git a/zero_bin/tools/artifacts/witness_b3_b6.json b/artifacts/witness_b3_b6.json similarity index 100% rename from zero_bin/tools/artifacts/witness_b3_b6.json rename to artifacts/witness_b3_b6.json diff --git a/proof_gen/src/lib.rs b/proof_gen/src/lib.rs index f1e48a109..bdfa7943e 100644 --- a/proof_gen/src/lib.rs +++ b/proof_gen/src/lib.rs @@ -145,5 +145,4 @@ pub mod verifier_state; // Re-exports -pub use prover_state::ProverState; pub use verifier_state::VerifierState; diff --git a/zero_bin/tools/prove_rpc.sh b/scripts/prove_rpc.sh similarity index 86% rename from zero_bin/tools/prove_rpc.sh rename to scripts/prove_rpc.sh index 1c7420491..736b9a943 100755 --- a/zero_bin/tools/prove_rpc.sh +++ b/scripts/prove_rpc.sh @@ -33,10 +33,9 @@ if ! [[ $8 == "test_only" ]]; then export MEMORY_AFTER_CIRCUIT_SIZE="7..23" fi -# Force the working directory to always be the `tools/` directory. -TOOLS_DIR=$(dirname $(realpath "$0")) +REPO_ROOT=$(git rev-parse --show-toplevel) -PROOF_OUTPUT_DIR="${TOOLS_DIR}/proofs" +PROOF_OUTPUT_DIR="${REPO_ROOT}/proofs" OUT_LOG_PATH="${PROOF_OUTPUT_DIR}/b$1_$2.log" ALWAYS_WRITE_LOGS=0 # Change this to `1` if you always want logs to be written. TOT_BLOCKS=$(($2-$1+1)) @@ -110,7 +109,7 @@ fi if [[ $8 == "test_only" ]]; then # test only run echo "Proving blocks ${BLOCK_INTERVAL} in a test_only mode now... 
(Total: ${TOT_BLOCKS})" - command='cargo r --release --bin leader -- --test-only --runtime in-memory --load-strategy on-demand --proof-output-dir $PROOF_OUTPUT_DIR --block-batch-size $BLOCK_BATCH_SIZE rpc --rpc-type "$NODE_RPC_TYPE" --rpc-url "$NODE_RPC_URL" --block-interval $BLOCK_INTERVAL $PREV_PROOF_EXTRA_ARG --backoff "$BACKOFF" --max-retries "$RETRIES" ' + command='cargo r --release --package zero --bin leader -- --test-only --runtime in-memory --load-strategy on-demand --proof-output-dir $PROOF_OUTPUT_DIR --block-batch-size $BLOCK_BATCH_SIZE rpc --rpc-type "$NODE_RPC_TYPE" --rpc-url "$NODE_RPC_URL" --block-interval $BLOCK_INTERVAL $PREV_PROOF_EXTRA_ARG --backoff "$BACKOFF" --max-retries "$RETRIES" ' if [ "$OUTPUT_TO_TERMINAL" = true ]; then eval $command retVal=$? @@ -133,7 +132,7 @@ if [[ $8 == "test_only" ]]; then else # normal run echo "Proving blocks ${BLOCK_INTERVAL} now... (Total: ${TOT_BLOCKS})" - command='cargo r --release --bin leader -- --runtime in-memory --load-strategy on-demand --proof-output-dir $PROOF_OUTPUT_DIR --block-batch-size $BLOCK_BATCH_SIZE rpc --rpc-type "$NODE_RPC_TYPE" --rpc-url "$3" --block-interval $BLOCK_INTERVAL $PREV_PROOF_EXTRA_ARG --backoff "$BACKOFF" --max-retries "$RETRIES" ' + command='cargo r --release --package zero --bin leader -- --runtime in-memory --load-strategy on-demand --proof-output-dir $PROOF_OUTPUT_DIR --block-batch-size $BLOCK_BATCH_SIZE rpc --rpc-type "$NODE_RPC_TYPE" --rpc-url "$3" --block-interval $BLOCK_INTERVAL $PREV_PROOF_EXTRA_ARG --backoff "$BACKOFF" --max-retries "$RETRIES" ' if [ "$OUTPUT_TO_TERMINAL" = true ]; then eval $command echo -e "Proof generation finished with result: $?" @@ -161,7 +160,7 @@ if [ "$RUN_VERIFICATION" = true ]; then proof_file_name=$PROOF_OUTPUT_DIR/b$END_BLOCK.zkproof echo "Verifying the proof of the latest block in the interval:" $proof_file_name - cargo r --release --bin verifier -- -f $proof_file_name > $PROOF_OUTPUT_DIR/verify.out 2>&1 + cargo r --release --package zero --bin verifier -- -f $proof_file_name > $PROOF_OUTPUT_DIR/verify.out 2>&1 if grep -q 'All proofs verified successfully!' $PROOF_OUTPUT_DIR/verify.out; then echo "$proof_file_name verified successfully!"; diff --git a/zero_bin/tools/prove_stdio.sh b/scripts/prove_stdio.sh similarity index 89% rename from zero_bin/tools/prove_stdio.sh rename to scripts/prove_stdio.sh index 815a7048d..f54969930 100755 --- a/zero_bin/tools/prove_stdio.sh +++ b/scripts/prove_stdio.sh @@ -18,15 +18,15 @@ else fi # Force the working directory to always be the `tools/` directory. -TOOLS_DIR=$(dirname $(realpath "$0")) -PROOF_OUTPUT_DIR="${TOOLS_DIR}/proofs" +REPO_ROOT=$(git rev-parse --show-toplevel) +PROOF_OUTPUT_DIR="${REPO_ROOT}/proofs" BLOCK_BATCH_SIZE="${BLOCK_BATCH_SIZE:-8}" echo "Block batch size: $BLOCK_BATCH_SIZE" -OUTPUT_LOG="${TOOLS_DIR}/output.log" +OUTPUT_LOG="${REPO_ROOT}/output.log" PROOFS_FILE_LIST="${PROOF_OUTPUT_DIR}/proof_files.json" -TEST_OUT_PATH="${TOOLS_DIR}/test.out" +TEST_OUT_PATH="${REPO_ROOT}/test.out" # Configured Rayon and Tokio with rough defaults export RAYON_NUM_THREADS=$num_procs @@ -95,7 +95,7 @@ fi # proof. This is useful for quickly testing decoding and all of the # other non-proving code. 
if [[ $TEST_ONLY == "test_only" ]]; then - cargo run --quiet --release --bin leader -- --test-only --runtime in-memory --load-strategy on-demand --block-batch-size $BLOCK_BATCH_SIZE --proof-output-dir $PROOF_OUTPUT_DIR stdio < $INPUT_FILE &> $TEST_OUT_PATH + cargo run --quiet --release --package zero --bin leader -- --test-only --runtime in-memory --load-strategy on-demand --block-batch-size $BLOCK_BATCH_SIZE --proof-output-dir $PROOF_OUTPUT_DIR stdio < $INPUT_FILE &> $TEST_OUT_PATH if grep -q 'All proof witnesses have been generated successfully.' $TEST_OUT_PATH; then echo -e "\n\nSuccess - Note this was just a test, not a proof" rm $TEST_OUT_PATH @@ -110,7 +110,7 @@ cargo build --release --jobs "$num_procs" start_time=$(date +%s%N) -"${TOOLS_DIR}/../../target/release/leader" --runtime in-memory --load-strategy on-demand --block-batch-size $BLOCK_BATCH_SIZE \ +"${REPO_ROOT}/target/release/leader" --runtime in-memory --load-strategy on-demand --block-batch-size $BLOCK_BATCH_SIZE \ --proof-output-dir $PROOF_OUTPUT_DIR stdio < $INPUT_FILE &> $OUTPUT_LOG end_time=$(date +%s%N) @@ -125,7 +125,7 @@ cat $PROOFS_FILE_LIST | while read proof_file; do echo "Verifying proof file $proof_file" verify_file=$PROOF_OUTPUT_DIR/verify_$(basename $proof_file).out - "${TOOLS_DIR}/../../target/release/verifier" -f $proof_file | tee $verify_file + "${REPO_ROOT}/target/release/verifier" -f $proof_file | tee $verify_file if grep -q 'All proofs verified successfully!' $verify_file; then echo "Proof verification for file $proof_file successful"; rm $verify_file # we keep the generated proof for potential reuse diff --git a/trace_decoder/Cargo.toml b/trace_decoder/Cargo.toml index 262185c7a..7b9aa83e2 100644 --- a/trace_decoder/Cargo.toml +++ b/trace_decoder/Cargo.toml @@ -54,26 +54,16 @@ glob = "0.3.1" libtest-mimic = "0.7.3" plonky2_maybe_rayon = { workspace = true } pretty_assertions = "1.4.0" +zero = { workspace = true, features = ["eth_mainnet"] } pretty_env_logger = { workspace = true } -prover = { workspace = true } serde_json = { workspace = true } serde_path_to_error = { workspace = true } - [features] default = ["eth_mainnet"] -eth_mainnet = [ - "evm_arithmetization/eth_mainnet", - "prover/eth_mainnet", -] -cdk_erigon = [ - "evm_arithmetization/cdk_erigon", - "prover/cdk_erigon", -] -polygon_pos = [ - "evm_arithmetization/polygon_pos", - "prover/polygon_pos", -] +eth_mainnet = ["evm_arithmetization/eth_mainnet"] +cdk_erigon = ["evm_arithmetization/cdk_erigon"] +polygon_pos = ["evm_arithmetization/polygon_pos"] [[bench]] name = "block_processing" diff --git a/trace_decoder/src/lib.rs b/trace_decoder/src/lib.rs index fba82d8f7..8c54eea8e 100644 --- a/trace_decoder/src/lib.rs +++ b/trace_decoder/src/lib.rs @@ -41,8 +41,6 @@ #![warn(missing_debug_implementations)] #![warn(missing_docs)] -zk_evm_common::check_chain_features!(); - /// Over RPC, ethereum nodes expose their tries as a series of binary /// [`wire::Instruction`]s in a node-dependant format. 
/// diff --git a/trace_decoder/tests/common/mod.rs b/trace_decoder/tests/common/mod.rs index 51c74f75c..0183c785a 100644 --- a/trace_decoder/tests/common/mod.rs +++ b/trace_decoder/tests/common/mod.rs @@ -3,9 +3,9 @@ use std::{fs::File, path::Path}; use alloy::rpc::types::Header; use anyhow::{ensure, Context as _}; use camino::Utf8Path; -use prover::BlockProverInput; use serde::de::DeserializeOwned; use trace_decoder::{BlockTrace, OtherBlockData}; +use zero::prover::BlockProverInput; pub fn cases() -> anyhow::Result> { print!("loading test vectors..."); diff --git a/zero_bin/common/Cargo.toml b/zero/Cargo.toml similarity index 52% rename from zero_bin/common/Cargo.toml rename to zero/Cargo.toml index 1b69be922..109cb1f28 100644 --- a/zero_bin/common/Cargo.toml +++ b/zero/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "zero_bin_common" -authors = ["Polygon Zero "] +name = "zero" +authors = ["Polygon Zero"] version = "0.1.0" edition.workspace = true license.workspace = true @@ -9,32 +9,51 @@ keywords.workspace = true categories.workspace = true [dependencies] +__compat_primitive_types = { workspace = true } alloy = { workspace = true } +alloy-compat = "0.1.0" anyhow = { workspace = true } async-stream = { workspace = true } +axum = { workspace = true } cargo_metadata = { workspace = true } -clap = { workspace = true } +clap = { workspace = true, features = ["derive", "string"] } +directories = "5.0.1" dotenvy = { workspace = true } futures = { workspace = true } +hex = { workspace = true } +itertools = { workspace = true } +keccak-hash = { workspace = true } lru = { workspace = true } +num-traits = { workspace = true } once_cell = { workspace = true } +paladin-core = { workspace = true } plonky2 = { workspace = true } +plonky2_maybe_rayon = { workspace = true } +ruint = { workspace = true, features = ["num-traits", "primitive-types"] } serde = { workspace = true } serde_json = { workspace = true } serde_path_to_error = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } +toml = { workspace = true } +tower = { workspace = true, features = ["retry"] } tracing = { workspace = true } tracing-subscriber = { workspace = true } +url = { workspace = true } vergen = { workspace = true } -directories = "5.0.1" - # Local dependencies +compat = { workspace = true } evm_arithmetization = { workspace = true } +mpt_trie = { workspace = true } proof_gen = { workspace = true } +trace_decoder = { workspace = true } zk_evm_common = { workspace = true } +[target.'cfg(not(target_env = "msvc"))'.dependencies] +jemallocator = "0.5.4" + + [build-dependencies] anyhow = { workspace = true } cargo_metadata = { workspace = true } @@ -43,18 +62,9 @@ vergen = { workspace = true } [features] default = ["eth_mainnet"] -eth_mainnet = [ - "evm_arithmetization/eth_mainnet", - "proof_gen/eth_mainnet", -] -cdk_erigon = [ - "evm_arithmetization/cdk_erigon", - "proof_gen/cdk_erigon", -] -polygon_pos = [ - "evm_arithmetization/polygon_pos", - "proof_gen/polygon_pos", -] +eth_mainnet = ["evm_arithmetization/eth_mainnet", "proof_gen/eth_mainnet"] +cdk_erigon = ["evm_arithmetization/cdk_erigon", "proof_gen/cdk_erigon"] +polygon_pos = ["evm_arithmetization/polygon_pos", "proof_gen/polygon_pos"] [lints] workspace = true diff --git a/zero_bin/README.md b/zero/README.md similarity index 98% rename from zero_bin/README.md rename to zero/README.md index b6221a724..dda10a95b 100644 --- a/zero_bin/README.md +++ b/zero/README.md @@ -339,7 +339,7 @@ docker run --name rabbitmq -p 5672:5672 -p 15672:15672 
rabbitmq:3-management Start worker process(es). The default paladin runtime is AMQP, so no additional flags are required to enable it. ```bash -RUST_LOG=debug cargo r --release --bin worker +RUST_LOG=debug cargo r --release --package zero --bin worker ``` ##### Start leader @@ -466,7 +466,7 @@ When needed (e.g. some block with corner-case discovered), additional input witn 1. Run the `rpc` tool to fetch the block (or multiple blocks) witness: ```sh -cargo run --bin rpc fetch --rpc-url --start-block --end-block > ./b_.json +cargo run --package zero --bin rpc fetch --rpc-url --start-block --end-block > ./b_.json ``` 2. Download the header file for the block (or range of blocks), making the json array of headers: diff --git a/zero_bin/common/build.rs b/zero/build.rs similarity index 100% rename from zero_bin/common/build.rs rename to zero/build.rs diff --git a/zero_bin/leader/src/main.rs b/zero/src/bin/leader.rs similarity index 89% rename from zero_bin/leader/src/main.rs rename to zero/src/bin/leader.rs index 61ffb98ce..e01465c29 100644 --- a/zero_bin/leader/src/main.rs +++ b/zero/src/bin/leader.rs @@ -7,29 +7,30 @@ use anyhow::Result; use clap::Parser; use cli::Command; use client::RpcParams; -use ops::register; use paladin::runtime::Runtime; -use prover::ProverConfig; use tracing::info; -use zero_bin_common::env::load_dotenvy_vars_if_present; -use zero_bin_common::fs::get_previous_proof; -use zero_bin_common::{ +use zero::env::load_dotenvy_vars_if_present; +use zero::prover::ProverConfig; +use zero::{ block_interval::BlockInterval, prover_state::persistence::set_circuit_cache_dir_env_if_not_set, }; -use zero_bin_common::{prover_state::persistence::CIRCUIT_VERSION, version}; +use zero::{fs::get_previous_proof, ops::register}; +use zero::{prover_state::persistence::CIRCUIT_VERSION, version}; +use self::leader::*; use crate::client::{client_main, LeaderConfig}; - -mod cli; -mod client; -mod http; -mod stdio; +mod leader { + pub mod cli; + pub mod client; + pub mod http; + pub mod stdio; +} #[tokio::main] async fn main() -> Result<()> { load_dotenvy_vars_if_present(); set_circuit_cache_dir_env_if_not_set()?; - zero_bin_common::tracing::init(); + zero::tracing::init(); let args: Vec<String> = env::args().collect(); @@ -45,7 +46,7 @@ async fn main() -> Result<()> { let args = cli::Cli::parse(); if let Command::Clean = args.command { - return zero_bin_common::prover_state::persistence::delete_all(); + return zero::prover_state::persistence::delete_all(); } let runtime = Arc::new(Runtime::from_config(&args.paladin, register()).await?); diff --git a/zero_bin/leader/src/cli.rs b/zero/src/bin/leader/cli.rs similarity index 95% rename from zero_bin/leader/src/cli.rs rename to zero/src/bin/leader/cli.rs index 9cc2de300..ee45c946f 100644 --- a/zero_bin/leader/src/cli.rs +++ b/zero/src/bin/leader/cli.rs @@ -2,9 +2,9 @@ use std::path::PathBuf; use alloy::transports::http::reqwest::Url; use clap::{Parser, Subcommand, ValueHint}; -use prover::cli::CliProverConfig; -use rpc::RpcType; -use zero_bin_common::prover_state::cli::CliProverStateConfig; +use zero::prover::cli::CliProverConfig; +use zero::prover_state::cli::CliProverStateConfig; +use zero::rpc::RpcType; /// zero-bin leader config #[derive(Parser)] diff --git a/zero_bin/leader/src/client.rs b/zero/src/bin/leader/client.rs similarity index 88% rename from zero_bin/leader/src/client.rs rename to zero/src/bin/leader/client.rs index a0f38727d..6825b6683 100644 --- a/zero_bin/leader/src/client.rs +++ b/zero/src/bin/leader/client.rs @@ -5,12 +5,13 @@ use
alloy::transports::http::reqwest::Url; use anyhow::{anyhow, Result}; use paladin::runtime::Runtime; use proof_gen::proof_types::GeneratedBlockProof; -use prover::{BlockProverInput, ProverConfig}; -use rpc::{retry::build_http_retry_provider, RpcType}; use tokio::sync::mpsc; use tracing::info; -use zero_bin_common::block_interval::{BlockInterval, BlockIntervalStream}; -use zero_bin_common::pre_checks::check_previous_proof_and_checkpoint; +use zero::block_interval::{BlockInterval, BlockIntervalStream}; +use zero::pre_checks::check_previous_proof_and_checkpoint; +use zero::prover::{self, BlockProverInput, ProverConfig}; +use zero::rpc; +use zero::rpc::{retry::build_http_retry_provider, RpcType}; #[derive(Debug)] pub struct RpcParams { @@ -37,7 +38,7 @@ pub(crate) async fn client_main( ) -> Result<()> { use futures::StreamExt; - let cached_provider = Arc::new(zero_bin_common::provider::CachedProvider::new( + let cached_provider = Arc::new(zero::provider::CachedProvider::new( build_http_retry_provider( rpc_params.rpc_url.clone(), rpc_params.backoff, @@ -53,8 +54,7 @@ pub(crate) async fn client_main( // Create a channel for block prover input and use it to send prover input to // the proving task. The second element of the tuple is a flag indicating // whether the block is the last one in the interval. - let (block_tx, block_rx) = - mpsc::channel::<(BlockProverInput, bool)>(zero_bin_common::BLOCK_CHANNEL_SIZE); + let (block_tx, block_rx) = mpsc::channel::<(BlockProverInput, bool)>(zero::BLOCK_CHANNEL_SIZE); let test_only = leader_config.prover_config.test_only; // Run proving task diff --git a/zero_bin/leader/src/http.rs b/zero/src/bin/leader/http.rs similarity index 98% rename from zero_bin/leader/src/http.rs rename to zero/src/bin/leader/http.rs index 14fa965a4..3ebcbdaec 100644 --- a/zero_bin/leader/src/http.rs +++ b/zero/src/bin/leader/http.rs @@ -5,10 +5,10 @@ use anyhow::{bail, Result}; use axum::{http::StatusCode, routing::post, Json, Router}; use paladin::runtime::Runtime; use proof_gen::proof_types::GeneratedBlockProof; -use prover::{BlockProverInput, ProverConfig}; use serde::{Deserialize, Serialize}; use serde_json::to_writer; use tracing::{debug, error, info}; +use zero::prover::{BlockProverInput, ProverConfig}; /// The main function for the HTTP mode. pub(crate) async fn http_main( diff --git a/zero_bin/leader/src/stdio.rs b/zero/src/bin/leader/stdio.rs similarity index 91% rename from zero_bin/leader/src/stdio.rs rename to zero/src/bin/leader/stdio.rs index 71db3c6ae..2bef3ceff 100644 --- a/zero_bin/leader/src/stdio.rs +++ b/zero/src/bin/leader/stdio.rs @@ -4,9 +4,9 @@ use std::sync::Arc; use anyhow::{anyhow, Result}; use paladin::runtime::Runtime; use proof_gen::proof_types::GeneratedBlockProof; -use prover::{BlockProverInput, ProverConfig}; use tokio::sync::mpsc; use tracing::info; +use zero::prover::{self, BlockProverInput, ProverConfig}; /// The main function for the stdio mode. 
pub(crate) async fn stdio_main( @@ -22,8 +22,7 @@ pub(crate) async fn stdio_main( .into_iter() .collect::>(); - let (block_tx, block_rx) = - mpsc::channel::<(BlockProverInput, bool)>(zero_bin_common::BLOCK_CHANNEL_SIZE); + let (block_tx, block_rx) = mpsc::channel::<(BlockProverInput, bool)>(zero::BLOCK_CHANNEL_SIZE); let runtime_ = runtime.clone(); let prover_config_ = prover_config.clone(); diff --git a/zero_bin/rpc/src/main.rs b/zero/src/bin/rpc.rs similarity index 96% rename from zero_bin/rpc/src/main.rs rename to zero/src/bin/rpc.rs index 308ea8cd0..5f9d9c276 100644 --- a/zero_bin/rpc/src/main.rs +++ b/zero/src/bin/rpc.rs @@ -9,14 +9,15 @@ use alloy::transports::Transport; use anyhow::anyhow; use clap::{Args, Parser, Subcommand, ValueHint}; use futures::StreamExt; -use prover::BlockProverInput; -use rpc::{retry::build_http_retry_provider, RpcType}; use tracing_subscriber::{prelude::*, EnvFilter}; use url::Url; -use zero_bin_common::block_interval::BlockIntervalStream; -use zero_bin_common::provider::CachedProvider; -use zero_bin_common::version; -use zero_bin_common::{block_interval::BlockInterval, prover_state::persistence::CIRCUIT_VERSION}; +use zero::block_interval::BlockIntervalStream; +use zero::prover::BlockProverInput; +use zero::provider::CachedProvider; +use zero::{block_interval::BlockInterval, prover_state::persistence::CIRCUIT_VERSION}; +use zero::{rpc, version}; + +use self::rpc::{retry::build_http_retry_provider, RpcType}; #[derive(Clone, Debug, Copy)] struct FetchParams { diff --git a/zero_bin/verifier/src/main.rs b/zero/src/bin/verifier.rs similarity index 94% rename from zero_bin/verifier/src/main.rs rename to zero/src/bin/verifier.rs index aa97d6087..a7d80197a 100644 --- a/zero_bin/verifier/src/main.rs +++ b/zero/src/bin/verifier.rs @@ -9,13 +9,16 @@ use dotenvy::dotenv; use proof_gen::proof_types::GeneratedBlockProof; use serde_json::Deserializer; use tracing::info; -use zero_bin_common::{ +use zero::{ prover_state::persistence::{set_circuit_cache_dir_env_if_not_set, CIRCUIT_VERSION}, version, }; -mod cli; -mod init; +use self::verifier::*; +mod verifier { + pub mod cli; + pub mod init; +} fn main() -> Result<()> { dotenv().ok(); diff --git a/zero_bin/verifier/src/cli.rs b/zero/src/bin/verifier/cli.rs similarity index 87% rename from zero_bin/verifier/src/cli.rs rename to zero/src/bin/verifier/cli.rs index 76306ec41..157674130 100644 --- a/zero_bin/verifier/src/cli.rs +++ b/zero/src/bin/verifier/cli.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; use clap::{Parser, ValueHint}; -use zero_bin_common::prover_state::cli::CliProverStateConfig; +use zero::prover_state::cli::CliProverStateConfig; #[derive(Parser)] pub(crate) struct Cli { diff --git a/zero_bin/verifier/src/init.rs b/zero/src/bin/verifier/init.rs similarity index 100% rename from zero_bin/verifier/src/init.rs rename to zero/src/bin/verifier/init.rs diff --git a/zero_bin/worker/src/main.rs b/zero/src/bin/worker.rs similarity index 92% rename from zero_bin/worker/src/main.rs rename to zero/src/bin/worker.rs index 3ea4c7cae..1d941341f 100644 --- a/zero_bin/worker/src/main.rs +++ b/zero/src/bin/worker.rs @@ -3,13 +3,12 @@ use std::env; use anyhow::Result; use clap::Parser; use dotenvy::dotenv; -use ops::register; use paladin::runtime::WorkerRuntime; -use zero_bin_common::prover_state::{ +use zero::prover_state::{ cli::CliProverStateConfig, persistence::{set_circuit_cache_dir_env_if_not_set, CIRCUIT_VERSION}, }; -use zero_bin_common::{tracing, version}; +use zero::{ops::register, tracing, version}; // TODO: 
https://github.com/0xPolygonZero/zk_evm/issues/302 // this should probably be removed. diff --git a/zero_bin/common/src/block_interval.rs b/zero/src/block_interval.rs similarity index 99% rename from zero_bin/common/src/block_interval.rs rename to zero/src/block_interval.rs index a659976a3..e424076e0 100644 --- a/zero_bin/common/src/block_interval.rs +++ b/zero/src/block_interval.rs @@ -43,7 +43,7 @@ impl BlockInterval { /// /// ```rust /// # use alloy::rpc::types::eth::BlockId; - /// # use zero_bin_common::block_interval::BlockInterval; + /// # use zero::block_interval::BlockInterval; /// assert_eq!(BlockInterval::new("0..10").unwrap(), BlockInterval::Range(0..10)); /// assert_eq!(BlockInterval::new("0..=10").unwrap(), BlockInterval::Range(0..11)); /// assert_eq!(BlockInterval::new("32141").unwrap(), BlockInterval::SingleBlockId(BlockId::Number(32141.into()))); diff --git a/zero_bin/common/src/debug_utils.rs b/zero/src/debug_utils.rs similarity index 100% rename from zero_bin/common/src/debug_utils.rs rename to zero/src/debug_utils.rs diff --git a/zero_bin/common/src/env.rs b/zero/src/env.rs similarity index 100% rename from zero_bin/common/src/env.rs rename to zero/src/env.rs diff --git a/zero_bin/common/src/fs.rs b/zero/src/fs.rs similarity index 100% rename from zero_bin/common/src/fs.rs rename to zero/src/fs.rs diff --git a/zero_bin/common/src/lib.rs b/zero/src/lib.rs similarity index 92% rename from zero_bin/common/src/lib.rs rename to zero/src/lib.rs index 890558de6..3dc34d421 100644 --- a/zero_bin/common/src/lib.rs +++ b/zero/src/lib.rs @@ -4,10 +4,13 @@ pub mod block_interval; pub mod debug_utils; pub mod env; pub mod fs; +pub mod ops; pub mod parsing; pub mod pre_checks; +pub mod prover; pub mod prover_state; pub mod provider; +pub mod rpc; pub mod tracing; pub mod version; diff --git a/zero_bin/ops/src/lib.rs b/zero/src/ops.rs similarity index 98% rename from zero_bin/ops/src/lib.rs rename to zero/src/ops.rs index 3a6d3854c..a697d9416 100644 --- a/zero_bin/ops/src/lib.rs +++ b/zero/src/ops.rs @@ -19,7 +19,8 @@ use proof_gen::{ use serde::{Deserialize, Serialize}; use tracing::error; use tracing::{event, info_span, Level}; -use zero_bin_common::{debug_utils::save_inputs_to_disk, prover_state::p_state}; + +use crate::{debug_utils::save_inputs_to_disk, prover_state::p_state}; registry!(); @@ -40,7 +41,7 @@ impl Operation for SegmentProof { let segment_index = all_data.1.segment_index(); let _span = SegmentProofSpan::new(&input, all_data.1.segment_index()); let proof = if self.save_inputs_on_error { - zero_bin_common::prover_state::p_manager() + crate::prover_state::p_manager() .generate_segment_proof(all_data) .map_err(|err| { if let Err(write_err) = save_inputs_to_disk( @@ -59,7 +60,7 @@ impl Operation for SegmentProof { FatalError::from_anyhow(err, FatalStrategy::Terminate) })? } else { - zero_bin_common::prover_state::p_manager() + crate::prover_state::p_manager() .generate_segment_proof(all_data) .map_err(|err| FatalError::from_anyhow(err, FatalStrategy::Terminate))? 
}; diff --git a/zero_bin/common/src/parsing.rs b/zero/src/parsing.rs similarity index 100% rename from zero_bin/common/src/parsing.rs rename to zero/src/parsing.rs diff --git a/zero_bin/common/src/pre_checks.rs b/zero/src/pre_checks.rs similarity index 100% rename from zero_bin/common/src/pre_checks.rs rename to zero/src/pre_checks.rs diff --git a/zero_bin/prover/src/lib.rs b/zero/src/prover.rs similarity index 99% rename from zero_bin/prover/src/lib.rs rename to zero/src/prover.rs index 29bf42127..3916f98d2 100644 --- a/zero_bin/prover/src/lib.rs +++ b/zero/src/prover.rs @@ -18,7 +18,9 @@ use tokio::sync::mpsc::Receiver; use tokio::sync::{oneshot, Semaphore}; use trace_decoder::{BlockTrace, OtherBlockData}; use tracing::{error, info}; -use zero_bin_common::fs::generate_block_proof_file_name; + +use crate::fs::generate_block_proof_file_name; +use crate::ops; // All proving tasks are executed concurrently, which can cause issues for large // block intervals, where distant future blocks may be proven first. diff --git a/zero_bin/prover/src/cli.rs b/zero/src/prover/cli.rs similarity index 97% rename from zero_bin/prover/src/cli.rs rename to zero/src/prover/cli.rs index 694194ad9..e55141b7a 100644 --- a/zero_bin/prover/src/cli.rs +++ b/zero/src/prover/cli.rs @@ -45,7 +45,7 @@ pub struct CliProverConfig { block_batch_size: usize, } -impl From for crate::ProverConfig { +impl From for super::ProverConfig { fn from(cli: CliProverConfig) -> Self { Self { batch_size: cli.batch_size, diff --git a/zero_bin/common/src/prover_state/circuit.rs b/zero/src/prover_state/circuit.rs similarity index 100% rename from zero_bin/common/src/prover_state/circuit.rs rename to zero/src/prover_state/circuit.rs diff --git a/zero_bin/common/src/prover_state/cli.rs b/zero/src/prover_state/cli.rs similarity index 100% rename from zero_bin/common/src/prover_state/cli.rs rename to zero/src/prover_state/cli.rs diff --git a/zero_bin/common/src/prover_state/mod.rs b/zero/src/prover_state/mod.rs similarity index 100% rename from zero_bin/common/src/prover_state/mod.rs rename to zero/src/prover_state/mod.rs diff --git a/zero_bin/common/src/prover_state/persistence.rs b/zero/src/prover_state/persistence.rs similarity index 100% rename from zero_bin/common/src/prover_state/persistence.rs rename to zero/src/prover_state/persistence.rs diff --git a/zero_bin/common/src/provider.rs b/zero/src/provider.rs similarity index 100% rename from zero_bin/common/src/provider.rs rename to zero/src/provider.rs diff --git a/zero_bin/rpc/src/jerigon.rs b/zero/src/rpc/jerigon.rs similarity index 96% rename from zero_bin/rpc/src/jerigon.rs rename to zero/src/rpc/jerigon.rs index 00d56cf48..df00bc605 100644 --- a/zero_bin/rpc/src/jerigon.rs +++ b/zero/src/rpc/jerigon.rs @@ -1,12 +1,12 @@ use alloy::{providers::Provider, rpc::types::eth::BlockId, transports::Transport}; use anyhow::Context as _; -use prover::BlockProverInput; use serde::Deserialize; use serde_json::json; use trace_decoder::{BlockTrace, BlockTraceTriePreImages, CombinedPreImages, TxnInfo}; -use zero_bin_common::provider::CachedProvider; use super::fetch_other_block_data; +use crate::prover::BlockProverInput; +use crate::provider::CachedProvider; /// Transaction traces retrieved from Erigon zeroTracer. 
#[derive(Debug, Deserialize)] diff --git a/zero_bin/rpc/src/lib.rs b/zero/src/rpc/mod.rs similarity index 99% rename from zero_bin/rpc/src/lib.rs rename to zero/src/rpc/mod.rs index 87a581a14..cf8ec09cf 100644 --- a/zero_bin/rpc/src/lib.rs +++ b/zero/src/rpc/mod.rs @@ -15,16 +15,17 @@ use compat::Compat; use evm_arithmetization::proof::{consolidate_hashes, BlockHashes, BlockMetadata}; use futures::{StreamExt as _, TryStreamExt as _}; use proof_gen::types::{Field, Hasher}; -use prover::BlockProverInput; use serde_json::json; use trace_decoder::{BlockLevelData, OtherBlockData}; use tracing::warn; +use crate::prover::BlockProverInput; + pub mod jerigon; pub mod native; pub mod retry; -use zero_bin_common::provider::CachedProvider; +use crate::provider::CachedProvider; pub(crate) type PreviousBlockHashes = [FixedBytes<32>; 256]; diff --git a/zero_bin/rpc/src/native/mod.rs b/zero/src/rpc/native/mod.rs similarity index 90% rename from zero_bin/rpc/src/native/mod.rs rename to zero/src/rpc/native/mod.rs index 2e9527274..5b4ed5dd9 100644 --- a/zero_bin/rpc/src/native/mod.rs +++ b/zero/src/rpc/native/mod.rs @@ -8,9 +8,10 @@ use alloy::{ transports::Transport, }; use futures::try_join; -use prover::BlockProverInput; use trace_decoder::BlockTrace; -use zero_bin_common::provider::CachedProvider; + +use crate::prover::BlockProverInput; +use crate::provider::CachedProvider; mod state; mod txn; @@ -29,7 +30,7 @@ where { let (block_trace, other_data) = try_join!( process_block_trace(provider.clone(), block_number), - crate::fetch_other_block_data(provider.clone(), block_number, checkpoint_block_number) + crate::rpc::fetch_other_block_data(provider.clone(), block_number, checkpoint_block_number) )?; Ok(BlockProverInput { diff --git a/zero_bin/rpc/src/native/state.rs b/zero/src/rpc/native/state.rs similarity index 99% rename from zero_bin/rpc/src/native/state.rs rename to zero/src/rpc/native/state.rs index a61f73e01..579c3f5a6 100644 --- a/zero_bin/rpc/src/native/state.rs +++ b/zero/src/rpc/native/state.rs @@ -16,7 +16,8 @@ use trace_decoder::{ BlockTraceTriePreImages, SeparateStorageTriesPreImage, SeparateTriePreImage, SeparateTriePreImages, TxnInfo, }; -use zero_bin_common::provider::CachedProvider; + +use crate::provider::CachedProvider; /// Processes the state witness for the given block. pub async fn process_state_witness( diff --git a/zero_bin/rpc/src/native/txn.rs b/zero/src/rpc/native/txn.rs similarity index 99% rename from zero_bin/rpc/src/native/txn.rs rename to zero/src/rpc/native/txn.rs index 5e3be656a..79de06d79 100644 --- a/zero_bin/rpc/src/native/txn.rs +++ b/zero/src/rpc/native/txn.rs @@ -20,11 +20,11 @@ use alloy::{ transports::Transport, }; use anyhow::Context as _; +use compat::Compat; use futures::stream::{FuturesOrdered, TryStreamExt}; use trace_decoder::{ContractCodeUsage, TxnInfo, TxnMeta, TxnTrace}; use super::CodeDb; -use crate::Compat; /// Processes the transactions in the given block and updates the code db. 
pub(super) async fn process_transactions( diff --git a/zero_bin/rpc/src/retry.rs b/zero/src/rpc/retry.rs similarity index 100% rename from zero_bin/rpc/src/retry.rs rename to zero/src/rpc/retry.rs diff --git a/zero_bin/common/src/tracing.rs b/zero/src/tracing.rs similarity index 100% rename from zero_bin/common/src/tracing.rs rename to zero/src/tracing.rs diff --git a/zero_bin/common/src/version.rs b/zero/src/version.rs similarity index 100% rename from zero_bin/common/src/version.rs rename to zero/src/version.rs diff --git a/zero_bin/.gitignore b/zero_bin/.gitignore deleted file mode 100644 index ab214fd70..000000000 --- a/zero_bin/.gitignore +++ /dev/null @@ -1,15 +0,0 @@ -# Default extension for generated block proofs -*.zkproof - -# Folder containing all the locally generated circuit data -circuits/ - -# Folders containing logs from the utility scripts in tools/ -debug/ -proofs/ - -# Files generated from the tools script tests -test.out -proofs.json -verify.out -leader.out diff --git a/zero_bin/leader/Cargo.toml b/zero_bin/leader/Cargo.toml deleted file mode 100644 index 27d22a347..000000000 --- a/zero_bin/leader/Cargo.toml +++ /dev/null @@ -1,69 +0,0 @@ -[package] -name = "leader" -authors = ["Polygon Zero "] -version = "0.1.0" -edition.workspace = true -license.workspace = true -repository.workspace = true -keywords.workspace = true -categories.workspace = true -build = "../common/build.rs" - -[dependencies] -alloy = { workspace = true } -anyhow = { workspace = true } -axum = { workspace = true } -clap = { workspace = true } -futures = { workspace = true } -paladin-core = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -serde_path_to_error = { workspace = true } -tokio = { workspace = true } -toml = { workspace = true } -tracing = { workspace = true } -tracing-subscriber = { workspace = true } - -# Local dependencies -evm_arithmetization = { workspace = true } -ops = { workspace = true } -proof_gen = { workspace = true } -prover = { workspace = true } -rpc = { workspace = true } -zero_bin_common = { workspace = true } -zk_evm_common = { workspace = true } - -[features] -default = ["eth_mainnet"] -eth_mainnet = [ - "evm_arithmetization/eth_mainnet", - "ops/eth_mainnet", - "proof_gen/eth_mainnet", - "prover/eth_mainnet", - "rpc/eth_mainnet", - "zero_bin_common/eth_mainnet", -] -cdk_erigon = [ - "evm_arithmetization/cdk_erigon", - "ops/cdk_erigon", - "proof_gen/cdk_erigon", - "prover/cdk_erigon", - "rpc/cdk_erigon", - "zero_bin_common/cdk_erigon", -] -polygon_pos = [ - "evm_arithmetization/polygon_pos", - "ops/polygon_pos", - "proof_gen/polygon_pos", - "prover/polygon_pos", - "rpc/polygon_pos", - "zero_bin_common/polygon_pos", -] - -[build-dependencies] -cargo_metadata = { workspace = true } -vergen = { workspace = true } -anyhow = { workspace = true } - -[lints] -workspace = true diff --git a/zero_bin/ops/Cargo.toml b/zero_bin/ops/Cargo.toml deleted file mode 100644 index 4e49a3ae6..000000000 --- a/zero_bin/ops/Cargo.toml +++ /dev/null @@ -1,39 +0,0 @@ -[package] -name = "ops" -version = "0.1.0" -authors = ["Polygon Zero "] -edition.workspace = true -license.workspace = true -repository.workspace = true -keywords.workspace = true -categories.workspace = true - -[dependencies] -keccak-hash = { workspace = true } -paladin-core = { workspace = true } -serde = { workspace = true } -tracing = { workspace = true } - -# Local dependencies -evm_arithmetization = { workspace = true } -proof_gen = { workspace = true } -zero_bin_common = { workspace = true } 
-zk_evm_common = { workspace = true } - -[features] -default = ["eth_mainnet"] -eth_mainnet = [ - "evm_arithmetization/eth_mainnet", - "proof_gen/eth_mainnet", - "zero_bin_common/eth_mainnet", -] -cdk_erigon = [ - "evm_arithmetization/cdk_erigon", - "proof_gen/cdk_erigon", - "zero_bin_common/cdk_erigon", -] -polygon_pos = [ - "evm_arithmetization/polygon_pos", - "proof_gen/polygon_pos", - "zero_bin_common/polygon_pos", -] diff --git a/zero_bin/prover/Cargo.toml b/zero_bin/prover/Cargo.toml deleted file mode 100644 index a2ad4ccb4..000000000 --- a/zero_bin/prover/Cargo.toml +++ /dev/null @@ -1,60 +0,0 @@ -[package] -name = "prover" -authors = ["Polygon Zero "] -version = "0.1.0" -edition.workspace = true -license.workspace = true -repository.workspace = true -keywords.workspace = true -categories.workspace = true - -[dependencies] -alloy = { workspace = true } -anyhow = { workspace = true } -clap = { workspace = true, features = ["derive", "string"] } -futures = { workspace = true } -num-traits = { workspace = true } -ops = { workspace = true } -paladin-core = { workspace = true } -plonky2 = { workspace = true } -plonky2_maybe_rayon = { workspace = true } -ruint = { workspace = true, features = ["num-traits", "primitive-types"] } -serde = { workspace = true } -serde_json = { workspace = true } -tokio = { workspace = true } -tracing = { workspace = true } - -# Local dependencies -evm_arithmetization = { workspace = true } -proof_gen = { workspace = true } -trace_decoder = { workspace = true } -zero_bin_common = { workspace = true } -zk_evm_common = { workspace = true } - - -[features] -default = ["eth_mainnet"] -eth_mainnet = [ - "evm_arithmetization/eth_mainnet", - "ops/eth_mainnet", - "proof_gen/eth_mainnet", - "trace_decoder/eth_mainnet", - "zero_bin_common/eth_mainnet", -] -cdk_erigon = [ - "evm_arithmetization/cdk_erigon", - "ops/cdk_erigon", - "proof_gen/cdk_erigon", - "trace_decoder/cdk_erigon", - "zero_bin_common/cdk_erigon", -] -polygon_pos = [ - "evm_arithmetization/polygon_pos", - "ops/polygon_pos", - "proof_gen/polygon_pos", - "trace_decoder/polygon_pos", - "zero_bin_common/polygon_pos", -] - -[lints] -workspace = true diff --git a/zero_bin/rpc/Cargo.toml b/zero_bin/rpc/Cargo.toml deleted file mode 100644 index c694335d4..000000000 --- a/zero_bin/rpc/Cargo.toml +++ /dev/null @@ -1,65 +0,0 @@ -[package] -name = "rpc" -authors = ["Polygon Zero "] -version = "0.1.0" -edition.workspace = true -license.workspace = true -repository.workspace = true -keywords.workspace = true -categories.workspace = true -build = "../common/build.rs" - -[dependencies] -__compat_primitive_types = { workspace = true } -alloy = { workspace = true } -alloy-compat = "0.1.0" -anyhow = { workspace = true } -clap = { workspace = true } -futures = { workspace = true } -hex = { workspace = true } -itertools = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -tokio = { workspace = true } -tower = { workspace = true, features = ["retry"] } -tracing = { workspace = true } -tracing-subscriber = { workspace = true } -url = { workspace = true } - -# Local dependencies -compat = { workspace = true } -evm_arithmetization = { workspace = true } -mpt_trie = { workspace = true } -proof_gen = { workspace = true } -prover = { workspace = true } -trace_decoder = { workspace = true } -zero_bin_common = { workspace = true } -zk_evm_common = { workspace = true } - -[build-dependencies] -anyhow = { workspace = true } -cargo_metadata = { workspace = true } -vergen = { workspace = true } - 
-[features] -default = ["eth_mainnet"] -eth_mainnet = [ - "evm_arithmetization/eth_mainnet", - "proof_gen/eth_mainnet", - "prover/eth_mainnet", - "trace_decoder/eth_mainnet", - "zero_bin_common/eth_mainnet", -] -cdk_erigon = [ - "evm_arithmetization/cdk_erigon", - "proof_gen/cdk_erigon", - "prover/cdk_erigon", - "trace_decoder/cdk_erigon", - "zero_bin_common/cdk_erigon", -] -polygon_pos = [ - "evm_arithmetization/polygon_pos", - "prover/polygon_pos", - "trace_decoder/polygon_pos", - "zero_bin_common/polygon_pos", -] diff --git a/zero_bin/verifier/Cargo.toml b/zero_bin/verifier/Cargo.toml deleted file mode 100644 index ff0458197..000000000 --- a/zero_bin/verifier/Cargo.toml +++ /dev/null @@ -1,41 +0,0 @@ -[package] -name = "verifier" -authors = ["Polygon Zero "] -version = "0.1.0" -edition = "2021" -build = "../common/build.rs" - -[dependencies] -anyhow = { workspace = true } -clap = { workspace = true } -dotenvy = { workspace = true } -serde_json = { workspace = true } -serde_path_to_error = { workspace = true } -tracing = { workspace = true } -tracing-subscriber = { workspace = true } - -# Local dependencies -proof_gen = { workspace = true } -zero_bin_common = { workspace = true } -zk_evm_common = { workspace = true } - -[build-dependencies] -anyhow = { workspace = true } -cargo_metadata = { workspace = true } -vergen = { workspace = true } - - -[features] -default = ["eth_mainnet"] -eth_mainnet = [ - "proof_gen/eth_mainnet", - "zero_bin_common/eth_mainnet", -] -cdk_erigon = [ - "proof_gen/cdk_erigon", - "zero_bin_common/cdk_erigon", -] -polygon_pos = [ - "proof_gen/polygon_pos", - "zero_bin_common/polygon_pos", -] diff --git a/zero_bin/worker/Cargo.toml b/zero_bin/worker/Cargo.toml deleted file mode 100644 index ffb762627..000000000 --- a/zero_bin/worker/Cargo.toml +++ /dev/null @@ -1,45 +0,0 @@ -[package] -name = "worker" -authors = ["Polygon Zero "] -version = "0.1.0" -edition.workspace = true -license.workspace = true -repository.workspace = true -keywords.workspace = true -categories.workspace = true -build = "../common/build.rs" - -[dependencies] -paladin-core = { workspace = true } -anyhow = { workspace = true } -dotenvy = { workspace = true } -tracing-subscriber = { workspace = true } -clap = { workspace = true } -tokio = { workspace = true } - -# Local dependencies -ops = { workspace = true } -zero_bin_common = { workspace = true } - -[target.'cfg(not(target_env = "msvc"))'.dependencies] -jemallocator = "0.5.4" - -[build-dependencies] -cargo_metadata = { workspace = true } -vergen = { workspace = true } -anyhow = { workspace = true } - -[features] -default = ["eth_mainnet"] -eth_mainnet = [ - "ops/eth_mainnet", - "zero_bin_common/eth_mainnet", -] -cdk_erigon = [ - "ops/cdk_erigon", - "zero_bin_common/cdk_erigon", -] -polygon_pos = [ - "ops/polygon_pos", - "zero_bin_common/polygon_pos", -] From 8b7856ec9f089475770c7b159366cf2f6facc635 Mon Sep 17 00:00:00 2001 From: Robin Salen <30937548+Nashtare@users.noreply.github.com> Date: Mon, 16 Sep 2024 16:44:27 -0400 Subject: [PATCH 3/5] feat(type2): Skip jumpdest analysis (#631) * Remove JDA for type2 * Perform sanity checks * Clippy --- evm_arithmetization/src/cpu/jumps.rs | 141 ++++++++++-------- .../src/cpu/kernel/aggregator.rs | 5 +- .../src/cpu/kernel/asm/core/call.asm | 16 +- .../src/cpu/kernel/asm/core/exception.asm | 36 +++-- .../kernel/tests/core/jumpdest_analysis.rs | 39 +++++ .../src/cpu/kernel/tests/mod.rs | 38 +---- evm_arithmetization/src/witness/transition.rs | 106 ++++++++----- 7 files changed, 217 insertions(+), 164 
deletions(-) diff --git a/evm_arithmetization/src/cpu/jumps.rs b/evm_arithmetization/src/cpu/jumps.rs index 2a8df148a..c100c4f91 100644 --- a/evm_arithmetization/src/cpu/jumps.rs +++ b/evm_arithmetization/src/cpu/jumps.rs @@ -74,7 +74,6 @@ pub(crate) fn eval_packed_jump_jumpi( let dst = lv.mem_channels[0].value; let cond = lv.mem_channels[1].value; let filter = lv.op.jumps; // `JUMP` or `JUMPI` - let jumpdest_flag_channel = lv.mem_channels[NUM_GP_CHANNELS - 1]; let is_jump = filter * (P::ONES - lv.opcode_bits[0]); let is_jumpi = filter * lv.opcode_bits[0]; @@ -121,26 +120,34 @@ pub(crate) fn eval_packed_jump_jumpi( // If we're jumping, then the high 7 limbs of the destination must be 0. let dst_hi_sum: P = dst[1..].iter().copied().sum(); yield_constr.constraint(filter * jumps_lv.should_jump * dst_hi_sum); - // Check that the destination address holds a `JUMPDEST` instruction. Note that - // this constraint does not need to be conditioned on `should_jump` because - // no read takes place if we're not jumping, so we're free to set the - // channel to 1. - yield_constr.constraint(filter * (jumpdest_flag_channel.value[0] - P::ONES)); - - // Make sure that the JUMPDEST flag channel is constrained. - // Only need to read if we're about to jump and we're not in kernel mode. - yield_constr.constraint( - filter - * (jumpdest_flag_channel.used - jumps_lv.should_jump * (P::ONES - lv.is_kernel_mode)), - ); - yield_constr.constraint(filter * (jumpdest_flag_channel.is_read - P::ONES)); - yield_constr.constraint(filter * (jumpdest_flag_channel.addr_context - lv.context)); - yield_constr.constraint( - filter - * (jumpdest_flag_channel.addr_segment - - P::Scalar::from_canonical_usize(Segment::JumpdestBits.unscale())), - ); - yield_constr.constraint(filter * (jumpdest_flag_channel.addr_virtual - dst[0])); + + // We skip jump destinations verification with `cdk_erigon`. + #[cfg(not(feature = "cdk_erigon"))] + { + let jumpdest_flag_channel = lv.mem_channels[NUM_GP_CHANNELS - 1]; + + // Check that the destination address holds a `JUMPDEST` instruction. Note that + // this constraint does not need to be conditioned on `should_jump` because + // no read takes place if we're not jumping, so we're free to set the + // channel to 1. + yield_constr.constraint(filter * (jumpdest_flag_channel.value[0] - P::ONES)); + + // Make sure that the JUMPDEST flag channel is constrained. + // Only need to read if we're about to jump and we're not in kernel mode. 
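The same gating is applied to both the packed evaluator here and the recursive (circuit) evaluator below: the JUMPDEST-channel constraints only exist when the `cdk_erigon` feature is off. As a minimal sketch of that pattern, with illustrative names and plain integers standing in for field elements and the constraint consumer:

    fn eval_jump_constraints(constraints: &mut Vec<i64>, filter: i64, flag_value: i64) {
        // Constraints shared by every chain variant would be pushed here.

        // Jump destination verification is skipped with `cdk_erigon`.
        #[cfg(not(feature = "cdk_erigon"))]
        {
            // On an active JUMP/JUMPI row, the JUMPDEST flag channel must read a 1.
            constraints.push(filter * (flag_value - 1));
        }
    }
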
+ yield_constr.constraint( + filter + * (jumpdest_flag_channel.used + - jumps_lv.should_jump * (P::ONES - lv.is_kernel_mode)), + ); + yield_constr.constraint(filter * (jumpdest_flag_channel.is_read - P::ONES)); + yield_constr.constraint(filter * (jumpdest_flag_channel.addr_context - lv.context)); + yield_constr.constraint( + filter + * (jumpdest_flag_channel.addr_segment + - P::Scalar::from_canonical_usize(Segment::JumpdestBits.unscale())), + ); + yield_constr.constraint(filter * (jumpdest_flag_channel.addr_virtual - dst[0])); + } // Disable unused memory channels for &channel in &lv.mem_channels[2..NUM_GP_CHANNELS - 1] { @@ -179,7 +186,6 @@ pub(crate) fn eval_ext_circuit_jump_jumpi, const D: let dst = lv.mem_channels[0].value; let cond = lv.mem_channels[1].value; let filter = lv.op.jumps; // `JUMP` or `JUMPI` - let jumpdest_flag_channel = lv.mem_channels[NUM_GP_CHANNELS - 1]; let one_extension = builder.one_extension(); let is_jump = builder.sub_extension(one_extension, lv.opcode_bits[0]); let is_jump = builder.mul_extension(filter, is_jump); @@ -281,50 +287,57 @@ pub(crate) fn eval_ext_circuit_jump_jumpi, const D: let constr = builder.mul_extension(filter, constr); yield_constr.constraint(builder, constr); } - // Check that the destination address holds a `JUMPDEST` instruction. Note that - // this constraint does not need to be conditioned on `should_jump` because - // no read takes place if we're not jumping, so we're free to set the - // channel to 1. - { - let constr = builder.mul_sub_extension(filter, jumpdest_flag_channel.value[0], filter); - yield_constr.constraint(builder, constr); - } - // Make sure that the JUMPDEST flag channel is constrained. - // Only need to read if we're about to jump and we're not in kernel mode. + // We skip jump destinations verification with `cdk_erigon`. + #[cfg(not(feature = "cdk_erigon"))] { - let constr = builder.mul_sub_extension( - jumps_lv.should_jump, - lv.is_kernel_mode, - jumps_lv.should_jump, - ); - let constr = builder.add_extension(jumpdest_flag_channel.used, constr); - let constr = builder.mul_extension(filter, constr); - yield_constr.constraint(builder, constr); - } - { - let constr = builder.mul_sub_extension(filter, jumpdest_flag_channel.is_read, filter); - yield_constr.constraint(builder, constr); - } - { - let constr = builder.sub_extension(jumpdest_flag_channel.addr_context, lv.context); - let constr = builder.mul_extension(filter, constr); - yield_constr.constraint(builder, constr); - } - { - let constr = builder.arithmetic_extension( - F::ONE, - -F::from_canonical_usize(Segment::JumpdestBits.unscale()), - filter, - jumpdest_flag_channel.addr_segment, - filter, - ); - yield_constr.constraint(builder, constr); - } - { - let constr = builder.sub_extension(jumpdest_flag_channel.addr_virtual, dst[0]); - let constr = builder.mul_extension(filter, constr); - yield_constr.constraint(builder, constr); + let jumpdest_flag_channel = lv.mem_channels[NUM_GP_CHANNELS - 1]; + + // Check that the destination address holds a `JUMPDEST` instruction. Note that + // this constraint does not need to be conditioned on `should_jump` because + // no read takes place if we're not jumping, so we're free to set the + // channel to 1. + { + let constr = builder.mul_sub_extension(filter, jumpdest_flag_channel.value[0], filter); + yield_constr.constraint(builder, constr); + } + + // Make sure that the JUMPDEST flag channel is constrained. + // Only need to read if we're about to jump and we're not in kernel mode. 
+ { + let constr = builder.mul_sub_extension( + jumps_lv.should_jump, + lv.is_kernel_mode, + jumps_lv.should_jump, + ); + let constr = builder.add_extension(jumpdest_flag_channel.used, constr); + let constr = builder.mul_extension(filter, constr); + yield_constr.constraint(builder, constr); + } + { + let constr = builder.mul_sub_extension(filter, jumpdest_flag_channel.is_read, filter); + yield_constr.constraint(builder, constr); + } + { + let constr = builder.sub_extension(jumpdest_flag_channel.addr_context, lv.context); + let constr = builder.mul_extension(filter, constr); + yield_constr.constraint(builder, constr); + } + { + let constr = builder.arithmetic_extension( + F::ONE, + -F::from_canonical_usize(Segment::JumpdestBits.unscale()), + filter, + jumpdest_flag_channel.addr_segment, + filter, + ); + yield_constr.constraint(builder, constr); + } + { + let constr = builder.sub_extension(jumpdest_flag_channel.addr_virtual, dst[0]); + let constr = builder.mul_extension(filter, constr); + yield_constr.constraint(builder, constr); + } } // Disable unused memory channels diff --git a/evm_arithmetization/src/cpu/kernel/aggregator.rs b/evm_arithmetization/src/cpu/kernel/aggregator.rs index 74a5f17cb..d24e856fa 100644 --- a/evm_arithmetization/src/cpu/kernel/aggregator.rs +++ b/evm_arithmetization/src/cpu/kernel/aggregator.rs @@ -11,9 +11,7 @@ use crate::cpu::kernel::parser::parse; pub const NUMBER_KERNEL_FILES: usize = if cfg!(feature = "eth_mainnet") { 157 -} else if cfg!(feature = "cdk_erigon") { - 155 -} else if cfg!(feature = "polygon_pos") { +} else if cfg!(feature = "cdk_erigon") || cfg!(feature = "polygon_pos") { 154 } else { // unreachable @@ -43,6 +41,7 @@ pub static KERNEL_FILES: [&str; NUMBER_KERNEL_FILES] = [ include_str!("asm/core/create_receipt.asm"), include_str!("asm/core/gas.asm"), include_str!("asm/core/intrinsic_gas.asm"), + #[cfg(not(feature = "cdk_erigon"))] include_str!("asm/core/jumpdest_analysis.asm"), include_str!("asm/core/nonce.asm"), include_str!("asm/core/process_txn.asm"), diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/call.asm b/evm_arithmetization/src/cpu/kernel/asm/core/call.asm index c6f91459e..ab8b67c4e 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/call.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/call.asm @@ -386,11 +386,17 @@ call_too_deep: SET_CONTEXT %checkpoint // Checkpoint %increment_call_depth - // Perform jumpdest analysis - %mload_context_metadata(@CTX_METADATA_CODE_SIZE) - GET_CONTEXT - // stack: ctx, code_size, retdest - %jumpdest_analysis + + // We skip jumpdest analysis with `cdk_erigon`. + #[cfg(not(feature = cdk_erigon))] + { + // Perform jumpdest analysis + %mload_context_metadata(@CTX_METADATA_CODE_SIZE) + GET_CONTEXT + // stack: ctx, code_size, retdest + %jumpdest_analysis + } + PUSH 0 // jump dest EXIT_KERNEL // (Old context) stack: new_ctx diff --git a/evm_arithmetization/src/cpu/kernel/asm/core/exception.asm b/evm_arithmetization/src/cpu/kernel/asm/core/exception.asm index 925921097..654dad8f5 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/core/exception.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/core/exception.asm @@ -124,22 +124,26 @@ global exc_invalid_jumpi_destination: global invalid_jump_jumpi_destination_common: - // We have a jump destination on the stack. We want to `PANIC` if it is valid, and jump to - // `fault_exception` if it is not. An address is a valid jump destination if it points to a - // `JUMPDEST` instruction. 
In practice, since in this implementation memory addresses are - // limited to 32 bits, we check two things: - // 1. the address is no more than 32 bits long, and - // 2. it points to a `JUMPDEST` instruction. - // stack: jump_dest - DUP1 - %shr_const(32) - %jumpi(fault_exception) // This keeps one copy of jump_dest on the stack, but that's fine. - // jump_dest is a valid address; check if it points to a `JUMP_DEST`. - DUP1 - %verify_non_jumpdest - %mload_current(@SEGMENT_JUMPDEST_BITS) - // stack: is_valid_jumpdest - %jumpi(panic) // Trap should never have been entered. + // We skip jump destinations verification with `cdk_erigon`. + #[cfg(not(feature = cdk_erigon))] + { + // We have a jump destination on the stack. We want to `PANIC` if it is valid, and jump to + // `fault_exception` if it is not. An address is a valid jump destination if it points to a + // `JUMPDEST` instruction. In practice, since in this implementation memory addresses are + // limited to 32 bits, we check two things: + // 1. the address is no more than 32 bits long, and + // 2. it points to a `JUMPDEST` instruction. + // stack: jump_dest + DUP1 + %shr_const(32) + %jumpi(fault_exception) // This keeps one copy of jump_dest on the stack, but that's fine. + // jump_dest is a valid address; check if it points to a `JUMP_DEST`. + DUP1 + %verify_non_jumpdest + %mload_current(@SEGMENT_JUMPDEST_BITS) + // stack: is_valid_jumpdest + %jumpi(panic) // Trap should never have been entered. + } %jump(fault_exception) diff --git a/evm_arithmetization/src/cpu/kernel/tests/core/jumpdest_analysis.rs b/evm_arithmetization/src/cpu/kernel/tests/core/jumpdest_analysis.rs index b0ef17033..f2d00ede5 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/core/jumpdest_analysis.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/core/jumpdest_analysis.rs @@ -1,14 +1,53 @@ +#![cfg(not(feature = "cdk_erigon"))] + use std::collections::{BTreeSet, HashMap}; use anyhow::Result; use ethereum_types::U256; use plonky2::field::goldilocks_field::GoldilocksField as F; +use plonky2::hash::hash_types::RichField; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::interpreter::Interpreter; use crate::cpu::kernel::opcodes::{get_opcode, get_push_opcode}; +use crate::memory::segments::Segment; +use crate::witness::memory::MemoryAddress; use crate::witness::operation::CONTEXT_SCALING_FACTOR; +impl Interpreter { + pub(crate) fn set_jumpdest_analysis_inputs(&mut self, jumps: HashMap>) { + self.generation_state.set_jumpdest_analysis_inputs(jumps); + } + + pub(crate) fn get_jumpdest_bit(&self, offset: usize) -> U256 { + if self.generation_state.memory.contexts[self.context()].segments + [Segment::JumpdestBits.unscale()] + .content + .len() + > offset + { + // Even though we are in the interpreter, `JumpdestBits` is not part of the + // preinitialized segments, so we don't need to carry out the additional checks + // when get the value from memory. 
+ self.generation_state.memory.get_with_init(MemoryAddress { + context: self.context(), + segment: Segment::JumpdestBits.unscale(), + virt: offset, + }) + } else { + 0.into() + } + } + + pub(crate) fn get_jumpdest_bits(&self, context: usize) -> Vec { + self.generation_state.memory.contexts[context].segments[Segment::JumpdestBits.unscale()] + .content + .iter() + .map(|x| x.unwrap_or_default().bit(0)) + .collect() + } +} + #[test] fn test_jumpdest_analysis() -> Result<()> { // By default the interpreter will skip jumpdest analysis asm and compute diff --git a/evm_arithmetization/src/cpu/kernel/tests/mod.rs b/evm_arithmetization/src/cpu/kernel/tests/mod.rs index 39810148b..2b983d099 100644 --- a/evm_arithmetization/src/cpu/kernel/tests/mod.rs +++ b/evm_arithmetization/src/cpu/kernel/tests/mod.rs @@ -26,11 +26,7 @@ mod signed_syscalls; mod transaction_parsing; mod transient_storage; -use std::{ - collections::{BTreeSet, HashMap}, - ops::Range, - str::FromStr, -}; +use std::{ops::Range, str::FromStr}; use anyhow::Result; use ethereum_types::U256; @@ -248,10 +244,6 @@ impl Interpreter { code.into_iter().map(|val| Some(U256::from(val))).collect(); } - pub(crate) fn set_jumpdest_analysis_inputs(&mut self, jumps: HashMap>) { - self.generation_state.set_jumpdest_analysis_inputs(jumps); - } - pub(crate) fn extract_kernel_memory(self, segment: Segment, range: Range) -> Vec { let mut output: Vec = Vec::with_capacity(range.end); for i in range { @@ -312,34 +304,6 @@ impl Interpreter { result } - pub(crate) fn get_jumpdest_bit(&self, offset: usize) -> U256 { - if self.generation_state.memory.contexts[self.context()].segments - [Segment::JumpdestBits.unscale()] - .content - .len() - > offset - { - // Even though we are in the interpreter, `JumpdestBits` is not part of the - // preinitialized segments, so we don't need to carry out the additional checks - // when get the value from memory. 
- self.generation_state.memory.get_with_init(MemoryAddress { - context: self.context(), - segment: Segment::JumpdestBits.unscale(), - virt: offset, - }) - } else { - 0.into() - } - } - - pub(crate) fn get_jumpdest_bits(&self, context: usize) -> Vec { - self.generation_state.memory.contexts[context].segments[Segment::JumpdestBits.unscale()] - .content - .iter() - .map(|x| x.unwrap_or_default().bit(0)) - .collect() - } - pub(crate) fn set_is_kernel(&mut self, is_kernel: bool) { self.generation_state.registers.is_kernel = is_kernel } diff --git a/evm_arithmetization/src/witness/transition.rs b/evm_arithmetization/src/witness/transition.rs index 6263977f4..fa66bee5b 100644 --- a/evm_arithmetization/src/witness/transition.rs +++ b/evm_arithmetization/src/witness/transition.rs @@ -1,12 +1,12 @@ -use ethereum_types::U256; use log::log_enabled; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; -use super::util::{mem_read_gp_with_log_and_fill, stack_pop_with_log_and_fill}; +use super::util::stack_pop_with_log_and_fill; use crate::cpu::columns::CpuColumnsView; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::constants::context_metadata::ContextMetadata; +use crate::cpu::kernel::opcodes::get_opcode; use crate::cpu::membus::NUM_GP_CHANNELS; use crate::cpu::stack::{ EQ_STACK_BEHAVIOR, IS_ZERO_STACK_BEHAVIOR, JUMPI_OP, JUMP_OP, MIGHT_OVERFLOW, STACK_BEHAVIORS, @@ -18,7 +18,7 @@ use crate::witness::gas::gas_to_charge; use crate::witness::memory::MemoryAddress; use crate::witness::operation::*; use crate::witness::state::RegistersState; -use crate::witness::util::mem_read_code_with_log_and_fill; +use crate::witness::util::{mem_read_code_with_log_and_fill, mem_read_gp_with_log_and_fill}; use crate::{arithmetic, logic}; pub(crate) const EXC_STOP_CODE: u8 = 6; @@ -382,30 +382,42 @@ where .map_err(|_| ProgramError::InvalidJumpDestination)?; if !self.generate_jumpdest_analysis(dst as usize) { - let gen_state = self.get_mut_generation_state(); - let (jumpdest_bit, jumpdest_bit_log) = mem_read_gp_with_log_and_fill( - NUM_GP_CHANNELS - 1, - MemoryAddress::new( - gen_state.registers.context, - Segment::JumpdestBits, - dst as usize, - ), - gen_state, - &mut row, - ); - row.mem_channels[1].value[0] = F::ONE; - if gen_state.registers.is_kernel { - // Don't actually do the read, just set the address, etc. - let channel = &mut row.mem_channels[NUM_GP_CHANNELS - 1]; - channel.used = F::ZERO; - channel.value[0] = F::ONE; - } else { - if jumpdest_bit != U256::one() { + let gen_state = self.get_mut_generation_state(); + + // We skip jump destinations verification with `cdk_erigon`. + if !cfg!(feature = "cdk_erigon") { + let (jumpdest_bit, jumpdest_bit_log) = mem_read_gp_with_log_and_fill( + NUM_GP_CHANNELS - 1, + MemoryAddress::new( + gen_state.registers.context, + Segment::JumpdestBits, + dst as usize, + ), + gen_state, + &mut row, + ); + + if gen_state.registers.is_kernel { + // Don't actually do the read, just set the address, etc. + let channel = &mut row.mem_channels[NUM_GP_CHANNELS - 1]; + channel.used = F::ZERO; + channel.value[0] = F::ONE; + } else { + if jumpdest_bit != ethereum_types::U256::one() { + return Err(ProgramError::InvalidJumpDestination); + } + self.push_memory(jumpdest_bit_log); + } + } else if !gen_state.registers.is_kernel { + // Perform a sanity check on the jumpdest, and abort if it is invalid. 
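Concretely, the sanity check boils down to reading the byte at the jump target in the current context's Code segment and comparing it against the JUMPDEST opcode (0x5b), which is what the lines below do via `MemoryAddress` and `get_opcode`. A standalone sketch with a plain byte slice standing in for the kernel's memory:

    fn is_jumpdest(code: &[u8], dst: usize) -> bool {
        // 0x5b is the JUMPDEST opcode; an out-of-range target is invalid as well.
        const JUMPDEST: u8 = 0x5b;
        code.get(dst) == Some(&JUMPDEST)
    }
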
+ let addr = + MemoryAddress::new(gen_state.registers.context, Segment::Code, dst as usize); + let jump_dst = gen_state.get_from_memory(addr); + if jump_dst != get_opcode("JUMPDEST").into() { return Err(ProgramError::InvalidJumpDestination); } - self.push_memory(jumpdest_bit_log); } // Extra fields required by the constraints. @@ -454,26 +466,42 @@ where } let gen_state = self.get_mut_generation_state(); - let (jumpdest_bit, jumpdest_bit_log) = mem_read_gp_with_log_and_fill( - NUM_GP_CHANNELS - 1, - MemoryAddress::new( + + // We skip jump destinations verification with `cdk_erigon`. + if !cfg!(feature = "cdk_erigon") { + let (jumpdest_bit, jumpdest_bit_log) = mem_read_gp_with_log_and_fill( + NUM_GP_CHANNELS - 1, + MemoryAddress::new( + gen_state.registers.context, + Segment::JumpdestBits, + dst.low_u32() as usize, + ), + gen_state, + &mut row, + ); + + if !should_jump || gen_state.registers.is_kernel { + // Don't actually do the read, just set the address, etc. + let channel = &mut row.mem_channels[NUM_GP_CHANNELS - 1]; + channel.used = F::ZERO; + channel.value[0] = F::ONE; + } else { + if jumpdest_bit != ethereum_types::U256::one() { + return Err(ProgramError::InvalidJumpiDestination); + } + self.push_memory(jumpdest_bit_log); + } + } else if should_jump && !gen_state.registers.is_kernel { + // Perform a sanity check on the jumpdest, and abort if it is invalid. + let addr = MemoryAddress::new( gen_state.registers.context, - Segment::JumpdestBits, + Segment::Code, dst.low_u32() as usize, - ), - gen_state, - &mut row, - ); - if !should_jump || gen_state.registers.is_kernel { - // Don't actually do the read, just set the address, etc. - let channel = &mut row.mem_channels[NUM_GP_CHANNELS - 1]; - channel.used = F::ZERO; - channel.value[0] = F::ONE; - } else { - if jumpdest_bit != U256::one() { + ); + let jump_dst = gen_state.get_from_memory(addr); + if jump_dst != get_opcode("JUMPDEST").into() { return Err(ProgramError::InvalidJumpiDestination); } - self.push_memory(jumpdest_bit_log); } let diff = row.stack_len - F::TWO; From eafbcc6c170e1a2938db65eb4bb9f6f40319cdcd Mon Sep 17 00:00:00 2001 From: 0xaatif <169152398+0xaatif@users.noreply.github.com> Date: Tue, 17 Sep 2024 13:51:59 +0100 Subject: [PATCH 4/5] refactor: --version in the CLI (#632) * mark: 0xaatif/version * refactor: --version * refactor: remove dead code * fix: propagate_version --- Cargo.lock | 66 +++++++++++++++++++++++++++- Cargo.toml | 3 -- zero/Cargo.toml | 3 +- zero/build.rs | 31 +++---------- zero/src/bin/leader.rs | 13 ------ zero/src/bin/leader/cli.rs | 1 + zero/src/bin/rpc.rs | 16 ++----- zero/src/bin/verifier.rs | 16 +------ zero/src/bin/verifier/cli.rs | 1 + zero/src/bin/worker.rs | 18 ++------ zero/src/lib.rs | 10 ++++- zero/src/prover_state/persistence.rs | 18 +++++--- zero/src/version.rs | 10 ----- 13 files changed, 100 insertions(+), 106 deletions(-) delete mode 100644 zero/src/version.rs diff --git a/Cargo.lock b/Cargo.lock index 16fda46f7..dcd3ec2b7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1244,6 +1244,8 @@ version = "1.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b62ac837cdb5cb22e10a256099b4fc502b1dfe560cb282963a974d7abd80e476" dependencies = [ + "jobserver", + "libc", "shlex", ] @@ -2368,6 +2370,19 @@ version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" +[[package]] +name = "git2" +version = "0.19.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b903b73e45dc0c6c596f2d37eccece7c1c8bb6e4407b001096387c63d0d93724" +dependencies = [ + "bitflags 2.6.0", + "libc", + "libgit2-sys", + "log", + "url", +] + [[package]] name = "glob" version = "0.3.1" @@ -2770,6 +2785,15 @@ dependencies = [ "libc", ] +[[package]] +name = "jobserver" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +dependencies = [ + "libc", +] + [[package]] name = "js-sys" version = "0.3.70" @@ -2856,6 +2880,18 @@ version = "0.2.158" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +[[package]] +name = "libgit2-sys" +version = "0.17.0+1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10472326a8a6477c3c20a64547b0059e4b0d086869eee31e6d7da728a8eb7224" +dependencies = [ + "cc", + "libc", + "libz-sys", + "pkg-config", +] + [[package]] name = "libm" version = "0.2.8" @@ -2884,6 +2920,18 @@ dependencies = [ "threadpool", ] +[[package]] +name = "libz-sys" +version = "1.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "linkme" version = "0.3.28" @@ -5294,12 +5342,26 @@ checksum = "c32e7318e93a9ac53693b6caccfb05ff22e04a44c7cf8a279051f24c09da286f" dependencies = [ "anyhow", "derive_builder", - "rustc_version 0.4.1", "rustversion", "time", "vergen-lib", ] +[[package]] +name = "vergen-git2" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a62c52cd2b2b8b7ec75fc20111b3022ac3ff83e4fc14b9497cfcfd39c54f9c67" +dependencies = [ + "anyhow", + "derive_builder", + "git2", + "rustversion", + "time", + "vergen", + "vergen-lib", +] + [[package]] name = "vergen-lib" version = "0.1.3" @@ -5753,7 +5815,7 @@ dependencies = [ "tracing", "tracing-subscriber", "url", - "vergen", + "vergen-git2", "zk_evm_common", ] diff --git a/Cargo.toml b/Cargo.toml index 5bbffbb10..45f521447 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,7 +51,6 @@ criterion = "0.5.1" dotenvy = "0.15.7" either = "1.12.0" enum-as-inner = "0.6.0" -enumn = "0.1.13" env_logger = "0.11.3" eth_trie = "0.4.0" ethereum-types = "0.14.1" @@ -88,7 +87,6 @@ serde = "1.0.203" serde-big-array = "0.5.1" serde_json = "1.0.118" serde_path_to_error = "0.1.16" -serde_with = "3.8.1" sha2 = "0.10.8" static_assertions = "1.1.0" thiserror = "1.0.61" @@ -101,7 +99,6 @@ tracing-subscriber = { version = "0.3", features = ["env-filter"] } u4 = "0.1.0" uint = "0.9.5" url = "2.5.2" -vergen = { version = "9.0.0", features = ["build", "rustc"] } winnow = "0.6.13" # local dependencies diff --git a/zero/Cargo.toml b/zero/Cargo.toml index 109cb1f28..7086ecbd6 100644 --- a/zero/Cargo.toml +++ b/zero/Cargo.toml @@ -40,7 +40,6 @@ tower = { workspace = true, features = ["retry"] } tracing = { workspace = true } tracing-subscriber = { workspace = true } url = { workspace = true } -vergen = { workspace = true } # Local dependencies compat = { workspace = true } @@ -57,7 +56,7 @@ jemallocator = "0.5.4" [build-dependencies] anyhow = { workspace = true } cargo_metadata = { workspace = true } -vergen = { workspace = true } +vergen-git2 = { version = "1.0.0", features = ["build"] } [features] diff --git a/zero/build.rs b/zero/build.rs 
index c3e7b006d..53799224d 100644 --- a/zero/build.rs +++ b/zero/build.rs @@ -1,30 +1,9 @@ -use anyhow::Context as _; -use vergen::{BuildBuilder, Emitter, RustcBuilder}; +use vergen_git2::{BuildBuilder, Emitter, Git2Builder}; -fn main() -> anyhow::Result<()> { - let build_timestamp = BuildBuilder::default().build_timestamp(true).build()?; - let rust_commit_hash = RustcBuilder::default().commit_hash(true).build()?; - - Emitter::default() - .add_instructions(&build_timestamp)? - .add_instructions(&rust_commit_hash)? +fn main() -> Result<(), Box> { + Emitter::new() + .add_instructions(&BuildBuilder::default().build_timestamp(true).build()?)? + .add_instructions(&Git2Builder::default().describe(true, true, None).build()?)? .emit()?; - - let meta = cargo_metadata::MetadataCommand::new() - .exec() - .context("failed to probe cargo-metadata")?; - let version = &meta - .packages - .iter() - .find(|it| it.name == "evm_arithmetization") - .context("couldn't find evm_arithmetization package")? - .version; - println!( - "cargo::rustc-env=EVM_ARITHMETIZATION_PKG_VER={}.{}.x", - // patch version change should not prompt circuits regeneration - version.major, - version.minor - ); - Ok(()) } diff --git a/zero/src/bin/leader.rs b/zero/src/bin/leader.rs index e01465c29..abf19bde3 100644 --- a/zero/src/bin/leader.rs +++ b/zero/src/bin/leader.rs @@ -1,6 +1,5 @@ zk_evm_common::check_chain_features!(); -use std::env; use std::sync::Arc; use anyhow::Result; @@ -15,7 +14,6 @@ use zero::{ block_interval::BlockInterval, prover_state::persistence::set_circuit_cache_dir_env_if_not_set, }; use zero::{fs::get_previous_proof, ops::register}; -use zero::{prover_state::persistence::CIRCUIT_VERSION, version}; use self::leader::*; use crate::client::{client_main, LeaderConfig}; @@ -32,17 +30,6 @@ async fn main() -> Result<()> { set_circuit_cache_dir_env_if_not_set()?; zero::tracing::init(); - let args: Vec = env::args().collect(); - - if args.contains(&"--version".to_string()) { - version::print_version( - CIRCUIT_VERSION.as_str(), - env!("VERGEN_RUSTC_COMMIT_HASH"), - env!("VERGEN_BUILD_TIMESTAMP"), - ); - return Ok(()); - } - let args = cli::Cli::parse(); if let Command::Clean = args.command { diff --git a/zero/src/bin/leader/cli.rs b/zero/src/bin/leader/cli.rs index ee45c946f..3569efc41 100644 --- a/zero/src/bin/leader/cli.rs +++ b/zero/src/bin/leader/cli.rs @@ -8,6 +8,7 @@ use zero::rpc::RpcType; /// zero-bin leader config #[derive(Parser)] +#[command(version = zero::version(), propagate_version = true)] pub(crate) struct Cli { #[command(subcommand)] pub(crate) command: Command, diff --git a/zero/src/bin/rpc.rs b/zero/src/bin/rpc.rs index 5f9d9c276..9c6baa535 100644 --- a/zero/src/bin/rpc.rs +++ b/zero/src/bin/rpc.rs @@ -1,4 +1,3 @@ -use std::env; use std::sync::Arc; use alloy::primitives::B256; @@ -11,11 +10,11 @@ use clap::{Args, Parser, Subcommand, ValueHint}; use futures::StreamExt; use tracing_subscriber::{prelude::*, EnvFilter}; use url::Url; +use zero::block_interval::BlockInterval; use zero::block_interval::BlockIntervalStream; use zero::prover::BlockProverInput; use zero::provider::CachedProvider; -use zero::{block_interval::BlockInterval, prover_state::persistence::CIRCUIT_VERSION}; -use zero::{rpc, version}; +use zero::rpc; use self::rpc::{retry::build_http_retry_provider, RpcType}; @@ -68,6 +67,7 @@ enum Command { } #[derive(Parser)] +#[command(version = zero::version(), propagate_version = true)] struct Cli { #[clap(flatten)] pub(crate) config: RpcToolConfig, @@ -203,16 +203,6 @@ impl Cli { #[tokio::main] 
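For reference, a minimal sketch of the pattern the binaries now share: a single `version()` helper feeds clap's `--version` flag, and `propagate_version` forwards the same string to subcommands. Apart from the `#[command(...)]` attribute, everything here is illustrative (the real helper also reports the git describe output and the kernel hash):

    use clap::Parser;

    // Stand-in for zero::version().
    fn version() -> String {
        format!("{} ({})", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION"))
    }

    #[derive(Parser)]
    #[command(version = version(), propagate_version = true)]
    struct Cli {
        /// Input file path.
        input: std::path::PathBuf,
    }

    fn main() {
        let _cli = Cli::parse();
    }
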
async fn main() -> anyhow::Result<()> { - let args: Vec = env::args().collect(); - if args.contains(&"--version".to_string()) { - version::print_version( - CIRCUIT_VERSION.as_str(), - env!("VERGEN_RUSTC_COMMIT_HASH"), - env!("VERGEN_BUILD_TIMESTAMP"), - ); - return Ok(()); - } - tracing_subscriber::Registry::default() .with( tracing_subscriber::fmt::layer() diff --git a/zero/src/bin/verifier.rs b/zero/src/bin/verifier.rs index a7d80197a..7c5a671be 100644 --- a/zero/src/bin/verifier.rs +++ b/zero/src/bin/verifier.rs @@ -1,6 +1,5 @@ zk_evm_common::check_chain_features!(); -use std::env; use std::fs::File; use anyhow::Result; @@ -9,10 +8,7 @@ use dotenvy::dotenv; use proof_gen::proof_types::GeneratedBlockProof; use serde_json::Deserializer; use tracing::info; -use zero::{ - prover_state::persistence::{set_circuit_cache_dir_env_if_not_set, CIRCUIT_VERSION}, - version, -}; +use zero::prover_state::persistence::set_circuit_cache_dir_env_if_not_set; use self::verifier::*; mod verifier { @@ -25,16 +21,6 @@ fn main() -> Result<()> { init::tracing(); set_circuit_cache_dir_env_if_not_set()?; - let args: Vec = env::args().collect(); - if args.contains(&"--version".to_string()) { - version::print_version( - CIRCUIT_VERSION.as_str(), - env!("VERGEN_RUSTC_COMMIT_HASH"), - env!("VERGEN_BUILD_TIMESTAMP"), - ); - return Ok(()); - } - let args = cli::Cli::parse(); let file = File::open(args.file_path)?; diff --git a/zero/src/bin/verifier/cli.rs b/zero/src/bin/verifier/cli.rs index 157674130..8c046d85c 100644 --- a/zero/src/bin/verifier/cli.rs +++ b/zero/src/bin/verifier/cli.rs @@ -4,6 +4,7 @@ use clap::{Parser, ValueHint}; use zero::prover_state::cli::CliProverStateConfig; #[derive(Parser)] +#[command(version = zero::version(), propagate_version = true)] pub(crate) struct Cli { /// The file containing the proof to verify #[arg(short, long, value_hint = ValueHint::FilePath)] diff --git a/zero/src/bin/worker.rs b/zero/src/bin/worker.rs index 1d941341f..096863a53 100644 --- a/zero/src/bin/worker.rs +++ b/zero/src/bin/worker.rs @@ -1,14 +1,11 @@ -use std::env; - use anyhow::Result; use clap::Parser; use dotenvy::dotenv; use paladin::runtime::WorkerRuntime; use zero::prover_state::{ - cli::CliProverStateConfig, - persistence::{set_circuit_cache_dir_env_if_not_set, CIRCUIT_VERSION}, + cli::CliProverStateConfig, persistence::set_circuit_cache_dir_env_if_not_set, }; -use zero::{ops::register, tracing, version}; +use zero::{ops::register, tracing}; // TODO: https://github.com/0xPolygonZero/zk_evm/issues/302 // this should probably be removed. @@ -17,6 +14,7 @@ use zero::{ops::register, tracing, version}; static GLOBAL: jemallocator::Jemalloc = jemallocator::Jemalloc; #[derive(Parser)] +#[command(version = zero::version(), propagate_version = true)] struct Cli { #[clap(flatten)] paladin: paladin::config::Config, @@ -26,16 +24,6 @@ struct Cli { #[tokio::main] async fn main() -> Result<()> { - let args: Vec = env::args().collect(); - if args.contains(&"--version".to_string()) { - version::print_version( - CIRCUIT_VERSION.as_str(), - env!("VERGEN_RUSTC_COMMIT_HASH"), - env!("VERGEN_BUILD_TIMESTAMP"), - ); - return Ok(()); - } - dotenv().ok(); tracing::init(); set_circuit_cache_dir_env_if_not_set()?; diff --git a/zero/src/lib.rs b/zero/src/lib.rs index 3dc34d421..88f376fc7 100644 --- a/zero/src/lib.rs +++ b/zero/src/lib.rs @@ -12,10 +12,18 @@ pub mod prover_state; pub mod provider; pub mod rpc; pub mod tracing; -pub mod version; /// Size of the channel used to send block prover inputs to the per block /// proving task. 
If the proving task is slow and can not consume inputs fast /// enough retrieval of the block prover inputs will block until the proving /// task consumes some of the inputs. pub const BLOCK_CHANNEL_SIZE: usize = 16; + +/// Common information for the `--version` CLI flags. +pub fn version() -> String { + let pkg_name = env!("CARGO_PKG_NAME"); + let git_describe = env!("VERGEN_GIT_DESCRIBE"); + let timestamp = env!("VERGEN_BUILD_TIMESTAMP"); + let kernel_hash = &**prover_state::persistence::KERNEL_HASH; + format!("{pkg_name} ({git_describe}) (kernel hash: {kernel_hash}) [built: {timestamp}]") +} diff --git a/zero/src/prover_state/persistence.rs b/zero/src/prover_state/persistence.rs index 6096b7ab1..cd3f9a043 100644 --- a/zero/src/prover_state/persistence.rs +++ b/zero/src/prover_state/persistence.rs @@ -35,8 +35,14 @@ const KERNEL_HASH_PREFIX: usize = 8; /// kernel to determine if the circuit is compatible with our current binary. If /// the kernel hash of the circuit that we are loading in from disk differs, /// then using these circuits would cause failures during proof generation -pub static CIRCUIT_VERSION: Lazy = - Lazy::new(|| hex::encode(KERNEL.hash())[..KERNEL_HASH_PREFIX].to_string()); +pub static KERNEL_HASH: Lazy<&'static str> = Lazy::new(|| { + String::leak( + hex::encode(KERNEL.hash()) + .chars() + .take(KERNEL_HASH_PREFIX) + .collect(), + ) +}); fn get_serializers() -> ( DefaultGateSerializer, @@ -126,7 +132,7 @@ impl DiskResource for BaseProverResource { "{}/{}_base_{}_{}", circuit_dir(), PROVER_STATE_FILE_PREFIX, - *CIRCUIT_VERSION, + *KERNEL_HASH, p.get_configuration_digest() ) } @@ -162,7 +168,7 @@ impl DiskResource for MonolithicProverResource { "{}/{}_monolithic_{}_{}", circuit_dir(), PROVER_STATE_FILE_PREFIX, - *CIRCUIT_VERSION, + *KERNEL_HASH, p.get_configuration_digest() ) } @@ -197,7 +203,7 @@ impl DiskResource for RecursiveCircuitResource { "{}/{}_{}_{}_{}", circuit_dir(), PROVER_STATE_FILE_PREFIX, - *CIRCUIT_VERSION, + *KERNEL_HASH, circuit_type.as_short_str(), size ) @@ -241,7 +247,7 @@ impl DiskResource for VerifierResource { "{}/{}_{}_{}", circuit_dir(), VERIFIER_STATE_FILE_PREFIX, - *CIRCUIT_VERSION, + *KERNEL_HASH, p.get_configuration_digest() ) } diff --git a/zero/src/version.rs b/zero/src/version.rs deleted file mode 100644 index db5ff42b0..000000000 --- a/zero/src/version.rs +++ /dev/null @@ -1,10 +0,0 @@ -pub fn print_version( - evm_arithmetization_kernel_version: &str, - rustc_commit_hash: &str, - rustc_timestamp: &str, -) { - println!( - "evm_arithmetization Kernel Version: {}\nBuild Commit Hash: {}\nBuild Timestamp: {}", - evm_arithmetization_kernel_version, rustc_commit_hash, rustc_timestamp - ) -} From 6ef8827767143006ee9bd76db3a3c15b0deadb98 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alonso=20Gonz=C3=A1lez?= Date: Wed, 18 Sep 2024 15:43:36 +0200 Subject: [PATCH 5/5] Refactor linked lists initial hashing (#581) * Deattach hashed nodes from initial trie * Merge mpt_set_payload with mpt_hash * Add missing files * Fix errors in the stack * Fix hash mismatch on initial trie * Refactor final state hash * Fix final hash * [WIP] debugging erc721 * Fix erc721 * [WIP] debugging block 28 * Fix b28 * Fix block 28 * [WIP] Debugging block 20240058 * Fix extension nodes bug * Fix block 20240058 * [WIP] benchmarking * Set inital trie with insertions * Remove hash nodes * Fix missing segment number * Clean code * Fix unit tests * Fix erc20 * [WIP] Fixing blocks * Clean code * Clean and fmt * Address reviews * Apply suggestions from code review Co-authored-by: Hamy 
Ratoanina * [WIP] Fixing trie data length * Fix trie_data_length mismatch * Check correctness of inital next node ptr and check strict keys monotonicity * Address review comment * Apply suggestions from code review Co-authored-by: Hamy Ratoanina * Minor * Fix circuit sizes * Apply suggestions from code review Co-authored-by: Robin Salen <30937548+Nashtare@users.noreply.github.com> * Add missing stack comment --------- Co-authored-by: Hamy Ratoanina Co-authored-by: Robin Salen Co-authored-by: Robin Salen <30937548+Nashtare@users.noreply.github.com> --- .../src/cpu/kernel/aggregator.rs | 2 +- .../src/cpu/kernel/asm/main.asm | 8 +- .../src/cpu/kernel/asm/mpt/hash/hash.asm | 4 +- .../asm/mpt/linked_list/final_tries.asm | 12 +- .../asm/mpt/linked_list/initial_tries.asm | 399 +++++------------- .../asm/mpt/linked_list/linked_list.asm | 171 +++++++- .../src/cpu/kernel/asm/mpt/read.asm | 6 +- .../src/cpu/kernel/tests/account_code.rs | 59 +-- .../src/cpu/kernel/tests/mpt/delete.rs | 24 +- .../src/cpu/kernel/tests/mpt/insert.rs | 29 +- .../src/cpu/kernel/tests/mpt/read.rs | 4 +- .../src/generation/prover_input.rs | 2 +- scripts/prove_stdio.sh | 4 +- 13 files changed, 325 insertions(+), 399 deletions(-) diff --git a/evm_arithmetization/src/cpu/kernel/aggregator.rs b/evm_arithmetization/src/cpu/kernel/aggregator.rs index d24e856fa..9ba350242 100644 --- a/evm_arithmetization/src/cpu/kernel/aggregator.rs +++ b/evm_arithmetization/src/cpu/kernel/aggregator.rs @@ -140,8 +140,8 @@ pub static KERNEL_FILES: [&str; NUMBER_KERNEL_FILES] = [ include_str!("asm/mpt/insert/insert_leaf.asm"), include_str!("asm/mpt/insert/insert_trie_specific.asm"), include_str!("asm/mpt/linked_list/linked_list.asm"), - include_str!("asm/mpt/linked_list/initial_tries.asm"), include_str!("asm/mpt/linked_list/final_tries.asm"), + include_str!("asm/mpt/linked_list/initial_tries.asm"), include_str!("asm/mpt/read.asm"), include_str!("asm/mpt/storage/storage_read.asm"), include_str!("asm/mpt/storage/storage_write.asm"), diff --git a/evm_arithmetization/src/cpu/kernel/asm/main.asm b/evm_arithmetization/src/cpu/kernel/asm/main.asm index 6f20938af..e72c20ee6 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/main.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/main.asm @@ -215,14 +215,16 @@ global check_state_trie: // `GLOBAL_METADATA_TRIE_DATA_SIZE` is correct. %get_trie_data_size // stack: trie_data_len - PROVER_INPUT(trie_ptr::state) + PROVER_INPUT(trie_ptr::initial_state) %mstore_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT) PROVER_INPUT(trie_ptr::trie_data_size) %mstore_global_metadata(@GLOBAL_METADATA_TRIE_DATA_SIZE) - %set_initial_tries + // stack: trie_data_len + %set_initial_state_trie + // stack: trie_data_len PUSH @INITIAL_RLP_ADDR // stack: rlp_start, trie_data_len @@ -233,7 +235,7 @@ global check_state_trie: %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_DIGEST_BEFORE) %assert_eq // Check that the stored trie data length is correct. 
- %mload_global_metadata(@GLOBAL_METADATA_TRIE_DATA_SIZE) + %mload_global_metadata(@GLOBAL_METADATA_TRIE_DATA_SIZE) %assert_eq // We set a dummy value as an initial trie data length, diff --git a/evm_arithmetization/src/cpu/kernel/asm/mpt/hash/hash.asm b/evm_arithmetization/src/cpu/kernel/asm/mpt/hash/hash.asm index 82b14865c..37f9fc670 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/mpt/hash/hash.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/mpt/hash/hash.asm @@ -29,12 +29,12 @@ mpt_hash_hash_if_rlp: mpt_hash_hash_rlp: // stack: result, result_len, new_len, retdest %stack (result, result_len, new_len) - -> (@SEGMENT_RLP_RAW, result, result_len, mpt_hash_hash_rlp_after_unpacking, result_len, new_len) + -> (@INITIAL_RLP_ADDR, result, result_len, mpt_hash_hash_rlp_after_unpacking, result_len, new_len) // stack: addr, result, result_len, mpt_hash_hash_rlp_after_unpacking, result_len, new_len %jump(mstore_unpacking) mpt_hash_hash_rlp_after_unpacking: // stack: result_addr, result_len, new_len, retdest - POP PUSH @SEGMENT_RLP_RAW // ctx == virt == 0 + POP PUSH @INITIAL_RLP_ADDR // ctx == 0, virt == 1 // stack: result_addr, result_len, new_len, retdest KECCAK_GENERAL // stack: hash, new_len, retdest diff --git a/evm_arithmetization/src/cpu/kernel/asm/mpt/linked_list/final_tries.asm b/evm_arithmetization/src/cpu/kernel/asm/mpt/linked_list/final_tries.asm index 9db07083d..53093f4f4 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/mpt/linked_list/final_tries.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/mpt/linked_list/final_tries.asm @@ -209,21 +209,17 @@ after_mpt_delete_slot: global set_final_tries: PUSH set_final_tries_after - PUSH @SEGMENT_STORAGE_LINKED_LIST - %add_const(@STORAGE_LINKED_LISTS_NODE_SIZE) // Skip the first node. + %first_initial_slot // Skip the first node. %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT) - PUSH @SEGMENT_ACCOUNTS_LINKED_LIST - %add_const(@ACCOUNTS_LINKED_LISTS_NODE_SIZE) // Skip the first node. + %first_initial_account // Skip the first node. %jump(delete_removed_accounts) set_final_tries_after: // stack: new_state_root PUSH set_final_tries_after_after SWAP1 // stack: new_state_root, set_final_tries_after_after - PUSH @SEGMENT_STORAGE_LINKED_LIST - %next_slot + %first_slot SWAP1 - PUSH @SEGMENT_ACCOUNTS_LINKED_LIST - %next_account + %first_account %jump(insert_all_accounts) set_final_tries_after_after: //stack: new_state_root diff --git a/evm_arithmetization/src/cpu/kernel/asm/mpt/linked_list/initial_tries.asm b/evm_arithmetization/src/cpu/kernel/asm/mpt/linked_list/initial_tries.asm index 12d463b58..df4762a51 100644 --- a/evm_arithmetization/src/cpu/kernel/asm/mpt/linked_list/initial_tries.asm +++ b/evm_arithmetization/src/cpu/kernel/asm/mpt/linked_list/initial_tries.asm @@ -1,306 +1,129 @@ -// Set the payload pointers of the leaves in the trie with root at `node_ptr` -// to mem[payload_ptr_ptr] + step*i, -// for i =0..n_leaves. This is used to constraint the -// initial state and account tries payload pointers such that they are exactly -// those of the initial accounts and linked lists. -// Pre stack: node_ptr, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest -// Post stack: account_ptr_ptr, storage_ptr_ptr -global mpt_set_payload: - // stack: node_ptr, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - DUP1 %mload_trie_data - // stack: node_type, node_ptr, account_ptr_ptr, storage_ptr_ptr, retdest - // Increment node_ptr, so it points to the node payload instead of its type. 
- SWAP1 %increment SWAP1 - // stack: node_type, after_node_type, account_ptr_ptr, storage_ptr_ptr, retdest - DUP1 %eq_const(@MPT_NODE_EMPTY) %jumpi(skip) - DUP1 %eq_const(@MPT_NODE_BRANCH) %jumpi(set_payload_branch) - DUP1 %eq_const(@MPT_NODE_EXTENSION) %jumpi(set_payload_extension) - DUP1 %eq_const(@MPT_NODE_LEAF) %jumpi(set_payload_leaf) - DUP1 %eq_const(@MPT_NODE_HASH) %jumpi(skip) - PANIC - -skip: - // The following 2-lines block is the inlined version of - // %stack (node_type, after_node_type, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest) -> - // (retdest, account_ptr_ptr, storage_ptr_ptr) - POP POP SWAP3 POP - SWAP3 SWAP1 POP - - JUMP - -%macro mpt_set_payload - %stack (node_ptr, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles) -> - (node_ptr, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, %%after) - %jump(mpt_set_payload) -%%after: -%endmacro - -%macro set_initial_tries - PUSH %%after - PUSH 0 // empty nibbles - PUSH 0 // num nibbles - PUSH @SEGMENT_STORAGE_LINKED_LIST - %add_const(8) // The first node is the special node, of size 5, so the first value is at position 5 + 3. - PUSH @SEGMENT_ACCOUNTS_LINKED_LIST - %add_const(6) // The first node is the special node, of size 4, so the first payload is at position 4 + 2. +global set_initial_state_trie: + // stack: retdest + PUSH set_initial_state_trie_after + %first_initial_slot // Skip the first node. %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT) - %jump(mpt_set_payload) -%%after: - // We store account_ptr_ptr - 2, i.e. a pointer to the first node not in the initial state. - %sub_const(2) - %mstore_global_metadata(@GLOBAL_METADATA_INITIAL_ACCOUNTS_LINKED_LIST_LEN) - // We store storage_ptr_ptr - 3, i.e. a pointer to the first node not in the initial state. - %sub_const(3) - %mstore_global_metadata(@GLOBAL_METADATA_INITIAL_STORAGE_LINKED_LIST_LEN) -%endmacro - -// Pre stack: node_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest -// Post stack: storage_ptr_ptr -global mpt_set_storage_payload: - // stack: node_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - DUP1 %mload_trie_data - // stack: node_type, node_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - // Increment node_ptr, so it points to the node payload instead of its type. - SWAP1 %increment SWAP1 - // stack: node_type, after_node_type, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - - DUP1 %eq_const(@MPT_NODE_EMPTY) %jumpi(storage_skip) - DUP1 %eq_const(@MPT_NODE_BRANCH) %jumpi(set_payload_storage_branch) - DUP1 %eq_const(@MPT_NODE_EXTENSION) %jumpi(set_payload_storage_extension) - DUP1 %eq_const(@MPT_NODE_LEAF) %jumpi(set_payload_storage_leaf) - -storage_skip: - // stack: node_type, after_node_type, storage_ptr_ptr, num_nibbles, packedـnibbles, retdest - %stack (node_type, after_node_type, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest) -> (retdest, storage_ptr_ptr) + %first_initial_account // Skip the first node. 
+ %jump(insert_all_initial_accounts) +set_initial_state_trie_after: + //stack: new_state_root + %mstore_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT) JUMP -%macro mpt_set_storage_payload - %stack(node_ptr, storage_ptr_ptr, num_nibbles, nibbles) -> (node_ptr, storage_ptr_ptr, num_nibbles, nibbles, %%after) - %jump(mpt_set_storage_payload) +%macro set_initial_state_trie + // stack: (empty) + PUSH %%after + %jump(set_initial_state_trie) %%after: %endmacro -set_payload_branch: - // stack: node_type, after_node_type, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - POP - - PUSH 0 // child counter - // Call mpt_set_payload on each child - %rep 16 - // The following 4-lines block is the inlined version of - // %stack (i, child_ptr_ptr, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles) -> - // (num_nibbles, packed_nibbles, 1, i, child_ptr_ptr, account_ptr_ptr, storage_ptr_ptr, i, num_nibbles, packed_nibbles, child_ptr_ptr) - SWAP2 DUP2 DUP4 - PUSH 1 - DUP9 DUP9 SWAP8 - SWAP6 SWAP10 SWAP9 - - // We do not check the stored nibbles here, as the current value is not written yet. - %merge_nibbles - // stack: num_merged_nibbles, merged_nibbles, child_ptr_ptr, account_ptr_ptr, storage_ptr_ptr, i, num_nibbles, packed_nibbles, child_ptr_ptr - - // The following line is the inlined version of - // %stack (num_merged_nibbles, merged_nibbles, child_ptr_ptr, account_ptr_ptr, storage_ptr_ptr) -> - // (child_ptr_ptr, account_ptr_ptr, storage_ptr_ptr, num_merged_nibbles, merged_nibbles) - SWAP3 SWAP1 SWAP4 SWAP2 - - // stack: child_ptr_ptr, account_ptr_ptr, storage_ptr_ptr, num_merged_nibbles, merged_nibbles, i, num_nibbles, packed_nibbles, child_ptr_ptr, retdest - %mload_trie_data - // stack: child_ptr, account_ptr_ptr, storage_ptr_ptr, num_merged_nibbles, merged_nibbles, i, num_nibbles, packed_nibbles, child_ptr_ptr, retdest - %mpt_set_payload - // stack: account_ptr_ptr', storage_ptr_ptr', i, num_nibbles, packed_nibbles, child_ptr_ptr, retdest - - // The following line is the inlined version of - // %stack (account_ptr_ptr_p, storage_ptr_ptr_p, i, num_nibbles, packed_nibbles, child_ptr_ptr) -> - // (child_ptr_ptr, i, account_ptr_ptr_p, storage_ptr_ptr_p, num_nibbles, packed_nibbles) - SWAP2 SWAP1 SWAP3 SWAP4 SWAP5 - - // stack: (child_ptr_ptr, i, account_ptr_ptr_p, storage_ptr_ptr_p, num_nibbles, packed_nibbles, retdest) - %increment - SWAP1 - %increment - %endrep - // stack: i, child_ptr_ptr', account_ptr_ptr', storage_ptr_ptr', num_nibbles, packed_nibbles, retdest - POP - %stack (child_ptr_ptr, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest) -> - (retdest, account_ptr_ptr, storage_ptr_ptr) - JUMP - -set_payload_storage_branch: - // stack: node_type, child_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - POP - - // Child counter - PUSH 0 - // Call mpt_set_storage_payload on each child - %rep 16 - // The following 3-lines block is the inlined version of - // %stack (i, child_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles) -> - // (num_nibbles, packed_nibbles, 1, i, child_ptr_ptr, storage_ptr_ptr, i, num_nibbles, packed_nibbles, child_ptr_ptr) - SWAP1 SWAP4 SWAP3 - SWAP2 DUP5 DUP3 - PUSH 1 DUP7 DUP7 - - %merge_nibbles - // stack: num_merged_nibbles, merged_nibbles, child_ptr_ptr, storage_ptr_ptr, i, num_nibbles, packed_nibbles, child_ptr_ptr, retdest - %stack (num_merged_nibbles, merged_nibbles, child_ptr_ptr, storage_ptr_ptr) -> - (child_ptr_ptr, storage_ptr_ptr, num_merged_nibbles, merged_nibbles) - %mload_trie_data - // 
stack: child_ptr, storage_ptr_ptr, num_merged_nibbles, merged_nibbles, i, num_nibbles, packed_nibbles, child_ptr_ptr, retdest - %mpt_set_storage_payload - // stack: storage_ptr_ptr', i, num_nibbles, packed_nibbles, child_ptr_ptr, retdest - %stack (storage_ptr_ptr_p, i, num_nibbles, packed_nibbles, child_ptr_ptr) -> - (child_ptr_ptr, i, storage_ptr_ptr_p, num_nibbles, packed_nibbles) - %increment - SWAP1 - %increment - %endrep - // stack: i, child_ptr_ptr', storage_ptr_ptr', num_nibbles, packed_nibbles, retdest - %stack (i, child_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest) -> (retdest, storage_ptr_ptr) - JUMP - -set_payload_extension: - // stack: node_type, after_node_type, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - POP - // stack: after_node_type, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - DUP1 %mload_trie_data // num_nibbles - DUP2 %increment %mload_trie_data // nibbles - SWAP2 - %add_const(2) %mload_trie_data - // stack: child_ptr, loaded_num_nibbles, loaded_nibbles, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - - // The following 2-lines block is the inlined version of - // %stack (child_ptr, loaded_num_nibbles, loaded_nibbles, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles) -> - // (num_nibbles, packed_nibbles, loaded_num_nibbles, loaded_nibbles, child_ptr, account_ptr_ptr, storage_ptr_ptr) - SWAP4 SWAP6 SWAP1 - SWAP2 SWAP3 SWAP5 - - %merge_nibbles - // stack: merged_num_nibbles, merged_nibbles, child_ptr, account_ptr_ptr, storage_ptr_ptr, retdest - %stack (merged_num_nibbles, merged_nibbles, child_ptr, account_ptr_ptr, storage_ptr_ptr) -> - (child_ptr, account_ptr_ptr, storage_ptr_ptr, merged_num_nibbles, merged_nibbles) - %jump(mpt_set_payload) - -set_payload_storage_extension: - // stack: node_type, after_node_type, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - POP - // stack: after_node_type, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - DUP1 %mload_trie_data // num_nibbles - DUP2 %increment %mload_trie_data // nibbles +// Given a pointer `root_ptr` to the root of a trie, insert all the initial accounts in +// the accounts_linked_list starting at `account_ptr_ptr` as well as the +// respective initial storage slots in `storage_ptr_ptr`. +// Pre stack: account_ptr_ptr, root_ptr, storage_ptr_ptr, retdest +// Post stack: new_root_ptr. 
// The value of new_root_ptr shouldn't change +global insert_all_initial_accounts: + // stack: account_ptr_ptr, root_ptr, storage_ptr_ptr, retdest SWAP2 - // stack: after_node_type, loaded_num_nibbles, loaded_packed_nibbles, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - %add_const(2) %mload_trie_data - // stack: child_ptr, loaded_num_nibbles, loaded_packed_nibbles, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - - // The following 2-lines block is the inlined version of - // %stack (child_ptr, loaded_num_nibbles, loaded_packed_nibbles, storage_ptr_ptr, num_nibbles, packed_nibbles) -> - // (num_nibbles, packed_nibbles, loaded_num_nibbles, loaded_packed_nibbles, child_ptr, storage_ptr_ptr) - SWAP1 SWAP2 SWAP3 - SWAP5 SWAP1 SWAP4 - - %merge_nibbles - %stack (merged_num_nibbles, merged_nibbles, child_ptr, storage_ptr_ptr) -> - (child_ptr, storage_ptr_ptr, merged_num_nibbles, merged_nibbles) - %jump(mpt_set_storage_payload) - -set_payload_leaf: - // stack: node_type, after_node_type, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - POP - DUP1 %increment %mload_trie_data - DUP2 %mload_trie_data - - // The following 2-lines block is the inlined version of - // %stack (loaded_num_nibbles, loaded_packed_nibbles, after_node_type, account_ptr_ptr, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest) -> - // (num_nibbles, packed_nibbles, loaded_num_nibbles, loaded_packed_nibbles, after_node_type, account_ptr_ptr, storage_ptr_ptr, retdest) - SWAP2 SWAP4 SWAP6 - SWAP1 SWAP3 SWAP5 - - %merge_nibbles - // stack: merged_len, merged_nibbles, after_node_type, account_ptr_ptr, storage_ptr_ptr, retdest - PUSH 64 %assert_eq - DUP3 %sub_const(2) MLOAD_GENERAL - // stack: addr_key, merged_nibbles, after_node_type, account_ptr_ptr, storage_ptr_ptr, retdest - %assert_eq - // stack: after_node_type, account_ptr_ptr, storage_ptr_ptr, retdest - %add_const(2) // The payload pointer starts at index 3, after num_nibbles and packed_nibbles. 
- DUP1 - // stack: payload_ptr_ptr, payload_ptr_ptr, account_ptr_ptr, storage_ptr_ptr, retdest - %mload_trie_data - // stack: account_ptr, payload_ptr_ptr, account_ptr_ptr, storage_ptr_ptr, retdest - %add_const(2) - %mload_trie_data // storage_root_ptr = account[2] - - // stack: storage_root_ptr, payload_ptr_ptr, account_ptr_ptr, storage_ptr_ptr, retdest - - // The following 4-lines block is the inlined version of - // %stack (storage_root_ptr, payload_ptr_ptr, account_ptr_ptr, storage_ptr_ptr) -> - // (storage_root_ptr, storage_ptr_ptr, 0, 0, after_set_storage_payload, storage_root_ptr, payload_ptr_ptr, account_ptr_ptr) - PUSH 0 PUSH 0 - DUP3 SWAP4 SWAP5 SWAP6 - PUSH after_set_storage_payload - SWAP4 - - %jump(mpt_set_storage_payload) -after_set_storage_payload: - // stack: storage_ptr_ptr', storage_root_ptr, payload_ptr_ptr, account_ptr_ptr, retdest + DUP3 + MLOAD_GENERAL + // stack: key, storage_ptr_ptr, root_ptr, account_ptr_ptr, retdest DUP4 - MLOAD_GENERAL // load the next payload pointer in the linked list - DUP1 %add_const(2) // new_storage_root_ptr_ptr = payload_ptr[2] - // stack: new_storage_root_ptr_ptr, new_payload_ptr, storage_root_ptr, storage_ptr_ptr', payload_ptr_ptr, account_ptr_ptr, retdest - // Load also the old "dynamic" payload for storing the storage_root_ptr - DUP6 %decrement + %mload_global_metadata(@GLOBAL_METADATA_INITIAL_ACCOUNTS_LINKED_LIST_LEN) + EQ + %jumpi(no_more_accounts) + // stack: key, storage_ptr_ptr, root_ptr, account_ptr_ptr, retdest + PUSH after_mpt_read + DUP2 + PUSH 64 + DUP6 + // stack: root_ptr, nibbles, key, after_mpt_read, key, storage_ptr_ptr, root_ptr, account_ptr_ptr, retdest + %jump(mpt_read) +after_mpt_read: + //stack: trie_account_ptr_ptr, key, storage_ptr_ptr, root_ptr, account_ptr_ptr, retdest + DUP1 + %mload_trie_data + %add_const(2) + %mload_trie_data + // stack: trie_storage_root, trie_account_ptr_ptr, key, storage_ptr_ptr, root_ptr, account_ptr_ptr, retdest + SWAP1 + // stack: trie_account_ptr_ptr, trie_storage_root, key, storage_ptr_ptr, root_ptr, account_ptr_ptr, retdest + DUP6 + %add_const(2) // intial account_ptr = account_ptr_ptr + 2 MLOAD_GENERAL - %add_const(2) // dyn_storage_root_ptr_ptr = dyn_paylod_ptr[2] + // stack: account_ptr, trie_account_ptr_ptr, trie_storage_root, key, storage_ptr_ptr, root_ptr, account_ptr_ptr, retdest + DUP1 SWAP2 + // stack: trie_account_ptr_ptr, account_ptr, account_ptr, trie_storage_root, key, storage_ptr_ptr, root_ptr, account_ptr_ptr, retdest + %mstore_trie_data // The trie's account points to the linked list initial account + // stack: account_ptr, trie_storage_root, key, storage_ptr_ptr, root_ptr, account_ptr_ptr, retdest + %add_const(2) + // stack: storage_root_ptr_ptr, trie_storage_root, key, storage_ptr_ptr, root_ptr, account_ptr_ptr, retdest - // The following 3-lines block is the inlined version of - // %stack (dyn_storage_root_ptr_ptr, new_storage_root_ptr_ptr, new_payload_ptr, storage_ptr_ptr_p, storage_root_ptr, payload_ptr_ptr, account_ptr_ptr) -> - // (new_storage_root_ptr_ptr, storage_root_ptr, dyn_storage_root_ptr_ptr, storage_root_ptr, payload_ptr_ptr, new_payload_ptr, account_ptr_ptr, storage_ptr_ptr_p) - DUP5 - SWAP3 SWAP5 SWAP1 SWAP4 - SWAP7 SWAP6 SWAP4 SWAP2 + %stack + (storage_root_ptr_ptr, trie_storage_root, key, storage_ptr_ptr) -> + (key, storage_ptr_ptr, trie_storage_root, after_insert_all_initial_slots, storage_root_ptr_ptr) + %jump(insert_all_initial_slots) - %mstore_trie_data // The initial account pointer in the linked list has no storage root so we need to manually 
set it. - %mstore_trie_data // The dynamic account pointer in the linked list has no storage root so we need to manually set it. - %mstore_trie_data // Set the leaf payload pointing to next account in the linked list. - // stack: account_ptr_ptr, storage_ptr_ptr', retdest - %add_const(@ACCOUNTS_LINKED_LISTS_NODE_SIZE) // The next pointer is at distance `ACCOUNTS_LINKED_LISTS_NODE_SIZE` - // stack: payload_ptr_ptr', storage_ptr_ptr', retdest - SWAP1 +after_insert_all_initial_slots: + // stack: storage_ptr_ptr', trie_storage_root_ptr', storage_root_ptr_ptr, root_ptr, account_ptr_ptr, retdest SWAP2 + %mstore_trie_data + // stack: storage_ptr_ptr', root_ptr, account_ptr_ptr, retdest + SWAP2 + %next_initial_account + // stack: account_ptr_ptr', root_ptr, storage_ptr_ptr', retdest + %jump(insert_all_initial_accounts) + +no_more_accounts: + // stack: key, storage_ptr_ptr, root_ptr, account_ptr_ptr, retdest + %stack (key, storage_ptr_ptr, root_ptr, account_ptr_ptr, retdest) ->(retdest, root_ptr) JUMP -set_payload_storage_leaf: - // stack: node_type, after_node_type, storage_ptr_ptr, num_nibbles, packed_nibbles, retdest - POP - DUP1 %increment %mload_trie_data - DUP2 %mload_trie_data - %stack (loaded_num_nibbles, loaded_nibbles, after_node_type, storage_ptr_ptr, num_nibbles, packed_nibbles) -> - (num_nibbles, packed_nibbles, loaded_num_nibbles, loaded_nibbles, after_node_type, storage_ptr_ptr) - %merge_nibbles - // stack: merged_num_nibbles, merged_nibbles, after_node_type, storage_ptr_ptr, retdest - PUSH 64 %assert_eq - // stack: merged_nibbles, after_node_type, storage_ptr_ptr, retdest - DUP3 %sub_const(2) MLOAD_GENERAL - // stack: slot_key, merged_nibbles, after_node_type, storage_ptr_ptr, retdest - %assert_eq - // stack: after_node_type, storage_ptr_ptr, retdest - %add_const(2) // The value pointer starts at index 3, after num_nibbles and packed_nibbles. - // stack: value_ptr_ptr, storage_ptr_ptr, retdest - DUP2 MLOAD_GENERAL - // stack: value, value_ptr_ptr, storage_ptr_ptr, retdest - // If value == 0, then value_ptr = 0, and we don't need to append the value to the `TrieData` segment. - DUP1 ISZERO %jumpi(set_payload_storage_leaf_end) - %get_trie_data_size - // stack: value_ptr, value, value_ptr_ptr, storage_ptr_ptr, retdest - SWAP1 - %append_to_trie_data -set_payload_storage_leaf_end: - // stack: value_ptr, value_ptr_ptr, storage_ptr_ptr, retdest +// Insert all slots before the account key changes +// Pre stack: addr, storage_ptr_ptr, root_ptr, retdest +// Post stack: storage_ptr_ptr', root_ptr' +global insert_all_initial_slots: + DUP2 + MLOAD_GENERAL + DUP2 + EQ // Check that the node address is the same as `addr` + DUP3 + %mload_global_metadata(@GLOBAL_METADATA_INITIAL_STORAGE_LINKED_LIST_LEN) + SUB + MUL + %jumpi(insert_next_slot) + // The addr has changed, meaning that we've inserted all slots for addr, + // or we reached the end of the initial storage linked list. + // stack: addr, storage_ptr_ptr, root_ptr, retdest + %stack (addr, storage_ptr_ptr, root_ptr, retdest) -> (retdest, storage_ptr_ptr, root_ptr) + JUMP +insert_next_slot: + // stack: addr, storage_ptr_ptr, root_ptr, retdest + DUP2 + %increment + MLOAD_GENERAL + // stack: key, addr, storage_ptr_ptr, root_ptr, retdest + DUP3 + %add_const(3) // inital value is at position 3 + MLOAD_GENERAL + // stack: value, key, addr, storage_ptr_ptr, root_ptr, retdest + // If the value is 0, then payload_ptr = 0, and we don't need to insert a value in the `TrieData` segment. 
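As a rough model of this traversal: consume consecutive entries of the sorted initial-storage list and wire each slot of the current address into its storage trie, represented here by a plain map from slot key to payload pointer. All names are illustrative:

    use std::collections::BTreeMap;

    fn insert_initial_slots(
        addr: u64,
        entries: &[(u64, u64, u64)], // (address key, slot key, initial value), sorted by address
        trie_data: &mut Vec<u64>,
        storage: &mut BTreeMap<u64, usize>, // slot key -> payload pointer (0 = null)
    ) -> usize {
        let mut i = 0;
        while i < entries.len() && entries[i].0 == addr {
            let (_, slot, value) = entries[i];
            // A zero value keeps a null payload pointer; nothing is appended
            // to the trie data for it.
            let payload_ptr = if value == 0 {
                0
            } else {
                trie_data.push(value);
                trie_data.len() - 1
            };
            storage.insert(slot, payload_ptr);
            i += 1;
        }
        i // index of the first entry that belongs to the next address
    }
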
+    DUP1 ISZERO %jumpi(insert_with_payload_ptr)
+    %get_trie_data_size // payload_ptr
     SWAP1
+    %append_to_trie_data // append the value to the trie data segment
+insert_with_payload_ptr:
+    %stack
+        (payload_ptr, key, addr, storage_ptr_ptr, root_ptr) ->
+        (root_ptr, 64, key, after_insert_slot, payload_ptr, storage_ptr_ptr, addr, root_ptr)
+    %jump(mpt_read)
+after_insert_slot:
+    // stack: slot_ptr_ptr, payload_ptr, storage_ptr_ptr, addr, root_ptr, retdest
     %mstore_trie_data
-    // stack: storage_ptr_ptr, retdest
-    %add_const(@STORAGE_LINKED_LISTS_NODE_SIZE) // The next pointer is at distance `STORAGE_LINKED_LISTS_NODE_SIZE`
-    // stack: storage_ptr_ptr', retdest
+    // stack: storage_ptr_ptr, addr, root_ptr, retdest
+    %next_initial_slot
+    // stack: storage_ptr_ptr', addr, root_ptr, retdest
     SWAP1
-    JUMP
+    %jump(insert_all_initial_slots)
+
diff --git a/evm_arithmetization/src/cpu/kernel/asm/mpt/linked_list/linked_list.asm b/evm_arithmetization/src/cpu/kernel/asm/mpt/linked_list/linked_list.asm
index 39e4604d3..650d69db2 100644
--- a/evm_arithmetization/src/cpu/kernel/asm/mpt/linked_list/linked_list.asm
+++ b/evm_arithmetization/src/cpu/kernel/asm/mpt/linked_list/linked_list.asm
@@ -38,6 +38,11 @@
 /// the accounts, storing a pointer to the copied account in the node.
 /// Computes the length of `SEGMENT_ACCOUNTS_LINKED_LIST` and
 /// stores it in `GLOBAL_METADATA_ACCOUNTS_LINKED_LIST_NEXT_AVAILABLE`.
+/// It also checks that the next node address is current address + 4
+/// and that all keys are strictly increasing.
+/// NOTE: It may be more efficient to check that the next node address != U256_MAX
+/// (i.e. node was not deleted) and ensure that no node with a repeated key
+/// is ever read.
 global store_initial_accounts:
     // stack: retdest
     PUSH @ACCOUNTS_LINKED_LISTS_NODE_SIZE
@@ -45,15 +50,24 @@ global store_initial_accounts:
     ADD
     // stack: cur_len, retdest
     PUSH @SEGMENT_ACCOUNTS_LINKED_LIST
+    // stack: current_node_ptr, cur_len, retdest
+    DUP1
+    MLOAD_GENERAL
+    // stack: current_addr_key, current_node_ptr, cur_len', retdest
+    %assert_eq_const(@U256_MAX)
+    DUP1 %next_account
+    // stack: next_node_ptr, current_node_ptr, cur_len', retdest
+    DUP1
+    SWAP2
+    %next_initial_account
+    %assert_eq(store_initial_accounts_end) // next_node_ptr == current_node_ptr + node_size
+    // stack: next_node_ptr, cur_len', retdest
+
 loop_store_initial_accounts:
     // stack: current_node_ptr, cur_len, retdest
     %get_trie_data_size
-    DUP2
-    MLOAD_GENERAL
-    // stack: current_addr_key, cpy_ptr, current_node_ptr, cur_len, retdest
-    %eq_const(@U256_MAX)
-    %jumpi(store_initial_accounts_end)
+    // stack: cpy_ptr, current_node_ptr, cur_len, retdest
     DUP2
     %increment
     MLOAD_GENERAL
@@ -84,13 +98,35 @@ loop_store_initial_accounts:
     SWAP1 PUSH @ACCOUNTS_LINKED_LISTS_NODE_SIZE ADD SWAP1
-    // stack: current_node_ptr, cur_len', retdest
+    // Check next node ptr validity and strict keys monotonicity
+    DUP1
+    MLOAD_GENERAL
+    // stack: current_addr_key, current_node_ptr, cur_len', retdest
+    SWAP1
+    DUP1 %next_account
+    // stack: next_node_ptr, current_node_ptr, current_addr_key, cur_len', retdest
+    DUP1
+    SWAP2
+    %next_initial_account
+    %assert_eq(store_initial_accounts_end_pop_key) // next_node_ptr == current_node_ptr + node_size
+    // stack: next_node_ptr, current_addr_key, cur_len', retdest
+    SWAP1
+    DUP2
+    MLOAD_GENERAL
+    %assert_gt // next_addr_key > current_addr_key
+    // stack: next_node_ptr, cur_len', retdest
     %jump(loop_store_initial_accounts)
+store_initial_accounts_end_pop_key:
+    // stack: next_node_ptr, current_addr_key, cur_len', retdest
+    SWAP1 POP
 store_initial_accounts_end:
-    %pop2
+    // stack: next_node_ptr, cur_len', retdest
+    %assert_eq_const(@SEGMENT_ACCOUNTS_LINKED_LIST)
     // stack: cur_len, retdest
+    DUP1
+    %mstore_global_metadata(@GLOBAL_METADATA_INITIAL_ACCOUNTS_LINKED_LIST_LEN)
     %mstore_global_metadata(@GLOBAL_METADATA_ACCOUNTS_LINKED_LIST_NEXT_AVAILABLE)
     JUMP
@@ -324,6 +360,11 @@ global remove_account:
 /// the accounts, storing a pointer to the copied account in the node.
 /// Computes the length of `SEGMENT_STORAGE_LINKED_LIST` and
 /// checks against `GLOBAL_METADATA_STORAGE_LINKED_LIST_NEXT_AVAILABLE`.
+/// It also checks that the next node address is current address + 5
+/// and that all keys are strictly increasing.
+/// NOTE: It may be more efficient to check that the next node address != U256_MAX
+/// (i.e. node was not deleted) and ensure that no node with a repeated key
+/// is ever read.
 global store_initial_slots:
     // stack: retdest
     PUSH @STORAGE_LINKED_LISTS_NODE_SIZE
@@ -331,15 +372,23 @@ global store_initial_slots:
     ADD
     // stack: cur_len, retdest
     PUSH @SEGMENT_STORAGE_LINKED_LIST
-    %next_slot
-
-loop_store_initial_slots:
-    // stack: current_node_ptr, cur_len, retdest
     DUP1
     MLOAD_GENERAL
     // stack: current_addr_key, current_node_ptr, cur_len, retdest
-    %eq_const(@U256_MAX)
-    %jumpi(store_initial_slots_end)
+    %assert_eq_const(@U256_MAX)
+
+    // stack: current_node_ptr, cur_len', retdest
+    DUP1
+    %next_slot
+    // stack: next_node_ptr, current_node_ptr, cur_len, retdest
+    DUP1
+    SWAP2
+    %next_initial_slot
+    %assert_eq(store_initial_slots_end) // next_node_ptr == current_node_ptr + node_size
+    // stack: next_node_ptr, cur_len', retdest
+
+loop_store_initial_slots:
+    // stack: current_node_ptr, cur_len, retdest
     DUP1
     %add_const(2)
     MLOAD_GENERAL
@@ -353,13 +402,65 @@
     SWAP1 PUSH @STORAGE_LINKED_LISTS_NODE_SIZE ADD SWAP1
-    // stack: current_node_ptr, cur_len', retdest
+    // Check correctness of next node ptr and strict key monotonicity.
+    DUP1
+    MLOAD_GENERAL
+    // stack: current_addr_key, current_node_ptr, cur_len', retdest
+    SWAP1
+    DUP1
+    %increment
+    MLOAD_GENERAL
+    // stack: current_slot_key, current_node_ptr, current_addr_key, cur_len', retdest
+    SWAP1
+    DUP1 %next_slot
+    // stack: next_node_ptr, current_node_ptr, current_slot_key, current_addr_key, cur_len', retdest
+    DUP1
+    SWAP2
+    %next_initial_slot
+    %assert_eq(store_initial_slots_end_pop_keys) // next_node_ptr == current_node_ptr + node_size
+    // stack: next_node_ptr, current_slot_key, current_addr_key, cur_len', retdest
+    DUP1
+    DUP1
+    %increment
+    MLOAD_GENERAL
+    // stack: next_node_slot_key, next_node_ptr, next_node_ptr, current_slot_key, current_addr_key, cur_len', retdest
+    SWAP1
+    MLOAD_GENERAL
+    // stack: next_node_addr_key, next_node_slot_key, next_node_ptr, current_slot_key, current_addr_key, cur_len', retdest
+    SWAP3
+    LT
+    // stack: current_slot_key > next_node_slot_key, next_node_ptr, next_node_addr_key, current_addr_key, cur_len', retdest
+    SWAP2
+    SWAP1
+    SWAP3
+    // stack: current_addr_key, next_node_addr_key, current_slot_key > next_node_slot_key, next_node_ptr, cur_len', retdest
+    DUP2
+    DUP2
+    EQ
+    // stack: current_addr_key == next_node_addr_key, current_addr_key, next_node_addr_key, current_slot_key > next_node_slot_key, next_node_ptr, cur_len', retdest
+    SWAP1
+    SWAP3
+    MUL // AND
+    // stack: current_slot_key > next_node_slot_key AND current_addr_key == next_node_addr_key, next_node_addr_key, current_addr_key, next_node_ptr, cur_len', retdest
+    SWAP2
+    LT
+    ADD // OR
+    %assert_nonzero
     %jump(loop_store_initial_slots)
+store_initial_slots_end_pop_keys:
+    // stack: next_node_ptr, current_slot_key, current_addr_key, cur_len', retdest
+    SWAP2
+    %pop2
+
 store_initial_slots_end:
-    POP
+    // stack: next_node_ptr, cur_len', retdest
+    %assert_eq_const(@SEGMENT_STORAGE_LINKED_LIST)
+
     // stack: cur_len, retdest
+    DUP1
+    %mstore_global_metadata(@GLOBAL_METADATA_INITIAL_STORAGE_LINKED_LIST_LEN)
     %mstore_global_metadata(@GLOBAL_METADATA_STORAGE_LINKED_LIST_NEXT_AVAILABLE)
     JUMP
@@ -894,6 +995,12 @@ remove_all_slots_end:
     %next_account
 %endmacro
 
+%macro first_initial_account
+    // stack: empty
+    PUSH @SEGMENT_ACCOUNTS_LINKED_LIST
+    %next_initial_account
+%endmacro
+
 %macro next_account
     // stack: node_ptr
     %add_const(@ACCOUNTS_NEXT_NODE_PTR)
@@ -901,15 +1008,49 @@ remove_all_slots_end:
     MLOAD_GENERAL
     // stack: next_node_ptr
 %endmacro
 
+%macro next_initial_account
+    // stack: node_ptr
+    %add_const(@ACCOUNTS_LINKED_LISTS_NODE_SIZE)
+    // stack: next_node_ptr
+%endmacro
+
 %macro first_slot
     // stack: empty
     PUSH @SEGMENT_STORAGE_LINKED_LIST
     %next_slot
 %endmacro
 
+%macro first_initial_slot
+    // stack: empty
+    PUSH @SEGMENT_STORAGE_LINKED_LIST
+    %next_initial_slot
+%endmacro
+
 %macro next_slot
     // stack: node_ptr
     %add_const(@STORAGE_NEXT_NODE_PTR)
     MLOAD_GENERAL
     // stack: next_node_ptr
 %endmacro
+
+%macro next_initial_slot
+    // stack: node_ptr
+    %add_const(@STORAGE_LINKED_LISTS_NODE_SIZE)
+    // stack: next_node_ptr
+%endmacro
+
+%macro next_hash_node
+    // stack: hash_node_ptr
+    %add_const(4)
+    // stack: next_hash_node_ptr
+%endmacro
+
+// Skip over the first three words (number of nibbles and keys)
+// and load the hash from memory.
+%macro get_hash
+    // stack: hash_node_ptr
+    %add_const(3)
+    // stack: next_ptr
+    MLOAD_GENERAL
+    // stack: hash
+%endmacro
diff --git a/evm_arithmetization/src/cpu/kernel/asm/mpt/read.asm b/evm_arithmetization/src/cpu/kernel/asm/mpt/read.asm
index 148a7897d..303377c04 100644
--- a/evm_arithmetization/src/cpu/kernel/asm/mpt/read.asm
+++ b/evm_arithmetization/src/cpu/kernel/asm/mpt/read.asm
@@ -24,7 +24,8 @@ global mpt_read_state_trie:
 // - the key, as a U256
 // - return destination
 //
-// This function returns a pointer to the value, or 0 if the key is not found.
+// This function returns a pointer to the value, or 0 if the key is not found. If the key
+// resolves to a leaf, it returns a pointer to the value pointer.
 global mpt_read:
     // stack: node_ptr, num_nibbles, key, retdest
     DUP1
@@ -145,7 +146,6 @@ global mpt_read_leaf_found:
     // stack: node_payload_ptr, retdest
     %add_const(2) // The value pointer is located after num_nibbles and the key.
     // stack: value_ptr_ptr, retdest
-    %mload_trie_data
-    // stack: value_ptr, retdest
     SWAP1
+    // For leaves, we return the pointer
     JUMP
diff --git a/evm_arithmetization/src/cpu/kernel/tests/account_code.rs b/evm_arithmetization/src/cpu/kernel/tests/account_code.rs
index ff4dba48e..b290b8403 100644
--- a/evm_arithmetization/src/cpu/kernel/tests/account_code.rs
+++ b/evm_arithmetization/src/cpu/kernel/tests/account_code.rs
@@ -197,36 +197,30 @@ pub(crate) fn prepare_interpreter(
         interpreter.stack()
     );
 
-    // Set initial tries.
+    // Now, set the payload.
     interpreter
         .push(0xDEADBEEFu32.into())
         .expect("The stack should not overflow");
     interpreter
-        .push(0.into()) // Initial nibbles
-        .expect("The stack should not overflow");
-    interpreter
-        .push(0.into()) // Initial number of nibbles
-        .expect("The stack should not overflow");
-    interpreter
-        .push((Segment::StorageLinkedList as usize + 8).into())
-        .expect("The stack should not overflow");
-    interpreter
-        .push((Segment::AccountsLinkedList as usize + 6).into())
+        .push((Segment::StorageLinkedList as usize + 5).into())
         .expect("The stack should not overflow");
     interpreter
         .push(interpreter.get_global_metadata_field(GlobalMetadata::StateTrieRoot))
+        .unwrap();
+    interpreter
+        .push((Segment::AccountsLinkedList as usize + 4).into())
         .expect("The stack should not overflow");
 
     // Now, set the payload.
     interpreter.generation_state.registers.program_counter =
-        KERNEL.global_labels["mpt_set_payload"];
+        KERNEL.global_labels["insert_all_initial_accounts"];
 
     interpreter.run()?;
-    let acc_ptr = interpreter.pop().expect("The stack should not be empty") - 2;
-    let storage_ptr = interpreter.pop().expect("The stack should not be empty") - 3;
-    interpreter.set_global_metadata_field(GlobalMetadata::InitialAccountsLinkedListLen, acc_ptr);
-    interpreter.set_global_metadata_field(GlobalMetadata::InitialStorageLinkedListLen, storage_ptr);
+    assert_eq!(interpreter.stack_len(), 1);
+
+    let state_root = interpreter.pop().expect("The stack should not be empty");
+    interpreter.set_global_metadata_field(GlobalMetadata::StateTrieRoot, state_root);
 
     // Now, execute `mpt_hash_state_trie`.
     state_trie.insert(k, rlp::encode(account).to_vec())?;
@@ -417,43 +411,30 @@ fn prepare_interpreter_all_accounts(
         KERNEL.global_labels["store_initial_slots"];
     interpreter.run()?;
 
-    // Set the pointers to the initial payloads.
+    // Now, set the payload.
     interpreter
         .push(0xDEADBEEFu32.into())
         .expect("The stack should not overflow");
     interpreter
-        .push(0.into()) // Initial nibbles
-        .expect("The stack should not overflow");
-    interpreter
-        .push(0.into()) // Initial number of nibbles
-        .expect("The stack should not overflow");
-    interpreter
-        .push((Segment::StorageLinkedList as usize + 8).into())
-        .expect("The stack should not overflow");
-    interpreter
-        .push((Segment::AccountsLinkedList as usize + 6).into())
+        .push((Segment::StorageLinkedList as usize + 5).into())
         .expect("The stack should not overflow");
     interpreter
        .push(interpreter.get_global_metadata_field(GlobalMetadata::StateTrieRoot))
+        .unwrap();
+    interpreter
+        .push((Segment::AccountsLinkedList as usize + 4).into())
         .expect("The stack should not overflow");
 
-    // Now, set the payloads in the state trie leaves.
+    // Now, set the payload.
     interpreter.generation_state.registers.program_counter =
-        KERNEL.global_labels["mpt_set_payload"];
+        KERNEL.global_labels["insert_all_initial_accounts"];
 
     interpreter.run()?;
-    assert_eq!(
-        interpreter.stack().len(),
-        2,
-        "Expected 2 items on stack after setting the initial trie payloads, found {:?}",
-        interpreter.stack()
-    );
+    assert_eq!(interpreter.stack_len(), 1);
 
-    let acc_ptr = interpreter.pop().expect("The stack should not be empty") - 2;
-    let storage_ptr = interpreter.pop().expect("The stack should not be empty") - 3;
-    interpreter.set_global_metadata_field(GlobalMetadata::InitialAccountsLinkedListLen, acc_ptr);
-    interpreter.set_global_metadata_field(GlobalMetadata::InitialStorageLinkedListLen, storage_ptr);
+    let state_root = interpreter.pop().expect("The stack should not be empty");
+    interpreter.set_global_metadata_field(GlobalMetadata::StateTrieRoot, state_root);
 
     // Switch context and initialize memory with the data we need for the tests.
     interpreter.generation_state.registers.program_counter = 0;
diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/delete.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/delete.rs
index ca3de5163..72edba94f 100644
--- a/evm_arithmetization/src/cpu/kernel/tests/mpt/delete.rs
+++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/delete.rs
@@ -121,33 +121,25 @@ fn test_state_trie(
         .push(0xDEADBEEFu32.into())
         .expect("The stack should not overflow");
     interpreter
-        .push(0.into()) // Initial nibbles
-        .expect("The stack should not overflow");
-    interpreter
-        .push(0.into()) // Initial number of nibbles
-        .expect("The stack should not overflow");
-    interpreter
-        .push((Segment::StorageLinkedList as usize + 8).into())
-        .expect("The stack should not overflow");
-    interpreter
-        .push((Segment::AccountsLinkedList as usize + 6).into())
+        .push((Segment::StorageLinkedList as usize + 5).into())
         .expect("The stack should not overflow");
     interpreter
         .push(interpreter.get_global_metadata_field(GlobalMetadata::StateTrieRoot))
         .unwrap();
+    interpreter
+        .push((Segment::AccountsLinkedList as usize + 4).into())
+        .expect("The stack should not overflow");
 
     // Now, set the payload.
     interpreter.generation_state.registers.program_counter =
-        KERNEL.global_labels["mpt_set_payload"];
+        KERNEL.global_labels["insert_all_initial_accounts"];
 
     interpreter.run()?;
-    assert_eq!(interpreter.stack_len(), 2);
+    assert_eq!(interpreter.stack_len(), 1);
 
-    let acc_ptr = interpreter.pop().expect("The stack should not be empty") - 2;
-    let storage_ptr = interpreter.pop().expect("The stack should not be empty") - 3;
-    interpreter.set_global_metadata_field(GlobalMetadata::InitialAccountsLinkedListLen, acc_ptr);
-    interpreter.set_global_metadata_field(GlobalMetadata::InitialStorageLinkedListLen, storage_ptr);
+    let state_root = interpreter.pop().expect("The stack should not be empty");
+    interpreter.set_global_metadata_field(GlobalMetadata::StateTrieRoot, state_root);
 
     // Next, execute mpt_insert_state_trie.
     interpreter.generation_state.registers.program_counter = mpt_insert_state_trie;
diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/insert.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/insert.rs
index ccf576a4f..7428044d1 100644
--- a/evm_arithmetization/src/cpu/kernel/tests/mpt/insert.rs
+++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/insert.rs
@@ -190,36 +190,29 @@ fn test_state_trie(
     interpreter.generation_state.registers.program_counter = KERNEL.global_labels["store_initial"];
     interpreter.run()?;
 
-    // Set initial tries.
     interpreter
         .push(0xDEADBEEFu32.into())
         .expect("The stack should not overflow");
     interpreter
-        .push(0.into()) // Initial nibbles
-        .expect("The stack should not overflow");
-    interpreter
-        .push(0.into()) // Initial number of nibbles
-        .expect("The stack should not overflow");
-    interpreter
-        .push((Segment::StorageLinkedList as usize + 8).into())
-        .expect("The stack should not overflow");
-    interpreter
-        .push((Segment::AccountsLinkedList as usize + 6).into())
+        .push((Segment::StorageLinkedList as usize + 5).into())
         .expect("The stack should not overflow");
     interpreter
         .push(interpreter.get_global_metadata_field(GlobalMetadata::StateTrieRoot))
         .unwrap();
+    interpreter
+        .push((Segment::AccountsLinkedList as usize + 4).into())
+        .expect("The stack should not overflow");
 
     // Now, set the payload.
     interpreter.generation_state.registers.program_counter =
-        KERNEL.global_labels["mpt_set_payload"];
+        KERNEL.global_labels["insert_all_initial_accounts"];
 
     interpreter.run()?;
-    let acc_ptr = interpreter.pop().expect("The stack should not be empty") - 2;
-    let storage_ptr = interpreter.pop().expect("The stack should not be empty") - 3;
-    interpreter.set_global_metadata_field(GlobalMetadata::InitialAccountsLinkedListLen, acc_ptr);
-    interpreter.set_global_metadata_field(GlobalMetadata::InitialStorageLinkedListLen, storage_ptr);
+    assert_eq!(interpreter.stack_len(), 1);
+
+    let state_root = interpreter.pop().expect("The stack should not be empty");
+    interpreter.set_global_metadata_field(GlobalMetadata::StateTrieRoot, state_root);
 
     // Next, execute mpt_insert_state_trie.
     interpreter.generation_state.registers.program_counter = mpt_insert_state_trie;
@@ -267,10 +260,6 @@ fn test_state_trie(
     );
     interpreter.generation_state.registers.program_counter = check_state_trie;
-    interpreter
-        .halt_offsets
-        .push(KERNEL.global_labels["check_txn_trie"]);
-
     interpreter
         .push(interpreter.get_global_metadata_field(GlobalMetadata::TrieDataSize)) // Initial trie data segment size, unused.
         .expect("The stack should not overflow");
diff --git a/evm_arithmetization/src/cpu/kernel/tests/mpt/read.rs b/evm_arithmetization/src/cpu/kernel/tests/mpt/read.rs
index 571b45c38..8390c6a59 100644
--- a/evm_arithmetization/src/cpu/kernel/tests/mpt/read.rs
+++ b/evm_arithmetization/src/cpu/kernel/tests/mpt/read.rs
@@ -42,7 +42,9 @@ fn mpt_read() -> Result<()> {
     interpreter.run()?;
 
     assert_eq!(interpreter.stack().len(), 1);
-    let result_ptr = interpreter.stack()[0].as_usize();
+    // mpt_read returns a pointer to the account pointer
+    let result_ptr_ptr = interpreter.stack()[0].as_usize();
+    let result_ptr = interpreter.get_trie_data()[result_ptr_ptr..][..4][0].as_usize();
     let result = &interpreter.get_trie_data()[result_ptr..][..4];
     assert_eq!(result[0], test_account_1().nonce);
     assert_eq!(result[1], test_account_1().balance);
diff --git a/evm_arithmetization/src/generation/prover_input.rs b/evm_arithmetization/src/generation/prover_input.rs
index 16c1e5310..366b75a35 100644
--- a/evm_arithmetization/src/generation/prover_input.rs
+++ b/evm_arithmetization/src/generation/prover_input.rs
@@ -93,7 +93,7 @@ impl GenerationState {
     fn run_trie_ptr(&mut self, input_fn: &ProverInputFn) -> Result {
         let trie = input_fn.0[1].as_str();
         match trie {
-            "state" => self
+            "initial_state" => self
                 .trie_root_ptrs
                 .state_root_ptr
                 .map_or_else(
diff --git a/scripts/prove_stdio.sh b/scripts/prove_stdio.sh
index f54969930..7249dce92 100755
--- a/scripts/prove_stdio.sh
+++ b/scripts/prove_stdio.sh
@@ -53,8 +53,8 @@ if ! [[ $TEST_ONLY == "test_only" ]]; then
         # These sizes are configured specifically for block 19807080. Don't use this in other scenarios
         echo "Using specific circuit sizes for witness_b19807080.json"
         export ARITHMETIC_CIRCUIT_SIZE="16..18"
-        export BYTE_PACKING_CIRCUIT_SIZE="10..15"
-        export CPU_CIRCUIT_SIZE="16..20"
+        export BYTE_PACKING_CIRCUIT_SIZE="9..15"
+        export CPU_CIRCUIT_SIZE="15..20"
         export KECCAK_CIRCUIT_SIZE="12..18"
        export KECCAK_SPONGE_CIRCUIT_SIZE="8..14"
        export LOGIC_CIRCUIT_SIZE="8..17"
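
For readers following the new consistency checks above: `store_initial_accounts` and `store_initial_slots` now assert that the initial linked lists are laid out contiguously and that their keys are strictly increasing. The sketch below restates that key-ordering invariant in plain Rust; it is an illustration only (with `u64` stand-ins for the `U256` keys), not code from this patch.

    // Illustration only: the (address_key, slot_key) ordering that the new
    // store_initial_slots checks enforce, with u64 stand-ins for U256 keys.
    fn keys_strictly_increasing(nodes: &[(u64, u64)]) -> bool {
        nodes.windows(2).all(|pair| {
            let (cur_addr, cur_slot) = pair[0];
            let (next_addr, next_slot) = pair[1];
            // Either the address key grows, or it stays equal and the slot key grows.
            cur_addr < next_addr || (cur_addr == next_addr && cur_slot < next_slot)
        })
    }

    fn main() {
        assert!(keys_strictly_increasing(&[(1, 0), (1, 7), (2, 3)]));
        assert!(!keys_strictly_increasing(&[(1, 7), (1, 7), (2, 3)]));
    }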