Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add independent block proof aggregation pipeline #656

Open
wants to merge 21 commits into
base: develop
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 17 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ jobs:
uses: actions/checkout@v3

- name: Run the script
run: ./scripts/prove_stdio.sh artifacts/witness_b19807080.json
run: ./scripts/prove_stdio.sh artifacts/witness_b19807080.json true

simple_proof_witness_only:
name: Execute bash script to generate the proof witness for a small block.
Expand All @@ -168,7 +168,7 @@ jobs:
uses: actions/checkout@v3

- name: Run the script
run: ./scripts/prove_stdio.sh artifacts/witness_b19807080.json test_only
run: ./scripts/prove_stdio.sh artifacts/witness_b19807080.json false test_only

multi_blocks_proof_regular:
name: Execute bash script to generate and verify a proof for multiple blocks using parallel proving.
Expand All @@ -179,7 +179,7 @@ jobs:
uses: actions/checkout@v3

- name: Run the script
run: ./scripts/prove_stdio.sh artifacts/witness_b3_b6.json
run: ./scripts/prove_stdio.sh artifacts/witness_b3_b6.json true

lints:
name: Rustdoc, Formatting and Clippy
Expand Down
94 changes: 90 additions & 4 deletions evm_arithmetization/src/fixed_recursive_verifier.rs
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,87 @@ where
pub by_table: [RecursiveCircuitsForTable<F, C, D>; NUM_TABLES],
}

// TODO(Robin): This should be refactored in two distinct states, chain-specific
// (i.e. current `AllRecursiveCircuits` up to block circuit), and cross-chain
// specific (the 2-to-1 aggregation piece).
// cf: https://github.com/0xPolygonZero/zk_evm/issues/622
pub struct AllVerifierData<F, C, const D: usize>
atanmarko marked this conversation as resolved.
Show resolved Hide resolved
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
C::Hasher: AlgebraicHasher<F>,
{
/// Verifier data used to verify block proofs.
block_data: VerifierCircuitData<F, C, D>,
/// Verifier data used to verify wrapped block proofs.
wrapped_block_data: VerifierCircuitData<F, C, D>,
/// Verifier data used to verify aggregated block proofs.
aggregated_block_data: VerifierCircuitData<F, C, D>,
}

impl<F, C, const D: usize> AllVerifierData<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
C::Hasher: AlgebraicHasher<F>,
{
pub fn verify_block(&self, proof: ProofWithPublicInputs<F, C, D>) -> anyhow::Result<()> {
// Verifier data verification
check_cyclic_proof_verifier_data(
&proof,
&self.block_data.verifier_only,
&self.block_data.common,
)?;

// Proof verification
self.block_data.verify(proof)
}

pub fn verify_block_wrapper(
&self,
proof: ProofWithPublicInputs<F, C, D>,
) -> anyhow::Result<()> {
// Proof verification
self.wrapped_block_data.verify(proof)
}

pub fn verify_block_aggreg(&self, proof: ProofWithPublicInputs<F, C, D>) -> anyhow::Result<()> {
// Verifier data verification
check_cyclic_proof_verifier_data(
&proof,
&self.aggregated_block_data.verifier_only,
&self.aggregated_block_data.common,
)?;

// Proof verification
self.aggregated_block_data.verify(proof)
}

pub fn to_bytes(&self, gate_serializer: &dyn GateSerializer<F, D>) -> IoResult<Vec<u8>> {
let mut buffer = Vec::new();
buffer.write_verifier_circuit_data(&self.block_data, gate_serializer)?;
buffer.write_verifier_circuit_data(&self.wrapped_block_data, gate_serializer)?;
buffer.write_verifier_circuit_data(&self.aggregated_block_data, gate_serializer)?;
Ok(buffer)
}

pub fn from_bytes(
bytes: Vec<u8>,
gate_serializer: &dyn GateSerializer<F, D>,
) -> IoResult<Self> {
let mut buffer = Buffer::new(&bytes);
let block_data = buffer.read_verifier_circuit_data(gate_serializer)?;
let wrapped_block_data = buffer.read_verifier_circuit_data(gate_serializer)?;
let aggregated_block_data = buffer.read_verifier_circuit_data(gate_serializer)?;

Ok(Self {
block_data,
wrapped_block_data,
aggregated_block_data,
})
}
}

/// Data for the EVM root circuit, which is used to combine each STARK's shrunk
/// wrapper proof into a single proof.
#[derive(Eq, PartialEq, Debug)]
Expand Down Expand Up @@ -755,7 +836,8 @@ where
}
}

/// Outputs the `VerifierCircuitData` needed to verify any block proof
/// Outputs the `AllVerifierData` needed to verify any upper level proofs
/// (i.e. block, wrapped block, and block aggregation proofs)
/// generated by an honest prover.
/// While the [`AllRecursiveCircuits`] prover state can also verify proofs,
/// verifiers only need a fraction of the state to verify proofs. This
Expand All @@ -769,10 +851,14 @@ where
/// let verifier_state = prover_state.final_verifier_data();
///
/// // Verify a provided block proof
/// assert!(verifier_state.verify(&block_proof).is_ok());
/// assert!(verifier_state.verify_block(&block_proof).is_ok());
/// ```
pub fn final_verifier_data(&self) -> VerifierCircuitData<F, C, D> {
self.block.circuit.verifier_data()
pub fn final_verifier_data(&self) -> AllVerifierData<F, C, D> {
AllVerifierData {
block_data: self.block.circuit.verifier_data(),
wrapped_block_data: self.block_wrapper.circuit.verifier_data(),
aggregated_block_data: self.two_to_one_block.circuit.verifier_data(),
}
}

fn create_segment_circuit(
Expand Down
1 change: 1 addition & 0 deletions evm_arithmetization/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -218,6 +218,7 @@ pub mod util;

// Public definitions and re-exports
mod public_types;
pub use proof::FinalPublicValues;
pub use public_types::*;
pub use starky::config::StarkConfig;

Expand Down
95 changes: 83 additions & 12 deletions evm_arithmetization/src/proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ use starky::lookup::GrandProductChallengeSet;
use starky::proof::{MultiProof, StarkProofChallenges};

use crate::all_stark::NUM_TABLES;
use crate::util::{get_h160, get_h256, get_u256, h256_limbs, h2u};
use crate::util::{get_h160, get_h256, get_u256, h256_limbs, h2u, u256_to_u32};
use crate::witness::state::RegistersState;

/// The default cap height used for our zkEVM STARK proofs.
Expand Down Expand Up @@ -152,6 +152,18 @@ pub struct FinalPublicValues<F: RichField, H: Hasher<F>> {
}

impl<F: RichField, H: Hasher<F>> FinalPublicValues<F, H> {
/// Flattens these public values into field elements, in order:
/// chain ID, checkpoint state trie root, new state trie root,
/// checkpoint consolidated hash, new consolidated hash.
fn to_field_elements(&self) -> Vec<F> {
    let mut elements = Vec::with_capacity(FinalPublicValuesTarget::SIZE);

    let chain_id = u256_to_u32(self.chain_id).expect("Chain ID should fit in a u32");
    elements.push(chain_id);
    elements.extend(&h256_limbs(self.checkpoint_state_trie_root));
    elements.extend(&h256_limbs(self.new_state_trie_root));
    elements.extend(&self.checkpoint_consolidated_hash);
    elements.extend(&self.new_consolidated_hash);

    elements
}

/// Extracts final public values from the given public inputs of a proof.
/// Public values are always the first public inputs added to the circuit,
/// so we can start extracting at index 0.
Expand Down Expand Up @@ -293,6 +305,65 @@ impl FinalPublicValuesTarget {
}
}

/// Either a set of final public values, the hash of such values, or a
/// (possibly nested) sequence thereof.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(bound = "")]
pub enum HashOrPV<F: RichField, H: Hasher<F>> {
    /// Some `PublicValues` associated to a proof.
    Val(FinalPublicValues<F, H>),

    /// The hash of some `PublicValues`.
    Hash(H::Hash),

    /// An arbitrary sequence of `HashOrPV` values, useful for nested
    /// sequences.
    Sequence(Vec<HashOrPV<F, H>>),
}

impl<F: RichField, H: Hasher<F>> From<FinalPublicValues<F, H>> for HashOrPV<F, H> {
    /// Wraps final public values into the `Val` variant.
    fn from(values: FinalPublicValues<F, H>) -> Self {
        Self::Val(values)
    }
}

impl<F: RichField, H: Hasher<F>> HashOrPV<F, H> {
    /// Reduces this value to a single hash.
    ///
    /// - `Hash` variants return their inner hash unchanged.
    /// - `Val` variants hash their flattened field-element encoding.
    /// - `Sequence` variants hash each item recursively, then compress the
    ///   results left-to-right with `H::two_to_one` (a left fold).
    ///
    /// # Panics
    ///
    /// Panics if called on an empty `Sequence`.
    pub fn hash(&self) -> H::Hash {
        match self {
            // Do nothing and just extract the underlying value
            Self::Hash(h) => *h,

            // Flatten these public values into field elements and hash them
            Self::Val(pvs) => H::hash_no_pad(&pvs.to_field_elements()),

            // Hash each item recursively (avoiding the previous duplicated
            // per-variant logic and the `to_vec()` clone of nested
            // sequences), then fold left with two-to-one compression. A
            // single-element sequence naturally reduces to its own hash.
            Self::Sequence(seq) => {
                let (first, rest) = seq.split_first().expect("Sequence should not be empty");

                rest.iter()
                    .fold(first.hash(), |acc, item| H::two_to_one(acc, item.hash()))
            }
        }
    }
}

/// Trie hashes.
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct TrieRoots {
Expand Down Expand Up @@ -320,17 +391,6 @@ impl TrieRoots {
}
}

// There should be 256 previous hashes stored, so the default should also
// contain 256 values.
impl Default for BlockHashes {
fn default() -> Self {
Self {
prev_hashes: vec![H256::default(); 256],
cur_hash: H256::default(),
}
}
}

/// User-provided helper values to compute the `BLOCKHASH` opcode.
/// The proofs across consecutive blocks ensure that these values
/// are consistent (i.e. shifted by one to the left).
Expand All @@ -347,6 +407,17 @@ pub struct BlockHashes {
pub cur_hash: H256,
}

/// The chain is expected to carry 256 previous block hashes, so the
/// default value also holds 256 (zeroed) entries.
impl Default for BlockHashes {
    fn default() -> Self {
        let prev_hashes = vec![H256::default(); 256];

        Self {
            prev_hashes,
            cur_hash: H256::default(),
        }
    }
}

impl BlockHashes {
pub fn from_public_inputs<F: RichField>(pis: &[F]) -> Self {
assert!(pis.len() == BlockHashesTarget::SIZE);
Expand Down
5 changes: 4 additions & 1 deletion evm_arithmetization/src/public_types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ use crate::{generation::segments::SegmentError, GenerationSegmentData};

pub type Node = mpt_trie::partial_trie::Node<HashedPartialTrie>;
pub type BlockHeight = u64;
pub type ChainID = u64;

use plonky2::{
field::goldilocks_field::GoldilocksField, hash::poseidon::PoseidonHash,
Expand All @@ -31,6 +32,8 @@ pub type Hash = <Hasher as plonky2::plonk::config::Hasher<Field>>::Hash;
pub type ConsolidatedHash = [Field; NUM_HASH_OUT_ELTS];
pub use crate::proof::EMPTY_CONSOLIDATED_BLOCKHASH;

pub type HashOrPV = crate::proof::HashOrPV<Field, Hasher>;

/// A type alias for recursive proofs generated by the zkEVM.
pub type ProofWithPublicInputs =
plonky2::plonk::proof::ProofWithPublicInputs<Field, RecursionConfig, EXTENSION_DEGREE>;
Expand Down Expand Up @@ -67,4 +70,4 @@ pub type RecursiveCircuitsForTableSize =
/// [`VerifierData`] is much lighter, allowing anyone to verify block proofs,
/// regardless of the underlying hardware.
pub type VerifierData =
plonky2::plonk::circuit_data::VerifierCircuitData<Field, RecursionConfig, EXTENSION_DEGREE>;
crate::fixed_recursive_verifier::AllVerifierData<Field, RecursionConfig, EXTENSION_DEGREE>;
5 changes: 3 additions & 2 deletions scripts/prove_rpc.sh
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ NODE_RPC_TYPE=$4
IGNORE_PREVIOUS_PROOFS=$5
BACKOFF=${6:-0}
RETRIES=${7:-0}
TEST_ONLY=$8
Nashtare marked this conversation as resolved.
Show resolved Hide resolved

# Sometimes we need to override file logging, e.g. in the CI run
OUTPUT_TO_TERMINAL="${OUTPUT_TO_TERMINAL:-false}"
Expand Down Expand Up @@ -106,7 +107,7 @@ fi
# If we set test_only flag, we'll generate a dummy
# proof. This is useful for quickly testing decoding and all of the
# other non-proving code.
if [[ $8 == "test_only" ]]; then
if [[ $TEST_ONLY == "test_only" ]]; then
# test only run
echo "Proving blocks ${BLOCK_INTERVAL} in a test_only mode now... (Total: ${TOT_BLOCKS})"
command='cargo r --release --package zero --bin leader -- --test-only --runtime in-memory --load-strategy on-demand --proof-output-dir $PROOF_OUTPUT_DIR --block-batch-size $BLOCK_BATCH_SIZE rpc --rpc-type "$NODE_RPC_TYPE" --rpc-url "$NODE_RPC_URL" --block-interval $BLOCK_INTERVAL $PREV_PROOF_EXTRA_ARG --backoff "$BACKOFF" --max-retries "$RETRIES" '
Expand Down Expand Up @@ -160,7 +161,7 @@ if [ "$RUN_VERIFICATION" = true ]; then

proof_file_name=$PROOF_OUTPUT_DIR/b$END_BLOCK.zkproof
echo "Verifying the proof of the latest block in the interval:" $proof_file_name
cargo r --release --package zero --bin verifier -- -f $proof_file_name > $PROOF_OUTPUT_DIR/verify.out 2>&1
cargo r --release --package zero --bin verifier -- -f $proof_file_name block > $PROOF_OUTPUT_DIR/verify.out 2>&1

if grep -q 'All proofs verified successfully!' $PROOF_OUTPUT_DIR/verify.out; then
echo "$proof_file_name verified successfully!";
Expand Down
32 changes: 27 additions & 5 deletions scripts/prove_stdio.sh
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@ set -exo pipefail

# Args:
# 1 --> Input witness json file
# 2 --> Test run only flag `test_only` (optional)
# 2 --> Wrapping flag for the final block proof (boolean)
# 3 --> Test run only flag `test_only` (optional)

# We're going to set the parallelism in line with the total cpu count
if [[ "$OSTYPE" == "darwin"* ]]; then
Expand Down Expand Up @@ -40,7 +41,8 @@ export RUST_LOG=info
export RUSTFLAGS='-C target-cpu=native -Zlinker-features=-lld'

INPUT_FILE=$1
TEST_ONLY=$2
WRAP_PROOF=$2
TEST_ONLY=$3

if [[ $INPUT_FILE == "" ]]; then
echo "Please provide witness json input file, e.g. artifacts/witness_b19240705.json"
Expand Down Expand Up @@ -125,10 +127,30 @@ cat $PROOFS_FILE_LIST | while read proof_file;
do
echo "Verifying proof file $proof_file"
verify_file=$PROOF_OUTPUT_DIR/verify_$(basename $proof_file).out
"${REPO_ROOT}/target/release/verifier" -f $proof_file | tee $verify_file
"${REPO_ROOT}/target/release/verifier" -f $proof_file block | tee $verify_file
if grep -q 'All proofs verified successfully!' $verify_file; then
echo "Proof verification for file $proof_file successful";
rm $verify_file # we keep the generated proof for potential reuse
echo "Proof verification for file $proof_file successful";
rm $verify_file # we keep the generated proof for potential reuse

if $WRAP_PROOF ; then
"${REPO_ROOT}/target/release/aggregator" --runtime in-memory --load-strategy on-demand --wrap stdio < $proof_file &> $OUTPUT_LOG
Nashtare marked this conversation as resolved.
Show resolved Hide resolved
cat $OUTPUT_LOG | grep "Successfully wrote to disk proof file " | awk '{print $NF}' | tee $PROOFS_FILE_LIST
Nashtare marked this conversation as resolved.
Show resolved Hide resolved
if [ ! -s "$PROOFS_FILE_LIST" ]; then
echo "Proof list not generated, some error happened. For more details check the log file $OUTPUT_LOG"
exit 1
fi

cat $PROOFS_FILE_LIST | while read proof_file;
do
echo "Verifying wrapped proof file $proof_file"
verify_file=$PROOF_OUTPUT_DIR/verify_$(basename $proof_file).out
"${REPO_ROOT}/target/release/verifier" -f $proof_file wrapped-block | tee $verify_file
if grep -q 'All proofs verified successfully!' $verify_file; then
echo "Wrapper proof verification for file $proof_file successful";
rm $verify_file # we keep the generated proof for potential reuse
fi
done
fi
else
echo "there was an issue with proof verification";
exit 1
Expand Down
Loading
Loading