create an empty payload and verify no keccak op
sai-deng committed Sep 26, 2024
1 parent ab2ec50 commit 34c9bad
Showing 8 changed files with 163 additions and 100 deletions.
5 changes: 3 additions & 2 deletions evm_arithmetization/src/arithmetic/mod.rs
@@ -1,5 +1,6 @@
use ethereum_types::U256;
use plonky2::field::types::PrimeField64;
+ use serde::{Deserialize, Serialize};

use self::columns::{
INPUT_REGISTER_0, INPUT_REGISTER_1, INPUT_REGISTER_2, OPCODE_COL, OUTPUT_REGISTER,
@@ -24,7 +25,7 @@ pub(crate) mod columns;
///
/// `Shl` and `Shr` are handled differently, by leveraging `Mul` and `Div`
/// respectively.
- #[derive(Clone, Copy, Debug, Eq, PartialEq)]
+ #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub(crate) enum BinaryOperator {
Add,
Mul,
@@ -114,7 +115,7 @@ impl BinaryOperator {

/// An enum representing different ternary operations.
#[allow(clippy::enum_variant_names)]
- #[derive(Clone, Copy, Debug, Eq, PartialEq)]
+ #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub(crate) enum TernaryOperator {
AddMod,
MulMod,
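
The new derives are what let these operator enums serve as `HashMap` keys and travel inside serialized segment data. A minimal sketch of the pattern, assuming `serde_json` and `anyhow` are available, with a hypothetical stand-in for the crate's enum:

    use std::collections::HashMap;

    use serde::{Deserialize, Serialize};

    // Hypothetical stand-in for `BinaryOperator`; the real enum is `pub(crate)`
    // and has many more variants.
    #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
    enum BinaryOperator {
        Add,
        Mul,
    }

    fn main() -> anyhow::Result<()> {
        // `Eq` + `Hash` make the enum usable as a HashMap key.
        let mut counts: HashMap<BinaryOperator, usize> = HashMap::new();
        *counts.entry(BinaryOperator::Add).or_insert(0) += 1;

        // The serde derives let the counts round-trip through a serialized form.
        let json = serde_json::to_string(&counts)?;
        let back: HashMap<BinaryOperator, usize> = serde_json::from_str(&json)?;
        assert_eq!(back, counts);
        Ok(())
    }
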
14 changes: 10 additions & 4 deletions evm_arithmetization/src/cpu/kernel/interpreter.rs
@@ -54,8 +54,7 @@ pub(crate) struct Interpreter<F: RichField> {
/// halt_context
pub(crate) halt_context: Option<usize>,
/// Counts the number of appearances of each opcode. For debugging purposes.
- #[allow(unused)]
- pub(crate) opcode_count: [usize; 0x100],
+ pub(crate) opcode_count: HashMap<Operation, usize>,
jumpdest_table: HashMap<usize, BTreeSet<usize>>,
/// `true` if we are currently carrying out a jumpdest analysis.
pub(crate) is_jumpdest_analysis: bool,
@@ -178,7 +177,7 @@ impl<F: RichField> Interpreter<F> {
// while the label `halt` is the halting label in the kernel.
halt_offsets: vec![DEFAULT_HALT_OFFSET, KERNEL.global_labels["halt_final"]],
halt_context: None,
- opcode_count: [0; 256],
+ opcode_count: HashMap::new(),
jumpdest_table: HashMap::new(),
is_jumpdest_analysis: false,
clock: 0,
@@ -209,7 +208,7 @@ impl<F: RichField> Interpreter<F> {
generation_state: state.soft_clone(),
halt_offsets: vec![halt_offset],
halt_context: Some(halt_context),
- opcode_count: [0; 256],
+ opcode_count: HashMap::new(),
jumpdest_table: HashMap::new(),
is_jumpdest_analysis: true,
clock: 0,
@@ -428,6 +427,10 @@ impl<F: RichField> Interpreter<F> {
self.max_cpu_len_log
}

+ pub(crate) fn reset_opcode_counts(&mut self) {
+ self.opcode_count = HashMap::new();
+ }

pub(crate) fn code(&self) -> &MemorySegmentState {
// The context is 0 if we are in kernel mode.
&self.generation_state.memory.contexts[(1 - self.is_kernel() as usize) * self.context()]
@@ -661,6 +664,9 @@ impl<F: RichField> State<F> for Interpreter<F> {

let op = decode(registers, opcode)?;

+ // Increment the opcode count
+ *self.opcode_count.entry(op).or_insert(0) += 1;

fill_op_flag(op, &mut row);

self.fill_stack_fields(&mut row)?;
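
Taken together, these changes replace the raw 256-slot array with a histogram keyed by the decoded `Operation`, reset before each segment run. A simplified, self-contained mirror of that bookkeeping; the `Op` and `OpcodeCounter` types below are hypothetical, not the crate's:

    use std::collections::HashMap;

    // Hypothetical stand-in for the crate's `Operation` enum.
    #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
    enum Op {
        KeccakGeneral,
        Push(u8),
    }

    #[derive(Default)]
    struct OpcodeCounter {
        counts: HashMap<Op, usize>,
    }

    impl OpcodeCounter {
        // Mirrors `*self.opcode_count.entry(op).or_insert(0) += 1;` above.
        fn record(&mut self, op: Op) {
            *self.counts.entry(op).or_insert(0) += 1;
        }

        // Mirrors `reset_opcode_counts`, called before each segment run.
        fn reset(&mut self) {
            self.counts = HashMap::new();
        }
    }

    fn main() {
        let mut counter = OpcodeCounter::default();
        counter.record(Op::Push(1));
        counter.record(Op::Push(1));
        counter.reset();
        counter.record(Op::KeccakGeneral);
        // After the reset, only the ops of the current run are counted.
        assert_eq!(counter.counts.get(&Op::KeccakGeneral), Some(&1));
        assert!(!counter.counts.contains_key(&Op::Push(1)));
    }
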
56 changes: 24 additions & 32 deletions evm_arithmetization/src/fixed_recursive_verifier.rs
@@ -3076,7 +3076,8 @@ mod tests {
use plonky2::timed;

use super::*;
- use crate::testing_utils::{dummy_payload, init_logger};
+ use crate::testing_utils::{empty_payload, init_logger};
+ use crate::witness::operation::Operation;

type F = GoldilocksField;
const D: usize = 2;
@@ -3091,56 +3092,47 @@
let all_stark = AllStark::<F, D>::default();
let config = StarkConfig::standard_fast_config();

+ // Generate a dummy payload for testing
+ let payload = empty_payload()?;
+ let max_cpu_len_log = Some(7);
+ let mut segment_iterator = SegmentDataIterator::<F>::new(&payload, max_cpu_len_log);
+ let (_, mut segment_data) = segment_iterator.next().unwrap()?;
+
+ let opcode_counts = &segment_data.opcode_counts;
+ assert!(!opcode_counts.contains_key(&Operation::KeccakGeneral));
+
+ // Process and prove segment
let all_circuits = timed!(
timing,
log::Level::Info,
"Create all recursive circuits",
AllRecursiveCircuits::<F, C, D>::new(
&all_stark,
- &[16..17, 8..9, 9..10, 4..9, 8..9, 4..7, 17..18, 17..18, 7..18],
+ &[16..17, 8..9, 7..8, 4..9, 8..9, 4..7, 17..18, 17..18, 17..18],
&config,
)
);

- // Generate a dummy payload for testing
- let dummy_payload = timed!(
+ let segment_proof = timed!(
timing,
log::Level::Info,
"Generate dummy payload",
dummy_payload(100, true)?
);

let max_cpu_len_log = 9;
let segment_iterator = SegmentDataIterator::<F>::new(&dummy_payload, Some(max_cpu_len_log));

let mut proofs_without_keccak = vec![];

for segment_run in segment_iterator {
// Process and prove segment
let (_, mut segment_data) = segment_run?;
let segment_proof = timed!(
"Prove segment",
all_circuits.prove_segment(
&all_stark,
&config,
payload.trim(),
&mut segment_data,
timing,
- log::Level::Info,
- "Prove segment",
- all_circuits.prove_segment(
- &all_stark,
- &config,
- dummy_payload.trim(),
- &mut segment_data,
- timing,
- None,
- )?
- );
-
- proofs_without_keccak.push(segment_proof);
- }
+ None,
+ )?
+ );

// Verify the generated segment proof
timed!(
timing,
log::Level::Info,
"Verify segment proof",
- all_circuits.verify_root(proofs_without_keccak[0].proof_with_pis.clone())?
+ all_circuits.verify_root(segment_proof.proof_with_pis.clone())?
);

// Print timing details
11 changes: 11 additions & 0 deletions evm_arithmetization/src/generation/segments.rs
@@ -1,6 +1,8 @@
//! Module defining the logic around proof segmentation into chunks,
//! which allows what is commonly known as zk-continuations.

+ use std::collections::HashMap;

use anyhow::Result;
use plonky2::hash::hash_types::RichField;
use serde::{Deserialize, Serialize};
@@ -11,6 +13,7 @@ use crate::cpu::kernel::interpreter::{set_registers_and_run, ExtraSegmentData, I
use crate::generation::state::State;
use crate::generation::{collect_debug_tries, debug_inputs, ErrorWithTries, GenerationInputs};
use crate::witness::memory::MemoryState;
+ use crate::witness::operation::Operation;
use crate::witness::state::RegistersState;

/// Structure holding the data needed to initialize a segment.
@@ -29,6 +32,8 @@ pub struct GenerationSegmentData {
pub(crate) extra_data: ExtraSegmentData,
/// Log of the maximal cpu length.
pub(crate) max_cpu_len_log: Option<usize>,
+ /// Counts the number of appearances of each opcode. For debugging purposes.
+ pub(crate) opcode_counts: HashMap<Operation, usize>,
}

impl GenerationSegmentData {
@@ -77,6 +82,7 @@ fn build_segment_data<F: RichField>(
accounts: interpreter.generation_state.accounts_pointers.clone(),
storage: interpreter.generation_state.storage_pointers.clone(),
},
+ opcode_counts: interpreter.opcode_count.clone(),
}
}

@@ -133,6 +139,9 @@ impl<F: RichField> SegmentDataIterator<F> {

let segment_index = segment_data.segment_index;

+ // Reset opcode counts before executing the segment
+ self.interpreter.reset_opcode_counts();

// Run the interpreter to get `registers_after` and the partial data for the
// next segment.
let execution_result =
@@ -147,6 +156,8 @@ impl<F: RichField> SegmentDataIterator<F> {
));

segment_data.registers_after = updated_registers;
+ segment_data.opcode_counts = self.interpreter.opcode_count.clone();

Ok(Some(Box::new((segment_data, partial_segment_data))))
} else {
let inputs = &self.interpreter.get_generation_state().inputs;
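
With the field above, each `GenerationSegmentData` carries the opcode histogram for exactly the instructions executed in that segment, which is presumably why the operation enums gain serde derives elsewhere in this commit. A crate-internal sketch of the consumption path, mirroring the test in `fixed_recursive_verifier.rs` above, inside a test returning `anyhow::Result<()>` with `F = GoldilocksField`:

    // Produce the first segment of the empty payload and inspect its counts.
    let payload = empty_payload()?;
    let mut segments = SegmentDataIterator::<F>::new(&payload, Some(7));
    let (_, segment_data) = segments.next().unwrap()?;

    // The histogram only covers this segment, thanks to `reset_opcode_counts`.
    assert!(!segment_data.opcode_counts.contains_key(&Operation::KeccakGeneral));
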
3 changes: 2 additions & 1 deletion evm_arithmetization/src/logic.rs
@@ -11,6 +11,7 @@ use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
+ use serde::{Deserialize, Serialize};
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::evaluation_frame::StarkEvaluationFrame;
use starky::lookup::{Column, Filter};
@@ -118,7 +119,7 @@ pub(crate) struct LogicStark<F, const D: usize> {
}

/// Logic operations.
- #[derive(Copy, Clone, Debug, Eq, PartialEq)]
+ #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub(crate) enum Op {
And,
Or,
69 changes: 23 additions & 46 deletions evm_arithmetization/src/testing_utils.rs
@@ -165,70 +165,47 @@ pub fn scalable_contract_from_storage(storage_trie: &HashedPartialTrie) -> Accou
}
}

- /// Get `GenerationInputs` for a dummy payload, where the block has the given
- /// timestamp.
- pub fn dummy_payload(timestamp: u64, is_first_payload: bool) -> Result<GenerationInputs> {
- let beneficiary = hex!("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef");
-
+ pub fn empty_payload() -> Result<GenerationInputs> {
+ // Set up default block metadata
let block_metadata = BlockMetadata {
- block_beneficiary: Address::from(beneficiary),
- block_timestamp: timestamp.into(),
- block_number: 1.into(),
- block_difficulty: 0x020000.into(),
- block_random: H256::from_uint(&0x020000.into()),
- block_gaslimit: 0xff112233u32.into(),
- block_chain_id: 1.into(),
- block_base_fee: 0xa.into(),
+ block_beneficiary: Address::zero(),
+ block_timestamp: U256::zero(),
+ block_number: U256::one(),
+ block_difficulty: U256::zero(),
+ block_random: H256::zero(),
+ block_gaslimit: U256::zero(),
+ block_chain_id: U256::one(),
+ block_base_fee: U256::zero(),
..Default::default()
};

- let (mut state_trie_before, mut storage_tries) = preinitialized_state_and_storage_tries()?;
+ // Initialize an empty state trie and storage tries
+ let state_trie_before = HashedPartialTrie::from(crate::Node::Empty);
+ let storage_tries = Vec::new();
let checkpoint_state_trie_root = state_trie_before.hash();
- let mut beacon_roots_account_storage = storage_tries[0].1.clone();

- update_beacon_roots_account_storage(
- &mut beacon_roots_account_storage,
- block_metadata.block_timestamp,
- block_metadata.parent_beacon_block_root,
- )?;
- let updated_beacon_roots_account =
- beacon_roots_contract_from_storage(&beacon_roots_account_storage);
-
- if !is_first_payload {
- // This isn't the first dummy payload being processed. We need to update the
- // initial state trie to account for the update on the beacon roots contract.
- state_trie_before.insert(
- beacon_roots_account_nibbles(),
- rlp::encode(&updated_beacon_roots_account).to_vec(),
- )?;
- storage_tries[0].1 = beacon_roots_account_storage;
- }
-
+ // Prepare the tries without any transactions or receipts
let tries_before = TrieInputs {
- state_trie: state_trie_before,
- storage_tries,
+ state_trie: state_trie_before.clone(),
+ storage_tries: storage_tries.clone(),
transactions_trie: HashedPartialTrie::from(crate::Node::Empty),
receipts_trie: HashedPartialTrie::from(crate::Node::Empty),
..Default::default()
};

- let expected_state_trie_after: HashedPartialTrie = {
- let mut state_trie_after = HashedPartialTrie::from(crate::Node::Empty);
- state_trie_after.insert(
- beacon_roots_account_nibbles(),
- rlp::encode(&updated_beacon_roots_account).to_vec(),
- )?;
-
- state_trie_after
- };
+ // The expected state trie after execution remains the same as before
+ let expected_state_trie_after = state_trie_before;

+ // Compute the trie roots after execution
let trie_roots_after = TrieRoots {
state_root: expected_state_trie_after.hash(),
transactions_root: tries_before.transactions_trie.hash(),
receipts_root: tries_before.receipts_trie.hash(),
};

+ // Construct the GenerationInputs without any transactions or state changes
let inputs = GenerationInputs {
- tries: tries_before.clone(),
- burn_addr: None,
+ tries: tries_before,
trie_roots_after,
checkpoint_state_trie_root,
block_metadata,
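
The new helper's invariant is easy to state: with no transactions and empty tries, the roots in `trie_roots_after` are simply the hashes of the untouched input tries. A sketch of a sanity check that could sit inside this crate's tests, in a function returning `anyhow::Result<()>` and using only field names visible in the diff above:

    let inputs = empty_payload()?;

    // No transactions are executed, so the "after" roots equal the "before" hashes.
    assert_eq!(inputs.trie_roots_after.state_root, inputs.tries.state_trie.hash());
    assert_eq!(
        inputs.trie_roots_after.transactions_root,
        inputs.tries.transactions_trie.hash()
    );
    assert_eq!(
        inputs.trie_roots_after.receipts_root,
        inputs.tries.receipts_trie.hash()
    );
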
3 changes: 2 additions & 1 deletion evm_arithmetization/src/witness/operation.rs
@@ -2,6 +2,7 @@ use ethereum_types::{BigEndianHash, U256};
use itertools::Itertools;
use keccak_hash::keccak;
use plonky2::hash::hash_types::RichField;
+ use serde::{Deserialize, Serialize};

use super::state::KERNEL_CONTEXT;
use super::transition::Transition;
@@ -29,7 +30,7 @@ use crate::witness::util::{
};
use crate::{arithmetic, logic};

- #[derive(Clone, Copy, Debug, Eq, PartialEq)]
+ #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub(crate) enum Operation {
Iszero,
Not,
