Skip to content

Commit

Permalink
feat: fix aggregation, add segmentation CI test
Browse files Browse the repository at this point in the history
  • Loading branch information
atanmarko committed Jul 25, 2024
1 parent 8060288 commit eaaf614
Show file tree
Hide file tree
Showing 7 changed files with 92 additions and 65 deletions.
21 changes: 18 additions & 3 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -176,6 +176,20 @@ jobs:
CARGO_INCREMENTAL: 1
RUST_BACKTRACE: 1

simple_proof_witness_only:
name: Execute bash script to generate the proof witness for a small block.
runs-on: zero-ci

steps:
- name: Checkout code
uses: actions/checkout@v3

- name: Run the script
run: |
pushd zero_bin/tools
./prove_stdio.sh artifacts/witness_b19240705.json test_only
simple_proof_regular:
name: Execute bash script to generate and verify a proof for a small block.
runs-on: zero-ci
Expand All @@ -189,8 +203,8 @@ jobs:
pushd zero_bin/tools
./prove_stdio.sh artifacts/witness_b19240705.json
simple_proof_witness_only:
name: Execute bash script to generate the proof witness for a small block.
simple_proof_using_continuations:
name: Execute bash script to generate and verify a proof for a small block utilizing custom batch and segment chunk size
runs-on: zero-ci

steps:
Expand All @@ -200,7 +214,8 @@ jobs:
- name: Run the script
run: |
pushd zero_bin/tools
./prove_stdio.sh artifacts/witness_b19240705.json test_only
time PROVER_BATCH_SIZE=5 PROVER_SEGMENT_CHUNK_SIZE=4 PROVER_MAX_CPU_LEN_LOG=17 ./prove_stdio.sh ./artifacts/witness_b19240705.json
multi_blocks_proof_regular:
name: Execute bash script to generate and verify a proof for multiple blocks using parallel proving.
Expand Down
8 changes: 4 additions & 4 deletions proof_gen/src/proof_gen.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@ use plonky2::{

use crate::{
proof_types::{
GeneratedBlockProof, GeneratedSegmentAggProof, GeneratedSegmentProof, GeneratedTxnAggProof,
SegmentAggregatableProof, TxnAggregatableProof,
BatchAggregatableProof, GeneratedBlockProof, GeneratedSegmentAggProof,
GeneratedSegmentProof, GeneratedTxnAggProof, SegmentAggregatableProof,
},
prover_state::ProverState,
types::{Field, PlonkyProofIntern, EXTENSION_DEGREE},
Expand Down Expand Up @@ -121,8 +121,8 @@ pub fn generate_segment_agg_proof(
/// Note that the child proofs may be either transaction or aggregation proofs.
pub fn generate_transaction_agg_proof(
p_state: &ProverState,
lhs_child: &TxnAggregatableProof,
rhs_child: &TxnAggregatableProof,
lhs_child: &BatchAggregatableProof,
rhs_child: &BatchAggregatableProof,
) -> ProofGenResult<GeneratedTxnAggProof> {
let (b_proof_intern, p_vals) = p_state
.state
Expand Down
32 changes: 16 additions & 16 deletions proof_gen/src/proof_types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ pub enum SegmentAggregatableProof {
/// we can combine it into an agg proof. For these cases, we want to abstract
/// away whether or not the proof was a txn or agg proof.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum TxnAggregatableProof {
pub enum BatchAggregatableProof {
/// The underlying proof is a segment proof. It first needs to be aggregated
/// with another segment proof, or a dummy one.
Segment(GeneratedSegmentProof),
Expand Down Expand Up @@ -100,28 +100,28 @@ impl SegmentAggregatableProof {
}
}

impl TxnAggregatableProof {
impl BatchAggregatableProof {
pub(crate) fn public_values(&self) -> PublicValues {
match self {
TxnAggregatableProof::Segment(info) => info.p_vals.clone(),
TxnAggregatableProof::Txn(info) => info.p_vals.clone(),
TxnAggregatableProof::Agg(info) => info.p_vals.clone(),
BatchAggregatableProof::Segment(info) => info.p_vals.clone(),
BatchAggregatableProof::Txn(info) => info.p_vals.clone(),
BatchAggregatableProof::Agg(info) => info.p_vals.clone(),
}
}

pub(crate) fn is_agg(&self) -> bool {
match self {
TxnAggregatableProof::Segment(_) => false,
TxnAggregatableProof::Txn(_) => false,
TxnAggregatableProof::Agg(_) => true,
BatchAggregatableProof::Segment(_) => false,
BatchAggregatableProof::Txn(_) => false,
BatchAggregatableProof::Agg(_) => true,
}
}

pub(crate) fn intern(&self) -> &PlonkyProofIntern {
match self {
TxnAggregatableProof::Segment(info) => &info.intern,
TxnAggregatableProof::Txn(info) => &info.intern,
TxnAggregatableProof::Agg(info) => &info.intern,
BatchAggregatableProof::Segment(info) => &info.intern,
BatchAggregatableProof::Txn(info) => &info.intern,
BatchAggregatableProof::Agg(info) => &info.intern,
}
}
}
Expand All @@ -138,23 +138,23 @@ impl From<GeneratedSegmentAggProof> for SegmentAggregatableProof {
}
}

impl From<GeneratedSegmentAggProof> for TxnAggregatableProof {
impl From<GeneratedSegmentAggProof> for BatchAggregatableProof {
fn from(v: GeneratedSegmentAggProof) -> Self {
Self::Txn(v)
}
}

impl From<GeneratedTxnAggProof> for TxnAggregatableProof {
impl From<GeneratedTxnAggProof> for BatchAggregatableProof {
fn from(v: GeneratedTxnAggProof) -> Self {
Self::Agg(v)
}
}

impl From<SegmentAggregatableProof> for TxnAggregatableProof {
impl From<SegmentAggregatableProof> for BatchAggregatableProof {
fn from(v: SegmentAggregatableProof) -> Self {
match v {
SegmentAggregatableProof::Agg(agg) => TxnAggregatableProof::Txn(agg),
SegmentAggregatableProof::Seg(seg) => TxnAggregatableProof::Segment(seg),
SegmentAggregatableProof::Agg(agg) => BatchAggregatableProof::Txn(agg),
SegmentAggregatableProof::Seg(seg) => BatchAggregatableProof::Segment(seg),
}
}
}
20 changes: 10 additions & 10 deletions zero_bin/ops/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ use paladin::{
use proof_gen::{
proof_gen::{generate_block_proof, generate_segment_agg_proof, generate_transaction_agg_proof},
proof_types::{
GeneratedBlockProof, GeneratedTxnAggProof, SegmentAggregatableProof, TxnAggregatableProof,
BatchAggregatableProof, GeneratedBlockProof, GeneratedTxnAggProof, SegmentAggregatableProof,
},
};
use serde::{Deserialize, Serialize};
Expand Down Expand Up @@ -234,23 +234,23 @@ impl Monoid for SegmentAggProof {
}

#[derive(Deserialize, Serialize, RemoteExecute)]
pub struct TxnAggProof {
pub struct BatchAggProof {
pub save_inputs_on_error: bool,
}
fn get_agg_proof_public_values(elem: TxnAggregatableProof) -> PublicValues {
fn get_agg_proof_public_values(elem: BatchAggregatableProof) -> PublicValues {
match elem {
TxnAggregatableProof::Segment(info) => info.p_vals,
TxnAggregatableProof::Txn(info) => info.p_vals,
TxnAggregatableProof::Agg(info) => info.p_vals,
BatchAggregatableProof::Segment(info) => info.p_vals,
BatchAggregatableProof::Txn(info) => info.p_vals,
BatchAggregatableProof::Agg(info) => info.p_vals,
}
}

impl Monoid for TxnAggProof {
type Elem = TxnAggregatableProof;
impl Monoid for BatchAggProof {
type Elem = BatchAggregatableProof;

fn combine(&self, a: Self::Elem, b: Self::Elem) -> Result<Self::Elem> {
let lhs = match a {
TxnAggregatableProof::Segment(segment) => TxnAggregatableProof::from(
BatchAggregatableProof::Segment(segment) => BatchAggregatableProof::from(
generate_segment_agg_proof(
p_state(),
&SegmentAggregatableProof::from(segment.clone()),
Expand All @@ -263,7 +263,7 @@ impl Monoid for TxnAggProof {
};

let rhs = match b {
TxnAggregatableProof::Segment(segment) => TxnAggregatableProof::from(
BatchAggregatableProof::Segment(segment) => BatchAggregatableProof::from(
generate_segment_agg_proof(
p_state(),
&SegmentAggregatableProof::from(segment.clone()),
Expand Down
42 changes: 21 additions & 21 deletions zero_bin/prover/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -77,21 +77,21 @@ impl BlockProverInput {
);

// Create segment proof
let seg_ops = ops::SegmentProof {
let seg_prove_ops = ops::SegmentProof {
save_inputs_on_error,
};

// Generate segment data.
let agg_ops = ops::SegmentAggProof {
let seg_agg_ops = ops::SegmentAggProof {
save_inputs_on_error,
};

// Aggregate transaction proofs
let txn_agg_proof = ops::TxnAggProof {
// Aggregate batch proofs to a single proof
let batch_agg_ops = ops::BatchAggProof {
save_inputs_on_error,
};

let mut all_block_txn_aggregatable_proofs = Vec::new();
let mut all_block_batch_proofs = Vec::new();
// Loop for all generation inputs in the block
for generation_inputs in block_generation_inputs {
let mut segment_data_iter = SegmentDataIterator {
Expand All @@ -102,42 +102,42 @@ impl BlockProverInput {
let mut chunk_segment_iter =
SegmentDataChunkIterator::new(&mut segment_data_iter, segment_chunk_size);

let mut chunk_txn_aggregatable_proofs = Vec::new();
let mut chunk_seg_aggregatable_proofs = Vec::new();
// We take one chunk of segments, perform proving and
// aggregate it to `TxnAggregatableProof`
// aggregate it into a single chunk proof
while let Some(chunk) = chunk_segment_iter.next() {
chunk_txn_aggregatable_proofs.push(
Directive::map(IndexedStream::from(chunk.into_iter()), &seg_ops)
.fold(&agg_ops)
chunk_seg_aggregatable_proofs.push(
Directive::map(IndexedStream::from(chunk.into_iter()), &seg_prove_ops)
.fold(&seg_agg_ops)
.run(runtime)
.map(move |e| e.map(proof_gen::proof_types::TxnAggregatableProof::from))
.await,
);
}

// Fold all the generation input transaction proofs
// into a single transaction proof
let generation_input_txn_proof = Directive::fold(
IndexedStream::from(chunk_txn_aggregatable_proofs.into_iter().collect::<Result<
// Fold all the generation input segment chunk proofs
// into a single batch proof
let batch_aggregated_proof = Directive::fold(
IndexedStream::from(chunk_seg_aggregatable_proofs.into_iter().collect::<Result<
Vec<_>,
anyhow::Error,
>>(
)?),
&txn_agg_proof,
&seg_agg_ops,
)
.run(runtime)
.map(move |e| e.map(proof_gen::proof_types::BatchAggregatableProof::from))
.await?;
all_block_txn_aggregatable_proofs.push(generation_input_txn_proof);
all_block_batch_proofs.push(batch_aggregated_proof);
}
// Fold all the agg transaction proofs into a single transaction proof
// Fold all the aggregated batch proofs into a single batch proof
let final_txn_proof = Directive::fold(
IndexedStream::from(all_block_txn_aggregatable_proofs.into_iter()),
&txn_agg_proof,
IndexedStream::from(all_block_batch_proofs.into_iter()),
&batch_agg_ops,
)
.run(runtime)
.await?;

if let proof_gen::proof_types::TxnAggregatableProof::Agg(proof) = final_txn_proof {
if let proof_gen::proof_types::BatchAggregatableProof::Agg(proof) = final_txn_proof {
let block_number = block_number
.to_u64()
.context("block number overflows u64")?;
Expand Down
8 changes: 7 additions & 1 deletion zero_bin/tools/prove_rpc.sh
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,12 @@ else
PROVER_SAVE_INPUTS_ON_ERROR=""
fi

if [ -n "$NUM_WORKERS" ]; then
SET_NUM_WORKERS="--num-workers $NUM_WORKERS"
else
SET_NUM_WORKERS=""
fi

mkdir -p $PROOF_OUTPUT_DIR

if [ $IGNORE_PREVIOUS_PROOFS ]; then
Expand Down Expand Up @@ -132,7 +138,7 @@ if [[ $8 == "test_only" ]]; then
else
# normal run
echo "Proving blocks ${BLOCK_INTERVAL} now... (Total: ${TOT_BLOCKS})"
command='cargo r --release --bin leader -- --runtime in-memory --load-strategy on-demand --batch-size $PROVER_BATCH_SIZE --max-cpu-len-log $PROVER_MAC_CPU_LEN_LOG --segment-chunk-size $PROVER_SEGMENT_CHUNK_SIZE $PROVER_SAVE_INPUTS_ON_ERROR rpc --rpc-type "$NODE_RPC_TYPE" --rpc-url "$3" --block-interval $BLOCK_INTERVAL --proof-output-dir $PROOF_OUTPUT_DIR $PREV_PROOF_EXTRA_ARG --backoff "$BACKOFF" --max-retries "$RETRIES" '
command='cargo r --release --bin leader -- --runtime in-memory --load-strategy on-demand --batch-size $PROVER_BATCH_SIZE --max-cpu-len-log $PROVER_MAC_CPU_LEN_LOG --segment-chunk-size $PROVER_SEGMENT_CHUNK_SIZE $SET_NUM_WORKERS $PROVER_SAVE_INPUTS_ON_ERROR rpc --rpc-type "$NODE_RPC_TYPE" --rpc-url "$3" --block-interval $BLOCK_INTERVAL --proof-output-dir $PROOF_OUTPUT_DIR $PREV_PROOF_EXTRA_ARG --backoff "$BACKOFF" --max-retries "$RETRIES" '
if [ "$OUTPUT_TO_TERMINAL" = true ]; then
eval $command
echo -e "Proof generation finished with result: $?"
Expand Down
26 changes: 16 additions & 10 deletions zero_bin/tools/prove_stdio.sh
Original file line number Diff line number Diff line change
Expand Up @@ -57,14 +57,14 @@ else
# These sizes are configured specifically for block 19240705. Don't use this in other scenarios.
echo "Using specific circuit sizes for witness_b19240705.json"
export ARITHMETIC_CIRCUIT_SIZE="16..19"
export BYTE_PACKING_CIRCUIT_SIZE="11..15"
export BYTE_PACKING_CIRCUIT_SIZE="10..15"
export CPU_CIRCUIT_SIZE="18..21"
export KECCAK_CIRCUIT_SIZE="14..18"
export KECCAK_SPONGE_CIRCUIT_SIZE="9..13"
export LOGIC_CIRCUIT_SIZE="12..17"
export MEMORY_CIRCUIT_SIZE="20..23"
export MEMORY_BEFORE_CIRCUIT_SIZE="16..17"
export MEMORY_AFTER_CIRCUIT_SIZE="7..8"
export KECCAK_CIRCUIT_SIZE="12..18"
export KECCAK_SPONGE_CIRCUIT_SIZE="7..13"
export LOGIC_CIRCUIT_SIZE="10..17"
export MEMORY_CIRCUIT_SIZE="19..23"
export MEMORY_BEFORE_CIRCUIT_SIZE="15..19"
export MEMORY_AFTER_CIRCUIT_SIZE="7..19"
elif [[ $INPUT_FILE == *"witness_b2_b7"* ]]; then
# These sizes are configured specifically for custom small blocks. Don't use this in other scenarios.
echo "Using specific circuit sizes for witness_b2_b7.json"
Expand Down Expand Up @@ -94,13 +94,19 @@ fi
# Prover config. Override the defaults if needed by setting the env variables.
PROVER_BATCH_SIZE="${PROVER_BATCH_SIZE:-1}"
PROVER_SEGMENT_CHUNK_SIZE="${PROVER_SEGMENT_CHUNK_SIZE:-64}"
PROVER_MAC_CPU_LEN_LOG="${PROVER_MAC_CPU_LEN_LOG:-20}"
PROVER_MAX_CPU_LEN_LOG="${PROVER_MAX_CPU_LEN_LOG:-20}"
if [[ $PROVER_SAVE_INPUTS_ON_ERROR == "true" ]]; then
PROVER_SAVE_INPUTS_ON_ERROR="--save-inputs-on-error"
else
PROVER_SAVE_INPUTS_ON_ERROR=""
fi

if [ -n "$NUM_WORKERS" ]; then
SET_NUM_WORKERS="--num-workers $NUM_WORKERS"
else
SET_NUM_WORKERS=""
fi



# If we run ./prove_stdio.sh <witness file name> test_only, we'll generate a dummy
Expand All @@ -121,8 +127,8 @@ cargo build --release --jobs "$num_procs"

start_time=$(date +%s%N)
"${TOOLS_DIR}/../../target/release/leader" --runtime in-memory --load-strategy on-demand --batch-size $PROVER_BATCH_SIZE \
--max-cpu-len-log $PROVER_MAC_CPU_LEN_LOG --segment-chunk-size $PROVER_SEGMENT_CHUNK_SIZE \
$PROVER_SAVE_INPUTS_ON_ERROR stdio < $INPUT_FILE | tee $LEADER_OUT_PATH
--max-cpu-len-log $PROVER_MAX_CPU_LEN_LOG --segment-chunk-size $PROVER_SEGMENT_CHUNK_SIZE \
$SET_NUM_WORKERS $PROVER_SAVE_INPUTS_ON_ERROR stdio < $INPUT_FILE | tee $LEADER_OUT_PATH
end_time=$(date +%s%N)

tail -n 1 $LEADER_OUT_PATH > $PROOFS_JSON_PATH
Expand Down

0 comments on commit eaaf614

Please sign in to comment.