
proof serialize
bytetang committed Aug 12, 2024
1 parent ea43897 commit fd26455
Showing 5 changed files with 13 additions and 8 deletions.
3 changes: 2 additions & 1 deletion plonky2/src/hash/hashing.rs
@@ -2,6 +2,7 @@
 #[cfg(not(feature = "std"))]
 use alloc::vec::Vec;
 use core::fmt::Debug;
+use serde::Serialize;
 
 use crate::field::extension::Extendable;
 use crate::field::types::Field;
@@ -60,7 +61,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
 
 /// Permutation that can be used in the sponge construction for an algebraic hash.
 pub trait PlonkyPermutation<T: Copy + Default>:
-    AsRef<[T]> + Copy + Debug + Default + Eq + Sync + Send
+    AsRef<[T]> + Copy + Debug + Default + Eq + Sync + Send + Serialize
 {
     const RATE: usize;
     const WIDTH: usize;
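With the widened bound, any `PlonkyPermutation` state can be handed directly to a serde serializer. As a minimal sketch (not part of this commit, and assuming `serde_json` is available as an extra dependency), a generic helper might look like:

use serde::Serialize;

/// Sketch only: dump any serializable permutation state (e.g. a type
/// implementing the `PlonkyPermutation` trait above) to a JSON string.
fn dump_permutation_state<P: Serialize>(perm: &P) -> serde_json::Result<String> {
    serde_json::to_string(perm)
}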
3 changes: 2 additions & 1 deletion plonky2/src/hash/keccak.rs
@@ -4,6 +4,7 @@ use core::mem::size_of;
 
 use itertools::Itertools;
 use keccak_hash::keccak;
+use serde::Serialize;
 
 use crate::hash::hash_types::{BytesHash, RichField};
 use crate::hash::hashing::PlonkyPermutation;
@@ -17,7 +18,7 @@ pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY;
 /// Keccak-256 pseudo-permutation (not necessarily one-to-one) used in the challenger.
 /// A state `input: [F; 12]` is sent to the field representation of `H(input) || H(H(input)) || H(H(H(input)))`
 /// where `H` is the Keccak-256 hash.
-#[derive(Copy, Clone, Default, Debug, PartialEq)]
+#[derive(Copy, Clone, Default, Debug, PartialEq, Serialize)]
 pub struct KeccakPermutation<F: RichField> {
     state: [F; SPONGE_WIDTH],
 }
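The doc comment above defines the challenger's Keccak pseudo-permutation as the field representation of `H(input) || H(H(input)) || H(H(H(input)))`. A byte-level sketch of that triple expansion (illustrative only, not part of this commit; the actual permutation additionally maps the bytes back into `SPONGE_WIDTH` field elements):

use keccak_hash::keccak;

/// Sketch only: the 3-fold Keccak-256 expansion described above, at the byte level.
fn keccak_triple(input: &[u8]) -> Vec<u8> {
    let h1 = keccak(input);
    let h2 = keccak(h1.as_bytes());
    let h3 = keccak(h2.as_bytes());
    [h1.as_bytes(), h2.as_bytes(), h3.as_bytes()].concat()
}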
5 changes: 3 additions & 2 deletions plonky2/src/hash/poseidon.rs
@@ -6,6 +6,7 @@ use alloc::{vec, vec::Vec};
 use core::fmt::Debug;
 
 use plonky2_field::packed::PackedField;
+use serde::Serialize;
 use unroll::unroll_for_loops;
 
 use crate::field::extension::{Extendable, FieldExtension};
@@ -801,7 +802,7 @@ pub trait Poseidon: PrimeField64 {
     }
 }
 
-#[derive(Copy, Clone, Default, Debug, PartialEq)]
+#[derive(Copy, Clone, Default, Debug, PartialEq, Serialize)]
 pub struct PoseidonPermutation<T> {
     state: [T; SPONGE_WIDTH],
 }
@@ -830,7 +831,7 @@ impl Permuter for Target {
     }
 }
 
-impl<T: Copy + Debug + Default + Eq + Permuter + Send + Sync> PlonkyPermutation<T>
+impl<T: Copy + Debug + Default + Eq + Permuter + Send + Sync + Serialize> PlonkyPermutation<T>
     for PoseidonPermutation<T>
 {
     const RATE: usize = SPONGE_RATE;
1 change: 1 addition & 0 deletions starky/Cargo.toml
@@ -29,6 +29,7 @@ num-bigint = { version = "0.4.3", default-features = false }
 plonky2 = { version = "0.2.2", path = "../plonky2", default-features = false }
 plonky2_maybe_rayon = { version = "0.2.0", path = "../maybe_rayon", default-features = false }
 plonky2_util = { version = "0.2.0", path = "../util", default-features = false }
+serde = { version = "1.0.204", features = ["derive"] }
 
 [dev-dependencies]
 env_logger = { version = "0.9.0", default-features = false }
9 changes: 5 additions & 4 deletions starky/src/proof.rs
@@ -21,12 +21,13 @@ use plonky2::iop::target::Target;
 use plonky2::plonk::config::{GenericConfig, Hasher};
 use plonky2::util::serialization::{Buffer, IoResult, Read, Write};
 use plonky2_maybe_rayon::*;
+use serde::{Deserialize, Serialize};
 
 use crate::config::StarkConfig;
 use crate::lookup::GrandProductChallengeSet;
 
 /// Merkle caps and openings that form the proof of a single STARK.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Serialize)]
 pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
     /// Merkle cap of LDEs of trace values.
     pub trace_cap: MerkleCap<F, C::Hasher>,
@@ -132,7 +133,7 @@ impl<const D: usize> StarkProofTarget<D> {
 }
 
 /// Merkle caps and openings that form the proof of a single STARK, along with its public inputs.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Serialize)]
 pub struct StarkProofWithPublicInputs<
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
@@ -184,7 +185,7 @@ pub struct CompressedStarkProofWithPublicInputs<
 
 /// A [`StarkProof`] along with metadata about the initial Fiat-Shamir state, which is used when
 /// creating a recursive wrapper proof around a STARK proof.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Serialize)]
 pub struct StarkProofWithMetadata<F, C, const D: usize>
 where
     F: RichField + Extendable<D>,
@@ -261,7 +262,7 @@ pub struct MultiProofChallenges<F: RichField + Extendable<D>, const D: usize, co
 }
 
 /// Purported values of each polynomial at the challenge point.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Serialize)]
 pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
     /// Openings of trace polynomials at `zeta`.
     pub local_values: Vec<F::Extension>,
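With `Serialize` derived on the proof types, a STARK proof together with its public inputs can be written out directly through serde. A minimal sketch (assuming `serde_json` as an extra dependency, which this commit does not add):

use plonky2::field::extension::Extendable;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::config::GenericConfig;
use serde::Serialize;
use starky::proof::StarkProofWithPublicInputs;

/// Sketch only: serialize a proof and its public inputs to a JSON string.
fn proof_to_json<F, C, const D: usize>(
    proof: &StarkProofWithPublicInputs<F, C, D>,
) -> serde_json::Result<String>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    StarkProofWithPublicInputs<F, C, D>: Serialize,
{
    serde_json::to_string(proof)
}

Note that only `Serialize` is derived here; round-tripping a proof back from JSON would also require `Deserialize`, which this commit imports in `proof.rs` but does not derive.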
