From 942266b75e3f57cd98e36291aca6251a62745705 Mon Sep 17 00:00:00 2001
From: "sm.wu"
Date: Tue, 7 Jan 2025 11:57:40 +0800
Subject: [PATCH] cleanup

---
 Cargo.lock                          |  18 --
 Cargo.toml                          |   2 -
 ff_ext/src/lib.rs                   |  22 +--
 poseidon/Cargo.toml                 |   4 -
 poseidon/benches/hashing.rs         | 256 ++++++++++++++--------------
 poseidon/src/digest.rs              |   6 +-
 poseidon/src/poseidon.rs            |   9 +-
 poseidon/src/poseidon_goldilocks.rs |  15 +-
 poseidon/src/poseidon_hash.rs       | 180 +++++++++----------
 transcript/Cargo.toml               |   2 -
 10 files changed, 235 insertions(+), 279 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index a3f5e2a4f..e1a9dbc8d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1364,18 +1364,6 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"

-[[package]]
-name = "p3-challenger"
-version = "0.1.0"
-source = "git+https://github.com/plonky3/plonky3#b0591e9b82d58d10f86359875b5d5fa96433b4cf"
-dependencies = [
- "p3-field",
- "p3-maybe-rayon",
- "p3-symmetric",
- "p3-util",
- "tracing",
-]
-
 [[package]]
 name = "p3-dft"
 version = "0.1.0"
@@ -1645,11 +1633,7 @@ dependencies = [
  "ark-std",
  "criterion",
  "ff",
- "ff_ext",
  "goldilocks",
- "p3-field",
- "p3-goldilocks",
- "p3-poseidon",
  "plonky2",
  "rand",
  "serde",
@@ -2446,12 +2430,10 @@ dependencies = [
  "ff",
  "ff_ext",
  "goldilocks",
- "p3-challenger",
  "p3-field",
  "p3-goldilocks",
  "p3-mds",
  "p3-poseidon",
- "p3-poseidon2",
  "p3-symmetric",
  "serde",
 ]
diff --git a/Cargo.toml b/Cargo.toml
index 5fef73980..9e686e612 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -39,8 +39,6 @@ plonky2 = "0.2"
 p3-field = { git = "https://github.com/plonky3/plonky3" }
 p3-goldilocks = { git = "https://github.com/plonky3/plonky3" }
 p3-poseidon = { git = "https://github.com/plonky3/plonky3" }
-p3-poseidon2 = { git = "https://github.com/Plonky3/Plonky3.git" }
-p3-challenger = { git = "https://github.com/plonky3/plonky3" }
 p3-mds = { git = "https://github.com/Plonky3/Plonky3.git" }
 p3-symmetric = { git = "https://github.com/Plonky3/Plonky3.git" }
 poseidon = { path = "./poseidon" }
diff --git a/ff_ext/src/lib.rs b/ff_ext/src/lib.rs
index e9ca39e0a..d8bba3c86 100644
--- a/ff_ext/src/lib.rs
+++ b/ff_ext/src/lib.rs
@@ -89,24 +89,7 @@ pub trait SmallField: Serialize + P3Field {
     fn to_noncanonical_u64(&self) -> u64;
 }

-pub trait ExtensionField: P3ExtensionField + FromUniformBytes
-// + FromUniformBytes<64>
-// + From
-// + Add
-// + Sub
-// + Mul
-// // + for<'a> Add<&'a Self::BaseField, Output = Self>
-// + for<'a> Sub<&'a Self::BaseField, Output = Self>
-// + for<'a> Mul<&'a Self::BaseField, Output = Self>
-// + AddAssign
-// + SubAssign
-// + MulAssign
-// + for<'a> AddAssign<&'a Self::BaseField>
-// + for<'a> SubAssign<&'a Self::BaseField>
-// + for<'a> MulAssign<&'a Self::BaseField>
-// + Ord
-// + std::hash::Hash
-{
+pub trait ExtensionField: P3ExtensionField + FromUniformBytes {
     const DEGREE: usize;

     type BaseField: SmallField + Ord + PrimeField + FromUniformBytes;
@@ -174,7 +157,6 @@ mod impl_goldilocks {
         fn from_bases(bases: &[Goldilocks]) -> Self {
             debug_assert_eq!(bases.len(), 2);
             Self::from_base_slice(bases)
-            // Self([bases[0], bases[1]])
         }

         fn as_bases(&self) -> &[Goldilocks] {
@@ -183,7 +165,6 @@ mod impl_goldilocks {

         /// Convert limbs into self
         fn from_limbs(limbs: &[Self::BaseField]) -> Self {
-            // Self([limbs[0], limbs[1]])
             Self::from_base_slice(&limbs[0..2])
         }

@@ -192,7 +173,6 @@ mod impl_goldilocks {
                 .iter()
                 .map(|v: &Self::BaseField| v.as_canonical_u64())
                 .collect()
-            // ::to_canonical_u64_vec(self)
         }
     }
 }
diff --git a/poseidon/Cargo.toml b/poseidon/Cargo.toml
index 69709a28f..eff0f50b7 100644
--- a/poseidon/Cargo.toml
+++ b/poseidon/Cargo.toml
@@ -15,10 +15,6 @@ ff.workspace = true
 goldilocks.workspace = true
 serde.workspace = true
 unroll = "0.1"
-ff_ext = { path = "../ff_ext" }
-p3-field.workspace = true
-p3-goldilocks.workspace = true
-p3-poseidon.workspace = true

 [dev-dependencies]
 ark-std.workspace = true
diff --git a/poseidon/benches/hashing.rs b/poseidon/benches/hashing.rs
index d352f7291..43a299ddc 100644
--- a/poseidon/benches/hashing.rs
+++ b/poseidon/benches/hashing.rs
@@ -1,128 +1,128 @@
-// use ark_std::test_rng;
-// use criterion::{BatchSize, Criterion, black_box, criterion_group, criterion_main};
-// use ff::Field;
-// use goldilocks::Goldilocks;
-// use plonky2::{
-//     field::{goldilocks_field::GoldilocksField, types::Sample},
-//     hash::{
-//         hash_types::HashOut,
-//         hashing::PlonkyPermutation,
-//         poseidon::{PoseidonHash as PlonkyPoseidonHash, PoseidonPermutation},
-//     },
-//     plonk::config::Hasher,
-// };
-// use poseidon::{digest::Digest, poseidon_hash::PoseidonHash};
-
-// fn random_plonky_2_goldy() -> GoldilocksField {
-//     GoldilocksField::rand()
-// }
-
-// fn random_ceno_goldy() -> Goldilocks {
-//     Goldilocks::random(&mut test_rng())
-// }
-
-// fn random_ceno_hash() -> Digest<Goldilocks> {
-//     Digest(
-//         vec![Goldilocks::random(&mut test_rng()); 4]
-//             .try_into()
-//             .unwrap(),
-//     )
-// }
-
-// fn plonky_hash_single(a: GoldilocksField) {
-//     let _result = black_box(PlonkyPoseidonHash::hash_or_noop(&[a]));
-// }
-
-// fn ceno_hash_single(a: Goldilocks) {
-//     let _result = black_box(PoseidonHash::hash_or_noop(&[a]));
-// }
-
-// fn plonky_hash_2_to_1(left: HashOut<GoldilocksField>, right: HashOut<GoldilocksField>) {
-//     let _result = black_box(PlonkyPoseidonHash::two_to_one(left, right));
-// }
-
-// fn ceno_hash_2_to_1(left: &Digest<Goldilocks>, right: &Digest<Goldilocks>) {
-//     let _result = black_box(PoseidonHash::two_to_one(left, right));
-// }
-
-// fn plonky_hash_many_to_1(values: &[GoldilocksField]) {
-//     let _result = black_box(PlonkyPoseidonHash::hash_or_noop(values));
-// }
-
-// fn ceno_hash_many_to_1(values: &[Goldilocks]) {
-//     let _result = black_box(PoseidonHash::hash_or_noop(values));
-// }
-
-// pub fn hashing_benchmark(c: &mut Criterion) {
-//     c.bench_function("plonky hash single", |bencher| {
-//         bencher.iter_batched(
-//             random_plonky_2_goldy,
-//             plonky_hash_single,
-//             BatchSize::SmallInput,
-//         )
-//     });
-
-//     c.bench_function("plonky hash 2 to 1", |bencher| {
-//         bencher.iter_batched(
-//             || {
-//                 (
-//                     HashOut::<GoldilocksField>::rand(),
-//                     HashOut::<GoldilocksField>::rand(),
-//                 )
-//             },
-//             |(left, right)| plonky_hash_2_to_1(left, right),
-//             BatchSize::SmallInput,
-//         )
-//     });
-
-//     c.bench_function("plonky hash 60 to 1", |bencher| {
-//         bencher.iter_batched(
-//             || GoldilocksField::rand_vec(60),
-//             |sixty_elems| plonky_hash_many_to_1(sixty_elems.as_slice()),
-//             BatchSize::SmallInput,
-//         )
-//     });
-
-//     c.bench_function("ceno hash single", |bencher| {
-//         bencher.iter_batched(random_ceno_goldy, ceno_hash_single, BatchSize::SmallInput)
-//     });
-
-//     c.bench_function("ceno hash 2 to 1", |bencher| {
-//         bencher.iter_batched(
-//             || (random_ceno_hash(), random_ceno_hash()),
-//             |(left, right)| ceno_hash_2_to_1(&left, &right),
-//             BatchSize::SmallInput,
-//         )
-//     });
-
-//     c.bench_function("ceno hash 60 to 1", |bencher| {
-//         bencher.iter_batched(
-//             || {
-//                 (0..60)
-//                     .map(|_| Goldilocks::random(&mut test_rng()))
-//                     .collect::<Vec<_>>()
-//             },
-//             |values| ceno_hash_many_to_1(values.as_slice()),
-//             BatchSize::SmallInput,
-//         )
-//     });
-// }
-
-// // bench permutation
-// pub fn permutation_benchmark(c: &mut Criterion) {
-//     let mut plonky_permutation = PoseidonPermutation::new(core::iter::repeat(GoldilocksField(0)));
-//     let mut ceno_permutation = poseidon::poseidon_permutation::PoseidonPermutation::new(
-//         core::iter::repeat(Goldilocks::ZERO),
-//     );
-
-//     c.bench_function("plonky permute", |bencher| {
-//         bencher.iter(|| plonky_permutation.permute())
-//     });
-
-//     c.bench_function("ceno permute", |bencher| {
-//         bencher.iter(|| ceno_permutation.permute())
-//     });
-// }
-
-// criterion_group!(benches, permutation_benchmark, hashing_benchmark);
-// criterion_main!(benches);
+use ark_std::test_rng;
+use criterion::{BatchSize, Criterion, black_box, criterion_group, criterion_main};
+use ff::Field;
+use goldilocks::Goldilocks;
+use plonky2::{
+    field::{goldilocks_field::GoldilocksField, types::Sample},
+    hash::{
+        hash_types::HashOut,
+        hashing::PlonkyPermutation,
+        poseidon::{PoseidonHash as PlonkyPoseidonHash, PoseidonPermutation},
+    },
+    plonk::config::Hasher,
+};
+use poseidon::{digest::Digest, poseidon_hash::PoseidonHash};
+
+fn random_plonky_2_goldy() -> GoldilocksField {
+    GoldilocksField::rand()
+}
+
+fn random_ceno_goldy() -> Goldilocks {
+    Goldilocks::random(&mut test_rng())
+}
+
+fn random_ceno_hash() -> Digest<Goldilocks> {
+    Digest(
+        vec![Goldilocks::random(&mut test_rng()); 4]
+            .try_into()
+            .unwrap(),
+    )
+}
+
+fn plonky_hash_single(a: GoldilocksField) {
+    let _result = black_box(PlonkyPoseidonHash::hash_or_noop(&[a]));
+}
+
+fn ceno_hash_single(a: Goldilocks) {
+    let _result = black_box(PoseidonHash::hash_or_noop(&[a]));
+}
+
+fn plonky_hash_2_to_1(left: HashOut<GoldilocksField>, right: HashOut<GoldilocksField>) {
+    let _result = black_box(PlonkyPoseidonHash::two_to_one(left, right));
+}
+
+fn ceno_hash_2_to_1(left: &Digest<Goldilocks>, right: &Digest<Goldilocks>) {
+    let _result = black_box(PoseidonHash::two_to_one(left, right));
+}
+
+fn plonky_hash_many_to_1(values: &[GoldilocksField]) {
+    let _result = black_box(PlonkyPoseidonHash::hash_or_noop(values));
+}
+
+fn ceno_hash_many_to_1(values: &[Goldilocks]) {
+    let _result = black_box(PoseidonHash::hash_or_noop(values));
+}
+
+pub fn hashing_benchmark(c: &mut Criterion) {
+    c.bench_function("plonky hash single", |bencher| {
+        bencher.iter_batched(
+            random_plonky_2_goldy,
+            plonky_hash_single,
+            BatchSize::SmallInput,
+        )
+    });
+
+    c.bench_function("plonky hash 2 to 1", |bencher| {
+        bencher.iter_batched(
+            || {
+                (
+                    HashOut::<GoldilocksField>::rand(),
+                    HashOut::<GoldilocksField>::rand(),
+                )
+            },
+            |(left, right)| plonky_hash_2_to_1(left, right),
+            BatchSize::SmallInput,
+        )
+    });
+
+    c.bench_function("plonky hash 60 to 1", |bencher| {
+        bencher.iter_batched(
+            || GoldilocksField::rand_vec(60),
+            |sixty_elems| plonky_hash_many_to_1(sixty_elems.as_slice()),
+            BatchSize::SmallInput,
+        )
+    });
+
+    c.bench_function("ceno hash single", |bencher| {
+        bencher.iter_batched(random_ceno_goldy, ceno_hash_single, BatchSize::SmallInput)
+    });
+
+    c.bench_function("ceno hash 2 to 1", |bencher| {
+        bencher.iter_batched(
+            || (random_ceno_hash(), random_ceno_hash()),
+            |(left, right)| ceno_hash_2_to_1(&left, &right),
+            BatchSize::SmallInput,
+        )
+    });
+
+    c.bench_function("ceno hash 60 to 1", |bencher| {
+        bencher.iter_batched(
+            || {
+                (0..60)
+                    .map(|_| Goldilocks::random(&mut test_rng()))
+                    .collect::<Vec<_>>()
+            },
+            |values| ceno_hash_many_to_1(values.as_slice()),
+            BatchSize::SmallInput,
+        )
+    });
+}
+
+// bench permutation
+pub fn permutation_benchmark(c: &mut Criterion) {
+    let mut plonky_permutation = PoseidonPermutation::new(core::iter::repeat(GoldilocksField(0)));
+    let mut ceno_permutation = poseidon::poseidon_permutation::PoseidonPermutation::new(
+        core::iter::repeat(Goldilocks::ZERO),
+    );
+
+    c.bench_function("plonky permute", |bencher| {
+        bencher.iter(|| plonky_permutation.permute())
+    });
+
+    c.bench_function("ceno permute", |bencher| {
+        bencher.iter(|| ceno_permutation.permute())
+    });
+}
+
+criterion_group!(benches, permutation_benchmark, hashing_benchmark);
+criterion_main!(benches);
diff --git a/poseidon/src/digest.rs b/poseidon/src/digest.rs
index a4175cccf..a487d676b 100644
--- a/poseidon/src/digest.rs
+++ b/poseidon/src/digest.rs
@@ -1,8 +1,8 @@
 use crate::constants::DIGEST_WIDTH;
-use ff_ext::SmallField;
-use serde::Serialize;
+use goldilocks::SmallField;
+use serde::{Deserialize, Serialize};

-#[derive(Clone, Debug, Default, Serialize, PartialEq, Eq)]
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
 pub struct Digest<F: SmallField>(pub [F; DIGEST_WIDTH]);

 impl<F: SmallField> TryFrom<Vec<F>> for Digest<F> {
diff --git a/poseidon/src/poseidon.rs b/poseidon/src/poseidon.rs
index f3736a4af..ed5d76d14 100644
--- a/poseidon/src/poseidon.rs
+++ b/poseidon/src/poseidon.rs
@@ -1,7 +1,7 @@
 use crate::constants::{
     ALL_ROUND_CONSTANTS, HALF_N_FULL_ROUNDS, N_PARTIAL_ROUNDS, N_ROUNDS, SPONGE_WIDTH,
 };
-use ff_ext::SmallField;
+use goldilocks::SmallField;
 use unroll::unroll_for_loops;

 pub trait Poseidon: AdaptedField {
@@ -247,6 +247,13 @@ pub trait AdaptedField: SmallField {

     fn multiply_accumulate(&self, x: Self, y: Self) -> Self;

+    /// Returns `n`. Assumes that `n` is already in canonical form, i.e. `n < Self::order()`.
+    // TODO: Should probably be unsafe.
+    fn from_canonical_u64(n: u64) -> Self {
+        debug_assert!(n < Self::ORDER);
+        Self::from(n)
+    }
+
     /// # Safety
     /// Equivalent to *self + Self::from_canonical_u64(rhs), but may be cheaper. The caller must
     /// ensure that 0 <= rhs < Self::ORDER. The function may return incorrect results if this
diff --git a/poseidon/src/poseidon_goldilocks.rs b/poseidon/src/poseidon_goldilocks.rs
index 7a3de236b..eaab6fcd0 100644
--- a/poseidon/src/poseidon_goldilocks.rs
+++ b/poseidon/src/poseidon_goldilocks.rs
@@ -2,9 +2,7 @@ use crate::{
     constants::N_PARTIAL_ROUNDS,
     poseidon::{AdaptedField, Poseidon},
 };
-use goldilocks::EPSILON;
-use p3_field::{FieldAlgebra, PrimeField64};
-use p3_goldilocks::Goldilocks;
+use goldilocks::{EPSILON, Goldilocks, SmallField};

 #[cfg(target_arch = "x86_64")]
 use std::hint::unreachable_unchecked;
@@ -216,7 +214,7 @@ impl Poseidon for Goldilocks {
 }

 impl AdaptedField for Goldilocks {
-    const ORDER: u64 = Goldilocks::ORDER_U64;
+    const ORDER: u64 = Goldilocks::MODULUS_U64;
     fn from_noncanonical_u96(n_lo: u64, n_hi: u32) -> Self {
         reduce96((n_lo, n_hi))
     }
@@ -228,10 +226,7 @@ impl AdaptedField for Goldilocks {

     fn multiply_accumulate(&self, x: Self, y: Self) -> Self {
         // u64 + u64 * u64 cannot overflow.
-        reduce128(
-            (self.as_canonical_u64() as u128)
-                + (x.as_canonical_u64() as u128) * (y.as_canonical_u64() as u128),
-        )
+        reduce128((self.0 as u128) + (x.0 as u128) * (y.0 as u128))
     }
 }

@@ -281,7 +276,7 @@ const unsafe fn add_no_canonicalize_trashing_input(x: u64, y: u64) -> u64 {
 fn reduce96((x_lo, x_hi): (u64, u32)) -> Goldilocks {
     let t1 = x_hi as u64 * EPSILON;
     let t2 = unsafe { add_no_canonicalize_trashing_input(x_lo, t1) };
-    Goldilocks::from_canonical_u64(t2)
+    Goldilocks(t2)
 }

 /// Reduces to a 64-bit value. The result might not be in canonical form; it could be in between the
@@ -299,7 +294,7 @@ fn reduce128(x: u128) -> Goldilocks {
     }
     let t1 = x_hi_lo * EPSILON;
     let t2 = unsafe { add_no_canonicalize_trashing_input(t0, t1) };
-    Goldilocks::from_canonical_u64(t2)
+    Goldilocks(t2)
 }

 #[inline]
diff --git a/poseidon/src/poseidon_hash.rs b/poseidon/src/poseidon_hash.rs
index 186f5d0e5..c4559248e 100644
--- a/poseidon/src/poseidon_hash.rs
+++ b/poseidon/src/poseidon_hash.rs
@@ -120,93 +120,93 @@ pub fn compress(x: &Digest, y: &Digest) -> Digest {
     Digest(perm.squeeze()[..DIGEST_WIDTH].try_into().unwrap())
 }

-// #[cfg(test)]
-// mod tests {
-//     use crate::{digest::Digest, poseidon_hash::PoseidonHash};
-//     use p3_goldilocks::Goldilocks;
-//     use plonky2::{
-//         field::{
-//             goldilocks_field::GoldilocksField,
-//             types::{PrimeField64, Sample},
-//         },
-//         hash::{hash_types::HashOut, poseidon::PoseidonHash as PlonkyPoseidonHash},
-//         plonk::config::{GenericHashOut, Hasher},
-//     };
-//     use rand::{Rng, thread_rng};
-
-//     type PlonkyFieldElements = Vec<GoldilocksField>;
-//     type CenoFieldElements = Vec<Goldilocks>;
-
-//     const N_ITERATIONS: usize = 100;
-
-//     fn ceno_goldy_from_plonky_goldy(values: &[GoldilocksField]) -> Vec<Goldilocks> {
-//         values
-//             .iter()
-//             .map(|value| Goldilocks(value.to_canonical_u64()))
-//             .collect()
-//     }
-
-//     fn test_vector_pair(n: usize) -> (PlonkyFieldElements, CenoFieldElements) {
-//         let plonky_elems = GoldilocksField::rand_vec(n);
-//         let ceno_elems = ceno_goldy_from_plonky_goldy(plonky_elems.as_slice());
-//         (plonky_elems, ceno_elems)
-//     }
-
-//     fn random_hash_pair() -> (HashOut<GoldilocksField>, Digest<Goldilocks>) {
-//         let plonky_random_hash = HashOut::<GoldilocksField>::rand();
-//         let ceno_equivalent_hash = Digest(
-//             ceno_goldy_from_plonky_goldy(plonky_random_hash.elements.as_slice())
-//                 .try_into()
-//                 .unwrap(),
-//         );
-//         (plonky_random_hash, ceno_equivalent_hash)
-//     }
-
-//     fn compare_hash_output(
-//         plonky_hash: HashOut<GoldilocksField>,
-//         ceno_hash: Digest<Goldilocks>,
-//     ) -> bool {
-//         let plonky_elems = plonky_hash.to_vec();
-//         let plonky_in_ceno_field = ceno_goldy_from_plonky_goldy(plonky_elems.as_slice());
-//         plonky_in_ceno_field == ceno_hash.elements()
-//     }
-
-//     #[test]
-//     fn compare_hash() {
-//         let mut rng = thread_rng();
-//         for _ in 0..N_ITERATIONS {
-//             let n = rng.gen_range(5..=100);
-//             let (plonky_elems, ceno_elems) = test_vector_pair(n);
-//             let plonky_out = PlonkyPoseidonHash::hash_or_noop(plonky_elems.as_slice());
-//             let ceno_out = PoseidonHash::hash_or_noop(ceno_elems.as_slice());
-//             let ceno_iter = PoseidonHash::hash_or_noop_iter(ceno_elems.iter());
-//             assert!(compare_hash_output(plonky_out, ceno_out));
-//             assert!(compare_hash_output(plonky_out, ceno_iter));
-//         }
-//     }
-
-//     #[test]
-//     fn compare_noop() {
-//         let mut rng = thread_rng();
-//         for _ in 0..N_ITERATIONS {
-//             let n = rng.gen_range(0..=4);
-//             let (plonky_elems, ceno_elems) = test_vector_pair(n);
-//             let plonky_out = PlonkyPoseidonHash::hash_or_noop(plonky_elems.as_slice());
-//             let ceno_out = PoseidonHash::hash_or_noop(ceno_elems.as_slice());
-//             let ceno_iter = PoseidonHash::hash_or_noop_iter(ceno_elems.iter());
-//             assert!(compare_hash_output(plonky_out, ceno_out));
-//             assert!(compare_hash_output(plonky_out, ceno_iter));
-//         }
-//     }
-
-//     #[test]
-//     fn compare_two_to_one() {
-//         for _ in 0..N_ITERATIONS {
-//             let (plonky_hash_a, ceno_hash_a) = random_hash_pair();
-//             let (plonky_hash_b, ceno_hash_b) = random_hash_pair();
-//             let plonky_combined = PlonkyPoseidonHash::two_to_one(plonky_hash_a, plonky_hash_b);
-//             let ceno_combined = PoseidonHash::two_to_one(&ceno_hash_a, &ceno_hash_b);
-//             assert!(compare_hash_output(plonky_combined, ceno_combined));
-//         }
-//     }
-// }
+#[cfg(test)]
+mod tests {
+    use crate::{digest::Digest, poseidon_hash::PoseidonHash};
+    use goldilocks::Goldilocks;
+    use plonky2::{
+        field::{
+            goldilocks_field::GoldilocksField,
+            types::{PrimeField64, Sample},
+        },
+        hash::{hash_types::HashOut, poseidon::PoseidonHash as PlonkyPoseidonHash},
+        plonk::config::{GenericHashOut, Hasher},
+    };
+    use rand::{Rng, thread_rng};
+
+    type PlonkyFieldElements = Vec<GoldilocksField>;
+    type CenoFieldElements = Vec<Goldilocks>;
+
+    const N_ITERATIONS: usize = 100;
+
+    fn ceno_goldy_from_plonky_goldy(values: &[GoldilocksField]) -> Vec<Goldilocks> {
+        values
+            .iter()
+            .map(|value| Goldilocks(value.to_canonical_u64()))
+            .collect()
+    }
+
+    fn test_vector_pair(n: usize) -> (PlonkyFieldElements, CenoFieldElements) {
+        let plonky_elems = GoldilocksField::rand_vec(n);
+        let ceno_elems = ceno_goldy_from_plonky_goldy(plonky_elems.as_slice());
+        (plonky_elems, ceno_elems)
+    }
+
+    fn random_hash_pair() -> (HashOut<GoldilocksField>, Digest<Goldilocks>) {
+        let plonky_random_hash = HashOut::<GoldilocksField>::rand();
+        let ceno_equivalent_hash = Digest(
+            ceno_goldy_from_plonky_goldy(plonky_random_hash.elements.as_slice())
+                .try_into()
+                .unwrap(),
+        );
+        (plonky_random_hash, ceno_equivalent_hash)
+    }
+
+    fn compare_hash_output(
+        plonky_hash: HashOut<GoldilocksField>,
+        ceno_hash: Digest<Goldilocks>,
+    ) -> bool {
+        let plonky_elems = plonky_hash.to_vec();
+        let plonky_in_ceno_field = ceno_goldy_from_plonky_goldy(plonky_elems.as_slice());
+        plonky_in_ceno_field == ceno_hash.elements()
+    }
+
+    #[test]
+    fn compare_hash() {
+        let mut rng = thread_rng();
+        for _ in 0..N_ITERATIONS {
+            let n = rng.gen_range(5..=100);
+            let (plonky_elems, ceno_elems) = test_vector_pair(n);
+            let plonky_out = PlonkyPoseidonHash::hash_or_noop(plonky_elems.as_slice());
+            let ceno_out = PoseidonHash::hash_or_noop(ceno_elems.as_slice());
+            let ceno_iter = PoseidonHash::hash_or_noop_iter(ceno_elems.iter());
+            assert!(compare_hash_output(plonky_out, ceno_out));
+            assert!(compare_hash_output(plonky_out, ceno_iter));
+        }
+    }
+
+    #[test]
+    fn compare_noop() {
+        let mut rng = thread_rng();
+        for _ in 0..N_ITERATIONS {
+            let n = rng.gen_range(0..=4);
+            let (plonky_elems, ceno_elems) = test_vector_pair(n);
+            let plonky_out = PlonkyPoseidonHash::hash_or_noop(plonky_elems.as_slice());
+            let ceno_out = PoseidonHash::hash_or_noop(ceno_elems.as_slice());
+            let ceno_iter = PoseidonHash::hash_or_noop_iter(ceno_elems.iter());
+            assert!(compare_hash_output(plonky_out, ceno_out));
+            assert!(compare_hash_output(plonky_out, ceno_iter));
+        }
+    }
+
+    #[test]
+    fn compare_two_to_one() {
+        for _ in 0..N_ITERATIONS {
+            let (plonky_hash_a, ceno_hash_a) = random_hash_pair();
+            let (plonky_hash_b, ceno_hash_b) = random_hash_pair();
+            let plonky_combined = PlonkyPoseidonHash::two_to_one(plonky_hash_a, plonky_hash_b);
+            let ceno_combined = PoseidonHash::two_to_one(&ceno_hash_a, &ceno_hash_b);
+            assert!(compare_hash_output(plonky_combined, ceno_combined));
+        }
+    }
+}
diff --git a/transcript/Cargo.toml b/transcript/Cargo.toml
index acbdabe26..84ab7ed30 100644
--- a/transcript/Cargo.toml
+++ b/transcript/Cargo.toml
@@ -18,8 +18,6 @@ serde.workspace = true
 p3-field.workspace = true
 p3-goldilocks.workspace = true
 p3-poseidon.workspace = true
-p3-poseidon2.workspace = true
-p3-challenger.workspace = true
 p3-mds.workspace = true
 p3-symmetric.workspace = true