Skip to content

Commit

Permalink
cleanup
Browse files Browse the repository at this point in the history
  • Loading branch information
hero78119 committed Jan 7, 2025
1 parent 7f7f262 commit 942266b
Show file tree
Hide file tree
Showing 10 changed files with 235 additions and 279 deletions.
18 changes: 0 additions & 18 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 0 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,6 @@ plonky2 = "0.2"
p3-field = { git = "https://github.com/plonky3/plonky3" }
p3-goldilocks = { git = "https://github.com/plonky3/plonky3" }
p3-poseidon = { git = "https://github.com/plonky3/plonky3" }
p3-poseidon2 = { git = "https://github.com/Plonky3/Plonky3.git" }
p3-challenger = { git = "https://github.com/plonky3/plonky3" }
p3-mds = { git = "https://github.com/Plonky3/Plonky3.git" }
p3-symmetric = { git = "https://github.com/Plonky3/Plonky3.git" }
poseidon = { path = "./poseidon" }
Expand Down
22 changes: 1 addition & 21 deletions ff_ext/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -89,24 +89,7 @@ pub trait SmallField: Serialize + P3Field {
fn to_noncanonical_u64(&self) -> u64;
}

pub trait ExtensionField: P3ExtensionField<Self::BaseField> + FromUniformBytes
// + FromUniformBytes<64>
// + From<Self::BaseField>
// + Add<Self::BaseField, Output = Self>
// + Sub<Self::BaseField, Output = Self>
// + Mul<Self::BaseField, Output = Self>
// // + for<'a> Add<&'a Self::BaseField, Output = Self>
// + for<'a> Sub<&'a Self::BaseField, Output = Self>
// + for<'a> Mul<&'a Self::BaseField, Output = Self>
// + AddAssign<Self::BaseField>
// + SubAssign<Self::BaseField>
// + MulAssign<Self::BaseField>
// + for<'a> AddAssign<&'a Self::BaseField>
// + for<'a> SubAssign<&'a Self::BaseField>
// + for<'a> MulAssign<&'a Self::BaseField>
// + Ord
// + std::hash::Hash
{
pub trait ExtensionField: P3ExtensionField<Self::BaseField> + FromUniformBytes {
const DEGREE: usize;

type BaseField: SmallField + Ord + PrimeField + FromUniformBytes;
Expand Down Expand Up @@ -174,7 +157,6 @@ mod impl_goldilocks {
fn from_bases(bases: &[Goldilocks]) -> Self {
debug_assert_eq!(bases.len(), 2);
Self::from_base_slice(bases)
// Self([bases[0], bases[1]])
}

fn as_bases(&self) -> &[Goldilocks] {
Expand All @@ -183,7 +165,6 @@ mod impl_goldilocks {

/// Convert limbs into self
fn from_limbs(limbs: &[Self::BaseField]) -> Self {
// Self([limbs[0], limbs[1]])
Self::from_base_slice(&limbs[0..2])
}

Expand All @@ -192,7 +173,6 @@ mod impl_goldilocks {
.iter()
.map(|v: &Self::BaseField| v.as_canonical_u64())
.collect()
// <GoldilocksExt2 as GoldilocksEF>::to_canonical_u64_vec(self)
}
}
}
4 changes: 0 additions & 4 deletions poseidon/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,6 @@ ff.workspace = true
goldilocks.workspace = true
serde.workspace = true
unroll = "0.1"
ff_ext = { path = "../ff_ext" }
p3-field.workspace = true
p3-goldilocks.workspace = true
p3-poseidon.workspace = true

[dev-dependencies]
ark-std.workspace = true
Expand Down
256 changes: 128 additions & 128 deletions poseidon/benches/hashing.rs
Original file line number Diff line number Diff line change
@@ -1,128 +1,128 @@
// use ark_std::test_rng;
// use criterion::{BatchSize, Criterion, black_box, criterion_group, criterion_main};
// use ff::Field;
// use goldilocks::Goldilocks;
// use plonky2::{
// field::{goldilocks_field::GoldilocksField, types::Sample},
// hash::{
// hash_types::HashOut,
// hashing::PlonkyPermutation,
// poseidon::{PoseidonHash as PlonkyPoseidonHash, PoseidonPermutation},
// },
// plonk::config::Hasher,
// };
// use poseidon::{digest::Digest, poseidon_hash::PoseidonHash};

// fn random_plonky_2_goldy() -> GoldilocksField {
// GoldilocksField::rand()
// }

// fn random_ceno_goldy() -> Goldilocks {
// Goldilocks::random(&mut test_rng())
// }

// fn random_ceno_hash() -> Digest<Goldilocks> {
// Digest(
// vec![Goldilocks::random(&mut test_rng()); 4]
// .try_into()
// .unwrap(),
// )
// }

// fn plonky_hash_single(a: GoldilocksField) {
// let _result = black_box(PlonkyPoseidonHash::hash_or_noop(&[a]));
// }

// fn ceno_hash_single(a: Goldilocks) {
// let _result = black_box(PoseidonHash::hash_or_noop(&[a]));
// }

// fn plonky_hash_2_to_1(left: HashOut<GoldilocksField>, right: HashOut<GoldilocksField>) {
// let _result = black_box(PlonkyPoseidonHash::two_to_one(left, right));
// }

// fn ceno_hash_2_to_1(left: &Digest<Goldilocks>, right: &Digest<Goldilocks>) {
// let _result = black_box(PoseidonHash::two_to_one(left, right));
// }

// fn plonky_hash_many_to_1(values: &[GoldilocksField]) {
// let _result = black_box(PlonkyPoseidonHash::hash_or_noop(values));
// }

// fn ceno_hash_many_to_1(values: &[Goldilocks]) {
// let _result = black_box(PoseidonHash::hash_or_noop(values));
// }

// pub fn hashing_benchmark(c: &mut Criterion) {
// c.bench_function("plonky hash single", |bencher| {
// bencher.iter_batched(
// random_plonky_2_goldy,
// plonky_hash_single,
// BatchSize::SmallInput,
// )
// });

// c.bench_function("plonky hash 2 to 1", |bencher| {
// bencher.iter_batched(
// || {
// (
// HashOut::<GoldilocksField>::rand(),
// HashOut::<GoldilocksField>::rand(),
// )
// },
// |(left, right)| plonky_hash_2_to_1(left, right),
// BatchSize::SmallInput,
// )
// });

// c.bench_function("plonky hash 60 to 1", |bencher| {
// bencher.iter_batched(
// || GoldilocksField::rand_vec(60),
// |sixty_elems| plonky_hash_many_to_1(sixty_elems.as_slice()),
// BatchSize::SmallInput,
// )
// });

// c.bench_function("ceno hash single", |bencher| {
// bencher.iter_batched(random_ceno_goldy, ceno_hash_single, BatchSize::SmallInput)
// });

// c.bench_function("ceno hash 2 to 1", |bencher| {
// bencher.iter_batched(
// || (random_ceno_hash(), random_ceno_hash()),
// |(left, right)| ceno_hash_2_to_1(&left, &right),
// BatchSize::SmallInput,
// )
// });

// c.bench_function("ceno hash 60 to 1", |bencher| {
// bencher.iter_batched(
// || {
// (0..60)
// .map(|_| Goldilocks::random(&mut test_rng()))
// .collect::<Vec<_>>()
// },
// |values| ceno_hash_many_to_1(values.as_slice()),
// BatchSize::SmallInput,
// )
// });
// }

// // bench permutation
// pub fn permutation_benchmark(c: &mut Criterion) {
// let mut plonky_permutation = PoseidonPermutation::new(core::iter::repeat(GoldilocksField(0)));
// let mut ceno_permutation = poseidon::poseidon_permutation::PoseidonPermutation::new(
// core::iter::repeat(Goldilocks::ZERO),
// );

// c.bench_function("plonky permute", |bencher| {
// bencher.iter(|| plonky_permutation.permute())
// });

// c.bench_function("ceno permute", |bencher| {
// bencher.iter(|| ceno_permutation.permute())
// });
// }

// criterion_group!(benches, permutation_benchmark, hashing_benchmark);
// criterion_main!(benches);
use ark_std::test_rng;
use criterion::{BatchSize, Criterion, black_box, criterion_group, criterion_main};
use ff::Field;
use goldilocks::Goldilocks;
use plonky2::{
field::{goldilocks_field::GoldilocksField, types::Sample},
hash::{
hash_types::HashOut,
hashing::PlonkyPermutation,
poseidon::{PoseidonHash as PlonkyPoseidonHash, PoseidonPermutation},
},
plonk::config::Hasher,
};
use poseidon::{digest::Digest, poseidon_hash::PoseidonHash};

/// Sample one uniformly random plonky2 Goldilocks field element.
fn random_plonky_2_goldy() -> GoldilocksField {
    GoldilocksField::rand()
}

/// Sample one random ceno Goldilocks element from the deterministic test RNG.
fn random_ceno_goldy() -> Goldilocks {
    let mut rng = test_rng();
    Goldilocks::random(&mut rng)
}

/// Build a random ceno `Digest` for benchmark input.
///
/// Bug fix: the previous body used `vec![Goldilocks::random(&mut test_rng()); 4]`,
/// which evaluates the sample expression ONCE and clones that single value into
/// all four limbs — the digest was not random per-limb. Sample each limb
/// independently instead.
fn random_ceno_hash() -> Digest<Goldilocks> {
    let mut rng = test_rng();
    Digest(std::array::from_fn(|_| Goldilocks::random(&mut rng)))
}

/// Hash a single plonky2 element; `black_box` keeps the call from being optimized out.
fn plonky_hash_single(a: GoldilocksField) {
    black_box(PlonkyPoseidonHash::hash_or_noop(&[a]));
}

/// Hash a single ceno element; `black_box` keeps the call from being optimized out.
fn ceno_hash_single(a: Goldilocks) {
    black_box(PoseidonHash::hash_or_noop(&[a]));
}

/// 2-to-1 Poseidon compression with the plonky2 implementation.
fn plonky_hash_2_to_1(left: HashOut<GoldilocksField>, right: HashOut<GoldilocksField>) {
    black_box(PlonkyPoseidonHash::two_to_one(left, right));
}

/// 2-to-1 Poseidon compression with the ceno implementation.
fn ceno_hash_2_to_1(left: &Digest<Goldilocks>, right: &Digest<Goldilocks>) {
    black_box(PoseidonHash::two_to_one(left, right));
}

/// Hash an arbitrary-length slice with the plonky2 implementation.
fn plonky_hash_many_to_1(values: &[GoldilocksField]) {
    black_box(PlonkyPoseidonHash::hash_or_noop(values));
}

/// Hash an arbitrary-length slice with the ceno implementation.
fn ceno_hash_many_to_1(values: &[Goldilocks]) {
    black_box(PoseidonHash::hash_or_noop(values));
}

pub fn hashing_benchmark(c: &mut Criterion) {
c.bench_function("plonky hash single", |bencher| {
bencher.iter_batched(
random_plonky_2_goldy,
plonky_hash_single,
BatchSize::SmallInput,
)
});

c.bench_function("plonky hash 2 to 1", |bencher| {
bencher.iter_batched(
|| {
(
HashOut::<GoldilocksField>::rand(),
HashOut::<GoldilocksField>::rand(),
)
},
|(left, right)| plonky_hash_2_to_1(left, right),
BatchSize::SmallInput,
)
});

c.bench_function("plonky hash 60 to 1", |bencher| {
bencher.iter_batched(
|| GoldilocksField::rand_vec(60),
|sixty_elems| plonky_hash_many_to_1(sixty_elems.as_slice()),
BatchSize::SmallInput,
)
});

c.bench_function("ceno hash single", |bencher| {
bencher.iter_batched(random_ceno_goldy, ceno_hash_single, BatchSize::SmallInput)
});

c.bench_function("ceno hash 2 to 1", |bencher| {
bencher.iter_batched(
|| (random_ceno_hash(), random_ceno_hash()),
|(left, right)| ceno_hash_2_to_1(&left, &right),
BatchSize::SmallInput,
)
});

c.bench_function("ceno hash 60 to 1", |bencher| {
bencher.iter_batched(
|| {
(0..60)
.map(|_| Goldilocks::random(&mut test_rng()))
.collect::<Vec<_>>()
},
|values| ceno_hash_many_to_1(values.as_slice()),
BatchSize::SmallInput,
)
});
}

// bench permutation
pub fn permutation_benchmark(c: &mut Criterion) {
let mut plonky_permutation = PoseidonPermutation::new(core::iter::repeat(GoldilocksField(0)));
let mut ceno_permutation = poseidon::poseidon_permutation::PoseidonPermutation::new(
core::iter::repeat(Goldilocks::ZERO),
);

c.bench_function("plonky permute", |bencher| {
bencher.iter(|| plonky_permutation.permute())
});

c.bench_function("ceno permute", |bencher| {
bencher.iter(|| ceno_permutation.permute())
});
}

criterion_group!(benches, permutation_benchmark, hashing_benchmark);
criterion_main!(benches);
6 changes: 3 additions & 3 deletions poseidon/src/digest.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
use crate::constants::DIGEST_WIDTH;
use ff_ext::SmallField;
use serde::Serialize;
use goldilocks::SmallField;
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Default, Serialize, PartialEq, Eq)]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct Digest<F: SmallField>(pub [F; DIGEST_WIDTH]);

impl<F: SmallField> TryFrom<Vec<F>> for Digest<F> {
Expand Down
Loading

0 comments on commit 942266b

Please sign in to comment.