diff --git a/examples/kzg_keygen_tools.rs b/examples/kzg_keygen_tools.rs
new file mode 100644
index 00000000..cf729c54
--- /dev/null
+++ b/examples/kzg_keygen_tools.rs
@@ -0,0 +1,93 @@
+use std::{
+  fs::OpenOptions,
+  io::{BufReader, BufWriter, Write},
+};
+
+use halo2curves::bn256;
+use nova_snark::{
+  provider::{
+    check_sanity_of_ptau_file,
+    hyperkzg::{CommitmentEngine, CommitmentKey},
+    Bn256EngineKZG,
+  },
+  traits::commitment::CommitmentEngineTrait,
+};
+use rand_core::OsRng;
+
+type E = Bn256EngineKZG;
+
+const KZG_KEY_DIR: &str = "/tmp/";
+
+pub fn get_key_file_path(num_gens: usize) -> String {
+  let id = std::any::type_name::<E>().chars()
+    .filter(|c| c.is_alphanumeric()) // Keep only alphanumeric characters
+    .collect::<String>();
+
+  let base_dir = KZG_KEY_DIR.trim_end_matches("/");
+  format!("{}/kzg_{}_{}.keys", base_dir, id, num_gens)
+}
+
+const LABEL: &[u8; 4] = b"test";
+
+const MAX_NUM_GENS: usize = 1 << 21;
+
+macro_rules! timeit {
+  ($e:expr) => {{
+    let start = std::time::Instant::now();
+    let res = $e();
+    let dur = start.elapsed();
+    (res, dur)
+  }};
+}
+
+fn keygen_save_large() {
+  const BUFFER_SIZE: usize = 64 * 1024;
+
+  let path = get_key_file_path(MAX_NUM_GENS);
+
+  if check_sanity_of_ptau_file::<bn256::G1Affine>(&path, MAX_NUM_GENS + 1, 1).is_err() {
+    println!("Generating {} KZG keys ", MAX_NUM_GENS);
+
+    let (ck, dur) = timeit!(|| { CommitmentKey::<E>::setup_from_rng(LABEL, MAX_NUM_GENS, OsRng) });
+
+    println!("Generated {} keys in {:?}", MAX_NUM_GENS, dur);
+
+    let file = OpenOptions::new()
+      .write(true)
+      .create(true)
+      .truncate(true)
+      .open(&path)
+      .unwrap();
+    let mut writer = BufWriter::with_capacity(BUFFER_SIZE, &file);
+
+    let (_, dur) = timeit!(|| {
+      ck.save_to(&mut writer).unwrap();
+      // flush so the size reported below includes the tail of the write buffer
+      writer.flush().unwrap();
+    });
+
+    println!(
+      "Saved {} keys to {} in {:?}, file size={}MB",
+      MAX_NUM_GENS,
+      &path,
+      dur,
+      file.metadata().unwrap().len() / 1024 / 1024
+    );
+  } else {
+    println!("Key file already exists at {}", &path);
+  }
+
+  let (res, dur) = timeit!(|| {
+    let file = OpenOptions::new().read(true).open(&path).unwrap();
+    let mut reader = BufReader::new(file);
+    CommitmentEngine::<E>::load_setup(&mut reader, MAX_NUM_GENS)
+  });
+
+  assert!(res.is_ok());
+
+  println!("Loaded {} keys from {} in {:?}", MAX_NUM_GENS, &path, dur);
+}
+
+fn main() {
+  keygen_save_large();
+}
diff --git a/src/provider/hyperkzg.rs b/src/provider/hyperkzg.rs
index a5a0152f..31442b39 100644
--- a/src/provider/hyperkzg.rs
+++ b/src/provider/hyperkzg.rs
@@ -21,11 +21,13 @@
 use core::{
   ops::{Add, Mul, MulAssign},
   slice,
 };
-use ff::Field;
+use ff::{Field, PrimeFieldBits};
 use rand_core::OsRng;
 use rayon::prelude::*;
 use serde::{Deserialize, Serialize};
 
+use super::{ptau::PtauFileError, read_ptau, write_ptau};
+
 /// Alias to points on G1 that are in preprocessed form
 type G1Affine<E> = <<E as Engine>::GE as DlogGroup>::AffineGroupElement;
@@ -123,6 +125,26 @@
   }
 }
 
+impl<E: Engine> CommitmentKey<E>
+where
+  E::GE: PairingGroup,
+{
+  /// Save keys
+  pub fn save_to(
+    &self,
+    mut writer: &mut (impl std::io::Write + std::io::Seek),
+  ) -> Result<(), PtauFileError> {
+    let mut g1_points = Vec::with_capacity(self.ck.len() + 1);
+    g1_points.push(self.h);
+    g1_points.extend(self.ck.iter().cloned());
+
+    let g2_points = vec![self.tau_H];
+    let power = g1_points.len().next_power_of_two().trailing_zeros() + 1;
+
+    write_ptau(&mut writer, g1_points, g2_points, power)
+  }
+}
+
 impl<E: Engine> Default for Commitment<E>
 where
   E::GE: PairingGroup,
@@ -228,27 +250,172 @@
   /// NOTE: this is for testing purposes and should not be used in production
   /// This can be used instead of `setup` to generate a reproducible commitment key
   pub fn setup_from_rng(label: &'static [u8], n: usize, rng: impl rand_core::RngCore) -> Self {
-    let tau = E::Scalar::random(rng);
+    const T1: usize = 1 << 16;
+    const T2: usize = 100_000;
+
     let num_gens = n.next_power_of_two();
 
-    // Compute powers of tau in E::Scalar, then scalar muls in parallel
-    let mut powers_of_tau: Vec<E::Scalar> = Vec::with_capacity(num_gens);
-    powers_of_tau.insert(0, E::Scalar::ONE);
-    for i in 1..num_gens {
-      powers_of_tau.insert(i, powers_of_tau[i - 1] * tau);
+    let tau = E::Scalar::random(rng);
+
+    let powers_of_tau = if num_gens < T1 {
+      Self::compute_powers_serial(tau, num_gens)
+    } else {
+      Self::compute_powers_par(tau, num_gens)
+    };
+
+    if num_gens < T2 {
+      Self::setup_from_tau_direct(label, &powers_of_tau)
+    } else {
+      Self::setup_from_tau_fixed_base_exp(label, &powers_of_tau)
     }
+  }
+
+  fn setup_from_tau_fixed_base_exp(label: &'static [u8], powers_of_tau: &[E::Scalar]) -> Self {
+    let tau = powers_of_tau[1];
+
+    let gen = <E::GE as DlogGroup>::gen();
+
+    let ck = fixed_base_exp_comb_batch::<4, 16, 64, 2, 32, _>(gen, powers_of_tau);
+    let ck = ck.par_iter().map(|p| p.affine()).collect();
+
+    let h = *E::GE::from_label(label, 1).first().unwrap();
+
+    let tau_H = (<<E::GE as PairingGroup>::G2 as DlogGroup>::gen() * tau).affine();
+
+    Self { ck, h, tau_H }
+  }
+
+  fn setup_from_tau_direct(label: &'static [u8], powers_of_tau: &[E::Scalar]) -> Self {
+    let num_gens = powers_of_tau.len();
+    let tau = powers_of_tau[1];
 
     let ck: Vec<G1Affine<E>> = (0..num_gens)
       .into_par_iter()
       .map(|i| (<E::GE as DlogGroup>::gen() * powers_of_tau[i]).affine())
       .collect();
 
-    let h = E::GE::from_label(label, 1).first().unwrap().clone();
+    let h = *E::GE::from_label(label, 1).first().unwrap();
 
     let tau_H = (<<E::GE as PairingGroup>::G2 as DlogGroup>::gen() * tau).affine();
 
     Self { ck, h, tau_H }
   }
+
+  fn compute_powers_serial(tau: E::Scalar, n: usize) -> Vec<E::Scalar> {
+    let mut powers_of_tau = Vec::with_capacity(n);
+    powers_of_tau.insert(0, E::Scalar::ONE);
+    for i in 1..n {
+      powers_of_tau.insert(i, powers_of_tau[i - 1] * tau);
+    }
+    powers_of_tau
+  }
+
+  fn compute_powers_par(tau: E::Scalar, n: usize) -> Vec<E::Scalar> {
+    let num_threads = rayon::current_num_threads();
+    (0..n)
+      .collect::<Vec<_>>()
+      .par_chunks(std::cmp::max(n / num_threads, 1))
+      .into_par_iter()
+      .map(|sub_list| {
+        let mut res = Vec::with_capacity(sub_list.len());
+        res.push(tau.pow([sub_list[0] as u64]));
+        for i in 1..sub_list.len() {
+          res.push(res[i - 1] * tau);
+        }
+        res
+      })
+      .flatten()
+      .collect::<Vec<_>>()
+  }
+}
+
+// * Implementation of https://www.weimerskirch.org/files/Weimerskirch_FixedBase.pdf
+fn fixed_base_exp_comb_batch<
+  const H: usize,
+  const POW_2_H: usize,
+  const A: usize,
+  const B: usize,
+  const V: usize,
+  G: DlogGroup,
+>(
+  gen: G,
+  scalars: &[G::Scalar],
+) -> Vec<G> {
+  assert_eq!(1 << H, POW_2_H);
+  assert_eq!(A, V * B);
+  assert!(A <= 64);
+
+  let zero = G::zero();
+  let one = gen;
+
+  let gi = {
+    let mut res = [one; H];
+    for i in 1..H {
+      let prod = (0..A).fold(res[i - 1], |acc, _| acc + acc);
+      res[i] = prod;
+    }
+    res
+  };
+
+  let mut precompute_res = (1..POW_2_H)
+    .into_par_iter()
+    .map(|i| {
+      let mut res = [zero; V];
+
+      // * G[0][i]
+      let mut g_0_i = zero;
+      for (j, item) in gi.iter().enumerate().take(H) {
+        if (1 << j) & i > 0 {
+          g_0_i += item;
+        }
+      }
+
+      res[0] = g_0_i;
+
+      // * G[j][i]
+      for j in 1..V {
+        res[j] = (0..B).fold(res[j - 1], |acc, _| acc + acc);
+      }
+
+      res
+    })
+    .collect::<Vec<_>>();
+
+  precompute_res.insert(0, [zero; V]);
+
+  let precomputed_g: [_; POW_2_H] = std::array::from_fn(|j| precompute_res[j]);
+
+  let zero = G::zero();
+
+  scalars
+    .par_iter()
+    .map(|e| {
+      let mut a = zero;
+      let mut bits = e.to_le_bits().into_iter().collect::<Vec<_>>();
+
+      while bits.len() % A != 0 {
+        bits.push(false);
+      }
+
+      for k in (0..B).rev() {
+        a += a;
+        for j in (0..V).rev() {
+          let i_j_k = (0..H)
+            .map(|h| {
+              let b = bits[h * A + j * B + k];
+              (1 << h) * b as usize
+            })
+            .sum::<usize>();
+
+          if i_j_k > 0 {
+            a += precomputed_g[i_j_k][j];
+          }
+        }
+      }
+
+      a
+    })
+    .collect::<Vec<_>>()
 }
 
 impl<E: Engine> CommitmentEngineTrait<E> for CommitmentEngine<E>
 where
   E::GE: PairingGroup,
@@ -261,12 +428,11 @@
   fn setup(label: &'static [u8], n: usize) -> Self::CommitmentKey {
     // NOTE: this is for testing purposes and should not be used in production
-    // TODO: we need to decide how to generate load/store parameters
     Self::CommitmentKey::setup_from_rng(label, n, OsRng)
   }
 
   fn derand_key(ck: &Self::CommitmentKey) -> Self::DerandKey {
-    Self::DerandKey { h: ck.h.clone() }
+    Self::DerandKey { h: ck.h }
   }
 
   fn commit(ck: &Self::CommitmentKey, v: &[E::Scalar], r: &E::Scalar) -> Self::Commitment {
@@ -308,6 +474,25 @@
       comm: commit.comm - <E::GE as DlogGroup>::group(&dk.h) * r,
     }
   }
+
+  fn load_setup(
+    reader: &mut (impl std::io::Read + std::io::Seek),
+    n: usize,
+  ) -> Result<Self::CommitmentKey, PtauFileError> {
+    let num = n.next_power_of_two();
+
+    let (g1_points, g2_points) = read_ptau(reader, num + 1, 1)?;
+
+    let (h, ck) = g1_points.split_at(1);
+    let h = h[0];
+    let ck = ck.to_vec();
+
+    Ok(CommitmentKey {
+      ck,
+      h,
+      tau_H: g2_points[0],
+    })
+  }
 }
 
 /// Provides an implementation of generators for proving evaluations
@@ -427,7 +612,7 @@
     let vk = VerifierKey {
       G: E::GE::gen().affine(),
       H: <<E::GE as PairingGroup>::G2 as DlogGroup>::gen().affine(),
-      tau_H: ck.tau_H.clone(),
+      tau_H: ck.tau_H,
     };
 
     (pk, vk)
@@ -723,9 +908,14 @@
 #[cfg(test)]
 mod tests {
+  use std::{
+    fs::OpenOptions,
+    io::{BufReader, BufWriter},
+  };
+
   use super::*;
   use crate::{
-    provider::{keccak::Keccak256Transcript, Bn256EngineKZG},
+    provider::{hyperkzg, keccak::Keccak256Transcript, Bn256EngineKZG},
     spartan::polys::multilinear::MultilinearPolynomial,
   };
   use bincode::Options;
@@ -882,4 +1072,54 @@
     );
   }
   }
+
+  #[test]
+  fn test_key_gen() {
+    let n = 100;
+    let tau = Fr::random(OsRng);
+    let powers_of_tau = CommitmentKey::<E>::compute_powers_serial(tau, n);
+    let label = b"test";
+    let res1 = CommitmentKey::<E>::setup_from_tau_direct(label, &powers_of_tau);
+    let res2 = CommitmentKey::<E>::setup_from_tau_fixed_base_exp(label, &powers_of_tau);
+
+    assert_eq!(res1.ck.len(), res2.ck.len());
+    assert_eq!(res1.h, res2.h);
+    assert_eq!(res1.tau_H, res2.tau_H);
+    for i in 0..res1.ck.len() {
+      assert_eq!(res1.ck[i], res2.ck[i]);
+    }
+  }
+
+  #[test]
+  fn test_save_load_ck() {
+    let n = 4;
+    let filename = "/tmp/kzg_test.ptau";
+    const BUFFER_SIZE: usize = 64 * 1024;
+    let ck: CommitmentKey<E> = CommitmentEngine::setup(b"test", n);
+
+    let file = OpenOptions::new()
+      .write(true)
+      .create(true)
+      .truncate(true)
+      .open(filename)
+      .unwrap();
+    let mut writer = BufWriter::with_capacity(BUFFER_SIZE, file);
+
+    ck.save_to(&mut writer).unwrap();
+    // drop (and thereby flush) the writer before reading the file back
+    drop(writer);
+
+    let file = OpenOptions::new().read(true).open(filename).unwrap();
+
+    let mut reader = BufReader::new(file);
+
+    let read_ck = hyperkzg::CommitmentEngine::<E>::load_setup(&mut reader, ck.ck.len()).unwrap();
+
+    assert_eq!(ck.ck.len(), read_ck.ck.len());
+    assert_eq!(ck.h, read_ck.h);
+    assert_eq!(ck.tau_H, read_ck.tau_H);
+    for i in 0..ck.ck.len() {
+      assert_eq!(ck.ck[i], read_ck.ck[i]);
+    }
+  }
 }
diff --git a/src/provider/mod.rs b/src/provider/mod.rs
index 84f6aeef..a6e74016 100644
--- a/src/provider/mod.rs
+++ b/src/provider/mod.rs
@@ -8,6 +8,9 @@
 pub mod pasta;
 pub mod poseidon;
 pub mod secp_secq;
+pub(crate) mod ptau;
+pub use ptau::{check_sanity_of_ptau_file, read_ptau, write_ptau};
+
 // crate-private modules
 pub(crate) mod keccak;
 pub(crate) mod pedersen;
diff --git a/src/provider/pedersen.rs b/src/provider/pedersen.rs
index b60e80a5..13ed63a2 100644
--- a/src/provider/pedersen.rs
+++ b/src/provider/pedersen.rs
@@ -1,7 +1,7 @@
 //! This module provides an implementation of a commitment engine
 use crate::{
   errors::NovaError,
-  provider::traits::DlogGroup,
+  provider::{ptau::read_points, traits::DlogGroup},
   traits::{
     commitment::{CommitmentEngineTrait, CommitmentTrait, Len},
     AbsorbInROTrait, Engine, ROTrait, TranscriptReprTrait,
@@ -16,6 +16,10 @@
 use ff::Field;
 use rayon::prelude::*;
 use serde::{Deserialize, Serialize};
 
+use super::ptau::{write_points, PtauFileError};
+
+const KEY_FILE_HEAD: [u8; 12] = *b"PEDERSEN_KEY";
+
 /// A type that holds commitment generators
 #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
 pub struct CommitmentKey<E: Engine>
 where
@@ -158,6 +162,19 @@
 pub struct CommitmentEngine<E: Engine> {
   _p: PhantomData<E>,
 }
 
+impl<E: Engine> CommitmentKey<E>
+where
+  E::GE: DlogGroup,
+{
+  pub fn save_to(&self, writer: &mut impl std::io::Write) -> Result<(), PtauFileError> {
+    writer.write_all(&KEY_FILE_HEAD)?;
+    let mut points = Vec::with_capacity(self.ck.len() + 1);
+    points.push(self.h.unwrap());
+    points.extend(self.ck.iter().cloned());
+    write_points(writer, points)
+  }
+}
+
 impl<E: Engine> CommitmentEngineTrait<E> for CommitmentEngine<E>
 where
   E::GE: DlogGroup,
@@ -173,14 +190,14 @@
     Self::CommitmentKey {
       ck: ck.to_vec(),
-      h: Some(h.clone()),
+      h: Some(*h),
     }
   }
 
   fn derand_key(ck: &Self::CommitmentKey) -> Self::DerandKey {
     assert!(ck.h.is_some());
     Self::DerandKey {
-      h: ck.h.as_ref().unwrap().clone(),
+      h: *ck.h.as_ref().unwrap(),
     }
   }
 
   fn commit(ck: &Self::CommitmentKey, v: &[E::Scalar], r: &E::Scalar) -> Self::Commitment {
@@ -210,6 +227,29 @@
       comm: commit.comm - <E::GE as DlogGroup>::group(&dk.h) * r,
     }
   }
+
+  fn load_setup(
+    reader: &mut (impl std::io::Read + std::io::Seek),
+    n: usize,
+  ) -> Result<Self::CommitmentKey, PtauFileError> {
+    let num = n.next_power_of_two();
+    {
+      let mut head = [0u8; 12];
+      reader.read_exact(&mut head)?;
+      if head != KEY_FILE_HEAD {
+        return Err(PtauFileError::InvalidHead);
+      }
+    }
+
+    let points = read_points(reader, num + 1)?;
+
+    let (first, second) = points.split_at(1);
+
+    Ok(Self::CommitmentKey {
+      ck: second.to_vec(),
+      h: Some(first[0]),
+    })
+  }
 }
 
 /// A trait listing properties of a commitment key that can be managed in a divide-and-conquer fashion
@@ -248,11 +288,11 @@
     (
       CommitmentKey {
         ck: self.ck[0..n].to_vec(),
-        h: self.h.clone(),
+        h: self.h,
       },
      CommitmentKey {
         ck: self.ck[n..].to_vec(),
-        h: self.h.clone(),
+        h: self.h,
       },
     )
   }
@@ -263,10 +303,7 @@
     let ck = {
       let mut c = self.ck.clone();
       c.extend(other.ck.clone());
       c
     };
-    CommitmentKey {
-      ck,
-      h: self.h.clone(),
-    }
+    CommitmentKey { ck, h: self.h }
   }
 
   // combines the left and right halves of `self` using `w1` and `w2` as the weights
@@ -277,15 +314,12 @@
     let ck = (0..self.ck.len() / 2)
       .into_par_iter()
       .map(|i| {
-        let bases = [L.ck[i].clone(), R.ck[i].clone()].to_vec();
+        let bases = [L.ck[i], R.ck[i]].to_vec();
         E::GE::vartime_multiscalar_mul(&w, &bases).affine()
       })
       .collect();
 
-    CommitmentKey {
-      ck,
-      h: self.h.clone(),
-    }
+    CommitmentKey { ck, h: self.h }
   }
 
   /// Scales each element in `self` by `r`
@@ -299,7 +333,7 @@
 
     CommitmentKey {
       ck: ck_scaled,
-      h: self.h.clone(),
+      h: self.h,
     }
   }
@@ -319,3 +353,32 @@
     })
   }
 }
+
+#[cfg(test)]
+mod tests {
+  use super::*;
+
+  use crate::{provider::GrumpkinEngine, CommitmentKey};
+  use std::{fs::File, io::BufWriter};
+
+  type E = GrumpkinEngine;
+
+  #[test]
+  fn test_key_save_load() {
+    let path = "/tmp/pedersen_test.keys";
+
+    let keys = CommitmentEngine::<E>::setup(b"test", 100);
+
+    keys
+      .save_to(&mut BufWriter::new(File::create(path).unwrap()))
+      .unwrap();
+
+    let keys_read = CommitmentEngine::load_setup(&mut File::open(path).unwrap(), 100);
+
+    assert!(keys_read.is_ok());
+    let keys_read: CommitmentKey<E> = keys_read.unwrap();
+    assert_eq!(keys_read.ck.len(), keys.ck.len());
+    assert_eq!(keys_read.h, keys.h);
+    assert_eq!(keys_read.ck, keys.ck);
+  }
+}
diff --git a/src/provider/ptau.rs b/src/provider/ptau.rs
new file mode 100644
index 00000000..16020de1
--- /dev/null
+++ b/src/provider/ptau.rs
@@ -0,0 +1,297 @@
+use std::{
+  fs::File,
+  io::{self, Read, Seek, SeekFrom, Write},
+  path::Path,
+  str::{from_utf8, Utf8Error},
+};
+
+use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
+use ff::PrimeField;
+use halo2curves::CurveAffine;
+use num_bigint::BigUint;
+
+#[derive(thiserror::Error, Debug)]
+pub enum PtauFileError {
+  #[error("Invalid magic string")]
+  InvalidHead,
+
+  #[error("Unsupported version")]
+  UnsupportedVersion(u32),
+
+  #[error("Invalid number of sections")]
+  InvalidNumSections(u32),
+
+  #[error("Invalid base prime")]
+  InvalidPrime(BigUint),
+
+  #[error("Insufficient power for G1")]
+  InsufficientPowerForG1 { power: u32, required: usize },
+
+  #[error("Insufficient power for G2")]
+  InsufficientPowerForG2 { power: u32, required: usize },
+
+  #[error(transparent)]
+  IoError(#[from] io::Error),
+  #[error(transparent)]
+  Utf8Error(#[from] Utf8Error),
+}
+
+#[derive(Debug)]
+struct MetaData {
+  pos_header: u64,
+  pos_tau_g1: u64,
+  pos_tau_g2: u64,
+}
+
+const PTAU_VERSION: u32 = 1;
+const NUM_SECTIONS: u32 = 11;
+
+fn write_header<Base: PrimeField>(
+  writer: &mut impl Write,
+  power: u32,
+) -> Result<(), PtauFileError> {
+  const N8: usize = 32;
+
+  writer.write_all(b"ptau")?;
+
+  writer.write_u32::<LittleEndian>(PTAU_VERSION)?;
+  writer.write_u32::<LittleEndian>(NUM_SECTIONS)?;
+
+  // * header
+  writer.write_u32::<LittleEndian>(1)?;
+  writer.write_i64::<LittleEndian>(4 + N8 as i64 + 4)?;
+
+  writer.write_u32::<LittleEndian>(N8 as u32)?;
+
+  let modulus = BigUint::parse_bytes(Base::MODULUS[2..].as_bytes(), 16).unwrap();
+  let mut bytes = [0u8; N8];
+  bytes.copy_from_slice(&modulus.to_bytes_le());
+  writer.write_all(&bytes)?;
+
+  writer.write_u32::<LittleEndian>(power)?;
+
+  Ok(())
+}
+
+pub(crate) fn write_points<G>(
+  mut writer: &mut impl Write,
+  points: Vec<G>,
+  // section_id: u32,
+) -> Result<(), PtauFileError>
+where
+  G: halo2curves::serde::SerdeObject + CurveAffine,
+{
+  for point in points {
+    point.write_raw(&mut writer)?;
+  }
+  Ok(())
+}
+
+/// Save Ptau File
+pub fn write_ptau<G1, G2>(
+  mut writer: &mut (impl Write + Seek),
+  g1_points: Vec<G1>,
+  g2_points: Vec<G2>,
+  power: u32,
+) -> Result<(), PtauFileError>
+where
+  G1: halo2curves::serde::SerdeObject + CurveAffine,
+  G2: halo2curves::serde::SerdeObject + CurveAffine,
+{
+  write_header::<G1::Base>(&mut writer, power)?;
+
+  writer.write_u32::<LittleEndian>(0)?;
+  writer.write_i64::<LittleEndian>(0)?;
+
+  for id in 4..NUM_SECTIONS {
+    writer.write_u32::<LittleEndian>(id)?;
+    writer.write_i64::<LittleEndian>(0)?;
+  }
+
+  {
+    writer.write_u32::<LittleEndian>(2)?;
+    let pos = writer.stream_position()?;
+
+    writer.write_i64::<LittleEndian>(0)?;
+    let start = writer.stream_position()?;
+
+    write_points(writer, g1_points)?;
+
+    let size = writer.stream_position()? - start;
+
+    writer.seek(SeekFrom::Start(pos))?;
+    writer.write_i64::<LittleEndian>(size as i64)?;
+
+    writer.seek(SeekFrom::Current(size as i64))?;
+  }
+
+  {
+    writer.write_u32::<LittleEndian>(3)?;
+    let pos = writer.stream_position()?;
+
+    writer.write_i64::<LittleEndian>(0)?;
+    let start = writer.stream_position()?;
+
+    write_points(writer, g2_points)?;
+
+    let size = writer.stream_position()? - start;
+
+    writer.seek(SeekFrom::Start(pos))?;
+    writer.write_i64::<LittleEndian>(size as i64)?;
+  }
+  Ok(())
+}
+
+fn read_meta_data(reader: &mut (impl Read + Seek)) -> Result<MetaData, PtauFileError> {
+  {
+    let mut buf = [0u8; 4];
+    reader.read_exact(&mut buf)?;
+    if from_utf8(&buf)? != "ptau" {
+      return Err(PtauFileError::InvalidHead);
+    }
+  }
+  {
+    let version = reader.read_u32::<LittleEndian>()?;
+    if version != PTAU_VERSION {
+      return Err(PtauFileError::UnsupportedVersion(version));
+    }
+  }
+  {
+    let num_sections = reader.read_u32::<LittleEndian>()?;
+    if num_sections != NUM_SECTIONS {
+      return Err(PtauFileError::InvalidNumSections(num_sections));
+    }
+  }
+  let mut pos_header = 0;
+  let mut pos_tau_g1 = 0;
+  let mut pos_tau_g2 = 0;
+
+  for _ in 0..NUM_SECTIONS {
+    let id = reader.read_u32::<LittleEndian>()?;
+    let size = reader.read_i64::<LittleEndian>()?;
+
+    let pos = reader.stream_position()?;
+
+    match id {
+      1 => {
+        pos_header = pos;
+      }
+      2 => {
+        pos_tau_g1 = pos;
+      }
+      3 => {
+        pos_tau_g2 = pos;
+      }
+      _ => {}
+    };
+    reader.seek(SeekFrom::Current(size))?;
+  }
+
+  assert_ne!(pos_header, 0);
+  assert_ne!(pos_tau_g1, 0);
+  assert_ne!(pos_tau_g2, 0);
+
+  Ok(MetaData {
+    pos_header,
+    pos_tau_g1,
+    pos_tau_g2,
+  })
+}
+
+fn read_header<Base: PrimeField>(
+  reader: &mut impl Read,
+  num_g1: usize,
+  num_g2: usize,
+) -> Result<(), PtauFileError> {
+  // * n8
+  let n8 = reader.read_u32::<LittleEndian>()?;
+
+  // * prime
+  {
+    let mut buf = vec![0u8; n8 as usize];
+    reader.read_exact(&mut buf)?;
+
+    let modulus = BigUint::from_bytes_le(&buf);
+
+    let modulus_expected = BigUint::parse_bytes(Base::MODULUS[2..].as_bytes(), 16).unwrap();
+
+    if modulus != modulus_expected {
+      return Err(PtauFileError::InvalidPrime(modulus));
+    }
+  }
+
+  // * power
+  let power = reader.read_u32::<LittleEndian>()?;
+
+  let max_num_g2 = 1 << power;
+  let max_num_g1 = max_num_g2 * 2 - 1;
+  if num_g1 > max_num_g1 {
+    return Err(PtauFileError::InsufficientPowerForG1 {
+      power,
+      required: max_num_g1,
+    });
+  }
+  if num_g2 > max_num_g2 {
+    return Err(PtauFileError::InsufficientPowerForG2 {
+      power,
+      required: max_num_g2,
+    });
+  }
+
+  Ok(())
+}
+
+pub(crate) fn read_points<G>(
+  mut reader: &mut impl Read,
+  num: usize,
+) -> Result<Vec<G>, PtauFileError>
+where
+  G: halo2curves::serde::SerdeObject + CurveAffine,
+{
+  let mut res = Vec::with_capacity(num);
+  for _ in 0..num {
+    res.push(G::read_raw(&mut reader)?);
+  }
+  Ok(res)
+}
+
+/// Load Ptau File
+pub fn read_ptau<G1, G2>(
+  mut reader: &mut (impl Read + Seek),
+  num_g1: usize,
+  num_g2: usize,
+) -> Result<(Vec<G1>, Vec<G2>), PtauFileError>
+where
+  G1: halo2curves::serde::SerdeObject + CurveAffine,
+  G2: halo2curves::serde::SerdeObject + CurveAffine,
+{
+  let metadata = read_meta_data(&mut reader)?;
+
+  reader.seek(SeekFrom::Start(metadata.pos_header))?;
+  read_header::<G1::Base>(reader, num_g1, num_g2)?;
+
+  reader.seek(SeekFrom::Start(metadata.pos_tau_g1))?;
+  let g1_points = read_points::<G1>(&mut reader, num_g1)?;
+
+  reader.seek(SeekFrom::Start(metadata.pos_tau_g2))?;
+  let g2_points = read_points::<G2>(&mut reader, num_g2)?;
+
+  Ok((g1_points, g2_points))
+}
+
+/// Check the sanity of the ptau file
+pub fn check_sanity_of_ptau_file<G1>(
+  path: impl AsRef<Path>,
+  num_g1: usize,
+  num_g2: usize,
+) -> Result<(), PtauFileError>
+where
+  G1: halo2curves::serde::SerdeObject + CurveAffine,
+{
+  let mut reader = File::open(path)?;
+
+  let metadata = read_meta_data(&mut reader)?;
+
+  reader.seek(SeekFrom::Start(metadata.pos_header))?;
+  read_header::<G1::Base>(&mut reader, num_g1, num_g2)
+}
diff --git a/src/provider/traits.rs b/src/provider/traits.rs
index a15384a3..6b67a86a 100644
--- a/src/provider/traits.rs
+++ b/src/provider/traits.rs
@@ -3,6 +3,7 @@
 use core::{
   fmt::Debug,
   ops::{Add, AddAssign, Sub, SubAssign},
 };
+use halo2curves::{serde::SerdeObject, CurveAffine};
 use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
 use serde::{Deserialize, Serialize};
@@ -44,7 +45,9 @@
     + Sync
     + Serialize
     + for<'de> Deserialize<'de>
-    + TranscriptReprTrait<Self>;
+    + TranscriptReprTrait<Self>
+    + CurveAffine
+    + SerdeObject;
 
   /// A method to compute a multiexponentation
   fn vartime_multiscalar_mul(scalars: &[Self::Scalar], bases: &[Self::AffineGroupElement]) -> Self;
diff --git a/src/traits/commitment.rs b/src/traits/commitment.rs
index 55de7ddc..398dc620 100644
--- a/src/traits/commitment.rs
+++ b/src/traits/commitment.rs
@@ -1,6 +1,9 @@
 //! This module defines a collection of traits that define the behavior of a commitment engine
 //! We require the commitment engine to provide a commitment to vectors with a single group element
-use crate::traits::{AbsorbInROTrait, Engine, TranscriptReprTrait};
+use crate::{
+  provider::ptau::PtauFileError,
+  traits::{AbsorbInROTrait, Engine, TranscriptReprTrait},
+};
 use core::{
   fmt::Debug,
   ops::{Add, Mul, MulAssign},
@@ -54,6 +57,12 @@
   /// Holds the type of the commitment
   type Commitment: CommitmentTrait<E>;
 
+  /// Load keys
+  fn load_setup(
+    reader: &mut (impl std::io::Read + std::io::Seek),
+    n: usize,
+  ) -> Result<Self::CommitmentKey, PtauFileError>;
+
   /// Samples a new commitment key of a specified size
   fn setup(label: &'static [u8], n: usize) -> Self::CommitmentKey;