diff --git a/README.md b/README.md index 4bfc06c5..68d6d557 100644 --- a/README.md +++ b/README.md @@ -57,7 +57,7 @@ This trait defines the interface for a polynomial commitment scheme. It is recom // In this example, we will commit to a single polynomial, open it first at one point, and then batched at two points, and finally verify the proofs. // We will use the KZG10 polynomial commitment scheme, following the approach from Marlin. -use ark_poly_commit::{Polynomial, marlin_pc::MarlinKZG10, LabeledPolynomial, PolynomialCommitment, QuerySet, Evaluations, challenge::ChallengeGenerator}; +use ark_poly_commit::{Polynomial, marlin_pc::MarlinKZG10, LabeledPolynomial, PolynomialCommitment, QuerySet, Evaluations}; use ark_bls12_377::Bls12_377; use ark_crypto_primitives::sponge::poseidon::{PoseidonSponge, PoseidonConfig}; use ark_crypto_primitives::sponge::CryptographicSponge; @@ -130,15 +130,13 @@ let (ck, vk) = PCS::trim(&pp, degree, 2, Some(&[degree])).unwrap(); // The prover commits to the polynomial using their committer key `ck`. let (comms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); -let challenge_generator: ChallengeGenerator<::ScalarField, Sponge_Bls12_377> = ChallengeGenerator::new_univariate(&mut test_sponge); - // 4a. PolynomialCommitment::open // Opening proof at a single point. -let proof_single = PCS::open(&ck, [&labeled_poly], &comms, &point_1, &mut (challenge_generator.clone()), &states, None).unwrap(); +let proof_single = PCS::open(&ck, [&labeled_poly], &comms, &point_1, &mut (test_sponge.clone()), &states, None).unwrap(); // 5a. PolynomialCommitment::check // Verifying the proof at a single point, given the commitment, the point, the claimed evaluation, and the proof. -assert!(PCS::check(&vk, &comms, &point_1, [secret_poly.evaluate(&point_1)], &proof_single, &mut (challenge_generator.clone()), Some(rng)).unwrap()); +assert!(PCS::check(&vk, &comms, &point_1, [secret_poly.evaluate(&point_1)], &proof_single, &mut (test_sponge.clone()), Some(rng)).unwrap()); let mut query_set = QuerySet::new(); let mut values = Evaluations::new(); @@ -155,7 +153,7 @@ let proof_batched = PCS::batch_open( [&labeled_poly], &comms, &query_set, - &mut (challenge_generator.clone()), + &mut (test_sponge.clone()), &states, Some(rng), ).unwrap(); @@ -167,7 +165,7 @@ assert!(PCS::batch_check( &query_set, &values, &proof_batched, - &mut (challenge_generator.clone()), + &mut (test_sponge.clone()), rng, ).unwrap()); ``` diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 1ad700fe..c3211c7f 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -17,9 +17,7 @@ use rand_chacha::{ use core::time::Duration; use std::{borrow::Borrow, marker::PhantomData, time::Instant}; -use ark_poly_commit::{ - challenge::ChallengeGenerator, to_bytes, LabeledPolynomial, PolynomialCommitment, -}; +use ark_poly_commit::{to_bytes, LabeledPolynomial, PolynomialCommitment}; pub use criterion::*; pub use paste::paste; @@ -140,7 +138,7 @@ where [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), + &mut test_sponge(), &states, Some(rng), ) @@ -173,7 +171,7 @@ where [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), + &mut test_sponge(), &states, Some(rng), ) @@ -210,7 +208,7 @@ where [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), + &mut test_sponge(), &states, Some(rng), ) @@ -223,7 +221,7 @@ where &point, [claimed_eval], &proof, - &mut 
ChallengeGenerator::new_univariate(&mut test_sponge()), + &mut test_sponge(), None, ) .unwrap(); diff --git a/poly-commit/src/challenge.rs b/poly-commit/src/challenge.rs deleted file mode 100644 index 23b3c9d1..00000000 --- a/poly-commit/src/challenge.rs +++ /dev/null @@ -1,61 +0,0 @@ -use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; -use ark_ff::PrimeField; - -/// `ChallengeGenerator` generates opening challenges using multivariate or univariate strategy. -/// For multivariate strategy, each challenge is freshly squeezed from a sponge. -/// For univariate strategy, each challenge is a power of one squeezed element from sponge. -/// -/// Note that mutable reference cannot be cloned. -#[derive(Clone)] -pub enum ChallengeGenerator { - /// Each challenge is freshly squeezed from a sponge. - Multivariate(S), - /// Each challenge is a power of one squeezed element from sponge. - /// - /// `Univariate(generator, next_element)` - Univariate(F, F), -} - -impl ChallengeGenerator { - /// Returns a challenge generator with multivariate strategy. Each challenge is freshly squeezed - /// from a sponge. - pub fn new_multivariate(sponge: S) -> Self { - Self::Multivariate(sponge) - } - - /// Returns a challenge generator with univariate strategy. Each challenge is a power of one - /// squeezed element from sponge. - pub fn new_univariate(sponge: &mut S) -> Self { - let gen = sponge.squeeze_field_elements(1)[0]; - Self::Univariate(gen, gen) - } - - /// Returns a challenge of size `size`. - /// * If `self == Self::Multivariate(...)`, then this squeezes out a challenge of size `size`. - /// * If `self == Self::Univariate(...)`, then this ignores the `size` argument and simply squeezes out - /// the next field element. - pub fn try_next_challenge_of_size(&mut self, size: FieldElementSize) -> F { - match self { - // multivariate (full) - Self::Multivariate(sponge) => sponge.squeeze_field_elements_with_sizes(&[size])[0], - // univariate - Self::Univariate(gen, next) => { - let result = next.clone(); - *next *= *gen; - result - } - } - } - /// Returns the next challenge generated. - pub fn next_challenge(&mut self) -> F { - self.try_next_challenge_of_size(FieldElementSize::Full) - } - - /// Returns the sponge state if `self` is multivariate. Returns `None` otherwise. - pub fn into_sponge(self) -> Option { - match self { - Self::Multivariate(s) => Some(s), - _ => None, - } - } -} diff --git a/poly-commit/src/constraints.rs b/poly-commit/src/constraints.rs index e6fb5d4f..1300509a 100644 --- a/poly-commit/src/constraints.rs +++ b/poly-commit/src/constraints.rs @@ -5,7 +5,7 @@ use crate::{ use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::PrimeField; use ark_poly::Polynomial; -use ark_r1cs_std::fields::nonnative::NonNativeFieldVar; +use ark_r1cs_std::fields::emulated_fp::EmulatedFpVar; use ark_r1cs_std::{fields::fp::FpVar, prelude::*}; use ark_relations::r1cs::{ConstraintSystemRef, Namespace, Result as R1CSResult, SynthesisError}; use ark_std::{borrow::Borrow, cmp::Eq, cmp::PartialEq, hash::Hash, marker::Sized}; @@ -24,8 +24,8 @@ pub enum LinearCombinationCoeffVar), + /// Other coefficient, represented as a "emulated" field element. + Var(EmulatedFpVar), } /// An allocated version of `LinearCombination`. 
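With `challenge.rs` removed, opening challenges are squeezed straight from the `CryptographicSponge` that callers now pass into `open`/`check`, which matches the old multivariate strategy. The univariate strategy (successive powers of a single squeezed element) has no drop-in replacement; a caller that relied on it can reproduce it in a few lines. A minimal sketch, assuming only a generic sponge — the helper name is illustrative and not part of the crate:

```rust
use ark_crypto_primitives::sponge::CryptographicSponge;
use ark_ff::PrimeField;

/// Illustrative stand-in for the removed univariate strategy: derive `n`
/// opening challenges as successive powers of one squeezed field element.
fn univariate_challenges<F: PrimeField, S: CryptographicSponge>(sponge: &mut S, n: usize) -> Vec<F> {
    let gen: F = sponge.squeeze_field_elements(1)[0];
    let mut cur = gen;
    (0..n)
        .map(|_| {
            let c = cur;
            cur *= gen;
            c
        })
        .collect()
}
```

The default path after this change is simply one short squeeze per challenge, e.g. `sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]`, as the `ipa_pc` and `marlin` hunks below show.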
@@ -60,7 +60,7 @@ impl let (f, lc_term) = term; let fg = - NonNativeFieldVar::new_variable(ark_relations::ns!(cs, "term"), || Ok(f), mode) + EmulatedFpVar::new_variable(ark_relations::ns!(cs, "term"), || Ok(f), mode) .unwrap(); (LinearCombinationCoeffVar::Var(fg), lc_term.clone()) @@ -79,12 +79,12 @@ impl pub struct PCCheckRandomDataVar { /// Opening challenges. /// The prover and the verifier MUST use the same opening challenges. - pub opening_challenges: Vec>, + pub opening_challenges: Vec>, /// Bit representations of the opening challenges. pub opening_challenges_bits: Vec>>, /// Batching random numbers. /// The verifier can choose these numbers freely, as long as they are random. - pub batching_rands: Vec>, + pub batching_rands: Vec>, /// Bit representations of the batching random numbers. pub batching_rands_bits: Vec>>, } @@ -172,7 +172,7 @@ pub struct LabeledPointVar { /// MUST be a unique identifier in a query set. pub name: String, /// The point value. - pub value: NonNativeFieldVar, + pub value: EmulatedFpVar, } /// An allocated version of `QuerySet`. @@ -184,7 +184,7 @@ pub struct QuerySetVar( /// An allocated version of `Evaluations`. #[derive(Clone)] pub struct EvaluationsVar( - pub HashMap, NonNativeFieldVar>, + pub HashMap, EmulatedFpVar>, ); impl EvaluationsVar { @@ -192,8 +192,8 @@ impl EvaluationsVar, - ) -> Result, SynthesisError> { + point: &EmulatedFpVar, + ) -> Result, SynthesisError> { let key = LabeledPointVar:: { name: String::from(lc_string), value: point.clone(), diff --git a/poly-commit/src/ipa_pc/mod.rs b/poly-commit/src/ipa_pc/mod.rs index 26234f1e..43a40852 100644 --- a/poly-commit/src/ipa_pc/mod.rs +++ b/poly-commit/src/ipa_pc/mod.rs @@ -15,7 +15,6 @@ pub use data_structures::*; #[cfg(feature = "parallel")] use rayon::prelude::*; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; use digest::Digest; @@ -105,7 +104,7 @@ where point: G::ScalarField, values: impl IntoIterator, proof: &Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, ) -> Option> { let check_time = start_timer!(|| "Succinct checking"); @@ -117,7 +116,8 @@ where let mut combined_commitment_proj = G::Group::zero(); let mut combined_v = G::ScalarField::zero(); - let mut cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut cur_challenge: G::ScalarField = + sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let labeled_commitments = commitments.into_iter(); let values = values.into_iter(); @@ -126,7 +126,7 @@ where let commitment = labeled_commitment.commitment(); combined_v += &(cur_challenge * &value); combined_commitment_proj += &labeled_commitment.commitment().comm.mul(cur_challenge); - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let degree_bound = labeled_commitment.degree_bound(); assert_eq!(degree_bound.is_some(), commitment.shifted_comm.is_some()); @@ -137,7 +137,7 @@ where combined_commitment_proj += &commitment.shifted_comm.unwrap().mul(cur_challenge); } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } let mut combined_commitment = combined_commitment_proj.into_affine(); @@ -488,7 +488,7 @@ where labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, + sponge: 
&mut S, states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result @@ -509,7 +509,7 @@ where let combine_time = start_timer!(|| "Combining polynomials, randomness, and commitments."); - let mut cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; for (labeled_polynomial, (labeled_commitment, state)) in polys_iter.zip(comms_iter.zip(states_iter)) @@ -531,7 +531,7 @@ where combined_rand += &(cur_challenge * &state.rand); } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let has_degree_bound = degree_bound.is_some(); @@ -564,7 +564,7 @@ where } } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } end_timer!(combine_time); @@ -739,7 +739,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -762,8 +762,7 @@ where )); } - let check_poly = - Self::succinct_check(vk, commitments, *point, values, proof, opening_challenges); + let check_poly = Self::succinct_check(vk, commitments, *point, values, proof, sponge); if check_poly.is_none() { return Ok(false); @@ -790,7 +789,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -833,14 +832,8 @@ where vals.push(*v_i); } - let check_poly = Self::succinct_check( - vk, - comms.into_iter(), - *point, - vals.into_iter(), - p, - opening_challenges, - ); + let check_poly = + Self::succinct_check(vk, comms.into_iter(), *point, vals.into_iter(), p, sponge); if check_poly.is_none() { return Ok(false); @@ -876,7 +869,7 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> @@ -971,7 +964,7 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, + sponge, lc_states.iter(), rng, )?; @@ -987,7 +980,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -1060,7 +1053,7 @@ where &eqn_query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/lib.rs b/poly-commit/src/lib.rs index 33cbfb02..0c25c1fe 100644 --- a/poly-commit/src/lib.rs +++ b/poly-commit/src/lib.rs @@ -9,7 +9,7 @@ #![deny(renamed_and_removed_lints, stable_features, unused_allocation)] #![deny(unused_comparisons, bare_trait_objects, unused_must_use)] #![forbid(unsafe_code)] -#![doc = include_str!("../README.md")] +#![doc = include_str!("../../README.md")] #[allow(unused)] #[macro_use] @@ -101,8 +101,6 @@ pub mod sonic_pc; /// [pcdas]: https://eprint.iacr.org/2020/499 pub mod ipa_pc; -/// Defines the challenge strategies and challenge generator. -pub mod challenge; /// A multilinear polynomial commitment scheme that converts n-variate multilinear polynomial into /// n quotient UV polynomial. This scheme is based on hardness of the discrete logarithm /// in prime-order groups. 
Construction is detailed in [[XZZPD19]][xzzpd19] and [[ZGKPP18]][zgkpp18] @@ -111,7 +109,6 @@ pub mod challenge; /// [zgkpp]: https://ieeexplore.ieee.org/document/8418645 pub mod multilinear_pc; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; /// Multivariate polynomial commitment based on the construction in /// [[PST13]][pst] with batching and (optional) hiding property inspired @@ -225,7 +222,7 @@ pub trait PolynomialCommitment, S: Cryptographic labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result @@ -241,7 +238,7 @@ pub trait PolynomialCommitment, S: Cryptographic point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: Option<&mut dyn RngCore>, ) -> Result where @@ -261,7 +258,7 @@ pub trait PolynomialCommitment, S: Cryptographic labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result @@ -333,7 +330,7 @@ pub trait PolynomialCommitment, S: Cryptographic query_polys, query_comms, &point, - challenge_generator, + sponge, query_states, Some(rng), )?; @@ -366,7 +363,7 @@ pub trait PolynomialCommitment, S: Cryptographic query_set: &QuerySet, evaluations: &Evaluations, proof: &Self::BatchProof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -424,15 +421,7 @@ pub trait PolynomialCommitment, S: Cryptographic // Verify all proofs referring to the current point simultaneously // with a single call to `check` - result &= Self::check( - vk, - comms, - &point, - values, - &proof, - challenge_generator, - Some(rng), - )?; + result &= Self::check(vk, comms, &point, values, &proof, sponge, Some(rng))?; end_timer!(proof_time); } Ok(result) @@ -446,7 +435,7 @@ pub trait PolynomialCommitment, S: Cryptographic polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> @@ -472,7 +461,7 @@ pub trait PolynomialCommitment, S: Cryptographic polynomials, commitments, &poly_query_set, - challenge_generator, + sponge, states, rng, )?; @@ -491,7 +480,7 @@ pub trait PolynomialCommitment, S: Cryptographic eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -562,7 +551,7 @@ pub trait PolynomialCommitment, S: Cryptographic &poly_query_set, &poly_evals, proof, - challenge_generator, + sponge, rng, )?; if !pc_result { @@ -674,12 +663,9 @@ pub mod tests { PC: PolynomialCommitment, S: CryptographicSponge, { - let challenge_generators = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; + let sponge = sponge(); - for challenge_gen in challenge_generators { + for __ in 0..1 { let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); let max_degree = 100; let pp = PC::setup(max_degree, None, rng)?; @@ -741,7 +727,7 @@ pub mod tests { &polynomials, &comms, &query_set, - &mut (challenge_gen.clone()), + &mut 
(sponge.clone()), &states, Some(rng), )?; @@ -751,7 +737,7 @@ pub mod tests { &query_set, &values, &proof, - &mut (challenge_gen.clone()), + &mut (sponge.clone()), rng, )?; assert!(result, "proof was incorrect, Query set: {:#?}", query_set); @@ -782,12 +768,9 @@ pub mod tests { sponge, } = info; - let challenge_gens = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; + let sponge = sponge(); - for challenge_gen in challenge_gens { + for _ in 0..1 { let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); // If testing multivariate polynomials, make the max degree lower let max_degree = match num_vars { @@ -796,7 +779,7 @@ pub mod tests { }; let pp = PC::setup(max_degree, num_vars, rng)?; - for _ in 0..num_iters { + for __ in 0..num_iters { let supported_degree = supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); assert!( @@ -877,7 +860,7 @@ pub mod tests { &polynomials, &comms, &query_set, - &mut (challenge_gen.clone()), + &mut (sponge.clone()), &states, Some(rng), )?; @@ -887,7 +870,7 @@ pub mod tests { &query_set, &values, &proof, - &mut (challenge_gen.clone()), + &mut (sponge.clone()), rng, )?; if !result { @@ -927,12 +910,9 @@ pub mod tests { sponge, } = info; - let challenge_gens = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; + let sponge = sponge(); - for challenge_gen in challenge_gens { + for __ in 0..1 { let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); // If testing multivariate polynomials, make the max degree lower let max_degree = match num_vars { @@ -1056,7 +1036,7 @@ pub mod tests { &polynomials, &comms, &query_set, - &mut (challenge_gen.clone()), + &mut (sponge.clone()), &states, Some(rng), )?; @@ -1068,7 +1048,7 @@ pub mod tests { &query_set, &values, &proof, - &mut (challenge_gen.clone()), + &mut (sponge.clone()), rng, )?; if !result { diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index 2626adec..d0b8f90b 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -1,12 +1,15 @@ -use crate::utils::{inner_product, IOPTranscript, Matrix}; +use crate::utils::{inner_product, Matrix}; use crate::{ - Error, LabeledCommitment, LabeledPolynomial, PCCommitterKey, PCUniversalParams, PCVerifierKey, - PolynomialCommitment, + to_bytes, Error, LabeledCommitment, LabeledPolynomial, PCCommitterKey, PCUniversalParams, + PCVerifierKey, PolynomialCommitment, }; use ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme}; use ark_crypto_primitives::merkle_tree::MerkleTree; -use ark_crypto_primitives::{merkle_tree::Config, sponge::CryptographicSponge}; +use ark_crypto_primitives::{ + merkle_tree::Config, + sponge::{Absorb, CryptographicSponge}, +}; use ark_ff::PrimeField; use ark_poly::Polynomial; use ark_std::borrow::Borrow; @@ -30,7 +33,7 @@ use data_structures::*; pub use data_structures::LinCodePCProof; -use utils::{calculate_t, get_indices_from_transcript}; +use utils::{calculate_t, get_indices_from_sponge}; const FIELD_SIZE_ERROR: &str = "This field is not suitable for the proposed parameters"; @@ -154,13 +157,13 @@ where impl PolynomialCommitment for LinearCodePCS where L: LinearEncode, - F: PrimeField, + F: PrimeField + Absorb, P: Polynomial, S: CryptographicSponge, C: Config + 'static, Vec: Borrow<::Input>, H::Output: Into + Send, - C::Leaf: Sized + Clone + Default + Send, + C::Leaf: Sized + Clone + Default + Send + AsRef, H: CRHScheme + 'static, 
{ type UniversalParams = L::LinCodePCParams; @@ -268,15 +271,9 @@ where ck.two_to_one_hash_param(), )?; - // 3. Obtain the MT root and add it to the transcript. + // 3. Obtain the MT root let root = col_tree.root(); - let mut transcript: IOPTranscript = IOPTranscript::new(b"transcript"); - - transcript - .append_serializable_element(b"root", &root) - .map_err(|_| Error::TranscriptError)?; - // 4. The commitment is just the root, but since each commitment could be to a differently-sized polynomial, we also add some metadata. let commitment = LinCodePCCommitment { metadata: Metadata { @@ -302,7 +299,7 @@ where _labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - _challenge_generator: &mut crate::challenge::ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result @@ -317,7 +314,6 @@ where let commitment = labeled_commitment.commitment(); let n_rows = commitment.metadata.n_rows; let n_cols = commitment.metadata.n_cols; - let root = &commitment.root; // 1. Arrange the coefficients of the polynomial into a matrix, // and apply encoding to get `ext_mat`. @@ -339,37 +335,21 @@ where // 3. Generate vector `b` to left-multiply the matrix. let (_, b) = L::tensor(point, n_cols, n_rows); - let mut transcript = IOPTranscript::new(b"transcript"); - transcript - .append_serializable_element(b"root", root) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&to_bytes!(&commitment.root).map_err(|_| Error::TranscriptError)?); // If we are checking well-formedness, we need to compute the well-formedness proof (which is just r.M) and append it to the transcript. let well_formedness = if ck.check_well_formedness() { - let mut r = Vec::new(); - for _ in 0..n_rows { - r.push( - transcript - .get_and_append_challenge(b"r") - .map_err(|_| Error::TranscriptError)?, - ); - } + let r = sponge.squeeze_field_elements::(n_rows); let v = mat.row_mul(&r); - transcript - .append_serializable_element(b"v", &v) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&v); Some(v) } else { None }; let point_vec = L::point_to_vec(point.clone()); - for element in point_vec.iter() { - transcript - .append_serializable_element(b"point", element) - .map_err(|_| Error::TranscriptError)?; - } + sponge.absorb(&point_vec); proof_array.push(LinCodePCProof { // Compute the opening proof and append b.M to the transcript. 
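With `IOPTranscript` gone, the linear-code opening drives Fiat–Shamir through the sponge directly: absorb the Merkle root (via `to_bytes!`), squeeze the well-formedness challenges, absorb `r.M` and the evaluation point, then squeeze the column indices to query. A rough sketch of that last step, assuming a 64-bit target and a fixed 4-byte squeeze per index (the crate's actual helper, `get_indices_from_sponge`, derives the byte count from `n`):

```rust
use ark_crypto_primitives::sponge::CryptographicSponge;

/// Sketch of sponge-driven index sampling: squeeze a few bytes, feed them
/// back into the sponge (as the real helper does), and reduce into `[0, n)`.
fn sample_indices<S: CryptographicSponge>(sponge: &mut S, n: usize, t: usize) -> Vec<usize> {
    (0..t)
        .map(|_| {
            let bytes = sponge.squeeze_bytes(4);
            sponge.absorb(&bytes);
            let ind = bytes.iter().fold(0u64, |acc, &b| (acc << 8) + b as u64);
            (ind as usize) % n
        })
        .collect()
}
```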
@@ -380,7 +360,7 @@ where &mat, &ext_mat, &col_tree, - &mut transcript, + sponge, )?, well_formedness, }); @@ -395,7 +375,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof_array: &Self::Proof, - _challenge_generator: &mut crate::challenge::ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -415,31 +395,19 @@ where let root = &commitment.root; let t = calculate_t::(vk.sec_param(), vk.distance(), n_ext_cols)?; - let mut transcript = IOPTranscript::new(b"transcript"); - transcript - .append_serializable_element(b"root", &commitment.root) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&to_bytes!(&commitment.root).map_err(|_| Error::TranscriptError)?); let out = if vk.check_well_formedness() { if proof.well_formedness.is_none() { return Err(Error::InvalidCommitment); } let tmp = &proof.well_formedness.as_ref(); - let well_formedness = tmp.unwrap(); - let mut r = Vec::with_capacity(n_rows); - for _ in 0..n_rows { - r.push( - transcript - .get_and_append_challenge(b"r") - .map_err(|_| Error::TranscriptError)?, - ); - } + let v = tmp.unwrap(); + let r = sponge.squeeze_field_elements::(n_rows); // Upon sending `v` to the Verifier, add it to the sponge. The claim is that v = r.M. - transcript - .append_serializable_element(b"v", well_formedness) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&v); - (Some(well_formedness), Some(r)) + (Some(v), Some(r)) } else { (None, None) }; @@ -447,17 +415,11 @@ where // 1. Seed the transcript with the point and the recieved vector // TODO Consider removing the evaluation point from the transcript. let point_vec = L::point_to_vec(point.clone()); - for element in point_vec.iter() { - transcript - .append_serializable_element(b"point", element) - .map_err(|_| Error::TranscriptError)?; - } - transcript - .append_serializable_element(b"v", &proof.opening.v) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&point_vec); + sponge.absorb(&proof.opening.v); // 2. Ask random oracle for the `t` indices where the checks happen. - let indices = get_indices_from_transcript::(n_ext_cols, t, &mut transcript)?; + let indices = get_indices_from_sponge(n_ext_cols, t, sponge)?; // 3. Hash the received columns into leaf hashes. let col_hashes: Vec = proof @@ -545,7 +507,7 @@ fn create_merkle_tree( ) -> Result, Error> where C: Config, - C::Leaf: Default + Clone + Send, + C::Leaf: Default + Clone + Send + AsRef, { // pad the column hashes with zeroes let next_pow_of_two = leaves.len().next_power_of_two(); @@ -555,30 +517,28 @@ where .map_err(|_| Error::HashingError) } -fn generate_proof( +fn generate_proof( sec_param: usize, distance: (usize, usize), b: &[F], mat: &Matrix, ext_mat: &Matrix, col_tree: &MerkleTree, - transcript: &mut IOPTranscript, + sponge: &mut S, ) -> Result, Error> where - F: PrimeField, + F: PrimeField + Absorb, C: Config, + S: CryptographicSponge, { let t = calculate_t::(sec_param, distance, ext_mat.m)?; // 1. left-multiply the matrix by `b`. let v = mat.row_mul(b); - - transcript - .append_serializable_element(b"v", &v) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&v); // 2. Generate t column indices to test the linear combination on. - let indices = get_indices_from_transcript(ext_mat.m, t, transcript)?; + let indices = get_indices_from_sponge(ext_mat.m, t, sponge)?; // 3. Compute Merkle tree paths for the requested columns. 
let mut queried_columns = Vec::with_capacity(t); diff --git a/poly-commit/src/linear_codes/multilinear_brakedown/tests.rs b/poly-commit/src/linear_codes/multilinear_brakedown/tests.rs index 8af84e5b..e4be256f 100644 --- a/poly-commit/src/linear_codes/multilinear_brakedown/tests.rs +++ b/poly-commit/src/linear_codes/multilinear_brakedown/tests.rs @@ -4,7 +4,6 @@ mod tests { use crate::linear_codes::LinearCodePCS; use crate::utils::test_sponge; use crate::{ - challenge::ChallengeGenerator, linear_codes::{utils::*, BrakedownPCParams, MultilinearBrakedown, PolynomialCommitment}, LabeledPolynomial, }; @@ -114,22 +113,19 @@ mod tests { ); let mut test_sponge = test_sponge::(); - let (c, rands) = BrakedownPCS::::commit(&ck, &[labeled_poly.clone()], None).unwrap(); + let (c, states) = BrakedownPCS::::commit(&ck, &[labeled_poly.clone()], None).unwrap(); let point = rand_point(Some(num_vars), rand_chacha); let value = labeled_poly.evaluate(&point); - let mut challenge_generator: ChallengeGenerator> = - ChallengeGenerator::new_univariate(&mut test_sponge); - let proof = BrakedownPCS::::open( &ck, &[labeled_poly], &c, &point, - &mut (challenge_generator.clone()), - &rands, + &mut (test_sponge.clone()), + &states, None, ) .unwrap(); @@ -139,7 +135,7 @@ mod tests { &point, [value], &proof, - &mut challenge_generator, + &mut test_sponge, None ) .unwrap()); diff --git a/poly-commit/src/linear_codes/utils.rs b/poly-commit/src/linear_codes/utils.rs index 2b344d78..2b1d8e73 100644 --- a/poly-commit/src/linear_codes/utils.rs +++ b/poly-commit/src/linear_codes/utils.rs @@ -1,5 +1,5 @@ -use crate::utils::IOPTranscript; use crate::{utils::ceil_div, Error}; +use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::{Field, PrimeField}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::string::ToString; @@ -111,18 +111,16 @@ pub(crate) fn get_num_bytes(n: usize) -> usize { /// Generate `t` (not necessarily distinct) random points in `[0, n)` /// using the current state of the `transcript`. -pub(crate) fn get_indices_from_transcript( +pub(crate) fn get_indices_from_sponge( n: usize, t: usize, - transcript: &mut IOPTranscript, + sponge: &mut S, ) -> Result, Error> { let bytes_to_squeeze = get_num_bytes(n); let mut indices = Vec::with_capacity(t); for _ in 0..t { - let mut bytes: Vec = vec![0; bytes_to_squeeze]; - transcript - .get_and_append_byte_challenge(b"i", &mut bytes) - .map_err(|_| Error::TranscriptError)?; + let bytes = sponge.squeeze_bytes(bytes_to_squeeze); + sponge.absorb(&bytes); // get the usize from Vec: let ind = bytes.iter().fold(0, |acc, &x| (acc << 8) + x as usize); diff --git a/poly-commit/src/marlin/marlin_pc/mod.rs b/poly-commit/src/marlin/marlin_pc/mod.rs index 1b45bff7..7fbfba07 100644 --- a/poly-commit/src/marlin/marlin_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pc/mod.rs @@ -12,7 +12,6 @@ use ark_std::rand::RngCore; use ark_std::{marker::PhantomData, ops::Div, vec}; mod data_structures; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; pub use data_structures::*; @@ -251,7 +250,7 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result @@ -283,7 +282,7 @@ where )?; // compute next challenges challenge^j and challenge^{j+1}. 
- let challenge_j = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; assert_eq!(degree_bound.is_some(), rand.shifted_rand.is_some()); @@ -299,7 +298,7 @@ where *point, &shifted_rand, )?; - let challenge_j_1 = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j_1 = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let shifted_witness = shift_polynomial(ck, &witness, degree_bound); @@ -347,7 +346,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -358,7 +357,7 @@ where Marlin::::accumulate_commitments_and_values( commitments, values, - opening_challenges, + sponge, Some(vk), )?; let combined_comm = kzg10::Commitment(combined_comm.into()); @@ -373,7 +372,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -384,7 +383,7 @@ where commitments, query_set, values, - opening_challenges, + sponge, Some(vk), )?; assert_eq!(proof.len(), combined_queries.len()); @@ -407,7 +406,7 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> @@ -422,7 +421,7 @@ where polynomials, commitments, query_set, - opening_challenges, + sponge, states, rng, ) @@ -437,7 +436,7 @@ where query_set: &QuerySet, evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -450,7 +449,7 @@ where query_set, evaluations, proof, - opening_challenges, + sponge, rng, ) } @@ -462,7 +461,7 @@ where labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result>, Error> @@ -517,7 +516,7 @@ where query_polys, query_comms, point, - opening_challenges, + sponge, query_states, Some(rng), )?; diff --git a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs index 93d5c0c6..a72d9199 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs @@ -20,7 +20,6 @@ pub use data_structures::*; mod combinations; use combinations::*; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; #[cfg(feature = "parallel")] use rayon::prelude::*; @@ -440,7 +439,7 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &P::Point, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result @@ -456,7 +455,7 @@ where Self::check_degrees_and_bounds(ck.supported_degree, &polynomial)?; // compute challenge^j and challenge^{j+1}. 
- let challenge_j = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; p += (challenge_j, polynomial.polynomial()); r += (challenge_j, state); @@ -538,7 +537,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -550,7 +549,7 @@ where Marlin::::accumulate_commitments_and_values( commitments, values, - opening_challenges, + sponge, None, )?; // Compute both sides of the pairing equation @@ -582,7 +581,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -593,7 +592,7 @@ where commitments, query_set, values, - opening_challenges, + sponge, None, )?; let check_time = @@ -660,7 +659,7 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> @@ -675,7 +674,7 @@ where polynomials, commitments, query_set, - opening_challenges, + sponge, states, rng, ) @@ -690,7 +689,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -703,7 +702,7 @@ where eqn_query_set, eqn_evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/marlin/mod.rs b/poly-commit/src/marlin/mod.rs index e0b026d2..d7e7f5a1 100644 --- a/poly-commit/src/marlin/mod.rs +++ b/poly-commit/src/marlin/mod.rs @@ -1,4 +1,4 @@ -use crate::{challenge::ChallengeGenerator, CHALLENGE_SIZE}; +use crate::CHALLENGE_SIZE; use crate::{kzg10, Error}; use crate::{BTreeMap, BTreeSet, Debug, RngCore, String, ToString, Vec}; use crate::{BatchLCProof, LabeledPolynomial, LinearCombination}; @@ -110,7 +110,7 @@ where fn accumulate_commitments_and_values<'a>( commitments: impl IntoIterator>>, values: impl IntoIterator, - challenge_gen: &mut ChallengeGenerator, + sponge: &mut S, vk: Option<&marlin_pc::VerifierKey>, ) -> Result<(E::G1, E::ScalarField), Error> { let acc_time = start_timer!(|| "Accumulating commitments and values"); @@ -121,13 +121,14 @@ where let commitment = labeled_commitment.commitment(); assert_eq!(degree_bound.is_some(), commitment.shifted_comm.is_some()); - let challenge_i = challenge_gen.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_i = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; combined_comm += &commitment.comm.0.mul(challenge_i); combined_value += &(value * &challenge_i); if let Some(degree_bound) = degree_bound { - let challenge_i_1 = challenge_gen.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_i_1: E::ScalarField = + sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let shifted_comm = commitment.shifted_comm.as_ref().unwrap().0.into_group(); @@ -152,7 +153,7 @@ where commitments: impl IntoIterator>>, query_set: &QuerySet, evaluations: &Evaluations, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, vk: Option<&marlin_pc::VerifierKey>, ) -> Result<(Vec>, Vec, Vec), Error> where @@ -199,7 +200,7 @@ where let (c, v) = Self::accumulate_commitments_and_values( comms_to_combine, values_to_combine, - opening_challenges, + sponge, vk, )?; end_timer!(lc_time); 
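In the Marlin accumulation path, each per-commitment challenge is now one short squeeze (`squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]`) rather than a call into the challenge generator; the folding itself is unchanged. A simplified sketch over a generic `CurveGroup`, using full-size challenges and ignoring degree bounds (the function name and signature are illustrative only):

```rust
use ark_crypto_primitives::sponge::CryptographicSponge;
use ark_ec::CurveGroup;
use ark_ff::Zero;

/// Simplified folding of commitments and claimed values with sponge-derived
/// challenges; shifted commitments and CHALLENGE_SIZE truncation are omitted.
fn fold_commitments_and_values<G: CurveGroup, S: CryptographicSponge>(
    comms: &[G],
    values: &[G::ScalarField],
    sponge: &mut S,
) -> (G, G::ScalarField) {
    let mut combined_comm = G::zero();
    let mut combined_value = G::ScalarField::zero();
    for (comm, value) in comms.iter().zip(values) {
        let challenge: G::ScalarField = sponge.squeeze_field_elements(1)[0];
        combined_comm += *comm * challenge;
        combined_value += *value * challenge;
    }
    (combined_comm, combined_value)
}
```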
@@ -227,7 +228,7 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Error> @@ -308,7 +309,7 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, + sponge, lc_states.iter(), rng, )?; @@ -323,7 +324,7 @@ where query_set: &QuerySet, evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -404,7 +405,7 @@ where &query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/sonic_pc/mod.rs b/poly-commit/src/sonic_pc/mod.rs index b1d7f28b..caf9b79c 100644 --- a/poly-commit/src/sonic_pc/mod.rs +++ b/poly-commit/src/sonic_pc/mod.rs @@ -12,7 +12,6 @@ use ark_std::rand::RngCore; use ark_std::{convert::TryInto, marker::PhantomData, ops::Div, ops::Mul, vec}; mod data_structures; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; pub use data_structures::*; @@ -47,12 +46,12 @@ where point: P::Point, values: impl IntoIterator, proof: &kzg10::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, randomizer: Option, ) { let acc_time = start_timer!(|| "Accumulating elements"); - let mut curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; // Keeps track of running combination of values let mut combined_values = E::ScalarField::zero(); @@ -73,7 +72,7 @@ where // Accumulate values in the BTreeMap *combined_comms.entry(degree_bound).or_insert(E::G1::zero()) += &comm_with_challenge; - curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } // Push expected results into list of elems. 
Power will be the negative of the expected power @@ -345,7 +344,7 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result @@ -357,7 +356,7 @@ where let mut combined_polynomial = P::zero(); let mut combined_rand = kzg10::Randomness::empty(); - let mut curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; for (polynomial, state) in labeled_polynomials.into_iter().zip(states) { let enforced_degree_bounds: Option<&[usize]> = ck @@ -374,7 +373,7 @@ where combined_polynomial += (curr_challenge, polynomial.polynomial()); combined_rand += (curr_challenge, state); - curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } let proof_time = start_timer!(|| "Creating proof for polynomials"); @@ -390,7 +389,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -410,7 +409,7 @@ where *point, values, proof, - opening_challenges, + sponge, None, ); @@ -430,7 +429,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -481,7 +480,7 @@ where *point, values_to_combine.into_iter(), p, - opening_challenges, + sponge, Some(randomizer), ); @@ -502,7 +501,7 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> @@ -581,7 +580,7 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, + sponge, lc_states.iter(), rng, )?; @@ -597,7 +596,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -666,7 +665,7 @@ where &eqn_query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/streaming_kzg/data_structures.rs b/poly-commit/src/streaming_kzg/data_structures.rs index 7adaf005..c8b19c83 100644 --- a/poly-commit/src/streaming_kzg/data_structures.rs +++ b/poly-commit/src/streaming_kzg/data_structures.rs @@ -141,7 +141,7 @@ where /// Stream implementation of foleded polynomial. #[derive(Clone, Copy)] -pub struct FoldedPolynomialStream<'a, F, S>(FoldedPolynomialTree<'a, F, S>, usize); +pub struct FoldedPolynomialStream<'a, F, S>(FoldedPolynomialTree<'a, F, S>); /// Iterator implementation of foleded polynomial. pub struct FoldedPolynomialStreamIter<'a, F, I> { challenges: &'a [F], @@ -158,8 +158,7 @@ where /// Initialize a new folded polynomial stream. 
pub fn new(coefficients: &'a S, challenges: &'a [F]) -> Self { let tree = FoldedPolynomialTree::new(coefficients, challenges); - let len = challenges.len(); - Self(tree, len) + Self(tree) } } @@ -241,7 +240,7 @@ fn test_folded_polynomial() { let challenges = vec![F::one(), two]; let coefficients_stream = coefficients.as_slice(); let foldstream = FoldedPolynomialTree::new(&coefficients_stream, challenges.as_slice()); - let fold_stream = FoldedPolynomialStream(foldstream, 2); + let fold_stream = FoldedPolynomialStream(foldstream); assert_eq!(fold_stream.len(), 1); assert_eq!( fold_stream.iter().next(), @@ -253,7 +252,7 @@ fn test_folded_polynomial() { let challenges = vec![F::one(); 4]; let coefficients_stream = coefficients.as_slice(); let foldstream = FoldedPolynomialTree::new(&coefficients_stream, challenges.as_slice()); - let fold_stream = FoldedPolynomialStream(foldstream, 4).iter(); + let fold_stream = FoldedPolynomialStream(foldstream).iter(); assert_eq!(fold_stream.last(), Some(coefficients.iter().sum())); } diff --git a/poly-commit/src/utils.rs b/poly-commit/src/utils.rs index 2b2ed34a..3b2a336f 100644 --- a/poly-commit/src/utils.rs +++ b/poly-commit/src/utils.rs @@ -1,5 +1,3 @@ -use core::marker::PhantomData; - #[cfg(not(feature = "std"))] use num_traits::Float; @@ -9,12 +7,9 @@ use rayon::{ prelude::IndexedParallelIterator, }; -use ark_ff::{Field, PrimeField}; +use ark_ff::Field; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::vec::Vec; -use merlin::Transcript; - -use crate::Error; /// Takes as input a struct, and converts them to a series of bytes. All traits /// that implement `CanonicalSerialize` can be automatically converted to bytes @@ -160,86 +155,6 @@ pub(crate) fn inner_product(v1: &[F], v2: &[F]) -> F { .sum() } -/// The following struct is taken from jellyfish repository. Once they change -/// their dependency on `crypto-primitive`, we use their crate instead of -/// a copy-paste. We needed the newer `crypto-primitive` for serializing. -#[derive(Clone)] -pub(crate) struct IOPTranscript { - transcript: Transcript, - is_empty: bool, - #[doc(hidden)] - phantom: PhantomData, -} - -// TODO: merge this with jf_plonk::transcript -impl IOPTranscript { - /// Create a new IOP transcript. - pub(crate) fn new(label: &'static [u8]) -> Self { - Self { - transcript: Transcript::new(label), - is_empty: true, - phantom: PhantomData, - } - } - - /// Append the message to the transcript. - pub(crate) fn append_message(&mut self, label: &'static [u8], msg: &[u8]) -> Result<(), Error> { - self.transcript.append_message(label, msg); - self.is_empty = false; - Ok(()) - } - - /// Append the message to the transcript. - pub(crate) fn append_serializable_element( - &mut self, - label: &'static [u8], - group_elem: &S, - ) -> Result<(), Error> { - self.append_message( - label, - &to_bytes!(group_elem).map_err(|_| Error::TranscriptError)?, - ) - } - - /// Generate the challenge from the current transcript - /// and append it to the transcript. - /// - /// The output field element is statistical uniform as long - /// as the field has a size less than 2^384. 
- pub(crate) fn get_and_append_challenge(&mut self, label: &'static [u8]) -> Result { - // we need to reject when transcript is empty - if self.is_empty { - return Err(Error::TranscriptError); - } - - let mut buf = [0u8; 64]; - self.transcript.challenge_bytes(label, &mut buf); - let challenge = F::from_le_bytes_mod_order(&buf); - self.append_serializable_element(label, &challenge)?; - Ok(challenge) - } - - /// Generate the challenge from the current transcript - /// and append it to the transcript. - /// - /// Without exposing the internal field `transcript`, - /// this is a wrapper around getting bytes as opposed to field elements. - pub(crate) fn get_and_append_byte_challenge( - &mut self, - label: &'static [u8], - dest: &mut [u8], - ) -> Result<(), Error> { - // we need to reject when transcript is empty - if self.is_empty { - return Err(Error::TranscriptError); - } - - self.transcript.challenge_bytes(label, dest); - self.append_message(label, dest)?; - Ok(()) - } -} - #[inline] #[cfg(test)] pub(crate) fn to_field(v: Vec) -> Vec { @@ -249,6 +164,8 @@ pub(crate) fn to_field(v: Vec) -> Vec { // TODO: replace by https://github.com/arkworks-rs/crypto-primitives/issues/112. #[cfg(test)] use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; +#[cfg(test)] +use ark_ff::PrimeField; #[cfg(test)] pub(crate) fn test_sponge() -> PoseidonSponge {