From 044d74a85791919c1e49f7e812de046bf2555507 Mon Sep 17 00:00:00 2001
From: Hossein Moghaddas
Date: Thu, 18 Jan 2024 14:19:52 +0100
Subject: [PATCH] Delete `IOPTranscript`, update with master (#51) (aka Ligero++)

* Add the trait bounds
* Add `CommitmentState`
* Update benches for the new type
* Fix the name of local variable
* Merge `PCCommitmentState` with `PCRandomness`
* Update `README.md`
* Fix a bug
* Simplify `hash_column`
* Delete comments
* Add `CommitmentState`
* Make `fmt` happy
* Refactor, remove `hash_columns`
* Rename all params
* Maybe `empty` not return `Self`
* Make `empty` return `Self`
* Rename `rand` to `state`
* Add type `Randomness`
* Ligero+++ (#46)
* conversion to `into_iter` is a no-op
* remove explicit casts to vecs
* rename to use singular of `labeled_commitment`
* simplify the iterators even further by zipping two iters
* Apply suggestions from code review
* Fix tests: sponge config for univariate ligero
* Rename nonnative to emulated, as in `r1cs-std` (#137)
* Rename nonnative to emulated, as in `r1cs-std`
* Run `fmt`
* Temporarily change `Cargo.toml`
* Revert `Cargo.toml`
* Refactor `FoldedPolynomialStream` partially
* Substitute `ChallengeGenerator` by the generic sponge (#139)
* Rename nonnative to emulated, as in `r1cs-std`
* Run `fmt`
* Temporarily change `Cargo.toml`
* Substitute `ChallengeGenerator` with the generic sponge
* Run `fmt`
* Remove the extra file
* Update modules
* Delete the unnecessary loop
* Revert `Cargo.toml`
* Refactor `FoldedPolynomialStream` partially
* Update README
* Make the diff more readable
* Bring the whitespace back
* Make diff more readable, 2
* Fix according to breaking changes in `ark-ec` (#141)
* Fix for KZG10
* Fix the breaking changes in `ark-ec`
* Remove the extra loop
* Fix the loop range
* re-use the preprocessing table
* also re-use the preprocessing table for multilinear_pc
---------
Co-authored-by: mmagician
* Auxiliary opening data (#134)
* Add the trait bounds
* Add `CommitmentState`
* Update benches for the new type
* Fix the name of local variable
* Merge `PCCommitmentState` with `PCRandomness`
* Update `README.md`
* Fix a bug
* Put `Randomness` in `CommitmentState`
* Add a comment
* Remove the extra loop
* Update the comment for `CommitmentState`
Co-authored-by: Marcin
* cargo fmt
---------
Co-authored-by: Marcin
* `batch_mul_with_preprocessing` no longer takes `self` as argument (#142)
* batch_mul_with_preprocessing no longer takes `self` as argument
* Apply suggestions from code review
Co-authored-by: Pratyush Mishra
* fix variable name
---------
Co-authored-by: Pratyush Mishra
* Remove `ChallengeGenerator` and `IOPTranscript` for Ligero (#57)
* Squash and merge `delete-chalgen` onto here
* Fix Ligero for `ChallengeGenerator` and `AsRef` for Merkle tree
* Fix tests: sponge config for univariate ligero
* Delete `IOPTranscript` for Ligero (#54)
* Replace the `IOPTranscript` with `CryptographicSponge`
* Delete extra comments
* Run fmt
* Fix tests: sponge config for univariate ligero
* Delete TODOs and do not absorb what you just squeezed
* Fix unused import
* Revert "Fix unused import"
This reverts commit e85af9086180b486b71fc41add00be88ffaf147f.
* Try to fix
* Remove the extra loop
---------
Co-authored-by: mmagician
Co-authored-by: Pratyush Mishra
---
 README.md                                     | 16 +-
 bench-templates/src/lib.rs                    | 25 +-
 poly-commit/src/challenge.rs                  | 61 --
 poly-commit/src/constraints.rs                | 20 +-
 poly-commit/src/data_structures.rs            | 12 +-
 poly-commit/src/ipa_pc/data_structures.rs     | 3 +-
 poly-commit/src/ipa_pc/mod.rs                 | 89 +--
 poly-commit/src/kzg10/data_structures.rs      | 3 +-
 poly-commit/src/kzg10/mod.rs                  | 49 +-
 poly-commit/src/lib.rs                        | 723 +++++++++---------
 .../src/linear_codes/data_structures.rs       | 27 +-
 poly-commit/src/linear_codes/ligero.rs        | 18 +-
 poly-commit/src/linear_codes/mod.rs           | 288 +++----
 .../linear_codes/multilinear_ligero/mod.rs    | 8 +-
 .../linear_codes/multilinear_ligero/tests.rs  | 28 +-
 .../src/linear_codes/univariate_ligero/mod.rs | 8 +-
 .../linear_codes/univariate_ligero/tests.rs   | 27 +-
 poly-commit/src/linear_codes/utils.rs         | 31 +-
 .../src/marlin/marlin_pc/data_structures.rs   | 7 +-
 poly-commit/src/marlin/marlin_pc/mod.rs       | 65 +-
 .../marlin/marlin_pst13_pc/data_structures.rs | 5 +-
 poly-commit/src/marlin/marlin_pst13_pc/mod.rs | 73 +-
 poly-commit/src/marlin/mod.rs                 | 41 +-
 poly-commit/src/multilinear_pc/mod.rs         | 48 +-
 poly-commit/src/sonic_pc/mod.rs               | 65 +-
 .../src/streaming_kzg/data_structures.rs      | 9 +-
 poly-commit/src/streaming_kzg/time.rs         | 10 +-
 poly-commit/src/utils.rs                      | 96 +--
 28 files changed, 776 insertions(+), 1079 deletions(-)
 delete mode 100644 poly-commit/src/challenge.rs

diff --git a/README.md b/README.md
index 518d63bd..e9c1e50e 100644
--- a/README.md
+++ b/README.md
@@ -57,7 +57,7 @@ This trait defines the interface for a polynomial commitment scheme. It is recom
 
 // In this example, we will commit to a single polynomial, open it first at one point, and then batched at two points, and finally verify the proofs.
 // We will use the KZG10 polynomial commitment scheme, following the approach from Marlin.
-use ark_poly_commit::{Polynomial, marlin_pc::MarlinKZG10, LabeledPolynomial, PolynomialCommitment, QuerySet, Evaluations, challenge::ChallengeGenerator};
+use ark_poly_commit::{Polynomial, marlin_pc::MarlinKZG10, LabeledPolynomial, PolynomialCommitment, QuerySet, Evaluations};
 use ark_bls12_377::Bls12_377;
 use ark_crypto_primitives::sponge::poseidon::{PoseidonSponge, PoseidonConfig};
 use ark_crypto_primitives::sponge::CryptographicSponge;
@@ -128,17 +128,15 @@ let (ck, vk) = PCS::trim(&pp, degree, 2, Some(&[degree])).unwrap();
 
 // 3. PolynomialCommitment::commit
 // The prover commits to the polynomial using their committer key `ck`.
-let (comms, rands) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap();
-
-let challenge_generator: ChallengeGenerator<::ScalarField, Sponge_Bls12_377> = ChallengeGenerator::new_univariate(&mut test_sponge);
+let (comms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap();
 
 // 4a. PolynomialCommitment::open
 // Opening proof at a single point.
-let proof_single = PCS::open(&ck, [&labeled_poly], &comms, &point_1, &mut (challenge_generator.clone()), &rands, None).unwrap();
+let proof_single = PCS::open(&ck, [&labeled_poly], &comms, &point_1, &mut (test_sponge.clone()), &states, None).unwrap();
 
 // 5a. PolynomialCommitment::check
 // Verifying the proof at a single point, given the commitment, the point, the claimed evaluation, and the proof.
-assert!(PCS::check(&vk, &comms, &point_1, [secret_poly.evaluate(&point_1)], &proof_single, &mut (challenge_generator.clone()), Some(rng)).unwrap()); +assert!(PCS::check(&vk, &comms, &point_1, [secret_poly.evaluate(&point_1)], &proof_single, &mut (test_sponge.clone()), Some(rng)).unwrap()); let mut query_set = QuerySet::new(); let mut values = Evaluations::new(); @@ -155,8 +153,8 @@ let proof_batched = PCS::batch_open( [&labeled_poly], &comms, &query_set, - &mut (challenge_generator.clone()), - &rands, + &mut (test_sponge.clone()), + &states, Some(rng), ).unwrap(); @@ -167,7 +165,7 @@ assert!(PCS::batch_check( &query_set, &values, &proof_batched, - &mut (challenge_generator.clone()), + &mut (test_sponge.clone()), rng, ).unwrap()); ``` diff --git a/bench-templates/src/lib.rs b/bench-templates/src/lib.rs index 9500eb9c..1bc1fdbf 100644 --- a/bench-templates/src/lib.rs +++ b/bench-templates/src/lib.rs @@ -17,9 +17,7 @@ use rand_chacha::{ use core::time::Duration; use std::{borrow::Borrow, marker::PhantomData, time::Instant}; -use ark_poly_commit::{ - challenge::ChallengeGenerator, to_bytes, LabeledPolynomial, PolynomialCommitment, -}; +use ark_poly_commit::{to_bytes, LabeledPolynomial, PolynomialCommitment}; pub use criterion::*; pub use paste::paste; @@ -131,7 +129,7 @@ where let labeled_poly = LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); - let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); + let (coms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); let point = rand_point(num_vars, rng); let start = Instant::now(); @@ -140,8 +138,8 @@ where [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), - &randomness, + &mut test_sponge(), + &states, Some(rng), ) .unwrap(); @@ -165,7 +163,7 @@ where let labeled_poly = LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); - let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); + let (coms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); let point = P::Point::rand(rng); let proofs = PCS::open( @@ -173,8 +171,8 @@ where [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), - &randomness, + &mut test_sponge(), + &states, Some(rng), ) .unwrap(); @@ -202,16 +200,17 @@ where let labeled_poly = LabeledPolynomial::new("test".to_string(), rand_poly(num_vars, rng), None, None); - let (coms, randomness) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); + let (coms, states) = PCS::commit(&ck, [&labeled_poly], Some(rng)).unwrap(); let point = rand_point(num_vars, rng); + let claimed_eval = labeled_poly.evaluate(&point); let proof = PCS::open( &ck, [&labeled_poly], &coms, &point, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), - &randomness, + &mut test_sponge(), + &states, Some(rng), ) .unwrap(); @@ -223,7 +222,7 @@ where &point, [claimed_eval], &proof, - &mut ChallengeGenerator::new_univariate(&mut test_sponge()), + &mut test_sponge(), None, ) .unwrap(); diff --git a/poly-commit/src/challenge.rs b/poly-commit/src/challenge.rs deleted file mode 100644 index 23b3c9d1..00000000 --- a/poly-commit/src/challenge.rs +++ /dev/null @@ -1,61 +0,0 @@ -use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; -use ark_ff::PrimeField; - -/// `ChallengeGenerator` generates opening challenges using multivariate or univariate strategy. 
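Both the README example and the benches above now pass a Poseidon sponge (`test_sponge`) where a `ChallengeGenerator` used to go; the helper itself is constructed outside the hunks shown. A minimal sketch of what such a test-only helper might look like — the round numbers, MDS matrix, and round constants below are illustrative placeholders, not vetted Poseidon parameters and not part of this patch:

```rust
use ark_crypto_primitives::sponge::poseidon::{PoseidonConfig, PoseidonSponge};
use ark_crypto_primitives::sponge::CryptographicSponge;
use ark_ff::{One, PrimeField, Zero};
use ark_std::{test_rng, UniformRand};

/// Build a Poseidon sponge over `F` with toy parameters, suitable only for tests.
fn test_sponge<F: PrimeField>() -> PoseidonSponge<F> {
    let full_rounds = 8;
    let partial_rounds = 31;
    let alpha = 17;

    // 3x3 "MDS" matrix and random round constants: placeholders, not a vetted instance.
    let mds = vec![
        vec![F::one(), F::zero(), F::one()],
        vec![F::one(), F::one(), F::zero()],
        vec![F::zero(), F::one(), F::one()],
    ];
    let mut rng = test_rng();
    let mut ark = Vec::new();
    for _ in 0..(full_rounds + partial_rounds) {
        let mut row = Vec::new();
        for _ in 0..3 {
            row.push(F::rand(&mut rng));
        }
        ark.push(row);
    }

    // rate = 2, capacity = 1
    let config = PoseidonConfig::new(full_rounds, partial_rounds, alpha, mds, ark, 2, 1);
    PoseidonSponge::new(&config)
}
```

In the README snippet, such a sponge is created once and then cloned into `open`, `check`, and the batched variants, so prover and verifier stay in sync.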
-/// For multivariate strategy, each challenge is freshly squeezed from a sponge. -/// For univariate strategy, each challenge is a power of one squeezed element from sponge. -/// -/// Note that mutable reference cannot be cloned. -#[derive(Clone)] -pub enum ChallengeGenerator { - /// Each challenge is freshly squeezed from a sponge. - Multivariate(S), - /// Each challenge is a power of one squeezed element from sponge. - /// - /// `Univariate(generator, next_element)` - Univariate(F, F), -} - -impl ChallengeGenerator { - /// Returns a challenge generator with multivariate strategy. Each challenge is freshly squeezed - /// from a sponge. - pub fn new_multivariate(sponge: S) -> Self { - Self::Multivariate(sponge) - } - - /// Returns a challenge generator with univariate strategy. Each challenge is a power of one - /// squeezed element from sponge. - pub fn new_univariate(sponge: &mut S) -> Self { - let gen = sponge.squeeze_field_elements(1)[0]; - Self::Univariate(gen, gen) - } - - /// Returns a challenge of size `size`. - /// * If `self == Self::Multivariate(...)`, then this squeezes out a challenge of size `size`. - /// * If `self == Self::Univariate(...)`, then this ignores the `size` argument and simply squeezes out - /// the next field element. - pub fn try_next_challenge_of_size(&mut self, size: FieldElementSize) -> F { - match self { - // multivariate (full) - Self::Multivariate(sponge) => sponge.squeeze_field_elements_with_sizes(&[size])[0], - // univariate - Self::Univariate(gen, next) => { - let result = next.clone(); - *next *= *gen; - result - } - } - } - /// Returns the next challenge generated. - pub fn next_challenge(&mut self) -> F { - self.try_next_challenge_of_size(FieldElementSize::Full) - } - - /// Returns the sponge state if `self` is multivariate. Returns `None` otherwise. - pub fn into_sponge(self) -> Option { - match self { - Self::Multivariate(s) => Some(s), - _ => None, - } - } -} diff --git a/poly-commit/src/constraints.rs b/poly-commit/src/constraints.rs index e6fb5d4f..1300509a 100644 --- a/poly-commit/src/constraints.rs +++ b/poly-commit/src/constraints.rs @@ -5,7 +5,7 @@ use crate::{ use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::PrimeField; use ark_poly::Polynomial; -use ark_r1cs_std::fields::nonnative::NonNativeFieldVar; +use ark_r1cs_std::fields::emulated_fp::EmulatedFpVar; use ark_r1cs_std::{fields::fp::FpVar, prelude::*}; use ark_relations::r1cs::{ConstraintSystemRef, Namespace, Result as R1CSResult, SynthesisError}; use ark_std::{borrow::Borrow, cmp::Eq, cmp::PartialEq, hash::Hash, marker::Sized}; @@ -24,8 +24,8 @@ pub enum LinearCombinationCoeffVar), + /// Other coefficient, represented as a "emulated" field element. + Var(EmulatedFpVar), } /// An allocated version of `LinearCombination`. @@ -60,7 +60,7 @@ impl let (f, lc_term) = term; let fg = - NonNativeFieldVar::new_variable(ark_relations::ns!(cs, "term"), || Ok(f), mode) + EmulatedFpVar::new_variable(ark_relations::ns!(cs, "term"), || Ok(f), mode) .unwrap(); (LinearCombinationCoeffVar::Var(fg), lc_term.clone()) @@ -79,12 +79,12 @@ impl pub struct PCCheckRandomDataVar { /// Opening challenges. /// The prover and the verifier MUST use the same opening challenges. - pub opening_challenges: Vec>, + pub opening_challenges: Vec>, /// Bit representations of the opening challenges. pub opening_challenges_bits: Vec>>, /// Batching random numbers. /// The verifier can choose these numbers freely, as long as they are random. 
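The deleted `ChallengeGenerator` offered two strategies: a fresh squeeze per challenge (multivariate) or powers of a single squeezed element (univariate). With this patch, call sites squeeze each opening challenge directly from the sponge, as the `ipa_pc` hunks below do with `squeeze_field_elements_with_sizes`. A minimal sketch of that pattern, assuming a truncated challenge size (the crate defines its own `CHALLENGE_SIZE` constant):

```rust
use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize};
use ark_ff::PrimeField;

/// Draw `n` opening challenges, each freshly squeezed from the sponge
/// (replacing the old univariate "powers of one challenge" strategy).
fn squeeze_challenges<F: PrimeField, S: CryptographicSponge>(sponge: &mut S, n: usize) -> Vec<F> {
    // Assumed size for illustration; the library uses its own constant.
    let size = FieldElementSize::Truncated(128);
    (0..n)
        .map(|_| sponge.squeeze_field_elements_with_sizes::<F>(&[size])[0])
        .collect()
}
```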
- pub batching_rands: Vec>, + pub batching_rands: Vec>, /// Bit representations of the batching random numbers. pub batching_rands_bits: Vec>>, } @@ -172,7 +172,7 @@ pub struct LabeledPointVar { /// MUST be a unique identifier in a query set. pub name: String, /// The point value. - pub value: NonNativeFieldVar, + pub value: EmulatedFpVar, } /// An allocated version of `QuerySet`. @@ -184,7 +184,7 @@ pub struct QuerySetVar( /// An allocated version of `Evaluations`. #[derive(Clone)] pub struct EvaluationsVar( - pub HashMap, NonNativeFieldVar>, + pub HashMap, EmulatedFpVar>, ); impl EvaluationsVar { @@ -192,8 +192,8 @@ impl EvaluationsVar, - ) -> Result, SynthesisError> { + point: &EmulatedFpVar, + ) -> Result, SynthesisError> { let key = LabeledPointVar:: { name: String::from(lc_string), value: point.clone(), diff --git a/poly-commit/src/data_structures.rs b/poly-commit/src/data_structures.rs index 4a5eec21..2b942ee1 100644 --- a/poly-commit/src/data_structures.rs +++ b/poly-commit/src/data_structures.rs @@ -70,9 +70,12 @@ pub trait PCPreparedCommitment: Clone { fn prepare(comm: &UNPREPARED) -> Self; } -/// Defines the minimal interface of commitment randomness for any polynomial -/// commitment scheme. -pub trait PCRandomness: Clone + CanonicalSerialize + CanonicalDeserialize { +/// Defines the minimal interface of commitment state for any polynomial +/// commitment scheme. It might be randomness etc. +pub trait PCCommitmentState: Clone + CanonicalSerialize + CanonicalDeserialize { + /// This is the type of `Randomness` that the `rand` method returns + type Randomness: Clone + CanonicalSerialize + CanonicalDeserialize; + /// Outputs empty randomness that does not hide the commitment. fn empty() -> Self; @@ -86,9 +89,8 @@ pub trait PCRandomness: Clone + CanonicalSerialize + CanonicalDeserialize { has_degree_bound: bool, num_vars: Option, rng: &mut R, - ) -> Self; + ) -> Self::Randomness; } - /// A proof of satisfaction of linear combinations. 
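The new `PCCommitmentState` trait above generalizes `PCRandomness`: whatever `commit` outputs for the prover to reuse in `open` lives in the state, and `rand` now returns the associated `Randomness` type rather than `Self`. A hypothetical implementation for a scheme whose only prover-side state is a single blinding scalar (names are illustrative; it assumes the trait is re-exported at the crate root, as the in-crate imports in this patch suggest):

```rust
use ark_ff::{PrimeField, UniformRand, Zero};
use ark_poly_commit::PCCommitmentState;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::rand::RngCore;

/// Toy state: the only thing `open` needs to remember is one blinding scalar.
#[derive(Clone, CanonicalSerialize, CanonicalDeserialize)]
struct ToyState<F: PrimeField> {
    blind: F,
}

impl<F: PrimeField> PCCommitmentState for ToyState<F> {
    // Here the state *is* the randomness; Ligero-style schemes instead use
    // `type Randomness = ()` and keep the encoded matrices and leaf hashes.
    type Randomness = Self;

    fn empty() -> Self {
        Self { blind: F::zero() }
    }

    fn rand<R: RngCore>(
        _num_queries: usize,
        _has_degree_bound: bool,
        _num_vars: Option<usize>,
        rng: &mut R,
    ) -> Self::Randomness {
        Self { blind: F::rand(rng) }
    }
}
```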
#[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] pub struct BatchLCProof { diff --git a/poly-commit/src/ipa_pc/data_structures.rs b/poly-commit/src/ipa_pc/data_structures.rs index 7ba56c95..84fcb7f2 100644 --- a/poly-commit/src/ipa_pc/data_structures.rs +++ b/poly-commit/src/ipa_pc/data_structures.rs @@ -146,7 +146,8 @@ pub struct Randomness { pub shifted_rand: Option, } -impl PCRandomness for Randomness { +impl PCCommitmentState for Randomness { + type Randomness = Self; fn empty() -> Self { Self { rand: G::ScalarField::zero(), diff --git a/poly-commit/src/ipa_pc/mod.rs b/poly-commit/src/ipa_pc/mod.rs index 25752d78..43a40852 100644 --- a/poly-commit/src/ipa_pc/mod.rs +++ b/poly-commit/src/ipa_pc/mod.rs @@ -1,7 +1,7 @@ use crate::{BTreeMap, BTreeSet, String, ToString, Vec, CHALLENGE_SIZE}; use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCCommitterKey, PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCCommitterKey, PCUniversalParams, PolynomialCommitment}; use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::{Field, One, PrimeField, UniformRand, Zero}; @@ -15,7 +15,6 @@ pub use data_structures::*; #[cfg(feature = "parallel")] use rayon::prelude::*; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; use digest::Digest; @@ -105,7 +104,7 @@ where point: G::ScalarField, values: impl IntoIterator, proof: &Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, ) -> Option> { let check_time = start_timer!(|| "Succinct checking"); @@ -117,7 +116,8 @@ where let mut combined_commitment_proj = G::Group::zero(); let mut combined_v = G::ScalarField::zero(); - let mut cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut cur_challenge: G::ScalarField = + sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let labeled_commitments = commitments.into_iter(); let values = values.into_iter(); @@ -126,7 +126,7 @@ where let commitment = labeled_commitment.commitment(); combined_v += &(cur_challenge * &value); combined_commitment_proj += &labeled_commitment.commitment().comm.mul(cur_challenge); - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let degree_bound = labeled_commitment.degree_bound(); assert_eq!(degree_bound.is_some(), commitment.shifted_comm.is_some()); @@ -137,7 +137,7 @@ where combined_commitment_proj += &commitment.shifted_comm.unwrap().mul(cur_challenge); } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } let mut combined_commitment = combined_commitment_proj.into_affine(); @@ -347,7 +347,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = Proof; type BatchProof = Vec; type Error = Error; @@ -418,7 +418,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -427,7 +427,7 @@ where { let rng = &mut crate::optional_rng::OptionalRng(rng); let mut comms = Vec::new(); - let mut rands = Vec::new(); + let mut states = Vec::new(); let commit_time = start_timer!(|| "Committing to polynomials"); for labeled_polynomial in polynomials { @@ -446,7 +446,7 
@@ where hiding_bound, )); - let randomness = if let Some(h) = hiding_bound { + let state = if let Some(h) = hiding_bound { Randomness::rand(h, degree_bound.is_some(), None, rng) } else { Randomness::empty() @@ -456,7 +456,7 @@ where &ck.comm_key[..(polynomial.degree() + 1)], &polynomial.coeffs(), Some(ck.s), - Some(randomness.rand), + Some(state.rand), ) .into(); @@ -465,7 +465,7 @@ where &ck.comm_key[(ck.supported_degree() - d)..], &polynomial.coeffs(), Some(ck.s), - randomness.shifted_rand, + state.shifted_rand, ) .into() }); @@ -474,13 +474,13 @@ where let labeled_comm = LabeledCommitment::new(label.to_string(), commitment, degree_bound); comms.push(labeled_comm); - rands.push(randomness); + states.push(state); end_timer!(commit_time); } end_timer!(commit_time); - Ok((comms, rands)) + Ok((comms, states)) } fn open<'a>( @@ -488,13 +488,13 @@ where labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where Self::Commitment: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, P: 'a, { let mut combined_polynomial = P::zero(); @@ -504,15 +504,15 @@ where let mut has_hiding = false; let polys_iter = labeled_polynomials.into_iter(); - let rands_iter = rands.into_iter(); + let states_iter = states.into_iter(); let comms_iter = commitments.into_iter(); let combine_time = start_timer!(|| "Combining polynomials, randomness, and commitments."); - let mut cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; - for (labeled_polynomial, (labeled_commitment, randomness)) in - polys_iter.zip(comms_iter.zip(rands_iter)) + for (labeled_polynomial, (labeled_commitment, state)) in + polys_iter.zip(comms_iter.zip(states_iter)) { let label = labeled_polynomial.label(); assert_eq!(labeled_polynomial.label(), labeled_commitment.label()); @@ -528,10 +528,10 @@ where if hiding_bound.is_some() { has_hiding = true; - combined_rand += &(cur_challenge * &randomness.rand); + combined_rand += &(cur_challenge * &state.rand); } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let has_degree_bound = degree_bound.is_some(); @@ -554,7 +554,7 @@ where combined_commitment_proj += &commitment.shifted_comm.unwrap().mul(cur_challenge); if hiding_bound.is_some() { - let shifted_rand = randomness.shifted_rand; + let shifted_rand = state.shifted_rand; assert!( shifted_rand.is_some(), "shifted_rand.is_none() for {}", @@ -564,7 +564,7 @@ where } } - cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + cur_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } end_timer!(combine_time); @@ -739,7 +739,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -762,8 +762,7 @@ where )); } - let check_poly = - Self::succinct_check(vk, commitments, *point, values, proof, opening_challenges); + let check_poly = Self::succinct_check(vk, commitments, *point, values, proof, sponge); if check_poly.is_none() { return Ok(false); @@ -790,7 +789,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - 
opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -833,14 +832,8 @@ where vals.push(*v_i); } - let check_poly = Self::succinct_check( - vk, - comms.into_iter(), - *point, - vals.into_iter(), - p, - opening_challenges, - ); + let check_poly = + Self::succinct_check(vk, comms.into_iter(), *point, vals.into_iter(), p, sponge); if check_poly.is_none() { return Ok(false); @@ -876,24 +869,24 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { let label_poly_map = polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments) - .map(|((p, r), c)| (p.label(), (p, r, c))) + .map(|((p, s), c)| (p.label(), (p, s, c))) .collect::>(); let mut lc_polynomials = Vec::new(); - let mut lc_randomness = Vec::new(); + let mut lc_states = Vec::new(); let mut lc_commitments = Vec::new(); let mut lc_info = Vec::new(); @@ -951,7 +944,7 @@ where let lc_poly = LabeledPolynomial::new(lc_label.clone(), poly, degree_bound, hiding_bound); lc_polynomials.push(lc_poly); - lc_randomness.push(Randomness { + lc_states.push(Randomness { rand: combined_rand, shifted_rand: combined_shifted_rand, }); @@ -971,8 +964,8 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, - lc_randomness.iter(), + sponge, + lc_states.iter(), rng, )?; Ok(BatchLCProof { proof, evals: None }) @@ -987,7 +980,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -1060,7 +1053,7 @@ where &eqn_query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/kzg10/data_structures.rs b/poly-commit/src/kzg10/data_structures.rs index 60626e70..d648f19f 100644 --- a/poly-commit/src/kzg10/data_structures.rs +++ b/poly-commit/src/kzg10/data_structures.rs @@ -420,7 +420,8 @@ impl> Randomness { } } -impl> PCRandomness for Randomness { +impl> PCCommitmentState for Randomness { + type Randomness = Self; fn empty() -> Self { Self { blinding_polynomial: P::zero(), diff --git a/poly-commit/src/kzg10/mod.rs b/poly-commit/src/kzg10/mod.rs index a6ea5752..508db2cb 100644 --- a/poly-commit/src/kzg10/mod.rs +++ b/poly-commit/src/kzg10/mod.rs @@ -5,10 +5,10 @@ //! proposed by Kate, Zaverucha, and Goldberg ([KZG10](http://cacr.uwaterloo.ca/techreports/2010/cacr2010-10.pdf)). //! This construction achieves extractability in the algebraic group model (AGM). 
-use crate::{BTreeMap, Error, LabeledPolynomial, PCRandomness, ToString, Vec}; +use crate::{BTreeMap, Error, LabeledPolynomial, PCCommitmentState, ToString, Vec}; use ark_ec::AffineRepr; use ark_ec::{pairing::Pairing, CurveGroup}; -use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; +use ark_ec::{scalar_mul::ScalarMul, VariableBaseMSM}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::DenseUVPolynomial; use ark_std::{format, marker::PhantomData, ops::Div, ops::Mul, vec}; @@ -66,36 +66,27 @@ where let gamma_g = E::G1::rand(rng); let h = E::G2::rand(rng); + // powers_of_beta = [1, b, ..., b^(max_degree + 1)], len = max_degree + 2 let mut powers_of_beta = vec![E::ScalarField::one()]; - let mut cur = beta; - for _ in 0..max_degree { + for _ in 0..=max_degree { powers_of_beta.push(cur); cur *= β } - let window_size = FixedBase::get_mul_window_size(max_degree + 1); - - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let g_time = start_timer!(|| "Generating powers of G"); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); - let powers_of_g = - FixedBase::msm::(scalar_bits, window_size, &g_table, &powers_of_beta); + let powers_of_g = g.batch_mul(&powers_of_beta[0..max_degree + 1]); end_timer!(g_time); - let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); - let gamma_g_table = FixedBase::get_window_table(scalar_bits, window_size, gamma_g); - let mut powers_of_gamma_g = - FixedBase::msm::(scalar_bits, window_size, &gamma_g_table, &powers_of_beta); - // Add an additional power of gamma_g, because we want to be able to support - // up to D queries. - powers_of_gamma_g.push(powers_of_gamma_g.last().unwrap().mul(&beta)); - end_timer!(gamma_g_time); - let powers_of_g = E::G1::normalize_batch(&powers_of_g); - let powers_of_gamma_g = E::G1::normalize_batch(&powers_of_gamma_g) + // Use the entire `powers_of_beta`, since we want to be able to support + // up to D queries. + let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); + let powers_of_gamma_g = gamma_g + .batch_mul(&powers_of_beta) .into_iter() .enumerate() .collect(); + end_timer!(gamma_g_time); let neg_powers_of_h_time = start_timer!(|| "Generating negative powers of h in G2"); let neg_powers_of_h = if produce_g2_powers { @@ -106,20 +97,10 @@ where cur /= β } - let neg_h_table = FixedBase::get_window_table(scalar_bits, window_size, h); - let neg_powers_of_h = FixedBase::msm::( - scalar_bits, - window_size, - &neg_h_table, - &neg_powers_of_beta, - ); - - let affines = E::G2::normalize_batch(&neg_powers_of_h); - let mut affines_map = BTreeMap::new(); - affines.into_iter().enumerate().for_each(|(i, a)| { - affines_map.insert(i, a); - }); - affines_map + h.batch_mul(&neg_powers_of_beta) + .into_iter() + .enumerate() + .collect() } else { BTreeMap::new() }; diff --git a/poly-commit/src/lib.rs b/poly-commit/src/lib.rs index c8805bac..bcd625a7 100644 --- a/poly-commit/src/lib.rs +++ b/poly-commit/src/lib.rs @@ -9,7 +9,7 @@ #![deny(renamed_and_removed_lints, stable_features, unused_allocation)] #![deny(unused_comparisons, bare_trait_objects, unused_must_use)] #![forbid(unsafe_code)] -#![doc = include_str!("../README.md")] +#![doc = include_str!("../../README.md")] #[allow(unused)] #[macro_use] @@ -101,8 +101,6 @@ pub mod sonic_pc; /// [pcdas]: https://eprint.iacr.org/2020/499 pub mod ipa_pc; -/// Defines the challenge strategies and challenge generator. 
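The `KZG10::setup` hunks above drop the `FixedBase` window-table code in favour of `ScalarMul::batch_mul`, which multiplies one base by a slice of scalars and returns normalized affine points. A small sketch of the pattern, independent of this crate (the function name is made up for illustration):

```rust
use ark_ec::scalar_mul::ScalarMul;
use ark_ff::One;

/// Compute [g, beta*g, ..., beta^d * g] in affine form with one batched
/// multiplication pass, the pattern `setup` now uses instead of window tables.
fn powers_of_base<G: ScalarMul>(g: G, beta: G::ScalarField, d: usize) -> Vec<G::MulBase> {
    let mut powers_of_beta = Vec::with_capacity(d + 1);
    let mut cur = G::ScalarField::one();
    for _ in 0..=d {
        powers_of_beta.push(cur);
        cur *= beta;
    }
    // `batch_mul` multiplies a single base by many scalars and normalizes
    // the results to affine representation in one batch.
    g.batch_mul(&powers_of_beta)
}
```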
-pub mod challenge; /// A multilinear polynomial commitment scheme that converts n-variate multilinear polynomial into /// n quotient UV polynomial. This scheme is based on hardness of the discrete logarithm /// in prime-order groups. Construction is detailed in [[XZZPD19]][xzzpd19] and [[ZGKPP18]][zgkpp18] @@ -111,7 +109,6 @@ pub mod challenge; /// [zgkpp]: https://ieeexplore.ieee.org/document/8418645 pub mod multilinear_pc; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; /// Multivariate polynomial commitment based on the construction in /// [[PST13]][pst] with batching and (optional) hiding property inspired @@ -164,8 +161,11 @@ pub trait PolynomialCommitment, S: Cryptographic type VerifierKey: PCVerifierKey; /// The commitment to a polynomial. type Commitment: PCCommitment + Default; - /// The commitment randomness. - type Randomness: PCRandomness; + /// Auxiliary state of the commitment, output by the `commit` phase. + /// It contains information that can be reused by the committer + /// during the `open` phase, such as the commitment randomness. + /// Not to be shared with the verifier. + type CommitmentState: PCCommitmentState; /// The evaluation proof for a single point. type Proof: Clone; /// The evaluation proof for a query set. @@ -211,7 +211,7 @@ pub trait PolynomialCommitment, S: Cryptographic ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -224,13 +224,13 @@ pub trait PolynomialCommitment, S: Cryptographic labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - challenge_generator: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a; /// check but with individual challenges @@ -240,7 +240,7 @@ pub trait PolynomialCommitment, S: Cryptographic point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: Option<&mut dyn RngCore>, ) -> Result where @@ -260,13 +260,13 @@ pub trait PolynomialCommitment, S: Cryptographic labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - challenge_generator: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { // The default implementation achieves proceeds by rearranging the queries in @@ -274,16 +274,16 @@ pub trait PolynomialCommitment, S: Cryptographic // the same point, then opening their commitments simultaneously with a // single call to `open` (per point) let rng = &mut crate::optional_rng::OptionalRng(rng); - let poly_rand_comm: BTreeMap<_, _> = labeled_polynomials + let poly_st_comm: BTreeMap<_, _> = labeled_polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments.into_iter()) - .map(|((poly, r), comm)| (poly.label(), (poly, r, comm))) + .map(|((poly, st), comm)| (poly.label(), (poly, st, comm))) .collect(); let open_time = start_timer!(|| format!( "Opening {} polynomials at query set of size {}", - poly_rand_comm.len(), + poly_st_comm.len(), query_set.len(), )); @@ -306,20 +306,20 @@ pub trait PolynomialCommitment, S: Cryptographic let mut proofs = Vec::new(); for (_point_label, (point, labels)) in query_to_labels_map.into_iter() { 
let mut query_polys: Vec<&'a LabeledPolynomial<_, _>> = Vec::new(); - let mut query_rands: Vec<&'a Self::Randomness> = Vec::new(); + let mut query_states: Vec<&'a Self::CommitmentState> = Vec::new(); let mut query_comms: Vec<&'a LabeledCommitment> = Vec::new(); // Constructing matching vectors with the polynomial, commitment // randomness and actual commitment for each polynomial being // queried at `point` for label in labels { - let (polynomial, rand, comm) = - poly_rand_comm.get(label).ok_or(Error::MissingPolynomial { + let (polynomial, state, comm) = + poly_st_comm.get(label).ok_or(Error::MissingPolynomial { label: label.to_string(), })?; query_polys.push(polynomial); - query_rands.push(rand); + query_states.push(state); query_comms.push(comm); } @@ -332,8 +332,8 @@ pub trait PolynomialCommitment, S: Cryptographic query_polys, query_comms, &point, - challenge_generator, - query_rands, + sponge, + query_states, Some(rng), )?; @@ -365,7 +365,7 @@ pub trait PolynomialCommitment, S: Cryptographic query_set: &QuerySet, evaluations: &Evaluations, proof: &Self::BatchProof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -423,15 +423,7 @@ pub trait PolynomialCommitment, S: Cryptographic // Verify all proofs referring to the current point simultaneously // with a single call to `check` - result &= Self::check( - vk, - comms, - &point, - values, - &proof, - challenge_generator, - Some(rng), - )?; + result &= Self::check(vk, comms, &point, values, &proof, sponge, Some(rng))?; end_timer!(proof_time); } Ok(result) @@ -445,12 +437,12 @@ pub trait PolynomialCommitment, S: Cryptographic polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - challenge_generator: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { @@ -471,8 +463,8 @@ pub trait PolynomialCommitment, S: Cryptographic polynomials, commitments, &poly_query_set, - challenge_generator, - rands, + sponge, + states, rng, )?; Ok(BatchLCProof { @@ -490,7 +482,7 @@ pub trait PolynomialCommitment, S: Cryptographic eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - challenge_generator: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -561,7 +553,7 @@ pub trait PolynomialCommitment, S: Cryptographic &poly_query_set, &poly_evals, proof, - challenge_generator, + sponge, rng, )?; if !pc_result { @@ -673,88 +665,83 @@ pub mod tests { PC: PolynomialCommitment, S: CryptographicSponge, { - let challenge_generators = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; - - for challenge_gen in challenge_generators { - let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - let max_degree = 100; - let pp = PC::setup(max_degree, None, rng)?; - for _ in 0..10 { - let supported_degree = Uniform::from(1..=max_degree).sample(rng); - assert!( - max_degree >= supported_degree, - "max_degree < supported_degree" - ); - - let mut labels = Vec::new(); - let mut polynomials = Vec::new(); - let mut degree_bounds = Vec::new(); - - for i in 0..10 { - let label = format!("Test{}", i); - labels.push(label.clone()); - let degree_bound = 1usize; - let hiding_bound = Some(1); - degree_bounds.push(degree_bound); - - polynomials.push(LabeledPolynomial::new( - label, - 
rand_poly(supported_degree, None, rng), - Some(degree_bound), - hiding_bound, - )); - } + let sponge = sponge(); + + let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + let max_degree = 100; + let pp = PC::setup(max_degree, None, rng)?; + for _ in 0..10 { + let supported_degree = Uniform::from(1..=max_degree).sample(rng); + assert!( + max_degree >= supported_degree, + "max_degree < supported_degree" + ); + + let mut labels = Vec::new(); + let mut polynomials = Vec::new(); + let mut degree_bounds = Vec::new(); + + for i in 0..10 { + let label = format!("Test{}", i); + labels.push(label.clone()); + let degree_bound = 1usize; + let hiding_bound = Some(1); + degree_bounds.push(degree_bound); + + polynomials.push(LabeledPolynomial::new( + label, + rand_poly(supported_degree, None, rng), + Some(degree_bound), + hiding_bound, + )); + } - let supported_hiding_bound = polynomials - .iter() - .map(|p| p.hiding_bound().unwrap_or(0)) - .max() - .unwrap_or(0); - println!("supported degree: {:?}", supported_degree); - println!("supported hiding bound: {:?}", supported_hiding_bound); - let (ck, vk) = PC::trim( - &pp, - supported_degree, - supported_hiding_bound, - Some(degree_bounds.as_slice()), - )?; - println!("Trimmed"); + let supported_hiding_bound = polynomials + .iter() + .map(|p| p.hiding_bound().unwrap_or(0)) + .max() + .unwrap_or(0); + println!("supported degree: {:?}", supported_degree); + println!("supported hiding bound: {:?}", supported_hiding_bound); + let (ck, vk) = PC::trim( + &pp, + supported_degree, + supported_hiding_bound, + Some(degree_bounds.as_slice()), + )?; + println!("Trimmed"); - let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; + let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; - let mut query_set = QuerySet::new(); - let mut values = Evaluations::new(); - let point = rand_point(None, rng); - for (i, label) in labels.iter().enumerate() { - query_set.insert((label.clone(), (format!("{}", i), point.clone()))); - let value = polynomials[i].evaluate(&point); - values.insert((label.clone(), point.clone()), value); - } - println!("Generated query set"); - - let proof = PC::batch_open( - &ck, - &polynomials, - &comms, - &query_set, - &mut (challenge_gen.clone()), - &rands, - Some(rng), - )?; - let result = PC::batch_check( - &vk, - &comms, - &query_set, - &values, - &proof, - &mut (challenge_gen.clone()), - rng, - )?; - assert!(result, "proof was incorrect, Query set: {:#?}", query_set); + let mut query_set = QuerySet::new(); + let mut values = Evaluations::new(); + let point = rand_point(None, rng); + for (i, label) in labels.iter().enumerate() { + query_set.insert((label.clone(), (format!("{}", i), point.clone()))); + let value = polynomials[i].evaluate(&point); + values.insert((label.clone(), point.clone()), value); } + println!("Generated query set"); + + let proof = PC::batch_open( + &ck, + &polynomials, + &comms, + &query_set, + &mut (sponge.clone()), + &rands, + Some(rng), + )?; + let result = PC::batch_check( + &vk, + &comms, + &query_set, + &values, + &proof, + &mut (sponge.clone()), + rng, + )?; + assert!(result, "proof was incorrect, Query set: {:#?}", query_set); } Ok(()) @@ -781,127 +768,123 @@ pub mod tests { sponge, } = info; - let challenge_gens = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; + let sponge = sponge(); - for challenge_gen in challenge_gens { - let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - // If testing multivariate polynomials, make 
the max degree lower - let max_degree = match num_vars { - Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), - None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + // If testing multivariate polynomials, make the max degree lower + let max_degree = match num_vars { + Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), + None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + }; + let pp = PC::setup(max_degree, num_vars, rng)?; + + for _ in 0..num_iters { + let supported_degree = + supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); + assert!( + max_degree >= supported_degree, + "max_degree < supported_degree" + ); + let mut polynomials: Vec> = Vec::new(); + let mut degree_bounds = if enforce_degree_bounds { + Some(Vec::new()) + } else { + None }; - let pp = PC::setup(max_degree, num_vars, rng)?; - - for _ in 0..num_iters { - let supported_degree = - supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); - assert!( - max_degree >= supported_degree, - "max_degree < supported_degree" - ); - let mut polynomials: Vec> = Vec::new(); - let mut degree_bounds = if enforce_degree_bounds { - Some(Vec::new()) + + let mut labels = Vec::new(); + println!("Sampled supported degree"); + + // Generate polynomials + let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); + for i in 0..num_polynomials { + let label = format!("Test{}", i); + labels.push(label.clone()); + let degree = Uniform::from(1..=supported_degree).sample(rng); + let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { + let range = Uniform::from(degree..=supported_degree); + let degree_bound = range.sample(rng); + degree_bounds.push(degree_bound); + Some(degree_bound) } else { None }; - let mut labels = Vec::new(); - println!("Sampled supported degree"); - - // Generate polynomials - let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); - for i in 0..num_polynomials { - let label = format!("Test{}", i); - labels.push(label.clone()); - let degree = Uniform::from(1..=supported_degree).sample(rng); - let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { - let range = Uniform::from(degree..=supported_degree); - let degree_bound = range.sample(rng); - degree_bounds.push(degree_bound); - Some(degree_bound) - } else { - None - }; - - let hiding_bound = if num_points_in_query_set >= degree { - Some(degree) - } else { - Some(num_points_in_query_set) - }; + let hiding_bound = if num_points_in_query_set >= degree { + Some(degree) + } else { + Some(num_points_in_query_set) + }; - polynomials.push(LabeledPolynomial::new( - label, - rand_poly(degree, num_vars, rng).into(), - degree_bound, - hiding_bound, - )) - } - let supported_hiding_bound = polynomials - .iter() - .map(|p| p.hiding_bound().unwrap_or(0)) - .max() - .unwrap_or(0); - println!("supported degree: {:?}", supported_degree); - println!("supported hiding bound: {:?}", supported_hiding_bound); - println!("num_points_in_query_set: {:?}", num_points_in_query_set); - let (ck, vk) = PC::trim( - &pp, - supported_degree, - supported_hiding_bound, - degree_bounds.as_ref().map(|s| s.as_slice()), - )?; - println!("Trimmed"); + polynomials.push(LabeledPolynomial::new( + label, + rand_poly(degree, num_vars, rng).into(), + degree_bound, + hiding_bound, + )) + } + let supported_hiding_bound = polynomials + .iter() + .map(|p| p.hiding_bound().unwrap_or(0)) + .max() + .unwrap_or(0); 
+ println!("supported degree: {:?}", supported_degree); + println!("supported hiding bound: {:?}", supported_hiding_bound); + println!("num_points_in_query_set: {:?}", num_points_in_query_set); + let (ck, vk) = PC::trim( + &pp, + supported_degree, + supported_hiding_bound, + degree_bounds.as_ref().map(|s| s.as_slice()), + )?; + println!("Trimmed"); - let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; + let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; - // Construct query set - let mut query_set = QuerySet::new(); - let mut values = Evaluations::new(); - for _ in 0..num_points_in_query_set { - let point = rand_point(num_vars, rng); - for (i, label) in labels.iter().enumerate() { - query_set.insert((label.clone(), (format!("{}", i), point.clone()))); - let value = polynomials[i].evaluate(&point); - values.insert((label.clone(), point.clone()), value); - } + // Construct query set + let mut query_set = QuerySet::new(); + let mut values = Evaluations::new(); + for _ in 0..num_points_in_query_set { + let point = rand_point(num_vars, rng); + for (i, label) in labels.iter().enumerate() { + query_set.insert((label.clone(), (format!("{}", i), point.clone()))); + let value = polynomials[i].evaluate(&point); + values.insert((label.clone(), point.clone()), value); } - println!("Generated query set"); - - let proof = PC::batch_open( - &ck, - &polynomials, - &comms, - &query_set, - &mut (challenge_gen.clone()), - &rands, - Some(rng), - )?; - let result = PC::batch_check( - &vk, - &comms, - &query_set, - &values, - &proof, - &mut (challenge_gen.clone()), - rng, - )?; - if !result { - println!( - "Failed with {} polynomials, num_points_in_query_set: {:?}", - num_polynomials, num_points_in_query_set - ); - println!("Degree of polynomials:",); - for poly in polynomials { - println!("Degree: {:?}", poly.degree()); - } + } + println!("Generated query set"); + + let proof = PC::batch_open( + &ck, + &polynomials, + &comms, + &query_set, + &mut (sponge.clone()), + &rands, + Some(rng), + )?; + let result = PC::batch_check( + &vk, + &comms, + &query_set, + &values, + &proof, + &mut (sponge.clone()), + rng, + )?; + if !result { + println!( + "Failed with {} polynomials, num_points_in_query_set: {:?}", + num_polynomials, num_points_in_query_set + ); + println!("Degree of polynomials:",); + for poly in polynomials { + println!("Degree: {:?}", poly.degree()); } - assert!(result, "proof was incorrect, Query set: {:#?}", query_set); } + assert!(result, "proof was incorrect, Query set: {:#?}", query_set); } + Ok(()) } @@ -926,167 +909,163 @@ pub mod tests { sponge, } = info; - let challenge_gens = vec![ - ChallengeGenerator::new_multivariate(sponge()), - ChallengeGenerator::new_univariate(&mut sponge()), - ]; + let sponge = sponge(); - for challenge_gen in challenge_gens { - let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); - // If testing multivariate polynomials, make the max degree lower - let max_degree = match num_vars { - Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), - None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + let rng = &mut ChaCha20Rng::from_rng(test_rng()).unwrap(); + // If testing multivariate polynomials, make the max degree lower + let max_degree = match num_vars { + Some(_) => max_degree.unwrap_or(Uniform::from(2..=10).sample(rng)), + None => max_degree.unwrap_or(Uniform::from(2..=64).sample(rng)), + }; + let pp = PC::setup(max_degree, num_vars, rng)?; + + for _ in 0..num_iters { + let supported_degree = + 
supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); + assert!( + max_degree >= supported_degree, + "max_degree < supported_degree" + ); + let mut polynomials = Vec::new(); + let mut degree_bounds = if enforce_degree_bounds { + Some(Vec::new()) + } else { + None }; - let pp = PC::setup(max_degree, num_vars, rng)?; - - for _ in 0..num_iters { - let supported_degree = - supported_degree.unwrap_or(Uniform::from(1..=max_degree).sample(rng)); - assert!( - max_degree >= supported_degree, - "max_degree < supported_degree" - ); - let mut polynomials = Vec::new(); - let mut degree_bounds = if enforce_degree_bounds { - Some(Vec::new()) + + let mut labels = Vec::new(); + println!("Sampled supported degree"); + + // Generate polynomials + let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); + for i in 0..num_polynomials { + let label = format!("Test{}", i); + labels.push(label.clone()); + let degree = Uniform::from(1..=supported_degree).sample(rng); + let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { + if rng.gen() { + let range = Uniform::from(degree..=supported_degree); + let degree_bound = range.sample(rng); + degree_bounds.push(degree_bound); + Some(degree_bound) + } else { + None + } } else { None }; - let mut labels = Vec::new(); - println!("Sampled supported degree"); - - // Generate polynomials - let num_points_in_query_set = Uniform::from(1..=max_num_queries).sample(rng); - for i in 0..num_polynomials { - let label = format!("Test{}", i); - labels.push(label.clone()); - let degree = Uniform::from(1..=supported_degree).sample(rng); - let degree_bound = if let Some(degree_bounds) = &mut degree_bounds { - if rng.gen() { - let range = Uniform::from(degree..=supported_degree); - let degree_bound = range.sample(rng); - degree_bounds.push(degree_bound); - Some(degree_bound) + let hiding_bound = if num_points_in_query_set >= degree { + Some(degree) + } else { + Some(num_points_in_query_set) + }; + println!("Hiding bound: {:?}", hiding_bound); + + polynomials.push(LabeledPolynomial::new( + label, + rand_poly(degree, num_vars, rng), + degree_bound, + hiding_bound, + )) + } + println!("supported degree: {:?}", supported_degree); + println!("num_points_in_query_set: {:?}", num_points_in_query_set); + println!("{:?}", degree_bounds); + println!("{}", num_polynomials); + println!("{}", enforce_degree_bounds); + + let (ck, vk) = PC::trim( + &pp, + supported_degree, + supported_degree, + degree_bounds.as_ref().map(|s| s.as_slice()), + )?; + println!("Trimmed"); + + let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; + + // Let's construct our equations + let mut linear_combinations = Vec::new(); + let mut query_set = QuerySet::new(); + let mut values = Evaluations::new(); + for i in 0..num_points_in_query_set { + let point = rand_point(num_vars, rng); + for j in 0..num_equations.unwrap() { + let label = format!("query {} eqn {}", i, j); + let mut lc = LinearCombination::empty(label.clone()); + + let mut value = F::zero(); + let should_have_degree_bounds: bool = rng.gen(); + for (k, label) in labels.iter().enumerate() { + if should_have_degree_bounds { + value += &polynomials[k].evaluate(&point); + lc.push((F::one(), label.to_string().into())); + break; } else { - None - } - } else { - None - }; - - let hiding_bound = if num_points_in_query_set >= degree { - Some(degree) - } else { - Some(num_points_in_query_set) - }; - println!("Hiding bound: {:?}", hiding_bound); - - polynomials.push(LabeledPolynomial::new( - label, - 
rand_poly(degree, num_vars, rng), - degree_bound, - hiding_bound, - )) - } - println!("supported degree: {:?}", supported_degree); - println!("num_points_in_query_set: {:?}", num_points_in_query_set); - println!("{:?}", degree_bounds); - println!("{}", num_polynomials); - println!("{}", enforce_degree_bounds); - - let (ck, vk) = PC::trim( - &pp, - supported_degree, - supported_degree, - degree_bounds.as_ref().map(|s| s.as_slice()), - )?; - println!("Trimmed"); - - let (comms, rands) = PC::commit(&ck, &polynomials, Some(rng))?; - - // Let's construct our equations - let mut linear_combinations = Vec::new(); - let mut query_set = QuerySet::new(); - let mut values = Evaluations::new(); - for i in 0..num_points_in_query_set { - let point = rand_point(num_vars, rng); - for j in 0..num_equations.unwrap() { - let label = format!("query {} eqn {}", i, j); - let mut lc = LinearCombination::empty(label.clone()); - - let mut value = F::zero(); - let should_have_degree_bounds: bool = rng.gen(); - for (k, label) in labels.iter().enumerate() { - if should_have_degree_bounds { - value += &polynomials[k].evaluate(&point); - lc.push((F::one(), label.to_string().into())); - break; + let poly = &polynomials[k]; + if poly.degree_bound().is_some() { + continue; } else { - let poly = &polynomials[k]; - if poly.degree_bound().is_some() { - continue; - } else { - assert!(poly.degree_bound().is_none()); - let coeff = F::rand(rng); - value += &(coeff * poly.evaluate(&point)); - lc.push((coeff, label.to_string().into())); - } + assert!(poly.degree_bound().is_none()); + let coeff = F::rand(rng); + value += &(coeff * poly.evaluate(&point)); + lc.push((coeff, label.to_string().into())); } } - values.insert((label.clone(), point.clone()), value); - if !lc.is_empty() { - linear_combinations.push(lc); - // Insert query - query_set.insert((label.clone(), (format!("{}", i), point.clone()))); - } } - } - if linear_combinations.is_empty() { - continue; - } - println!("Generated query set"); - println!("Linear combinations: {:?}", linear_combinations); - - let proof = PC::open_combinations( - &ck, - &linear_combinations, - &polynomials, - &comms, - &query_set, - &mut (challenge_gen.clone()), - &rands, - Some(rng), - )?; - println!("Generated proof"); - let result = PC::check_combinations( - &vk, - &linear_combinations, - &comms, - &query_set, - &values, - &proof, - &mut (challenge_gen.clone()), - rng, - )?; - if !result { - println!( - "Failed with {} polynomials, num_points_in_query_set: {:?}", - num_polynomials, num_points_in_query_set - ); - println!("Degree of polynomials:",); - for poly in polynomials { - println!("Degree: {:?}", poly.degree()); + values.insert((label.clone(), point.clone()), value); + if !lc.is_empty() { + linear_combinations.push(lc); + // Insert query + query_set.insert((label.clone(), (format!("{}", i), point.clone()))); } } - assert!( - result, - "proof was incorrect, equations: {:#?}", - linear_combinations + } + if linear_combinations.is_empty() { + continue; + } + println!("Generated query set"); + println!("Linear combinations: {:?}", linear_combinations); + + let proof = PC::open_combinations( + &ck, + &linear_combinations, + &polynomials, + &comms, + &query_set, + &mut (sponge.clone()), + &rands, + Some(rng), + )?; + println!("Generated proof"); + let result = PC::check_combinations( + &vk, + &linear_combinations, + &comms, + &query_set, + &values, + &proof, + &mut (sponge.clone()), + rng, + )?; + if !result { + println!( + "Failed with {} polynomials, num_points_in_query_set: {:?}", + 
num_polynomials, num_points_in_query_set ); + println!("Degree of polynomials:",); + for poly in polynomials { + println!("Degree: {:?}", poly.degree()); + } } + assert!( + result, + "proof was incorrect, equations: {:#?}", + linear_combinations + ); } + Ok(()) } diff --git a/poly-commit/src/linear_codes/data_structures.rs b/poly-commit/src/linear_codes/data_structures.rs index 8a6f91dd..960e62cf 100644 --- a/poly-commit/src/linear_codes/data_structures.rs +++ b/poly-commit/src/linear_codes/data_structures.rs @@ -1,4 +1,4 @@ -use crate::{PCCommitment, PCRandomness}; +use crate::{utils::Matrix, PCCommitment, PCCommitmentState}; use ark_crypto_primitives::{ crh::CRHScheme, merkle_tree::{Config, LeafParam, Path, TwoToOneParam}, @@ -22,10 +22,10 @@ pub struct LigeroPCParams { pub(crate) check_well_formedness: bool, /// Parameters for hash function of Merkle tree leaves #[derivative(Debug = "ignore")] - pub(crate) leaf_hash_params: LeafParam, + pub(crate) leaf_hash_param: LeafParam, /// Parameters for hash function of Merke tree combining two nodes into one #[derivative(Debug = "ignore")] - pub(crate) two_to_one_params: TwoToOneParam, + pub(crate) two_to_one_hash_param: TwoToOneParam, // Parameters for obtaining leaf digest from leaf value. #[derivative(Debug = "ignore")] pub(crate) col_hash_params: H::Parameters, @@ -59,9 +59,24 @@ impl PCCommitment for LinCodePCCommitment { } } -pub(crate) type LinCodePCRandomness = (); +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] +pub struct LinCodePCCommitmentState +where + F: PrimeField, + H: CRHScheme, +{ + pub(crate) mat: Matrix, + pub(crate) ext_mat: Matrix, + pub(crate) leaves: Vec, +} -impl PCRandomness for LinCodePCRandomness { +impl PCCommitmentState for LinCodePCCommitmentState +where + F: PrimeField, + H: CRHScheme, +{ + type Randomness = (); fn empty() -> Self { unimplemented!() } @@ -71,7 +86,7 @@ impl PCRandomness for LinCodePCRandomness { _has_degree_bound: bool, _num_vars: Option, _rng: &mut R, - ) -> Self { + ) -> Self::Randomness { unimplemented!() } } diff --git a/poly-commit/src/linear_codes/ligero.rs b/poly-commit/src/linear_codes/ligero.rs index f60125b4..3eb13043 100644 --- a/poly-commit/src/linear_codes/ligero.rs +++ b/poly-commit/src/linear_codes/ligero.rs @@ -23,8 +23,8 @@ where sec_param: usize, rho_inv: usize, check_well_formedness: bool, - leaf_hash_params: LeafParam, - two_to_one_params: TwoToOneParam, + leaf_hash_param: LeafParam, + two_to_one_hash_param: TwoToOneParam, col_hash_params: H::Parameters, ) -> Self { Self { @@ -32,8 +32,8 @@ where sec_param, rho_inv, check_well_formedness, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, } } @@ -127,12 +127,14 @@ where (n, m) } - fn leaf_hash_params(&self) -> &<::LeafHash as CRHScheme>::Parameters { - &self.leaf_hash_params + fn leaf_hash_param(&self) -> &<::LeafHash as CRHScheme>::Parameters { + &self.leaf_hash_param } - fn two_to_one_params(&self) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters { - &self.two_to_one_params + fn two_to_one_hash_param( + &self, + ) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters { + &self.two_to_one_hash_param } fn col_hash_params(&self) -> &::Parameters { diff --git a/poly-commit/src/linear_codes/mod.rs b/poly-commit/src/linear_codes/mod.rs index 8e21c83a..b31595c5 100644 --- a/poly-commit/src/linear_codes/mod.rs +++ b/poly-commit/src/linear_codes/mod.rs @@ -1,12 +1,15 @@ -use 
crate::utils::{inner_product, IOPTranscript, Matrix}; +use crate::utils::{inner_product, Matrix}; use crate::{ - Error, LabeledCommitment, LabeledPolynomial, PCCommitterKey, PCUniversalParams, PCVerifierKey, - PolynomialCommitment, + to_bytes, Error, LabeledCommitment, LabeledPolynomial, PCCommitterKey, PCUniversalParams, + PCVerifierKey, PolynomialCommitment, }; use ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme}; use ark_crypto_primitives::merkle_tree::MerkleTree; -use ark_crypto_primitives::{merkle_tree::Config, sponge::CryptographicSponge}; +use ark_crypto_primitives::{ + merkle_tree::Config, + sponge::{Absorb, CryptographicSponge}, +}; use ark_ff::PrimeField; use ark_poly::Polynomial; use ark_std::borrow::Borrow; @@ -32,7 +35,7 @@ use data_structures::*; pub use data_structures::{LigeroPCParams, LinCodePCProof}; -use utils::{calculate_t, get_indices_from_transcript, hash_column}; +use utils::{calculate_t, get_indices_from_sponge}; const FIELD_SIZE_ERROR: &str = "This field is not suitable for the proposed parameters"; @@ -57,10 +60,12 @@ where fn compute_dimensions(&self, n: usize) -> (usize, usize); /// Get the hash parameters for obtaining leaf digest from leaf value. - fn leaf_hash_params(&self) -> &<::LeafHash as CRHScheme>::Parameters; + fn leaf_hash_param(&self) -> &<::LeafHash as CRHScheme>::Parameters; /// Get the parameters for hashing nodes in the merkle tree. - fn two_to_one_params(&self) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters; + fn two_to_one_hash_param( + &self, + ) -> &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters; /// Get the parameters for hashing a vector of values, /// representing a column of the coefficient matrix, into a leaf value. @@ -87,8 +92,8 @@ where max_degree: usize, num_vars: Option, rng: &mut R, - leaf_hash_params: <::LeafHash as CRHScheme>::Parameters, - two_to_one_params: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + leaf_hash_param: <::LeafHash as CRHScheme>::Parameters, + two_to_one_hash_param: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, col_hash_params: H::Parameters, ) -> Self::LinCodePCParams; @@ -151,14 +156,14 @@ where impl PolynomialCommitment for LinearCodePCS where L: LinearEncode, - F: PrimeField, + F: PrimeField + Absorb, P: Polynomial, S: CryptographicSponge, C: Config + 'static, Vec: Borrow<::Input>, - H::Output: Into, - C::Leaf: Sized + Clone + Default + Send, - H: CRHScheme, + H::Output: Into + Send, + C::Leaf: Sized + Clone + Default + Send + AsRef, + H: CRHScheme + 'static, { type UniversalParams = L::LinCodePCParams; @@ -168,7 +173,7 @@ where type Commitment = LinCodePCCommitment; - type Randomness = LinCodePCRandomness; + type CommitmentState = LinCodePCCommitmentState; type Proof = LPCPArray; @@ -184,8 +189,8 @@ where num_vars: Option, rng: &mut R, ) -> Result { - let leaf_hash_params = ::setup(rng).unwrap(); - let two_to_one_params = ::setup(rng) + let leaf_hash_param = ::setup(rng).unwrap(); + let two_to_one_hash_param = ::setup(rng) .unwrap() .clone(); let col_hash_params = ::setup(rng).unwrap(); @@ -193,8 +198,8 @@ where max_degree, num_vars, rng, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ); let real_max_degree = ::max_degree(&pp); @@ -223,7 +228,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -231,35 +236,43 @@ where P: 'a, { let mut commitments = Vec::new(); + let mut states = Vec::new(); - for labeled_polynomial in polynomials.into_iter() { + for labeled_polynomial in polynomials { let polynomial = 
labeled_polynomial.polynomial(); // 1. Arrange the coefficients of the polynomial into a matrix, // and apply encoding to get `ext_mat`. let (mat, ext_mat) = L::compute_matrices(polynomial, ck); + let n_rows = mat.n; + let n_cols = mat.m; + let n_ext_cols = ext_mat.m; // 2. Create the Merkle tree from the hashes of each column. - let col_tree = create_merkle_tree::( - &ext_mat, - ck.leaf_hash_params(), - ck.two_to_one_params(), - ck.col_hash_params(), + let ext_mat_cols = ext_mat.cols(); + let leaves: Vec = cfg_into_iter!(ext_mat_cols) + .map(|col| { + H::evaluate(ck.col_hash_params(), col) + .map_err(|_| Error::HashingError) + .unwrap() + }) + .collect(); + let state = Self::CommitmentState { + mat, + ext_mat, + leaves, + }; + let mut leaves: Vec = + state.leaves.clone().into_iter().map(|h| h.into()).collect(); // TODO cfg_inter + let col_tree = create_merkle_tree::( + &mut leaves, + ck.leaf_hash_param(), + ck.two_to_one_hash_param(), )?; - // 3. Obtain the MT root and add it to the transcript. + // 3. Obtain the MT root let root = col_tree.root(); - let mut transcript: IOPTranscript = IOPTranscript::new(b"transcript"); - - transcript - .append_serializable_element(b"root", &root) - .map_err(|_| Error::TranscriptError)?; - - let n_rows = mat.n; - let n_cols = mat.m; - let n_ext_cols = ext_mat.m; - // 4. The commitment is just the root, but since each commitment could be to a differently-sized polynomial, we also add some metadata. let commitment = LinCodePCCommitment { metadata: Metadata { @@ -275,92 +288,67 @@ where commitment, None, )); + states.push(state); } - let com_len = &commitments.len(); - Ok((commitments, vec![(); *com_len])) + Ok((commitments, states)) } fn open<'a>( ck: &Self::CommitterKey, - labeled_polynomials: impl IntoIterator>, + _labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>, point: &'a P::Point, - _challenge_generator: &mut crate::challenge::ChallengeGenerator, - _rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { let mut proof_array = LPCPArray::default(); - let labeled_commitments: Vec<&'a LabeledCommitment> = - commitments.into_iter().collect(); - let labeled_polynomials: Vec<&'a LabeledPolynomial> = - labeled_polynomials.into_iter().collect(); - - if labeled_commitments.len() != labeled_polynomials.len() { - return Err(Error::IncorrectInputLength(format!( - "Mismatched lengths: {} commitments, {} polynomials", - labeled_commitments.len(), - labeled_polynomials.len() - ))); - } - for i in 0..labeled_polynomials.len() { - let polynomial = labeled_polynomials[i].polynomial(); - let commitment = labeled_commitments[i].commitment(); + for (labeled_commitment, state) in commitments.into_iter().zip(states) { + let commitment = labeled_commitment.commitment(); let n_rows = commitment.metadata.n_rows; let n_cols = commitment.metadata.n_cols; - let root = &commitment.root; // 1. Arrange the coefficients of the polynomial into a matrix, // and apply encoding to get `ext_mat`. - let (mat, ext_mat) = L::compute_matrices(polynomial, ck); - // 2. Create the Merkle tree from the hashes of each column. 
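The hunks above and below capture the core of the auxiliary-opening-data change: the coefficient matrix, its encoding, and the per-column hashes computed during `commit` are stored in `LinCodePCCommitmentState` and simply reused in `open`, instead of re-encoding the polynomial and re-hashing every column. A minimal, std-only sketch of that store-then-reuse pattern follows; `toy_hash`, `State`, `commit`, and `open_columns` are illustrative stand-ins, not the crate's API (the real code hashes field-element columns with a CRH and commits to the Merkle root of the leaves).

// Illustrative sketch: cache per-column hashes at commit time, reuse them at opening time.
struct State {
    ext_mat_cols: Vec<Vec<u64>>, // columns of the encoded matrix
    leaves: Vec<u64>,            // hash of each column, computed once
}

fn toy_hash(col: &[u64]) -> u64 {
    // stand-in for a collision-resistant hash of a column
    col.iter().fold(0u64, |acc, x| acc.wrapping_mul(31).wrapping_add(*x))
}

fn commit(ext_mat_cols: Vec<Vec<u64>>) -> (u64, State) {
    let leaves: Vec<u64> = ext_mat_cols.iter().map(|c| toy_hash(c)).collect();
    let root = toy_hash(&leaves); // stand-in for the Merkle root
    (root, State { ext_mat_cols, leaves })
}

fn open_columns(state: &State, indices: &[usize]) -> Vec<(Vec<u64>, u64)> {
    // no re-encoding, no re-hashing: just read the cached data
    indices
        .iter()
        .map(|&i| (state.ext_mat_cols[i].clone(), state.leaves[i]))
        .collect()
}

fn main() {
    let (root, state) = commit(vec![vec![1, 2], vec![3, 4], vec![5, 6]]);
    let opened = open_columns(&state, &[0, 2]);
    assert_eq!(opened.len(), 2);
    println!("root = {root}, opened {} columns", opened.len());
}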
- let col_tree = create_merkle_tree::( - &ext_mat, - ck.leaf_hash_params(), - ck.two_to_one_params(), - ck.col_hash_params(), + let Self::CommitmentState { + mat, + ext_mat, + leaves: col_hashes, + } = state; + let mut col_hashes: Vec = + col_hashes.clone().into_iter().map(|h| h.into()).collect(); // TODO cfg_inter + + let col_tree = create_merkle_tree::( + &mut col_hashes, + ck.leaf_hash_param(), + ck.two_to_one_hash_param(), )?; // 3. Generate vector `b` to left-multiply the matrix. let (_, b) = L::tensor(point, n_cols, n_rows); - let mut transcript = IOPTranscript::new(b"transcript"); - transcript - .append_serializable_element(b"root", root) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&to_bytes!(&commitment.root).map_err(|_| Error::TranscriptError)?); // If we are checking well-formedness, we need to compute the well-formedness proof (which is just r.M) and append it to the transcript. let well_formedness = if ck.check_well_formedness() { - let mut r = Vec::new(); - for _ in 0..n_rows { - r.push( - transcript - .get_and_append_challenge(b"r") - .map_err(|_| Error::TranscriptError)?, - ); - } + let r = sponge.squeeze_field_elements::(n_rows); let v = mat.row_mul(&r); - transcript - .append_serializable_element(b"v", &v) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&v); Some(v) } else { None }; let point_vec = L::point_to_vec(point.clone()); - for element in point_vec.iter() { - transcript - .append_serializable_element(b"point", element) - .map_err(|_| Error::TranscriptError)?; - } + sponge.absorb(&point_vec); proof_array.push(LinCodePCProof { // Compute the opening proof and append b.M to the transcript. @@ -371,7 +359,7 @@ where &mat, &ext_mat, &col_tree, - &mut transcript, + sponge, )?, well_formedness, }); @@ -386,31 +374,19 @@ where point: &'a P::Point, values: impl IntoIterator, proof_array: &Self::Proof, - _challenge_generator: &mut crate::challenge::ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where Self::Commitment: 'a, { - let labeled_commitments: Vec<&'a LabeledCommitment> = - commitments.into_iter().collect(); - let values: Vec = values.into_iter().collect(); - - if labeled_commitments.len() != proof_array.len() - || labeled_commitments.len() != values.len() - { - return Err(Error::IncorrectInputLength( - format!( - "Mismatched lengths: {} proofs were provided for {} commitments with {} claimed values",labeled_commitments.len(), proof_array.len(), values.len() - ) - )); - } - let leaf_hash_params: &<::LeafHash as CRHScheme>::Parameters = - vk.leaf_hash_params(); - let two_to_one_params: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters = - vk.two_to_one_params(); + let leaf_hash_param: &<::LeafHash as CRHScheme>::Parameters = + vk.leaf_hash_param(); + let two_to_one_hash_param: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters = + vk.two_to_one_hash_param(); - for (i, labeled_commitment) in labeled_commitments.iter().enumerate() { + for (i, (labeled_commitment, value)) in commitments.into_iter().zip(values).enumerate() { + let proof = &proof_array[i]; let commitment = labeled_commitment.commitment(); let n_rows = commitment.metadata.n_rows; let n_cols = commitment.metadata.n_cols; @@ -418,31 +394,19 @@ where let root = &commitment.root; let t = calculate_t::(vk.sec_param(), vk.distance(), n_ext_cols)?; - let mut transcript = IOPTranscript::new(b"transcript"); - transcript - .append_serializable_element(b"root", &commitment.root) - .map_err(|_| Error::TranscriptError)?; + 
sponge.absorb(&to_bytes!(&commitment.root).map_err(|_| Error::TranscriptError)?); let out = if vk.check_well_formedness() { - if proof_array[i].well_formedness.is_none() { + if proof.well_formedness.is_none() { return Err(Error::InvalidCommitment); } - let tmp = &proof_array[i].well_formedness.as_ref(); - let well_formedness = tmp.unwrap(); - let mut r = Vec::with_capacity(n_rows); - for _ in 0..n_rows { - r.push( - transcript - .get_and_append_challenge(b"r") - .map_err(|_| Error::TranscriptError)?, - ); - } + let tmp = &proof.well_formedness.as_ref(); + let v = tmp.unwrap(); + let r = sponge.squeeze_field_elements::(n_rows); // Upon sending `v` to the Verifier, add it to the sponge. The claim is that v = r.M. - transcript - .append_serializable_element(b"v", well_formedness) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&v); - (Some(well_formedness), Some(r)) + (Some(v), Some(r)) } else { (None, None) }; @@ -450,36 +414,35 @@ where // 1. Seed the transcript with the point and the recieved vector // TODO Consider removing the evaluation point from the transcript. let point_vec = L::point_to_vec(point.clone()); - for element in point_vec.iter() { - transcript - .append_serializable_element(b"point", element) - .map_err(|_| Error::TranscriptError)?; - } - transcript - .append_serializable_element(b"v", &proof_array[i].opening.v) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&point_vec); + sponge.absorb(&proof.opening.v); // 2. Ask random oracle for the `t` indices where the checks happen. - let indices = get_indices_from_transcript::(n_ext_cols, t, &mut transcript)?; + let indices = get_indices_from_sponge(n_ext_cols, t, sponge)?; // 3. Hash the received columns into leaf hashes. - let col_hashes: Vec = proof_array[i] + let col_hashes: Vec = proof .opening .columns .iter() - .map(|c| hash_column::(c.clone(), vk.col_hash_params()).unwrap()) + .map(|c| { + H::evaluate(vk.col_hash_params(), c.clone()) + .map_err(|_| Error::HashingError) + .unwrap() + .into() + }) .collect(); // 4. Verify the paths for each of the leaf hashes - this is only run once, // even if we have a well-formedness check (i.e., we save sending and checking the columns). // See "Concrete optimizations to the commitment scheme", p.12 of [Brakedown](https://eprint.iacr.org/2021/1043.pdf). for (j, (leaf, q_j)) in col_hashes.iter().zip(indices.iter()).enumerate() { - let path = &proof_array[i].opening.paths[j]; + let path = &proof.opening.paths[j]; if path.leaf_index != *q_j { return Err(Error::InvalidCommitment); } - path.verify(leaf_hash_params, two_to_one_params, root, leaf.clone()) + path.verify(leaf_hash_param, two_to_one_hash_param, root, leaf.clone()) .map_err(|_| Error::InvalidCommitment)?; } @@ -493,7 +456,7 @@ where }; // 5. Compute the encoding w = E(v). - let w = L::encode(&proof_array[i].opening.v, vk); + let w = L::encode(&proof.opening.v, vk); // 6. Compute `a`, `b` to right- and left- multiply with the matrix `M`. 
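Steps 6 and 7 in the hunk that follows reduce to a handful of inner products: for every sampled column index, the verifier checks that the inner product of `b` with the opened column equals the corresponding entry of `w = E(v)`, and finally that the inner product of `v` with `a` matches the claimed evaluation. A small integer-arithmetic sketch of those checks (the real code works over a prime field and returns `Error::InvalidCommitment` on mismatch):

// Sketch of the verifier's inner-product consistency checks, over plain integers.
fn inner_product(u: &[i64], v: &[i64]) -> i64 {
    u.iter().zip(v).map(|(a, b)| a * b).sum()
}

// True iff every opened column is consistent with w = E(v) and the claimed
// value equals <v, a>.
fn check_opening(
    b: &[i64],                    // left tensor vector
    a: &[i64],                    // right tensor vector
    v: &[i64],                    // prover-supplied row vector (claimed b * M)
    w: &[i64],                    // encoding of v, recomputed by the verifier
    opened: &[(usize, Vec<i64>)], // (column index, column entries)
    claimed_value: i64,
) -> bool {
    for (q, col) in opened {
        if inner_product(b, col) != w[*q] {
            return false;
        }
    }
    inner_product(v, a) == claimed_value
}

fn main() {
    // 1x2 "matrix" M = [4, 5] with the identity map standing in for the encoding.
    let (b, a, v) = (vec![1], vec![2, 3], vec![4, 5]);
    let w = v.clone();
    let opened = vec![(0usize, vec![4])]; // column 0 of M
    assert!(check_opening(&b, &a, &v, &w, &opened, 4 * 2 + 5 * 3));
}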
let (a, b) = L::tensor(point, n_cols, n_rows); @@ -506,12 +469,12 @@ where for (transcript_index, matrix_index) in indices.iter().enumerate() { check_inner_product( &r, - &proof_array[i].opening.columns[transcript_index], + &proof.opening.columns[transcript_index], w_well_formedness[*matrix_index], )?; check_inner_product( &b, - &proof_array[i].opening.columns[transcript_index], + &proof.opening.columns[transcript_index], w[*matrix_index], )?; } @@ -519,13 +482,13 @@ where for (transcript_index, matrix_index) in indices.iter().enumerate() { check_inner_product( &b, - &proof_array[i].opening.columns[transcript_index], + &proof.opening.columns[transcript_index], w[*matrix_index], )?; } } - if inner_product(&proof_array[i].opening.v, &a) != values[i] { + if inner_product(&proof.opening.v, &a) != value { eprintln!("Function check: claimed value in position {i} does not match the evaluation of the committed polynomial in the same position"); return Ok(false); } @@ -536,58 +499,45 @@ where } // TODO maybe this can go to utils -fn create_merkle_tree( - ext_mat: &Matrix, - leaf_hash_params: &<::LeafHash as CRHScheme>::Parameters, - two_to_one_params: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters, - col_hash_params: &H::Parameters, +fn create_merkle_tree( + leaves: &mut Vec, + leaf_hash_param: &<::LeafHash as CRHScheme>::Parameters, + two_to_one_hash_param: &<::TwoToOneHash as TwoToOneCRHScheme>::Parameters, ) -> Result, Error> where - F: PrimeField, C: Config, - H: CRHScheme, - Vec: Borrow<::Input>, - H::Output: Into, - C::Leaf: Default + Clone + Send, + C::Leaf: Default + Clone + Send + AsRef, { - let ext_mat_cols = ext_mat.cols(); - - let mut col_hashes: Vec = cfg_into_iter!(ext_mat_cols) - .map(|col| hash_column::(col, &col_hash_params).unwrap()) - .collect(); - // pad the column hashes with zeroes - let next_pow_of_two = col_hashes.len().next_power_of_two(); - col_hashes.resize(next_pow_of_two, ::default()); + let next_pow_of_two = leaves.len().next_power_of_two(); + leaves.resize(next_pow_of_two, ::default()); - MerkleTree::::new(leaf_hash_params, two_to_one_params, col_hashes) + MerkleTree::::new(leaf_hash_param, two_to_one_hash_param, leaves) .map_err(|_| Error::HashingError) } -fn generate_proof( +fn generate_proof( sec_param: usize, distance: (usize, usize), b: &[F], mat: &Matrix, ext_mat: &Matrix, col_tree: &MerkleTree, - transcript: &mut IOPTranscript, + sponge: &mut S, ) -> Result, Error> where - F: PrimeField, + F: PrimeField + Absorb, C: Config, + S: CryptographicSponge, { let t = calculate_t::(sec_param, distance, ext_mat.m)?; // 1. left-multiply the matrix by `b`. let v = mat.row_mul(b); - - transcript - .append_serializable_element(b"v", &v) - .map_err(|_| Error::TranscriptError)?; + sponge.absorb(&v); // 2. Generate t column indices to test the linear combination on. - let indices = get_indices_from_transcript(ext_mat.m, t, transcript)?; + let indices = get_indices_from_sponge(ext_mat.m, t, sponge)?; // 3. Compute Merkle tree paths for the requested columns. 
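The refactored `create_merkle_tree` above now receives already-hashed leaves and only handles padding: the leaf vector is extended with default values up to the next power of two so the tree is complete. A dependency-free sketch of that padding step, with `u64` leaves standing in for `C::Leaf`:

// Pad a leaf vector to the next power of two with the default leaf value.
fn pad_leaves<T: Default + Clone>(leaves: &mut Vec<T>) {
    let target = leaves.len().next_power_of_two();
    leaves.resize(target, T::default());
}

fn main() {
    let mut leaves: Vec<u64> = vec![11, 22, 33, 44, 55]; // 5 leaves
    pad_leaves(&mut leaves);
    assert_eq!(leaves.len(), 8);          // padded up to 2^3
    assert_eq!(&leaves[5..], &[0, 0, 0]); // default-valued padding
}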
let mut queried_columns = Vec::with_capacity(t); diff --git a/poly-commit/src/linear_codes/multilinear_ligero/mod.rs b/poly-commit/src/linear_codes/multilinear_ligero/mod.rs index ed0c4ab1..7f071cc9 100644 --- a/poly-commit/src/linear_codes/multilinear_ligero/mod.rs +++ b/poly-commit/src/linear_codes/multilinear_ligero/mod.rs @@ -47,16 +47,16 @@ where _max_degree: usize, _num_vars: Option, _rng: &mut R, - leaf_hash_params: <::LeafHash as CRHScheme>::Parameters, - two_to_one_params: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + leaf_hash_param: <::LeafHash as CRHScheme>::Parameters, + two_to_one_hash_param: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, col_hash_params: H::Parameters, ) -> Self::LinCodePCParams { Self::LinCodePCParams::new( 128, 2, true, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ) } diff --git a/poly-commit/src/linear_codes/multilinear_ligero/tests.rs b/poly-commit/src/linear_codes/multilinear_ligero/tests.rs index 2f91c402..016dc39f 100644 --- a/poly-commit/src/linear_codes/multilinear_ligero/tests.rs +++ b/poly-commit/src/linear_codes/multilinear_ligero/tests.rs @@ -4,7 +4,6 @@ mod tests { use crate::linear_codes::LinearCodePCS; use crate::utils::test_sponge; use crate::{ - challenge::ChallengeGenerator, linear_codes::{LigeroPCParams, MultilinearLigero, PolynomialCommitment}, LabeledPolynomial, }; @@ -89,8 +88,8 @@ mod tests { let mut rng = &mut test_rng(); let num_vars = 10; // just to make sure we have the right degree given the FFT domain for our field - let leaf_hash_params = ::setup(&mut rng).unwrap(); - let two_to_one_params = ::setup(&mut rng) + let leaf_hash_param = ::setup(&mut rng).unwrap(); + let two_to_one_hash_param = ::setup(&mut rng) .unwrap() .clone(); let col_hash_params = as CRHScheme>::setup(&mut rng).unwrap(); @@ -100,8 +99,8 @@ mod tests { 128, 4, check_well_formedness, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ); @@ -122,29 +121,20 @@ mod tests { let value = labeled_poly.evaluate(&point); - let mut challenge_generator: ChallengeGenerator> = - ChallengeGenerator::new_univariate(&mut test_sponge); - let proof = LigeroPCS::::open( &ck, &[labeled_poly], &c, &point, - &mut (challenge_generator.clone()), + &mut (test_sponge.clone()), &rands, None, ) .unwrap(); - assert!(LigeroPCS::::check( - &vk, - &c, - &point, - [value], - &proof, - &mut challenge_generator, - None - ) - .unwrap()); + assert!( + LigeroPCS::::check(&vk, &c, &point, [value], &proof, &mut test_sponge, None) + .unwrap() + ); } fn rand_point(num_vars: Option, rng: &mut ChaCha20Rng) -> Vec { diff --git a/poly-commit/src/linear_codes/univariate_ligero/mod.rs b/poly-commit/src/linear_codes/univariate_ligero/mod.rs index 973a5c30..e6b59fcc 100644 --- a/poly-commit/src/linear_codes/univariate_ligero/mod.rs +++ b/poly-commit/src/linear_codes/univariate_ligero/mod.rs @@ -41,16 +41,16 @@ where _max_degree: usize, _num_vars: Option, _rng: &mut R, - leaf_hash_params: <::LeafHash as CRHScheme>::Parameters, - two_to_one_params: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, + leaf_hash_param: <::LeafHash as CRHScheme>::Parameters, + two_to_one_hash_param: <::TwoToOneHash as TwoToOneCRHScheme>::Parameters, col_hash_params: H::Parameters, ) -> Self::LinCodePCParams { Self::LinCodePCParams::new( 128, 4, true, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ) } diff --git 
a/poly-commit/src/linear_codes/univariate_ligero/tests.rs b/poly-commit/src/linear_codes/univariate_ligero/tests.rs index c98c09ec..3151b2d5 100644 --- a/poly-commit/src/linear_codes/univariate_ligero/tests.rs +++ b/poly-commit/src/linear_codes/univariate_ligero/tests.rs @@ -5,7 +5,6 @@ mod tests { use crate::linear_codes::LinearCodePCS; use crate::utils::test_sponge; use crate::{ - challenge::ChallengeGenerator, linear_codes::{LigeroPCParams, PolynomialCommitment, UnivariateLigero}, LabeledPolynomial, }; @@ -83,8 +82,8 @@ mod tests { let degree = 4; let mut rng = &mut test_rng(); // just to make sure we have the right degree given the FFT domain for our field - let leaf_hash_params = ::setup(&mut rng).unwrap(); - let two_to_one_params = ::setup(&mut rng) + let leaf_hash_param = ::setup(&mut rng).unwrap(); + let two_to_one_hash_param = ::setup(&mut rng) .unwrap() .clone(); let col_hash_params = as CRHScheme>::setup(&mut rng).unwrap(); @@ -94,8 +93,8 @@ mod tests { 128, 4, check_well_formedness, - leaf_hash_params, - two_to_one_params, + leaf_hash_param, + two_to_one_hash_param, col_hash_params, ); @@ -116,29 +115,19 @@ mod tests { let value = labeled_poly.evaluate(&point); - let mut challenge_generator: ChallengeGenerator> = - ChallengeGenerator::new_univariate(&mut test_sponge); - let proof = LigeroPCS::open( &ck, &[labeled_poly], &c, &point, - &mut (challenge_generator.clone()), + &mut (test_sponge.clone()), &rands, None, ) .unwrap(); - assert!(LigeroPCS::check( - &vk, - &c, - &point, - [value], - &proof, - &mut challenge_generator, - None - ) - .unwrap()); + assert!( + LigeroPCS::check(&vk, &c, &point, [value], &proof, &mut test_sponge, None).unwrap() + ); } fn rand_point(_: Option, rng: &mut ChaCha20Rng) -> F { diff --git a/poly-commit/src/linear_codes/utils.rs b/poly-commit/src/linear_codes/utils.rs index 9e43221f..12b868ad 100644 --- a/poly-commit/src/linear_codes/utils.rs +++ b/poly-commit/src/linear_codes/utils.rs @@ -1,11 +1,7 @@ -use core::borrow::Borrow; - -use crate::utils::IOPTranscript; use crate::{utils::ceil_div, Error}; -use ark_crypto_primitives::{crh::CRHScheme, merkle_tree::Config}; +use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::{FftField, PrimeField}; - use ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; use ark_std::string::ToString; use ark_std::vec::Vec; @@ -38,35 +34,18 @@ pub(crate) fn get_num_bytes(n: usize) -> usize { ceil_div((usize::BITS - n.leading_zeros()) as usize, 8) } -#[inline] -pub(crate) fn hash_column(array: Vec, params: &H::Parameters) -> Result -where - F: PrimeField, - C: Config, - H: CRHScheme, - Vec: Borrow<::Input>, - C::Leaf: Sized, - H::Output: Into, -{ - H::evaluate(params, array) - .map_err(|_| Error::HashingError) - .map(|x| x.into()) -} - /// Generate `t` (not necessarily distinct) random points in `[0, n)` /// using the current state of the `transcript`. 
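The index-sampling helper below this doc comment squeezes just enough bytes from the sponge to cover `n`, absorbs them back, and folds them big-endian into a `usize`, which is then reduced into `[0, n)` as the doc comment describes. A self-contained sketch of the byte-to-index conversion, with a fixed byte array standing in for `sponge.squeeze_bytes(...)`:

// Number of bytes needed to represent values in [0, n).
fn get_num_bytes(n: usize) -> usize {
    ((usize::BITS - n.leading_zeros()) as usize + 7) / 8
}

// Fold big-endian bytes into a usize and reduce into [0, n).
fn bytes_to_index(bytes: &[u8], n: usize) -> usize {
    let raw = bytes.iter().fold(0usize, |acc, &x| (acc << 8) + x as usize);
    raw % n
}

fn main() {
    let n = 1000;
    assert_eq!(get_num_bytes(n), 2);             // 1000 < 2^16
    let squeezed = [0x03u8, 0xE9];               // stands in for squeezed sponge bytes
    assert_eq!(bytes_to_index(&squeezed, n), 1); // 0x03E9 = 1001, and 1001 % 1000 = 1
}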
-pub(crate) fn get_indices_from_transcript( +pub(crate) fn get_indices_from_sponge( n: usize, t: usize, - transcript: &mut IOPTranscript, + sponge: &mut S, ) -> Result, Error> { let bytes_to_squeeze = get_num_bytes(n); let mut indices = Vec::with_capacity(t); for _ in 0..t { - let mut bytes: Vec = vec![0; bytes_to_squeeze]; - transcript - .get_and_append_byte_challenge(b"i", &mut bytes) - .map_err(|_| Error::TranscriptError)?; + let bytes = sponge.squeeze_bytes(bytes_to_squeeze); + sponge.absorb(&bytes); // get the usize from Vec: let ind = bytes.iter().fold(0, |acc, &x| (acc << 8) + x as usize); diff --git a/poly-commit/src/marlin/marlin_pc/data_structures.rs b/poly-commit/src/marlin/marlin_pc/data_structures.rs index 2b09e03a..203e3201 100644 --- a/poly-commit/src/marlin/marlin_pc/data_structures.rs +++ b/poly-commit/src/marlin/marlin_pc/data_structures.rs @@ -1,6 +1,6 @@ use crate::{ - DenseUVPolynomial, PCCommitment, PCCommitterKey, PCPreparedCommitment, PCPreparedVerifierKey, - PCRandomness, PCVerifierKey, Vec, + DenseUVPolynomial, PCCommitment, PCCommitmentState, PCCommitterKey, PCPreparedCommitment, + PCPreparedVerifierKey, PCVerifierKey, Vec, }; use ark_ec::pairing::Pairing; use ark_ec::AdditiveGroup; @@ -360,7 +360,8 @@ impl<'a, F: PrimeField, P: DenseUVPolynomial> AddAssign<(F, &'a Randomness> PCRandomness for Randomness { +impl> PCCommitmentState for Randomness { + type Randomness = Self; fn empty() -> Self { Self { rand: kzg10::Randomness::empty(), diff --git a/poly-commit/src/marlin/marlin_pc/mod.rs b/poly-commit/src/marlin/marlin_pc/mod.rs index 39c4e362..7fbfba07 100644 --- a/poly-commit/src/marlin/marlin_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pc/mod.rs @@ -2,7 +2,7 @@ use crate::{kzg10, marlin::Marlin, PCCommitterKey, CHALLENGE_SIZE}; use crate::{BTreeMap, BTreeSet, ToString, Vec}; use crate::{BatchLCProof, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCUniversalParams, PolynomialCommitment}; use ark_ec::pairing::Pairing; use ark_ec::AffineRepr; use ark_ec::CurveGroup; @@ -12,7 +12,6 @@ use ark_std::rand::RngCore; use ark_std::{marker::PhantomData, ops::Div, vec}; mod data_structures; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; pub use data_structures::*; @@ -66,7 +65,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = kzg10::Proof; type BatchProof = Vec; type Error = Error; @@ -180,7 +179,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -191,7 +190,7 @@ where let commit_time = start_timer!(|| "Committing to polynomials"); let mut commitments = Vec::new(); - let mut randomness = Vec::new(); + let mut states = Vec::new(); for p in polynomials { let label = p.label(); @@ -232,17 +231,17 @@ where }; let comm = Commitment { comm, shifted_comm }; - let rand = Randomness { rand, shifted_rand }; + let state = Randomness { rand, shifted_rand }; commitments.push(LabeledCommitment::new( label.to_string(), comm, degree_bound, )); - randomness.push(rand); + states.push(state); end_timer!(commit_time); } end_timer!(commit_time); - Ok((commitments, randomness)) + Ok((commitments, states)) } /// On input a polynomial `p` and a point `point`, outputs a proof for the same. 
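In the hunk that follows, `open` no longer draws challenges from a `ChallengeGenerator`; it squeezes a fresh short challenge from the sponge for each polynomial via `squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]` and folds everything into one combined polynomial and one combined randomness. The accumulation pattern, sketched over integer coefficient vectors with the challenges supplied up front (the real code squeezes them one by one from a `CryptographicSponge` and works over a field):

// Random-linear-combination accumulation used by batched openings:
// combined = sum_j challenge_j * poly_j, coefficient-wise.
fn combine(polys: &[Vec<i64>], challenges: &[i64]) -> Vec<i64> {
    let max_len = polys.iter().map(|p| p.len()).max().unwrap_or(0);
    let mut combined = vec![0i64; max_len];
    for (poly, ch) in polys.iter().zip(challenges) {
        for (i, c) in poly.iter().enumerate() {
            combined[i] += ch * c;
        }
    }
    combined
}

fn main() {
    let polys = vec![vec![1, 2, 3], vec![4, 5]]; // p0, p1 as coefficient vectors
    let challenges = vec![10, 100];              // stand-ins for squeezed challenges
    assert_eq!(combine(&polys, &challenges), vec![410, 520, 30]);
}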
@@ -251,13 +250,13 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { let mut p = P::zero(); @@ -267,7 +266,7 @@ where let mut shifted_r_witness = P::zero(); let mut enforce_degree_bound = false; - for (polynomial, rand) in labeled_polynomials.into_iter().zip(rands) { + for (polynomial, rand) in labeled_polynomials.into_iter().zip(states) { let degree_bound = polynomial.degree_bound(); assert_eq!(degree_bound.is_some(), rand.shifted_rand.is_some()); @@ -283,7 +282,7 @@ where )?; // compute next challenges challenge^j and challenge^{j+1}. - let challenge_j = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; assert_eq!(degree_bound.is_some(), rand.shifted_rand.is_some()); @@ -299,7 +298,7 @@ where *point, &shifted_rand, )?; - let challenge_j_1 = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j_1 = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let shifted_witness = shift_polynomial(ck, &witness, degree_bound); @@ -347,7 +346,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -358,7 +357,7 @@ where Marlin::::accumulate_commitments_and_values( commitments, values, - opening_challenges, + sponge, Some(vk), )?; let combined_comm = kzg10::Commitment(combined_comm.into()); @@ -373,7 +372,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -384,7 +383,7 @@ where commitments, query_set, values, - opening_challenges, + sponge, Some(vk), )?; assert_eq!(proof.len(), combined_queries.len()); @@ -407,13 +406,13 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { Marlin::::open_combinations( @@ -422,8 +421,8 @@ where polynomials, commitments, query_set, - opening_challenges, - rands, + sponge, + states, rng, ) } @@ -437,7 +436,7 @@ where query_set: &QuerySet, evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -450,7 +449,7 @@ where query_set, evaluations, proof, - opening_challenges, + sponge, rng, ) } @@ -462,19 +461,19 @@ where labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result>, Error> where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { let rng = &mut crate::optional_rng::OptionalRng(rng); let poly_rand_comm: BTreeMap<_, _> = labeled_polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments.into_iter()) .map(|((poly, r), 
comm)| (poly.label(), (poly, r, comm))) .collect(); @@ -497,7 +496,7 @@ where let mut proofs = Vec::new(); for (_point_label, (point, labels)) in query_to_labels_map.into_iter() { let mut query_polys: Vec<&'a LabeledPolynomial<_, _>> = Vec::new(); - let mut query_rands: Vec<&'a Self::Randomness> = Vec::new(); + let mut query_states: Vec<&'a Self::CommitmentState> = Vec::new(); let mut query_comms: Vec<&'a LabeledCommitment> = Vec::new(); for label in labels { @@ -507,7 +506,7 @@ where })?; query_polys.push(polynomial); - query_rands.push(rand); + query_states.push(rand); query_comms.push(comm); } @@ -517,8 +516,8 @@ where query_polys, query_comms, point, - opening_challenges, - query_rands, + sponge, + query_states, Some(rng), )?; diff --git a/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs b/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs index 8ccf300b..9cc8d73b 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/data_structures.rs @@ -1,6 +1,6 @@ use crate::{BTreeMap, Vec}; use crate::{ - PCCommitterKey, PCPreparedVerifierKey, PCRandomness, PCUniversalParams, PCVerifierKey, + PCCommitmentState, PCCommitterKey, PCPreparedVerifierKey, PCUniversalParams, PCVerifierKey, }; use ark_ec::pairing::Pairing; use ark_poly::DenseMVPolynomial; @@ -362,12 +362,13 @@ where } } -impl PCRandomness for Randomness +impl PCCommitmentState for Randomness where E: Pairing, P: DenseMVPolynomial, P::Point: Index, { + type Randomness = Self; fn empty() -> Self { Self { blinding_polynomial: P::zero(), diff --git a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs index ac47c2a7..eee026d7 100644 --- a/poly-commit/src/marlin/marlin_pst13_pc/mod.rs +++ b/poly-commit/src/marlin/marlin_pst13_pc/mod.rs @@ -5,10 +5,14 @@ use crate::{ }; use crate::{BatchLCProof, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCUniversalParams, PolynomialCommitment}; use crate::{ToString, Vec}; use ark_ec::AffineRepr; -use ark_ec::{pairing::Pairing, scalar_mul::fixed_base::FixedBase, CurveGroup, VariableBaseMSM}; +use ark_ec::{ + pairing::Pairing, + scalar_mul::{BatchMulPreprocessing, ScalarMul}, + CurveGroup, VariableBaseMSM, +}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::{multivariate::Term, DenseMVPolynomial}; use ark_std::rand::RngCore; @@ -20,7 +24,6 @@ pub use data_structures::*; mod combinations; use combinations::*; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; #[cfg(feature = "parallel")] use rayon::prelude::*; @@ -151,7 +154,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = marlin_pc::Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = Proof; type BatchProof = Vec; type Error = Error; @@ -211,47 +214,33 @@ where }) .unzip(); - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let g_time = start_timer!(|| "Generating powers of G"); - let window_size = FixedBase::get_mul_window_size(max_degree + 1); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); - let mut powers_of_g = - FixedBase::msm::(scalar_bits, window_size, &g_table, &powers_of_beta); - powers_of_g.push(g); + let mut powers_of_g = g.batch_mul(&powers_of_beta); + powers_of_g.push(g.into_affine()); 
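This setup hunk (and the `gamma_g` one right after it) replaces the manual `FixedBase` window-table pipeline with `batch_mul` / `BatchMulPreprocessing` from `ark-ec`, which already returns affine points, so the separate `normalize_batch` call disappears and the preprocessing table is built once and reused. Functionally, `batch_mul` computes `[s_i]G` for every scalar `s_i`; a toy additive-group sketch of that contract, with integers modulo a small prime standing in for the curve group:

// Toy stand-in for `g.batch_mul(&scalars)`: scalar-multiply one base element by
// every scalar, in a group written additively (here: integers mod P).
const P: u64 = 1_000_003;

fn batch_mul(g: u64, scalars: &[u64]) -> Vec<u64> {
    scalars.iter().map(|s| (g * s) % P).collect()
}

fn main() {
    let g = 7;
    let powers_of_beta = [1, 2, 3, 4];
    assert_eq!(batch_mul(g, &powers_of_beta), vec![7, 14, 21, 28]);
}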
powers_of_beta_terms.push(P::Term::new(vec![])); end_timer!(g_time); let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); - let window_size = FixedBase::get_mul_window_size(max_degree + 2); - let gamma_g_table = FixedBase::get_window_table(scalar_bits, window_size, gamma_g); // Each element `i` of `powers_of_gamma_g` is a vector of length `max_degree+1` // containing `betas[i]^j \gamma G` for `j` from 1 to `max_degree+1` to support // up to `max_degree` queries let mut powers_of_gamma_g = vec![Vec::new(); num_vars]; + let gamma_g_table = BatchMulPreprocessing::new(gamma_g, max_degree + 1); + ark_std::cfg_iter_mut!(powers_of_gamma_g) .enumerate() .for_each(|(i, v)| { - let mut powers_of_beta = Vec::with_capacity(max_degree); + let mut powers_of_beta = Vec::with_capacity(max_degree + 1); let mut cur = E::ScalarField::one(); for _ in 0..=max_degree { cur *= &betas[i]; powers_of_beta.push(cur); } - *v = FixedBase::msm::( - scalar_bits, - window_size, - &gamma_g_table, - &powers_of_beta, - ); + *v = gamma_g_table.batch_mul(&powers_of_beta); }); end_timer!(gamma_g_time); - let powers_of_g = E::G1::normalize_batch(&powers_of_g); let gamma_g = gamma_g.into_affine(); - let powers_of_gamma_g = powers_of_gamma_g - .into_iter() - .map(|v| E::G1::normalize_batch(&v)) - .collect(); let beta_h: Vec<_> = betas.iter().map(|b| h.mul(b).into_affine()).collect(); let h = h.into_affine(); let prepared_h = h.into(); @@ -343,7 +332,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -440,26 +429,26 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { // Compute random linear combinations of committed polynomials and randomness let mut p = P::zero(); let mut r = Randomness::empty(); - for (polynomial, rand) in labeled_polynomials.into_iter().zip(rands) { + for (polynomial, state) in labeled_polynomials.into_iter().zip(states) { Self::check_degrees_and_bounds(ck.supported_degree, &polynomial)?; // compute challenge^j and challenge^{j+1}. 
- let challenge_j = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_j = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; p += (challenge_j, polynomial.polynomial()); - r += (challenge_j, rand); + r += (challenge_j, state); } let open_time = start_timer!(|| format!("Opening polynomial of degree {}", p.degree())); @@ -538,7 +527,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -550,7 +539,7 @@ where Marlin::::accumulate_commitments_and_values( commitments, values, - opening_challenges, + sponge, None, )?; // Compute both sides of the pairing equation @@ -582,7 +571,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -593,7 +582,7 @@ where commitments, query_set, values, - opening_challenges, + sponge, None, )?; let check_time = @@ -660,13 +649,13 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where P: 'a, - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, { Marlin::::open_combinations( @@ -675,8 +664,8 @@ where polynomials, commitments, query_set, - opening_challenges, - rands, + sponge, + states, rng, ) } @@ -690,7 +679,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -703,7 +692,7 @@ where eqn_query_set, eqn_evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/marlin/mod.rs b/poly-commit/src/marlin/mod.rs index 4bd4fe27..d7e7f5a1 100644 --- a/poly-commit/src/marlin/mod.rs +++ b/poly-commit/src/marlin/mod.rs @@ -1,9 +1,9 @@ -use crate::{challenge::ChallengeGenerator, CHALLENGE_SIZE}; +use crate::CHALLENGE_SIZE; use crate::{kzg10, Error}; use crate::{BTreeMap, BTreeSet, Debug, RngCore, String, ToString, Vec}; use crate::{BatchLCProof, LabeledPolynomial, LinearCombination}; use crate::{Evaluations, LabeledCommitment, QuerySet}; -use crate::{PCRandomness, Polynomial, PolynomialCommitment}; +use crate::{PCCommitmentState, Polynomial, PolynomialCommitment}; use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ec::pairing::Pairing; use ark_ec::AffineRepr; @@ -110,7 +110,7 @@ where fn accumulate_commitments_and_values<'a>( commitments: impl IntoIterator>>, values: impl IntoIterator, - challenge_gen: &mut ChallengeGenerator, + sponge: &mut S, vk: Option<&marlin_pc::VerifierKey>, ) -> Result<(E::G1, E::ScalarField), Error> { let acc_time = start_timer!(|| "Accumulating commitments and values"); @@ -121,13 +121,14 @@ where let commitment = labeled_commitment.commitment(); assert_eq!(degree_bound.is_some(), commitment.shifted_comm.is_some()); - let challenge_i = challenge_gen.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_i = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; combined_comm += &commitment.comm.0.mul(challenge_i); combined_value += &(value * &challenge_i); if let Some(degree_bound) = degree_bound { - let challenge_i_1 = challenge_gen.try_next_challenge_of_size(CHALLENGE_SIZE); + let challenge_i_1: E::ScalarField 
= + sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; let shifted_comm = commitment.shifted_comm.as_ref().unwrap().0.into_group(); @@ -152,7 +153,7 @@ where commitments: impl IntoIterator>>, query_set: &QuerySet, evaluations: &Evaluations, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, vk: Option<&marlin_pc::VerifierKey>, ) -> Result<(Vec>, Vec, Vec), Error> where @@ -199,7 +200,7 @@ where let (c, v) = Self::accumulate_commitments_and_values( comms_to_combine, values_to_combine, - opening_challenges, + sponge, vk, )?; end_timer!(lc_time); @@ -227,8 +228,8 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Error> where @@ -241,18 +242,18 @@ where Commitment = marlin_pc::Commitment, Error = Error, >, - PC::Randomness: 'a + AddAssign<(E::ScalarField, &'a PC::Randomness)>, + PC::CommitmentState: 'a + AddAssign<(E::ScalarField, &'a PC::CommitmentState)>, PC::Commitment: 'a, { let label_map = polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments) .map(|((p, r), c)| (p.label(), (p, r, c))) .collect::>(); let mut lc_polynomials = Vec::new(); - let mut lc_randomness = Vec::new(); + let mut lc_states: Vec = Vec::new(); let mut lc_commitments = Vec::new(); let mut lc_info = Vec::new(); @@ -262,13 +263,13 @@ where let mut degree_bound = None; let mut hiding_bound = None; - let mut randomness = PC::Randomness::empty(); + let mut randomness = PC::CommitmentState::empty(); let mut coeffs_and_comms = Vec::new(); let num_polys = lc.len(); for (coeff, label) in lc.iter().filter(|(_, l)| !l.is_one()) { let label: &String = label.try_into().expect("cannot be one!"); - let &(cur_poly, cur_rand, cur_comm) = + let &(cur_poly, cur_state, cur_comm) = label_map.get(label).ok_or(Error::MissingPolynomial { label: label.to_string(), })?; @@ -284,14 +285,14 @@ where // Some(_) > None, always. 
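The `// Some(_) > None, always.` comment relies on `Option`'s derived ordering: `None` sorts below every `Some(_)`, so the `core::cmp::max` call on the next line keeps the largest hiding bound seen across the linear combination and only stays `None` if no term carries one. A short check of that behaviour:

fn main() {
    // Option<usize> ordering: None < Some(x) for every x; Some values compare by value.
    assert_eq!(core::cmp::max(None, Some(3usize)), Some(3));
    assert_eq!(core::cmp::max(Some(5usize), Some(3)), Some(5));
    assert_eq!(core::cmp::max(None::<usize>, None), None);
}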
hiding_bound = core::cmp::max(hiding_bound, cur_poly.hiding_bound()); poly += (*coeff, cur_poly.polynomial()); - randomness += (*coeff, cur_rand); + randomness += (*coeff, cur_state); coeffs_and_comms.push((*coeff, cur_comm.commitment())); } let lc_poly = LabeledPolynomial::new(lc_label.clone(), poly, degree_bound, hiding_bound); lc_polynomials.push(lc_poly); - lc_randomness.push(randomness); + lc_states.push(randomness); lc_commitments.push(Self::combine_commitments(coeffs_and_comms)); lc_info.push((lc_label, degree_bound)); } @@ -308,8 +309,8 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, - lc_randomness.iter(), + sponge, + lc_states.iter(), rng, )?; @@ -323,7 +324,7 @@ where query_set: &QuerySet, evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -404,7 +405,7 @@ where &query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/multilinear_pc/mod.rs b/poly-commit/src/multilinear_pc/mod.rs index eff86ab9..0973e822 100644 --- a/poly-commit/src/multilinear_pc/mod.rs +++ b/poly-commit/src/multilinear_pc/mod.rs @@ -1,9 +1,10 @@ use crate::multilinear_pc::data_structures::{ Commitment, CommitterKey, Proof, UniversalParams, VerifierKey, }; +use ark_ec::scalar_mul::BatchMulPreprocessing; use ark_ec::AffineRepr; use ark_ec::{pairing::Pairing, CurveGroup}; -use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; +use ark_ec::{scalar_mul::ScalarMul, VariableBaseMSM}; use ark_ff::{Field, PrimeField}; use ark_ff::{One, Zero}; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; @@ -27,14 +28,11 @@ impl MultilinearPC { /// setup pub fn setup(num_vars: usize, rng: &mut R) -> UniversalParams { assert!(num_vars > 0, "constant polynomial not supported"); - let g: E::G1 = E::G1::rand(rng); - let h: E::G2 = E::G2::rand(rng); - let g = g.into_affine(); - let h = h.into_affine(); + let g = E::G1::rand(rng); + let h = E::G2::rand(rng); let mut powers_of_g = Vec::new(); let mut powers_of_h = Vec::new(); let t: Vec<_> = (0..num_vars).map(|_| E::ScalarField::rand(rng)).collect(); - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let mut eq: LinkedList> = LinkedList::from_iter(eq_extension(&t).into_iter()); @@ -54,29 +52,15 @@ impl MultilinearPC { } let mut pp_powers = Vec::new(); - let mut total_scalars = 0; for i in 0..num_vars { let eq = eq_arr.pop_front().unwrap(); let pp_k_powers = (0..(1 << (num_vars - i))).map(|x| eq[x]); pp_powers.extend(pp_k_powers); - total_scalars += 1 << (num_vars - i); } - let window_size = FixedBase::get_mul_window_size(total_scalars); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_group()); - let h_table = FixedBase::get_window_table(scalar_bits, window_size, h.into_group()); - - let pp_g = E::G1::normalize_batch(&FixedBase::msm( - scalar_bits, - window_size, - &g_table, - &pp_powers, - )); - let pp_h = E::G2::normalize_batch(&FixedBase::msm( - scalar_bits, - window_size, - &h_table, - &pp_powers, - )); + + let g_table = BatchMulPreprocessing::new(g, num_vars); + let pp_g = g_table.batch_mul(&pp_powers); + let pp_h = h.batch_mul(&pp_powers); let mut start = 0; for i in 0..num_vars { let size = 1 << (num_vars - i); @@ -89,18 +73,14 @@ impl MultilinearPC { // uncomment to measure the time for calculating vp // let vp_generation_timer = start_timer!(|| "VP generation"); - let g_mask = { - let window_size = 
FixedBase::get_mul_window_size(num_vars); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_group()); - E::G1::normalize_batch(&FixedBase::msm(scalar_bits, window_size, &g_table, &t)) - }; + let g_mask = g_table.batch_mul(&t); // end_timer!(vp_generation_timer); UniversalParams { num_vars, - g, + g: g.into_affine(), g_mask, - h, + h: h.into_affine(), powers_of_g, powers_of_h, } @@ -199,11 +179,7 @@ impl MultilinearPC { ) -> bool { let left = E::pairing(commitment.g_product.into_group() - &vk.g.mul(value), vk.h); - let scalar_size = E::ScalarField::MODULUS_BIT_SIZE as usize; - let window_size = FixedBase::get_mul_window_size(vk.nv); - - let g_table = FixedBase::get_window_table(scalar_size, window_size, vk.g.into_group()); - let g_mul: Vec = FixedBase::msm(scalar_size, window_size, &g_table, point); + let g_mul = vk.g.into_group().batch_mul(point); let pairing_lefts: Vec<_> = (0..vk.nv) .map(|i| vk.g_mask_random[i].into_group() - &g_mul[i]) diff --git a/poly-commit/src/sonic_pc/mod.rs b/poly-commit/src/sonic_pc/mod.rs index b989b323..caf9b79c 100644 --- a/poly-commit/src/sonic_pc/mod.rs +++ b/poly-commit/src/sonic_pc/mod.rs @@ -2,7 +2,7 @@ use crate::{kzg10, PCCommitterKey, CHALLENGE_SIZE}; use crate::{BTreeMap, BTreeSet, String, ToString, Vec}; use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; -use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use crate::{PCCommitmentState, PCUniversalParams, PolynomialCommitment}; use ark_ec::AffineRepr; use ark_ec::CurveGroup; @@ -12,7 +12,6 @@ use ark_std::rand::RngCore; use ark_std::{convert::TryInto, marker::PhantomData, ops::Div, ops::Mul, vec}; mod data_structures; -use crate::challenge::ChallengeGenerator; use ark_crypto_primitives::sponge::CryptographicSponge; pub use data_structures::*; @@ -47,12 +46,12 @@ where point: P::Point, values: impl IntoIterator, proof: &kzg10::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, randomizer: Option, ) { let acc_time = start_timer!(|| "Accumulating elements"); - let mut curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; // Keeps track of running combination of values let mut combined_values = E::ScalarField::zero(); @@ -73,7 +72,7 @@ where // Accumulate values in the BTreeMap *combined_comms.entry(degree_bound).or_insert(E::G1::zero()) += &comm_with_challenge; - curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } // Push expected results into list of elems. 
Power will be the negative of the expected power @@ -146,7 +145,7 @@ where type CommitterKey = CommitterKey; type VerifierKey = VerifierKey; type Commitment = Commitment; - type Randomness = Randomness; + type CommitmentState = Randomness; type Proof = kzg10::Proof; type BatchProof = Vec; type Error = Error; @@ -281,7 +280,7 @@ where ) -> Result< ( Vec>, - Vec, + Vec, ), Self::Error, > @@ -291,7 +290,7 @@ where let rng = &mut crate::optional_rng::OptionalRng(rng); let commit_time = start_timer!(|| "Committing to polynomials"); let mut labeled_comms: Vec> = Vec::new(); - let mut randomness: Vec = Vec::new(); + let mut randomness: Vec = Vec::new(); for labeled_polynomial in polynomials { let enforced_degree_bounds: Option<&[usize]> = ck @@ -345,21 +344,21 @@ where labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { let mut combined_polynomial = P::zero(); let mut combined_rand = kzg10::Randomness::empty(); - let mut curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + let mut curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; - for (polynomial, rand) in labeled_polynomials.into_iter().zip(rands) { + for (polynomial, state) in labeled_polynomials.into_iter().zip(states) { let enforced_degree_bounds: Option<&[usize]> = ck .enforced_degree_bounds .as_ref() @@ -373,8 +372,8 @@ where )?; combined_polynomial += (curr_challenge, polynomial.polynomial()); - combined_rand += (curr_challenge, rand); - curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); + combined_rand += (curr_challenge, state); + curr_challenge = sponge.squeeze_field_elements_with_sizes(&[CHALLENGE_SIZE])[0]; } let proof_time = start_timer!(|| "Creating proof for polynomials"); @@ -390,7 +389,7 @@ where point: &'a P::Point, values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -410,7 +409,7 @@ where *point, values, proof, - opening_challenges, + sponge, None, ); @@ -430,7 +429,7 @@ where query_set: &QuerySet, values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -481,7 +480,7 @@ where *point, values_to_combine.into_iter(), p, - opening_challenges, + sponge, Some(randomizer), ); @@ -502,24 +501,24 @@ where polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, - rands: impl IntoIterator, + sponge: &mut S, + states: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result, Self::Error> where - Self::Randomness: 'a, + Self::CommitmentState: 'a, Self::Commitment: 'a, P: 'a, { let label_map = polynomials .into_iter() - .zip(rands) + .zip(states) .zip(commitments) - .map(|((p, r), c)| (p.label(), (p, r, c))) + .map(|((p, s), c)| (p.label(), (p, s, c))) .collect::>(); let mut lc_polynomials = Vec::new(); - let mut lc_randomness = Vec::new(); + let mut lc_states = Vec::new(); let mut lc_commitments = Vec::new(); let mut lc_info = Vec::new(); @@ -528,13 +527,13 @@ where let mut poly = P::zero(); let mut degree_bound = None; let mut hiding_bound = None; - let mut randomness = 
Self::Randomness::empty(); + let mut state = Self::CommitmentState::empty(); let mut comm = E::G1::zero(); let num_polys = lc.len(); for (coeff, label) in lc.iter().filter(|(_, l)| !l.is_one()) { let label: &String = label.try_into().expect("cannot be one!"); - let &(cur_poly, cur_rand, curr_comm) = + let &(cur_poly, cur_state, curr_comm) = label_map.get(label).ok_or(Error::MissingPolynomial { label: label.to_string(), })?; @@ -553,14 +552,14 @@ where // Some(_) > None, always. hiding_bound = core::cmp::max(hiding_bound, cur_poly.hiding_bound()); poly += (*coeff, cur_poly.polynomial()); - randomness += (*coeff, cur_rand); + state += (*coeff, cur_state); comm += &curr_comm.commitment().0.mul(*coeff); } let lc_poly = LabeledPolynomial::new(lc_label.clone(), poly, degree_bound, hiding_bound); lc_polynomials.push(lc_poly); - lc_randomness.push(randomness); + lc_states.push(state); lc_commitments.push(comm); lc_info.push((lc_label, degree_bound)); } @@ -581,8 +580,8 @@ where lc_polynomials.iter(), lc_commitments.iter(), &query_set, - opening_challenges, - lc_randomness.iter(), + sponge, + lc_states.iter(), rng, )?; Ok(BatchLCProof { proof, evals: None }) @@ -597,7 +596,7 @@ where eqn_query_set: &QuerySet, eqn_evaluations: &Evaluations, proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + sponge: &mut S, rng: &mut R, ) -> Result where @@ -666,7 +665,7 @@ where &eqn_query_set, &evaluations, proof, - opening_challenges, + sponge, rng, ) } diff --git a/poly-commit/src/streaming_kzg/data_structures.rs b/poly-commit/src/streaming_kzg/data_structures.rs index 7adaf005..c8b19c83 100644 --- a/poly-commit/src/streaming_kzg/data_structures.rs +++ b/poly-commit/src/streaming_kzg/data_structures.rs @@ -141,7 +141,7 @@ where /// Stream implementation of foleded polynomial. #[derive(Clone, Copy)] -pub struct FoldedPolynomialStream<'a, F, S>(FoldedPolynomialTree<'a, F, S>, usize); +pub struct FoldedPolynomialStream<'a, F, S>(FoldedPolynomialTree<'a, F, S>); /// Iterator implementation of foleded polynomial. pub struct FoldedPolynomialStreamIter<'a, F, I> { challenges: &'a [F], @@ -158,8 +158,7 @@ where /// Initialize a new folded polynomial stream. pub fn new(coefficients: &'a S, challenges: &'a [F]) -> Self { let tree = FoldedPolynomialTree::new(coefficients, challenges); - let len = challenges.len(); - Self(tree, len) + Self(tree) } } @@ -241,7 +240,7 @@ fn test_folded_polynomial() { let challenges = vec![F::one(), two]; let coefficients_stream = coefficients.as_slice(); let foldstream = FoldedPolynomialTree::new(&coefficients_stream, challenges.as_slice()); - let fold_stream = FoldedPolynomialStream(foldstream, 2); + let fold_stream = FoldedPolynomialStream(foldstream); assert_eq!(fold_stream.len(), 1); assert_eq!( fold_stream.iter().next(), @@ -253,7 +252,7 @@ fn test_folded_polynomial() { let challenges = vec![F::one(); 4]; let coefficients_stream = coefficients.as_slice(); let foldstream = FoldedPolynomialTree::new(&coefficients_stream, challenges.as_slice()); - let fold_stream = FoldedPolynomialStream(foldstream, 4).iter(); + let fold_stream = FoldedPolynomialStream(foldstream).iter(); assert_eq!(fold_stream.last(), Some(coefficients.iter().sum())); } diff --git a/poly-commit/src/streaming_kzg/time.rs b/poly-commit/src/streaming_kzg/time.rs index 8c7fa2f8..b8d52093 100644 --- a/poly-commit/src/streaming_kzg/time.rs +++ b/poly-commit/src/streaming_kzg/time.rs @@ -1,9 +1,9 @@ //! An impementation of a time-efficient version of Kate et al's polynomial commitment, //! 
with optimization from [\[BDFG20\]](https://eprint.iacr.org/2020/081.pdf). use ark_ec::pairing::Pairing; -use ark_ec::scalar_mul::fixed_base::FixedBase; +use ark_ec::scalar_mul::ScalarMul; use ark_ec::CurveGroup; -use ark_ff::{PrimeField, Zero}; +use ark_ff::Zero; use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial}; use ark_std::{borrow::Borrow, ops::Div, ops::Mul, rand::RngCore, vec::Vec, UniformRand}; @@ -50,11 +50,7 @@ impl CommitterKey { let powers_of_tau = powers(tau, max_degree + 1); let g = E::G1::rand(rng); - let window_size = FixedBase::get_mul_window_size(max_degree + 1); - let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); - let powers_of_g_proj = FixedBase::msm(scalar_bits, window_size, &g_table, &powers_of_tau); - let powers_of_g = E::G1::normalize_batch(&powers_of_g_proj); + let powers_of_g = g.batch_mul(&powers_of_tau); let g2 = E::G2::rand(rng).into_affine(); let powers_of_g2 = powers_of_tau diff --git a/poly-commit/src/utils.rs b/poly-commit/src/utils.rs index 5606c6b0..9f731a0b 100644 --- a/poly-commit/src/utils.rs +++ b/poly-commit/src/utils.rs @@ -1,4 +1,6 @@ -use core::marker::PhantomData; +use ark_ff::Field; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::vec::Vec; #[cfg(feature = "parallel")] use rayon::{ @@ -6,13 +8,6 @@ use rayon::{ prelude::IndexedParallelIterator, }; -use ark_ff::{Field, PrimeField}; -use ark_serialize::CanonicalSerialize; -use ark_std::vec::Vec; -use merlin::Transcript; - -use crate::Error; - /// Takes as input a struct, and converts them to a series of bytes. All traits /// that implement `CanonicalSerialize` can be automatically converted to bytes /// in this manner. @@ -31,7 +26,8 @@ pub(crate) fn ceil_div(x: usize, y: usize) -> usize { (x + y - 1) / y } -#[derive(Debug)] +#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] +#[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] pub struct Matrix { pub(crate) n: usize, pub(crate) m: usize, @@ -140,86 +136,6 @@ pub(crate) fn inner_product(v1: &[F], v2: &[F]) -> F { .sum() } -/// The following struct is taken from jellyfish repository. Once they change -/// their dependency on `crypto-primitive`, we use their crate instead of -/// a copy-paste. We needed the newer `crypto-primitive` for serializing. -#[derive(Clone)] -pub(crate) struct IOPTranscript { - transcript: Transcript, - is_empty: bool, - #[doc(hidden)] - phantom: PhantomData, -} - -// TODO: merge this with jf_plonk::transcript -impl IOPTranscript { - /// Create a new IOP transcript. - pub(crate) fn new(label: &'static [u8]) -> Self { - Self { - transcript: Transcript::new(label), - is_empty: true, - phantom: PhantomData, - } - } - - /// Append the message to the transcript. - pub(crate) fn append_message(&mut self, label: &'static [u8], msg: &[u8]) -> Result<(), Error> { - self.transcript.append_message(label, msg); - self.is_empty = false; - Ok(()) - } - - /// Append the message to the transcript. - pub(crate) fn append_serializable_element( - &mut self, - label: &'static [u8], - group_elem: &S, - ) -> Result<(), Error> { - self.append_message( - label, - &to_bytes!(group_elem).map_err(|_| Error::TranscriptError)?, - ) - } - - /// Generate the challenge from the current transcript - /// and append it to the transcript. - /// - /// The output field element is statistical uniform as long - /// as the field has a size less than 2^384. 
- pub(crate) fn get_and_append_challenge(&mut self, label: &'static [u8]) -> Result { - // we need to reject when transcript is empty - if self.is_empty { - return Err(Error::TranscriptError); - } - - let mut buf = [0u8; 64]; - self.transcript.challenge_bytes(label, &mut buf); - let challenge = F::from_le_bytes_mod_order(&buf); - self.append_serializable_element(label, &challenge)?; - Ok(challenge) - } - - /// Generate the challenge from the current transcript - /// and append it to the transcript. - /// - /// Without exposing the internal field `transcript`, - /// this is a wrapper around getting bytes as opposed to field elements. - pub(crate) fn get_and_append_byte_challenge( - &mut self, - label: &'static [u8], - dest: &mut [u8], - ) -> Result<(), Error> { - // we need to reject when transcript is empty - if self.is_empty { - return Err(Error::TranscriptError); - } - - self.transcript.challenge_bytes(label, dest); - self.append_message(label, dest)?; - Ok(()) - } -} - #[inline] #[cfg(test)] pub(crate) fn to_field(v: Vec) -> Vec { @@ -229,6 +145,8 @@ pub(crate) fn to_field(v: Vec) -> Vec { // TODO: replace by https://github.com/arkworks-rs/crypto-primitives/issues/112. #[cfg(test)] use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; +#[cfg(test)] +use ark_ff::PrimeField; #[cfg(test)] pub(crate) fn test_sponge() -> PoseidonSponge {