diff --git a/Cargo.toml b/Cargo.toml
index b0ed3f07c..2eb4b7f3f 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,8 +10,7 @@ members = [
     "prover",
     "verifier",
     "winterfell",
-    "examples"
-]
+    "examples"]
 resolver = "2"
 
 [profile.release]
diff --git a/air/src/air/boundary/mod.rs b/air/src/air/boundary/mod.rs
index 7f92c80ab..d1ad7271e 100644
--- a/air/src/air/boundary/mod.rs
+++ b/air/src/air/boundary/mod.rs
@@ -170,7 +170,7 @@ where
         let group = groups.entry(key).or_insert_with(|| {
             BoundaryConstraintGroup::new(ConstraintDivisor::from_assertion(
                 &assertion,
-                context.trace_len(),
+                context.trace_info().length(),
             ))
         });
 
diff --git a/air/src/air/context.rs b/air/src/air/context.rs
index 09341afe3..c7412aece 100644
--- a/air/src/air/context.rs
+++ b/air/src/air/context.rs
@@ -26,6 +26,8 @@ pub struct AirContext<B: StarkField> {
     pub(super) trace_domain_generator: B,
     pub(super) lde_domain_generator: B,
     pub(super) num_transition_exemptions: usize,
+    pub(super) trace_length_ext: usize,
+    pub(super) zk_parameters: Option<ZkParameters>,
 }
 
 impl<B: StarkField> AirContext<B> {
@@ -133,18 +135,35 @@ impl<B: StarkField> AirContext<B> {
             );
         }
 
+        let h = options.zk_witness_randomizer_degree().unwrap_or(0);
+        let trace_length = trace_info.length();
+        let trace_length_ext = (trace_length + h as usize).next_power_of_two();
+        let zk_blowup = trace_length_ext / trace_length;
+        let lde_domain_size = trace_length_ext * options.blowup_factor();
+        // equation (12) in https://eprint.iacr.org/2024/1037
+        let h_q = options.num_queries() + 1;
+        let zk_parameters = if options.is_zk() {
+            Some(ZkParameters {
+                degree_witness_randomizer: h as usize,
+                degree_constraint_randomizer: h_q,
+                zk_blowup_witness: zk_blowup,
+            })
+        } else {
+            None
+        };
+
         // determine minimum blowup factor needed to evaluate transition constraints by taking
         // the blowup factor of the highest degree constraint
         let mut ce_blowup_factor = 0;
         for degree in main_transition_constraint_degrees.iter() {
-            if degree.min_blowup_factor() > ce_blowup_factor {
-                ce_blowup_factor = degree.min_blowup_factor();
+            if degree.min_blowup_factor(trace_length, trace_length_ext) > ce_blowup_factor {
+                ce_blowup_factor = degree.min_blowup_factor(trace_length, trace_length_ext);
             }
         }
 
         for degree in aux_transition_constraint_degrees.iter() {
-            if degree.min_blowup_factor() > ce_blowup_factor {
-                ce_blowup_factor = degree.min_blowup_factor();
+            if degree.min_blowup_factor(trace_length, trace_length_ext) > ce_blowup_factor {
+                ce_blowup_factor = degree.min_blowup_factor(trace_length, trace_length_ext);
             }
         }
 
@@ -155,9 +174,6 @@ impl<B: StarkField> AirContext<B> {
             options.blowup_factor()
         );
 
-        let trace_length = trace_info.length();
-        let lde_domain_size = trace_length * options.blowup_factor();
-
         AirContext {
             options,
             trace_info,
@@ -170,6 +186,8 @@
             trace_domain_generator: B::get_root_of_unity(trace_length.ilog2()),
             lde_domain_generator: B::get_root_of_unity(lde_domain_size.ilog2()),
             num_transition_exemptions: 1,
+            trace_length_ext,
+            zk_parameters,
         }
     }
 
@@ -188,25 +206,31 @@
         self.trace_info.length()
     }
 
+    /// Returns length of the possibly extended execution trace. This is the same as the original
+    /// trace length when zero-knowledge is not enabled.
+    pub fn trace_length_ext(&self) -> usize {
+        self.trace_length_ext
+    }
+
     /// Returns degree of trace polynomials for an instance of a computation.
     ///
-    /// The degree is always `trace_length` - 1.
+    /// The degree is always `trace_length_ext` - 1.
     pub fn trace_poly_degree(&self) -> usize {
-        self.trace_info.length() - 1
+        self.trace_length_ext() - 1
     }
 
     /// Returns size of the constraint evaluation domain.
     ///
-    /// This is guaranteed to be a power of two, and is equal to `trace_length * ce_blowup_factor`.
+    /// This is guaranteed to be a power of two, and is equal to `trace_length_ext * ce_blowup_factor`.
pub fn ce_domain_size(&self) -> usize { - self.trace_info.length() * self.ce_blowup_factor + self.trace_length_ext() * self.ce_blowup_factor } /// Returns the size of the low-degree extension domain. /// - /// This is guaranteed to be a power of two, and is equal to `trace_length * lde_blowup_factor`. + /// This is guaranteed to be a power of two, and is equal to `trace_length_ext * lde_blowup_factor`. pub fn lde_domain_size(&self) -> usize { - self.trace_info.length() * self.options.blowup_factor() + self.trace_length_ext() * self.options.blowup_factor() } /// Returns the number of transition constraints for a computation, excluding the Lagrange @@ -292,6 +316,8 @@ impl AirContext { /// numerator is `trace_len - 1` for all transition constraints (i.e. the base degree is 1). /// Hence, no matter what the degree of the divisor is for each, the degree of the fraction will /// be at most `trace_len - 1`. + /// + /// TODO: update documentation pub fn num_constraint_composition_columns(&self) -> usize { let mut highest_constraint_degree = 0_usize; for degree in self @@ -299,19 +325,93 @@ impl AirContext { .iter() .chain(self.aux_transition_constraint_degrees.iter()) { - let eval_degree = degree.get_evaluation_degree(self.trace_len()); + let eval_degree = + degree.get_evaluation_degree(self.trace_len(), self.trace_length_ext()); if eval_degree > highest_constraint_degree { highest_constraint_degree = eval_degree } } let trace_length = self.trace_len(); + let trace_length_ext = self.trace_length_ext(); let transition_divisior_degree = trace_length - self.num_transition_exemptions(); - // we use the identity: ceil(a/b) = (a + b - 1)/b let num_constraint_col = - (highest_constraint_degree - transition_divisior_degree).div_ceil(trace_length); + (highest_constraint_degree - transition_divisior_degree).div_ceil(trace_length_ext); + + if self.zk_parameters.is_some() { + let quotient_degree = if highest_constraint_degree < trace_length_ext { + // This means that our transition 
constraints have degree 1 and hence the boundary + // constraints will determine the degree + trace_length_ext - 2 + } else { + highest_constraint_degree - transition_divisior_degree + }; + let n_q = self.options.num_queries(); + let den = self.trace_length_ext() - (n_q + 1); + + (quotient_degree + 1).div_ceil(den) + } else { + cmp::max(num_constraint_col, 1) + } + } + + pub fn constraint_composition_degree(&self) -> usize { + let mut highest_constraint_degree = 0_usize; + for degree in self + .main_transition_constraint_degrees + .iter() + .chain(self.aux_transition_constraint_degrees.iter()) + { + let eval_degree = + degree.get_evaluation_degree(self.trace_len(), self.trace_length_ext()); + if eval_degree > highest_constraint_degree { + highest_constraint_degree = eval_degree + } + } + let trace_length = self.trace_len(); + let transition_divisior_degree = trace_length - self.num_transition_exemptions(); - cmp::max(num_constraint_col, 1) + // highest_constraint_degree - transition_divisior_degree + if highest_constraint_degree < self.trace_length_ext { + // This means that our transition constraints have degree 1 and hence the boundary + // constraints will determine the degree + self.trace_length_ext - 2 + } else { + highest_constraint_degree - transition_divisior_degree + } + } + + pub fn num_coefficients_chunk_quotient(&self) -> usize { + if self.zk_parameters().is_some() { + let num_constraint_composition_cols = self.num_constraint_composition_columns(); + let quotient_degree = self.constraint_composition_degree(); + + (quotient_degree + 1).div_ceil(num_constraint_composition_cols) + } else { + self.trace_len() + } + } + + pub fn zk_parameters(&self) -> Option { + self.zk_parameters + } + + pub fn zk_blowup_factor(&self) -> usize { + self.zk_parameters() + .map(|parameters| parameters.zk_blowup_witness()) + .unwrap_or(1) + } + + pub fn zk_witness_randomizer_degree(&self) -> usize { + self.zk_parameters() + .map(|parameters| 
parameters.degree_witness_randomizer()) + .unwrap_or(0) + } + + pub fn zk_constraint_randomizer_degree(&self) -> usize { + self.zk_parameters() + .map(|parameters| parameters.degree_constraint_randomizer()) + .unwrap_or(0) } // DATA MUTATORS @@ -347,9 +447,11 @@ impl AirContext { .iter() .chain(self.aux_transition_constraint_degrees.iter()) { - let eval_degree = degree.get_evaluation_degree(self.trace_len()); + let eval_degree = + degree.get_evaluation_degree(self.trace_len(), self.trace_length_ext()); let max_constraint_composition_degree = self.ce_domain_size() - 1; - let max_exemptions = max_constraint_composition_degree + self.trace_len() - eval_degree; + let max_exemptions = + max_constraint_composition_degree + self.trace_length_ext() - eval_degree; assert!( n <= max_exemptions, "number of transition exemptions cannot exceed: {max_exemptions}, but was {n}" @@ -360,3 +462,24 @@ impl AirContext { self } } + +#[derive(Clone, Copy, PartialEq, Eq)] +pub struct ZkParameters { + degree_witness_randomizer: usize, + degree_constraint_randomizer: usize, + zk_blowup_witness: usize, +} + +impl ZkParameters { + pub fn degree_witness_randomizer(&self) -> usize { + self.degree_witness_randomizer + } + + pub fn degree_constraint_randomizer(&self) -> usize { + self.degree_constraint_randomizer + } + + pub fn zk_blowup_witness(&self) -> usize { + self.zk_blowup_witness + } +} diff --git a/air/src/air/mod.rs b/air/src/air/mod.rs index 53a59fa5a..dc9a93c32 100644 --- a/air/src/air/mod.rs +++ b/air/src/air/mod.rs @@ -17,7 +17,7 @@ mod trace_info; pub use trace_info::TraceInfo; mod context; -pub use context::AirContext; +pub use context::{AirContext, ZkParameters}; mod assertions; pub use assertions::Assertion; @@ -547,7 +547,7 @@ pub trait Air: Send + Sync { let lagrange = if self.context().has_lagrange_kernel_aux_column() { let mut lagrange_kernel_t_coefficients = Vec::new(); - for _ in 0..self.context().trace_len().ilog2() { + for _ in 
0..self.context().trace_info().length().ilog2() { lagrange_kernel_t_coefficients.push(public_coin.draw()?); } @@ -600,4 +600,9 @@ pub trait Air: Send + Sync { lagrange: lagrange_cc, }) } + + /// Returns whether zero-knowledge is enabled. + fn is_zk(&self) -> bool { + self.options().is_zk() + } } diff --git a/air/src/air/tests.rs b/air/src/air/tests.rs index e0063ed3b..8338a3350 100644 --- a/air/src/air/tests.rs +++ b/air/src/air/tests.rs @@ -205,7 +205,7 @@ impl MockAir { let mut result = Self::new( TraceInfo::with_meta(4, trace_length, vec![1]), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ); result.periodic_columns = column_values; result @@ -215,7 +215,7 @@ impl MockAir { let mut result = Self::new( TraceInfo::with_meta(4, trace_length, vec![assertions.len() as u8]), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ); result.assertions = assertions; result @@ -267,7 +267,7 @@ pub fn build_context( trace_width: usize, num_assertions: usize, ) -> AirContext { - let options = ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31); + let options = ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false); let t_degrees = vec![TransitionConstraintDegree::new(2)]; let trace_info = TraceInfo::new(trace_width, trace_length); AirContext::new(trace_info, t_degrees, num_assertions, options) diff --git a/air/src/air/transition/degree.rs b/air/src/air/transition/degree.rs index a51ab2840..9f5b99f69 100644 --- a/air/src/air/transition/degree.rs +++ b/air/src/air/transition/degree.rs @@ -87,8 +87,10 @@ impl TransitionConstraintDegree { /// $$ /// 2 \cdot (64 - 1) + \frac{64 \cdot (32 - 1)}{32} = 126 + 62 = 188 /// $$ - pub fn get_evaluation_degree(&self, trace_length: usize) -> usize { - let mut result = self.base * (trace_length - 1); + /// + /// TODO: Update docs + pub fn get_evaluation_degree(&self, 
trace_length: usize, trace_length_ext: usize) -> usize { + let mut result = self.base * (trace_length_ext - 1); for cycle_length in self.cycles.iter() { result += (trace_length / cycle_length) * (cycle_length - 1); } @@ -98,7 +100,7 @@ impl TransitionConstraintDegree { /// Returns a minimum blowup factor needed to evaluate constraint of this degree. /// /// This is guaranteed to be a power of two, greater than one. - pub fn min_blowup_factor(&self) -> usize { + pub fn min_blowup_factor(&self, trace_length: usize, trace_length_ext: usize) -> usize { // The blowup factor needs to be a power of two large enough to accommodate degree of // transition constraints defined by rational functions `C(x) / z(x)` where `C(x)` is the // constraint polynomial and `z(x)` is the transition constraint divisor. @@ -110,7 +112,12 @@ impl TransitionConstraintDegree { // // For example, if degree of our constraints is 6, the blowup factor would need to be 8. // However, if the degree is 5, the blowup factor could be as small as 4. 
-        let degree_bound = self.base + self.cycles.len() - 1;
-        cmp::max(degree_bound.next_power_of_two(), ProofOptions::MIN_BLOWUP_FACTOR)
+        //
+        // TODO: update documentation
+        let degree_bound = self.base + self.cycles.len();
+        let q_deg = degree_bound * (trace_length_ext - 1) - (trace_length - 1);
+        let blowup_factor = q_deg.div_ceil(trace_length_ext);
+
+        cmp::max(blowup_factor.next_power_of_two(), ProofOptions::MIN_BLOWUP_FACTOR)
     }
 }
diff --git a/air/src/air/transition/mod.rs b/air/src/air/transition/mod.rs
index 60e641817..89f44577a 100644
--- a/air/src/air/transition/mod.rs
+++ b/air/src/air/transition/mod.rs
@@ -55,7 +55,7 @@ impl<E: FieldElement> TransitionConstraints<E> {
 
         // build constraint divisor; the same divisor applies to all transition constraints
         let divisor = ConstraintDivisor::from_transition(
-            context.trace_len(),
+            context.trace_info().length(),
             context.num_transition_exemptions(),
         );
 
diff --git a/air/src/lib.rs b/air/src/lib.rs
index 0a471a706..184d2b862 100644
--- a/air/src/lib.rs
+++ b/air/src/lib.rs
@@ -48,5 +48,5 @@ pub use air::{
     LagrangeConstraintsCompositionCoefficients, LagrangeKernelBoundaryConstraint,
     LagrangeKernelConstraints, LagrangeKernelEvaluationFrame, LagrangeKernelRandElements,
     LagrangeKernelTransitionConstraints, TraceInfo, TransitionConstraintDegree,
-    TransitionConstraints,
+    TransitionConstraints, ZkParameters,
 };
diff --git a/air/src/options.rs b/air/src/options.rs
index a831bdad7..92295787c 100644
--- a/air/src/options.rs
+++ b/air/src/options.rs
@@ -95,6 +95,7 @@ pub struct ProofOptions {
     fri_folding_factor: u8,
     fri_remainder_max_degree: u8,
     partition_options: PartitionOptions,
+    is_zk: bool,
 }
 
 // PROOF OPTIONS IMPLEMENTATION
@@ -128,6 +129,7 @@
         field_extension: FieldExtension,
         fri_folding_factor: usize,
         fri_remainder_max_degree: usize,
+        is_zk: bool,
     ) -> ProofOptions {
         // TODO: return errors instead of panicking
         assert!(num_queries > 0, "number of queries must be greater than 0");
@@ -169,6 +171,7 @@
             fri_folding_factor: fri_folding_factor as u8,
             fri_remainder_max_degree: fri_remainder_max_degree as u8,
             partition_options: PartitionOptions::new(1, 1),
+            is_zk,
         }
     }
 
@@ -249,6 +252,32 @@
     pub fn partition_options(&self) -> PartitionOptions {
         self.partition_options
     }
+    /// Returns whether zero-knowledge is enabled.
+    pub fn is_zk(&self) -> bool {
+        self.is_zk
+    }
+
+    /// Computes a lower bound on the degree of the polynomial used for randomizing the witness
+    /// polynomials.
+    pub(crate) fn zk_witness_randomizer_degree(&self) -> Option<u32> {
+        if self.is_zk {
+            let h = compute_degree_randomizing_poly(
+                self.field_extension().degree() as usize,
+                self.num_queries(),
+            );
+
+            Some(h as u32)
+        } else {
+            None
+        }
+    }
+}
+
+/// Computes the number of coefficients of the polynomials used to randomize the witness polynomials.
+///
+/// This is based on equation (13) in https://eprint.iacr.org/2024/1037
+pub fn compute_degree_randomizing_poly(extension_degree: usize, num_fri_queries: usize) -> usize {
+    2 * (extension_degree + num_fri_queries)
 }
 
 impl<E: StarkField> ToElements<E> for ProofOptions {
@@ -278,6 +307,7 @@ impl Serializable for ProofOptions {
         target.write_u8(self.fri_remainder_max_degree);
         target.write_u8(self.partition_options.num_partitions);
         target.write_u8(self.partition_options.min_partition_size);
+        target.write_bool(self.is_zk)
     }
 }
 
@@ -294,6 +324,7 @@
             FieldExtension::read_from(source)?,
             source.read_u8()? as usize,
             source.read_u8()? as usize,
+            source.read_bool()?,
         );
         Ok(result.with_partitions(source.read_u8()? as usize, source.read_u8()? as usize))
     }
 
@@ -431,6 +462,7 @@ mod tests {
             field_extension,
             fri_folding_factor as usize,
             fri_remainder_max_degree as usize,
+            false,
         );
         assert_eq!(expected, options.to_elements());
     }
diff --git a/air/src/proof/context.rs b/air/src/proof/context.rs
index 83c2beece..1df47c463 100644
--- a/air/src/proof/context.rs
+++ b/air/src/proof/context.rs
@@ -5,7 +5,7 @@
 
 use alloc::{string::ToString, vec::Vec};
 
-use math::{StarkField, ToElements};
+use math::{FieldElement, StarkField, ToElements};
 use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
 
 use crate::{ProofOptions, TraceInfo};
@@ -18,6 +18,7 @@ pub struct Context {
     trace_info: TraceInfo,
     field_modulus_bytes: Vec<u8>,
     options: ProofOptions,
+    zk_blowup: usize,
 }
 
 impl Context {
@@ -29,7 +30,11 @@
     /// # Panics
     /// Panics if either trace length or the LDE domain size implied by the trace length and the
     /// blowup factor is greater then [u32::MAX].
-    pub fn new<B: StarkField>(trace_info: TraceInfo, options: ProofOptions) -> Self {
+    pub fn new<B: StarkField>(
+        trace_info: TraceInfo,
+        options: ProofOptions,
+        zk_blowup: usize,
+    ) -> Self {
         // TODO: return errors instead of panicking?
         let trace_length = trace_info.length();
 
@@ -42,6 +47,7 @@
             trace_info,
             field_modulus_bytes: B::get_modulus_le_bytes(),
             options,
+            zk_blowup,
         }
     }
 
@@ -54,8 +60,8 @@
     }
 
     /// Returns the size of the LDE domain for the computation described by this context.
-    pub fn lde_domain_size(&self) -> usize {
-        self.trace_info.length() * self.options.blowup_factor()
+    pub fn lde_domain_size<E: FieldElement>(&self) -> usize {
+        self.trace_info.length() * self.zk_blowup * self.options.blowup_factor()
     }
 
     /// Returns modulus of the field for the computation described by this context.
@@ -124,6 +130,7 @@ impl Serializable for Context {
         target.write_u8(self.field_modulus_bytes.len() as u8);
         target.write_bytes(&self.field_modulus_bytes);
         self.options.write_into(target);
+        self.zk_blowup.write_into(target);
     }
 }
 
@@ -148,7 +155,15 @@ impl Deserializable for Context {
         // read options
         let options = ProofOptions::read_from(source)?;
 
-        Ok(Context { trace_info, field_modulus_bytes, options })
+        // TODO: should we validate it?
+        let zk_blowup = usize::read_from(source)?;
+
+        Ok(Context {
+            trace_info,
+            field_modulus_bytes,
+            options,
+            zk_blowup,
+        })
     }
 }
 
@@ -212,10 +227,11 @@ mod tests {
             field_extension,
             fri_folding_factor as usize,
             fri_remainder_max_degree as usize,
+            false,
         );
         let trace_info =
             TraceInfo::new_multi_segment(main_width, aux_width, aux_rands, trace_length, vec![]);
-        let context = Context::new::<BaseElement>(trace_info, options);
+        let context = Context::new::<BaseElement>(trace_info, options, 1);
         assert_eq!(expected, context.to_elements());
     }
 }
diff --git a/air/src/proof/mod.rs b/air/src/proof/mod.rs
index 7307ba1d3..e791b345f 100644
--- a/air/src/proof/mod.rs
+++ b/air/src/proof/mod.rs
@@ -79,6 +79,8 @@ pub struct Proof {
     pub pow_nonce: u64,
     /// Optionally, an auxiliary (non-STARK) proof that was generated during auxiliary trace generation.
     pub gkr_proof: Option<Vec<u8>>,
+    /// Random values needed for Fiat-Shamir.
+    pub salts: Vec<u8>,
 }
 
 impl Proof {
@@ -93,8 +95,8 @@
     }
 
     /// Returns the size of the LDE domain for the computation described by this proof.
-    pub fn lde_domain_size(&self) -> usize {
-        self.context.lde_domain_size()
+    pub fn lde_domain_size<E: FieldElement>(&self) -> usize {
+        self.context.lde_domain_size::<E>()
     }
 
     // SECURITY LEVEL
@@ -108,15 +110,21 @@
     pub fn security_level<H: Hasher>(&self, conjectured: bool) -> u32 {
         if conjectured {
             get_conjectured_security(
-                self.context.options(),
                 self.context.num_modulus_bits(),
+                self.context.options().field_extension() as u32,
+                self.context.options().blowup_factor(),
+                self.options().num_queries(),
+                self.options().grinding_factor(),
                 self.trace_info().length(),
                 H::COLLISION_RESISTANCE,
             )
         } else {
             get_proven_security(
-                self.context.options(),
                 self.context.num_modulus_bits(),
+                self.context.options().field_extension() as u32,
+                self.context.options().blowup_factor(),
+                self.options().num_queries(),
+                self.options().grinding_factor(),
                 self.trace_info().length(),
                 H::COLLISION_RESISTANCE,
             )
@@ -149,7 +157,8 @@
         Self {
             context: Context::new::<DummyField>(
                 TraceInfo::new(1, 8),
-                ProofOptions::new(1, 2, 2, FieldExtension::None, 8, 1),
+                ProofOptions::new(1, 2, 2, FieldExtension::None, 8, 1, false),
+                1,
             ),
             num_unique_queries: 0,
             commitments: Commitments::default(),
@@ -162,6 +171,7 @@
             fri_proof: FriProof::new_dummy(),
             pow_nonce: 0,
             gkr_proof: None,
+            salts: vec![],
         }
     }
 }
@@ -180,6 +190,7 @@ impl Serializable for Proof {
         self.fri_proof.write_into(target);
         self.pow_nonce.write_into(target);
         self.gkr_proof.write_into(target);
+        self.salts.write_into(target);
     }
 }
 
@@ -204,6 +215,7 @@ impl Deserializable for Proof {
             fri_proof: FriProof::read_from(source)?,
             pow_nonce: source.read_u64()?,
             gkr_proof: Option::<Vec<u8>>::read_from(source)?,
+            salts: Vec::read_from(source)?,
         };
         Ok(proof)
     }
 
@@ -213,32 +225,38 @@
 // ================================================================================================
 
 /// Computes conjectured security level for the specified proof parameters.
-fn get_conjectured_security(
-    options: &ProofOptions,
+pub(crate) fn get_conjectured_security(
     base_field_bits: u32,
+    extension_degree: u32,
+    blowup_factor: usize,
+    num_queries: usize,
+    grinding_factor: u32,
     trace_domain_size: usize,
     collision_resistance: u32,
 ) -> u32 {
     // compute max security we can get for a given field size
-    let field_size = base_field_bits * options.field_extension().degree();
-    let field_security = field_size - (trace_domain_size * options.blowup_factor()).ilog2();
+    let field_size = base_field_bits * extension_degree;
+    let field_security = field_size - (trace_domain_size * blowup_factor).ilog2();
 
     // compute security we get by executing multiple query rounds
-    let security_per_query = options.blowup_factor().ilog2();
-    let mut query_security = security_per_query * options.num_queries() as u32;
+    let security_per_query = blowup_factor.ilog2();
+    let mut query_security = security_per_query * num_queries as u32;
 
     // include grinding factor contributions only for proofs adequate security
     if query_security >= GRINDING_CONTRIBUTION_FLOOR {
-        query_security += options.grinding_factor();
+        query_security += grinding_factor;
     }
 
     cmp::min(cmp::min(field_security, query_security) - 1, collision_resistance)
 }
 
 /// Estimates proven security level for the specified proof parameters.
-fn get_proven_security(
-    options: &ProofOptions,
+pub(crate) fn get_proven_security(
     base_field_bits: u32,
+    extension_degree: u32,
+    blowup_factor: usize,
+    num_queries: usize,
+    grinding_factor: u32,
     trace_domain_size: usize,
     collision_resistance: u32,
 ) -> u32 {
@@ -248,8 +266,11 @@ fn get_proven_security(
     let m_optimal = (m_min as u32..m_max as u32)
         .max_by_key(|&a| {
             proven_security_protocol_for_m(
-                options,
                 base_field_bits,
+                extension_degree,
+                blowup_factor,
+                num_queries,
+                grinding_factor,
                 trace_domain_size,
                 a as usize,
             )
@@ -260,8 +281,11 @@ fn get_proven_security(
 
     cmp::min(
         proven_security_protocol_for_m(
-            options,
             base_field_bits,
+            extension_degree,
+            blowup_factor,
+            num_queries,
+            grinding_factor,
             trace_domain_size,
             m_optimal as usize,
         ),
@@ -272,17 +296,20 @@
 /// Computes proven security level for the specified proof parameters for a fixed
 /// value of the proximity parameter m in the list-decoding regime.
 fn proven_security_protocol_for_m(
-    options: &ProofOptions,
     base_field_bits: u32,
+    extension_degree: u32,
+    blowup_factor: usize,
+    num_queries: usize,
+    grinding_factor: u32,
     trace_domain_size: usize,
     m: usize,
 ) -> u64 {
-    let extension_field_bits = (base_field_bits * options.field_extension().degree()) as f64;
-    let num_fri_queries = options.num_queries() as f64;
+    let extension_field_bits = (base_field_bits * extension_degree) as f64;
+    let num_fri_queries = num_queries as f64;
     let m = m as f64;
-    let rho = 1.0 / options.blowup_factor() as f64;
+    let rho = 1.0 / blowup_factor as f64;
     let alpha = (1.0 + 0.5 / m) * sqrt(rho);
-    let max_deg = options.blowup_factor() as f64 + 1.0;
+    let max_deg = blowup_factor as f64 + 1.0;
 
     // To apply Theorem 8 in https://eprint.iacr.org/2022/1216.pdf, we need to apply FRI with
     // a slightly larger agreement parameter alpha.
@@ -296,7 +323,7 @@ fn proven_security_protocol_for_m(
     // the list-decoding list size in F(Z).
// Modified rate in function field F(Z) - let lde_domain_size = (trace_domain_size * options.blowup_factor()) as f64; + let lde_domain_size = (trace_domain_size * blowup_factor) as f64; let trace_domain_size = trace_domain_size as f64; let num_openings = 2.0; let rho_plus = (trace_domain_size + num_openings) / lde_domain_size; @@ -315,7 +342,7 @@ fn proven_security_protocol_for_m( // Compute FRI query-phase soundness error let fri_queries_err_bits = - options.grinding_factor() as f64 - log2(powf(1.0 - theta_plus, num_fri_queries)); + grinding_factor as f64 - log2(powf(1.0 - theta_plus, num_fri_queries)); // Combined error for FRI let fri_err_bits = cmp::min(fri_commit_err_bits as u64, fri_queries_err_bits as u64); @@ -405,31 +432,27 @@ pub fn ceil(value: f64) -> f64 { mod prove_security_tests { use math::{fields::f64::BaseElement, StarkField}; - use super::ProofOptions; use crate::{proof::get_proven_security, FieldExtension}; #[test] fn get_96_bits_security() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 4; let num_queries = 80; let collision_resistance = 128; let trace_length = 2_usize.pow(18); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_1, 97); @@ -437,16 +460,15 @@ mod prove_security_tests { let blowup_factor = 8; let num_queries = 53; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, 
grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_2, 97); } @@ -455,24 +477,21 @@ mod prove_security_tests { fn get_128_bits_security() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 85; let collision_resistance = 128; let trace_length = 2_usize.pow(18); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_1, 128); @@ -480,16 +499,15 @@ mod prove_security_tests { let blowup_factor = 16; let num_queries = 65; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_2, 128); } @@ -498,24 +516,21 @@ mod prove_security_tests { fn extension_degree() { let field_extension = FieldExtension::Quadratic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 85; let collision_resistance = 128; let 
trace_length = 2_usize.pow(18); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_1, 67); @@ -523,16 +538,15 @@ mod prove_security_tests { // reaching 128 bits security let field_extension = FieldExtension::Cubic; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_2, 128); } @@ -541,37 +555,33 @@ mod prove_security_tests { fn trace_length() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 80; let collision_resistance = 128; let trace_length = 2_usize.pow(20); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); let trace_length = 2_usize.pow(16); - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + 
field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert!(security_1 < security_2); } @@ -580,37 +590,33 @@ mod prove_security_tests { fn num_fri_queries() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 60; let collision_resistance = 128; let trace_length = 2_usize.pow(20); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); let num_queries = 80; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert!(security_1 < security_2); } @@ -619,37 +625,33 @@ mod prove_security_tests { fn blowup_factor() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 30; let collision_resistance = 128; let trace_length = 2_usize.pow(20); - let mut 
options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); let blowup_factor = 16; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert!(security_1 < security_2); } diff --git a/air/src/proof/ood_frame.rs b/air/src/proof/ood_frame.rs index 52d394747..2c8317637 100644 --- a/air/src/proof/ood_frame.rs +++ b/air/src/proof/ood_frame.rs @@ -145,7 +145,6 @@ impl OodFrame { let mut reader = SliceReader::new(&self.trace_states); let frame_size = reader.read_u8()? 
as usize; let trace = reader.read_many((main_trace_width + aux_trace_width) * frame_size)?; - if reader.has_more_bytes() { return Err(DeserializationError::UnconsumedBytes); } diff --git a/crypto/Cargo.toml b/crypto/Cargo.toml index 23f985fee..b02b4d62b 100644 --- a/crypto/Cargo.toml +++ b/crypto/Cargo.toml @@ -34,6 +34,7 @@ blake3 = { version = "1.5", default-features = false } math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } sha3 = { version = "0.10", default-features = false } utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +rand = { version = "0.8" } [dev-dependencies] criterion = "0.5" diff --git a/crypto/src/commitment.rs b/crypto/src/commitment.rs index 1d2667f7a..72ec674e7 100644 --- a/crypto/src/commitment.rs +++ b/crypto/src/commitment.rs @@ -49,7 +49,7 @@ pub trait VectorCommitment: Sized { fn commitment(&self) -> H::Digest; /// Returns the length of the vector committed to for `Self`. - fn domain_len(&self) -> usize; + fn get_domain_len(&self) -> usize; /// Returns the length of the vector committed to for `Self::Proof`. fn get_proof_domain_len(proof: &Self::Proof) -> usize; diff --git a/crypto/src/hash/mod.rs b/crypto/src/hash/mod.rs index 4bede6b8d..4bfc5eea1 100644 --- a/crypto/src/hash/mod.rs +++ b/crypto/src/hash/mod.rs @@ -17,7 +17,7 @@ pub use sha::Sha3_256; mod mds; mod rescue; -pub use rescue::{Rp62_248, Rp64_256, RpJive64_256}; +pub use rescue::{Rp62_248, Rp64_256, RpJive64_256, ARK1, ARK2, MDS}; // HASHER TRAITS // ================================================================================================ @@ -77,6 +77,9 @@ pub trait Digest: /// upper limit on the possible digest size. For digests which are smaller than 32 bytes, the /// unused bytes should be set to 0. fn as_bytes(&self) -> [u8; 32]; + + /// Returns a digest that is drawn uniformly at random from the space of all digests. 
+ fn from_random_bytes(buffer: &[u8]) -> Self; } // BYTE DIGEST @@ -111,6 +114,14 @@ impl Digest for ByteDigest { result[..N].copy_from_slice(&self.0); result } + + fn from_random_bytes(buffer: &[u8]) -> Self { + Self::new( + buffer + .try_into() + .expect("The size of the buffer with random bytes should be 32"), + ) + } } impl Default for ByteDigest { diff --git a/crypto/src/hash/rescue/mod.rs b/crypto/src/hash/rescue/mod.rs index dbb13dee7..6a126ceb2 100644 --- a/crypto/src/hash/rescue/mod.rs +++ b/crypto/src/hash/rescue/mod.rs @@ -9,7 +9,7 @@ mod rp62_248; pub use rp62_248::Rp62_248; mod rp64_256; -pub use rp64_256::Rp64_256; +pub use rp64_256::{Rp64_256, ARK1, ARK2, MDS}; mod rp64_256_jive; pub use rp64_256_jive::RpJive64_256; diff --git a/crypto/src/hash/rescue/rp62_248/digest.rs b/crypto/src/hash/rescue/rp62_248/digest.rs index bacece257..01ecbf996 100644 --- a/crypto/src/hash/rescue/rp62_248/digest.rs +++ b/crypto/src/hash/rescue/rp62_248/digest.rs @@ -5,7 +5,7 @@ use core::slice; -use math::{fields::f62::BaseElement, StarkField}; +use math::{fields::f62::BaseElement, FieldElement, StarkField}; use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; use super::{Digest, DIGEST_SIZE}; @@ -47,6 +47,18 @@ impl Digest for ElementDigest { result } + + fn from_random_bytes(buffer: &[u8]) -> Self { + let mut digest: [BaseElement; DIGEST_SIZE] = [BaseElement::ZERO; DIGEST_SIZE]; + + buffer.chunks(8).zip(digest.iter_mut()).for_each(|(chunk, digest)| { + *digest = BaseElement::new(u64::from_be_bytes( + chunk.try_into().expect("Given the size of the chunk this should not panic"), + )) + }); + + Self(digest) + } } impl Default for ElementDigest { diff --git a/crypto/src/hash/rescue/rp64_256/digest.rs b/crypto/src/hash/rescue/rp64_256/digest.rs index 84cec4123..f1bc78d6b 100644 --- a/crypto/src/hash/rescue/rp64_256/digest.rs +++ b/crypto/src/hash/rescue/rp64_256/digest.rs @@ -5,8 +5,11 @@ use core::slice; -use 
math::fields::f64::BaseElement; -use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use math::{fields::f64::BaseElement, FieldElement}; +use rand::distributions::{Distribution, Standard}; +use utils::{ + ByteReader, ByteWriter, Deserializable, DeserializationError, Randomizable, Serializable, +}; use super::{Digest, DIGEST_SIZE}; @@ -43,6 +46,18 @@ impl Digest for ElementDigest { result } + + fn from_random_bytes(buffer: &[u8]) -> Self { + let mut digest: [BaseElement; DIGEST_SIZE] = [BaseElement::ZERO; DIGEST_SIZE]; + + buffer.chunks(8).zip(digest.iter_mut()).for_each(|(chunk, digest)| { + *digest = BaseElement::new(u64::from_be_bytes( + chunk.try_into().expect("Given the size of the chunk this should not panic"), + )) + }); + + digest.into() + } } impl Default for ElementDigest { @@ -87,6 +102,18 @@ impl From for [u8; 32] { } } +impl Distribution for Standard { + fn sample(&self, rng: &mut R) -> ElementDigest { + let mut res = [BaseElement::ZERO; DIGEST_SIZE]; + for r in res.iter_mut() { + let mut source = [0_u8; 8]; + rng.fill_bytes(&mut source); + *r = BaseElement::from_random_bytes(&source).expect("failed to generate element"); + } + ElementDigest::new(res) + } +} + // TESTS // ================================================================================================ diff --git a/crypto/src/hash/rescue/rp64_256/mod.rs b/crypto/src/hash/rescue/rp64_256/mod.rs index 0d87de3f7..584395d2e 100644 --- a/crypto/src/hash/rescue/rp64_256/mod.rs +++ b/crypto/src/hash/rescue/rp64_256/mod.rs @@ -388,7 +388,7 @@ impl Rp64_256 { // MDS // ================================================================================================ /// Rescue MDS matrix -const MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ +pub const MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ [ BaseElement::new(7), BaseElement::new(23), @@ -560,7 +560,7 @@ const MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ ]; /// Rescue Inverse MDS matrix 
-const INV_MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ +pub const INV_MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ [ BaseElement::new(14868391535953158196), BaseElement::new(13278298489594233127), @@ -739,7 +739,7 @@ const INV_MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ /// /// The constants are broken up into two arrays ARK1 and ARK2; ARK1 contains the constants for the /// first half of Rescue round, and ARK2 contains constants for the second half of Rescue round. -const ARK1: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ +pub const ARK1: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ [ BaseElement::new(13917550007135091859), BaseElement::new(16002276252647722320), @@ -840,7 +840,7 @@ const ARK1: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ ], ]; -const ARK2: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ +pub const ARK2: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ [ BaseElement::new(7989257206380839449), BaseElement::new(8639509123020237648), diff --git a/crypto/src/hash/rescue/rp64_256_jive/digest.rs b/crypto/src/hash/rescue/rp64_256_jive/digest.rs index 84cec4123..703118093 100644 --- a/crypto/src/hash/rescue/rp64_256_jive/digest.rs +++ b/crypto/src/hash/rescue/rp64_256_jive/digest.rs @@ -5,7 +5,7 @@ use core::slice; -use math::fields::f64::BaseElement; +use math::{fields::f64::BaseElement, FieldElement}; use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; use super::{Digest, DIGEST_SIZE}; @@ -43,6 +43,18 @@ impl Digest for ElementDigest { result } + + fn from_random_bytes(buffer: &[u8]) -> Self { + let mut digest: [BaseElement; DIGEST_SIZE] = [BaseElement::ZERO; DIGEST_SIZE]; + + buffer.chunks(8).zip(digest.iter_mut()).for_each(|(chunk, digest)| { + *digest = BaseElement::new(u64::from_be_bytes( + chunk.try_into().expect("Given the size of the chunk this should not panic"), + )) + }); + + digest.into() + } } impl Default for ElementDigest { diff --git a/crypto/src/lib.rs b/crypto/src/lib.rs index 
ff29176bb..e9a961c77 100644 --- a/crypto/src/lib.rs +++ b/crypto/src/lib.rs @@ -26,13 +26,15 @@ pub use hash::{Digest, ElementHasher, Hasher}; pub mod hashers { //! Contains implementations of currently supported hash functions. - pub use super::hash::{Blake3_192, Blake3_256, Rp62_248, Rp64_256, RpJive64_256, Sha3_256}; + pub use super::hash::{ + Blake3_192, Blake3_256, Rp62_248, Rp64_256, RpJive64_256, Sha3_256, ARK1, ARK2, MDS, + }; } mod merkle; #[cfg(feature = "concurrent")] pub use merkle::concurrent; -pub use merkle::{build_merkle_nodes, BatchMerkleProof, MerkleTree}; +pub use merkle::{build_merkle_nodes, BatchMerkleProof, MerkleTree, SaltedMerkleTree}; mod random; pub use random::{DefaultRandomCoin, RandomCoin}; diff --git a/crypto/src/merkle/mod.rs b/crypto/src/merkle/mod.rs index 51b4a76dc..bee8207f1 100644 --- a/crypto/src/merkle/mod.rs +++ b/crypto/src/merkle/mod.rs @@ -9,11 +9,20 @@ use alloc::{ }; use core::slice; +use rand::{ + distributions::{Distribution, Standard}, + thread_rng, Rng, RngCore, +}; + +use crate::{ + errors::MerkleTreeError, + hash::{ByteDigest, Hasher}, + VectorCommitment, +}; + mod proofs; pub use proofs::BatchMerkleProof; -use crate::{Hasher, MerkleTreeError, VectorCommitment}; - #[cfg(feature = "concurrent")] pub mod concurrent; @@ -97,6 +106,17 @@ pub struct MerkleTree { /// up to the root (excluding the root itself). pub type MerkleTreeOpening = (::Digest, Vec<::Digest>); +/// Salted Merkle tree opening consisting of a leaf value, a salt, and a Merkle path leading +/// from this leaf up to the root (excluding the root itself). +pub type SaltedMerkleTreeOpening = + (::Digest, (::Digest, Vec<::Digest>)); + +/// Salted Merkle tree multi opening consisting of a vector of leaves, a vector of corresponding salts, +/// and a collection of corresponding Merkle paths leading from these leaves up to the root +/// (excluding the root itself). The collection of Merkle paths is stored as a [BatchMerkleProof]. 
+pub type SaltedMerkleTreeMultiOpening = + (Vec<::Digest>, (Vec<::Digest>, BatchMerkleProof)); + // MERKLE TREE IMPLEMENTATION // ================================================================================================ @@ -416,7 +436,7 @@ impl VectorCommitment for MerkleTree { *self.root() } - fn domain_len(&self) -> usize { + fn get_domain_len(&self) -> usize { 1 << self.depth() } @@ -457,3 +477,179 @@ impl VectorCommitment for MerkleTree { MerkleTree::::verify_batch(&commitment, indexes, items, proof) } } + +// SALTED MERKLE TREE +// ================================================================================================ + +pub struct SaltedMerkleTree { + leaves: Vec, + tree: MerkleTree, + salts: Vec, +} + +impl SaltedMerkleTree +where + Standard: Distribution<::Digest>, +{ + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + pub fn new(leaves: Vec, prng: &mut R) -> Result { + if leaves.len() < 2 { + return Err(MerkleTreeError::TooFewLeaves(2, leaves.len())); + } + if !leaves.len().is_power_of_two() { + return Err(MerkleTreeError::NumberOfLeavesNotPowerOfTwo(leaves.len())); + } + + let num_leaves = leaves.len(); + let salts: Vec = (0..num_leaves).map(|_| prng.sample(Standard)).collect(); + + let salted_leaves: Vec = leaves + .iter() + .zip(salts.iter()) + .map(|(leaf, salt)| H::merge(&[*leaf, *salt])) + .collect(); + + let tree = MerkleTree::new(salted_leaves)?; + + Ok(Self { tree, leaves, salts }) + } + + /// Returns the root of the tree. 
+ pub fn root(&self) -> &H::Digest { + self.tree.root() + } + + pub fn depth(&self) -> usize { + self.tree.depth() + } + + pub fn prove(&self, index: usize) -> Result, MerkleTreeError> { + let (_, proof) = self.tree.prove(index)?; + Ok((self.leaves[index], (self.salts[index], proof))) + } + + pub fn prove_batch( + &self, + indexes: &[usize], + ) -> Result, MerkleTreeError> { + let (_, proof) = self.tree.prove_batch(indexes)?; + let leaves_at_indices = indexes.iter().map(|index| self.leaves[*index]).collect(); + let salts_at_indices = indexes.iter().map(|index| self.salts[*index]).collect(); + Ok((leaves_at_indices, (salts_at_indices, proof))) + } + + pub fn verify( + root: H::Digest, + index: usize, + leaf: H::Digest, + salt: H::Digest, + proof: &[H::Digest], + ) -> Result<(), MerkleTreeError> { + let salted_leaf = H::merge(&[leaf, salt]); + MerkleTree::::verify(root, index, salted_leaf, proof) + } + + /// Checks whether the batch proof contains Merkle paths for the of the specified `indexes`. + /// + /// # Errors + /// Returns an error if: + /// * No indexes were provided (i.e., `indexes` is an empty slice). + /// * Number of provided indexes is greater than 255. + /// * Any of the specified `indexes` is greater than or equal to the number of leaves in the + /// tree from which the batch proof was generated. + /// * List of indexes contains duplicates. + /// * Any of the paths in the batch proof does not resolve to the specified `root`. 
+ pub fn verify_batch( + root: &H::Digest, + indexes: &[usize], + leaves: &[H::Digest], + salts: &[H::Digest], + proof: &BatchMerkleProof, + ) -> Result<(), MerkleTreeError> { + let salted_leaves: Vec = leaves + .iter() + .zip(salts.iter()) + .map(|(leaf, salt)| H::merge(&[*leaf, *salt])) + .collect(); + + MerkleTree::::verify_batch(root, indexes, &salted_leaves, proof) + } +} + +impl Distribution> for Standard { + fn sample(&self, rng: &mut R) -> ByteDigest<32> { + let mut dest = [0; 32]; + rng.fill_bytes(&mut dest); + ByteDigest::new(dest) + } +} + +impl VectorCommitment for SaltedMerkleTree +where + Standard: Distribution<::Digest>, +{ + type Options = (); + + type Proof = (H::Digest, Vec); + + type MultiProof = (Vec, BatchMerkleProof); + + type Error = MerkleTreeError; + + fn new(items: Vec) -> Result { + let mut prng = thread_rng(); + SaltedMerkleTree::new(items, &mut prng) + } + + fn with_options(items: Vec, _options: Self::Options) -> Result { + let mut prng = thread_rng(); + Self::new(items, &mut prng) + } + + fn get_domain_len(&self) -> usize { + 1 << self.depth() + } + + fn get_proof_domain_len(proof: &Self::Proof) -> usize { + proof.1.len() + } + + fn get_multiproof_domain_len(proof: &Self::MultiProof) -> usize { + 1 << proof.1.depth + } + + fn commitment(&self) -> H::Digest { + *self.root() + } + + fn open(&self, index: usize) -> Result<(H::Digest, Self::Proof), Self::Error> { + self.prove(index) + } + + fn open_many( + &self, + indexes: &[usize], + ) -> Result<(Vec, Self::MultiProof), Self::Error> { + self.prove_batch(indexes) + } + + fn verify( + commitment: H::Digest, + index: usize, + item: H::Digest, + proof: &Self::Proof, + ) -> Result<(), Self::Error> { + SaltedMerkleTree::::verify(commitment, index, item, proof.0, &proof.1) + } + + fn verify_many( + commitment: H::Digest, + indexes: &[usize], + items: &[H::Digest], + proof: &Self::MultiProof, + ) -> Result<(), Self::Error> { + SaltedMerkleTree::::verify_batch(&commitment, indexes, items, 
&proof.0, &proof.1) + } +} diff --git a/crypto/src/merkle/tests.rs b/crypto/src/merkle/tests.rs index f66c638a2..dac785294 100644 --- a/crypto/src/merkle/tests.rs +++ b/crypto/src/merkle/tests.rs @@ -254,6 +254,28 @@ fn from_proofs() { assert_eq!(proof1.depth, proof2.depth); } +#[test] +fn verify_salted() { + // depth 4 + let leaves = Digest256::bytes_as_digests(&LEAVES4).to_vec(); + let mut prng = thread_rng(); + let tree: SaltedMerkleTree = SaltedMerkleTree::new(leaves, &mut prng).unwrap(); + let (leaf, (salt, proof)) = tree.prove(1).unwrap(); + assert!(SaltedMerkleTree::::verify(*tree.root(), 1, leaf, salt, &proof).is_ok()); + + let (leaf, (salt, proof)) = tree.prove(2).unwrap(); + assert!(SaltedMerkleTree::::verify(*tree.root(), 2, leaf, salt, &proof).is_ok()); + + // depth 5 + let leaf = Digest256::bytes_as_digests(&LEAVES8).to_vec(); + let tree: SaltedMerkleTree = SaltedMerkleTree::new(leaf, &mut prng).unwrap(); + let (leaf, (salt, proof)) = tree.prove(1).unwrap(); + assert!(SaltedMerkleTree::::verify(*tree.root(), 1, leaf, salt, &proof).is_ok()); + + let (leaf, (salt, proof)) = tree.prove(6).unwrap(); + assert!(SaltedMerkleTree::::verify(*tree.root(), 6, leaf, salt, &proof).is_ok()); +} + proptest! 
{ #[test] fn prove_n_verify(tree in random_blake3_merkle_tree(128), diff --git a/crypto/src/random/default.rs b/crypto/src/random/default.rs index f5a996404..fa002171d 100644 --- a/crypto/src/random/default.rs +++ b/crypto/src/random/default.rs @@ -118,6 +118,22 @@ impl> RandomCoin for DefaultRando self.counter = 0; } + fn reseed_with_salt( + &mut self, + data: ::Digest, + salt: Option<::Digest>, + ) { + // TODO: revisit + if let Some(salt) = salt { + self.seed = H::merge(&[self.seed, data]); + self.seed = H::merge(&[self.seed, salt]); + self.counter = 0; + } else { + self.seed = H::merge(&[self.seed, data]); + self.counter = 0; + } + } + // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- diff --git a/crypto/src/random/mod.rs b/crypto/src/random/mod.rs index 7ee540ee5..10ee5d40c 100644 --- a/crypto/src/random/mod.rs +++ b/crypto/src/random/mod.rs @@ -38,6 +38,14 @@ pub trait RandomCoin: Sync { /// Reseeds the coin with the specified data by setting the new seed to hash(`seed` || `data`). fn reseed(&mut self, data: ::Digest); + /// Similar to `Self::reseed` but takes a salt which is not a `None` when zero-knowledge is enabled. + /// TODO: Should we remove `Self::reseed`? + fn reseed_with_salt( + &mut self, + data: ::Digest, + salt: Option<::Digest>, + ); + /// Computes hash(`seed` || `value`) and returns the number of leading zeros in the resulting /// value if it is interpreted as an integer in big-endian byte order. 
fn check_leading_zeros(&self, value: u64) -> u32; diff --git a/examples/Cargo.toml b/examples/Cargo.toml index f86e9ad50..179ba05b3 100644 --- a/examples/Cargo.toml +++ b/examples/Cargo.toml @@ -26,6 +26,7 @@ default = ["std"] std = ["core-utils/std", "hex/std", "rand-utils", "winterfell/std"] [dependencies] +air = { version = "0.10", path = "../air", package = "winter-air", default-features = false } blake3 = { version = "1.5", default-features = false } core-utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } hex = { version = "0.4", optional = true } @@ -35,6 +36,7 @@ tracing = { version = "0.1", default-features = false } tracing-forest = { version = "0.1", features = ["ansi", "smallvec"], optional = true } tracing-subscriber = { version = "0.3", features = ["std", "env-filter"] } winterfell = { version = "0.10", path = "../winterfell", default-features = false } +rand_chacha = { version = "0.3", default-features = false } [dev-dependencies] criterion = "0.5" diff --git a/examples/benches/fibonacci.rs b/examples/benches/fibonacci.rs index 44094beaf..076f2ee2f 100644 --- a/examples/benches/fibonacci.rs +++ b/examples/benches/fibonacci.rs @@ -18,7 +18,7 @@ fn fibonacci(c: &mut Criterion) { group.sample_size(10); group.measurement_time(Duration::from_secs(20)); - let options = ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 255); + let options = ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 255, false); for &size in SIZES.iter() { let fib = diff --git a/examples/benches/rescue.rs b/examples/benches/rescue.rs index bf6e8cc26..19e3a0815 100644 --- a/examples/benches/rescue.rs +++ b/examples/benches/rescue.rs @@ -18,7 +18,7 @@ fn rescue(c: &mut Criterion) { group.sample_size(10); group.measurement_time(Duration::from_secs(25)); - let options = ProofOptions::new(32, 32, 0, FieldExtension::None, 4, 255); + let options = ProofOptions::new(32, 32, 0, FieldExtension::None, 4, 255, false); for &size in 
SIZES.iter() { let resc = rescue::RescueExample::>::new(size, options.clone()); diff --git a/examples/src/fibonacci/fib2/prover.rs b/examples/src/fibonacci/fib2/prover.rs index 99d48f004..42a3ff270 100644 --- a/examples/src/fibonacci/fib2/prover.rs +++ b/examples/src/fibonacci/fib2/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, @@ -78,8 +80,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/fib8/prover.rs b/examples/src/fibonacci/fib8/prover.rs index 64182978c..01f56103b 100644 --- a/examples/src/fibonacci/fib8/prover.rs +++ b/examples/src/fibonacci/fib8/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, @@ -93,8 +95,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/fib_small/prover.rs b/examples/src/fibonacci/fib_small/prover.rs index 553988064..be69faee8 100644 --- a/examples/src/fibonacci/fib_small/prover.rs +++ b/examples/src/fibonacci/fib_small/prover.rs @@ -2,6 +2,8 @@ // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, @@ -83,8 +85,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/mulfib2/prover.rs b/examples/src/fibonacci/mulfib2/prover.rs index 4c99187bf..15907a100 100644 --- a/examples/src/fibonacci/mulfib2/prover.rs +++ b/examples/src/fibonacci/mulfib2/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, @@ -74,8 +76,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/mulfib8/prover.rs b/examples/src/fibonacci/mulfib8/prover.rs index 1fb58bd1a..197a350c1 100644 --- a/examples/src/fibonacci/mulfib8/prover.rs +++ b/examples/src/fibonacci/mulfib8/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, @@ -86,8 +88,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/utils.rs b/examples/src/fibonacci/utils.rs index e2f29f7c2..acb52d397 100644 --- a/examples/src/fibonacci/utils.rs +++ b/examples/src/fibonacci/utils.rs @@ -38,5 +38,5 @@ pub fn build_proof_options(use_extension_field: bool) -> winterfell::ProofOption } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 7) + ProofOptions::new(28, 8, 0, extension, 4, 7, false) } diff --git a/examples/src/lamport/aggregate/prover.rs b/examples/src/lamport/aggregate/prover.rs index 3927a20e6..61af1b91b 100644 --- a/examples/src/lamport/aggregate/prover.rs +++ b/examples/src/lamport/aggregate/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ @@ -122,8 +124,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/lamport/threshold/prover.rs b/examples/src/lamport/threshold/prover.rs index 87bd09bf6..f5cad228c 100644 --- a/examples/src/lamport/threshold/prover.rs +++ b/examples/src/lamport/threshold/prover.rs @@ -5,6 +5,8 @@ use std::collections::HashMap; +use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ @@ -164,8 +166,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/lib.rs b/examples/src/lib.rs index 33f733d7c..517871ecd 100644 --- a/examples/src/lib.rs +++ b/examples/src/lib.rs @@ -99,6 +99,7 @@ impl ExampleOptions { field_extension, self.folding_factor, 31, + false, ), hash_fn, ) diff --git a/examples/src/merkle/prover.rs b/examples/src/merkle/prover.rs index b1164ff83..57a21625b 100644 --- a/examples/src/merkle/prover.rs +++ b/examples/src/merkle/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in 
the // LICENSE file in the root directory of this source tree. +use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, @@ -129,8 +131,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/merkle/tests.rs b/examples/src/merkle/tests.rs index 4851d596c..cd180a63a 100644 --- a/examples/src/merkle/tests.rs +++ b/examples/src/merkle/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 31) + ProofOptions::new(28, 8, 0, extension, 4, 31, true) } diff --git a/examples/src/rescue/prover.rs b/examples/src/rescue/prover.rs index e8ca93757..a2797d9ed 100644 --- a/examples/src/rescue/prover.rs +++ b/examples/src/rescue/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, @@ -96,8 +98,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/rescue/tests.rs b/examples/src/rescue/tests.rs index 7daf66694..9ab273500 100644 --- a/examples/src/rescue/tests.rs +++ b/examples/src/rescue/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 31) + ProofOptions::new(28, 8, 0, extension, 4, 31, true) } diff --git a/examples/src/rescue_raps/prover.rs b/examples/src/rescue_raps/prover.rs index b8b21b1f3..7050626e9 100644 --- a/examples/src/rescue_raps/prover.rs +++ b/examples/src/rescue_raps/prover.rs @@ -3,7 +3,9 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; use core_utils::uninit_vector; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, @@ -128,8 +130,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/rescue_raps/tests.rs b/examples/src/rescue_raps/tests.rs index 99c8d24dc..3f3419fae 100644 --- a/examples/src/rescue_raps/tests.rs +++ b/examples/src/rescue_raps/tests.rs @@ -33,5 +33,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 31) + ProofOptions::new(28, 8, 0, extension, 4, 31, true) } diff --git a/examples/src/utils/rescue.rs b/examples/src/utils/rescue.rs index 33ca425ca..54d72a094 100644 --- a/examples/src/utils/rescue.rs +++ b/examples/src/utils/rescue.rs @@ -162,6 +162,18 @@ impl Digest for Hash { result[..bytes.len()].copy_from_slice(bytes); result } + + fn from_random_bytes(buffer: &[u8]) -> Self { + let mut digest: [BaseElement; DIGEST_SIZE] = [BaseElement::ZERO; DIGEST_SIZE]; + + buffer.chunks(16).zip(digest.iter_mut()).for_each(|(chunk, digest)| { + *digest = BaseElement::new(u128::from_be_bytes( + chunk.try_into().expect("Given the size of the chunk this should not panic"), + )) + }); + + Self(digest) + } } impl Serializable for Hash { diff --git a/examples/src/vdf/exempt/prover.rs b/examples/src/vdf/exempt/prover.rs index 16a7b8169..ed41c3799 100644 --- 
a/examples/src/vdf/exempt/prover.rs +++ b/examples/src/vdf/exempt/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, @@ -79,8 +81,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/vdf/exempt/tests.rs b/examples/src/vdf/exempt/tests.rs index 212cda767..c9c46d6e2 100644 --- a/examples/src/vdf/exempt/tests.rs +++ b/examples/src/vdf/exempt/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(85, 2, 0, extension, 4, 31) + ProofOptions::new(85, 4, 0, extension, 4, 31, true) } diff --git a/examples/src/vdf/regular/prover.rs b/examples/src/vdf/regular/prover.rs index 20bdf7874..41dbac4f2 100644 --- a/examples/src/vdf/regular/prover.rs +++ b/examples/src/vdf/regular/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, @@ -74,8 +76,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/vdf/regular/tests.rs b/examples/src/vdf/regular/tests.rs index a3100a444..93ed54e54 100644 --- a/examples/src/vdf/regular/tests.rs +++ b/examples/src/vdf/regular/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(85, 2, 0, extension, 4, 31) + ProofOptions::new(2, 4, 0, extension, 2, 255, true) } diff --git a/fri/Cargo.toml b/fri/Cargo.toml index 2e3d1b20b..b7fd456ee 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -32,6 +32,8 @@ std = ["crypto/std", "math/std", "utils/std"] crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +rand_chacha = { version = "0.3", default-features = false } +rand = { version = "0.8" } [dev-dependencies] criterion = "0.5" diff --git a/fri/benches/prover.rs b/fri/benches/prover.rs index bfc096fc3..07b3b4ef5 100644 --- a/fri/benches/prover.rs +++ b/fri/benches/prover.rs @@ -23,6 +23,7 @@ pub fn build_layers(c: &mut Criterion) { for &domain_size in 
&BATCH_SIZES { let evaluations = build_evaluations(domain_size); + let mut prng = ::from_entropy(); fri_group.bench_with_input( BenchmarkId::new("build_layers", domain_size), @@ -37,8 +38,9 @@ pub fn build_layers(c: &mut Criterion) { BaseElement, Blake3_256, DefaultRandomCoin>, - >::new(domain_size, 32); - prover.build_layers(&mut channel, evaluations); + >::new(domain_size, 32, false); + + prover.build_layers(&mut channel, evaluations, &mut prng); prover.reset(); }, BatchSize::LargeInput, diff --git a/fri/src/proof.rs b/fri/src/proof.rs index 65dd2af92..8d4495213 100644 --- a/fri/src/proof.rs +++ b/fri/src/proof.rs @@ -34,6 +34,7 @@ pub struct FriProof { layers: Vec, remainder: Vec, num_partitions: u8, // stored as power of 2 + salts: Vec, } impl FriProof { @@ -49,6 +50,7 @@ impl FriProof { layers: Vec, remainder: Vec, num_partitions: usize, + salts: Vec, ) -> Self { assert!(!remainder.is_empty(), "number of remainder elements must be greater than zero"); assert!( @@ -69,6 +71,7 @@ impl FriProof { layers, remainder: remainder_bytes, num_partitions: num_partitions.trailing_zeros() as u8, + salts, } } @@ -78,6 +81,7 @@ impl FriProof { layers: Vec::new(), remainder: Vec::new(), num_partitions: 0, + salts: vec![], } } @@ -190,6 +194,16 @@ impl FriProof { } Ok(remainder) } + + /// Returns a vector of values used in order to salt the transcript when zero-knowledge is + /// enabled. 
+ pub fn parse_salts(&self) -> Result>, DeserializationError> + where + E: FieldElement, + H: ElementHasher, + { + Vec::read_from_bytes(&self.salts) + } } // SERIALIZATION / DESERIALIZATION @@ -210,6 +224,10 @@ impl Serializable for FriProof { // write number of partitions target.write_u8(self.num_partitions); + + // write salts + target.write_u32(self.salts.len() as u32); + target.write_bytes(&self.salts); } } @@ -230,7 +248,11 @@ impl Deserializable for FriProof { // read number of partitions let num_partitions = source.read_u8()?; - Ok(FriProof { layers, remainder, num_partitions }) + // read salts + let salts_len = source.read_u32()? as usize; + let salts = source.read_vec(salts_len)?; + + Ok(FriProof { layers, remainder, num_partitions, salts }) } } diff --git a/fri/src/prover/channel.rs b/fri/src/prover/channel.rs index 7231e757c..38a4771b4 100644 --- a/fri/src/prover/channel.rs +++ b/fri/src/prover/channel.rs @@ -6,7 +6,7 @@ use alloc::vec::Vec; use core::marker::PhantomData; -use crypto::{ElementHasher, Hasher, RandomCoin}; +use crypto::{Digest, ElementHasher, Hasher, RandomCoin}; use math::FieldElement; // PROVER CHANNEL TRAIT @@ -34,7 +34,13 @@ pub trait ProverChannel { /// the hash of each row to get one entry of the vector being committed to. Thus, the number /// of elements grouped into a single leaf is equal to the `folding_factor` used for FRI layer /// construction. - fn commit_fri_layer(&mut self, layer_root: ::Digest); + fn commit_fri_layer

( + &mut self, + layer_root: ::Digest, + prng: &mut P, + ) -> Option<::Digest> + where + P: rand::RngCore; /// Returns a random α drawn uniformly at random from the entire field. /// @@ -63,6 +69,8 @@ where commitments: Vec, domain_size: usize, num_queries: usize, + is_zk: bool, + salts: Vec>, _field_element: PhantomData, } @@ -78,7 +86,7 @@ where /// Panics if: /// * `domain_size` is smaller than 8 or is not a power of two. /// * `num_queries` is zero. - pub fn new(domain_size: usize, num_queries: usize) -> Self { + pub fn new(domain_size: usize, num_queries: usize, is_zk: bool) -> Self { assert!(domain_size >= 8, "domain size must be at least 8, but was {domain_size}"); assert!( domain_size.is_power_of_two(), @@ -90,6 +98,8 @@ where commitments: Vec::new(), domain_size, num_queries, + is_zk, + salts: vec![], _field_element: PhantomData, } } @@ -124,9 +134,27 @@ where { type Hasher = H; - fn commit_fri_layer(&mut self, layer_root: H::Digest) { + fn commit_fri_layer( + &mut self, + layer_root: H::Digest, + prng: &mut P, + ) -> Option<::Digest> { self.commitments.push(layer_root); - self.public_coin.reseed(layer_root); + + // sample a salt for Fiat-Shamir is zero-knowledge is enabled + let salt = if self.is_zk { + let mut buffer = [0_u8; 32]; + prng.fill_bytes(&mut buffer); + + let salt = Digest::from_random_bytes(&buffer); + + Some(salt) + } else { + None + }; + self.salts.push(salt); + self.public_coin.reseed_with_salt(layer_root, salt); + salt } fn draw_fri_alpha(&mut self) -> E { diff --git a/fri/src/prover/mod.rs b/fri/src/prover/mod.rs index 17092ad34..3accc5998 100644 --- a/fri/src/prover/mod.rs +++ b/fri/src/prover/mod.rs @@ -12,6 +12,7 @@ use math::{fft, FieldElement}; use utils::iterators::*; use utils::{ flatten_vector_elements, group_slice_elements, iter_mut, transpose_slice, uninit_vector, + Serializable, }; use crate::{ @@ -102,6 +103,7 @@ where options: FriOptions, layers: Vec>, remainder_poly: FriRemainder, + salts: Vec>, _channel: PhantomData, } @@ 
-131,6 +133,7 @@ where options, layers: Vec::new(), remainder_poly: FriRemainder(vec![]), + salts: vec![], _channel: PhantomData, } } @@ -176,7 +179,12 @@ where /// /// # Panics /// Panics if the prover state is dirty (the vector of layers is not empty). - pub fn build_layers(&mut self, channel: &mut C, mut evaluations: Vec) { + pub fn build_layers( + &mut self, + channel: &mut C, + mut evaluations: Vec, + prng: &mut R, + ) { assert!( self.layers.is_empty(), "a prior proof generation request has not been completed yet" @@ -186,20 +194,25 @@ where // has small enough degree for _ in 0..self.options.num_fri_layers(evaluations.len()) { match self.folding_factor() { - 2 => self.build_layer::<2>(channel, &mut evaluations), - 4 => self.build_layer::<4>(channel, &mut evaluations), - 8 => self.build_layer::<8>(channel, &mut evaluations), - 16 => self.build_layer::<16>(channel, &mut evaluations), + 2 => self.build_layer::(channel, &mut evaluations, prng), + 4 => self.build_layer::(channel, &mut evaluations, prng), + 8 => self.build_layer::(channel, &mut evaluations, prng), + 16 => self.build_layer::(channel, &mut evaluations, prng), _ => unimplemented!("folding factor {} is not supported", self.folding_factor()), } } - self.set_remainder(channel, &mut evaluations); + self.set_remainder(channel, &mut evaluations, prng); } /// Builds a single FRI layer by first committing to the `evaluations`, then drawing a random /// alpha from the channel and use it to perform degree-respecting projection. 
- fn build_layer(&mut self, channel: &mut C, evaluations: &mut Vec) { + fn build_layer( + &mut self, + channel: &mut C, + evaluations: &mut Vec, + prng: &mut R, + ) { // commit to the evaluations at the current layer; we do this by first transposing the // evaluations into a matrix of N columns, then hashing each row into a digest, and finally // commiting to vector of these digests; we do this so that we could de-commit to N values @@ -208,7 +221,8 @@ where let evaluation_vector_commitment = build_layer_commitment::<_, _, V, N>(&transposed_evaluations) .expect("failed to construct FRI layer commitment"); - channel.commit_fri_layer(evaluation_vector_commitment.commitment()); + let salt = channel.commit_fri_layer(evaluation_vector_commitment.commitment(), prng); + self.salts.push(salt); // draw a pseudo-random coefficient from the channel, and use it in degree-respecting // projection to reduce the degree of evaluations by N @@ -222,13 +236,19 @@ where } /// Creates remainder polynomial in coefficient form from a vector of `evaluations` over a domain. 
- fn set_remainder(&mut self, channel: &mut C, evaluations: &mut [E]) { + fn set_remainder( + &mut self, + channel: &mut C, + evaluations: &mut [E], + prng: &mut R, + ) { let inv_twiddles = fft::get_inv_twiddles(evaluations.len()); fft::interpolate_poly_with_offset(evaluations, &inv_twiddles, self.options.domain_offset()); let remainder_poly_size = evaluations.len() / self.options.blowup_factor(); let remainder_poly = evaluations[..remainder_poly_size].to_vec(); let commitment = ::hash_elements(&remainder_poly); - channel.commit_fri_layer(commitment); + let salt = channel.commit_fri_layer(commitment, prng); + self.salts.push(salt); self.remainder_poly = FriRemainder(remainder_poly); } @@ -278,7 +298,8 @@ where // clear layers so that another proof can be generated self.reset(); - FriProof::new(layers, remainder, 1) + let salts = self.salts.to_bytes(); + FriProof::new(layers, remainder, 1, salts) } } diff --git a/fri/src/prover/tests.rs b/fri/src/prover/tests.rs index e765092c5..7387076cd 100644 --- a/fri/src/prover/tests.rs +++ b/fri/src/prover/tests.rs @@ -7,6 +7,8 @@ use alloc::vec::Vec; use crypto::{hashers::Blake3_256, DefaultRandomCoin, Hasher, MerkleTree, RandomCoin}; use math::{fft, fields::f128::BaseElement, FieldElement}; +use rand::SeedableRng; +use rand_chacha::ChaCha20Rng; use utils::{Deserializable, Serializable, SliceReader}; use super::{DefaultProverChannel, FriProver}; @@ -45,7 +47,7 @@ pub fn build_prover_channel( trace_length: usize, options: &FriOptions, ) -> DefaultProverChannel> { - DefaultProverChannel::new(trace_length * options.blowup_factor(), 32) + DefaultProverChannel::new(trace_length * options.blowup_factor(), 32, false) } pub fn build_evaluations(trace_length: usize, lde_blowup: usize) -> Vec { @@ -105,7 +107,8 @@ fn fri_prove_verify( // instantiate the prover and generate the proof let mut prover = FriProver::<_, _, _, MerkleTree>::new(options.clone()); - prover.build_layers(&mut channel, evaluations.clone()); + let mut prng = 
ChaCha20Rng::from_entropy(); + prover.build_layers(&mut channel, evaluations.clone(), &mut prng); let positions = channel.draw_query_positions(0); let proof = prover.build_proof(&positions); diff --git a/fri/src/verifier/channel.rs b/fri/src/verifier/channel.rs index 6f8709858..91f7ce142 100644 --- a/fri/src/verifier/channel.rs +++ b/fri/src/verifier/channel.rs @@ -70,6 +70,9 @@ pub trait VerifierChannel { /// Reads and removes the remainder polynomial from the channel. fn take_fri_remainder(&mut self) -> Vec; + /// Reads and removes the salt value needed for Fiat-Shamir at the current round. + fn take_salt(&mut self) -> Option<::Digest>; + // PROVIDED METHODS // -------------------------------------------------------------------------------------------- @@ -135,6 +138,7 @@ pub struct DefaultVerifierChannel< layer_queries: Vec>, remainder: Vec, num_partitions: usize, + salts: Vec>, _h: PhantomData, } @@ -156,6 +160,7 @@ where ) -> Result { let num_partitions = proof.num_partitions(); + let salts = proof.parse_salts::()?; let remainder = proof.parse_remainder()?; let (layer_queries, layer_proofs) = proof.parse_layers::(domain_size, folding_factor)?; @@ -166,6 +171,7 @@ where layer_queries, remainder, num_partitions, + salts, _h: PhantomData, }) } @@ -199,4 +205,8 @@ where fn take_fri_remainder(&mut self) -> Vec { self.remainder.clone() } + + fn take_salt(&mut self) -> Option { + self.salts.remove(0) + } } diff --git a/fri/src/verifier/mod.rs b/fri/src/verifier/mod.rs index ff0582b2c..da7f889fa 100644 --- a/fri/src/verifier/mod.rs +++ b/fri/src/verifier/mod.rs @@ -121,7 +121,8 @@ where let mut layer_alphas = Vec::with_capacity(layer_commitments.len()); let mut max_degree_plus_1 = max_poly_degree + 1; for (depth, commitment) in layer_commitments.iter().enumerate() { - public_coin.reseed(*commitment); + let salt = channel.take_salt(); + public_coin.reseed_with_salt(*commitment, salt); let alpha = public_coin.draw().map_err(VerifierError::RandomCoinError)?; 
layer_alphas.push(alpha); diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 6dd616a1a..199bde2a7 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -35,6 +35,9 @@ crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", defa fri = { version = "0.10", path = '../fri', package = "winter-fri", default-features = false } math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } maybe_async = { version = "0.10", path = "../utils/maybe_async" , package = "winter-maybe-async" } +rand_chacha = { version = "0.3", default-features = false } +rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } +rand = { version = "0.8" } tracing = { version = "0.1", default-features = false, features = ["attributes"]} utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } diff --git a/prover/benches/lagrange_kernel.rs b/prover/benches/lagrange_kernel.rs index d6ab6a5bc..82d3c00f0 100644 --- a/prover/benches/lagrange_kernel.rs +++ b/prover/benches/lagrange_kernel.rs @@ -8,11 +8,13 @@ use std::time::Duration; use air::{ Air, AirContext, Assertion, AuxRandElements, ConstraintCompositionCoefficients, EvaluationFrame, FieldExtension, GkrRandElements, LagrangeKernelRandElements, PartitionOptions, - ProofOptions, TraceInfo, TransitionConstraintDegree, + ProofOptions, TraceInfo, TransitionConstraintDegree, ZkParameters, }; use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree, RandomCoin}; use math::{fields::f64::BaseElement, ExtensionOf, FieldElement}; +use rand::SeedableRng; +use rand_chacha::ChaCha20Rng; use winter_prover::{ matrix::ColMatrix, DefaultConstraintEvaluator, DefaultTraceLde, Prover, ProverGkrProof, StarkDomain, Trace, TracePolyTable, @@ -173,7 +175,7 @@ impl LagrangeProver { fn new(aux_trace_width: usize) -> Self { Self { 
aux_trace_width, - options: ProofOptions::new(1, 2, 0, FieldExtension::None, 2, 1), + options: ProofOptions::new(1, 2, 0, FieldExtension::None, 2, 1, false), } } } @@ -203,11 +205,20 @@ impl Prover for LagrangeProver { main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) where E: math::FieldElement, { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E>( diff --git a/prover/src/channel.rs b/prover/src/channel.rs index db82f5095..f73b4f3b3 100644 --- a/prover/src/channel.rs +++ b/prover/src/channel.rs @@ -10,11 +10,13 @@ use air::{ proof::{Commitments, Context, OodFrame, Proof, Queries, TraceOodFrame}, Air, ConstraintCompositionCoefficients, DeepCompositionCoefficients, }; -use crypto::{ElementHasher, RandomCoin, VectorCommitment}; +use crypto::{Digest, ElementHasher, Hasher, RandomCoin, VectorCommitment}; use fri::FriProof; use math::{FieldElement, ToElements}; +use rand::RngCore; #[cfg(feature = "concurrent")] use utils::iterators::*; +use utils::Serializable; // TYPES AND INTERFACES // ================================================================================================ @@ -33,6 +35,7 @@ where commitments: Commitments, ood_frame: OodFrame, pow_nonce: u64, + salts: Vec>, _field_element: PhantomData, _vector_commitment: PhantomData, } @@ -51,8 +54,12 @@ where // CONSTRUCTOR // -------------------------------------------------------------------------------------------- /// Creates a new prover channel for the specified `air` and public inputs. 
- pub fn new(air: &'a A, mut pub_inputs_elements: Vec) -> Self { - let context = Context::new::(air.trace_info().clone(), air.options().clone()); + pub fn new(air: &'a A, mut pub_inputs_elements: Vec, zk_blowup: usize) -> Self { + let context = Context::new::( + air.trace_info().clone(), + air.options().clone(), + zk_blowup, + ); // build a seed for the public coin; the initial seed is a hash of the proof context and // the public inputs, but as the protocol progresses, the coin will be reseeded with the @@ -67,6 +74,7 @@ where commitments: Commitments::default(), ood_frame: OodFrame::default(), pow_nonce: 0, + salts: vec![], _field_element: PhantomData, _vector_commitment: PhantomData, } @@ -76,29 +84,81 @@ where // -------------------------------------------------------------------------------------------- /// Commits the prover the extended execution trace. - pub fn commit_trace(&mut self, trace_root: H::Digest) { + pub fn commit_trace

(&mut self, trace_root: H::Digest, prng: &mut P) + where + P: RngCore, + { self.commitments.add::(&trace_root); - self.public_coin.reseed(trace_root); + + // sample a salt for Fiat-Shamir if zero-knowledge is enabled + let salt = if self.air.is_zk() { + let mut buffer = [0_u8; 32]; + prng.fill_bytes(&mut buffer); + Some(Digest::from_random_bytes(&buffer)) + } else { + None + }; + self.salts.push(salt); + self.public_coin.reseed_with_salt(trace_root, salt); } /// Commits the prover to the evaluations of the constraint composition polynomial. - pub fn commit_constraints(&mut self, constraint_root: H::Digest) { + pub fn commit_constraints

(&mut self, constraint_root: H::Digest, prng: &mut P) + where + P: RngCore, + { self.commitments.add::(&constraint_root); - self.public_coin.reseed(constraint_root); + + // sample a salt for Fiat-Shamir if zero-knowledge is enabled + let salt = if self.air.is_zk() { + let mut buffer = [0_u8; 32]; + prng.fill_bytes(&mut buffer); + Some(Digest::from_random_bytes(&buffer)) + } else { + None + }; + self.salts.push(salt); + self.public_coin.reseed_with_salt(constraint_root, salt); } /// Saves the evaluations of trace polynomials over the out-of-domain evaluation frame. This /// also reseeds the public coin with the hashes of the evaluation frame states. - pub fn send_ood_trace_states(&mut self, trace_ood_frame: &TraceOodFrame) { + pub fn send_ood_trace_states

(&mut self, trace_ood_frame: &TraceOodFrame, prng: &mut P) + where + P: RngCore, + { let trace_states_hash = self.ood_frame.set_trace_states::(trace_ood_frame); - self.public_coin.reseed(trace_states_hash); + + // sample a salt for Fiat-Shamir if zero-knowledge is enabled + let salt = if self.air.is_zk() { + let mut buffer = [0_u8; 32]; + prng.fill_bytes(&mut buffer); + Some(Digest::from_random_bytes(&buffer)) + } else { + None + }; + self.salts.push(salt); + self.public_coin.reseed_with_salt(trace_states_hash, salt); } /// Saves the evaluations of constraint composition polynomial columns at the out-of-domain /// point. This also reseeds the public coin wit the hash of the evaluations. - pub fn send_ood_constraint_evaluations(&mut self, evaluations: &[E]) { + pub fn send_ood_constraint_evaluations

(&mut self, evaluations: &[E], prng: &mut P) + where + P: RngCore, + { self.ood_frame.set_constraint_evaluations(evaluations); - self.public_coin.reseed(H::hash_elements(evaluations)); + + // sample a salt for Fiat-Shamir if zero-knowledge is enabled + let salt = if self.air.is_zk() { + let mut buffer = [0_u8; 32]; + prng.fill_bytes(&mut buffer); + Some(Digest::from_random_bytes(&buffer)) + } else { + None + }; + self.salts.push(salt); + self.public_coin.reseed_with_salt(H::hash_elements(evaluations), salt); } // PUBLIC COIN METHODS @@ -139,7 +199,7 @@ where /// are removed from the returned vector. pub fn get_query_positions(&mut self) -> Vec { let num_queries = self.context.options().num_queries(); - let lde_domain_size = self.context.lde_domain_size(); + let lde_domain_size = self.context.lde_domain_size::(); let mut positions = self .public_coin .draw_integers(num_queries, lde_domain_size, self.pow_nonce) @@ -196,6 +256,7 @@ where pow_nonce: self.pow_nonce, num_unique_queries: num_query_positions as u8, gkr_proof, + salts: self.salts.to_bytes(), } } } @@ -214,9 +275,26 @@ where type Hasher = H; /// Commits the prover to a FRI layer. - fn commit_fri_layer(&mut self, layer_root: H::Digest) { + fn commit_fri_layer

( + &mut self, + layer_root: H::Digest, + prng: &mut P, + ) -> Option<::Digest> + where + P: RngCore, + { self.commitments.add::(&layer_root); - self.public_coin.reseed(layer_root); + + // sample a salt for Fiat-Shamir if zero-knowledge is enabled + let salt = if self.air.is_zk() { + let mut buffer = [0_u8; 32]; + prng.fill_bytes(&mut buffer); + Some(Digest::from_random_bytes(&buffer)) + } else { + None + }; + self.public_coin.reseed_with_salt(layer_root, salt); + salt } /// Returns a new alpha drawn from the public coin. diff --git a/prover/src/composer/mod.rs b/prover/src/composer/mod.rs index 5d463d331..1d394cc63 100644 --- a/prover/src/composer/mod.rs +++ b/prover/src/composer/mod.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. use alloc::vec::Vec; -use air::{proof::TraceOodFrame, DeepCompositionCoefficients}; +use air::{proof::TraceOodFrame, Air, DeepCompositionCoefficients}; use math::{ add_in_place, fft, mul_acc, polynom::{self, syn_div_roots_in_place}, @@ -22,6 +22,8 @@ pub struct DeepCompositionPoly { coefficients: Vec, cc: DeepCompositionCoefficients, z: E, + g: E, + is_zk: bool, } impl DeepCompositionPoly { @@ -30,17 +32,27 @@ impl DeepCompositionPoly { /// Returns a new DEEP composition polynomial. Initially, this polynomial will be empty, and /// the intent is to populate the coefficients via add_trace_polys() and add_constraint_polys() /// methods. - pub fn new(z: E, cc: DeepCompositionCoefficients) -> Self { - DeepCompositionPoly { coefficients: vec![], cc, z } + pub fn new>( + air: &A, + z: E, + cc: DeepCompositionCoefficients, + ) -> Self { + DeepCompositionPoly { + coefficients: vec![], + cc, + z, + g: E::from(air.trace_domain_generator()), + is_zk: air.is_zk(), + } } // ACCESSORS // -------------------------------------------------------------------------------------------- - /// Returns the size of the DEEP composition polynomial. 
- pub fn poly_size(&self) -> usize { - self.coefficients.len() - } + ///// Returns the size of the DEEP composition polynomial. + //pub fn poly_size(&self) -> usize { + //self.coefficients.len() + //} /// Returns the degree of the composition polynomial. pub fn degree(&self) -> usize { @@ -82,8 +94,7 @@ impl DeepCompositionPoly { // compute a second out-of-domain point offset from z by exactly trace generator; this // point defines the "next" computation state in relation to point z let trace_length = trace_polys.poly_size(); - let g = E::from(E::BaseField::get_root_of_unity(trace_length.ilog2())); - let next_z = self.z * g; + let next_z = self.z * self.g; // combine trace polynomials into 2 composition polynomials T'(x) and T''(x) let mut t1_composition = vec![E::ZERO; trace_length]; @@ -185,7 +196,6 @@ impl DeepCompositionPoly { // set the coefficients of the DEEP composition polynomial self.coefficients = trace_poly; - assert_eq!(self.poly_size() - 2, self.degree()); } // CONSTRAINT POLYNOMIAL COMPOSITION @@ -194,7 +204,7 @@ impl DeepCompositionPoly { /// into the DEEP composition polynomial. This method is intended to be called only after the /// add_trace_polys() method has been executed. The composition is done as follows: /// - /// - For each H_i(x), compute H'_i(x) = (H_i(x) - H(z)) / (x - z), where H_i(x) is the + /// - For each H_i(x), compute H'_i(x) = (H_i(x) - H(z)) / (x - z^m), where H_i(x) is the /// ith composition polynomial column. 
/// - Then, combine all H_i(x) polynomials together by computing H(x) = sum(H_i(x) * cc_i) for /// all i, where cc_i is the coefficient for the random linear combination drawn from the @@ -208,22 +218,32 @@ impl DeepCompositionPoly { ) { assert!(!self.coefficients.is_empty()); - let z = self.z; - let mut column_polys = composition_poly.into_columns(); + let num_cols = ood_evaluations.len(); + let z = self.z; // Divide out the OOD point z from column polynomials - iter_mut!(column_polys).zip(ood_evaluations).for_each(|(poly, value_at_z)| { - // compute H'_i(x) = (H_i(x) - H_i(z)) / (x - z) - poly[0] -= value_at_z; - polynom::syn_div_in_place(poly, 1, z); - }); + iter_mut!(column_polys).take(num_cols).zip(ood_evaluations).for_each( + |(poly, value_at_z)| { + // compute H'_i(x) = (H_i(x) - H_i(z)) / (x - z) + poly[0] -= value_at_z; + polynom::syn_div_in_place(poly, 1, z); + }, + ); // add H'_i(x) * cc_i for all i into the DEEP composition polynomial - for (i, poly) in column_polys.into_iter().enumerate() { - mul_acc::(&mut self.coefficients, &poly, self.cc.constraints[i]); + for (i, poly) in column_polys.iter().enumerate().take(num_cols) { + mul_acc::(&mut self.coefficients, poly, self.cc.constraints[i]); + } + + // add the randomizer codeword for FRI + if self.is_zk { + iter_mut!(self.coefficients) + .zip(&column_polys[column_polys.len() - 1]) + .for_each(|(a, b)| *a += *b); } - assert_eq!(self.poly_size() - 2, self.degree()); + + assert_eq!(self.coefficients.len() - 2, self.degree()); } // LOW-DEGREE EXTENSION diff --git a/prover/src/constraints/commitment.rs b/prover/src/constraints/commitment.rs index ac71fdc94..d79e02d46 100644 --- a/prover/src/constraints/commitment.rs +++ b/prover/src/constraints/commitment.rs @@ -42,7 +42,7 @@ where pub fn new(evaluations: RowMatrix, commitment: V) -> ConstraintCommitment { assert_eq!( evaluations.num_rows(), - commitment.domain_len(), + commitment.get_domain_len(), "number of rows in constraint evaluation matrix must be the 
same as the size \ of the vector commitment domain" ); } diff --git a/prover/src/constraints/composition_poly.rs b/prover/src/constraints/composition_poly.rs index bad52f7f5..a418a04c9 100644 --- a/prover/src/constraints/composition_poly.rs +++ b/prover/src/constraints/composition_poly.rs @@ -5,7 +5,9 @@ use alloc::vec::Vec; -use math::{fft, polynom::degree_of, FieldElement}; +use air::ZkParameters; +use math::{fft, polynom, FieldElement}; +use rand::{Rng, RngCore}; use super::{ColMatrix, StarkDomain}; @@ -47,16 +49,23 @@ impl CompositionPolyTrace { /// /// For example, if the composition polynomial has degree 2N - 1, where N is the trace length, /// it will be stored as two columns of size N (each of degree N - 1). +/// +/// When zero-knowledge is enabled, the composition polynomial is split into segment polynomials +/// such that each segment polynomial's degree is small enough to accommodate adding a randomizer +/// polynomial without the degree of the resulting randomized segment polynomial exceeding +/// `domain.trace_length()`. pub struct CompositionPoly { data: ColMatrix, } impl CompositionPoly { /// Returns a new composition polynomial. 
- pub fn new( + pub fn new( composition_trace: CompositionPolyTrace, domain: &StarkDomain, num_cols: usize, + zk_parameters: Option, + prng: &mut R, ) -> Self { assert!( domain.trace_length() < composition_trace.num_rows(), @@ -70,7 +79,30 @@ impl CompositionPoly { let inv_twiddles = fft::get_inv_twiddles::(trace.len()); fft::interpolate_poly_with_offset(&mut trace, &inv_twiddles, domain.offset()); - let polys = segment(trace, domain.trace_length(), num_cols); + // compute the segment quotient polynomials + let quotient_degree = polynom::degree_of(&trace); + let degree_chunked_quotient = if zk_parameters.is_some() { + (quotient_degree + 1).div_ceil(num_cols) + } else { + domain.trace_length() + }; + let polys = segment(trace, degree_chunked_quotient, num_cols); + let mut polys = complement_to(polys, domain.trace_length(), prng); + + // generate a randomizer polynomial for FRI + if zk_parameters.is_some() { + let extended_len = polys[0].len(); + let mut zk_col = vec![E::ZERO; extended_len]; + + for a in zk_col.iter_mut() { + let bytes = prng.gen::<[u8; 32]>(); + *a = E::from_random_bytes(&bytes[..E::VALUE_SIZE]) + .expect("failed to generate randomness"); + } + // reduce the degree to match that of the DEEP composition polynomial + zk_col[extended_len - 1] = E::ZERO; + polys.push(zk_col) + } CompositionPoly { data: ColMatrix::new(polys) } } @@ -96,8 +128,8 @@ impl CompositionPoly { } /// Returns evaluations of all composition polynomial columns at point z. - pub fn evaluate_at(&self, z: E) -> Vec { - self.data.evaluate_columns_at(z) + pub fn evaluate_at(&self, z: E, is_zk: bool) -> Vec { + self.data.evaluate_columns_at(z, is_zk) } /// Returns a reference to the matrix of individual column polynomials. @@ -111,6 +143,55 @@ impl CompositionPoly { } } +/// Takes a vector of coefficients representing the segment polynomials of a given composition +/// polynomial as input, and generates coefficients of their randomized version. 
+/// +/// The randomization technique is the one in section 4.1 in https://eprint.iacr.org/2024/1037.pdf. +fn complement_to( + polys: Vec>, + l: usize, + prng: &mut R, +) -> Vec> { + let mut result = vec![]; + + let randomizer_poly_size = l - polys[0].len(); + let mut current_poly = vec![E::ZERO; randomizer_poly_size]; + let mut previous_poly = vec![E::ZERO; randomizer_poly_size]; + + for (_, poly) in polys.iter().enumerate().take_while(|(index, _)| *index != polys.len() - 1) { + let diff = l - poly.len(); + + for eval in current_poly.iter_mut().take(diff) { + let bytes = prng.gen::<[u8; 32]>(); + *eval = E::from_random_bytes(&bytes[..E::VALUE_SIZE]) + .expect("failed to generate randomness"); + } + + let mut res = vec![]; + res.extend_from_slice(poly); + res.extend_from_slice(¤t_poly); + + for i in 0..randomizer_poly_size { + res[i] -= previous_poly[i]; + } + + previous_poly.copy_from_slice(¤t_poly[..randomizer_poly_size]); + + result.push(res) + } + + let poly = polys.last().unwrap(); + let mut res = vec![E::ZERO; l]; + for (i, entry) in poly.iter().enumerate() { + res[i] = *entry; + } + for i in 0..randomizer_poly_size { + res[i] -= previous_poly[i]; + } + result.push(res); + result +} + // HELPER FUNCTIONS // ================================================================================================ @@ -123,8 +204,6 @@ fn segment( trace_len: usize, num_cols: usize, ) -> Vec> { - debug_assert!(degree_of(&coefficients) < trace_len * num_cols); - coefficients .chunks(trace_len) .take(num_cols) diff --git a/prover/src/constraints/evaluation_table.rs b/prover/src/constraints/evaluation_table.rs index 9add913f4..554136a56 100644 --- a/prover/src/constraints/evaluation_table.rs +++ b/prover/src/constraints/evaluation_table.rs @@ -73,7 +73,7 @@ impl<'a, E: FieldElement> ConstraintEvaluationTable<'a, E> { // collect expected degrees for all transition constraints to compare them against actual // degrees; we do this in debug mode only because this comparison is 
expensive let expected_transition_degrees = - build_transition_constraint_degrees(transition_constraints, domain.trace_length()); + build_transition_constraint_degrees(transition_constraints, domain); ConstraintEvaluationTable { evaluations: uninit_matrix(num_columns, num_rows), @@ -420,16 +420,35 @@ fn get_inv_evaluation( #[cfg(debug_assertions)] fn build_transition_constraint_degrees( constraints: &TransitionConstraints, - trace_length: usize, + domain: &StarkDomain, ) -> Vec { + use crate::domain::ZkInfo; + let mut result = Vec::new(); + let (trace_length, trace_len_ext) = if let Some(zk_info) = domain.zk_info() { + let ZkInfo { + original_trace_length, + degree_witness_randomizer, + }: ZkInfo = zk_info; + + let ext_len = (original_trace_length + degree_witness_randomizer).next_power_of_two(); + (original_trace_length, ext_len) + } else { + (domain.trace_length(), domain.trace_length()) + }; for degree in constraints.main_constraint_degrees() { - result.push(degree.get_evaluation_degree(trace_length) - constraints.divisor().degree()) + result.push( + degree.get_evaluation_degree(trace_length, trace_len_ext) + - constraints.divisor().degree(), + ) } for degree in constraints.aux_constraint_degrees() { - result.push(degree.get_evaluation_degree(trace_length) - constraints.divisor().degree()) + result.push( + degree.get_evaluation_degree(trace_length, trace_len_ext) + - constraints.divisor().degree(), + ) } result diff --git a/prover/src/constraints/evaluator/periodic_table.rs b/prover/src/constraints/evaluator/periodic_table.rs index ec72aa766..4601460e3 100644 --- a/prover/src/constraints/evaluator/periodic_table.rs +++ b/prover/src/constraints/evaluator/periodic_table.rs @@ -37,23 +37,29 @@ impl PeriodicValueTable { // them for polynomials of the same size let mut twiddle_map = BTreeMap::new(); + // zero-knowledge blowup factor + let factor = air.context().trace_length_ext() / air.trace_length(); let evaluations = polys .iter() .map(|poly| { let poly_size = 
poly.len(); let num_cycles = (air.trace_length() / poly_size) as u64; let offset = air.domain_offset().exp(num_cycles.into()); - let twiddles = - twiddle_map.entry(poly_size).or_insert_with(|| fft::get_twiddles(poly_size)); - fft::evaluate_poly_with_offset(poly, twiddles, offset, air.ce_blowup_factor()) + let mut new_poly = vec![B::ZERO; factor * poly_size]; + new_poly[..poly_size].copy_from_slice(&poly[..poly_size]); + let twiddles = twiddle_map + .entry(new_poly.len()) + .or_insert_with(|| fft::get_twiddles(new_poly.len())); + + fft::evaluate_poly_with_offset(&new_poly, twiddles, offset, air.ce_blowup_factor()) }) .collect::>(); // allocate memory to hold all expanded values and copy polynomial evaluations into the // table in such a way that values for the same row are adjacent to each other. let row_width = polys.len(); - let column_length = max_poly_size * air.ce_blowup_factor(); + let column_length = factor * max_poly_size * air.ce_blowup_factor(); let mut values = unsafe { uninit_vector(row_width * column_length) }; for i in 0..column_length { for (j, column) in evaluations.iter().enumerate() { diff --git a/prover/src/domain.rs b/prover/src/domain.rs index 87a54bbe5..525733a1b 100644 --- a/prover/src/domain.rs +++ b/prover/src/domain.rs @@ -30,6 +30,10 @@ pub struct StarkDomain { /// Offset of the low-degree extension domain. domain_offset: B, + + /// Extra information needed for constraint evaluation validation when zero-knowledge is enabled. + #[cfg(debug_assertions)] + zk_info: Option, } // STARK DOMAIN IMPLEMENTATION @@ -38,18 +42,30 @@ pub struct StarkDomain { impl StarkDomain { /// Returns a new STARK domain initialized with the provided `context`. 
pub fn new>(air: &A) -> Self { - let trace_twiddles = fft::get_twiddles(air.trace_length()); + let trace_twiddles = fft::get_twiddles(air.context().trace_length_ext()); // build constraint evaluation domain let domain_gen = B::get_root_of_unity(air.ce_domain_size().ilog2()); let ce_domain = get_power_series(domain_gen, air.ce_domain_size()); + #[cfg(debug_assertions)] + let zk_info = if air.is_zk() { + Some(ZkInfo { + original_trace_length: air.trace_length(), + degree_witness_randomizer: air.context().zk_witness_randomizer_degree(), + }) + } else { + None + }; + StarkDomain { trace_twiddles, ce_domain, ce_to_lde_blowup: air.lde_domain_size() / air.ce_domain_size(), ce_domain_mod_mask: air.ce_domain_size() - 1, domain_offset: air.domain_offset(), + #[cfg(debug_assertions)] + zk_info, } } @@ -72,6 +88,8 @@ impl StarkDomain { ce_to_lde_blowup: 1, ce_domain_mod_mask: ce_domain_size - 1, domain_offset, + #[cfg(debug_assertions)] + zk_info: None, } } @@ -152,4 +170,16 @@ impl StarkDomain { pub fn offset(&self) -> B { self.domain_offset } + + #[cfg(debug_assertions)] + pub(crate) fn zk_info(&self) -> Option { + self.zk_info + } +} + +#[cfg(debug_assertions)] +#[derive(Clone, Copy, Debug)] +pub struct ZkInfo { + pub(crate) original_trace_length: usize, + pub(crate) degree_witness_randomizer: usize, } diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 035d6c655..254364094 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -48,7 +48,7 @@ pub use air::{ EvaluationFrame, FieldExtension, LagrangeKernelRandElements, ProofOptions, TraceInfo, TransitionConstraintDegree, }; -use air::{AuxRandElements, GkrRandElements, PartitionOptions}; +use air::{AuxRandElements, GkrRandElements, PartitionOptions, ZkParameters}; pub use crypto; use crypto::{ElementHasher, RandomCoin, VectorCommitment}; use fri::FriProver; @@ -58,6 +58,8 @@ use math::{ fields::{CubeExtension, QuadExtension}, ExtensibleField, FieldElement, StarkField, ToElements, }; +use rand::{RngCore, SeedableRng}; 
+use rand_chacha::ChaCha20Rng; use tracing::{event, info_span, instrument, Level}; pub use utils::{ iterators, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, @@ -183,6 +185,7 @@ pub trait Prover { main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) where E: FieldElement; @@ -296,21 +299,29 @@ pub trait Prover { ProverChannel::::new( &air, pub_inputs_elements, + air.context().zk_blowup_factor(), ); + let mut prng = ChaCha20Rng::from_entropy(); + let zk_parameters = air.context().zk_parameters(); // 1 ----- Commit to the execution trace -------------------------------------------------- // build computation domain; this is used later for polynomial evaluations let lde_domain_size = air.lde_domain_size(); - let trace_length = air.trace_length(); + let trace_length = air.context().trace_length_ext(); let domain = info_span!("build_domain", trace_length, lde_domain_size) .in_scope(|| StarkDomain::new(&air)); assert_eq!(domain.lde_domain_size(), lde_domain_size); assert_eq!(domain.trace_length(), trace_length); // commit to the main trace segment - let (mut trace_lde, mut trace_polys) = - maybe_await!(self.commit_to_main_trace_segment(&trace, &domain, &mut channel)); + let (mut trace_lde, mut trace_polys) = maybe_await!(self.commit_to_main_trace_segment( + &trace, + &domain, + zk_parameters, + &mut prng, + &mut channel + )); // build the auxiliary trace segment, and append the resulting segments to trace commitment // and trace polynomial table structs @@ -342,11 +353,11 @@ pub trait Prover { // extend the auxiliary trace segment and commit to the extended trace let span = info_span!("commit_to_aux_trace_segment").entered(); let (aux_segment_polys, aux_segment_commitment) = - trace_lde.set_aux_trace(&aux_trace, &domain); + trace_lde.set_aux_trace(&aux_trace, &domain, zk_parameters, &mut prng); // commit to the LDE of the extended auxiliary trace 
segment by writing its // commitment into the channel - channel.commit_trace(aux_segment_commitment); + channel.commit_trace(aux_segment_commitment, &mut prng); drop(span); aux_segment_polys @@ -391,7 +402,14 @@ pub trait Prover { // 3 ----- commit to constraint evaluations ----------------------------------------------- let (constraint_commitment, composition_poly) = maybe_await!(self - .commit_to_constraint_evaluations(&air, composition_poly_trace, &domain, &mut channel)); + .commit_to_constraint_evaluations( + &air, + composition_poly_trace, + &domain, + &mut channel, + zk_parameters, + &mut prng + )); // 4 ----- build DEEP composition polynomial ---------------------------------------------- let deep_composition_poly = { @@ -410,16 +428,17 @@ pub trait Prover { // g, where g is the generator of the trace domain. Additionally, if the Lagrange kernel // auxiliary column is present, we also evaluate that column over the points: z, z * g, // z * g^2, z * g^4, ..., z * g^(2^(v-1)), where v = log(trace_len). 
- let ood_trace_states = trace_polys.get_ood_frame(z); - channel.send_ood_trace_states(&ood_trace_states); + let ood_trace_states = + trace_polys.get_ood_frame(z, air.context().trace_info().length()); + channel.send_ood_trace_states(&ood_trace_states, &mut prng); - let ood_evaluations = composition_poly.evaluate_at(z); - channel.send_ood_constraint_evaluations(&ood_evaluations); + let ood_evaluations = composition_poly.evaluate_at(z, air.is_zk()); + channel.send_ood_constraint_evaluations(&ood_evaluations, &mut prng); // draw random coefficients to use during DEEP polynomial composition, and use them to // initialize the DEEP composition polynomial let deep_coefficients = channel.get_deep_composition_coeffs(); - let mut deep_composition_poly = DeepCompositionPoly::new(z, deep_coefficients); + let mut deep_composition_poly = DeepCompositionPoly::new(&air, z, deep_coefficients); // combine all trace polynomials together and merge them into the DEEP composition // polynomial @@ -437,7 +456,7 @@ pub trait Prover { // make sure the degree of the DEEP composition polynomial is equal to trace polynomial // degree minus 1. 
- assert_eq!(trace_length - 2, deep_composition_poly.degree()); + assert_eq!(air.context().trace_length_ext() - 2, deep_composition_poly.degree()); // 5 ----- evaluate DEEP composition polynomial over LDE domain --------------------------- let deep_evaluations = { @@ -445,7 +464,10 @@ pub trait Prover { let deep_evaluations = deep_composition_poly.evaluate(&domain); // we check the following condition in debug mode only because infer_degree is an // expensive operation - debug_assert_eq!(trace_length - 2, infer_degree(&deep_evaluations, domain.offset())); + debug_assert_eq!( + air.context().trace_length_ext() - 2, + infer_degree(&deep_evaluations, domain.offset()) + ); drop(span); deep_evaluations @@ -456,7 +478,7 @@ pub trait Prover { let num_layers = fri_options.num_fri_layers(lde_domain_size); let mut fri_prover = FriProver::<_, _, _, Self::VC>::new(fri_options); info_span!("compute_fri_layers", num_layers) - .in_scope(|| fri_prover.build_layers(&mut channel, deep_evaluations)); + .in_scope(|| fri_prover.build_layers(&mut channel, deep_evaluations, &mut prng)); // 7 ----- determine query positions ------------------------------------------------------ let query_positions = { @@ -518,14 +540,17 @@ pub trait Prover { /// The commitment is computed by building a vector containing the hashes of each row in /// the evaluation matrix, and then building vector commitment of the resulting vector. 
#[maybe_async] - fn build_constraint_commitment( + fn build_constraint_commitment( &self, composition_poly_trace: CompositionPolyTrace, num_constraint_composition_columns: usize, domain: &StarkDomain, + zk_parameters: Option, + prng: &mut R, ) -> (ConstraintCommitment, CompositionPoly) where E: FieldElement, + R: RngCore, { // first, build constraint composition polynomial from its trace as follows: // - interpolate the trace into a polynomial in coefficient form @@ -536,9 +561,18 @@ pub trait Prover { num_columns = num_constraint_composition_columns ) .in_scope(|| { - CompositionPoly::new(composition_poly_trace, domain, num_constraint_composition_columns) + CompositionPoly::new( + composition_poly_trace, + domain, + num_constraint_composition_columns, + zk_parameters, + prng, + ) }); - assert_eq!(composition_poly.num_columns(), num_constraint_composition_columns); + assert_eq!( + composition_poly.num_columns(), + num_constraint_composition_columns + zk_parameters.is_some() as usize + ); assert_eq!(composition_poly.column_degree(), domain.trace_length() - 1); // then, evaluate composition polynomial columns over the LDE domain @@ -546,7 +580,10 @@ pub trait Prover { let composed_evaluations = info_span!("evaluate_composition_poly_columns").in_scope(|| { RowMatrix::evaluate_polys_over::(composition_poly.data(), domain) }); - assert_eq!(composed_evaluations.num_cols(), num_constraint_composition_columns); + assert_eq!( + composed_evaluations.num_cols(), + num_constraint_composition_columns + zk_parameters.is_some() as usize + ); assert_eq!(composed_evaluations.num_rows(), domain_size); // finally, build constraint evaluation commitment @@ -569,14 +606,17 @@ pub trait Prover { #[doc(hidden)] #[instrument(skip_all)] #[maybe_async] - fn commit_to_main_trace_segment( + fn commit_to_main_trace_segment( &self, trace: &Self::Trace, domain: &StarkDomain, + zk_parameters: Option, + prng: &mut R, channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, 
Self::VC>, ) -> (Self::TraceLde, TracePolyTable) where E: FieldElement, + R: RngCore, { // extend the main execution trace and commit to the extended trace let (trace_lde, trace_polys) = maybe_await!(self.new_trace_lde( @@ -584,6 +624,7 @@ pub trait Prover { trace.main_segment(), domain, self.options().partition_options(), + zk_parameters )); // get the commitment to the main trace segment LDE @@ -591,7 +632,7 @@ pub trait Prover { // commit to the LDE of the main trace by writing the the commitment string into // the channel - channel.commit_trace(main_trace_commitment); + channel.commit_trace(main_trace_commitment, prng); (trace_lde, trace_polys) } @@ -599,28 +640,33 @@ pub trait Prover { #[doc(hidden)] #[instrument(skip_all)] #[maybe_async] - fn commit_to_constraint_evaluations( + fn commit_to_constraint_evaluations( &self, air: &Self::Air, composition_poly_trace: CompositionPolyTrace, domain: &StarkDomain, channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, Self::VC>, + zk_parameters: Option, + prng: &mut R, ) -> (ConstraintCommitment, CompositionPoly) where E: FieldElement, + R: RngCore, { // first, build a commitment to the evaluations of the constraint composition polynomial // columns let (constraint_commitment, composition_poly) = maybe_await!(self - .build_constraint_commitment::( + .build_constraint_commitment::( composition_poly_trace, air.context().num_constraint_composition_columns(), domain, + zk_parameters, + prng )); // then, commit to the evaluations of constraints by writing the commitment string of // the constraint commitment into the channel - channel.commit_constraints(constraint_commitment.commitment()); + channel.commit_constraints(constraint_commitment.commitment(), prng); (constraint_commitment, composition_poly) } diff --git a/prover/src/matrix/col_matrix.rs b/prover/src/matrix/col_matrix.rs index 8872cca71..62a953878 100644 --- a/prover/src/matrix/col_matrix.rs +++ b/prover/src/matrix/col_matrix.rs @@ -8,6 +8,7 
@@ use core::{iter::FusedIterator, slice}; use crypto::{ElementHasher, VectorCommitment}; use math::{fft, polynom, FieldElement}; +use rand::{Rng, RngCore}; #[cfg(feature = "concurrent")] use utils::iterators::*; use utils::{batch_iter_mut, iter, iter_mut, uninit_vector}; @@ -242,11 +243,13 @@ impl ColMatrix { } /// Evaluates polynomials contained in the columns of this matrix at a single point `x`. - pub fn evaluate_columns_at(&self, x: F) -> Vec + pub fn evaluate_columns_at(&self, x: F, skip_last: bool) -> Vec where F: FieldElement + From, { - iter!(self.columns).map(|p| polynom::eval(p, x)).collect() + iter!(&self.columns[..self.columns.len() - skip_last as usize]) + .map(|p| polynom::eval(p, x)) + .collect() } // COMMITMENTS @@ -294,6 +297,49 @@ impl ColMatrix { pub fn into_columns(self) -> Vec> { self.columns } + + /// Randomizes the trace polynomials when zero-knowledge is enabled. + /// + /// Takes as input a factor that is a power of two which is used to determine the size (i.e., + /// the number of coefficients) of the randomized witness polynomial. + /// + /// The randomized witness polynomial has the form: + /// + /// ```text + /// \hat{w}(x) = w(x) + r(x) * Z_H(x) + /// ``` + /// where: + /// + /// 1. w(x) is the witness polynomial of degree trace length minus one. + /// 2. \hat{w}(x) is the randomized witness polynomial. + /// 3. r(x) is the randomizer polynomial and has degree `(zk_blowup - 1) * n`. + /// 4. Z_H(x) = (x^n - 1). 
+ pub(crate) fn randomize(&self, zk_blowup: usize, prng: &mut R) -> Self { + let cur_len = self.num_rows(); + let extended_len = zk_blowup * cur_len; + let pad_len = extended_len - cur_len; + + let randomized_cols: Vec> = self + .columns() + .map(|col| { + let mut added = vec![E::ZERO; pad_len]; + for a in added.iter_mut() { + let bytes = prng.gen::<[u8; 32]>(); + *a = E::from_random_bytes(&bytes[..E::VALUE_SIZE]) + .expect("failed to generate randomness"); + } + + let mut res_col = col.to_vec(); + res_col.extend_from_slice(&added); + for i in 0..pad_len { + res_col[i] -= added[i] + } + res_col + }) + .collect(); + + Self { columns: randomized_cols } + } } // COLUMN ITERATOR diff --git a/prover/src/tests/mod.rs b/prover/src/tests/mod.rs index 6b44fa0e9..a4230e3d1 100644 --- a/prover/src/tests/mod.rs +++ b/prover/src/tests/mod.rs @@ -44,7 +44,7 @@ impl MockAir { Self::new( TraceInfo::new(4, trace_length), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ) } @@ -55,7 +55,7 @@ impl MockAir { let mut result = Self::new( TraceInfo::new(4, trace_length), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ); result.periodic_columns = column_values; result @@ -65,7 +65,7 @@ impl MockAir { let mut result = Self::new( TraceInfo::new(4, trace_length), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ); result.assertions = assertions; result @@ -116,7 +116,7 @@ fn build_context( blowup_factor: usize, num_assertions: usize, ) -> AirContext { - let options = ProofOptions::new(32, blowup_factor, 0, FieldExtension::None, 4, 31); + let options = ProofOptions::new(32, blowup_factor, 0, FieldExtension::None, 4, 31, false); let t_degrees = vec![TransitionConstraintDegree::new(2)]; AirContext::new(trace_info, t_degrees, num_assertions, options) } 
diff --git a/prover/src/trace/poly_table.rs b/prover/src/trace/poly_table.rs index 87fec88d4..bca5c9bad 100644 --- a/prover/src/trace/poly_table.rs +++ b/prover/src/trace/poly_table.rs @@ -69,10 +69,10 @@ impl TracePolyTable { } /// Evaluates all trace polynomials (across all trace segments) at the specified point `x`. - pub fn evaluate_at(&self, x: E) -> Vec { - let mut result = self.main_trace_polys.evaluate_columns_at(x); + pub fn evaluate_at(&self, x: E, skip_last: bool) -> Vec { + let mut result = self.main_trace_polys.evaluate_columns_at(x, skip_last); for aux_polys in self.aux_trace_polys.iter() { - result.append(&mut aux_polys.evaluate_columns_at(x)); + result.append(&mut aux_polys.evaluate_columns_at(x, false)); } result } @@ -82,11 +82,11 @@ impl TracePolyTable { /// Additionally, if the Lagrange kernel auxiliary column is present, we also evaluate that /// column over the points: z, z * g, z * g^2, z * g^4, ..., z * g^(2^(v-1)), where v = /// log(trace_len). - pub fn get_ood_frame(&self, z: E) -> TraceOodFrame { - let log_trace_len = self.poly_size().ilog2(); + pub fn get_ood_frame(&self, z: E, trace_len: usize) -> TraceOodFrame { + let log_trace_len = trace_len.ilog2(); let g = E::from(E::BaseField::get_root_of_unity(log_trace_len)); - let current_row = self.evaluate_at(z); - let next_row = self.evaluate_at(z * g); + let current_row = self.evaluate_at(z, false); + let next_row = self.evaluate_at(z * g, false); let lagrange_kernel_frame = self.lagrange_kernel_poly.as_ref().map(|lagrange_kernel_col_poly| { diff --git a/prover/src/trace/trace_lde/default/mod.rs b/prover/src/trace/trace_lde/default/mod.rs index 26b5e3916..2f596aed7 100644 --- a/prover/src/trace/trace_lde/default/mod.rs +++ b/prover/src/trace/trace_lde/default/mod.rs @@ -6,8 +6,11 @@ use alloc::vec::Vec; use core::marker::PhantomData; -use air::{proof::Queries, LagrangeKernelEvaluationFrame, PartitionOptions, TraceInfo}; +use air::{ + proof::Queries, LagrangeKernelEvaluationFrame, 
PartitionOptions, TraceInfo, ZkParameters, +}; use crypto::VectorCommitment; +use rand::RngCore; use tracing::info_span; use super::{ @@ -60,18 +63,22 @@ where /// /// Returns a tuple containing a [TracePolyTable] with the trace polynomials for the main trace /// segment and the new [DefaultTraceLde]. - pub fn new( + pub fn new( trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut R, ) -> (Self, TracePolyTable) { // extend the main execution trace and build a commitment to the extended trace let (main_segment_lde, main_segment_vector_com, main_segment_polys) = - build_trace_commitment::( + build_trace_commitment::( main_trace, domain, partition_option.partition_size::(main_trace.num_cols()), + zk_parameters, + prng, ); let trace_poly_table = TracePolyTable::new(main_segment_polys); @@ -80,9 +87,9 @@ where main_segment_oracles: main_segment_vector_com, aux_segment_lde: None, aux_segment_oracles: None, - blowup: domain.trace_to_lde_blowup(), trace_info: trace_info.clone(), partition_option, + blowup: domain.lde_domain_size() / trace_info.length(), _h: PhantomData, }; @@ -141,17 +148,21 @@ where /// This function will panic if any of the following are true: /// - the number of rows in the provided `aux_trace` does not match the main trace. /// - the auxiliary trace has been previously set already. 
- fn set_aux_trace( + fn set_aux_trace( &mut self, aux_trace: &ColMatrix, domain: &StarkDomain, + zk_parameters: Option, + prng: &mut R, ) -> (ColMatrix, H::Digest) { // extend the auxiliary trace segment and build a commitment to the extended trace let (aux_segment_lde, aux_segment_oracles, aux_segment_polys) = - build_trace_commitment::( + build_trace_commitment::( aux_trace, domain, self.partition_option.partition_size::(aux_trace.num_cols()), + zk_parameters, + prng, ); // check errors @@ -181,10 +192,9 @@ where ) { // at the end of the trace, next state wraps around and we read the first step again let next_lde_step = (lde_step + self.blowup()) % self.trace_len(); - - // copy main trace segment values into the frame - frame.current_mut().copy_from_slice(self.main_segment_lde.row(lde_step)); - frame.next_mut().copy_from_slice(self.main_segment_lde.row(next_lde_step)); + let l = frame.current().len(); + frame.current_mut().copy_from_slice(&self.main_segment_lde.row(lde_step)[..l]); + frame.next_mut().copy_from_slice(&self.main_segment_lde.row(next_lde_step)[..l]); } /// Reads current and next rows from the auxiliary trace segment into the specified frame. @@ -260,7 +270,6 @@ where &self.trace_info } } - // HELPER FUNCTIONS // ================================================================================================ @@ -273,16 +282,19 @@ where /// /// The trace commitment is computed by building a vector containing the hashes of each row of /// the extended execution trace, then building a vector commitment to the resulting vector. 
-fn build_trace_commitment( +fn build_trace_commitment( trace: &ColMatrix, domain: &StarkDomain, partition_size: usize, + zk_parameters: Option, + prng: &mut R, ) -> (RowMatrix, V, ColMatrix) where E: FieldElement, F: FieldElement, H: ElementHasher, V: VectorCommitment, + R: RngCore, { // extend the execution trace let (trace_lde, trace_polys) = { @@ -292,22 +304,31 @@ where blowup = domain.trace_to_lde_blowup() ) .entered(); + let trace_polys = trace.interpolate_columns(); + + // when zero-knowledge is enabled, we randomize the witness polynomials by adding a random + // polynomial times the zerofier over the trace domain. The degree of the random polynomial + // is a function of the number of FRI queries. + let trace_polys = if let Some(parameters) = zk_parameters { + trace_polys.randomize(parameters.zk_blowup_witness(), prng) + } else { + trace_polys + }; + let trace_lde = RowMatrix::evaluate_polys_over::(&trace_polys, domain); drop(span); (trace_lde, trace_polys) }; - assert_eq!(trace_lde.num_cols(), trace.num_cols()); - assert_eq!(trace_polys.num_rows(), trace.num_rows()); assert_eq!(trace_lde.num_rows(), domain.lde_domain_size()); // build trace commitment let commitment_domain_size = trace_lde.num_rows(); let trace_vector_com = info_span!("compute_execution_trace_commitment", commitment_domain_size) .in_scope(|| trace_lde.commit_to_rows::(partition_size)); - assert_eq!(trace_vector_com.domain_len(), commitment_domain_size); + assert_eq!(trace_vector_com.get_domain_len(), commitment_domain_size); (trace_lde, trace_vector_com, trace_polys) } diff --git a/prover/src/trace/trace_lde/default/tests.rs b/prover/src/trace/trace_lde/default/tests.rs index 734accf68..f716fa0ab 100644 --- a/prover/src/trace/trace_lde/default/tests.rs +++ b/prover/src/trace/trace_lde/default/tests.rs @@ -11,6 +11,8 @@ use math::{ fields::f128::BaseElement, get_power_series, get_power_series_with_offset, polynom, FieldElement, StarkField, }; +use rand::SeedableRng; +use 
rand_chacha::ChaCha20Rng; use crate::{ tests::{build_fib_trace, MockAir}, @@ -27,6 +29,7 @@ fn extend_trace_table() { let trace = build_fib_trace(trace_length * 2); let domain = StarkDomain::new(&air); let partition_option = PartitionOptions::default(); + let mut prng = ChaCha20Rng::from_entropy(); // build the trace polynomials, extended trace, and commitment using the default TraceLde impl let (trace_lde, trace_polys) = DefaultTraceLde::>::new( @@ -34,6 +37,8 @@ fn extend_trace_table() { trace.main_segment(), &domain, partition_option, + None, + &mut prng, ); // check the width and length of the extended trace @@ -79,6 +84,7 @@ fn commit_trace_table() { let trace = build_fib_trace(trace_length * 2); let domain = StarkDomain::new(&air); let partition_option = PartitionOptions::default(); + let mut prng = ChaCha20Rng::from_entropy(); // build the trace polynomials, extended trace, and commitment using the default TraceLde impl let (trace_lde, _) = DefaultTraceLde::>::new( @@ -86,6 +92,8 @@ fn commit_trace_table() { trace.main_segment(), &domain, partition_option, + None, + &mut prng, ); // build commitment, using a Merkle tree, to the trace rows diff --git a/prover/src/trace/trace_lde/mod.rs b/prover/src/trace/trace_lde/mod.rs index dbce21491..6abcf8b96 100644 --- a/prover/src/trace/trace_lde/mod.rs +++ b/prover/src/trace/trace_lde/mod.rs @@ -5,8 +5,9 @@ use alloc::vec::Vec; -use air::{proof::Queries, LagrangeKernelEvaluationFrame, TraceInfo}; +use air::{proof::Queries, LagrangeKernelEvaluationFrame, TraceInfo, ZkParameters}; use crypto::{ElementHasher, Hasher, VectorCommitment}; +use rand::RngCore; use super::{ColMatrix, EvaluationFrame, FieldElement, TracePolyTable}; use crate::StarkDomain; @@ -45,10 +46,12 @@ pub trait TraceLde: Sync { /// This function is expected to panic if any of the following are true: /// - the number of rows in the provided `aux_trace` does not match the main trace. 
/// - this segment would exceed the number of segments specified by the trace layout. - fn set_aux_trace( + fn set_aux_trace( &mut self, aux_trace: &ColMatrix, domain: &StarkDomain, + zk_parameters: Option, + prng: &mut R, ) -> (ColMatrix, ::Digest); /// Reads current and next rows from the main trace segment into the specified frame. diff --git a/prover/src/trace/trace_table.rs b/prover/src/trace/trace_table.rs index a5c10069b..9ce826ffb 100644 --- a/prover/src/trace/trace_table.rs +++ b/prover/src/trace/trace_table.rs @@ -272,6 +272,11 @@ impl TraceTable { pub fn read_row_into(&self, step: usize, target: &mut [B]) { self.trace.read_row_into(step, target); } + + /// Returns the trace meta data. + pub fn meta_data(&self) -> &[u8] { + self.info.meta() + } } // TRACE TRAIT IMPLEMENTATION diff --git a/verifier/src/channel.rs b/verifier/src/channel.rs index 9d7dbc426..094bccd59 100644 --- a/verifier/src/channel.rs +++ b/verifier/src/channel.rs @@ -13,6 +13,7 @@ use air::{ use crypto::{ElementHasher, VectorCommitment}; use fri::VerifierChannel as FriVerifierChannel; use math::{FieldElement, StarkField}; +use utils::Deserializable; use crate::VerifierError; @@ -45,12 +46,14 @@ pub struct VerifierChannel< fri_layer_queries: Vec>, fri_remainder: Option>, fri_num_partitions: usize, + fri_salts: Vec>, // out-of-domain frame ood_trace_frame: Option>, ood_constraint_evaluations: Option>, // query proof-of-work pow_nonce: u64, gkr_proof: Option>, + salts: Vec>, } impl VerifierChannel @@ -76,6 +79,7 @@ where fri_proof, pow_nonce, gkr_proof, + salts, } = proof; // make sure AIR and proof base fields are the same @@ -103,6 +107,7 @@ where constraint_queries, air, num_unique_queries as usize, + air.is_zk(), )?; // --- parse FRI proofs ------------------------------------------------------------------- @@ -110,6 +115,10 @@ where let fri_remainder = fri_proof .parse_remainder() .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; + + let fri_salts = fri_proof + 
.parse_salts::() + .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; let (fri_layer_queries, fri_layer_proofs) = fri_proof .parse_layers::(lde_domain_size, fri_options.folding_factor()) .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; @@ -126,6 +135,9 @@ where partition_options.partition_size::(air.context().trace_info().aux_segment_width()); let partition_size_constraint = partition_options .partition_size::(air.context().num_constraint_composition_columns()); + // --- parse Fiat-Shamir salts ----------------------------------------------- + let salts: Vec> = Vec::read_from_bytes(&salts) + .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; Ok(VerifierChannel { // trace queries @@ -144,12 +156,14 @@ where fri_layer_queries, fri_remainder: Some(fri_remainder), fri_num_partitions, + fri_salts, // out-of-domain evaluation ood_trace_frame: Some(ood_trace_frame), ood_constraint_evaluations: Some(ood_constraint_evaluations), // query seed pow_nonce, gkr_proof, + salts, }) } @@ -194,6 +208,11 @@ where self.gkr_proof.as_ref() } + /// Returns the salts needed for Fiat-Shamir. + pub fn read_salts(&self) -> Vec> { + self.salts.clone() + } + /// Returns trace states at the specified positions of the LDE domain. This also checks if /// the trace states are valid against the trace commitment sent by the prover. 
/// @@ -298,6 +317,10 @@ where fn take_fri_remainder(&mut self) -> Vec { self.fri_remainder.take().expect("already read") } + + fn take_salt(&mut self) -> Option<::Digest> { + self.fri_salts.remove(0) + } } // TRACE QUERIES @@ -414,8 +437,11 @@ where queries: Queries, air: &A, num_queries: usize, + is_zk: bool, ) -> Result { - let constraint_frame_width = air.context().num_constraint_composition_columns(); + // In the case zero-knowledge is enabled, we parse the randomizer polynomial as well + let constraint_frame_width = + air.context().num_constraint_composition_columns() + is_zk as usize; let (query_proofs, evaluations) = queries .parse::(air.lde_domain_size(), num_queries, constraint_frame_width) diff --git a/verifier/src/composer.rs b/verifier/src/composer.rs index 5f10ef79f..4c6af9cbe 100644 --- a/verifier/src/composer.rs +++ b/verifier/src/composer.rs @@ -88,12 +88,13 @@ impl DeepComposer { let n = queried_main_trace_states.num_rows(); let mut result_num = Vec::::with_capacity(n); let mut result_den = Vec::::with_capacity(n); - for ((_, row), &x) in (0..n).zip(queried_main_trace_states.rows()).zip(&self.x_coordinates) { let mut t1_num = E::ZERO; let mut t2_num = E::ZERO; + // we iterate over all polynomials except for the randomizer when zero-knowledge + // is enabled for (i, &value) in row.iter().enumerate() { let value = E::from(value); // compute the numerator of T'_i(x) as (T_i(x) - T_i(z)), multiply it by a @@ -122,6 +123,8 @@ impl DeepComposer { // we define this offset here because composition of the main trace columns has // consumed some number of composition coefficients already. + // In the case zero-knowledge is enabled, the offset is adjusted so as to account for + // the randomizer polynomial. 
let cc_offset = queried_main_trace_states.num_columns(); // we treat the Lagrange column separately if present @@ -215,10 +218,12 @@ impl DeepComposer { &self, queried_evaluations: Table, ood_evaluations: Vec, + is_zk: bool, ) -> Vec { assert_eq!(queried_evaluations.num_rows(), self.x_coordinates.len()); let n = queried_evaluations.num_rows(); + let num_cols = ood_evaluations.len(); let mut result_num = Vec::::with_capacity(n); let mut result_den = Vec::::with_capacity(n); @@ -228,11 +233,17 @@ impl DeepComposer { // this way we can use batch inversion in the end. for (query_values, &x) in queried_evaluations.rows().zip(&self.x_coordinates) { let mut composition_num = E::ZERO; - for (i, &evaluation) in query_values.iter().enumerate() { + for (i, &evaluation) in query_values.iter().enumerate().take(num_cols) { // compute the numerator of H'_i(x) as (H_i(x) - H_i(z)), multiply it by a // composition coefficient, and add the result to the numerator aggregator composition_num += (evaluation - ood_evaluations[i]) * self.cc.constraints[i]; } + // In the case zero-knowledge is enabled, the randomizer is added to the DEEP composition + // polynomial. + if is_zk { + let randomizer_at_x = query_values[num_cols]; + composition_num += randomizer_at_x * (x - z); + } result_num.push(composition_num); result_den.push(x - z); } diff --git a/verifier/src/lib.rs b/verifier/src/lib.rs index 2c75ecd1d..9d54ac60c 100644 --- a/verifier/src/lib.rs +++ b/verifier/src/lib.rs @@ -170,8 +170,12 @@ where const AUX_TRACE_IDX: usize = 1; let trace_commitments = channel.read_trace_commitments(); + // read all the salts needed for Fiat-Shamir. These are random values sampled by the Prover + // and required for zero-knowledge, i.e., if zero-knowledge is not enabled then they are `None`.
+ let mut salts = channel.read_salts(); + // reseed the coin with the commitment to the main trace segment - public_coin.reseed(trace_commitments[MAIN_TRACE_IDX]); + public_coin.reseed_with_salt(trace_commitments[MAIN_TRACE_IDX], salts.remove(0)); // process auxiliary trace segments (if any), to build a set of random elements for each segment let aux_trace_rand_elements = if air.trace_info().is_multi_segment() { @@ -193,7 +197,7 @@ where "failed to generate the random elements needed to build the auxiliary trace", ); - public_coin.reseed(trace_commitments[AUX_TRACE_IDX]); + public_coin.reseed_with_salt(trace_commitments[AUX_TRACE_IDX], salts.remove(0)); Some(AuxRandElements::new_with_gkr(rand_elements, gkr_rand_elements)) } else { @@ -201,7 +205,7 @@ where "failed to generate the random elements needed to build the auxiliary trace", ); - public_coin.reseed(trace_commitments[AUX_TRACE_IDX]); + public_coin.reseed_with_salt(trace_commitments[AUX_TRACE_IDX], salts.remove(0)); Some(AuxRandElements::new(rand_elements)) } @@ -221,7 +225,7 @@ where // to the prover, and the prover evaluates trace and constraint composition polynomials at z, // and sends the results back to the verifier. 
let constraint_commitment = channel.read_constraint_commitment(); - public_coin.reseed(constraint_commitment); + public_coin.reseed_with_salt(constraint_commitment, salts.remove(0)); let z = public_coin.draw::().map_err(|_| VerifierError::RandomCoinError)?; // 3 ----- OOD consistency check -------------------------------------------------------------- @@ -244,14 +248,14 @@ where aux_trace_rand_elements.as_ref(), z, ); - public_coin.reseed(ood_trace_frame.hash::()); + public_coin.reseed_with_salt(ood_trace_frame.hash::(), salts.remove(0)); // read evaluations of composition polynomial columns sent by the prover, and reduce them into - // a single value by computing \sum_{i=0}^{m-1}(z^(i * l) * value_i), where value_i is the - // evaluation of the ith column polynomial H_i(X) at z, l is the trace length and m is + // a single value by computing \sum_{i=0}^{m-1}(z^(i) * value_i), where value_i is the + // evaluation of the ith column polynomial H_i(X) at z^m, and m is // the number of composition column polynomials. This computes H(z) (i.e. // the evaluation of the composition polynomial at z) using the fact that - // H(X) = \sum_{i=0}^{m-1} X^{i * l} H_i(X). + // H(X) = \sum_{i=0}^{m-1} X^{i} H_i(X^m). // Also, reseed the public coin with the OOD constraint evaluations received from the prover.
let ood_constraint_evaluations = channel.read_ood_constraint_evaluations(); let ood_constraint_evaluation_2 = @@ -259,9 +263,12 @@ where .iter() .enumerate() .fold(E::ZERO, |result, (i, &value)| { - result + z.exp_vartime(((i * (air.trace_length())) as u32).into()) * value + result + + z.exp_vartime( + ((i * air.context().num_coefficients_chunk_quotient()) as u32).into(), + ) * value }); - public_coin.reseed(H::hash_elements(&ood_constraint_evaluations)); + public_coin.reseed_with_salt(H::hash_elements(&ood_constraint_evaluations), salts.remove(0)); // finally, make sure the values are the same if ood_constraint_evaluation_1 != ood_constraint_evaluation_2 { @@ -329,8 +336,11 @@ where ood_aux_trace_frame, ood_lagrange_kernel_frame, ); - let c_composition = composer - .compose_constraint_evaluations(queried_constraint_evaluations, ood_constraint_evaluations); + let c_composition = composer.compose_constraint_evaluations( + queried_constraint_evaluations, + ood_constraint_evaluations, + air.is_zk(), + ); let deep_evaluations = composer.combine_compositions(t_composition, c_composition); // 7 ----- Verify low-degree proof ------------------------------------------------------------- diff --git a/winterfell/Cargo.toml b/winterfell/Cargo.toml index cdeeb59ec..1d3aaa04f 100644 --- a/winterfell/Cargo.toml +++ b/winterfell/Cargo.toml @@ -26,6 +26,8 @@ air = { version = "0.10", path = "../air", package = "winter-air", default-featu prover = { version = "0.10", path = "../prover", package = "winter-prover", default-features = false } verifier = { version = "0.10", path = "../verifier", package = "winter-verifier", default-features = false } +rand_chacha = { version = "0.3", default-features = false } + # Allow math in docs [package.metadata.docs.rs] rustdoc-args = ["--html-in-header", ".cargo/katex-header.html"] diff --git a/winterfell/src/lib.rs b/winterfell/src/lib.rs index e05d5ca5c..50b235aaf 100644 --- a/winterfell/src/lib.rs +++ b/winterfell/src/lib.rs @@ -262,6 +262,7 
@@ //! math::{fields::f128::BaseElement, FieldElement, ToElements}, //! matrix::ColMatrix, //! DefaultTraceLde, ProofOptions, Prover, StarkDomain, Trace, TracePolyTable, TraceTable, +//! ZkParameters, //! }; //! //! # use winterfell::{ @@ -372,8 +373,9 @@ //! main_trace: &ColMatrix, //! domain: &StarkDomain, //! partition_option: PartitionOptions, +//! is_zk: Option, //! ) -> (Self::TraceLde, TracePolyTable) { -//! DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) +//! DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, is_zk) //! } //! //! fn new_evaluator<'a, E: FieldElement>( @@ -403,6 +405,7 @@ //! # DefaultTraceLde, EvaluationFrame, TraceInfo, //! # TransitionConstraintDegree, TraceTable, FieldExtension, PartitionOptions, Prover, //! # ProofOptions, StarkDomain, Proof, Trace, TracePolyTable, +//! # ZkParameters //! # }; //! # //! # pub fn build_do_work_trace(start: BaseElement, n: usize) -> TraceTable { @@ -516,8 +519,9 @@ //! # main_trace: &ColMatrix, //! # domain: &StarkDomain, //! # partition_option: PartitionOptions, +//! # is_zk: Option, //! # ) -> (Self::TraceLde, TracePolyTable) { -//! # DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) +//! # DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, is_zk) //! # } //! # //! # fn new_evaluator<'a, E: FieldElement>( @@ -548,6 +552,7 @@ //! FieldExtension::None, //! 8, // FRI folding factor //! 31, // FRI max remainder polynomial degree +//! false, // Enable zero-knowledge //! ); //! //! // Instantiate the prover and generate the proof. 
@@ -596,7 +601,7 @@ #[cfg(test)] extern crate std; -pub use air::{AuxRandElements, GkrVerifier, PartitionOptions}; +pub use air::{AuxRandElements, GkrVerifier, PartitionOptions, ZkParameters}; pub use prover::{ crypto, iterators, math, matrix, Air, AirContext, Assertion, AuxTraceWithMetadata, BoundaryConstraint, BoundaryConstraintGroup, CompositionPolyTrace, diff --git a/winterfell/src/tests.rs b/winterfell/src/tests.rs index 3fb0c5197..0bcb25566 100644 --- a/winterfell/src/tests.rs +++ b/winterfell/src/tests.rs @@ -5,13 +5,14 @@ use std::{vec, vec::Vec}; -use air::{GkrRandElements, LagrangeKernelRandElements}; +use air::{GkrRandElements, LagrangeKernelRandElements, ZkParameters}; use crypto::MerkleTree; use prover::{ crypto::{hashers::Blake3_256, DefaultRandomCoin, RandomCoin}, math::{fields::f64::BaseElement, ExtensionOf, FieldElement}, matrix::ColMatrix, }; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use super::*; @@ -205,7 +206,7 @@ impl LagrangeComplexProver { fn new(aux_trace_width: usize) -> Self { Self { aux_trace_width, - options: ProofOptions::new(1, 2, 0, FieldExtension::None, 2, 1), + options: ProofOptions::new(1, 2, 0, FieldExtension::None, 2, 1, false), } } } @@ -235,11 +236,20 @@ impl Prover for LagrangeComplexProver { main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) where E: math::FieldElement, { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E>(