Upgrade ripp to arkworks 0.4 #50

Merged: 8 commits, Oct 27, 2023
26 changes: 13 additions & 13 deletions benches/Cargo.toml
@@ -7,19 +7,19 @@ edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dev-dependencies]
ark-ec = { version = "0.3", features = [ "parallel" ] }
ark-ff = { version = "0.3", features = [ "parallel" ] }
ark-poly = { version = "0.3", features = [ "parallel" ] }
ark-std = { version = "0.3", features = [ "parallel" ] }
ark-groth16 = { version = "0.3", features = [ "parallel", "r1cs" ] }
ark-crypto-primitives = { version = "0.3", features = [ "parallel", "r1cs" ] }
ark-bls12-381 = { version = "0.3", features = [ "curve" ] }
ark-ed-on-bls12-381 = "0.3"
ark-bls12-377 = { version = "0.3", features = [ "curve", "r1cs" ] }
ark-bw6-761 = "0.3"

ark-relations = "0.3"
ark-r1cs-std = "0.3"
ark-ec = { version = "0.4", features = [ "parallel" ] }
ark-ff = { version = "0.4", features = [ "parallel" ] }
ark-poly = { version = "0.4", features = [ "parallel" ] }
ark-std = { version = "0.4", features = [ "parallel" ] }
ark-groth16 = { version = "0.4", features = [ "parallel", "r1cs" ] }
ark-crypto-primitives = { version = "0.4", features = [ "parallel", "r1cs", "prf" ] }
ark-bls12-381 = { version = "0.4", features = [ "curve" ] }
ark-ed-on-bls12-381 = "0.4"
ark-bls12-377 = { version = "0.4", features = [ "curve", "r1cs" ] }
ark-bw6-761 = "0.4"

ark-relations = "0.4"
ark-r1cs-std = "0.4"

digest = "0.9"
blake2 = "0.9"
17 changes: 6 additions & 11 deletions benches/benches/gipa.rs
@@ -5,11 +5,9 @@ use ark_dh_commitments::{
pedersen::PedersenCommitment,
DoublyHomomorphicCommitment,
};
use ark_ec::PairingEngine;
use ark_ec::pairing::{Pairing, PairingOutput};
use ark_ff::UniformRand;
use ark_inner_products::{
ExtensionFieldElement, InnerProduct, MultiexponentiationInnerProduct, PairingInnerProduct,
};
use ark_inner_products::{InnerProduct, MultiexponentiationInnerProduct, PairingInnerProduct};
use ark_ip_proofs::gipa::GIPA;

use ark_std::rand::{rngs::StdRng, Rng, SeedableRng};
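The import change above is the core of the 0.4 migration that this PR repeats across every bench: the `PairingEngine` trait is now `ark_ec::pairing::Pairing`, its projective-group and scalar-field associated types are `G1`/`G2` and `ScalarField`, and the crate's old `ExtensionFieldElement` target-group wrapper is replaced by `ark_ec::pairing::PairingOutput`. A minimal sketch of the renamed paths (not part of the diff; `Bls12_381` is used only for illustration):

```rust
use ark_bls12_381::Bls12_381;
use ark_ec::pairing::{Pairing, PairingOutput};

// arkworks 0.3 -> 0.4 renames applied throughout this PR:
//   <E as PairingEngine>::G1Projective  ->  <E as Pairing>::G1
//   <E as PairingEngine>::Fr            ->  <E as Pairing>::ScalarField
//   ExtensionFieldElement<E>            ->  PairingOutput<E>
type G1 = <Bls12_381 as Pairing>::G1;
type Fr = <Bls12_381 as Pairing>::ScalarField;
type Gt = PairingOutput<Bls12_381>;
```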
@@ -70,7 +68,7 @@ fn main() {
const LEN: usize = 16;
type GC1 = AFGHOCommitmentG1<Bls12_381>;
type GC2 = AFGHOCommitmentG2<Bls12_381>;
type SC1 = PedersenCommitment<<Bls12_381 as PairingEngine>::G1Projective>;
type SC1 = PedersenCommitment<<Bls12_381 as Pairing>::G1>;
let mut rng = StdRng::seed_from_u64(0u64);

println!("Benchmarking GIPA with vector length: {}", LEN);
@@ -80,20 +78,17 @@
PairingInnerProduct<Bls12_381>,
GC1,
GC2,
IdentityCommitment<ExtensionFieldElement<Bls12_381>, <Bls12_381 as PairingEngine>::Fr>,
IdentityCommitment<PairingOutput<Bls12_381>, <Bls12_381 as Pairing>::ScalarField>,
Blake2b,
StdRng,
>(&mut rng, LEN);

println!("2) Multiexponentiation G1 inner product...");
bench_gipa::<
MultiexponentiationInnerProduct<<Bls12_381 as PairingEngine>::G1Projective>,
MultiexponentiationInnerProduct<<Bls12_381 as Pairing>::G1>,
GC1,
SC1,
IdentityCommitment<
<Bls12_381 as PairingEngine>::G1Projective,
<Bls12_381 as PairingEngine>::Fr,
>,
IdentityCommitment<<Bls12_381 as Pairing>::G1, <Bls12_381 as Pairing>::ScalarField>,
Blake2b,
StdRng,
>(&mut rng, LEN);
88 changes: 46 additions & 42 deletions benches/benches/groth16_aggregation/bench.rs
@@ -1,7 +1,4 @@
use ark_bls12_377::{
constraints::PairingVar as BLS12PairingVar, Bls12_377, Fr as BLS12Fr,
FrParameters as BLS12FrParameters,
};
use ark_bls12_377::{constraints::PairingVar as BLS12PairingVar, Bls12_377, Fr as BLS12Fr};
use ark_bw6_761::{Fr as BW6Fr, BW6_761};
use ark_crypto_primitives::{
prf::{
@@ -11,10 +8,11 @@ use ark_crypto_primitives::{
},
snark::*,
};
use ark_ec::{AffineCurve, PairingEngine, ProjectiveCurve};
use ark_ff::{
biginteger::BigInteger, FftParameters, One, PrimeField, ToConstraintField, UniformRand,
use ark_ec::{
pairing::{MillerLoopOutput, Pairing},
CurveGroup,
};
use ark_ff::{One, PrimeField, ToConstraintField, UniformRand};
use ark_groth16::{constraints::*, Groth16, PreparedVerifyingKey, Proof, VerifyingKey};
use ark_r1cs_std::prelude::*;
use ark_relations::r1cs::{ConstraintSynthesizer, ConstraintSystemRef, SynthesisError};
@@ -97,7 +95,7 @@ impl ConstraintSynthesizer<BW6Fr> for AggregateBlake2SCircuitVerificationCircuit
.iter()
.map(|bls_fr| {
bls_fr
.into_repr()
.into_bigint()
.as_ref()
.iter()
.map(|bls_fr_int| bls_fr_int.to_le_bytes().to_vec())
@@ -116,8 +114,7 @@
// Now split BW6-761 byte representation back to iterator over BLS12-377 field element byte representations
.iter()
.map(|h_as_bls_fr_bytes| {
let bls_field_element_size_in_bytes =
<BLS12FrParameters as FftParameters>::BigInt::NUM_LIMBS * 8;
let bls_field_element_size_in_bytes = (BLS12Fr::MODULUS_BIT_SIZE as usize + 7) / 8;
h_as_bls_fr_bytes
.chunks(bls_field_element_size_in_bytes)
.map(|bls_field_element_chunk| bls_field_element_chunk.to_vec())
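Two 0.4 changes drive this hunk and the one just above: `PrimeField::into_repr()` is renamed to `into_bigint()`, and the per-element byte length is now derived from `PrimeField::MODULUS_BIT_SIZE` rather than the removed `FftParameters`/`BigInt::NUM_LIMBS` route (both give 32 bytes for BLS12-377's scalar field). A hedged, self-contained sketch of those calls, assuming the same curve as the bench:

```rust
use ark_bls12_377::Fr as BLS12Fr;
use ark_ff::{PrimeField, UniformRand};

fn main() {
    let mut rng = ark_std::test_rng();
    let x = BLS12Fr::rand(&mut rng);

    // 0.3: x.into_repr().as_ref()  ->  0.4: x.into_bigint().as_ref() yields the u64 limbs.
    let le_bytes: Vec<u8> = x
        .into_bigint()
        .as_ref()
        .iter()
        .flat_map(|limb| limb.to_le_bytes())
        .collect();

    // 0.3 sized a BLS12-377 scalar as BigInt::NUM_LIMBS * 8 bytes; 0.4 derives the same
    // 32 bytes from the modulus bit size (253 bits for BLS12-377's Fr).
    let field_element_size = (BLS12Fr::MODULUS_BIT_SIZE as usize + 7) / 8;
    assert_eq!(field_element_size, 32);
    assert_eq!(le_bytes.len(), 32);
}
```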
@@ -182,7 +179,7 @@ impl ToConstraintField<BW6Fr> for AggregateBlake2SCircuitVerificationCircuitInpu
.iter()
.map(|bls_fr| {
bls_fr
.into_repr()
.into_bigint()
.as_ref()
.iter()
.map(|bls_fr_int| bls_fr_int.to_le_bytes().to_vec())
@@ -322,7 +319,7 @@ fn main() {
&hash_outputs
.iter()
.map(|h| h.to_field_elements())
.collect::<Option<Vec<Vec<<Bls12_377 as PairingEngine>::Fr>>>>()
.collect::<Option<Vec<Vec<<Bls12_377 as Pairing>::ScalarField>>>>()
.unwrap(),
&proofs,
)
@@ -381,7 +378,7 @@ fn main() {
&hash_outputs
.iter()
.map(|h| h.to_field_elements())
.collect::<Option<Vec<Vec<<Bls12_377 as PairingEngine>::Fr>>>>()
.collect::<Option<Vec<Vec<<Bls12_377 as Pairing>::ScalarField>>>>()
.unwrap(),
&aggregate_proof,
)
@@ -528,64 +525,71 @@ fn main() {
}
}

pub fn batch_verify_proof<E: PairingEngine>(
pub fn batch_verify_proof<E: Pairing>(
pvk: &PreparedVerifyingKey<E>,
public_inputs: &[Vec<E::Fr>],
public_inputs: &[Vec<E::ScalarField>],
proofs: &[Proof<E>],
) -> Result<bool, SynthesisError> {
let mut rng = StdRng::seed_from_u64(0u64);
let mut r_powers = Vec::with_capacity(proofs.len());
for _ in 0..proofs.len() {
let challenge: E::Fr = u128::rand(&mut rng).into();
let challenge: E::ScalarField = u128::rand(&mut rng).into();
r_powers.push(challenge);
}

let combined_inputs = public_inputs
.iter()
.zip(&r_powers)
.map(|(input, r)| {
let mut g_ic = pvk.vk.gamma_abc_g1[0].into_projective();
for (i, b) in input.iter().zip(pvk.vk.gamma_abc_g1.iter().skip(1)) {
g_ic += &b.mul(i.into_repr());
let mut g_ic: E::G1 = pvk.vk.gamma_abc_g1[0].into();
for (&i, &b) in input.iter().zip(pvk.vk.gamma_abc_g1.iter().skip(1)) {
g_ic += b * i;
}
g_ic.mul(r.into_repr())
g_ic * r
})
.sum::<E::G1Projective>()
.into_affine();
.sum::<E::G1>()
.into();

let combined_proof_a_s = proofs
.iter()
.zip(&r_powers)
.map(|(proof, r)| proof.a.mul(*r))
.map(|(proof, r)| proof.a * r)
.collect::<Vec<_>>();
let combined_proof_a_s = E::G1Projective::batch_normalization_into_affine(&combined_proof_a_s);
let ml_inputs = proofs
.iter()
.zip(&combined_proof_a_s)
.map(|(proof, a)| ((*a).into(), proof.b.into()))
let combined_proof_a_s = E::G1::normalize_batch(&combined_proof_a_s);
let combined_proof_a_s = combined_proof_a_s
.into_iter()
.map(E::G1Prepared::from)
.collect::<Vec<_>>();
let a_r_times_b = E::miller_loop(ml_inputs.iter());
let combined_proof_b_s = proofs
.into_iter()
.map(|proof| proof.b.into())
.collect::<Vec<E::G2Prepared>>();
let a_r_times_b = E::multi_miller_loop(combined_proof_a_s, combined_proof_b_s);

let combined_c_s = proofs
.iter()
.zip(&r_powers)
.map(|(proof, r)| proof.c.mul(*r))
.sum::<E::G1Projective>()
.map(|(proof, r)| proof.c * r)
.sum::<E::G1>()
.into_affine();

let sum_of_rs = (&r_powers).iter().copied().sum::<E::Fr>();
let combined_alpha = (-pvk.vk.alpha_g1.mul(sum_of_rs)).into_affine();
let qap = E::miller_loop(
let sum_of_rs = (&r_powers).iter().copied().sum::<E::ScalarField>();
let combined_alpha = (-(pvk.vk.alpha_g1 * sum_of_rs)).into_affine();
let qap = E::multi_miller_loop(
[
E::G1Prepared::from(combined_alpha),
combined_inputs.into(),
combined_c_s.into(),
],
[
(combined_alpha.into(), pvk.vk.beta_g2.into()),
(combined_inputs.into(), pvk.gamma_g2_neg_pc.clone()),
(combined_c_s.into(), pvk.delta_g2_neg_pc.clone()),
]
.iter(),
E::G2Prepared::from(pvk.vk.beta_g2),
pvk.gamma_g2_neg_pc.clone().into(),
pvk.delta_g2_neg_pc.clone().into(),
],
);

let test =
E::final_exponentiation(&(qap * &a_r_times_b)).ok_or(SynthesisError::UnexpectedIdentity)?;
let test = E::final_exponentiation(MillerLoopOutput(qap.0 * a_r_times_b.0))
.ok_or(SynthesisError::UnexpectedIdentity)?;

Ok(test == E::Fqk::one())
Ok(test.0 == E::TargetField::one())
}
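The rewritten `batch_verify_proof` above is mostly a mechanical port to the 0.4 pairing API: `miller_loop` over `(G1Prepared, G2Prepared)` pairs becomes `multi_miller_loop` over two parallel iterators, `batch_normalization_into_affine` becomes `CurveGroup::normalize_batch`, and `final_exponentiation` now consumes a `MillerLoopOutput` and returns an `Option<PairingOutput<E>>` instead of a raw `Fqk`. A hedged sketch of that surface on `Bls12_381` (the bench itself uses BLS12-377/BW6-761), checking a combined Miller loop plus final exponentiation against the one-shot `multi_pairing` helper:

```rust
use ark_bls12_381::{Bls12_381, G1Projective, G2Projective};
use ark_ec::{
    pairing::{MillerLoopOutput, Pairing},
    CurveGroup,
};
use ark_std::UniformRand;

fn main() {
    let mut rng = ark_std::test_rng();
    let g1: Vec<G1Projective> = (0..4).map(|_| G1Projective::rand(&mut rng)).collect();
    let g2: Vec<G2Projective> = (0..4).map(|_| G2Projective::rand(&mut rng)).collect();

    // 0.4: batch-normalize projective points, then feed them into a single Miller loop.
    let g1_affine = G1Projective::normalize_batch(&g1);
    let g2_affine = G2Projective::normalize_batch(&g2);
    let ml = Bls12_381::multi_miller_loop(g1_affine.clone(), g2_affine.clone());

    // final_exponentiation consumes a MillerLoopOutput and returns Option<PairingOutput>.
    let combined = Bls12_381::final_exponentiation(MillerLoopOutput(ml.0)).unwrap();

    // Sanity check against the one-shot product-of-pairings helper.
    assert_eq!(combined, Bls12_381::multi_pairing(g1_affine, g2_affine));
}
```

In the bench above, the two Miller-loop outputs are instead multiplied in the target field and re-wrapped with `MillerLoopOutput(...)` before the final exponentiation.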
16 changes: 7 additions & 9 deletions benches/benches/inner_products.rs
@@ -1,5 +1,5 @@
use ark_bls12_381::Bls12_381;
use ark_ec::PairingEngine;
use ark_ec::pairing::Pairing;
use ark_ff::UniformRand;
use ark_inner_products::{InnerProduct, MultiexponentiationInnerProduct, PairingInnerProduct};

@@ -33,14 +33,12 @@ fn main() {
bench_inner_product::<PairingInnerProduct<Bls12_381>, StdRng>(&mut rng, LEN);

println!("2) Multiexponentiation G1 inner product...");
bench_inner_product::<
MultiexponentiationInnerProduct<<Bls12_381 as PairingEngine>::G1Projective>,
StdRng,
>(&mut rng, LEN);
bench_inner_product::<MultiexponentiationInnerProduct<<Bls12_381 as Pairing>::G1>, StdRng>(
&mut rng, LEN,
);

println!("3) Multiexponentiation G2 inner product...");
bench_inner_product::<
MultiexponentiationInnerProduct<<Bls12_381 as PairingEngine>::G2Projective>,
StdRng,
>(&mut rng, LEN);
bench_inner_product::<MultiexponentiationInnerProduct<<Bls12_381 as Pairing>::G2>, StdRng>(
&mut rng, LEN,
);
}
10 changes: 5 additions & 5 deletions benches/benches/poly_commit.rs
@@ -1,12 +1,12 @@
use ark_bls12_381::Bls12_381;
use ark_ec::PairingEngine;
use ark_ec::pairing::Pairing;
use ark_ff::UniformRand;
use ark_ip_proofs::applications::poly_commit::{
transparent::UnivariatePolynomialCommitment as TransparentIPA,
UnivariatePolynomialCommitment as IPA, KZG,
};
use ark_poly::polynomial::{
univariate::DensePolynomial as UnivariatePolynomial, Polynomial, UVPolynomial,
univariate::DensePolynomial as UnivariatePolynomial, DenseUVPolynomial, Polynomial,
};

use ark_std::rand::{rngs::StdRng, SeedableRng};
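Beyond the usual `ScalarField` rename, the only substantive change in this file is that 0.3's `UVPolynomial` trait is now `DenseUVPolynomial` in `ark-poly` 0.4; `rand` and `evaluate` are unchanged. A small hedged sketch of the commit/evaluate inputs the bench builds under the 0.4 names (degree and curve chosen arbitrarily here):

```rust
use ark_bls12_381::Bls12_381;
use ark_ec::pairing::Pairing;
use ark_poly::polynomial::{
    univariate::DensePolynomial as UnivariatePolynomial, DenseUVPolynomial, Polynomial,
};
use ark_std::UniformRand;

fn main() {
    let mut rng = ark_std::test_rng();
    // 0.3's `UVPolynomial::rand` is reached through `DenseUVPolynomial` in 0.4.
    let polynomial =
        UnivariatePolynomial::<<Bls12_381 as Pairing>::ScalarField>::rand(16, &mut rng);
    let point = <Bls12_381 as Pairing>::ScalarField::rand(&mut rng);
    let eval = polynomial.evaluate(&point);
    println!("p(point) = {}", eval);
}
```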
@@ -67,7 +67,7 @@ fn main() {
csv_writer.flush().unwrap();
for i in 1..num_trials + 1 {
let polynomial = UnivariatePolynomial::rand(degree, &mut rng);
let point = <Bls12_381 as PairingEngine>::Fr::rand(&mut rng);
let point = <Bls12_381 as Pairing>::ScalarField::rand(&mut rng);
let eval = polynomial.evaluate(&point);

// Commit
@@ -139,7 +139,7 @@ fn main() {
csv_writer.flush().unwrap();
for i in 1..num_trials + 1 {
let polynomial = UnivariatePolynomial::rand(degree, &mut rng);
let point = <Bls12_381 as PairingEngine>::Fr::rand(&mut rng);
let point = <Bls12_381 as Pairing>::ScalarField::rand(&mut rng);
let eval = polynomial.evaluate(&point);

// Commit
@@ -213,7 +213,7 @@ fn main() {
csv_writer.flush().unwrap();
for i in 1..num_trials + 1 {
let polynomial = UnivariatePolynomial::rand(degree, &mut rng);
let point = <Bls12_381 as PairingEngine>::Fr::rand(&mut rng);
let point = <Bls12_381 as Pairing>::ScalarField::rand(&mut rng);
let eval = polynomial.evaluate(&point);

// Commit