Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove IOPTranscript #52

Merged
merged 3 commits into from
Jan 17, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
102 changes: 31 additions & 71 deletions poly-commit/src/linear_codes/mod.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,15 @@
use crate::utils::{inner_product, IOPTranscript, Matrix};
use crate::utils::{inner_product, Matrix};
use crate::{
Error, LabeledCommitment, LabeledPolynomial, PCCommitterKey, PCUniversalParams, PCVerifierKey,
PolynomialCommitment,
to_bytes, Error, LabeledCommitment, LabeledPolynomial, PCCommitterKey, PCUniversalParams,
PCVerifierKey, PolynomialCommitment,
};

use ark_crypto_primitives::crh::{CRHScheme, TwoToOneCRHScheme};
use ark_crypto_primitives::merkle_tree::MerkleTree;
use ark_crypto_primitives::{merkle_tree::Config, sponge::CryptographicSponge};
use ark_crypto_primitives::{
merkle_tree::Config,
sponge::{Absorb, CryptographicSponge},
};
use ark_ff::PrimeField;
use ark_poly::Polynomial;
use ark_std::borrow::Borrow;
Expand All @@ -30,7 +33,7 @@ use data_structures::*;

pub use data_structures::LinCodePCProof;

use utils::{calculate_t, get_indices_from_transcript};
use utils::{calculate_t, get_indices_from_sponge};

const FIELD_SIZE_ERROR: &str = "This field is not suitable for the proposed parameters";

Expand Down Expand Up @@ -154,7 +157,7 @@ where
impl<L, F, P, S, C, H> PolynomialCommitment<F, P, S> for LinearCodePCS<L, F, P, S, C, H>
where
L: LinearEncode<F, C, P, H>,
F: PrimeField,
F: PrimeField + Absorb,
P: Polynomial<F>,
S: CryptographicSponge,
C: Config + 'static,
Expand Down Expand Up @@ -268,15 +271,9 @@ where
ck.two_to_one_hash_param(),
)?;

// 3. Obtain the MT root and add it to the transcript.
// 3. Obtain the MT root
let root = col_tree.root();

let mut transcript: IOPTranscript<F> = IOPTranscript::new(b"transcript");

transcript
.append_serializable_element(b"root", &root)
.map_err(|_| Error::TranscriptError)?;

// 4. The commitment is just the root, but since each commitment could be to a differently-sized polynomial, we also add some metadata.
let commitment = LinCodePCCommitment {
metadata: Metadata {
Expand All @@ -302,7 +299,7 @@ where
_labeled_polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<F, P>>,
commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
point: &'a P::Point,
_sponge: &mut S,
sponge: &mut S,
states: impl IntoIterator<Item = &'a Self::CommitmentState>,
_rng: Option<&mut dyn RngCore>,
) -> Result<Self::Proof, Self::Error>
Expand All @@ -317,7 +314,6 @@ where
let commitment = labeled_commitment.commitment();
let n_rows = commitment.metadata.n_rows;
let n_cols = commitment.metadata.n_cols;
let root = &commitment.root;

// 1. Arrange the coefficients of the polynomial into a matrix,
// and apply encoding to get `ext_mat`.
Expand All @@ -339,37 +335,21 @@ where
// 3. Generate vector `b` to left-multiply the matrix.
let (_, b) = L::tensor(point, n_cols, n_rows);

let mut transcript = IOPTranscript::new(b"transcript");
transcript
.append_serializable_element(b"root", root)
.map_err(|_| Error::TranscriptError)?;
sponge.absorb(&to_bytes!(&commitment.root).map_err(|_| Error::TranscriptError)?);

// If we are checking well-formedness, we need to compute the well-formedness proof (which is just r.M) and absorb it into the sponge.
let well_formedness = if ck.check_well_formedness() {
let mut r = Vec::new();
for _ in 0..n_rows {
r.push(
transcript
.get_and_append_challenge(b"r")
.map_err(|_| Error::TranscriptError)?,
);
}
let r = sponge.squeeze_field_elements::<F>(n_rows);
let v = mat.row_mul(&r);

transcript
.append_serializable_element(b"v", &v)
.map_err(|_| Error::TranscriptError)?;
sponge.absorb(&v);
Some(v)
} else {
None
};

let point_vec = L::point_to_vec(point.clone());
for element in point_vec.iter() {
transcript
.append_serializable_element(b"point", element)
.map_err(|_| Error::TranscriptError)?;
}
sponge.absorb(&point_vec);

proof_array.push(LinCodePCProof {
// Compute the opening proof and absorb b.M into the sponge.
Expand All @@ -380,7 +360,7 @@ where
&mat,
&ext_mat,
&col_tree,
&mut transcript,
sponge,
)?,
well_formedness,
});
Expand All @@ -395,7 +375,7 @@ where
point: &'a P::Point,
values: impl IntoIterator<Item = F>,
proof_array: &Self::Proof,
_sponge: &mut S,
sponge: &mut S,
_rng: Option<&mut dyn RngCore>,
) -> Result<bool, Self::Error>
where
Expand All @@ -415,49 +395,31 @@ where
let root = &commitment.root;
let t = calculate_t::<F>(vk.sec_param(), vk.distance(), n_ext_cols)?;

let mut transcript = IOPTranscript::new(b"transcript");
transcript
.append_serializable_element(b"root", &commitment.root)
.map_err(|_| Error::TranscriptError)?;
sponge.absorb(&to_bytes!(&commitment.root).map_err(|_| Error::TranscriptError)?);

let out = if vk.check_well_formedness() {
if proof.well_formedness.is_none() {
return Err(Error::InvalidCommitment);
}
let tmp = &proof.well_formedness.as_ref();
let well_formedness = tmp.unwrap();
let mut r = Vec::with_capacity(n_rows);
for _ in 0..n_rows {
r.push(
transcript
.get_and_append_challenge(b"r")
.map_err(|_| Error::TranscriptError)?,
);
}
let v = tmp.unwrap();
let r = sponge.squeeze_field_elements::<F>(n_rows);
// Upon sending `v` to the Verifier, add it to the sponge. The claim is that v = r.M.
transcript
.append_serializable_element(b"v", well_formedness)
.map_err(|_| Error::TranscriptError)?;
sponge.absorb(&v);

(Some(well_formedness), Some(r))
(Some(v), Some(r))
} else {
(None, None)
};

// 1. Absorb the point and the received vector into the sponge.
// TODO Consider removing the evaluation point from the transcript.
let point_vec = L::point_to_vec(point.clone());
for element in point_vec.iter() {
transcript
.append_serializable_element(b"point", element)
.map_err(|_| Error::TranscriptError)?;
}
transcript
.append_serializable_element(b"v", &proof.opening.v)
.map_err(|_| Error::TranscriptError)?;
sponge.absorb(&point_vec);
sponge.absorb(&proof.opening.v);

// 2. Ask random oracle for the `t` indices where the checks happen.
let indices = get_indices_from_transcript::<F>(n_ext_cols, t, &mut transcript)?;
let indices = get_indices_from_sponge(n_ext_cols, t, sponge)?;

// 3. Hash the received columns into leaf hashes.
let col_hashes: Vec<C::Leaf> = proof
Expand Down Expand Up @@ -555,30 +517,28 @@ where
.map_err(|_| Error::HashingError)
}

fn generate_proof<F, C>(
fn generate_proof<F, C, S>(
sec_param: usize,
distance: (usize, usize),
b: &[F],
mat: &Matrix<F>,
ext_mat: &Matrix<F>,
col_tree: &MerkleTree<C>,
transcript: &mut IOPTranscript<F>,
sponge: &mut S,
) -> Result<LinCodePCProofSingle<F, C>, Error>
where
F: PrimeField,
F: PrimeField + Absorb,
C: Config,
S: CryptographicSponge,
{
let t = calculate_t::<F>(sec_param, distance, ext_mat.m)?;

// 1. left-multiply the matrix by `b`.
let v = mat.row_mul(b);

transcript
.append_serializable_element(b"v", &v)
.map_err(|_| Error::TranscriptError)?;
sponge.absorb(&v);

// 2. Generate t column indices to test the linear combination on.
let indices = get_indices_from_transcript(ext_mat.m, t, transcript)?;
let indices = get_indices_from_sponge(ext_mat.m, t, sponge)?;

// 3. Compute Merkle tree paths for the requested columns.
let mut queried_columns = Vec::with_capacity(t);
Expand Down
12 changes: 5 additions & 7 deletions poly-commit/src/linear_codes/utils.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use crate::utils::IOPTranscript;
use crate::{utils::ceil_div, Error};
use ark_crypto_primitives::sponge::CryptographicSponge;
use ark_ff::{Field, PrimeField};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::string::ToString;
Expand Down Expand Up @@ -111,18 +111,16 @@ pub(crate) fn get_num_bytes(n: usize) -> usize {

/// Generate `t` (not necessarily distinct) random points in `[0, n)`
/// using the current state of the `sponge`.
pub(crate) fn get_indices_from_transcript<F: PrimeField>(
pub(crate) fn get_indices_from_sponge<S: CryptographicSponge>(
n: usize,
t: usize,
transcript: &mut IOPTranscript<F>,
sponge: &mut S,
) -> Result<Vec<usize>, Error> {
let bytes_to_squeeze = get_num_bytes(n);
let mut indices = Vec::with_capacity(t);
for _ in 0..t {
let mut bytes: Vec<u8> = vec![0; bytes_to_squeeze];
transcript
.get_and_append_byte_challenge(b"i", &mut bytes)
.map_err(|_| Error::TranscriptError)?;
let bytes = sponge.squeeze_bytes(bytes_to_squeeze);
sponge.absorb(&bytes);

// get the usize from Vec<u8>:
let ind = bytes.iter().fold(0, |acc, &x| (acc << 8) + x as usize);
Expand Down
89 changes: 3 additions & 86 deletions poly-commit/src/utils.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
use core::marker::PhantomData;

#[cfg(not(feature = "std"))]
use num_traits::Float;

Expand All @@ -9,12 +7,9 @@ use rayon::{
prelude::IndexedParallelIterator,
};

use ark_ff::{Field, PrimeField};
use ark_ff::Field;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::vec::Vec;
use merlin::Transcript;

use crate::Error;

/// Takes as input a struct, and converts them to a series of bytes. All traits
/// that implement `CanonicalSerialize` can be automatically converted to bytes
Expand Down Expand Up @@ -160,86 +155,6 @@ pub(crate) fn inner_product<F: Field>(v1: &[F], v2: &[F]) -> F {
.sum()
}

/// The following struct is taken from jellyfish repository. Once they change
/// their dependency on `crypto-primitive`, we use their crate instead of
/// a copy-paste. We needed the newer `crypto-primitive` for serializing.
///
/// A Fiat-Shamir transcript wrapper around `merlin::Transcript`, typed over
/// the prime field `F` from which challenges are sampled.
#[derive(Clone)]
pub(crate) struct IOPTranscript<F: PrimeField> {
    // Underlying Merlin transcript accumulating all appended messages.
    transcript: Transcript,
    // True until the first message is appended; challenge generation is
    // rejected while the transcript is still empty.
    is_empty: bool,
    #[doc(hidden)]
    phantom: PhantomData<F>,
}

// TODO: merge this with jf_plonk::transcript
impl<F: PrimeField> IOPTranscript<F> {
    /// Create a new IOP transcript, domain-separated by `label`.
    pub(crate) fn new(label: &'static [u8]) -> Self {
        Self {
            transcript: Transcript::new(label),
            is_empty: true,
            phantom: PhantomData,
        }
    }

    /// Append a raw byte message to the transcript under the given label.
    pub(crate) fn append_message(&mut self, label: &'static [u8], msg: &[u8]) -> Result<(), Error> {
        self.transcript.append_message(label, msg);
        // Once anything has been absorbed, challenges may be squeezed.
        self.is_empty = false;
        Ok(())
    }

    /// Serialize `group_elem` with `CanonicalSerialize` and append the
    /// resulting bytes to the transcript under the given label.
    pub(crate) fn append_serializable_element<S: CanonicalSerialize>(
        &mut self,
        label: &'static [u8],
        group_elem: &S,
    ) -> Result<(), Error> {
        self.append_message(
            label,
            &to_bytes!(group_elem).map_err(|_| Error::TranscriptError)?,
        )
    }

    /// Generate the challenge from the current transcript
    /// and append it to the transcript.
    ///
    /// The output field element is statistical uniform as long
    /// as the field has a size less than 2^384.
    pub(crate) fn get_and_append_challenge(&mut self, label: &'static [u8]) -> Result<F, Error> {
        // we need to reject when transcript is empty
        if self.is_empty {
            return Err(Error::TranscriptError);
        }

        // 64 bytes (512 bits) of transcript output keep the reduction mod
        // the field order statistically close to uniform (fields < 2^384).
        let mut buf = [0u8; 64];
        self.transcript.challenge_bytes(label, &mut buf);
        let challenge = F::from_le_bytes_mod_order(&buf);
        // Append the sampled challenge back so later messages bind to it.
        self.append_serializable_element(label, &challenge)?;
        Ok(challenge)
    }

    /// Generate the challenge from the current transcript
    /// and append it to the transcript.
    ///
    /// Without exposing the internal field `transcript`,
    /// this is a wrapper around getting bytes as opposed to field elements.
    pub(crate) fn get_and_append_byte_challenge(
        &mut self,
        label: &'static [u8],
        dest: &mut [u8],
    ) -> Result<(), Error> {
        // we need to reject when transcript is empty
        if self.is_empty {
            return Err(Error::TranscriptError);
        }

        self.transcript.challenge_bytes(label, dest);
        // Append the challenge bytes back so later messages bind to them.
        self.append_message(label, dest)?;
        Ok(())
    }
}

#[inline]
#[cfg(test)]
pub(crate) fn to_field<F: Field>(v: Vec<u64>) -> Vec<F> {
Expand All @@ -249,6 +164,8 @@ pub(crate) fn to_field<F: Field>(v: Vec<u64>) -> Vec<F> {
// TODO: replace by https://github.com/arkworks-rs/crypto-primitives/issues/112.
#[cfg(test)]
use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
#[cfg(test)]
use ark_ff::PrimeField;

#[cfg(test)]
pub(crate) fn test_sponge<F: PrimeField>() -> PoseidonSponge<F> {
Expand Down
Loading