mastic: Add VDAF algorithm ID to domain separation
cjpatton committed Jan 10, 2025
1 parent fb75c23 commit 442c6c2
Showing 11 changed files with 889 additions and 871 deletions.
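This commit threads the VDAF algorithm ID into every XOF domain-separation tag used by Mastic and its Szk proof system: the 4-byte little-endian encoding of the ID is inserted between the dst_usage prefix and the application context, and the dst_usage prefix itself shrinks from 11 to 8 bytes. Below is a minimal sketch of the resulting XOF inputs, mirroring the dst_usage definition and the P::init call sites in the diff; domain_separation_parts is an illustrative helper, not part of the crate.

    // Sketch only: how the XOF customization inputs are assembled after this
    // commit. dst_usage matches src/vdaf/mastic.rs in this diff;
    // domain_separation_parts is a hypothetical helper shown for illustration.
    fn dst_usage(usage: u8) -> [u8; 8] {
        const VERSION: u8 = 0;
        [b'm', b'a', b's', b't', b'i', b'c', VERSION, usage]
    }

    fn domain_separation_parts(usage: u8, algorithm_id: u32, ctx: &[u8]) -> Vec<Vec<u8>> {
        vec![
            dst_usage(usage).to_vec(),           // "mastic" || version || usage
            algorithm_id.to_le_bytes().to_vec(), // new in this commit: the VDAF algorithm ID
            ctx.to_vec(),                        // application context supplied by the caller
        ]
    }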
48 changes: 29 additions & 19 deletions src/flp/szk.rs
@@ -303,13 +303,14 @@ where
{
/// The Type representing the specific FLP system used to prove validity of an input.
pub(crate) typ: T,
id: [u8; 4],
phantom: PhantomData<P>,
}

impl<T: Type> Szk<T, XofTurboShake128, 32> {
/// Create an instance of [`Szk`] using [`XofTurboShake128`].
pub fn new_turboshake128(typ: T) -> Self {
Szk::new(typ)
pub fn new_turboshake128(typ: T, algorithm_id: u32) -> Self {
Self::new(typ, algorithm_id)
}
}

@@ -320,9 +321,10 @@ where
{
/// Construct an instance of this shared ZK proof system with the underlying
/// FLP.
pub fn new(typ: T) -> Self {
pub fn new(typ: T, algorithm_id: u32) -> Self {
Self {
typ,
id: algorithm_id.to_le_bytes(),
phantom: PhantomData,
}
}
@@ -332,7 +334,7 @@ where
fn derive_prove_rand(&self, prove_rand_seed: &Seed<SEED_SIZE>, ctx: &[u8]) -> Vec<T::Field> {
P::seed_stream(
prove_rand_seed,
&[&mastic::dst_usage(mastic::USAGE_PROVE_RAND), ctx],
&[&mastic::dst_usage(mastic::USAGE_PROVE_RAND), &self.id, ctx],
&[],
)
.into_field_vec(self.typ.prove_rand_len())
@@ -347,7 +349,11 @@ where
) -> Result<Seed<SEED_SIZE>, SzkError> {
let mut xof = P::init(
aggregator_blind.as_ref(),
&[&mastic::dst_usage(mastic::USAGE_JOINT_RAND_PART), ctx],
&[
&mastic::dst_usage(mastic::USAGE_JOINT_RAND_PART),
&self.id,
ctx,
],
);
xof.update(nonce);
// Encode measurement_share (currently an array of field elements) into
@@ -369,7 +375,11 @@ where
) -> Seed<SEED_SIZE> {
let mut xof = P::init(
&[0; SEED_SIZE],
&[&mastic::dst_usage(mastic::USAGE_JOINT_RAND_SEED), ctx],
&[
&mastic::dst_usage(mastic::USAGE_JOINT_RAND_SEED),
&self.id,
ctx,
],
);
xof.update(&leader_joint_rand_part.0);
xof.update(&helper_joint_rand_part.0);
@@ -386,7 +396,7 @@ where
self.derive_joint_rand_seed(leader_joint_rand_part, helper_joint_rand_part, ctx);
let joint_rand = P::seed_stream(
&joint_rand_seed,
&[&mastic::dst_usage(mastic::USAGE_JOINT_RAND), ctx],
&[&mastic::dst_usage(mastic::USAGE_JOINT_RAND), &self.id, ctx],
&[],
)
.into_field_vec(self.typ.joint_rand_len());
@@ -401,7 +411,7 @@ where
) -> Vec<T::Field> {
Prng::from_seed_stream(P::seed_stream(
proof_share_seed,
&[&mastic::dst_usage(USAGE_PROOF_SHARE), ctx],
&[&mastic::dst_usage(USAGE_PROOF_SHARE), &self.id, ctx],
&[],
))
.take(self.typ.proof_len())
@@ -417,7 +427,7 @@ where
) -> Vec<T::Field> {
let mut xof = P::init(
verify_key,
&[&mastic::dst_usage(mastic::USAGE_QUERY_RAND), ctx],
&[&mastic::dst_usage(mastic::USAGE_QUERY_RAND), &self.id, ctx],
);
xof.update(nonce);
xof.update(&level.to_le_bytes());
@@ -701,7 +711,7 @@ mod tests {
let ctx = b"some application context";
let mut nonce = [0u8; 16];
let mut verify_key = [0u8; 32];
let szk_typ = Szk::new_turboshake128(typ.clone());
let szk_typ = Szk::new_turboshake128(typ.clone(), 0);
thread_rng().fill(&mut verify_key[..]);
thread_rng().fill(&mut nonce[..]);
let prove_rand_seed = Seed::generate().unwrap();
@@ -870,7 +880,7 @@ mod tests {
thread_rng().fill(&mut nonce[..]);
let sum = Sum::<Field128>::new(max_measurement).unwrap();
let encoded_measurement = sum.encode_measurement(&9).unwrap();
let szk_typ = Szk::new_turboshake128(sum);
let szk_typ = Szk::new_turboshake128(sum, 0);
let prove_rand_seed = Seed::generate().unwrap();
let helper_seed = Seed::generate().unwrap();
let leader_seed_opt = Some(Seed::generate().unwrap());
@@ -905,7 +915,7 @@ mod tests {
let sumvec =
SumVec::<Field128, ParallelSum<Field128, Mul<Field128>>>::new(5, 3, 3).unwrap();
let encoded_measurement = sumvec.encode_measurement(&vec![1, 16, 0]).unwrap();
let szk_typ = Szk::new_turboshake128(sumvec);
let szk_typ = Szk::new_turboshake128(sumvec, 0);
let prove_rand_seed = Seed::generate().unwrap();
let helper_seed = Seed::generate().unwrap();
let leader_seed_opt = Some(Seed::generate().unwrap());
@@ -939,7 +949,7 @@ mod tests {
thread_rng().fill(&mut nonce[..]);
let count = Count::<Field128>::new();
let encoded_measurement = count.encode_measurement(&true).unwrap();
let szk_typ = Szk::new_turboshake128(count);
let szk_typ = Szk::new_turboshake128(count, 0);
let prove_rand_seed = Seed::generate().unwrap();
let helper_seed = Seed::generate().unwrap();
let leader_seed_opt = Some(Seed::generate().unwrap());
@@ -974,7 +984,7 @@ mod tests {
thread_rng().fill(&mut nonce[..]);
let sum = Sum::<Field128>::new(max_measurement).unwrap();
let encoded_measurement = sum.encode_measurement(&9).unwrap();
let szk_typ = Szk::new_turboshake128(sum);
let szk_typ = Szk::new_turboshake128(sum, 0);
let prove_rand_seed = Seed::generate().unwrap();
let helper_seed = Seed::generate().unwrap();
let leader_seed_opt = None;
@@ -1015,7 +1025,7 @@ mod tests {
thread_rng().fill(&mut nonce[..]);
let sum = Sum::<Field128>::new(max_measurement).unwrap();
let encoded_measurement = sum.encode_measurement(&9).unwrap();
let szk_typ = Szk::new_turboshake128(sum);
let szk_typ = Szk::new_turboshake128(sum, 0);
let prove_rand_seed = Seed::generate().unwrap();
let helper_seed = Seed::generate().unwrap();
let leader_seed_opt = None;
@@ -1055,7 +1065,7 @@ mod tests {
thread_rng().fill(&mut nonce[..]);
let count = Count::<Field128>::new();
let encoded_measurement = count.encode_measurement(&true).unwrap();
let szk_typ = Szk::new_turboshake128(count);
let szk_typ = Szk::new_turboshake128(count, 0);
let prove_rand_seed = Seed::generate().unwrap();
let helper_seed = Seed::generate().unwrap();
let leader_seed_opt = None;
@@ -1095,7 +1105,7 @@ mod tests {
thread_rng().fill(&mut nonce[..]);
let count = Count::<Field128>::new();
let encoded_measurement = count.encode_measurement(&true).unwrap();
let szk_typ = Szk::new_turboshake128(count);
let szk_typ = Szk::new_turboshake128(count, 0);
let prove_rand_seed = Seed::generate().unwrap();
let helper_seed = Seed::generate().unwrap();
let leader_seed_opt = None;
@@ -1136,7 +1146,7 @@ mod tests {
let sumvec =
SumVec::<Field128, ParallelSum<Field128, Mul<Field128>>>::new(5, 3, 3).unwrap();
let encoded_measurement = sumvec.encode_measurement(&vec![1, 16, 0]).unwrap();
let szk_typ = Szk::new_turboshake128(sumvec);
let szk_typ = Szk::new_turboshake128(sumvec, 0);
let prove_rand_seed = Seed::generate().unwrap();
let helper_seed = Seed::generate().unwrap();
let leader_seed_opt = Some(Seed::generate().unwrap());
@@ -1177,7 +1187,7 @@ mod tests {
let sumvec =
SumVec::<Field128, ParallelSum<Field128, Mul<Field128>>>::new(5, 3, 3).unwrap();
let encoded_measurement = sumvec.encode_measurement(&vec![1, 16, 0]).unwrap();
let szk_typ = Szk::new_turboshake128(sumvec);
let szk_typ = Szk::new_turboshake128(sumvec, 0);
let prove_rand_seed = Seed::generate().unwrap();
let helper_seed = Seed::generate().unwrap();
let leader_seed_opt = Some(Seed::generate().unwrap());
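For callers, both Szk constructors now take the algorithm ID up front; the updated tests in this file simply pass 0. An illustrative call, reusing the Count and Field128 types already imported by these tests:

    // Illustrative only; mirrors the updated test code above.
    let algorithm_id: u32 = 0; // the tests in this diff use a placeholder ID of 0
    let szk = Szk::new_turboshake128(Count::<Field128>::new(), algorithm_id);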
48 changes: 28 additions & 20 deletions src/vdaf/mastic.rs
@@ -41,8 +41,8 @@ const NONCE_SIZE: usize = 16;
// dst(b'', USAGE_ONEHOT_PROOF_INIT),
// b'').next(PROOF_SIZE)
pub(crate) const ONEHOT_PROOF_INIT: [u8; 32] = [
186, 76, 128, 104, 116, 50, 149, 133, 2, 164, 82, 118, 128, 155, 163, 239, 117, 95, 162, 196,
173, 31, 244, 180, 171, 86, 176, 209, 12, 221, 28, 204,
253, 211, 45, 179, 139, 135, 183, 67, 202, 144, 13, 205, 241, 39, 165, 73, 232, 54, 57, 193,
106, 154, 133, 22, 15, 194, 223, 162, 79, 108, 60, 133,
];

pub(crate) const USAGE_PROVE_RAND: u8 = 0;
@@ -58,8 +58,9 @@ pub(crate) const USAGE_EXTEND: u8 = 10;
pub(crate) const USAGE_CONVERT: u8 = 11;
pub(crate) const USAGE_PAYLOAD_CHECK: u8 = 12;

pub(crate) fn dst_usage(usage: u8) -> [u8; 11] {
[b'm', b'a', b's', b't', b'i', b'c', 0, 0, 0, 0, usage]
pub(crate) fn dst_usage(usage: u8) -> [u8; 8] {
const VERSION: u8 = 0;
[b'm', b'a', b's', b't', b'i', b'c', VERSION, usage]
}

/// The main struct implementing the Mastic VDAF.
@@ -71,7 +72,7 @@ where
T: Type,
P: Xof<SEED_SIZE>,
{
algorithm_id: u32,
id: [u8; 4],
pub(crate) szk: Szk<T, P, SEED_SIZE>,
pub(crate) vidpf: Vidpf<VidpfWeight<T::Field>>,
/// The length of the private attribute associated with any input.
@@ -86,9 +87,9 @@ where
/// Creates a new instance of Mastic, with a specific attribute length and weight type.
pub fn new(algorithm_id: u32, typ: T, bits: usize) -> Result<Self, VdafError> {
let vidpf = Vidpf::new(bits, typ.input_len() + 1)?;
let szk = Szk::new(typ);
let szk = Szk::new(typ, algorithm_id);
Ok(Self {
algorithm_id,
id: algorithm_id.to_le_bytes(),
szk,
vidpf,
bits,
@@ -290,7 +291,7 @@ where
type AggregateShare = MasticAggregateShare<T::Field>;

fn algorithm_id(&self) -> u32 {
self.algorithm_id
u32::from_le_bytes(self.id)
}

fn num_aggregators(&self) -> usize {
@@ -360,6 +361,16 @@ where
};
Ok((public_share, vec![leader_share, helper_share]))
}

fn hash_proof(&self, mut proof: VidpfProof, ctx: &[u8]) -> VidpfProof {
let mut xof = XofTurboShake128::from_seed_slice(
&[],
&[&dst_usage(USAGE_ONEHOT_PROOF_HASH), &self.id, ctx],
);
xof.update(&proof);
xof.into_seed_stream().fill_bytes(&mut proof);
proof
}
}

impl<T, P, const SEED_SIZE: usize> Client<16> for Mastic<T, P, SEED_SIZE>
@@ -591,8 +602,10 @@ where

// Onehot and payload checks
let (payload_check, onehot_proof) = {
let mut payload_check_xof =
P::init(&[0; SEED_SIZE], &[&dst_usage(USAGE_PAYLOAD_CHECK), ctx]);
let mut payload_check_xof = P::init(
&[0; SEED_SIZE],
&[&dst_usage(USAGE_PAYLOAD_CHECK), &self.id, ctx],
);
let mut payload_check_buf = Vec::with_capacity(T::Field::ENCODED_SIZE);
let mut onehot_proof = ONEHOT_PROOF_INIT;

@@ -604,7 +617,7 @@ where
// Update onehot proof.
onehot_proof = xor_proof(
onehot_proof,
&hash_proof(xor_proof(onehot_proof, &node.value.state.node_proof), ctx),
&self.hash_proof(xor_proof(onehot_proof, &node.value.state.node_proof), ctx),
);

// Update payload check.
@@ -645,7 +658,10 @@ where
};

let eval_proof = {
let mut eval_proof_xof = P::init(&[0; SEED_SIZE], &[&dst_usage(USAGE_EVAL_PROOF), ctx]);
let mut eval_proof_xof = P::init(
&[0; SEED_SIZE],
&[&dst_usage(USAGE_EVAL_PROOF), &self.id, ctx],
);
eval_proof_xof.update(&onehot_proof);
eval_proof_xof.update(&counter_check);
eval_proof_xof.update(&payload_check);
@@ -831,14 +847,6 @@ where
}
}

fn hash_proof(mut proof: VidpfProof, ctx: &[u8]) -> VidpfProof {
let mut xof =
XofTurboShake128::from_seed_slice(&[], &[&dst_usage(USAGE_ONEHOT_PROOF_HASH), ctx]);
xof.update(&proof);
xof.into_seed_stream().fill_bytes(&mut proof);
proof
}

#[cfg(test)]
mod tests {
use super::*;
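The ONEHOT_PROOF_INIT constant above was regenerated as part of this change, presumably because the dst_usage encoding it is derived from changed from 11 to 8 bytes. A hedged sketch of how it might be recomputed, translating the Python-style comment above the constant into the XofTurboShake128 calls used elsewhere in this file; the exact argument composition (in particular whether an empty context string is appended) is an assumption, not the shipped code.

    // Assumed regeneration sketch for ONEHOT_PROOF_INIT; not part of the commit.
    // Follows the comment `XofTurboShake128(b'', dst(b'', USAGE_ONEHOT_PROOF_INIT),
    // b'').next(PROOF_SIZE)`, with PROOF_SIZE taken to be 32.
    fn regenerate_onehot_proof_init() -> [u8; 32] {
        let xof = XofTurboShake128::from_seed_slice(
            &[],
            &[&dst_usage(USAGE_ONEHOT_PROOF_INIT), b""],
        );
        let mut out = [0u8; 32];
        xof.into_seed_stream().fill_bytes(&mut out);
        out
    }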
28 changes: 14 additions & 14 deletions src/vdaf/test_vec/mastic/04/MasticCount_0.json
@@ -5,14 +5,14 @@
1
],
"agg_shares": [
"b7f9921c929a6d97e6dcde35a90237a34ad73fa33cefc5223b1354b6afbebcad",
"4a066de36c6592681b2321ca55fdc85cb828c05cc2103addc7ecab494f414352"
"3e9d16bcf3364f0b855cecb7b6c54efdb5f87e0adad194cb9a4b489d3f726b60",
"c362e9430bc9b0f47ca31348483ab1024d0781f5242e6b3468b4b762bf8d949f"
],
"ctx": "736f6d65206170706c69636174696f6e",
"prep": [
{
"input_shares": [
"000102030405060708090a0b0c0d0e0f18e4527f6e16e586819caa63d4a3b11ca35ae9cba1bac0edd99ecf422a9233e9223000eb26fe45b8",
"000102030405060708090a0b0c0d0e0f2712998769f26b6277b32c9f3a40c040110fbc523feaddcea2f553d5770bb55232e052d8e61616f7",
"101112131415161718191a1b1c1d1e1f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f"
],
"measurement": [
@@ -25,28 +25,28 @@
"nonce": "000102030405060708090a0b0c0d0e0f",
"out_shares": [
[
"b7f9921c929a6d97",
"e6dcde35a90237a3",
"4ad73fa33cefc522",
"3b1354b6afbebcad"
"3e9d16bcf3364f0b",
"855cecb7b6c54efd",
"b5f87e0adad194cb",
"9a4b489d3f726b60"
],
[
"4a066de36c659268",
"1b2321ca55fdc85c",
"b828c05cc2103add",
"c7ecab494f414352"
"c362e9430bc9b0f4",
"7ca31348483ab102",
"4d0781f5242e6b34",
"68b4b762bf8d949f"
]
],
"prep_messages": [
""
],
"prep_shares": [
[
"8fbf7bd1e6d11830692bd828041433ff60905d220dd23f1e4d9809e3b38276a5ccfbe6874465df6b13c091772d6cb8ff1dc268ab0e2591b0def6e33ffdb5a834",
"8fbf7bd1e6d11830692bd828041433ff60905d220dd23f1e4d9809e3b38276a535041978ba9a2094ff3ae157f12d9afe90008d6ef8ae35298a5adc89a5f1fffa"
"fd29b942ce3020071ba652fa5597991fb6baa655eeb3dc6aced3b5fc4c8c0bd082518600b8bd84150c9df3154968eb00fd6949610bbaa838f528e845e9b134bf",
"fd29b942ce3020071ba652fa5597991fb6baa655eeb3dc6aced3b5fc4c8c0bd07fae79ff46427bea6814d366e7d3bd95781ac4b7ab90e6a99f6399ebb8914e6d"
]
],
"public_share": "0b40eac2eab18af3ac91184d5f466dc0fe1e9b00a994e6abffb9486a9d48a9623b3101fa6350e7a9831bfb7cf323506e335d928f520ed8fbfe0056b2dbdc41b30abc9bfeb20525cc0ce8771086a16c39a00aa20a65d67590d601d29a78e295828074afb0c0c610f3d0139e2c7e5f1762a72cc1bd0036307bce53dc15c49d419ac4",
"public_share": "0e30118b55bf77ff34817d80025c5c736b1ee2188425350e71cc1dc5a779de3e6ff0676312c362381937b2713cf2d65c6bfd5e3a8400fe1b3f71da0726d672f55f796f086f2d82ba5c736f5c7854b6fbe6605a571bdabcacce7acd94b9c87b314bead370e09dc18618bd5cc3c8825338a085d4790108cbeb9ecdb914466664cfa0",
"rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f"
}
],