chore: Update for latest Sphinx #298

Draft: wants to merge 2 commits into base branch main.

Showing changes from all commits.
13 changes: 10 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions Cargo.toml
@@ -32,8 +32,8 @@ p3-poseidon2 = { git = "https://github.com/argumentcomputer/Plonky3.git", branch = "sp1" }
 p3-symmetric = { git = "https://github.com/argumentcomputer/Plonky3.git", branch = "sp1" }
 p3-uni-stark = { git = "https://github.com/argumentcomputer/Plonky3.git", branch = "sp1" }
 p3-util = { git = "https://github.com/argumentcomputer/Plonky3.git", branch = "sp1" }
-sphinx-core = { git = "https://github.com/argumentcomputer/sphinx.git", branch = "dev"}
-sphinx-derive = { git = "https://github.com/argumentcomputer/sphinx.git", branch = "dev" }
+sphinx-core = { git = "https://github.com/argumentcomputer/sphinx.git", branch = "forward_ports_48"}
+sphinx-derive = { git = "https://github.com/argumentcomputer/sphinx.git", branch = "forward_ports_48" }
 anyhow = "1.0.72"
 ascent = { git = "https://github.com/argumentcomputer/ascent.git" }
 arc-swap = "1.7.1"
9 changes: 6 additions & 3 deletions benches/fib.rs
@@ -4,7 +4,7 @@ use p3_field::AbstractField;
 use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
 use sphinx_core::{
     air::MachineAir,
-    stark::{LocalProver, StarkGenericConfig, StarkMachine},
+    stark::{DefaultProver, MachineProver, StarkGenericConfig, StarkMachine},
     utils::{BabyBearPoseidon2, SphinxCoreOpts},
 };
 use std::time::Duration;
@@ -48,7 +48,7 @@ fn setup<C: Chipset<BabyBear>>(
     toplevel: &Toplevel<BabyBear, C, NoChip>,
 ) -> (
     List<BabyBear>,
-    FuncChip<'_, BabyBear, C, NoChip>,
+    FuncChip<BabyBear, C, NoChip>,
     QueryRecord<BabyBear>,
 ) {
     let code = build_lurk_expr(arg);
@@ -125,7 +125,10 @@ fn e2e(c: &mut Criterion) {
                 let mut challenger_p = machine.config().challenger();
                 let opts = SphinxCoreOpts::default();
                 let shard = Shard::new(&record);
-                machine.prove::<LocalProver<_, _>>(&pk, shard, &mut challenger_p, opts);
+                let prover = DefaultProver::new(machine);
+                prover
+                    .prove(&pk, vec![shard], &mut challenger_p, opts)
+                    .unwrap();
             },
             BatchSize::SmallInput,
         )
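The same prover migration repeats in all three benches (fib, lcs, sum): the `LocalProver` type parameter on `StarkMachine::prove` is gone, and a `DefaultProver` now owns the machine, takes a `Vec` of shards, and returns a `Result`. A condensed before/after of the call shape, using only names that appear in this diff (the bench setup around it is elided); `prove` appears to come from the newly imported `MachineProver` trait:

```rust
// Before: the machine proved a single shard directly.
//     machine.prove::<LocalProver<_, _>>(&pk, shard, &mut challenger_p, opts);

// After: construct a prover that owns the machine, pass all shards at once,
// and handle the Result explicitly.
let prover = DefaultProver::new(machine);
let _proof = prover
    .prove(&pk, vec![shard], &mut challenger_p, opts)
    .unwrap();
```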
9 changes: 6 additions & 3 deletions benches/lcs.rs
@@ -4,7 +4,7 @@ use p3_field::AbstractField;
 use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
 use sphinx_core::{
     air::MachineAir,
-    stark::{LocalProver, StarkGenericConfig, StarkMachine},
+    stark::{DefaultProver, MachineProver, StarkGenericConfig, StarkMachine},
     utils::{BabyBearPoseidon2, SphinxCoreOpts},
 };
 use std::time::Duration;
@@ -52,7 +52,7 @@ fn setup<'a, C: Chipset<BabyBear>>(
     toplevel: &'a Toplevel<BabyBear, C, NoChip>,
 ) -> (
     List<BabyBear>,
-    FuncChip<'a, BabyBear, C, NoChip>,
+    FuncChip<BabyBear, C, NoChip>,
     QueryRecord<BabyBear>,
 ) {
     let code = build_lurk_expr(a, b);
@@ -129,7 +129,10 @@ fn e2e(c: &mut Criterion) {
                 let mut challenger_p = machine.config().challenger();
                 let opts = SphinxCoreOpts::default();
                 let shard = Shard::new(&record);
-                machine.prove::<LocalProver<_, _>>(&pk, shard, &mut challenger_p, opts);
+                let prover = DefaultProver::new(machine);
+                prover
+                    .prove(&pk, vec![shard], &mut challenger_p, opts)
+                    .unwrap();
             },
             BatchSize::SmallInput,
         )
9 changes: 6 additions & 3 deletions benches/sum.rs
@@ -4,7 +4,7 @@ use p3_field::AbstractField;
 use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
 use sphinx_core::{
     air::MachineAir,
-    stark::{LocalProver, StarkGenericConfig, StarkMachine},
+    stark::{DefaultProver, MachineProver, StarkGenericConfig, StarkMachine},
     utils::{BabyBearPoseidon2, SphinxCoreOpts},
 };
 use std::time::Duration;
@@ -52,7 +52,7 @@ fn setup<C: Chipset<BabyBear>>(
     toplevel: &Toplevel<BabyBear, C, NoChip>,
 ) -> (
     List<BabyBear>,
-    FuncChip<'_, BabyBear, C, NoChip>,
+    FuncChip<BabyBear, C, NoChip>,
     QueryRecord<BabyBear>,
 ) {
     let code = build_lurk_expr(n);
@@ -130,7 +130,10 @@ fn e2e(c: &mut Criterion) {
                 let mut challenger_p = machine.config().challenger();
                 let opts = SphinxCoreOpts::default();
                 let shard = Shard::new(&record);
-                machine.prove::<LocalProver<_, _>>(&pk, shard, &mut challenger_p, opts);
+                let prover = DefaultProver::new(machine);
+                prover
+                    .prove(&pk, vec![shard], &mut challenger_p, opts)
+                    .unwrap();
             },
             BatchSize::SmallInput,
         )
8 changes: 3 additions & 5 deletions src/air/debug.rs
@@ -9,7 +9,6 @@ use p3_matrix::dense::{RowMajorMatrix, RowMajorMatrixView};
 use p3_matrix::stack::VerticalPair;
 use p3_matrix::Matrix;
 use sphinx_core::air::MachineAir;
-use sphinx_core::stark::MachineRecord;
 use std::collections::BTreeMap;
 
 type LocalRowView<'a, F> = VerticalPair<RowMajorMatrixView<'a, F>, RowMajorMatrixView<'a, F>>;
@@ -122,14 +121,13 @@ pub fn debug_chip_constraints_and_queries_with_sharding<
     C2: Chipset<F>,
 >(
     record: &QueryRecord<F>,
-    chips: &[LairChip<'_, F, C1, C2>],
+    chips: &[LairChip<F, C1, C2>],
     config: Option<ShardingConfig>,
 ) {
-    let full_shard = Shard::new(record);
     let shards = if let Some(config) = config {
-        full_shard.shard(&config)
+        config.shard(record)
     } else {
-        vec![full_shard]
+        vec![Shard::new(&record.clone())]
     };
 
     let lookup_queries: Vec<_> = shards
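Note the API inversion here: sharding used to be a method on the record-holding `Shard` (`full_shard.shard(&config)`), but it now lives on `ShardingConfig`, which splits the raw `QueryRecord` directly. A sketch of the two call shapes, assuming the types from `src/lair/execute.rs` below; incidentally, the `&record.clone()` in the fallback arm looks redundant, since the new `Shard::new` clones internally and `Shard::new(record)` would do:

```rust
// Before: materialize one full shard, then split it.
//     let full_shard = Shard::new(record);
//     let shards = full_shard.shard(&config);

// After: the config is the entry point for sharding.
let shards = match config {
    Some(config) => config.shard(record),
    None => vec![Shard::new(record)], // equivalent to the diff's &record.clone()
};
```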
4 changes: 2 additions & 2 deletions src/lair/air.rs
@@ -130,13 +130,13 @@ fn eval_depth<F: Field, AB>(
     out.extend(dep_depth.iter().cloned());
 }
 
-impl<'a, AB, C1: Chipset<AB::F>, C2: Chipset<AB::F>> Air<AB> for FuncChip<'a, AB::F, C1, C2>
+impl<AB, C1: Chipset<AB::F>, C2: Chipset<AB::F>> Air<AB> for FuncChip<AB::F, C1, C2>
 where
     AB: AirBuilder + LookupBuilder,
     <AB as AirBuilder>::Var: Debug,
 {
     fn eval(&self, builder: &mut AB) {
-        self.func.eval(builder, self.toplevel, self.layout_sizes)
+        self.func.eval(builder, &self.toplevel, self.layout_sizes)
     }
 }
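The dropped `'a` means `FuncChip` no longer borrows its `Toplevel`; the new `&self.toplevel` at the call site is the tell that the field became an owned (or reference-counted) value. A standalone sketch of the pattern with stand-in types (not the crate's real definitions):

```rust
struct Toplevel;

// Before: the chip borrowed the toplevel, threading a lifetime everywhere.
//     struct FuncChip<'a> { toplevel: &'a Toplevel, /* ... */ }
// After: the chip owns its toplevel outright.
struct FuncChip {
    toplevel: Toplevel,
}

fn eval(_toplevel: &Toplevel) {}

fn main() {
    let chip = FuncChip { toplevel: Toplevel };
    // The field is a value now, so the call site borrows it explicitly,
    // matching the `&self.toplevel` change in the diff.
    eval(&chip.toplevel);
}
```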
6 changes: 3 additions & 3 deletions src/lair/chipset.rs
@@ -6,7 +6,7 @@ use crate::air::builder::{LookupBuilder, Record, RequireRecord};
 
 use super::execute::QueryRecord;
 
-pub trait Chipset<F>: Sync {
+pub trait Chipset<F>: Send + Sync + 'static + Clone {
     fn input_size(&self) -> usize;
 
     fn output_size(&self) -> usize;
@@ -46,7 +46,7 @@ pub trait Chipset<F>: Sync {
     ) -> Vec<AB::Expr>;
 }
 
-impl<F, C1: Chipset<F>, C2: Chipset<F>> Chipset<F> for &Either<C1, C2> {
+impl<F, C1: Chipset<F>, C2: Chipset<F>> Chipset<F> for Either<C1, C2> {
     fn input_size(&self) -> usize {
         match self {
             Either::Left(c) => c.input_size(),
@@ -121,7 +121,7 @@ impl<F, C1: Chipset<F>, C2: Chipset<F>> Chipset<F> for &Either<C1, C2> {
     }
 }
 
-#[derive(Default)]
+#[derive(Clone, Default)]
 pub struct NoChip;
 
 impl<F> Chipset<F> for NoChip {
90 changes: 42 additions & 48 deletions src/lair/execute.rs
@@ -3,7 +3,7 @@ use hashbrown::HashMap;
 use itertools::Itertools;
 use p3_field::{AbstractField, PrimeField32};
 use rustc_hash::FxHashMap;
-use sphinx_core::stark::{Indexed, MachineRecord};
+use sphinx_core::{stark::MachineRecord, utils::SphinxCoreOpts};
 use std::ops::Range;
 
 use crate::{
@@ -75,32 +75,34 @@ pub struct QueryRecord<F: PrimeField32> {
 }
 
 #[derive(Default, Clone, Debug, Eq, PartialEq)]
-pub struct Shard<'a, F: PrimeField32> {
+pub struct Shard<F: PrimeField32> {
     pub(crate) index: u32,
     // TODO: remove this `Option` once Sphinx no longer requires `Default`
-    pub(crate) queries: Option<&'a QueryRecord<F>>,
+    pub(crate) queries: Option<QueryRecord<F>>,
     pub(crate) shard_config: ShardingConfig,
 }
 
-impl<'a, F: PrimeField32> Shard<'a, F> {
+impl<F: PrimeField32> Shard<F> {
     /// Creates a new initial shard from the given `QueryRecord`.
     ///
     /// # Note
     ///
     /// Make sure to call `.shard()` on a `Shard` created by `new` when generating
     /// the traces, otherwise you will only get the first shard's trace.
     #[inline]
-    pub fn new(queries: &'a QueryRecord<F>) -> Self {
+    pub fn new(queries: &QueryRecord<F>) -> Self {
         Shard {
             index: 0,
-            queries: queries.into(),
+            queries: Some(queries.clone()),
             shard_config: ShardingConfig::default(),
         }
     }
 
     #[inline]
     pub fn queries(&self) -> &QueryRecord<F> {
-        self.queries.expect("Missing query record reference")
+        self.queries
+            .as_ref()
+            .expect("Missing query record reference")
     }
 
     pub fn get_func_range(&self, func_index: usize) -> Range<usize> {
@@ -123,18 +125,9 @@ impl<'a, F: PrimeField32> Shard<'a, F> {
     }
 }
 
-impl<'a, F: PrimeField32> Indexed for Shard<'a, F> {
-    fn index(&self) -> u32 {
-        self.index
-    }
-}
-
-impl<'a, F: PrimeField32> MachineRecord for Shard<'a, F> {
-    type Config = ShardingConfig;
-
-    fn set_index(&mut self, index: u32) {
-        self.index = index
-    }
+impl<F: PrimeField32> MachineRecord for Shard<F> {
+    // type Config = ShardingConfig; // FIXME
+    type Config = SphinxCoreOpts;
 
     fn stats(&self) -> HashMap<String, usize> {
         // TODO: use `IndexMap` instead so the original insertion order is kept
@@ -183,9 +176,34 @@ impl<'a, F: PrimeField32> MachineRecord for Shard<'a, F> {
         // just a no-op because `generate_dependencies` is a no-op
     }
 
-    fn shard(self, config: &Self::Config) -> Vec<Self> {
-        let queries = self.queries();
-        let shard_size = config.max_shard_size as usize;
+    fn public_values<F2: AbstractField>(&self) -> Vec<F2> {
+        self.expect_public_values()
+            .iter()
+            .map(|f| F2::from_canonical_u32(f.as_canonical_u32()))
+            .collect()
+    }
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub struct ShardingConfig {
+    pub(crate) max_shard_size: u32,
+}
+
+impl Default for ShardingConfig {
+    fn default() -> Self {
+        const DEFAULT_SHARD_SIZE: u32 = 1 << 22;
+        Self {
+            max_shard_size: std::env::var("SHARD_SIZE").map_or_else(
+                |_| DEFAULT_SHARD_SIZE,
+                |s| s.parse::<u32>().unwrap_or(DEFAULT_SHARD_SIZE),
+            ),
+        }
+    }
+}
+
+impl ShardingConfig {
+    pub fn shard<F: PrimeField32>(&self, queries: &QueryRecord<F>) -> Vec<Shard<F>> {
+        let shard_size = self.max_shard_size as usize;
         let max_num_func_rows: usize = queries
             .func_queries
             .iter()
@@ -208,36 +226,12 @@ impl<'a, F: PrimeField32> MachineRecord for Shard<'a, F> {
         for shard_index in 0..num_shards {
             shards.push(Shard {
                 index: shard_index as u32,
-                queries: self.queries,
-                shard_config: *config,
+                queries: Some(queries.clone()),
+                shard_config: *self,
             });
         }
         shards
     }
 
-    fn public_values<F2: AbstractField>(&self) -> Vec<F2> {
-        self.expect_public_values()
-            .iter()
-            .map(|f| F2::from_canonical_u32(f.as_canonical_u32()))
-            .collect()
-    }
-}
-
-#[derive(Clone, Copy, Debug, Eq, PartialEq)]
-pub struct ShardingConfig {
-    pub(crate) max_shard_size: u32,
-}
-
-impl Default for ShardingConfig {
-    fn default() -> Self {
-        const DEFAULT_SHARD_SIZE: u32 = 1 << 22;
-        Self {
-            max_shard_size: std::env::var("SHARD_SIZE").map_or_else(
-                |_| DEFAULT_SHARD_SIZE,
-                |s| s.parse::<u32>().unwrap_or(DEFAULT_SHARD_SIZE),
-            ),
-        }
-    }
-}
 
 const NUM_MEM_TABLES: usize = 5;
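Two things happen in this file: `Shard` gives up its `'a` lifetime and owns a cloned `QueryRecord` (presumably to satisfy the `'static` requirements of the new prover), and `ShardingConfig` absorbs the `shard` logic while `MachineRecord::Config` switches to `SphinxCoreOpts`. Note the cost this implies: `queries.clone()` runs once per shard in the loop above. The `num_shards` computation itself sits in an elided stretch of the diff; the sketch below assumes ceiling division over the busiest function's row count, which is an assumption rather than the confirmed implementation:

```rust
// Assumed shard-count arithmetic: ceiling division of the largest
// per-function row count by the configured shard size.
fn num_shards(max_num_func_rows: usize, max_shard_size: u32) -> usize {
    max_num_func_rows.div_ceil(max_shard_size as usize)
}

fn main() {
    // Default shard size is 1 << 22 rows; ten million rows -> 3 shards.
    assert_eq!(num_shards(10_000_000, 1 << 22), 3);
    // Setting SHARD_SIZE=1048576 (1 << 20) in the environment -> 10 shards.
    assert_eq!(num_shards(10_000_000, 1 << 20), 10);
}
```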