Skip to content

Commit

Permalink
replace JSON with TOML
Browse files Browse the repository at this point in the history
  • Loading branch information
jimouris committed Feb 6, 2024
1 parent fa28638 commit 12cc9a3
Show file tree
Hide file tree
Showing 13 changed files with 94 additions and 104 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ rand_core = "0.6.4"
rayon = "1.8.0"
rs_merkle = "1.2"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
toml = "0.8.10"
tarpc = { version = "0.30.0", features = ["full", "serde-transport", "tcp", "tokio1"] }
tokio = { version = "1.32.0", features = ["full", "macros"] }
zipf = "7.0.1"
Expand Down
90 changes: 43 additions & 47 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,114 +28,110 @@ file. Each mode (Weighted Heavy Hitters, Attribute-Based Metrics, and Plain
Metrics with Prio) uses a different config. The contents that are shared between
all the config files are shown below:

```bash
{
"data_bits": 8, # Number of bits of each string.
"hist_buckets": 2, # Number of histogram buckets
"mode": ..., # See below.
"server_0": "0.0.0.0:8000", # The `IP:port` for server 0.
"server_1": "0.0.0.0:8001", # The `IP:port` for server 1.
"add_key_batch_size": 1000, # Size of RPC requests for transmitting keys.
"flp_batch_size": 100000, # Size of RPC requests for transmitting FLPs.
"unique_buckets": 1000, # Zipf parameter
"zipf_exponent": 1.03 # Zipf exponent
}
```toml
data_bits = 8 # Number of bits of each string.
hist_buckets = 2 # Number of histogram buckets

# [mode] # Mode of operation, one of:
# mode.weighted_heavy_hitters.threshold = 0.01
# mode.attribute_based_metrics.num_attributes = 10
# mode = "plain_metrics"

server_0 = "0.0.0.0:8000" # The `IP:port` for server 0.
server_1 = "0.0.0.0:8001" # The `IP:port` for server 1.

add_key_batch_size = 1000 # Size of RPC requests for transmitting keys.
flp_batch_size = 100000 # Size of RPC requests for transmitting FLPs.

unique_buckets = 1000 # Zipf parameter
zipf_exponent = 1.03 # Zipf exponent
```

### 1. Weighted Heavy Hitters
[Config-weights.json](./src/bin/config-weights.json)
[weighted-heavy-hitters.toml](./src/configs/weighted-heavy-hitters.toml)
```bash
...
"mode": {
"weighted_heavy_hitters": {
"threshold": 0.01
}
},
...
...
mode.weighted_heavy_hitters.threshold = 0.01
...
```

#### Weighted Heavy Hitters: Aggregators
Run the aggregators in two separate shells. They will wait and be ready to
process client requests.
```bash
cargo run --release --bin server -- --config src/bin/config-weights.json --server_id 0
cargo run --release --bin server -- --config src/bin/config-weights.json --server_id 1
cargo run --release --bin server -- --config src/configs/weighted-heavy-hitters.toml --server_id 0
cargo run --release --bin server -- --config src/configs/weighted-heavy-hitters.toml --server_id 1
```

#### Weighted Heavy Hitters: Clients
In another shell, send 100 client requests to the Aggregators:
```bash
cargo run --release --bin driver -- --config src/bin/config-weights.json -n 100
cargo run --release --bin driver -- --config src/configs/weighted-heavy-hitters.toml -n 100
```

To simulate malicious clients, include the `--malicious` flag followed by the
fraction of malicious clients to generate (a value in [0.0, 0.9]). For instance, to run
with 5% malicious clients use:
```bash
cargo run --release --bin driver -- --config src/bin/config-weights.json -n 100 --malicious 0.05
cargo run --release --bin driver -- --config src/configs/weighted-heavy-hitters.toml -n 100 --malicious 0.05
```

### 2. Attribute-Based Metrics
[Config-attributes.json](./src/bin/config-attributes.json)
```bash
...
"mode": {
"attribute_based_metrics": {
"threshold": 10
}
},
...
[attribute-based-metrics.toml](./src/configs/attribute-based-metrics.toml)
```toml
...
mode.attribute_based_metrics.num_attributes = 10
...
```

#### Attribute-Based Metrics: Aggregators
Run the aggregators in two separate shells. They will wait and be ready to
process client requests.
```bash
cargo run --release --bin server -- --config src/bin/config-attributes.json --server_id 0
cargo run --release --bin server -- --config src/bin/config-attributes.json --server_id 1
cargo run --release --bin server -- --config src/configs/attribute-based-metrics.toml --server_id 0
cargo run --release --bin server -- --config src/configs/attribute-based-metrics.toml --server_id 1
```

#### Attribute-Based Metrics: Clients
In another shell, send 100 client requests to the Aggregators:
```bash
cargo run --release --bin driver -- --config src/bin/config-attributes.json -n 100
cargo run --release --bin driver -- --config src/configs/attribute-based-metrics.toml -n 100
```

To simulate malicious clients, include the `--malicious` flag followed by the
fraction of malicious clients to generate (a value in [0.0, 0.9]). For instance, to run
with 5% malicious clients use:
```bash
cargo run --release --bin driver -- --config src/bin/config-attributes.json -n 100 --malicious 0.05
cargo run --release --bin driver -- --config src/configs/attribute-based-metrics.toml -n 100 --malicious 0.05
```

### 3. Plain Metrics with Prio
[Config-plain.json](./src/bin/config-plain.json)
```bash
...
"data_bits": 0, # This is unused in this use-case
"mode": "plain_metrics",
...
[plain-metrics.toml](./src/configs/plain-metrics.toml)
```toml
...
mode = "plain_metrics"
...
```

#### Plain Metrics with Prio: Aggregators
Run the aggregators in two separate shells. They will wait and be ready to
process client requests.
```bash
cargo run --release --bin server -- --config src/bin/config-plain.json --server_id 0
cargo run --release --bin server -- --config src/bin/config-plain.json --server_id 1
cargo run --release --bin server -- --config src/configs/plain-metrics.toml --server_id 0
cargo run --release --bin server -- --config src/configs/plain-metrics.toml --server_id 1
```

#### Plain Metrics with Prio: Clients
In another shell, send 100 client requests to the servers:
```bash
cargo run --release --bin driver -- --config src/bin/config-plain.json -n 100
cargo run --release --bin driver -- --config src/configs/plain-metrics.toml -n 100
```

To simulate malicious clients, include the `--malicious` flag followed by the
fraction of malicious clients to generate (a value in [0.0, 0.9]). For instance, to run
with 5% malicious clients use:
```bash
cargo run --release --bin driver -- --config src/bin/config-plain.json -n 100 --malicious 0.05
cargo run --release --bin driver -- --config src/configs/plain-metrics.toml -n 100 --malicious 0.05
```


Expand Down
15 changes: 0 additions & 15 deletions src/bin/config-attributes.json

This file was deleted.

11 changes: 0 additions & 11 deletions src/bin/config-plain.json

This file was deleted.

15 changes: 0 additions & 15 deletions src/bin/config-weights.json

This file was deleted.

6 changes: 3 additions & 3 deletions src/bin/driver.rs
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@ fn generate_reports(cfg: &config::Config, mastic: &MasticHistogram) -> Vec<Plain
match cfg.mode {
Mode::WeightedHeavyHitters { .. } | Mode::AttributeBasedMetrics { .. } => {
// Synthesize a fake input and weight.
let alpha = sample_bits(cfg.data_bits);
let alpha = sample_bits(cfg.data_bits.unwrap());
let beta = mastic.encode_measurement(&bucket).unwrap();

let (key_0, key_1) = VidpfKey::gen(&alpha, &beta);
Expand Down Expand Up @@ -523,7 +523,7 @@ async fn run_level_last(
let (shares_0, shares_1) = try_join!(resp_0, resp_1).unwrap();
println!(
"- Time for level {}: {:?}\n",
cfg.data_bits,
cfg.data_bits.unwrap(),
start_last.elapsed().as_secs_f64()
);
for res in &collect::KeyCollection::final_values(mastic.input_len(), &shares_0, &shares_1) {
Expand Down Expand Up @@ -753,7 +753,7 @@ async fn main() -> io::Result<()> {
Mode::WeightedHeavyHitters { .. } => {
tree_init(&client_0, &client_1).await?;

for level in 0..cfg.data_bits - 1 {
for level in 0..cfg.data_bits.unwrap() - 1 {
let start_level = Instant::now();
if level == 0 {
run_flp_queries(&cfg, &mastic, &client_0, &client_1, num_clients).await?;
Expand Down
2 changes: 1 addition & 1 deletion src/bin/server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ use tarpc::{
struct CollectorServer {
server_id: i8,
seed: prg::PrgSeed,
data_bits: usize,
data_bits: Option<usize>,
arc: Arc<Mutex<collect::KeyCollection>>,
}

Expand Down
8 changes: 4 additions & 4 deletions src/collect.rs
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ pub struct KeyCollection {
pub verify_key: [u8; 16],

/// The depth of the tree.
depth: usize,
depth: Option<usize>,

/// The report shares of the clients. The first element of the tuple is whether the client is honest or
/// not.
Expand Down Expand Up @@ -165,7 +165,7 @@ impl KeyCollection {
mastic: MasticHistogram,
server_id: i8,
_seed: &prg::PrgSeed,
depth: usize,
depth: Option<usize>,
verify_key: [u8; 16],
) -> KeyCollection {
KeyCollection {
Expand Down Expand Up @@ -355,13 +355,13 @@ impl KeyCollection {
}

let level = self.frontier[0].path.len();
debug_assert!(level < self.depth);
debug_assert!(level < self.depth.unwrap());

let next_frontier = self
.frontier
.par_iter()
.flat_map(|node| {
assert!(node.path.len() <= self.depth);
assert!(node.path.len() <= self.depth.unwrap());
let child_0 = self.make_tree_node(node, false);
let child_1 = self.make_tree_node(node, true);

Expand Down
6 changes: 3 additions & 3 deletions src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ pub enum Mode {
#[derive(Deserialize)]
pub struct Config {
/// Number of bits of each string.
pub data_bits: usize,
pub data_bits: Option<usize>,

/// Number of histogram buckets for the FLP range check.
pub hist_buckets: usize,
Expand Down Expand Up @@ -58,8 +58,8 @@ pub struct Config {
}

pub fn get_config(filename: &str) -> Config {
let json_data = &fs::read_to_string(filename).expect("Cannot open JSON file");
serde_json::from_str(json_data).expect("Cannot parse JSON config")
let toml_data = &fs::read_to_string(filename).expect("Cannot open TOML file");
toml::from_str(toml_data).expect("Cannot parse TOML config")
}

pub fn get_args(
Expand Down
12 changes: 12 additions & 0 deletions src/configs/attribute-based-metrics.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
mode.attribute_based_metrics.num_attributes = 10

data_bits = 8
hist_buckets = 4

server_0 = "0.0.0.0:8000"
server_1 = "0.0.0.0:8001"

add_key_batch_size = 1000
flp_batch_size = 100000
unique_buckets = 1000
zipf_exponent = 1.03
11 changes: 11 additions & 0 deletions src/configs/plain-metrics.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
mode = "plain_metrics"

hist_buckets = 4

server_0 = "0.0.0.0:8000"
server_1 = "0.0.0.0:8001"

add_key_batch_size = 1000
flp_batch_size = 100000
unique_buckets = 1000
zipf_exponent = 1.03
12 changes: 12 additions & 0 deletions src/configs/weighted-heavy-hitters.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
mode.weighted_heavy_hitters.threshold = 0.01

data_bits = 8
hist_buckets = 4

server_0 = "0.0.0.0:8000"
server_1 = "0.0.0.0:8001"

add_key_batch_size = 1000
flp_batch_size = 100000
unique_buckets = 1000
zipf_exponent = 1.03
8 changes: 4 additions & 4 deletions tests/collect_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@ fn collect_test_eval_groups() {
thread_rng().fill(&mut verify_key);

let mastic = Mastic::new_histogram(4).unwrap();
let mut col_0 = KeyCollection::new(mastic.clone(), 0, &seed, strlen, verify_key);
let mut col_1 = KeyCollection::new(mastic.clone(), 1, &seed, strlen, verify_key);
let mut col_0 = KeyCollection::new(mastic.clone(), 0, &seed, Some(strlen), verify_key);
let mut col_1 = KeyCollection::new(mastic.clone(), 1, &seed, Some(strlen), verify_key);

for cstr in &client_strings {
let input_beta = mastic.encode_measurement(&2).unwrap();
Expand Down Expand Up @@ -109,8 +109,8 @@ fn collect_test_eval_full_groups() {
let mut verify_key = [0; 16];
thread_rng().fill(&mut verify_key);
let mastic = Mastic::new_histogram(4).unwrap();
let mut col_0 = KeyCollection::new(mastic.clone(), 0, &seed, strlen, verify_key);
let mut col_1 = KeyCollection::new(mastic.clone(), 1, &seed, strlen, verify_key);
let mut col_0 = KeyCollection::new(mastic.clone(), 0, &seed, Some(strlen), verify_key);
let mut col_1 = KeyCollection::new(mastic.clone(), 1, &seed, Some(strlen), verify_key);

let mut keys = vec![];
println!("Starting to generate keys");
Expand Down

0 comments on commit 12cc9a3

Please sign in to comment.