Skip to content

Commit

Permalink
Return errors from utils instead of panicking
Browse files Browse the repository at this point in the history
  • Loading branch information
TrAyZeN committed Apr 5, 2024
1 parent 8399fa2 commit cce93a5
Show file tree
Hide file tree
Showing 7 changed files with 104 additions and 90 deletions.
2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,12 @@ rayon = "1.10.0"
indicatif = "0.17.8"
ndarray-npy ="0.8.1"
itertools = "0.12.1"
thiserror = "1.0.58"

[dev-dependencies]
criterion = "0.5.1"
ndarray-rand = "0.14.0"
anyhow = "1.0.81"

[[bench]]
name = "cpa"
Expand Down
57 changes: 34 additions & 23 deletions examples/cpa.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
use anyhow::Result;
use indicatif::ProgressIterator;
use muscat::cpa_normal::*;
use muscat::leakage::{hw, sbox};
use muscat::util::{progress_bar, read_array_2_from_npy_file, save_array2};
use muscat::util::{progress_bar, read_array2_from_npy_file, save_array2};
use ndarray::*;
use rayon::iter::{ParallelBridge, ParallelIterator};
use std::time::{self};
use std::time;

// leakage model
pub fn leakage_model(value: ArrayView1<usize>, guess: usize) -> usize {
Expand All @@ -16,21 +17,21 @@ type FormatTraces = f64;
type FormatMetadata = u8;

#[allow(dead_code)]
fn cpa() {
let start_sample: usize = 0;
let end_sample: usize = 5000;
let size: usize = end_sample - start_sample; // Number of samples
let patch: usize = 500;
fn cpa() -> Result<()> {
let start_sample = 0;
let end_sample = 5000;
let size = end_sample - start_sample; // Number of samples
let patch = 500;
let guess_range = 256; // 2**(key length)
let folder = String::from("../../data/cw");
let dir_l = format!("{folder}/leakages.npy");
let dir_p = format!("{folder}/plaintexts.npy");
let leakages: Array2<FormatTraces> = read_array_2_from_npy_file::<FormatTraces>(&dir_l);
let plaintext: Array2<FormatMetadata> = read_array_2_from_npy_file::<FormatMetadata>(&dir_p);
let leakages = read_array2_from_npy_file::<FormatTraces>(&dir_l)?;
let plaintext = read_array2_from_npy_file::<FormatMetadata>(&dir_p)?;
let len_traces = leakages.shape()[0];

let mut cpa_parallel = ((0..len_traces).step_by(patch))
.progress_with(progress_bar(len_traces))
.map(|row| row)
.par_bridge()
.map(|row_number| {
let mut cpa = Cpa::new(size, patch, guess_range, leakage_model);
Expand All @@ -48,29 +49,33 @@ fn cpa() {
|| Cpa::new(size, patch, guess_range, leakage_model),
|x, y| x + y,
);

cpa_parallel.finalize();
println!("Guessed key = {}", cpa_parallel.pass_guess());
save_array2("results/corr.npy", cpa_parallel.pass_corr_array().view());

save_array2("results/corr.npy", cpa_parallel.pass_corr_array().view())?;

Ok(())
}

#[allow(dead_code)]
fn success() {
let start_sample: usize = 0;
let end_sample: usize = 5000;
let size: usize = end_sample - start_sample; // Number of samples
let patch: usize = 500;
fn success() -> Result<()> {
let start_sample = 0;
let end_sample = 5000;
let size = end_sample - start_sample; // Number of samples
let patch = 500;
let guess_range = 256; // 2**(key length)
let folder = String::from("../data/log_584012"); // "../../../intenship/scripts/log_584012"
let nfiles = 13; // Number of files in the directory. TBD: Automating this value
let rank_traces: usize = 1000;
let rank_traces = 1000;

let mut cpa = Cpa::new(size, patch, guess_range, leakage_model);
cpa.success_traces(rank_traces);
for i in (0..nfiles).progress() {
let dir_l = format!("{folder}/l/{i}.npy");
let dir_p = format!("{folder}/p/{i}.npy");
let leakages: Array2<FormatTraces> = read_array_2_from_npy_file::<FormatTraces>(&dir_l);
let plaintext: Array2<FormatMetadata> =
read_array_2_from_npy_file::<FormatMetadata>(&dir_p);
let leakages = read_array2_from_npy_file::<FormatTraces>(&dir_l)?;
let plaintext = read_array2_from_npy_file::<FormatMetadata>(&dir_p)?;
let len_leakages = leakages.shape()[0];
for row in (0..len_leakages).step_by(patch) {
let range_samples = start_sample..end_sample;
Expand All @@ -84,14 +89,20 @@ fn success() {
cpa.update_success(sample_traces, sample_metadata);
}
}

cpa.finalize();
println!("Guessed key = {}", cpa.pass_guess());

// save corr key curves in npy
save_array2("results/success.npy", cpa.pass_rank().view());
save_array2("results/success.npy", cpa.pass_rank().view())?;

Ok(())
}

fn main() {
fn main() -> Result<()> {
let t = time::Instant::now();
cpa();
cpa()?;
println!("{:?}", t.elapsed());

Ok(())
}
30 changes: 16 additions & 14 deletions examples/cpa_partioned.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,9 @@
// use simple_bar::ProgressBar;
use anyhow::Result;
use indicatif::ProgressIterator;
use muscat::cpa::*;
use muscat::leakage::{hw, sbox};
use muscat::util::{progress_bar, read_array_2_from_npy_file, save_array};
use ndarray::*;
use muscat::util::{progress_bar, read_array2_from_npy_file, save_array};
use rayon::prelude::{ParallelBridge, ParallelIterator};
use std::time::Instant;

// traces format
type FormatTraces = i16;
Expand All @@ -17,28 +15,26 @@ pub fn leakage_model(value: usize, guess: usize) -> usize {
}

// multi-threading cpa
fn cpa() {
let size: usize = 5000; // Number of samples
fn cpa() -> Result<()> {
let size = 5000; // Number of samples
let guess_range = 256; // 2**(key length)
let target_byte = 1;
let folder = String::from("../../data"); // Directory of leakages and metadata
let nfiles = 5; // Number of files in the directory. TBD: Automating this value

/* Parallel operation using multi-threading on patches */
let mut cpa = (0..nfiles)
.into_iter()
.progress_with(progress_bar(nfiles))
.map(|n| {
let dir_l = format!("{folder}/l{n}.npy");
let dir_p = format!("{folder}/p{n}.npy");
let leakages: Array2<FormatTraces> = read_array_2_from_npy_file(&dir_l);
let plaintext: Array2<FormatMetadata> = read_array_2_from_npy_file(&dir_p);
let leakages = read_array2_from_npy_file::<FormatTraces>(&dir_l).unwrap();
let plaintext = read_array2_from_npy_file::<FormatMetadata>(&dir_p).unwrap();
(leakages, plaintext)
})
.into_iter()
.par_bridge()
.map(|patch| {
let mut c: Cpa = Cpa::new(size, guess_range, target_byte, leakage_model);
let mut c = Cpa::new(size, guess_range, target_byte, leakage_model);
let len_leakage = patch.0.shape()[0];
for i in 0..len_leakage {
c.update(
Expand All @@ -52,12 +48,18 @@ fn cpa() {
|| Cpa::new(size, guess_range, target_byte, leakage_model),
|a: Cpa, b| a + b,
);

cpa.finalize();
println!("Guessed key = {}", cpa.pass_guess());

// save corr key curves in npy
save_array("../results/corr.npy", &cpa.pass_corr_array());
save_array("../results/corr.npy", &cpa.pass_corr_array())?;

Ok(())
}

fn main() {
cpa();
fn main() -> Result<()> {
cpa()?;

Ok(())
}
24 changes: 14 additions & 10 deletions examples/rank.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
use anyhow::Result;
use indicatif::ProgressIterator;
use muscat::cpa::*;
use muscat::leakage::{hw, sbox};
use muscat::util::{progress_bar, read_array_2_from_npy_file, save_array};
use muscat::util::{progress_bar, read_array2_from_npy_file, save_array};
use ndarray::*;
use rayon::prelude::{ParallelBridge, ParallelIterator};
use std::time::Instant;

// traces format
type FormatTraces = i16;
Expand All @@ -15,8 +15,8 @@ pub fn leakage_model(value: usize, guess: usize) -> usize {
hw(sbox((value ^ guess) as u8) as usize)
}

fn rank() {
let size: usize = 5000; // Number of samples
fn rank() -> Result<()> {
let size = 5000; // Number of samples
let guess_range = 256; // 2**(key length)
let target_byte = 1;
let folder = String::from("../../data");
Expand All @@ -26,8 +26,8 @@ fn rank() {
for file in (0..nfiles).progress_with(progress_bar(nfiles)) {
let dir_l = format!("{folder}/l{file}.npy");
let dir_p = format!("{folder}/p{file}.npy");
let leakages: Array2<FormatTraces> = read_array_2_from_npy_file(&dir_l);
let plaintext: Array2<FormatMetadata> = read_array_2_from_npy_file(&dir_p);
let leakages = read_array2_from_npy_file::<FormatTraces>(&dir_l)?;
let plaintext = read_array2_from_npy_file::<FormatMetadata>(&dir_p)?;
let len_file = leakages.shape()[0];
for sample in (0..len_file).step_by(chunk) {
let l_sample: ndarray::ArrayBase<
Expand All @@ -36,7 +36,6 @@ fn rank() {
> = leakages.slice(s![sample..sample + chunk, ..]);
let p_sample = plaintext.slice(s![sample..sample + chunk, ..]);
let x = (0..chunk)
.into_iter()
.par_bridge()
.fold(
|| Cpa::new(size, guess_range, target_byte, leakage_model),
Expand All @@ -56,10 +55,15 @@ fn rank() {
rank.finalize();
}
}

// save rank key curves in npy
save_array("../results/rank.npy", &rank.pass_rank());
save_array("../results/rank.npy", &rank.pass_rank())?;

Ok(())
}

fn main() {
rank();
fn main() -> Result<()> {
rank()?;

Ok(())
}
9 changes: 6 additions & 3 deletions examples/snr.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,14 @@
use anyhow::Result;
use indicatif::ProgressIterator;
use muscat::processors::Snr;
use muscat::quicklog::{BatchIter, Log};
use muscat::util::{progress_bar, save_array};
use rayon::prelude::{ParallelBridge, ParallelIterator};

fn main() {
fn main() -> Result<()> {
// Open log file
// This uses logs from the python quicklog library.
let log = Log::<i16>::new("log").unwrap();
let log = Log::<i16>::new("log")?;
let leakage_size = log.leakage_size();
let trace_count = log.len();

Expand Down Expand Up @@ -37,5 +38,7 @@ fn main() {
.reduce(|| Snr::new(leakage_size, 256), |a, b| a + b);

// Save the resulting SNR trace to a numpy file
save_array("result.npy", &result.snr());
save_array("result.npy", &result.snr())?;

Ok(())
}
38 changes: 15 additions & 23 deletions src/quicklog.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,13 @@ use ndarray::Array1;
use npyz::{Deserialize, NpyFile};
use std::{
fs::File,
io::{BufRead, BufReader, Lines, Seek, SeekFrom},
io::{self, BufRead, BufReader, Lines, Seek, SeekFrom},
marker::PhantomData,
path::Path,
};
use thiserror::Error;

use crate::{trace::Trace, util::read_array_1_from_npy_file};
use crate::{trace::Trace, util::read_array1_from_npy_file};

/// Returns traces database directory from `TRACESDIR` environment variable, or `None` if it is not
/// defined.
Expand Down Expand Up @@ -45,25 +46,16 @@ pub fn guess_leakages_size<T: Deserialize>(path: &str) -> usize {
}

/// Parsing log records can produce two types of errors, wrapped into this single error type.
#[derive(Debug)]
#[derive(Error, Debug)]
pub enum LogError {
IoError(std::io::Error),
JsonError(serde_json::Error),
#[error("IO error")]
IoError(#[from] io::Error),
#[error("Failed to deserialize json")]
JsonError(#[from] serde_json::Error),
#[error("No records")]
NoRecords,
}

impl From<std::io::Error> for LogError {
fn from(error: std::io::Error) -> Self {
Self::IoError(error)
}
}

impl From<serde_json::Error> for LogError {
fn from(error: serde_json::Error) -> Self {
Self::JsonError(error)
}
}

/// Opens a log file and allows iterating over the records.
///
/// `T` specifies the type of the elements in the leakages.
Expand Down Expand Up @@ -178,7 +170,7 @@ impl<T: Deserialize> Record<T> {
f.seek(SeekFrom::Start(toff)).unwrap();
let buf = BufReader::new(f);
let npy = NpyFile::new(buf).unwrap();
Ok(read_array_1_from_npy_file(npy))
Ok(read_array1_from_npy_file(npy))
} else if let Some(_tid) = self.tid() {
// Trace is stored in a single file
todo!()
Expand All @@ -195,7 +187,7 @@ pub struct FileRecordIterator<T> {
}

impl<T> FileRecordIterator<T> {
pub fn new(path: &str) -> Result<Self, std::io::Error> {
pub fn new(path: &str) -> Result<Self, io::Error> {
let file = File::open(path)?;
let reader = BufReader::new(file);
Ok(Self {
Expand All @@ -218,10 +210,10 @@ impl<T> Iterator for FileRecordIterator<T> {
data: value,
phantom: PhantomData,
})),
Err(err) => Some(Err(LogError::JsonError(err))),
Err(err) => Some(Err(err.into())),
}
}
Err(err) => Some(Err(LogError::IoError(err))),
Err(err) => Some(Err(err.into())),
}
} else {
None
Expand Down Expand Up @@ -255,7 +247,7 @@ impl CachedLoader {
let toff = record.toff();
let chunk = &self.current_data.as_slice()[toff as usize..];
let npy = NpyFile::new(chunk).unwrap();
Ok(read_array_1_from_npy_file(npy))
Ok(read_array1_from_npy_file(npy))
} else {
record.load_trace()
}
Expand Down Expand Up @@ -415,5 +407,5 @@ impl<T: Deserialize, U> Iterator for BatchTraceIterator<T, U> {
pub fn array_from_bytes<T: Deserialize>(bytes: &[u8], toff: usize) -> Array1<T> {
let chunk = &bytes[toff..];
let npy = NpyFile::new(chunk).unwrap();
read_array_1_from_npy_file(npy)
read_array1_from_npy_file(npy)
}
Loading

0 comments on commit cce93a5

Please sign in to comment.