
feat: add some initial cargo-fuzz test for queues #13

Merged · 14 commits · Dec 21, 2023
6 changes: 6 additions & 0 deletions .vscode/settings.json
@@ -0,0 +1,6 @@
{
"rust-analyzer.linkedProjects": [
"Cargo.toml",
"fuzz/Cargo.toml"
]
}
2 changes: 2 additions & 0 deletions Cargo.toml
@@ -14,9 +14,11 @@ keywords = ["no_std", "embedded", "flash", "storage"]
[dependencies]
embedded-storage = "0.3.0"
defmt = { version = "0.3", optional = true }
rand = { version = "0.8.5", optional = true }

[dev-dependencies]
rand = "0.8.5"

[features]
defmt = ["dep:defmt"]
_test = ["rand"]
2 changes: 1 addition & 1 deletion README.md
@@ -54,7 +54,7 @@ An item is considered erased when its data CRC field is 0.
*NOTE: This means the data itself is still stored on the flash when it's considered erased.*
*Depending on your use case, this might not be secure*

The length is a u16, so any item cannot be longer than 0xFFFF.
The length is a u16, so an item cannot be longer than 0xFFFF bytes, nor longer than `page size - the item header (aligned to word boundary) - page state (2 words)`.
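For example (illustrative numbers only, assuming a 4-byte word size and an item header that rounds up to 8 bytes): on a 1024-byte page, the page state takes `2 * 4 = 8` bytes and the header another 8, leaving `1024 - 8 - 8 = 1008` bytes for a single item, well below the u16 limit.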

### Inner workings for map

7 changes: 7 additions & 0 deletions fuzz.sh
@@ -0,0 +1,7 @@
#!/bin/bash

set -euxo pipefail

CPUS=32
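# CPUS is the number of parallel fuzz jobs; tune it to the machine's core count.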

cargo fuzz run --sanitizer none -j$CPUS queue
5 changes: 5 additions & 0 deletions fuzz/.gitignore
@@ -0,0 +1,5 @@
target
corpus
artifacts
coverage
Cargo.lock
28 changes: 28 additions & 0 deletions fuzz/Cargo.toml
@@ -0,0 +1,28 @@
[package]
name = "sequential-storage-fuzz"
version = "0.0.0"
publish = false
edition = "2021"

[package.metadata]
cargo-fuzz = true

[dependencies]
libfuzzer-sys = "0.4"
sequential-storage = { path = "..", features = ["_test"] }
arbitrary = { version = "1.2.2", features = ["derive"] }
rand = "0.8.5"
embedded-storage = "0.3.0"

# Prevent this from interfering with workspaces
[workspace]
members = ["."]

[profile.release]
debug = 1

[[bin]]
name = "queue"
path = "fuzz_targets/queue.rs"
test = false
doc = false
131 changes: 131 additions & 0 deletions fuzz/fuzz_targets/queue.rs
@@ -0,0 +1,131 @@
#![no_main]

use libfuzzer_sys::arbitrary::Arbitrary;
use libfuzzer_sys::fuzz_target;
use sequential_storage::mock_flash::{MockFlashBase, MockFlashError};
use std::collections::VecDeque;
const MAX_VALUE_SIZE: usize = u8::MAX as usize;

fuzz_target!(|data: Input| fuzz(data));

#[derive(Arbitrary, Debug)]
struct Input {
ops: Vec<Op>,
}

#[derive(Arbitrary, Debug)]
enum Op {
Push(PushOp),
PopMany(u8),
PeekMany(u8),
Peek,
Pop,
}

#[derive(Arbitrary, Debug)]
struct PushOp {
value_len: u8,
}

fn fuzz(ops: Input) {
const PAGES: usize = 4;
const WORD_SIZE: usize = 4;
const WORDS_PER_PAGE: usize = 256;

let mut flash = MockFlashBase::<PAGES, WORD_SIZE, WORDS_PER_PAGE>::default();
let flash_range = 0x000..0x1000;

let mut order = VecDeque::new();
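// `order` is the reference model: it mirrors the expected queue contents so every
// operation on the flash-backed queue below can be checked against it.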
let mut buf = [0; MAX_VALUE_SIZE + 1];

for op in ops.ops.into_iter() {
println!(
"==================================================== op: {:?}",
op,
);
match op {
Op::Push(op) => {
let val: Vec<u8> = (0..op.value_len as usize)
.map(|_| rand::random::<u8>())
.collect();

let max_fit =
sequential_storage::queue::find_max_fit(&mut flash, flash_range.clone())
.unwrap();

let result: Result<(), sequential_storage::Error<MockFlashError>> =
sequential_storage::queue::push(&mut flash, flash_range.clone(), &val, false);

if let Some(max_fit) = max_fit {
if val.len() <= max_fit {
result.unwrap();
order.push_back(val.to_vec());
} else {
assert!(result.is_err());
}
} else {
assert!(result.is_err());
}
}
Op::Pop => {
if let Some(expected) = order.pop_front() {
let result =
sequential_storage::queue::pop(&mut flash, flash_range.clone(), &mut buf);
assert!(result.is_ok());
assert_eq!(result.unwrap().unwrap(), expected);
} else {
assert!(sequential_storage::queue::pop(
&mut flash,
flash_range.clone(),
&mut buf
)
.unwrap()
.is_none());
}
}
Op::PopMany(n) => {
let mut popper =
sequential_storage::queue::pop_many(&mut flash, flash_range.clone());
for _i in 0..n {
if let Some(expected) = order.pop_front() {
let result = popper.next(&mut buf);
assert!(result.is_ok());
assert_eq!(result.unwrap().unwrap(), expected);
} else {
assert!(popper.next(&mut buf).unwrap().is_none());
}
}
}

Op::Peek => {
if let Some(expected) = order.get(0) {
let result =
sequential_storage::queue::peek(&mut flash, flash_range.clone(), &mut buf);
assert!(result.is_ok());
assert_eq!(result.unwrap().unwrap(), expected);
} else {
assert!(sequential_storage::queue::peek(
&mut flash,
flash_range.clone(),
&mut buf
)
.unwrap()
.is_none());
}
}
Op::PeekMany(n) => {
let mut peeker =
sequential_storage::queue::peek_many(&mut flash, flash_range.clone());
for i in 0..n {
if let Some(expected) = order.get(i as usize) {
let result = peeker.next(&mut buf);
assert!(result.is_ok());
assert_eq!(result.unwrap().unwrap(), expected);
} else {
assert!(peeker.next(&mut buf).unwrap().is_none());
}
}
}
}
}
}
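For reference, a minimal standalone sketch (not part of this PR) of the round trip the fuzz target exercises, assuming the `_test` feature is enabled so `mock_flash` is available and that `pop` hands back the stored bytes as a slice:

use sequential_storage::mock_flash::MockFlashBase;

fn main() {
    // Same geometry as the fuzz target: 4 pages, 4-byte words, 256 words per page.
    let mut flash = MockFlashBase::<4, 4, 256>::default();
    let flash_range = 0x000..0x1000;
    let mut buf = [0; 256];

    // Push one value and pop it back; the queue is FIFO, so it must come back unchanged.
    let value = [1u8, 2, 3, 4];
    sequential_storage::queue::push(&mut flash, flash_range.clone(), &value, false).unwrap();

    let popped = sequential_storage::queue::pop(&mut flash, flash_range.clone(), &mut buf)
        .unwrap()
        .expect("queue should not be empty");
    assert_eq!(&popped[..], &value[..]);

    // A second pop finds the queue empty again.
    assert!(sequential_storage::queue::pop(&mut flash, flash_range, &mut buf)
        .unwrap()
        .is_none());
}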
2 changes: 1 addition & 1 deletion src/item.rs
@@ -320,7 +320,7 @@ pub fn find_next_free_item_spot<S: NorFlash>(
Ok(None) => {
if ItemHeader::data_address::<S>(current_address)
+ round_up_to_alignment::<S>(data_length)
>= end_address
> end_address
{
// Item does not fit anymore between the current address and the end address
return Ok(None);
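A quick worked example of what the `>=` to `>` change fixes (illustrative numbers): with `end_address = 0x100`, a data address of `0x0F8` and a rounded-up data length of 8 give `0x0F8 + 8 = 0x100`, an exact fit that the old check rejected as not fitting; with `>`, only items that would actually run past `end_address` are rejected.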
5 changes: 3 additions & 2 deletions src/lib.rs
@@ -1,4 +1,4 @@
#![cfg_attr(not(any(test, doctest)), no_std)]
#![cfg_attr(not(any(test, doctest, feature = "_test")), no_std)]
#![deny(missing_docs)]
#![doc = include_str!("../README.md")]

@@ -17,7 +17,8 @@ mod item;
pub mod map;
pub mod queue;

#[cfg(any(test, doctest))]
#[cfg(any(test, doctest, feature = "_test"))]
/// An in-memory flash type that can be used for mocking.
pub mod mock_flash;

/// The biggest wordsize we support.
14 changes: 14 additions & 0 deletions src/mock_flash.rs
@@ -3,6 +3,7 @@ use embedded_storage::nor_flash::{
ErrorType, MultiwriteNorFlash, NorFlash, NorFlashError, NorFlashErrorKind, ReadNorFlash,
};

/// State of a word in the flash.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Writable {
/// Twice
@@ -15,14 +16,20 @@ pub enum Writable {

use Writable::*;

/// Base type for in memory flash that can be used for mocking.
#[derive(Debug, Clone)]
pub struct MockFlashBase<const PAGES: usize, const BYTES_PER_WORD: usize, const PAGE_WORDS: usize> {
writable: Vec<Writable>,
data: Vec<u8>,
/// Number of erases done.
pub erases: u32,
/// Number of reads done.
pub reads: u32,
/// Number of writes done.
pub writes: u32,
/// The chance for a bit flip to happen.
pub write_bit_flip_chance: f32,
/// Check that all write locations are writeable.
pub use_strict_write_count: bool,
}

@@ -42,6 +49,7 @@ impl<const PAGES: usize, const BYTES_PER_WORD: usize, const PAGE_WORDS: usize>

const PAGE_BYTES: usize = PAGE_WORDS * BYTES_PER_WORD;

/// Create a new flash instance.
pub fn new(write_bit_flip_chance: f32, use_strict_write_count: bool) -> Self {
Self {
writable: vec![T; Self::CAPACITY_WORDS],
@@ -54,10 +62,12 @@ }
}
}

/// Get a reference to the underlying data.
pub fn as_bytes(&self) -> &[u8] {
&self.data
}

/// Get a mutable reference to the underlying data.
pub fn as_bytes_mut(&mut self) -> &mut [u8] {
&mut self.data
}
@@ -202,10 +212,14 @@ }
}
}

/// Errors reported by mock flash.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MockFlashError {
/// Operation out of bounds.
OutOfBounds,
/// Offset or data not aligned.
NotAligned,
/// Location not writeable.
NotWritable(u32),
}

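As a rough usage sketch (not from this PR) of the newly documented constructor and fields; the geometry is illustrative:

use sequential_storage::mock_flash::MockFlashBase;

fn main() {
    // 4 pages, 4-byte words, 256 words per page; no random bit flips, strict write counting.
    let flash = MockFlashBase::<4, 4, 256>::new(0.0, true);

    // The usage counters and the raw backing bytes are public so tests can inspect them.
    println!(
        "erases={} reads={} writes={} capacity={} bytes",
        flash.erases,
        flash.reads,
        flash.writes,
        flash.as_bytes().len()
    );
}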
83 changes: 83 additions & 0 deletions src/queue.rs
@@ -403,6 +403,89 @@ impl<'d, S: MultiwriteNorFlash> QueueIterator<'d, S> {
}
}

/// Find the largest size of data that can be stored.
///
/// This will read through the entire flash to find the largest chunk of
/// data that can be stored, taking alignment requirements of the item into account.
///
/// If there is no space left, `None` is returned.
pub fn find_max_fit<S: NorFlash>(
flash: &mut S,
flash_range: Range<u32>,
) -> Result<Option<usize>, Error<S::Error>> {
assert_eq!(flash_range.start % S::ERASE_SIZE as u32, 0);
assert_eq!(flash_range.end % S::ERASE_SIZE as u32, 0);

assert!(S::ERASE_SIZE >= S::WORD_SIZE * 4);
assert!(S::WORD_SIZE <= MAX_WORD_SIZE);

let current_page = find_youngest_page(flash, flash_range.clone())?;

// Check if we have space on the next page
let next_page = next_page::<S>(flash_range.clone(), current_page);
match get_page_state(flash, flash_range.clone(), next_page)? {
PageState::Closed => {
let next_page_data_start_address =
calculate_page_address::<S>(flash_range.clone(), next_page) + S::WORD_SIZE as u32;
let next_page_data_end_address =
calculate_page_end_address::<S>(flash_range.clone(), next_page)
- S::WORD_SIZE as u32;

let next_page_empty = read_item_headers(
flash,
next_page_data_start_address,
next_page_data_end_address,
|_, header, _| match header.crc {
Some(_) => ControlFlow::Break(()),
None => ControlFlow::Continue(()),
},
)?
.is_none();
if next_page_empty {
return Ok(Some(S::ERASE_SIZE - (2 * S::WORD_SIZE)));
}
}
PageState::Open => {
return Ok(Some(S::ERASE_SIZE - (2 * S::WORD_SIZE)));
}
PageState::PartialOpen => {
// This should never happen
return Err(Error::Corrupted);
}
};

// See how much space we can find in the current page.
let mut max_free: Option<usize> = None;
let page_data_start_address =
calculate_page_address::<S>(flash_range.clone(), current_page) + S::WORD_SIZE as u32;
let page_data_end_address =
calculate_page_end_address::<S>(flash_range.clone(), current_page) - S::WORD_SIZE as u32;

let mut current_address = page_data_start_address;
let end_address = page_data_end_address;

while current_address < end_address {
let result = ItemHeader::read_new(flash, current_address, end_address)?;
match result {
Some(header) => current_address = header.next_item_address::<S>(current_address),
None => {
let data_address =
round_up_to_alignment_usize::<S>(current_address as usize + ItemHeader::LENGTH);
if data_address <= end_address as usize {
let free = round_down_to_alignment_usize::<S>(
end_address as usize - data_address as usize,
);
max_free = max_free.map(|current| current.max(free)).or(Some(free));
}

break;
}
}
}

Ok(max_free)
}

fn find_youngest_page<S: NorFlash>(
flash: &mut S,
flash_range: Range<u32>,
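A small sketch of how `find_max_fit` is meant to be used, mirroring the fuzz target above (the mock flash geometry and value are illustrative):

use sequential_storage::mock_flash::MockFlashBase;

fn main() {
    let mut flash = MockFlashBase::<4, 4, 256>::default();
    let flash_range = 0x000..0x1000;
    let value = [0xAAu8; 16];

    // Ask how much data still fits before attempting the push, as the fuzz target does.
    match sequential_storage::queue::find_max_fit(&mut flash, flash_range.clone()).unwrap() {
        Some(max) if value.len() <= max => {
            sequential_storage::queue::push(&mut flash, flash_range, &value, false).unwrap();
        }
        _ => {
            // Not enough room: pop some items first or accept that the push would fail.
        }
    }
}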