From 0a22680f3e9f73d1af4bab0a7150d7d05eb9dd26 Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Tue, 16 Jan 2024 19:18:13 -0500 Subject: [PATCH 01/15] Initial POC --- fuel-merkle/src/common.rs | 11 + fuel-merkle/src/common/node.rs | 29 +- fuel-merkle/src/sparse.rs | 5 + fuel-merkle/src/sparse/hash_generic.rs | 34 + fuel-merkle/src/sparse/merkle_tree_generic.rs | 1365 +++++++++++++++++ fuel-merkle/src/sparse/node_generic.rs | 824 ++++++++++ fuel-merkle/src/sparse/primitive_generic.rs | 77 + 7 files changed, 2333 insertions(+), 12 deletions(-) create mode 100644 fuel-merkle/src/sparse/hash_generic.rs create mode 100644 fuel-merkle/src/sparse/merkle_tree_generic.rs create mode 100644 fuel-merkle/src/sparse/node_generic.rs create mode 100644 fuel-merkle/src/sparse/primitive_generic.rs diff --git a/fuel-merkle/src/common.rs b/fuel-merkle/src/common.rs index e0514e8dfe..0176cea398 100644 --- a/fuel-merkle/src/common.rs +++ b/fuel-merkle/src/common.rs @@ -31,6 +31,17 @@ pub type Bytes4 = [u8; 4]; pub type Bytes8 = [u8; 8]; pub type Bytes16 = [u8; 16]; pub type Bytes32 = [u8; 32]; +pub type Bytes = [u8; N]; + +pub trait Zero { + fn zero() -> Self; +} + +impl Zero for [u8; N] { + fn zero() -> Self { + [0u8; N] + } +} use alloc::vec::Vec; pub type ProofSet = Vec; diff --git a/fuel-merkle/src/common/node.rs b/fuel-merkle/src/common/node.rs index 645007cf30..e47ab80430 100644 --- a/fuel-merkle/src/common/node.rs +++ b/fuel-merkle/src/common/node.rs @@ -1,7 +1,4 @@ -use crate::common::{ - Bytes32, - Bytes8, -}; +use crate::common::Bytes; use alloc::string::String; use core::{ @@ -61,15 +58,23 @@ where } } -impl KeyFormatting for Bytes8 { - type PrettyType = u64; - - fn pretty(&self) -> Self::PrettyType { - u64::from_be_bytes(*self) - } -} +// impl KeyFormatting for Bytes8 { +// type PrettyType = u64; +// +// fn pretty(&self) -> Self::PrettyType { +// u64::from_be_bytes(*self) +// } +// } +// +// impl KeyFormatting for Bytes32 { +// type PrettyType = String; +// +// fn 
pretty(&self) -> Self::PrettyType { +// hex::encode(self) +// } +// } -impl KeyFormatting for Bytes32 { +impl KeyFormatting for Bytes { type PrettyType = String; fn pretty(&self) -> Self::PrettyType { diff --git a/fuel-merkle/src/sparse.rs b/fuel-merkle/src/sparse.rs index 792a5665d7..5dc2e83480 100644 --- a/fuel-merkle/src/sparse.rs +++ b/fuel-merkle/src/sparse.rs @@ -19,6 +19,11 @@ pub use merkle_tree::{ pub use primitive::Primitive; pub mod in_memory; +mod hash_generic; +mod merkle_tree_generic; +mod node_generic; +mod primitive_generic; + use crate::common::Bytes32; pub const fn empty_sum() -> &'static Bytes32 { diff --git a/fuel-merkle/src/sparse/hash_generic.rs b/fuel-merkle/src/sparse/hash_generic.rs new file mode 100644 index 0000000000..8ad7301f04 --- /dev/null +++ b/fuel-merkle/src/sparse/hash_generic.rs @@ -0,0 +1,34 @@ +use crate::common::{ + Bytes, + Zero, +}; +use std::{ + convert::TryInto, + sync::OnceLock, +}; + +use digest::Digest; +use sha2::Sha256; + +pub(crate) type Hash = Sha256; + +// pub fn zero_sum() -> &'static T { +// static COMPUTATION: OnceLock = OnceLock::new(); +// COMPUTATION.get_or_init(|| T::zero()) +// } + +pub fn zero_sum() -> [u8; N] { + [0u8; N] +} + +pub fn sum(data: I) -> Bytes +where + I: AsRef<[u8]>, +{ + let mut hash = crate::sparse::hash::Hash::new(); + hash.update(data); + let h = hash.finalize(); + let mut vec = h.as_slice().to_vec(); + vec.truncate(N); + vec.try_into().unwrap() +} diff --git a/fuel-merkle/src/sparse/merkle_tree_generic.rs b/fuel-merkle/src/sparse/merkle_tree_generic.rs new file mode 100644 index 0000000000..f333889160 --- /dev/null +++ b/fuel-merkle/src/sparse/merkle_tree_generic.rs @@ -0,0 +1,1365 @@ +use crate::{ + common::{ + error::DeserializeError, + node::ChildError, + AsPathIterator, + }, + sparse::{ + node_generic::{ + Node, + StorageNode, + StorageNodeError, + }, + primitive_generic::Primitive, + }, + storage::{ + Mappable, + StorageInspect, + StorageMutate, + }, +}; + +use crate::common::{ + 
Bytes, + Bytes32, +}; +use alloc::vec::Vec; +use core::{ + cmp, + iter, + marker::PhantomData, +}; + +fn truncate(bytes: &[u8]) -> Bytes { + (&bytes[0..N]).clone().try_into().unwrap() +} + +#[derive(Debug, Clone, derive_more::Display)] +pub enum MerkleTreeError { + #[display( + fmt = "cannot load node with key {}; the key is not found in storage", + "hex::encode(_0)" + )] + LoadError(Bytes), + + #[display(fmt = "{}", _0)] + StorageError(StorageError), + + #[display(fmt = "{}", _0)] + DeserializeError(DeserializeError), + + #[display(fmt = "{}", _0)] + ChildError(ChildError, StorageNodeError>), +} + +impl From + for MerkleTreeError +{ + fn from(err: StorageError) -> MerkleTreeError { + MerkleTreeError::StorageError(err) + } +} + +/// The safe Merkle tree storage key prevents Merkle tree structure manipulations. +/// The type contains only one constructor that hashes the storage key. +#[derive(Debug, Clone, Copy)] +pub struct MerkleTreeKey(Bytes); + +impl MerkleTreeKey { + /// The safe way to create a `Self`. It hashes the `storage_key`, making + /// it entirely random and preventing SMT structure manipulation. + pub fn new(storage_key: B) -> Self + where + B: AsRef<[u8]>, + { + use digest::Digest; + let mut hash = sha2::Sha256::new(); + hash.update(storage_key.as_ref()); + let hash: Bytes32 = hash + .finalize() + .try_into() + .expect("`sha2::Sha256` can't fail during hashing"); + let truncated = truncate::(&hash); + Self(truncated) + } + + /// Unsafe analog to create a `Self` that doesn't hash the `storage_key` unlike + /// `Self::new`. + /// + /// # Safety + /// + /// It is safe to use this method if you know that `storage_key` + /// was randomly generated like `ContractId` or `AssetId`. 
+ pub unsafe fn convert(storage_key: B) -> Self + where + B: Into>, + { + Self(storage_key.into()) + } + + #[cfg(any(test, feature = "test-helpers"))] + pub fn new_without_hash(storage_key: B) -> Self + where + B: Into>, + { + unsafe { Self::convert(storage_key) } + } +} + +impl From> for Bytes { + fn from(value: MerkleTreeKey) -> Self { + value.0 + } +} + +#[derive(Debug)] +pub struct MerkleTree { + root_node: Node, + storage: StorageType, + phantom_table: PhantomData, +} + +impl + MerkleTree +{ + pub fn empty_root() -> Bytes { + vec![0; KeySize].try_into().unwrap() + } + + pub fn root(&self) -> Bytes { + self.root_node().hash() + } + + pub fn into_storage(self) -> StorageType { + self.storage + } + + pub fn storage(&self) -> &StorageType { + &self.storage + } + + // PRIVATE + + fn root_node(&self) -> &Node { + &self.root_node + } + + fn set_root_node(&mut self, node: Node) { + debug_assert!(node.is_leaf() || node.height() == Node::::max_height()); + self.root_node = node; + } +} + +impl + MerkleTree +where + TableType: Mappable< + Key = Bytes, + Value = Primitive, + OwnedValue = Primitive, + >, + StorageType: StorageInspect, +{ + pub fn new(storage: StorageType) -> Self { + Self { + root_node: Node::create_placeholder(), + storage, + phantom_table: Default::default(), + } + } + + pub fn load( + storage: StorageType, + root: &Bytes, + ) -> Result> { + if *root == Self::empty_root() { + let tree = Self::new(storage); + Ok(tree) + } else { + let primitive = storage + .get(root)? + .ok_or_else(|| MerkleTreeError::LoadError(*root))? 
+ .into_owned(); + let tree = Self { + root_node: primitive + .try_into() + .map_err(MerkleTreeError::DeserializeError)?, + storage, + phantom_table: Default::default(), + }; + Ok(tree) + } + } + + // PRIVATE + + fn path_set( + &self, + leaf_key: Bytes, + ) -> Result< + (Vec>, Vec>), + MerkleTreeError, + > { + let root_node = self.root_node().clone(); + let root_storage_node = StorageNode::new(&self.storage, root_node); + let (mut path_nodes, mut side_nodes): (Vec>, Vec>) = + root_storage_node + .as_path_iter(leaf_key) + .map(|(path_node, side_node)| { + Ok(( + path_node.map_err(MerkleTreeError::ChildError)?.into_node(), + side_node.map_err(MerkleTreeError::ChildError)?.into_node(), + )) + }) + .collect::, MerkleTreeError>>()? + .into_iter() + .unzip(); + path_nodes.reverse(); + side_nodes.reverse(); + side_nodes.pop(); // The last element in the side nodes list is the + // root; remove it. + + Ok((path_nodes, side_nodes)) + } +} + +impl + MerkleTree +where + TableType: Mappable< + Key = Bytes, + Value = Primitive, + OwnedValue = Primitive, + >, + StorageType: StorageMutate, +{ + // /// Build a sparse Merkle tree from a set of key-value pairs. This is + // /// equivalent to creating an empty sparse Merkle tree and sequentially + // /// calling [update](Self::update) for each key-value pair. This constructor + // /// is more performant than calling individual sequential updates and is the + // /// preferred approach when the key-values are known upfront. Leaves can be + // /// appended to the returned tree using `update` to further accumulate leaf + // /// data. 
+ // pub fn from_set( + // mut storage: StorageType, + // set: I, + // ) -> Result + // where + // I: Iterator, + // B: Into, + // D: AsRef<[u8]>, + // { + // let sorted = set + // .into_iter() + // .map(|(k, v)| (k.into(), v)) + // .collect::>(); + // let mut branches = sorted + // .iter() + // .filter(|(_, value)| !value.as_ref().is_empty()) + // .map(|(key, data)| Node::create_leaf(key, data)) + // .map(Into::::into) + // .collect::>(); + // + // for branch in branches.iter() { + // let leaf = &branch.node; + // storage.insert(leaf.hash(), &leaf.as_ref().into())?; + // } + // + // if branches.is_empty() { + // let tree = Self::new(storage); + // return Ok(tree) + // } + // + // if branches.len() == 1 { + // let leaf = branches.pop().expect("Expected at least 1 leaf").node; + // let mut tree = Self::new(storage); + // tree.set_root_node(leaf); + // return Ok(tree) + // } + // + // let mut nodes = Vec::::with_capacity(branches.len()); + // let mut proximities = Vec::::with_capacity(branches.len()); + // + // // Building the tree starts by merging all leaf nodes where possible. + // // Given a set of leaf nodes sorted left to right (i.e., keys are sorted + // // in lexical order), we scan the leaf set right to left, and analyze a + // // moving window of three leaves: a center (or "current") leaf, its left + // // neighbor, and its right neighbor. + // // + // // When merging leaf nodes, we analyze this three-node window to + // // determine if the condition for merging is met: When the current node + // // is closer to its right neighbor than it is to its left neighbor, we + // // merge the current node with its right neighbor. The merged node then + // // becomes the center of the window, and we must check the merge + // // condition again. We calculate proximity using the common path length + // // between two nodes, which is also the depth of their shared ancestor + // // in the tree. 
+ // // + // // This three-node window is centered around a current node, and moves + // // leftward: At the next iteration, the current node is now the right + // // node, the left node is now the current node, and so on. When we have + // // checked all windows, we know that we have merged all leaf nodes where + // // possible. + // while let Some(left) = branches.pop() { + // if let Some(current) = nodes.last() { + // let left_proximity = current.node.common_path_length(&left.node); + // while { + // // The current node's proximity to its right neighbor was + // // stored previously. We now compare the distances between + // // the current node's left and right neighbors. If, and only + // // if, the current node is closer to its right neighbor, we + // // merge these nodes to form an ancestor node. We then + // // reform the window, using the ancestor node in the center, + // // to check if we must merge again. + // // + // // If the current node is closer to its left, we do not have + // // enough information to merge nodes, and we must continue + // // scanning the leaf set leftwards to find a configuration + // // that satisfies the merge condition. + // if let Some(right_proximity) = proximities.last() { + // *right_proximity > left_proximity + // } else { + // false + // } + // } { + // // The current node is closer to its right neighbor than its + // // left neighbor. We now merge the current node with its + // // right neighbor. + // let current = + // nodes.pop().expect("Expected current node to be present"); + // let right = nodes.pop().expect("Expected right node to be + // present"); let merged = merge_branches(&mut storage, + // current, right)?; nodes.push(merged); + // + // // Now that the current node and its right neighbour are + // // merged, the distance between them has collapsed and their + // // proximity is no longer needed. 
+ // proximities.pop(); + // } + // proximities.push(left_proximity); + // } + // nodes.push(left); + // } + // + // // Where possible, all the leaves have been merged. The remaining leaves + // // and nodes are stacked in order of height descending. This means that + // // they are also ordered with the leftmost leaves at the top and the + // // rightmost nodes at the bottom. We can iterate through the stack and + // // merge them left to right. + // let top = { + // let mut node = nodes + // .pop() + // .expect("Nodes stack must have at least 1 element"); + // while let Some(next) = nodes.pop() { + // node = merge_branches(&mut storage, node, next)?; + // } + // node + // }; + // + // // Lastly, all leaves and nodes are merged into one. The resulting node + // // may still be an ancestor node below the root. To calculate the final + // // root, we merge placeholder nodes along the path until the resulting + // // node has the final height and forms the root node. + // let mut node = top.node; + // let path = top.bits; + // let height = node.height(); + // let depth = Node::max_height() - height; + // let placeholders = iter::repeat(Node::create_placeholder()).take(depth as + // usize); for placeholder in placeholders { + // node = Node::create_node_on_path(&path, &node, &placeholder); + // storage.insert(node.hash(), &node.as_ref().into())?; + // } + // + // let tree = Self { + // root_node: node, + // storage, + // phantom_table: Default::default(), + // }; + // Ok(tree) + // } + + pub fn update( + &mut self, + key: MerkleTreeKey, + data: &[u8], + ) -> Result<(), MerkleTreeError> { + if data.is_empty() { + // If the data is empty, this signifies a delete operation for the + // given key. 
+ self.delete(key)?; + return Ok(()) + } + + let key = key.into(); + let leaf_node = Node::create_leaf(&key, data); + self.storage + .insert(&leaf_node.hash(), &leaf_node.as_ref().into())?; + + if self.root_node().is_placeholder() { + self.set_root_node(leaf_node); + } else { + let (path_nodes, side_nodes) = self.path_set(key)?; + self.update_with_path_set( + &leaf_node, + path_nodes.as_slice(), + side_nodes.as_slice(), + )?; + } + + Ok(()) + } + + pub fn delete( + &mut self, + key: MerkleTreeKey, + ) -> Result<(), MerkleTreeError> { + if self.root() == Self::empty_root() { + // The zero root signifies that all leaves are empty, including the + // given key. + return Ok(()) + } + + let key = key.into(); + let (path_nodes, side_nodes): (Vec>, Vec>) = + self.path_set(key)?; + + match path_nodes.get(0) { + Some(node) if node.leaf_key() == key => { + self.delete_with_path_set( + &key, + path_nodes.as_slice(), + side_nodes.as_slice(), + )?; + } + _ => {} + }; + + Ok(()) + } + + // PRIVATE + + fn update_with_path_set( + &mut self, + requested_leaf_node: &Node, + path_nodes: &[Node], + side_nodes: &[Node], + ) -> Result<(), StorageError> { + let path = requested_leaf_node.leaf_key(); + let actual_leaf_node = &path_nodes[0]; + + if requested_leaf_node == actual_leaf_node { + return Ok(()) + } + + // Build the tree upwards starting with the requested leaf node. + let mut current_node = requested_leaf_node.clone(); + + // If we are creating a new leaf node, the corresponding side node will + // be the first node in the path set. The side node will be the leaf + // node currently closest to the requested new leaf node. When creating + // a new leaf node, we must merge the leaf node with its corresponding + // side node to create a common ancestor. We then continue building the + // tree upwards from this ancestor node. This may require creating new + // placeholder side nodes, in addition to the existing side node set. 
+ // + // If we are updating an existing leaf node, the leaf node we are + // updating is the first node in the path set. The side node set will + // already include all the side nodes needed to build up the tree from + // the requested leaf node, since these side nodes were already built + // during the creation of the leaf node. + // + // We can determine if we are updating an existing leaf node, or if we + // are creating a new leaf node, by comparing the paths of the requested + // leaf node and the leaf node at the start of the path set. When the + // paths are equal, it means the leaf nodes occupy the same location, + // and we are updating an existing leaf. Otherwise, it means we are + // adding a new leaf node. + if requested_leaf_node.leaf_key() != actual_leaf_node.leaf_key() { + // Merge leaves + if !actual_leaf_node.is_placeholder() { + current_node = + Node::create_node_on_path(&path, ¤t_node, actual_leaf_node); + self.storage + .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; + } + + // Merge placeholders + let ancestor_depth = requested_leaf_node.common_path_length(actual_leaf_node); + let stale_depth = cmp::max(side_nodes.len(), ancestor_depth as usize); + let placeholders_count = stale_depth - side_nodes.len(); + let placeholders = + iter::repeat(Node::create_placeholder()).take(placeholders_count); + for placeholder in placeholders { + current_node = + Node::create_node_on_path(&path, ¤t_node, &placeholder); + self.storage + .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; + } + } else { + self.storage.remove(&actual_leaf_node.hash())?; + } + + // Merge side nodes + for side_node in side_nodes { + current_node = Node::create_node_on_path(&path, ¤t_node, side_node); + self.storage + .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; + } + + for node in path_nodes.iter().skip(1 /* leaf */) { + self.storage.remove(&node.hash())?; + } + + self.set_root_node(current_node); + + Ok(()) + } + + fn delete_with_path_set( + &mut self, + 
requested_leaf_key: &Bytes, + path_nodes: &[Node], + side_nodes: &[Node], + ) -> Result<(), StorageError> { + for node in path_nodes { + self.storage.remove(&node.hash())?; + } + + let path = requested_leaf_key; + let mut side_nodes_iter = side_nodes.iter(); + + // The deleted leaf is replaced by a placeholder. Build the tree upwards + // starting with the placeholder. + let mut current_node = Node::create_placeholder(); + + // If the first side node is a leaf, it means the ancestor node is now + // parent to a placeholder (the deleted leaf node) and a leaf node (the + // first side node). We can immediately discard the ancestor node from + // further calculation and attach the orphaned leaf node to its next + // ancestor. Any subsequent ancestor nodes composed of this leaf node + // and a placeholder must be similarly discarded from further + // calculation. We then create a valid ancestor node for the orphaned + // leaf node by joining it with the earliest non-placeholder side node. + if let Some(first_side_node) = side_nodes.first() { + if first_side_node.is_leaf() { + side_nodes_iter.next(); + current_node = first_side_node.clone(); + + // Advance the side node iterator to the next non-placeholder + // node. This may be either another leaf node or an internal + // node. If only placeholder nodes exist beyond the first leaf + // node, then that leaf node is, in fact, the new root node. + // + // Using `find(..)` advances the iterator beyond the next + // non-placeholder side node and returns it. Therefore, we must + // consume the side node at this point. If another non- + // placeholder node was found in the side node collection, merge + // it with the first side node. This guarantees that the current + // node will be an internal node, and not a leaf, by the time we + // start merging the remaining side nodes. + // See https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.find. 
+ if let Some(side_node) = + side_nodes_iter.find(|side_node| !side_node.is_placeholder()) + { + current_node = + Node::create_node_on_path(path, ¤t_node, side_node); + self.storage + .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; + } + } + } + + // Merge side nodes + for side_node in side_nodes_iter { + current_node = Node::create_node_on_path(path, ¤t_node, side_node); + self.storage + .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; + } + + self.set_root_node(current_node); + + Ok(()) + } +} + +#[cfg(test)] +mod test { + use super::{ + MerkleTree, + MerkleTreeError, + MerkleTreeKey, + Node, + Primitive, + }; + use crate::{ + common::{ + Bytes, + StorageMap, + }, + sparse::hash_generic::{ + sum, + zero_sum, + }, + }; + use fuel_storage::Mappable; + use hex; + + // fn random_bytes(n: usize, rng: &mut R) -> Bytes32 + // where + // R: rand::Rng + ?Sized, + // { + // let mut bytes = vec![0u8; n]; + // rng.fill(&mut bytes); + // bytes.try_into().unwrap() + // } + + #[derive(Debug)] + struct TestTable; + + impl Mappable for TestTable { + type Key = Self::OwnedKey; + type OwnedKey = Bytes<4>; + type OwnedValue = Primitive<4>; + type Value = Self::OwnedValue; + } + + fn key>(data: B) -> MerkleTreeKey { + MerkleTreeKey::new_without_hash(sum(data.as_ref())) + } + + #[test] + fn test_empty_root() { + let mut storage = StorageMap::::new(); + let tree = MerkleTree::new(&mut storage); + let root = tree.root(); + let expected_root = "00000000"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_1() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + + let root = tree.root(); + let expected_root = + "39f36a7cb4dfb1b46f03d044265df6a491dffc1034121bc1071a34ddce9bb14b"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_2() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + 
tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); + + let root = tree.root(); + let expected_root = + "8d0ae412ca9ca0afcb3217af8bcd5a673e798bd6fd1dfacad17711e883f494cb"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_3() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); + + let root = tree.root(); + let expected_root = + "52295e42d8de2505fdc0cc825ff9fead419cbcf540d8b30c7c4b9c9b94c268b7"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_5() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x03"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); + + let root = tree.root(); + let expected_root = + "108f731f2414e33ae57e584dc26bd276db07874436b2264ca6e520c658185c6b"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_10() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + for i in 0_u32..10 { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + + let root = tree.root(); + let expected_root = + "21ca4917e99da99a61de93deaf88c400d4c082991cb95779e444d43dd13e8849"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_100() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + for i in 0_u32..100 { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + + let root = 
tree.root(); + let expected_root = + "82bf747d455a55e2f7044a03536fc43f1f55d43b855e72c0110c986707a23e4d"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_with_repeated_inputs() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + + let root = tree.root(); + let expected_root = + "39f36a7cb4dfb1b46f03d044265df6a491dffc1034121bc1071a34ddce9bb14b"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_overwrite_key() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x00"), b"CHANGE").unwrap(); + + let root = tree.root(); + let expected_root = + "dd97174c80e5e5aa3a31c61b05e279c1495c8a07b2a08bca5dbc9fb9774f9457"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_overwrite_key_2() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + for i in 0_u32..10 { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + + let root_hash_before = tree.root(); + + for i in 3_u32..7 { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA_2").unwrap(); + } + + for i in 3_u32..7 { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + + let root_hash_after = tree.root(); + + assert_eq!(root_hash_before, root_hash_after); + } + + #[test] + fn test_update_union() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + for i in 0_u32..5 { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + + for i in 10_u32..15 { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + + for i in 20_u32..25 { + let key = key(i.to_be_bytes()); + 
tree.update(key, b"DATA").unwrap(); + } + + let root = tree.root(); + let expected_root = + "7e6643325042cfe0fc76626c043b97062af51c7e9fc56665f12b479034bce326"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_sparse_union() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x06"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x08"), b"DATA").unwrap(); + + let root = tree.root(); + let expected_root = + "e912e97abc67707b2e6027338292943b53d01a7fbd7b244674128c7e468dd696"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_with_empty_data_does_not_change_root() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + tree.update(key(b"\x00\x00\x00\x00"), b"").unwrap(); + + let root = tree.root(); + let expected_root = + "0000000000000000000000000000000000000000000000000000000000000000"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_with_empty_data_performs_delete() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x00"), b"").unwrap(); + + let root = tree.root(); + let expected_root = + "0000000000000000000000000000000000000000000000000000000000000000"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_1_delete_1() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.delete(key(b"\x00\x00\x00\x00")).unwrap(); + + let root = tree.root(); + let expected_root = + 
"0000000000000000000000000000000000000000000000000000000000000000"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_2_delete_1() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); + tree.delete(key(b"\x00\x00\x00\x01")).unwrap(); + + let root = tree.root(); + let expected_root = + "39f36a7cb4dfb1b46f03d044265df6a491dffc1034121bc1071a34ddce9bb14b"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_10_delete_5() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + for i in 0_u32..10 { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + + for i in 5_u32..10 { + let key = key(i.to_be_bytes()); + tree.delete(key).unwrap(); + } + + let root = tree.root(); + let expected_root = + "108f731f2414e33ae57e584dc26bd276db07874436b2264ca6e520c658185c6b"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_delete_non_existent_key() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x03"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); + tree.delete(key(b"\x00\x00\x04\x00")).unwrap(); + + let root = tree.root(); + let expected_root = + "108f731f2414e33ae57e584dc26bd276db07874436b2264ca6e520c658185c6b"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_interleaved_update_delete() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + for i in 0_u32..10 { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + + 
for i in 5_u32..15 { + let key = key(i.to_be_bytes()); + tree.delete(key).unwrap(); + } + + for i in 10_u32..20 { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + + for i in 15_u32..25 { + let key = key(i.to_be_bytes()); + tree.delete(key).unwrap(); + } + + for i in 20_u32..30 { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + + for i in 25_u32..35 { + let key = key(i.to_be_bytes()); + tree.delete(key).unwrap(); + } + + let root = tree.root(); + let expected_root = + "7e6643325042cfe0fc76626c043b97062af51c7e9fc56665f12b479034bce326"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_update_removes_old_entries() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + let tenth_index = 9u32; + + for i in 0_u32..tenth_index { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + let size_before_tenth = tree.storage().len(); + let tenth_key = key(tenth_index.to_be_bytes()); + + // Given + tree.update(tenth_key, b"DATA").unwrap(); + let size_after_tenth = tree.storage().len(); + assert_ne!(size_after_tenth, size_before_tenth); + + // When + tree.update(tenth_key, b"ANOTHER_DATA").unwrap(); + + // Then + assert_eq!(tree.storage().len(), size_after_tenth); + } + + #[test] + fn test_update_with_the_same_value_does_not_remove_old_entries() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + let tenth_index = 9u32; + + for i in 0_u32..tenth_index { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + let size_before_tenth = tree.storage().len(); + let tenth_key = key(tenth_index.to_be_bytes()); + + // Given + tree.update(tenth_key, b"DATA").unwrap(); + let size_after_tenth = tree.storage().len(); + assert_ne!(size_after_tenth, size_before_tenth); + + // When + tree.update(tenth_key, b"DATA").unwrap(); + + // Then + assert_eq!(tree.storage().len(), size_after_tenth); + } 
+ + #[test] + fn test_delete_removes_path_entries() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + let tenth_index = 9u32; + + for i in 0_u32..tenth_index { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + let size_before_tenth = tree.storage().len(); + let tenth_key = key(tenth_index.to_be_bytes()); + + // Given + tree.update(tenth_key, b"DATA").unwrap(); + let size_after_tenth = tree.storage().len(); + assert_ne!(size_after_tenth, size_before_tenth); + + // When + tree.delete(tenth_key).unwrap(); + + // Then + assert_eq!(tree.storage().len(), size_before_tenth); + } + + #[test] + fn test_delete_sparse_union() { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + for i in 0_u32..10 { + let key = key(i.to_be_bytes()); + tree.update(key, b"DATA").unwrap(); + } + + for i in 0_u32..5 { + let key = key((i * 2 + 1).to_be_bytes()); + tree.delete(key).unwrap(); + } + + let root = tree.root(); + let expected_root = + "e912e97abc67707b2e6027338292943b53d01a7fbd7b244674128c7e468dd696"; + assert_eq!(hex::encode(root), expected_root); + } + + #[test] + fn test_override_hash_key() { + use fuel_storage::StorageInspect; + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + + let leaf_1_key = key(b"\x00\x00\x00\x00"); + let leaf_1_data = b"DATA_1"; + let leaf_1 = Node::create_leaf(&leaf_1_key.0, leaf_1_data); + + let leaf_2_key = MerkleTreeKey::new_without_hash(leaf_1.hash()); + let leaf_2_data = b"DATA_2"; + let leaf_2 = Node::create_leaf(&leaf_2_key.0, leaf_2_data); + + tree.update(leaf_2_key, leaf_2_data).unwrap(); + tree.update(leaf_1_key, leaf_1_data).unwrap(); + assert_eq!( + tree.storage + .get(&leaf_2.hash()) + .unwrap() + .unwrap() + .into_owned(), + leaf_2.as_ref().into() + ); + assert_eq!( + tree.storage + .get(&leaf_1.hash()) + .unwrap() + .unwrap() + .into_owned(), + leaf_1.as_ref().into() + ); + } + + #[test] + fn 
test_load_returns_a_valid_tree() { + // Instantiate a new key-value storage backing and populate it using a sparse + // Merkle tree. The root of the Merkle tree is the key that maps to the buffer + // of the root node in the storage. When loading a Merkle tree from storage, we + // need a reference to the storage object, as well as the root that allows us to + // look up the buffer of the root node. We will later use this storage backing + // and root to load a Merkle tree. + let (mut storage_to_load, root_to_load) = { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x03"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); + let root = tree.root(); + (storage, root) + }; + + // Generate an expected root for this test by using both the set of `update` + // data used when generating the loadable storage above and an additional set of + // `update` data. + let expected_root = { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x03"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x05"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x06"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x07"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x08"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x09"), b"DATA").unwrap(); + tree.root() + }; + + let root = { + // Create a Merkle tree by loading the generated storage and root. 
+ let mut tree = MerkleTree::load(&mut storage_to_load, &root_to_load).unwrap(); + // Build up the loaded tree using the additional set of `update` data so its + // root matches the expected root. This verifies that the loaded tree has + // successfully wrapped the given storage backing and assumed the correct + // state so that future updates can be made seamlessly. + tree.update(key(b"\x00\x00\x00\x05"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x06"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x07"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x08"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x09"), b"DATA").unwrap(); + tree.root() + }; + + assert_eq!(root, expected_root); + } + + #[test] + fn test_load_returns_an_empty_tree_for_empty_sum_root() { + let mut storage = StorageMap::::new(); + let tree = MerkleTree::load(&mut storage, &zero_sum()).unwrap(); + let root = tree.root(); + + assert_eq!(root, zero_sum()); + } + + #[test] + fn test_load_returns_a_load_error_if_the_storage_is_not_valid_for_the_root() { + let mut storage = StorageMap::::new(); + + { + let mut tree = MerkleTree::new(&mut storage); + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x03"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); + } + + let root = &sum(b"\xff\xff\xff\xff"); + let err = MerkleTree::load(&mut storage, root) + .expect_err("Expected load() to return Error; got Ok"); + assert!(matches!(err, MerkleTreeError::LoadError(_))); + } + + #[test] + fn test_load_returns_a_deserialize_error_if_the_storage_is_corrupted() { + use fuel_storage::StorageMutate; + + let mut storage = StorageMap::::new(); + + let mut tree = MerkleTree::new(&mut storage); + tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); 
+ tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x03"), b"DATA").unwrap(); + tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); + let root = tree.root(); + + // Overwrite the root key-value with an invalid primitive to create a + // DeserializeError. + let primitive = (0xff, 0xff, [0xff; 4], [0xff; 4]); + storage.insert(&root, &primitive).unwrap(); + + let err = MerkleTree::load(&mut storage, &root) + .expect_err("Expected load() to return Error; got Ok"); + assert!(matches!(err, MerkleTreeError::DeserializeError(_))); + } + + // #[test] + // fn test_from_set_yields_expected_root() { + // let rng = &mut rand::thread_rng(); + // let gen = || { + // Some(( + // MerkleTreeKey::new_without_hash(random_bytes32(rng)), + // random_bytes32(rng), + // )) + // }; + // let data = std::iter::from_fn(gen).take(1_000).collect::>(); + // + // let expected_root = { + // let mut storage = StorageMap::::new(); + // let mut tree = MerkleTree::new(&mut storage); + // let input = data.clone(); + // for (key, value) in input.into_iter() { + // tree.update(key, &value).unwrap(); + // } + // tree.root() + // }; + // + // let root = { + // let mut storage = StorageMap::::new(); + // let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); + // tree.root() + // }; + // + // assert_eq!(root, expected_root); + // } + // + // #[test] + // fn test_from_empty_set_yields_expected_root() { + // let rng = &mut rand::thread_rng(); + // let gen = || { + // Some(( + // MerkleTreeKey::new_without_hash(random_bytes32(rng)), + // random_bytes32(rng), + // )) + // }; + // let data = std::iter::from_fn(gen).take(0).collect::>(); + // + // let expected_root = { + // let mut storage = StorageMap::::new(); + // let mut tree = MerkleTree::new(&mut storage); + // let input = data.clone(); + // for (key, value) in input.into_iter() { + // tree.update(key, &value).unwrap(); + // } + // tree.root() + // }; + // + // let root = { + // let mut storage 
= StorageMap::::new(); + // let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); + // tree.root() + // }; + // + // assert_eq!(root, expected_root); + // } + // + // #[test] + // fn test_from_unit_set_yields_expected_root() { + // let rng = &mut rand::thread_rng(); + // let gen = || { + // Some(( + // MerkleTreeKey::new_without_hash(random_bytes32(rng)), + // random_bytes32(rng), + // )) + // }; + // let data = std::iter::from_fn(gen).take(1).collect::>(); + // + // let expected_root = { + // let mut storage = StorageMap::::new(); + // let mut tree = MerkleTree::new(&mut storage); + // let input = data.clone(); + // for (key, value) in input.into_iter() { + // tree.update(key, &value).unwrap(); + // } + // tree.root() + // }; + // + // let root = { + // let mut storage = StorageMap::::new(); + // let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); + // tree.root() + // }; + // + // assert_eq!(root, expected_root); + // } + // + // #[test] + // fn test_from_set_with_duplicate_keys_yields_expected_root() { + // let rng = &mut rand::thread_rng(); + // let keys = [ + // key(b"\x00\x00\x00\x00"), + // key(b"\x00\x00\x00\x01"), + // key(b"\x00\x00\x00\x02"), + // ]; + // let data = [ + // (keys[0], random_bytes32(rng)), + // (keys[1], random_bytes32(rng)), + // (keys[2], random_bytes32(rng)), + // (keys[0], random_bytes32(rng)), + // (keys[1], random_bytes32(rng)), + // (keys[2], random_bytes32(rng)), + // ]; + // + // let expected_root = { + // let mut storage = StorageMap::::new(); + // let mut tree = MerkleTree::new(&mut storage); + // let input = data; + // for (key, value) in input.into_iter() { + // tree.update(key, &value).unwrap(); + // } + // tree.root() + // }; + // + // let root = { + // let mut storage = StorageMap::::new(); + // let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); + // tree.root() + // }; + // + // assert_eq!(root, expected_root); + // } + // + // #[test] + // fn 
test_from_set_with_empty_data_yields_expected_root() { + // let rng = &mut rand::thread_rng(); + // let keys = [ + // key(b"\x00\x00\x00\x00"), + // key(b"\x00\x00\x00\x01"), + // key(b"\x00\x00\x00\x02"), + // ]; + // let data = [ + // (keys[0], random_bytes32(rng).to_vec()), + // (keys[1], random_bytes32(rng).to_vec()), + // (keys[2], random_bytes32(rng).to_vec()), + // (keys[0], b"".to_vec()), + // (keys[1], b"".to_vec()), + // (keys[2], b"".to_vec()), + // ]; + // + // let expected_root = { + // let mut storage = StorageMap::::new(); + // let mut tree = MerkleTree::new(&mut storage); + // let input = data.clone(); + // for (key, value) in input.into_iter() { + // tree.update(key, &value).unwrap(); + // } + // tree.root() + // }; + // + // let root = { + // let mut storage = StorageMap::::new(); + // let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); + // tree.root() + // }; + // + // assert_eq!(root, expected_root); + // } +} diff --git a/fuel-merkle/src/sparse/node_generic.rs b/fuel-merkle/src/sparse/node_generic.rs new file mode 100644 index 0000000000..15167656d2 --- /dev/null +++ b/fuel-merkle/src/sparse/node_generic.rs @@ -0,0 +1,824 @@ +use crate::{ + common::{ + error::DeserializeError, + node::{ + ChildError, + ChildResult, + Node as NodeTrait, + ParentNode as ParentNodeTrait, + }, + path::{ + ComparablePath, + Instruction, + Path, + }, + Prefix, + }, + sparse::{ + hash_generic::{ + sum, + zero_sum, + }, + primitive_generic::Primitive, + }, + storage::{ + Mappable, + StorageInspect, + }, +}; + +use crate::{ + common::Bytes, + sparse::hash::Hash, +}; +use core::{ + cmp, + fmt, + marker::PhantomData, +}; +use digest::Digest; + +#[derive(Clone, PartialEq, Eq)] +pub(crate) enum Node { + Node { + hash: Bytes, + height: u32, + prefix: Prefix, + bytes_lo: Bytes, + bytes_hi: Bytes, + }, + Placeholder, +} + +impl Node { + fn calculate_hash( + prefix: &Prefix, + bytes_lo: &Bytes, + bytes_hi: &Bytes, + ) -> Bytes { + let mut hash = 
sha2::Sha256::new(); + hash.update(prefix); + hash.update(bytes_lo); + hash.update(bytes_hi); + let h = hash.finalize(); + let mut vec = h.to_vec(); + vec.truncate(N); + vec.try_into().unwrap() + } + + pub fn max_height() -> u32 { + Node::::key_size_in_bits() + } + + pub fn new( + height: u32, + prefix: Prefix, + bytes_lo: Bytes, + bytes_hi: Bytes, + ) -> Self { + Self::Node { + hash: Self::calculate_hash(&prefix, &bytes_lo, &bytes_hi), + height, + prefix, + bytes_lo, + bytes_hi, + } + } + + pub fn create_leaf>(key: &Bytes, data: D) -> Self { + let bytes_hi = sum(data); + Self::Node { + hash: Self::calculate_hash(&Prefix::Leaf, key, &bytes_hi), + height: 0u32, + prefix: Prefix::Leaf, + bytes_lo: *key, + bytes_hi, + } + } + + pub fn create_node(left_child: &Node, right_child: &Node, height: u32) -> Self { + let bytes_lo = left_child.hash(); + let bytes_hi = right_child.hash(); + Self::Node { + hash: Self::calculate_hash(&Prefix::Node, &bytes_lo, &bytes_hi), + height, + prefix: Prefix::Node, + bytes_lo, + bytes_hi, + } + } + + pub fn create_node_on_path( + path: &dyn Path, + path_node: &Node, + side_node: &Node, + ) -> Self { + if path_node.is_leaf() && side_node.is_leaf() { + // When joining two leaves, the joined node is found where the paths + // of the two leaves diverge. The joined node may be a direct parent + // of the leaves or an ancestor multiple generations above the + // leaves. + // N.B.: A leaf can be a placeholder. 
+ let parent_depth = path_node.common_path_length(side_node); + let parent_height = Node::::max_height() - parent_depth; + match path.get_instruction(parent_depth).unwrap() { + Instruction::Left => { + Node::create_node(path_node, side_node, parent_height) + } + Instruction::Right => { + Node::create_node(side_node, path_node, parent_height) + } + } + } else { + // When joining two nodes, or a node and a leaf, the joined node is + // the direct parent of the node with the greater height and an + // ancestor of the node with the lesser height. + // N.B.: A leaf can be a placeholder. + let parent_height = cmp::max(path_node.height(), side_node.height()) + 1; + let parent_depth = Node::::max_height() - parent_height; + match path.get_instruction(parent_depth).unwrap() { + Instruction::Left => { + Node::create_node(path_node, side_node, parent_height) + } + Instruction::Right => { + Node::create_node(side_node, path_node, parent_height) + } + } + } + } + + pub fn create_placeholder() -> Self { + Self::Placeholder + } + + pub fn common_path_length(&self, other: &Node) -> u32 { + debug_assert!(self.is_leaf()); + debug_assert!(other.is_leaf()); + + // If either of the nodes is a placeholder, the common path length is + // defined to be 0. This is needed to prevent a 0 bit in the + // placeholder's key from producing an erroneous match with a 0 bit in + // the leaf's key. + if self.is_placeholder() || other.is_placeholder() { + 0 + } else { + self.leaf_key().common_path_length(&other.leaf_key()) + } + } + + pub fn height(&self) -> u32 { + match self { + Node::Node { height, .. } => *height, + Node::Placeholder => 0, + } + } + + pub fn prefix(&self) -> Prefix { + match self { + Node::Node { prefix, .. } => *prefix, + Node::Placeholder => Prefix::Leaf, + } + } + + pub fn bytes_lo(&self) -> Bytes { + match self { + Node::Node { bytes_lo, .. 
} => *bytes_lo, + Node::Placeholder => zero_sum(), + } + } + + pub fn bytes_hi(&self) -> Bytes { + match self { + Node::Node { bytes_hi, .. } => *bytes_hi, + Node::Placeholder => zero_sum(), + } + } + + pub fn is_leaf(&self) -> bool { + self.prefix() == Prefix::Leaf || self.is_placeholder() + } + + pub fn is_node(&self) -> bool { + self.prefix() == Prefix::Node + } + + pub fn leaf_key(&self) -> Bytes { + assert!(self.is_leaf()); + self.bytes_lo() + } + + pub fn leaf_data(&self) -> Bytes { + assert!(self.is_leaf()); + self.bytes_hi() + } + + pub fn left_child_key(&self) -> Bytes { + assert!(self.is_node()); + self.bytes_lo() + } + + pub fn right_child_key(&self) -> Bytes { + assert!(self.is_node()); + self.bytes_hi() + } + + pub fn is_placeholder(&self) -> bool { + &Self::Placeholder == self + } + + pub fn hash(&self) -> Bytes { + match self { + Node::Node { hash, .. } => *hash, + Node::Placeholder => zero_sum(), + } + } +} + +impl AsRef> for Node { + fn as_ref(&self) -> &Node { + self + } +} + +impl NodeTrait for Node { + type Key = Bytes; + + fn height(&self) -> u32 { + Node::height(self) + } + + fn leaf_key(&self) -> Self::Key { + Node::leaf_key(self) + } + + fn is_leaf(&self) -> bool { + Node::is_leaf(self) + } + + fn is_node(&self) -> bool { + Node::is_node(self) + } +} + +impl fmt::Debug for Node { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.is_node() { + f.debug_struct("Node (Internal)") + .field("Height", &self.height()) + .field("Hash", &hex::encode(self.hash())) + .field("Left child key", &hex::encode(self.left_child_key())) + .field("Right child key", &hex::encode(self.right_child_key())) + .finish() + } else { + f.debug_struct("Node (Leaf)") + .field("Height", &self.height()) + .field("Hash", &hex::encode(self.hash())) + .field("Leaf key", &hex::encode(self.leaf_key())) + .field("Leaf data", &hex::encode(self.leaf_data())) + .finish() + } + } +} + +pub(crate) struct StorageNode<'storage, const KeySize: usize, TableType, 
StorageType> { + storage: &'storage StorageType, + node: Node, + phantom_table: PhantomData, +} + +impl Clone + for StorageNode<'_, KeySize, TableType, StorageType> +{ + fn clone(&self) -> Self { + Self { + storage: self.storage, + node: self.node.clone(), + phantom_table: Default::default(), + } + } +} + +impl<'s, const KeySize: usize, TableType, StorageType> + StorageNode<'s, KeySize, TableType, StorageType> +{ + pub fn new(storage: &'s StorageType, node: Node) -> Self { + Self { + node, + storage, + phantom_table: Default::default(), + } + } +} + +impl + StorageNode<'_, KeySize, TableType, StorageType> +{ + pub fn hash(&self) -> Bytes { + self.node.hash() + } + + pub fn into_node(self) -> Node { + self.node + } +} + +impl NodeTrait + for StorageNode<'_, KeySize, TableType, StorageType> +{ + type Key = Bytes; + + fn height(&self) -> u32 { + self.node.height() + } + + fn leaf_key(&self) -> Self::Key { + self.node.leaf_key() + } + + fn is_leaf(&self) -> bool { + self.node.is_leaf() + } + + fn is_node(&self) -> bool { + self.node.is_node() + } +} + +#[derive(Debug, Clone, derive_more::Display)] +pub enum StorageNodeError { + #[display(fmt = "{}", _0)] + StorageError(StorageError), + #[display(fmt = "{}", _0)] + DeserializeError(DeserializeError), +} + +impl ParentNodeTrait + for StorageNode<'_, KeySize, TableType, StorageType> +where + StorageType: StorageInspect, + TableType: Mappable< + Key = Bytes, + Value = Primitive, + OwnedValue = Primitive, + >, +{ + type Error = StorageNodeError; + + fn left_child(&self) -> ChildResult { + if self.is_leaf() { + return Err(ChildError::NodeIsLeaf) + } + let key = self.node.left_child_key(); + if key == zero_sum() { + return Ok(Self::new(self.storage, Node::create_placeholder())) + } + let primitive = self + .storage + .get(&key) + .map_err(StorageNodeError::StorageError)? 
+ .ok_or(ChildError::ChildNotFound(key))?; + Ok(primitive + .into_owned() + .try_into() + .map(|node| Self::new(self.storage, node)) + .map_err(StorageNodeError::DeserializeError)?) + } + + fn right_child(&self) -> ChildResult { + if self.is_leaf() { + return Err(ChildError::NodeIsLeaf) + } + let key = self.node.right_child_key(); + if key == zero_sum() { + return Ok(Self::new(self.storage, Node::create_placeholder())) + } + let primitive = self + .storage + .get(&key) + .map_err(StorageNodeError::StorageError)? + .ok_or(ChildError::ChildNotFound(key))?; + Ok(primitive + .into_owned() + .try_into() + .map(|node| Self::new(self.storage, node)) + .map_err(StorageNodeError::DeserializeError)?) + } +} + +impl fmt::Debug + for StorageNode<'_, KeySize, TableType, StorageType> +where + StorageType: StorageInspect, + TableType: Mappable< + Key = Bytes, + Value = Primitive, + OwnedValue = Primitive, + >, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.is_node() { + f.debug_struct("StorageNode (Internal)") + .field("Height", &self.height()) + .field("Hash", &hex::encode(self.hash())) + .field("Left child key", &hex::encode(self.node.left_child_key())) + .field("Right child key", &hex::encode(self.node.right_child_key())) + .finish() + } else { + f.debug_struct("StorageNode (Leaf)") + .field("Height", &self.height()) + .field("Hash", &hex::encode(self.hash())) + .field("Leaf key", &hex::encode(self.node.leaf_key())) + .field("Leaf data", &hex::encode(self.node.leaf_data())) + .finish() + } + } +} + +#[cfg(test)] +mod test_node { + use crate::{ + common::{ + error::DeserializeError, + Bytes32, + Prefix, + PrefixError, + }, + sparse::{ + hash::sum, + zero_sum, + Node, + Primitive, + }, + }; + + fn leaf_hash(key: &Bytes32, data: &[u8]) -> Bytes32 { + let mut buffer = [0; 65]; + buffer[0..1].clone_from_slice(Prefix::Leaf.as_ref()); + buffer[1..33].clone_from_slice(key); + buffer[33..65].clone_from_slice(&sum(data)); + sum(buffer) + } + + #[test] + fn 
test_create_leaf_returns_a_valid_leaf() { + let leaf = Node::create_leaf(&sum(b"LEAF"), [1u8; 32]); + assert_eq!(leaf.is_leaf(), true); + assert_eq!(leaf.is_node(), false); + assert_eq!(leaf.height(), 0); + assert_eq!(leaf.prefix(), Prefix::Leaf); + assert_eq!(*leaf.leaf_key(), sum(b"LEAF")); + assert_eq!(*leaf.leaf_data(), sum([1u8; 32])); + } + + #[test] + fn test_create_node_returns_a_valid_node() { + let left_child = Node::create_leaf(&sum(b"LEFT CHILD"), [1u8; 32]); + let right_child = Node::create_leaf(&sum(b"RIGHT CHILD"), [1u8; 32]); + let node = Node::create_node(&left_child, &right_child, 1); + assert_eq!(node.is_leaf(), false); + assert_eq!(node.is_node(), true); + assert_eq!(node.height(), 1); + assert_eq!(node.prefix(), Prefix::Node); + assert_eq!( + *node.left_child_key(), + leaf_hash(&sum(b"LEFT CHILD"), &[1u8; 32]) + ); + assert_eq!( + *node.right_child_key(), + leaf_hash(&sum(b"RIGHT CHILD"), &[1u8; 32]) + ); + } + + #[test] + fn test_create_placeholder_returns_a_placeholder_node() { + let node = Node::create_placeholder(); + assert_eq!(node.is_placeholder(), true); + assert_eq!(node.hash(), zero_sum()); + } + + #[test] + fn test_create_leaf_from_primitive_returns_a_valid_leaf() { + let primitive = (0, Prefix::Leaf as u8, [0xff; 32], [0xff; 32]); + + let node: Node = primitive.try_into().unwrap(); + assert_eq!(node.is_leaf(), true); + assert_eq!(node.is_node(), false); + assert_eq!(node.height(), 0); + assert_eq!(node.prefix(), Prefix::Leaf); + assert_eq!(*node.leaf_key(), [0xff; 32]); + assert_eq!(*node.leaf_data(), [0xff; 32]); + } + + #[test] + fn test_create_node_from_primitive_returns_a_valid_node() { + let primitive = (255, Prefix::Node as u8, [0xff; 32], [0xff; 32]); + + let node: Node = primitive.try_into().unwrap(); + assert_eq!(node.is_leaf(), false); + assert_eq!(node.is_node(), true); + assert_eq!(node.height(), 255); + assert_eq!(node.prefix(), Prefix::Node); + assert_eq!(*node.left_child_key(), [0xff; 32]); + 
assert_eq!(*node.right_child_key(), [0xff; 32]); + } + + #[test] + fn test_create_from_primitive_returns_deserialize_error_if_invalid_prefix() { + let primitive = (0xff, 0xff, [0xff; 32], [0xff; 32]); + + // Should return Error; prefix 0xff is does not represent a node or leaf + let err = Node::try_from(primitive) + .expect_err("Expected try_from() to be Error; got OK"); + assert!(matches!( + err, + DeserializeError::PrefixError(PrefixError::InvalidPrefix(0xff)) + )); + } + + /// For leaf node `node` of leaf data `d` with key `k`: + /// ```node = (0x00, k, h(serialize(d)))``` + #[test] + fn test_leaf_primitive_returns_expected_primitive() { + let expected_primitive = + (0_u32, Prefix::Leaf as u8, sum(b"LEAF"), sum([1u8; 32])); + + let leaf = Node::create_leaf(&sum(b"LEAF"), [1u8; 32]); + let primitive = Primitive::from(&leaf); + + assert_eq!(primitive, expected_primitive); + } + + /// For internal node `node` with children `l` and `r`: + /// ```node = (0x01, l.v, r.v)``` + #[test] + fn test_node_primitive_returns_expected_primitive() { + let expected_primitive = ( + 1_u32, + Prefix::Node as u8, + leaf_hash(&sum(b"LEFT CHILD"), &[1u8; 32]), + leaf_hash(&sum(b"RIGHT CHILD"), &[1u8; 32]), + ); + + let left_child = Node::create_leaf(&sum(b"LEFT CHILD"), [1u8; 32]); + let right_child = Node::create_leaf(&sum(b"RIGHT CHILD"), [1u8; 32]); + let node = Node::create_node(&left_child, &right_child, 1); + let primitive = Primitive::from(&node); + + assert_eq!(primitive, expected_primitive); + } + + /// For leaf node `node` of leaf data `d` with key `k`: + /// ```node.v = h(0x00, k, h(serialize(d)))``` + #[test] + fn test_leaf_hash_returns_expected_hash_value() { + let mut expected_buffer = [0u8; 65]; + expected_buffer[0..1].clone_from_slice(Prefix::Leaf.as_ref()); + expected_buffer[1..33].clone_from_slice(&sum(b"LEAF")); + expected_buffer[33..65].clone_from_slice(&sum([1u8; 32])); + let expected_value = sum(expected_buffer); + + let node = Node::create_leaf(&sum(b"LEAF"), 
[1u8; 32]); + let value = *node.hash(); + + assert_eq!(value, expected_value); + } + + /// For internal node `node` with children `l` and `r`: + /// ```node.v = h(0x01, l.v, r.v)``` + #[test] + fn test_node_hash_returns_expected_hash_value() { + let mut expected_buffer = [0u8; 65]; + expected_buffer[0..1].clone_from_slice(Prefix::Node.as_ref()); + expected_buffer[1..33] + .clone_from_slice(&leaf_hash(&sum(b"LEFT CHILD"), &[1u8; 32])); + expected_buffer[33..65] + .clone_from_slice(&leaf_hash(&sum(b"RIGHT CHILD"), &[1u8; 32])); + let expected_value = sum(expected_buffer); + + let left_child = Node::create_leaf(&sum(b"LEFT CHILD"), [1u8; 32]); + let right_child = Node::create_leaf(&sum(b"RIGHT CHILD"), [1u8; 32]); + let node = Node::create_node(&left_child, &right_child, 1); + let value = *node.hash(); + + assert_eq!(value, expected_value); + } +} + +#[cfg(test)] +mod test_storage_node { + use super::{ + Node, + StorageNode, + StorageNodeError, + }; + + use crate::{ + common::{ + error::DeserializeError, + node::{ + ChildError, + ParentNode, + }, + Bytes32, + PrefixError, + StorageMap, + }, + sparse::{ + hash::sum, + Primitive, + }, + storage::{ + Mappable, + StorageMutate, + }, + }; + + pub struct TestTable; + + impl Mappable for TestTable { + type Key = Self::OwnedKey; + type OwnedKey = Bytes32; + type OwnedValue = Primitive; + type Value = Self::OwnedValue; + } + + #[test] + fn test_node_left_child_returns_the_left_child() { + let mut s = StorageMap::::new(); + + let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); + let _ = s.insert(&leaf_0.hash(), &leaf_0.as_ref().into()); + + let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); + let _ = s.insert(&leaf_1.hash(), &leaf_1.as_ref().into()); + + let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); + let _ = s.insert(&node_0.hash(), &node_0.as_ref().into()); + + let storage_node = StorageNode::new(&s, node_0); + let child = storage_node.left_child().unwrap(); + + assert_eq!(child.hash(), 
leaf_0.hash()); + } + + #[test] + fn test_node_right_child_returns_the_right_child() { + let mut s = StorageMap::::new(); + + let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); + let _ = s.insert(&leaf_0.hash(), &leaf_0.as_ref().into()); + + let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); + let _ = s.insert(&leaf_1.hash(), &leaf_1.as_ref().into()); + + let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); + let _ = s.insert(&node_0.hash(), &node_0.as_ref().into()); + + let storage_node = StorageNode::new(&s, node_0); + let child = storage_node.right_child().unwrap(); + + assert_eq!(child.hash(), leaf_1.hash()); + } + + #[test] + fn test_node_left_child_returns_placeholder_when_key_is_zero_sum() { + let mut s = StorageMap::::new(); + + let leaf = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); + let _ = s.insert(&leaf.hash(), &leaf.as_ref().into()); + + let node_0 = Node::create_node(&Node::create_placeholder(), &leaf, 1); + let _ = s.insert(&node_0.hash(), &node_0.as_ref().into()); + + let storage_node = StorageNode::new(&s, node_0); + let child = storage_node.left_child().unwrap(); + + assert!(child.node.is_placeholder()); + } + + #[test] + fn test_node_right_child_returns_placeholder_when_key_is_zero_sum() { + let mut s = StorageMap::::new(); + + let leaf = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); + let _ = s.insert(&leaf.hash(), &leaf.as_ref().into()); + + let node_0 = Node::create_node(&leaf, &Node::create_placeholder(), 1); + let _ = s.insert(&node_0.hash(), &node_0.as_ref().into()); + + let storage_node = StorageNode::new(&s, node_0); + let child = storage_node.right_child().unwrap(); + + assert!(child.node.is_placeholder()); + } + + #[test] + fn test_node_left_child_returns_error_when_node_is_leaf() { + let s = StorageMap::::new(); + + let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); + let storage_node = StorageNode::new(&s, leaf_0); + let err = storage_node + .left_child() + 
.expect_err("Expected left_child() to return Error; got OK"); + + assert!(matches!(err, ChildError::NodeIsLeaf)); + } + + #[test] + fn test_node_right_child_returns_error_when_node_is_leaf() { + let s = StorageMap::::new(); + + let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); + let storage_node = StorageNode::new(&s, leaf_0); + let err = storage_node + .right_child() + .expect_err("Expected right_child() to return Error; got OK"); + + assert!(matches!(err, ChildError::NodeIsLeaf)); + } + + #[test] + fn test_node_left_child_returns_error_when_key_is_not_found() { + let s = StorageMap::::new(); + + let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [0u8; 32]); + let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); + let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); + + let storage_node = StorageNode::new(&s, node_0); + let err = storage_node + .left_child() + .expect_err("Expected left_child() to return Error; got Ok"); + + let key = storage_node.into_node().left_child_key(); + assert!(matches!( + err, + ChildError::ChildNotFound(k) if k == key + )); + } + + #[test] + fn test_node_right_child_returns_error_when_key_is_not_found() { + let s = StorageMap::::new(); + + let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); + let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); + let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); + + let storage_node = StorageNode::new(&s, node_0); + let err = storage_node + .right_child() + .expect_err("Expected right_child() to return Error; got Ok"); + + let key = storage_node.into_node().right_child_key(); + assert!(matches!( + err, + ChildError::ChildNotFound(k) if k == key + )); + } + + #[test] + fn test_node_left_child_returns_deserialize_error_when_primitive_is_invalid() { + let mut s = StorageMap::::new(); + + let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); + let _ = s.insert(&leaf_0.hash(), &(0xff, 0xff, [0xff; 32], [0xff; 32])); + let leaf_1 = 
Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); + let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); + + let storage_node = StorageNode::new(&s, node_0); + let err = storage_node + .left_child() + .expect_err("Expected left_child() to be Error; got Ok"); + + assert!(matches!( + err, + ChildError::Error(StorageNodeError::DeserializeError( + DeserializeError::PrefixError(PrefixError::InvalidPrefix(0xff)) + )) + )); + } + + #[test] + fn test_node_right_child_returns_deserialize_error_when_primitive_is_invalid() { + let mut s = StorageMap::::new(); + + let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); + let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); + let _ = s.insert(&leaf_1.hash(), &(0xff, 0xff, [0xff; 32], [0xff; 32])); + let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); + + let storage_node = StorageNode::new(&s, node_0); + let err = storage_node + .right_child() + .expect_err("Expected right_child() to be Error; got Ok"); + + assert!(matches!( + err, + ChildError::Error(StorageNodeError::DeserializeError( + DeserializeError::PrefixError(PrefixError::InvalidPrefix(0xff)) + )) + )); + } +} diff --git a/fuel-merkle/src/sparse/primitive_generic.rs b/fuel-merkle/src/sparse/primitive_generic.rs new file mode 100644 index 0000000000..8b564a2202 --- /dev/null +++ b/fuel-merkle/src/sparse/primitive_generic.rs @@ -0,0 +1,77 @@ +use crate::{ + common::{ + error::DeserializeError, + Bytes, + Prefix, + PrefixError, + }, + sparse::node_generic::Node, +}; + +/// **Leaf buffer:** +/// +/// | Allocation | Data | +/// |------------|----------------------------| +/// | `00 - 04` | Height (4 bytes) | +/// | `04 - 05` | Prefix (1 byte, `0x00`) | +/// | `05 - 37` | hash(Key) (32 bytes) | +/// | `37 - 69` | hash(Data) (32 bytes) | +/// +/// **Node buffer:** +/// +/// | Allocation | Data | +/// |------------|----------------------------| +/// | `00 - 04` | Height (4 bytes) | +/// | `04 - 05` | Prefix (1 byte, `0x01`) | +/// | `05 - 37` | Left 
child key (32 bytes) | +/// | `37 - 69` | Right child key (32 bytes) | +pub type Primitive = (u32, u8, Bytes, Bytes); + +trait PrimitiveView { + fn height(&self) -> u32; + fn prefix(&self) -> Result; + fn bytes_lo(&self) -> &Bytes; + fn bytes_hi(&self) -> &Bytes; +} + +impl PrimitiveView for Primitive { + fn height(&self) -> u32 { + self.0 + } + + fn prefix(&self) -> Result { + Prefix::try_from(self.1) + } + + fn bytes_lo(&self) -> &Bytes { + &self.2 + } + + fn bytes_hi(&self) -> &Bytes { + &self.3 + } +} + +impl From<&Node> for Primitive { + fn from(node: &Node) -> Self { + ( + node.height(), + node.prefix() as u8, + node.bytes_lo(), + node.bytes_hi(), + ) + } +} + +impl TryFrom> for Node { + type Error = DeserializeError; + + fn try_from(primitive: Primitive) -> Result { + let height = primitive.height(); + let prefix = primitive.prefix()?; + let bytes_lo = *primitive.bytes_lo(); + let bytes_hi = *primitive.bytes_hi(); + let node = Self::new(height, prefix, bytes_lo, bytes_hi); + Ok(node) + } +} From 45be393f7bbb7835692d59a23384ae5e5175b6d5 Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Wed, 17 Jan 2024 11:04:59 -0500 Subject: [PATCH 02/15] Refactor --- fuel-merkle/src/sparse.rs | 11 +- fuel-merkle/src/sparse/generic.rs | 4 + .../src/sparse/{ => generic}/hash_generic.rs | 21 ++- .../{ => generic}/merkle_tree_generic.rs | 123 +++++++++--------- .../src/sparse/{ => generic}/node_generic.rs | 101 +++++++------- .../sparse/{ => generic}/primitive_generic.rs | 28 ++-- fuel-merkle/src/sparse/merkle_tree.rs | 22 ++-- 7 files changed, 152 insertions(+), 158 deletions(-) create mode 100644 fuel-merkle/src/sparse/generic.rs rename fuel-merkle/src/sparse/{ => generic}/hash_generic.rs (52%) rename fuel-merkle/src/sparse/{ => generic}/merkle_tree_generic.rs (93%) rename fuel-merkle/src/sparse/{ => generic}/node_generic.rs (90%) rename fuel-merkle/src/sparse/{ => generic}/primitive_generic.rs (64%) diff --git a/fuel-merkle/src/sparse.rs b/fuel-merkle/src/sparse.rs index 
5dc2e83480..7f8b2b5874 100644 --- a/fuel-merkle/src/sparse.rs +++ b/fuel-merkle/src/sparse.rs @@ -19,13 +19,4 @@ pub use merkle_tree::{ pub use primitive::Primitive; pub mod in_memory; -mod hash_generic; -mod merkle_tree_generic; -mod node_generic; -mod primitive_generic; - -use crate::common::Bytes32; - -pub const fn empty_sum() -> &'static Bytes32 { - zero_sum() -} +pub mod generic; diff --git a/fuel-merkle/src/sparse/generic.rs b/fuel-merkle/src/sparse/generic.rs new file mode 100644 index 0000000000..6f35723fd6 --- /dev/null +++ b/fuel-merkle/src/sparse/generic.rs @@ -0,0 +1,4 @@ +pub mod hash_generic; +pub mod merkle_tree_generic; +pub mod node_generic; +pub mod primitive_generic; diff --git a/fuel-merkle/src/sparse/hash_generic.rs b/fuel-merkle/src/sparse/generic/hash_generic.rs similarity index 52% rename from fuel-merkle/src/sparse/hash_generic.rs rename to fuel-merkle/src/sparse/generic/hash_generic.rs index 8ad7301f04..1c10dd2c74 100644 --- a/fuel-merkle/src/sparse/hash_generic.rs +++ b/fuel-merkle/src/sparse/generic/hash_generic.rs @@ -1,7 +1,4 @@ -use crate::common::{ - Bytes, - Zero, -}; +use crate::common::Bytes; use std::{ convert::TryInto, sync::OnceLock, @@ -9,23 +6,21 @@ use std::{ use digest::Digest; use sha2::Sha256; - pub(crate) type Hash = Sha256; -// pub fn zero_sum() -> &'static T { -// static COMPUTATION: OnceLock = OnceLock::new(); -// COMPUTATION.get_or_init(|| T::zero()) -// } - -pub fn zero_sum() -> [u8; N] { - [0u8; N] +pub fn zero_sum() -> &'static [u8; N] { + static ZERO: OnceLock> = OnceLock::new(); + ZERO.get_or_init(|| vec![0; N]) + .as_slice() + .try_into() + .expect("Expected valid zero sum") } pub fn sum(data: I) -> Bytes where I: AsRef<[u8]>, { - let mut hash = crate::sparse::hash::Hash::new(); + let mut hash = Hash::new(); hash.update(data); let h = hash.finalize(); let mut vec = h.as_slice().to_vec(); diff --git a/fuel-merkle/src/sparse/merkle_tree_generic.rs b/fuel-merkle/src/sparse/generic/merkle_tree_generic.rs similarity 
index 93% rename from fuel-merkle/src/sparse/merkle_tree_generic.rs rename to fuel-merkle/src/sparse/generic/merkle_tree_generic.rs index f333889160..759fe9aac4 100644 --- a/fuel-merkle/src/sparse/merkle_tree_generic.rs +++ b/fuel-merkle/src/sparse/generic/merkle_tree_generic.rs @@ -4,7 +4,7 @@ use crate::{ node::ChildError, AsPathIterator, }, - sparse::{ + sparse::generic::{ node_generic::{ Node, StorageNode, @@ -19,9 +19,12 @@ use crate::{ }, }; -use crate::common::{ - Bytes, - Bytes32, +use crate::{ + common::{ + Bytes, + Bytes32, + }, + sparse::generic::hash_generic::zero_sum, }; use alloc::vec::Vec; use core::{ @@ -31,16 +34,16 @@ use core::{ }; fn truncate(bytes: &[u8]) -> Bytes { - (&bytes[0..N]).clone().try_into().unwrap() + (&bytes[0..N]).try_into().unwrap() } #[derive(Debug, Clone, derive_more::Display)] -pub enum MerkleTreeError { +pub enum MerkleTreeError { #[display( fmt = "cannot load node with key {}; the key is not found in storage", "hex::encode(_0)" )] - LoadError(Bytes), + LoadError(Bytes), #[display(fmt = "{}", _0)] StorageError(StorageError), @@ -49,13 +52,13 @@ pub enum MerkleTreeError { DeserializeError(DeserializeError), #[display(fmt = "{}", _0)] - ChildError(ChildError, StorageNodeError>), + ChildError(ChildError, StorageNodeError>), } -impl From - for MerkleTreeError +impl From + for MerkleTreeError { - fn from(err: StorageError) -> MerkleTreeError { + fn from(err: StorageError) -> MerkleTreeError { MerkleTreeError::StorageError(err) } } @@ -113,21 +116,21 @@ impl From> for Bytes { } #[derive(Debug)] -pub struct MerkleTree { - root_node: Node, +pub struct MerkleTree { + root_node: Node, storage: StorageType, phantom_table: PhantomData, } -impl - MerkleTree +impl + MerkleTree { - pub fn empty_root() -> Bytes { - vec![0; KeySize].try_into().unwrap() + pub fn empty_root() -> &'static Bytes { + zero_sum() } - pub fn root(&self) -> Bytes { - self.root_node().hash() + pub fn root(&self) -> Bytes { + *self.root_node().hash() } pub fn 
into_storage(self) -> StorageType { @@ -140,23 +143,23 @@ impl // PRIVATE - fn root_node(&self) -> &Node { + fn root_node(&self) -> &Node { &self.root_node } - fn set_root_node(&mut self, node: Node) { - debug_assert!(node.is_leaf() || node.height() == Node::::max_height()); + fn set_root_node(&mut self, node: Node) { + debug_assert!(node.is_leaf() || node.height() == Node::::max_height()); self.root_node = node; } } -impl - MerkleTree +impl + MerkleTree where TableType: Mappable< - Key = Bytes, - Value = Primitive, - OwnedValue = Primitive, + Key = Bytes, + Value = Primitive, + OwnedValue = Primitive, >, StorageType: StorageInspect, { @@ -170,9 +173,9 @@ where pub fn load( storage: StorageType, - root: &Bytes, - ) -> Result> { - if *root == Self::empty_root() { + root: &Bytes, + ) -> Result> { + if root == Self::empty_root() { let tree = Self::new(storage); Ok(tree) } else { @@ -195,14 +198,14 @@ where fn path_set( &self, - leaf_key: Bytes, + leaf_key: Bytes, ) -> Result< - (Vec>, Vec>), - MerkleTreeError, + (Vec>, Vec>), + MerkleTreeError, > { let root_node = self.root_node().clone(); let root_storage_node = StorageNode::new(&self.storage, root_node); - let (mut path_nodes, mut side_nodes): (Vec>, Vec>) = + let (mut path_nodes, mut side_nodes): (Vec>, Vec>) = root_storage_node .as_path_iter(leaf_key) .map(|(path_node, side_node)| { @@ -211,7 +214,7 @@ where side_node.map_err(MerkleTreeError::ChildError)?.into_node(), )) }) - .collect::, MerkleTreeError>>()? + .collect::, MerkleTreeError>>()? 
.into_iter() .unzip(); path_nodes.reverse(); @@ -223,13 +226,13 @@ where } } -impl - MerkleTree +impl + MerkleTree where TableType: Mappable< - Key = Bytes, - Value = Primitive, - OwnedValue = Primitive, + Key = Bytes, + Value = Primitive, + OwnedValue = Primitive, >, StorageType: StorageMutate, { @@ -380,9 +383,9 @@ where pub fn update( &mut self, - key: MerkleTreeKey, + key: MerkleTreeKey, data: &[u8], - ) -> Result<(), MerkleTreeError> { + ) -> Result<(), MerkleTreeError> { if data.is_empty() { // If the data is empty, this signifies a delete operation for the // given key. @@ -411,20 +414,20 @@ where pub fn delete( &mut self, - key: MerkleTreeKey, - ) -> Result<(), MerkleTreeError> { - if self.root() == Self::empty_root() { + key: MerkleTreeKey, + ) -> Result<(), MerkleTreeError> { + if self.root() == *Self::empty_root() { // The zero root signifies that all leaves are empty, including the // given key. return Ok(()) } let key = key.into(); - let (path_nodes, side_nodes): (Vec>, Vec>) = + let (path_nodes, side_nodes): (Vec>, Vec>) = self.path_set(key)?; match path_nodes.get(0) { - Some(node) if node.leaf_key() == key => { + Some(node) if node.leaf_key() == &key => { self.delete_with_path_set( &key, path_nodes.as_slice(), @@ -441,9 +444,9 @@ where fn update_with_path_set( &mut self, - requested_leaf_node: &Node, - path_nodes: &[Node], - side_nodes: &[Node], + requested_leaf_node: &Node, + path_nodes: &[Node], + side_nodes: &[Node], ) -> Result<(), StorageError> { let path = requested_leaf_node.leaf_key(); let actual_leaf_node = &path_nodes[0]; @@ -479,7 +482,7 @@ where // Merge leaves if !actual_leaf_node.is_placeholder() { current_node = - Node::create_node_on_path(&path, ¤t_node, actual_leaf_node); + Node::create_node_on_path(path, ¤t_node, actual_leaf_node); self.storage .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; } @@ -492,7 +495,7 @@ where iter::repeat(Node::create_placeholder()).take(placeholders_count); for placeholder in placeholders { 
current_node = - Node::create_node_on_path(&path, ¤t_node, &placeholder); + Node::create_node_on_path(path, ¤t_node, &placeholder); self.storage .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; } @@ -502,7 +505,7 @@ where // Merge side nodes for side_node in side_nodes { - current_node = Node::create_node_on_path(&path, ¤t_node, side_node); + current_node = Node::create_node_on_path(path, ¤t_node, side_node); self.storage .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; } @@ -518,9 +521,9 @@ where fn delete_with_path_set( &mut self, - requested_leaf_key: &Bytes, - path_nodes: &[Node], - side_nodes: &[Node], + requested_leaf_key: &Bytes, + path_nodes: &[Node], + side_nodes: &[Node], ) -> Result<(), StorageError> { for node in path_nodes { self.storage.remove(&node.hash())?; @@ -597,7 +600,7 @@ mod test { Bytes, StorageMap, }, - sparse::hash_generic::{ + sparse::generic::hash_generic::{ sum, zero_sum, }, @@ -624,7 +627,7 @@ mod test { type Value = Self::OwnedValue; } - fn key>(data: B) -> MerkleTreeKey { + fn key>(data: B) -> MerkleTreeKey { MerkleTreeKey::new_without_hash(sum(data.as_ref())) } @@ -1068,7 +1071,7 @@ mod test { let leaf_1_data = b"DATA_1"; let leaf_1 = Node::create_leaf(&leaf_1_key.0, leaf_1_data); - let leaf_2_key = MerkleTreeKey::new_without_hash(leaf_1.hash()); + let leaf_2_key = MerkleTreeKey::new_without_hash(*leaf_1.hash()); let leaf_2_data = b"DATA_2"; let leaf_2 = Node::create_leaf(&leaf_2_key.0, leaf_2_data); @@ -1155,7 +1158,7 @@ mod test { let tree = MerkleTree::load(&mut storage, &zero_sum()).unwrap(); let root = tree.root(); - assert_eq!(root, zero_sum()); + assert_eq!(root, *zero_sum()); } #[test] diff --git a/fuel-merkle/src/sparse/node_generic.rs b/fuel-merkle/src/sparse/generic/node_generic.rs similarity index 90% rename from fuel-merkle/src/sparse/node_generic.rs rename to fuel-merkle/src/sparse/generic/node_generic.rs index 15167656d2..68f6c5a3fd 100644 --- a/fuel-merkle/src/sparse/node_generic.rs +++ 
b/fuel-merkle/src/sparse/generic/node_generic.rs @@ -14,7 +14,7 @@ use crate::{ }, Prefix, }, - sparse::{ + sparse::generic::{ hash_generic::{ sum, zero_sum, @@ -27,10 +27,7 @@ use crate::{ }, }; -use crate::{ - common::Bytes, - sparse::hash::Hash, -}; +use crate::common::Bytes; use core::{ cmp, fmt, @@ -97,8 +94,8 @@ impl Node { } pub fn create_node(left_child: &Node, right_child: &Node, height: u32) -> Self { - let bytes_lo = left_child.hash(); - let bytes_hi = right_child.hash(); + let bytes_lo = *left_child.hash(); + let bytes_hi = *right_child.hash(); Self::Node { hash: Self::calculate_hash(&Prefix::Node, &bytes_lo, &bytes_hi), height, @@ -180,16 +177,16 @@ impl Node { } } - pub fn bytes_lo(&self) -> Bytes { + pub fn bytes_lo(&self) -> &Bytes { match self { - Node::Node { bytes_lo, .. } => *bytes_lo, + Node::Node { bytes_lo, .. } => bytes_lo, Node::Placeholder => zero_sum(), } } - pub fn bytes_hi(&self) -> Bytes { + pub fn bytes_hi(&self) -> &Bytes { match self { - Node::Node { bytes_hi, .. } => *bytes_hi, + Node::Node { bytes_hi, .. } => bytes_hi, Node::Placeholder => zero_sum(), } } @@ -202,22 +199,22 @@ impl Node { self.prefix() == Prefix::Node } - pub fn leaf_key(&self) -> Bytes { + pub fn leaf_key(&self) -> &Bytes { assert!(self.is_leaf()); self.bytes_lo() } - pub fn leaf_data(&self) -> Bytes { + pub fn leaf_data(&self) -> &Bytes { assert!(self.is_leaf()); self.bytes_hi() } - pub fn left_child_key(&self) -> Bytes { + pub fn left_child_key(&self) -> &Bytes { assert!(self.is_node()); self.bytes_lo() } - pub fn right_child_key(&self) -> Bytes { + pub fn right_child_key(&self) -> &Bytes { assert!(self.is_node()); self.bytes_hi() } @@ -226,29 +223,29 @@ impl Node { &Self::Placeholder == self } - pub fn hash(&self) -> Bytes { + pub fn hash(&self) -> &Bytes { match self { - Node::Node { hash, .. } => *hash, + Node::Node { hash, .. 
} => hash, Node::Placeholder => zero_sum(), } } } -impl AsRef> for Node { - fn as_ref(&self) -> &Node { +impl AsRef> for Node { + fn as_ref(&self) -> &Node { self } } -impl NodeTrait for Node { - type Key = Bytes; +impl NodeTrait for Node { + type Key = Bytes; fn height(&self) -> u32 { Node::height(self) } fn leaf_key(&self) -> Self::Key { - Node::leaf_key(self) + *Node::leaf_key(self) } fn is_leaf(&self) -> bool { @@ -260,7 +257,7 @@ impl NodeTrait for Node { } } -impl fmt::Debug for Node { +impl fmt::Debug for Node { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if self.is_node() { f.debug_struct("Node (Internal)") @@ -280,14 +277,14 @@ impl fmt::Debug for Node { } } -pub(crate) struct StorageNode<'storage, const KeySize: usize, TableType, StorageType> { +pub(crate) struct StorageNode<'storage, const KEY_SIZE: usize, TableType, StorageType> { storage: &'storage StorageType, - node: Node, + node: Node, phantom_table: PhantomData, } -impl Clone - for StorageNode<'_, KeySize, TableType, StorageType> +impl Clone + for StorageNode<'_, KEY_SIZE, TableType, StorageType> { fn clone(&self) -> Self { Self { @@ -298,10 +295,10 @@ impl Clone } } -impl<'s, const KeySize: usize, TableType, StorageType> - StorageNode<'s, KeySize, TableType, StorageType> +impl<'s, const KEY_SIZE: usize, TableType, StorageType> + StorageNode<'s, KEY_SIZE, TableType, StorageType> { - pub fn new(storage: &'s StorageType, node: Node) -> Self { + pub fn new(storage: &'s StorageType, node: Node) -> Self { Self { node, storage, @@ -310,29 +307,29 @@ impl<'s, const KeySize: usize, TableType, StorageType> } } -impl - StorageNode<'_, KeySize, TableType, StorageType> +impl + StorageNode<'_, KEY_SIZE, TableType, StorageType> { - pub fn hash(&self) -> Bytes { + pub fn hash(&self) -> &Bytes { self.node.hash() } - pub fn into_node(self) -> Node { + pub fn into_node(self) -> Node { self.node } } -impl NodeTrait - for StorageNode<'_, KeySize, TableType, StorageType> +impl NodeTrait + for 
StorageNode<'_, KEY_SIZE, TableType, StorageType> { - type Key = Bytes; + type Key = Bytes; fn height(&self) -> u32 { self.node.height() } fn leaf_key(&self) -> Self::Key { - self.node.leaf_key() + *self.node.leaf_key() } fn is_leaf(&self) -> bool { @@ -352,14 +349,14 @@ pub enum StorageNodeError { DeserializeError(DeserializeError), } -impl ParentNodeTrait - for StorageNode<'_, KeySize, TableType, StorageType> +impl ParentNodeTrait + for StorageNode<'_, KEY_SIZE, TableType, StorageType> where StorageType: StorageInspect, TableType: Mappable< - Key = Bytes, - Value = Primitive, - OwnedValue = Primitive, + Key = Bytes, + Value = Primitive, + OwnedValue = Primitive, >, { type Error = StorageNodeError; @@ -376,7 +373,7 @@ where .storage .get(&key) .map_err(StorageNodeError::StorageError)? - .ok_or(ChildError::ChildNotFound(key))?; + .ok_or(ChildError::ChildNotFound(*key))?; Ok(primitive .into_owned() .try_into() @@ -396,7 +393,7 @@ where .storage .get(&key) .map_err(StorageNodeError::StorageError)? 
- .ok_or(ChildError::ChildNotFound(key))?; + .ok_or(ChildError::ChildNotFound(*key))?; Ok(primitive .into_owned() .try_into() @@ -405,14 +402,14 @@ where } } -impl fmt::Debug - for StorageNode<'_, KeySize, TableType, StorageType> +impl fmt::Debug + for StorageNode<'_, KEY_SIZE, TableType, StorageType> where StorageType: StorageInspect, TableType: Mappable< - Key = Bytes, - Value = Primitive, - OwnedValue = Primitive, + Key = Bytes, + Value = Primitive, + OwnedValue = Primitive, >, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -751,7 +748,7 @@ mod test_storage_node { .left_child() .expect_err("Expected left_child() to return Error; got Ok"); - let key = storage_node.into_node().left_child_key(); + let key = *storage_node.into_node().left_child_key(); assert!(matches!( err, ChildError::ChildNotFound(k) if k == key @@ -771,7 +768,7 @@ mod test_storage_node { .right_child() .expect_err("Expected right_child() to return Error; got Ok"); - let key = storage_node.into_node().right_child_key(); + let key = *storage_node.into_node().right_child_key(); assert!(matches!( err, ChildError::ChildNotFound(k) if k == key diff --git a/fuel-merkle/src/sparse/primitive_generic.rs b/fuel-merkle/src/sparse/generic/primitive_generic.rs similarity index 64% rename from fuel-merkle/src/sparse/primitive_generic.rs rename to fuel-merkle/src/sparse/generic/primitive_generic.rs index 8b564a2202..d84abba0c4 100644 --- a/fuel-merkle/src/sparse/primitive_generic.rs +++ b/fuel-merkle/src/sparse/generic/primitive_generic.rs @@ -5,7 +5,7 @@ use crate::{ Prefix, PrefixError, }, - sparse::node_generic::Node, + sparse::generic::node_generic::Node, }; /// **Leaf buffer:** @@ -25,16 +25,16 @@ use crate::{ /// | `04 - 05` | Prefix (1 byte, `0x01`) | /// | `05 - 37` | Left child key (32 bytes) | /// | `37 - 69` | Right child key (32 bytes) | -pub type Primitive = (u32, u8, Bytes, Bytes); +pub type Primitive = (u32, u8, Bytes, Bytes); -trait PrimitiveView { +trait PrimitiveView { fn 
height(&self) -> u32; fn prefix(&self) -> Result; - fn bytes_lo(&self) -> &Bytes; - fn bytes_hi(&self) -> &Bytes; + fn bytes_lo(&self) -> &Bytes; + fn bytes_hi(&self) -> &Bytes; } -impl PrimitiveView for Primitive { +impl PrimitiveView for Primitive { fn height(&self) -> u32 { self.0 } @@ -43,30 +43,30 @@ impl PrimitiveView for Primitive { Prefix::try_from(self.1) } - fn bytes_lo(&self) -> &Bytes { + fn bytes_lo(&self) -> &Bytes { &self.2 } - fn bytes_hi(&self) -> &Bytes { + fn bytes_hi(&self) -> &Bytes { &self.3 } } -impl From<&Node> for Primitive { - fn from(node: &Node) -> Self { +impl From<&Node> for Primitive { + fn from(node: &Node) -> Self { ( node.height(), node.prefix() as u8, - node.bytes_lo(), - node.bytes_hi(), + *node.bytes_lo(), + *node.bytes_hi(), ) } } -impl TryFrom> for Node { +impl TryFrom> for Node { type Error = DeserializeError; - fn try_from(primitive: Primitive) -> Result { + fn try_from(primitive: Primitive) -> Result { let height = primitive.height(); let prefix = primitive.prefix()?; let bytes_lo = *primitive.bytes_lo(); diff --git a/fuel-merkle/src/sparse/merkle_tree.rs b/fuel-merkle/src/sparse/merkle_tree.rs index b96e40c36d..e94b4b65a7 100644 --- a/fuel-merkle/src/sparse/merkle_tree.rs +++ b/fuel-merkle/src/sparse/merkle_tree.rs @@ -6,7 +6,6 @@ use crate::{ Bytes32, }, sparse::{ - empty_sum, primitive::Primitive, Node, StorageNode, @@ -19,9 +18,12 @@ use crate::{ }, }; -use crate::sparse::branch::{ - merge_branches, - Branch, +use crate::sparse::{ + branch::{ + merge_branches, + Branch, + }, + zero_sum, }; use alloc::vec::Vec; use core::{ @@ -115,7 +117,7 @@ pub struct MerkleTree { impl MerkleTree { pub const fn empty_root() -> &'static Bytes32 { - empty_sum() + zero_sum() } pub fn root(&self) -> Bytes32 { @@ -568,8 +570,10 @@ mod test { StorageMap, }, sparse::{ - empty_sum, - hash::sum, + hash::{ + sum, + zero_sum, + }, MerkleTree, MerkleTreeError, MerkleTreeKey, @@ -1128,10 +1132,10 @@ mod test { #[test] fn 
test_load_returns_an_empty_tree_for_empty_sum_root() { let mut storage = StorageMap::::new(); - let tree = MerkleTree::load(&mut storage, empty_sum()).unwrap(); + let tree = MerkleTree::load(&mut storage, zero_sum()).unwrap(); let root = tree.root(); - assert_eq!(root, *empty_sum()); + assert_eq!(root, *zero_sum()); } #[test] From 2f854e5958ec0bda712e04a82c9fc2eff588d4e2 Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Wed, 17 Jan 2024 16:08:49 -0500 Subject: [PATCH 03/15] Refactor --- fuel-merkle/src/sparse/generic.rs | 12 +- fuel-merkle/src/sparse/generic/branch.rs | 94 +++ .../generic/{hash_generic.rs => hash.rs} | 4 +- ...{merkle_tree_generic.rs => merkle_tree.rs} | 648 +++++++++--------- .../generic/{node_generic.rs => node.rs} | 4 +- .../{primitive_generic.rs => primitive.rs} | 2 +- 6 files changed, 435 insertions(+), 329 deletions(-) create mode 100644 fuel-merkle/src/sparse/generic/branch.rs rename fuel-merkle/src/sparse/generic/{hash_generic.rs => hash.rs} (88%) rename fuel-merkle/src/sparse/generic/{merkle_tree_generic.rs => merkle_tree.rs} (72%) rename fuel-merkle/src/sparse/generic/{node_generic.rs => node.rs} (99%) rename fuel-merkle/src/sparse/generic/{primitive_generic.rs => primitive.rs} (98%) diff --git a/fuel-merkle/src/sparse/generic.rs b/fuel-merkle/src/sparse/generic.rs index 6f35723fd6..2273c9a812 100644 --- a/fuel-merkle/src/sparse/generic.rs +++ b/fuel-merkle/src/sparse/generic.rs @@ -1,4 +1,8 @@ -pub mod hash_generic; -pub mod merkle_tree_generic; -pub mod node_generic; -pub mod primitive_generic; +pub mod branch; +pub mod hash; +pub mod merkle_tree; +pub mod node; +pub mod primitive; + +pub(crate) use node::Node; +pub(crate) use primitive::Primitive; diff --git a/fuel-merkle/src/sparse/generic/branch.rs b/fuel-merkle/src/sparse/generic/branch.rs new file mode 100644 index 0000000000..e1007860ec --- /dev/null +++ b/fuel-merkle/src/sparse/generic/branch.rs @@ -0,0 +1,94 @@ +use crate::{ + common::{ + path::ComparablePath, + Bytes, + }, + 
sparse::generic::{ + Node, + Primitive, + }, +}; +use fuel_storage::{ + Mappable, + StorageMutate, +}; + +use core::iter; + +pub(crate) struct Branch { + pub bits: Bytes, + pub node: Node, +} + +impl From> for Branch { + fn from(leaf: Node) -> Self { + Self { + bits: *leaf.leaf_key(), + node: leaf, + } + } +} + +pub(crate) fn merge_branches( + storage: &mut Storage, + mut left_branch: Branch, + mut right_branch: Branch, +) -> Result, Storage::Error> +where + Storage: StorageMutate, + Table: Mappable< + Key = Bytes, + Value = Primitive, + OwnedValue = Primitive, + >, +{ + let branch = if left_branch.node.is_leaf() && right_branch.node.is_leaf() { + let parent_depth = left_branch.node.common_path_length(&right_branch.node); + let parent_height = Node::::max_height() - parent_depth; + let node = + Node::create_node(&left_branch.node, &right_branch.node, parent_height); + Branch { + bits: left_branch.bits, + node, + } + } else { + let ancestor_depth = left_branch.bits.common_path_length(&right_branch.bits); + let ancestor_height = Node::::max_height() - ancestor_depth; + if right_branch.node.is_node() { + let mut current_node = right_branch.node; + let path = right_branch.bits; + let parent_height = current_node.height() + 1; + let stale_depth = ancestor_height - parent_height; + let placeholders = + iter::repeat(Node::create_placeholder()).take(stale_depth as usize); + for placeholder in placeholders { + current_node = + Node::create_node_on_path(&path, ¤t_node, &placeholder); + storage.insert(current_node.hash(), ¤t_node.as_ref().into())?; + } + right_branch.node = current_node; + } + if left_branch.node.is_node() { + let mut current_node = left_branch.node; + let path = left_branch.bits; + let parent_height = current_node.height() + 1; + let stale_depth = ancestor_height - parent_height; + let placeholders = + iter::repeat(Node::create_placeholder()).take(stale_depth as usize); + for placeholder in placeholders { + current_node = + Node::create_node_on_path(&path, 
¤t_node, &placeholder); + storage.insert(current_node.hash(), ¤t_node.as_ref().into())?; + } + left_branch.node = current_node; + } + let node = + Node::create_node(&left_branch.node, &right_branch.node, ancestor_height); + Branch { + bits: left_branch.bits, + node, + } + }; + storage.insert(branch.node.hash(), &branch.node.as_ref().into())?; + Ok(branch) +} diff --git a/fuel-merkle/src/sparse/generic/hash_generic.rs b/fuel-merkle/src/sparse/generic/hash.rs similarity index 88% rename from fuel-merkle/src/sparse/generic/hash_generic.rs rename to fuel-merkle/src/sparse/generic/hash.rs index 1c10dd2c74..36b0fdec51 100644 --- a/fuel-merkle/src/sparse/generic/hash_generic.rs +++ b/fuel-merkle/src/sparse/generic/hash.rs @@ -22,8 +22,8 @@ where { let mut hash = Hash::new(); hash.update(data); - let h = hash.finalize(); - let mut vec = h.as_slice().to_vec(); + let hash = hash.finalize(); + let mut vec = hash.as_slice().to_vec(); vec.truncate(N); vec.try_into().unwrap() } diff --git a/fuel-merkle/src/sparse/generic/merkle_tree_generic.rs b/fuel-merkle/src/sparse/generic/merkle_tree.rs similarity index 72% rename from fuel-merkle/src/sparse/generic/merkle_tree_generic.rs rename to fuel-merkle/src/sparse/generic/merkle_tree.rs index 759fe9aac4..762f94bafe 100644 --- a/fuel-merkle/src/sparse/generic/merkle_tree_generic.rs +++ b/fuel-merkle/src/sparse/generic/merkle_tree.rs @@ -5,12 +5,12 @@ use crate::{ AsPathIterator, }, sparse::generic::{ - node_generic::{ + node::{ Node, StorageNode, StorageNodeError, }, - primitive_generic::Primitive, + primitive::Primitive, }, storage::{ Mappable, @@ -24,7 +24,13 @@ use crate::{ Bytes, Bytes32, }, - sparse::generic::hash_generic::zero_sum, + sparse::generic::{ + branch::{ + merge_branches, + Branch, + }, + hash::zero_sum, + }, }; use alloc::vec::Vec; use core::{ @@ -236,150 +242,150 @@ where >, StorageType: StorageMutate, { - // /// Build a sparse Merkle tree from a set of key-value pairs. 
This is - // /// equivalent to creating an empty sparse Merkle tree and sequentially - // /// calling [update](Self::update) for each key-value pair. This constructor - // /// is more performant than calling individual sequential updates and is the - // /// preferred approach when the key-values are known upfront. Leaves can be - // /// appended to the returned tree using `update` to further accumulate leaf - // /// data. - // pub fn from_set( - // mut storage: StorageType, - // set: I, - // ) -> Result - // where - // I: Iterator, - // B: Into, - // D: AsRef<[u8]>, - // { - // let sorted = set - // .into_iter() - // .map(|(k, v)| (k.into(), v)) - // .collect::>(); - // let mut branches = sorted - // .iter() - // .filter(|(_, value)| !value.as_ref().is_empty()) - // .map(|(key, data)| Node::create_leaf(key, data)) - // .map(Into::::into) - // .collect::>(); - // - // for branch in branches.iter() { - // let leaf = &branch.node; - // storage.insert(leaf.hash(), &leaf.as_ref().into())?; - // } - // - // if branches.is_empty() { - // let tree = Self::new(storage); - // return Ok(tree) - // } - // - // if branches.len() == 1 { - // let leaf = branches.pop().expect("Expected at least 1 leaf").node; - // let mut tree = Self::new(storage); - // tree.set_root_node(leaf); - // return Ok(tree) - // } - // - // let mut nodes = Vec::::with_capacity(branches.len()); - // let mut proximities = Vec::::with_capacity(branches.len()); - // - // // Building the tree starts by merging all leaf nodes where possible. - // // Given a set of leaf nodes sorted left to right (i.e., keys are sorted - // // in lexical order), we scan the leaf set right to left, and analyze a - // // moving window of three leaves: a center (or "current") leaf, its left - // // neighbor, and its right neighbor. 
- // // - // // When merging leaf nodes, we analyze this three-node window to - // // determine if the condition for merging is met: When the current node - // // is closer to its right neighbor than it is to its left neighbor, we - // // merge the current node with its right neighbor. The merged node then - // // becomes the center of the window, and we must check the merge - // // condition again. We calculate proximity using the common path length - // // between two nodes, which is also the depth of their shared ancestor - // // in the tree. - // // - // // This three-node window is centered around a current node, and moves - // // leftward: At the next iteration, the current node is now the right - // // node, the left node is now the current node, and so on. When we have - // // checked all windows, we know that we have merged all leaf nodes where - // // possible. - // while let Some(left) = branches.pop() { - // if let Some(current) = nodes.last() { - // let left_proximity = current.node.common_path_length(&left.node); - // while { - // // The current node's proximity to its right neighbor was - // // stored previously. We now compare the distances between - // // the current node's left and right neighbors. If, and only - // // if, the current node is closer to its right neighbor, we - // // merge these nodes to form an ancestor node. We then - // // reform the window, using the ancestor node in the center, - // // to check if we must merge again. - // // - // // If the current node is closer to its left, we do not have - // // enough information to merge nodes, and we must continue - // // scanning the leaf set leftwards to find a configuration - // // that satisfies the merge condition. - // if let Some(right_proximity) = proximities.last() { - // *right_proximity > left_proximity - // } else { - // false - // } - // } { - // // The current node is closer to its right neighbor than its - // // left neighbor. 
We now merge the current node with its - // // right neighbor. - // let current = - // nodes.pop().expect("Expected current node to be present"); - // let right = nodes.pop().expect("Expected right node to be - // present"); let merged = merge_branches(&mut storage, - // current, right)?; nodes.push(merged); - // - // // Now that the current node and its right neighbour are - // // merged, the distance between them has collapsed and their - // // proximity is no longer needed. - // proximities.pop(); - // } - // proximities.push(left_proximity); - // } - // nodes.push(left); - // } - // - // // Where possible, all the leaves have been merged. The remaining leaves - // // and nodes are stacked in order of height descending. This means that - // // they are also ordered with the leftmost leaves at the top and the - // // rightmost nodes at the bottom. We can iterate through the stack and - // // merge them left to right. - // let top = { - // let mut node = nodes - // .pop() - // .expect("Nodes stack must have at least 1 element"); - // while let Some(next) = nodes.pop() { - // node = merge_branches(&mut storage, node, next)?; - // } - // node - // }; - // - // // Lastly, all leaves and nodes are merged into one. The resulting node - // // may still be an ancestor node below the root. To calculate the final - // // root, we merge placeholder nodes along the path until the resulting - // // node has the final height and forms the root node. 
- // let mut node = top.node; - // let path = top.bits; - // let height = node.height(); - // let depth = Node::max_height() - height; - // let placeholders = iter::repeat(Node::create_placeholder()).take(depth as - // usize); for placeholder in placeholders { - // node = Node::create_node_on_path(&path, &node, &placeholder); - // storage.insert(node.hash(), &node.as_ref().into())?; - // } - // - // let tree = Self { - // root_node: node, - // storage, - // phantom_table: Default::default(), - // }; - // Ok(tree) - // } + /// Build a sparse Merkle tree from a set of key-value pairs. This is + /// equivalent to creating an empty sparse Merkle tree and sequentially + /// calling [update](Self::update) for each key-value pair. This constructor + /// is more performant than calling individual sequential updates and is the + /// preferred approach when the key-values are known upfront. Leaves can be + /// appended to the returned tree using `update` to further accumulate leaf + /// data. + pub fn from_set( + mut storage: StorageType, + set: I, + ) -> Result + where + I: Iterator, + B: Into>, + D: AsRef<[u8]>, + { + let sorted = set + .into_iter() + .map(|(k, v)| (k.into(), v)) + .collect::, D>>(); + let mut branches = sorted + .iter() + .filter(|(_, value)| !value.as_ref().is_empty()) + .map(|(key, data)| Node::create_leaf(key, data)) + .map(Into::>::into) + .collect::>(); + + for branch in branches.iter() { + let leaf = &branch.node; + storage.insert(leaf.hash(), &leaf.as_ref().into())?; + } + + if branches.is_empty() { + let tree = Self::new(storage); + return Ok(tree) + } + + if branches.len() == 1 { + let leaf = branches.pop().expect("Expected at least 1 leaf").node; + let mut tree = Self::new(storage); + tree.set_root_node(leaf); + return Ok(tree) + } + + let mut nodes = Vec::>::with_capacity(branches.len()); + let mut proximities = Vec::::with_capacity(branches.len()); + + // Building the tree starts by merging all leaf nodes where possible. 
+ // Given a set of leaf nodes sorted left to right (i.e., keys are sorted + // in lexical order), we scan the leaf set right to left, and analyze a + // moving window of three leaves: a center (or "current") leaf, its left + // neighbor, and its right neighbor. + // + // When merging leaf nodes, we analyze this three-node window to + // determine if the condition for merging is met: When the current node + // is closer to its right neighbor than it is to its left neighbor, we + // merge the current node with its right neighbor. The merged node then + // becomes the center of the window, and we must check the merge + // condition again. We calculate proximity using the common path length + // between two nodes, which is also the depth of their shared ancestor + // in the tree. + // + // This three-node window is centered around a current node, and moves + // leftward: At the next iteration, the current node is now the right + // node, the left node is now the current node, and so on. When we have + // checked all windows, we know that we have merged all leaf nodes where + // possible. + while let Some(left) = branches.pop() { + if let Some(current) = nodes.last() { + let left_proximity = current.node.common_path_length(&left.node); + while { + // The current node's proximity to its right neighbor was + // stored previously. We now compare the distances between + // the current node's left and right neighbors. If, and only + // if, the current node is closer to its right neighbor, we + // merge these nodes to form an ancestor node. We then + // reform the window, using the ancestor node in the center, + // to check if we must merge again. + // + // If the current node is closer to its left, we do not have + // enough information to merge nodes, and we must continue + // scanning the leaf set leftwards to find a configuration + // that satisfies the merge condition. 
+ if let Some(right_proximity) = proximities.last() { + *right_proximity > left_proximity + } else { + false + } + } { + // The current node is closer to its right neighbor than its + // left neighbor. We now merge the current node with its + // right neighbor. + let current = + nodes.pop().expect("Expected current node to be present"); + let right = nodes.pop().expect("Expected right node to be present"); + let merged = merge_branches(&mut storage, current, right)?; + nodes.push(merged); + + // Now that the current node and its right neighbour are + // merged, the distance between them has collapsed and their + // proximity is no longer needed. + proximities.pop(); + } + proximities.push(left_proximity); + } + nodes.push(left); + } + + // Where possible, all the leaves have been merged. The remaining leaves + // and nodes are stacked in order of height descending. This means that + // they are also ordered with the leftmost leaves at the top and the + // rightmost nodes at the bottom. We can iterate through the stack and + // merge them left to right. + let top = { + let mut node = nodes + .pop() + .expect("Nodes stack must have at least 1 element"); + while let Some(next) = nodes.pop() { + node = merge_branches(&mut storage, node, next)?; + } + node + }; + + // Lastly, all leaves and nodes are merged into one. The resulting node + // may still be an ancestor node below the root. To calculate the final + // root, we merge placeholder nodes along the path until the resulting + // node has the final height and forms the root node. 
+ let mut node = top.node; + let path = top.bits; + let height = node.height(); + let depth = Node::::max_height() - height; + let placeholders = iter::repeat(Node::create_placeholder()).take(depth as usize); + for placeholder in placeholders { + node = Node::create_node_on_path(&path, &node, &placeholder); + storage.insert(node.hash(), &node.as_ref().into())?; + } + + let tree = Self { + root_node: node, + storage, + phantom_table: Default::default(), + }; + Ok(tree) + } pub fn update( &mut self, @@ -600,7 +606,7 @@ mod test { Bytes, StorageMap, }, - sparse::generic::hash_generic::{ + sparse::generic::hash::{ sum, zero_sum, }, @@ -608,22 +614,24 @@ mod test { use fuel_storage::Mappable; use hex; - // fn random_bytes(n: usize, rng: &mut R) -> Bytes32 - // where - // R: rand::Rng + ?Sized, - // { - // let mut bytes = vec![0u8; n]; - // rng.fill(&mut bytes); - // bytes.try_into().unwrap() - // } + fn random_bytes(rng: &mut R) -> Bytes + where + R: rand::Rng + ?Sized, + { + let mut bytes = [0u8; SZ]; + for byte in bytes.as_mut() { + *byte = rng.gen(); + } + bytes + } #[derive(Debug)] struct TestTable; impl Mappable for TestTable { type Key = Self::OwnedKey; - type OwnedKey = Bytes<4>; - type OwnedValue = Primitive<4>; + type OwnedKey = Bytes<32>; + type OwnedValue = Primitive<32>; type Value = Self::OwnedValue; } @@ -1196,7 +1204,7 @@ mod test { // Overwrite the root key-value with an invalid primitive to create a // DeserializeError. 
- let primitive = (0xff, 0xff, [0xff; 4], [0xff; 4]); + let primitive = (0xff, 0xff, [0xff; 32], [0xff; 32]); storage.insert(&root, &primitive).unwrap(); let err = MerkleTree::load(&mut storage, &root) @@ -1204,165 +1212,165 @@ mod test { assert!(matches!(err, MerkleTreeError::DeserializeError(_))); } - // #[test] - // fn test_from_set_yields_expected_root() { - // let rng = &mut rand::thread_rng(); - // let gen = || { - // Some(( - // MerkleTreeKey::new_without_hash(random_bytes32(rng)), - // random_bytes32(rng), - // )) - // }; - // let data = std::iter::from_fn(gen).take(1_000).collect::>(); - // - // let expected_root = { - // let mut storage = StorageMap::::new(); - // let mut tree = MerkleTree::new(&mut storage); - // let input = data.clone(); - // for (key, value) in input.into_iter() { - // tree.update(key, &value).unwrap(); - // } - // tree.root() - // }; - // - // let root = { - // let mut storage = StorageMap::::new(); - // let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); - // tree.root() - // }; - // - // assert_eq!(root, expected_root); - // } - // - // #[test] - // fn test_from_empty_set_yields_expected_root() { - // let rng = &mut rand::thread_rng(); - // let gen = || { - // Some(( - // MerkleTreeKey::new_without_hash(random_bytes32(rng)), - // random_bytes32(rng), - // )) - // }; - // let data = std::iter::from_fn(gen).take(0).collect::>(); - // - // let expected_root = { - // let mut storage = StorageMap::::new(); - // let mut tree = MerkleTree::new(&mut storage); - // let input = data.clone(); - // for (key, value) in input.into_iter() { - // tree.update(key, &value).unwrap(); - // } - // tree.root() - // }; - // - // let root = { - // let mut storage = StorageMap::::new(); - // let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); - // tree.root() - // }; - // - // assert_eq!(root, expected_root); - // } - // - // #[test] - // fn test_from_unit_set_yields_expected_root() { - // let rng = &mut 
rand::thread_rng(); - // let gen = || { - // Some(( - // MerkleTreeKey::new_without_hash(random_bytes32(rng)), - // random_bytes32(rng), - // )) - // }; - // let data = std::iter::from_fn(gen).take(1).collect::>(); - // - // let expected_root = { - // let mut storage = StorageMap::::new(); - // let mut tree = MerkleTree::new(&mut storage); - // let input = data.clone(); - // for (key, value) in input.into_iter() { - // tree.update(key, &value).unwrap(); - // } - // tree.root() - // }; - // - // let root = { - // let mut storage = StorageMap::::new(); - // let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); - // tree.root() - // }; - // - // assert_eq!(root, expected_root); - // } - // - // #[test] - // fn test_from_set_with_duplicate_keys_yields_expected_root() { - // let rng = &mut rand::thread_rng(); - // let keys = [ - // key(b"\x00\x00\x00\x00"), - // key(b"\x00\x00\x00\x01"), - // key(b"\x00\x00\x00\x02"), - // ]; - // let data = [ - // (keys[0], random_bytes32(rng)), - // (keys[1], random_bytes32(rng)), - // (keys[2], random_bytes32(rng)), - // (keys[0], random_bytes32(rng)), - // (keys[1], random_bytes32(rng)), - // (keys[2], random_bytes32(rng)), - // ]; - // - // let expected_root = { - // let mut storage = StorageMap::::new(); - // let mut tree = MerkleTree::new(&mut storage); - // let input = data; - // for (key, value) in input.into_iter() { - // tree.update(key, &value).unwrap(); - // } - // tree.root() - // }; - // - // let root = { - // let mut storage = StorageMap::::new(); - // let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); - // tree.root() - // }; - // - // assert_eq!(root, expected_root); - // } - // - // #[test] - // fn test_from_set_with_empty_data_yields_expected_root() { - // let rng = &mut rand::thread_rng(); - // let keys = [ - // key(b"\x00\x00\x00\x00"), - // key(b"\x00\x00\x00\x01"), - // key(b"\x00\x00\x00\x02"), - // ]; - // let data = [ - // (keys[0], random_bytes32(rng).to_vec()), - // 
(keys[1], random_bytes32(rng).to_vec()), - // (keys[2], random_bytes32(rng).to_vec()), - // (keys[0], b"".to_vec()), - // (keys[1], b"".to_vec()), - // (keys[2], b"".to_vec()), - // ]; - // - // let expected_root = { - // let mut storage = StorageMap::::new(); - // let mut tree = MerkleTree::new(&mut storage); - // let input = data.clone(); - // for (key, value) in input.into_iter() { - // tree.update(key, &value).unwrap(); - // } - // tree.root() - // }; - // - // let root = { - // let mut storage = StorageMap::::new(); - // let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); - // tree.root() - // }; - // - // assert_eq!(root, expected_root); - // } + #[test] + fn test_from_set_yields_expected_root() { + let rng = &mut rand::thread_rng(); + let gen = || { + Some(( + MerkleTreeKey::new_without_hash(random_bytes::<_, 32>(rng)), + random_bytes::<_, 32>(rng), + )) + }; + let data = std::iter::from_fn(gen).take(1_000).collect::>(); + + let expected_root = { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + let input = data.clone(); + for (key, value) in input.into_iter() { + tree.update(key, &value).unwrap(); + } + tree.root() + }; + + let root = { + let mut storage = StorageMap::::new(); + let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); + tree.root() + }; + + assert_eq!(root, expected_root); + } + + #[test] + fn test_from_empty_set_yields_expected_root() { + let rng = &mut rand::thread_rng(); + let gen = || { + Some(( + MerkleTreeKey::new_without_hash(random_bytes::<_, 32>(rng)), + random_bytes::<_, 32>(rng), + )) + }; + let data = std::iter::from_fn(gen).take(0).collect::>(); + + let expected_root = { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + let input = data.clone(); + for (key, value) in input.into_iter() { + tree.update(key, &value).unwrap(); + } + tree.root() + }; + + let root = { + let mut storage = StorageMap::::new(); + let 
tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); + tree.root() + }; + + assert_eq!(root, expected_root); + } + + #[test] + fn test_from_unit_set_yields_expected_root() { + let rng = &mut rand::thread_rng(); + let gen = || { + Some(( + MerkleTreeKey::new_without_hash(random_bytes::<_, 32>(rng)), + random_bytes::<_, 32>(rng), + )) + }; + let data = std::iter::from_fn(gen).take(1).collect::>(); + + let expected_root = { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + let input = data.clone(); + for (key, value) in input.into_iter() { + tree.update(key, &value).unwrap(); + } + tree.root() + }; + + let root = { + let mut storage = StorageMap::::new(); + let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); + tree.root() + }; + + assert_eq!(root, expected_root); + } + + #[test] + fn test_from_set_with_duplicate_keys_yields_expected_root() { + let rng = &mut rand::thread_rng(); + let keys = [ + key(b"\x00\x00\x00\x00"), + key(b"\x00\x00\x00\x01"), + key(b"\x00\x00\x00\x02"), + ]; + let data = [ + (keys[0], random_bytes::<_, 32>(rng)), + (keys[1], random_bytes::<_, 32>(rng)), + (keys[2], random_bytes::<_, 32>(rng)), + (keys[0], random_bytes::<_, 32>(rng)), + (keys[1], random_bytes::<_, 32>(rng)), + (keys[2], random_bytes::<_, 32>(rng)), + ]; + + let expected_root = { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + let input = data; + for (key, value) in input.into_iter() { + tree.update(key, &value).unwrap(); + } + tree.root() + }; + + let root = { + let mut storage = StorageMap::::new(); + let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); + tree.root() + }; + + assert_eq!(root, expected_root); + } + + #[test] + fn test_from_set_with_empty_data_yields_expected_root() { + let rng = &mut rand::thread_rng(); + let keys = [ + key(b"\x00\x00\x00\x00"), + key(b"\x00\x00\x00\x01"), + key(b"\x00\x00\x00\x02"), + ]; + let data = [ + 
(keys[0], random_bytes::<_, 32>(rng).to_vec()), + (keys[1], random_bytes::<_, 32>(rng).to_vec()), + (keys[2], random_bytes::<_, 32>(rng).to_vec()), + (keys[0], b"".to_vec()), + (keys[1], b"".to_vec()), + (keys[2], b"".to_vec()), + ]; + + let expected_root = { + let mut storage = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage); + let input = data.clone(); + for (key, value) in input.into_iter() { + tree.update(key, &value).unwrap(); + } + tree.root() + }; + + let root = { + let mut storage = StorageMap::::new(); + let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); + tree.root() + }; + + assert_eq!(root, expected_root); + } } diff --git a/fuel-merkle/src/sparse/generic/node_generic.rs b/fuel-merkle/src/sparse/generic/node.rs similarity index 99% rename from fuel-merkle/src/sparse/generic/node_generic.rs rename to fuel-merkle/src/sparse/generic/node.rs index 68f6c5a3fd..32f55ef11e 100644 --- a/fuel-merkle/src/sparse/generic/node_generic.rs +++ b/fuel-merkle/src/sparse/generic/node.rs @@ -15,11 +15,11 @@ use crate::{ Prefix, }, sparse::generic::{ - hash_generic::{ + hash::{ sum, zero_sum, }, - primitive_generic::Primitive, + primitive::Primitive, }, storage::{ Mappable, diff --git a/fuel-merkle/src/sparse/generic/primitive_generic.rs b/fuel-merkle/src/sparse/generic/primitive.rs similarity index 98% rename from fuel-merkle/src/sparse/generic/primitive_generic.rs rename to fuel-merkle/src/sparse/generic/primitive.rs index d84abba0c4..18c8ffe9d7 100644 --- a/fuel-merkle/src/sparse/generic/primitive_generic.rs +++ b/fuel-merkle/src/sparse/generic/primitive.rs @@ -5,7 +5,7 @@ use crate::{ Prefix, PrefixError, }, - sparse::generic::node_generic::Node, + sparse::generic::node::Node, }; /// **Leaf buffer:** From 564dda7634796eac5e8ab23436911a1d213dc9c5 Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Wed, 17 Jan 2024 17:14:27 -0500 Subject: [PATCH 04/15] Refactor --- fuel-merkle/src/sparse.rs | 29 +- 
fuel-merkle/src/sparse/branch.rs | 90 -- fuel-merkle/src/sparse/generic.rs | 5 + fuel-merkle/src/sparse/generic/merkle_tree.rs | 3 +- fuel-merkle/src/sparse/generic/node.rs | 22 +- fuel-merkle/src/sparse/hash.rs | 21 - fuel-merkle/src/sparse/in_memory.rs | 4 +- fuel-merkle/src/sparse/merkle_tree.rs | 1345 ----------------- fuel-merkle/src/sparse/node.rs | 795 ---------- fuel-merkle/src/sparse/primitive.rs | 77 - 10 files changed, 31 insertions(+), 2360 deletions(-) delete mode 100644 fuel-merkle/src/sparse/branch.rs delete mode 100644 fuel-merkle/src/sparse/hash.rs delete mode 100644 fuel-merkle/src/sparse/merkle_tree.rs delete mode 100644 fuel-merkle/src/sparse/node.rs delete mode 100644 fuel-merkle/src/sparse/primitive.rs diff --git a/fuel-merkle/src/sparse.rs b/fuel-merkle/src/sparse.rs index 7f8b2b5874..9d47e8e5bd 100644 --- a/fuel-merkle/src/sparse.rs +++ b/fuel-merkle/src/sparse.rs @@ -1,22 +1,15 @@ -mod hash; -mod merkle_tree; -mod node; -mod primitive; - -pub(crate) use hash::zero_sum; -pub(crate) use node::{ - Node, - StorageNode, - StorageNodeError, +pub(crate) use generic::{ + hash::zero_sum, + node::Node, }; -pub(crate) mod branch; -pub use merkle_tree::{ - MerkleTree, - MerkleTreeError, - MerkleTreeKey, -}; -pub use primitive::Primitive; +pub mod generic; pub mod in_memory; -pub mod generic; +// Define default Merkle Tree structures as concrete implementations of generic +// types, using 32 byte key sizes +pub type MerkleTree = + generic::MerkleTree<32, TableType, StorageType>; +pub type MerkleTreeError = generic::MerkleTreeError<32, StorageError>; +pub type MerkleTreeKey = generic::MerkleTreeKey<32>; +pub type Primitive = generic::Primitive<32>; diff --git a/fuel-merkle/src/sparse/branch.rs b/fuel-merkle/src/sparse/branch.rs deleted file mode 100644 index 4b56a67da2..0000000000 --- a/fuel-merkle/src/sparse/branch.rs +++ /dev/null @@ -1,90 +0,0 @@ -use crate::{ - common::{ - path::ComparablePath, - Bytes32, - }, - sparse::{ - Node, - Primitive, - }, 
-}; -use fuel_storage::{ - Mappable, - StorageMutate, -}; - -use core::iter; - -pub(crate) struct Branch { - pub bits: Bytes32, - pub node: Node, -} - -impl From for Branch { - fn from(leaf: Node) -> Self { - Self { - bits: *leaf.leaf_key(), - node: leaf, - } - } -} - -pub(crate) fn merge_branches( - storage: &mut Storage, - mut left_branch: Branch, - mut right_branch: Branch, -) -> Result -where - Storage: StorageMutate
, - Table: Mappable, -{ - let branch = if left_branch.node.is_leaf() && right_branch.node.is_leaf() { - let parent_depth = left_branch.node.common_path_length(&right_branch.node); - let parent_height = Node::max_height() - parent_depth; - let node = - Node::create_node(&left_branch.node, &right_branch.node, parent_height); - Branch { - bits: left_branch.bits, - node, - } - } else { - let ancestor_depth = left_branch.bits.common_path_length(&right_branch.bits); - let ancestor_height = Node::max_height() - ancestor_depth; - if right_branch.node.is_node() { - let mut current_node = right_branch.node; - let path = right_branch.bits; - let parent_height = current_node.height() + 1; - let stale_depth = ancestor_height - parent_height; - let placeholders = - iter::repeat(Node::create_placeholder()).take(stale_depth as usize); - for placeholder in placeholders { - current_node = - Node::create_node_on_path(&path, ¤t_node, &placeholder); - storage.insert(current_node.hash(), ¤t_node.as_ref().into())?; - } - right_branch.node = current_node; - } - if left_branch.node.is_node() { - let mut current_node = left_branch.node; - let path = left_branch.bits; - let parent_height = current_node.height() + 1; - let stale_depth = ancestor_height - parent_height; - let placeholders = - iter::repeat(Node::create_placeholder()).take(stale_depth as usize); - for placeholder in placeholders { - current_node = - Node::create_node_on_path(&path, ¤t_node, &placeholder); - storage.insert(current_node.hash(), ¤t_node.as_ref().into())?; - } - left_branch.node = current_node; - } - let node = - Node::create_node(&left_branch.node, &right_branch.node, ancestor_height); - Branch { - bits: left_branch.bits, - node, - } - }; - storage.insert(branch.node.hash(), &branch.node.as_ref().into())?; - Ok(branch) -} diff --git a/fuel-merkle/src/sparse/generic.rs b/fuel-merkle/src/sparse/generic.rs index 2273c9a812..aacb4c0a03 100644 --- a/fuel-merkle/src/sparse/generic.rs +++ 
b/fuel-merkle/src/sparse/generic.rs @@ -4,5 +4,10 @@ pub mod merkle_tree; pub mod node; pub mod primitive; +pub use merkle_tree::{ + MerkleTree, + MerkleTreeError, + MerkleTreeKey, +}; pub(crate) use node::Node; pub(crate) use primitive::Primitive; diff --git a/fuel-merkle/src/sparse/generic/merkle_tree.rs b/fuel-merkle/src/sparse/generic/merkle_tree.rs index 762f94bafe..09745cf580 100644 --- a/fuel-merkle/src/sparse/generic/merkle_tree.rs +++ b/fuel-merkle/src/sparse/generic/merkle_tree.rs @@ -644,7 +644,8 @@ mod test { let mut storage = StorageMap::::new(); let tree = MerkleTree::new(&mut storage); let root = tree.root(); - let expected_root = "00000000"; + let expected_root = + "0000000000000000000000000000000000000000000000000000000000000000"; assert_eq!(hex::encode(root), expected_root); } diff --git a/fuel-merkle/src/sparse/generic/node.rs b/fuel-merkle/src/sparse/generic/node.rs index 32f55ef11e..5c7fb0bab2 100644 --- a/fuel-merkle/src/sparse/generic/node.rs +++ b/fuel-merkle/src/sparse/generic/node.rs @@ -441,7 +441,7 @@ mod test_node { PrefixError, }, sparse::{ - hash::sum, + generic::hash::sum, zero_sum, Node, Primitive, @@ -452,13 +452,13 @@ mod test_node { let mut buffer = [0; 65]; buffer[0..1].clone_from_slice(Prefix::Leaf.as_ref()); buffer[1..33].clone_from_slice(key); - buffer[33..65].clone_from_slice(&sum(data)); + buffer[33..65].clone_from_slice(&sum::<_, 32>(data)); sum(buffer) } #[test] fn test_create_leaf_returns_a_valid_leaf() { - let leaf = Node::create_leaf(&sum(b"LEAF"), [1u8; 32]); + let leaf = Node::<32>::create_leaf(&sum(b"LEAF"), [1u8; 32]); assert_eq!(leaf.is_leaf(), true); assert_eq!(leaf.is_node(), false); assert_eq!(leaf.height(), 0); @@ -488,7 +488,7 @@ mod test_node { #[test] fn test_create_placeholder_returns_a_placeholder_node() { - let node = Node::create_placeholder(); + let node = Node::<32>::create_placeholder(); assert_eq!(node.is_placeholder(), true); assert_eq!(node.hash(), zero_sum()); } @@ -497,7 +497,7 @@ mod test_node 
{ fn test_create_leaf_from_primitive_returns_a_valid_leaf() { let primitive = (0, Prefix::Leaf as u8, [0xff; 32], [0xff; 32]); - let node: Node = primitive.try_into().unwrap(); + let node: Node<32> = primitive.try_into().unwrap(); assert_eq!(node.is_leaf(), true); assert_eq!(node.is_node(), false); assert_eq!(node.height(), 0); @@ -510,7 +510,7 @@ mod test_node { fn test_create_node_from_primitive_returns_a_valid_node() { let primitive = (255, Prefix::Node as u8, [0xff; 32], [0xff; 32]); - let node: Node = primitive.try_into().unwrap(); + let node: Node<32> = primitive.try_into().unwrap(); assert_eq!(node.is_leaf(), false); assert_eq!(node.is_node(), true); assert_eq!(node.height(), 255); @@ -570,9 +570,9 @@ mod test_node { fn test_leaf_hash_returns_expected_hash_value() { let mut expected_buffer = [0u8; 65]; expected_buffer[0..1].clone_from_slice(Prefix::Leaf.as_ref()); - expected_buffer[1..33].clone_from_slice(&sum(b"LEAF")); - expected_buffer[33..65].clone_from_slice(&sum([1u8; 32])); - let expected_value = sum(expected_buffer); + expected_buffer[1..33].clone_from_slice(&sum::<_, 32>(b"LEAF")); + expected_buffer[33..65].clone_from_slice(&sum::<_, 32>([1u8; 32])); + let expected_value = sum::<_, 32>(expected_buffer); let node = Node::create_leaf(&sum(b"LEAF"), [1u8; 32]); let value = *node.hash(); @@ -590,7 +590,7 @@ mod test_node { .clone_from_slice(&leaf_hash(&sum(b"LEFT CHILD"), &[1u8; 32])); expected_buffer[33..65] .clone_from_slice(&leaf_hash(&sum(b"RIGHT CHILD"), &[1u8; 32])); - let expected_value = sum(expected_buffer); + let expected_value = sum::<_, 32>(expected_buffer); let left_child = Node::create_leaf(&sum(b"LEFT CHILD"), [1u8; 32]); let right_child = Node::create_leaf(&sum(b"RIGHT CHILD"), [1u8; 32]); @@ -621,7 +621,7 @@ mod test_storage_node { StorageMap, }, sparse::{ - hash::sum, + generic::hash::sum, Primitive, }, storage::{ diff --git a/fuel-merkle/src/sparse/hash.rs b/fuel-merkle/src/sparse/hash.rs deleted file mode 100644 index 
837f6befb2..0000000000 --- a/fuel-merkle/src/sparse/hash.rs +++ /dev/null @@ -1,21 +0,0 @@ -use crate::common::Bytes32; - -use digest::Digest; -use sha2::Sha256; - -pub(crate) type Hash = Sha256; - -pub const fn zero_sum() -> &'static Bytes32 { - const ZERO_SUM: Bytes32 = [0; 32]; - - &ZERO_SUM -} - -pub fn sum(data: I) -> Bytes32 -where - I: AsRef<[u8]>, -{ - let mut hash = Hash::new(); - hash.update(data); - hash.finalize().try_into().unwrap() -} diff --git a/fuel-merkle/src/sparse/in_memory.rs b/fuel-merkle/src/sparse/in_memory.rs index e6202ff7be..d1a8c5cf66 100644 --- a/fuel-merkle/src/sparse/in_memory.rs +++ b/fuel-merkle/src/sparse/in_memory.rs @@ -5,7 +5,7 @@ use crate::{ }, sparse::{ self, - merkle_tree::MerkleTreeKey, + MerkleTreeKey, Primitive, }, storage::{ @@ -188,7 +188,7 @@ impl Default for MerkleTree { #[cfg(test)] mod test { use super::*; - use sparse::hash::sum; + use sparse::generic::hash::sum; fn key(data: &[u8]) -> MerkleTreeKey { MerkleTreeKey::new_without_hash(sum(data)) diff --git a/fuel-merkle/src/sparse/merkle_tree.rs b/fuel-merkle/src/sparse/merkle_tree.rs deleted file mode 100644 index e94b4b65a7..0000000000 --- a/fuel-merkle/src/sparse/merkle_tree.rs +++ /dev/null @@ -1,1345 +0,0 @@ -use crate::{ - common::{ - error::DeserializeError, - node::ChildError, - AsPathIterator, - Bytes32, - }, - sparse::{ - primitive::Primitive, - Node, - StorageNode, - StorageNodeError, - }, - storage::{ - Mappable, - StorageInspect, - StorageMutate, - }, -}; - -use crate::sparse::{ - branch::{ - merge_branches, - Branch, - }, - zero_sum, -}; -use alloc::vec::Vec; -use core::{ - cmp, - iter, - marker::PhantomData, -}; - -#[derive(Debug, Clone, derive_more::Display)] -pub enum MerkleTreeError { - #[display( - fmt = "cannot load node with key {}; the key is not found in storage", - "hex::encode(_0)" - )] - LoadError(Bytes32), - - #[display(fmt = "{}", _0)] - StorageError(StorageError), - - #[display(fmt = "{}", _0)] - DeserializeError(DeserializeError), - - 
#[display(fmt = "{}", _0)] - ChildError(ChildError>), -} - -impl From for MerkleTreeError { - fn from(err: StorageError) -> MerkleTreeError { - MerkleTreeError::StorageError(err) - } -} - -/// The safe Merkle tree storage key prevents Merkle tree structure manipulations. -/// The type contains only one constructor that hashes the storage key. -#[derive(Debug, Clone, Copy)] -pub struct MerkleTreeKey(Bytes32); - -impl MerkleTreeKey { - /// The safe way to create a `Self`. It hashes the `storage_key`, making - /// it entirely random and preventing SMT structure manipulation. - pub fn new(storage_key: B) -> Self - where - B: AsRef<[u8]>, - { - use digest::Digest; - let mut hash = sha2::Sha256::new(); - hash.update(storage_key.as_ref()); - let hash = hash - .finalize() - .try_into() - .expect("`sha2::Sha256` can't fail during hashing"); - - Self(hash) - } - - /// Unsafe analog to create a `Self` that doesn't hash the `storage_key` unlike - /// `Self::new`. - /// - /// # Safety - /// - /// It is safe to use this method if you know that `storage_key` - /// was randomly generated like `ContractId` or `AssetId`. 
- pub unsafe fn convert(storage_key: B) -> Self - where - B: Into, - { - Self(storage_key.into()) - } - - #[cfg(any(test, feature = "test-helpers"))] - pub fn new_without_hash(storage_key: B) -> Self - where - B: Into, - { - unsafe { Self::convert(storage_key) } - } -} - -impl From for Bytes32 { - fn from(value: MerkleTreeKey) -> Self { - value.0 - } -} - -#[derive(Debug)] -pub struct MerkleTree { - root_node: Node, - storage: StorageType, - phantom_table: PhantomData, -} - -impl MerkleTree { - pub const fn empty_root() -> &'static Bytes32 { - zero_sum() - } - - pub fn root(&self) -> Bytes32 { - *self.root_node().hash() - } - - pub fn into_storage(self) -> StorageType { - self.storage - } - - pub fn storage(&self) -> &StorageType { - &self.storage - } - - // PRIVATE - - fn root_node(&self) -> &Node { - &self.root_node - } - - fn set_root_node(&mut self, node: Node) { - debug_assert!(node.is_leaf() || node.height() == Node::max_height()); - self.root_node = node; - } -} - -impl MerkleTree -where - TableType: Mappable, - StorageType: StorageInspect, -{ - pub fn new(storage: StorageType) -> Self { - Self { - root_node: Node::create_placeholder(), - storage, - phantom_table: Default::default(), - } - } - - pub fn load( - storage: StorageType, - root: &Bytes32, - ) -> Result> { - if root == Self::empty_root() { - let tree = Self::new(storage); - Ok(tree) - } else { - let primitive = storage - .get(root)? - .ok_or_else(|| MerkleTreeError::LoadError(*root))? 
- .into_owned(); - let tree = Self { - root_node: primitive - .try_into() - .map_err(MerkleTreeError::DeserializeError)?, - storage, - phantom_table: Default::default(), - }; - Ok(tree) - } - } - - // PRIVATE - - fn path_set( - &self, - leaf_key: Bytes32, - ) -> Result<(Vec, Vec), MerkleTreeError> { - let root_node = self.root_node().clone(); - let root_storage_node = StorageNode::new(&self.storage, root_node); - let (mut path_nodes, mut side_nodes): (Vec, Vec) = root_storage_node - .as_path_iter(leaf_key) - .map(|(path_node, side_node)| { - Ok(( - path_node.map_err(MerkleTreeError::ChildError)?.into_node(), - side_node.map_err(MerkleTreeError::ChildError)?.into_node(), - )) - }) - .collect::, MerkleTreeError>>()? - .into_iter() - .unzip(); - path_nodes.reverse(); - side_nodes.reverse(); - side_nodes.pop(); // The last element in the side nodes list is the - // root; remove it. - - Ok((path_nodes, side_nodes)) - } -} - -impl MerkleTree -where - TableType: Mappable, - StorageType: StorageMutate, -{ - /// Build a sparse Merkle tree from a set of key-value pairs. This is - /// equivalent to creating an empty sparse Merkle tree and sequentially - /// calling [update](Self::update) for each key-value pair. This constructor - /// is more performant than calling individual sequential updates and is the - /// preferred approach when the key-values are known upfront. Leaves can be - /// appended to the returned tree using `update` to further accumulate leaf - /// data. 
- pub fn from_set( - mut storage: StorageType, - set: I, - ) -> Result - where - I: Iterator, - B: Into, - D: AsRef<[u8]>, - { - let sorted = set - .into_iter() - .map(|(k, v)| (k.into(), v)) - .collect::>(); - let mut branches = sorted - .iter() - .filter(|(_, value)| !value.as_ref().is_empty()) - .map(|(key, data)| Node::create_leaf(key, data)) - .map(Into::::into) - .collect::>(); - - for branch in branches.iter() { - let leaf = &branch.node; - storage.insert(leaf.hash(), &leaf.as_ref().into())?; - } - - if branches.is_empty() { - let tree = Self::new(storage); - return Ok(tree) - } - - if branches.len() == 1 { - let leaf = branches.pop().expect("Expected at least 1 leaf").node; - let mut tree = Self::new(storage); - tree.set_root_node(leaf); - return Ok(tree) - } - - let mut nodes = Vec::::with_capacity(branches.len()); - let mut proximities = Vec::::with_capacity(branches.len()); - - // Building the tree starts by merging all leaf nodes where possible. - // Given a set of leaf nodes sorted left to right (i.e., keys are sorted - // in lexical order), we scan the leaf set right to left, and analyze a - // moving window of three leaves: a center (or "current") leaf, its left - // neighbor, and its right neighbor. - // - // When merging leaf nodes, we analyze this three-node window to - // determine if the condition for merging is met: When the current node - // is closer to its right neighbor than it is to its left neighbor, we - // merge the current node with its right neighbor. The merged node then - // becomes the center of the window, and we must check the merge - // condition again. We calculate proximity using the common path length - // between two nodes, which is also the depth of their shared ancestor - // in the tree. - // - // This three-node window is centered around a current node, and moves - // leftward: At the next iteration, the current node is now the right - // node, the left node is now the current node, and so on. 
When we have - // checked all windows, we know that we have merged all leaf nodes where - // possible. - while let Some(left) = branches.pop() { - if let Some(current) = nodes.last() { - let left_proximity = current.node.common_path_length(&left.node); - while { - // The current node's proximity to its right neighbor was - // stored previously. We now compare the distances between - // the current node's left and right neighbors. If, and only - // if, the current node is closer to its right neighbor, we - // merge these nodes to form an ancestor node. We then - // reform the window, using the ancestor node in the center, - // to check if we must merge again. - // - // If the current node is closer to its left, we do not have - // enough information to merge nodes, and we must continue - // scanning the leaf set leftwards to find a configuration - // that satisfies the merge condition. - if let Some(right_proximity) = proximities.last() { - *right_proximity > left_proximity - } else { - false - } - } { - // The current node is closer to its right neighbor than its - // left neighbor. We now merge the current node with its - // right neighbor. - let current = - nodes.pop().expect("Expected current node to be present"); - let right = nodes.pop().expect("Expected right node to be present"); - let merged = merge_branches(&mut storage, current, right)?; - nodes.push(merged); - - // Now that the current node and its right neighbour are - // merged, the distance between them has collapsed and their - // proximity is no longer needed. - proximities.pop(); - } - proximities.push(left_proximity); - } - nodes.push(left); - } - - // Where possible, all the leaves have been merged. The remaining leaves - // and nodes are stacked in order of height descending. This means that - // they are also ordered with the leftmost leaves at the top and the - // rightmost nodes at the bottom. We can iterate through the stack and - // merge them left to right. 
- let top = { - let mut node = nodes - .pop() - .expect("Nodes stack must have at least 1 element"); - while let Some(next) = nodes.pop() { - node = merge_branches(&mut storage, node, next)?; - } - node - }; - - // Lastly, all leaves and nodes are merged into one. The resulting node - // may still be an ancestor node below the root. To calculate the final - // root, we merge placeholder nodes along the path until the resulting - // node has the final height and forms the root node. - let mut node = top.node; - let path = top.bits; - let height = node.height(); - let depth = Node::max_height() - height; - let placeholders = iter::repeat(Node::create_placeholder()).take(depth as usize); - for placeholder in placeholders { - node = Node::create_node_on_path(&path, &node, &placeholder); - storage.insert(node.hash(), &node.as_ref().into())?; - } - - let tree = Self { - root_node: node, - storage, - phantom_table: Default::default(), - }; - Ok(tree) - } - - pub fn update( - &mut self, - key: MerkleTreeKey, - data: &[u8], - ) -> Result<(), MerkleTreeError> { - if data.is_empty() { - // If the data is empty, this signifies a delete operation for the - // given key. - self.delete(key)?; - return Ok(()) - } - - let key = key.into(); - let leaf_node = Node::create_leaf(&key, data); - self.storage - .insert(leaf_node.hash(), &leaf_node.as_ref().into())?; - - if self.root_node().is_placeholder() { - self.set_root_node(leaf_node); - } else { - let (path_nodes, side_nodes) = self.path_set(key)?; - self.update_with_path_set( - &leaf_node, - path_nodes.as_slice(), - side_nodes.as_slice(), - )?; - } - - Ok(()) - } - - pub fn delete( - &mut self, - key: MerkleTreeKey, - ) -> Result<(), MerkleTreeError> { - if self.root() == *Self::empty_root() { - // The zero root signifies that all leaves are empty, including the - // given key. 
- return Ok(()) - } - - let key = key.into(); - let (path_nodes, side_nodes): (Vec, Vec) = self.path_set(key)?; - - match path_nodes.get(0) { - Some(node) if node.leaf_key() == &key => { - self.delete_with_path_set( - &key, - path_nodes.as_slice(), - side_nodes.as_slice(), - )?; - } - _ => {} - }; - - Ok(()) - } - - // PRIVATE - - fn update_with_path_set( - &mut self, - requested_leaf_node: &Node, - path_nodes: &[Node], - side_nodes: &[Node], - ) -> Result<(), StorageError> { - let path = requested_leaf_node.leaf_key(); - let actual_leaf_node = &path_nodes[0]; - - if requested_leaf_node == actual_leaf_node { - return Ok(()) - } - - // Build the tree upwards starting with the requested leaf node. - let mut current_node = requested_leaf_node.clone(); - - // If we are creating a new leaf node, the corresponding side node will - // be the first node in the path set. The side node will be the leaf - // node currently closest to the requested new leaf node. When creating - // a new leaf node, we must merge the leaf node with its corresponding - // side node to create a common ancestor. We then continue building the - // tree upwards from this ancestor node. This may require creating new - // placeholder side nodes, in addition to the existing side node set. - // - // If we are updating an existing leaf node, the leaf node we are - // updating is the first node in the path set. The side node set will - // already include all the side nodes needed to build up the tree from - // the requested leaf node, since these side nodes were already built - // during the creation of the leaf node. - // - // We can determine if we are updating an existing leaf node, or if we - // are creating a new leaf node, by comparing the paths of the requested - // leaf node and the leaf node at the start of the path set. When the - // paths are equal, it means the leaf nodes occupy the same location, - // and we are updating an existing leaf. 
Otherwise, it means we are - // adding a new leaf node. - if requested_leaf_node.leaf_key() != actual_leaf_node.leaf_key() { - // Merge leaves - if !actual_leaf_node.is_placeholder() { - current_node = - Node::create_node_on_path(path, ¤t_node, actual_leaf_node); - self.storage - .insert(current_node.hash(), ¤t_node.as_ref().into())?; - } - - // Merge placeholders - let ancestor_depth = requested_leaf_node.common_path_length(actual_leaf_node); - let stale_depth = cmp::max(side_nodes.len(), ancestor_depth as usize); - let placeholders_count = stale_depth - side_nodes.len(); - let placeholders = - iter::repeat(Node::create_placeholder()).take(placeholders_count); - for placeholder in placeholders { - current_node = - Node::create_node_on_path(path, ¤t_node, &placeholder); - self.storage - .insert(current_node.hash(), ¤t_node.as_ref().into())?; - } - } else { - self.storage.remove(actual_leaf_node.hash())?; - } - - // Merge side nodes - for side_node in side_nodes { - current_node = Node::create_node_on_path(path, ¤t_node, side_node); - self.storage - .insert(current_node.hash(), ¤t_node.as_ref().into())?; - } - - for node in path_nodes.iter().skip(1 /* leaf */) { - self.storage.remove(node.hash())?; - } - - self.set_root_node(current_node); - - Ok(()) - } - - fn delete_with_path_set( - &mut self, - requested_leaf_key: &Bytes32, - path_nodes: &[Node], - side_nodes: &[Node], - ) -> Result<(), StorageError> { - for node in path_nodes { - self.storage.remove(node.hash())?; - } - - let path = requested_leaf_key; - let mut side_nodes_iter = side_nodes.iter(); - - // The deleted leaf is replaced by a placeholder. Build the tree upwards - // starting with the placeholder. - let mut current_node = Node::create_placeholder(); - - // If the first side node is a leaf, it means the ancestor node is now - // parent to a placeholder (the deleted leaf node) and a leaf node (the - // first side node). 
We can immediately discard the ancestor node from - // further calculation and attach the orphaned leaf node to its next - // ancestor. Any subsequent ancestor nodes composed of this leaf node - // and a placeholder must be similarly discarded from further - // calculation. We then create a valid ancestor node for the orphaned - // leaf node by joining it with the earliest non-placeholder side node. - if let Some(first_side_node) = side_nodes.first() { - if first_side_node.is_leaf() { - side_nodes_iter.next(); - current_node = first_side_node.clone(); - - // Advance the side node iterator to the next non-placeholder - // node. This may be either another leaf node or an internal - // node. If only placeholder nodes exist beyond the first leaf - // node, then that leaf node is, in fact, the new root node. - // - // Using `find(..)` advances the iterator beyond the next - // non-placeholder side node and returns it. Therefore, we must - // consume the side node at this point. If another non- - // placeholder node was found in the side node collection, merge - // it with the first side node. This guarantees that the current - // node will be an internal node, and not a leaf, by the time we - // start merging the remaining side nodes. - // See https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.find. 
- if let Some(side_node) = - side_nodes_iter.find(|side_node| !side_node.is_placeholder()) - { - current_node = - Node::create_node_on_path(path, ¤t_node, side_node); - self.storage - .insert(current_node.hash(), ¤t_node.as_ref().into())?; - } - } - } - - // Merge side nodes - for side_node in side_nodes_iter { - current_node = Node::create_node_on_path(path, ¤t_node, side_node); - self.storage - .insert(current_node.hash(), ¤t_node.as_ref().into())?; - } - - self.set_root_node(current_node); - - Ok(()) - } -} - -#[cfg(test)] -mod test { - use crate::{ - common::{ - Bytes32, - StorageMap, - }, - sparse::{ - hash::{ - sum, - zero_sum, - }, - MerkleTree, - MerkleTreeError, - MerkleTreeKey, - Node, - Primitive, - }, - }; - use fuel_storage::Mappable; - use hex; - - fn random_bytes32(rng: &mut R) -> Bytes32 - where - R: rand::Rng + ?Sized, - { - let mut bytes = [0u8; 32]; - rng.fill(bytes.as_mut()); - bytes - } - - #[derive(Debug)] - struct TestTable; - - impl Mappable for TestTable { - type Key = Self::OwnedKey; - type OwnedKey = Bytes32; - type OwnedValue = Primitive; - type Value = Self::OwnedValue; - } - - fn key>(data: B) -> MerkleTreeKey { - MerkleTreeKey::new_without_hash(sum(data.as_ref())) - } - - #[test] - fn test_empty_root() { - let mut storage = StorageMap::::new(); - let tree = MerkleTree::new(&mut storage); - let root = tree.root(); - let expected_root = - "0000000000000000000000000000000000000000000000000000000000000000"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_1() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - - let root = tree.root(); - let expected_root = - "39f36a7cb4dfb1b46f03d044265df6a491dffc1034121bc1071a34ddce9bb14b"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_2() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - 
tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); - - let root = tree.root(); - let expected_root = - "8d0ae412ca9ca0afcb3217af8bcd5a673e798bd6fd1dfacad17711e883f494cb"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_3() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); - - let root = tree.root(); - let expected_root = - "52295e42d8de2505fdc0cc825ff9fead419cbcf540d8b30c7c4b9c9b94c268b7"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_5() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x03"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); - - let root = tree.root(); - let expected_root = - "108f731f2414e33ae57e584dc26bd276db07874436b2264ca6e520c658185c6b"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_10() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - for i in 0_u32..10 { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - - let root = tree.root(); - let expected_root = - "21ca4917e99da99a61de93deaf88c400d4c082991cb95779e444d43dd13e8849"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_100() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - for i in 0_u32..100 { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - - let root = 
tree.root(); - let expected_root = - "82bf747d455a55e2f7044a03536fc43f1f55d43b855e72c0110c986707a23e4d"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_with_repeated_inputs() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - - let root = tree.root(); - let expected_root = - "39f36a7cb4dfb1b46f03d044265df6a491dffc1034121bc1071a34ddce9bb14b"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_overwrite_key() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x00"), b"CHANGE").unwrap(); - - let root = tree.root(); - let expected_root = - "dd97174c80e5e5aa3a31c61b05e279c1495c8a07b2a08bca5dbc9fb9774f9457"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_overwrite_key_2() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - for i in 0_u32..10 { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - - let root_hash_before = tree.root(); - - for i in 3_u32..7 { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA_2").unwrap(); - } - - for i in 3_u32..7 { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - - let root_hash_after = tree.root(); - - assert_eq!(root_hash_before, root_hash_after); - } - - #[test] - fn test_update_union() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - for i in 0_u32..5 { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - - for i in 10_u32..15 { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - - for i in 20_u32..25 { - let key = key(i.to_be_bytes()); - 
tree.update(key, b"DATA").unwrap(); - } - - let root = tree.root(); - let expected_root = - "7e6643325042cfe0fc76626c043b97062af51c7e9fc56665f12b479034bce326"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_sparse_union() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x06"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x08"), b"DATA").unwrap(); - - let root = tree.root(); - let expected_root = - "e912e97abc67707b2e6027338292943b53d01a7fbd7b244674128c7e468dd696"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_with_empty_data_does_not_change_root() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - tree.update(key(b"\x00\x00\x00\x00"), b"").unwrap(); - - let root = tree.root(); - let expected_root = - "0000000000000000000000000000000000000000000000000000000000000000"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_with_empty_data_performs_delete() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x00"), b"").unwrap(); - - let root = tree.root(); - let expected_root = - "0000000000000000000000000000000000000000000000000000000000000000"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_1_delete_1() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.delete(key(b"\x00\x00\x00\x00")).unwrap(); - - let root = tree.root(); - let expected_root = - 
"0000000000000000000000000000000000000000000000000000000000000000"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_2_delete_1() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); - tree.delete(key(b"\x00\x00\x00\x01")).unwrap(); - - let root = tree.root(); - let expected_root = - "39f36a7cb4dfb1b46f03d044265df6a491dffc1034121bc1071a34ddce9bb14b"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_10_delete_5() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - for i in 0_u32..10 { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - - for i in 5_u32..10 { - let key = key(i.to_be_bytes()); - tree.delete(key).unwrap(); - } - - let root = tree.root(); - let expected_root = - "108f731f2414e33ae57e584dc26bd276db07874436b2264ca6e520c658185c6b"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_delete_non_existent_key() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x03"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); - tree.delete(key(b"\x00\x00\x04\x00")).unwrap(); - - let root = tree.root(); - let expected_root = - "108f731f2414e33ae57e584dc26bd276db07874436b2264ca6e520c658185c6b"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_interleaved_update_delete() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - for i in 0_u32..10 { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - - 
for i in 5_u32..15 { - let key = key(i.to_be_bytes()); - tree.delete(key).unwrap(); - } - - for i in 10_u32..20 { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - - for i in 15_u32..25 { - let key = key(i.to_be_bytes()); - tree.delete(key).unwrap(); - } - - for i in 20_u32..30 { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - - for i in 25_u32..35 { - let key = key(i.to_be_bytes()); - tree.delete(key).unwrap(); - } - - let root = tree.root(); - let expected_root = - "7e6643325042cfe0fc76626c043b97062af51c7e9fc56665f12b479034bce326"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_update_removes_old_entries() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - let tenth_index = 9u32; - - for i in 0_u32..tenth_index { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - let size_before_tenth = tree.storage().len(); - let tenth_key = key(tenth_index.to_be_bytes()); - - // Given - tree.update(tenth_key, b"DATA").unwrap(); - let size_after_tenth = tree.storage().len(); - assert_ne!(size_after_tenth, size_before_tenth); - - // When - tree.update(tenth_key, b"ANOTHER_DATA").unwrap(); - - // Then - assert_eq!(tree.storage().len(), size_after_tenth); - } - - #[test] - fn test_update_with_the_same_value_does_not_remove_old_entries() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - let tenth_index = 9u32; - - for i in 0_u32..tenth_index { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - let size_before_tenth = tree.storage().len(); - let tenth_key = key(tenth_index.to_be_bytes()); - - // Given - tree.update(tenth_key, b"DATA").unwrap(); - let size_after_tenth = tree.storage().len(); - assert_ne!(size_after_tenth, size_before_tenth); - - // When - tree.update(tenth_key, b"DATA").unwrap(); - - // Then - assert_eq!(tree.storage().len(), size_after_tenth); - } 
- - #[test] - fn test_delete_removes_path_entries() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - let tenth_index = 9u32; - - for i in 0_u32..tenth_index { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - let size_before_tenth = tree.storage().len(); - let tenth_key = key(tenth_index.to_be_bytes()); - - // Given - tree.update(tenth_key, b"DATA").unwrap(); - let size_after_tenth = tree.storage().len(); - assert_ne!(size_after_tenth, size_before_tenth); - - // When - tree.delete(tenth_key).unwrap(); - - // Then - assert_eq!(tree.storage().len(), size_before_tenth); - } - - #[test] - fn test_delete_sparse_union() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - for i in 0_u32..10 { - let key = key(i.to_be_bytes()); - tree.update(key, b"DATA").unwrap(); - } - - for i in 0_u32..5 { - let key = key((i * 2 + 1).to_be_bytes()); - tree.delete(key).unwrap(); - } - - let root = tree.root(); - let expected_root = - "e912e97abc67707b2e6027338292943b53d01a7fbd7b244674128c7e468dd696"; - assert_eq!(hex::encode(root), expected_root); - } - - #[test] - fn test_override_hash_key() { - use fuel_storage::StorageInspect; - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - let leaf_1_key = key(b"\x00\x00\x00\x00"); - let leaf_1_data = b"DATA_1"; - let leaf_1 = Node::create_leaf(&leaf_1_key.0, leaf_1_data); - - let leaf_2_key = MerkleTreeKey::new_without_hash(*leaf_1.hash()); - let leaf_2_data = b"DATA_2"; - let leaf_2 = Node::create_leaf(&leaf_2_key.0, leaf_2_data); - - tree.update(leaf_2_key, leaf_2_data).unwrap(); - tree.update(leaf_1_key, leaf_1_data).unwrap(); - assert_eq!( - tree.storage - .get(leaf_2.hash()) - .unwrap() - .unwrap() - .into_owned(), - leaf_2.as_ref().into() - ); - assert_eq!( - tree.storage - .get(leaf_1.hash()) - .unwrap() - .unwrap() - .into_owned(), - leaf_1.as_ref().into() - ); - } - - #[test] - fn 
test_load_returns_a_valid_tree() { - // Instantiate a new key-value storage backing and populate it using a sparse - // Merkle tree. The root of the Merkle tree is the key that maps to the buffer - // of the root node in the storage. When loading a Merkle tree from storage, we - // need a reference to the storage object, as well as the root that allows us to - // look up the buffer of the root node. We will later use this storage backing - // and root to load a Merkle tree. - let (mut storage_to_load, root_to_load) = { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x03"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); - let root = tree.root(); - (storage, root) - }; - - // Generate an expected root for this test by using both the set of `update` - // data used when generating the loadable storage above and an additional set of - // `update` data. - let expected_root = { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x03"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x05"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x06"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x07"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x08"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x09"), b"DATA").unwrap(); - tree.root() - }; - - let root = { - // Create a Merkle tree by loading the generated storage and root. 
- let mut tree = MerkleTree::load(&mut storage_to_load, &root_to_load).unwrap(); - // Build up the loaded tree using the additional set of `update` data so its - // root matches the expected root. This verifies that the loaded tree has - // successfully wrapped the given storage backing and assumed the correct - // state so that future updates can be made seamlessly. - tree.update(key(b"\x00\x00\x00\x05"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x06"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x07"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x08"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x09"), b"DATA").unwrap(); - tree.root() - }; - - assert_eq!(root, expected_root); - } - - #[test] - fn test_load_returns_an_empty_tree_for_empty_sum_root() { - let mut storage = StorageMap::::new(); - let tree = MerkleTree::load(&mut storage, zero_sum()).unwrap(); - let root = tree.root(); - - assert_eq!(root, *zero_sum()); - } - - #[test] - fn test_load_returns_a_load_error_if_the_storage_is_not_valid_for_the_root() { - let mut storage = StorageMap::::new(); - - { - let mut tree = MerkleTree::new(&mut storage); - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x03"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); - } - - let root = &sum(b"\xff\xff\xff\xff"); - let err = MerkleTree::load(&mut storage, root) - .expect_err("Expected load() to return Error; got Ok"); - assert!(matches!(err, MerkleTreeError::LoadError(_))); - } - - #[test] - fn test_load_returns_a_deserialize_error_if_the_storage_is_corrupted() { - use fuel_storage::StorageMutate; - - let mut storage = StorageMap::::new(); - - let mut tree = MerkleTree::new(&mut storage); - tree.update(key(b"\x00\x00\x00\x00"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x01"), b"DATA").unwrap(); 
- tree.update(key(b"\x00\x00\x00\x02"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x03"), b"DATA").unwrap(); - tree.update(key(b"\x00\x00\x00\x04"), b"DATA").unwrap(); - let root = tree.root(); - - // Overwrite the root key-value with an invalid primitive to create a - // DeserializeError. - let primitive = (0xff, 0xff, [0xff; 32], [0xff; 32]); - storage.insert(&root, &primitive).unwrap(); - - let err = MerkleTree::load(&mut storage, &root) - .expect_err("Expected load() to return Error; got Ok"); - assert!(matches!(err, MerkleTreeError::DeserializeError(_))); - } - - #[test] - fn test_from_set_yields_expected_root() { - let rng = &mut rand::thread_rng(); - let gen = || { - Some(( - MerkleTreeKey::new_without_hash(random_bytes32(rng)), - random_bytes32(rng), - )) - }; - let data = std::iter::from_fn(gen).take(1_000).collect::>(); - - let expected_root = { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - let input = data.clone(); - for (key, value) in input.into_iter() { - tree.update(key, &value).unwrap(); - } - tree.root() - }; - - let root = { - let mut storage = StorageMap::::new(); - let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); - tree.root() - }; - - assert_eq!(root, expected_root); - } - - #[test] - fn test_from_empty_set_yields_expected_root() { - let rng = &mut rand::thread_rng(); - let gen = || { - Some(( - MerkleTreeKey::new_without_hash(random_bytes32(rng)), - random_bytes32(rng), - )) - }; - let data = std::iter::from_fn(gen).take(0).collect::>(); - - let expected_root = { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - let input = data.clone(); - for (key, value) in input.into_iter() { - tree.update(key, &value).unwrap(); - } - tree.root() - }; - - let root = { - let mut storage = StorageMap::::new(); - let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); - tree.root() - }; - - assert_eq!(root, expected_root); - 
} - - #[test] - fn test_from_unit_set_yields_expected_root() { - let rng = &mut rand::thread_rng(); - let gen = || { - Some(( - MerkleTreeKey::new_without_hash(random_bytes32(rng)), - random_bytes32(rng), - )) - }; - let data = std::iter::from_fn(gen).take(1).collect::>(); - - let expected_root = { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - let input = data.clone(); - for (key, value) in input.into_iter() { - tree.update(key, &value).unwrap(); - } - tree.root() - }; - - let root = { - let mut storage = StorageMap::::new(); - let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); - tree.root() - }; - - assert_eq!(root, expected_root); - } - - #[test] - fn test_from_set_with_duplicate_keys_yields_expected_root() { - let rng = &mut rand::thread_rng(); - let keys = [ - key(b"\x00\x00\x00\x00"), - key(b"\x00\x00\x00\x01"), - key(b"\x00\x00\x00\x02"), - ]; - let data = [ - (keys[0], random_bytes32(rng)), - (keys[1], random_bytes32(rng)), - (keys[2], random_bytes32(rng)), - (keys[0], random_bytes32(rng)), - (keys[1], random_bytes32(rng)), - (keys[2], random_bytes32(rng)), - ]; - - let expected_root = { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - let input = data; - for (key, value) in input.into_iter() { - tree.update(key, &value).unwrap(); - } - tree.root() - }; - - let root = { - let mut storage = StorageMap::::new(); - let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); - tree.root() - }; - - assert_eq!(root, expected_root); - } - - #[test] - fn test_from_set_with_empty_data_yields_expected_root() { - let rng = &mut rand::thread_rng(); - let keys = [ - key(b"\x00\x00\x00\x00"), - key(b"\x00\x00\x00\x01"), - key(b"\x00\x00\x00\x02"), - ]; - let data = [ - (keys[0], random_bytes32(rng).to_vec()), - (keys[1], random_bytes32(rng).to_vec()), - (keys[2], random_bytes32(rng).to_vec()), - (keys[0], b"".to_vec()), - (keys[1], b"".to_vec()), - 
(keys[2], b"".to_vec()), - ]; - - let expected_root = { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - let input = data.clone(); - for (key, value) in input.into_iter() { - tree.update(key, &value).unwrap(); - } - tree.root() - }; - - let root = { - let mut storage = StorageMap::::new(); - let tree = MerkleTree::from_set(&mut storage, data.into_iter()).unwrap(); - tree.root() - }; - - assert_eq!(root, expected_root); - } -} diff --git a/fuel-merkle/src/sparse/node.rs b/fuel-merkle/src/sparse/node.rs deleted file mode 100644 index b88d7265fe..0000000000 --- a/fuel-merkle/src/sparse/node.rs +++ /dev/null @@ -1,795 +0,0 @@ -use crate::{ - common::{ - error::DeserializeError, - node::{ - ChildError, - ChildResult, - Node as NodeTrait, - ParentNode as ParentNodeTrait, - }, - path::{ - ComparablePath, - Instruction, - Path, - }, - Bytes32, - Prefix, - }, - sparse::{ - hash::sum, - zero_sum, - Primitive, - }, - storage::{ - Mappable, - StorageInspect, - }, -}; - -use core::{ - cmp, - fmt, - marker::PhantomData, -}; - -#[derive(Clone, PartialEq, Eq)] -pub(crate) enum Node { - Node { - hash: Bytes32, - height: u32, - prefix: Prefix, - bytes_lo: Bytes32, - bytes_hi: Bytes32, - }, - Placeholder, -} - -impl Node { - fn calculate_hash( - prefix: &Prefix, - bytes_lo: &Bytes32, - bytes_hi: &Bytes32, - ) -> Bytes32 { - use digest::Digest; - let mut hash = sha2::Sha256::new(); - hash.update(prefix); - hash.update(bytes_lo); - hash.update(bytes_hi); - hash.finalize().try_into().unwrap() - } - - pub fn max_height() -> u32 { - Node::key_size_in_bits() - } - - pub fn new( - height: u32, - prefix: Prefix, - bytes_lo: Bytes32, - bytes_hi: Bytes32, - ) -> Self { - Self::Node { - hash: Self::calculate_hash(&prefix, &bytes_lo, &bytes_hi), - height, - prefix, - bytes_lo, - bytes_hi, - } - } - - pub fn create_leaf>(key: &Bytes32, data: D) -> Self { - let bytes_hi = sum(data); - Self::Node { - hash: Self::calculate_hash(&Prefix::Leaf, key, 
&bytes_hi), - height: 0u32, - prefix: Prefix::Leaf, - bytes_lo: *key, - bytes_hi, - } - } - - pub fn create_node(left_child: &Node, right_child: &Node, height: u32) -> Self { - let bytes_lo = *left_child.hash(); - let bytes_hi = *right_child.hash(); - Self::Node { - hash: Self::calculate_hash(&Prefix::Node, &bytes_lo, &bytes_hi), - height, - prefix: Prefix::Node, - bytes_lo, - bytes_hi, - } - } - - pub fn create_node_on_path( - path: &dyn Path, - path_node: &Node, - side_node: &Node, - ) -> Self { - if path_node.is_leaf() && side_node.is_leaf() { - // When joining two leaves, the joined node is found where the paths - // of the two leaves diverge. The joined node may be a direct parent - // of the leaves or an ancestor multiple generations above the - // leaves. - // N.B.: A leaf can be a placeholder. - let parent_depth = path_node.common_path_length(side_node); - let parent_height = Node::max_height() - parent_depth; - match path.get_instruction(parent_depth).unwrap() { - Instruction::Left => { - Node::create_node(path_node, side_node, parent_height) - } - Instruction::Right => { - Node::create_node(side_node, path_node, parent_height) - } - } - } else { - // When joining two nodes, or a node and a leaf, the joined node is - // the direct parent of the node with the greater height and an - // ancestor of the node with the lesser height. - // N.B.: A leaf can be a placeholder. 
- let parent_height = cmp::max(path_node.height(), side_node.height()) + 1; - let parent_depth = Node::max_height() - parent_height; - match path.get_instruction(parent_depth).unwrap() { - Instruction::Left => { - Node::create_node(path_node, side_node, parent_height) - } - Instruction::Right => { - Node::create_node(side_node, path_node, parent_height) - } - } - } - } - - pub fn create_placeholder() -> Self { - Self::Placeholder - } - - pub fn common_path_length(&self, other: &Node) -> u32 { - debug_assert!(self.is_leaf()); - debug_assert!(other.is_leaf()); - - // If either of the nodes is a placeholder, the common path length is - // defined to be 0. This is needed to prevent a 0 bit in the - // placeholder's key from producing an erroneous match with a 0 bit in - // the leaf's key. - if self.is_placeholder() || other.is_placeholder() { - 0 - } else { - self.leaf_key().common_path_length(other.leaf_key()) - } - } - - pub fn height(&self) -> u32 { - match self { - Node::Node { height, .. } => *height, - Node::Placeholder => 0, - } - } - - pub fn prefix(&self) -> Prefix { - match self { - Node::Node { prefix, .. } => *prefix, - Node::Placeholder => Prefix::Leaf, - } - } - - pub fn bytes_lo(&self) -> &Bytes32 { - match self { - Node::Node { bytes_lo, .. } => bytes_lo, - Node::Placeholder => zero_sum(), - } - } - - pub fn bytes_hi(&self) -> &Bytes32 { - match self { - Node::Node { bytes_hi, .. 
} => bytes_hi, - Node::Placeholder => zero_sum(), - } - } - - pub fn is_leaf(&self) -> bool { - self.prefix() == Prefix::Leaf || self.is_placeholder() - } - - pub fn is_node(&self) -> bool { - self.prefix() == Prefix::Node - } - - pub fn leaf_key(&self) -> &Bytes32 { - assert!(self.is_leaf()); - self.bytes_lo() - } - - pub fn leaf_data(&self) -> &Bytes32 { - assert!(self.is_leaf()); - self.bytes_hi() - } - - pub fn left_child_key(&self) -> &Bytes32 { - assert!(self.is_node()); - self.bytes_lo() - } - - pub fn right_child_key(&self) -> &Bytes32 { - assert!(self.is_node()); - self.bytes_hi() - } - - pub fn is_placeholder(&self) -> bool { - &Self::Placeholder == self - } - - pub fn hash(&self) -> &Bytes32 { - match self { - Node::Node { hash, .. } => hash, - Node::Placeholder => zero_sum(), - } - } -} - -impl AsRef for Node { - fn as_ref(&self) -> &Node { - self - } -} - -impl NodeTrait for Node { - type Key = Bytes32; - - fn height(&self) -> u32 { - Node::height(self) - } - - fn leaf_key(&self) -> Self::Key { - *Node::leaf_key(self) - } - - fn is_leaf(&self) -> bool { - Node::is_leaf(self) - } - - fn is_node(&self) -> bool { - Node::is_node(self) - } -} - -impl fmt::Debug for Node { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.is_node() { - f.debug_struct("Node (Internal)") - .field("Height", &self.height()) - .field("Hash", &hex::encode(self.hash())) - .field("Left child key", &hex::encode(self.left_child_key())) - .field("Right child key", &hex::encode(self.right_child_key())) - .finish() - } else { - f.debug_struct("Node (Leaf)") - .field("Height", &self.height()) - .field("Hash", &hex::encode(self.hash())) - .field("Leaf key", &hex::encode(self.leaf_key())) - .field("Leaf data", &hex::encode(self.leaf_data())) - .finish() - } - } -} - -pub(crate) struct StorageNode<'storage, TableType, StorageType> { - storage: &'storage StorageType, - node: Node, - phantom_table: PhantomData, -} - -impl Clone for StorageNode<'_, TableType, StorageType> 
{ - fn clone(&self) -> Self { - Self { - storage: self.storage, - node: self.node.clone(), - phantom_table: Default::default(), - } - } -} - -impl<'s, TableType, StorageType> StorageNode<'s, TableType, StorageType> { - pub fn new(storage: &'s StorageType, node: Node) -> Self { - Self { - node, - storage, - phantom_table: Default::default(), - } - } -} - -impl StorageNode<'_, TableType, StorageType> { - pub fn hash(&self) -> &Bytes32 { - self.node.hash() - } - - pub fn into_node(self) -> Node { - self.node - } -} - -impl NodeTrait for StorageNode<'_, TableType, StorageType> { - type Key = Bytes32; - - fn height(&self) -> u32 { - self.node.height() - } - - fn leaf_key(&self) -> Self::Key { - *self.node.leaf_key() - } - - fn is_leaf(&self) -> bool { - self.node.is_leaf() - } - - fn is_node(&self) -> bool { - self.node.is_node() - } -} - -#[derive(Debug, Clone, derive_more::Display)] -pub enum StorageNodeError { - #[display(fmt = "{}", _0)] - StorageError(StorageError), - #[display(fmt = "{}", _0)] - DeserializeError(DeserializeError), -} - -impl ParentNodeTrait for StorageNode<'_, TableType, StorageType> -where - StorageType: StorageInspect, - TableType: Mappable, -{ - type Error = StorageNodeError; - - fn left_child(&self) -> ChildResult { - if self.is_leaf() { - return Err(ChildError::NodeIsLeaf) - } - let key = self.node.left_child_key(); - if key == zero_sum() { - return Ok(Self::new(self.storage, Node::create_placeholder())) - } - let primitive = self - .storage - .get(key) - .map_err(StorageNodeError::StorageError)? - .ok_or(ChildError::ChildNotFound(*key))?; - Ok(primitive - .into_owned() - .try_into() - .map(|node| Self::new(self.storage, node)) - .map_err(StorageNodeError::DeserializeError)?) 
- } - - fn right_child(&self) -> ChildResult { - if self.is_leaf() { - return Err(ChildError::NodeIsLeaf) - } - let key = self.node.right_child_key(); - if key == zero_sum() { - return Ok(Self::new(self.storage, Node::create_placeholder())) - } - let primitive = self - .storage - .get(key) - .map_err(StorageNodeError::StorageError)? - .ok_or(ChildError::ChildNotFound(*key))?; - Ok(primitive - .into_owned() - .try_into() - .map(|node| Self::new(self.storage, node)) - .map_err(StorageNodeError::DeserializeError)?) - } -} - -impl fmt::Debug for StorageNode<'_, TableType, StorageType> -where - StorageType: StorageInspect, - TableType: Mappable, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.is_node() { - f.debug_struct("StorageNode (Internal)") - .field("Height", &self.height()) - .field("Hash", &hex::encode(self.hash())) - .field("Left child key", &hex::encode(self.node.left_child_key())) - .field("Right child key", &hex::encode(self.node.right_child_key())) - .finish() - } else { - f.debug_struct("StorageNode (Leaf)") - .field("Height", &self.height()) - .field("Hash", &hex::encode(self.hash())) - .field("Leaf key", &hex::encode(self.node.leaf_key())) - .field("Leaf data", &hex::encode(self.node.leaf_data())) - .finish() - } - } -} - -#[cfg(test)] -mod test_node { - use crate::{ - common::{ - error::DeserializeError, - Bytes32, - Prefix, - PrefixError, - }, - sparse::{ - hash::sum, - zero_sum, - Node, - Primitive, - }, - }; - - fn leaf_hash(key: &Bytes32, data: &[u8]) -> Bytes32 { - let mut buffer = [0; 65]; - buffer[0..1].clone_from_slice(Prefix::Leaf.as_ref()); - buffer[1..33].clone_from_slice(key); - buffer[33..65].clone_from_slice(&sum(data)); - sum(buffer) - } - - #[test] - fn test_create_leaf_returns_a_valid_leaf() { - let leaf = Node::create_leaf(&sum(b"LEAF"), [1u8; 32]); - assert_eq!(leaf.is_leaf(), true); - assert_eq!(leaf.is_node(), false); - assert_eq!(leaf.height(), 0); - assert_eq!(leaf.prefix(), Prefix::Leaf); - 
assert_eq!(*leaf.leaf_key(), sum(b"LEAF")); - assert_eq!(*leaf.leaf_data(), sum([1u8; 32])); - } - - #[test] - fn test_create_node_returns_a_valid_node() { - let left_child = Node::create_leaf(&sum(b"LEFT CHILD"), [1u8; 32]); - let right_child = Node::create_leaf(&sum(b"RIGHT CHILD"), [1u8; 32]); - let node = Node::create_node(&left_child, &right_child, 1); - assert_eq!(node.is_leaf(), false); - assert_eq!(node.is_node(), true); - assert_eq!(node.height(), 1); - assert_eq!(node.prefix(), Prefix::Node); - assert_eq!( - *node.left_child_key(), - leaf_hash(&sum(b"LEFT CHILD"), &[1u8; 32]) - ); - assert_eq!( - *node.right_child_key(), - leaf_hash(&sum(b"RIGHT CHILD"), &[1u8; 32]) - ); - } - - #[test] - fn test_create_placeholder_returns_a_placeholder_node() { - let node = Node::create_placeholder(); - assert_eq!(node.is_placeholder(), true); - assert_eq!(node.hash(), zero_sum()); - } - - #[test] - fn test_create_leaf_from_primitive_returns_a_valid_leaf() { - let primitive = (0, Prefix::Leaf as u8, [0xff; 32], [0xff; 32]); - - let node: Node = primitive.try_into().unwrap(); - assert_eq!(node.is_leaf(), true); - assert_eq!(node.is_node(), false); - assert_eq!(node.height(), 0); - assert_eq!(node.prefix(), Prefix::Leaf); - assert_eq!(*node.leaf_key(), [0xff; 32]); - assert_eq!(*node.leaf_data(), [0xff; 32]); - } - - #[test] - fn test_create_node_from_primitive_returns_a_valid_node() { - let primitive = (255, Prefix::Node as u8, [0xff; 32], [0xff; 32]); - - let node: Node = primitive.try_into().unwrap(); - assert_eq!(node.is_leaf(), false); - assert_eq!(node.is_node(), true); - assert_eq!(node.height(), 255); - assert_eq!(node.prefix(), Prefix::Node); - assert_eq!(*node.left_child_key(), [0xff; 32]); - assert_eq!(*node.right_child_key(), [0xff; 32]); - } - - #[test] - fn test_create_from_primitive_returns_deserialize_error_if_invalid_prefix() { - let primitive = (0xff, 0xff, [0xff; 32], [0xff; 32]); - - // Should return Error; prefix 0xff is does not represent a node or 
leaf - let err = Node::try_from(primitive) - .expect_err("Expected try_from() to be Error; got OK"); - assert!(matches!( - err, - DeserializeError::PrefixError(PrefixError::InvalidPrefix(0xff)) - )); - } - - /// For leaf node `node` of leaf data `d` with key `k`: - /// ```node = (0x00, k, h(serialize(d)))``` - #[test] - fn test_leaf_primitive_returns_expected_primitive() { - let expected_primitive = - (0_u32, Prefix::Leaf as u8, sum(b"LEAF"), sum([1u8; 32])); - - let leaf = Node::create_leaf(&sum(b"LEAF"), [1u8; 32]); - let primitive = Primitive::from(&leaf); - - assert_eq!(primitive, expected_primitive); - } - - /// For internal node `node` with children `l` and `r`: - /// ```node = (0x01, l.v, r.v)``` - #[test] - fn test_node_primitive_returns_expected_primitive() { - let expected_primitive = ( - 1_u32, - Prefix::Node as u8, - leaf_hash(&sum(b"LEFT CHILD"), &[1u8; 32]), - leaf_hash(&sum(b"RIGHT CHILD"), &[1u8; 32]), - ); - - let left_child = Node::create_leaf(&sum(b"LEFT CHILD"), [1u8; 32]); - let right_child = Node::create_leaf(&sum(b"RIGHT CHILD"), [1u8; 32]); - let node = Node::create_node(&left_child, &right_child, 1); - let primitive = Primitive::from(&node); - - assert_eq!(primitive, expected_primitive); - } - - /// For leaf node `node` of leaf data `d` with key `k`: - /// ```node.v = h(0x00, k, h(serialize(d)))``` - #[test] - fn test_leaf_hash_returns_expected_hash_value() { - let mut expected_buffer = [0u8; 65]; - expected_buffer[0..1].clone_from_slice(Prefix::Leaf.as_ref()); - expected_buffer[1..33].clone_from_slice(&sum(b"LEAF")); - expected_buffer[33..65].clone_from_slice(&sum([1u8; 32])); - let expected_value = sum(expected_buffer); - - let node = Node::create_leaf(&sum(b"LEAF"), [1u8; 32]); - let value = *node.hash(); - - assert_eq!(value, expected_value); - } - - /// For internal node `node` with children `l` and `r`: - /// ```node.v = h(0x01, l.v, r.v)``` - #[test] - fn test_node_hash_returns_expected_hash_value() { - let mut expected_buffer = 
[0u8; 65]; - expected_buffer[0..1].clone_from_slice(Prefix::Node.as_ref()); - expected_buffer[1..33] - .clone_from_slice(&leaf_hash(&sum(b"LEFT CHILD"), &[1u8; 32])); - expected_buffer[33..65] - .clone_from_slice(&leaf_hash(&sum(b"RIGHT CHILD"), &[1u8; 32])); - let expected_value = sum(expected_buffer); - - let left_child = Node::create_leaf(&sum(b"LEFT CHILD"), [1u8; 32]); - let right_child = Node::create_leaf(&sum(b"RIGHT CHILD"), [1u8; 32]); - let node = Node::create_node(&left_child, &right_child, 1); - let value = *node.hash(); - - assert_eq!(value, expected_value); - } -} - -#[cfg(test)] -mod test_storage_node { - use crate::{ - common::{ - error::DeserializeError, - node::{ - ChildError, - ParentNode, - }, - Bytes32, - PrefixError, - StorageMap, - }, - sparse::{ - hash::sum, - node::StorageNodeError, - Node, - Primitive, - StorageNode, - }, - storage::{ - Mappable, - StorageMutate, - }, - }; - - pub struct TestTable; - - impl Mappable for TestTable { - type Key = Self::OwnedKey; - type OwnedKey = Bytes32; - type OwnedValue = Primitive; - type Value = Self::OwnedValue; - } - - #[test] - fn test_node_left_child_returns_the_left_child() { - let mut s = StorageMap::::new(); - - let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); - let _ = s.insert(leaf_0.hash(), &leaf_0.as_ref().into()); - - let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let _ = s.insert(leaf_1.hash(), &leaf_1.as_ref().into()); - - let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); - let _ = s.insert(node_0.hash(), &node_0.as_ref().into()); - - let storage_node = StorageNode::new(&s, node_0); - let child = storage_node.left_child().unwrap(); - - assert_eq!(child.hash(), leaf_0.hash()); - } - - #[test] - fn test_node_right_child_returns_the_right_child() { - let mut s = StorageMap::::new(); - - let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); - let _ = s.insert(leaf_0.hash(), &leaf_0.as_ref().into()); - - let leaf_1 = 
Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let _ = s.insert(leaf_1.hash(), &leaf_1.as_ref().into()); - - let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); - let _ = s.insert(node_0.hash(), &node_0.as_ref().into()); - - let storage_node = StorageNode::new(&s, node_0); - let child = storage_node.right_child().unwrap(); - - assert_eq!(child.hash(), leaf_1.hash()); - } - - #[test] - fn test_node_left_child_returns_placeholder_when_key_is_zero_sum() { - let mut s = StorageMap::::new(); - - let leaf = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let _ = s.insert(leaf.hash(), &leaf.as_ref().into()); - - let node_0 = Node::create_node(&Node::create_placeholder(), &leaf, 1); - let _ = s.insert(node_0.hash(), &node_0.as_ref().into()); - - let storage_node = StorageNode::new(&s, node_0); - let child = storage_node.left_child().unwrap(); - - assert!(child.node.is_placeholder()); - } - - #[test] - fn test_node_right_child_returns_placeholder_when_key_is_zero_sum() { - let mut s = StorageMap::::new(); - - let leaf = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let _ = s.insert(leaf.hash(), &leaf.as_ref().into()); - - let node_0 = Node::create_node(&leaf, &Node::create_placeholder(), 1); - let _ = s.insert(node_0.hash(), &node_0.as_ref().into()); - - let storage_node = StorageNode::new(&s, node_0); - let child = storage_node.right_child().unwrap(); - - assert!(child.node.is_placeholder()); - } - - #[test] - fn test_node_left_child_returns_error_when_node_is_leaf() { - let s = StorageMap::::new(); - - let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); - let storage_node = StorageNode::new(&s, leaf_0); - let err = storage_node - .left_child() - .expect_err("Expected left_child() to return Error; got OK"); - - assert!(matches!(err, ChildError::NodeIsLeaf)); - } - - #[test] - fn test_node_right_child_returns_error_when_node_is_leaf() { - let s = StorageMap::::new(); - - let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 
32]); - let storage_node = StorageNode::new(&s, leaf_0); - let err = storage_node - .right_child() - .expect_err("Expected right_child() to return Error; got OK"); - - assert!(matches!(err, ChildError::NodeIsLeaf)); - } - - #[test] - fn test_node_left_child_returns_error_when_key_is_not_found() { - let s = StorageMap::::new(); - - let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [0u8; 32]); - let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); - - let storage_node = StorageNode::new(&s, node_0); - let err = storage_node - .left_child() - .expect_err("Expected left_child() to return Error; got Ok"); - - let key = *storage_node.into_node().left_child_key(); - assert!(matches!( - err, - ChildError::ChildNotFound(k) if k == key - )); - } - - #[test] - fn test_node_right_child_returns_error_when_key_is_not_found() { - let s = StorageMap::::new(); - - let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); - let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); - - let storage_node = StorageNode::new(&s, node_0); - let err = storage_node - .right_child() - .expect_err("Expected right_child() to return Error; got Ok"); - - let key = *storage_node.into_node().right_child_key(); - assert!(matches!( - err, - ChildError::ChildNotFound(k) if k == key - )); - } - - #[test] - fn test_node_left_child_returns_deserialize_error_when_primitive_is_invalid() { - let mut s = StorageMap::::new(); - - let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); - let _ = s.insert(leaf_0.hash(), &(0xff, 0xff, [0xff; 32], [0xff; 32])); - let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); - - let storage_node = StorageNode::new(&s, node_0); - let err = storage_node - .left_child() - .expect_err("Expected left_child() to be Error; got Ok"); - - assert!(matches!( - err, 
- ChildError::Error(StorageNodeError::DeserializeError( - DeserializeError::PrefixError(PrefixError::InvalidPrefix(0xff)) - )) - )); - } - - #[test] - fn test_node_right_child_returns_deserialize_error_when_primitive_is_invalid() { - let mut s = StorageMap::::new(); - - let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); - let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let _ = s.insert(leaf_1.hash(), &(0xff, 0xff, [0xff; 32], [0xff; 32])); - let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); - - let storage_node = StorageNode::new(&s, node_0); - let err = storage_node - .right_child() - .expect_err("Expected right_child() to be Error; got Ok"); - - assert!(matches!( - err, - ChildError::Error(StorageNodeError::DeserializeError( - DeserializeError::PrefixError(PrefixError::InvalidPrefix(0xff)) - )) - )); - } -} diff --git a/fuel-merkle/src/sparse/primitive.rs b/fuel-merkle/src/sparse/primitive.rs deleted file mode 100644 index c8b6bfce69..0000000000 --- a/fuel-merkle/src/sparse/primitive.rs +++ /dev/null @@ -1,77 +0,0 @@ -use crate::{ - common::{ - error::DeserializeError, - Bytes32, - Prefix, - PrefixError, - }, - sparse::Node, -}; - -/// **Leaf buffer:** -/// -/// | Allocation | Data | -/// |------------|----------------------------| -/// | `00 - 04` | Height (4 bytes) | -/// | `04 - 05` | Prefix (1 byte, `0x00`) | -/// | `05 - 37` | hash(Key) (32 bytes) | -/// | `37 - 69` | hash(Data) (32 bytes) | -/// -/// **Node buffer:** -/// -/// | Allocation | Data | -/// |------------|----------------------------| -/// | `00 - 04` | Height (4 bytes) | -/// | `04 - 05` | Prefix (1 byte, `0x01`) | -/// | `05 - 37` | Left child key (32 bytes) | -/// | `37 - 69` | Right child key (32 bytes) | -pub type Primitive = (u32, u8, Bytes32, Bytes32); - -trait PrimitiveView { - fn height(&self) -> u32; - fn prefix(&self) -> Result; - fn bytes_lo(&self) -> &Bytes32; - fn bytes_hi(&self) -> &Bytes32; -} - -impl PrimitiveView for Primitive { - fn 
height(&self) -> u32 { - self.0 - } - - fn prefix(&self) -> Result { - Prefix::try_from(self.1) - } - - fn bytes_lo(&self) -> &Bytes32 { - &self.2 - } - - fn bytes_hi(&self) -> &Bytes32 { - &self.3 - } -} - -impl From<&Node> for Primitive { - fn from(node: &Node) -> Self { - ( - node.height(), - node.prefix() as u8, - *node.bytes_lo(), - *node.bytes_hi(), - ) - } -} - -impl TryFrom for Node { - type Error = DeserializeError; - - fn try_from(primitive: Primitive) -> Result { - let height = primitive.height(); - let prefix = primitive.prefix()?; - let bytes_lo = *primitive.bytes_lo(); - let bytes_hi = *primitive.bytes_hi(); - let node = Self::new(height, prefix, bytes_lo, bytes_hi); - Ok(node) - } -} From 6b951fffb3fa4525bac79e0f9c45cf154259eab2 Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Thu, 18 Jan 2024 10:20:56 -0500 Subject: [PATCH 05/15] Fix imports --- fuel-merkle/src/sparse.rs | 5 ----- fuel-merkle/src/sparse/generic/node.rs | 10 +++++++--- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/fuel-merkle/src/sparse.rs b/fuel-merkle/src/sparse.rs index 9d47e8e5bd..7354c8df11 100644 --- a/fuel-merkle/src/sparse.rs +++ b/fuel-merkle/src/sparse.rs @@ -1,8 +1,3 @@ -pub(crate) use generic::{ - hash::zero_sum, - node::Node, -}; - pub mod generic; pub mod in_memory; diff --git a/fuel-merkle/src/sparse/generic/node.rs b/fuel-merkle/src/sparse/generic/node.rs index 5c7fb0bab2..b8174f9813 100644 --- a/fuel-merkle/src/sparse/generic/node.rs +++ b/fuel-merkle/src/sparse/generic/node.rs @@ -441,9 +441,13 @@ mod test_node { PrefixError, }, sparse::{ - generic::hash::sum, - zero_sum, - Node, + generic::{ + hash::{ + sum, + zero_sum, + }, + Node, + }, Primitive, }, }; From f80f162b79ae5313008d34254bf3a3dc0dc085d0 Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Thu, 18 Jan 2024 10:23:50 -0500 Subject: [PATCH 06/15] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 
bed3df8998..39a8a37cca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/). #### Changed +- [#660](https://github.com/FuelLabs/fuel-vm/pull/660): Added generic argument to the sparse Merkle tree to allow users to define the key space. The default export sparse Merkle tree is defined as the 32 byte SMT to ensure compatibility. - [#653](https://github.com/FuelLabs/fuel-vm/pull/653): `ECAL` opcode handler can now hold internal state. - [#657](https://github.com/FuelLabs/fuel-vm/pull/657): Add debugger methods to remove or replace all breakpoints at once. From 2e8db025751f38b7cd6fab057aa3f98bf577179b Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Thu, 18 Jan 2024 11:05:00 -0500 Subject: [PATCH 07/15] Clippy --- fuel-merkle/src/sparse/generic/merkle_tree.rs | 29 +++++++++--------- fuel-merkle/src/sparse/generic/node.rs | 30 +++++++++---------- fuel-merkle/src/sum/hash.rs | 4 +-- fuel-merkle/test-helpers/src/binary/hash.rs | 6 ++-- .../test-helpers/src/suites/binary_proofs.rs | 4 +-- 5 files changed, 36 insertions(+), 37 deletions(-) diff --git a/fuel-merkle/src/sparse/generic/merkle_tree.rs b/fuel-merkle/src/sparse/generic/merkle_tree.rs index 09745cf580..ade4ca3b29 100644 --- a/fuel-merkle/src/sparse/generic/merkle_tree.rs +++ b/fuel-merkle/src/sparse/generic/merkle_tree.rs @@ -86,8 +86,7 @@ impl MerkleTreeKey { hash.update(storage_key.as_ref()); let hash: Bytes32 = hash .finalize() - .try_into() - .expect("`sha2::Sha256` can't fail during hashing"); + .into(); let truncated = truncate::(&hash); Self(truncated) } @@ -402,7 +401,7 @@ where let key = key.into(); let leaf_node = Node::create_leaf(&key, data); self.storage - .insert(&leaf_node.hash(), &leaf_node.as_ref().into())?; + .insert(leaf_node.hash(), &leaf_node.as_ref().into())?; if self.root_node().is_placeholder() { self.set_root_node(leaf_node); @@ -432,7 +431,7 @@ where let (path_nodes, side_nodes): (Vec>, Vec>) = 
self.path_set(key)?; - match path_nodes.get(0) { + match path_nodes.first() { Some(node) if node.leaf_key() == &key => { self.delete_with_path_set( &key, @@ -490,7 +489,7 @@ where current_node = Node::create_node_on_path(path, ¤t_node, actual_leaf_node); self.storage - .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; + .insert(current_node.hash(), ¤t_node.as_ref().into())?; } // Merge placeholders @@ -503,21 +502,21 @@ where current_node = Node::create_node_on_path(path, ¤t_node, &placeholder); self.storage - .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; + .insert(current_node.hash(), ¤t_node.as_ref().into())?; } } else { - self.storage.remove(&actual_leaf_node.hash())?; + self.storage.remove(actual_leaf_node.hash())?; } // Merge side nodes for side_node in side_nodes { current_node = Node::create_node_on_path(path, ¤t_node, side_node); self.storage - .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; + .insert(current_node.hash(), ¤t_node.as_ref().into())?; } for node in path_nodes.iter().skip(1 /* leaf */) { - self.storage.remove(&node.hash())?; + self.storage.remove(node.hash())?; } self.set_root_node(current_node); @@ -532,7 +531,7 @@ where side_nodes: &[Node], ) -> Result<(), StorageError> { for node in path_nodes { - self.storage.remove(&node.hash())?; + self.storage.remove(node.hash())?; } let path = requested_leaf_key; @@ -574,7 +573,7 @@ where current_node = Node::create_node_on_path(path, ¤t_node, side_node); self.storage - .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; + .insert(current_node.hash(), ¤t_node.as_ref().into())?; } } } @@ -583,7 +582,7 @@ where for side_node in side_nodes_iter { current_node = Node::create_node_on_path(path, ¤t_node, side_node); self.storage - .insert(¤t_node.hash(), ¤t_node.as_ref().into())?; + .insert(current_node.hash(), ¤t_node.as_ref().into())?; } self.set_root_node(current_node); @@ -1088,7 +1087,7 @@ mod test { tree.update(leaf_1_key, leaf_1_data).unwrap(); assert_eq!( tree.storage - .get(&leaf_2.hash()) + 
.get(leaf_2.hash()) .unwrap() .unwrap() .into_owned(), @@ -1096,7 +1095,7 @@ mod test { ); assert_eq!( tree.storage - .get(&leaf_1.hash()) + .get(leaf_1.hash()) .unwrap() .unwrap() .into_owned(), @@ -1164,7 +1163,7 @@ mod test { #[test] fn test_load_returns_an_empty_tree_for_empty_sum_root() { let mut storage = StorageMap::::new(); - let tree = MerkleTree::load(&mut storage, &zero_sum()).unwrap(); + let tree = MerkleTree::load(&mut storage, zero_sum()).unwrap(); let root = tree.root(); assert_eq!(root, *zero_sum()); diff --git a/fuel-merkle/src/sparse/generic/node.rs b/fuel-merkle/src/sparse/generic/node.rs index b8174f9813..21e6e0e1a9 100644 --- a/fuel-merkle/src/sparse/generic/node.rs +++ b/fuel-merkle/src/sparse/generic/node.rs @@ -159,7 +159,7 @@ impl Node { if self.is_placeholder() || other.is_placeholder() { 0 } else { - self.leaf_key().common_path_length(&other.leaf_key()) + self.leaf_key().common_path_length(other.leaf_key()) } } @@ -371,7 +371,7 @@ where } let primitive = self .storage - .get(&key) + .get(key) .map_err(StorageNodeError::StorageError)? .ok_or(ChildError::ChildNotFound(*key))?; Ok(primitive @@ -391,7 +391,7 @@ where } let primitive = self .storage - .get(&key) + .get(key) .map_err(StorageNodeError::StorageError)? 
.ok_or(ChildError::ChildNotFound(*key))?; Ok(primitive @@ -648,13 +648,13 @@ mod test_storage_node { let mut s = StorageMap::::new(); let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); - let _ = s.insert(&leaf_0.hash(), &leaf_0.as_ref().into()); + let _ = s.insert(leaf_0.hash(), &leaf_0.as_ref().into()); let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let _ = s.insert(&leaf_1.hash(), &leaf_1.as_ref().into()); + let _ = s.insert(leaf_1.hash(), &leaf_1.as_ref().into()); let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); - let _ = s.insert(&node_0.hash(), &node_0.as_ref().into()); + let _ = s.insert(node_0.hash(), &node_0.as_ref().into()); let storage_node = StorageNode::new(&s, node_0); let child = storage_node.left_child().unwrap(); @@ -667,13 +667,13 @@ mod test_storage_node { let mut s = StorageMap::::new(); let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); - let _ = s.insert(&leaf_0.hash(), &leaf_0.as_ref().into()); + let _ = s.insert(leaf_0.hash(), &leaf_0.as_ref().into()); let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let _ = s.insert(&leaf_1.hash(), &leaf_1.as_ref().into()); + let _ = s.insert(leaf_1.hash(), &leaf_1.as_ref().into()); let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); - let _ = s.insert(&node_0.hash(), &node_0.as_ref().into()); + let _ = s.insert(node_0.hash(), &node_0.as_ref().into()); let storage_node = StorageNode::new(&s, node_0); let child = storage_node.right_child().unwrap(); @@ -686,10 +686,10 @@ mod test_storage_node { let mut s = StorageMap::::new(); let leaf = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let _ = s.insert(&leaf.hash(), &leaf.as_ref().into()); + let _ = s.insert(leaf.hash(), &leaf.as_ref().into()); let node_0 = Node::create_node(&Node::create_placeholder(), &leaf, 1); - let _ = s.insert(&node_0.hash(), &node_0.as_ref().into()); + let _ = s.insert(node_0.hash(), &node_0.as_ref().into()); let storage_node = StorageNode::new(&s, 
node_0); let child = storage_node.left_child().unwrap(); @@ -702,10 +702,10 @@ mod test_storage_node { let mut s = StorageMap::::new(); let leaf = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let _ = s.insert(&leaf.hash(), &leaf.as_ref().into()); + let _ = s.insert(leaf.hash(), &leaf.as_ref().into()); let node_0 = Node::create_node(&leaf, &Node::create_placeholder(), 1); - let _ = s.insert(&node_0.hash(), &node_0.as_ref().into()); + let _ = s.insert(node_0.hash(), &node_0.as_ref().into()); let storage_node = StorageNode::new(&s, node_0); let child = storage_node.right_child().unwrap(); @@ -784,7 +784,7 @@ mod test_storage_node { let mut s = StorageMap::::new(); let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); - let _ = s.insert(&leaf_0.hash(), &(0xff, 0xff, [0xff; 32], [0xff; 32])); + let _ = s.insert(leaf_0.hash(), &(0xff, 0xff, [0xff; 32], [0xff; 32])); let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); @@ -807,7 +807,7 @@ mod test_storage_node { let leaf_0 = Node::create_leaf(&sum(b"Hello World"), [1u8; 32]); let leaf_1 = Node::create_leaf(&sum(b"Goodbye World"), [1u8; 32]); - let _ = s.insert(&leaf_1.hash(), &(0xff, 0xff, [0xff; 32], [0xff; 32])); + let _ = s.insert(leaf_1.hash(), &(0xff, 0xff, [0xff; 32], [0xff; 32])); let node_0 = Node::create_node(&leaf_0, &leaf_1, 1); let storage_node = StorageNode::new(&s, node_0); diff --git a/fuel-merkle/src/sum/hash.rs b/fuel-merkle/src/sum/hash.rs index 0975517313..5e4a31e8fe 100644 --- a/fuel-merkle/src/sum/hash.rs +++ b/fuel-merkle/src/sum/hash.rs @@ -22,7 +22,7 @@ pub fn node_sum(lhs_fee: u64, lhs_data: &[u8], rhs_fee: u64, rhs_data: &[u8]) -> hash.update(lhs_data); hash.update(rhs_fee.to_be_bytes()); hash.update(rhs_data); - hash.finalize().try_into().unwrap() + hash.finalize().into() } // Merkle tree hash of a list with one entry @@ -32,5 +32,5 @@ pub fn leaf_sum(fee: u64, data: &[u8]) -> Bytes32 { 
hash.update(Prefix::Leaf); hash.update(fee.to_be_bytes()); hash.update(data); - hash.finalize().try_into().unwrap() + hash.finalize().into() } diff --git a/fuel-merkle/test-helpers/src/binary/hash.rs b/fuel-merkle/test-helpers/src/binary/hash.rs index cef8b5e2fd..ebb03d9186 100644 --- a/fuel-merkle/test-helpers/src/binary/hash.rs +++ b/fuel-merkle/test-helpers/src/binary/hash.rs @@ -1,6 +1,6 @@ use digest::Digest; use sha2::Sha256 as Hash; -use std::convert::TryInto; + pub type Data = [u8; 32]; @@ -26,7 +26,7 @@ pub fn node_sum(lhs_data: &[u8], rhs_data: &[u8]) -> Data { hash.update([NODE]); hash.update(lhs_data); hash.update(rhs_data); - hash.finalize().try_into().unwrap() + hash.finalize().into() } // Merkle tree hash of a list with one entry @@ -35,5 +35,5 @@ pub fn leaf_sum(data: &[u8]) -> Data { let mut hash = Hash::new(); hash.update([LEAF]); hash.update(data); - hash.finalize().try_into().unwrap() + hash.finalize().into() } diff --git a/fuel-merkle/test-helpers/src/suites/binary_proofs.rs b/fuel-merkle/test-helpers/src/suites/binary_proofs.rs index d766385e0a..f066c80c46 100644 --- a/fuel-merkle/test-helpers/src/suites/binary_proofs.rs +++ b/fuel-merkle/test-helpers/src/suites/binary_proofs.rs @@ -14,14 +14,14 @@ use rand::seq::IteratorRandom; use rand_pcg::Pcg64; use rand_seeder::Seeder; use sha2::Sha256; -use std::convert::TryInto; + type Hash = Sha256; pub fn sum(data: &[u8]) -> Bytes32 { let mut hash = Hash::new(); hash.update(data); - hash.finalize().try_into().unwrap() + hash.finalize().into() } fn generate_test( From ef67255e2e8a7b279c32a1e44a4454ebba59f0ab Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Thu, 18 Jan 2024 11:07:37 -0500 Subject: [PATCH 08/15] Fmt --- fuel-merkle/src/sparse/generic/merkle_tree.rs | 4 +--- fuel-merkle/test-helpers/src/binary/hash.rs | 1 - fuel-merkle/test-helpers/src/suites/binary_proofs.rs | 1 - 3 files changed, 1 insertion(+), 5 deletions(-) diff --git a/fuel-merkle/src/sparse/generic/merkle_tree.rs 
b/fuel-merkle/src/sparse/generic/merkle_tree.rs index ade4ca3b29..d4d486db4a 100644 --- a/fuel-merkle/src/sparse/generic/merkle_tree.rs +++ b/fuel-merkle/src/sparse/generic/merkle_tree.rs @@ -84,9 +84,7 @@ impl MerkleTreeKey { use digest::Digest; let mut hash = sha2::Sha256::new(); hash.update(storage_key.as_ref()); - let hash: Bytes32 = hash - .finalize() - .into(); + let hash: Bytes32 = hash.finalize().into(); let truncated = truncate::(&hash); Self(truncated) } diff --git a/fuel-merkle/test-helpers/src/binary/hash.rs b/fuel-merkle/test-helpers/src/binary/hash.rs index ebb03d9186..0db5688827 100644 --- a/fuel-merkle/test-helpers/src/binary/hash.rs +++ b/fuel-merkle/test-helpers/src/binary/hash.rs @@ -1,7 +1,6 @@ use digest::Digest; use sha2::Sha256 as Hash; - pub type Data = [u8; 32]; const NODE: u8 = 0x01; diff --git a/fuel-merkle/test-helpers/src/suites/binary_proofs.rs b/fuel-merkle/test-helpers/src/suites/binary_proofs.rs index f066c80c46..8bbbe47112 100644 --- a/fuel-merkle/test-helpers/src/suites/binary_proofs.rs +++ b/fuel-merkle/test-helpers/src/suites/binary_proofs.rs @@ -15,7 +15,6 @@ use rand_pcg::Pcg64; use rand_seeder::Seeder; use sha2::Sha256; - type Hash = Sha256; pub fn sum(data: &[u8]) -> Bytes32 { From 579e5afd964118ba5d4c5c66caa3749e7f04b5f0 Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Thu, 18 Jan 2024 11:22:24 -0500 Subject: [PATCH 09/15] Fix for no_std --- fuel-merkle/src/sparse/generic/hash.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/fuel-merkle/src/sparse/generic/hash.rs b/fuel-merkle/src/sparse/generic/hash.rs index 36b0fdec51..2c441086ab 100644 --- a/fuel-merkle/src/sparse/generic/hash.rs +++ b/fuel-merkle/src/sparse/generic/hash.rs @@ -1,12 +1,15 @@ use crate::common::Bytes; +use alloc::{ + vec, + vec::Vec, +}; use std::{ convert::TryInto, sync::OnceLock, }; use digest::Digest; -use sha2::Sha256; -pub(crate) type Hash = Sha256; +use sha2::Sha256 as Hash; pub fn zero_sum() -> &'static [u8; N] { 
static ZERO: OnceLock> = OnceLock::new(); From 848e2c0d42fd39c5f8737d9078ef974992be6838 Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Thu, 18 Jan 2024 11:51:17 -0500 Subject: [PATCH 10/15] Use once_cell::OnceCell over std::OnceLock for no_std --- fuel-merkle/Cargo.toml | 1 + fuel-merkle/src/sparse/generic/hash.rs | 7 ++----- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/fuel-merkle/Cargo.toml b/fuel-merkle/Cargo.toml index 74fd830451..478eb5d00f 100644 --- a/fuel-merkle/Cargo.toml +++ b/fuel-merkle/Cargo.toml @@ -16,6 +16,7 @@ digest = { version = "0.10", default-features = false } fuel-storage = { workspace = true, default-features = false } hashbrown = "0.13" hex = { version = "0.4", default-features = false, features = ["alloc"] } +once_cell = "1.19" serde = { version = "1.0", default-features = false, optional = true } sha2 = { version = "0.10", default-features = false } diff --git a/fuel-merkle/src/sparse/generic/hash.rs b/fuel-merkle/src/sparse/generic/hash.rs index 2c441086ab..155ccfa892 100644 --- a/fuel-merkle/src/sparse/generic/hash.rs +++ b/fuel-merkle/src/sparse/generic/hash.rs @@ -3,16 +3,13 @@ use alloc::{ vec, vec::Vec, }; -use std::{ - convert::TryInto, - sync::OnceLock, -}; +use once_cell::sync::OnceCell; use digest::Digest; use sha2::Sha256 as Hash; pub fn zero_sum() -> &'static [u8; N] { - static ZERO: OnceLock> = OnceLock::new(); + static ZERO: OnceCell> = OnceCell::new(); ZERO.get_or_init(|| vec![0; N]) .as_slice() .try_into() From 1a26c2c630e285545ec9c12076422a68e8c27db1 Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Thu, 18 Jan 2024 12:00:51 -0500 Subject: [PATCH 11/15] race::once_cell --- fuel-merkle/src/common.rs | 10 ---------- fuel-merkle/src/sparse/generic/hash.rs | 6 +++--- 2 files changed, 3 insertions(+), 13 deletions(-) diff --git a/fuel-merkle/src/common.rs b/fuel-merkle/src/common.rs index 0176cea398..dc880aa6ee 100644 --- a/fuel-merkle/src/common.rs +++ b/fuel-merkle/src/common.rs @@ -33,16 +33,6 @@ 
pub type Bytes16 = [u8; 16]; pub type Bytes32 = [u8; 32]; pub type Bytes = [u8; N]; -pub trait Zero { - fn zero() -> Self; -} - -impl Zero for [u8; N] { - fn zero() -> Self { - [0u8; N] - } -} - use alloc::vec::Vec; pub type ProofSet = Vec; diff --git a/fuel-merkle/src/sparse/generic/hash.rs b/fuel-merkle/src/sparse/generic/hash.rs index 155ccfa892..1b26da595c 100644 --- a/fuel-merkle/src/sparse/generic/hash.rs +++ b/fuel-merkle/src/sparse/generic/hash.rs @@ -3,14 +3,14 @@ use alloc::{ vec, vec::Vec, }; -use once_cell::sync::OnceCell; +use once_cell::race::OnceBox; use digest::Digest; use sha2::Sha256 as Hash; pub fn zero_sum() -> &'static [u8; N] { - static ZERO: OnceCell> = OnceCell::new(); - ZERO.get_or_init(|| vec![0; N]) + static ZERO: OnceBox> = OnceBox::new(); + ZERO.get_or_init(|| Box::new(vec![0; N])) .as_slice() .try_into() .expect("Expected valid zero sum") From 003901336aacf4d0d7b1f6a2f1aebcfc6217e318 Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Thu, 18 Jan 2024 12:04:47 -0500 Subject: [PATCH 12/15] Fix once_cell feature flags --- fuel-merkle/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fuel-merkle/Cargo.toml b/fuel-merkle/Cargo.toml index 478eb5d00f..8d10b1cedb 100644 --- a/fuel-merkle/Cargo.toml +++ b/fuel-merkle/Cargo.toml @@ -16,7 +16,7 @@ digest = { version = "0.10", default-features = false } fuel-storage = { workspace = true, default-features = false } hashbrown = "0.13" hex = { version = "0.4", default-features = false, features = ["alloc"] } -once_cell = "1.19" +once_cell = { version = "1.19", default-features = false, features = ["race"] } serde = { version = "1.0", default-features = false, optional = true } sha2 = { version = "0.10", default-features = false } From 39633e99a793cff085ec19997e98fe28116f8394 Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Thu, 18 Jan 2024 12:06:32 -0500 Subject: [PATCH 13/15] Update primitive.rs --- fuel-merkle/src/sparse/generic/primitive.rs | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/fuel-merkle/src/sparse/generic/primitive.rs b/fuel-merkle/src/sparse/generic/primitive.rs index 18c8ffe9d7..641201d1d9 100644 --- a/fuel-merkle/src/sparse/generic/primitive.rs +++ b/fuel-merkle/src/sparse/generic/primitive.rs @@ -5,7 +5,7 @@ use crate::{ Prefix, PrefixError, }, - sparse::generic::node::Node, + sparse::generic::Node, }; /// **Leaf buffer:** From 7b5f58c0e8f461659950915fe5e0af10ac18d1ec Mon Sep 17 00:00:00 2001 From: Brandon Vrooman Date: Fri, 31 May 2024 18:13:05 -0400 Subject: [PATCH 14/15] Revert "Merge branch 'master' into bvrooman/feat/generic_key_size_sparse_merkle_tree" This reverts commit 3cc67850d72193ec35040b6144bd9689b2b90030, reversing changes made to 39633e99a793cff085ec19997e98fe28116f8394. --- .github/pull_request_template.md | 22 - .github/workflows/ci.yml | 52 +- .npm/packages/fuel-tx/index.test.cjs | 461 ++++----- .npm/packages/fuel-tx/index.test.mjs | 461 ++++----- .npm/pnpm-lock.yaml | 10 +- CHANGELOG.md | 615 ++++-------- Cargo.toml | 19 +- fuel-asm/src/args.rs | 43 +- fuel-asm/src/args/wideint.rs | 8 +- fuel-asm/src/encoding_tests.rs | 7 +- fuel-asm/src/lib.rs | 9 +- fuel-asm/src/panic_reason.rs | 20 +- fuel-crypto/src/lib.rs | 9 +- fuel-crypto/src/secp256/backend.rs | 2 +- fuel-derive/src/deserialize.rs | 10 - fuel-derive/src/lib.rs | 7 - fuel-derive/src/serialize.rs | 86 +- fuel-merkle/Cargo.toml | 2 - fuel-merkle/src/binary/merkle_tree.rs | 367 ++++--- fuel-merkle/src/binary/node.rs | 19 +- fuel-merkle/src/binary/root_calculator.rs | 71 +- fuel-merkle/src/binary/verify.rs | 89 +- fuel-merkle/src/common.rs | 13 +- fuel-merkle/src/common/hash.rs | 17 - fuel-merkle/src/common/msb.rs | 42 +- fuel-merkle/src/common/node.rs | 13 +- fuel-merkle/src/common/path.rs | 32 +- fuel-merkle/src/common/path_iterator.rs | 92 +- fuel-merkle/src/common/position.rs | 259 ++--- fuel-merkle/src/common/position_path.rs | 61 +- fuel-merkle/src/common/subtree.rs | 40 + fuel-merkle/src/lib.rs | 9 +- 
fuel-merkle/src/sparse.rs | 26 +- fuel-merkle/src/sparse/generic/branch.rs | 94 ++ fuel-merkle/src/sparse/generic/merkle_tree.rs | 608 +++--------- .../sparse/{merkle_tree => generic}/node.rs | 219 +---- fuel-merkle/src/sparse/generic/primitive.rs | 50 +- fuel-merkle/src/sparse/hash.rs | 28 - fuel-merkle/src/sparse/in_memory.rs | 9 +- fuel-merkle/src/sparse/merkle_tree/branch.rs | 77 -- fuel-merkle/src/sparse/proof.rs | 800 --------------- fuel-merkle/src/sum.rs | 14 + fuel-merkle/src/sum/hash.rs | 36 + fuel-merkle/src/sum/merkle_tree.rs | 317 ++++++ fuel-merkle/src/sum/node.rs | 98 ++ fuel-merkle/src/tests/binary_verify.rs | 199 ---- fuel-merkle/src/tests/mod.rs | 2 - fuel-merkle/src/tests/sparse.rs | 201 ---- .../test-helpers/src/binary/merkle_tree.rs | 3 + fuel-merkle/test-helpers/src/binary/verify.rs | 2 +- .../test-helpers/src/suites/binary_proofs.rs | 2 +- fuel-storage/src/impls.rs | 24 +- fuel-storage/src/lib.rs | 23 +- fuel-tx/Cargo.toml | 14 +- fuel-tx/src/builder.rs | 191 ++-- fuel-tx/src/consts.rs | 4 - fuel-tx/src/contract.rs | 32 +- fuel-tx/src/lib.rs | 33 +- fuel-tx/src/tests/bytes.rs | 470 +-------- fuel-tx/src/tests/display.rs | 24 +- fuel-tx/src/tests/mod.rs | 4 +- fuel-tx/src/tests/offset.rs | 116 +-- fuel-tx/src/tests/prepared_init.rs | 41 + fuel-tx/src/tests/valid_cases.rs | 2 - fuel-tx/src/tests/valid_cases/input.rs | 115 ++- fuel-tx/src/tests/valid_cases/output.rs | 3 + fuel-tx/src/tests/valid_cases/transaction.rs | 646 ++++++------ .../tests/valid_cases/transaction/upgrade.rs | 730 -------------- .../tests/valid_cases/transaction/upload.rs | 856 ---------------- fuel-tx/src/transaction.rs | 608 +++--------- .../src/transaction/consensus_parameters.rs | 920 ++++-------------- .../transaction/consensus_parameters/gas.rs | 851 +++------------- .../gas/default_gas_costs.rs | 8 +- fuel-tx/src/transaction/fee.rs | 102 +- fuel-tx/src/transaction/id.rs | 156 ++- fuel-tx/src/transaction/metadata.rs | 93 +- fuel-tx/src/transaction/policies.rs | 26 +- 
fuel-tx/src/transaction/repr.rs | 4 - fuel-tx/src/transaction/types.rs | 29 +- .../types/chargeable_transaction.rs | 409 -------- fuel-tx/src/transaction/types/create.rs | 478 ++++++--- .../transaction/types/create/ser_de_tests.rs | 12 +- fuel-tx/src/transaction/types/input.rs | 97 +- fuel-tx/src/transaction/types/input/coin.rs | 18 +- fuel-tx/src/transaction/types/input/consts.rs | 1 + .../src/transaction/types/input/message.rs | 12 +- .../transaction/types/input/snapshot_tests.rs | 30 +- ...shot_tests__tx_with_contract_snapshot.snap | 2 +- ...ests__tx_with_predicate_coin_snapshot.snap | 2 +- ...tests__tx_with_predicate_message_coin.snap | 2 +- ...tests__tx_with_predicate_message_data.snap | 2 +- ...t_tests__tx_with_signed_coin_snapshot.snap | 2 +- ...ot_tests__tx_with_signed_message_coin.snap | 2 +- ...ot_tests__tx_with_signed_message_data.snap | 2 +- fuel-tx/src/transaction/types/mint.rs | 41 +- fuel-tx/src/transaction/types/output.rs | 39 +- .../src/transaction/types/output/contract.rs | 4 +- fuel-tx/src/transaction/types/script.rs | 376 +++++-- fuel-tx/src/transaction/types/storage.rs | 1 + fuel-tx/src/transaction/types/upgrade.rs | 320 ------ fuel-tx/src/transaction/types/upload.rs | 403 -------- fuel-tx/src/transaction/types/utxo_id.rs | 79 +- fuel-tx/src/transaction/validity.rs | 87 +- fuel-tx/src/transaction/validity/error.rs | 54 +- fuel-tx/src/tx_pointer.rs | 2 +- fuel-tx/test-helpers/Cargo.toml | 17 + .../src/lib.rs} | 166 +--- fuel-types/Cargo.toml | 4 +- fuel-types/benches/bench.rs | 64 +- fuel-types/src/array_types.rs | 130 ++- fuel-types/src/bytes.rs | 93 +- fuel-types/src/canonical.rs | 41 +- fuel-types/src/fmt.rs | 9 +- fuel-types/src/lib.rs | 17 +- fuel-types/src/numeric_types.rs | 25 +- fuel-types/src/tests/types.rs | 2 - fuel-vm/Cargo.toml | 49 +- fuel-vm/examples/external.rs | 44 +- fuel-vm/examples/single_step.rs | 100 -- .../tests/binary_verify.txt | 1 - fuel-vm/src/backtrace.rs | 27 +- fuel-vm/src/call.rs | 19 +- 
fuel-vm/src/checked_transaction.rs | 813 +++++----------- fuel-vm/src/checked_transaction/balances.rs | 68 +- fuel-vm/src/checked_transaction/builder.rs | 8 +- fuel-vm/src/checked_transaction/types.rs | 198 +--- fuel-vm/src/constraints.rs | 102 ++ fuel-vm/src/constraints/reg_key.rs | 59 +- fuel-vm/src/constraints/reg_key/tests.rs | 2 - fuel-vm/src/consts.rs | 12 +- fuel-vm/src/error.rs | 51 +- fuel-vm/src/interpreter.rs | 216 ++-- fuel-vm/src/interpreter/alu.rs | 2 +- fuel-vm/src/interpreter/alu/muldiv.rs | 21 +- fuel-vm/src/interpreter/alu/tests.rs | 2 - fuel-vm/src/interpreter/alu/wideint.rs | 131 ++- fuel-vm/src/interpreter/balances.rs | 63 +- fuel-vm/src/interpreter/blockchain.rs | 540 +++++----- .../src/interpreter/blockchain/code_tests.rs | 77 +- .../src/interpreter/blockchain/croo_tests.rs | 201 ---- .../src/interpreter/blockchain/other_tests.rs | 135 ++- .../src/interpreter/blockchain/smo_tests.rs | 17 +- fuel-vm/src/interpreter/blockchain/test.rs | 22 +- .../src/interpreter/blockchain/test/scwq.rs | 91 +- .../src/interpreter/blockchain/test/srwq.rs | 255 +++-- .../src/interpreter/blockchain/test/swwq.rs | 240 ++--- fuel-vm/src/interpreter/constructors.rs | 62 +- fuel-vm/src/interpreter/contract.rs | 80 +- fuel-vm/src/interpreter/contract/tests.rs | 50 +- fuel-vm/src/interpreter/crypto.rs | 86 +- fuel-vm/src/interpreter/crypto/tests.rs | 17 +- fuel-vm/src/interpreter/debug.rs | 24 +- fuel-vm/src/interpreter/diff.rs | 43 +- fuel-vm/src/interpreter/diff/storage.rs | 162 ++- fuel-vm/src/interpreter/diff/tests.rs | 41 +- fuel-vm/src/interpreter/ecal.rs | 25 +- fuel-vm/src/interpreter/executors/debug.rs | 4 +- .../src/interpreter/executors/instruction.rs | 253 ++--- .../executors/instruction/tests.rs | 6 + .../instruction/tests/reserved_registers.rs | 38 +- fuel-vm/src/interpreter/executors/main.rs | 493 ++-------- .../src/interpreter/executors/main/tests.rs | 173 +--- .../src/interpreter/executors/predicate.rs | 14 +- fuel-vm/src/interpreter/flow.rs | 293 
++++-- fuel-vm/src/interpreter/flow/ret_tests.rs | 26 +- fuel-vm/src/interpreter/flow/tests.rs | 137 ++- fuel-vm/src/interpreter/gas.rs | 2 +- fuel-vm/src/interpreter/initialization.rs | 55 +- fuel-vm/src/interpreter/internal.rs | 188 +++- .../src/interpreter/internal/message_tests.rs | 34 +- fuel-vm/src/interpreter/internal/tests.rs | 61 +- fuel-vm/src/interpreter/log.rs | 58 +- fuel-vm/src/interpreter/log/tests.rs | 10 +- fuel-vm/src/interpreter/memory.rs | 759 +++++---------- .../interpreter/memory/allocation_tests.rs | 62 +- fuel-vm/src/interpreter/memory/impl_tests.rs | 119 --- fuel-vm/src/interpreter/memory/stack_tests.rs | 23 +- fuel-vm/src/interpreter/memory/tests.rs | 170 +++- fuel-vm/src/interpreter/metadata.rs | 400 ++++---- fuel-vm/src/interpreter/metadata/tests.rs | 16 +- fuel-vm/src/interpreter/post_execution.rs | 4 +- fuel-vm/src/lib.rs | 15 +- fuel-vm/src/memory_client.rs | 92 +- fuel-vm/src/pool.rs | 29 - fuel-vm/src/predicate.rs | 37 +- fuel-vm/src/profiler.rs | 5 +- fuel-vm/src/state/debug.rs | 8 +- fuel-vm/src/storage.rs | 141 ++- fuel-vm/src/storage/contracts_assets.rs | 27 - fuel-vm/src/storage/contracts_state.rs | 111 --- fuel-vm/src/storage/interpreter.rs | 187 ++-- fuel-vm/src/storage/memory.rs | 353 ++----- fuel-vm/src/storage/predicate.rs | 123 +-- fuel-vm/src/tests/alu.rs | 2 +- fuel-vm/src/tests/backtrace.rs | 3 +- fuel-vm/src/tests/blockchain.rs | 485 ++------- fuel-vm/src/tests/cgas.rs | 2 +- fuel-vm/src/tests/code_coverage.rs | 12 +- fuel-vm/src/tests/contract.rs | 8 +- fuel-vm/src/tests/crypto.rs | 36 +- fuel-vm/src/tests/encoding.rs | 306 +----- fuel-vm/src/tests/external.rs | 25 +- fuel-vm/src/tests/flow.rs | 10 +- fuel-vm/src/tests/gas_factor.rs | 44 +- fuel-vm/src/tests/limits.rs | 78 -- fuel-vm/src/tests/log.rs | 1 - fuel-vm/src/tests/memory.rs | 185 ++-- fuel-vm/src/tests/metadata.rs | 286 +++--- fuel-vm/src/tests/mod.rs | 6 +- fuel-vm/src/tests/outputs.rs | 10 +- fuel-vm/src/tests/predicate.rs | 91 +- 
fuel-vm/src/tests/profile_gas.rs | 16 +- fuel-vm/src/tests/test_helpers.rs | 17 +- fuel-vm/src/tests/upgrade.rs | 416 -------- fuel-vm/src/tests/upload.rs | 375 ------- fuel-vm/src/tests/validation.rs | 138 +-- fuel-vm/src/transactor.rs | 172 +--- fuel-vm/src/util.rs | 131 +-- rustfmt.toml | 1 + 219 files changed, 8225 insertions(+), 18235 deletions(-) delete mode 100644 .github/pull_request_template.md delete mode 100644 fuel-merkle/src/common/hash.rs create mode 100644 fuel-merkle/src/common/subtree.rs create mode 100644 fuel-merkle/src/sparse/generic/branch.rs rename fuel-merkle/src/sparse/{merkle_tree => generic}/node.rs (76%) delete mode 100644 fuel-merkle/src/sparse/hash.rs delete mode 100644 fuel-merkle/src/sparse/merkle_tree/branch.rs delete mode 100644 fuel-merkle/src/sparse/proof.rs create mode 100644 fuel-merkle/src/sum.rs create mode 100644 fuel-merkle/src/sum/hash.rs create mode 100644 fuel-merkle/src/sum/merkle_tree.rs create mode 100644 fuel-merkle/src/sum/node.rs delete mode 100644 fuel-merkle/src/tests/binary_verify.rs delete mode 100644 fuel-merkle/src/tests/sparse.rs create mode 100644 fuel-tx/src/tests/prepared_init.rs delete mode 100644 fuel-tx/src/tests/valid_cases/transaction/upgrade.rs delete mode 100644 fuel-tx/src/tests/valid_cases/transaction/upload.rs delete mode 100644 fuel-tx/src/transaction/types/chargeable_transaction.rs delete mode 100644 fuel-tx/src/transaction/types/upgrade.rs delete mode 100644 fuel-tx/src/transaction/types/upload.rs create mode 100644 fuel-tx/test-helpers/Cargo.toml rename fuel-tx/{src/test_helper.rs => test-helpers/src/lib.rs} (67%) delete mode 100644 fuel-vm/examples/single_step.rs delete mode 100644 fuel-vm/fuel-merkle/proptest-regressions/tests/binary_verify.txt delete mode 100644 fuel-vm/src/interpreter/blockchain/croo_tests.rs delete mode 100644 fuel-vm/src/interpreter/memory/impl_tests.rs delete mode 100644 fuel-vm/src/pool.rs delete mode 100644 fuel-vm/src/storage/contracts_assets.rs delete mode 100644 
fuel-vm/src/storage/contracts_state.rs delete mode 100644 fuel-vm/src/tests/limits.rs delete mode 100644 fuel-vm/src/tests/upgrade.rs delete mode 100644 fuel-vm/src/tests/upload.rs create mode 100644 rustfmt.toml diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md deleted file mode 100644 index cbfafa224d..0000000000 --- a/.github/pull_request_template.md +++ /dev/null @@ -1,22 +0,0 @@ -[Link to related issue(s) here, if any] - -[Short description of the changes.] - -## Checklist -- [ ] Breaking changes are clearly marked as such in the PR description and changelog -- [ ] New behavior is reflected in tests -- [ ] If performance characteristic of an instruction change, update gas costs as well or make a follow-up PR for that -- [ ] [The specification](https://github.com/FuelLabs/fuel-specs/) matches the implemented behavior (link update PR if changes are needed) - -### Before requesting review -- [ ] I have reviewed the code myself -- [ ] I have created follow-up issues caused by this PR and linked them here - -### After merging, notify other teams - -[Add or remove entries as needed] - -- [ ] [Rust SDK](https://github.com/FuelLabs/fuels-rs/) -- [ ] [Sway compiler](https://github.com/FuelLabs/sway/) -- [ ] [Platform documentation](https://github.com/FuelLabs/devrel-requests/issues/new?assignees=&labels=new+request&projects=&template=NEW-REQUEST.yml&title=%5BRequest%5D%3A+) (for out-of-organization contributors, the person merging the PR will do this) -- [ ] Someone else? 
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0e336b92e8..111f0c8410 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,8 +16,8 @@ concurrency: env: CARGO_TERM_COLOR: always - RUST_VERSION: 1.75.0 - NIGHTLY_RUST_VERSION: nightly-2024-02-07 + RUST_VERSION: 1.73.0 + NIGHTLY_RUST_VERSION: nightly-2023-10-29 jobs: check-changelog: @@ -137,14 +137,13 @@ jobs: - rustfmt - cargo - cargo-toml-fmt-check - - publish-check - - publish-wasm-packages-check runs-on: ubuntu-latest steps: - run: true publish-check: runs-on: ubuntu-latest + if: github.event_name == 'release' steps: - name: Checkout repository uses: actions/checkout@v3 @@ -154,54 +153,15 @@ jobs: toolchain: ${{ env.RUST_VERSION }} - name: Publish crate check - uses: xgreenx/publish-crates@v1 + uses: katyo/publish-crates@v2 with: dry-run: true - check-repo: false - ignore-unpublished-changes: true - - publish-wasm-packages-check: - needs: - - publish-check - runs-on: buildjet-4vcpu-ubuntu-2204 - - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - ref: ${{ github.event.pull_request.head.ref }} - - - uses: dtolnay/rust-toolchain@master - with: - toolchain: ${{ env.RUST_VERSION }} - targets: "wasm32-unknown-unknown" - - - name: Installing required crates - run: cargo install wasm-bindgen-cli wasm-opt - - - name: Setup PNPM - uses: pnpm/action-setup@v2 - with: - version: 8.6.9 - - - name: Setup Node - uses: actions/setup-node@v3 - with: - cache: "pnpm" - node-version: 18.14.1 - node-version-file: ".npm/package.json" - cache-dependency-path: ".npm/pnpm-lock.yaml" - registry-url: 'https://registry.npmjs.org' - - - name: Build and Test packages - run: | - pnpm -C .npm install - pnpm -C .npm pack:all publish: # Only do this job if publishing a release and all checks pass. 
needs: - verifications-complete + - publish-check if: github.event_name == 'release' && github.event.action == 'published' runs-on: buildjet-4vcpu-ubuntu-2204 @@ -218,7 +178,7 @@ jobs: ./.github/workflows/scripts/verify_tag.sh ${{ github.ref_name }} Cargo.toml - name: Publish crate - uses: xgreenx/publish-crates@v1 + uses: katyo/publish-crates@v2 with: registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }} diff --git a/.npm/packages/fuel-tx/index.test.cjs b/.npm/packages/fuel-tx/index.test.cjs index 21f0ea63f2..249509e369 100644 --- a/.npm/packages/fuel-tx/index.test.cjs +++ b/.npm/packages/fuel-tx/index.test.cjs @@ -1,243 +1,244 @@ -const {expect} = require('chai') +const { expect } = require('chai') const path = require('node:path') const fs = require('node:fs') const tx = require('.') describe('fuel-tx [cjs]', () => { - it('should export all types', () => { - expect(tx.UtxoId).to.be.ok - expect(tx.TxPointer).to.be.ok - expect(tx.PredicateParameters).to.be.ok - expect(tx.Input).to.be.ok - expect(tx.Output).to.be.ok - expect(tx.Script).to.be.ok - expect(tx.Create).to.be.ok - expect(tx.Mint).to.be.ok - expect(tx.Transaction).to.be.ok - expect(tx.Policies).to.be.ok + it('should export all types', () => { + expect(tx.UtxoId).to.be.ok + expect(tx.TxPointer).to.be.ok + expect(tx.PredicateParameters).to.be.ok + expect(tx.Input).to.be.ok + expect(tx.Output).to.be.ok + expect(tx.Script).to.be.ok + expect(tx.Create).to.be.ok + expect(tx.Mint).to.be.ok + expect(tx.Transaction).to.be.ok + expect(tx.Policies).to.be.ok + }) + + it('should serialize and deserialize UtxoId correctly', () => { + let utxo_id = new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b1a"); + let bytes = utxo_id.to_bytes(); + let utxo_id2 = tx.UtxoId.from_bytes(bytes); + expect(utxo_id.toString()).to.equal(utxo_id2.toString()) + }) + + it('should serialize and deserialize TxPointer correctly', () => { + let utxo_id = new tx.TxPointer("0123456789ab"); + let bytes = 
utxo_id.to_bytes(); + let utxo_id2 = tx.TxPointer.from_bytes(bytes); + expect(utxo_id.toString()).to.equal(utxo_id2.toString()) + }) + + + it('should serialize and deserialize all input variants correctly', () => { + [ + tx.Input.coin_predicate( + new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b1a"), + tx.Address.zeroed(), + BigInt(1234), + tx.AssetId.zeroed(), + new tx.TxPointer("0123456789ab"), + new tx.BlockHeight(5678), + BigInt(9012), + [1, 2, 3, 4], + [5, 6, 7, 8], + ), + tx.Input.coin_signed( + new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b1a"), + tx.Address.zeroed(), + BigInt(1234), + tx.AssetId.zeroed(), + new tx.TxPointer("0123456789ab"), + 2, + new tx.BlockHeight(5678), + ), + tx.Input.contract( + new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b1a"), + tx.Bytes32.zeroed(), + tx.Bytes32.zeroed(), + new tx.TxPointer("0123456789ab"), + tx.ContractId.zeroed(), + ), + tx.Input.message_coin_signed( + tx.Address.zeroed(), + tx.Address.zeroed(), + BigInt(1234), + tx.Nonce.zeroed(), + 2, + ), + tx.Input.message_coin_predicate( + tx.Address.zeroed(), + tx.Address.zeroed(), + BigInt(1234), + tx.Nonce.zeroed(), + BigInt(1234), + [1, 2, 3, 4], + [5, 6, 7, 8], + ), + tx.Input.message_data_signed( + tx.Address.zeroed(), + tx.Address.zeroed(), + BigInt(1234), + tx.Nonce.zeroed(), + 2, + [1, 2, 3, 4], + ), + tx.Input.message_data_predicate( + tx.Address.zeroed(), + tx.Address.zeroed(), + BigInt(1234), + tx.Nonce.zeroed(), + BigInt(1234), + [0, 1, 2, 3], + [1, 2, 3, 4], + [5, 6, 7, 8], + ), + ].forEach(input => { + let bytes = input.to_bytes(); + let input2 = tx.Input.from_bytes(bytes); + expect(input.toString()).to.equal(input2.toString()) }) - - it('should serialize and deserialize UtxoId correctly', () => { - let utxo_id = new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b001a"); - let bytes = utxo_id.to_bytes(); - let utxo_id2 = 
tx.UtxoId.from_bytes(bytes); - expect(utxo_id.toString()).to.equal(utxo_id2.toString()) - }) - - it('should serialize and deserialize TxPointer correctly', () => { - let utxo_id = new tx.TxPointer("0123456789ab"); - let bytes = utxo_id.to_bytes(); - let utxo_id2 = tx.TxPointer.from_bytes(bytes); - expect(utxo_id.toString()).to.equal(utxo_id2.toString()) - }) - - - it('should serialize and deserialize all input variants correctly', () => { - [ - tx.Input.coin_predicate( - new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b001a"), - tx.Address.zeroed(), - 1234n, - tx.AssetId.zeroed(), - new tx.TxPointer("0123456789ab"), - 9012n, - [1, 2, 3, 4], - [5, 6, 7, 8], - ), - tx.Input.coin_signed( - new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b001a"), - tx.Address.zeroed(), - BigInt(1234), - tx.AssetId.zeroed(), - new tx.TxPointer("0123456789ab"), - 2, - ), - tx.Input.contract( - new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b001a"), - tx.Bytes32.zeroed(), - tx.Bytes32.zeroed(), - new tx.TxPointer("0123456789ab"), - tx.ContractId.zeroed(), - ), - tx.Input.message_coin_signed( - tx.Address.zeroed(), - tx.Address.zeroed(), - BigInt(1234), - tx.Nonce.zeroed(), - 2, - ), - tx.Input.message_coin_predicate( - tx.Address.zeroed(), - tx.Address.zeroed(), - BigInt(1234), - tx.Nonce.zeroed(), - BigInt(1234), - [1, 2, 3, 4], - [5, 6, 7, 8], - ), - tx.Input.message_data_signed( - tx.Address.zeroed(), - tx.Address.zeroed(), - BigInt(1234), - tx.Nonce.zeroed(), - 2, - [1, 2, 3, 4], - ), - tx.Input.message_data_predicate( - tx.Address.zeroed(), - tx.Address.zeroed(), - BigInt(1234), - tx.Nonce.zeroed(), - BigInt(1234), - [0, 1, 2, 3], - [1, 2, 3, 4], - [5, 6, 7, 8], - ), - ].forEach(input => { - let bytes = input.to_bytes(); - let input2 = tx.Input.from_bytes(bytes); - expect(input.toString()).to.equal(input2.toString()) - }) - }) - - - it('should serialize and deserialize all output variants 
correctly', () => { - [ - tx.Output.coin( - tx.Address.zeroed(), - BigInt(1234), - tx.AssetId.zeroed(), - ), - tx.Output.contract( - 2, - tx.Bytes32.zeroed(), - tx.Bytes32.zeroed(), - ), - tx.Output.change( - tx.Address.zeroed(), - BigInt(1234), - tx.AssetId.zeroed(), - ), - tx.Output.variable( - tx.Address.zeroed(), - BigInt(1234), - tx.AssetId.zeroed(), - ), - tx.Output.contract_created( - tx.ContractId.zeroed(), - tx.Bytes32.zeroed(), - ), - ].forEach(output => { - let bytes = output.to_bytes(); - let output2 = tx.Output.from_bytes(bytes); - expect(output.toString()).to.equal(output2.toString()) - }) + }) + + + it('should serialize and deserialize all output variants correctly', () => { + [ + tx.Output.coin( + tx.Address.zeroed(), + BigInt(1234), + tx.AssetId.zeroed(), + ), + tx.Output.contract( + 2, + tx.Bytes32.zeroed(), + tx.Bytes32.zeroed(), + ), + tx.Output.change( + tx.Address.zeroed(), + BigInt(1234), + tx.AssetId.zeroed(), + ), + tx.Output.variable( + tx.Address.zeroed(), + BigInt(1234), + tx.AssetId.zeroed(), + ), + tx.Output.contract_created( + tx.ContractId.zeroed(), + tx.Bytes32.zeroed(), + ), + ].forEach(output => { + let bytes = output.to_bytes(); + let output2 = tx.Output.from_bytes(bytes); + expect(output.toString()).to.equal(output2.toString()) }) - - - it('should serialize and deserialize all transaction variants correctly', () => { - [ - [tx.Script, tx.Transaction.script( - 1234n, - [1, 2, 3, 4], - [5, 6, 7, 8], - new tx.Policies(), - [], - [], - [], - )], - [tx.Create, tx.Transaction.create( - 1, - new tx.Policies(), - tx.Salt.zeroed(), - [], - [], - [], - [], - )], - [tx.Mint, tx.Transaction.mint( - new tx.TxPointer("0123456789ab"), - new tx.InputContract( - new tx.UtxoId("0xc49d65de61cf04588a764b557d25cc6c6b4bc0d7429227e2a21e61c213b3a3e2:18ab"), - tx.Bytes32.zeroed(), - tx.Bytes32.zeroed(), - new tx.TxPointer("0123456789ab"), - tx.ContractId.zeroed(), - ), - new tx.OutputContract( - 3, - tx.Bytes32.zeroed(), - tx.Bytes32.zeroed(), - ), - 
1234n, - tx.AssetId.zeroed(), - 1234n, - )], - ].forEach(([tx_variant_type, tx_variant]) => { - let bytes = tx_variant.to_bytes(); - let tx_variant2 = tx_variant_type.from_bytes(bytes); - expect(tx_variant.toString()).to.equal(tx_variant2.toString()) - - let wrapped_tx = tx_variant.as_tx(); - let tx_bytes = wrapped_tx.to_bytes(); - let wrapped_tx2 = tx.Transaction.from_bytes(tx_bytes); - expect(wrapped_tx.toString()).to.equal(wrapped_tx2.toString()) - }) + }) + + + it('should serialize and deserialize all transaction variants correctly', () => { + [ + [tx.Script, tx.Transaction.script( + 1234n, + [1, 2, 3, 4], + [5, 6, 7, 8], + new tx.Policies(), + [], + [], + [], + )], + [tx.Create, tx.Transaction.create( + 1, + new tx.Policies(), + tx.Salt.zeroed(), + [], + [], + [], + [], + )], + [tx.Mint, tx.Transaction.mint( + new tx.TxPointer("0123456789ab"), + new tx.InputContract( + new tx.UtxoId("0xc49d65de61cf04588a764b557d25cc6c6b4bc0d7429227e2a21e61c213b3a3e2:18"), + tx.Bytes32.zeroed(), + tx.Bytes32.zeroed(), + new tx.TxPointer("0123456789ab"), + tx.ContractId.zeroed(), + ), + new tx.OutputContract( + 3, + tx.Bytes32.zeroed(), + tx.Bytes32.zeroed(), + ), + 1234n, + tx.AssetId.zeroed(), + )], + ].forEach(([tx_variant_type, tx_variant]) => { + let bytes = tx_variant.to_bytes(); + let tx_variant2 = tx_variant_type.from_bytes(bytes); + expect(tx_variant.toString()).to.equal(tx_variant2.toString()) + + let wrapped_tx = tx_variant.as_tx(); + let tx_bytes = wrapped_tx.to_bytes(); + let wrapped_tx2 = tx.Transaction.from_bytes(tx_bytes); + expect(wrapped_tx.toString()).to.equal(wrapped_tx2.toString()) }) + }) - // Hex string to byte string conversion. 
- const hexToBytes = hex => { - if (hex.length % 2 != 0) { - throw new Error("Needs full bytes"); - } - const lookup = "0123456789abcdef"; - let result = new Uint8Array(hex.length / 2); - for (let i = 0; i < result.length; i += 1) { - let high = lookup.indexOf(hex[i * 2]); - let low = lookup.indexOf(hex[i * 2 + 1]); - if (high === -1 || low === -1) { - throw new Error("Invalid hex char"); - } - result[i] = (high << 4) | low; - } - return result; + // Hex string to byte string conversion. + const hexToBytes = hex => { + if (hex.length % 2 != 0) { + throw new Error("Needs full bytes"); } - - it('should validate input correctly', () => { - let input = tx.Input.coin_signed( - new tx.UtxoId("0xc49d65de61cf04588a764b557d25cc6c6b4bc0d7429227e2a21e61c213b3a3e2:18ab"), - tx.Address.from_bytes(hexToBytes("f1e92c42b90934aa6372e30bc568a326f6e66a1a0288595e6e3fbd392a4f3e6e")), - 10599410012256088338n, - tx.AssetId.from_bytes(hexToBytes("2cafad611543e0265d89f1c2b60d9ebf5d56ad7e23d9827d6b522fd4d6e44bc3")), - new tx.TxPointer("000000000000"), - 0, - new tx.BlockHeight(0), - ); - - tx.check_input(input, 0, tx.Bytes32.from_bytes(hexToBytes("108eae4147d2c1c86ef4c2ab7c9fe94126645c8d8737495a0574ef1518ae74d8")), [], [{data: hexToBytes("7ce4de2225f041b7f9fec727343a501d99e5b7b58d33f3d4a2cf218d3489959bdec24d13770b5d3bb084b4dac3474f95153e6ecc98f6f0f8ca37a2897b9562ee")}], new tx.PredicateParameters(10000n, 10000n, 10000n, 10000n)); - }) - - it('should validate output correctly', () => { - let output = tx.Output.change( - tx.Address.zeroed(), - 1234n, - tx.AssetId.zeroed(), - ); - - tx.check_output(output, 0, []); - }) - - it('should be able to deserialize snapshots', () => { - const snapshots = '../../../fuel-tx/src/transaction/types/input/snapshots'; - fs.readdirSync(snapshots).forEach(file => { - fs.readFile(path.join(snapshots, file), 'utf8', (err, data) => { - expect(err).to.be.null; - let dataBytes = hexToBytes(data.split('---\n').at(-1).trim()); - let inTx = 
tx.Transaction.from_bytes(dataBytes); - let serialized = inTx.to_bytes(); - expect(serialized.toString()).to.eq(dataBytes.toString()); - }) - }) + const lookup = "0123456789abcdef"; + let result = new Uint8Array(hex.length / 2); + for (let i = 0; i < result.length; i += 1) { + let high = lookup.indexOf(hex[i * 2]); + let low = lookup.indexOf(hex[i * 2 + 1]); + if (high === -1 || low === -1) { + throw new Error("Invalid hex char"); + } + result[i] = (high << 4) | low; + } + return result; + } + + it('should validate input correctly', () => { + let input = tx.Input.coin_signed( + new tx.UtxoId("0xc49d65de61cf04588a764b557d25cc6c6b4bc0d7429227e2a21e61c213b3a3e2:18"), + tx.Address.from_bytes(hexToBytes("f1e92c42b90934aa6372e30bc568a326f6e66a1a0288595e6e3fbd392a4f3e6e")), + 10599410012256088338n, + tx.AssetId.from_bytes(hexToBytes("2cafad611543e0265d89f1c2b60d9ebf5d56ad7e23d9827d6b522fd4d6e44bc3")), + new tx.TxPointer("000000000000"), + 0, + new tx.BlockHeight(0), + ); + + tx.check_input(input, 0, tx.Bytes32.from_bytes(hexToBytes("108eae4147d2c1c86ef4c2ab7c9fe94126645c8d8737495a0574ef1518ae74d8")), [], [{ data: hexToBytes("7ce4de2225f041b7f9fec727343a501d99e5b7b58d33f3d4a2cf218d3489959bdec24d13770b5d3bb084b4dac3474f95153e6ecc98f6f0f8ca37a2897b9562ee") }], new tx.PredicateParameters()); + }) + + it('should validate output correctly', () => { + let output = tx.Output.change( + tx.Address.zeroed(), + 1234n, + tx.AssetId.zeroed(), + ); + + tx.check_output(output, 0, []); + }) + + it('should be able to deserialize snapshots', () => { + const snapshots = '../../../fuel-tx/src/transaction/types/input/snapshots'; + fs.readdirSync(snapshots).forEach(file => { + fs.readFile(path.join(snapshots, file), 'utf8', (err, data) => { + expect(err).to.be.null; + let dataBytes = hexToBytes(data.split('---\n').at(-1).trim()); + let inTx = tx.Transaction.from_bytes(dataBytes); + let serialized = inTx.to_bytes(); + expect(serialized.toString()).to.eq(dataBytes.toString()); + }) }) + }) }) 
diff --git a/.npm/packages/fuel-tx/index.test.mjs b/.npm/packages/fuel-tx/index.test.mjs index d0a960efc1..03acd3c6f4 100644 --- a/.npm/packages/fuel-tx/index.test.mjs +++ b/.npm/packages/fuel-tx/index.test.mjs @@ -1,243 +1,244 @@ -import {expect} from 'chai' +import { expect } from 'chai' import * as path from 'node:path' import * as fs from 'node:fs' import * as tx from './dist/web/index.mjs' describe('fuel-tx [mjs]', () => { - it('should export all types', () => { - expect(tx.UtxoId).to.be.ok - expect(tx.TxPointer).to.be.ok - expect(tx.PredicateParameters).to.be.ok - expect(tx.Input).to.be.ok - expect(tx.Output).to.be.ok - expect(tx.Script).to.be.ok - expect(tx.Create).to.be.ok - expect(tx.Mint).to.be.ok - expect(tx.Transaction).to.be.ok - expect(tx.Policies).to.be.ok + it('should export all types', () => { + expect(tx.UtxoId).to.be.ok + expect(tx.TxPointer).to.be.ok + expect(tx.PredicateParameters).to.be.ok + expect(tx.Input).to.be.ok + expect(tx.Output).to.be.ok + expect(tx.Script).to.be.ok + expect(tx.Create).to.be.ok + expect(tx.Mint).to.be.ok + expect(tx.Transaction).to.be.ok + expect(tx.Policies).to.be.ok + }) + + it('should serialize and deserialize UtxoId correctly', () => { + let utxo_id = new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b1a"); + let bytes = utxo_id.to_bytes(); + let utxo_id2 = tx.UtxoId.from_bytes(bytes); + expect(utxo_id.toString()).to.equal(utxo_id2.toString()) + }) + + it('should serialize and deserialize TxPointer correctly', () => { + let utxo_id = new tx.TxPointer("0123456789ab"); + let bytes = utxo_id.to_bytes(); + let utxo_id2 = tx.TxPointer.from_bytes(bytes); + expect(utxo_id.toString()).to.equal(utxo_id2.toString()) + }) + + + it('should serialize and deserialize all input variants correctly', () => { + [ + tx.Input.coin_predicate( + new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b1a"), + tx.Address.zeroed(), + BigInt(1234), + tx.AssetId.zeroed(), + new 
tx.TxPointer("0123456789ab"), + new tx.BlockHeight(5678), + BigInt(9012), + [1, 2, 3, 4], + [5, 6, 7, 8], + ), + tx.Input.coin_signed( + new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b1a"), + tx.Address.zeroed(), + BigInt(1234), + tx.AssetId.zeroed(), + new tx.TxPointer("0123456789ab"), + 2, + new tx.BlockHeight(5678), + ), + tx.Input.contract( + new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b1a"), + tx.Bytes32.zeroed(), + tx.Bytes32.zeroed(), + new tx.TxPointer("0123456789ab"), + tx.ContractId.zeroed(), + ), + tx.Input.message_coin_signed( + tx.Address.zeroed(), + tx.Address.zeroed(), + BigInt(1234), + tx.Nonce.zeroed(), + 2, + ), + tx.Input.message_coin_predicate( + tx.Address.zeroed(), + tx.Address.zeroed(), + BigInt(1234), + tx.Nonce.zeroed(), + BigInt(1234), + [1, 2, 3, 4], + [5, 6, 7, 8], + ), + tx.Input.message_data_signed( + tx.Address.zeroed(), + tx.Address.zeroed(), + BigInt(1234), + tx.Nonce.zeroed(), + 2, + [1, 2, 3, 4], + ), + tx.Input.message_data_predicate( + tx.Address.zeroed(), + tx.Address.zeroed(), + BigInt(1234), + tx.Nonce.zeroed(), + BigInt(1234), + [0, 1, 2, 3], + [1, 2, 3, 4], + [5, 6, 7, 8], + ), + ].forEach(input => { + let bytes = input.to_bytes(); + let input2 = tx.Input.from_bytes(bytes); + expect(input.toString()).to.equal(input2.toString()) }) - - it('should serialize and deserialize UtxoId correctly', () => { - let utxo_id = new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b001a"); - let bytes = utxo_id.to_bytes(); - let utxo_id2 = tx.UtxoId.from_bytes(bytes); - expect(utxo_id.toString()).to.equal(utxo_id2.toString()) - }) - - it('should serialize and deserialize TxPointer correctly', () => { - let utxo_id = new tx.TxPointer("0123456789ab"); - let bytes = utxo_id.to_bytes(); - let utxo_id2 = tx.TxPointer.from_bytes(bytes); - expect(utxo_id.toString()).to.equal(utxo_id2.toString()) - }) - - - it('should serialize and deserialize all 
input variants correctly', () => { - [ - tx.Input.coin_predicate( - new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b001a"), - tx.Address.zeroed(), - BigInt(1234), - tx.AssetId.zeroed(), - new tx.TxPointer("0123456789ab"), - BigInt(9012), - [1, 2, 3, 4], - [5, 6, 7, 8], - ), - tx.Input.coin_signed( - new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b001a"), - tx.Address.zeroed(), - BigInt(1234), - tx.AssetId.zeroed(), - new tx.TxPointer("0123456789ab"), - 2, - ), - tx.Input.contract( - new tx.UtxoId("0x0c0000000000000000000000000000000000000000000000000000000000000b001a"), - tx.Bytes32.zeroed(), - tx.Bytes32.zeroed(), - new tx.TxPointer("0123456789ab"), - tx.ContractId.zeroed(), - ), - tx.Input.message_coin_signed( - tx.Address.zeroed(), - tx.Address.zeroed(), - BigInt(1234), - tx.Nonce.zeroed(), - 2, - ), - tx.Input.message_coin_predicate( - tx.Address.zeroed(), - tx.Address.zeroed(), - BigInt(1234), - tx.Nonce.zeroed(), - BigInt(1234), - [1, 2, 3, 4], - [5, 6, 7, 8], - ), - tx.Input.message_data_signed( - tx.Address.zeroed(), - tx.Address.zeroed(), - BigInt(1234), - tx.Nonce.zeroed(), - 2, - [1, 2, 3, 4], - ), - tx.Input.message_data_predicate( - tx.Address.zeroed(), - tx.Address.zeroed(), - BigInt(1234), - tx.Nonce.zeroed(), - BigInt(1234), - [0, 1, 2, 3], - [1, 2, 3, 4], - [5, 6, 7, 8], - ), - ].forEach(input => { - let bytes = input.to_bytes(); - let input2 = tx.Input.from_bytes(bytes); - expect(input.toString()).to.equal(input2.toString()) - }) - }) - - - it('should serialize and deserialize all output variants correctly', () => { - [ - tx.Output.coin( - tx.Address.zeroed(), - BigInt(1234), - tx.AssetId.zeroed(), - ), - tx.Output.contract( - 2, - tx.Bytes32.zeroed(), - tx.Bytes32.zeroed(), - ), - tx.Output.change( - tx.Address.zeroed(), - BigInt(1234), - tx.AssetId.zeroed(), - ), - tx.Output.variable( - tx.Address.zeroed(), - BigInt(1234), - tx.AssetId.zeroed(), - ), - tx.Output.contract_created( 
- tx.ContractId.zeroed(), - tx.Bytes32.zeroed(), - ), - ].forEach(output => { - let bytes = output.to_bytes(); - let output2 = tx.Output.from_bytes(bytes); - expect(output.toString()).to.equal(output2.toString()) - }) + }) + + + it('should serialize and deserialize all output variants correctly', () => { + [ + tx.Output.coin( + tx.Address.zeroed(), + BigInt(1234), + tx.AssetId.zeroed(), + ), + tx.Output.contract( + 2, + tx.Bytes32.zeroed(), + tx.Bytes32.zeroed(), + ), + tx.Output.change( + tx.Address.zeroed(), + BigInt(1234), + tx.AssetId.zeroed(), + ), + tx.Output.variable( + tx.Address.zeroed(), + BigInt(1234), + tx.AssetId.zeroed(), + ), + tx.Output.contract_created( + tx.ContractId.zeroed(), + tx.Bytes32.zeroed(), + ), + ].forEach(output => { + let bytes = output.to_bytes(); + let output2 = tx.Output.from_bytes(bytes); + expect(output.toString()).to.equal(output2.toString()) }) - - - it('should serialize and deserialize all transaction variants correctly', () => { - [ - [tx.Script, tx.Transaction.script( - 1234n, - [1, 2, 3, 4], - [5, 6, 7, 8], - new tx.Policies(), - [], - [], - [], - )], - [tx.Create, tx.Transaction.create( - 1, - new tx.Policies(), - tx.Salt.zeroed(), - [], - [], - [], - [], - )], - [tx.Mint, tx.Transaction.mint( - new tx.TxPointer("0123456789ab"), - new tx.InputContract( - new tx.UtxoId("0xc49d65de61cf04588a764b557d25cc6c6b4bc0d7429227e2a21e61c213b3a3e2:18ab"), - tx.Bytes32.zeroed(), - tx.Bytes32.zeroed(), - new tx.TxPointer("0123456789ab"), - tx.ContractId.zeroed(), - ), - new tx.OutputContract( - 3, - tx.Bytes32.zeroed(), - tx.Bytes32.zeroed(), - ), - 1234n, - tx.AssetId.zeroed(), - 1234n, - )], - ].forEach(([tx_variant_type, tx_variant]) => { - let bytes = tx_variant.to_bytes(); - let tx_variant2 = tx_variant_type.from_bytes(bytes); - expect(tx_variant.toString()).to.equal(tx_variant2.toString()) - - let wrapped_tx = tx_variant.as_tx(); - let tx_bytes = wrapped_tx.to_bytes(); - let wrapped_tx2 = tx.Transaction.from_bytes(tx_bytes); - 
expect(wrapped_tx.toString()).to.equal(wrapped_tx2.toString()) - }) + }) + + + it('should serialize and deserialize all transaction variants correctly', () => { + [ + [tx.Script, tx.Transaction.script( + 1234n, + [1, 2, 3, 4], + [5, 6, 7, 8], + new tx.Policies(), + [], + [], + [], + )], + [tx.Create, tx.Transaction.create( + 1, + new tx.Policies(), + tx.Salt.zeroed(), + [], + [], + [], + [], + )], + [tx.Mint, tx.Transaction.mint( + new tx.TxPointer("0123456789ab"), + new tx.InputContract( + new tx.UtxoId("0xc49d65de61cf04588a764b557d25cc6c6b4bc0d7429227e2a21e61c213b3a3e2:18"), + tx.Bytes32.zeroed(), + tx.Bytes32.zeroed(), + new tx.TxPointer("0123456789ab"), + tx.ContractId.zeroed(), + ), + new tx.OutputContract( + 3, + tx.Bytes32.zeroed(), + tx.Bytes32.zeroed(), + ), + 1234n, + tx.AssetId.zeroed(), + )], + ].forEach(([tx_variant_type, tx_variant]) => { + let bytes = tx_variant.to_bytes(); + let tx_variant2 = tx_variant_type.from_bytes(bytes); + expect(tx_variant.toString()).to.equal(tx_variant2.toString()) + + let wrapped_tx = tx_variant.as_tx(); + let tx_bytes = wrapped_tx.to_bytes(); + let wrapped_tx2 = tx.Transaction.from_bytes(tx_bytes); + expect(wrapped_tx.toString()).to.equal(wrapped_tx2.toString()) }) + }) - // Hex string to byte string conversion. - const hexToBytes = hex => { - if (hex.length % 2 != 0) { - throw new Error("Needs full bytes"); - } - const lookup = "0123456789abcdef"; - let result = new Uint8Array(hex.length / 2); - for (let i = 0; i < result.length; i += 1) { - let high = lookup.indexOf(hex[i * 2]); - let low = lookup.indexOf(hex[i * 2 + 1]); - if (high === -1 || low === -1) { - throw new Error("Invalid hex char"); - } - result[i] = (high << 4) | low; - } - return result; + // Hex string to byte string conversion. 
+ const hexToBytes = hex => { + if (hex.length % 2 != 0) { + throw new Error("Needs full bytes"); } - - it('should validate input correctly', () => { - let input = tx.Input.coin_signed( - new tx.UtxoId("0xc49d65de61cf04588a764b557d25cc6c6b4bc0d7429227e2a21e61c213b3a3e2:18ab"), - tx.Address.from_bytes(hexToBytes("f1e92c42b90934aa6372e30bc568a326f6e66a1a0288595e6e3fbd392a4f3e6e")), - 10599410012256088338n, - tx.AssetId.from_bytes(hexToBytes("2cafad611543e0265d89f1c2b60d9ebf5d56ad7e23d9827d6b522fd4d6e44bc3")), - new tx.TxPointer("000000000000"), - 0, - new tx.BlockHeight(0), - ); - - tx.check_input(input, 0, tx.Bytes32.from_bytes(hexToBytes("108eae4147d2c1c86ef4c2ab7c9fe94126645c8d8737495a0574ef1518ae74d8")), [], [{data: hexToBytes("7ce4de2225f041b7f9fec727343a501d99e5b7b58d33f3d4a2cf218d3489959bdec24d13770b5d3bb084b4dac3474f95153e6ecc98f6f0f8ca37a2897b9562ee")}], new tx.PredicateParameters(10000n, 10000n, 10000n, 10000n)); - }) - - it('should validate output correctly', () => { - let output = tx.Output.change( - tx.Address.zeroed(), - 1234n, - tx.AssetId.zeroed(), - ); - - tx.check_output(output, 0, []); - }) - - it('should be able to deserialize snapshots', () => { - const snapshots = '../../../fuel-tx/src/transaction/types/input/snapshots'; - fs.readdirSync(snapshots).forEach(file => { - fs.readFile(path.join(snapshots, file), 'utf8', (err, data) => { - expect(err).to.be.null; - let dataBytes = hexToBytes(data.split('---\n').at(-1).trim()); - let inTx = tx.Transaction.from_bytes(dataBytes); - let serialized = inTx.to_bytes(); - expect(serialized.toString()).to.eq(dataBytes.toString()); - }) - }) + const lookup = "0123456789abcdef"; + let result = new Uint8Array(hex.length / 2); + for (let i = 0; i < result.length; i += 1) { + let high = lookup.indexOf(hex[i * 2]); + let low = lookup.indexOf(hex[i * 2 + 1]); + if (high === -1 || low === -1) { + throw new Error("Invalid hex char"); + } + result[i] = (high << 4) | low; + } + return result; + } + + it('should validate 
input correctly', () => { + let input = tx.Input.coin_signed( + new tx.UtxoId("0xc49d65de61cf04588a764b557d25cc6c6b4bc0d7429227e2a21e61c213b3a3e2:18"), + tx.Address.from_bytes(hexToBytes("f1e92c42b90934aa6372e30bc568a326f6e66a1a0288595e6e3fbd392a4f3e6e")), + 10599410012256088338n, + tx.AssetId.from_bytes(hexToBytes("2cafad611543e0265d89f1c2b60d9ebf5d56ad7e23d9827d6b522fd4d6e44bc3")), + new tx.TxPointer("000000000000"), + 0, + new tx.BlockHeight(0), + ); + + tx.check_input(input, 0, tx.Bytes32.from_bytes(hexToBytes("108eae4147d2c1c86ef4c2ab7c9fe94126645c8d8737495a0574ef1518ae74d8")), [], [{ data: hexToBytes("7ce4de2225f041b7f9fec727343a501d99e5b7b58d33f3d4a2cf218d3489959bdec24d13770b5d3bb084b4dac3474f95153e6ecc98f6f0f8ca37a2897b9562ee") }], new tx.PredicateParameters()); + }) + + it('should validate output correctly', () => { + let output = tx.Output.change( + tx.Address.zeroed(), + 1234n, + tx.AssetId.zeroed(), + ); + + tx.check_output(output, 0, []); + }) + + it('should be able to deserialize snapshots', () => { + const snapshots = '../../../fuel-tx/src/transaction/types/input/snapshots'; + fs.readdirSync(snapshots).forEach(file => { + fs.readFile(path.join(snapshots, file), 'utf8', (err, data) => { + expect(err).to.be.null; + let dataBytes = hexToBytes(data.split('---\n').at(-1).trim()); + let inTx = tx.Transaction.from_bytes(dataBytes); + let serialized = inTx.to_bytes(); + expect(serialized.toString()).to.eq(dataBytes.toString()); + }) }) + }) }) diff --git a/.npm/pnpm-lock.yaml b/.npm/pnpm-lock.yaml index 823b0aef88..a1de42aa9c 100644 --- a/.npm/pnpm-lock.yaml +++ b/.npm/pnpm-lock.yaml @@ -1,8 +1,4 @@ -lockfileVersion: '6.1' - -settings: - autoInstallPeers: true - excludeLinksFromLockfile: false +lockfileVersion: '6.0' importers: @@ -1486,3 +1482,7 @@ packages: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} dev: true + +settings: + autoInstallPeers: true + 
excludeLinksFromLockfile: false diff --git a/CHANGELOG.md b/CHANGELOG.md index 293862b554..39a8a37cca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,4 @@ # Change Log - All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) @@ -7,210 +6,16 @@ and this project adheres to [Semantic Versioning](http://semver.org/). ## [Unreleased] -### Added - -- [#732](https://github.com/FuelLabs/fuel-vm/pull/732): Adds `reset` method to VM memory. - -#### Breaking - -- [#732](https://github.com/FuelLabs/fuel-vm/pull/732): Makes the VM generic over the memory type, allowing reuse of relatively expensive-to-allocate VM memories through `VmMemoryPool`. Functions and traits which require VM initalization such as `estimate_predicates` now take either the memory or `VmMemoryPool` as an argument. The `Interpterter::eq` method now only compares accessible memory regions. `Memory` was renamed into `MemoryInstance` and `Memory` is a trait now. - - -## [Version 0.50.0] - -### Changed - -- [#725](https://github.com/FuelLabs/fuel-vm/pull/725): Adds more clippy lints to catch possible integer overflow and casting bugs on compile time. -- [#729](https://github.com/FuelLabs/fuel-vm/pull/729): Adds more clippy lints to `fuel-merkle` to catch possible integer overflow and casting bugs on compile time. It also does some internal refactoring. - -### Added - -#### Breaking - -- [#725](https://github.com/FuelLabs/fuel-vm/pull/725): `UtxoId::from_str` now rejects inputs with multiple `0x` prefixes. Many `::from_str` implementations also reject extra data in the end of the input, instead of silently ignoring it. `UtxoId::from_str` allows a single `:` between the fields. Unused `GasUnit` struct removed. -- [#726](https://github.com/FuelLabs/fuel-vm/pull/726): Removed code related to Binary Merkle Sum Trees (BMSTs). The BMST is deprecated and not used in production environments. 
-- [#729](https://github.com/FuelLabs/fuel-vm/pull/729): Removed default implementation of `Node::key_size_bits`, implementors must now define it themselves. Also some helper traits have been merged together, or their types changed. -### Fixed - -#### Breaking - -- [#736](https://github.com/FuelLabs/fuel-vm/pull/736): LDC instruction now works in internal contexts as well. Call frames use code size padded to word alignment. - -## [Version 0.49.0] - -### Added - -- [#721](https://github.com/FuelLabs/fuel-vm/pull/721): Added additional logic to the BMT proof verification algorithm to check the length of the provided proof set against the index provided in the proof. - -#### Breaking - -- [#719](https://github.com/FuelLabs/fuel-vm/pull/719): Fix overflow in `LDC` instruction when contract size with padding would overflow. -- [#715](https://github.com/FuelLabs/fuel-vm/pull/715): The `Interpreter` supports the processing of the `Upload` transaction. The change affects `InterpreterStorage`, adding `StorageMutate` constrain. -- [#714](https://github.com/FuelLabs/fuel-vm/pull/714): The change adds a new `Upload` transaction that allows uploading huge byte code on chain subsection by subsection. This transaction is chargeable and is twice as expensive as the `Create` transaction. Anyone can submit this transaction. -- [#712](https://github.com/FuelLabs/fuel-vm/pull/712): The `Interpreter` supports the processing of the `Upgrade` transaction. The change affects `InterpreterStorage`, adding 5 new methods that must be implemented. -- [#707](https://github.com/FuelLabs/fuel-vm/pull/707): The change adds a new `Upgrade` transaction that allows upgrading either consensus parameters or state transition function used by the network to produce future blocks. - The purpose of the upgrade is defined by the `Upgrade Purpose` type: - - ```rust - pub enum UpgradePurpose { - /// The upgrade is performed to change the consensus parameters. 
- ConsensusParameters { - /// The index of the witness in the [`Witnesses`] field that contains - /// the serialized consensus parameters. - witness_index: u16, - /// The hash of the serialized consensus parameters. - /// Since the serialized consensus parameters live inside witnesses(malleable - /// data), any party can override them. The `checksum` is used to verify that the - /// data was not modified. - checksum: Bytes32, - }, - /// The upgrade is performed to change the state transition function. - StateTransition { - /// The Merkle root of the new bytecode of the state transition function. - /// The bytecode must be present on the blockchain(should be known by the - /// network) at the moment of inclusion of this transaction. - root: Bytes32, - }, - } - ``` - - The `Upgrade` transaction is chargeable, and the sender should pay for it. Transaction inputs should contain only base assets. - - Only the privileged address can upgrade the network. The privileged address can be either a real account or a predicate. - - Since serialized consensus parameters are small(< 2kb), they can be part of the upgrade transaction and live inside of witness data. The bytecode of the blockchain state transition function is huge ~1.6MB(relative to consensus parameters), and it is impossible to fit it into one transaction. So when we perform the upgrade of the state transition function, it should already be available on the blockchain. The transaction to actually upload the bytecode(`Upload` transaction) will implemented in the https://github.com/FuelLabs/fuel-core/issues/1754. 
- -### Changed - -- [#707](https://github.com/FuelLabs/fuel-vm/pull/707): Used the same pattern everywhere in the codebase: - ```rust - Self::Script(tx) => tx.encode_static(buffer), - Self::Create(tx) => tx.encode_static(buffer), - Self::Mint(tx) => tx.encode_static(buffer), - Self::Upgrade(tx) => tx.encode_static(buffer), - ``` - - Instead of: - ```rust - Transaction::Script(script) => script.encode_static(buffer), - Transaction::Create(create) => create.encode_static(buffer), - Transaction::Mint(mint) => mint.encode_static(buffer), - Transaction::Upgrade(upgrade) => upgrade.encode_static(buffer), - ``` - -#### Breaking - -- [#714](https://github.com/FuelLabs/fuel-vm/pull/714): Added `max_bytecode_subsections` field to the `TxParameters` to limit the number of subsections that can be uploaded. -- [#707](https://github.com/FuelLabs/fuel-vm/pull/707): Side small breaking for tests changes from the `Upgrade` transaction: - - Moved `fuel-tx-test-helpers` logic into the `fuel_tx::test_helpers` module. - - Added a new rule for `Create` transaction: all inputs should use base asset otherwise it returns `TransactionInputContainsNonBaseAssetId` error. - - Renamed some errors because now they are used for several transactions(`Upgrade` uses some errors from `Create` and some from `Script` transactions): - - `TransactionScriptOutputContractCreated` -> `TransactionOutputContainsContractCreated`. - - `TransactionCreateOutputContract` -> `TransactionOutputContainsContract`. - - `TransactionCreateOutputVariable` -> `TransactionOutputContainsVariable`. - - `TransactionCreateOutputChangeNotBaseAsset` -> `TransactionChangeChangeUsesNotBaseAsset`. - - `TransactionCreateInputContract` -> `TransactionInputContainsContract`. - - `TransactionCreateMessageData` -> `TransactionInputContainsMessageData`. - - The combination of `serde` and `postcard` is used to serialize and deserialize `ConsensusParameters` during the upgrade. 
This means the protocol and state transition function requires the `serde` feature by default for `ConsensusParameters` and `fuel-types`. - -- [#697](https://github.com/FuelLabs/fuel-vm/pull/697): Changed the VM to internally use separate buffers for the stack and the heap to improve startup time. After this change, memory that was never part of the stack or the heap cannot be accessed, even for reading. Also, even if the whole memory is allocated, accesses spanning from the stack to the heap are not allowed. This PR also fixes a bug that required one-byte gap between the stack and the heap. Multiple errors have been changed to be more sensible ones, and sometimes the order of which error is returned has changed. `ALOC` opcode now zeroes the newly allocated memory. - -## [Version 0.48.0] - -### Added - -- [#705](https://github.com/FuelLabs/fuel-vm/pull/705): Added `privileged_address` to the `ConsensusParameters` for permissioned operations(like upgrade of the network). -- [#648](https://github.com/FuelLabs/fuel-vm/pull/648): Added support for generating proofs for Sparse Merkle Trees (SMTs) and proof verification. Proofs can be used to attest to the inclusion or exclusion of data from the set. - -### Changed - -#### Breaking - -- [#709](https://github.com/FuelLabs/fuel-vm/pull/709): Removed `bytecode_length` from the `Create` transaction. -- [#706](https://github.com/FuelLabs/fuel-vm/pull/706): Unified `Create` and `Script` logic via `ChargeableTransaction`. The change is breaking because affects JSON serialization and deserialization. Now `Script` and `Create` transactions have `body` fields that include unique transactions. -- [#703](https://github.com/FuelLabs/fuel-vm/pull/703): Reshuffled fields `Script` and `Create` transactions to unify part used by all chargeable transactions. It breaks the serialization and deserialization and requires adoption on the SDK side. 
-- [#708](https://github.com/FuelLabs/fuel-vm/pull/708): Hidden `Default` params under the "test-helper" feature to avoid accidental use in production code. It is a huge breaking change for any code that has used them before in production, and instead, it should be fetched from the network. In the case of tests simply use the "test-helper" feature in your `[dev-dependencies]` section. -- [#702](https://github.com/FuelLabs/fuel-vm/pull/702): Wrapped `FeeParameters`, `PredicateParameters`, `TxParameters`, `ScriptParameters` and `ContractParameters` into an enum to support versioning. -- [#701](https://github.com/FuelLabs/fuel-vm/pull/701): Wrapped `ConsensusParameters` and `GasCosts` into an enum to support versioning. Moved `block_gas_limit` from `fuel_core_chain_config::ChainConfig` to `ConsensusPataremeters`. Reduced default `MAX_SIZE` to be [110kb](https://github.com/FuelLabs/fuel-core/pull/1761) and `MAX_CONTRACT_SIZE` to be [100kb](https://github.com/FuelLabs/fuel-core/pull/1761). -- [#692](https://github.com/FuelLabs/fuel-vm/pull/692): Add GTF getters for tx size and address. -- [#698](https://github.com/FuelLabs/fuel-vm/pull/698): Store input, output and witness limits to u16, while keeping the values limited to 255. - -## [Version 0.47.1] - -### Added - -- [#689](https://github.com/FuelLabs/fuel-vm/pull/689): Re-add fields to the checked tx `Metadata` for min and max gas. -- [#689](https://github.com/FuelLabs/fuel-vm/pull/689): Add test helpers and additional getters. - -## [Version 0.47.0] - -### Added - -- [#686](https://github.com/FuelLabs/fuel-vm/pull/686): Implement `serde` for `InterpreterError`. - -### Changed - -#### Breaking - -- [#685](https://github.com/FuelLabs/fuel-vm/pull/685): - The `MaxFee` is a mandatory policy to set. The `MaxFee` policy is used to check that the transaction is valid. - Added a new stage for the `Checked` transaction - `Ready`. This type can be constructed with the - `gas_price` before being transacted by the `Interpreter`. 
-- [#671](https://github.com/FuelLabs/fuel-vm/pull/671): Support dynamically sized values in the ContractsState table by - using a vector data type (`Vec`). -- [#682](https://github.com/FuelLabs/fuel-vm/pull/682): Include `Tip` policy in fee calculation -- [#683](https://github.com/FuelLabs/fuel-vm/pull/683): Simplify `InterpreterStorage` by removing dependency - on `MerkleRootStorage` and removing `merkle_` prefix from method names. -- [#678](https://github.com/FuelLabs/fuel-vm/pull/678): Zero malleable fields before execution. Remove some now-obsolete - GTF getters. Don't update `tx.receiptsRoot` after pushing receipts, and do it after execution instead. -- [#672](https://github.com/FuelLabs/fuel-vm/pull/672): Remove `GasPrice` policy -- [#672](https://github.com/FuelLabs/fuel-vm/pull/672): Add `gas_price` field to transaction execution -- [#684](https://github.com/FuelLabs/fuel-vm/pull/684): Remove `maturity` field from `Input` coin types. Also remove - related `GTF` getter. -- [#675](https://github.com/FuelLabs/fuel-vm/pull/675): Add `GTF` access for `asset_id` and `to` fields for `Change` - outputs. - -## [Version 0.46.0] - -### Changed - -#### Breaking - -- [#679](https://github.com/FuelLabs/fuel-vm/pull/679): Require less restricted constraint on `MerkleRootStorage` trait. - Now it requires `StorageInspect` instead of the `StorageMutate`. -- [#673](https://github.com/FuelLabs/fuel-vm/pull/673): Removed `ContractsInfo` table. Contract salts and roots are no - longer stored in on-chain data. -- [#673](https://github.com/FuelLabs/fuel-vm/pull/673): Opcode `CROO` now calculates the given contract's root on - demand. `CROO` has therefore been changed to a `DependentCost` gas cost. 
- -### Changed - -- [#672](https://github.com/FuelLabs/fuel-vm/pull/672): Add `Tip` policy - -## [Version 0.45.0] - -### Changed - -#### Breaking - -- [#668](https://github.com/FuelLabs/fuel-vm/pull/668): Remove `non_exhaustive` from versionable types for security - reasons - -## [Version 0.44.0] - #### Changed - [#660](https://github.com/FuelLabs/fuel-vm/pull/660): Added generic argument to the sparse Merkle tree to allow users to define the key space. The default export sparse Merkle tree is defined as the 32 byte SMT to ensure compatability. - [#653](https://github.com/FuelLabs/fuel-vm/pull/653): `ECAL` opcode handler can now hold internal state. -- [#657](https://github.com/FuelLabs/fuel-vm/pull/657): Add debugger methods to remove or replace all breakpoints at - once. +- [#657](https://github.com/FuelLabs/fuel-vm/pull/657): Add debugger methods to remove or replace all breakpoints at once. #### Breaking - [#654](https://github.com/FuelLabs/fuel-vm/pull/654): Make public types versionable by making non-exhaustive. -- [#658](https://github.com/FuelLabs/fuel-vm/pull/658): Make `key!`-generated types - like `Address`, `AssetId`, `ContractId` and `Bytes32` consume one less byte when serialized with a binary serde - serializer like postcard. +- [#658](https://github.com/FuelLabs/fuel-vm/pull/658): Make `key!`-generated types like `Address`, `AssetId`, `ContractId` and `Bytes32` consume one less byte when serialized with a binary serde serializer like postcard. ## [Version 0.43.2] @@ -230,8 +35,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/). #### Breaking -- [#640](https://github.com/FuelLabs/fuel-vm/pull/640): Update VM initialization cost to dependent cost; this is - required because the time it takes to initialize the VM depends on the size of the transaction. 
+- [#640](https://github.com/FuelLabs/fuel-vm/pull/640): Update VM initialization cost to dependent cost; this is required because the time it takes to initialize the VM depends on the size of the transaction. ## [Version 0.42.1] @@ -247,16 +51,12 @@ and this project adheres to [Semantic Versioning](http://semver.org/). #### Breaking -- [#676](https://github.com/FuelLabs/fuel-vm/pull/676) Add `gas_price` to `Mint` transaction - [#629](https://github.com/FuelLabs/fuel-vm/pull/629): Charge the user for VM initialization. -- [#628](https://github.com/FuelLabs/fuel-vm/pull/628): Renamed `transaction::CheckError` - to `transaction::ValidityError`. +- [#628](https://github.com/FuelLabs/fuel-vm/pull/628): Renamed `transaction::CheckError` to `transaction::ValidityError`. Created a new `checked_transaction::CheckError` that combines `ValidityError` and `PredicateVerificationFailed` errors into one. It allows the return of the `PredicateVerificationFailed` to the end user instead of losing the reason why predicate verification failed. -- [#625](https://github.com/FuelLabs/fuel-vm/pull/625): Use `ArithmeticError` only for arithmetic operations, and - introduce new errors like `BalanceOverflow` for others. Whenever an error is internally caused by a type conversion - to `usize`, so that an overflowing value wouldn't map to a valid index anyway, return the missing item error instead. +- [#625](https://github.com/FuelLabs/fuel-vm/pull/625): Use `ArithmeticError` only for arithmetic operations, and introduce new errors like `BalanceOverflow` for others. Whenever an error is internally caused by a type conversion to `usize`, so that an overflowing value wouldn't map to a valid index anyway, return the missing item error instead. - [#623](https://github.com/FuelLabs/fuel-vm/pull/623): Added support for transaction policies. The `Script` and `Create` transactions received a new field, `policies`. 
Policies allow the addition @@ -266,160 +66,106 @@ and this project adheres to [Semantic Versioning](http://semver.org/). `GasLimit` was renamed to `ScriptGasLimit`. Along with this change, we introduced two new policies: - - `WitnessLimit` - allows the limitation of the maximum size of witnesses in bytes for the contract. Because of the - changes in the gas calculation model(the blockchain also charges the user for the witness data), the user should - protect himself from the block producer or third parties blowing up witness data and draining the user's funds. + - `WitnessLimit` - allows the limitation of the maximum size of witnesses in bytes for the contract. Because of the changes in the gas calculation model(the blockchain also charges the user for the witness data), the user should protect himself from the block producer or third parties blowing up witness data and draining the user's funds. - `MaxFee` - allows the upper bound for the maximum fee that users agree to pay for the transaction. This change brings the following modification to the gas model: - - The `ScriptGasLimit` only limits script execution. Previously, the `ScriptGasLimit` also limited the predicate - execution time, instead predicate gas is now directly included into `min_fee`. So, it is not possible to use - the `ScriptGasLimit` for transaction cost limitations. A new `MaxFee` policy is a way to do that. The `GasLimit` - field was removed from the `Create` transaction because it only relates to the script execution (which - the `Create` transaction doesn't have). - - The blockchain charges the user for the size of witness data (before it was free). There is no separate price for - the storage, so it uses gas to charge the user. This change affects `min_gas` and `min_fee` calculation. 
- - A new policy called `WitnessLimit` also impacts the `max_gas` and `max_fee` calculation in addition - to `ScriptGasLimit`(in the case of `Create` transaction only `WitnessLimit` affects the `max_gas` and `max_fee`). + - The `ScriptGasLimit` only limits script execution. Previously, the `ScriptGasLimit` also limited the predicate execution time, instead predicate gas is now directly included into `min_fee`. So, it is not possible to use the `ScriptGasLimit` for transaction cost limitations. A new `MaxFee` policy is a way to do that. The `GasLimit` field was removed from the `Create` transaction because it only relates to the script execution (which the `Create` transaction doesn't have). + - The blockchain charges the user for the size of witness data (before it was free). There is no separate price for the storage, so it uses gas to charge the user. This change affects `min_gas` and `min_fee` calculation. + - A new policy called `WitnessLimit` also impacts the `max_gas` and `max_fee` calculation in addition to `ScriptGasLimit`(in the case of `Create` transaction only `WitnessLimit` affects the `max_gas` and `max_fee`). - The minimal gas also charges the user for transaction ID calculation. The change has the following modification to the transaction layout: - - The `Create` transaction doesn't have the `ScriptGasLimit` field anymore. Because the `Create` transaction doesn't - have any script to execute - - The `Create` and `Script` transactions don't have explicit `maturity` and `gas_price` fields. Instead, these - fields can be set via a new `policies` field. - - The `Create` and `Script` transactions have a new `policies` field with a unique canonical serialization and - deserialization for optimal space consumption. + - The `Create` transaction doesn't have the `ScriptGasLimit` field anymore. Because the `Create` transaction doesn't have any script to execute + - The `Create` and `Script` transactions don't have explicit `maturity` and `gas_price` fields. 
Instead, these fields can be set via a new `policies` field. + - The `Create` and `Script` transactions have a new `policies` field with a unique canonical serialization and deserialization for optimal space consumption. Other breaking changes caused by the change: - Each transaction requires setting the `GasPrice` policy. - - Previously, `ScriptGasLimit` should be less than the `MAX_GAS_PER_TX` constant. After removing this field from - the `Create` transaction, it is impossible to require it. Instead, it requires that `max_gas <= MAX_GAS_PER_TX` - for any transaction. Consequently, any `Script` transaction that uses `MAX_GAS_PER_TX` as a `ScriptGasLimit` will - always fail because of a new rule. Setting the estimated gas usage instead solves the problem. + - Previously, `ScriptGasLimit` should be less than the `MAX_GAS_PER_TX` constant. After removing this field from the `Create` transaction, it is impossible to require it. Instead, it requires that `max_gas <= MAX_GAS_PER_TX` for any transaction. Consequently, any `Script` transaction that uses `MAX_GAS_PER_TX` as a `ScriptGasLimit` will always fail because of a new rule. Setting the estimated gas usage instead solves the problem. - If the `max_fee > policies.max_fee`, then transaction will be rejected. - If the `witnesses_size > policies.witness_limit`, then transaction will be rejected. - - GTF opcode changed its hardcoded constants for fields. It should be updated according to the values from the - specification on the Sway side. + - GTF opcode changed its hardcoded constants for fields. It should be updated according to the values from the specification on the Sway side. - [#633](https://github.com/FuelLabs/fuel-vm/pull/633): Limit receipt count to `u16::MAX`. -- [#634](https://github.com/FuelLabs/fuel-vm/pull/634): Charge for storage per new byte written. Write opcodes now - return the number of new storage slots created, instead of just a boolean on whether the value existed before.
+- [#634](https://github.com/FuelLabs/fuel-vm/pull/634): Charge for storage per new byte written. Write opcodes now return the number of new storage slots created, instead of just a boolean on whether the value existed before. ### Fixed -- [#627](https://github.com/FuelLabs/fuel-vm/pull/627): Added removal of obsolete SMT nodes along the path - during `update` and `delete` operations. +- [#627](https://github.com/FuelLabs/fuel-vm/pull/627): Added removal of obsolete SMT nodes along the path during `update` and `delete` operations. ## [Version 0.41.0] #### Breaking -- [#622](https://github.com/FuelLabs/fuel-vm/pull/622): Divide `DependentCost` into "light" and "heavy" operations: - Light operations consume `0 < x < 1` gas per unit, while heavy operations consume `x` gas per unit. This distinction - provides more precision when calculating dependent costs. +- [#622](https://github.com/FuelLabs/fuel-vm/pull/622): Divide `DependentCost` into "light" and "heavy" operations: Light operations consume `0 < x < 1` gas per unit, while heavy operations consume `x` gas per unit. This distinction provides more precision when calculating dependent costs. ## [Version 0.40.0] ### Added - - [#607](https://github.com/FuelLabs/fuel-vm/pull/607): Added `ECAL` instruction support. ### Changed -- [#612](https://github.com/FuelLabs/fuel-vm/pull/612): Reduced the memory consumption in all places where we calculate - BMT root. -- [#615](https://github.com/FuelLabs/fuel-vm/pull/615): Made `ReceiptsCtx` of the VM modifiable with `test-helpers` - feature. +- [#612](https://github.com/FuelLabs/fuel-vm/pull/612): Reduced the memory consumption in all places where we calculate BMT root. +- [#615](https://github.com/FuelLabs/fuel-vm/pull/615): Made `ReceiptsCtx` of the VM modifiable with `test-helpers` feature. 
#### Breaking -- [#618](https://github.com/FuelLabs/fuel-vm/pull/618): Transaction fees for `Create` now include the cost of metadata - calculations, including: contract root calculation, state root calculation, and contract id calculation. -- [#613](https://github.com/FuelLabs/fuel-vm/pull/613): Transaction fees now include the cost of signature verification - for each input. For signed inputs, the cost of an EC recovery is charged. For predicate inputs, the cost of a BMT root - of bytecode is charged. -- [#607](https://github.com/FuelLabs/fuel-vm/pull/607): The `Interpreter` expects the third generic argument during type - definition that specifies the implementer of the `EcalHandler` trait for `ecal` opcode. -- [#609](https://github.com/FuelLabs/fuel-vm/pull/609): Checked transactions (`Create`, `Script`, and `Mint`) now - enforce a maximum size. The maximum size is specified by `MAX_TRANSACTION_SIZE` in the transaction parameters, under - consensus parameters. Checking a transaction above this size raises `CheckError::TransactionSizeLimitExceeded`. -- [#617](https://github.com/FuelLabs/fuel-vm/pull/617): Makes memory outside `$is..$ssp` range not executable. - Separates `ErrorFlag` into `InvalidFlags`, `MemoryNotExecutable` and `InvalidInstruction`. Fixes related tests. -- [#619](https://github.com/FuelLabs/fuel-vm/pull/619): Avoid possible truncation of higher bits. It may invalidate the - code that truncated higher bits causing different behavior on 32-bit vs. 64-bit systems. +- [#618](https://github.com/FuelLabs/fuel-vm/pull/618): Transaction fees for `Create` now include the cost of metadata calculations, including: contract root calculation, state root calculation, and contract id calculation. +- [#613](https://github.com/FuelLabs/fuel-vm/pull/613): Transaction fees now include the cost of signature verification for each input. For signed inputs, the cost of an EC recovery is charged. For predicate inputs, the cost of a BMT root of bytecode is charged. 
+- [#607](https://github.com/FuelLabs/fuel-vm/pull/607): The `Interpreter` expects the third generic argument during type definition that specifies the implementer of the `EcalHandler` trait for `ecal` opcode. +- [#609](https://github.com/FuelLabs/fuel-vm/pull/609): Checked transactions (`Create`, `Script`, and `Mint`) now enforce a maximum size. The maximum size is specified by `MAX_TRANSACTION_SIZE` in the transaction parameters, under consensus parameters. Checking a transaction above this size raises `CheckError::TransactionSizeLimitExceeded`. +- [#617](https://github.com/FuelLabs/fuel-vm/pull/617): Makes memory outside `$is..$ssp` range not executable. Separates `ErrorFlag` into `InvalidFlags`, `MemoryNotExecutable` and `InvalidInstruction`. Fixes related tests. +- [#619](https://github.com/FuelLabs/fuel-vm/pull/619): Avoid possible truncation of higher bits. It may invalidate the code that truncated higher bits causing different behavior on 32-bit vs. 64-bit systems. ## [Version 0.39.0] ### Added - -- [#603](https://github.com/FuelLabs/fuel-vm/pull/603): Added `MerkleRootCalculator`for efficient in-memory Merkle root - calculation. -- [#603](https://github.com/FuelLabs/fuel-vm/pull/606): Added Serialization and Deserialization support - to `MerkleRootCalculator`. +- [#603](https://github.com/FuelLabs/fuel-vm/pull/603): Added `MerkleRootCalculator` for efficient in-memory Merkle root calculation. +- [#606](https://github.com/FuelLabs/fuel-vm/pull/606): Added Serialization and Deserialization support to `MerkleRootCalculator`. ### Changed -- [#595](https://github.com/FuelLabs/fuel-vm/pull/595): Removed `wee_alloc` dependency from `fuel-asm`. It now uses the - builtin allocator on web targets as well. +- [#595](https://github.com/FuelLabs/fuel-vm/pull/595): Removed `wee_alloc` dependency from `fuel-asm`. It now uses the builtin allocator on web targets as well.
#### Breaking -- [#598](https://github.com/FuelLabs/fuel-vm/pull/598): Update cost model for `ldc` opcode to take into account contract - size. -- [#604](https://github.com/FuelLabs/fuel-vm/pull/604): Removed `ChainId` from `PredicateId` calculation. It changes the - generated address of the predicates and may break tests or logic that uses hard-coded predicate IDs. -- [#594](https://github.com/FuelLabs/fuel-vm/pull/594): Add new predicate input validation tests. Also improves error - propagation so that predicate error message better reflects the reason for invalidity. -- [#596](https://github.com/FuelLabs/fuel-vm/pull/596): Remove `core::ops::{Add, Sub}` impls from `BlockHeight`. - Use `succ` and `pred` to access adjacent blocks, or perform arithmetic directly on the wrapped integer instead. -- [#593](https://github.com/FuelLabs/fuel-vm/pull/593): Reworked `Mint` transaction to work with `Input::Contract` - and `Output::Contract` instead of `Output::Coin`. It allows account-based fee collection for the block producer. +- [#598](https://github.com/FuelLabs/fuel-vm/pull/598): Update cost model for `ldc` opcode to take into account contract size. +- [#604](https://github.com/FuelLabs/fuel-vm/pull/604): Removed `ChainId` from `PredicateId` calculation. It changes the generated address of the predicates and may break tests or logic that uses hard-coded predicate IDs. +- [#594](https://github.com/FuelLabs/fuel-vm/pull/594): Add new predicate input validation tests. Also improves error propagation so that predicate error message better reflects the reason for invalidity. +- [#596](https://github.com/FuelLabs/fuel-vm/pull/596): Remove `core::ops::{Add, Sub}` impls from `BlockHeight`. Use `succ` and `pred` to access adjacent blocks, or perform arithmetic directly on the wrapped integer instead. +- [#593](https://github.com/FuelLabs/fuel-vm/pull/593): Reworked `Mint` transaction to work with `Input::Contract` and `Output::Contract` instead of `Output::Coin`. 
It allows account-based fee collection for the block producer. + ## [Version 0.38.0] ### Added -- [#586](https://github.com/FuelLabs/fuel-vm/pull/586): Added `default_asset` method to the `ContractIdExt` trait - implementation, to mirror the `default` method on AssetId in the Sway std lib. +- [#586](https://github.com/FuelLabs/fuel-vm/pull/586): Added `default_asset` method to the `ContractIdExt` trait implementation, to mirror the `default` method on AssetId in the Sway std lib. ### Changed #### Breaking -- [#578](https://github.com/FuelLabs/fuel-vm/pull/578): Support `no_std` environments for `fuel-crypto`, falling back to - a pure-Rust crypto implementation. -- [#582](https://github.com/FuelLabs/fuel-vm/pull/582): Make `fuel-vm` and `fuel-tx` crates compatible - with `no_std` + `alloc`. This includes reworking all error handling that used `std::io::Error`, replacing - some `std::collection::{HashMap, HashSet}` with `hashbrown::{HashMap, HashSet}` and many changes to feature-gating of - APIs. -- [#587](https://github.com/FuelLabs/fuel-vm/pull/587): Replace `thiserror` dependency with `derive_more`, so - that `core::fmt::Display` is implemented without the `std` feature. Removes `std::io::Error` trait impls from the - affected types. -- [#588](https://github.com/FuelLabs/fuel-vm/pull/588): Re-worked the size calculation of the canonical - serialization/deserialization. -- [#700](https://github.com/FuelLabs/fuel-vm/pull/700): Add `BASE_ASSET_ID` to `GM` instruction. - +- [#578](https://github.com/FuelLabs/fuel-vm/pull/578): Support `no_std` environments for `fuel-crypto`, falling back to a pure-Rust crypto implementation. +- [#582](https://github.com/FuelLabs/fuel-vm/pull/582): Make `fuel-vm` and `fuel-tx` crates compatible with `no_std` + `alloc`. This includes reworking all error handling that used `std::io::Error`, replacing some `std::collection::{HashMap, HashSet}` with `hashbrown::{HashMap, HashSet}` and many changes to feature-gating of APIs. 
+- [#587](https://github.com/FuelLabs/fuel-vm/pull/587): Replace `thiserror` dependency with `derive_more`, so that `core::fmt::Display` is implemented without the `std` feature. Removes `std::io::Error` trait impls from the affected types. +- [#588](https://github.com/FuelLabs/fuel-vm/pull/588): Re-worked the size calculation of the canonical serialization/deserialization. #### Removed -- [#588](https://github.com/FuelLabs/fuel-vm/pull/588): Removed `SerializedSize` and `SerializedFixedSize` traits. - Removed support for `SIZE_NO_DYNAMIC` and `SIZE_STATIC`. Removed enum attributes from derive macro for `Serialize` - and `Deserialize` traits. +- [#588](https://github.com/FuelLabs/fuel-vm/pull/588): Removed `SerializedSize` and `SerializedFixedSize` traits. Removed support for `SIZE_NO_DYNAMIC` and `SIZE_STATIC`. Removed enum attributes from derive macro for `Serialize` and `Deserialize` traits. ## [Version 0.37.0] #### Breaking -- [#573](https://github.com/FuelLabs/fuel-vm/pull/573): Added `base_asset_id` as a required field - to `FeeParameters`. `base_asset_id` is used to supply the ID of the base asset. -- [#554](https://github.com/FuelLabs/fuel-vm/pull/554): Removed `debug` feature from the `fuel-vm`. The debugger is - always available and becomes active after calling any `set_*` method. -- [#537](https://github.com/FuelLabs/fuel-vm/pull/537): Use dependent cost for `k256`, `s256`, `mcpi`, `scwq`, `swwq` - opcodes. - These opcodes charged inadequately low costs in comparison to the amount of work. - This change should make all transactions that used these opcodes much more expensive than before. -- [#533](https://github.com/FuelLabs/fuel-vm/pull/533): Use custom serialization for fuel-types to allow no_std - compilation. +- [#573](https://github.com/FuelLabs/fuel-vm/pull/573): Added `base_asset_id` as a required field to `FeeParameters`. `base_asset_id` is used to supply the ID of the base asset. 
+- [#554](https://github.com/FuelLabs/fuel-vm/pull/554): Removed `debug` feature from the `fuel-vm`. The debugger is always available and becomes active after calling any `set_*` method. +- [#537](https://github.com/FuelLabs/fuel-vm/pull/537): Use dependent cost for `k256`, `s256`, `mcpi`, `scwq`, `swwq` opcodes. + These opcodes charged inadequately low costs in comparison to the amount of work. + This change should make all transactions that used these opcodes much more expensive than before. +- [#533](https://github.com/FuelLabs/fuel-vm/pull/533): Use custom serialization for fuel-types to allow no_std compilation. ## [Version 0.36.1] @@ -429,44 +175,34 @@ and this project adheres to [Semantic Versioning](http://semver.org/). ### Fixed -- [#574](https://github.com/FuelLabs/fuel-vm/pull/574): Enforce fixed 32-byte input length for LHS and RHS inputs to the - BMT's internal node sum. -- [#547](https://github.com/FuelLabs/fuel-vm/pull/547): Bump `ed25519-dalek` to `2.0.0` to deal with RustSec Advisory. +- [#574](https://github.com/FuelLabs/fuel-vm/pull/574): Enforce fixed 32-byte input length for LHS and RHS inputs to the BMT's internal node sum. +- [#547](https://github.com/FuelLabs/fuel-vm/pull/547): Bump `ed25519-dalek` to `2.0.0` to deal with RustSec Advisory. #### Breaking +- [#524](https://github.com/FuelLabs/fuel-vm/pull/524): Fix a crash in `CCP` instruction when overflowing contract bounds. Fix a bug in `CCP` where overflowing contract bounds in a different way would not actually copy the contract bytes, but just zeroes out the section. Fix a bug in `LDC` where it would revert the transaction when the contract bounds were exceeded, when it's just supposed to fill the rest of the bytes with zeroes. -- [#524](https://github.com/FuelLabs/fuel-vm/pull/524): Fix a crash in `CCP` instruction when overflowing contract - bounds. 
Fix a bug in `CCP` where overflowing contract bounds in a different way would not actually copy the contract - bytes, but just zeroes out the section. Fix a bug in `LDC` where it would revert the transaction when the contract - bounds were exceeded, when it's just supposed to fill the rest of the bytes with zeroes. ## [Version 0.36.0] ### Changed -- [#525](https://github.com/FuelLabs/fuel-vm/pull/525): The `$hp` register is no longer restored to it's previous value - when returning from a call, making it possible to return heap-allocated types from `CALL`. +- [#525](https://github.com/FuelLabs/fuel-vm/pull/525): The `$hp` register is no longer restored to it's previous value when returning from a call, making it possible to return heap-allocated types from `CALL`. - [#535](https://github.com/FuelLabs/fuel-vm/pull/535): Add better test coverage for TR and TRO. #### Breaking -- [#514](https://github.com/FuelLabs/fuel-vm/pull/514/): Add `ChainId` and `GasCosts` to `ConsensusParameters`. - Break down `ConsensusParameters` into sub-structs to match usage. Change signatures of functions to ask for - necessary fields only. -- [#532](https://github.com/FuelLabs/fuel-vm/pull/532): The `TRO` instruction now reverts when attempting to send zero - coins to an output. Panic reason of this `TransferZeroCoins`, and `TR` was changed to use the same panic reason as - well. +- [#514](https://github.com/FuelLabs/fuel-vm/pull/514/): Add `ChainId` and `GasCosts` to `ConsensusParameters`. + Break down `ConsensusParameters` into sub-structs to match usage. Change signatures of functions to ask for + necessary fields only. +- [#532](https://github.com/FuelLabs/fuel-vm/pull/532): The `TRO` instruction now reverts when attempting to send zero coins to an output. Panic reason of this `TransferZeroCoins`, and `TR` was changed to use the same panic reason as well. 
### Fixed -- [#511](https://github.com/FuelLabs/fuel-vm/pull/511): Changes multiple panic reasons to be more accurate, and - internally refactors instruction fetch logic to be less error-prone. +- [#511](https://github.com/FuelLabs/fuel-vm/pull/511): Changes multiple panic reasons to be more accurate, and internally refactors instruction fetch logic to be less error-prone. -- [#529](https://github.com/FuelLabs/fuel-vm/pull/529) [#534](https://github.com/FuelLabs/fuel-vm/pull/534): Enforcing - async WASM initialization for all NPM wrapper packages. +- [#529](https://github.com/FuelLabs/fuel-vm/pull/529) [#534](https://github.com/FuelLabs/fuel-vm/pull/534): Enforcing async WASM initialization for all NPM wrapper packages. -- [#531](https://github.com/FuelLabs/fuel-vm/pull/531): UtxoId::from_str and TxPointer::from_str no longer crash on - invalid input with multibyte characters. Also adds clippy lints to prevent future issues. +- [#531](https://github.com/FuelLabs/fuel-vm/pull/531): UtxoId::from_str and TxPointer::from_str no longer crash on invalid input with multibyte characters. Also adds clippy lints to prevent future issues. #### Breaking @@ -476,8 +212,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/). ### Changed -- [#542](https://github.com/FuelLabs/fuel-vm/pull/542/): Make the `fuel-tx` WASM compatible with `serde` feature - enabled. +- [#542](https://github.com/FuelLabs/fuel-vm/pull/542/): Make the `fuel-tx` WASM compatible with `serde` feature enabled. ## [Version 0.35.2] @@ -485,55 +220,51 @@ and this project adheres to [Semantic Versioning](http://semver.org/). #### Breaking -- [#539](https://github.com/FuelLabs/fuel-vm/pull/539/): Rollbacked the change for the gas charging formula. - Actualized the gas prices for opcodes. +- [#539](https://github.com/FuelLabs/fuel-vm/pull/539/): Rollbacked the change for the gas charging formula. + Actualized the gas prices for opcodes. 
## [Version 0.35.1] ### Added - [#499](https://github.com/FuelLabs/fuel-vm/pull/499/): The `wasm_bindgen` support of `fuel-asm` and `fuel-types`. - Each new release also publish a typescript analog of the `fuel-asm` and `fuel-types` crates to the npm. + Each new release also publishes a typescript analog of the `fuel-asm` and `fuel-types` crates to the npm. ## [Version 0.35.0] -The release mostly fixes funding during the audit and integration with the bridge. But the release also contains some -new features like: - +The release mostly fixes funding during the audit and integration with the bridge. But the release also contains some new features like: - Asynchronous predicate estimation/verification. - Multi-asset support per contract. - Support Secp256r1 signature recovery and Ed25519 verification. + ### Added -- [#486](https://github.com/FuelLabs/fuel-vm/pull/486/): Adds `ed25519` signature verification and `secp256r1` signature - recovery to `fuel-crypto`, and corresponding opcodes `ED19` and `ECR1` to `fuel-vm`. +- [#486](https://github.com/FuelLabs/fuel-vm/pull/486/): Adds `ed25519` signature verification and `secp256r1` signature recovery to `fuel-crypto`, and corresponding opcodes `ED19` and `ECR1` to `fuel-vm`. -- [#486](https://github.com/FuelLabs/fuel-vm/pull/498): Adds `PSHL`, `PSHH`, `POPH` and `POPL` instructions, which allow - cheap push and pop stack operations with multiple registers. +- [#498](https://github.com/FuelLabs/fuel-vm/pull/498): Adds `PSHL`, `PSHH`, `POPH` and `POPL` instructions, which allow cheap push and pop stack operations with multiple registers. - [#500](https://github.com/FuelLabs/fuel-vm/pull/500): Introduced `ParallelExecutor` trait - and made available async versions of verify and estimate predicates. - Updated tests to test for both parallel and sequential execution. - Fixed a bug in `transaction/check_predicate_owners`. + and made available async versions of verify and estimate predicates.
+ Updated tests to test for both parallel and sequential execution. + Fixed a bug in `transaction/check_predicate_owners`. #### Breaking - [#506](https://github.com/FuelLabs/fuel-vm/pull/506): Added new `Mint` and `Burn` variants to `Receipt` enum. - It affects serialization and deserialization with new variants. + It affects serialization and deserialization with new variants. ### Changed #### Breaking -- [#506](https://github.com/FuelLabs/fuel-vm/pull/506): The `mint` and `burn` - opcodes accept a new `$rB` register. It is a sub-identifier used to generate an - `AssetId` by [this rule](https://github.com/FuelLabs/fuel-specs/blob/master/src/identifiers/asset.md). - This feature allows having multi-asset per one contract. It is a huge breaking change, and - after this point, `ContractId` can't be equal to `AssetId`. +- [#506](https://github.com/FuelLabs/fuel-vm/pull/506): The `mint` and `burn` + opcodes accept a new `$rB` register. It is a sub-identifier used to generate an + `AssetId` by [this rule](https://github.com/FuelLabs/fuel-specs/blob/master/src/identifiers/asset.md). + This feature allows having multi-asset per one contract. It is a huge breaking change, and + after this point, `ContractId` can't be equal to `AssetId`. - The conversion like `AssetId::from(*contract_id)` is no longer valid. Instead, the `ContractId` implements - the `ContractIdExt` trait: + The conversion like `AssetId::from(*contract_id)` is no longer valid. Instead, the `ContractId` implements the `ContractIdExt` trait: ```rust /// Trait extends the functionality of the `ContractId` type. pub trait ContractIdExt { @@ -542,53 +273,50 @@ new features like: } ``` -- [#506](https://github.com/FuelLabs/fuel-vm/pull/506): The `mint` and `burn` - opcodes affect the `receipts_root` of the `Script` transaction. +- [#506](https://github.com/FuelLabs/fuel-vm/pull/506): The `mint` and `burn` + opcodes affect the `receipts_root` of the `Script` transaction. 
### Removed #### Breaking -- [#486](https://github.com/FuelLabs/fuel-vm/pull/486/): Removes apparently unused `Keystore` and `Signer` traits - from `fuel-crypto`. Also renames `ECR` opcode to `ECK1`. +- [#486](https://github.com/FuelLabs/fuel-vm/pull/486/): Removes apparently unused `Keystore` and `Signer` traits from `fuel-crypto`. Also renames `ECR` opcode to `ECK1`. ### Fixed -- [#500](https://github.com/FuelLabs/fuel-vm/pull/500): Fixed a bug where `MessageCoinPredicate` wasn't checked for - in `check_predicate_owners`. +- [#500](https://github.com/FuelLabs/fuel-vm/pull/500): Fixed a bug where `MessageCoinPredicate` wasn't checked for in `check_predicate_owners`. #### Breaking - [#502](https://github.com/FuelLabs/fuel-vm/pull/502): The algorithm used by the - binary Merkle tree for generating Merkle proofs has been updated to remove - the leaf data from the proof set. This change allows BMT proofs to conform - to the format expected by the Solidity contracts used for verifying proofs. + binary Merkle tree for generating Merkle proofs has been updated to remove + the leaf data from the proof set. This change allows BMT proofs to conform + to the format expected by the Solidity contracts used for verifying proofs. - [#503](https://github.com/FuelLabs/fuel-vm/pull/503): Use correct amount of gas in call - receipts when limited by cgas. Before this change, the `Receipt::Call` could show an incorrect value for the gas - limit. + receipts when limited by cgas. Before this change, the `Receipt::Call` could show an incorrect value for the gas limit. -- [#504](https://github.com/FuelLabs/fuel-vm/pull/504): The `CROO` and `CSIZ` opcodes require - the existence of corresponding `ContractId` in the transaction's - inputs(the same behavior as for the `CROO` opcode). 
+- [#504](https://github.com/FuelLabs/fuel-vm/pull/504): The `CROO` and `CSIZ` opcodes require + the existence of corresponding `ContractId` in the transaction's + inputs(the same behavior as for the `CROO` opcode). -- [#504](https://github.com/FuelLabs/fuel-vm/pull/504): The size of the contract - was incorrectly padded. It affects the end of the call frame in the memory, - making it not 8 bytes align. Also, it affects the cost of the contract - call(in some cases, we charged less in some more). +- [#504](https://github.com/FuelLabs/fuel-vm/pull/504): The size of the contract + was incorrectly padded. It affects the end of the call frame in the memory, + making it not 8 bytes align. Also, it affects the cost of the contract + call(in some cases, we charged less in some more). - [#504](https://github.com/FuelLabs/fuel-vm/pull/504): The charging for `DependentCost` - was done incorrectly, devaluing the `dep_per_unit` part. After the fixing of - this, the execution should become much more expensive. + was done incorrectly, devaluing the `dep_per_unit` part. After the fixing of + this, the execution should become much more expensive. -- [#505](https://github.com/FuelLabs/fuel-vm/pull/505): The `data` field of the `Receipt` - is not part of the canonical serialization and deserialization anymore. The SDK should use the - `Receipt` type instead of `OpaqueReceipt`. The `Receipt.raw_payload` will be removed for the - `fuel-core 0.20`. The `data` field is optional now. The SDK should update serialization and - deserialization for `MessageOut`, `LogData`, and `ReturnData` receipts. +- [#505](https://github.com/FuelLabs/fuel-vm/pull/505): The `data` field of the `Receipt` + is not part of the canonical serialization and deserialization anymore. The SDK should use the + `Receipt` type instead of `OpaqueReceipt`. The `Receipt.raw_payload` will be removed for the + `fuel-core 0.20`. The `data` field is optional now. 
The SDK should update serialization and + deserialization for `MessageOut`, `LogData`, and `ReturnData` receipts. -- [#505](https://github.com/FuelLabs/fuel-vm/pull/505): The `len` field of the `Receipt` - is not padded anymore and represents an initial value. +- [#505](https://github.com/FuelLabs/fuel-vm/pull/505): The `len` field of the `Receipt` + is not padded anymore and represents an initial value. ## [Version 0.34.1] @@ -597,33 +325,32 @@ Mainly new opcodes prices and small performance improvements in the `BinaryMerkl ### Changed - [#492](https://github.com/FuelLabs/fuel-vm/pull/492): Minor improvements to BMT - internals, including a reduction in usage of `Box`, using `expect(...)` over - `unwrap()`, and additional comments. + internals, including a reduction in usage of `Box`, using `expect(...)` over + `unwrap()`, and additional comments. #### Breaking - [#493](https://github.com/FuelLabs/fuel-vm/pull/493): The default `GasCostsValues` - is updated according to the benches with `fuel-core 0.19`. - It may break some unit tests that compare actual gas usage with expected. + is updated according to the benches with `fuel-core 0.19`. + It may break some unit tests that compare actual gas usage with expected. ## [Version 0.34.0] -This release contains fixes for critical issues that we found before the audit. -Mainly, these changes pertain to the Sparse Merkle Tree (SMT) and related -code. The SMT API was extended to provide more flexibility and to allow users -to select the most appropriate method for their performance needs. Where +This release contains fixes for critical issues that we found before the audit. +Mainly, these changes pertain to the Sparse Merkle Tree (SMT) and related +code. The SMT API was extended to provide more flexibility and to allow users +to select the most appropriate method for their performance needs. Where possible, sequential SMT updates were replaced with constructors that take in a complete data set. 
### Added -- [#476](https://github.com/FuelLabs/fuel-vm/pull/476): The `fuel_vm::Call` supports `From<[u8; Self::LEN]>` - and `Into<[u8; Self::LEN]>`. +- [#476](https://github.com/FuelLabs/fuel-vm/pull/476): The `fuel_vm::Call` supports `From<[u8; Self::LEN]>` and `Into<[u8; Self::LEN]>`. - [#484](https://github.com/FuelLabs/fuel-vm/pull/484): The `sparse::in_memory::MerkleTree` - got new methods `from_set`, `root_from_set`, and `nodes_from_set` methods. These methods allow - a more optimal way to build and calculate the SMT when you know all leaves. - The `Contract::initial_state_root` is much faster now (by ~15 times). + got new methods `from_set`, `root_from_set`, and `nodes_from_set` methods. These methods allow + a more optimal way to build and calculate the SMT when you know all leaves. + The `Contract::initial_state_root` is much faster now (by ~15 times). ### Removed @@ -632,35 +359,37 @@ complete data set. ### Changed - [#477](https://github.com/FuelLabs/fuel-vm/pull/477): The `PanicReason::UnknownPanicReason` is `0x00`. - The `PanicReason` now implements `From` instead of `TryFrom` and can't return an error anymore. + The `PanicReason` now implements `From` instead of `TryFrom` and can't return an error anymore. - [#478](https://github.com/FuelLabs/fuel-vm/pull/478): The `memcopy` method is updated - and returns `MemoryWriteOverlap` instead of `MemoryOverflow`. + and returns `MemoryWriteOverlap` instead of `MemoryOverflow`. ### Fixed -- [#482](https://github.com/FuelLabs/fuel-vm/pull/482): This PR address a security - issue where updates to a Sparse Merkle Tree could deliberately overwrite existing - leaves by setting the leaf key to the hash of an existing leaf or node. This is - done by removing the insertion of the leaf using the leaf key. 
+- [#482](https://github.com/FuelLabs/fuel-vm/pull/482): This PR addresses a security + issue where updates to a Sparse Merkle Tree could deliberately overwrite existing + leaves by setting the leaf key to the hash of an existing leaf or node. This is + done by removing the insertion of the leaf using the leaf key. - [#484](https://github.com/FuelLabs/fuel-vm/pull/484): Fixed bug with not-working `CreateMetadata`. + #### Breaking - [#473](https://github.com/FuelLabs/fuel-vm/pull/473): CFS and CFSI were not validating - that the new `$sp` value isn't below `$ssp`, allowing write access to non-owned - memory. This is now fixed, and attempting to set an incorrect `$sp` value panics. + that the new `$sp` value isn't below `$ssp`, allowing write access to non-owned + memory. This is now fixed, and attempting to set an incorrect `$sp` value panics. - [#485](https://github.com/FuelLabs/fuel-vm/pull/485): This PR addresses a security - issue where the user may manipulate the structure of the Sparse Merkle Tree. - SMT expects hashed storage key wrapped into a `MerkleTreeKey` structure. - The change is breaking because it changes the `state_root` generated by the SMT - and may change the `ContractId` if the `Create` transaction has non-empty `StoargeSlot`s. + issue where the user may manipulate the structure of the Sparse Merkle Tree. + SMT expects hashed storage key wrapped into a `MerkleTreeKey` structure. + The change is breaking because it changes the `state_root` generated by the SMT + and may change the `ContractId` if the `Create` transaction has non-empty `StorageSlot`s. + ## [Version 0.33.0] -The release contains a lot of breaking changes. +The release contains a lot of breaking changes. Most of them are audit blockers and affect the protocol itself. Starting this release we plan to maintain the changelog file and describe all minor and major changes that make sense.
@@ -668,19 +397,19 @@ Starting this release we plan to maintain the changelog file and describe all mi #### Breaking -- [#386](https://github.com/FuelLabs/fuel-vm/pull/386): The coin and message inputs - got a new field - `predicate_gas_used`. So it breaks the constructor API - of these inputs. +- [#386](https://github.com/FuelLabs/fuel-vm/pull/386): The coin and message inputs + got a new field - `predicate_gas_used`. So it breaks the constructor API + of these inputs. - The value of this field is zero for non-predicate inputs, but for the - predicates, it indicates the exact amount of gas used by the predicate - to execute. If after the execution of the predicate remaining gas is not - zero, then the predicate execution failed. - - This field is malleable but will be used by the VM, and each predicate - should be estimated before performing the verification logic. - The `Transaction`, `Create`, and `Script` types implement the - `EstimatePredicates` for these purposes. + The value of this field is zero for non-predicate inputs, but for the + predicates, it indicates the exact amount of gas used by the predicate + to execute. If after the execution of the predicate remaining gas is not + zero, then the predicate execution failed. + + This field is malleable but will be used by the VM, and each predicate + should be estimated before performing the verification logic. + The `Transaction`, `Create`, and `Script` types implement the + `EstimatePredicates` for these purposes. ```rust /// Provides predicate estimation functionality for the transaction. @@ -690,27 +419,23 @@ Starting this release we plan to maintain the changelog file and describe all mi } ``` - During the creation of the `Input`, the best strategy is to use a default - value like `0` and call the `estimate_predicates` method to actualize - the `predicate_gas_used` after. 
+ During the creation of the `Input`, the best strategy is to use a default + value like `0` and call the `estimate_predicates` method to actualize + the `predicate_gas_used` after. -- [#454](https://github.com/FuelLabs/fuel-vm/pull/454): VM native array-backed types - `Address`, `AssetId`, `ContractId`, `Bytes4`, `Bytes8`, `Bytes20`, `Bytes32`, - `Nonce`, `MessageId`, `Salt` now use more compact representation instead of - hex-encoded string when serialized using serde format that sets - `is_human_readable` to false. +- [#454](https://github.com/FuelLabs/fuel-vm/pull/454): VM native array-backed types +`Address`, `AssetId`, `ContractId`, `Bytes4`, `Bytes8`, `Bytes20`, `Bytes32`, +`Nonce`, `MessageId`, `Salt` now use more compact representation instead of +hex-encoded string when serialized using serde format that sets +`is_human_readable` to false. -- [#456](https://github.com/FuelLabs/fuel-vm/pull/456): Added a new type - `ChainId` to represent the identifier of the - chain. - It is a wrapper around the `u64`, so any `u64` can be converted into this type via `.into()` or `ChainId::new(...)`. +- [#456](https://github.com/FuelLabs/fuel-vm/pull/456): Added a new type - `ChainId` to represent the identifier of the chain. +It is a wrapper around the `u64`, so any `u64` can be converted into this type via `.into()` or `ChainId::new(...)`. -- [#459](https://github.com/FuelLabs/fuel-vm/pull/459) Require witness index to be specified when adding an unsigned - coin to a transaction. - This allows for better reuse of witness data when using the transaction builder and helper methods to make - transactions compact. +- [#459](https://github.com/FuelLabs/fuel-vm/pull/459) Require witness index to be specified when adding an unsigned coin to a transaction. +This allows for better reuse of witness data when using the transaction builder and helper methods to make transactions compact. 
-- [#462](https://github.com/FuelLabs/fuel-vm/pull/462): Adds a `cache` parameter to `Input::check` - and `Input::check_signature`. +- [#462](https://github.com/FuelLabs/fuel-vm/pull/462): Adds a `cache` parameter to `Input::check` and `Input::check_signature`. This is used to avoid redundant signature recovery when multiple inputs share the same witness index. ### Changed @@ -719,35 +444,29 @@ Starting this release we plan to maintain the changelog file and describe all mi #### Breaking -- [#386](https://github.com/FuelLabs/fuel-vm/pull/386): Several methods of the `TransactionFee` are - renamed `total` -> `max_fee` +- [#386](https://github.com/FuelLabs/fuel-vm/pull/386): Several methods of the `TransactionFee` are renamed `total` -> `max_fee` and `bytes` -> `min_fee`. The `TransactionFee::min_fee` take into account the gas used by predicates. -- [#450](https://github.com/FuelLabs/fuel-vm/pull/450): The Merkle root of a contract's code is now calculated by - partitioning the code into chunks of 16 KiB, instead of 8 bytes. If the last leaf is does not a full 16 KiB, it is - padded with `0` up to the nearest multiple of 8 bytes. This affects the `ContractId` and `PredicateId` calculations, - breaking all code that used hardcoded values. +- [#450](https://github.com/FuelLabs/fuel-vm/pull/450): The Merkle root of a contract's code is now calculated by partitioning the code into chunks of 16 KiB, instead of 8 bytes. If the last leaf is does not a full 16 KiB, it is padded with `0` up to the nearest multiple of 8 bytes. This affects the `ContractId` and `PredicateId` calculations, breaking all code that used hardcoded values. -- [#456](https://github.com/FuelLabs/fuel-vm/pull/456): The basic - methods `UniqueIdentifier::id`, `Signable::sign_inputs`, - and `Input::predicate_owner` use `ChainId` instead of the `ConsensusParameters`. 
- It is a less strict requirement than before because you can get `ChainId` - from `ConsensusParameters.chain_id`, and it makes the API cleaner. - It affects all downstream functions that use listed methods. +- [#456](https://github.com/FuelLabs/fuel-vm/pull/456): The basic methods `UniqueIdentifier::id`, `Signable::sign_inputs`, +and `Input::predicate_owner` use `ChainId` instead of the `ConsensusParameters`. +It is a less strict requirement than before because you can get `ChainId` +from `ConsensusParameters.chain_id`, and it makes the API cleaner. +It affects all downstream functions that use listed methods. -- [#463](https://github.com/FuelLabs/fuel-vm/pull/463): Moves verification that the `Output::ContractCreated` - output contains valid `contract_id` and `state_root`(the values from the `Output` match with calculated - values from the bytecode, storage slots, and salt) from `fuel-vm` to `fuel-tx`. - It means the end-user will receive this error earlier on the SDK side before `dry_run` instead of after. +- [#463](https://github.com/FuelLabs/fuel-vm/pull/463): Moves verification that the `Output::ContractCreated` +output contains valid `contract_id` and `state_root`(the values from the `Output` match with calculated +values from the bytecode, storage slots, and salt) from `fuel-vm` to `fuel-tx`. +It means the end-user will receive this error earlier on the SDK side before `dry_run` instead of after. ### Fixed #### Breaking -- [#457](https://github.com/FuelLabs/fuel-vm/pull/457): Transactions got one more validity rule: - Each `Script` or `Create` transaction requires at least one input coin or message to be spendable. - It may break code/tests that previously didn't set any spendable inputs. - Note: `Message` with non-empty `data` field is not spendable. +- [#457](https://github.com/FuelLabs/fuel-vm/pull/457): Transactions got one more validity rule: +Each `Script` or `Create` transaction requires at least one input coin or message to be spendable. 
+It may break code/tests that previously didn't set any spendable inputs. +Note: `Message` with non-empty `data` field is not spendable. -- [#458](https://github.com/FuelLabs/fuel-vm/pull/458): The storage slots with the same key inside the `Create` - transaction are forbidden. +- [#458](https://github.com/FuelLabs/fuel-vm/pull/458): The storage slots with the same key inside the `Create` transaction are forbidden. diff --git a/Cargo.toml b/Cargo.toml index f681e66e00..a418a156fd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,6 +5,7 @@ members = [ "fuel-merkle", "fuel-storage", "fuel-tx", + "fuel-tx/test-helpers", "fuel-types", "fuel-vm", ] @@ -17,17 +18,17 @@ edition = "2021" homepage = "https://fuel.network/" license = "BUSL-1.1" repository = "https://github.com/FuelLabs/fuel-vm" -version = "0.50.0" +version = "0.43.2" [workspace.dependencies] -fuel-asm = { version = "0.50.0", path = "fuel-asm", default-features = false } -fuel-crypto = { version = "0.50.0", path = "fuel-crypto", default-features = false } -fuel-derive = { version = "0.50.0", path = "fuel-derive", default-features = false } -fuel-merkle = { version = "0.50.0", path = "fuel-merkle", default-features = false } -fuel-storage = { version = "0.50.0", path = "fuel-storage", default-features = false } -fuel-tx = { version = "0.50.0", path = "fuel-tx", default-features = false } -fuel-types = { version = "0.50.0", path = "fuel-types", default-features = false } -fuel-vm = { version = "0.50.0", path = "fuel-vm", default-features = false } +fuel-asm = { version = "0.43.2", path = "fuel-asm", default-features = false } +fuel-crypto = { version = "0.43.2", path = "fuel-crypto", default-features = false } +fuel-derive = { version = "0.43.2", path = "fuel-derive", default-features = false } +fuel-merkle = { version = "0.43.2", path = "fuel-merkle", default-features = false } +fuel-storage = { version = "0.43.2", path = "fuel-storage", default-features = false } +fuel-tx = { version = "0.43.2", path = 
"fuel-tx", default-features = false } +fuel-types = { version = "0.43.2", path = "fuel-types", default-features = false } +fuel-vm = { version = "0.43.2", path = "fuel-vm", default-features = false } bitflags = "2" bincode = { version = "1.3", default-features = false } criterion = "0.5.0" diff --git a/fuel-asm/src/args.rs b/fuel-asm/src/args.rs index eea9525393..035f6b0a3e 100644 --- a/fuel-asm/src/args.rs +++ b/fuel-asm/src/args.rs @@ -27,12 +27,6 @@ crate::enum_try_from! { /// Get the Chain ID this VM is operating within GetChainId = 0x04, - - /// Get memory address where the transaction is located - TxStart = 0x05, - - /// Get memory address of base asset ID - BaseAssetId = 0x06, }, Immediate18 } @@ -92,8 +86,8 @@ crate::enum_try_from! { /// Set `$rA` to `Memory address of tx.witnesses[$rB]` ScriptWitnessAtIndex = 0x00D, - /// Set `$rA` to size of the transaction in memory, in bytes - TxLength = 0x00E, + /// Set `$rA` to `tx.bytecodeLength` + CreateBytecodeLength = 0x100, /// Set `$rA` to `tx.bytecodeWitnessIndex` CreateBytecodeWitnessIndex = 0x101, @@ -149,6 +143,9 @@ crate::enum_try_from! { /// Set `$rA` to `tx.inputs[$rB].witnessIndex` InputCoinWitnessIndex = 0x207, + /// Set `$rA` to `tx.inputs[$rB].maturity` + InputCoinMaturity = 0x208, + /// Set `$rA` to `tx.inputs[$rB].predicateLength` InputCoinPredicateLength = 0x209, @@ -170,6 +167,15 @@ crate::enum_try_from! { /// Set `$rA` to `tx.inputs[$rB].outputIndex` InputContractOutputIndex = 0x221, + /// Set `$rA` to `Memory address of tx.inputs[$rB].balanceRoot` + InputContractBalanceRoot = 0x222, + + /// Set `$rA` to `Memory address of tx.inputs[$rB].stateRoot` + InputContractStateRoot = 0x223, + + /// Set `$rA` to `Memory address of tx.inputs[$rB].txPointer` + InputContractTxPointer = 0x224, + /// Set `$rA` to `Memory address of tx.inputs[$rB].contractID` InputContractId = 0x225, @@ -224,6 +230,12 @@ crate::enum_try_from! 
{ /// Set `$rA` to `tx.outputs[$rB].inputIndex` OutputContractInputIndex = 0x304, + /// Set `$rA` to `Memory address of tx.outputs[$rB].balanceRoot` + OutputContractBalanceRoot = 0x305, + + /// Set `$rA` to `Memory address of tx.outputs[$rB].stateRoot` + OutputContractStateRoot = 0x306, + /// Set `$rA` to `Memory address of tx.outputs[$rB].contractID` OutputContractCreatedContractId = 0x307, @@ -240,7 +252,7 @@ crate::enum_try_from! { PolicyTypes = 0x500, /// Set `$rA` to `tx.policies[0x00].gasPrice` - PolicyTip = 0x501, + PolicyGasPrice = 0x501, /// Set `$rA` to `tx.policies[count_ones(0b11 & tx.policyTypes) - 1].witnessLimit` PolicyWitnessLimit = 0x502, @@ -268,8 +280,6 @@ fn encode_gm_args() { GMArgs::GetCaller, GMArgs::GetVerifyingPredicate, GMArgs::GetChainId, - GMArgs::TxStart, - GMArgs::BaseAssetId, ]; args.into_iter().for_each(|a| { @@ -297,6 +307,7 @@ fn encode_gtf_args() { GTFArgs::ScriptInputAtIndex, GTFArgs::ScriptOutputAtIndex, GTFArgs::ScriptWitnessAtIndex, + GTFArgs::CreateBytecodeLength, GTFArgs::CreateBytecodeWitnessIndex, GTFArgs::CreateStorageSlotsCount, GTFArgs::CreateInputsCount, @@ -315,11 +326,17 @@ fn encode_gtf_args() { GTFArgs::InputCoinAssetId, GTFArgs::InputCoinTxPointer, GTFArgs::InputCoinWitnessIndex, + GTFArgs::InputCoinMaturity, GTFArgs::InputCoinPredicateLength, GTFArgs::InputCoinPredicateDataLength, GTFArgs::InputCoinPredicate, GTFArgs::InputCoinPredicateData, GTFArgs::InputCoinPredicateGasUsed, + GTFArgs::InputContractTxId, + GTFArgs::InputContractOutputIndex, + GTFArgs::InputContractBalanceRoot, + GTFArgs::InputContractStateRoot, + GTFArgs::InputContractTxPointer, GTFArgs::InputContractId, GTFArgs::InputMessageSender, GTFArgs::InputMessageRecipient, @@ -338,12 +355,14 @@ fn encode_gtf_args() { GTFArgs::OutputCoinAmount, GTFArgs::OutputCoinAssetId, GTFArgs::OutputContractInputIndex, + GTFArgs::OutputContractBalanceRoot, + GTFArgs::OutputContractStateRoot, GTFArgs::OutputContractCreatedContractId, 
GTFArgs::OutputContractCreatedStateRoot, GTFArgs::WitnessDataLength, GTFArgs::WitnessData, GTFArgs::PolicyTypes, - GTFArgs::PolicyTip, + GTFArgs::PolicyGasPrice, GTFArgs::PolicyWitnessLimit, GTFArgs::PolicyMaturity, GTFArgs::PolicyMaxFee, diff --git a/fuel-asm/src/args/wideint.rs b/fuel-asm/src/args/wideint.rs index 747a82f318..66db2db91b 100644 --- a/fuel-asm/src/args/wideint.rs +++ b/fuel-asm/src/args/wideint.rs @@ -184,7 +184,7 @@ mod tests { #[test] fn decode_encode_compare() { for imm in 0..Imm06::MAX.0 { - let bits = Imm06::from(imm); + let bits = Imm06::try_from(imm).unwrap(); if let Some(decoded) = CompareArgs::from_imm(bits) { assert_eq!(decoded.to_imm().0, imm); } @@ -204,7 +204,7 @@ mod tests { #[test] fn decode_encode_mathop() { for imm in 0..Imm06::MAX.0 { - let bits = Imm06::from(imm); + let bits = Imm06::try_from(imm).unwrap(); if let Some(decoded) = MathArgs::from_imm(bits) { assert_eq!(decoded.to_imm().0, imm); } @@ -227,7 +227,7 @@ mod tests { #[test] fn decode_encode_mul() { for imm in 0..Imm06::MAX.0 { - let bits = Imm06::from(imm); + let bits = Imm06::try_from(imm).unwrap(); if let Some(decoded) = MulArgs::from_imm(bits) { assert_eq!(decoded.to_imm().0, imm); } @@ -244,7 +244,7 @@ mod tests { #[test] fn decode_encode_div() { for imm in 0..Imm06::MAX.0 { - let bits = Imm06::from(imm); + let bits = Imm06::try_from(imm).unwrap(); if let Some(decoded) = DivArgs::from_imm(bits) { assert_eq!(decoded.to_imm().0, imm); } diff --git a/fuel-asm/src/encoding_tests.rs b/fuel-asm/src/encoding_tests.rs index e793095c18..ae0b3bc9bf 100644 --- a/fuel-asm/src/encoding_tests.rs +++ b/fuel-asm/src/encoding_tests.rs @@ -60,16 +60,17 @@ fn panic_reason_description() { for r in PanicReason::iter() { let b = r as u8; - let r_p = PanicReason::from(b); + let r_p = PanicReason::try_from(b).expect("Should get panic reason"); let w = Word::from(r as u8); - let r_q = PanicReason::from(u8::try_from(w).unwrap()); + let r_q = PanicReason::try_from(u8::try_from(w).unwrap()) + 
.expect("Should get panic reason"); assert_eq!(r, r_p); assert_eq!(r, r_q); let op = op::ji(imm24); let pd = PanicInstruction::error(r, op.into()); let w = Word::from(pd); - let pd_p = PanicInstruction::from(w); + let pd_p = PanicInstruction::try_from(w).expect("Should get panic reason"); assert_eq!(pd, pd_p); #[cfg(feature = "serde")] diff --git a/fuel-asm/src/lib.rs b/fuel-asm/src/lib.rs index 1c26587960..7f1b075b1d 100644 --- a/fuel-asm/src/lib.rs +++ b/fuel-asm/src/lib.rs @@ -3,13 +3,8 @@ #![cfg_attr(docsrs, feature(doc_auto_cfg))] #![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(feature = "std", doc = include_str!("../README.md"))] -#![deny( - clippy::arithmetic_side_effects, - clippy::cast_sign_loss, - clippy::cast_possible_truncation, - clippy::cast_possible_wrap, - clippy::string_slice -)] +#![deny(clippy::cast_possible_truncation)] +#![deny(clippy::string_slice)] #![deny(missing_docs)] #![deny(unsafe_code)] #![deny(unused_crate_dependencies)] diff --git a/fuel-asm/src/panic_reason.rs b/fuel-asm/src/panic_reason.rs index e2640d2e40..57a9b31251 100644 --- a/fuel-asm/src/panic_reason.rs +++ b/fuel-asm/src/panic_reason.rs @@ -129,24 +129,6 @@ enum_from! { /// Attempt to use sequential memory instructions with too large slot count, /// typically because it cannot fit into usize TooManySlots = 0x2d, - /// Caller of this internal context is also expected to be internal, - /// i.e. $fp->$fp must be non-zero. - ExpectedNestedCaller = 0x2e, - /// During memory growth, the stack overlapped with the heap - MemoryGrowthOverlap = 0x2f, - /// Attempting to read or write uninitialized memory. - /// Also occurs when boundary crosses from stack to heap. - UninitalizedMemoryAccess = 0x30, - /// Overriding consensus parameters is not allowed. - OverridingConsensusParameters = 0x31, - /// The storage doesn't know about the hash of the state transition bytecode. - UnknownStateTransactionBytecodeRoot = 0x32, - /// Overriding the state transition bytecode is not allowed. 
- OverridingStateTransactionBytecode = 0x33, - /// The bytecode is already uploaded and cannot be uploaded again. - BytecodeAlreadyUploaded = 0x34, - /// The part of the bytecode is not sequentially connected to the previous parts. - ThePartIsNotSequentiallyConnected = 0x35, } } @@ -174,7 +156,7 @@ mod tests { assert_eq!(reason, PanicReason::UnknownPanicReason); for i in 1..last_known_panic_reason { - let reason = PanicReason::from(i); + let reason = PanicReason::try_from(i).unwrap(); let i2 = reason as u8; assert_eq!(i, i2); } diff --git a/fuel-crypto/src/lib.rs b/fuel-crypto/src/lib.rs index 8cb33e497b..b8d9c69f35 100644 --- a/fuel-crypto/src/lib.rs +++ b/fuel-crypto/src/lib.rs @@ -4,16 +4,11 @@ #![cfg_attr(not(feature = "std"), no_std)] // Wrong clippy convention; check // https://rust-lang.github.io/api-guidelines/naming.html +#![deny(clippy::string_slice)] #![warn(missing_docs)] #![deny(unsafe_code)] #![deny(unused_crate_dependencies)] -#![deny( - clippy::arithmetic_side_effects, - clippy::cast_sign_loss, - clippy::cast_possible_truncation, - clippy::cast_possible_wrap, - clippy::string_slice -)] +#![deny(clippy::cast_possible_truncation)] // Satisfy unused_crate_dependencies lint for self-dependency enabling test features #[cfg(test)] diff --git a/fuel-crypto/src/secp256/backend.rs b/fuel-crypto/src/secp256/backend.rs index c84ebb5f56..ee9dcc4d2e 100644 --- a/fuel-crypto/src/secp256/backend.rs +++ b/fuel-crypto/src/secp256/backend.rs @@ -46,7 +46,7 @@ mod tests { for case in 0..100 { let secret = SecretKey::random(rng); - let message = Message::new(vec![rng.gen(); case]); + let message = Message::new(&vec![rng.gen(); case]); let public_k = k256::public_key(&secret); let public_s = secp256k1::public_key(&secret); diff --git a/fuel-derive/src/deserialize.rs b/fuel-derive/src/deserialize.rs index 21ad09ba00..0edbfb17e5 100644 --- a/fuel-derive/src/deserialize.rs +++ b/fuel-derive/src/deserialize.rs @@ -50,11 +50,6 @@ fn deserialize_struct(s: &mut 
synstructure::Structure) -> TokenStream2 { quote! {} }; - let mut s = s.clone(); - - let variant: &mut synstructure::VariantInfo = &mut s.variants_mut()[0]; - variant.filter(|binding| !should_skip_field_binding(binding)); - s.gen_impl(quote! { gen impl ::fuel_types::canonical::Deserialize for @Self { fn decode_static(buffer: &mut I) -> ::core::result::Result { @@ -143,11 +138,6 @@ fn deserialize_enum(s: &synstructure::Structure) -> TokenStream2 { } }; - let mut s = s.clone(); - s.variants_mut().iter_mut().for_each(|v| { - v.filter(|binding| !should_skip_field_binding(binding)); - }); - s.gen_impl(quote! { gen impl ::fuel_types::canonical::Deserialize for @Self { fn decode_static(buffer: &mut I) -> ::core::result::Result { diff --git a/fuel-derive/src/lib.rs b/fuel-derive/src/lib.rs index 758a7d4a07..d130ba6b46 100644 --- a/fuel-derive/src/lib.rs +++ b/fuel-derive/src/lib.rs @@ -1,13 +1,6 @@ //! Derive macros for canonical type serialization and deserialization. #![deny(unused_must_use, missing_docs)] -#![deny( - clippy::arithmetic_side_effects, - clippy::cast_sign_loss, - clippy::cast_possible_truncation, - clippy::cast_possible_wrap, - clippy::string_slice -)] extern crate proc_macro; mod attribute; diff --git a/fuel-derive/src/serialize.rs b/fuel-derive/src/serialize.rs index 42b2eefdc1..5a17dec106 100644 --- a/fuel-derive/src/serialize.rs +++ b/fuel-derive/src/serialize.rs @@ -8,45 +8,58 @@ use crate::attribute::{ fn serialize_struct(s: &synstructure::Structure) -> TokenStream2 { let attrs = StructAttrs::parse(s); - let mut s = s.clone(); assert_eq!(s.variants().len(), 1, "structs must have one variant"); - let variant: &mut synstructure::VariantInfo = &mut s.variants_mut()[0]; - variant.filter(|binding| !should_skip_field_binding(binding)); - + let variant: &synstructure::VariantInfo = &s.variants()[0]; let encode_static = variant.each(|binding| { - quote! 
{ - ::fuel_types::canonical::Serialize::encode_static(#binding, buffer)?; + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + ::fuel_types::canonical::Serialize::encode_static(#binding, buffer)?; + } } }); let encode_dynamic = variant.each(|binding| { - quote! { - ::fuel_types::canonical::Serialize::encode_dynamic(#binding, buffer)?; + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + ::fuel_types::canonical::Serialize::encode_dynamic(#binding, buffer)?; + } } }); let size_static_code = variant.each(|binding| { - quote! { - size = size.saturating_add(#binding.size_static()); + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + size = ::fuel_types::canonical::add_sizes(size, #binding.size_static()); + } } }); let initial_size = if attrs.prefix.is_some() { - quote! { let mut size = 8usize; } + quote! { let mut size = 8; } } else { - quote! { let mut size = 0usize; } + quote! { let mut size = 0; } }; let size_static_code = quote! { #initial_size match self { #size_static_code}; size }; let size_dynamic_code = variant.each(|binding| { - quote! { - size = size.saturating_add(#binding.size_dynamic()); + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + size = ::fuel_types::canonical::add_sizes(size, #binding.size_dynamic()); + } } }); let size_dynamic_code = - quote! { let mut size = 0usize; match self { #size_dynamic_code}; size }; + quote! { let mut size = 0; match self { #size_dynamic_code}; size }; let prefix = if let Some(prefix_type) = attrs.prefix.as_ref() { quote! { @@ -91,19 +104,16 @@ fn serialize_struct(s: &synstructure::Structure) -> TokenStream2 { fn serialize_enum(s: &synstructure::Structure) -> TokenStream2 { assert!(!s.variants().is_empty(), "got invalid empty enum"); - let mut s = s.clone(); let mut next_discriminant = quote! 
{ { 0u64 } }; - - s.variants_mut().iter_mut().for_each(|v| { - v.filter(|binding| !should_skip_field_binding(binding)); - }); - let encode_static = s.variants().iter().map(|v| { let pat = v.pat(); - let encode_static_iter = v.bindings().iter().map(|binding| { - quote! { - ::fuel_types::canonical::Serialize::encode_static(#binding, buffer)?; + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + ::fuel_types::canonical::Serialize::encode_static(#binding, buffer)?; + } } }); @@ -128,8 +138,12 @@ fn serialize_enum(s: &synstructure::Structure) -> TokenStream2 { }); let encode_dynamic = s.variants().iter().map(|v| { let encode_dynamic_iter = v.each(|binding| { - quote! { - ::fuel_types::canonical::Serialize::encode_dynamic(#binding, buffer)?; + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + ::fuel_types::canonical::Serialize::encode_dynamic(#binding, buffer)?; + } } }); quote! { @@ -142,15 +156,19 @@ fn serialize_enum(s: &synstructure::Structure) -> TokenStream2 { .iter() .map(|variant| { variant.each(|binding| { - quote! { - size = size.saturating_add(#binding.size_static()); + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + size = ::fuel_types::canonical::add_sizes(size, #binding.size_static()); + } } }) }) .collect(); let match_size_static = quote! {{ // `repr(128)` is unstable, so because of that we can use 8 bytes. - let mut size = 8usize; + let mut size = 8; match self { #match_size_static } size } }; @@ -159,14 +177,18 @@ fn serialize_enum(s: &synstructure::Structure) -> TokenStream2 { .iter() .map(|variant| { variant.each(|binding| { - quote! { - size = size.saturating_add(#binding.size_dynamic()); + if should_skip_field_binding(binding) { + quote! {} + } else { + quote! { + size = ::fuel_types::canonical::add_sizes(size, #binding.size_dynamic()); + } } }) }) .collect(); let match_size_dynamic = - quote! {{ let mut size = 0usize; match self { #match_size_dynamic } size }}; + quote! 
{{ let mut size = 0; match self { #match_size_dynamic } size }}; let impl_code = s.gen_impl(quote! { gen impl ::fuel_types::canonical::Serialize for @Self { diff --git a/fuel-merkle/Cargo.toml b/fuel-merkle/Cargo.toml index 5bc7890d6c..8d10b1cedb 100644 --- a/fuel-merkle/Cargo.toml +++ b/fuel-merkle/Cargo.toml @@ -25,8 +25,6 @@ criterion = { workspace = true } datatest-stable = "0.1" fuel-merkle-test-helpers = { path = "test-helpers" } hex = "0.4" -proptest = "1.4" -proptest-derive = "0.4" rand = "0.8" serde_json = "1.0" serde_yaml = "0.9" diff --git a/fuel-merkle/src/binary/merkle_tree.rs b/fuel-merkle/src/binary/merkle_tree.rs index 276a36367d..34799bc47d 100644 --- a/fuel-merkle/src/binary/merkle_tree.rs +++ b/fuel-merkle/src/binary/merkle_tree.rs @@ -10,6 +10,7 @@ use crate::{ Position, ProofSet, StorageMap, + Subtree, }, storage::{ Mappable, @@ -21,17 +22,9 @@ use crate::{ }; use alloc::vec::Vec; -use core::{ - convert::Infallible, - marker::PhantomData, -}; - -use super::root_calculator::{ - MerkleRootCalculator, - NodeStackPushError, -}; +use core::marker::PhantomData; -#[derive(Debug, Clone, derive_more::Display, PartialEq, Eq)] +#[derive(Debug, Clone, derive_more::Display)] pub enum MerkleTreeError { #[display(fmt = "proof index {_0} is not valid")] InvalidProofIndex(u64), @@ -41,9 +34,6 @@ pub enum MerkleTreeError { #[display(fmt = "{}", _0)] StorageError(StorageError), - - #[display(fmt = "the tree is too large")] - TooLarge, } impl From for MerkleTreeError { @@ -55,7 +45,7 @@ impl From for MerkleTreeError { #[derive(Debug, Clone)] pub struct MerkleTree { storage: StorageType, - nodes: MerkleRootCalculator, + head: Option>, leaves_count: u64, phantom_table: PhantomData, } @@ -67,19 +57,24 @@ impl MerkleTree { pub fn root(&self) -> Bytes32 { let mut scratch_storage = StorageMap::::new(); - let root_node = self - .root_node::(&mut scratch_storage) - .expect("The type doesn't allow constructing invalid trees."); + let root_node = self.root_node(&mut 
scratch_storage); match root_node { None => *Self::empty_root(), Some(ref node) => *node.hash(), } } + fn head(&self) -> Option<&Subtree> { + self.head.as_ref() + } + pub fn leaves_count(&self) -> u64 { self.leaves_count } + // PRIVATE + // + /// The root node is generated by joining all MMR peaks, where a peak is /// defined as the head of a balanced subtree. A tree can be composed of a /// single balanced subtree, in which case the tree is itself balanced, or @@ -96,31 +91,38 @@ impl MerkleTree { /// call, this temporary storage space will contain all intermediate nodes /// not held in persistent storage, and these nodes will be available to the /// callee. - /// - /// Returns `None` if the tree is empty, and the root node otherwise. - fn root_node( - &self, - scratch_storage: &mut StorageMap, - ) -> Result, MerkleTreeError> { - let mut nodes = self.nodes.stack().iter().rev(); - let Some(mut head) = nodes.next().cloned() else { - return Ok(None); // Empty tree - }; + fn root_node(&self, scratch_storage: &mut StorageMap) -> Option { + self.head() + .map(|head| build_root_node(head, scratch_storage)) + } - for node in nodes { - let parent = node - .position() - .parent() - .map_err(|_| MerkleTreeError::TooLarge)?; - head = Node::create_node(parent, node, &head); - StorageMutateInfallible::insert( - scratch_storage, - &head.key(), - &(&head).into(), - ); - } + fn peak_positions(&self) -> Vec { + // Define a new tree with a leaf count 1 greater than the current leaf + // count. + let leaves_count = self.leaves_count + 1; + + // The rightmost leaf position of a tree will always have a leaf index + // N - 1, where N is the number of leaves. 
+ let leaf_position = Position::from_leaf_index(leaves_count - 1); + let root_position = self.root_position(); + let mut peaks_itr = root_position.path(&leaf_position, leaves_count).iter(); + peaks_itr.next(); // Omit the root + + let (_, peaks): (Vec<_>, Vec<_>) = peaks_itr.unzip(); - Ok(Some(head)) + peaks + } + + fn root_position(&self) -> Position { + // Define a new tree with a leaf count 1 greater than the current leaf + // count. + let leaves_count = self.leaves_count + 1; + + // The root position of a tree will always have an in-order index equal + // to N' - 1, where N is the leaves count and N' is N rounded (or equal) + // to the next power of 2. + let root_index = leaves_count.next_power_of_two() - 1; + Position::from_in_order_index(root_index) } } @@ -132,12 +134,80 @@ where pub fn new(storage: StorageType) -> Self { Self { storage, - nodes: MerkleRootCalculator::new(), + head: None, leaves_count: 0, phantom_table: Default::default(), } } + pub fn load( + storage: StorageType, + leaves_count: u64, + ) -> Result> { + let mut tree = Self { + storage, + head: None, + leaves_count, + phantom_table: Default::default(), + }; + + tree.build()?; + + Ok(tree) + } + + pub fn prove( + &self, + proof_index: u64, + ) -> Result<(Bytes32, ProofSet), MerkleTreeError> { + if proof_index + 1 > self.leaves_count { + return Err(MerkleTreeError::InvalidProofIndex(proof_index)) + } + + let mut proof_set = ProofSet::new(); + + let root_position = self.root_position(); + let leaf_position = Position::from_leaf_index(proof_index); + let (_, mut side_positions): (Vec<_>, Vec<_>) = root_position + .path(&leaf_position, self.leaves_count) + .iter() + .unzip(); + side_positions.reverse(); // Reorder side positions from leaf to root. + side_positions.pop(); // The last side position is the root; remove it. + + // Allocate scratch storage to store temporary nodes when building the + // root. 
+ let mut scratch_storage = StorageMap::::new(); + let root_node = self + .root_node(&mut scratch_storage) + .expect("Root node must be present"); + + // Get side nodes. First, we check the scratch storage. If the side node + // is not found in scratch storage, we then check main storage. Finally, + // if the side node is not found in main storage, we exit with a load + // error. + for side_position in side_positions { + let key = side_position.in_order_index(); + let primitive = StorageInspectInfallible::get(&scratch_storage, &key) + .or(StorageInspect::get(&self.storage, &key)?) + .ok_or(MerkleTreeError::LoadError(key))? + .into_owned(); + let node = Node::from(primitive); + proof_set.push(*node.hash()); + } + + let root = *root_node.hash(); + Ok((root, proof_set)) + } + + pub fn reset(&mut self) { + self.leaves_count = 0; + self.head = None; + } + + // PRIVATE + // + /// A binary Merkle tree can be built from a collection of Merkle Mountain /// Range (MMR) peaks. The MMR structure can be accurately defined by the /// number of leaves in the leaf row. @@ -217,77 +287,24 @@ where /// /// By excluding the root position `07`, we have established the set of /// side positions `03`, `09`, and `12`, matching our set of MMR peaks. - pub fn load( - storage: StorageType, - leaves_count: u64, - ) -> Result> { - let mut nodes = Vec::new(); - let peaks = peak_positions(leaves_count).ok_or(MerkleTreeError::TooLarge)?; + fn build(&mut self) -> Result<(), MerkleTreeError> { + let mut current_head = None; + let peaks = &self.peak_positions(); for peak in peaks.iter() { let key = peak.in_order_index(); - let node = storage + let node = self + .storage .get(&key)? .ok_or(MerkleTreeError::LoadError(key))? 
.into_owned() .into(); - nodes.push(node); + let next = Subtree::new(node, current_head); + current_head = Some(next); } - Ok(Self { - storage, - nodes: MerkleRootCalculator::new_with_stack(nodes), - leaves_count, - phantom_table: Default::default(), - }) - } - - pub fn prove( - &self, - proof_index: u64, - ) -> Result<(Bytes32, ProofSet), MerkleTreeError> { - if proof_index >= self.leaves_count { - return Err(MerkleTreeError::InvalidProofIndex(proof_index)) - } - - let root_position = root_position(self.leaves_count) - .expect("This tree is too large, but push should have prevented this"); - let leaf_position = Position::from_leaf_index(proof_index) - .expect("leaves_count is valid, and this is less than leaves_count"); - let (_, mut side_positions): (Vec<_>, Vec<_>) = root_position - .path(&leaf_position, self.leaves_count) - .iter() - .unzip(); - side_positions.reverse(); // Reorder side positions from leaf to root. - side_positions.pop(); // The last side position is the root; remove it. - - // Allocate scratch storage to store temporary nodes when building the - // root. - let mut scratch_storage = StorageMap::::new(); - let root_node = self - .root_node(&mut scratch_storage)? - .expect("Root node must be present, as leaves_count is nonzero"); + self.head = current_head; - // Get side nodes. First, we check the scratch storage. If the side node - // is not found in scratch storage, we then check main storage. Finally, - // if the side node is not found in main storage, we exit with a load - // error. - let mut proof_set = ProofSet::new(); - for side_position in side_positions { - let key = side_position.in_order_index(); - let primitive = StorageInspectInfallible::get(&scratch_storage, &key) - .or(StorageInspect::get(&self.storage, &key)?) - .ok_or(MerkleTreeError::LoadError(key))? 
- .into_owned(); - let node = Node::from(primitive); - proof_set.push(*node.hash()); - } - - let root = *root_node.hash(); - Ok((root, proof_set)) - } - - pub fn reset(&mut self) { - self.nodes.clear(); + Ok(()) } } @@ -296,62 +313,67 @@ where TableType: Mappable, StorageType: StorageMutate, { - /// Adds a new leaf node to the tree. - /// # WARNING - /// This code might modify the storage, and then return an error. - /// TODO: fix this issue - pub fn push(&mut self, data: &[u8]) -> Result<(), MerkleTreeError> { - let new_node = Node::create_leaf(self.leaves_count, data) - .ok_or(MerkleTreeError::TooLarge)?; - - // u64 cannot overflow, as memory is finite - #[allow(clippy::arithmetic_side_effects)] - { - self.leaves_count += 1; - } - - self.nodes - .push_with_callback(new_node, |node| { - self.storage - .insert(&node.key(), &node.into()) - .map_err(MerkleTreeError::StorageError) - .map(|_| ()) - }) - .map_err(|err| match err { - NodeStackPushError::Callback(err) => err, - NodeStackPushError::TooLarge => MerkleTreeError::TooLarge, - }) - } -} + pub fn push(&mut self, data: &[u8]) -> Result<(), StorageError> { + let node = Node::create_leaf(self.leaves_count, data); + self.storage.insert(&node.key(), &node.as_ref().into())?; + let next = self.head.take(); + let head = Subtree::new(node, next); + self.head = Some(head); + self.join_all_subtrees()?; -/// Calculcate root position from leaf count. -/// Returns `None` if the tree is too large. -fn root_position(leaves_count: u64) -> Option { - // The root position of a tree will always have an in-order index equal - // to N' - 1, where N is the leaves count and N' is N rounded (or equal) - // to the next power of 2. - #[allow(clippy::arithmetic_side_effects)] // next_power_of_two() > 0 - Some(Position::from_in_order_index( - leaves_count.checked_add(1)?.next_power_of_two() - 1, - )) -} + self.leaves_count += 1; -/// Calculcate peak positons for given leaf count. -/// Returns `None` if the tree is too large. 
-fn peak_positions(leaves_count: u64) -> Option> { - let leaf_position = Position::from_leaf_index(leaves_count)?; - let root_position = root_position(leaves_count)?; + Ok(()) + } - // Checked by root_position - #[allow(clippy::arithmetic_side_effects)] - let next_leaves_count = leaves_count + 1; + // PRIVATE + // + + fn join_all_subtrees(&mut self) -> Result<(), StorageError> { + while { + // Iterate through all subtrees in the tree to see which subtrees + // can be merged. Two consecutive subtrees will be merged if, and + // only if, their heads are the same height. + if let Some((head, next)) = self + .head() + .and_then(|head| head.next().map(|next| (head, next))) + { + head.node().height() == next.node().height() + } else { + // This head belongs to the last subtree and merging is + // complete. + false + } + } { + // Merge the two front heads of the list into a single head + let mut head = self.head.take().expect("Expected head to be present"); + let mut head_next = head.take_next().expect("Expected next to be present"); + let joined_head = join_subtrees(&mut head_next, &mut head); + self.storage + .insert(&joined_head.node().key(), &joined_head.node().into())?; + self.head = Some(joined_head); + } - let mut peaks_itr = root_position.path(&leaf_position, next_leaves_count).iter(); - peaks_itr.next(); // Omit the root + Ok(()) + } +} - let (_, peaks): (Vec<_>, Vec<_>) = peaks_itr.unzip(); +fn join_subtrees(lhs: &mut Subtree, rhs: &mut Subtree) -> Subtree { + let joined_node = Node::create_node(lhs.node(), rhs.node()); + Subtree::new(joined_node, lhs.take_next()) +} - Some(peaks) +fn build_root_node(subtree: &Subtree, storage: &mut Storage) -> Node +where + Table: Mappable, + Storage: StorageMutateInfallible
, +{ + let mut head = subtree.clone(); + while let Some(mut head_next) = head.take_next() { + head = join_subtrees(&mut head_next, &mut head); + storage.insert(&head.node().key(), &head.node().into()); + } + head.node().clone() } #[cfg(test)] @@ -374,7 +396,6 @@ mod test { use fuel_storage::{ Mappable, StorageInspect, - StorageMutate, }; use alloc::vec::Vec; @@ -870,42 +891,4 @@ mod test { let expected_root = node_3; assert_eq!(root, expected_root); } - - #[test] - fn load_overflows() { - // Given - let storage_map = StorageMap::::new(); - const LEAVES_COUNT: u64 = u64::MAX; - - // When - let result = MerkleTree::load(storage_map, LEAVES_COUNT).map(|_| ()); - - // Then - assert_eq!(result, Err(MerkleTreeError::TooLarge)); - } - - #[test] - fn push_overflows() { - // Given - let mut storage_map = StorageMap::::new(); - const LEAVES_COUNT: u64 = u64::MAX / 2; - loop { - let result = MerkleTree::load(&mut storage_map, LEAVES_COUNT).map(|_| ()); - - if let Err(MerkleTreeError::LoadError(index)) = result { - storage_map.insert(&index, &Primitive::default()).unwrap(); - } else { - break; - } - } - - // When - let mut tree = MerkleTree::load(storage_map, LEAVES_COUNT) - .expect("Expected `load()` to succeed"); - let _ = tree.push(&[]); - let result = tree.push(&[]); - - // Then - assert_eq!(result, Err(MerkleTreeError::TooLarge)); - } } diff --git a/fuel-merkle/src/binary/node.rs b/fuel-merkle/src/binary/node.rs index 653879bd96..54765df8a5 100644 --- a/fuel-merkle/src/binary/node.rs +++ b/fuel-merkle/src/binary/node.rs @@ -22,25 +22,20 @@ impl Node { Self { position, hash } } - /// Returns `None` if the leaf cannot be created due to incorrect position. 
- pub fn create_leaf(index: u64, data: &[u8]) -> Option { - let position = Position::from_leaf_index(index)?; + pub fn create_leaf(index: u64, data: &[u8]) -> Self { + let position = Position::from_leaf_index(index); let hash = leaf_sum(data); - Some(Self { position, hash }) + Self { position, hash } } - /// Creates a new node with the given children. - pub fn create_node( - position: Position, - left_child: &Self, - right_child: &Self, - ) -> Self { + pub fn create_node(left_child: &Self, right_child: &Self) -> Self { + let position = left_child.position().parent(); let hash = node_sum(left_child.hash(), right_child.hash()); Self { position, hash } } - pub fn position(&self) -> &Position { - &self.position + pub fn position(&self) -> Position { + self.position } pub fn key(&self) -> u64 { diff --git a/fuel-merkle/src/binary/root_calculator.rs b/fuel-merkle/src/binary/root_calculator.rs index e6a0a3b8a6..959bda02f9 100644 --- a/fuel-merkle/src/binary/root_calculator.rs +++ b/fuel-merkle/src/binary/root_calculator.rs @@ -1,5 +1,3 @@ -use core::convert::Infallible; - use crate::{ binary::{ empty_sum, @@ -11,12 +9,6 @@ use crate::{ use crate::alloc::borrow::ToOwned; use alloc::vec::Vec; -#[derive(Debug)] -pub(crate) enum NodeStackPushError { - Callback(E), - TooLarge, -} - #[derive(Default, Debug, Clone, PartialEq)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct MerkleRootCalculator { @@ -32,53 +24,22 @@ impl MerkleRootCalculator { Self { stack } } - pub fn clear(&mut self) { - self.stack.clear(); - } - - /// Push a leaf to stack of nodes, propagating changes through the tree. - /// Calls `node_created` for each new node created, stopping on first error. 
- pub(crate) fn push_with_callback( - &mut self, - node: Node, - mut node_created: F, - ) -> Result<(), NodeStackPushError> - where - F: FnMut(&Node) -> Result<(), E>, - { - node_created(&node).map_err(NodeStackPushError::Callback)?; + pub fn push(&mut self, data: &[u8]) { + let node = Node::create_leaf(0, data); self.stack.push(node); - // Propagate changes through the tree. - #[allow(clippy::arithmetic_side_effects)] // ensured by loop condition while self.stack.len() > 1 { - let rhs = &self.stack[self.stack.len() - 1]; - let lhs = &self.stack[self.stack.len() - 2]; - if rhs.height() != lhs.height() { - break; + let right_node = &self.stack[self.stack.len() - 1]; + let left_node = &self.stack[self.stack.len() - 2]; + if right_node.height() == left_node.height() { + let merged_node = Node::create_node(left_node, right_node); + self.stack.pop(); + self.stack.pop(); + self.stack.push(merged_node); + } else { + break } - - let parent_pos = lhs - .position() - .parent() - .map_err(|_| NodeStackPushError::TooLarge)?; - let new = Node::create_node(parent_pos, lhs, rhs); - node_created(&new).map_err(NodeStackPushError::Callback)?; - let _ = self.stack.pop(); - let _ = self.stack.pop(); - self.stack.push(new); } - - Ok(()) - } - - /// Push a new leaf node. - /// Panics if the tree would be too large to compute the root for. - /// In practice this never occurs, as you'd run out of memory first. 
- pub fn push(&mut self, data: &[u8]) { - let node = Node::create_leaf(0, data).expect("Zero is a valid index for a leaf"); - self.push_with_callback::<_, Infallible>(node, |_| Ok(())) - .expect("Tree too large"); } pub fn root(mut self) -> Bytes32 { @@ -86,13 +47,9 @@ impl MerkleRootCalculator { return empty_sum().to_owned() } while self.stack.len() > 1 { - let right_child = self.stack.pop().expect("Checked in loop bound"); - let left_child = self.stack.pop().expect("Checked in loop bound"); - let merged_pos = left_child - .position() - .parent() - .expect("Left child has no parent"); - let merged_node = Node::create_node(merged_pos, &left_child, &right_child); + let right_child = self.stack.pop().expect("Unable to pop element from stack"); + let left_child = self.stack.pop().expect("Unable to pop element from stack"); + let merged_node = Node::create_node(&left_child, &right_child); self.stack.push(merged_node); } self.stack.pop().unwrap().hash().to_owned() diff --git a/fuel-merkle/src/binary/verify.rs b/fuel-merkle/src/binary/verify.rs index 4c3f296ecf..14374d5bb9 100644 --- a/fuel-merkle/src/binary/verify.rs +++ b/fuel-merkle/src/binary/verify.rs @@ -9,40 +9,6 @@ use crate::{ }, }; -/// Returns None if: -/// - `num_leaves` is 0 -/// - the result doens't fit in an usize -fn path_length_from_key(key: u64, num_leaves: u64) -> Option { - if num_leaves == 0 { - return None; - } - - #[allow(clippy::arithmetic_side_effects)] // ilog2(..) < 64 - let path_length = if num_leaves.is_power_of_two() { - num_leaves.ilog2() - } else { - num_leaves.ilog2() + 1 - }; - - #[allow(clippy::arithmetic_side_effects)] // ilog2(..) 
> 0 - let num_leaves_left_subtree = 1 << (path_length - 1); - - let subtree_leaves = num_leaves.saturating_sub(num_leaves_left_subtree); - - let Some(subtree_key) = key.checked_sub(num_leaves_left_subtree) else { - // If leaf is in left subtree, path length is full height of left subtree - return path_length.try_into().ok(); - }; - - // Otherwise, if left or right subtree has only one leaf, path has one additional step - if num_leaves_left_subtree == 1 || subtree_leaves <= 1 { - return Some(1); - } - - // Otherwise, add 1 to height and recurse into right subtree - path_length_from_key(subtree_key, subtree_leaves)?.checked_add(1) -} - pub fn verify>( root: &Bytes32, data: &T, @@ -50,37 +16,22 @@ pub fn verify>( proof_index: u64, num_leaves: u64, ) -> bool { - if num_leaves <= 1 { - if !proof_set.is_empty() { - return false; - } - } else if Some(proof_set.len()) != path_length_from_key(proof_index, num_leaves) { - return false; - } + let mut sum = leaf_sum(data.as_ref()); if proof_index >= num_leaves { - return false; + return false } - let mut sum = leaf_sum(data.as_ref()); if proof_set.is_empty() { return if num_leaves == 1 { *root == sum } else { false } } - #[allow(clippy::arithmetic_side_effects)] // checked above - let last_leaf = num_leaves - 1; - let mut parent = 0usize; + let mut height = 1usize; let mut stable_end = proof_index; loop { - #[allow(clippy::arithmetic_side_effects)] // path_length_from_key checks - let height = parent + 1; - - let subtree_size = 1u64 << height; - #[allow(clippy::arithmetic_side_effects)] // floor(a / b) * b <= a - let subtree_start_index = proof_index / subtree_size * subtree_size; - #[allow(clippy::arithmetic_side_effects)] - let subtree_end_index = subtree_start_index + subtree_size - 1; + let subtree_start_index = proof_index / (1 << height) * (1 << height); + let subtree_end_index = subtree_start_index + (1 << height) - 1; if subtree_end_index >= num_leaves { break @@ -92,39 +43,29 @@ pub fn verify>( return false } - let 
proof_data = proof_set[parent]; - #[allow(clippy::arithmetic_side_effects)] // proof_index > subtree_start_index - if proof_index - subtree_start_index < (1 << parent) { + let proof_data = proof_set[height - 1]; + if proof_index - subtree_start_index < 1 << (height - 1) { sum = node_sum(&sum, &proof_data); } else { sum = node_sum(&proof_data, &sum); } - #[allow(clippy::arithmetic_side_effects)] // path_length_from_key checks - { - parent += 1; - } + height += 1; } - if stable_end != last_leaf { - if proof_set.len() <= parent { + if stable_end != num_leaves - 1 { + if proof_set.len() < height { return false } - let proof_data = proof_set[parent]; + let proof_data = proof_set[height - 1]; sum = node_sum(&sum, &proof_data); - #[allow(clippy::arithmetic_side_effects)] // path_length_from_key checks - { - parent += 1; - } + height += 1; } - while parent < proof_set.len() { - let proof_data = proof_set[parent]; + while height - 1 < proof_set.len() { + let proof_data = proof_set[height - 1]; sum = node_sum(&proof_data, &sum); - #[allow(clippy::arithmetic_side_effects)] // path_length_from_key checks - { - parent += 1; - } + height += 1; } sum == *root diff --git a/fuel-merkle/src/common.rs b/fuel-merkle/src/common.rs index 299bdc3a2f..dc880aa6ee 100644 --- a/fuel-merkle/src/common.rs +++ b/fuel-merkle/src/common.rs @@ -1,10 +1,10 @@ -mod hash; mod msb; mod path_iterator; mod position; mod position_path; mod prefix; mod storage_map; +mod subtree; pub(crate) mod error; pub(crate) mod node; @@ -13,8 +13,12 @@ pub(crate) mod path; pub use path_iterator::AsPathIterator; pub use position::Position; pub use storage_map::StorageMap; +pub use subtree::Subtree; -pub(crate) use msb::Msb; +pub(crate) use msb::{ + Bit, + Msb, +}; pub(crate) use position_path::PositionPath; pub(crate) use prefix::{ Prefix, @@ -32,11 +36,6 @@ pub type Bytes = [u8; N]; use alloc::vec::Vec; pub type ProofSet = Vec; -pub use hash::{ - sum, - sum_iter, -}; - // Merkle Tree hash of an empty list // MTH({}) = 
Hash() pub const fn empty_sum_sha256() -> &'static Bytes32 { diff --git a/fuel-merkle/src/common/hash.rs b/fuel-merkle/src/common/hash.rs deleted file mode 100644 index 8771d04873..0000000000 --- a/fuel-merkle/src/common/hash.rs +++ /dev/null @@ -1,17 +0,0 @@ -use super::Bytes32; - -pub fn sum>(data: T) -> Bytes32 { - use digest::Digest; - let mut hash = sha2::Sha256::new(); - hash.update(data.as_ref()); - hash.finalize().into() -} - -pub fn sum_iter, T: AsRef<[u8]>>(iterator: I) -> Bytes32 { - use digest::Digest; - let mut hash = sha2::Sha256::new(); - for data in iterator { - hash.update(data.as_ref()); - } - hash.finalize().into() -} diff --git a/fuel-merkle/src/common/msb.rs b/fuel-merkle/src/common/msb.rs index bc7412baee..1193d3dd59 100644 --- a/fuel-merkle/src/common/msb.rs +++ b/fuel-merkle/src/common/msb.rs @@ -1,8 +1,31 @@ -type Bit = bool; +#[derive(Debug, Eq, PartialEq)] +pub enum Bit { + _0 = 0, + _1 = 1, +} + +trait GetBit { + fn get_bit(&self, bit_index: u32) -> Option; +} + +impl GetBit for u8 { + fn get_bit(&self, bit_index: u32) -> Option { + if bit_index < 8 { + let mask = 1 << (7 - bit_index); + let bit = self & mask; + match bit { + 0 => Some(Bit::_0), + _ => Some(Bit::_1), + } + } else { + None + } + } +} pub trait Msb { fn get_bit_at_index_from_msb(&self, index: u32) -> Option; - fn common_prefix_count(&self, other: &[u8]) -> u64; + fn common_prefix_count(&self, other: &Self) -> u32; } impl Msb for [u8; N] { @@ -11,24 +34,17 @@ impl Msb for [u8; N] { let byte_index = index / 8; // The bit within the containing byte let byte_bit_index = index % 8; - self.get(byte_index as usize).map(|byte| { - #[allow(clippy::arithmetic_side_effects)] // checked above - let mask = 1 << (7 - byte_bit_index); - byte & mask != 0 - }) + self.get(byte_index as usize) + .and_then(|byte| byte.get_bit(byte_bit_index)) } - fn common_prefix_count(&self, other: &[u8]) -> u64 { + fn common_prefix_count(&self, other: &Self) -> u32 { let mut count = 0; for (byte1, byte2) in 
self.iter().zip(other.iter()) { // For each pair of bytes, compute the similarity of each byte using // exclusive or (XOR). The leading zeros measures the number of // similar bits from left to right. For equal bytes, this will be 8. - let common_bits = (byte1 ^ byte2).leading_zeros(); - #[allow(clippy::arithmetic_side_effects)] // u64 is always large enough - { - count += common_bits as u64; - } + count += (byte1 ^ byte2).leading_zeros(); if byte1 != byte2 { break } diff --git a/fuel-merkle/src/common/node.rs b/fuel-merkle/src/common/node.rs index 2bb610bda9..e47ab80430 100644 --- a/fuel-merkle/src/common/node.rs +++ b/fuel-merkle/src/common/node.rs @@ -1,7 +1,10 @@ use crate::common::Bytes; use alloc::string::String; -use core::fmt; +use core::{ + fmt, + mem, +}; pub trait KeyFormatting { type PrettyType: fmt::Display; @@ -12,7 +15,11 @@ pub trait KeyFormatting { pub trait Node { type Key: KeyFormatting; - fn key_size_bits() -> u32; + fn key_size_in_bits() -> u32 { + u32::try_from(mem::size_of::() * 8) + .expect("The key usually is several bytes") + } + fn height(&self) -> u32; fn leaf_key(&self) -> Self::Key; fn is_leaf(&self) -> bool; @@ -36,8 +43,6 @@ where { #[display(fmt = "Child with key {} was not found in storage", _0.pretty())] ChildNotFound(Key), - #[display(fmt = "Node channot have the requested child")] - ChildCannotExist, #[display(fmt = "Node is a leaf with no children")] NodeIsLeaf, #[display(fmt = "{}", _0)] diff --git a/fuel-merkle/src/common/path.rs b/fuel-merkle/src/common/path.rs index 0a1d7cb76a..6a202345ef 100644 --- a/fuel-merkle/src/common/path.rs +++ b/fuel-merkle/src/common/path.rs @@ -1,36 +1,44 @@ -use crate::common::Msb; +use crate::common::{ + Bit, + Msb, +}; -/// The side of a child node in a binary tree. 
-pub enum Side { +pub enum Instruction { Left, Right, } -impl From for Side { - fn from(bit: bool) -> Self { +impl From for Instruction { + fn from(bit: Bit) -> Self { match bit { - false => Side::Left, - true => Side::Right, + Bit::_0 => Instruction::Left, + Bit::_1 => Instruction::Right, } } } pub trait Path { - /// Which child node to follow at the given index. - fn get_instruction(&self, index: u32) -> Option; + fn get_instruction(&self, index: u32) -> Option; +} - fn common_path_length(&self, other: &[u8]) -> u64; +pub trait ComparablePath { + fn common_path_length(&self, other: &Self) -> u32; } impl Path for T where T: Msb, { - fn get_instruction(&self, index: u32) -> Option { + fn get_instruction(&self, index: u32) -> Option { self.get_bit_at_index_from_msb(index).map(Into::into) } +} - fn common_path_length(&self, other: &[u8]) -> u64 { +impl ComparablePath for T +where + T: Msb, +{ + fn common_path_length(&self, other: &Self) -> u32 { self.common_prefix_count(other) } } diff --git a/fuel-merkle/src/common/path_iterator.rs b/fuel-merkle/src/common/path_iterator.rs index be7bfe5807..ac4ec27ee7 100644 --- a/fuel-merkle/src/common/path_iterator.rs +++ b/fuel-merkle/src/common/path_iterator.rs @@ -4,8 +4,8 @@ use crate::common::{ ParentNode, }, path::{ + Instruction, Path, - Side, }, }; @@ -90,12 +90,10 @@ pub struct PathIter { impl PathIter where T: ParentNode + Clone, - T::Key: Clone, { - pub fn new(root: &T, leaf_key: &T::Key) -> Self { + pub fn new(root: &T, leaf_key: T::Key) -> Self { let initial = (Ok(root.clone()), Ok(root.clone())); - #[rustfmt::skip] // The initial offset from the most significant bit (MSB). // // The offset from the MSB indicates which bit to read when deducing the @@ -118,31 +116,30 @@ where // With an 8-bit key and heights 1 through 7: // // Height Depth - // 7 0 127 Offset = Bits - Height = 8 - 7 = 1 - // / \ + // 7 0 127 Offset = Bits - Height = + // 8 - 7 = 1 / \ // / \ // ... ... ... 
// / \ // / \ - // 3 4 07 247 Offset = Bits - Height = 8 - 3 = 5 - // / \ / \ + // 3 4 07 247 Offset = Bits - Height = + // 8 - 3 = 5 / \ / \ // / \ ... \ // / \ \ // / \ \ // / \ \ // / \ \ - // 2 5 03 11 251 Offset = Bits - Height = 8 - 2 = 6 - // / \ / \ / \ + // 2 5 03 11 251 Offset = Bits - Height = + // 8 - 2 = 6 / \ / \ / \ // / \ / \ ... \ - // 1 6 01 05 09 13 253 Offset = Bits - Height = 8 - 1 = 7 - // / \ / \ / \ / \ / \ - // 0 7 00 02 04 06 08 10 12 14 252 254 + // 1 6 01 05 09 13 253 Offset = Bits - Height = + // 8 - 1 = 7 / \ / \ / \ / \ / \ + // 0 7 00 02 04 06 08 10 12 14 252 254 // 00 01 02 03 04 05 06 07 126 127 // - let initial_offset = T::key_size_bits().checked_sub(root.height()) - .expect("Root height more than key size allows, ParentNode impl is incorrect"); + let initial_offset = T::key_size_in_bits() - root.height(); Self { - leaf_key: leaf_key.clone(), + leaf_key, current: Some(initial), current_offset: initial_offset, } @@ -165,16 +162,12 @@ where let path = &self.leaf_key; let instruction = path.get_instruction(self.current_offset); self.current = instruction.map(|instruction| { - // get_instruction ensures current_offset is ok - #[allow(clippy::arithmetic_side_effects)] - { - self.current_offset += 1; - } + self.current_offset += 1; match instruction { - Side::Left => { + Instruction::Left => { (path_node.left_child(), path_node.right_child()) } - Side::Right => { + Instruction::Right => { (path_node.right_child(), path_node.left_child()) } } @@ -194,25 +187,19 @@ where } pub trait AsPathIterator { - fn as_path_iter(&self, leaf_key: &T::Key) -> PathIter; + fn as_path_iter(&self, leaf_key: T::Key) -> PathIter; } impl AsPathIterator for T where T: ParentNode + Clone, - T::Key: Clone, { - fn as_path_iter(&self, leaf_key: &T::Key) -> PathIter { + fn as_path_iter(&self, leaf_key: T::Key) -> PathIter { PathIter::new(self, leaf_key) } } #[cfg(test)] -#[allow( - clippy::restriction, - clippy::cast_possible_wrap, - clippy::cast_sign_loss -)] mod 
test { use crate::common::{ node::{ @@ -272,11 +259,6 @@ mod test { TestNode::height(self) } - #[allow(clippy::arithmetic_side_effects, clippy::cast_possible_truncation)] // const - fn key_size_bits() -> u32 { - core::mem::size_of::() as u32 * 8 - } - fn leaf_key(&self) -> Self::Key { TestNode::leaf_index(self).to_be_bytes() } @@ -325,7 +307,7 @@ mod test { { let leaf = Node::from_leaf_index(0); let (path, _): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_path = vec![ @@ -340,7 +322,7 @@ mod test { { let leaf = Node::from_leaf_index(1); let (path, _): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_path = vec![ @@ -355,7 +337,7 @@ mod test { { let leaf = Node::from_leaf_index(2); let (path, _): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_path = vec![ @@ -370,7 +352,7 @@ mod test { { let leaf = Node::from_leaf_index(3); let (path, _): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_path = vec![ @@ -385,7 +367,7 @@ mod test { { let leaf = Node::from_leaf_index(4); let (path, _): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_path = vec![ @@ -400,7 +382,7 @@ mod test { { let leaf = Node::from_leaf_index(5); let (path, _): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_path = vec![ @@ -415,7 +397,7 @@ mod test { { let leaf = Node::from_leaf_index(6); let (path, _): (Vec, Vec) = root - 
.as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_path = vec![ @@ -430,7 +412,7 @@ mod test { { let leaf = Node::from_leaf_index(7); let (path, _): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_path = vec![ @@ -466,7 +448,7 @@ mod test { { let leaf = Node::from_leaf_index(0); let (_, side): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_side = vec![ @@ -481,7 +463,7 @@ mod test { { let leaf = Node::from_leaf_index(1); let (_, side): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_side = vec![ @@ -496,7 +478,7 @@ mod test { { let leaf = Node::from_leaf_index(2); let (_, side): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_side = vec![ @@ -511,7 +493,7 @@ mod test { { let leaf = Node::from_leaf_index(3); let (_, side): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_side = vec![ @@ -526,7 +508,7 @@ mod test { { let leaf = Node::from_leaf_index(4); let (_, side): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_side = vec![ @@ -541,7 +523,7 @@ mod test { { let leaf = Node::from_leaf_index(5); let (_, side): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_side = vec![ @@ -556,7 +538,7 @@ mod test { { let 
leaf = Node::from_leaf_index(6); let (_, side): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_side = vec![ @@ -571,7 +553,7 @@ mod test { { let leaf = Node::from_leaf_index(7); let (_, side): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); let expected_side = vec![ @@ -591,7 +573,7 @@ mod test { let leaf = Node::from_leaf_index(4); // 0b0100 let (path, _): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); @@ -612,7 +594,7 @@ mod test { let leaf = Node::from_leaf_index(61); // 0b00111101 let (path, _): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); @@ -637,7 +619,7 @@ mod test { let leaf = Node::from_leaf_index(0); let (path, side): (Vec, Vec) = root - .as_path_iter(&leaf.leaf_key()) + .as_path_iter(leaf.leaf_key()) .map(|(path, side)| (path.unwrap(), side.unwrap())) .unzip(); diff --git a/fuel-merkle/src/common/position.rs b/fuel-merkle/src/common/position.rs index cd17a38b7d..929f74718d 100644 --- a/fuel-merkle/src/common/position.rs +++ b/fuel-merkle/src/common/position.rs @@ -1,5 +1,6 @@ use crate::common::{ node::{ + ChildResult, Node, ParentNode, }, @@ -8,14 +9,6 @@ use crate::common::{ }; use core::convert::Infallible; -use super::{ - node::{ - ChildError, - ChildResult, - }, - path::Side, -}; - /// # Position /// /// A `Position` represents a node's position in a binary tree by encapsulating @@ -97,6 +90,9 @@ use super::{ #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Position(u64); +const LEFT_CHILD_DIRECTION: i64 = -1; +const RIGHT_CHILD_DIRECTION: i64 = 1; + impl Position { pub fn in_order_index(self) -> u64 { 
self.0 @@ -114,86 +110,43 @@ impl Position { /// Construct a position from a leaf index. The in-order index corresponding /// to the leaf index will always equal the leaf index multiplied by 2. - /// Panics if index is too large to fit into u64. - pub fn from_leaf_index(index: u64) -> Option { - Some(Position(index.checked_mul(2)?)) + pub fn from_leaf_index(index: u64) -> Self { + Position(index * 2) } - #[cfg(test)] - pub(crate) fn from_leaf_index_unwrap(index: u64) -> Self { - Self::from_leaf_index(index).expect("Index too large") + /// The sibling position. + /// A position shares the same parent and height as its sibling. + pub fn sibling(self) -> Self { + let shift = 1 << (self.height() + 1); + let index = self.in_order_index() as i64 + shift * self.direction(); + Self::from_in_order_index(index as u64) } /// The parent position. /// The parent position has a height less 1 relative to this position. - pub fn parent(self) -> Result { - let shift = 1u64 - .checked_shl(self.height()) - .ok_or(GetNodeError::CannotExist)?; - let this = self.in_order_index(); - Ok(Self::from_in_order_index(match self.orientation()? { - Side::Left => this.checked_sub(shift).ok_or(GetNodeError::CannotExist)?, - Side::Right => this.checked_add(shift).ok_or(GetNodeError::CannotExist)?, - })) - } - - /// The sibling position. - /// A position shares the same parent and height as its sibling. - #[cfg(test)] - pub fn sibling(self) -> Result { - #[allow(clippy::arithmetic_side_effects)] // height() <= 64 - let shift = 1u64 - .checked_shl(self.height() + 1) - .ok_or(GetNodeError::CannotExist)?; - let this = self.in_order_index(); - Ok(Self::from_in_order_index(match self.orientation()? 
{ - Side::Left => this.checked_sub(shift).ok_or(GetNodeError::CannotExist)?, - Side::Right => this.checked_add(shift).ok_or(GetNodeError::CannotExist)?, - })) + pub fn parent(self) -> Self { + let shift = 1 << self.height(); + let index = self.in_order_index() as i64 + shift * self.direction(); + Self::from_in_order_index(index as u64) } /// The uncle position. /// The uncle position is the sibling of the parent and has a height less 1 /// relative to this position. - #[cfg(test)] - pub fn uncle(self) -> Result { - self.parent()?.sibling() + pub fn uncle(self) -> Self { + self.parent().sibling() } - /// The child position of the current position given by the direction. - /// A child position has a height less 1 than the current position. - /// - /// A child position is calculated as a function of the current position's - /// index and height, and the supplied direction. The left child - /// position has the in-order index arriving before the current index; - /// the right child position has the in-order index arriving after the - /// current index. - pub fn child(self, side: Side) -> Result { - if !self.is_node() { - return Err(GetNodeError::IsLeaf); - } - let shift = 1u64 - .checked_shl( - self.height() - .checked_sub(1) - .ok_or(GetNodeError::CannotExist)?, - ) - .ok_or(GetNodeError::CannotExist)?; - let this = self.in_order_index(); - Ok(Self::from_in_order_index(match side { - Side::Left => this.checked_sub(shift).ok_or(GetNodeError::CannotExist)?, - Side::Right => this.checked_add(shift).ok_or(GetNodeError::CannotExist)?, - })) - } - - /// Returns the left child of the current position. - pub fn left_child(self) -> Result { - self.child(Side::Left) - } - - /// Returns the right child of the current position. - pub fn right_child(self) -> Result { - self.child(Side::Right) + /// The left child position. + /// See [child](Self::child). + pub fn left_child(self) -> Self { + self.child(LEFT_CHILD_DIRECTION) + } + + /// The right child position. 
+ /// See [child](Self::child). + pub fn right_child(self) -> Self { + self.child(RIGHT_CHILD_DIRECTION) } /// The height of the index in a binary tree. @@ -245,11 +198,31 @@ impl Position { /// tree is defined by the `leaves_count` parameter and constrains the /// path. See [PositionPath](crate::common::PositionPath). pub fn path(self, leaf: &Self, leaves_count: u64) -> PositionPath { - debug_assert!(leaves_count > 0); PositionPath::new(self, *leaf, leaves_count) } + // PRIVATE + + /// The child position of the current position given by the direction. + /// A direction of `-1` denotes the left child. A direction of `+1` denotes + /// the right child. A child position has a height less 1 than the + /// current position. + /// + /// A child position is calculated as a function of the current position's + /// index and height, and the supplied direction. The left child + /// position has the in-order index arriving before the current index; + /// the right child position has the in-order index arriving after the + /// current index. + fn child(self, direction: i64) -> Self { + assert!(self.is_node()); + let shift = 1 << (self.height() - 1); + let index = self.in_order_index() as i64 + shift * direction; + Self::from_in_order_index(index as u64) + } + /// Orientation of the position index relative to its parent. + /// Returns 0 if the index is left of its parent. + /// Returns 1 if the index is right of its parent. 
/// /// The orientation is determined by the reading the `n`th rightmost digit /// of the index's binary value, where `n` = the height of the position @@ -258,23 +231,25 @@ impl Position { /// /// | Index (Dec) | Index (Bin) | Height | Orientation | /// |-------------|-------------|--------|-------------| - /// | 0 | 0000 | 0 | L | - /// | 2 | 0010 | 0 | R | - /// | 4 | 0100 | 0 | L | - /// | 6 | 0110 | 0 | R | - /// | 1 | 0001 | 1 | L | - /// | 5 | 0101 | 1 | R | - /// | 9 | 1001 | 1 | L | - /// | 13 | 1101 | 1 | R | - fn orientation(self) -> Result { - #[allow(clippy::arithmetic_side_effects)] // height() <= 64 - let shift = 1u64 - .checked_shl(self.height() + 1) - .ok_or(GetNodeError::CannotExist)?; - Ok(match self.in_order_index() & shift { - 0 => Side::Right, - _ => Side::Left, - }) + /// | 0 | 0000 | 0 | 0 | + /// | 2 | 0010 | 0 | 1 | + /// | 4 | 0100 | 0 | 0 | + /// | 6 | 0110 | 0 | 1 | + /// | 1 | 0001 | 1 | 0 | + /// | 5 | 0101 | 1 | 1 | + /// | 9 | 1001 | 1 | 0 | + /// | 13 | 1101 | 1 | 1 | + fn orientation(self) -> u8 { + let shift = 1 << (self.height() + 1); + (self.in_order_index() & shift != 0) as u8 + } + + /// The "direction" to travel to reach the parent node. + /// Returns +1 if the index is left of its parent. + /// Returns -1 if the index is right of its parent. + fn direction(self) -> i64 { + let scale = self.orientation() as i64 * 2 - 1; // Scale [0, 1] to [-1, 1]; + -scale } } @@ -285,11 +260,6 @@ impl Node for Position { Position::height(*self) } - #[allow(clippy::arithmetic_side_effects, clippy::cast_possible_truncation)] // const - fn key_size_bits() -> u32 { - core::mem::size_of::() as u32 * 8 - } - fn leaf_key(&self) -> Self::Key { Position::leaf_index(*self).to_be_bytes() } @@ -303,32 +273,15 @@ impl Node for Position { } } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum GetNodeError { - /// The operation requires a node that can have children. - /// This is a leaf, and cannot have children. 
- IsLeaf, - /// The requested node cannot exists as it would be out of bounds. - CannotExist, -} - impl ParentNode for Position { type Error = Infallible; fn left_child(&self) -> ChildResult { - match self.child(Side::Left) { - Ok(child) => Ok(child), - Err(GetNodeError::IsLeaf) => Err(ChildError::NodeIsLeaf), - Err(GetNodeError::CannotExist) => Err(ChildError::ChildCannotExist), - } + Ok(Position::left_child(*self)) } fn right_child(&self) -> ChildResult { - match self.child(Side::Right) { - Ok(child) => Ok(child), - Err(GetNodeError::IsLeaf) => Err(ChildError::NodeIsLeaf), - Err(GetNodeError::CannotExist) => Err(ChildError::ChildCannotExist), - } + Ok(Position::right_child(*self)) } } @@ -344,11 +297,11 @@ mod test { } #[test] - fn test_from_leaf_index_unwrap() { - assert_eq!(Position::from_leaf_index_unwrap(0).in_order_index(), 0); - assert_eq!(Position::from_leaf_index_unwrap(1).in_order_index(), 2); + fn test_from_leaf_index() { + assert_eq!(Position::from_leaf_index(0).in_order_index(), 0); + assert_eq!(Position::from_leaf_index(1).in_order_index(), 2); assert_eq!( - Position::from_leaf_index_unwrap((!0u64) >> 1).in_order_index(), + Position::from_leaf_index((!0u64) >> 1).in_order_index(), !0u64 - 1 ); } @@ -357,14 +310,14 @@ mod test { fn test_equality_returns_true_for_two_equal_positions() { assert_eq!(Position(0), Position(0)); assert_eq!(Position::from_in_order_index(0), Position(0)); - assert_eq!(Position::from_leaf_index_unwrap(1), Position(2)); + assert_eq!(Position::from_leaf_index(1), Position(2)); } #[test] fn test_equality_returns_false_for_two_unequal_positions() { assert_ne!(Position(0), Position(1)); assert_ne!(Position::from_in_order_index(0), Position(1)); - assert_ne!(Position::from_leaf_index_unwrap(0), Position(2)); + assert_ne!(Position::from_leaf_index(0), Position(2)); } #[test] @@ -384,57 +337,57 @@ mod test { #[test] fn test_sibling() { - assert_eq!(Position(0).sibling(), Ok(Position(2))); - assert_eq!(Position(2).sibling(), 
Ok(Position(0))); + assert_eq!(Position(0).sibling(), Position(2)); + assert_eq!(Position(2).sibling(), Position(0)); - assert_eq!(Position(1).sibling(), Ok(Position(5))); - assert_eq!(Position(5).sibling(), Ok(Position(1))); + assert_eq!(Position(1).sibling(), Position(5)); + assert_eq!(Position(5).sibling(), Position(1)); - assert_eq!(Position(3).sibling(), Ok(Position(11))); - assert_eq!(Position(11).sibling(), Ok(Position(3))); + assert_eq!(Position(3).sibling(), Position(11)); + assert_eq!(Position(11).sibling(), Position(3)); } #[test] fn test_parent() { - assert_eq!(Position(0).parent(), Ok(Position(1))); - assert_eq!(Position(2).parent(), Ok(Position(1))); + assert_eq!(Position(0).parent(), Position(1)); + assert_eq!(Position(2).parent(), Position(1)); - assert_eq!(Position(1).parent(), Ok(Position(3))); - assert_eq!(Position(5).parent(), Ok(Position(3))); + assert_eq!(Position(1).parent(), Position(3)); + assert_eq!(Position(5).parent(), Position(3)); - assert_eq!(Position(3).parent(), Ok(Position(7))); - assert_eq!(Position(11).parent(), Ok(Position(7))); + assert_eq!(Position(3).parent(), Position(7)); + assert_eq!(Position(11).parent(), Position(7)); } #[test] fn test_uncle() { - assert_eq!(Position(0).uncle(), Ok(Position(5))); - assert_eq!(Position(2).uncle(), Ok(Position(5))); - assert_eq!(Position(4).uncle(), Ok(Position(1))); - assert_eq!(Position(6).uncle(), Ok(Position(1))); + assert_eq!(Position(0).uncle(), Position(5)); + assert_eq!(Position(2).uncle(), Position(5)); + assert_eq!(Position(4).uncle(), Position(1)); + assert_eq!(Position(6).uncle(), Position(1)); - assert_eq!(Position(1).uncle(), Ok(Position(11))); - assert_eq!(Position(5).uncle(), Ok(Position(11))); - assert_eq!(Position(9).uncle(), Ok(Position(3))); - assert_eq!(Position(13).uncle(), Ok(Position(3))); + assert_eq!(Position(1).uncle(), Position(11)); + assert_eq!(Position(5).uncle(), Position(11)); + assert_eq!(Position(9).uncle(), Position(3)); + 
assert_eq!(Position(13).uncle(), Position(3)); } #[test] fn test_left_child() { - assert_eq!(Position(7).left_child(), Ok(Position(3))); - assert_eq!(Position(3).left_child(), Ok(Position(1))); - assert_eq!(Position(1).left_child(), Ok(Position(0))); - assert_eq!(Position(11).left_child(), Ok(Position(9))); - assert_eq!(Position(9).left_child(), Ok(Position(8))); + assert_eq!(Position(7).left_child(), Position(3)); + assert_eq!(Position(3).left_child(), Position(1)); + assert_eq!(Position(1).left_child(), Position(0)); + assert_eq!(Position(11).left_child(), Position(9)); + assert_eq!(Position(9).left_child(), Position(8)); } #[test] fn test_right_child() { - assert_eq!(Position(7).right_child(), Ok(Position(11))); - assert_eq!(Position(3).right_child(), Ok(Position(5))); - assert_eq!(Position(1).right_child(), Ok(Position(2))); - assert_eq!(Position(11).right_child(), Ok(Position(13))); - assert_eq!(Position(9).right_child(), Ok(Position(10))); + assert_eq!(Position(7).right_child(), Position(11)); + assert_eq!(Position(3).right_child(), Position(5)); + assert_eq!(Position(1).right_child(), Position(2)); + assert_eq!(Position(11).right_child(), Position(13)); + assert_eq!(Position(9).right_child(), Position(10)); } #[test] diff --git a/fuel-merkle/src/common/position_path.rs b/fuel-merkle/src/common/position_path.rs index 552a041156..5b0a41c85d 100644 --- a/fuel-merkle/src/common/position_path.rs +++ b/fuel-merkle/src/common/position_path.rs @@ -5,8 +5,6 @@ use crate::common::{ Position, }; -use super::path::Side; - /// # PositionPath /// /// A PositionPath represents the path of positions created by traversing a @@ -30,7 +28,6 @@ pub struct PositionPath { impl PositionPath { pub fn new(root: Position, leaf: Position, leaves_count: u64) -> Self { - debug_assert!(leaves_count > 0); Self { root, leaf, @@ -50,17 +47,11 @@ pub struct PositionPathIter { } impl PositionPathIter { - /// Panics if leaves_count is zero, as the tree is not valid pub fn new(root: Position, 
leaf: Position, leaves_count: u64) -> Self { Self { - rightmost_position: Position::from_leaf_index( - leaves_count - .checked_sub(1) - .expect("Path to a tree without leaves"), - ) - .unwrap(), + rightmost_position: Position::from_leaf_index(leaves_count - 1), current_side_node: None, - path_iter: root.as_path_iter(&leaf.leaf_key()), + path_iter: root.as_path_iter(leaf.leaf_key()), } } } @@ -108,7 +99,7 @@ impl Iterator for PositionPathIter { // correct side node will always be a leftward descendent of // this invalid side node. while side.in_order_index() > self.rightmost_position.in_order_index() { - side = side.child(Side::Left).expect("Verified above"); + side = side.left_child() } return Some((path, side)) @@ -133,7 +124,7 @@ mod test { #[test] fn test_path_set_returns_path_and_side_nodes_for_1_leaf() { let root = Position::from_in_order_index(0); - let leaf = Position::from_leaf_index_unwrap(0); + let leaf = Position::from_leaf_index(0); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 1).iter().unzip(); let expected_path = [Position::from_in_order_index(0)]; @@ -154,7 +145,7 @@ mod test { let root = Position::from_in_order_index(3); - let leaf = Position::from_leaf_index_unwrap(0); + let leaf = Position::from_leaf_index(0); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 4).iter().unzip(); let expected_path = [ @@ -170,7 +161,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(1); + let leaf = Position::from_leaf_index(1); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 4).iter().unzip(); let expected_path = [ @@ -186,7 +177,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(2); + let leaf = Position::from_leaf_index(2); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 4).iter().unzip(); let 
expected_path = [ @@ -202,7 +193,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(3); + let leaf = Position::from_leaf_index(3); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 4).iter().unzip(); let expected_path = [ @@ -233,7 +224,7 @@ mod test { let root = Position::from_in_order_index(7); - let leaf = Position::from_leaf_index_unwrap(0); + let leaf = Position::from_leaf_index(0); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 5).iter().unzip(); let expected_path = [ @@ -251,7 +242,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(1); + let leaf = Position::from_leaf_index(1); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 5).iter().unzip(); let expected_path = [ @@ -269,7 +260,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(2); + let leaf = Position::from_leaf_index(2); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 5).iter().unzip(); let expected_path = [ @@ -287,7 +278,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(3); + let leaf = Position::from_leaf_index(3); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 5).iter().unzip(); let expected_path = [ @@ -305,7 +296,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(4); + let leaf = Position::from_leaf_index(4); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 5).iter().unzip(); let expected_path = [ @@ -336,7 +327,7 @@ mod test { let root = Position::from_in_order_index(7); - let leaf = 
Position::from_leaf_index_unwrap(0); + let leaf = Position::from_leaf_index(0); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 6).iter().unzip(); let expected_path = [ @@ -354,7 +345,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(1); + let leaf = Position::from_leaf_index(1); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 6).iter().unzip(); let expected_path = [ @@ -372,7 +363,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(2); + let leaf = Position::from_leaf_index(2); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 6).iter().unzip(); let expected_path = [ @@ -390,7 +381,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(3); + let leaf = Position::from_leaf_index(3); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 6).iter().unzip(); let expected_path = [ @@ -408,7 +399,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(4); + let leaf = Position::from_leaf_index(4); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 6).iter().unzip(); let expected_path = [ @@ -424,7 +415,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(5); + let leaf = Position::from_leaf_index(5); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 6).iter().unzip(); let expected_path = [ @@ -460,7 +451,7 @@ mod test { let root = Position::from_in_order_index(7); - let leaf = Position::from_leaf_index_unwrap(0); + let leaf = Position::from_leaf_index(0); let (path_positions, side_positions): (Vec, 
Vec) = root.path(&leaf, 7).iter().unzip(); let expected_path = [ @@ -478,7 +469,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(1); + let leaf = Position::from_leaf_index(1); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 7).iter().unzip(); let expected_path = [ @@ -496,7 +487,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(2); + let leaf = Position::from_leaf_index(2); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 7).iter().unzip(); let expected_path = [ @@ -514,7 +505,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(3); + let leaf = Position::from_leaf_index(3); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 7).iter().unzip(); let expected_path = [ @@ -532,7 +523,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(4); + let leaf = Position::from_leaf_index(4); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 7).iter().unzip(); let expected_path = [ @@ -550,7 +541,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(5); + let leaf = Position::from_leaf_index(5); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 7).iter().unzip(); let expected_path = [ @@ -568,7 +559,7 @@ mod test { assert_eq!(path_positions, expected_path); assert_eq!(side_positions, expected_side); - let leaf = Position::from_leaf_index_unwrap(6); + let leaf = Position::from_leaf_index(6); let (path_positions, side_positions): (Vec, Vec) = root.path(&leaf, 7).iter().unzip(); let expected_path = [ diff --git 
a/fuel-merkle/src/common/subtree.rs b/fuel-merkle/src/common/subtree.rs new file mode 100644 index 0000000000..b0a281daf3 --- /dev/null +++ b/fuel-merkle/src/common/subtree.rs @@ -0,0 +1,40 @@ +use alloc::boxed::Box; + +#[derive(Debug, Clone)] +pub struct Subtree { + node: T, + next: Option>>, +} + +impl Subtree { + pub fn new(node: T, next: Option>) -> Self { + Self { + node, + next: next.map(Box::new), + } + } + + pub fn next(&self) -> Option<&Subtree> { + self.next.as_ref().map(AsRef::as_ref) + } + + pub fn next_mut(&mut self) -> Option<&mut Subtree> { + self.next.as_mut().map(AsMut::as_mut) + } + + pub fn take_next(&mut self) -> Option> { + self.next.take().map(|next| *next) + } + + pub fn node(&self) -> &T { + &self.node + } + + pub fn node_mut(&mut self) -> &mut T { + &mut self.node + } + + pub fn next_node(&self) -> Option<&T> { + self.next().map(|next| next.node()) + } +} diff --git a/fuel-merkle/src/lib.rs b/fuel-merkle/src/lib.rs index 3298fe0c39..3bb72df028 100644 --- a/fuel-merkle/src/lib.rs +++ b/fuel-merkle/src/lib.rs @@ -1,13 +1,7 @@ #![cfg_attr(not(feature = "std"), no_std)] #![allow(clippy::bool_assert_comparison, clippy::identity_op)] #![deny(unused_crate_dependencies)] -#![deny( - clippy::arithmetic_side_effects, - clippy::cast_sign_loss, - clippy::cast_possible_truncation, - clippy::cast_possible_wrap, - clippy::string_slice -)] +#![deny(clippy::cast_possible_truncation)] #[cfg_attr(test, macro_use)] extern crate alloc; @@ -16,6 +10,7 @@ pub mod binary; pub mod common; pub mod sparse; pub mod storage; +pub mod sum; #[cfg(test)] mod tests; diff --git a/fuel-merkle/src/sparse.rs b/fuel-merkle/src/sparse.rs index c8fd73c506..7354c8df11 100644 --- a/fuel-merkle/src/sparse.rs +++ b/fuel-merkle/src/sparse.rs @@ -1,20 +1,10 @@ -mod hash; -mod merkle_tree; -mod primitive; - -pub(crate) use hash::zero_sum; - -pub use merkle_tree::{ - MerkleTree, - MerkleTreeError, - MerkleTreeKey, -}; -pub use primitive::Primitive; +pub mod generic; pub mod in_memory; 
-pub mod proof; - -use crate::common::Bytes32; -pub const fn empty_sum() -> &'static Bytes32 { - zero_sum() -} +// Define default Merkle Tree structures as concrete implementations of generic +// types, using 32 byte key sizes +pub type MerkleTree = + generic::MerkleTree<32, TableType, StorageType>; +pub type MerkleTreeError = generic::MerkleTreeError<32, StorageError>; +pub type MerkleTreeKey = generic::MerkleTreeKey<32>; +pub type Primitive = generic::Primitive<32>; diff --git a/fuel-merkle/src/sparse/generic/branch.rs b/fuel-merkle/src/sparse/generic/branch.rs new file mode 100644 index 0000000000..e1007860ec --- /dev/null +++ b/fuel-merkle/src/sparse/generic/branch.rs @@ -0,0 +1,94 @@ +use crate::{ + common::{ + path::ComparablePath, + Bytes, + }, + sparse::generic::{ + Node, + Primitive, + }, +}; +use fuel_storage::{ + Mappable, + StorageMutate, +}; + +use core::iter; + +pub(crate) struct Branch { + pub bits: Bytes, + pub node: Node, +} + +impl From> for Branch { + fn from(leaf: Node) -> Self { + Self { + bits: *leaf.leaf_key(), + node: leaf, + } + } +} + +pub(crate) fn merge_branches( + storage: &mut Storage, + mut left_branch: Branch, + mut right_branch: Branch, +) -> Result, Storage::Error> +where + Storage: StorageMutate
, + Table: Mappable< + Key = Bytes, + Value = Primitive, + OwnedValue = Primitive, + >, +{ + let branch = if left_branch.node.is_leaf() && right_branch.node.is_leaf() { + let parent_depth = left_branch.node.common_path_length(&right_branch.node); + let parent_height = Node::::max_height() - parent_depth; + let node = + Node::create_node(&left_branch.node, &right_branch.node, parent_height); + Branch { + bits: left_branch.bits, + node, + } + } else { + let ancestor_depth = left_branch.bits.common_path_length(&right_branch.bits); + let ancestor_height = Node::::max_height() - ancestor_depth; + if right_branch.node.is_node() { + let mut current_node = right_branch.node; + let path = right_branch.bits; + let parent_height = current_node.height() + 1; + let stale_depth = ancestor_height - parent_height; + let placeholders = + iter::repeat(Node::create_placeholder()).take(stale_depth as usize); + for placeholder in placeholders { + current_node = + Node::create_node_on_path(&path, ¤t_node, &placeholder); + storage.insert(current_node.hash(), ¤t_node.as_ref().into())?; + } + right_branch.node = current_node; + } + if left_branch.node.is_node() { + let mut current_node = left_branch.node; + let path = left_branch.bits; + let parent_height = current_node.height() + 1; + let stale_depth = ancestor_height - parent_height; + let placeholders = + iter::repeat(Node::create_placeholder()).take(stale_depth as usize); + for placeholder in placeholders { + current_node = + Node::create_node_on_path(&path, ¤t_node, &placeholder); + storage.insert(current_node.hash(), ¤t_node.as_ref().into())?; + } + left_branch.node = current_node; + } + let node = + Node::create_node(&left_branch.node, &right_branch.node, ancestor_height); + Branch { + bits: left_branch.bits, + node, + } + }; + storage.insert(branch.node.hash(), &branch.node.as_ref().into())?; + Ok(branch) +} diff --git a/fuel-merkle/src/sparse/generic/merkle_tree.rs b/fuel-merkle/src/sparse/generic/merkle_tree.rs index 
83bb8f5912..d4d486db4a 100644 --- a/fuel-merkle/src/sparse/generic/merkle_tree.rs +++ b/fuel-merkle/src/sparse/generic/merkle_tree.rs @@ -1,33 +1,16 @@ -mod branch; -mod node; - -use branch::{ - merge_branches, - Branch, -}; -use node::{ - Node, - StorageNode, - StorageNodeError, -}; - use crate::{ common::{ error::DeserializeError, node::ChildError, AsPathIterator, - Bytes32, }, - sparse::{ - empty_sum, - proof::{ - ExclusionLeaf, - ExclusionLeafData, - ExclusionProof, - InclusionProof, - Proof, + sparse::generic::{ + node::{ + Node, + StorageNode, + StorageNodeError, }, - Primitive, + primitive::Primitive, }, storage::{ Mappable, @@ -35,27 +18,38 @@ use crate::{ StorageMutate, }, }; -use alloc::{ - format, - vec::Vec, + +use crate::{ + common::{ + Bytes, + Bytes32, + }, + sparse::generic::{ + branch::{ + merge_branches, + Branch, + }, + hash::zero_sum, + }, }; +use alloc::vec::Vec; use core::{ - fmt::{ - Debug, - Formatter, - }, + cmp, iter, marker::PhantomData, - ops::Deref, }; +fn truncate(bytes: &[u8]) -> Bytes { + (&bytes[0..N]).try_into().unwrap() +} + #[derive(Debug, Clone, derive_more::Display)] -pub enum MerkleTreeError { +pub enum MerkleTreeError { #[display( fmt = "cannot load node with key {}; the key is not found in storage", "hex::encode(_0)" )] - LoadError(Bytes32), + LoadError(Bytes), #[display(fmt = "{}", _0)] StorageError(StorageError), @@ -64,22 +58,23 @@ pub enum MerkleTreeError { DeserializeError(DeserializeError), #[display(fmt = "{}", _0)] - ChildError(ChildError>), + ChildError(ChildError, StorageNodeError>), } -impl From for MerkleTreeError { - fn from(err: StorageError) -> MerkleTreeError { +impl From + for MerkleTreeError +{ + fn from(err: StorageError) -> MerkleTreeError { MerkleTreeError::StorageError(err) } } /// The safe Merkle tree storage key prevents Merkle tree structure manipulations. /// The type contains only one constructor that hashes the storage key. 
-#[derive(Clone, Copy, PartialEq, Eq, Hash)] -#[cfg_attr(test, derive(proptest_derive::Arbitrary))] -pub struct MerkleTreeKey(Bytes32); +#[derive(Debug, Clone, Copy)] +pub struct MerkleTreeKey(Bytes); -impl MerkleTreeKey { +impl MerkleTreeKey { /// The safe way to create a `Self`. It hashes the `storage_key`, making /// it entirely random and preventing SMT structure manipulation. pub fn new(storage_key: B) -> Self @@ -89,9 +84,9 @@ impl MerkleTreeKey { use digest::Digest; let mut hash = sha2::Sha256::new(); hash.update(storage_key.as_ref()); - let hash = hash.finalize().into(); - - Self(hash) + let hash: Bytes32 = hash.finalize().into(); + let truncated = truncate::(&hash); + Self(truncated) } /// Unsafe analog to create a `Self` that doesn't hash the `storage_key` unlike @@ -103,7 +98,7 @@ impl MerkleTreeKey { /// was randomly generated like `ContractId` or `AssetId`. pub unsafe fn convert(storage_key: B) -> Self where - B: Into, + B: Into>, { Self(storage_key.into()) } @@ -111,64 +106,33 @@ impl MerkleTreeKey { #[cfg(any(test, feature = "test-helpers"))] pub fn new_without_hash(storage_key: B) -> Self where - B: Into, + B: Into>, { unsafe { Self::convert(storage_key) } } } -impl Debug for MerkleTreeKey { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - f.write_str(&format!("MerkleTreeKey({})", hex::encode(self.0))) - } -} - -impl From for Bytes32 { - fn from(value: MerkleTreeKey) -> Self { +impl From> for Bytes { + fn from(value: MerkleTreeKey) -> Self { value.0 } } -impl AsRef<[u8]> for MerkleTreeKey { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl AsRef for MerkleTreeKey { - fn as_ref(&self) -> &Bytes32 { - &self.0 - } -} - -impl Deref for MerkleTreeKey { - type Target = Bytes32; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -#[cfg(any(test, feature = "test-helpers"))] -impl From for MerkleTreeKey { - fn from(value: Bytes32) -> Self { - Self::new_without_hash(value) - } -} - #[derive(Debug)] -pub struct MerkleTree { - 
root_node: Node, +pub struct MerkleTree { + root_node: Node, storage: StorageType, phantom_table: PhantomData, } -impl MerkleTree { - pub const fn empty_root() -> &'static Bytes32 { - empty_sum() +impl + MerkleTree +{ + pub fn empty_root() -> &'static Bytes { + zero_sum() } - pub fn root(&self) -> Bytes32 { + pub fn root(&self) -> Bytes { *self.root_node().hash() } @@ -180,19 +144,26 @@ impl MerkleTree { &self.storage } - fn root_node(&self) -> &Node { + // PRIVATE + + fn root_node(&self) -> &Node { &self.root_node } - fn set_root_node(&mut self, node: Node) { - debug_assert!(node.is_leaf() || node.height() == Node::max_height()); + fn set_root_node(&mut self, node: Node) { + debug_assert!(node.is_leaf() || node.height() == Node::::max_height()); self.root_node = node; } } -impl MerkleTree +impl + MerkleTree where - TableType: Mappable, + TableType: Mappable< + Key = Bytes, + Value = Primitive, + OwnedValue = Primitive, + >, StorageType: StorageInspect, { pub fn new(storage: StorageType) -> Self { @@ -205,8 +176,8 @@ where pub fn load( storage: StorageType, - root: &Bytes32, - ) -> Result> { + root: &Bytes, + ) -> Result> { if root == Self::empty_root() { let tree = Self::new(storage); Ok(tree) @@ -226,23 +197,29 @@ where } } + // PRIVATE + fn path_set( &self, - leaf_key: &Bytes32, - ) -> Result<(Vec, Vec), MerkleTreeError> { + leaf_key: Bytes, + ) -> Result< + (Vec>, Vec>), + MerkleTreeError, + > { let root_node = self.root_node().clone(); let root_storage_node = StorageNode::new(&self.storage, root_node); - let (mut path_nodes, mut side_nodes): (Vec, Vec) = root_storage_node - .as_path_iter(leaf_key) - .map(|(path_node, side_node)| { - Ok(( - path_node.map_err(MerkleTreeError::ChildError)?.into_node(), - side_node.map_err(MerkleTreeError::ChildError)?.into_node(), - )) - }) - .collect::, MerkleTreeError>>()? 
- .into_iter() - .unzip(); + let (mut path_nodes, mut side_nodes): (Vec>, Vec>) = + root_storage_node + .as_path_iter(leaf_key) + .map(|(path_node, side_node)| { + Ok(( + path_node.map_err(MerkleTreeError::ChildError)?.into_node(), + side_node.map_err(MerkleTreeError::ChildError)?.into_node(), + )) + }) + .collect::, MerkleTreeError>>()? + .into_iter() + .unzip(); path_nodes.reverse(); side_nodes.reverse(); side_nodes.pop(); // The last element in the side nodes list is the @@ -252,9 +229,14 @@ where } } -impl MerkleTree +impl + MerkleTree where - TableType: Mappable, + TableType: Mappable< + Key = Bytes, + Value = Primitive, + OwnedValue = Primitive, + >, StorageType: StorageMutate, { /// Build a sparse Merkle tree from a set of key-value pairs. This is @@ -270,18 +252,18 @@ where ) -> Result where I: Iterator, - B: Into, + B: Into>, D: AsRef<[u8]>, { let sorted = set .into_iter() .map(|(k, v)| (k.into(), v)) - .collect::>(); + .collect::, D>>(); let mut branches = sorted .iter() .filter(|(_, value)| !value.as_ref().is_empty()) .map(|(key, data)| Node::create_leaf(key, data)) - .map(Into::::into) + .map(Into::>::into) .collect::>(); for branch in branches.iter() { @@ -301,7 +283,7 @@ where return Ok(tree) } - let mut nodes = Vec::::with_capacity(branches.len()); + let mut nodes = Vec::>::with_capacity(branches.len()); let mut proximities = Vec::::with_capacity(branches.len()); // Building the tree starts by merging all leaf nodes where possible. @@ -326,8 +308,7 @@ where // possible. while let Some(left) = branches.pop() { if let Some(current) = nodes.last() { - #[allow(clippy::cast_possible_truncation)] // Key is 32 bytes - let left_proximity = current.node.common_path_length(&left.node) as u32; + let left_proximity = current.node.common_path_length(&left.node); while { // The current node's proximity to its right neighbor was // stored previously. 
We now compare the distances between @@ -388,8 +369,7 @@ where let mut node = top.node; let path = top.bits; let height = node.height(); - #[allow(clippy::arithmetic_side_effects)] // height <= max_height - let depth = Node::max_height() - height; + let depth = Node::::max_height() - height; let placeholders = iter::repeat(Node::create_placeholder()).take(depth as usize); for placeholder in placeholders { node = Node::create_node_on_path(&path, &node, &placeholder); @@ -406,9 +386,9 @@ where pub fn update( &mut self, - key: MerkleTreeKey, + key: MerkleTreeKey, data: &[u8], - ) -> Result<(), MerkleTreeError> { + ) -> Result<(), MerkleTreeError> { if data.is_empty() { // If the data is empty, this signifies a delete operation for the // given key. @@ -416,14 +396,15 @@ where return Ok(()) } - let leaf_node = Node::create_leaf(key.as_ref(), data); + let key = key.into(); + let leaf_node = Node::create_leaf(&key, data); self.storage .insert(leaf_node.hash(), &leaf_node.as_ref().into())?; if self.root_node().is_placeholder() { self.set_root_node(leaf_node); } else { - let (path_nodes, side_nodes) = self.path_set(key.as_ref())?; + let (path_nodes, side_nodes) = self.path_set(key)?; self.update_with_path_set( &leaf_node, path_nodes.as_slice(), @@ -436,21 +417,22 @@ where pub fn delete( &mut self, - key: MerkleTreeKey, - ) -> Result<(), MerkleTreeError> { + key: MerkleTreeKey, + ) -> Result<(), MerkleTreeError> { if self.root() == *Self::empty_root() { // The zero root signifies that all leaves are empty, including the // given key. 
return Ok(()) } - let (path_nodes, side_nodes): (Vec, Vec) = - self.path_set(key.as_ref())?; + let key = key.into(); + let (path_nodes, side_nodes): (Vec>, Vec>) = + self.path_set(key)?; match path_nodes.first() { - Some(node) if *node.leaf_key() == key.as_ref() => { + Some(node) if node.leaf_key() == &key => { self.delete_with_path_set( - key.as_ref(), + &key, path_nodes.as_slice(), side_nodes.as_slice(), )?; @@ -461,11 +443,13 @@ where Ok(()) } + // PRIVATE + fn update_with_path_set( &mut self, - requested_leaf_node: &Node, - path_nodes: &[Node], - side_nodes: &[Node], + requested_leaf_node: &Node, + path_nodes: &[Node], + side_nodes: &[Node], ) -> Result<(), StorageError> { let path = requested_leaf_node.leaf_key(); let actual_leaf_node = &path_nodes[0]; @@ -508,9 +492,8 @@ where // Merge placeholders let ancestor_depth = requested_leaf_node.common_path_length(actual_leaf_node); - #[allow(clippy::cast_possible_truncation)] // Key is 32 bytes - let placeholders_count = - (ancestor_depth as usize).saturating_sub(side_nodes.len()); + let stale_depth = cmp::max(side_nodes.len(), ancestor_depth as usize); + let placeholders_count = stale_depth - side_nodes.len(); let placeholders = iter::repeat(Node::create_placeholder()).take(placeholders_count); for placeholder in placeholders { @@ -541,9 +524,9 @@ where fn delete_with_path_set( &mut self, - requested_leaf_key: &Bytes32, - path_nodes: &[Node], - side_nodes: &[Node], + requested_leaf_key: &Bytes, + path_nodes: &[Node], + side_nodes: &[Node], ) -> Result<(), StorageError> { for node in path_nodes { self.storage.remove(node.hash())?; @@ -606,104 +589,36 @@ where } } -impl MerkleTree -where - TableType: Mappable, - StorageType: StorageInspect, -{ - pub fn generate_proof( - &self, - key: &MerkleTreeKey, - ) -> Result> { - let path = key.as_ref(); - let (path_nodes, side_nodes) = self.path_set(path)?; - // Identify the closest leaf that is included in the tree to the - // requested leaf. 
The closest leaf, as returned by the path set - // corresponding to the requested leaf, will be the requested leaf - // itself, a different leaf than requested, or a placeholder. - // - // If the closest leaf is the requested leaf, then the requested leaf is - // included in the tree, and we are requesting an inclusion proof. - // Otherwise (i.e, the closest leaf is either another leaf or a - // placeholder), the requested leaf is not in the tree, and we are - // requesting an exclusion proof. - // - let actual_leaf = &path_nodes[0]; - let proof_set = side_nodes - .into_iter() - .map(|side_node| *side_node.hash()) - .collect::>(); - let proof = if !actual_leaf.is_placeholder() && actual_leaf.leaf_key() == path { - // If the requested key is part of the tree, build an inclusion - // proof. - let inclusion_proof = InclusionProof { proof_set }; - Proof::Inclusion(inclusion_proof) - } else { - // If the requested key is not part of the tree, we are verifying - // that the given key is a placeholder, and we must build an - // exclusion proof. When building an exclusion proof, the requested - // leaf is unset and is currently a placeholder. The path to this - // placeholder is designated by the requested leaf's key. - // - // If the closest leaf is a real leaf, and not a placeholder, we can - // build the root upwards using this leaf's key and value. If the - // closest leaf is a placeholder, it has a leaf key and a - // placeholder value (the zero sum). The leaf key of this - // placeholder leaf is unknown (since placeholders do not store - // their leaf key), and by extension, the path from the root to the - // placeholder is also unknown. - // - // However, in both cases, the path defined by the requested - // placeholder is sufficiently close: All branches stemming from the - // point where the paths of the requested placeholder and closest - // leaf diverge are saturated with the closest leaf's hash. 
In the - // case where the closest leaf is a placeholder, this hash is simply - // the zero sum. The hash of any placeholder under this point of - // divergence equates to this hash. - // - let leaf = if actual_leaf.is_placeholder() { - ExclusionLeaf::Placeholder - } else { - ExclusionLeaf::Leaf(ExclusionLeafData { - leaf_key: *actual_leaf.leaf_key(), - leaf_value: *actual_leaf.leaf_data(), - }) - }; - - let exclusion_proof = ExclusionProof { proof_set, leaf }; - Proof::Exclusion(exclusion_proof) - }; - Ok(proof) - } -} - #[cfg(test)] -#[allow(non_snake_case)] mod test { - use super::Node; + use super::{ + MerkleTree, + MerkleTreeError, + MerkleTreeKey, + Node, + Primitive, + }; use crate::{ common::{ - sum, - Bytes32, + Bytes, StorageMap, }, - sparse::{ - empty_sum, - MerkleTree, - MerkleTreeError, - MerkleTreeKey, - Primitive, + sparse::generic::hash::{ + sum, + zero_sum, }, }; use fuel_storage::Mappable; use hex; - fn random_bytes32(rng: &mut R) -> Bytes32 + fn random_bytes(rng: &mut R) -> Bytes where R: rand::Rng + ?Sized, { - let mut bytes = [0u8; 32]; - rng.fill(bytes.as_mut()); + let mut bytes = [0u8; SZ]; + for byte in bytes.as_mut() { + *byte = rng.gen(); + } bytes } @@ -712,13 +627,13 @@ mod test { impl Mappable for TestTable { type Key = Self::OwnedKey; - type OwnedKey = Bytes32; - type OwnedValue = Primitive; + type OwnedKey = Bytes<32>; + type OwnedValue = Primitive<32>; type Value = Self::OwnedValue; } - fn key>(data: B) -> MerkleTreeKey { - MerkleTreeKey::new(data.as_ref()) + fn key>(data: B) -> MerkleTreeKey { + MerkleTreeKey::new_without_hash(sum(data.as_ref())) } #[test] @@ -1246,10 +1161,10 @@ mod test { #[test] fn test_load_returns_an_empty_tree_for_empty_sum_root() { let mut storage = StorageMap::::new(); - let tree = MerkleTree::load(&mut storage, empty_sum()).unwrap(); + let tree = MerkleTree::load(&mut storage, zero_sum()).unwrap(); let root = tree.root(); - assert_eq!(root, *empty_sum()); + assert_eq!(root, *zero_sum()); } #[test] @@ 
-1300,8 +1215,8 @@ mod test { let rng = &mut rand::thread_rng(); let gen = || { Some(( - MerkleTreeKey::new_without_hash(random_bytes32(rng)), - random_bytes32(rng), + MerkleTreeKey::new_without_hash(random_bytes::<_, 32>(rng)), + random_bytes::<_, 32>(rng), )) }; let data = std::iter::from_fn(gen).take(1_000).collect::>(); @@ -1330,8 +1245,8 @@ mod test { let rng = &mut rand::thread_rng(); let gen = || { Some(( - MerkleTreeKey::new_without_hash(random_bytes32(rng)), - random_bytes32(rng), + MerkleTreeKey::new_without_hash(random_bytes::<_, 32>(rng)), + random_bytes::<_, 32>(rng), )) }; let data = std::iter::from_fn(gen).take(0).collect::>(); @@ -1360,8 +1275,8 @@ mod test { let rng = &mut rand::thread_rng(); let gen = || { Some(( - MerkleTreeKey::new_without_hash(random_bytes32(rng)), - random_bytes32(rng), + MerkleTreeKey::new_without_hash(random_bytes::<_, 32>(rng)), + random_bytes::<_, 32>(rng), )) }; let data = std::iter::from_fn(gen).take(1).collect::>(); @@ -1394,12 +1309,12 @@ mod test { key(b"\x00\x00\x00\x02"), ]; let data = [ - (keys[0], random_bytes32(rng)), - (keys[1], random_bytes32(rng)), - (keys[2], random_bytes32(rng)), - (keys[0], random_bytes32(rng)), - (keys[1], random_bytes32(rng)), - (keys[2], random_bytes32(rng)), + (keys[0], random_bytes::<_, 32>(rng)), + (keys[1], random_bytes::<_, 32>(rng)), + (keys[2], random_bytes::<_, 32>(rng)), + (keys[0], random_bytes::<_, 32>(rng)), + (keys[1], random_bytes::<_, 32>(rng)), + (keys[2], random_bytes::<_, 32>(rng)), ]; let expected_root = { @@ -1430,9 +1345,9 @@ mod test { key(b"\x00\x00\x00\x02"), ]; let data = [ - (keys[0], random_bytes32(rng).to_vec()), - (keys[1], random_bytes32(rng).to_vec()), - (keys[2], random_bytes32(rng).to_vec()), + (keys[0], random_bytes::<_, 32>(rng).to_vec()), + (keys[1], random_bytes::<_, 32>(rng).to_vec()), + (keys[2], random_bytes::<_, 32>(rng).to_vec()), (keys[0], b"".to_vec()), (keys[1], b"".to_vec()), (keys[2], b"".to_vec()), @@ -1456,217 +1371,4 @@ mod test { 
assert_eq!(root, expected_root); } - - #[test] - fn merkle_tree__generate_proof__returns_proof_with_proof_set_for_given_key() { - // Given - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - // 256: N4 - // / \ - // 255: N3 \ - // / \ \ - // 254: / N2 \ - // / / \ \ - // 253: / N1 \ \ - // / / \ \ \ - // 252: / N0 \ \ \ - // ... / / \ \ \ \ - // 0: L0 L1 L3 P1 L2 P0 - // K0 K1 K3 K2 - - let k0 = [0u8; 32]; - let v0 = sum(b"DATA"); - tree.update(MerkleTreeKey::new_without_hash(k0), &v0) - .expect("Expected successful update"); - - let mut k1 = [0u8; 32]; - k1[0] = 0b01000000; - let v1 = sum(b"DATA"); - tree.update(MerkleTreeKey::new_without_hash(k1), &v1) - .expect("Expected successful update"); - - let mut k2 = [0u8; 32]; - k2[0] = 0b01100000; - let v2 = sum(b"DATA"); - tree.update(MerkleTreeKey::new_without_hash(k2), &v2) - .expect("Expected successful update"); - - let mut k3 = [0u8; 32]; - k3[0] = 0b01001000; - let v3 = sum(b"DATA"); - tree.update(MerkleTreeKey::new_without_hash(k3), &v3) - .expect("Expected successful update"); - - let l0 = Node::create_leaf(&k0, v0); - let l1 = Node::create_leaf(&k1, v1); - let l2 = Node::create_leaf(&k2, v2); - let l3 = Node::create_leaf(&k3, v3); - let n0 = Node::create_node(&l1, &l3, 252); - let n1 = Node::create_node(&n0, &Node::create_placeholder(), 253); - let n2 = Node::create_node(&n1, &l2, 254); - let n3 = Node::create_node(&l0, &n2, 255); - - { - // When - let proof = tree.generate_proof(&k0.into()).expect("Expected proof"); - let expected_proof_set = [*n2.hash(), *Node::create_placeholder().hash()]; - - // Then - assert_eq!(*proof.proof_set(), expected_proof_set); - } - - { - // When - let proof = tree.generate_proof(&k1.into()).expect("Expected proof"); - let expected_proof_set = [ - *l3.hash(), - *Node::create_placeholder().hash(), - *l2.hash(), - *l0.hash(), - *Node::create_placeholder().hash(), - ]; - - // Then - assert_eq!(*proof.proof_set(), expected_proof_set); - } - - 
{ - // When - let proof = tree.generate_proof(&k2.into()).expect("Expected proof"); - let expected_proof_set = - [*n1.hash(), *l0.hash(), *Node::create_placeholder().hash()]; - - // Then - assert_eq!(*proof.proof_set(), expected_proof_set); - } - - { - // When - let proof = tree.generate_proof(&k3.into()).expect("Expected proof"); - let expected_proof_set = [ - *l1.hash(), - *Node::create_placeholder().hash(), - *l2.hash(), - *l0.hash(), - *Node::create_placeholder().hash(), - ]; - - // Then - assert_eq!(*proof.proof_set(), expected_proof_set); - } - - { - // Test that supplying an arbitrary leaf "outside" the range of - // leaves produces a valid proof set - - // When - let key = [255u8; 32]; - let proof = tree.generate_proof(&key.into()).expect("Expected proof"); - let expected_proof_set = [*n3.hash()]; - - // Then - assert_eq!(*proof.proof_set(), expected_proof_set); - } - } - - #[test] - fn merkle_tree__generate_proof__returns_inclusion_proof_for_included_key() { - // Given - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - // 256: N4 - // / \ - // 255: N3 \ - // / \ \ - // 254: / N2 \ - // / / \ \ - // 253: / N1 \ \ - // / / \ \ \ - // 252: / N0 \ \ \ - // ... 
/ / \ \ \ \ - // 0: L0 L1 L3 P1 L2 P0 - // K0 K1 K3 K2 - - let k0 = [0u8; 32]; - let v0 = sum(b"DATA"); - tree.update(MerkleTreeKey::new_without_hash(k0), &v0) - .expect("Expected successful update"); - - let mut k1 = [0u8; 32]; - k1[0] = 0b01000000; - let v1 = sum(b"DATA"); - tree.update(MerkleTreeKey::new_without_hash(k1), &v1) - .expect("Expected successful update"); - - let mut k2 = [0u8; 32]; - k2[0] = 0b01100000; - let v2 = sum(b"DATA"); - tree.update(MerkleTreeKey::new_without_hash(k2), &v2) - .expect("Expected successful update"); - - let mut k3 = [0u8; 32]; - k3[0] = 0b01001000; - let v3 = sum(b"DATA"); - tree.update(MerkleTreeKey::new_without_hash(k3), &v3) - .expect("Expected successful update"); - - // When - let proof = tree.generate_proof(&k1.into()).expect("Expected proof"); - - // Then - assert!(proof.is_inclusion()); - } - - #[test] - fn merkle_tree__generate_proof__returns_exclusion_proof_for_excluded_key() { - // Given - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - // 256: N4 - // / \ - // 255: N3 \ - // / \ \ - // 254: / N2 \ - // / / \ \ - // 253: / N1 \ \ - // / / \ \ \ - // 252: / N0 \ \ \ - // ... 
/ / \ \ \ \ - // 0: L0 L1 L3 P1 L2 P0 - // K0 K1 K3 K2 - - let k0 = [0u8; 32]; - let v0 = sum(b"DATA"); - tree.update(MerkleTreeKey::new_without_hash(k0), &v0) - .expect("Expected successful update"); - - let mut k1 = [0u8; 32]; - k1[0] = 0b01000000; - let v1 = sum(b"DATA"); - tree.update(MerkleTreeKey::new_without_hash(k1), &v1) - .expect("Expected successful update"); - - let mut k2 = [0u8; 32]; - k2[0] = 0b01100000; - let v2 = sum(b"DATA"); - tree.update(MerkleTreeKey::new_without_hash(k2), &v2) - .expect("Expected successful update"); - - let mut k3 = [0u8; 32]; - k3[0] = 0b01001000; - let v3 = sum(b"DATA"); - tree.update(MerkleTreeKey::new_without_hash(k3), &v3) - .expect("Expected successful update"); - - // When - let key = [255u8; 32]; - let proof = tree.generate_proof(&key.into()).expect("Expected proof"); - - // Then - assert!(proof.is_exclusion()); - } } diff --git a/fuel-merkle/src/sparse/merkle_tree/node.rs b/fuel-merkle/src/sparse/generic/node.rs similarity index 76% rename from fuel-merkle/src/sparse/merkle_tree/node.rs rename to fuel-merkle/src/sparse/generic/node.rs index 71e81eb11b..21e6e0e1a9 100644 --- a/fuel-merkle/src/sparse/merkle_tree/node.rs +++ b/fuel-merkle/src/sparse/generic/node.rs @@ -8,10 +8,10 @@ use crate::{ ParentNode as ParentNodeTrait, }, path::{ + ComparablePath, + Instruction, Path, - Side, }, -<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs Prefix, }, sparse::generic::{ @@ -20,23 +20,6 @@ use crate::{ zero_sum, }, primitive::Primitive, -======== - sum, - Bytes32, - Prefix, - }, - sparse::{ - hash::{ - calculate_hash, - calculate_leaf_hash, - calculate_node_hash, - }, - primitive::{ - Primitive, - PrimitiveView, - }, - zero_sum, ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs }, storage::{ Mappable, @@ -46,17 +29,14 @@ use crate::{ use crate::common::Bytes; use core::{ + cmp, fmt, marker::PhantomData, }; use digest::Digest; #[derive(Clone, PartialEq, Eq)] -<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs 
pub(crate) enum Node { -======== -pub(super) enum Node { ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs Node { hash: Bytes, height: u32, @@ -67,7 +47,6 @@ pub(super) enum Node { Placeholder, } -<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs impl Node { fn calculate_hash( prefix: &Prefix, @@ -86,11 +65,6 @@ impl Node { pub fn max_height() -> u32 { Node::::key_size_in_bits() -======== -impl Node { - pub fn max_height() -> u32 { - Node::key_size_bits() ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs } pub fn new( @@ -100,7 +74,7 @@ impl Node { bytes_hi: Bytes, ) -> Self { Self::Node { - hash: calculate_hash(&prefix, &bytes_lo, &bytes_hi), + hash: Self::calculate_hash(&prefix, &bytes_lo, &bytes_hi), height, prefix, bytes_lo, @@ -111,7 +85,7 @@ impl Node { pub fn create_leaf>(key: &Bytes, data: D) -> Self { let bytes_hi = sum(data); Self::Node { - hash: calculate_leaf_hash(key, &bytes_hi), + hash: Self::calculate_hash(&Prefix::Leaf, key, &bytes_hi), height: 0u32, prefix: Prefix::Leaf, bytes_lo: *key, @@ -123,7 +97,7 @@ impl Node { let bytes_lo = *left_child.hash(); let bytes_hi = *right_child.hash(); Self::Node { - hash: calculate_node_hash(&bytes_lo, &bytes_hi), + hash: Self::calculate_hash(&Prefix::Node, &bytes_lo, &bytes_hi), height, prefix: Prefix::Node, bytes_lo, @@ -142,36 +116,30 @@ impl Node { // of the leaves or an ancestor multiple generations above the // leaves. // N.B.: A leaf can be a placeholder. 
-<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs let parent_depth = path_node.common_path_length(side_node); let parent_height = Node::::max_height() - parent_depth; -======== - #[allow(clippy::cast_possible_truncation)] // Key is 32 bytes - let parent_depth = path_node.common_path_length(side_node) as u32; - #[allow(clippy::arithmetic_side_effects)] // parent_depth <= max_height - let parent_height = Node::max_height() - parent_depth; ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs match path.get_instruction(parent_depth).unwrap() { - Side::Left => Node::create_node(path_node, side_node, parent_height), - Side::Right => Node::create_node(side_node, path_node, parent_height), + Instruction::Left => { + Node::create_node(path_node, side_node, parent_height) + } + Instruction::Right => { + Node::create_node(side_node, path_node, parent_height) + } } } else { // When joining two nodes, or a node and a leaf, the joined node is // the direct parent of the node with the greater height and an // ancestor of the node with the lesser height. // N.B.: A leaf can be a placeholder. 
-<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs let parent_height = cmp::max(path_node.height(), side_node.height()) + 1; let parent_depth = Node::::max_height() - parent_height; -======== - #[allow(clippy::arithmetic_side_effects)] // Neither node cannot be root - let parent_height = path_node.height().max(side_node.height()) + 1; - #[allow(clippy::arithmetic_side_effects)] // parent_height <= max_height - let parent_depth = Node::max_height() - parent_height; ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs match path.get_instruction(parent_depth).unwrap() { - Side::Left => Node::create_node(path_node, side_node, parent_height), - Side::Right => Node::create_node(side_node, path_node, parent_height), + Instruction::Left => { + Node::create_node(path_node, side_node, parent_height) + } + Instruction::Right => { + Node::create_node(side_node, path_node, parent_height) + } } } } @@ -180,11 +148,7 @@ impl Node { Self::Placeholder } -<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs pub fn common_path_length(&self, other: &Node) -> u32 { -======== - pub fn common_path_length(&self, other: &Node) -> u64 { ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs debug_assert!(self.is_leaf()); debug_assert!(other.is_leaf()); @@ -206,7 +170,6 @@ impl Node { } } -<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs pub fn prefix(&self) -> Prefix { match self { Node::Node { prefix, .. 
} => *prefix, @@ -228,8 +191,6 @@ impl Node { } } -======== ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs pub fn is_leaf(&self) -> bool { self.prefix() == Prefix::Leaf || self.is_placeholder() } @@ -238,7 +199,6 @@ impl Node { self.prefix() == Prefix::Node } -<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs pub fn leaf_key(&self) -> &Bytes { assert!(self.is_leaf()); self.bytes_lo() @@ -259,8 +219,6 @@ impl Node { self.bytes_hi() } -======== ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs pub fn is_placeholder(&self) -> bool { &Self::Placeholder == self } @@ -271,91 +229,6 @@ impl Node { Node::Placeholder => zero_sum(), } } - - fn prefix(&self) -> Prefix { - match self { - Node::Node { prefix, .. } => *prefix, - Node::Placeholder => Prefix::Leaf, - } - } - - fn bytes_lo(&self) -> &Bytes32 { - match self { - Node::Node { bytes_lo, .. } => bytes_lo, - Node::Placeholder => zero_sum(), - } - } - - fn bytes_hi(&self) -> &Bytes32 { - match self { - Node::Node { bytes_hi, .. } => bytes_hi, - Node::Placeholder => zero_sum(), - } - } - - /// Get the leaf key of a leaf node. - /// - /// The leaf key is the lower 32 bytes stored in a leaf node. - /// This method expects the node to be a leaf node, and this precondition - /// must be guaranteed at the call site for correctness. This method should - /// only be used within contexts where this precondition can be guaranteed, - /// such as the [MerkleTree](super::MerkleTree). - /// - /// In `debug`, this method will panic if the node is not a leaf node to - /// indicate to the developer that there is a potential problem in the - /// tree's implementation. - pub(super) fn leaf_key(&self) -> &Bytes32 { - debug_assert!(self.is_leaf()); - self.bytes_lo() - } - - /// Get the leaf data of a leaf node. - /// - /// The leaf key is the upper 32 bytes stored in a leaf node. - /// This method expects the node to be a leaf node, and this precondition - /// must be guaranteed at the call site for correctness. 
This method should - /// only be used within contexts where this precondition can be guaranteed, - /// such as the [MerkleTree](super::MerkleTree). - /// - /// In `debug`, this method will panic if the node is not a leaf node to - /// indicate to the developer that there is a potential problem in the - /// tree's implementation. - pub(super) fn leaf_data(&self) -> &Bytes32 { - debug_assert!(self.is_leaf()); - self.bytes_hi() - } - - /// Get the left child key of an internal node. - /// - /// The left child key is the lower 32 bytes stored in an internal node. - /// This method expects the node to be an internal node, and this - /// precondition must be guaranteed at the call site for correctness. This - /// method should only be used within contexts where this precondition can - /// be guaranteed, such as the [MerkleTree](super::MerkleTree). - /// - /// In `debug`, this method will panic if the node is not an internal node - /// to indicate to the developer that there is a potential problem in the - /// tree's implementation. - pub(super) fn left_child_key(&self) -> &Bytes32 { - debug_assert!(self.is_node()); - self.bytes_lo() - } - - /// Get the right child key of an internal node. - /// - /// The right child key is the upper 32 bytes stored in an internal node. - /// This method expects the node to be an internal node, and this - /// precondition must be guaranteed at the call site for correctness. This - /// method should only be used within contexts where this precondition can - /// be guaranteed, such as the [MerkleTree](super::MerkleTree). - /// - /// In `debug`, this method will panic if the node is not an internal node - /// to indicate to the developer that there is a potential problem in the - /// tree's implementation. 
- pub(super) fn right_child_key(&self) -> &Bytes32 { - debug_assert!(self.is_node()); - self.bytes_hi() - } } impl AsRef> for Node { @@ -371,11 +244,6 @@ impl NodeTrait for Node { Node::height(self) } - #[allow(clippy::arithmetic_side_effects, clippy::cast_possible_truncation)] // const - fn key_size_bits() -> u32 { - core::mem::size_of::() as u32 * 8 - } - fn leaf_key(&self) -> Self::Key { *Node::leaf_key(self) } @@ -389,35 +257,7 @@ impl NodeTrait for Node { } } -<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs impl fmt::Debug for Node { -======== -impl From<&Node> for Primitive { - fn from(node: &Node) -> Self { - ( - node.height(), - node.prefix() as u8, - *node.bytes_lo(), - *node.bytes_hi(), - ) - } -} - -impl TryFrom for Node { - type Error = DeserializeError; - - fn try_from(primitive: Primitive) -> Result { - let height = primitive.height(); - let prefix = primitive.prefix()?; - let bytes_lo = *primitive.bytes_lo(); - let bytes_hi = *primitive.bytes_hi(); - let node = Self::new(height, prefix, bytes_lo, bytes_hi); - Ok(node) - } -} - -impl fmt::Debug for Node { ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if self.is_node() { f.debug_struct("Node (Internal)") @@ -437,11 +277,7 @@ impl fmt::Debug for Node { } } -<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs pub(crate) struct StorageNode<'storage, const KEY_SIZE: usize, TableType, StorageType> { -======== -pub(super) struct StorageNode<'storage, TableType, StorageType> { ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs storage: &'storage StorageType, node: Node, phantom_table: PhantomData, @@ -492,11 +328,6 @@ impl NodeTrait self.node.height() } - #[allow(clippy::arithmetic_side_effects, clippy::cast_possible_truncation)] // const - fn key_size_bits() -> u32 { - core::mem::size_of::() as u32 * 8 - } - fn leaf_key(&self) -> Self::Key { *self.node.leaf_key() } @@ -602,17 +433,14 @@ where #[cfg(test)] mod test_node { 
- use super::Node; use crate::{ common::{ error::DeserializeError, - sum, Bytes32, Prefix, PrefixError, }, sparse::{ -<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs generic::{ hash::{ sum, @@ -620,9 +448,6 @@ mod test_node { }, Node, }, -======== - zero_sum, ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs Primitive, }, }; @@ -787,10 +612,7 @@ mod test_storage_node { StorageNode, StorageNodeError, }; -<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs -======== ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs use crate::{ common::{ error::DeserializeError, @@ -798,19 +620,14 @@ mod test_storage_node { ChildError, ParentNode, }, - sum, Bytes32, PrefixError, StorageMap, }, -<<<<<<<< HEAD:fuel-merkle/src/sparse/generic/node.rs sparse::{ generic::hash::sum, Primitive, }, -======== - sparse::Primitive, ->>>>>>>> master:fuel-merkle/src/sparse/merkle_tree/node.rs storage::{ Mappable, StorageMutate, diff --git a/fuel-merkle/src/sparse/generic/primitive.rs b/fuel-merkle/src/sparse/generic/primitive.rs index a46a7c20cd..641201d1d9 100644 --- a/fuel-merkle/src/sparse/generic/primitive.rs +++ b/fuel-merkle/src/sparse/generic/primitive.rs @@ -1,7 +1,11 @@ -use crate::common::{ - Bytes32, - Prefix, - PrefixError, +use crate::{ + common::{ + error::DeserializeError, + Bytes, + Prefix, + PrefixError, + }, + sparse::generic::Node, }; /// **Leaf buffer:** @@ -21,16 +25,16 @@ use crate::common::{ /// | `04 - 05` | Prefix (1 byte, `0x01`) | /// | `05 - 37` | Left child key (32 bytes) | /// | `37 - 69` | Right child key (32 bytes) | -pub type Primitive = (u32, u8, Bytes32, Bytes32); +pub type Primitive = (u32, u8, Bytes, Bytes); -pub trait PrimitiveView { +trait PrimitiveView { fn height(&self) -> u32; fn prefix(&self) -> Result; - fn bytes_lo(&self) -> &Bytes32; - fn bytes_hi(&self) -> &Bytes32; + fn bytes_lo(&self) -> &Bytes; + fn bytes_hi(&self) -> &Bytes; } -impl PrimitiveView for Primitive { +impl PrimitiveView for Primitive { fn height(&self) -> u32 
{ self.0 } @@ -39,11 +43,35 @@ impl PrimitiveView for Primitive { Prefix::try_from(self.1) } - fn bytes_lo(&self) -> &Bytes32 { + fn bytes_lo(&self) -> &Bytes { &self.2 } - fn bytes_hi(&self) -> &Bytes32 { + fn bytes_hi(&self) -> &Bytes { &self.3 } } + +impl From<&Node> for Primitive { + fn from(node: &Node) -> Self { + ( + node.height(), + node.prefix() as u8, + *node.bytes_lo(), + *node.bytes_hi(), + ) + } +} + +impl TryFrom> for Node { + type Error = DeserializeError; + + fn try_from(primitive: Primitive) -> Result { + let height = primitive.height(); + let prefix = primitive.prefix()?; + let bytes_lo = *primitive.bytes_lo(); + let bytes_hi = *primitive.bytes_hi(); + let node = Self::new(height, prefix, bytes_lo, bytes_hi); + Ok(node) + } +} diff --git a/fuel-merkle/src/sparse/hash.rs b/fuel-merkle/src/sparse/hash.rs deleted file mode 100644 index 0d9086c193..0000000000 --- a/fuel-merkle/src/sparse/hash.rs +++ /dev/null @@ -1,28 +0,0 @@ -use crate::common::{ - sum_iter, - Bytes32, - Prefix, -}; - -pub const fn zero_sum() -> &'static Bytes32 { - const ZERO_SUM: Bytes32 = [0; 32]; - - &ZERO_SUM -} - -pub fn calculate_hash( - prefix: &Prefix, - bytes_lo: &Bytes32, - bytes_hi: &Bytes32, -) -> Bytes32 { - let input = [prefix.as_ref(), bytes_lo.as_ref(), bytes_hi.as_ref()]; - sum_iter(input) -} - -pub fn calculate_leaf_hash(leaf_key: &Bytes32, leaf_value: &Bytes32) -> Bytes32 { - calculate_hash(&Prefix::Leaf, leaf_key, leaf_value) -} - -pub fn calculate_node_hash(left_child: &Bytes32, right_child: &Bytes32) -> Bytes32 { - calculate_hash(&Prefix::Node, left_child, right_child) -} diff --git a/fuel-merkle/src/sparse/in_memory.rs b/fuel-merkle/src/sparse/in_memory.rs index ae1c77bec0..d1a8c5cf66 100644 --- a/fuel-merkle/src/sparse/in_memory.rs +++ b/fuel-merkle/src/sparse/in_memory.rs @@ -5,8 +5,7 @@ use crate::{ }, sparse::{ self, - merkle_tree::MerkleTreeKey, - proof::Proof, + MerkleTreeKey, Primitive, }, storage::{ @@ -178,10 +177,6 @@ impl MerkleTree { pub fn 
root(&self) -> Bytes32 { self.tree.root() } - - pub fn generate_proof(&self, key: &MerkleTreeKey) -> Option { - self.tree.generate_proof(key).ok() - } } impl Default for MerkleTree { @@ -193,7 +188,7 @@ impl Default for MerkleTree { #[cfg(test)] mod test { use super::*; - use crate::common::sum; + use sparse::generic::hash::sum; fn key(data: &[u8]) -> MerkleTreeKey { MerkleTreeKey::new_without_hash(sum(data)) diff --git a/fuel-merkle/src/sparse/merkle_tree/branch.rs b/fuel-merkle/src/sparse/merkle_tree/branch.rs deleted file mode 100644 index 7d597f38f6..0000000000 --- a/fuel-merkle/src/sparse/merkle_tree/branch.rs +++ /dev/null @@ -1,77 +0,0 @@ -use super::Node; -use crate::{ - common::{ - path::Path, - Bytes32, - }, - sparse::Primitive, -}; -use fuel_storage::{ - Mappable, - StorageMutate, -}; - -use core::iter; - -pub(super) struct Branch { - pub bits: Bytes32, - pub node: Node, -} - -impl From for Branch { - fn from(leaf: Node) -> Self { - Self { - bits: *leaf.leaf_key(), - node: leaf, - } - } -} - -pub(super) fn merge_branches( - storage: &mut Storage, - mut left_branch: Branch, - mut right_branch: Branch, -) -> Result -where - Storage: StorageMutate
, - Table: Mappable, -{ - #[allow(clippy::cast_possible_truncation)] // Key is 32 bytes, never truncates - let ancestor_height = if left_branch.node.is_leaf() && right_branch.node.is_leaf() { - let parent_depth = left_branch.node.common_path_length(&right_branch.node) as u32; - #[allow(clippy::arithmetic_side_effects)] // common_path_length <= max_height - let parent_height = Node::max_height() - parent_depth; - parent_height - } else { - let ancestor_depth = - left_branch.bits.common_path_length(&right_branch.bits) as u32; - #[allow(clippy::arithmetic_side_effects)] // common_path_length <= max_height - let ancestor_height = Node::max_height() - ancestor_depth; - - for branch in [&mut right_branch, &mut left_branch] { - if branch.node.is_node() { - let path = branch.bits; - #[allow(clippy::arithmetic_side_effects)] - // branch cannot be at max height - let parent_height = branch.node.height() + 1; - #[allow(clippy::arithmetic_side_effects)] - // common_path_length <= max_height - let stale_depth = ancestor_height - parent_height; - let placeholders = - iter::repeat(Node::create_placeholder()).take(stale_depth as usize); - for placeholder in placeholders { - branch.node = - Node::create_node_on_path(&path, &branch.node, &placeholder); - storage.insert(branch.node.hash(), &branch.node.as_ref().into())?; - } - } - } - ancestor_height - }; - let node = Node::create_node(&left_branch.node, &right_branch.node, ancestor_height); - storage.insert(node.hash(), &node.as_ref().into())?; - Ok(Branch { - bits: left_branch.bits, - node, - }) -} diff --git a/fuel-merkle/src/sparse/proof.rs b/fuel-merkle/src/sparse/proof.rs deleted file mode 100644 index 6d9337c1db..0000000000 --- a/fuel-merkle/src/sparse/proof.rs +++ /dev/null @@ -1,800 +0,0 @@ -use crate::{ - common::{ - path::{ - Path, - Side, - }, - sum, - Bytes32, - ProofSet, - }, - sparse::{ - hash::{ - calculate_leaf_hash, - calculate_node_hash, - }, - zero_sum, - MerkleTreeKey, - }, -}; - -use alloc::vec::Vec; -use 
core::{ - fmt, - fmt::Debug, -}; - -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum Proof { - Inclusion(InclusionProof), - Exclusion(ExclusionProof), -} - -impl Proof { - pub fn proof_set(&self) -> &ProofSet { - match self { - Proof::Inclusion(proof) => &proof.proof_set, - Proof::Exclusion(proof) => &proof.proof_set, - } - } - - pub fn is_inclusion(&self) -> bool { - match self { - Proof::Inclusion(_) => true, - Proof::Exclusion(_) => false, - } - } - - pub fn is_exclusion(&self) -> bool { - !self.is_inclusion() - } -} - -#[derive(Clone, Eq, PartialEq)] -pub struct InclusionProof { - pub proof_set: ProofSet, -} - -impl InclusionProof { - pub fn verify(&self, root: &Bytes32, key: &MerkleTreeKey, value: &[u8]) -> bool { - let Self { proof_set } = self; - - if proof_set.len() > 256usize { - return false; - } - - let mut current = calculate_leaf_hash(key, &sum(value)); - for (i, side_hash) in proof_set.iter().enumerate() { - #[allow(clippy::arithmetic_side_effects)] // Cannot underflow - let index = - u32::try_from(proof_set.len() - 1 - i).expect("We've checked it above"); - current = match key.get_instruction(index).expect("Infallible") { - Side::Left => calculate_node_hash(¤t, side_hash), - Side::Right => calculate_node_hash(side_hash, ¤t), - }; - } - current == *root - } -} - -impl Debug for InclusionProof { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let proof_set = self.proof_set.iter().map(hex::encode).collect::>(); - f.debug_struct("InclusionProof") - .field("Proof set", &proof_set) - .finish() - } -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum ExclusionLeaf { - Leaf(ExclusionLeafData), - Placeholder, -} - -#[derive(Clone, Eq, PartialEq)] -pub struct ExclusionLeafData { - /// The leaf key. - pub leaf_key: Bytes32, - /// Hash of the value of the leaf. 
- pub leaf_value: Bytes32, -} - -impl Debug for ExclusionLeafData { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("ExclusionLeafData") - .field("Leaf key", &hex::encode(self.leaf_key)) - .field("Leaf value", &hex::encode(self.leaf_value)) - .finish() - } -} - -impl ExclusionLeaf { - fn hash(&self) -> Bytes32 { - match self { - ExclusionLeaf::Leaf(data) => { - calculate_leaf_hash(&data.leaf_key, &data.leaf_value) - } - ExclusionLeaf::Placeholder => *zero_sum(), - } - } -} - -#[derive(Clone, Eq, PartialEq)] -pub struct ExclusionProof { - pub proof_set: ProofSet, - pub leaf: ExclusionLeaf, -} - -impl ExclusionProof { - pub fn verify(&self, root: &Bytes32, key: &MerkleTreeKey) -> bool { - let Self { proof_set, leaf } = self; - - if let ExclusionLeaf::Leaf(data) = leaf { - if data.leaf_key == key.as_ref() { - return false; - } - } - - if proof_set.len() > 256usize { - return false; - } - - let mut current = leaf.hash(); - for (i, side_hash) in proof_set.iter().enumerate() { - #[allow(clippy::arithmetic_side_effects)] // Cannot underflow - let index = - u32::try_from(proof_set.len() - 1 - i).expect("We've checked it above"); - current = match key.get_instruction(index).expect("Infallible") { - Side::Left => calculate_node_hash(¤t, side_hash), - Side::Right => calculate_node_hash(side_hash, ¤t), - }; - } - current == *root - } -} - -impl Debug for ExclusionProof { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let proof_set = self.proof_set.iter().map(hex::encode).collect::>(); - f.debug_struct("ExclusionProof") - .field("Proof set", &proof_set) - .field("Leaf", &self.leaf) - .finish() - } -} - -#[cfg(test)] -#[allow(non_snake_case)] -mod test { - use crate::{ - common::{ - Bytes32, - StorageMap, - }, - sparse::{ - proof::Proof, - MerkleTree, - Primitive, - }, - }; - use fuel_storage::Mappable; - - #[derive(Debug)] - struct TestTable; - - impl Mappable for TestTable { - type Key = Self::OwnedKey; - type OwnedKey = Bytes32; - 
type OwnedValue = Primitive; - type Value = Self::OwnedValue; - } - - #[test] - fn inclusion_proof__verify__returns_true_for_correct_key_and_correct_value() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - // 256: N4 - // / \ - // 255: N3 \ - // / \ \ - // 254: / N2 \ - // / / \ \ - // 253: / N1 \ \ - // / / \ \ \ - // 252: / N0 \ \ \ - // ... / / \ \ \ \ - // 0: L0 L1 L3 P1 L2 P0 - // K0 K1 K3 K2 - - let k0 = [0u8; 32].into(); - let v0 = b"DATA_0"; - tree.update(k0, v0).expect("Expected successful update"); - - let mut k1 = [0u8; 32]; - k1[0] = 0b01000000; - let k1 = k1.into(); - let v1 = b"DATA_1"; - tree.update(k1, v1).expect("Expected successful update"); - - let mut k2 = [0u8; 32]; - k2[0] = 0b01100000; - let k2 = k2.into(); - let v2 = b"DATA_2"; - tree.update(k2, v2).expect("Expected successful update"); - - let mut k3 = [0u8; 32]; - k3[0] = 0b01001000; - let k3 = k3.into(); - let v3 = b"DATA_3"; - tree.update(k3, v3).expect("Expected successful update"); - - let root = tree.root(); - - { - // Given - let proof = tree.generate_proof(&k0).unwrap(); - - // When - let inclusion = match proof { - Proof::Inclusion(proof) => proof.verify(&root, &k0, b"DATA_0"), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - assert!(inclusion); - } - - { - // Given - let proof = tree.generate_proof(&k1).unwrap(); - - // When - let inclusion = match proof { - Proof::Inclusion(proof) => proof.verify(&root, &k1, b"DATA_1"), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - assert!(inclusion); - } - - { - // Given - let proof = tree.generate_proof(&k2).unwrap(); - - // When - let inclusion = match proof { - Proof::Inclusion(proof) => proof.verify(&root, &k2, b"DATA_2"), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - assert!(inclusion); - } - - { - // Given - let proof = tree.generate_proof(&k3).unwrap(); - - // When - let inclusion = match proof { - 
Proof::Inclusion(proof) => proof.verify(&root, &k3, b"DATA_3"), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - assert!(inclusion); - } - } - - #[test] - fn inclusion_proof__verify__returns_false_for_correct_key_and_incorrect_value() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - // 256: N4 - // / \ - // 255: N3 \ - // / \ \ - // 254: / N2 \ - // / / \ \ - // 253: / N1 \ \ - // / / \ \ \ - // 252: / N0 \ \ \ - // ... / / \ \ \ \ - // 0: L0 L1 L3 P1 L2 P0 - // K0 K1 K3 K2 - - let k0 = [0u8; 32].into(); - let v0 = b"DATA_0"; - tree.update(k0, v0).expect("Expected successful update"); - - let mut k1 = [0u8; 32]; - k1[0] = 0b01000000; - let k1 = k1.into(); - let v1 = b"DATA_1"; - tree.update(k1, v1).expect("Expected successful update"); - - let mut k2 = [0u8; 32]; - k2[0] = 0b01100000; - let k2 = k2.into(); - let v2 = b"DATA_2"; - tree.update(k2, v2).expect("Expected successful update"); - - let mut k3 = [0u8; 32]; - k3[0] = 0b01001000; - let k3 = k3.into(); - let v3 = b"DATA_3"; - tree.update(k3, v3).expect("Expected successful update"); - - let root = tree.root(); - - { - // Given - let proof = tree.generate_proof(&k0).unwrap(); - - // When - let inclusion = match proof { - Proof::Inclusion(proof) => proof.verify(&root, &k0, b"DATA_100"), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - assert!(!inclusion); - } - - { - // Given - let proof = tree.generate_proof(&k1).unwrap(); - - // When - let inclusion = match proof { - Proof::Inclusion(proof) => proof.verify(&root, &k1, b"DATA_100"), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - assert!(!inclusion); - } - - { - // Given - let proof = tree.generate_proof(&k2).unwrap(); - - // When - let inclusion = match proof { - Proof::Inclusion(proof) => proof.verify(&root, &k2, b"DATA_100"), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - assert!(!inclusion); 
- } - - { - // Given - let proof = tree.generate_proof(&k3).unwrap(); - - // When - let inclusion = match proof { - Proof::Inclusion(proof) => proof.verify(&root, &k3, b"DATA_100"), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - // Then - assert!(!inclusion); - } - } - - #[test] - fn inclusion_proof__verify__returns_false_for_incorrect_key() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - // 256: N4 - // / \ - // 255: N3 \ - // / \ \ - // 254: / N2 \ - // / / \ \ - // 253: / N1 \ \ - // / / \ \ \ - // 252: / N0 \ \ \ - // ... / / \ \ \ \ - // 0: L0 L1 L3 P1 L2 P0 - // K0 K1 K3 K2 - - let k0 = [0u8; 32].into(); - let v0 = b"DATA_0"; - tree.update(k0, v0).expect("Expected successful update"); - - let mut k1 = [0u8; 32]; - k1[0] = 0b01000000; - let k1 = k1.into(); - let v1 = b"DATA_1"; - tree.update(k1, v1).expect("Expected successful update"); - - let mut k2 = [0u8; 32]; - k2[0] = 0b01100000; - let k2 = k2.into(); - let v2 = b"DATA_2"; - tree.update(k2, v2).expect("Expected successful update"); - - let mut k3 = [0u8; 32]; - k3[0] = 0b01001000; - let k3 = k3.into(); - let v3 = b"DATA_3"; - tree.update(k3, v3).expect("Expected successful update"); - - let root = tree.root(); - - // Given - let proof = tree.generate_proof(&k3).unwrap(); - - // When - let key = [1u8; 32].into(); - let inclusion = match proof { - Proof::Inclusion(proof) => proof.verify(&root, &key, b"DATA_3"), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - assert!(!inclusion); - } - - #[test] - fn exclusion_proof__verify__returns_true_for_correct_key() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - // 256: N4 - // / \ - // 255: N3 \ - // / \ \ - // 254: / N2 \ - // / / \ \ - // 253: / N1 \ \ - // / / \ \ \ - // 252: / N0 \ \ \ - // ... 
/ / \ \ \ \ - // 0: L0 L1 L3 P1 L2 P0 - // K0 K1 K3 K2 - - let k0 = [0u8; 32]; - let v0 = b"DATA_0"; - tree.update(k0.into(), v0) - .expect("Expected successful update"); - - let mut k1 = [0u8; 32]; - k1[0] = 0b01000000; - let v1 = b"DATA_1"; - tree.update(k1.into(), v1) - .expect("Expected successful update"); - - let mut k2 = [0u8; 32]; - k2[0] = 0b01100000; - let v2 = b"DATA_2"; - tree.update(k2.into(), v2) - .expect("Expected successful update"); - - let mut k3 = [0u8; 32]; - k3[0] = 0b01001000; - let v3 = b"DATA_3"; - tree.update(k3.into(), v3) - .expect("Expected successful update"); - - let root = tree.root(); - - // Given - let key = [0xffu8; 32].into(); - let proof = tree.generate_proof(&key).unwrap(); - - // When - let exclusion = match proof { - Proof::Inclusion(_) => panic!("Expected ExclusionProof"), - Proof::Exclusion(proof) => proof.verify(&root, &key), - }; - - // Then - assert!(exclusion); - } - - #[test] - fn exclusion_proof__verify__returns_false_for_incorrect_key() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - // 256: N4 - // / \ - // 255: N3 \ - // / \ \ - // 254: / N2 \ - // / / \ \ - // 253: / N1 \ \ - // / / \ \ \ - // 252: / N0 \ \ \ - // ... 
/ / \ \ \ \ - // 0: L0 L1 L3 P1 L2 P0 - // K0 K1 K3 K2 - - let k0 = [0u8; 32].into(); - let v0 = b"DATA_0"; - tree.update(k0, v0).expect("Expected successful update"); - - let mut k1 = [0u8; 32]; - k1[0] = 0b01000000; - let k1 = k1.into(); - let v1 = b"DATA_1"; - tree.update(k1, v1).expect("Expected successful update"); - - let mut k2 = [0u8; 32]; - k2[0] = 0b01100000; - let k2 = k2.into(); - let v2 = b"DATA_2"; - tree.update(k2, v2).expect("Expected successful update"); - - let mut k3 = [0u8; 32]; - k3[0] = 0b01001000; - let k3 = k3.into(); - let v3 = b"DATA_3"; - tree.update(k3, v3).expect("Expected successful update"); - - let root = tree.root(); - - // Given - let key = [0xffu8; 32].into(); - let proof = tree.generate_proof(&key).unwrap(); - - // When - let exclusion = match proof { - Proof::Inclusion(_) => panic!("Expected ExclusionProof"), - Proof::Exclusion(proof) => proof.verify(&root, &k1), - }; - - // Then - assert!(!exclusion); - } - - #[test] - fn exclusion_proof__verify__returns_true_for_placeholder() { - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - // 256: N4 - // / \ - // 255: N3 \ - // / \ \ - // 254: / N2 \ - // / / \ \ - // 253: / N1 \ \ - // / / \ \ \ - // 252: / N0 \ \ \ - // ... 
/ / \ \ \ \ - // 0: P1 L0 L2 P0 L1 P2 - // K0 K2 K1 - - let mut k0 = [0u8; 32]; - k0[0] = 0b01000000; - let k0 = k0.into(); - let v0 = b"DATA_0"; - tree.update(k0, v0).expect("Expected successful update"); - - let mut k1 = [0u8; 32]; - k1[0] = 0b01100000; - let k1 = k1.into(); - let v1 = b"DATA_1"; - tree.update(k1, v1).expect("Expected successful update"); - - let mut k2 = [0u8; 32]; - k2[0] = 0b01001000; - let k2 = k2.into(); - let v2 = b"DATA_2"; - tree.update(k2, v2).expect("Expected successful update"); - - let root = tree.root(); - - // Given - let key = [0b00000000; 32].into(); - let proof = tree.generate_proof(&key).unwrap(); - - // When - let exclusion = match proof { - Proof::Inclusion(_) => panic!("Expected ExclusionProof"), - Proof::Exclusion(proof) => proof.verify(&root, &key), - }; - - // Then - assert!(exclusion); - } -} - -#[cfg(test)] -#[allow(non_snake_case)] -mod test_random { - use crate::{ - common::{ - Bytes32, - StorageMap, - }, - sparse::{ - proof::Proof, - MerkleTree, - MerkleTreeKey, - Primitive, - }, - }; - use fuel_storage::Mappable; - - use rand::{ - prelude::StdRng, - SeedableRng, - }; - - #[derive(Debug)] - struct TestTable; - - impl Mappable for TestTable { - type Key = Self::OwnedKey; - type OwnedKey = Bytes32; - type OwnedValue = Primitive; - type Value = Self::OwnedValue; - } - - fn random_bytes32(rng: &mut R) -> Bytes32 - where - R: rand::Rng + ?Sized, - { - let mut bytes = [0u8; 32]; - rng.fill(bytes.as_mut()); - bytes - } - - #[test] - fn inclusion_proof__verify__returns_true_for_correct_key_and_correct_value() { - let mut rng = StdRng::seed_from_u64(0xDEADBEEF); - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - let key = random_bytes32(&mut rng).into(); - let value = random_bytes32(&mut rng); - tree.update(key, &value).unwrap(); - - for _ in 0..1_000 { - let key = random_bytes32(&mut rng).into(); - let value = random_bytes32(&mut rng); - tree.update(key, &value).unwrap(); - } - - let 
root = tree.root(); - - // Given - let proof = tree.generate_proof(&key).unwrap(); - - // When - let inclusion = match proof { - Proof::Inclusion(proof) => proof.verify(&root, &key, &value), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - assert!(inclusion); - } - - #[test] - fn inclusion_proof__verify__returns_false_for_correct_key_and_incorrect_value() { - let mut rng = StdRng::seed_from_u64(0xDEADBEEF); - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - let key = random_bytes32(&mut rng).into(); - let value = random_bytes32(&mut rng); - tree.update(key, &value).unwrap(); - - for _ in 0..1_000 { - let key = random_bytes32(&mut rng).into(); - let value = random_bytes32(&mut rng); - tree.update(key, &value).unwrap(); - } - - let root = tree.root(); - - // Given - let proof = tree.generate_proof(&key).unwrap(); - - // When - let inclusion = match proof { - Proof::Inclusion(proof) => proof.verify(&root, &key, b"DATA"), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - assert!(!inclusion); - } - - #[test] - fn inclusion_proof__verify__returns_false_for_incorrect_key() { - let mut rng = StdRng::seed_from_u64(0xDEADBEEF); - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - let key_1 = random_bytes32(&mut rng).into(); - let value_1 = random_bytes32(&mut rng); - tree.update(key_1, &value_1).unwrap(); - - let key_2 = random_bytes32(&mut rng).into(); - let value_2 = random_bytes32(&mut rng); - tree.update(key_2, &value_2).unwrap(); - - for _ in 0..1_000 { - let key = random_bytes32(&mut rng).into(); - let value = random_bytes32(&mut rng); - tree.update(key, &value).unwrap(); - } - - let root = tree.root(); - - // Given - // - Generate a proof with key_1 - let proof = tree.generate_proof(&key_1).unwrap(); - - // When - // - Attempt to verify the proof with key_2 - let inclusion = match proof { - Proof::Inclusion(proof) => 
proof.verify(&root, &key_2, &value_2), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - assert!(!inclusion); - } - - #[test] - fn exclusion_proof__verify__returns_true_for_correct_key() { - let mut rng = StdRng::seed_from_u64(0xDEADBEEF); - let mut storage = StorageMap::::new(); - let mut tree = MerkleTree::new(&mut storage); - - for _ in 0..1_000 { - let key = random_bytes32(&mut rng); - let value = random_bytes32(&mut rng); - tree.update(key.into(), &value).unwrap(); - } - - let root = tree.root(); - - // Given - let key: MerkleTreeKey = random_bytes32(&mut rng).into(); - let proof = tree.generate_proof(&key).unwrap(); - - // When - let exclusion = match proof { - Proof::Inclusion(_) => panic!("Expected ExclusionProof"), - Proof::Exclusion(proof) => proof.verify(&root, &key), - }; - - // Then - assert!(exclusion); - } - - #[test] - fn exclusion_proof__verify__returns_true_for_any_key_in_empty_tree() { - let mut rng = StdRng::seed_from_u64(0xDEADBEEF); - let mut storage = StorageMap::::new(); - let tree = MerkleTree::new(&mut storage); - let root = tree.root(); - - // Given - let key = random_bytes32(&mut rng).into(); - let proof = tree.generate_proof(&key).unwrap(); - - // When - let exclusion = match proof { - Proof::Inclusion(_) => panic!("Expected ExclusionProof"), - Proof::Exclusion(proof) => proof.verify(&root, &key), - }; - - // Then - assert!(exclusion); - } -} diff --git a/fuel-merkle/src/sum.rs b/fuel-merkle/src/sum.rs new file mode 100644 index 0000000000..090fc638fe --- /dev/null +++ b/fuel-merkle/src/sum.rs @@ -0,0 +1,14 @@ +mod hash; +mod merkle_tree; +mod node; + +pub(crate) use hash::{ + empty_sum, + leaf_sum, + node_sum, +}; +pub use merkle_tree::{ + MerkleTree, + MerkleTreeError, +}; +pub(crate) use node::Node; diff --git a/fuel-merkle/src/sum/hash.rs b/fuel-merkle/src/sum/hash.rs new file mode 100644 index 0000000000..5e4a31e8fe --- /dev/null +++ b/fuel-merkle/src/sum/hash.rs @@ -0,0 +1,36 @@ +use crate::common::{ + 
self, + Bytes32, + Prefix, +}; + +use digest::Digest; +use sha2::Sha256; + +// Merkle Tree hash of an empty list +// MTH({}) = Hash() +pub const fn empty_sum() -> &'static Bytes32 { + common::empty_sum_sha256() +} + +// Merkle tree hash of an n-element list D[n] +// MTH(D[n]) = Hash(0x01 || LHS fee || MTH(D[0:k]) || RHS fee || MTH(D[k:n]) +pub fn node_sum(lhs_fee: u64, lhs_data: &[u8], rhs_fee: u64, rhs_data: &[u8]) -> Bytes32 { + let mut hash = Sha256::new(); + hash.update(Prefix::Node); + hash.update(lhs_fee.to_be_bytes()); + hash.update(lhs_data); + hash.update(rhs_fee.to_be_bytes()); + hash.update(rhs_data); + hash.finalize().into() +} + +// Merkle tree hash of a list with one entry +// MTH({d(0)}) = Hash(0x00 || fee || d(0)) +pub fn leaf_sum(fee: u64, data: &[u8]) -> Bytes32 { + let mut hash = Sha256::new(); + hash.update(Prefix::Leaf); + hash.update(fee.to_be_bytes()); + hash.update(data); + hash.finalize().into() +} diff --git a/fuel-merkle/src/sum/merkle_tree.rs b/fuel-merkle/src/sum/merkle_tree.rs new file mode 100644 index 0000000000..38d1f62257 --- /dev/null +++ b/fuel-merkle/src/sum/merkle_tree.rs @@ -0,0 +1,317 @@ +use crate::{ + common::{ + Bytes32, + Subtree, + }, + sum::{ + empty_sum, + Node, + }, +}; + +use fuel_storage::{ + Mappable, + StorageMutate, +}; + +use core::marker::PhantomData; + +#[derive(Debug, Clone, derive_more::Display)] +pub enum MerkleTreeError { + #[display(fmt = "proof index {_0} is not valid")] + InvalidProofIndex(u64), +} + +/// The Binary Merkle Sum Tree is an extension to the existing Binary +/// [`MerkleTree`](crate::binary::MerkleTree). A node (leaf or internal node) in the tree +/// is defined as having: +/// - a fee (u64, 8 bytes) +/// - a digest (array of bytes) +/// +/// Therefore, a node's data is now a data pair formed by `(fee, digest)`. 
The data pair +/// of a node with two or more leaves is defined as: +/// +/// (left.fee + right.fee, hash(0x01 ++ left.fee ++ left.digest ++ right.fee ++ +/// right.digest)) +/// +/// This is in contrast to the Binary Merkle Tree node, where a node has only a digest. +/// +/// See the [specification](https://github.com/FuelLabs/fuel-specs/blob/master/src/protocol/cryptographic-primitives.md#merkle-trees) +/// for more details. +/// +/// **Details** +/// +/// When joining subtrees `a` and `b`, the joined subtree is now defined as: +/// +/// fee: a.fee + b.fee +/// data: node_sum(a.fee, a.data, b.fee, b.data) +/// +/// where `node_sum` is defined as the hash function described in the data pair +/// description above. +pub struct MerkleTree { + storage: StorageType, + head: Option>, + phantom_table: PhantomData, +} + +impl MerkleTree { + pub const fn empty_root() -> (u64, Bytes32) { + (0, *empty_sum()) + } +} + +impl MerkleTree +where + TableType: Mappable, + StorageType: StorageMutate, +{ + pub fn new(storage: StorageType) -> Self { + Self { + storage, + head: None, + phantom_table: Default::default(), + } + } + + pub fn root(&mut self) -> Result<(u64, Bytes32), StorageError> { + let root_node = self.root_node()?; + let root_pair = match root_node { + None => Self::empty_root(), + Some(ref node) => (node.fee(), *node.hash()), + }; + + Ok(root_pair) + } + + pub fn push(&mut self, fee: u64, data: &[u8]) -> Result<(), StorageError> { + let node = Node::create_leaf(fee, data); + self.storage.insert(node.hash(), &node)?; + + let next = self.head.take(); + let head = Subtree::::new(node, next); + self.head = Some(head); + self.join_all_subtrees()?; + + Ok(()) + } + + // PRIVATE + // + + fn root_node(&mut self) -> Result, StorageError> { + let root_node = match self.head { + None => None, + Some(ref initial) => { + let mut current = initial.clone(); + while current.next().is_some() { + let mut head = current; + let mut head_next = head.take_next().unwrap(); + current = 
self.join_subtrees(&mut head_next, &mut head)? + } + Some(current.node().clone()) + } + }; + + Ok(root_node) + } + + fn join_all_subtrees(&mut self) -> Result<(), StorageError> { + loop { + let current = self.head.as_ref().unwrap(); + if !(current.next().is_some() + && current.node().height() == current.next_node().unwrap().height()) + { + break + } + + // Merge the two front nodes of the list into a single node + let joined_node = { + let mut head = self.head.take().unwrap(); + let mut head_next = head.take_next().unwrap(); + self.join_subtrees(&mut head_next, &mut head)? + }; + self.head = Some(joined_node); + } + + Ok(()) + } + + fn join_subtrees( + &mut self, + lhs: &mut Subtree, + rhs: &mut Subtree, + ) -> Result, StorageError> { + let height = lhs.node().height() + 1; + let joined_node = Node::create_node( + height, + lhs.node().fee(), + lhs.node().hash(), + rhs.node().fee(), + rhs.node().hash(), + ); + self.storage.insert(joined_node.hash(), &joined_node)?; + + let joined_head = Subtree::new(joined_node, lhs.take_next()); + + Ok(joined_head) + } +} + +#[cfg(test)] +mod test { + use crate::{ + common::{ + Bytes32, + StorageMap, + }, + sum::{ + leaf_sum, + node_sum, + MerkleTree, + Node, + }, + }; + use fuel_merkle_test_helpers::TEST_DATA; + use fuel_storage::Mappable; + + pub struct TestTable; + + impl Mappable for TestTable { + type Key = Self::OwnedKey; + type OwnedKey = Bytes32; + type OwnedValue = Node; + type Value = Self::OwnedValue; + } + + const FEE: u64 = 100; + + #[test] + fn root_returns_the_hash_of_the_empty_string_when_no_leaves_are_pushed() { + let mut storage_map = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage_map); + + let root = tree.root().unwrap(); + assert_eq!(root, MerkleTree::<(), ()>::empty_root()); + } + + #[test] + fn root_returns_the_hash_of_the_leaf_when_one_leaf_is_pushed() { + let mut storage_map = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage_map); + + let data = &TEST_DATA[0]; + let _ = 
tree.push(FEE, data); + let root = tree.root().unwrap(); + + let expected = (FEE, leaf_sum(FEE, data)); + assert_eq!(root, expected); + } + + #[test] + fn root_returns_the_hash_of_the_head_when_4_leaves_are_pushed() { + let mut storage_map = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage_map); + + let data = &TEST_DATA[0..4]; // 4 leaves + for datum in data.iter() { + let _ = tree.push(FEE, datum); + } + let root = tree.root().unwrap(); + + // N2 + // / \ + // / \ + // N0 N1 + // / \ / \ + // L0 L1 L2 L3 + + let leaf_0 = leaf_sum(FEE, data[0]); + let leaf_1 = leaf_sum(FEE, data[1]); + let leaf_2 = leaf_sum(FEE, data[2]); + let leaf_3 = leaf_sum(FEE, data[3]); + + let node_0 = node_sum(FEE * 1, &leaf_0, FEE * 1, &leaf_1); + let node_1 = node_sum(FEE * 1, &leaf_2, FEE * 1, &leaf_3); + let node_2 = node_sum(FEE * 2, &node_0, FEE * 2, &node_1); + + let expected = (FEE * 4, node_2); + assert_eq!(root, expected); + } + + #[test] + fn root_returns_the_hash_of_the_head_when_5_leaves_are_pushed() { + let mut storage_map = StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage_map); + + let data = &TEST_DATA[0..5]; // 5 leaves + for datum in data.iter() { + let _ = tree.push(FEE, datum); + } + let root = tree.root().unwrap(); + + // N3 + // / \ + // N2 \ + // / \ \ + // / \ \ + // N0 N1 \ + // / \ / \ \ + // L0 L1 L2 L3 L4 + + let leaf_0 = leaf_sum(FEE, data[0]); + let leaf_1 = leaf_sum(FEE, data[1]); + let leaf_2 = leaf_sum(FEE, data[2]); + let leaf_3 = leaf_sum(FEE, data[3]); + let leaf_4 = leaf_sum(FEE, data[4]); + + let node_0 = node_sum(FEE * 1, &leaf_0, FEE * 1, &leaf_1); + let node_1 = node_sum(FEE * 1, &leaf_2, FEE * 1, &leaf_3); + let node_2 = node_sum(FEE * 2, &node_0, FEE * 2, &node_1); + let node_3 = node_sum(FEE * 4, &node_2, FEE * 1, &leaf_4); + + let expected = (FEE * 5, node_3); + assert_eq!(root, expected); + } + + #[test] + fn root_returns_the_hash_of_the_head_when_7_leaves_are_pushed() { + let mut storage_map = 
StorageMap::::new(); + let mut tree = MerkleTree::new(&mut storage_map); + + let data = &TEST_DATA[0..7]; // 7 leaves + for datum in data.iter() { + let _ = tree.push(FEE, datum); + } + let root = tree.root().unwrap(); + + // N5 + // / \ + // / \ + // / \ + // / \ + // N3 N4 + // / \ /\ + // / \ / \ + // N0 N1 N2 \ + // / \ / \ / \ \ + // L0 L1 L2 L3 L4 L5 L6 + + let leaf_0 = leaf_sum(FEE, data[0]); + let leaf_1 = leaf_sum(FEE, data[1]); + let leaf_2 = leaf_sum(FEE, data[2]); + let leaf_3 = leaf_sum(FEE, data[3]); + let leaf_4 = leaf_sum(FEE, data[4]); + let leaf_5 = leaf_sum(FEE, data[5]); + let leaf_6 = leaf_sum(FEE, data[6]); + + let node_0 = node_sum(FEE * 1, &leaf_0, FEE * 1, &leaf_1); + let node_1 = node_sum(FEE * 1, &leaf_2, FEE * 1, &leaf_3); + let node_2 = node_sum(FEE * 1, &leaf_4, FEE * 1, &leaf_5); + let node_3 = node_sum(FEE * 2, &node_0, FEE * 2, &node_1); + let node_4 = node_sum(FEE * 2, &node_2, FEE * 1, &leaf_6); + let node_5 = node_sum(FEE * 4, &node_3, FEE * 3, &node_4); + + let expected = (FEE * 7, node_5); + assert_eq!(root, expected); + } +} diff --git a/fuel-merkle/src/sum/node.rs b/fuel-merkle/src/sum/node.rs new file mode 100644 index 0000000000..1059ec1f4e --- /dev/null +++ b/fuel-merkle/src/sum/node.rs @@ -0,0 +1,98 @@ +use crate::{ + common::Bytes32, + sum::{ + leaf_sum, + node_sum, + }, +}; +use core::fmt; + +#[derive(Clone)] +pub struct Node { + height: u32, + hash: Bytes32, + fee: u64, + left_child_key: Option, + right_child_key: Option, +} + +impl Node { + pub fn create_leaf(fee: u64, data: &[u8]) -> Self { + Self { + height: 0, + hash: leaf_sum(fee, data), + fee, + left_child_key: None, + right_child_key: None, + } + } + + pub fn create_node( + height: u32, + lhs_fee: u64, + lhs_key: &Bytes32, + rhs_fee: u64, + rhs_key: &Bytes32, + ) -> Self { + Self { + height, + hash: node_sum(lhs_fee, lhs_key, rhs_fee, rhs_key), + fee: lhs_fee + rhs_fee, + left_child_key: Some(*lhs_key), + right_child_key: Some(*rhs_key), + } + } + + pub fn 
height(&self) -> u32 { + self.height + } + + pub fn hash(&self) -> &Bytes32 { + &self.hash + } + + pub fn fee(&self) -> u64 { + self.fee + } + + pub fn left_child_key(&self) -> Option { + self.left_child_key + } + + pub fn right_child_key(&self) -> Option { + self.right_child_key + } + + pub fn is_leaf(&self) -> bool { + self.height == 0 + } + + pub fn is_node(&self) -> bool { + !self.is_leaf() + } +} + +impl fmt::Debug for Node { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.is_node() { + f.debug_struct("Node (Internal)") + .field("Hash", &hex::encode(self.hash())) + .field("Fee", &self.fee) + .field( + "Left child key", + &hex::encode(self.left_child_key().unwrap()), + ) + .field( + "Right child key", + &hex::encode(self.right_child_key().unwrap()), + ) + .finish() + } else { + f.debug_struct("Node (Leaf)") + .field("Hash", &hex::encode(self.hash())) + .field("Fee", &self.fee) + .field("Key", &hex::encode(self.hash())) + .finish() + } + } +} diff --git a/fuel-merkle/src/tests/binary_verify.rs b/fuel-merkle/src/tests/binary_verify.rs deleted file mode 100644 index bb1cdde6d3..0000000000 --- a/fuel-merkle/src/tests/binary_verify.rs +++ /dev/null @@ -1,199 +0,0 @@ -#![allow(non_snake_case, clippy::arithmetic_side_effects)] - -use core::fmt::{ - Debug, - Formatter, -}; - -use proptest::{ - arbitrary::any, - collection::vec, - prop_assert, - prop_compose, - proptest, - strategy::Strategy, -}; - -use crate::{ - binary::{ - verify, - MerkleTree, - Primitive, - }, - common::{ - Bytes32, - ProofSet, - StorageMap, - }, -}; -use fuel_storage::Mappable; - -#[derive(Debug)] -struct TestTable; - -impl Mappable for TestTable { - type Key = Self::OwnedKey; - type OwnedKey = u64; - type OwnedValue = Primitive; - type Value = Self::OwnedValue; -} - -#[derive(Copy, Clone, Eq, PartialEq, proptest_derive::Arbitrary)] -struct Value(Bytes32); - -impl Debug for Value { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - 
f.write_str(&format!("Value({})", hex::encode(self.0))) - } -} - -impl AsRef<[u8]> for Value { - fn as_ref(&self) -> &[u8] { - &self.0 - } -} - -impl From for Bytes32 { - fn from(value: Value) -> Self { - value.0 - } -} - -fn _values(n: usize) -> impl Strategy> { - vec(any::(), n) -} - -prop_compose! { - fn values(min: usize, max: usize)(n in min..max)(v in _values(n)) -> Vec { - v.into_iter().collect::>() - } -} - -prop_compose! { - fn random_tree(min: usize, max: usize)(values in values(min, max)) -> (Vec, MerkleTree>) { - let storage = StorageMap::::new(); - let mut tree = MerkleTree::new(storage); - for datum in values.iter() { - tree.push(datum.as_ref()).unwrap(); - } - (values, tree) - } -} - -proptest! { - #[test] - fn verify__returns_true_for_valid_proof((values, tree) in random_tree(1, 1_000), arb_num: usize){ - let num_leaves = values.len(); - let index = arb_num % num_leaves; - let data = values[index]; - - // Given - let (root, proof_set) = tree.prove(index as u64).expect("Unable to generate proof"); - - // When - let verification = verify(&root, &data, &proof_set, index as u64, num_leaves as u64); - - // Then - prop_assert!(verification) - } - - #[test] - fn verify__returns_false_for_invalid_root((values, tree) in random_tree(1, 1_000), arb_num: usize, root: Bytes32){ - let num_leaves = values.len(); - let index = arb_num % num_leaves; - let data = values[index]; - - // Given - let (_, proof_set) = tree.prove(index as u64).expect("Unable to generate proof"); - - // When - let verification = verify(&root, &data, &proof_set, index as u64, num_leaves as u64); - - // Then - prop_assert!(!verification) - } - - #[test] - fn verify__returns_false_for_invalid_proof_set((values, tree) in random_tree(1, 1_000), arb_num: usize, proof_set: ProofSet){ - let num_leaves = values.len(); - let index = arb_num % num_leaves; - let data = values[index]; - - // Given - let (root, _) = tree.prove(index as u64).expect("Unable to generate proof"); - - // When - let 
verification = verify(&root, &data, &proof_set, index as u64, num_leaves as u64); - - // Then - prop_assert!(!verification) - } - - #[test] - fn verify__returns_true_for_valid_proof_of_last_leaf((values, tree) in random_tree(1, 1_000)){ - let num_leaves = values.len(); - let index = num_leaves - 1; - let data = values[index]; - - // Given - let (root, proof_set) = tree.prove(index as u64).expect("Unable to generate proof"); - - // When - let verification = verify(&root, &data, &proof_set, index as u64, num_leaves as u64); - - // Then - prop_assert!(verification) - } - - #[test] - fn verify__returns_false_for_invalid_proof_of_last_leaf((values, tree) in random_tree(1, 1_000), incorrect_num_leaves: u64){ - let num_leaves = values.len(); - proptest::prop_assume!(num_leaves as u64 != incorrect_num_leaves); - - let index = num_leaves - 1; - let data = values[index]; - - // Given - let (root, proof_set) = tree.prove(index as u64).expect("Unable to generate proof"); - - // When - let verification = verify(&root, &data, &proof_set, index as u64, incorrect_num_leaves); - - // Then - prop_assert!(!verification) - } - - #[test] - fn verify__returns_false_for_invalid_proof_index((values, tree) in random_tree(1, 1_000), invalid_index: u64){ - let num_leaves = values.len(); - let valid_index = num_leaves - 1; - proptest::prop_assume!(invalid_index != valid_index as u64); - let data = values[valid_index]; - - // Given - let (root, proof_set) = tree.prove(valid_index as u64).expect("Unable to generate proof"); - - // When - let verification = verify(&root, &data, &proof_set, invalid_index, num_leaves as u64); - - // Then - prop_assert!(!verification) - } - - #[test] - fn verify__returns_false_for_invalid_proof_index_and_num_leaves((values, tree) in random_tree(1, 1_000), invalid_index: u64, incorrect_num_leaves: u64){ - let num_leaves = values.len(); - let valid_index = num_leaves - 1; - proptest::prop_assume!(invalid_index != valid_index as u64); - let data = values[valid_index]; 
- - // Given - let (root, proof_set) = tree.prove(valid_index as u64).expect("Unable to generate proof"); - - // When - let verification = verify(&root, &data, &proof_set, invalid_index, incorrect_num_leaves); - - // Then - prop_assert!(!verification) - } -} diff --git a/fuel-merkle/src/tests/mod.rs b/fuel-merkle/src/tests/mod.rs index bcd6c1d02a..f7810763ca 100644 --- a/fuel-merkle/src/tests/mod.rs +++ b/fuel-merkle/src/tests/mod.rs @@ -4,5 +4,3 @@ use datatest_stable as _; use serde_yaml as _; mod binary; -mod binary_verify; -mod sparse; diff --git a/fuel-merkle/src/tests/sparse.rs b/fuel-merkle/src/tests/sparse.rs deleted file mode 100644 index fdf56d66fc..0000000000 --- a/fuel-merkle/src/tests/sparse.rs +++ /dev/null @@ -1,201 +0,0 @@ -#![allow(non_snake_case, clippy::arithmetic_side_effects)] - -use crate::{ - common::{ - Bytes32, - StorageMap, - }, - sparse::{ - proof::{ - ExclusionLeaf, - ExclusionLeafData, - ExclusionProof, - Proof, - }, - MerkleTree, - MerkleTreeKey, - Primitive, - }, -}; - -use fuel_storage::Mappable; - -use core::fmt::{ - Debug, - Formatter, -}; -use proptest::{ - arbitrary::any, - collection::{ - hash_set, - vec, - }, - prop_assert, - prop_assume, - prop_compose, - proptest, - strategy::Strategy, -}; -use std::collections::HashSet; - -#[derive(Debug)] -struct TestTable; - -impl Mappable for TestTable { - type Key = Self::OwnedKey; - type OwnedKey = Bytes32; - type OwnedValue = Primitive; - type Value = Self::OwnedValue; -} - -#[derive(Copy, Clone, Eq, Hash, PartialEq, proptest_derive::Arbitrary)] -struct Key(Bytes32); - -impl Debug for Key { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - f.write_str(&format!("Key({})", hex::encode(self.0))) - } -} - -impl AsRef<[u8]> for Key { - fn as_ref(&self) -> &[u8] { - &self.0 - } -} - -impl From for Bytes32 { - fn from(value: Key) -> Self { - value.0 - } -} - -#[derive(Copy, Clone, Eq, PartialEq, proptest_derive::Arbitrary)] -struct Value(Bytes32); - -impl Debug for Value { - fn 
fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - f.write_str(&format!("Value({})", hex::encode(self.0))) - } -} - -impl AsRef<[u8]> for Value { - fn as_ref(&self) -> &[u8] { - &self.0 - } -} - -impl From for Bytes32 { - fn from(value: Value) -> Self { - value.0 - } -} - -fn keys(n: usize) -> impl Strategy> { - hash_set(any::(), n) -} - -fn values(n: usize) -> impl Strategy> { - vec(any::(), n) -} - -prop_compose! { - fn key_values(min: usize, max: usize)(n in min..max)(k in keys(n), v in values(n)) -> Vec<(Key, Value)> { - k.into_iter().zip(v.into_iter()).collect::>() - } -} - -prop_compose! { - fn random_tree(min: usize, max: usize)(kv in key_values(min, max)) -> (Vec<(Key, Value)>, MerkleTree>) { - let storage = StorageMap::::new(); - let iter = kv.clone().into_iter().map(|(key, value)| (MerkleTreeKey::new(key), value)); - let tree = MerkleTree::from_set(storage, iter).expect("Unable to create Merkle tree"); - (kv, tree) - } -} - -proptest! { - #[test] - fn inclusion_proof__verify__returns_true_with_correct_key_and_correct_value((key_values, tree) in random_tree(1, 100), arb_num: usize) { - let root = tree.root(); - - // Given - let index = arb_num % key_values.len(); - let (key, value) = key_values[index]; - let key = MerkleTreeKey::new(key); - let proof = tree.generate_proof(&key).expect("Infallible"); - - // When - let inclusion = match proof { - Proof::Inclusion(proof) => proof.verify(&root, &key, value.as_ref()), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - prop_assert!(inclusion) - } - - #[test] - fn inclusion_proof__verify__returns_false_with_correct_key_and_incorrect_value((key_values, tree) in random_tree(1, 100), arb_num: usize, value: Value) { - let root = tree.root(); - - // Given - let index = arb_num % key_values.len(); - let (key, _) = key_values[index]; - let key = MerkleTreeKey::new(key); - let proof = tree.generate_proof(&key).expect("Infallible"); - - // When - let inclusion = match proof { - 
Proof::Inclusion(proof) => proof.verify(&root, &key, value.as_ref()), - Proof::Exclusion(_) => panic!("Expected InclusionProof"), - }; - - // Then - prop_assert!(!inclusion) - } - - #[test] - fn exclusion_proof__verify__returns_true_with_excluded_key((key_values, tree) in random_tree(1, 100), key: Key) { - let root = tree.root(); - - // Given - prop_assume!(!key_values.iter().any(|(k, _)| *k == key)); - let key = MerkleTreeKey::new(key); - let proof = tree.generate_proof(&key).expect("Infallible"); - - // When - let exclusion = match proof { - Proof::Inclusion(_) => panic!("Expected ExclusionProof"), - Proof::Exclusion(proof) => proof.verify(&root, &key), - }; - - // Then - prop_assert!(exclusion) - } - - #[test] - fn exclusion_proof__verify__returns_false_for_included_key((key_values, tree) in random_tree(1, 100)) { - let root = tree.root(); - - // Given - let (included_key, included_value) = key_values[0]; - let included_key = MerkleTreeKey::new(included_key); - let Proof::Inclusion(inclusion_proof) = tree.generate_proof(&included_key).expect("Infallible") else { panic!("Expected InclusionProof") }; - let exlucion_proof = ExclusionProof { - proof_set: inclusion_proof.proof_set.clone(), - leaf: ExclusionLeaf::Leaf(ExclusionLeafData { - leaf_key: included_key.into(), - leaf_value: included_value.into(), - }) - }; - - // When - let inclusion_result = inclusion_proof.verify(&root, &included_key, included_value.as_ref()); - let exclusion_result = exlucion_proof.verify(&root, &included_key); - - // Then - prop_assert!(inclusion_result); - prop_assert!(!exclusion_result); - prop_assert!(inclusion_result != exclusion_result); - } -} diff --git a/fuel-merkle/test-helpers/src/binary/merkle_tree.rs b/fuel-merkle/test-helpers/src/binary/merkle_tree.rs index 86b9435b06..ac27d85366 100644 --- a/fuel-merkle/test-helpers/src/binary/merkle_tree.rs +++ b/fuel-merkle/test-helpers/src/binary/merkle_tree.rs @@ -99,6 +99,9 @@ impl MerkleTree { (root, proof_set.into()) } + // PRIVATE + 
// + fn head(&self) -> &Option> { &self.head } diff --git a/fuel-merkle/test-helpers/src/binary/verify.rs b/fuel-merkle/test-helpers/src/binary/verify.rs index 708e4e59f8..73a37e8c0e 100644 --- a/fuel-merkle/test-helpers/src/binary/verify.rs +++ b/fuel-merkle/test-helpers/src/binary/verify.rs @@ -7,7 +7,7 @@ use crate::binary::{ pub fn verify>( root: &Data, data: &T, - proof_set: &[Data], + proof_set: &Vec, proof_index: u64, num_leaves: u64, ) -> bool { diff --git a/fuel-merkle/test-helpers/src/suites/binary_proofs.rs b/fuel-merkle/test-helpers/src/suites/binary_proofs.rs index 1bbadddef3..8bbbe47112 100644 --- a/fuel-merkle/test-helpers/src/suites/binary_proofs.rs +++ b/fuel-merkle/test-helpers/src/suites/binary_proofs.rs @@ -27,7 +27,7 @@ fn generate_test( name: String, function_name: String, description: String, - sample_data: &[Bytes32], + sample_data: &Vec, proof_index: u64, ) -> ProofTest { let (root, proof_set) = { diff --git a/fuel-storage/src/impls.rs b/fuel-storage/src/impls.rs index e7abe4e306..92ddfeb5cd 100644 --- a/fuel-storage/src/impls.rs +++ b/fuel-storage/src/impls.rs @@ -124,26 +124,6 @@ impl<'a, T: StorageRead + StorageSize + ?Sized, Type: Mappable> } } -impl<'a, T: StorageWrite + ?Sized, Type: Mappable> StorageWrite - for &'a mut T -{ - fn write(&mut self, key: &Type::Key, buf: &[u8]) -> Result { - >::write(self, key, buf) - } - - fn replace( - &mut self, - key: &Type::Key, - buf: &[u8], - ) -> Result<(usize, Option>), Self::Error> { - >::replace(self, key, buf) - } - - fn take(&mut self, key: &Type::Key) -> Result>, Self::Error> { - >::take(self, key) - } -} - impl<'a, T: MerkleRootStorage + ?Sized, Key, Type: Mappable> MerkleRootStorage for &'a mut T { @@ -241,7 +221,7 @@ impl<'a, T, Type: Mappable> StorageMut<'a, T, Type> { impl<'a, T: StorageWrite, Type: Mappable> StorageMut<'a, T, Type> { #[inline(always)] - pub fn write(&mut self, key: &Type::Key, buf: &[u8]) -> Result { + pub fn write(&mut self, key: &Type::Key, buf: Vec) -> Result { 
self.0.write(key, buf) } @@ -249,7 +229,7 @@ impl<'a, T: StorageWrite, Type: Mappable> StorageMut<'a, T, Type> { pub fn replace( &mut self, key: &Type::Key, - buf: &[u8], + buf: Vec, ) -> Result<(usize, Option>), T::Error> where T: StorageSize, diff --git a/fuel-storage/src/lib.rs b/fuel-storage/src/lib.rs index 719c2bc6d7..3c9fcac6e5 100644 --- a/fuel-storage/src/lib.rs +++ b/fuel-storage/src/lib.rs @@ -1,11 +1,5 @@ #![no_std] -#![deny( - clippy::arithmetic_side_effects, - clippy::cast_sign_loss, - clippy::cast_possible_truncation, - clippy::cast_possible_wrap, - clippy::string_slice -)] +#![deny(clippy::cast_possible_truncation)] #![deny(unsafe_code)] #![deny(unused_crate_dependencies)] @@ -138,7 +132,7 @@ pub trait StorageWrite: StorageMutate { /// Does not perform any serialization. /// /// Returns the number of bytes written. - fn write(&mut self, key: &Type::Key, buf: &[u8]) -> Result; + fn write(&mut self, key: &Type::Key, buf: Vec) -> Result; /// Write the value to the given key from the provided buffer and /// return the previous value if it existed. @@ -149,16 +143,19 @@ pub trait StorageWrite: StorageMutate { fn replace( &mut self, key: &Type::Key, - buf: &[u8], - ) -> Result<(usize, Option>), Self::Error>; + buf: Vec, + ) -> Result<(usize, Option>), Self::Error> + where + Self: StorageSize; /// Removes a value from the storage and returning it without deserializing it. fn take(&mut self, key: &Type::Key) -> Result>, Self::Error>; } -/// Returns the merkle root for the `StorageType` per merkle `Key`. Per one storage, it is -/// possible to have several merkle trees under different `Key`. -pub trait MerkleRootStorage: StorageInspect +/// Returns the merkle root for the `StorageType` per merkle `Key`. The type should +/// implement the `StorageMutate` for the `StorageType`. Per one storage, it is possible +/// to have several merkle trees under different `Key`. 
+pub trait MerkleRootStorage: StorageMutate where StorageType: Mappable, { diff --git a/fuel-tx/Cargo.toml b/fuel-tx/Cargo.toml index b1ccdb1dc8..1a347cca6f 100644 --- a/fuel-tx/Cargo.toml +++ b/fuel-tx/Cargo.toml @@ -17,13 +17,12 @@ derive_more = { version = "0.99", default-features = false, features = ["display fuel-asm = { workspace = true, default-features = false } fuel-crypto = { workspace = true, default-features = false } fuel-merkle = { workspace = true, default-features = false, optional = true } -fuel-types = { workspace = true, default-features = false, features = ["serde"] } +fuel-types = { workspace = true, default-features = false } hashbrown = { version = "0.14", optional = true } itertools = { version = "0.10", default-features = false, optional = true } js-sys = { version = "0.3", optional = true } -postcard = { version = "1.0", features = ["alloc"] } rand = { version = "0.8", default-features = false, features = ["std_rng"], optional = true } -serde = { version = "1.0", default-features = false, features = ["alloc", "derive"] } +serde = { version = "1.0", default-features = false, features = ["alloc", "derive"], optional = true } serde-wasm-bindgen = { version = "0.6", optional = true } serde_json = { version = "1.0", default-features = false, features = ["alloc"], optional = true } strum = { version = "0.24", default-features = false, optional = true } @@ -33,7 +32,8 @@ wasm-bindgen = { version = "0.2.88", optional = true } [dev-dependencies] bincode = { workspace = true } fuel-crypto = { workspace = true, default-features = false, features = ["random"] } -fuel-tx = { path = ".", features = ["random", "serde", "test-helpers"] } +fuel-tx = { path = ".", features = ["builder", "random", "serde"] } +fuel-tx-test-helpers = { path = "test-helpers" } fuel-types = { workspace = true, default-features = false, features = ["random"] } hex = { version = "0.4", default-features = false } insta = "1.0" @@ -45,11 +45,11 @@ serde_json = { version = "1.0" } 
[features] default = ["fuel-asm/default", "fuel-crypto/default", "fuel-merkle/default", "fuel-types/default", "std"] -test-helpers = ["alloc", "internals"] +builder = ["alloc", "internals"] internals = [] typescript = ["alloc", "js-sys", "wasm-bindgen", "serde", "serde-wasm-bindgen", "fuel-types/typescript"] random = ["fuel-crypto/random", "fuel-types/random", "rand"] -std = ["alloc", "fuel-asm/std", "fuel-crypto/std", "fuel-merkle/std", "fuel-types/std", "itertools/default", "rand?/default", "serde/default", "hex/std"] +std = ["alloc", "fuel-asm/std", "fuel-crypto/std", "fuel-merkle/std", "fuel-types/std", "itertools/default", "rand?/default", "serde?/default", "hex/std"] alloc = ["hashbrown", "fuel-types/alloc", "itertools/use_alloc", "derivative", "fuel-merkle", "strum", "strum_macros"] # serde is requiring alloc because its mandatory for serde_json. to avoid adding a new feature only for serde_json, we just require `alloc` here since as of the moment we don't have a use case of serde without alloc. 
-serde = ["alloc", "fuel-asm/serde", "fuel-crypto/serde", "fuel-merkle/serde", "serde_json", "hashbrown/serde", "bitflags/serde"] +serde = ["alloc", "dep:serde", "fuel-asm/serde", "fuel-crypto/serde", "fuel-types/serde", "fuel-merkle/serde", "serde_json", "hashbrown/serde", "bitflags/serde"] diff --git a/fuel-tx/src/builder.rs b/fuel-tx/src/builder.rs index 81ce73cff6..f583c8019a 100644 --- a/fuel-tx/src/builder.rs +++ b/fuel-tx/src/builder.rs @@ -2,11 +2,12 @@ use crate::{ input, output, transaction::{ + field, field::{ - self, + BytecodeLength, BytecodeWitnessIndex, + GasPrice, Maturity, - Tip, Witnesses, }, Chargeable, @@ -27,10 +28,6 @@ use crate::{ Transaction, TxParameters, TxPointer, - Upgrade, - UpgradePurpose, - Upload, - UploadBody, Witness, }; @@ -45,11 +42,6 @@ use crate::{ WitnessLimit, }, policies::Policies, - transaction::{ - CreateBody, - ScriptBody, - UpgradeBody, - }, }; use alloc::{ collections::BTreeMap, @@ -104,39 +96,42 @@ impl BuildableAloc for T where } impl BuildableStd for T where T: Signable + Cacheable {} - impl BuildableSet for T where T: BuildableAloc + BuildableStd {} - impl Buildable for T where T: BuildableSet {} #[derive(Debug, Clone)] pub struct TransactionBuilder { tx: Tx, + should_prepare_script: bool, + should_prepare_predicate: bool, params: ConsensusParameters, // We take the key by reference so this lib won't have the responsibility to properly // zeroize the keys // Maps signing keys -> witness indexes - sign_keys: BTreeMap, + sign_keys: BTreeMap, } impl TransactionBuilder