refactor: Remove serializable things from BlockStore
Use the `Storable` trait and its `store` and `load` functions instead.
matheus23 committed Feb 14, 2024
1 parent f121a79 commit 73cb792
Showing 7 changed files with 74 additions and 101 deletions.
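Before the per-file diffs, here is a minimal sketch of what the migration looks like for callers, not code from the commit itself: values that previously went through the blanket `put_serializable`/`get_deserializable` methods on `BlockStore` now go through the `Storable` trait's `store` and `load` functions. The `roundtrip` helper, the exact trait bounds, and the use of `anyhow::Result` are assumptions; only the `value.store(store)` and `T::load(&cid, store)` call shapes are taken from the call sites in this diff.

```rust
use anyhow::Result;
use wnfs_common::{BlockStore, Storable};

/// Hypothetical helper: round-trips any `Storable` value through a block store.
/// Before this commit, the same round trip used the now-removed
/// `store.put_serializable(value)` / `store.get_deserializable(&cid)` methods.
async fn roundtrip<T: Storable>(value: &T, store: &impl BlockStore) -> Result<T> {
    // Serialization now lives on the value, not on the store.
    let cid = value.store(store).await?;
    let loaded = T::load(&cid, store).await?;
    Ok(loaded)
}
```

The effect of the change is that `BlockStore` stays a byte-oriented trait (`get_block`, `put_block`, `create_cid`), while codec and serde concerns move to the types being stored.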
89 changes: 23 additions & 66 deletions wnfs-common/src/blockstore.rs
@@ -10,10 +10,10 @@ use libipld::{
cbor::DagCborCodec,
cid::Version,
multihash::{Code, MultihashDigest},
serde as ipld_serde, Cid,
Cid,
};
use parking_lot::Mutex;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

//--------------------------------------------------------------------------------------------------
@@ -45,7 +45,7 @@ pub const CODEC_DAG_PB: u64 = 0x70;
pub const CODEC_RAW: u64 = 0x55;

//--------------------------------------------------------------------------------------------------
// Type Definitions
// Traits
//--------------------------------------------------------------------------------------------------

/// For types that implement block store operations like adding, getting content from the store.
@@ -58,27 +58,6 @@ pub trait BlockStore: CondSync {
codec: u64,
) -> impl Future<Output = Result<Cid>> + CondSend;

fn get_deserializable<V: DeserializeOwned>(
&self,
cid: &Cid,
) -> impl Future<Output = Result<V>> + CondSend {
async {
let bytes = self.get_block(cid).await?;
let ipld = decode(bytes.as_ref(), DagCborCodec)?;
Ok(ipld_serde::from_ipld::<V>(ipld)?)
}
}

fn put_serializable<V: Serialize + CondSync>(
&self,
value: &V,
) -> impl Future<Output = Result<Cid>> + CondSend {
async move {
let bytes = encode(&ipld_serde::to_ipld(value)?, DagCborCodec)?;
self.put_block(bytes, CODEC_DAG_CBOR).await
}
}

// This should be the same in all implementations of BlockStore
fn create_cid(&self, bytes: &[u8], codec: u64) -> Result<Cid> {
// If there are too many bytes, abandon this task
@@ -96,6 +75,10 @@ pub trait BlockStore: CondSync {
}
}

//--------------------------------------------------------------------------------------------------
// Implementations
//--------------------------------------------------------------------------------------------------

impl<B: BlockStore> BlockStore for &B {
async fn get_block(&self, cid: &Cid) -> Result<Bytes> {
(**self).get_block(cid).await
@@ -105,14 +88,6 @@ impl<B: BlockStore> BlockStore for &B {
(**self).put_block(bytes, codec).await
}

async fn get_deserializable<V: DeserializeOwned>(&self, cid: &Cid) -> Result<V> {
(**self).get_deserializable(cid).await
}

async fn put_serializable<V: Serialize + CondSync>(&self, value: &V) -> Result<Cid> {
(**self).put_serializable(value).await
}

fn create_cid(&self, bytes: &[u8], codec: u64) -> Result<Cid> {
(**self).create_cid(bytes, codec)
}
@@ -127,23 +102,11 @@ impl<B: BlockStore> BlockStore for Box<B> {
(**self).put_block(bytes, codec).await
}

async fn get_deserializable<V: DeserializeOwned>(&self, cid: &Cid) -> Result<V> {
(**self).get_deserializable(cid).await
}

async fn put_serializable<V: Serialize + CondSync>(&self, value: &V) -> Result<Cid> {
(**self).put_serializable(value).await
}

fn create_cid(&self, bytes: &[u8], codec: u64) -> Result<Cid> {
(**self).create_cid(bytes, codec)
}
}

//--------------------------------------------------------------------------------------------------
// Implementations
//--------------------------------------------------------------------------------------------------

/// An in-memory block store to simulate IPFS.
///
/// IPFS is basically a glorified HashMap.
@@ -195,21 +158,18 @@ impl BlockStore for MemoryBlockStore {
//--------------------------------------------------------------------------------------------------

/// Tests the retrieval property of a BlockStore-conforming type.
pub async fn bs_retrieval_test<T>(store: &T) -> Result<()>
where
T: BlockStore + 'static,
{
pub async fn bs_retrieval_test<T>(store: impl BlockStore) -> Result<()> {
// Example objects to insert and remove from the blockstore
let first_bytes = vec![1, 2, 3, 4, 5];
let second_bytes = b"hello world".to_vec();

// Insert the objects into the blockstore
let first_cid = store.put_serializable(&first_bytes).await?;
let second_cid = store.put_serializable(&second_bytes).await?;
let first_cid = store.put_block(first_bytes.clone(), CODEC_RAW).await?;
let second_cid = store.put_block(second_bytes.clone(), CODEC_RAW).await?;

// Retrieve the objects from the blockstore
let first_loaded: Vec<u8> = store.get_deserializable(&first_cid).await?;
let second_loaded: Vec<u8> = store.get_deserializable(&second_cid).await?;
let first_loaded = store.get_block(&first_cid).await?;
let second_loaded = store.get_block(&second_cid).await?;

// Assert that the objects are the same as the ones we inserted
assert_eq!(first_loaded, first_bytes);
@@ -219,24 +179,21 @@ where
}

/// Tests the duplication of a BlockStore-conforming type.
pub async fn bs_duplication_test<T>(store: &T) -> Result<()>
where
T: BlockStore + 'static,
{
pub async fn bs_duplication_test<T>(store: impl BlockStore) -> Result<()> {
// Example objects to insert and remove from the blockstore
let first_bytes = vec![1, 2, 3, 4, 5];
let second_bytes = first_bytes.clone();

// Insert the objects into the blockstore
let first_cid = store.put_serializable(&first_bytes).await?;
let second_cid = store.put_serializable(&second_bytes).await?;
let first_cid = store.put_block(first_bytes.clone(), CODEC_RAW).await?;
let second_cid = store.put_block(second_bytes.clone(), CODEC_RAW).await?;

// Assert that the two vecs produced the same CID
assert_eq!(first_cid, second_cid);

// Retrieve the objects from the blockstore
let first_loaded: Vec<u8> = store.get_deserializable(&first_cid).await?;
let second_loaded: Vec<u8> = store.get_deserializable(&second_cid).await?;
let first_loaded = store.get_block(&first_cid).await?;
let second_loaded = store.get_block(&second_cid).await?;

// Assert that the objects are the same as the ones we inserted
assert_eq!(first_loaded, first_bytes);
@@ -251,20 +208,20 @@ where
/// Tests the serialization of a BlockStore-conforming type.
pub async fn bs_serialization_test<T>(store: &T) -> Result<()>
where
T: BlockStore + Serialize + 'static + for<'de> Deserialize<'de>,
T: BlockStore + Serialize + for<'de> Deserialize<'de>,
{
// Example objects to insert and remove from the blockstore
let bytes = vec![1, 2, 3, 4, 5];

// Insert the object into the blockstore
let cid = store.put_serializable(&bytes).await?;
let cid = store.put_block(bytes.clone(), CODEC_RAW).await?;

// Serialize the BlockStore
let serial_store: Vec<u8> = encode(&store, DagCborCodec)?;
// Construct a new BlockStore from the Serialized object
let deserial_store: T = decode(&serial_store, DagCborCodec)?;
// Retrieve the object from the blockstore
let loaded: Vec<u8> = deserial_store.get_deserializable(&cid).await?;
let loaded = deserial_store.get_block(&cid).await?;

// Assert that the objects are the same as the ones we inserted
assert_eq!(loaded, bytes);
@@ -280,9 +237,9 @@ mod tests {
#[async_std::test]
async fn memory_blockstore() -> Result<()> {
let store = &MemoryBlockStore::new();
bs_retrieval_test(store).await?;
bs_duplication_test(store).await?;
bs_serialization_test(store).await?;
bs_retrieval_test::<MemoryBlockStore>(store).await?;
bs_duplication_test::<MemoryBlockStore>(store).await?;
bs_serialization_test::<MemoryBlockStore>(store).await?;
Ok(())
}
}
2 changes: 1 addition & 1 deletion wnfs-common/src/link.rs
@@ -277,7 +277,7 @@ mod tests {
async fn link_value_can_be_resolved() {
let store = &MemoryBlockStore::default();
let example = Example::new(256);
let cid = store.put_serializable(&example).await.unwrap();
let cid = example.store(store).await.unwrap();
let link = Link::<Example>::from_cid(cid);

let value = link.resolve_value(store).await.unwrap();
4 changes: 2 additions & 2 deletions wnfs-nameaccumulator/src/name.rs
@@ -821,7 +821,7 @@ mod snapshot_tests {
use crate::{BigNumDig, NameSegment};
use rand_chacha::ChaCha12Rng;
use rand_core::SeedableRng;
use wnfs_common::{utils::SnapshotBlockStore, BlockStore};
use wnfs_common::utils::SnapshotBlockStore;

#[async_std::test]
async fn test_name_accumulator() {
@@ -839,7 +839,7 @@ mod snapshot_tests {
setup,
);

let cid = store.put_serializable(&acc).await.unwrap();
let cid = acc.store(store).await.unwrap();
let name = store.get_block_snapshot(&cid).await.unwrap();

insta::assert_json_snapshot!(name);
27 changes: 19 additions & 8 deletions wnfs/src/private/keys/access.rs
@@ -124,29 +124,40 @@ impl From<&AccessKey> for Vec<u8> {
#[cfg(test)]
mod snapshot_tests {
use super::*;
use libipld_core::ipld::Ipld;
use rand::Rng;
use rand_chacha::ChaCha12Rng;
use rand_core::SeedableRng;
use wnfs_common::{utils::SnapshotBlockStore, BlockStore};
use testresult::TestResult;
use wnfs_common::{
decode, encode,
libipld::{cbor::DagCborCodec, json::DagJsonCodec},
};

#[async_std::test]
async fn test_access_key() {
async fn test_access_key() -> TestResult {
let rng = &mut ChaCha12Rng::seed_from_u64(0);
let store = &SnapshotBlockStore::default();

let private_ref =
PrivateRef::with_temporal_key(rng.gen(), TemporalKey(rng.gen()), Cid::default());

let temporal_access_key = AccessKey::Temporal(TemporalAccessKey::from(&private_ref));
let snapshot_access_key = AccessKey::Snapshot(SnapshotAccessKey::from(&private_ref));

let temp_cid = store.put_serializable(&temporal_access_key).await.unwrap();
let snap_cid = store.put_serializable(&snapshot_access_key).await.unwrap();

let temp_key = store.get_block_snapshot(&temp_cid).await.unwrap();
let snap_key = store.get_block_snapshot(&snap_cid).await.unwrap();
let temp_key = as_dag_json_value(&temporal_access_key)?;
let snap_key = as_dag_json_value(&snapshot_access_key)?;

insta::assert_json_snapshot!(temp_key);
insta::assert_json_snapshot!(snap_key);

Ok(())
}

fn as_dag_json_value(s: impl Serialize) -> Result<serde_json::Value> {
let dag_cbor = encode(&s, DagCborCodec)?;
let ipld: Ipld = decode(&dag_cbor, DagCborCodec)?;
let dag_json = encode(&ipld, DagJsonCodec)?;
let value = serde_json::from_slice(&dag_json)?;
Ok(value)
}
}
@@ -3,23 +3,19 @@ source: wnfs/src/private/keys/access.rs
expression: temp_key
---
{
"cid": "bafyr4ih2hblsecub5jvxxlbkhjgmabg2mnaxzq7jtzkchd4s4sacqxwbge",
"value": {
"wnfs/share/temporal": {
"contentCid": {
"/": "baeaaaaa"
},
"label": {
"/": {
"bytes": "f7J7lBYC0B0RVCIRE0/HGqyuVON+fQB7u3tV7/BiooQ"
}
},
"temporalKey": {
"/": {
"bytes": "mmMoPLrw/bzrH2R5sZfzqI3Q2Akv5yp8VigVOHOLB+I"
}
"wnfs/share/temporal": {
"contentCid": {
"/": "baeaaaaa"
},
"label": {
"/": {
"bytes": "f7J7lBYC0B0RVCIRE0/HGqyuVON+fQB7u3tV7/BiooQ"
}
},
"temporalKey": {
"/": {
"bytes": "mmMoPLrw/bzrH2R5sZfzqI3Q2Akv5yp8VigVOHOLB+I"
}
}
},
"bytes": "oXN3bmZzL3NoYXJlL3RlbXBvcmFso2VsYWJlbFggf7J7lBYC0B0RVCIRE0/HGqyuVON+fQB7u3tV7/BiooRqY29udGVudENpZNgqRQABAAAAa3RlbXBvcmFsS2V5WCCaYyg8uvD9vOsfZHmxl/OojdDYCS/nKnxWKBU4c4sH4g=="
}
}
14 changes: 9 additions & 5 deletions wnfs/src/public/directory.rs
@@ -891,7 +891,7 @@ mod tests {
use chrono::Utc;
use libipld_core::ipld::Ipld;
use testresult::TestResult;
use wnfs_common::MemoryBlockStore;
use wnfs_common::{decode, libipld::cbor::DagCborCodec, MemoryBlockStore};

#[async_std::test]
async fn look_up_can_fetch_file_added_to_directory() -> TestResult {
@@ -1242,10 +1242,14 @@ mod tests {

root_dir.mkdir(&["test".into()], time, store).await.unwrap();

let ipld = store
.get_deserializable::<Ipld>(&root_dir.store(store).await.unwrap())
.await
.unwrap();
let ipld: Ipld = decode(
&store
.get_block(&root_dir.store(store).await.unwrap())
.await
.unwrap(),
DagCborCodec,
)
.unwrap();
match ipld {
Ipld::Map(map) => match map.get("wnfs/pub/dir") {
Some(Ipld::Map(content)) => match content.get("previous") {
9 changes: 7 additions & 2 deletions wnfs/src/root_tree.rs
@@ -25,6 +25,8 @@ use std::collections::HashMap;
#[cfg(test)]
use wnfs_common::MemoryBlockStore;
use wnfs_common::{
decode, encode,
libipld::cbor::DagCborCodec,
utils::{Arc, CondSend},
BlockStore, Metadata, Storable,
};
@@ -309,7 +311,9 @@ where
version: WNFS_VERSION,
};

store.put_serializable(&serializable).await
store
.put_block(encode(&serializable, DagCborCodec)?, DagCborCodec.into())
.await
}

pub async fn load(
Expand All @@ -318,7 +322,8 @@ where
rng: R,
private_map: HashMap<Vec<String>, Arc<PrivateDirectory>>,
) -> Result<RootTree<'a, B, R>> {
let deserialized: RootTreeSerializable = store.get_deserializable(cid).await?;
let deserialized: RootTreeSerializable =
decode(&store.get_block(cid).await?, DagCborCodec)?;
let forest = Arc::new(HamtForest::load(&deserialized.forest, store).await?);
let public_root = Arc::new(PublicDirectory::load(&deserialized.public, store).await?);
let exchange_root = Arc::new(PublicDirectory::load(&deserialized.exchange, store).await?);
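For types that are plain serde values rather than `Storable` implementors (as with `RootTreeSerializable` in the root_tree.rs hunk above), the commit replaces the removed helpers with explicit `encode`/`decode` plus `put_block`/`get_block`. Below is a hedged standalone sketch of that pattern; the helper names are hypothetical, and it assumes `encode`/`decode` are generic over serde types, `Result` is `anyhow::Result`, and `wnfs_common` re-exports `libipld`, as the call sites in this diff suggest.

```rust
use anyhow::Result;
use serde::{de::DeserializeOwned, Serialize};
use wnfs_common::{
    decode, encode,
    libipld::{cbor::DagCborCodec, Cid},
    BlockStore,
};

/// Stores a serde value as a DAG-CBOR block, mirroring the pattern
/// inlined in `RootTree::store` above.
async fn put_dag_cbor(store: &impl BlockStore, value: &impl Serialize) -> Result<Cid> {
    store
        .put_block(encode(value, DagCborCodec)?, DagCborCodec.into())
        .await
}

/// Loads a DAG-CBOR block back into a serde value, as in `RootTree::load` above.
async fn get_dag_cbor<V: DeserializeOwned>(store: &impl BlockStore, cid: &Cid) -> Result<V> {
    Ok(decode(&store.get_block(cid).await?, DagCborCodec)?)
}
```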
