chore: test setup, logs
rkdud007 committed Feb 7, 2024
1 parent d591507 commit 4d437bd
Showing 10 changed files with 276 additions and 67 deletions.
3 changes: 2 additions & 1 deletion .gitignore
@@ -1 +1,2 @@
target
target
.env
9 changes: 6 additions & 3 deletions cli/src/main.rs
@@ -26,20 +26,21 @@ struct Cli {

#[tokio::main]
async fn main() {
let start = std::time::Instant::now();
let args = Cli::parse();
dotenv::dotenv().ok();
let config = Config::init(args.rpc_url, args.datalakes, args.tasks).await;
let abstract_fetcher = AbstractFetcher::new(config.rpc_url.clone());
let tasks = tasks_decoder(config.tasks.clone()).unwrap();
let datalakes = datalake_decoder(config.datalakes.clone()).unwrap();

println!("tasks: {:?}\n", tasks);
println!("datalakes: {:?}\n", datalakes);

if tasks.len() != datalakes.len() {
panic!("Tasks and datalakes must have the same length");
}

println!("tasks: {:?}\n", tasks);
println!("datalakes: {:?}\n", datalakes);

let res = evaluator(
tasks,
Some(datalakes),
@@ -49,4 +50,6 @@ async fn main() {
.unwrap();
println!("res: {:?}", res.result);
println!("rpc_url: {:?}", config.rpc_url);
let duration = start.elapsed();
println!("Time elapsed in main() is: {:?}", duration);
}
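
For reference, the elapsed-time logging added to `main()` is the standard `std::time::Instant` pattern. A minimal, self-contained sketch of that pattern (the work in the middle is a stand-in for the CLI's actual config loading, decoding, and evaluation):

```rust
use std::time::Instant;

fn main() {
    // Capture a start time at the top of main(), do the work, then log the
    // elapsed duration at the end, the same pattern this commit adds to main().
    let start = Instant::now();

    // Stand-in for config loading, task/datalake decoding, and evaluation.
    let _work: u64 = (0..1_000_000u64).sum();

    let duration = start.elapsed();
    println!("Time elapsed in main() is: {:?}", duration);
}
```
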
35 changes: 22 additions & 13 deletions crates/common/src/datalake/block_sampled.rs
@@ -31,18 +31,7 @@ pub struct BlockSampledDatalake {

impl ToString for BlockSampledDatalake {
fn to_string(&self) -> String {
let block_range_start = DynSolValue::Uint(U256::from(self.block_range_start), 256);
let block_range_end = DynSolValue::Uint(U256::from(self.block_range_end), 256);
let sampled_property =
DynSolValue::Bytes(serialize_sampled_property(&self.sampled_property));
let increment = DynSolValue::Uint(U256::from(self.increment), 256);
let tuple_value = DynSolValue::Tuple(vec![
block_range_start,
block_range_end,
increment,
sampled_property,
]);
let encoded_datalake = tuple_value.abi_encode();
let encoded_datalake = self.serialize().unwrap();
let hash = keccak256(encoded_datalake);
format!("0x{:x}", hash)
}
@@ -63,7 +52,27 @@ impl BlockSampledDatalake {
}
}

pub fn from_serialized(serialized: String) -> Result<Self> {
pub fn serialize(&self) -> Result<String> {
let block_range_start = DynSolValue::Uint(U256::from(self.block_range_start), 256);
let block_range_end = DynSolValue::Uint(U256::from(self.block_range_end), 256);
let sampled_property =
DynSolValue::Bytes(serialize_sampled_property(&self.sampled_property));
let increment = DynSolValue::Uint(U256::from(self.increment), 256);
let datalake_code = DynSolValue::Uint(U256::from(0), 256);

let tuple_value = DynSolValue::Tuple(vec![
datalake_code,
block_range_start,
block_range_end,
increment,
sampled_property,
]);

let encoded_datalake = tuple_value.abi_encode_sequence().unwrap();
Ok(format!("0x{}", hex::encode(encoded_datalake)))
}

pub fn deserialize(serialized: String) -> Result<Self> {
let datalake_type: DynSolType = "(uint256,uint256,uint256,uint256,bytes)".parse()?;
let bytes = Vec::from_hex(serialized).expect("Invalid hex string");
let decoded = datalake_type.abi_decode_sequence(&bytes)?;
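
For context, the new `serialize()` encodes the tuple `(datalake_code, block_range_start, block_range_end, increment, sampled_property)` with `abi_encode_sequence`, i.e. laid out like function arguments, which is what the `(uint256,uint256,uint256,uint256,bytes)` type in `deserialize()` expects. A rough standalone sketch of that round trip, using the same `alloy_dyn_abi` calls as above (the sample values and the `anyhow` error handling are illustrative assumptions):

```rust
use alloy_dyn_abi::{DynSolType, DynSolValue};
use alloy_primitives::{hex, U256};

fn main() -> anyhow::Result<()> {
    // Same shape as BlockSampledDatalake::serialize(): datalake_code 0 marks a
    // block-sampled datalake, followed by the block range, increment, and the
    // serialized sampled-property bytes (illustrative values here).
    let tuple = DynSolValue::Tuple(vec![
        DynSolValue::Uint(U256::from(0), 256),        // datalake_code
        DynSolValue::Uint(U256::from(10399990), 256), // block_range_start
        DynSolValue::Uint(U256::from(10400000), 256), // block_range_end
        DynSolValue::Uint(U256::from(1), 256),        // increment
        DynSolValue::Bytes(vec![0x01, 0x0f]),         // sampled_property
    ]);
    let encoded = tuple.abi_encode_sequence().unwrap();
    println!("0x{}", hex::encode(&encoded));

    // Decode with the same sequence type deserialize() parses.
    let ty: DynSolType = "(uint256,uint256,uint256,uint256,bytes)".parse()?;
    let decoded = ty.abi_decode_sequence(&encoded)?;
    println!("{:?}", decoded);
    Ok(())
}
```
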
46 changes: 29 additions & 17 deletions crates/common/src/datalake/dynamic_layout.rs
@@ -1,5 +1,8 @@
use alloy_dyn_abi::{DynSolType, DynSolValue};
use alloy_primitives::{hex::FromHex, keccak256, U256};
use alloy_primitives::{
hex::{self, FromHex},
keccak256, U256,
};
use anyhow::{bail, Result};

use crate::compiler::test::test_closer;
@@ -21,21 +24,7 @@ pub struct DynamicLayoutDatalake {

impl ToString for DynamicLayoutDatalake {
fn to_string(&self) -> String {
let blocknumber = DynSolValue::Uint(U256::from(self.block_number), 256);
let account_address = DynSolValue::Address(self.account_address.parse().unwrap());
let slot_index = DynSolValue::Uint(U256::from(self.slot_index), 256);
let initial_key = DynSolValue::Uint(U256::from(self.initial_key), 256);
let key_boundry = DynSolValue::Uint(U256::from(self.key_boundry), 256);
let increment = DynSolValue::Uint(U256::from(self.increment), 256);
let tuple_value = DynSolValue::Tuple(vec![
blocknumber,
account_address,
slot_index,
initial_key,
key_boundry,
increment,
]);
let encoded_datalake = tuple_value.abi_encode();
let encoded_datalake = self.serialize().unwrap();
let hash = keccak256(encoded_datalake);
format!("0x{:x}", hash)
}
@@ -60,7 +49,30 @@ impl DynamicLayoutDatalake {
}
}

pub fn from_serialized(serialized: String) -> Result<Self> {
pub fn serialize(&self) -> Result<String> {
let blocknumber = DynSolValue::Uint(U256::from(self.block_number), 256);
let account_address = DynSolValue::Address(self.account_address.parse().unwrap());
let slot_index = DynSolValue::Uint(U256::from(self.slot_index), 256);
let initial_key = DynSolValue::Uint(U256::from(self.initial_key), 256);
let key_boundry = DynSolValue::Uint(U256::from(self.key_boundry), 256);
let increment = DynSolValue::Uint(U256::from(self.increment), 256);
let datalake_code = DynSolValue::Uint(U256::from(1), 256);

let tuple_value = DynSolValue::Tuple(vec![
datalake_code,
blocknumber,
account_address,
slot_index,
initial_key,
key_boundry,
increment,
]);

let encoded_datalake = tuple_value.abi_encode();
Ok(format!("0x{}", hex::encode(encoded_datalake)))
}

pub fn deserialize(serialized: String) -> Result<Self> {
let datalake_type: DynSolType =
"(uint256,uint256,address,uint256,uint256,uint256,uint256)".parse()?;
let bytes = Vec::from_hex(serialized).expect("Invalid hex string");
3 changes: 3 additions & 0 deletions crates/common/src/fetcher/mod.rs
@@ -28,6 +28,7 @@ impl AbstractFetcher {
}

pub async fn get_rlp_header(&mut self, block_number: usize) -> RlpEncodedValue {
let start_fetch = std::time::Instant::now();
match self.memory.get_rlp_header(block_number) {
Some(header) => header,
None => {
@@ -39,6 +40,8 @@
let block_header = BlockHeader::from(&header_rpc);
let rlp_encoded = block_header.rlp_encode();
self.memory.set_header(block_number, rlp_encoded.clone());
let duration = start_fetch.elapsed();
println!("Time elapsed in get_rlp_header() is: {:?}", duration);
rlp_encoded
}
}
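
The fetcher change applies the same timing idea: start a timer before the memory lookup and log the elapsed time only on the slow path that actually goes to the RPC. A minimal sketch of that control flow, with a plain `HashMap` standing in for the real memory cache and RPC client (the `Fetcher` type here is illustrative, not the crate's `AbstractFetcher`):

```rust
use std::collections::HashMap;
use std::time::Instant;

struct Fetcher {
    // Stand-in for the fetcher's in-memory header cache.
    memory: HashMap<usize, String>,
}

impl Fetcher {
    fn get_rlp_header(&mut self, block_number: usize) -> String {
        let start_fetch = Instant::now();
        match self.memory.get(&block_number) {
            Some(header) => header.clone(),
            None => {
                // Stand-in for the RPC call and RLP encoding of the block header.
                let rlp_encoded = format!("rlp({block_number})");
                self.memory.insert(block_number, rlp_encoded.clone());
                println!(
                    "Time elapsed in get_rlp_header() is: {:?}",
                    start_fetch.elapsed()
                );
                rlp_encoded
            }
        }
    }
}

fn main() {
    let mut fetcher = Fetcher { memory: HashMap::new() };
    fetcher.get_rlp_header(10399990); // cache miss: fetched, cached, and timed
    fetcher.get_rlp_header(10399990); // cache hit: served from memory, no log
}
```
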
50 changes: 29 additions & 21 deletions crates/common/src/task.rs
@@ -1,5 +1,8 @@
use alloy_dyn_abi::{DynSolType, DynSolValue};
use alloy_primitives::{keccak256, U256};
use alloy_primitives::{
hex::{self},
keccak256, U256,
};
use anyhow::Result;

use crate::{datalake::base::DatalakeBase, utils::bytes32_to_utf8_str};
@@ -25,7 +28,29 @@ impl ComputationalTask {
}
}

pub fn from_serialized(serialized: &[u8]) -> Result<Self> {
pub fn serialize(&self) -> Result<String> {
let datalake = self.datalake.as_ref().ok_or("Datalake is None").unwrap();

let datalake_identifier =
U256::from_str_radix(&datalake.identifier[2..], 16).expect("Invalid hex string");
let identifier_value = DynSolValue::Uint(datalake_identifier, 256);
let aggregate_fn_id_value = DynSolValue::String(self.aggregate_fn_id.clone());
let aggregate_fn_ctx_value = match &self.aggregate_fn_ctx {
None => DynSolValue::Bytes("".to_string().into_bytes()),
Some(ctx) => DynSolValue::Bytes(ctx.clone().into_bytes()),
};

let header_tuple_value = DynSolValue::Tuple(vec![
identifier_value,
aggregate_fn_id_value,
aggregate_fn_ctx_value,
]);

let encoded_datalake = header_tuple_value.abi_encode();
Ok(format!("0x{}", hex::encode(encoded_datalake)))
}

pub fn deserialize_aggregate_fn(serialized: &[u8]) -> Result<Self> {
let aggregate_fn_type: DynSolType = "(bytes32,bytes)".parse()?;
let decoded = aggregate_fn_type.abi_decode(serialized)?;

@@ -44,25 +69,8 @@ impl ComputationalTask {

impl ToString for ComputationalTask {
fn to_string(&self) -> String {
let datalake = self.datalake.as_ref().ok_or("Datalake is None").unwrap();

let datalake_identifier =
U256::from_str_radix(&datalake.identifier[2..], 16).expect("Invalid hex string");
let identifier_value = DynSolValue::Uint(datalake_identifier, 256);
let aggregate_fn_id_value = DynSolValue::String(self.aggregate_fn_id.clone());
let aggregate_fn_ctx_value = match &self.aggregate_fn_ctx {
None => DynSolValue::Bytes("".to_string().into_bytes()),
Some(ctx) => DynSolValue::Bytes(ctx.clone().into_bytes()),
};

let header_tuple_value = DynSolValue::Tuple(vec![
identifier_value,
aggregate_fn_id_value,
aggregate_fn_ctx_value,
]);

let datalake_header_encode = header_tuple_value.abi_encode();
let hash = keccak256(datalake_header_encode);
let encoded_datalake = self.serialize().unwrap();
let hash = keccak256(encoded_datalake);
format!("0x{:x}", hash)
}
}
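
`ComputationalTask::to_string()` now mirrors the datalake implementations: build the serialized form, hash it with `keccak256`, and render the digest as 0x-prefixed hex. A small sketch of that final step, where the input string is a placeholder for `self.serialize().unwrap()` (note the hash is taken over the returned hex string's bytes):

```rust
use alloy_primitives::keccak256;

fn main() {
    // Placeholder for the hex string returned by serialize().
    let serialized = "0x000000000000000000000000000000000000000000000000000000000000002a";
    // keccak256 hashes the string's bytes; the digest is printed as 0x-prefixed
    // hex, matching the format!("0x{:x}", hash) used in the to_string() impls.
    let hash = keccak256(serialized);
    println!("0x{:x}", hash);
}
```
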
83 changes: 83 additions & 0 deletions crates/common/tests/datalake.rs
@@ -0,0 +1,83 @@
use common::datalake::{
block_sampled::BlockSampledDatalake, dynamic_layout::DynamicLayoutDatalake,
};

#[test]
fn test_block_datalake_for_header() {
let blocksample_datalake = "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f000000000000000000000000000000000000000000000000000000000000";
let decoded_datalake =
BlockSampledDatalake::deserialize(blocksample_datalake.to_string()).unwrap();
let block_datalake =
BlockSampledDatalake::new(10399990, 10400000, "header.base_fee_per_gas".to_string(), 1);
assert_eq!(
decoded_datalake.serialize().unwrap(),
block_datalake.serialize().unwrap()
);

assert_eq!(decoded_datalake.serialize().unwrap(), blocksample_datalake);
}

#[test]
fn test_block_datalake_for_header_massive() {
let blocksample_datalake = "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009d2a6000000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f000000000000000000000000000000000000000000000000000000000000";
let decoded_datalake: BlockSampledDatalake =
BlockSampledDatalake::deserialize(blocksample_datalake.to_string()).unwrap();
let block_datalake =
BlockSampledDatalake::new(10300000, 10400000, "header.base_fee_per_gas".to_string(), 1);
assert_eq!(
decoded_datalake.serialize().unwrap(),
block_datalake.serialize().unwrap()
);

assert_eq!(decoded_datalake.serialize().unwrap(), blocksample_datalake);
}

#[test]
fn test_block_datalake_for_account() {
let blocksample_datalake = "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000016027b2f05ce9ae365c3dbf30657e2dc6449989e83d60000000000000000000000";
let decoded_datalake =
BlockSampledDatalake::deserialize(blocksample_datalake.to_string()).unwrap();
let block_datalake = BlockSampledDatalake::new(
10399990,
10400000,
"account.0x7b2f05ce9ae365c3dbf30657e2dc6449989e83d6.nonce".to_string(),
1,
);
assert_eq!(decoded_datalake, block_datalake);
assert_eq!(decoded_datalake.serialize().unwrap(), blocksample_datalake);
}

#[test]
fn test_block_datalake_for_storage() {
let blocksample_datalake = "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000035037b2f05ce9ae365c3dbf30657e2dc6449989e83d600000000000000000000000000000000000000000000000000000000000000ff0000000000000000000000";
let decoded_datalake =
BlockSampledDatalake::deserialize(blocksample_datalake.to_string()).unwrap();
let block_datalake = BlockSampledDatalake::new(
10399990,
10400000,
"storage.0x7b2f05ce9ae365c3dbf30657e2dc6449989e83d6.0x00000000000000000000000000000000000000000000000000000000000000ff".to_string(),
1,
);
assert_eq!(decoded_datalake, block_datalake);
assert_eq!(decoded_datalake.serialize().unwrap(), blocksample_datalake);
}

#[test]
fn test_dynamic_layout_datalake_serialized() {
let dynamic_layout_datalake = "0x000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000009eb0f60000000000000000000000007b2f05ce9ae365c3dbf30657e2dc6449989e83d60000000000000000000000000000000000000000000000000000000000000005000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000001";
let decoded_datalake =
DynamicLayoutDatalake::deserialize(dynamic_layout_datalake.to_string()).unwrap();
let dynamic_layout_datalake = DynamicLayoutDatalake::new(
10399990,
"0x7b2f05cE9aE365c3DBF30657e2DC6449989e83D6".to_string(),
5,
0,
3,
1,
);
assert_eq!(decoded_datalake, dynamic_layout_datalake);
assert_eq!(
decoded_datalake.serialize().unwrap(),
dynamic_layout_datalake.serialize().unwrap()
);
}
42 changes: 42 additions & 0 deletions crates/common/tests/task.rs
@@ -0,0 +1,42 @@
use alloy_dyn_abi::DynSolType;
use alloy_primitives::hex::FromHex;
use common::task::ComputationalTask;

#[test]
fn test_task_from_serialized() {
let serialized_tasks_batch = "0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000060617667000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006073756d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000606d696e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000606d6178000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000";
let tasks_type: DynSolType = "bytes[]".parse().unwrap();
let bytes = Vec::from_hex(serialized_tasks_batch).expect("Invalid hex string");
let serialized_tasks = tasks_type.abi_decode(&bytes).unwrap();
let mut computational_task_result = Vec::new();

if let Some(tasks) = serialized_tasks.as_array() {
for task in tasks {
let computational_task =
ComputationalTask::deserialize_aggregate_fn(task.as_bytes().unwrap()).unwrap();
computational_task_result.push(computational_task);
}
}

assert_eq!(computational_task_result.len(), 4);
assert_eq!(
computational_task_result[0].aggregate_fn_id,
"avg".to_string()
);
assert_eq!(computational_task_result[0].aggregate_fn_ctx, None);
assert_eq!(
computational_task_result[1].aggregate_fn_id,
"sum".to_string()
);
assert_eq!(computational_task_result[1].aggregate_fn_ctx, None);
assert_eq!(
computational_task_result[2].aggregate_fn_id,
"min".to_string()
);
assert_eq!(computational_task_result[2].aggregate_fn_ctx, None);
assert_eq!(
computational_task_result[3].aggregate_fn_id,
"max".to_string()
);
assert_eq!(computational_task_result[3].aggregate_fn_ctx, None);
}