-
Notifications
You must be signed in to change notification settings - Fork 4
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #7 from HerodotusDev/feat/decode
feat: decoder implementation from cli
- Loading branch information
Showing
21 changed files
with
2,097 additions
and
90 deletions.
There are no files selected for viewing
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,56 @@ | ||
use alloy_dyn_abi::DynSolType; | ||
use alloy_primitives::hex::FromHex; | ||
use anyhow::{Ok, Result}; | ||
use types::{ | ||
datalake::{block_datalake::BlockDatalake, dynamic_layout_datalake::DynamicLayoutDatalake}, | ||
task::ComputationalTask, | ||
}; | ||
|
||
/// Datatype for decoded datalakes.
///
/// A decoded batch may mix both supported encodings, so each entry is
/// wrapped in this enum.
#[derive(Debug)]
pub enum DatalakeType {
    /// Datalake sampling a property over a block range (see [`BlockDatalake`]).
    Block(BlockDatalake),
    /// Dynamic-layout datalake (see [`DynamicLayoutDatalake`]).
    DynamicLayout(DynamicLayoutDatalake),
}
|
||
pub fn tasks_decoder(serialized_tasks_batch: String) -> Result<Vec<ComputationalTask>> { | ||
let tasks_type: DynSolType = "bytes[]".parse()?; | ||
let bytes = Vec::from_hex(serialized_tasks_batch).expect("Invalid hex string"); | ||
let serialized_tasks = tasks_type.abi_decode(&bytes)?; | ||
let mut decoded_tasks = Vec::new(); | ||
|
||
if let Some(tasks) = serialized_tasks.as_array() { | ||
for task in tasks { | ||
let computational_task = ComputationalTask::from_serialized(task.as_bytes().unwrap())?; | ||
decoded_tasks.push(computational_task); | ||
} | ||
} | ||
|
||
Ok(decoded_tasks) | ||
} | ||
|
||
// TODO: Update this to use the new bytes format | ||
pub fn datalake_decoder(serialized_datalakes_batch: String) -> Result<Vec<DatalakeType>> { | ||
let datalakes_type: DynSolType = "bytes[]".parse()?; | ||
let bytes = Vec::from_hex(serialized_datalakes_batch).expect("Invalid hex string"); | ||
let serialized_datalakes = datalakes_type.abi_decode(&bytes)?; | ||
|
||
let mut decoded_datalakes = Vec::new(); | ||
|
||
if let Some(datalakes) = serialized_datalakes.as_array() { | ||
for datalake in datalakes { | ||
let datalake_bytes = datalake.as_bytes().ok_or("Invalid datalake bytes").unwrap(); | ||
|
||
let decoded_datalake = BlockDatalake::from_serialized(datalake_bytes) | ||
.map(DatalakeType::Block) | ||
.or_else(|_| { | ||
DynamicLayoutDatalake::from_serialized(datalake_bytes) | ||
.map(DatalakeType::DynamicLayout) | ||
})?; | ||
|
||
decoded_datalakes.push(decoded_datalake); | ||
} | ||
} | ||
|
||
Ok(decoded_datalakes) | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1 +1 @@ | ||
|
||
pub mod args_decoder; |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,71 @@ | ||
use decoder::args_decoder::{datalake_decoder, tasks_decoder, DatalakeType}; | ||
|
||
#[test]
fn test_task_decoder() {
    // Note: all task's datalake is None
    let decoded_tasks = tasks_decoder("0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000060617667000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006073756d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000606d696e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000606d6178000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000".to_string()).unwrap();
    assert_eq!(decoded_tasks.len(), 4);

    // The batch encodes the aggregate functions in this fixed order, and
    // none of the tasks carries a context.
    let expected_ids = ["avg", "sum", "min", "max"];
    for (task, expected_id) in decoded_tasks.iter().zip(expected_ids.iter()) {
        assert_eq!(task.aggregate_fn_id, expected_id.to_string());
        assert_eq!(task.aggregate_fn_ctx, None);
    }
}
|
||
#[test]
fn test_block_datalake_decoder() {
    let decoded_datalakes = datalake_decoder("0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000001600000000000000000000000000000000000000000000000000000000000000240000000000000000000000000000000000000000000000000000000000000032000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000002010f000000000000000000000000000000000000000000000000000000000000".to_string()).unwrap();
    assert_eq!(decoded_datalakes.len(), 4);

    // All four entries in the batch encode the same block datalake, so one
    // loop covers what the original checked four times verbatim.
    for decoded in &decoded_datalakes {
        match decoded {
            DatalakeType::Block(block_datalake) => {
                assert_eq!(block_datalake.block_range_start, 10399990);
                assert_eq!(block_datalake.block_range_end, 10400000);
                assert_eq!(
                    block_datalake.sampled_property,
                    "header.base_fee_per_gas".to_string()
                );
                assert_eq!(block_datalake.increment, 1);
            }
            _ => panic!("Expected block datalake"),
        }
    }
}
|
||
// TODO: After Solidity calldata format update, update the datalake decoder and write test |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,93 @@ | ||
/// Fields of an Ethereum block header, in the order used for indexing.
#[derive(Debug)]
pub enum HeaderField {
    ParentHash,
    OmmerHash,
    Beneficiary,
    StateRoot,
    TransactionsRoot,
    ReceiptsRoot,
    LogsBloom,
    Difficulty,
    Number,
    GasLimit,
    GasUsed,
    Timestamp,
    ExtraData,
    MixHash,
    Nonce,
    BaseFeePerGas,
}

impl HeaderField {
    /// Maps a 0-based field index to its variant; `None` when out of range.
    pub fn from_index(index: usize) -> Option<Self> {
        let field = match index {
            0 => Self::ParentHash,
            1 => Self::OmmerHash,
            2 => Self::Beneficiary,
            3 => Self::StateRoot,
            4 => Self::TransactionsRoot,
            5 => Self::ReceiptsRoot,
            6 => Self::LogsBloom,
            7 => Self::Difficulty,
            8 => Self::Number,
            9 => Self::GasLimit,
            10 => Self::GasUsed,
            11 => Self::Timestamp,
            12 => Self::ExtraData,
            13 => Self::MixHash,
            14 => Self::Nonce,
            15 => Self::BaseFeePerGas,
            _ => return None,
        };
        Some(field)
    }

    /// Returns the field's canonical SCREAMING_SNAKE_CASE name.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::ParentHash => "PARENT_HASH",
            Self::OmmerHash => "OMMERS_HASH",
            Self::Beneficiary => "BENEFICIARY",
            Self::StateRoot => "STATE_ROOT",
            Self::TransactionsRoot => "TRANSACTIONS_ROOT",
            Self::ReceiptsRoot => "RECEIPTS_ROOT",
            Self::LogsBloom => "LOGS_BLOOM",
            Self::Difficulty => "DIFFICULTY",
            Self::Number => "NUMBER",
            Self::GasLimit => "GAS_LIMIT",
            Self::GasUsed => "GAS_USED",
            Self::Timestamp => "TIMESTAMP",
            Self::ExtraData => "EXTRA_DATA",
            Self::MixHash => "MIX_HASH",
            Self::Nonce => "NONCE",
            Self::BaseFeePerGas => "BASE_FEE_PER_GAS",
        }
    }
}
|
||
/// Fields of an Ethereum account, in the order used for indexing.
#[derive(Debug)]
pub enum AccountField {
    Nonce,
    Balance,
    StorageRoot,
    CodeHash,
}

impl AccountField {
    /// Maps a 0-based field index to its variant; `None` when out of range.
    pub fn from_index(index: usize) -> Option<Self> {
        let field = match index {
            0 => Self::Nonce,
            1 => Self::Balance,
            2 => Self::StorageRoot,
            3 => Self::CodeHash,
            _ => return None,
        };
        Some(field)
    }

    /// Returns the field's canonical SCREAMING_SNAKE_CASE name.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Nonce => "NONCE",
            Self::Balance => "BALANCE",
            Self::StorageRoot => "STORAGE_ROOT",
            Self::CodeHash => "CODE_HASH",
        }
    }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,89 @@ | ||
use alloy_dyn_abi::DynSolType; | ||
use alloy_primitives::Address; | ||
use anyhow::{bail, Result}; | ||
|
||
use crate::block_fields::{AccountField, HeaderField}; | ||
|
||
/// BlockDatalake represents a datalake for a block range
#[derive(Debug, Clone, PartialEq)]
pub struct BlockDatalake {
    /// Start of the sampled block range.
    pub block_range_start: usize,
    /// End of the sampled block range.
    pub block_range_end: usize,
    /// Property path being sampled, e.g. "header.base_fee_per_gas".
    pub sampled_property: String,
    /// Step between sampled blocks.
    pub increment: usize,
}
|
||
impl BlockDatalake { | ||
pub fn new( | ||
block_range_start: usize, | ||
block_range_end: usize, | ||
sampled_property: String, | ||
increment: usize, | ||
) -> Self { | ||
Self { | ||
block_range_start, | ||
block_range_end, | ||
sampled_property, | ||
increment, | ||
} | ||
} | ||
|
||
pub fn from_serialized(serialized: &[u8]) -> Result<Self> { | ||
let datalake_type: DynSolType = "(uint256,uint256,uint256,bytes)".parse()?; | ||
let decoded = datalake_type.abi_decode_sequence(serialized)?; | ||
|
||
let value = decoded.as_tuple().unwrap(); | ||
|
||
let block_range_start = value[0].as_uint().unwrap().0.to_string().parse::<usize>()?; | ||
let block_range_end = value[1].as_uint().unwrap().0.to_string().parse::<usize>()?; | ||
|
||
let sampled_property = Self::deserialize_sampled_property(value[3].as_bytes().unwrap())?; | ||
let increment = value[2].as_uint().unwrap().0.to_string().parse::<usize>()?; | ||
|
||
Ok(Self { | ||
block_range_start, | ||
block_range_end, | ||
sampled_property, | ||
increment, | ||
}) | ||
} | ||
|
||
fn deserialize_sampled_property(serialized: &[u8]) -> Result<String> { | ||
let collection_id = serialized[0] as usize; | ||
let collection = ["header", "account", "storage"][collection_id - 1]; | ||
|
||
match collection { | ||
"header" => { | ||
let header_prop_index = serialized[1] as usize; | ||
let prop = HeaderField::from_index(header_prop_index) | ||
.ok_or("Invalid header property index") | ||
.unwrap() | ||
.as_str(); | ||
Ok(format!("{}.{}", collection, prop.to_lowercase())) | ||
} | ||
"account" => { | ||
let account = Address::from_slice(&serialized[1..21]); | ||
let account_checksum = format!("{:?}", account); | ||
let account_prop_index = serialized[21] as usize; | ||
let prop = AccountField::from_index(account_prop_index) | ||
.ok_or("Invalid account property index") | ||
.unwrap() | ||
.as_str(); | ||
Ok(format!( | ||
"{}.{}.{}", | ||
collection, | ||
account_checksum, | ||
prop.to_lowercase() | ||
)) | ||
} | ||
"storage" => { | ||
let account = Address::from_slice(&serialized[1..21]); | ||
let account_checksum = format!("{:?}", account); | ||
let slot = &serialized[21..53]; | ||
let slot_hex = format!("0x{:x?}", slot); | ||
Ok(format!("{}.{}.{}", collection, account_checksum, slot_hex)) | ||
} | ||
_ => bail!("Invalid collection id"), | ||
} | ||
} | ||
} |
Oops, something went wrong.