Commit

chore: fix unit tests
petscheit committed Jul 23, 2024
1 parent 08b963e commit 7e5a3e2
Showing 7 changed files with 49 additions and 36 deletions.
Makefile (2 changes: 1 addition & 1 deletion)
@@ -60,7 +60,7 @@ run-pie:

get-program-hash:
@echo "Getting hdp.cairo program's hash..."
cairo-compile ./src/hdp.cairo --output $(BUILD_DIR)/hdp.json
cairo-compile --cairo_path="packages/eth_essentials" "src/hdp.cairo" --output $(BUILD_DIR)/hdp.json
cairo-hash-program --program $(BUILD_DIR)/hdp.json
@echo "Program hash retrieved."

src/datalakes/block_sampled_datalake.cairo (15 changes: 9 additions & 6 deletions)
@@ -64,11 +64,11 @@ func init_block_sampled{
// Account Input Layout:
// extract & write field_idx
let field_idx = extract_byte_at_pos([input + 26], 5, pow2_array);
let field_idx = extract_byte_at_pos([input + 30], 5, pow2_array);
assert [properties] = field_idx;
let (address) = extract_address{bitwise_ptr=bitwise_ptr}(
chunk_one=[input + 24], chunk_two=[input + 25], chunk_three=[input + 26]
chunk_one=[input + 28], chunk_two=[input + 29], chunk_three=[input + 30]
);

// write address to properties
@@ -90,7 +90,7 @@ func init_block_sampled{
// Account Slot Input Layout:
let (address) = extract_address{bitwise_ptr=bitwise_ptr}(
chunk_one=[input + 24], chunk_two=[input + 25], chunk_three=[input + 26]
chunk_one=[input + 28], chunk_two=[input + 29], chunk_three=[input + 30]
);

// write address to properties
@@ -99,7 +99,7 @@ func init_block_sampled{

extract_and_write_slot{
range_check_ptr=range_check_ptr, bitwise_ptr=bitwise_ptr, properties=properties
}(chunks=input + 26);
}(chunks=input + 30);

return (
res=BlockSampledDataLake(
@@ -336,7 +336,10 @@ func extract_constant_params{range_check_ptr, bitwise_ptr: BitwiseBuiltin*}(inpu
let (increment) = word_reverse_endian_64([input + 19]);

return (
chain_id=chain_id, block_range_start=block_range_start, block_range_end=block_range_end, increment=increment
chain_id=chain_id,
block_range_start=block_range_start,
block_range_end=block_range_end,
increment=increment,
);
}

@@ -479,4 +482,4 @@ func fetch_header_data_points{
return fetch_header_data_points(
chain_id=chain_id, datalake=datalake, index=index + 1, data_points=data_points
);
}
}
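Note on the offsets above: the encoded block-sampled datalake now carries a 32-byte chain_id field ahead of the previously existing fields, so every subsequent read shifts by four 8-byte words (input + 26 becomes input + 30, and input + 24/25/26 become input + 28/29/30). A minimal Python sketch of that arithmetic, under the assumption that the hint consumes the encoding as 8-byte words:

    # Sketch only: a 32-byte (uint256) chain_id field prepended to the
    # encoding pushes every later field back by 32 / 8 = 4 words.
    WORD_BYTES = 8
    CHAIN_ID_FIELD_BYTES = 32

    shift = CHAIN_ID_FIELD_BYTES // WORD_BYTES  # 4

    old_offsets = {"address_chunk_one": 24, "field_idx_or_slot": 26}
    new_offsets = {name: off + shift for name, off in old_offsets.items()}
    print(new_offsets)  # {'address_chunk_one': 28, 'field_idx_or_slot': 30}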
tests/cairo_programs/block_sampled_datalake.cairo (1 change: 1 addition & 0 deletions)
@@ -47,6 +47,7 @@ func test_block_sampled_datalake_decoding{
func block_sampled_datalake_eq(
a: BlockSampledDataLake, b: BlockSampledDataLake, property_type: felt
) {
assert a.chain_id = b.chain_id;
assert a.block_range_start = b.block_range_start;
assert a.block_range_end = b.block_range_end;
assert a.increment = b.increment;
tests/cairo_programs/computational_task.cairo (9 changes: 5 additions & 4 deletions)
@@ -64,9 +64,9 @@ func test_computational_task_init{
"type": "datalake_compute",
"context": {
"task_bytes_len": 128,
"encoded_task": ["0x25ca8521ba63d557", "0xc9f9f40f48f31e27", "0x739b20c59ba605a5", "0x813cc91cdc15ae0e", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0"],
"datalake_bytes_len": 224,
"encoded_datalake": ["0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0xf826540000000000", "0x0", "0x0", "0x0", "0x1527540000000000", "0x0", "0x0", "0x0", "0x100000000000000", "0x0", "0x0", "0x0", "0xa000000000000000", "0x0", "0x0", "0x0", "0x200000000000000", "0x1101", "0x0", "0x0", "0x0"],
"encoded_task": ["0xAE1B44980CDF67EC", "0x45E6CCA5D27DED75", "0x438632DD6582D123", "0x3DD3EFC40A866EC9", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0", "0x0"],
"datalake_bytes_len": 256,
"encoded_datalake": ["0x0", "0x0", "0x0", "0x0", "0x0","0x0","0x0", "0xA736AA0000000000", "0x0", "0x0", "0x0", "0xf826540000000000", "0x0", "0x0", "0x0", "0x1527540000000000", "0x0", "0x0", "0x0", "0x100000000000000", "0x0", "0x0", "0x0", "0xa000000000000000", "0x0", "0x0", "0x0", "0x200000000000000", "0x1101", "0x0", "0x0", "0x0"],
"datalake_type": 0,
"property_type": 1
}
@@ -81,6 +81,7 @@
local expected_datalake: BlockSampledDataLake;

assert expected_datalake = BlockSampledDataLake(
chain_id=11155111,
block_range_start=5515000,
block_range_end=5515029,
increment=1,
@@ -94,7 +95,7 @@

assert expected_task = ComputationalTask(
chain_id=0x1,
hash=Uint256(0xB85414EBA86F94BAC1CA653D3D3CF014, 0x212F54CE9F4342F21C5D865F1641AABC),
hash=Uint256(0x38008646DD09E46B5D7C68B43B5C5DE2, 0xEA113874535324B3CEB180080880F599),
datalake_ptr=datalake_ptr,
datalake_type=DatalakeType.BLOCK_SAMPLED,
aggregate_fn_id=AGGREGATE_FN.AVG,
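The new first field of encoded_datalake is the chain id 11155111 (Sepolia, 0xAA36A7). Stored as a 32-byte big-endian value and read back in 8-byte little-endian chunks, its last chunk is exactly the 0xA736AA0000000000 word that now leads the array, and the extra 32 bytes account for datalake_bytes_len growing from 224 to 256. A quick Python check (a sketch, not project code):

    chain_id = 11155111                   # Sepolia, 0xAA36A7
    field = chain_id.to_bytes(32, "big")  # 32-byte big-endian field
    chunks = [int.from_bytes(field[i:i + 8], "little") for i in range(0, 32, 8)]
    print([hex(c) for c in chunks])       # ['0x0', '0x0', '0x0', '0xa736aa0000000000']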
tests/cairo_programs/test_vectors.cairo (41 changes: 22 additions & 19 deletions)
@@ -21,14 +21,15 @@ namespace BlockSampledDataLakeMocker {
local datalake_bytes_len: felt;
%{
ids.datalake.chain_id = 11155111
ids.datalake.block_range_start = 5382810
ids.datalake.block_range_end = 5382815
ids.datalake.increment = 1
ids.datalake.property_type = 1
ids.datalake.properties = segments.gen_arg([8])
datalake_input = [0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x9a22520000000000, 0x0, 0x0, 0x0, 0x9f22520000000000, 0x0, 0x0, 0x0, 0x100000000000000, 0x0, 0x0, 0x0, 0xa000000000000000, 0x0, 0x0, 0x0, 0x200000000000000, 0x801, 0x0, 0x0, 0x0]
ids.datalake_bytes_len = 224
datalake_input = [0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xA736AA0000000000,0x0,0x0,0x0,0x9a22520000000000,0x0,0x0,0x0,0x9f22520000000000,0x0,0x0,0x0,0x100000000000000,0x0,0x0,0x0,0xa000000000000000,0x0,0x0,0x0,0x200000000000000,0x801,0x0,0x0,0x0]
ids.datalake_bytes_len = 256
segments.write_arg(ids.datalake_input, datalake_input)
%}
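For readers cross-checking the updated mock above: assuming each 32-byte field of the encoded datalake is laid out as four 8-byte little-endian chunks, the first two fields of datalake_input can be recovered like this (only the first twelve chunks are reproduced):

    def read_u256(chunks, field_idx):
        raw = b"".join(c.to_bytes(8, "little") for c in chunks[field_idx * 4:field_idx * 4 + 4])
        return int.from_bytes(raw, "big")

    datalake_input = [0x0] * 7 + [0xA736AA0000000000] + [0x0] * 3 + [0x9a22520000000000]
    print(read_u256(datalake_input, 0))  # 11155111 -> chain_id
    print(read_u256(datalake_input, 1))  # 5382810  -> block_range_start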

@@ -48,14 +49,15 @@
local datalake_bytes_len: felt;
%{
ids.datalake.chain_id = 11155111
ids.datalake.block_range_start = 4952100
ids.datalake.block_range_end = 4952120
ids.datalake.increment = 1
ids.datalake.property_type = 2
ids.datalake.properties = segments.gen_arg([0x1, 0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4])
ids.datalake.properties = segments.gen_arg([0x1,0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4])
datalake_input = [0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x24904b0000000000,0x0,0x0,0x0,0x38904b0000000000,0x0,0x0,0x0,0x100000000000000,0x0,0x0,0x0,0xa000000000000000,0x0,0x0,0x0,0x1600000000000000,0xd30603936f2c7f02,0xf5986a6c3a6b73aa,0x1d43640f712,0x0]
ids.datalake_bytes_len = 224
datalake_input = [0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xA736AA0000000000,0x0,0x0,0x0,0x24904b0000000000,0x0,0x0,0x0,0x38904b0000000000,0x0,0x0,0x0,0x100000000000000,0x0,0x0,0x0,0xa000000000000000,0x0,0x0,0x0,0x1600000000000000,0xd30603936f2c7f02,0xf5986a6c3a6b73aa,0x1d43640f712,0x0]
ids.datalake_bytes_len = 256
segments.write_arg(ids.datalake_input, datalake_input)
%}

@@ -75,14 +77,15 @@
local datalake_bytes_len: felt;
%{
ids.datalake.chain_id = 11155111
ids.datalake.block_range_start = 5382810
ids.datalake.block_range_end = 5382815
ids.datalake.increment = 1
ids.datalake.property_type = 3
ids.datalake.properties = segments.gen_arg([0x75CeC1db9dCeb703200EAa6595f66885C962B920, 0x0, 0x2])
ids.datalake.properties = segments.gen_arg([0x75CeC1db9dCeb703200EAa6595f66885C962B920,0x0,0x2])
datalake_input = [0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x9a22520000000000,0x0,0x0,0x0,0x9f22520000000000,0x0,0x0,0x0,0x100000000000000,0x0,0x0,0x0,0xa000000000000000,0x0,0x0,0x0,0x3500000000000000,0xb7ce9ddbc1ce7503,0x68f69565aa0e2003,0x20b962c985,0x0,0x0,0x0,0x200000000,0x0]
ids.datalake_bytes_len = 256
datalake_input = [0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xA736AA0000000000,0x0,0x0,0x0,0x9a22520000000000,0x0,0x0,0x0,0x9f22520000000000,0x0,0x0,0x0,0x100000000000000,0x0,0x0,0x0,0xa000000000000000,0x0,0x0,0x0,0x3500000000000000,0xb7ce9ddbc1ce7503,0x68f69565aa0e2003,0x20b962c985,0x0,0x0,0x0,0x200000000,0x0]
ids.datalake_bytes_len = 288
segments.write_arg(ids.datalake_input, datalake_input)
%}

@@ -113,7 +116,7 @@
from tools.py.utils import bytes_to_8_bytes_chunks_little
# mocks python params that are available during full flow
block_sampled_tasks = [{'property_type': 1 }]
task_bytes = bytes.fromhex("22B4DA4CC94620C9DFCC5AE7429AD350AC86587E6D9925A6209587EF17967F20000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
task_bytes = bytes.fromhex("6A3B90F31FC36A592E67293D5D9359BADCD9B6E2B5E078B349B546A5AEE0904A000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
segments.write_arg(ids.tasks_bytes_len, [len(task_bytes)])
segments.write_arg(ids.task_input, bytes_to_8_bytes_chunks_little(task_bytes))
%}
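The helper imported above, tools.py.utils.bytes_to_8_bytes_chunks_little, is not part of this diff. Presumably it splits a byte string into 8-byte chunks and reads each chunk as a little-endian integer, which matches the arrays used in these mocks; a minimal sketch under that assumption (not the repository's implementation):

    def bytes_to_8_bytes_chunks_little(data: bytes) -> list:
        # 8-byte chunks, each interpreted as a little-endian integer;
        # the final chunk may be shorter than 8 bytes.
        return [int.from_bytes(data[i:i + 8], "little") for i in range(0, len(data), 8)]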
@@ -162,7 +165,7 @@
%{
from tools.py.utils import bytes_to_8_bytes_chunks_little
task_bytes = bytes.fromhex("22B4DA4CC94620C9DFCC5AE7429AD350AC86587E6D9925A6209587EF17967F20000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
task_bytes = bytes.fromhex("6A3B90F31FC36A592E67293D5D9359BADCD9B6E2B5E078B349B546A5AEE0904A000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
ids.tasks_bytes_len = len(task_bytes)
segments.write_arg(ids.task_input, bytes_to_8_bytes_chunks_little(task_bytes))
%}
@@ -174,7 +177,7 @@
let task = ComputationalTask(
chain_id=0x1,
hash=Uint256(
low=0x407E98D423A7BB2DBF09B0E42601FC9B, high=0xEF8B01F35B404615F0339EEFAE7719A2
low=0x29EDECDB24D47C8CFA6FA2C538D8C0AD, high=0x319EF071671DCEA889F113920CBB48DD
),
datalake_ptr=datalake_ptr,
datalake_type=DatalakeType.BLOCK_SAMPLED,
@@ -205,7 +208,7 @@

%{
from tools.py.utils import bytes_to_8_bytes_chunks_little
task_bytes = bytes.fromhex("22B4DA4CC94620C9DFCC5AE7429AD350AC86587E6D9925A6209587EF17967F20000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
task_bytes = bytes.fromhex("6A3B90F31FC36A592E67293D5D9359BADCD9B6E2B5E078B349B546A5AEE0904A000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
ids.tasks_bytes_len = len(task_bytes)
segments.write_arg(ids.task_input, bytes_to_8_bytes_chunks_little(task_bytes))
%}
@@ -215,7 +218,7 @@
let task = ComputationalTask(
chain_id=0x1,
hash=Uint256(
low=0x3CB6684D1B4B7FDEA3FBACAEA422C944, high=0x02F8516E3F7BE7FCCFDE22FB4A98DF37
low=0xBB8F8BE052FA69FC932F586EFF3FFF82, high=0x800F013218B39FE67DF7C0D1F7246CB8
),
datalake_ptr=datalake_ptr,
datalake_type=DatalakeType.BLOCK_SAMPLED,
@@ -248,7 +251,7 @@

%{
from tools.py.utils import bytes_to_8_bytes_chunks_little
task_bytes = bytes.fromhex("22B4DA4CC94620C9DFCC5AE7429AD350AC86587E6D9925A6209587EF17967F20000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
task_bytes = bytes.fromhex("6A3B90F31FC36A592E67293D5D9359BADCD9B6E2B5E078B349B546A5AEE0904A000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
ids.tasks_bytes_len = len(task_bytes)
segments.write_arg(ids.task_input, bytes_to_8_bytes_chunks_little(task_bytes))
%}
@@ -258,7 +261,7 @@
let task = ComputationalTask(
chain_id=0x1,
hash=Uint256(
low=0x9F439795EE0CA868B463479E5A905BF0, high=0x72CEFA1188B199ECEEAB39767CD32605
low=0xCC50E918B8F9F1DF33CC8C9C86CBF4F0, high=0x133680AC8C33499C4364FEFFCB804E94
),
datalake_ptr=datalake_ptr,
datalake_type=DatalakeType.BLOCK_SAMPLED,
@@ -291,7 +294,7 @@

%{
from tools.py.utils import bytes_to_8_bytes_chunks_little
task_bytes = bytes.fromhex("22B4DA4CC94620C9DFCC5AE7429AD350AC86587E6D9925A6209587EF17967F20000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
task_bytes = bytes.fromhex("6A3B90F31FC36A592E67293D5D9359BADCD9B6E2B5E078B349B546A5AEE0904A000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
ids.tasks_bytes_len = len(task_bytes)
segments.write_arg(ids.task_input, bytes_to_8_bytes_chunks_little(task_bytes))
%}
@@ -301,7 +304,7 @@
let task = ComputationalTask(
chain_id=0x1,
hash=Uint256(
low=0x1CD2E160D860B4D1BD1E327B6AA209BD, high=0xCABA4809710EB228D6A31DE1B852DFB7
low=0x3ADE97877E3502F427D1853837DD1B41, high=0x67905CD8E3ACC23EFF54245537FFA500
),
datalake_ptr=datalake_ptr,
datalake_type=DatalakeType.BLOCK_SAMPLED,
@@ -335,7 +338,7 @@

%{
from tools.py.utils import bytes_to_8_bytes_chunks_little
task_bytes = bytes.fromhex("22B4DA4CC94620C9DFCC5AE7429AD350AC86587E6D9925A6209587EF17967F200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0")
task_bytes = bytes.fromhex("6A3B90F31FC36A592E67293D5D9359BADCD9B6E2B5E078B349B546A5AEE0904A0000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0")
ids.tasks_bytes_len = len(task_bytes)
segments.write_arg(ids.task_input, bytes_to_8_bytes_chunks_little(task_bytes))
%}
@@ -344,7 +347,7 @@
let task = ComputationalTask(
chain_id=0x1,
hash=Uint256(
low=0xAE5641FEA9032C936D7E54D7CF36E2C3, high=0xA53CFAB970F9780B3C39CFAC1DD3D425
low=0x18E95103512DFA47ABF4237FB5FBF673, high=0xE6FF175F1DAB2E8AC4315F634B27BE8E
),
datalake_ptr=datalake_ptr,
datalake_type=DatalakeType.BLOCK_SAMPLED,
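The expected hash constants in this file changed together with task_bytes. If, as assumed here (verify against the repository's task-hashing code), the expected value is the keccak-256 digest of the encoded task bytes split into 128-bit halves for the Uint256, it can be recomputed along these lines:

    # Sketch only; the exact preimage and byte order are assumptions.
    from Crypto.Hash import keccak  # pycryptodome

    def expected_task_hash(task_bytes: bytes):
        digest = keccak.new(digest_bits=256, data=task_bytes).digest()
        value = int.from_bytes(digest, "big")
        low = value & ((1 << 128) - 1)   # Uint256.low
        high = value >> 128              # Uint256.high
        return low, high

    # Usage: pass bytes.fromhex(...) of any task_bytes hex string above and
    # compare (low, high) against the Uint256 constants in that test.

If the recomputed pair does not match, the preimage or endianness assumed in this sketch is wrong.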