From 899403b44f6d3f6780703ce798fe7d5822f57049 Mon Sep 17 00:00:00 2001
From: pia <76558220+rkdud007@users.noreply.github.com>
Date: Wed, 26 Jun 2024 17:42:32 +0900
Subject: [PATCH] Solid `EvmProvider`, `alloy` dependency version bumped to v0.1.1 (#96)

* provider rework wip
* chore: unified alloy type, finalized reworked provider
* hotfix: with test
* hotfix to make it work
* clean up log
* chore
* bumped `eth-trie-proofs` to 0.1.0
* feat: tx provider error handling
* chore: provider docs, rough bench for provider
* feat: update storage test case
* chore: clean up
* wip
* chore: provider cleanup
* chore: cleaner
* fix: validation, bump `eth-trie-proofs` to 0.1.1
* chore: docs, readme
---
 Cargo.lock | 1148 +++++++++++++++--
 Cargo.toml | 16 +-
 README.md | 61 +-
 cli/Cargo.toml | 2 +-
 cli/src/main.rs | 40 +-
 crates/core/Cargo.toml | 3 +-
 crates/core/src/cairo_runner/input/run.rs | 5 +-
 crates/core/src/cairo_runner/run.rs | 21 +-
 crates/core/src/codec/datalake_compute.rs | 112 +-
 .../datalake_compute/block_sampled.rs | 132 +-
 .../core/src/compiler/datalake_compute/mod.rs | 17 +-
 .../compiler/datalake_compute/transactions.rs | 100 +-
 crates/core/src/compiler/mod.rs | 24 +-
 crates/core/src/pre_processor.rs | 71 +-
 crates/core/src/processor.rs | 39 +-
 crates/core/tests/integration_test.rs | 29 +-
 crates/primitives/Cargo.toml | 7 +-
 crates/primitives/fixtures/processed/mpt.json | 13 +
 crates/primitives/src/aggregate_fn/integer.rs | 97 +-
 crates/primitives/src/aggregate_fn/mod.rs | 374 +++---
 crates/primitives/src/aggregate_fn/rand.rs | 2 +-
 crates/primitives/src/block/account.rs | 145 +--
 crates/primitives/src/block/header.rs | 36 +-
 crates/primitives/src/block/mod.rs | 1 -
 crates/primitives/src/block/tx.rs | 26 -
 .../src/datalake/block_sampled/collection.rs | 2 +-
 .../src/datalake/block_sampled/datalake.rs | 29 +-
 .../src/datalake/block_sampled/mod.rs | 40 +-
 .../src/datalake/block_sampled/rand.rs | 2 +-
 .../src/datalake/block_sampled/rlp_fields.rs | 66 +-
 crates/primitives/src/datalake/envelope.rs | 7 +-
 crates/primitives/src/datalake/mod.rs | 9 +-
 crates/primitives/src/datalake/task.rs | 47 +-
 .../src/datalake/transactions/datalake.rs | 60 +-
 .../src/datalake/transactions/mod.rs | 17 +-
 .../src/datalake/transactions/rlp_fields.rs | 87 +-
 crates/primitives/src/lib.rs | 1 +
 crates/primitives/src/module.rs | 7 +-
 .../primitives/src/processed_types/account.rs | 11 +-
 .../src/processed_types/block_proofs.rs | 17 +
 .../processed_types/cairo_format/account.rs | 3 +-
 .../cairo_format/datalake_compute.rs | 4 +-
 .../cairo_format/felt_vec_unit.rs | 39 +-
 .../processed_types/cairo_format/header.rs | 2 +-
 .../src/processed_types/cairo_format/mpt.rs | 2 +-
 .../processed_types/cairo_format/receipt.rs | 2 +-
 .../processed_types/cairo_format/storage.rs | 10 +-
 .../cairo_format/transaction.rs | 2 +-
 .../src/processed_types/datalake_compute.rs | 32 +-
 .../primitives/src/processed_types/header.rs | 14 +-
 crates/primitives/src/processed_types/mod.rs | 2 +
 crates/primitives/src/processed_types/mpt.rs | 20 +-
 .../primitives/src/processed_types/receipt.rs | 5 +-
 .../primitives/src/processed_types/storage.rs | 15 +-
 .../src/processed_types/transaction.rs | 5 +-
 .../primitives/src/processed_types/uint256.rs | 2 +-
 crates/primitives/src/serde.rs | 36 +
 crates/primitives/src/task.rs | 4 +-
 crates/primitives/src/utils.rs | 6 +-
 crates/provider/Cargo.toml | 16 +-
 crates/provider/README.md | 15 +
 crates/provider/benches/README.md | 21 +
 crates/provider/benches/provider_benchmark.rs | 101 ++
 crates/provider/src/evm/mod.rs | 618
+-------- crates/provider/src/evm/provider.rs | 582 +++++++++ crates/provider/src/evm/rpc.rs | 316 +++++ crates/provider/src/evm/rpc_provider.rs | 464 ------- crates/provider/src/indexer.rs | 202 +++ crates/provider/src/key.rs | 5 +- crates/provider/src/lib.rs | 4 +- crates/provider/src/types.rs | 63 + 71 files changed, 3344 insertions(+), 2191 deletions(-) create mode 100644 crates/primitives/fixtures/processed/mpt.json delete mode 100644 crates/primitives/src/block/tx.rs create mode 100644 crates/primitives/src/processed_types/block_proofs.rs create mode 100644 crates/primitives/src/serde.rs create mode 100644 crates/provider/README.md create mode 100644 crates/provider/benches/README.md create mode 100644 crates/provider/benches/provider_benchmark.rs create mode 100644 crates/provider/src/evm/provider.rs create mode 100644 crates/provider/src/evm/rpc.rs delete mode 100644 crates/provider/src/evm/rpc_provider.rs create mode 100644 crates/provider/src/indexer.rs create mode 100644 crates/provider/src/types.rs diff --git a/Cargo.lock b/Cargo.lock index 12959242..f2485633 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -55,21 +55,93 @@ version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" +[[package]] +name = "alloy" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9134b68e24175eff6c3c4d2bffeefb0a1b7435462130862c88d1524ca376e7e5" +dependencies = [ + "alloy-consensus", + "alloy-contract", + "alloy-core", + "alloy-eips", + "alloy-genesis", + "alloy-network", + "alloy-provider", + "alloy-pubsub", + "alloy-rpc-client", + "alloy-rpc-types", + "alloy-serde", + "alloy-signer", + "alloy-signer-local", + "alloy-transport", + "alloy-transport-http", + "alloy-transport-ipc", + "alloy-transport-ws", + "reqwest 0.12.5", +] + +[[package]] +name = "alloy-chains" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04e9a1892803b02f53e25bea3e414ddd0501f12d97456c9d5ade4edf88f9516f" +dependencies = [ + "num_enum", + "strum", +] + [[package]] name = "alloy-consensus" -version = "0.1.0" -source = "git+https://github.com/alloy-rs/alloy?rev=52d16d3#52d16d33e125670e952b38d888eaefc560738cc2" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a016bfa21193744d4c38b3f3ab845462284d129e5e23c7cc0fafca7e92d9db37" dependencies = [ "alloy-eips", "alloy-primitives", "alloy-rlp", + "alloy-serde", + "c-kzg", + "serde", +] + +[[package]] +name = "alloy-contract" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e47b2a620fd588d463ccf0f5931b41357664b293a8d31592768845a2a101bb9e" +dependencies = [ + "alloy-dyn-abi", + "alloy-json-abi", + "alloy-network", + "alloy-primitives", + "alloy-provider", + "alloy-pubsub", + "alloy-rpc-types-eth", + "alloy-sol-types", + "alloy-transport", + "futures", + "futures-util", + "thiserror", +] + +[[package]] +name = "alloy-core" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5af3faff14c12c8b11037e0a093dd157c3702becb8435577a2408534d0758315" +dependencies = [ + "alloy-dyn-abi", + "alloy-json-abi", + "alloy-primitives", + "alloy-sol-types", ] [[package]] name = "alloy-dyn-abi" -version = "0.6.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2919acdad13336bc5dc26b636cdd6892c2f27fb0d4a58320a00c2713cf6a4e9a" 
+checksum = "cb6e6436a9530f25010d13653e206fab4c9feddacf21a54de8d7311b275bc56b" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -84,19 +156,36 @@ dependencies = [ [[package]] name = "alloy-eips" -version = "0.1.0" -source = "git+https://github.com/alloy-rs/alloy?rev=52d16d3#52d16d33e125670e952b38d888eaefc560738cc2" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32d6d8118b83b0489cfb7e6435106948add2b35217f4a5004ef895f613f60299" dependencies = [ "alloy-primitives", "alloy-rlp", + "alloy-serde", + "c-kzg", + "derive_more", + "once_cell", + "serde", + "sha2", +] + +[[package]] +name = "alloy-genesis" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "894f33a7822abb018db56b10ab90398e63273ce1b5a33282afd186c132d764a6" +dependencies = [ + "alloy-primitives", + "alloy-serde", "serde", ] [[package]] name = "alloy-json-abi" -version = "0.6.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24ed0f2a6c3a1c947b4508522a53a190dba8f94dcd4e3e1a5af945a498e78f2f" +checksum = "aaeaccd50238126e3a0ff9387c7c568837726ad4f4e399b528ca88104d6c25ef" dependencies = [ "alloy-primitives", "alloy-sol-type-parser", @@ -106,49 +195,53 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "0.1.0" -source = "git+https://github.com/alloy-rs/alloy?rev=52d16d3#52d16d33e125670e952b38d888eaefc560738cc2" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61f0ae6e93b885cc70fe8dae449e7fd629751dbee8f59767eaaa7285333c5727" dependencies = [ "alloy-primitives", "serde", "serde_json", "thiserror", + "tracing", ] [[package]] name = "alloy-merkle-tree" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45e4c892319f32f2ef619965ba222552f6bc9898a996e23bfd5ea625a963e980" +checksum = "c258360f494a477a6122ea91033b3d7bf126d8de96a0c38eb81dd3d3d1c117c5" dependencies = [ - "alloy-dyn-abi", - "alloy-primitives", + "alloy", "anyhow", "hashbrown 0.14.5", ] [[package]] name = "alloy-network" -version = "0.1.0" -source = "git+https://github.com/alloy-rs/alloy?rev=52d16d3#52d16d33e125670e952b38d888eaefc560738cc2" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc122cbee2b8523854cc11d87bcd5773741602c553d2d2d106d82eeb9c16924a" dependencies = [ "alloy-consensus", "alloy-eips", "alloy-json-rpc", "alloy-primitives", - "alloy-rpc-types", + "alloy-rpc-types-eth", + "alloy-serde", "alloy-signer", + "alloy-sol-types", "async-trait", + "auto_impl", "futures-utils-wasm", - "serde", "thiserror", ] [[package]] name = "alloy-primitives" -version = "0.6.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "600d34d8de81e23b6d909c094e23b3d357e01ca36b78a8c5424c501eedbe86f0" +checksum = "f783611babedbbe90db3478c120fb5f5daacceffc210b39adc0af4fe0da70bad" dependencies = [ "alloy-rlp", "bytes", @@ -168,34 +261,63 @@ dependencies = [ [[package]] name = "alloy-provider" -version = "0.1.0" -source = "git+https://github.com/alloy-rs/alloy?rev=52d16d3#52d16d33e125670e952b38d888eaefc560738cc2" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d5af289798fe8783acd0c5f10644d9d26f54a12bc52a083e4f3b31718e9bf92" dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", "alloy-json-rpc", "alloy-network", "alloy-primitives", + "alloy-pubsub", "alloy-rpc-client", 
- "alloy-rpc-trace-types", - "alloy-rpc-types", + "alloy-rpc-types-eth", "alloy-transport", "alloy-transport-http", + "alloy-transport-ipc", + "alloy-transport-ws", "async-stream", "async-trait", "auto_impl", "dashmap", "futures", + "futures-utils-wasm", "lru", - "reqwest", + "pin-project", + "reqwest 0.12.5", + "serde", "serde_json", "tokio", "tracing", + "url", +] + +[[package]] +name = "alloy-pubsub" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "702f330b7da123a71465ab9d39616292f8344a2811c28f2cc8d8438a69d79e35" +dependencies = [ + "alloy-json-rpc", + "alloy-primitives", + "alloy-transport", + "bimap", + "futures", + "serde", + "serde_json", + "tokio", + "tokio-stream", + "tower", + "tracing", ] [[package]] name = "alloy-rlp" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d58d9f5da7b40e9bfff0b7e7816700be4019db97d4b6359fe7f94a9e22e42ac" +checksum = "b155716bab55763c95ba212806cf43d05bcc70e5f35b02bad20cf5ec7fe11fed" dependencies = [ "alloy-rlp-derive", "arrayvec", @@ -204,9 +326,9 @@ dependencies = [ [[package]] name = "alloy-rlp-derive" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a047897373be4bbb0224c1afdabca92648dc57a9c9ef6e7b0be3aff7a859c83" +checksum = "8037e03c7f462a063f28daec9fda285a9a89da003c552f8637a80b9c8fd96241" dependencies = [ "proc-macro2", "quote", @@ -215,15 +337,20 @@ dependencies = [ [[package]] name = "alloy-rpc-client" -version = "0.1.0" -source = "git+https://github.com/alloy-rs/alloy?rev=52d16d3#52d16d33e125670e952b38d888eaefc560738cc2" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b40fcb53b2a9d0a78a4968b2eca8805a4b7011b9ee3fdfa2acaf137c5128f36b" dependencies = [ "alloy-json-rpc", + "alloy-primitives", + "alloy-pubsub", "alloy-transport", "alloy-transport-http", + "alloy-transport-ipc", + "alloy-transport-ws", "futures", "pin-project", - "reqwest", + "reqwest 0.12.5", "serde", "serde_json", "tokio", @@ -234,26 +361,47 @@ dependencies = [ ] [[package]] -name = "alloy-rpc-trace-types" -version = "0.1.0" -source = "git+https://github.com/alloy-rs/alloy?rev=52d16d3#52d16d33e125670e952b38d888eaefc560738cc2" +name = "alloy-rpc-types" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f2fbe956a3e0f0975c798f488dc6be96b669544df3737e18f4a325b42f4c86" dependencies = [ + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-serde", +] + +[[package]] +name = "alloy-rpc-types-engine" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd473d98ec552f8229cd6d566bd2b0bbfc5bb4efcefbb5288c834aa8fd832020" +dependencies = [ + "alloy-consensus", + "alloy-eips", "alloy-primitives", - "alloy-rpc-types", + "alloy-rlp", + "alloy-rpc-types-eth", "alloy-serde", + "jsonwebtoken", + "rand", "serde", - "serde_json", + "thiserror", ] [[package]] -name = "alloy-rpc-types" -version = "0.1.0" -source = "git+https://github.com/alloy-rs/alloy?rev=52d16d3#52d16d33e125670e952b38d888eaefc560738cc2" +name = "alloy-rpc-types-eth" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "083f443a83b9313373817236a8f4bea09cca862618e9177d822aee579640a5d6" dependencies = [ + "alloy-consensus", + "alloy-eips", "alloy-primitives", "alloy-rlp", "alloy-serde", - "itertools 0.12.1", + "alloy-sol-types", + "itertools 0.13.0", "serde", 
"serde_json", "thiserror", @@ -261,8 +409,9 @@ dependencies = [ [[package]] name = "alloy-serde" -version = "0.1.0" -source = "git+https://github.com/alloy-rs/alloy?rev=52d16d3#52d16d33e125670e952b38d888eaefc560738cc2" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d94da1c0c4e27cc344b05626fe22a89dc6b8b531b9475f3b7691dbf6913e4109" dependencies = [ "alloy-primitives", "serde", @@ -271,8 +420,9 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "0.1.0" -source = "git+https://github.com/alloy-rs/alloy?rev=52d16d3#52d16d33e125670e952b38d888eaefc560738cc2" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58d876be3afd8b78979540084ff63995292a26aa527ad0d44276405780aa0ffd" dependencies = [ "alloy-primitives", "async-trait", @@ -282,15 +432,46 @@ dependencies = [ "thiserror", ] +[[package]] +name = "alloy-signer-local" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d40a37dc216c269b8a7244047cb1c18a9c69f7a0332ab2c4c2aa4cbb1a31468b" +dependencies = [ + "alloy-consensus", + "alloy-network", + "alloy-primitives", + "alloy-signer", + "async-trait", + "k256", + "rand", + "thiserror", +] + [[package]] name = "alloy-sol-macro" -version = "0.6.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86ec0a47740b20bc5613b8712d0d321d031c4efc58e9645af96085d5cccfc27" +checksum = "4bad41a7c19498e3f6079f7744656328699f8ea3e783bdd10d85788cd439f572" dependencies = [ + "alloy-sol-macro-expander", + "alloy-sol-macro-input", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.60", +] + +[[package]] +name = "alloy-sol-macro-expander" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd9899da7d011b4fe4c406a524ed3e3f963797dbc93b45479d60341d3a27b252" +dependencies = [ + "alloy-json-abi", + "alloy-sol-macro-input", "const-hex", - "dunce", - "heck 0.4.1", + "heck 0.5.0", "indexmap 2.2.6", "proc-macro-error", "proc-macro2", @@ -300,21 +481,39 @@ dependencies = [ "tiny-keccak", ] +[[package]] +name = "alloy-sol-macro-input" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d32d595768fdc61331a132b6f65db41afae41b9b97d36c21eb1b955c422a7e60" +dependencies = [ + "alloy-json-abi", + "const-hex", + "dunce", + "heck 0.5.0", + "proc-macro2", + "quote", + "serde_json", + "syn 2.0.60", + "syn-solidity", +] + [[package]] name = "alloy-sol-type-parser" -version = "0.6.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0045cc89524e1451ccf33e8581355b6027ac7c6e494bb02959d4213ad0d8e91d" +checksum = "baa2fbd22d353d8685bd9fee11ba2d8b5c3b1d11e56adb3265fcf1f32bfdf404" dependencies = [ "winnow 0.6.7", ] [[package]] name = "alloy-sol-types" -version = "0.6.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad09ec5853fa700d12d778ad224dcdec636af424d29fad84fb9a2f16a5b0ef09" +checksum = "a49042c6d3b66a9fe6b2b5a8bf0d39fc2ae1ee0310a2a26ffedd79fb097878dd" dependencies = [ + "alloy-json-abi", "alloy-primitives", "alloy-sol-macro", "const-hex", @@ -323,8 +522,9 @@ dependencies = [ [[package]] name = "alloy-transport" -version = "0.1.0" -source = "git+https://github.com/alloy-rs/alloy?rev=52d16d3#52d16d33e125670e952b38d888eaefc560738cc2" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"245af9541f0a0dbd5258669c80dfe3af118164cacec978a520041fc130550deb" dependencies = [ "alloy-json-rpc", "base64 0.22.0", @@ -336,22 +536,60 @@ dependencies = [ "tokio", "tower", "url", - "wasm-bindgen-futures", ] [[package]] name = "alloy-transport-http" -version = "0.1.0" -source = "git+https://github.com/alloy-rs/alloy?rev=52d16d3#52d16d33e125670e952b38d888eaefc560738cc2" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5619c017e1fdaa1db87f9182f4f0ed97c53d674957f4902fba655e972d359c6c" dependencies = [ "alloy-json-rpc", "alloy-transport", - "reqwest", + "reqwest 0.12.5", "serde_json", "tower", + "tracing", "url", ] +[[package]] +name = "alloy-transport-ipc" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "173cefa110afac7a53cf2e75519327761f2344d305eea2993f3af1b2c1fc1c44" +dependencies = [ + "alloy-json-rpc", + "alloy-pubsub", + "alloy-transport", + "bytes", + "futures", + "interprocess", + "pin-project", + "serde_json", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "alloy-transport-ws" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c0aff8af5be5e58856c5cdd1e46db2c67c7ecd3a652d9100b4822c96c899947" +dependencies = [ + "alloy-pubsub", + "alloy-transport", + "futures", + "http 1.1.0", + "rustls 0.23.10", + "serde_json", + "tokio", + "tokio-tungstenite", + "tracing", + "ws_stream_wasm", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -367,6 +605,12 @@ dependencies = [ "libc", ] +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + [[package]] name = "anstream" version = "0.6.13" @@ -599,6 +843,28 @@ dependencies = [ "syn 2.0.60", ] +[[package]] +name = "async_io_stream" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" +dependencies = [ + "futures", + "pharos", + "rustc_version 0.4.0", +] + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + [[package]] name = "auto_impl" version = "1.2.0" @@ -673,6 +939,12 @@ dependencies = [ "serde", ] +[[package]] +name = "bimap" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7" + [[package]] name = "bit-set" version = "0.5.3" @@ -721,6 +993,18 @@ dependencies = [ "generic-array", ] +[[package]] +name = "blst" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62dc83a094a71d43eeadd254b1ec2d24cb6a0bb6cadce00df51f0db594711a32" +dependencies = [ + "cc", + "glob", + "threadpool", + "zeroize", +] + [[package]] name = "bumpalo" version = "3.16.0" @@ -748,6 +1032,20 @@ dependencies = [ "serde", ] +[[package]] +name = "c-kzg" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdf100c4cea8f207e883ff91ca886d621d8a166cb04971dfaa9bb8fd99ed95df" +dependencies = [ + "blst", + "cc", + "glob", + "hex", + "libc", + "serde", +] + [[package]] name = "cairo-felt" version = "0.9.1" @@ -912,6 +1210,12 @@ dependencies = [ "serde", ] 
+[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + [[package]] name = "cc" version = "1.0.95" @@ -937,6 +1241,33 @@ dependencies = [ "windows-targets 0.52.5", ] +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + [[package]] name = "cipher" version = "0.4.4" @@ -947,6 +1278,18 @@ dependencies = [ "inout", ] +[[package]] +name = "clap" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" +dependencies = [ + "bitflags 1.3.2", + "clap_lex 0.2.4", + "indexmap 1.9.3", + "textwrap", +] + [[package]] name = "clap" version = "4.5.4" @@ -965,7 +1308,7 @@ checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" dependencies = [ "anstream", "anstyle", - "clap_lex", + "clap_lex 0.7.0", "strsim 0.11.1", ] @@ -981,6 +1324,15 @@ dependencies = [ "syn 2.0.60", ] +[[package]] +name = "clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", +] + [[package]] name = "clap_lex" version = "0.7.0" @@ -1067,6 +1419,62 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "criterion" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb" +dependencies = [ + "anes", + "atty", + "cast", + "ciborium", + "clap 3.2.25", + "criterion-plot", + "futures", + "itertools 0.10.5", + "lazy_static", + "num-traits 0.2.18", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools 0.10.5", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.20" @@ -1183,6 +1591,12 @@ dependencies = [ "parking_lot_core 0.9.10", ] +[[package]] +name = "data-encoding" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" + [[package]] name = "der" version = "0.7.9" @@ -1269,6 +1683,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "doctest-file" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aac81fa3e28d21450aa4d2ac065992ba96a1d7303efbce51a95f4fd175b67562" + [[package]] name = "dotenv" version = "0.15.0" @@ -1384,26 +1804,21 @@ dependencies = [ [[package]] name = "eth-trie-proofs" -version = "0.1.0" -source = "git+https://github.com/HerodotusDev/eth-trie-proofs.git?branch=main#22fcf5abb5bfb444780adf5054e9f6a1ca5ffe93" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffad0513a634350b4859934c3367cd171b4ed57c6b5c64d443e30bf278f1f8d0" dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-network", + "alloy", "alloy-primitives", - "alloy-provider", "alloy-rlp", - "alloy-rpc-client", - "alloy-rpc-types", - "alloy-transport", - "alloy-transport-http", - "clap", + "clap 4.5.4", "eth_trie", "ethereum-types", - "reqwest", + "reqwest 0.11.27", "serde", "serde_json", "serde_with 3.8.1", + "thiserror", "tokio", "url", ] @@ -1683,6 +2098,12 @@ version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + [[package]] name = "good_lp" version = "1.8.1" @@ -1715,7 +2136,7 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http", + "http 0.2.12", "indexmap 2.2.6", "slab", "tokio", @@ -1723,6 +2144,16 @@ dependencies = [ "tracing", ] +[[package]] +name = "half" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" +dependencies = [ + "cfg-if", + "crunchy", +] + [[package]] name = "hashbrown" version = "0.12.3" @@ -1744,9 +2175,9 @@ dependencies = [ name = "hdp-cli" version = "0.2.0" dependencies = [ - "alloy-primitives", + "alloy", "anyhow", - "clap", + "clap 4.5.4", "dotenv", "hdp-core", "hdp-primitives", @@ -1762,9 +2193,8 @@ dependencies = [ name = "hdp-core" version = "0.2.0" dependencies = [ - "alloy-dyn-abi", + "alloy", "alloy-merkle-tree", - "alloy-primitives", "anyhow", "cairo-lang-starknet-classes", "futures", @@ -1786,8 +2216,7 @@ dependencies = [ name = "hdp-primitives" version = "0.2.0" dependencies = [ - "alloy-dyn-abi", - "alloy-primitives", + "alloy", "alloy-rlp", "anyhow", "cairo-lang-starknet-classes", @@ -1804,14 +2233,16 @@ dependencies = [ name = "hdp-provider" version = "0.2.0" dependencies = [ - "alloy-primitives", + "alloy", "anyhow", + "criterion", "eth-trie-proofs", "futures", "hdp-primitives", - "reqwest", - "serde", + "itertools 0.10.5", + "reqwest 0.11.27", "serde_json", + "thiserror", "tokio", "tracing", ] @@ -1827,15 +2258,18 @@ dependencies = [ [[package]] name = "heck" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] -name = "heck" -version = "0.5.0" +name = "hermit-abi" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] [[package]] name = "hermit-abi" @@ -1878,6 +2312,17 @@ dependencies = [ "itoa", ] +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + [[package]] name = "http-body" version = "0.4.6" @@ -1885,7 +2330,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", "pin-project-lite", ] @@ -1912,8 +2380,8 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http", - "http-body", + "http 0.2.12", + "http-body 0.4.6", "httparse", "httpdate", "itoa", @@ -1925,6 +2393,25 @@ dependencies = [ "want", ] +[[package]] +name = "hyper" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + [[package]] name = "hyper-rustls" version = "0.24.2" @@ -1932,11 +2419,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", - "http", - "hyper", - "rustls", + "http 0.2.12", + "hyper 0.14.28", + "rustls 0.21.12", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", ] [[package]] @@ -1946,10 +2433,46 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper", + "hyper 0.14.28", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper 1.3.1", + "hyper-util", "native-tls", "tokio", "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b875924a60b96e5d7b9ae7b066540b1dd1cbd90d1828f54c92e02a283351c56" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "hyper 1.3.1", + "pin-project-lite", + "socket2", + "tokio", + "tower", + "tower-service", + "tracing", ] [[package]] @@ -2092,6 +2615,21 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "interprocess" +version = "2.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "67bafc2f5dbdad79a6d925649758d5472647b416028099f0b829d1b67fdd47d3" +dependencies = [ + "doctest-file", + "futures-core", + "libc", + "recvmsg", + "tokio", + "widestring", + "windows-sys 0.52.0", +] + [[package]] name = "ipnet" version = "2.9.0" @@ -2118,9 +2656,9 @@ dependencies = [ [[package]] name = "itertools" -version = "0.12.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] @@ -2140,6 +2678,21 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "jsonwebtoken" +version = "9.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9ae10193d25051e74945f1ea2d0b42e03cc3b890f7e4cc5faa44997d808193f" +dependencies = [ + "base64 0.21.7", + "js-sys", + "pem", + "ring", + "serde", + "serde_json", + "simple_asn1", +] + [[package]] name = "k256" version = "0.13.3" @@ -2151,7 +2704,6 @@ dependencies = [ "elliptic-curve", "once_cell", "sha2", - "signature", ] [[package]] @@ -2446,10 +2998,30 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.9", "libc", ] +[[package]] +name = "num_enum" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.60", +] + [[package]] name = "object" version = "0.32.2" @@ -2515,6 +3087,12 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "os_str_bytes" +version = "6.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" + [[package]] name = "overload" version = "0.1.1" @@ -2610,6 +3188,16 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "pem" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e459365e590736a54c3fa561947c84837534b8e9af6fc5bf781307e82658fae" +dependencies = [ + "base64 0.22.0", + "serde", +] + [[package]] name = "percent-encoding" version = "2.3.1" @@ -2637,6 +3225,16 @@ dependencies = [ "indexmap 2.2.6", ] +[[package]] +name = "pharos" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" +dependencies = [ + "futures", + "rustc_version 0.4.0", +] + [[package]] name = "phf_shared" version = "0.10.0" @@ -2700,6 +3298,34 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" +[[package]] +name = "plotters" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a15b6eccb8484002195a3e44fe65a4ce8e93a625797a063735536fd59cb01cf3" +dependencies = [ + "num-traits 0.2.18", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = 
"plotters-backend" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "414cec62c6634ae900ea1c56128dfe87cf63e7caece0852ec76aba307cebadb7" + +[[package]] +name = "plotters-svg" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81b30686a7d9c3e010b84284bdd26a29f2138574f52f5eb6f794fc0ad924e705" +dependencies = [ + "plotters-backend", +] + [[package]] name = "powerfmt" version = "0.2.0" @@ -2859,6 +3485,32 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" +[[package]] +name = "rayon" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "recvmsg" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3edd4d5d42c92f0a659926464d4cce56b562761267ecf0f469d85b7de384175" + [[package]] name = "redox_syscall" version = "0.2.16" @@ -2929,11 +3581,11 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http", - "http-body", - "hyper", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.28", "hyper-rustls", - "hyper-tls", + "hyper-tls 0.5.0", "ipnet", "js-sys", "log", @@ -2942,23 +3594,62 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls", - "rustls-pemfile", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 0.1.2", "system-configuration", "tokio", "tokio-native-tls", - "tokio-rustls", + "tokio-rustls 0.24.1", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots 0.25.4", + "winreg 0.50.0", +] + +[[package]] +name = "reqwest" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7d6d2a27d57148378eb5e111173f4276ad26340ecc5c49a4a2152167a2d6a37" +dependencies = [ + "base64 0.22.0", + "bytes", + "futures-core", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.3.1", + "hyper-tls 0.6.0", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile 2.1.2", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.1", + "tokio", + "tokio-native-tls", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "webpki-roots", - "winreg", + "winreg 0.52.0", ] [[package]] @@ -2998,9 +3689,9 @@ dependencies = [ [[package]] name = "ruint" -version = "1.12.1" +version = "1.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f308135fef9fc398342da5472ce7c484529df23743fb7c734e0f3d472971e62" +checksum = "2c3cc4c2511671f327125da14133d0c5c5d137f006a1017a16f557bc85b16286" dependencies = [ "alloy-rlp", "ark-ff 0.3.0", @@ -3022,9 +3713,9 @@ dependencies = [ [[package]] name = "ruint-macro" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f86854cf50259291520509879a5c294c3c9a4c334e9ff65071c51e42ef1e2343" +checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18" [[package]] name = "rustc-demangle" @@ -3083,10 +3774,24 @@ checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", "ring", - "rustls-webpki", + "rustls-webpki 0.101.7", "sct", ] +[[package]] +name = "rustls" +version = "0.23.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05cff451f60db80f490f3c182b77c35260baace73209e9cdbbe526bfe3a4d402" +dependencies = [ + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.4", + "subtle", + "zeroize", +] + [[package]] name = "rustls-pemfile" version = "1.0.4" @@ -3096,6 +3801,22 @@ dependencies = [ "base64 0.21.7", ] +[[package]] +name = "rustls-pemfile" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +dependencies = [ + "base64 0.22.0", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" + [[package]] name = "rustls-webpki" version = "0.101.7" @@ -3106,6 +3827,17 @@ dependencies = [ "untrusted", ] +[[package]] +name = "rustls-webpki" +version = "0.102.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff448f7e92e913c4b7d4c6d8e4540a1724b319b4152b8aef6d4cf8339712b33e" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.17" @@ -3300,6 +4032,12 @@ dependencies = [ "pest", ] +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" + [[package]] name = "serde" version = "1.0.199" @@ -3423,6 +4161,17 @@ dependencies = [ "syn 2.0.60", ] +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + [[package]] name = "sha2" version = "0.10.8" @@ -3503,6 +4252,18 @@ dependencies = [ "rand_core", ] +[[package]] +name = "simple_asn1" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" +dependencies = [ + "num-bigint", + "num-traits 0.2.18", + "thiserror", + "time", +] + [[package]] name = "siphasher" version = "0.3.11" @@ -3715,7 +4476,7 @@ dependencies = [ "ethereum-types", "flate2", "log", - "reqwest", + "reqwest 0.11.27", "serde", "serde_json", "serde_with 2.3.3", @@ -3771,6 +4532,28 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ 
+ "heck 0.5.0", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.60", +] + [[package]] name = "subtle" version = "2.5.0" @@ -3801,9 +4584,9 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.6.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb3d0961cd53c23ea94eeec56ba940f636f6394788976e9f16ca5ee0aca7464a" +checksum = "8d71e19bca02c807c9faa67b5a47673ff231b6e7449b251695188522f1dc44b2" dependencies = [ "paste", "proc-macro2", @@ -3817,6 +4600,12 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" + [[package]] name = "system-configuration" version = "0.5.1" @@ -3867,6 +4656,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "textwrap" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" + [[package]] name = "thiserror" version = "1.0.59" @@ -3897,6 +4692,15 @@ dependencies = [ "once_cell", ] +[[package]] +name = "threadpool" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" +dependencies = [ + "num_cpus", +] + [[package]] name = "time" version = "0.3.36" @@ -3937,6 +4741,16 @@ dependencies = [ "crunchy", ] +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "tinyvec" version = "1.6.0" @@ -3998,7 +4812,18 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls", + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +dependencies = [ + "rustls 0.23.10", + "rustls-pki-types", "tokio", ] @@ -4014,6 +4839,22 @@ dependencies = [ "tokio-util", ] +[[package]] +name = "tokio-tungstenite" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6989540ced10490aaf14e6bad2e3d33728a2813310a0c71d1574304c49631cd" +dependencies = [ + "futures-util", + "log", + "rustls 0.23.10", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.0", + "tungstenite", + "webpki-roots 0.26.3", +] + [[package]] name = "tokio-util" version = "0.7.10" @@ -4055,6 +4896,7 @@ dependencies = [ "futures-util", "pin-project", "pin-project-lite", + "tokio", "tower-layer", "tower-service", "tracing", @@ -4136,6 +4978,26 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +[[package]] +name = "tungstenite" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e2e2ce1e47ed2994fd43b04c8f618008d4cabdd5ee34027cf14f9d918edd9c8" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http 1.1.0", + 
"httparse", + "log", + "rand", + "rustls 0.23.10", + "rustls-pki-types", + "sha1", + "thiserror", + "utf-8", +] + [[package]] name = "typenum" version = "1.17.0" @@ -4222,6 +5084,12 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + [[package]] name = "utf8parse" version = "0.2.1" @@ -4372,6 +5240,21 @@ version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" +[[package]] +name = "webpki-roots" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd7c23921eeb1713a4e851530e9b9756e4fb0e89978582942612524cf09f01cd" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "widestring" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7219d36b6eac893fa81e84ebe06485e7dcbb616177469b142df14f1f4deb1311" + [[package]] name = "winapi" version = "0.3.9" @@ -4579,6 +5462,35 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "winreg" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "ws_stream_wasm" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7999f5f4217fe3818726b66257a4475f71e74ffd190776ad053fa159e50737f5" +dependencies = [ + "async_io_stream", + "futures", + "js-sys", + "log", + "pharos", + "rustc_version 0.4.0", + "send_wrapper", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "wyz" version = "0.5.1" diff --git a/Cargo.toml b/Cargo.toml index 997f2002..26078ddb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] resolver = "2" -members = ["cli", "crates/core", "crates/provider", "crates/primitives"] +members = ["cli", "crates/core", "crates/primitives", "crates/provider"] [workspace.package] version = "0.2.0" @@ -14,10 +14,10 @@ hdp-primitives = { version = "0.2.0", path = "crates/primitives" } hdp-provider = { version = "0.2.0", path = "crates/provider" } tokio = { version = "1", features = ["full"] } tempfile = "3.10.1" -alloy-dyn-abi = "0.6.2" -alloy-primitives = { version = "0.6.2", feature = ["rlp"] } -alloy-merkle-tree = { version = "0.5.0" } -alloy-rlp = { version = "0.3.4", features = ["derive"] } +alloy-merkle-tree = { version = "0.6.0" } +alloy-rpc-client = { version = "0.1.1" } +alloy = { version = "0.1.1", features = ["full"] } +alloy-rlp = { version = "0.3.5", features = ["derive"] } anyhow = "1.0.79" serde = { version = "1.0", features = ["derive"] } serde_with = "2.3.2" @@ -31,6 +31,6 @@ starknet-crypto = "0.6.1" cairo-lang-starknet-classes = "2.6.3" futures = "0.3.30" lazy_static = "1.4.0" - -# TODO: ideally should published -eth-trie-proofs = { git = "https://github.com/HerodotusDev/eth-trie-proofs.git", branch = "main" } +thiserror = "1.0" +eth-trie-proofs = "0.1.1" +itertools = "0.10" diff --git a/README.md b/README.md index 5147dc6e..c8af489d 100644 --- a/README.md +++ b/README.md @@ -132,35 +132,38 @@ The core soundness of HDP relies on generating the correct input file and runnin Here is the support matrix indicating which blockchain elements are tested for each aggregate 
function. The matrix highlights fields where these functions are applicable.

-| Field Description             | SUM | AVG | MIN | MAX | COUNT | SLR |
-| ----------------------------- | --- | --- | --- | --- | ----- | --- |
-| `account.nonce`               | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `account.balance`             | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `account.storage_root`        | -   | -   | -   | -   | -     | -   |
-| `account.code_hash`           | -   | -   | -   | -   | -     | -   |
-| `storage.key` (numeric value) | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `storage.key` (hash value)    | -   | -   | -   | -   | -     | -   |
-| `header.difficulty`           | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `header.gas_limit`            | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `header.gas_used`             | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `header.timestamp`            | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `header.base_fee_per_gas`     | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `header.blob_gas_used`        | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `header.excess_blob_gas`      | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `header.nonce`                | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| Other `header` elements       | -   | -   | -   | -   | -     | -   |
-| `tx.nonce`                    | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `tx.gas_price`                | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `tx.gas_limit`                | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `tx.value`                    | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `tx.v`                        | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `tx.r`                        | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `tx.s`                        | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `tx.chain_id`                 | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `tx.max_fee_per_gas`          | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `tx.max_priority_fee_per_gas` | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| `tx.max_fee_per_blob_gas`     | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
-| Other `tx` elements           | -   | -   | -   | -   | -     | -   |
+| Field Description                | SUM | AVG | MIN | MAX | COUNT | SLR |
+| -------------------------------- | --- | --- | --- | --- | ----- | --- |
+| `account.nonce`                  | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `account.balance`                | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `account.storage_root`           | -   | -   | -   | -   | -     | -   |
+| `account.code_hash`              | -   | -   | -   | -   | -     | -   |
+| `storage.key` (numeric value)    | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `storage.key` (hash value)       | -   | -   | -   | -   | -     | -   |
+| `header.difficulty`              | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `header.gas_limit`               | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `header.gas_used`                | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `header.timestamp`               | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `header.base_fee_per_gas`        | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `header.blob_gas_used`           | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `header.excess_blob_gas`         | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `header.nonce`                   | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| Other `header` elements          | -   | -   | -   | -   | -     | -   |
+| `tx.nonce`                       | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `tx.gas_price`                   | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `tx.gas_limit`                   | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `tx.value`                       | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `tx.v`                           | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `tx.r`                           | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `tx.s`                           | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `tx.chain_id`                    | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `tx.max_fee_per_gas`             | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `tx.max_priority_fee_per_gas`    | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `tx.max_fee_per_blob_gas`        | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| Other `tx` elements              | -   | -   | -   | -   | -     | -   |
+| `tx_receipt.success`             | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| `tx_receipt.cumulative_gas_used` | ✅  | ✅  | ✅  | ✅  | ✅    | ✅  |
+| Other `tx_receipt` elements      | -   | -   | -   | -   | -     | -   |

_Note: Fields marked with "-" are not applicable for the specified aggregate functions because they do not contain numeric data or the data type is not suitable for these calculations._

diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index 8d510cd0..eeffb19b 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -27,9 +27,9 @@ tokio = { workspace = true }
tracing = { workspace = true }
hdp-provider = { workspace = true }
hdp-primitives = { workspace = true }
-alloy-primitives = { workspace = true } serde_json = { workspace = true } clap = { version = "4.4.4", features = ["derive"] } dotenv = "0.15.0" tracing-subscriber = "0.3.0" inquire = "0.7.4" +alloy = { workspace = true } diff --git a/cli/src/main.rs b/cli/src/main.rs index 82b8f7c5..d03ece41 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -1,6 +1,6 @@ #![deny(unused_crate_dependencies)] -use alloy_primitives::U256; +use alloy::{hex, primitives::U256}; use anyhow::{bail, Result}; use hdp_primitives::{ aggregate_fn::{integer::Operator, FunctionContext}, @@ -18,6 +18,7 @@ use hdp_primitives::{ }, processed_types::cairo_format::AsCairoFormat, }; +use hdp_provider::evm::provider::EvmProviderConfig; use inquire::{error::InquireError, Select}; use std::{fs, path::PathBuf, str::FromStr, vec}; use tracing_subscriber::FmtSubscriber; @@ -25,14 +26,12 @@ use tracing_subscriber::FmtSubscriber; use clap::{Parser, Subcommand}; use hdp_core::{ codec::datalake_compute::DatalakeComputeCodec, - compiler::{module::ModuleCompilerConfig, CompilerConfig}, + compiler::module::ModuleCompilerConfig, config::Config, - pre_processor::PreProcessor, + pre_processor::{PreProcessor, PreProcessorConfig}, processor::Processor, }; -use hdp_provider::evm::AbstractProviderConfig; - use tracing::{error, info, Level}; /// Simple Herodotus Data Processor CLI to handle tasks and datalakes @@ -174,17 +173,17 @@ async fn handle_run( let url: &str = "http://localhost:3030"; let program_path = "./build/compiled_cairo/hdp.json"; let config = Config::init(rpc_url, datalakes, tasks, chain_id).await; - let provider_config = AbstractProviderConfig { - rpc_url: &config.rpc_url, + let datalake_config = EvmProviderConfig { + rpc_url: config.rpc_url.parse().expect("Failed to parse RPC URL"), chain_id: config.chain_id, - rpc_chunk_size: config.rpc_chunk_size, + max_requests: config.rpc_chunk_size, }; let module_config = ModuleCompilerConfig { module_registry_rpc_url: url.parse().unwrap(), program_path: PathBuf::from(&program_path), }; - let compiler_config = CompilerConfig::new(provider_config.clone(), module_config); - let preprocessor = PreProcessor::new_with_config(compiler_config); + let preprocessor_config = PreProcessorConfig::new(datalake_config, module_config); + let preprocessor = PreProcessor::new_with_config(preprocessor_config); let result = preprocessor .process_from_serialized(config.datalakes.clone(), config.tasks.clone()) .await?; @@ -208,7 +207,7 @@ async fn handle_run( Ok(()) } else { let output_file_path = output_file.unwrap(); - let processor = Processor::new(provider_config, PathBuf::from(program_path)); + let processor = Processor::new(PathBuf::from(program_path)); let processor_result = processor.process(result, pie_file.unwrap()).await?; let output_string = serde_json::to_string_pretty(&processor_result).unwrap(); fs::write(&output_file_path, output_string).expect("Unable to write file"); @@ -507,8 +506,8 @@ async fn main() -> Result<()> { .prompt()?; handle_run( - Some(encoded_computes), - Some(encoded_datalakes), + Some(hex::encode(encoded_computes)), + Some(hex::encode(encoded_datalakes)), rpc_url, chain_id, Some(output_file), @@ -570,11 +569,14 @@ async fn main() -> Result<()> { let datalake_compute_codec = DatalakeComputeCodec::new(); let (encoded_datalakes, encoded_computes) = datalake_compute_codec.encode_batch(vec![target_datalake_compute])?; + + let encoded_computes_str = hex::encode(encoded_computes); + let encoded_datalakes_str = hex::encode(encoded_datalakes); // if allow_run is true, then run the 
evaluator if allow_run { handle_run( - Some(encoded_computes), - Some(encoded_datalakes), + Some(encoded_computes_str), + Some(encoded_datalakes_str), rpc_url, chain_id, output_file, @@ -586,11 +588,15 @@ async fn main() -> Result<()> { } Commands::Decode { tasks, datalakes } => { let datalake_compute_codec = DatalakeComputeCodec::new(); - datalake_compute_codec.decode_batch(datalakes, tasks)?; + let tasks = hex::decode(tasks)?; + let datalakes = hex::decode(datalakes)?; + datalake_compute_codec.decode_batch(&datalakes, &tasks)?; } Commands::DecodeOne { task, datalake } => { let datalake_compute_codec = DatalakeComputeCodec::new(); - datalake_compute_codec.decode_single(datalake, task)?; + let task = hex::decode(task)?; + let datalake = hex::decode(datalake)?; + datalake_compute_codec.decode_single(&datalake, &task)?; } Commands::Run { tasks, diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index 328f15f6..4f0e0990 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -9,9 +9,8 @@ version.workspace = true [dependencies] hdp-provider = { workspace = true } hdp-primitives = { workspace = true } -alloy-primitives = { workspace = true } alloy-merkle-tree = { workspace = true } -alloy-dyn-abi = { workspace = true } +alloy = { workspace = true } anyhow = { workspace = true } cairo-lang-starknet-classes.workspace = true starknet-crypto.workspace = true diff --git a/crates/core/src/cairo_runner/input/run.rs b/crates/core/src/cairo_runner/input/run.rs index 6ec02fa9..61d96cff 100644 --- a/crates/core/src/cairo_runner/input/run.rs +++ b/crates/core/src/cairo_runner/input/run.rs @@ -1,5 +1,6 @@ -use hdp_primitives::processed_types::{cairo_format, module::ProcessedModule}; -use hdp_provider::evm::ProcessedBlockProofs; +use hdp_primitives::processed_types::{ + block_proofs::ProcessedBlockProofs, cairo_format, module::ProcessedModule, +}; use serde::Serialize; /* diff --git a/crates/core/src/cairo_runner/run.rs b/crates/core/src/cairo_runner/run.rs index 7c778c17..b7ee70cb 100644 --- a/crates/core/src/cairo_runner/run.rs +++ b/crates/core/src/cairo_runner/run.rs @@ -1,8 +1,10 @@ +use alloy::primitives::{B256, U256}; use anyhow::Result; use hdp_primitives::processed_types::uint256::Uint256; use std::fs; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; +use std::str::FromStr; use tempfile::NamedTempFile; use tracing::info; @@ -13,8 +15,8 @@ use regex::Regex; #[derive(Debug)] pub struct RunResult { pub pie_path: PathBuf, - pub task_results: Vec, - pub results_root: String, + pub task_results: Vec, + pub results_root: B256, } pub struct Runner { @@ -68,20 +70,20 @@ impl Runner { } /// Parse the output of the run command - fn parse_run(&self, output: String) -> Result<(Vec, String)> { + fn parse_run(&self, output: String) -> Result<(Vec, B256)> { let task_result_re = Regex::new(r"Task Result\((\d+)\): (\S+)").unwrap(); let mut task_results = vec![]; for caps in task_result_re.captures_iter(&output) { let _ = &caps[1]; let value = &caps[2]; - task_results.push(value.to_string()); + task_results.push(U256::from_str(value)?); } let results_root_re = Regex::new(r"Results Root: (\S+) (\S+)").unwrap(); if let Some(results_root_caps) = results_root_re.captures(&output) { let results_root_1 = &results_root_caps[1]; let results_root_2 = &results_root_caps[2]; let result_root = Uint256::from_strs(results_root_2, results_root_1)?; - let combined_results_root = result_root.to_combined_string().to_string(); + let combined_results_root = result_root.to_combined_string(); 
Ok((task_results, combined_results_root)) } else { bail!("Results Root not found"); @@ -100,10 +102,15 @@ mod tests { let output = "Task Result(0): 0x01020304\nResults Root: 0x01020304 0x05060708"; let (task_results, results_root) = runner.parse_run(output.to_string()).unwrap(); assert_eq!(task_results.len(), 1); - assert_eq!(task_results[0], "0x01020304"); + assert_eq!( + task_results[0], + U256::from_str_radix("01020304", 16).unwrap() + ); assert_eq!( results_root, - "0x0000000000000000000000000506070800000000000000000000000001020304" + Uint256::from_strs("05060708", "01020304") + .unwrap() + .to_combined_string() ); } } diff --git a/crates/core/src/codec/datalake_compute.rs b/crates/core/src/codec/datalake_compute.rs index 2303e3e5..90ae1d72 100644 --- a/crates/core/src/codec/datalake_compute.rs +++ b/crates/core/src/codec/datalake_compute.rs @@ -1,5 +1,4 @@ -use alloy_dyn_abi::{DynSolType, DynSolValue}; -use alloy_primitives::hex::FromHex; +use alloy::dyn_abi::{DynSolType, DynSolValue}; use anyhow::{Ok, Result}; use hdp_primitives::{ datalake::{ @@ -10,7 +9,7 @@ use hdp_primitives::{ transactions::TransactionsInBlockDatalake, Datalake, }, - utils::{bytes_to_hex_string, last_byte_to_u8}, + utils::last_byte_to_u8, }; use tracing::info; @@ -26,14 +25,14 @@ impl DatalakeCodec { fn _decode_single( &self, datalake_code: &[u8], - datalake_string: String, + encoded_datalake: &[u8], ) -> Result { let decoded_datalake = match DatalakeType::from_index(last_byte_to_u8(datalake_code))? { DatalakeType::BlockSampled => { - DatalakeEnvelope::BlockSampled(BlockSampledDatalake::decode(&datalake_string)?) + DatalakeEnvelope::BlockSampled(BlockSampledDatalake::decode(encoded_datalake)?) } DatalakeType::TransactionsInBlock => DatalakeEnvelope::Transactions( - TransactionsInBlockDatalake::decode(&datalake_string)?, + TransactionsInBlockDatalake::decode(encoded_datalake)?, ), }; @@ -41,32 +40,30 @@ impl DatalakeCodec { } /// Decode a single datalake - fn decode_single(&self, serialized_datalake: String) -> Result { - let datalake_code = serialized_datalake.as_bytes().chunks(32).next().unwrap(); - let datalake_string = bytes_to_hex_string(serialized_datalake.as_bytes()); - Ok(self._decode_single(datalake_code, datalake_string)?) + fn decode_single(&self, serialized_datalake: &[u8]) -> Result { + let datalake_code = serialized_datalake.chunks(32).next().unwrap(); + Ok(self._decode_single(datalake_code, serialized_datalake)?) 
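// [Editor's sketch] The codec now moves raw bytes end to end instead of hex
// strings. A hypothetical round trip of the `bytes[]` ABI wrapping that
// `encode_batch`/`decode_batch` below rely on, using the same alloy dyn-abi
// calls as the patch:
use alloy::dyn_abi::{DynSolType, DynSolValue};
use anyhow::Result;

fn bytes_array_round_trip(items: Vec<Vec<u8>>) -> Result<Vec<Vec<u8>>> {
    // encode each item as `bytes`, then ABI-encode the whole `bytes[]` array
    let encoded =
        DynSolValue::Array(items.into_iter().map(DynSolValue::Bytes).collect()).abi_encode();
    // decode by parsing the `bytes[]` type and unwrapping each element
    let ty: DynSolType = "bytes[]".parse()?;
    let decoded = ty.abi_decode(&encoded)?;
    Ok(decoded
        .as_array()
        .expect("decoded `bytes[]` is an array")
        .iter()
        .map(|v| v.as_bytes().expect("element is bytes").to_vec())
        .collect())
}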
} /// Decode a batch of datalakes - fn decode_batch(&self, serialized_datalakes_batch: String) -> Result> { + fn decode_batch(&self, serialized_datalakes_batch: &[u8]) -> Result> { let datalakes_type: DynSolType = "bytes[]".parse()?; - let bytes = Vec::from_hex(serialized_datalakes_batch).expect("Invalid hex string"); - let serialized_datalakes = datalakes_type.abi_decode(&bytes)?; - + let serialized_datalakes = datalakes_type.abi_decode(serialized_datalakes_batch)?; let mut decoded_datalakes = Vec::new(); if let Some(datalakes) = serialized_datalakes.as_array() { for datalake in datalakes { - let datalake_code = datalake.as_bytes().unwrap().chunks(32).next().unwrap(); - let datalake_string = bytes_to_hex_string(datalake.as_bytes().unwrap()); - decoded_datalakes.push(self._decode_single(datalake_code, datalake_string)?); + let datalake_as_bytes = + datalake.as_bytes().expect("Cannot get bytes from datalake"); + let datalake_code = datalake_as_bytes.chunks(32).next().unwrap(); + decoded_datalakes.push(self._decode_single(datalake_code, datalake_as_bytes)?); } } Ok(decoded_datalakes) } - pub fn encode_single(&self, datalake: DatalakeEnvelope) -> Result { + pub fn encode_single(&self, datalake: DatalakeEnvelope) -> Result> { let encoded_datalake = match datalake { DatalakeEnvelope::BlockSampled(block_sampled_datalake) => { block_sampled_datalake.encode()? @@ -78,7 +75,7 @@ impl DatalakeCodec { Ok(encoded_datalake) } - pub fn encode_batch(&self, datalakes: Vec) -> Result { + pub fn encode_batch(&self, datalakes: Vec) -> Result> { let mut encoded_datalakes: Vec = Vec::new(); for datalake in datalakes { @@ -90,13 +87,12 @@ impl DatalakeCodec { transactions_datalake.encode()? } }; - let bytes = Vec::from_hex(encoded_datalake).expect("Invalid hex string"); - encoded_datalakes.push(DynSolValue::Bytes(bytes)); + encoded_datalakes.push(DynSolValue::Bytes(encoded_datalake)); } let array_encoded_datalakes = DynSolValue::Array(encoded_datalakes); let encoded_datalakes = array_encoded_datalakes.abi_encode(); - Ok(bytes_to_hex_string(&encoded_datalakes)) + Ok(encoded_datalakes) } } @@ -114,21 +110,22 @@ impl ComputeCodec { } /// Decode a single computation - pub fn decode_single(&self, serialized_task: String) -> Result { - Ok(self._decode_single(serialized_task.as_bytes())?) + pub fn decode_single(&self, serialized_task: &[u8]) -> Result { + Ok(self._decode_single(serialized_task)?) 
} /// Decode a batch of computations - pub fn decode_batch(&self, serialized_tasks_batch: String) -> Result> { + pub fn decode_batch(&self, serialized_tasks_batch: &[u8]) -> Result> { let tasks_type: DynSolType = "bytes[]".parse()?; - let bytes = Vec::from_hex(serialized_tasks_batch).expect("Invalid hex string"); - let serialized_tasks = tasks_type.abi_decode(&bytes)?; + let serialized_tasks = tasks_type.abi_decode(serialized_tasks_batch)?; let mut decoded_tasks = Vec::new(); if let Some(tasks) = serialized_tasks.as_array() { for task in tasks { - decoded_tasks.push(self._decode_single(task.as_bytes().unwrap())?); + decoded_tasks.push( + self._decode_single(task.as_bytes().expect("Cannot get bytes from task"))?, + ); } } @@ -136,22 +133,21 @@ impl ComputeCodec { } /// Encode batch of computations - pub fn encode_batch(&self, tasks: Vec) -> Result { + pub fn encode_batch(&self, tasks: Vec) -> Result> { let mut encoded_tasks: Vec = Vec::new(); for task in tasks { let encoded_task = task.encode()?; - let bytes = Vec::from_hex(encoded_task).expect("Invalid hex string"); - encoded_tasks.push(DynSolValue::Bytes(bytes)); + encoded_tasks.push(DynSolValue::Bytes(encoded_task)); } let array_encoded_tasks = DynSolValue::Array(encoded_tasks); let encoded_tasks = array_encoded_tasks.abi_encode(); - Ok(bytes_to_hex_string(&encoded_tasks)) + Ok(encoded_tasks) } /// Encode single computation - fn encode_single(&self, task: Computation) -> Result { + fn encode_single(&self, task: Computation) -> Result> { Ok(task.encode()?) } } @@ -172,8 +168,8 @@ impl DatalakeComputeCodec { pub fn decode_single( &self, - serialized_datalake: String, - serialized_task: String, + serialized_datalake: &[u8], + serialized_task: &[u8], ) -> Result { let decoded_datalake = self.datalake_codec.decode_single(serialized_datalake)?; let decoded_compute = self.compute_codec.decode_single(serialized_task)?; @@ -184,16 +180,14 @@ impl DatalakeComputeCodec { pub fn decode_batch( &self, - serialized_datalakes_batch: String, - serialized_tasks_batch: String, + serialized_datalakes_batch: &[u8], + serialized_tasks_batch: &[u8], ) -> Result> { // decode datalakes and tasks let decoded_datalakes = self .datalake_codec .decode_batch(serialized_datalakes_batch)?; - info!("Decoded datalakes: {:#?}", decoded_datalakes); let decoded_computes = self.compute_codec.decode_batch(serialized_tasks_batch)?; - info!("Decoded computes: {:#?}", decoded_computes); // check if the number of datalakes and tasks are the same if decoded_datalakes.len() != decoded_computes.len() { return Err(anyhow::anyhow!( @@ -213,29 +207,25 @@ impl DatalakeComputeCodec { Ok(decoded_datalakes_compute) } - pub fn encode_single(&self, datalake_compute: DatalakeCompute) -> Result<(String, String)> { + pub fn encode_single(&self, datalake_compute: DatalakeCompute) -> Result<(Vec, Vec)> { let encoded_datalake = self .datalake_codec .encode_single(datalake_compute.datalake)?; - info!("Encoded datalake: {}", encoded_datalake); let encoded_compute = self.compute_codec.encode_single(datalake_compute.compute)?; - info!("Encoded compute: {}", encoded_compute); Ok((encoded_datalake, encoded_compute)) } pub fn encode_batch( &self, datalakes_compute: Vec, - ) -> Result<(String, String)> { + ) -> Result<(Vec, Vec)> { let (datalakes, computes) = datalakes_compute .into_iter() .map(|datalake_compute| (datalake_compute.datalake, datalake_compute.compute)) .unzip(); let encoded_datalakes = self.datalake_codec.encode_batch(datalakes)?; - info!("Encoded datalakes: {}", encoded_datalakes); let 
encoded_computes = self.compute_codec.encode_batch(computes)?; - info!("Encoded computes: {}", encoded_computes); Ok((encoded_datalakes, encoded_computes)) } @@ -243,7 +233,7 @@ impl DatalakeComputeCodec { #[cfg(test)] mod tests { - use alloy_primitives::Address; + use alloy::{hex, primitives::Address}; use hdp_primitives::{ aggregate_fn::{AggregationFunction, FunctionContext}, datalake::block_sampled::{AccountField, BlockSampledCollection, HeaderField}, @@ -265,7 +255,7 @@ mod tests { ]; let encoded_tasks = compute_decoder.encode_batch(original_tasks).unwrap(); - let decoded_tasks = compute_decoder.decode_batch(encoded_tasks).unwrap(); + let decoded_tasks = compute_decoder.decode_batch(&encoded_tasks).unwrap(); assert_eq!(decoded_tasks.len(), 4); assert_eq!(decoded_tasks[0].aggregate_fn_id, AggregationFunction::AVG); @@ -296,9 +286,9 @@ mod tests { #[test] fn test_block_datalake_decoder() { let datalake_decoder = DatalakeCodec::new(); - let batched_block_datalake = "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000280000000000000000000000000000000000000000000000000000000000000038000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f000000000000000000000000000000000000000000000000000000000000"; + let batched_block_datalake = 
hex::decode("0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000280000000000000000000000000000000000000000000000000000000000000038000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f000000000000000000000000000000000000000000000000000000000000").unwrap(); let decoded_datalakes = datalake_decoder - .decode_batch(batched_block_datalake.to_string()) + .decode_batch(&batched_block_datalake) .unwrap(); assert_eq!(decoded_datalakes.len(), 4); @@ -325,7 +315,7 @@ mod tests { #[test] fn test_block_datalake_decoder_for_account() { let datalake_decoder = DatalakeCodec::new(); - let batched_block_datalake = "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004b902400000000000000000000000000000000000000000000000000000000004b9027000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000016020a4de450feb156a2a51ed159b2fb99da26e5f3a30000000000000000000000"; + let batched_block_datalake = 
hex::decode("0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004b902400000000000000000000000000000000000000000000000000000000004b9027000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000016020a4de450feb156a2a51ed159b2fb99da26e5f3a30000000000000000000000").unwrap(); let block_datalake = BlockSampledDatalake::new( 4952100, 4952103, @@ -358,7 +348,7 @@ mod tests { ); assert_eq!( datalake_decoder - .decode_batch(batched_block_datalake.to_string()) + .decode_batch(&batched_block_datalake) .unwrap(), datalakes ); @@ -367,7 +357,7 @@ mod tests { #[test] fn test_block_massive_datalake_decoder() { let datalake_decoder = DatalakeCodec::new(); - let batched_block_datalake = "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000280000000000000000000000000000000000000000000000000000000000000038000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009ead1800000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009ead1800000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009ead1800000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009ead1800000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f000000000000000000000000000000000000000000
000000000000000000"; + let batched_block_datalake = hex::decode("0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000280000000000000000000000000000000000000000000000000000000000000038000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009ead1800000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009ead1800000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009ead1800000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009ead1800000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f000000000000000000000000000000000000000000000000000000000000").unwrap(); let datalake_massive_block = DatalakeEnvelope::BlockSampled( BlockSampledDatalake::new(10399000, 10400000, "header.base_fee_per_gas".to_string(), 1) .unwrap(), @@ -380,7 +370,7 @@ mod tests { datalake_massive_block.clone(), ]; let decoded_datalakes = datalake_decoder - .decode_batch(batched_block_datalake.to_string()) + .decode_batch(&batched_block_datalake) .unwrap(); assert_eq!(decoded_datalakes.len(), 4); @@ -419,16 +409,14 @@ mod tests { ]; let encoded_datalakes = datalake_decoder.encode_batch(datalakes).unwrap(); - assert_eq!(encoded_datalakes, 
"0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000201000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000002010a000000000000000000000000000000000000000000000000000000000000") + assert_eq!(encoded_datalakes, hex::decode("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000201000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000002010a000000000000000000000000000000000000000000000000000000000000").unwrap()); } #[test] fn test_transaction_datalake_decoder() { let datalake_decoder = DatalakeCodec::new(); - let encoded_datalake = 
"0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000201000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000002010a000000000000000000000000000000000000000000000000000000000000"; - let decoded_datalake = datalake_decoder - .decode_batch(encoded_datalake.to_string()) - .unwrap(); + let encoded_datalake = hex::decode("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000201000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000002010a000000000000000000000000000000000000000000000000000000000000").unwrap(); + let decoded_datalake = datalake_decoder.decode_batch(&encoded_datalake).unwrap(); assert_eq!(decoded_datalake.len(), 2); let transaction_datalake1 = TransactionsInBlockDatalake::new( @@ -490,16 +478,14 @@ mod tests { ]; let encoded_datalakes = datalake_decoder.encode_batch(datalakes).unwrap(); - 
assert_eq!(encoded_datalakes, "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000202000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000020203000000000000000000000000000000000000000000000000000000000000") + assert_eq!(encoded_datalakes, hex::decode("0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000202000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000020203000000000000000000000000000000000000000000000000000000000000").unwrap()) } #[test] fn test_transaction_datalake_decoder_receipt() { let datalake_decoder = DatalakeCodec::new(); - let encoded_datalake = 
"0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000202000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000020203000000000000000000000000000000000000000000000000000000000000"; - let decoded_datalake = datalake_decoder - .decode_batch(encoded_datalake.to_string()) - .unwrap(); + let encoded_datalake = hex::decode("0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001800000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000202000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000186a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000020203000000000000000000000000000000000000000000000000000000000000").unwrap(); + let decoded_datalake = datalake_decoder.decode_batch(&encoded_datalake).unwrap(); assert_eq!(decoded_datalake.len(), 2); let transaction_datalake1 = TransactionsInBlockDatalake::new( diff --git a/crates/core/src/compiler/datalake_compute/block_sampled.rs b/crates/core/src/compiler/datalake_compute/block_sampled.rs 
index b1a9c084..df11484e 100644 --- a/crates/core/src/compiler/datalake_compute/block_sampled.rs +++ b/crates/core/src/compiler/datalake_compute/block_sampled.rs @@ -1,23 +1,21 @@ use hdp_primitives::{ + block::account::Account, datalake::{ block_sampled::{BlockSampledCollection, BlockSampledDatalake}, DatalakeField, }, processed_types::{ - account::ProcessedAccount, - header::{ProcessedHeader, ProcessedHeaderProof}, - mmr::MMRMeta, - mpt::ProcessedMPTProof, + account::ProcessedAccount, header::ProcessedHeader, mmr::MMRMeta, mpt::ProcessedMPTProof, storage::ProcessedStorage, }, }; use serde::{Deserialize, Serialize}; use std::{collections::HashSet, sync::Arc}; -use alloy_primitives::keccak256; +use alloy::primitives::{Bytes, U256}; use anyhow::Result; -use hdp_provider::evm::AbstractProvider; +use hdp_provider::evm::provider::EvmProvider; use tokio::sync::RwLock; /// [`CompiledBlockSampledDatalake`] is a unified structure that contains all the required data to verify the datalake @@ -26,7 +24,7 @@ use tokio::sync::RwLock; #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct CompiledBlockSampledDatalake { /// Targeted datalake's compiled results - pub values: Vec, + pub values: Vec, /// Headers related to the datalake pub headers: HashSet, /// Accounts related to the datalake @@ -39,16 +37,20 @@ pub struct CompiledBlockSampledDatalake { pub async fn compile_block_sampled_datalake( datalake: BlockSampledDatalake, - provider: &Arc>, + provider: &Arc>, ) -> Result { - let mut abstract_provider = provider.write().await; + let abstract_provider = provider.write().await; - let mut aggregation_set: Vec = Vec::new(); + let mut aggregation_set: Vec = Vec::new(); - let full_header_and_proof_result = abstract_provider - .get_sequencial_full_header_with_proof(datalake.block_range_start, datalake.block_range_end) + let (mmr_meta, headers_proofs) = abstract_provider + .get_range_of_header_proofs( + datalake.block_range_start, + datalake.block_range_end, + datalake.increment, + ) .await?; - let mmr_meta = full_header_and_proof_result.1; + let mmr_meta = MMRMeta::from(mmr_meta); let mut headers: HashSet = HashSet::new(); let mut accounts: HashSet = HashSet::new(); let mut storages: HashSet = HashSet::new(); @@ -58,27 +60,24 @@ pub async fn compile_block_sampled_datalake( match datalake.sampled_property { BlockSampledCollection::Header(property) => { for block in block_range { - let fetched_block = full_header_and_proof_result.0.get(&block).unwrap().clone(); - let value = property.decode_field_from_rlp(&fetched_block.0); - - headers.insert(ProcessedHeader { - rlp: fetched_block.0, - proof: ProcessedHeaderProof { - leaf_idx: fetched_block.2, - mmr_path: fetched_block.1, - }, - }); - + let fetched_block = headers_proofs.get(&block).unwrap(); + let value = property + .decode_field_from_rlp(&Bytes::from(fetched_block.rlp_block_header.clone())); + headers.insert(ProcessedHeader::new( + fetched_block.rlp_block_header.clone(), + fetched_block.element_index, + fetched_block.siblings_hashes.clone(), + )); aggregation_set.push(value); } } BlockSampledCollection::Account(address, property) => { let accounts_and_proofs_result = abstract_provider - .get_range_account_with_proof( + .get_range_of_account_proofs( datalake.block_range_start, datalake.block_range_end, datalake.increment, - address.to_string(), + address, ) .await?; @@ -86,17 +85,16 @@ pub async fn compile_block_sampled_datalake( // let mut encoded_account = "".to_string(); for block in block_range { - let fetched_block = 
full_header_and_proof_result.0.get(&block).unwrap().clone(); + let fetched_block = headers_proofs.get(&block).unwrap().clone(); let account_proof = accounts_and_proofs_result.get(&block).unwrap().clone(); - let value = property.decode_field_from_rlp(&account_proof.encoded_account); + let account = Account::from(&account_proof).rlp_encode(); - headers.insert(ProcessedHeader { - rlp: fetched_block.0, - proof: ProcessedHeaderProof { - leaf_idx: fetched_block.2, - mmr_path: fetched_block.1, - }, - }); + let value = property.decode_field_from_rlp(&account); + headers.insert(ProcessedHeader::new( + fetched_block.rlp_block_header.clone(), + fetched_block.element_index, + fetched_block.siblings_hashes.clone(), + )); let account_proof = ProcessedMPTProof { block_number: block, @@ -107,21 +105,16 @@ pub async fn compile_block_sampled_datalake( aggregation_set.push(value); } - let account_key = keccak256(address); - accounts.insert(ProcessedAccount { - address: address.to_string(), - account_key: account_key.to_string(), - proofs: account_proofs, - }); + accounts.insert(ProcessedAccount::new(address, account_proofs)); } BlockSampledCollection::Storage(address, slot) => { let storages_and_proofs_result = abstract_provider - .get_range_storage_with_proof( + .get_range_of_storage_proofs( datalake.block_range_start, datalake.block_range_end, datalake.increment, - address.to_string(), - slot.to_string(), + address, + slot, ) .await?; @@ -129,44 +122,27 @@ pub async fn compile_block_sampled_datalake( let mut account_proofs: Vec = vec![]; for i in block_range { - let fetched_block = full_header_and_proof_result.0.get(&i).unwrap().clone(); + let fetched_block = headers_proofs.get(&i).unwrap().clone(); let storage_proof = storages_and_proofs_result.get(&i).unwrap().clone(); - headers.insert(ProcessedHeader { - rlp: fetched_block.0, - proof: ProcessedHeaderProof { - leaf_idx: fetched_block.2, - mmr_path: fetched_block.1, - }, - }); - - account_proofs.push(ProcessedMPTProof { - block_number: i, - proof: storage_proof.account_proof, - }); - - storage_proofs.push(ProcessedMPTProof { - block_number: i, - proof: storage_proof.storage_proof, - }); - - aggregation_set.push(storage_proof.storage_value); + headers.insert(ProcessedHeader::new( + fetched_block.rlp_block_header.clone(), + fetched_block.element_index, + fetched_block.siblings_hashes.clone(), + )); + + account_proofs.push(ProcessedMPTProof::new(i, storage_proof.account_proof)); + + storage_proofs.push(ProcessedMPTProof::new( + i, + storage_proof.storage_proof[0].proof.clone(), + )); + + aggregation_set.push(storage_proof.storage_proof[0].value); } - let storage_key = keccak256(slot).to_string(); - let account_key = keccak256(address); - - storages.insert(ProcessedStorage { - address: address.to_string(), - slot: slot.to_string(), - storage_key, - proofs: storage_proofs, - }); - accounts.insert(ProcessedAccount { - address: address.to_string(), - account_key: account_key.to_string(), - proofs: account_proofs, - }); + storages.insert(ProcessedStorage::new(address, slot, storage_proofs)); + accounts.insert(ProcessedAccount::new(address, account_proofs)); } } diff --git a/crates/core/src/compiler/datalake_compute/mod.rs b/crates/core/src/compiler/datalake_compute/mod.rs index 85858ce9..7b30bd45 100644 --- a/crates/core/src/compiler/datalake_compute/mod.rs +++ b/crates/core/src/compiler/datalake_compute/mod.rs @@ -1,3 +1,4 @@ +use alloy::primitives::{B256, U256}; use anyhow::{bail, Result}; use hdp_primitives::{ datalake::{envelope::DatalakeEnvelope, 
task::DatalakeCompute}, @@ -6,7 +7,7 @@ use hdp_primitives::{ receipt::ProcessedReceipt, storage::ProcessedStorage, transaction::ProcessedTransaction, }, }; -use hdp_provider::evm::{AbstractProvider, AbstractProviderConfig}; +use hdp_provider::evm::provider::{EvmProvider, EvmProviderConfig}; use std::{ collections::{HashMap, HashSet}, sync::Arc, @@ -22,7 +23,7 @@ pub struct DatalakeComputeCompilationResults { /// flag to check if the aggregation function is pre-processable pub pre_processable: bool, /// task_commitment -> value - pub commit_results_maps: HashMap, + pub commit_results_maps: HashMap, /// Headers related to the datalake pub headers: HashSet, /// Accounts related to the datalake @@ -41,7 +42,7 @@ impl DatalakeComputeCompilationResults { #[allow(clippy::too_many_arguments)] pub fn new( pre_processable: bool, - commit_results_maps: HashMap, + commit_results_maps: HashMap, headers: HashSet, accounts: HashSet, storages: HashSet, @@ -63,13 +64,13 @@ impl DatalakeComputeCompilationResults { } pub struct DatalakeCompiler { - provider: Arc>, + provider: Arc>, } impl DatalakeCompiler { /// initialize DatalakeCompiler with commitment and datalake - pub fn new_from_config(config: AbstractProviderConfig) -> Self { - let provider = AbstractProvider::new(config); + pub fn new_from_config(config: EvmProviderConfig) -> Self { + let provider = EvmProvider::new(config); Self { provider: Arc::new(provider.into()), } @@ -112,7 +113,7 @@ impl DatalakeCompiler { let aggregated_result = aggregation_fn .operation(&compiled_block_sampled.values, Some(fn_context))?; // Save the datalake results - commit_results_maps.insert(task_commitment.to_string(), aggregated_result); + commit_results_maps.insert(task_commitment, aggregated_result); if !aggregation_fn.is_pre_processable() { pre_processable = false; } @@ -134,7 +135,7 @@ impl DatalakeCompiler { let aggregated_result = aggregation_fn.operation(&compiled_tx_datalake.values, Some(fn_context))?; // Save the datalake results - commit_results_maps.insert(task_commitment.to_string(), aggregated_result); + commit_results_maps.insert(task_commitment, aggregated_result); if !aggregation_fn.is_pre_processable() { pre_processable = false; } diff --git a/crates/core/src/compiler/datalake_compute/transactions.rs b/crates/core/src/compiler/datalake_compute/transactions.rs index 1c0c7ccf..f305d7cd 100644 --- a/crates/core/src/compiler/datalake_compute/transactions.rs +++ b/crates/core/src/compiler/datalake_compute/transactions.rs @@ -1,3 +1,4 @@ +use alloy::primitives::U256; use anyhow::Result; use hdp_primitives::{ datalake::{ @@ -5,14 +6,12 @@ use hdp_primitives::{ DatalakeField, }, processed_types::{ - header::{ProcessedHeader, ProcessedHeaderProof}, - mmr::MMRMeta, - receipt::ProcessedReceipt, + header::ProcessedHeader, mmr::MMRMeta, receipt::ProcessedReceipt, transaction::ProcessedTransaction, }, - utils::tx_index_to_tx_key, }; -use hdp_provider::evm::AbstractProvider; + +use hdp_provider::evm::provider::EvmProvider; use serde::{Deserialize, Serialize}; use std::{collections::HashSet, sync::Arc}; use tokio::sync::RwLock; @@ -20,7 +19,7 @@ use tokio::sync::RwLock; #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct CompiledTransactionsDatalake { /// Targeted datalake's compiled results - pub values: Vec, + pub values: Vec, /// Headers related to the datalake pub headers: HashSet, /// Transactions related to the datalake @@ -33,18 +32,29 @@ pub struct CompiledTransactionsDatalake { pub async fn compile_tx_datalake( datalake: 
TransactionsInBlockDatalake, - provider: &Arc>, + provider: &Arc>, ) -> Result { let abstract_provider = provider.write().await; - let mut aggregation_set: Vec = Vec::new(); + let mut aggregation_set: Vec = Vec::new(); - let full_header_and_proof_result = abstract_provider - .get_sequencial_full_header_with_proof(datalake.target_block, datalake.target_block) + let (mmr_meta, headers_proofs) = abstract_provider + .get_range_of_header_proofs( + datalake.target_block, + datalake.target_block, + datalake.increment, + ) .await?; - let mmr_meta = full_header_and_proof_result.1; + let mmr_meta = MMRMeta::from(mmr_meta); let mut headers: HashSet = HashSet::new(); let mut transactions: HashSet = HashSet::new(); let mut transaction_receipts: HashSet = HashSet::new(); + let fetched_block = headers_proofs.get(&datalake.target_block).unwrap(); + + headers.insert(ProcessedHeader::new( + fetched_block.rlp_block_header.clone(), + fetched_block.element_index, + fetched_block.siblings_hashes.clone(), + )); match datalake.sampled_property { TransactionsCollection::Transactions(property) => { @@ -57,35 +67,11 @@ pub async fn compile_tx_datalake( ) .await? { - let key_fixed_bytes = tx_index_to_tx_key(tx.tx_index); - - transactions.insert(ProcessedTransaction { - key: key_fixed_bytes.to_string(), - block_number: tx.block_number, - proof: tx.transaction_proof, - }); - - headers.insert(ProcessedHeader { - rlp: full_header_and_proof_result - .0 - .get(&tx.block_number) - .unwrap() - .0 - .clone(), - proof: ProcessedHeaderProof { - leaf_idx: full_header_and_proof_result - .0 - .get(&tx.block_number) - .unwrap() - .2, - mmr_path: full_header_and_proof_result - .0 - .get(&tx.block_number) - .unwrap() - .1 - .clone(), - }, - }); + transactions.insert(ProcessedTransaction::new( + tx.tx_index, + tx.block_number, + tx.transaction_proof, + )); // depends on datalake.included_types filter the value to be included in the aggregation set if datalake.included_types.is_included(tx.tx_type) { @@ -104,35 +90,11 @@ pub async fn compile_tx_datalake( ) .await? 
{ - let key_fixed_bytes = tx_index_to_tx_key(tx_receipt.tx_index); - - transaction_receipts.insert(ProcessedReceipt { - key: key_fixed_bytes.to_string(), - block_number: tx_receipt.block_number, - proof: tx_receipt.receipt_proof, - }); - - headers.insert(ProcessedHeader { - rlp: full_header_and_proof_result - .0 - .get(&tx_receipt.block_number) - .unwrap() - .0 - .clone(), - proof: ProcessedHeaderProof { - leaf_idx: full_header_and_proof_result - .0 - .get(&tx_receipt.block_number) - .unwrap() - .2, - mmr_path: full_header_and_proof_result - .0 - .get(&tx_receipt.block_number) - .unwrap() - .1 - .clone(), - }, - }); + transaction_receipts.insert(ProcessedReceipt::new( + tx_receipt.tx_index, + tx_receipt.block_number, + tx_receipt.receipt_proof, + )); // depends on datalake.included_types filter the value to be included in the aggregation set if datalake.included_types.is_included(tx_receipt.tx_type) { diff --git a/crates/core/src/compiler/mod.rs b/crates/core/src/compiler/mod.rs index bcb452b1..da6bfef2 100644 --- a/crates/core/src/compiler/mod.rs +++ b/crates/core/src/compiler/mod.rs @@ -1,8 +1,8 @@ use anyhow::Result; use datalake_compute::DatalakeComputeCompilationResults; use hdp_primitives::datalake::task::DatalakeCompute; -use hdp_provider::evm::AbstractProviderConfig; -use module::ModuleCompilerConfig; + +use crate::pre_processor::PreProcessorConfig; pub mod datalake_compute; pub mod module; @@ -12,26 +12,8 @@ pub struct Compiler { // pub(crate) module: module::ModuleCompiler, } -pub struct CompilerConfig { - datalake_config: AbstractProviderConfig, - #[allow(dead_code)] - module_config: ModuleCompilerConfig, -} - -impl CompilerConfig { - pub fn new( - datalake_config: AbstractProviderConfig, - module_config: ModuleCompilerConfig, - ) -> Self { - Self { - datalake_config, - module_config, - } - } -} - impl Compiler { - pub fn new(config: CompilerConfig) -> Self { + pub fn new(config: PreProcessorConfig) -> Self { Self { datalake: datalake_compute::DatalakeCompiler::new_from_config(config.datalake_config), // module: module::ModuleCompiler::new_with_config(config), diff --git a/crates/core/src/pre_processor.rs b/crates/core/src/pre_processor.rs index e96a5135..6f88f91f 100644 --- a/crates/core/src/pre_processor.rs +++ b/crates/core/src/pre_processor.rs @@ -2,38 +2,45 @@ //! Preprocessor is responsible for identifying the required values. //! This will be the most abstract layer of the preprocessor.
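// [Editor's sketch] How the reworked configuration fits together end to end:
// `EvmProviderConfig` (replacing `AbstractProviderConfig`) feeds the datalake
// compiler, and `PreProcessorConfig` (replacing `CompilerConfig`) bundles it
// with the module compiler settings. URLs and values below are the placeholders
// used elsewhere in this diff, not real endpoints.
use std::path::PathBuf;

use hdp_core::{
    compiler::module::ModuleCompilerConfig,
    pre_processor::{PreProcessor, PreProcessorConfig},
};
use hdp_provider::evm::provider::EvmProviderConfig;

fn build_preprocessor() -> PreProcessor {
    let datalake_config = EvmProviderConfig {
        rpc_url: "http://localhost:8545".parse().expect("valid RPC URL"),
        chain_id: 11155111, // Sepolia, as in the integration test
        max_requests: 100,
    };
    let module_config = ModuleCompilerConfig {
        module_registry_rpc_url: "http://localhost:3030".parse().unwrap(),
        program_path: PathBuf::from("./build/compiled_cairo/hdp.json"),
    };
    PreProcessor::new_with_config(PreProcessorConfig::new(datalake_config, module_config))
}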
-use std::str::FromStr; - use crate::codec::datalake_compute::DatalakeComputeCodec; use crate::compiler::datalake_compute::DatalakeComputeCompilationResults; use crate::compiler::module::ModuleCompilerConfig; -use crate::compiler::{Compiler, CompilerConfig}; -use alloy_dyn_abi::DynSolValue; +use crate::compiler::Compiler; +use alloy::dyn_abi::DynSolValue; +use alloy::hex; +use alloy::primitives::{Bytes, Keccak256, B256, U256}; use alloy_merkle_tree::standard_binary_tree::StandardMerkleTree; -use alloy_primitives::hex::FromHex; -use alloy_primitives::{FixedBytes, Keccak256, B256, U256}; use anyhow::{bail, Ok, Result}; use cairo_lang_starknet_classes::casm_contract_class::CasmContractClass; use hdp_primitives::module::Module; use hdp_primitives::processed_types::datalake_compute::ProcessedDatalakeCompute; use hdp_primitives::{datalake::task::DatalakeCompute, processed_types::v1_query::ProcessedResult}; -use hdp_provider::evm::AbstractProviderConfig; +use hdp_provider::evm::provider::EvmProviderConfig; use hdp_provider::key::FetchKeyEnvelope; + use tracing::info; pub struct PreProcessor { /// compiler compiler: Compiler, - decoder: DatalakeComputeCodec, } pub struct PreProcessorConfig { - pub datalake_config: AbstractProviderConfig, + pub datalake_config: EvmProviderConfig, pub module_config: ModuleCompilerConfig, } +impl PreProcessorConfig { + pub fn new(datalake_config: EvmProviderConfig, module_config: ModuleCompilerConfig) -> Self { + Self { + datalake_config, + module_config, + } + } +} + pub struct ExtendedDatalake { pub task: DatalakeCompute, pub fetch_keys_set: Vec, @@ -46,7 +53,7 @@ pub struct ExtendedModule { } impl PreProcessor { - pub fn new_with_config(config: CompilerConfig) -> Self { + pub fn new_with_config(config: PreProcessorConfig) -> Self { let compiler = Compiler::new(config); let datalake_compute_codec = DatalakeComputeCodec::new(); Self { @@ -60,10 +67,10 @@ impl PreProcessor { batched_datalakes: String, batched_tasks: String, ) -> Result { + let bytes_datalake = hex::decode(batched_datalakes)?; + let bytes_tasks = hex::decode(batched_tasks)?; // 1. 
decode the tasks - let tasks = self - .decoder - .decode_batch(batched_datalakes, batched_tasks)?; + let tasks = self.decoder.decode_batch(&bytes_datalake, &bytes_tasks)?; self.process(tasks).await } @@ -72,7 +79,8 @@ impl PreProcessor { /// Then it will run the preprocessor and return the result, fetch points /// Fetch points are the values that are required to run the module pub async fn process(&self, tasks: Vec) -> Result { - let task_commitments: Vec = tasks.iter().map(|task| task.commit()).collect(); + let task_commitments: Vec = + tasks.iter().map(|task| task.commit()).collect::>(); // do compile with the tasks let compiled_results = self.compiler.compile(&tasks).await?; // do operation if possible @@ -91,7 +99,7 @@ impl PreProcessor { .get(&task_commitment) .unwrap(); let result_commitment = - self._raw_result_to_result_commitment(&task_commitment, compiled_result); + self._raw_result_to_result_commitment(&task_commitment, *compiled_result); let result_proof = results_merkle_tree .as_ref() .unwrap() @@ -100,10 +108,8 @@ impl PreProcessor { } else { None }; - - let typed_task_commitment = FixedBytes::from_hex(task_commitment.clone())?; let task_proof = - tasks_merkle_tree.get_proof(&DynSolValue::FixedBytes(typed_task_commitment, 32)); + tasks_merkle_tree.get_proof(&DynSolValue::FixedBytes(task_commitment, 32)); let encoded_task = task.encode()?; let datalake_type = task.datalake.get_datalake_type(); let property_type = task.datalake.get_collection_type().to_index(); @@ -112,22 +118,22 @@ impl PreProcessor { Some(result_value) => { let (compiled_result, result_commitment, result_proof) = result_value; ProcessedDatalakeCompute::new_with_result( - encoded_task, + Bytes::from(encoded_task), task_commitment, - compiled_result.to_string(), - result_commitment.to_string(), + *compiled_result, + result_commitment, task_proof, result_proof, - task.datalake.encode()?, + Bytes::from(task.datalake.encode()?), datalake_type.into(), property_type, ) } None => ProcessedDatalakeCompute::new_without_result( - encoded_task, + Bytes::from(encoded_task), task_commitment, task_proof, - task.datalake.encode()?, + Bytes::from(task.datalake.encode()?), datalake_type.into(), property_type, ), @@ -155,7 +161,7 @@ impl PreProcessor { fn build_merkle_tree( &self, compiled_results: &DatalakeComputeCompilationResults, - task_commitments: Vec, + task_commitments: Vec, ) -> Result<(StandardMerkleTree, Option)> { let mut tasks_leaves = Vec::new(); let mut results_leaves = Vec::new(); @@ -168,11 +174,10 @@ impl PreProcessor { None => bail!("Task commitment not found in compiled results"), }; let result_commitment = - self._raw_result_to_result_commitment(&task_commitment, compiled_result); + self._raw_result_to_result_commitment(&task_commitment, *compiled_result); results_leaves.push(DynSolValue::FixedBytes(result_commitment, 32)); } - let typed_task_commitment = FixedBytes::from_hex(task_commitment)?; - tasks_leaves.push(DynSolValue::FixedBytes(typed_task_commitment, 32)); + tasks_leaves.push(DynSolValue::FixedBytes(task_commitment, 32)); } let tasks_merkle_tree = StandardMerkleTree::of(tasks_leaves); @@ -186,12 +191,12 @@ impl PreProcessor { fn _raw_result_to_result_commitment( &self, - task_commitment: &str, - compiled_result: &str, - ) -> FixedBytes<32> { + task_commitment: &B256, + compiled_result: U256, + ) -> B256 { let mut hasher = Keccak256::new(); - hasher.update(Vec::from_hex(task_commitment).unwrap()); - hasher.update(B256::from(U256::from_str(compiled_result).unwrap())); + 
hasher.update(task_commitment); + hasher.update(B256::from(compiled_result)); hasher.finalize() } } diff --git a/crates/core/src/processor.rs b/crates/core/src/processor.rs index 4ecb0a74..69532fa4 100644 --- a/crates/core/src/processor.rs +++ b/crates/core/src/processor.rs @@ -2,24 +2,21 @@ //! This run is sound execution of the module. //! This will be most abstract layer of the processor. -use alloy_dyn_abi::DynSolValue; +use alloy::dyn_abi::DynSolValue; +use alloy::primitives::{FixedBytes, Keccak256, B256, U256}; use alloy_merkle_tree::standard_binary_tree::StandardMerkleTree; -use alloy_primitives::{hex::FromHex, FixedBytes, Keccak256, B256, U256}; use anyhow::Result; use hdp_primitives::processed_types::{ cairo_format::AsCairoFormat, datalake_compute::ProcessedDatalakeCompute, v1_query::ProcessedResult, }; use serde::Serialize; -use std::{path::PathBuf, str::FromStr}; - -use hdp_provider::evm::{AbstractProvider, AbstractProviderConfig}; +use std::path::PathBuf; use crate::cairo_runner::run::{RunResult, Runner}; pub struct Processor { runner: Runner, - _provider: AbstractProvider, } #[derive(Debug, Serialize)] @@ -68,13 +65,9 @@ impl ProcessorResult { } impl Processor { - pub fn new(provider_config: AbstractProviderConfig, program_path: PathBuf) -> Self { + pub fn new(program_path: PathBuf) -> Self { let runner = Runner::new(program_path); - let provider = AbstractProvider::new(provider_config); - Self { - runner, - _provider: provider, - } + Self { runner } } pub async fn process( @@ -130,10 +123,10 @@ impl Processor { requset: ProcessedResult, result: RunResult, ) -> Result { - let task_commitments: Vec = requset + let task_commitments: Vec = requset .tasks .iter() - .map(|task| task.task_commitment.clone()) + .map(|task| task.task_commitment) .collect(); // let task_inclusion_proofs: Vec<_> = requset // .tasks @@ -152,8 +145,8 @@ impl Processor { let mut new_tasks: Vec = Vec::new(); for (idx, mut task) in requset.tasks.into_iter().enumerate() { - let compiled_result = result.task_results[idx].clone(); - let result_commitment = result_commitments[idx].to_string(); + let compiled_result = result.task_results[idx]; + let result_commitment = result_commitments[idx]; let result_proof = results_inclusion_proofs[idx].clone(); task.update_results(compiled_result, result_commitment, result_proof); new_tasks.push(task.clone()); @@ -175,8 +168,8 @@ impl Processor { fn build_result_merkle_tree( &self, - task_commitments: Vec, - task_results: Vec, + task_commitments: Vec, + task_results: Vec, ) -> Result<(StandardMerkleTree, Vec>)> { let mut results_leaves = Vec::new(); let mut results_commitments = Vec::new(); @@ -192,12 +185,12 @@ impl Processor { fn _raw_result_to_result_commitment( &self, - task_commitment: &str, - compiled_result: &str, - ) -> FixedBytes<32> { + task_commitment: &B256, + compiled_result: &U256, + ) -> B256 { let mut hasher = Keccak256::new(); - hasher.update(Vec::from_hex(task_commitment).unwrap()); - hasher.update(B256::from(U256::from_str(compiled_result).unwrap())); + hasher.update(task_commitment); + hasher.update(B256::from(*compiled_result)); hasher.finalize() } } diff --git a/crates/core/tests/integration_test.rs b/crates/core/tests/integration_test.rs index fbcd7947..0890f0b4 100644 --- a/crates/core/tests/integration_test.rs +++ b/crates/core/tests/integration_test.rs @@ -2,8 +2,8 @@ mod integration_test { use std::path::PathBuf; use hdp_core::{ - compiler::{module::ModuleCompilerConfig, CompilerConfig}, - pre_processor::PreProcessor, + 
compiler::module::ModuleCompilerConfig, + pre_processor::{PreProcessor, PreProcessorConfig}, processor::Processor, }; use hdp_primitives::datalake::{ @@ -11,7 +11,8 @@ mod integration_test { envelope::DatalakeEnvelope, task::{Computation, DatalakeCompute}, }; - use hdp_provider::evm::AbstractProviderConfig; + + use hdp_provider::evm::provider::EvmProviderConfig; use starknet::providers::Url; // Non-paid personal alchemy endpoint @@ -27,22 +28,22 @@ mod integration_test { module_registry_rpc_url: Url::parse(STARKNET_SEPOLIA_RPC).unwrap(), program_path: PathBuf::from(PREPROCESS_PROGRAM_PATH), }; - - let datalake_config = AbstractProviderConfig { - rpc_url: SEPOLIA_RPC_URL, + let datalake_config = EvmProviderConfig { + rpc_url: Url::parse(SEPOLIA_RPC_URL).unwrap(), chain_id: 11155111, - rpc_chunk_size: 40, + max_requests: 100, + }; + + let preprocessor_config = PreProcessorConfig { + datalake_config, + module_config, }; - PreProcessor::new_with_config(CompilerConfig::new(datalake_config, module_config)) + + PreProcessor::new_with_config(preprocessor_config) } fn init_processor() -> Processor { - let config = AbstractProviderConfig { - rpc_url: SEPOLIA_RPC_URL, - chain_id: 11155111, - rpc_chunk_size: 40, - }; - Processor::new(config, PathBuf::from(PREPROCESS_PROGRAM_PATH)) + Processor::new(PathBuf::from(PREPROCESS_PROGRAM_PATH)) } #[ignore = "ignore for now"] diff --git a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index 41dbdc72..db54ac63 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -7,11 +7,10 @@ repository.workspace = true version.workspace = true [dependencies] -serde = { workspace = true, features = ["derive"] } +serde = { workspace = true } anyhow = { workspace = true } -alloy-dyn-abi = { workspace = true } -alloy-rlp = { workspace = true, features = ["derive"] } -alloy-primitives = { workspace = true, features = ["rlp", "serde"] } +alloy = { workspace = true } +alloy-rlp = { workspace = true } eth-trie-proofs.workspace = true rand = { workspace = true } starknet-crypto.workspace = true diff --git a/crates/primitives/fixtures/processed/mpt.json b/crates/primitives/fixtures/processed/mpt.json new file mode 100644 index 00000000..1bbc4af9 --- /dev/null +++ b/crates/primitives/fixtures/processed/mpt.json @@ -0,0 +1,13 @@ +{ + "block_number": 5244634, + "proof": [ + "0xf90211a0f9c48878cf7ff267e2b1a685491c745eb2b8686f00ac15e3725105ffd404042ba05929b6aabbfbfd7576caa353650bca5800e245f2531ef590787007ed123cdfe0a0a8b96339a2824738365bd080649c38f22ffb6cbd060db0fa3389580ef1ba8428a0ccdcf2ec17db8187252805e650370e323bd9b353597e930897ab70e907d7d614a0e91f7c3fb281f5f5707a8cf5fa44329a8ab8abd779b81d984616c19f21d52749a098074f7e7ab1c11dfbf74588b306c62d16b5059f9f97da9891618061ef38ea7ca0fc3c6a10bb61487ff8f86f2f42c3ba418a56215a5d000335b9c829c1512cedf7a0d6834cc09a267e8e7a48a1909e0d353b6783852070c5eb72858f36b6d7589656a0459651f7b4ba40026ef3162a3bf810719ae3ab53ebde780936ba4bc7406195cda00dd15caf7adbbf0ca9afcab96e97010cb3d46ba8d8ebf4d74db2080b91c5dda6a0679bbef6b6142ed25dd96410e509773d8f869123555b786aa3f98d79baa9abdfa0b2125fd81c270f4477db982dfee9637afdafeb311a573035b08000ae572267d1a01049729bc60474abbc9b1a6b2f95cb4943a4b2005b8aa8158f3a101a36a6b67da07ec3a2d138856e8aa59c4a66543e41bc55a7573469d5a61a6373f7641da23e0da0c5646a6d9be5d170d8dd6017bdd70e45d787222ad943b918ca7a3793433b6e97a0b6b6329bf71b44a98bfd9595bdac57676f52b6c490aba5b8e902729610172e3c80", + 
"0xf90211a0740decc682f4d2e79465ee48b042e978c7f0e4419c1c5b478195f43c515002e3a039ccce14eae4681d983d9ebccce51f77191f620719992488ec18b297cacd311ea0a44dfcf29726f626752b3944d241e5c3f80366ab9367324894c9e9d05a5790d1a039b21cf9d33eaca271c4564e2dddb1e978dd0c356bf46fc3640fdac4ecc8571ba0fe9ef8eca0322371353c926fb1c5d66355870b0bfd0c00ce598cd2f40b4c3341a0b321d1aeb3ec1560acaba62ce2aeae362f94e711c27fcb924b2cbfa2db5e87dea0e69857e44c092b4ec4ef40b82a5fb640efc9618e43cfbb5ef14e31c4aa2f8737a011266dd12235f119fa0061419550726d8891115eeb4ce02d2a537923153c8201a08eff558ad389e76e6a3e6b37b5b6207587948aaae02b71515a54c2364430a359a0f48ce5d3b1217f770aa16bf304af51f9d24cc736e2c43fe041c455e36d6322eba028e781fd4b7fe0bd1e97f313065caaccb23bc92c55b55845b23f73008840b223a04a81b5d128f2085f380c8d649ae898e1843f05faa78f9473896b1f2033603adfa0508673aa83abfeb0ef84d0e550232129af6b224a7347e903e11597573078e6d1a0a330534d7c5edf4cac0c262d39819636b14eddcf04947aa95d870848ae4f296ca06101e2706cafb764fa0a8f0af9eb651dd39c1efdb8ef00898395217437971000a05c3a9f9a40b6c797da423fee163b300830ecac2edd1dd8c0c90b8260af8a709b80", + "0xf90211a0e3872b8de3e46b99dee7b7cf8c1006dfa225c71b4bfc0f9f1d7985ce3ac85f85a07244f28e674fc34b37f7138049716f7fefbec0a1e248dceec6b76e5d48515843a05a296af545bf6c6a9946a679df0d51460df88d528e35bfd32b3647cba0f3d067a00d33ff73bbc02eb40042127bce0c1655d9e607bba2503f8cb5f9bea22842271ba0b33c1d308b0913b13b07ec4057d04fa2db16dcef874f967537bace89ac0b8172a063847f8ecd14274a513b683b2b4d75a55e5ebfe8235a550633b748086acb0716a059fcb30fd1d6421408e8c989b0e74dcb8244ba829d2f2bdf151e33d6e1d56fcea038663525428cabd8980d9296b6cc36af729fc6acc46e9eb0914ca1a0d8520078a04ea2f597ba766536282f18aadb2890fb2bfb1c102d70d1bb48503620c92d226ca027691440cd4194b44aff15036797c6ce48f4e02a0fa8765be84c43439cd08904a00e03963a6fa156828e10d59a45ad6fd4e2402f0dec42582df4f6995b87cbf3d4a04ae62c15632e728fb416ed29924d486115df77ae5c5d1d1f45fef85e439458c4a0ce28c4c118e4e3697f522f1fe86a7260d5e9dae38689aaeb88c790dadf072b74a0aafa9764d62f03d8b9ea6000b08f40f7adc7b3c1ba539be96826c17ee8739849a0fb0555be461ae94a3c770da16054579e10a6597e0a83e5769eae97627e8aa0daa0393f7a64c162818271e10af1c46c8a2c0a64e5ee58d797ed7ea1dfc67cd5f11e80", + "0xf90211a014420ce3dab1e17e2f0dc99738b961a247237c3f6070b53a4eaaa7715d474e33a0efd7d5e3931c2fefc1fe126b75c7a5e108e2cabf307d73db29d626d01bb9bcdfa088f7a078b10ca2ea2fb784aeacfb15952bd5fa3063022cacf005c329c22d286ca0fe3e1b0516f57322cc9c08144b0fad9de191b3ca4fcb6159edfb797782d4ee00a0d6d0737f0a3953c388815ced56038a2c5dd5254ce6dceed7b0d3d2c1e98893dba05cb083b302dd9afb90ab493ca4a5b974ba77424ed3d4b7405a7d45435016a2b9a0f9cb84056435cca15e9114d18e4e2c0171e84433e162c40d0f072dae22d7cf37a0d813f1226746f08c365e474dd5e316ffa0ce1ffb9604b73b440fa779be65c83fa070de25aaad32aee0482e4ffd3017e662b5ca90d9e17b878e21b518c008379ab3a016edb7310cd71e57feb562a768ec7423dcbe45f8b71c11f6a0d371daf606d03fa01a4642285451b4c0a832a888bdcdc58d33627acf4f0e9c4813ecdfb3d4c65388a0dfbdfc5afec24957c1c1562703e6b1b29d2a99f6462eb3b9d0356aadabdd971da0280c8606d45d2c8e14fd229e79c1681f946c38d51f66367195fa34b7e119e86fa05b507254f925c0569f233c164e8f078044dd6fe6c9513fa3c97eaf4e3919212fa0e50deb6777ea6e4187690beed1a9369e8c4e18b48bf6a899a9e7c3908a42bcb5a0a24c340fa072f88352ec85d66efb0f0c1f6a6f122f0081c18a836c2aa86beec580", + 
"0xf90211a0e01cb9e9892eb581e5a3c79c26cbca325f8212ad2bc5c6279a7c4f631f81f474a06619b556f715beef28c32e23fafc86593210ca80525d9239f927ebea560ff2a0a0c3a975ed26435be5eb9e22ef1ab4561a2c0b01c6c508bff14f9bc4ee6e73ca90a05a0823e4ae2140d889f241ea260919cd644e6171b9f2e2cd0f8c5658bf8f5814a0123907391bcb99e715d5742a174a2cb6e9426f483043d405db8ef414c6275836a01581b48149d60e7cb082ab9048046a76351f95de1e976f574b116ed9c2927feea083a5877bb1b47cd849493c4f134380f204c252e034690fd44f6c58a43868e5b7a00a7d967b45786d760465e977b511328af38732800a3973a28db36e3b5bf4ec03a0912bfb4a5894b2fa8b9be3a881739c2746310c7d8dda9f444361f9f7be461994a0a010bf67eb48eaa4912c94ef0709f8839e79975e51d8e9fce2495206cb0979b0a0435f9a1c205a9969f049c0b141a35b349b84a37eeb9293f0a1d2fd3164797c21a025a035592e96995aafe23be26d2a873ebdb06f5ab7e59141e2ef8b6b1809ed3aa0c0cf5b20feb949571f9063ef4935bc801d47d5bb95772b48c090666468df836ca06371135f103da7303cbcf89e5e903afeb46fa03f8e7e62450dfc7e3ddbcb8802a0901908c9e2ea0d9cc709bc10e11249771d35f17afcda522f7c1e5c642e037a83a0dfc70ff78993d59a29df872ad71184b2ab54e09126149a58917b2fa3844e853480", + "0xf901b1a01276884378b684253b8886587c8f1bc9b34faed42af94915798e03e95616eb9c80a05ad8f58c5eeb611212582513105c3967f2be0c437c163e0a500f3e99376a1d0d8080a07039ebe73cd8e4a725451e6a9353314ec4b7a0899b79c192404bae1f13d8b8f9a0a8db7c7ba8f3f5d4a5201ef6ceead27195ff8893435ec15cbbe45df55b0e90c7a020a06be73b79a2293abc6e08b110a2dfe44804f705ff5c1220c170c927515b19a0a896b76855eb0753f592ea1952d63f64add5952cc00fc7a0a1ac7ac722a46658a0cf60aebe174d15925f304a8226f6162b32a4f817f3153c9db02b359f95496bd6a0729d651316546407ccdcd3b05358318105bd5c7ee6efa3fbc3c8cf3afe9f0363a01ec3512523b23d000cfd0ff5eaf77787223de5fc55dd3cda8d3792634a037977a08bc0a97065188cd5310ebceb9a17200651e6c767c47f047735404284c09b8c58a0864ff5e9ad3b6a76602e5575c52f9ec123679f7509451fa98cb2e152081a21b5a0275a300b4cb1ad1c3404cb863df2cd12b855bdf13895a464cb778ac53e1fa11ca0045d68895435f0e949bced85f319c36da7e6a2ea3fd5ff45632284f26ddfe79a80", + "0xf8718080808080a058d90c97d5325ac87fdf362e1f3460000534c81a1ed807d19c5f3532d9d65bbb808080a07b3c71cc818328815c79bcd344c717789bde929b23bf30bfe28a36ca3cad72cd80808080a0f7f9e3618702d28e1135289a6fba3fea9eae2f0de6b0afe2cbc60bee91fc91a58080", + "0xf8709d3d41ff168cfccb34c4efa2db7e4f369c363cf9480dc12886f2b6fb82a5b850f84e8263f98833244a70bb7ec914a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" + ] +} diff --git a/crates/primitives/src/aggregate_fn/integer.rs b/crates/primitives/src/aggregate_fn/integer.rs index 10f34fc8..8eb6adfd 100644 --- a/crates/primitives/src/aggregate_fn/integer.rs +++ b/crates/primitives/src/aggregate_fn/integer.rs @@ -1,12 +1,12 @@ use std::str::FromStr; -use alloy_primitives::U256; +use alloy::primitives::U256; use anyhow::{bail, Result}; use super::FunctionContext; /// Returns the average of the values: [`AVG`](https://en.wikipedia.org/wiki/Average) -pub fn average(values: &[U256]) -> Result { +pub fn average(values: &[U256]) -> Result { if values.is_empty() { bail!("No values found"); } @@ -16,18 +16,16 @@ pub fn average(values: &[U256]) -> Result { .try_fold(U256::from(0), |acc, val| acc.checked_add(*val)) .unwrap(); - let divided_value = divide(sum, U256::from(values.len())); - - Ok(divided_value) + divide(sum, U256::from(values.len())) } // TODO: Implement bloom_filterize -pub fn bloom_filterize(_values: &[U256]) -> Result { - Ok("0".to_string()) +pub fn bloom_filterize(_values: &[U256]) -> Result { + Ok(U256::from(0)) } /// Find the maximum value: 
/// Find the maximum value: [`MAX`](https://en.wikipedia.org/wiki/Maxima_and_minima) -pub fn find_max(values: &[U256]) -> Result<String> { +pub fn find_max(values: &[U256]) -> Result<U256> { if values.is_empty() { bail!("No values found"); } @@ -40,11 +38,11 @@ pub fn find_max(values: &[U256]) -> Result<String> { } } - Ok(max.to_string()) + Ok(max) } /// Find the minimum value: [`MIN`](https://en.wikipedia.org/wiki/Maxima_and_minima) -pub fn find_min(values: &[U256]) -> Result<String> { +pub fn find_min(values: &[U256]) -> Result<U256> { if values.is_empty() { bail!("No values found"); } @@ -56,12 +54,12 @@ pub fn find_min(values: &[U256]) -> Result<String> { } } - Ok(min.to_string()) + Ok(min) } /// Standard deviation /// wip -pub fn standard_deviation(values: &[U256]) -> Result<String> { +pub fn standard_deviation(values: &[U256]) -> Result<U256> { if values.is_empty() { bail!("No values found"); } @@ -73,7 +71,11 @@ pub fn standard_deviation(values: &[U256]) -> Result<String> { sum += value; } - let avg = divide(sum, count).parse::<f64>().unwrap(); + let avg = divide(sum, count) + .expect("Division has failed") + .to_string() + .parse::<f64>() + .unwrap(); let mut variance_sum = 0.0; for value in values { @@ -82,13 +84,15 @@ } let variance: f64 = divide(U256::from(variance_sum), U256::from(count)) - .parse() + .expect("Division has failed") + .to_string() + .parse::<f64>() .unwrap(); - Ok(roundup(variance.sqrt().to_string()).to_string()) + Ok(U256::from(roundup(variance.sqrt().to_string()))) } /// Sum of values: [`SUM`](https://en.wikipedia.org/wiki/Summation) -pub fn sum(values: &[U256]) -> Result<String> { +pub fn sum(values: &[U256]) -> Result<U256> { if values.is_empty() { bail!("No values found"); } @@ -99,7 +103,7 @@ pub fn sum(values: &[U256]) -> Result<String> { sum += value; } - Ok(sum.to_string()) + Ok(sum) } /// Count number of values that satisfy a condition @@ -115,7 +119,7 @@ /// - 03: Greater than or equal (>=) /// - 04: Less than (<) /// - 05: Less than or equal (<=) -pub fn count(values: &[U256], ctx: &FunctionContext) -> Result<String> { +pub fn count(values: &[U256], ctx: &FunctionContext) -> Result<U256> { let logical_operator = &ctx.operator; let value_to_compare = ctx.value_to_compare; @@ -159,16 +163,16 @@ pub fn count(values: &[U256], ctx: &FunctionContext) -> Result<String> { } } - Ok(condition_satisfiability_count.to_string()) + Ok(U256::from(condition_satisfiability_count)) } -pub fn simple_linear_regression(values: &[U256]) -> Result<String> { +pub fn simple_linear_regression(values: &[U256]) -> Result<U256> { // if value is empty or has only one value, return error if values.is_empty() || values.len() == 1 { bail!("At least 2 values are needed to compute SLR"); } // TODO: handle custom compute module - Ok("0".to_string()) + Ok(U256::from(0)) } #[derive(Debug, Clone, PartialEq, Eq)] @@ -255,9 +259,9 @@ impl Operator { } } // Handle division properly using U256 type -fn divide(a: U256, b: U256) -> String { +fn divide(a: U256, b: U256) -> Result<U256> { if b.is_zero() { - return "Division by zero error".to_string(); + bail!("Division by zero error"); } let quotient = a / b; @@ -266,9 +270,9 @@ if remainder > divisor_half || (remainder == divisor_half && b % U256::from(2) == U256::from(0)) { - (quotient + U256::from(1)).to_string() + Ok(quotient + U256::from(1)) } else { - quotient.to_string() + Ok(quotient) } } @@ -286,34 +290,37 @@ mod tests { #[test] fn test_avg() { let values = vec![U256::from(1), U256::from(2), U256::from(3)]; - assert_eq!(average(&values).unwrap(), "2".to_string()); +
assert_eq!(average(&values).unwrap(), U256::from(2)); let values = vec![U256::from(1), U256::from(2)]; - assert_eq!(average(&values).unwrap(), "2".to_string()); - + assert_eq!(average(&values).unwrap(), U256::from(2)); let values = vec![U256::from_str("1000000000000").unwrap()]; - assert_eq!(average(&values).unwrap(), "1000000000000".to_string()); - + assert_eq!( + average(&values).unwrap(), + U256::from_str("1000000000000").unwrap() + ); let values = vec![U256::from_str("41697298409483537348").unwrap()]; assert_eq!( average(&values).unwrap(), - "41697298409483537348".to_string() + U256::from_str("41697298409483537348").unwrap() ); } #[test] fn test_sum() { let values = vec![U256::from(1), U256::from(2), U256::from(3)]; - assert_eq!(sum(&values).unwrap(), "6".to_string()); + assert_eq!(sum(&values).unwrap(), U256::from(6)); let values = vec![U256::from(1), U256::from(2)]; - assert_eq!(sum(&values).unwrap(), "3".to_string()); + assert_eq!(sum(&values).unwrap(), U256::from(3)); let values = vec![U256::from_str("6776").unwrap()]; - assert_eq!(sum(&values).unwrap(), "6776".to_string()); - + assert_eq!(sum(&values).unwrap(), U256::from(6776)); let values = vec![U256::from_str("41697298409483537348").unwrap()]; - assert_eq!(sum(&values).unwrap(), "41697298409483537348".to_string()); + assert_eq!( + sum(&values).unwrap(), + U256::from_str("41697298409483537348").unwrap() + ); } #[test] @@ -333,7 +340,7 @@ mod tests { ]; assert_eq!( average(&values).unwrap(), - "41697151157910180414".to_string() + U256::from_str("41697151157910180414").unwrap() ); } @@ -346,25 +353,25 @@ mod tests { #[test] fn test_find_max() { let values = vec![U256::from(1), U256::from(2), U256::from(3)]; - assert_eq!(find_max(&values).unwrap(), "3".to_string()); + assert_eq!(find_max(&values).unwrap(), U256::from(3)); let values = vec![U256::from(1), U256::from(2)]; - assert_eq!(find_max(&values).unwrap(), "2".to_string()); + assert_eq!(find_max(&values).unwrap(), U256::from(2)); } #[test] fn test_find_min() { let values = vec![U256::from(1), U256::from(2), U256::from(3)]; - assert_eq!(find_min(&values).unwrap(), "1".to_string()); + assert_eq!(find_min(&values).unwrap(), U256::from(1)); let values = vec![U256::from(1), U256::from(2)]; - assert_eq!(find_min(&values).unwrap(), "1".to_string()); + assert_eq!(find_min(&values).unwrap(), U256::from(1)); } #[test] fn test_std() { let values = vec![U256::from(1), U256::from(2), U256::from(3)]; - assert_eq!(standard_deviation(&values).unwrap(), "1".to_string()); + assert_eq!(standard_deviation(&values).unwrap(), U256::from(1)); let values = vec![ U256::from(0), U256::from(2), U256::from(2), U256::from(2), U256::from(100), ]; - assert_eq!(standard_deviation(&values).unwrap(), "39".to_string()); + assert_eq!(standard_deviation(&values).unwrap(), U256::from(39)); } #[test] @@ -386,7 +393,7 @@ &FunctionContext::new(Operator::GreaterThanOrEqual, U256::from(2)) ) .unwrap(), - "2".to_string() + U256::from(2) ); let values = vec![U256::from(1), U256::from(10)]; @@ -397,7 +404,7 @@ &FunctionContext::new(Operator::GreaterThan, U256::from(1)) ) .unwrap(), - "1".to_string() + U256::from(1) ); } }
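Of the aggregate functions above, only `COUNT` requires a `FunctionContext` (an operator byte plus a comparison value). A short usage sketch against the new `U256`-based API; the import paths are assumed re-exports and the values are invented:

    use alloy::primitives::U256;
    use hdp_primitives::aggregate_fn::integer::Operator; // assumed path
    use hdp_primitives::aggregate_fn::{AggregationFunction, FunctionContext}; // assumed path

    fn main() -> anyhow::Result<()> {
        let values = vec![U256::from(5), U256::from(10), U256::from(15)];

        // COUNT(value > 7) == 2; `Operator::GreaterThan` is the `02` operator
        // from the encoding described in `count`'s doc comment.
        let ctx = FunctionContext::new(Operator::GreaterThan, U256::from(7));
        assert_eq!(
            AggregationFunction::COUNT.operation(&values, Some(ctx))?,
            U256::from(2)
        );

        // Without a context, COUNT bails ("Context not provided for COUNT").
        assert!(AggregationFunction::COUNT.operation(&values, None).is_err());
        Ok(())
    }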
diff --git a/crates/primitives/src/aggregate_fn/mod.rs b/crates/primitives/src/aggregate_fn/mod.rs index d037a4c9..1b812fbe 100644 --- a/crates/primitives/src/aggregate_fn/mod.rs +++ b/crates/primitives/src/aggregate_fn/mod.rs @@ -1,4 +1,4 @@ -use alloy_primitives::U256; +use alloy::primitives::U256; use anyhow::{bail, Result}; use std::str::FromStr; @@ -134,25 +134,23 @@ impl AggregationFunction { } } - pub fn operation(&self, values: &[String], ctx: Option<FunctionContext>) -> Result<String> { + pub fn operation(&self, values: &[U256], ctx: Option<FunctionContext>) -> Result<U256> { match self { // Aggregation functions for integer values - AggregationFunction::AVG => integer::average(&parse_int_value(values).unwrap()), - AggregationFunction::MAX => integer::find_max(&parse_int_value(values).unwrap()), - AggregationFunction::MIN => integer::find_min(&parse_int_value(values).unwrap()), - AggregationFunction::SUM => integer::sum(&parse_int_value(values).unwrap()), + AggregationFunction::AVG => integer::average(values), + AggregationFunction::MAX => integer::find_max(values), + AggregationFunction::MIN => integer::find_min(values), + AggregationFunction::SUM => integer::sum(values), AggregationFunction::COUNT => { if let Some(ctx) = ctx { - integer::count(&parse_int_value(values).unwrap(), &ctx) + integer::count(values, &ctx) } else { bail!("Context not provided for COUNT") } } // Aggregation functions for string values - AggregationFunction::MERKLE => string::merkleize(values), - AggregationFunction::SLR => { - integer::simple_linear_regression(&parse_int_value(values).unwrap()) - } + AggregationFunction::MERKLE => todo!("Merkleize not implemented yet"), + AggregationFunction::SLR => integer::simple_linear_regression(values), } } @@ -169,28 +167,29 @@ impl AggregationFunction { } } -// Remove the "0x" prefix if exist, so that integer functions can parse integer values -// In case of storage value, either if this is number or hex string type, all stored in hex string format. -// So, we need to remove the "0x" prefix to parse the integer value if user target to use integer functions. -// If the value is already in integer format, then it will be parsed as integer, which is decimal format. -// -// This also implies, even if the value is in hex string format, it will be parsed as integer, which is decimal format. -// So for user it's importantant to know the value type and the function type. -fn parse_int_value(values: &[String]) -> Result<Vec<U256>> { - let int_values: Vec<U256> = values - .iter() - .map(|hex_str| { - if hex_str.starts_with("0x") { - let hex_value = hex_str.trim_start_matches("0x").to_string(); - U256::from_str_radix(&hex_value, 16).unwrap() - } else { - U256::from_str_radix(hex_str, 10).unwrap() - } - }) - .collect(); - - Ok(int_values) -} +// TODO: legacy parse. +// // Remove the "0x" prefix if exist, so that integer functions can parse integer values +// // In case of storage value, either if this is number or hex string type, all stored in hex string format. +// // So, we need to remove the "0x" prefix to parse the integer value if user target to use integer functions. +// // If the value is already in integer format, then it will be parsed as integer, which is decimal format. +// // +// // This also implies, even if the value is in hex string format, it will be parsed as integer, which is decimal format. +// // So for user it's important to know the value type and the function type.
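Callers now have to convert raw hex or decimal strings into `U256` themselves before reaching `operation`, so the retired convention is worth keeping at hand. A minimal standalone equivalent of the legacy helper kept commented out below:

    use alloy::primitives::U256;

    /// Parse either a "0x"-prefixed hex string or a plain decimal string,
    /// following the convention described in the comment above.
    fn parse_int_value(raw: &str) -> anyhow::Result<U256> {
        match raw.strip_prefix("0x") {
            Some(hex) => Ok(U256::from_str_radix(hex, 16)?),
            None => Ok(U256::from_str_radix(raw, 10)?),
        }
    }

    fn main() -> anyhow::Result<()> {
        // The same storage value in both notations parses to the same integer.
        assert_eq!(parse_int_value("0x9184e72a000")?, U256::from(10000000000000u64));
        assert_eq!(parse_int_value("10000000000000")?, U256::from(10000000000000u64));
        Ok(())
    }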
+// fn parse_int_value(values: &[String]) -> Result<Vec<U256>> { +// let int_values: Vec<U256> = values +// .iter() +// .map(|hex_str| { +// if hex_str.starts_with("0x") { +// let hex_value = hex_str.trim_start_matches("0x").to_string(); +// U256::from_str_radix(&hex_value, 16).unwrap() +// } else { +// U256::from_str_radix(hex_str, 10).unwrap() +// } +// }) +// .collect(); + +// Ok(int_values) +// } #[cfg(test)] mod tests { use super::*; @@ -201,44 +200,47 @@ mod tests { let sum_fn = AggregationFunction::SUM; // 4952100 ~ 4952100, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.nonce - let values = vec!["6776".to_string()]; + let values = vec![U256::from_str_radix("6776", 10).unwrap()]; let result = sum_fn.operation(&values, None).unwrap(); - assert_eq!(result, "6776"); + assert_eq!(result, U256::from(6776)); // 4952100 ~ 4952103, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.nonce let values = vec![ - "6776".to_string(), - "6776".to_string(), - "6776".to_string(), - "6777".to_string(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), ]; let result = sum_fn.operation(&values, None).unwrap(); - assert_eq!(result, "27105"); + assert_eq!(result, U256::from(27105)); // 5382810 ~ 5382810, storage.0x75CeC1db9dCeb703200EAa6595f66885C962B920.0x0000000000000000000000000000000000000000000000000000000000000002 - let values = vec!["0x9184e72a000".to_string()]; + let values = vec![U256::from_str_radix("9184e72a000", 16).unwrap()]; let result = sum_fn.operation(&values, None).unwrap(); - assert_eq!(result, "10000000000000"); + assert_eq!(result, U256::from_str_radix("10000000000000", 10).unwrap()); // 5382810 ~ 5382813, storage.0x75CeC1db9dCeb703200EAa6595f66885C962B920.0x0000000000000000000000000000000000000000000000000000000000000002 let values = vec![ - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), ]; let result = sum_fn.operation(&values, None).unwrap(); - assert_eq!(result, "40000000000000"); + assert_eq!(result, U256::from_str_radix("40000000000000", 10).unwrap()); // 4952100 ~ 4952103, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.balance let values = vec![ - "41697298409483537348".to_string(), - "41697298409483537348".to_string(), - "41697298409483537348".to_string(), - "41697095938570171564".to_string(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), ]; let result = sum_fn.operation(&values, None).unwrap(); - assert_eq!(result, "166788991167020783608"); + assert_eq!( + result, + U256::from_str_radix("166788991167020783608", 10).unwrap() + ); } #[test] @@ -246,58 +248,58 @@ mod tests { let avg_fn = AggregationFunction::AVG; // 4952100 ~ 4952100, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.nonce - let values = vec!["6776".to_string()]; + let values = vec![U256::from_str_radix("6776", 10).unwrap()]; let result = avg_fn.operation(&values, None).unwrap(); - assert_eq!(result, "6776"); + assert_eq!(result, U256::from(6776)); // 4952100 ~ 4952110, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.nonce let
values = vec![ - "6776".to_string(), - "6776".to_string(), - "6776".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), ]; let result = avg_fn.operation(&values, None).unwrap(); - assert_eq!(result, "6777"); + assert_eq!(result, U256::from(6777)); // 5382810 ~ 5382810, storage.0x75CeC1db9dCeb703200EAa6595f66885C962B920.0x0000000000000000000000000000000000000000000000000000000000000002 - let values = vec!["0x9184e72a000".to_string()]; + let values = vec![U256::from_str_radix("9184e72a000", 16).unwrap()]; let result = avg_fn.operation(&values, None).unwrap(); - assert_eq!(result, "10000000000000"); + assert_eq!(result, U256::from(10000000000000u64)); // 5382810 ~ 5382813, storage.0x75CeC1db9dCeb703200EAa6595f66885C962B920.0x0000000000000000000000000000000000000000000000000000000000000002 let values = vec![ - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), ]; let result = avg_fn.operation(&values, None).unwrap(); - assert_eq!(result, "10000000000000"); + assert_eq!(result, U256::from(10000000000000u64)); // 4952100 ~ 4952110, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.balance let values = vec![ - "41697298409483537348".to_string(), - "41697298409483537348".to_string(), - "41697298409483537348".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), ]; let result = avg_fn.operation(&values, None).unwrap(); - assert_eq!(result, "41697151157910180414"); + assert_eq!(result, U256::from(41697151157910180414u128)); } #[test] @@ -305,58 +307,58 @@ mod tests { let max_fn = AggregationFunction::MAX; // 4952100 ~ 4952100, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.nonce - let values = vec!["6776".to_string()]; + let values = vec![U256::from_str_radix("6776", 10).unwrap()]; let 
result = max_fn.operation(&values, None).unwrap(); - assert_eq!(result, "6776"); + assert_eq!(result, U256::from(6776)); // 4952100 ~ 4952110, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.nonce let values = vec![ - "6776".to_string(), - "6776".to_string(), - "6776".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), ]; let result = max_fn.operation(&values, None).unwrap(); - assert_eq!(result, "6777"); + assert_eq!(result, U256::from(6777)); // 5382810 ~ 5382810, storage.0x75CeC1db9dCeb703200EAa6595f66885C962B920.0x0000000000000000000000000000000000000000000000000000000000000002 - let values = vec!["0x9184e72a000".to_string()]; + let values = vec![U256::from_str_radix("9184e72a000", 16).unwrap()]; let result = max_fn.operation(&values, None).unwrap(); - assert_eq!(result, "10000000000000"); + assert_eq!(result, U256::from(10000000000000u64)); // 5382810 ~ 5382813, storage.0x75CeC1db9dCeb703200EAa6595f66885C962B920.0x0000000000000000000000000000000000000000000000000000000000000002 let values = vec![ - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), ]; let result = max_fn.operation(&values, None).unwrap(); - assert_eq!(result, "10000000000000"); + assert_eq!(result, U256::from(10000000000000u64)); // 4952100 ~ 4952110, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.balance let values = vec![ - "41697298409483537348".to_string(), - "41697298409483537348".to_string(), - "41697298409483537348".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), ]; let result = max_fn.operation(&values, None).unwrap(); - assert_eq!(result, "41697298409483537348"); + assert_eq!(result, U256::from(41697298409483537348u128)); } #[test] @@ -364,58 +366,58 @@ mod tests { let min_fn = 
AggregationFunction::MIN; // 4952100 ~ 4952100, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.nonce - let values = vec!["6776".to_string()]; + let values = vec![U256::from_str_radix("6776", 10).unwrap()]; let result = min_fn.operation(&values, None).unwrap(); - assert_eq!(result, "6776"); + assert_eq!(result, U256::from(6776)); // 4952100 ~ 4952110, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.nonce let values = vec![ - "6776".to_string(), - "6776".to_string(), - "6776".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), ]; let result = min_fn.operation(&values, None).unwrap(); - assert_eq!(result, "6776"); + assert_eq!(result, U256::from(6776)); // 5382810 ~ 5382810, storage.0x75CeC1db9dCeb703200EAa6595f66885C962B920.0x0000000000000000000000000000000000000000000000000000000000000002 - let values = vec!["0x9184e72a000".to_string()]; + let values = vec![U256::from_str_radix("9184e72a000", 16).unwrap()]; let result = min_fn.operation(&values, None).unwrap(); - assert_eq!(result, "10000000000000"); + assert_eq!(result, U256::from(10000000000000u64)); // 5382810 ~ 5382813, storage.0x75CeC1db9dCeb703200EAa6595f66885C962B920.0x0000000000000000000000000000000000000000000000000000000000000002 let values = vec![ - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), ]; let result = min_fn.operation(&values, None).unwrap(); - assert_eq!(result, "10000000000000"); + assert_eq!(result, U256::from(10000000000000u64)); // 4952100 ~ 4952110, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.balance let values = vec![ - "41697298409483537348".to_string(), - "41697298409483537348".to_string(), - "41697298409483537348".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), ]; let result = 
min_fn.operation(&values, None).unwrap(); - assert_eq!(result, "41697095938570171564"); + assert_eq!(result, U256::from(41697095938570171564u128)); } #[test] @@ -423,7 +425,7 @@ mod tests { let count = AggregationFunction::COUNT; // 4952100 ~ 4952100, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.nonce - let values = vec!["6776".to_string()]; + let values = vec![U256::from_str_radix("6776", 10).unwrap()]; // logical_operator: 03 (>=) // value_to_compare: 0x0000000000000000000000000000000000000000000000000000000000000fff (4095) let result = count @@ -435,7 +437,7 @@ mod tests { )), ) .unwrap(); - assert_eq!(result, "1"); + assert_eq!(result, U256::from(1)); // logical_operator: 00 (=) // value_to_compare: 0x0000000000000000000000000000000000000000000000000000000000001A78 (6776) let result = count @@ -444,21 +446,21 @@ mod tests { Some(FunctionContext::new(Operator::Equal, U256::from(6776))), ) .unwrap(); - assert_eq!(result, "1"); + assert_eq!(result, U256::from(1)); // 4952100 ~ 4952110, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.nonce let values = vec![ - "6776".to_string(), - "6776".to_string(), - "6776".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), - "6777".to_string(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6776", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), + U256::from_str_radix("6777", 10).unwrap(), ]; // logical_operator: 01 (!=) // value_to_compare: 0x0000000000000000000000000000000000000000000000000000000000001A78 (6776) @@ -468,7 +470,7 @@ mod tests { Some(FunctionContext::new(Operator::NotEqual, U256::from(6776))), ) .unwrap(); - assert_eq!(result, "8"); + assert_eq!(result, U256::from(8)); // logical_operator: 02 (>) // value_to_compare: 0x0000000000000000000000000000000000000000000000000000000000001A78 (6776) @@ -481,10 +483,10 @@ mod tests { )), ) .unwrap(); - assert_eq!(result, "8"); + assert_eq!(result, U256::from(8)); // 5382810 ~ 5382810, storage.0x75CeC1db9dCeb703200EAa6595f66885C962B920.0x0000000000000000000000000000000000000000000000000000000000000002 - let values = vec!["0x9184e72a000".to_string()]; + let values = vec![U256::from_str_radix("9184e72a000", 16).unwrap()]; // logical_operator: 00 (=) // value_to_compare: 0x000000000000000000000000000000000000000000000000000009184e72a000 (10000000000000) let result = count @@ -496,16 +498,16 @@ mod tests { )), ) .unwrap(); - assert_eq!(result, "1"); + assert_eq!(result, U256::from(1)); // 5382810 ~ 5382813, storage.0x75CeC1db9dCeb703200EAa6595f66885C962B920.0x0000000000000000000000000000000000000000000000000000000000000002 // logical_operator: 05 (<=) // value_to_compare: 0x000000000000000000000000000000000000000000000000000009184e72a001 (10000000000001) let values = vec![ - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), - "0x9184e72a000".to_string(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), + U256::from_str_radix("9184e72a000", 16).unwrap(), ]; let result = count .operation( @@ 
-516,23 +518,23 @@ mod tests { )), ) .unwrap(); - assert_eq!(result, "4"); + assert_eq!(result, U256::from(4)); // 4952100 ~ 4952110, account.0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4.balance // logical_operator: 05 (<=) // value_to_compare: 0x00000000000000000000000000000000000000000000000242a9d7d5dfdbb4ac (41697095938570171564) let values = vec![ - "41697298409483537348".to_string(), - "41697298409483537348".to_string(), - "41697298409483537348".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), - "41697095938570171564".to_string(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697298409483537348", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), + U256::from_str_radix("41697095938570171564", 10).unwrap(), ]; let result = count .operation( @@ -543,6 +545,6 @@ mod tests { )), ) .unwrap(); - assert_eq!(result, "8"); + assert_eq!(result, U256::from(8)); } } diff --git a/crates/primitives/src/aggregate_fn/rand.rs b/crates/primitives/src/aggregate_fn/rand.rs index 85b657fd..1deb1370 100644 --- a/crates/primitives/src/aggregate_fn/rand.rs +++ b/crates/primitives/src/aggregate_fn/rand.rs @@ -1,4 +1,4 @@ -use alloy_primitives::U256; +use alloy::primitives::U256; use rand::{ distributions::{Distribution, Standard}, Rng, diff --git a/crates/primitives/src/block/account.rs b/crates/primitives/src/block/account.rs index 7e744df5..447bb93e 100644 --- a/crates/primitives/src/block/account.rs +++ b/crates/primitives/src/block/account.rs @@ -1,24 +1,20 @@ -use std::str::FromStr; +//! 
Account struct and its associated methods -use alloy_primitives::{hex, FixedBytes, U256}; -use alloy_rlp::{Decodable, Encodable as _, RlpDecodable, RlpEncodable}; -use serde::{Deserialize, Serialize}; +use alloy::{ + primitives::B256, primitives::U256, primitives::U64, rpc::types::EIP1186AccountProofResponse, +}; +use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; #[derive(Debug, RlpDecodable, RlpEncodable, PartialEq)] pub struct Account { - pub nonce: u64, + pub nonce: U64, pub balance: U256, - pub storage_root: FixedBytes<32>, - pub code_hash: FixedBytes<32>, + pub storage_root: B256, + pub code_hash: B256, } impl Account { - pub fn new( - nonce: u64, - balance: U256, - storage_root: FixedBytes<32>, - code_hash: FixedBytes<32>, - ) -> Self { + pub fn new(nonce: U64, balance: U256, storage_root: B256, code_hash: B256) -> Self { Account { nonce, balance, @@ -27,56 +23,24 @@ impl Account { } } - pub fn rlp_encode(&self) -> String { + pub fn rlp_encode(&self) -> Vec<u8> { let mut buffer = Vec::<u8>::new(); self.encode(&mut buffer); - hex::encode(buffer) + buffer } - pub fn rlp_decode(rlp: &str) -> Self { - <Self as Decodable>::decode(&mut hex::decode(rlp).unwrap().as_slice()).unwrap() + pub fn rlp_decode(mut rlp: &[u8]) -> Self { + <Self as Decodable>::decode(&mut rlp).unwrap() } } -/// Account data from RPC `eth_getProof` response -#[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -pub struct AccountProofFromRpc { - pub account_proof: Vec<String>, - pub address: String, - pub balance: String, - pub code_hash: String, - pub nonce: String, - pub storage_hash: String, - pub storage_proof: Vec<StorageProofFromRpc>, -} - -/// Account data from RPC `eth_getAccount` response -#[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -pub struct AccountFromRpc { - pub balance: String, - pub code_hash: String, - pub nonce: String, - pub storage_root: String, -} - -/// Storage data from RPC `eth_getProof` response -#[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -pub struct StorageProofFromRpc { - pub key: String, - pub proof: Vec<String>, - pub value: String, -} - -impl From<&AccountProofFromRpc> for Account { - fn from(account_from_rpc: &AccountProofFromRpc) -> Self { +impl From<&EIP1186AccountProofResponse> for Account { + fn from(account_from_rpc: &EIP1186AccountProofResponse) -> Self { Account { - nonce: u64::from_str_radix(&account_from_rpc.nonce[2..], 16).unwrap(), - balance: U256::from_str_radix(&account_from_rpc.balance[2..], 16).unwrap(), - storage_root: FixedBytes::from_str(&account_from_rpc.storage_hash[2..]).unwrap(), - code_hash: FixedBytes::from_str(&account_from_rpc.code_hash[2..]).unwrap(), + nonce: account_from_rpc.nonce, + balance: account_from_rpc.balance, + storage_root: account_from_rpc.storage_hash, + code_hash: account_from_rpc.code_hash, } } } @@ -84,70 +48,69 @@ impl From<&AccountProofFromRpc> for Account { #[cfg(test)] mod tests { use super::*; - use alloy_primitives::{FixedBytes, U256}; + use alloy::hex; + use alloy::primitives::U256; use std::str::FromStr; #[test] fn test_get_account_rlp() { - // let account_addr = "0x7b2f05ce9ae365c3dbf30657e2dc6449989e83d6"; let account = Account::new( - 1, + U64::from(1), U256::from(0), - FixedBytes::from_str( "0x1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185", ) .unwrap(), - FixedBytes::from_str( "0xcd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c", ) .unwrap(), + B256::from_str("0x1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185") .unwrap(),
+ B256::from_str("0xcd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c") + .unwrap(), ); let account_rlp = account.rlp_encode(); - assert_eq!(account_rlp, "f8440180a01c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185a0cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c"); + assert_eq!( + hex::encode(account_rlp), + "f8440180a01c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185a0cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c" + ); + let account = Account::new( - 2, + U64::from(2), U256::from(0), - FixedBytes::from_str( - "0x1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185", - ) - .unwrap(), - FixedBytes::from_str( - "0xcd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c", - ) - .unwrap(), + B256::from_str("0x1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185") + .unwrap(), + B256::from_str("0xcd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c") + .unwrap(), ); let account_rlp = account.rlp_encode(); - assert_eq!(account_rlp, "f8440280a01c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185a0cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c"); + assert_eq!( + hex::encode(account_rlp), + "f8440280a01c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185a0cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c" + ); + let account = Account::new( - 2, + U64::from(2), U256::from(0x1), - FixedBytes::from_str( - "0x1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185", - ) - .unwrap(), - FixedBytes::from_str( - "0xcd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c", - ) - .unwrap(), + B256::from_str("0x1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185") + .unwrap(), + B256::from_str("0xcd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c") + .unwrap(), ); let account_rlp = account.rlp_encode(); - assert_eq!(account_rlp, "f8440201a01c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185a0cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c"); + assert_eq!( + hex::encode(account_rlp), + "f8440201a01c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185a0cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c" + ); } #[test] fn test_decode_account_rlp() { let account_rlp = "f8440180a01c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185a0cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c"; - let account = Account::rlp_decode(account_rlp); + let account = Account::rlp_decode(hex::decode(account_rlp).unwrap().as_slice()); assert_eq!( account, Account::new( - 1, + U64::from(1), U256::from(0), - FixedBytes::from_str( + B256::from_str( "0x1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185" ) .unwrap(), - FixedBytes::from_str( + B256::from_str( "0xcd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c" ) .unwrap() diff --git a/crates/primitives/src/block/header.rs b/crates/primitives/src/block/header.rs index 5d48e058..dc27ef9c 100644 --- a/crates/primitives/src/block/header.rs +++ b/crates/primitives/src/block/header.rs @@ -1,9 +1,14 @@ use std::str::FromStr; -use alloy_primitives::{hex, keccak256, Address, BlockNumber, Bloom, Bytes, B256, B64, U256}; +use alloy::{ + hex, + primitives::{keccak256, Address, BlockNumber, Bloom, Bytes, B256, B64, U256}, +}; use alloy_rlp::{length_of_length, BufMut, Decodable, Encodable}; use serde::{Deserialize, Serialize}; +use crate::processed_types::mmr::MMRMeta; + 
diff --git a/crates/primitives/src/block/header.rs b/crates/primitives/src/block/header.rs index 5d48e058..dc27ef9c 100644 --- a/crates/primitives/src/block/header.rs +++ b/crates/primitives/src/block/header.rs @@ -1,9 +1,14 @@ use std::str::FromStr; -use alloy_primitives::{hex, keccak256, Address, BlockNumber, Bloom, Bytes, B256, B64, U256}; +use alloy::{ + hex, + primitives::{keccak256, Address, BlockNumber, Bloom, Bytes, B256, B64, U256}, +}; use alloy_rlp::{length_of_length, BufMut, Decodable, Encodable}; use serde::{Deserialize, Serialize}; +use crate::processed_types::mmr::MMRMeta; + // ============================================================================= // Header (credit: https://github.com/paradigmxyz/reth/blob/main/crates/primitives/src/header.rs#L133) // Originally had a dependency on the `reth_primitives` crate, but it was removed to publish in crates.io @@ -324,14 +329,14 @@ impl Header { } } - pub fn rlp_encode(&self) -> String { + pub fn rlp_encode(&self) -> Vec<u8> { let mut buffer = Vec::<u8>::new(); self.encode(&mut buffer); - hex::encode(buffer) + buffer } - pub fn rlp_decode(rlp: &str) -> Self { -
<Self as Decodable>::decode(&mut hex::decode(rlp).unwrap().as_slice()).unwrap() + pub fn rlp_decode(mut rlp: &[u8]) -> Self { +
<Self as Decodable>::decode(&mut rlp).unwrap() } pub fn get_block_hash(&self) -> String { @@ -442,6 +447,17 @@ pub struct MMRMetaFromIndexer { pub mmr_size: u64, } +impl From<&MMRMetaFromIndexer> for MMRMeta { + fn from(value: &MMRMetaFromIndexer) -> Self { + Self { + id: value.mmr_id, + peaks: value.mmr_peaks.clone(), + root: value.mmr_root.to_string(), + size: value.mmr_size, + } + } +} + #[derive(Serialize, Deserialize, Debug, Clone)] pub struct MMRProofFromIndexer { pub block_number: u64, @@ -465,7 +481,7 @@ pub struct MMRDataFromNewIndexer { pub proofs: Vec<MMRProofFromNewIndexer>, } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct MMRMetaFromNewIndexer { pub mmr_id: u64, pub mmr_peaks: Vec<String>, @@ -479,6 +495,12 @@ pub struct RlpBlockHeader { pub value: String, } +impl From<RlpBlockHeader> for Bytes { + fn from(rlp_block_header: RlpBlockHeader) -> Self { + Bytes::from(hex::decode(rlp_block_header.value).expect("Cannot decode RLP block header")) + } +} + #[derive(Serialize, Deserialize, Debug, Clone)] pub struct MMRProofFromNewIndexer { pub block_number: u64, @@ -494,7 +516,7 @@ mod tests { use super::*; use std::str::FromStr; - use alloy_primitives::{hex, Address, Bloom, Bytes, FixedBytes, U256}; + use alloy::primitives::{hex, Address, Bloom, Bytes, FixedBytes, U256}; use alloy_rlp::Decodable; #[test]
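The two `From` impls added to this file are the glue between indexer payloads and typed values; a sketch of both conversions (import paths, field visibility, and the `MMRMetaFromIndexer` field types are inferred from the impls above, and all values are placeholders):

    use alloy::primitives::Bytes;
    use hdp_primitives::block::header::{MMRMetaFromIndexer, RlpBlockHeader}; // assumed paths
    use hdp_primitives::processed_types::mmr::MMRMeta;

    fn main() {
        // Indexer MMR metadata maps onto the processed-types `MMRMeta`.
        let from_indexer = MMRMetaFromIndexer {
            mmr_id: 2,
            mmr_peaks: vec!["0x01".to_string()],
            mmr_root: "0x02".to_string(),
            mmr_size: 7,
        };
        let meta: MMRMeta = (&from_indexer).into();

        // An indexer's hex-encoded header becomes `Bytes` via the other impl;
        // "c0" is placeholder hex here, not a real RLP header.
        let header_bytes: Bytes = RlpBlockHeader { value: "c0".to_string() }.into();
        println!("mmr size: {}, header bytes: {}", meta.size, header_bytes.len());
    }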
diff --git a/crates/primitives/src/block/mod.rs b/crates/primitives/src/block/mod.rs index 521f6fc8..f853eb48 100644 --- a/crates/primitives/src/block/mod.rs +++ b/crates/primitives/src/block/mod.rs @@ -1,3 +1,2 @@ pub mod account; pub mod header; -pub mod tx; diff --git a/crates/primitives/src/block/tx.rs b/crates/primitives/src/block/tx.rs deleted file mode 100644 index 4e2ce848..00000000 --- a/crates/primitives/src/block/tx.rs +++ /dev/null @@ -1,26 +0,0 @@ -use serde::{Deserialize, Serialize}; - -#[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -pub struct TxFromEtherscan { - pub block_number: String, - pub time_stamp: String, - pub hash: String, - pub nonce: String, - pub block_hash: String, - pub transaction_index: String, - pub from: String, - pub to: String, - pub value: String, - pub gas: String, - pub gas_price: String, - pub is_error: String, - // not camelCase - #[serde(rename = "txreceipt_status")] - pub txreceipt_status: String, - pub input: String, - pub contract_address: String, - pub cumulative_gas_used: String, - pub gas_used: String, - pub confirmations: String, -} diff --git a/crates/primitives/src/datalake/block_sampled/collection.rs b/crates/primitives/src/datalake/block_sampled/collection.rs index 57ae9593..1eb097a5 100644 --- a/crates/primitives/src/datalake/block_sampled/collection.rs +++ b/crates/primitives/src/datalake/block_sampled/collection.rs @@ -1,6 +1,6 @@ use std::{fmt::Display, str::FromStr}; -use alloy_primitives::{Address, StorageKey}; +use alloy::primitives::{Address, StorageKey}; use anyhow::{bail, Result}; use crate::datalake::{DatalakeCollection, DatalakeField}; diff --git a/crates/primitives/src/datalake/block_sampled/datalake.rs b/crates/primitives/src/datalake/block_sampled/datalake.rs index fbd2c2c2..3728ed6a 100644 --- a/crates/primitives/src/datalake/block_sampled/datalake.rs +++ b/crates/primitives/src/datalake/block_sampled/datalake.rs @@ -1,13 +1,13 @@ use std::str::FromStr; -use crate::{ - datalake::{datalake_type::DatalakeType, Datalake, DatalakeCollection}, - utils::bytes_to_hex_string, -}; +use crate::datalake::{datalake_type::DatalakeType, Datalake, DatalakeCollection}; use super::collection::BlockSampledCollection; -use alloy_dyn_abi::{DynSolType, DynSolValue}; -use alloy_primitives::{hex::FromHex, keccak256}; +use alloy::primitives::keccak256; +use alloy::{ + dyn_abi::{DynSolType, DynSolValue}, + primitives::B256, +}; use anyhow::{bail, Result}; /// [`BlockSampledDatalake`] is a struct that represents a block sampled datalake. @@ -49,7 +49,7 @@ impl Datalake for BlockSampledDatalake { } /// Encode the block sampled datalake - fn encode(&self) -> Result<String> { + fn encode(&self) -> Result<Vec<u8>> { let datalake_code: DynSolValue = self.get_datalake_type().to_u8().into(); let block_range_start: DynSolValue = self.block_range_start.into(); let block_range_end: DynSolValue = self.block_range_end.into(); @@ -65,25 +65,20 @@ impl Datalake for BlockSampledDatalake { ]); match tuple_value.abi_encode_sequence() { - Some(encoded_datalake) => Ok(bytes_to_hex_string(&encoded_datalake)), + Some(encoded_datalake) => Ok(encoded_datalake), None => bail!("Encoding failed"), } } /// Get the commitment hash of the block sampled datalake - fn commit(&self) -> String { - let encoded_datalake = self.encode().expect("Encoding failed"); - let bytes = Vec::from_hex(encoded_datalake).expect("Invalid hex string"); - let hash = keccak256(bytes); - format!("0x{:x}", hash) + fn commit(&self) -> B256 { + keccak256(self.encode().expect("Encoding failed")) } /// Decode the encoded block sampled datalake - fn decode(encoded: &str) -> Result<Self> { + fn decode(encoded: &[u8]) -> Result<Self> { let abi_type: DynSolType = "(uint256,uint256,uint256,uint256,bytes)".parse()?; - let bytes = Vec::from_hex(encoded).expect("Invalid hex string"); - let decoded = abi_type.abi_decode_sequence(&bytes)?; - + let decoded = abi_type.abi_decode_sequence(encoded)?; let value = decoded.as_tuple().unwrap(); let datalake_code = value[0].as_uint().unwrap().0.to_string().parse::<u8>()?; diff --git a/crates/primitives/src/datalake/block_sampled/mod.rs b/crates/primitives/src/datalake/block_sampled/mod.rs index ffe61be0..66806a4b 100644 --- a/crates/primitives/src/datalake/block_sampled/mod.rs +++ b/crates/primitives/src/datalake/block_sampled/mod.rs @@ -13,13 +13,17 @@ mod tests { use crate::datalake::{Datalake, DatalakeCollection}; use super::*; - use alloy_primitives::{Address, StorageKey}; + use alloy::{ + hex, + primitives::{Address, StorageKey, B256}, + }; use std::str::FromStr; #[test] fn test_block_datalake_for_header() { - let encoded_block_sample_datalake = "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f000000000000000000000000000000000000000000000000000000000000"; - let decoded_datalake = BlockSampledDatalake::decode(encoded_block_sample_datalake).unwrap(); + let encoded_block_sample_datalake = hex::decode("000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f000000000000000000000000000000000000000000000000000000000000").unwrap(); + let decoded_datalake = +
BlockSampledDatalake::decode(&encoded_block_sample_datalake).unwrap(); let block_datalake = BlockSampledDatalake::new(10399990, 10400000, "header.base_fee_per_gas".to_string(), 1) .unwrap(); @@ -34,7 +38,7 @@ mod tests { ); assert_eq!( - block_datalake.commit(), + block_datalake.commit().to_string(), "0x26365cf5692cc38bca06023b8b62ceb0f6bd959a57e3c453be213d1b71d73732".to_string() ); @@ -46,9 +50,9 @@ mod tests { #[test] fn test_block_datalake_for_header_massive() { - let encoded_block_sample_datalake = "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009d2a6000000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f000000000000000000000000000000000000000000000000000000000000"; + let encoded_block_sample_datalake = hex::decode("000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009d2a6000000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002010f000000000000000000000000000000000000000000000000000000000000").unwrap(); let decoded_datalake: BlockSampledDatalake = - BlockSampledDatalake::decode(encoded_block_sample_datalake).unwrap(); + BlockSampledDatalake::decode(&encoded_block_sample_datalake).unwrap(); let block_datalake = BlockSampledDatalake::new(10300000, 10400000, "header.base_fee_per_gas".to_string(), 1) .unwrap(); @@ -64,7 +68,7 @@ mod tests { ); assert_eq!( - block_datalake.commit(), + block_datalake.commit().to_string(), "0xc21f3b3a49c5bed8b7624d0efc050a2a481f06f627d04212bf1d745d0aa5c6f1".to_string() ); @@ -76,8 +80,9 @@ mod tests { #[test] fn test_block_datalake_for_account() { - let encoded_block_sample_datalake = "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000016027b2f05ce9ae365c3dbf30657e2dc6449989e83d60000000000000000000000"; - let decoded_datalake = BlockSampledDatalake::decode(encoded_block_sample_datalake).unwrap(); + let encoded_block_sample_datalake = hex::decode("000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000016027b2f05ce9ae365c3dbf30657e2dc6449989e83d60000000000000000000000").unwrap(); + let decoded_datalake = + BlockSampledDatalake::decode(&encoded_block_sample_datalake).unwrap(); let block_datalake = BlockSampledDatalake::new( 10399990, 10400000, @@ -92,7 +97,7 @@ mod tests { ); assert_eq!( - block_datalake.commit(), + block_datalake.commit().to_string(), "0x79b0d86f9b08c78f527666d4d39d01349530ced0a3d37f4c63e7108814a670b7".to_string() ); @@ -107,8 +112,9 @@ mod tests { 
#[test] fn test_block_datalake_for_account_2() { - let encoded_block_sample_datalake = "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004b902400000000000000000000000000000000000000000000000000000000004b9027000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000016020a4de450feb156a2a51ed159b2fb99da26e5f3a30000000000000000000000"; - let decoded_datalake = BlockSampledDatalake::decode(encoded_block_sample_datalake).unwrap(); + let encoded_block_sample_datalake = hex::decode("000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004b902400000000000000000000000000000000000000000000000000000000004b9027000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000016020a4de450feb156a2a51ed159b2fb99da26e5f3a30000000000000000000000").unwrap(); + let decoded_datalake = + BlockSampledDatalake::decode(&encoded_block_sample_datalake).unwrap(); let block_datalake = BlockSampledDatalake::new( 4952100, 4952103, @@ -126,7 +132,8 @@ mod tests { assert_eq!( block_datalake.commit(), - "0x6db54c04174bd625449785ca58efd313e016b807d0a17add522d74e0e27c3b08".to_string() + B256::from_str("0x6db54c04174bd625449785ca58efd313e016b807d0a17add522d74e0e27c3b08") + .unwrap() ); assert_eq!( @@ -140,8 +147,9 @@ mod tests { #[test] fn test_block_datalake_for_storage() { - let encoded_block_sample_datalake = "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000035037b2f05ce9ae365c3dbf30657e2dc6449989e83d600000000000000000000000000000000000000000000000000000000000000ff0000000000000000000000"; - let decoded_datalake = BlockSampledDatalake::decode(encoded_block_sample_datalake).unwrap(); + let encoded_block_sample_datalake = hex::decode("000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009eb0f600000000000000000000000000000000000000000000000000000000009eb100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000035037b2f05ce9ae365c3dbf30657e2dc6449989e83d600000000000000000000000000000000000000000000000000000000000000ff0000000000000000000000").unwrap(); + let decoded_datalake = + BlockSampledDatalake::decode(&encoded_block_sample_datalake).unwrap(); let block_datalake = BlockSampledDatalake::new( 10399990, 10400000, @@ -155,7 +163,7 @@ mod tests { ); assert_eq!( - block_datalake.commit(), + block_datalake.commit().to_string(), "0x147dc75fd577a75dca31c0c5181539a1078c48759e379685b827f8c0e3f0b6ef".to_string() ); diff --git a/crates/primitives/src/datalake/block_sampled/rand.rs b/crates/primitives/src/datalake/block_sampled/rand.rs index be11a5d1..56854033 100644 --- a/crates/primitives/src/datalake/block_sampled/rand.rs +++ b/crates/primitives/src/datalake/block_sampled/rand.rs @@ -1,6 +1,6 @@ use 
std::str::FromStr; -use alloy_primitives::{Address, B256, U256}; +use alloy::primitives::{Address, B256, U256}; use rand::{ distributions::{Distribution, Standard}, Rng, diff --git a/crates/primitives/src/datalake/block_sampled/rlp_fields.rs b/crates/primitives/src/datalake/block_sampled/rlp_fields.rs index abf7c84e..163be6d4 100644 --- a/crates/primitives/src/datalake/block_sampled/rlp_fields.rs +++ b/crates/primitives/src/datalake/block_sampled/rlp_fields.rs @@ -4,6 +4,7 @@ use std::{fmt::Display, str::FromStr}; +use alloy::primitives::U256; use anyhow::{bail, Result}; use crate::{ @@ -129,32 +130,43 @@ impl DatalakeField for HeaderField { } } - fn decode_field_from_rlp(&self, header_rlp: &str) -> String { + fn decode_field_from_rlp(&self, header_rlp: &[u8]) -> U256 { let decoded =
::rlp_decode(header_rlp); match self { - HeaderField::ParentHash => decoded.parent_hash.to_string(), - HeaderField::OmmerHash => decoded.ommers_hash.to_string(), - HeaderField::Beneficiary => decoded.beneficiary.to_string(), - HeaderField::StateRoot => decoded.state_root.to_string(), - HeaderField::TransactionsRoot => decoded.transactions_root.to_string(), - HeaderField::ReceiptsRoot => decoded.receipts_root.to_string(), - HeaderField::LogsBloom => decoded.logs_bloom.to_string(), - HeaderField::Difficulty => decoded.difficulty.to_string(), - HeaderField::Number => decoded.number.to_string(), - HeaderField::GasLimit => decoded.gas_limit.to_string(), - HeaderField::GasUsed => decoded.gas_used.to_string(), - HeaderField::Timestamp => decoded.timestamp.to_string(), - HeaderField::ExtraData => decoded.extra_data.to_string(), - HeaderField::MixHash => decoded.mix_hash.to_string(), - HeaderField::Nonce => decoded.nonce.to_string(), - HeaderField::BaseFeePerGas => decoded.base_fee_per_gas.unwrap().to_string(), - HeaderField::WithdrawalsRoot => decoded.withdrawals_root.unwrap().to_string(), - HeaderField::BlobGasUsed => decoded.blob_gas_used.unwrap().to_string(), - HeaderField::ExcessBlobGas => decoded.excess_blob_gas.unwrap().to_string(), - HeaderField::ParentBeaconBlockRoot => { - decoded.parent_beacon_block_root.unwrap().to_string() + HeaderField::ParentHash => decoded.parent_hash.into(), + HeaderField::OmmerHash => decoded.ommers_hash.into(), + HeaderField::Beneficiary => { + U256::from_str_radix(&decoded.beneficiary.to_string(), 16).unwrap() } + HeaderField::StateRoot => decoded.state_root.into(), + HeaderField::TransactionsRoot => decoded.transactions_root.into(), + HeaderField::ReceiptsRoot => decoded.receipts_root.into(), + HeaderField::LogsBloom => U256::from_str_radix(&decoded.logs_bloom.to_string(), 16) + .expect("logs bloom does not match U256"), + HeaderField::Difficulty => U256::from(decoded.difficulty), + HeaderField::Number => U256::from(decoded.number), + HeaderField::GasLimit => U256::from(decoded.gas_limit), + HeaderField::GasUsed => U256::from(decoded.gas_used), + HeaderField::Timestamp => U256::from(decoded.timestamp), + HeaderField::ExtraData => todo!("extra data doesn't fit into U256"), + HeaderField::MixHash => decoded.mix_hash.into(), + HeaderField::Nonce => U256::from(decoded.nonce), + HeaderField::BaseFeePerGas => U256::from( + decoded + .base_fee_per_gas + .expect("base fee per gas does not exist"), + ), + HeaderField::WithdrawalsRoot => decoded + .withdrawals_root + .expect("withdrawals root does not exist") + .into(), + HeaderField::BlobGasUsed => U256::from(decoded.blob_gas_used.unwrap()), + HeaderField::ExcessBlobGas => U256::from(decoded.excess_blob_gas.unwrap()), + HeaderField::ParentBeaconBlockRoot => decoded + .parent_beacon_block_root + .expect("parent beacon block root does not exist") + .into(), } } } @@ -271,13 +283,13 @@ impl DatalakeField for AccountField { } } - fn decode_field_from_rlp(&self, account_rlp: &str) -> String { + fn decode_field_from_rlp(&self, account_rlp: &[u8]) -> U256 { let decoded = ::rlp_decode(account_rlp); match self { - AccountField::Nonce => decoded.nonce.to_string(), - AccountField::Balance => decoded.balance.to_string(), - AccountField::StorageRoot => decoded.storage_root.to_string(), - AccountField::CodeHash => decoded.code_hash.to_string(), + AccountField::Nonce => U256::from(decoded.nonce), + AccountField::Balance => U256::from(decoded.balance), + AccountField::StorageRoot => decoded.storage_root.into(), + AccountField::CodeHash 
=> decoded.code_hash.into(),
         }
     }
 }
diff --git a/crates/primitives/src/datalake/envelope.rs b/crates/primitives/src/datalake/envelope.rs
index 09e88bdb..04ffe1b9 100644
--- a/crates/primitives/src/datalake/envelope.rs
+++ b/crates/primitives/src/datalake/envelope.rs
@@ -1,3 +1,4 @@
+use alloy::primitives::B256;
 use anyhow::Result;

 use super::{
@@ -24,7 +25,7 @@ impl DatalakeEnvelope {
         }
     }

-    pub fn encode(&self) -> Result<String> {
+    pub fn encode(&self) -> Result<Vec<u8>> {
         match self {
             DatalakeEnvelope::BlockSampled(datalake) => datalake.encode(),
             DatalakeEnvelope::Transactions(datalake) => datalake.encode(),
@@ -38,14 +39,14 @@ impl DatalakeEnvelope {
         }
     }

-    pub fn get_commitment(&self) -> String {
+    pub fn get_commitment(&self) -> B256 {
         match self {
             DatalakeEnvelope::BlockSampled(datalake) => datalake.commit(),
             DatalakeEnvelope::Transactions(datalake) => datalake.commit(),
         }
     }

-    pub fn from_index(value: u8, data: &str) -> Result<Self> {
+    pub fn from_index(value: u8, data: &[u8]) -> Result<Self> {
         match DatalakeType::from_index(value)? {
             DatalakeType::BlockSampled => Ok(DatalakeEnvelope::BlockSampled(
                 BlockSampledDatalake::decode(data)?,
diff --git a/crates/primitives/src/datalake/mod.rs b/crates/primitives/src/datalake/mod.rs
index bbf400da..adb211ff 100644
--- a/crates/primitives/src/datalake/mod.rs
+++ b/crates/primitives/src/datalake/mod.rs
@@ -1,6 +1,7 @@
 use std::{fmt::Display, str::FromStr};

 use self::datalake_type::DatalakeType;
+use alloy::primitives::{B256, U256};
 use anyhow::Result;

 pub mod block_sampled;
@@ -20,9 +21,9 @@ pub trait DatalakeCollection {
 /// Define the common trait for all datalakes
 pub trait Datalake {
     fn get_datalake_type(&self) -> DatalakeType;
-    fn encode(&self) -> Result<String>;
-    fn commit(&self) -> String;
-    fn decode(encoded: &str) -> Result<Self>
+    fn encode(&self) -> Result<Vec<u8>>;
+    fn commit(&self) -> B256;
+    fn decode(encoded: &[u8]) -> Result<Self>
     where
         Self: Sized;
 }
@@ -32,5 +33,5 @@ pub trait DatalakeField: FromStr + Display {
     where
         Self: Sized;
     fn to_index(&self) -> u8;
-    fn decode_field_from_rlp(&self, rlp: &str) -> String;
+    fn decode_field_from_rlp(&self, rlp: &[u8]) -> U256;
 }
diff --git a/crates/primitives/src/datalake/task.rs b/crates/primitives/src/datalake/task.rs
index 0a0af69d..3056a286 100644
--- a/crates/primitives/src/datalake/task.rs
+++ b/crates/primitives/src/datalake/task.rs
@@ -1,13 +1,13 @@
 use std::str::FromStr;

-use crate::{
-    aggregate_fn::{integer::Operator, AggregationFunction, FunctionContext},
-    utils::bytes_to_hex_string,
-};
+use crate::aggregate_fn::{integer::Operator, AggregationFunction, FunctionContext};

 use super::envelope::DatalakeEnvelope;
-use alloy_dyn_abi::{DynSolType, DynSolValue};
-use alloy_primitives::{hex::FromHex, keccak256, FixedBytes, U256};
+use alloy::primitives::{keccak256, U256};
+use alloy::{
+    dyn_abi::{DynSolType, DynSolValue},
+    primitives::B256,
+};
 use anyhow::{bail, Result};

 #[derive(Debug)]
@@ -21,18 +21,13 @@ impl DatalakeCompute {
         Self { datalake, compute }
     }

-    pub fn commit(&self) -> String {
+    pub fn commit(&self) -> B256 {
         let encoded_datalake = self.encode().unwrap();
-        let bytes = Vec::from_hex(encoded_datalake).expect("Invalid hex string");
-        let hash = keccak256(bytes);
-        format!("0x{:x}", hash)
+        keccak256(encoded_datalake)
     }

-    pub fn encode(&self) -> Result<String> {
-        let identifier_value = DynSolValue::FixedBytes(
-            FixedBytes::from_str(&self.datalake.get_commitment()).unwrap(),
-            32,
-        );
+    pub fn encode(&self) -> Result<Vec<u8>> {
+        let identifier_value = DynSolValue::FixedBytes(self.datalake.get_commitment(), 32);

         let aggregate_fn_id =
DynSolValue::Uint(
            U256::from(AggregationFunction::to_index(&self.compute.aggregate_fn_id)),
@@ -53,7 +48,7 @@ impl DatalakeCompute {
             value_to_compare,
         ]);

-        Ok(bytes_to_hex_string(&tuple_value.abi_encode()))
+        Ok(tuple_value.abi_encode())

         // match header_tuple_value.abi_encode_sequence() {
         //     Some(encoded) => Ok(bytes_to_hex_string(&encoded)),
@@ -82,7 +77,7 @@ impl Computation {
     }

     /// Encode the task without datalake
-    pub fn encode(&self) -> Result<String> {
+    pub fn encode(&self) -> Result<Vec<u8>> {
         let aggregate_fn_id = DynSolValue::Uint(
             U256::from(AggregationFunction::to_index(&self.aggregate_fn_id)),
             8,
@@ -98,8 +93,7 @@ impl Computation {
         let header_tuple_value =
             DynSolValue::Tuple(vec![aggregate_fn_id, operator, value_to_compare]);

-        let encoded_datalake = header_tuple_value.abi_encode();
-        Ok(bytes_to_hex_string(&encoded_datalake))
+        Ok(header_tuple_value.abi_encode())
     }

     /// Decode task that is not filled with datalake
@@ -154,6 +148,8 @@ pub struct ExtendedDatalakeTask {

 #[cfg(test)]
 mod tests {
+    use alloy::hex::FromHex;
+
     use crate::datalake::block_sampled::BlockSampledDatalake;

     use super::*;
@@ -196,12 +192,10 @@ mod tests {
         let serialized = task.encode().unwrap();
         let inner_task_serialized = inner_task.encode().unwrap();
         assert_eq!(serialized, inner_task_serialized);
-        let serialized_bytes: &str = "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
+        let serialized_bytes: Vec<u8> = Vec::from_hex("0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap();
         assert_eq!(serialized, serialized_bytes);
-        let deserialized =
-            Computation::decode_not_filled_task(&Vec::from_hex(serialized_bytes).unwrap()).unwrap();
+        let deserialized = Computation::decode_not_filled_task(&serialized_bytes).unwrap();
         assert_eq!(task, deserialized);
-
         // MIN
         let task = Computation::new("min", None);
@@ -213,10 +207,9 @@ mod tests {
         let serialized = task.encode().unwrap();
         let inner_task_serialized = inner_task.encode().unwrap();
         assert_eq!(serialized, inner_task_serialized);
-        let serialized_bytes: &str = "0x000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
+        let serialized_bytes: Vec<u8> = Vec::from_hex("0x000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap();
         assert_eq!(serialized, serialized_bytes);
-        let deserialized =
-            Computation::decode_not_filled_task(&Vec::from_hex(serialized_bytes).unwrap()).unwrap();
+        let deserialized = Computation::decode_not_filled_task(&serialized_bytes).unwrap();
         assert_eq!(task, deserialized);
     }

@@ -235,7 +228,7 @@ mod tests {
         let task_with_datalake = DatalakeCompute::new(datalake, task);

         let serialized = task_with_datalake.encode().unwrap();
-        let serialized_bytes: &str = "0xcfa530587401307617ef751178c78751c83757e2143b73b4ffadb5969ca6215e000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000064";
+        let serialized_bytes: Vec<u8> =
Vec::from_hex("cfa530587401307617ef751178c78751c83757e2143b73b4ffadb5969ca6215e000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000064").unwrap(); assert_eq!(serialized, serialized_bytes); } } diff --git a/crates/primitives/src/datalake/transactions/datalake.rs b/crates/primitives/src/datalake/transactions/datalake.rs index 51e1b72a..ead5836a 100644 --- a/crates/primitives/src/datalake/transactions/datalake.rs +++ b/crates/primitives/src/datalake/transactions/datalake.rs @@ -7,14 +7,13 @@ use std::str::FromStr; -use alloy_dyn_abi::{DynSolType, DynSolValue}; -use alloy_primitives::{hex::FromHex, keccak256, U256}; +use alloy::consensus::TxType; +use alloy::dyn_abi::{DynSolType, DynSolValue}; +use alloy::primitives::B256; +use alloy::primitives::{keccak256, U256}; use anyhow::{bail, Result}; -use crate::{ - datalake::{datalake_type::DatalakeType, Datalake, DatalakeCollection}, - utils::bytes_to_hex_string, -}; +use crate::datalake::{datalake_type::DatalakeType, Datalake, DatalakeCollection}; use super::TransactionsCollection; @@ -61,7 +60,7 @@ impl Datalake for TransactionsInBlockDatalake { } /// Encode the [`TransactionsInBlockDatalake`] into a hex string - fn encode(&self) -> Result { + fn encode(&self) -> Result> { let datalake_code: DynSolValue = self.get_datalake_type().to_u8().into(); let target_block: DynSolValue = self.target_block.into(); let sampled_property: DynSolValue = self.sampled_property.serialize()?.into(); @@ -81,25 +80,22 @@ impl Datalake for TransactionsInBlockDatalake { ]); match tuple_value.abi_encode_sequence() { - Some(encoded_datalake) => Ok(bytes_to_hex_string(&encoded_datalake)), + Some(encoded_datalake) => Ok(encoded_datalake), None => bail!("Encoding failed"), } } /// Get the commitment hash of the [`TransactionsDatalake`] - fn commit(&self) -> String { + fn commit(&self) -> B256 { let encoded_datalake = self.encode().expect("Encoding failed"); - let bytes = Vec::from_hex(encoded_datalake).expect("Invalid hex string"); - let hash = keccak256(bytes); - format!("0x{:x}", hash) + keccak256(encoded_datalake) } /// Decode the encoded transactions datalake hex string into a [`TransactionsDatalake`] - fn decode(encoded: &str) -> Result { + fn decode(encoded: &[u8]) -> Result { let abi_type: DynSolType = "(uint256, uint256, uint256, uint256, uint256, uint256, bytes)".parse()?; - let bytes = Vec::from_hex(encoded).expect("Invalid hex string"); - let decoded = abi_type.abi_decode_sequence(&bytes)?; + let decoded = abi_type.abi_decode_sequence(encoded)?; let value = decoded.as_tuple().unwrap(); let datalake_code = value[0].as_uint().unwrap().0.to_string().parse::()?; @@ -155,7 +151,7 @@ impl IncludedTypes { Self { inner } } - pub fn is_included(&self, target_type: u8) -> bool { + pub fn is_included(&self, target_type: TxType) -> bool { // check with the index of bytes is either 0 or 1 self.inner[target_type as usize] != 0 } @@ -181,36 +177,36 @@ mod tests { #[test] fn test_included_types() { let included_types = IncludedTypes::from(&[1, 1, 1, 1]); - assert!(included_types.is_included(0)); - assert!(included_types.is_included(1)); - assert!(included_types.is_included(2)); - assert!(included_types.is_included(3)); + assert!(included_types.is_included(TxType::Legacy)); + assert!(included_types.is_included(TxType::Eip2930)); + assert!(included_types.is_included(TxType::Eip1559)); + assert!(included_types.is_included(TxType::Eip4844)); let 
uint256 = included_types.to_uint256(); assert_eq!(uint256, U256::from(0x01010101)); let included_types = IncludedTypes::from_uint256(uint256); - assert!(included_types.is_included(0)); - assert!(included_types.is_included(1)); - assert!(included_types.is_included(2)); - assert!(included_types.is_included(3)); + assert!(included_types.is_included(TxType::Legacy)); + assert!(included_types.is_included(TxType::Eip2930)); + assert!(included_types.is_included(TxType::Eip1559)); + assert!(included_types.is_included(TxType::Eip4844)); } #[test] fn test_included_types_partial() { let included_types = IncludedTypes::from(&[1, 0, 1, 0]); - assert!(included_types.is_included(0)); - assert!(!included_types.is_included(1)); - assert!(included_types.is_included(2)); - assert!(!included_types.is_included(3)); + assert!(included_types.is_included(TxType::Legacy)); + assert!(!included_types.is_included(TxType::Eip2930)); + assert!(included_types.is_included(TxType::Eip1559)); + assert!(!included_types.is_included(TxType::Eip4844)); let uint256 = included_types.to_uint256(); assert_eq!(uint256, U256::from(0x01000100)); let included_types = IncludedTypes::from_uint256(uint256); - assert!(included_types.is_included(0)); - assert!(!included_types.is_included(1)); - assert!(included_types.is_included(2)); - assert!(!included_types.is_included(3)); + assert!(included_types.is_included(TxType::Legacy)); + assert!(!included_types.is_included(TxType::Eip2930)); + assert!(included_types.is_included(TxType::Eip1559)); + assert!(!included_types.is_included(TxType::Eip4844)); } } diff --git a/crates/primitives/src/datalake/transactions/mod.rs b/crates/primitives/src/datalake/transactions/mod.rs index 2d897cfe..bb1cace9 100644 --- a/crates/primitives/src/datalake/transactions/mod.rs +++ b/crates/primitives/src/datalake/transactions/mod.rs @@ -11,8 +11,13 @@ pub use rlp_fields::*; #[cfg(test)] mod tests { + use std::str::FromStr; + use crate::datalake::{Datalake, DatalakeCollection}; - use alloy_primitives::U256; + use alloy::{ + hex, + primitives::{B256, U256}, + }; use super::*; @@ -32,11 +37,12 @@ mod tests { let encoded = transaction_datalake.encode().unwrap(); - assert_eq!(encoded, encoded_datalake); + assert_eq!(encoded, hex::decode(encoded_datalake).unwrap()); assert_eq!( transaction_datalake.commit(), - "0xbcc6e9f8aea4122dedf75f9cc0ce1126b3a38694366ee6f98ddbfffffeaf8e1a" + B256::from_str("0xbcc6e9f8aea4122dedf75f9cc0ce1126b3a38694366ee6f98ddbfffffeaf8e1a") + .unwrap() ); assert_eq!( @@ -68,11 +74,12 @@ mod tests { let encoded = transaction_datalake.encode().unwrap(); - assert_eq!(encoded, encoded_datalake); + assert_eq!(encoded, hex::decode(encoded_datalake).unwrap()); assert_eq!( transaction_datalake.commit(), - "0x64882f901df552341bc9582ae597945cc69575b8e37cde61eaacb3b0f7af682c" + B256::from_str("0x64882f901df552341bc9582ae597945cc69575b8e37cde61eaacb3b0f7af682c") + .unwrap() ); assert_eq!( diff --git a/crates/primitives/src/datalake/transactions/rlp_fields.rs b/crates/primitives/src/datalake/transactions/rlp_fields.rs index 66c0d2e3..7efbf9f1 100644 --- a/crates/primitives/src/datalake/transactions/rlp_fields.rs +++ b/crates/primitives/src/datalake/transactions/rlp_fields.rs @@ -1,10 +1,10 @@ use std::{fmt::Display, str::FromStr}; -use alloy_primitives::hex; +use alloy::{consensus::Eip658Value, primitives::U256}; use anyhow::{bail, Result}; use eth_trie_proofs::{tx::ConsensusTx, tx_receipt::ConsensusTxReceipt}; -use crate::{datalake::DatalakeField, utils::bytes_to_hex_string}; +use 
crate::datalake::DatalakeField; #[derive(Debug, Clone, PartialEq)] pub enum TransactionField { @@ -113,40 +113,48 @@ impl DatalakeField for TransactionField { } } - fn decode_field_from_rlp(&self, rlp: &str) -> String { - let raw_tx = ConsensusTx::rlp_decode(hex::decode(rlp).unwrap().as_slice()).unwrap(); + fn decode_field_from_rlp(&self, rlp: &[u8]) -> U256 { + let raw_tx = ConsensusTx::rlp_decode(rlp).unwrap(); match self { - TransactionField::Nonce => raw_tx.nonce().to_string(), - TransactionField::GasPrice => raw_tx.gas_price().map(|x| x.to_string()).unwrap(), - TransactionField::GasLimit => raw_tx.gas_limit().to_string(), - TransactionField::To => raw_tx.to().to().map(|x| x.to_string()).unwrap(), - TransactionField::Value => raw_tx.value().to_string(), - TransactionField::Input => bytes_to_hex_string(raw_tx.input()), - TransactionField::V => raw_tx.v().to_string(), - TransactionField::R => raw_tx.r().to_string(), - TransactionField::S => raw_tx.s().to_string(), - TransactionField::ChainId => raw_tx.chain_id().map(|x| x.to_string()).unwrap(), - // TODO: string should be properly rlp encoded - TransactionField::AccessList => raw_tx - .access_list() - .map(|_| "access_list".to_string()) - .unwrap(), - TransactionField::MaxFeePerGas => { - raw_tx.max_fee_per_gas().map(|x| x.to_string()).unwrap() + TransactionField::Nonce => U256::from(raw_tx.nonce()), + TransactionField::GasPrice => { + U256::from(raw_tx.gas_price().expect("gas price does not exist")) + } + TransactionField::GasLimit => U256::from(raw_tx.gas_limit()), + TransactionField::To => U256::from_str_radix( + &raw_tx.to().to().expect("to does not exist").to_string(), + 16, + ) + .unwrap(), + TransactionField::Value => U256::from(raw_tx.value()), + TransactionField::Input => U256::from_be_slice(raw_tx.input()), + TransactionField::V => U256::from(raw_tx.v()), + TransactionField::R => U256::from(raw_tx.r()), + TransactionField::S => U256::from(raw_tx.s()), + TransactionField::ChainId => { + U256::from(raw_tx.chain_id().expect("chain id does not exist")) } - TransactionField::MaxPriorityFeePerGas => raw_tx - .max_priority_fee_per_gas() - .map(|x| x.to_string()) - .unwrap(), // TODO: string should be properly rlp encoded + TransactionField::AccessList => todo!("access list cannot parse into u256"), + TransactionField::MaxFeePerGas => U256::from( + raw_tx + .max_fee_per_gas() + .expect("max fee per gas does not exist"), + ), + TransactionField::MaxPriorityFeePerGas => U256::from( + raw_tx + .max_priority_fee_per_gas() + .expect("max priority fee per gas does not exist"), + ), TransactionField::BlobVersionedHashes => raw_tx .blob_versioned_hashes() - .map(|x| x[0].to_string()) - .unwrap(), - TransactionField::MaxFeePerBlobGas => raw_tx - .max_fee_per_blob_gas() - .map(|x| x.to_string()) - .unwrap(), + .expect("blob versioned hashes does not exist")[0] + .into(), + TransactionField::MaxFeePerBlobGas => U256::from( + raw_tx + .max_fee_per_blob_gas() + .expect("max fee per blob gas does not exist"), + ), } } } @@ -262,21 +270,20 @@ impl DatalakeField for TransactionReceiptField { } } - fn decode_field_from_rlp(&self, rlp: &str) -> String { - let raw_tx_receipt = - ConsensusTxReceipt::rlp_decode(hex::decode(rlp).unwrap().as_slice()).unwrap(); + fn decode_field_from_rlp(&self, rlp: &[u8]) -> U256 { + let raw_tx_receipt = ConsensusTxReceipt::rlp_decode(rlp).unwrap(); match self { - TransactionReceiptField::Success => match raw_tx_receipt.success() { - true => "1".to_string(), - false => "0".to_string(), + TransactionReceiptField::Success => 
match raw_tx_receipt.status() { + Eip658Value::Eip658(bool) => U256::from(*bool as u8), + Eip658Value::PostState(state) => (*state).into(), }, TransactionReceiptField::CumulativeGasUsed => { - raw_tx_receipt.cumulative_gas_used().to_string() + U256::from(raw_tx_receipt.cumulative_gas_used()) } // TODO: string should be properly rlp encoded - TransactionReceiptField::Logs => "logs".to_string(), - TransactionReceiptField::Bloom => "bloom".to_string(), + TransactionReceiptField::Logs => U256::from(raw_tx_receipt.logs().len()), + TransactionReceiptField::Bloom => U256::from(raw_tx_receipt.bloom().len()), } } } diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 79ab073f..77904767 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -7,5 +7,6 @@ pub mod block; pub mod datalake; pub mod module; pub mod processed_types; +pub mod serde; pub mod task; pub mod utils; diff --git a/crates/primitives/src/module.rs b/crates/primitives/src/module.rs index 5726f10a..06d51578 100644 --- a/crates/primitives/src/module.rs +++ b/crates/primitives/src/module.rs @@ -2,7 +2,7 @@ //! It contains the hash and the input. //! This is request interface for the preprocessor. -use alloy_primitives::{keccak256, Keccak256}; +use alloy::primitives::{keccak256, Keccak256, B256}; use serde::Serialize; use serde_with::serde_as; use starknet::core::serde::unsigned_field_element::UfeHex; @@ -44,7 +44,7 @@ impl Module { self.inputs.clone() } - pub fn commit(&self) -> String { + pub fn commit(&self) -> B256 { // commit = keccak256(class_hash, keccak256(inputs)) let input_bytes: Vec = self.inputs.iter().flat_map(|x| x.to_bytes_be()).collect(); let commit_input = keccak256(input_bytes); @@ -53,7 +53,6 @@ impl Module { hasher.update(self.class_hash.to_bytes_be()); hasher.update(commit_input); - let commit = hasher.clone().finalize(); - format!("0x{:x}", commit) + hasher.clone().finalize() } } diff --git a/crates/primitives/src/processed_types/account.rs b/crates/primitives/src/processed_types/account.rs index 39bbdc6c..ee749187 100644 --- a/crates/primitives/src/processed_types/account.rs +++ b/crates/primitives/src/processed_types/account.rs @@ -2,22 +2,23 @@ //! This contains the processed account type and its conversion to cairo format. 
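For reference, the `Module::commit` rework a few hunks above reduces to a two-stage keccak scheme: hash the flattened input bytes first, then hash the class hash together with that digest, returning a `B256` instead of a formatted hex string. A minimal sketch under that reading, with the Starknet field elements stubbed out as raw 32-byte words (the real code hashes `FieldElement::to_bytes_be()` output):

```rust
// Sketch of the commit scheme from module.rs above; 32-byte words stand in
// for serialized FieldElements, which is an assumption of this example.
use alloy::primitives::{keccak256, Keccak256, B256};

fn commit(class_hash: [u8; 32], inputs: &[[u8; 32]]) -> B256 {
    // commit = keccak256(class_hash, keccak256(inputs))
    let input_bytes: Vec<u8> = inputs.iter().flatten().copied().collect();
    let commit_input = keccak256(input_bytes);

    let mut hasher = Keccak256::new();
    hasher.update(class_hash);
    hasher.update(commit_input);
    hasher.finalize()
}
```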
use super::mpt::ProcessedMPTProof;
-use alloy_primitives::keccak256;
+use alloy::primitives::{keccak256, Address};
 use serde::{Deserialize, Serialize};

 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
 pub struct ProcessedAccount {
-    pub address: String,
+    pub address: Address,
     pub account_key: String,
     pub proofs: Vec<ProcessedMPTProof>,
 }

 impl ProcessedAccount {
-    pub fn new(address: String, proofs: Vec<ProcessedMPTProof>) -> Self {
-        let account_key = keccak256(&address).to_string();
+    pub fn new(address: Address, proofs: Vec<ProcessedMPTProof>) -> Self {
+        // TODO: to be more accurate, this is the account trie leaf
+        let account_trie_leaf = keccak256(address).to_string();
         ProcessedAccount {
             address,
-            account_key,
+            account_key: account_trie_leaf,
             proofs,
         }
     }
diff --git a/crates/primitives/src/processed_types/block_proofs.rs b/crates/primitives/src/processed_types/block_proofs.rs
new file mode 100644
index 00000000..80fc697b
--- /dev/null
+++ b/crates/primitives/src/processed_types/block_proofs.rs
@@ -0,0 +1,17 @@
+use serde::Serialize;
+
+use super::{
+    account::ProcessedAccount, header::ProcessedHeader, mmr::MMRMeta, receipt::ProcessedReceipt,
+    storage::ProcessedStorage, transaction::ProcessedTransaction,
+};
+
+/// The provider should fetch all the proofs and RLP values from the given keys.
+#[derive(Serialize, Debug)]
+pub struct ProcessedBlockProofs {
+    pub mmr_meta: MMRMeta,
+    pub headers: Vec<ProcessedHeader>,
+    pub accounts: Vec<ProcessedAccount>,
+    pub storages: Vec<ProcessedStorage>,
+    pub transactions: Vec<ProcessedTransaction>,
+    pub transaction_receipts: Vec<ProcessedReceipt>,
+}
diff --git a/crates/primitives/src/processed_types/cairo_format/account.rs b/crates/primitives/src/processed_types/cairo_format/account.rs
index 42a0d3b7..c9d02dd9 100644
--- a/crates/primitives/src/processed_types/cairo_format/account.rs
+++ b/crates/primitives/src/processed_types/cairo_format/account.rs
@@ -14,7 +14,8 @@ impl AsCairoFormat for BaseProcessedAccount {
     type Output = ProcessedAccount;

     fn as_cairo_format(&self) -> Self::Output {
-        let address_chunk_result = FieldElementVectorUnit::from_hex_str(&self.address).unwrap();
+        let address_chunk_result =
+            FieldElementVectorUnit::from_bytes(self.address.as_ref()).unwrap();
         let account_key = &self.account_key;
         let proofs = self
             .proofs
diff --git a/crates/primitives/src/processed_types/cairo_format/datalake_compute.rs b/crates/primitives/src/processed_types/cairo_format/datalake_compute.rs
index 00ff4c9e..946c20ae 100644
--- a/crates/primitives/src/processed_types/cairo_format/datalake_compute.rs
+++ b/crates/primitives/src/processed_types/cairo_format/datalake_compute.rs
@@ -11,8 +11,8 @@ impl AsCairoFormat for BaseProcessedDatalakeCompute {

     fn as_cairo_format(&self) -> Self::Output {
         let computational_task_felts =
-            FieldElementVectorUnit::from_hex_str(&self.encoded_task).unwrap();
-        let datalake_felts = FieldElementVectorUnit::from_hex_str(&self.encoded_datalake).unwrap();
+            FieldElementVectorUnit::from_bytes(&self.encoded_task).unwrap();
+        let datalake_felts = FieldElementVectorUnit::from_bytes(&self.encoded_datalake).unwrap();
         ProcessedDatalakeCompute {
             task_bytes_len: computational_task_felts.bytes_len,
             encoded_task: computational_task_felts.felts,
diff --git a/crates/primitives/src/processed_types/cairo_format/felt_vec_unit.rs b/crates/primitives/src/processed_types/cairo_format/felt_vec_unit.rs
index b16e1974..afc4baca 100644
--- a/crates/primitives/src/processed_types/cairo_format/felt_vec_unit.rs
+++ b/crates/primitives/src/processed_types/cairo_format/felt_vec_unit.rs
@@ -1,4 +1,3 @@
-use alloy_primitives::hex;
 use anyhow::Result;
 use serde::Serialize;
 use
serde_with::serde_as;
@@ -14,11 +13,10 @@ pub struct FieldElementVectorUnit {
 }

 impl FieldElementVectorUnit {
-    pub fn from_hex_str(hex_str: &str) -> Result<Self> {
-        if hex_str.is_empty() {
+    pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
+        if bytes.is_empty() {
             return Err(anyhow::anyhow!("Empty hex input"));
         }
-        let bytes = hex::decode(hex_str)?;
         let bytes_len = bytes.len() as u64;
         let felts = bytes
             .chunks(8)
@@ -38,35 +36,30 @@ impl FieldElementVectorUnit {

 #[cfg(test)]
 mod tests {
+    use alloy::hex;
+
     use super::*;

     #[test]
-    fn test_empty_hex_str() {
-        let hex_str = "";
-        let result = FieldElementVectorUnit::from_hex_str(hex_str);
+    fn test_empty_bytes() {
+        let bytes = hex::decode("").unwrap();
+        let result = FieldElementVectorUnit::from_bytes(&bytes);
         assert!(result.is_err());
     }

     #[test]
-    fn test_single_byte_hex_str() {
-        let hex_str = "0x01";
-        let result = FieldElementVectorUnit::from_hex_str(hex_str).unwrap();
+    fn test_single_byte_bytes() {
+        let bytes = hex::decode("0x01").unwrap();
+        let result = FieldElementVectorUnit::from_bytes(&bytes).unwrap();
         assert_eq!(result.bytes_len, 1);
         assert_eq!(result.felts.len(), 1);
         assert_eq!(result.felts[0], FieldElement::from_hex_be("0x1").unwrap());
     }

     #[test]
-    fn test_non_aligned_hex_str() {
-        let hex_str = "0x1234567890abc";
-        let result = FieldElementVectorUnit::from_hex_str(hex_str);
-        assert!(result.is_err());
-    }
-
-    #[test]
-    fn test_single_chunk_hex_str() {
-        let hex_str = "0x1234567890abcdef";
-        let result = FieldElementVectorUnit::from_hex_str(hex_str).unwrap();
+    fn test_single_chunk_bytes() {
+        let bytes = hex::decode("0x1234567890abcdef").unwrap();
+        let result = FieldElementVectorUnit::from_bytes(&bytes).unwrap();
         assert_eq!(result.bytes_len, 8);
         assert_eq!(result.felts.len(), 1);
         assert_eq!(
@@ -76,9 +69,9 @@ mod tests {
     }

     #[test]
-    fn test_multiple_chunks_hex_str() {
-        let hex_str = "0x1234567890abcdef1122334455667788";
-        let result = FieldElementVectorUnit::from_hex_str(hex_str).unwrap();
+    fn test_multiple_chunks_bytes() {
+        let bytes = hex::decode("0x1234567890abcdef1122334455667788").unwrap();
+        let result = FieldElementVectorUnit::from_bytes(&bytes).unwrap();
         assert_eq!(result.bytes_len, 16);
         assert_eq!(result.felts.len(), 2);
         assert_eq!(
diff --git a/crates/primitives/src/processed_types/cairo_format/header.rs b/crates/primitives/src/processed_types/cairo_format/header.rs
index 7899f3aa..67d0f501 100644
--- a/crates/primitives/src/processed_types/cairo_format/header.rs
+++ b/crates/primitives/src/processed_types/cairo_format/header.rs
@@ -13,7 +13,7 @@ impl AsCairoFormat for BaseProcessedHeader {
     type Output = ProcessedHeader;

     fn as_cairo_format(&self) -> Self::Output {
-        let felts_unit = FieldElementVectorUnit::from_hex_str(&format!("0x{}", &self.rlp)).unwrap();
+        let felts_unit = FieldElementVectorUnit::from_bytes(&self.rlp).unwrap();
         let proof = self.proof.clone();
         ProcessedHeader {
             rlp: felts_unit.felts,
diff --git a/crates/primitives/src/processed_types/cairo_format/mpt.rs b/crates/primitives/src/processed_types/cairo_format/mpt.rs
index 75386ea3..9105b410 100644
--- a/crates/primitives/src/processed_types/cairo_format/mpt.rs
+++ b/crates/primitives/src/processed_types/cairo_format/mpt.rs
@@ -13,7 +13,7 @@ impl AsCairoFormat for BaseProcessedMPTProof {
         let proof_felts: Vec<FieldElementVectorUnit> = self
             .proof
             .iter()
-            .map(|proof| FieldElementVectorUnit::from_hex_str(proof).unwrap())
+            .map(|proof| FieldElementVectorUnit::from_bytes(proof).unwrap())
             .collect();

         let proof_bytes_len = proof_felts.iter().map(|f| f.bytes_len).collect();
diff --git
a/crates/primitives/src/processed_types/cairo_format/receipt.rs b/crates/primitives/src/processed_types/cairo_format/receipt.rs
index 722d3c44..bd577e8b 100644
--- a/crates/primitives/src/processed_types/cairo_format/receipt.rs
+++ b/crates/primitives/src/processed_types/cairo_format/receipt.rs
@@ -15,7 +15,7 @@ impl AsCairoFormat for BaseProcessedReceipt {
         let proof_felts: Vec<FieldElementVectorUnit> = self
             .proof
             .iter()
-            .map(|proof| FieldElementVectorUnit::from_hex_str(proof).unwrap())
+            .map(|proof| FieldElementVectorUnit::from_bytes(proof).unwrap())
             .collect();

         let proof_bytes_len = proof_felts.iter().map(|f| f.bytes_len).collect();
diff --git a/crates/primitives/src/processed_types/cairo_format/storage.rs b/crates/primitives/src/processed_types/cairo_format/storage.rs
index d1258223..5bebb84d 100644
--- a/crates/primitives/src/processed_types/cairo_format/storage.rs
+++ b/crates/primitives/src/processed_types/cairo_format/storage.rs
@@ -1,5 +1,6 @@
 //! This module defines the `ProcessedStorage` struct and its corresponding `ProcessedStorageInFelts` struct.

+use alloy::primitives::StorageKey;
 use serde::{Deserialize, Serialize};
 use serde_with::serde_as;
 use starknet::core::serde::unsigned_field_element::UfeHex;
@@ -13,9 +14,10 @@ impl AsCairoFormat for BaseProcessedStorage {
     type Output = ProcessedStorage;

     fn as_cairo_format(&self) -> Self::Output {
-        let address_chunk_result = FieldElementVectorUnit::from_hex_str(&self.address).unwrap();
-        let slot_chunk_result = FieldElementVectorUnit::from_hex_str(&self.slot).unwrap();
-        let storage_key = self.storage_key.clone();
+        let address_chunk_result =
+            FieldElementVectorUnit::from_bytes(self.address.as_ref()).unwrap();
+        let slot_chunk_result = FieldElementVectorUnit::from_bytes(self.slot.as_ref()).unwrap();
+        let storage_key = self.storage_key;
         let proofs = self
             .proofs
             .iter()
@@ -39,7 +41,7 @@ pub struct ProcessedStorage {
     // chunked storage slot
     #[serde_as(as = "Vec<UfeHex>")]
     pub slot: Vec<FieldElement>,
-    pub storage_key: String,
+    pub storage_key: StorageKey,
     pub proofs: Vec<ProcessedMPTProof>,
 }
diff --git a/crates/primitives/src/processed_types/cairo_format/transaction.rs b/crates/primitives/src/processed_types/cairo_format/transaction.rs
index bcbdc8e7..304d149c 100644
--- a/crates/primitives/src/processed_types/cairo_format/transaction.rs
+++ b/crates/primitives/src/processed_types/cairo_format/transaction.rs
@@ -15,7 +15,7 @@ impl AsCairoFormat for BaseProcessedTransaction {
         let proof_felts: Vec<FieldElementVectorUnit> = self
             .proof
             .iter()
-            .map(|proof| FieldElementVectorUnit::from_hex_str(proof).unwrap())
+            .map(|proof| FieldElementVectorUnit::from_bytes(proof).unwrap())
             .collect();

         let proof_bytes_len = proof_felts.iter().map(|f| f.bytes_len).collect();
diff --git a/crates/primitives/src/processed_types/datalake_compute.rs b/crates/primitives/src/processed_types/datalake_compute.rs
index b103f8c2..53a7cb5b 100644
--- a/crates/primitives/src/processed_types/datalake_compute.rs
+++ b/crates/primitives/src/processed_types/datalake_compute.rs
@@ -1,23 +1,23 @@
-use alloy_primitives::B256;
+use alloy::primitives::{Bytes, B256, U256};
 use serde::{Deserialize, Serialize};

 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
 pub struct ProcessedDatalakeCompute {
     /// encoded computational task
-    pub encoded_task: String,
+    pub encoded_task: Bytes,
     /// computational task commitment
-    pub task_commitment: String,
+    pub task_commitment: B256,
     /// raw evaluation result of target compiled task
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub compiled_result: Option<String>,
+    pub compiled_result: Option<U256>,
/// results merkle tree's entry value
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub result_commitment: Option<String>,
+    pub result_commitment: Option<B256>,
     pub task_proof: Vec<B256>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub result_proof: Option<Vec<B256>>,
     /// encoded datalake
-    pub encoded_datalake: String,
+    pub encoded_datalake: Bytes,
     // ex. block sampled datalake / transaction datalake
     pub datalake_type: u8,
     // ex. "header", "account", "storage"
@@ -27,13 +27,13 @@ pub struct ProcessedDatalakeCompute {
 impl ProcessedDatalakeCompute {
     #[allow(clippy::too_many_arguments)]
     pub fn new_with_result(
-        encoded_task: String,
-        task_commitment: String,
-        compiled_result: String,
-        result_commitment: String,
+        encoded_task: Bytes,
+        task_commitment: B256,
+        compiled_result: U256,
+        result_commitment: B256,
         task_proof: Vec<B256>,
         result_proof: Vec<B256>,
-        encoded_datalake: String,
+        encoded_datalake: Bytes,
         datalake_type: u8,
         property_type: u8,
     ) -> Self {
@@ -51,10 +51,10 @@ impl ProcessedDatalakeCompute {
     }

     pub fn new_without_result(
-        encoded_task: String,
-        task_commitment: String,
+        encoded_task: Bytes,
+        task_commitment: B256,
         task_proof: Vec<B256>,
-        encoded_datalake: String,
+        encoded_datalake: Bytes,
         datalake_type: u8,
         property_type: u8,
     ) -> Self {
@@ -73,8 +73,8 @@ impl ProcessedDatalakeCompute {

     pub fn update_results(
         &mut self,
-        compiled_result: String,
-        result_commitment: String,
+        compiled_result: U256,
+        result_commitment: B256,
         result_proof: Vec<B256>,
     ) {
         self.compiled_result = Some(compiled_result);
diff --git a/crates/primitives/src/processed_types/header.rs b/crates/primitives/src/processed_types/header.rs
index 596fcaea..6573b60c 100644
--- a/crates/primitives/src/processed_types/header.rs
+++ b/crates/primitives/src/processed_types/header.rs
@@ -1,4 +1,11 @@
+use alloy::hex;
 use serde::{Deserialize, Serialize};
+use serde_with::serde_as;
+
+use crate::{
+    block::header::RlpBlockHeader,
+    serde::{deserialize_hex, serialize_hex},
+};

 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
 pub struct ProcessedHeaderProof {
@@ -13,13 +20,16 @@ impl ProcessedHeaderProof {
 }

 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
+#[serde_as]
 pub struct ProcessedHeader {
-    pub rlp: String,
+    #[serde(serialize_with = "serialize_hex", deserialize_with = "deserialize_hex")]
+    pub rlp: Vec<u8>,
     pub proof: ProcessedHeaderProof,
 }

 impl ProcessedHeader {
-    pub fn new(rlp: String, leaf_idx: u64, mmr_path: Vec<String>) -> Self {
+    pub fn new(rlp: RlpBlockHeader, leaf_idx: u64, mmr_path: Vec<String>) -> Self {
+        let rlp = hex::decode(rlp.value).expect("Cannot decode RLP block header to bytes");
         let proof = ProcessedHeaderProof::new(leaf_idx, mmr_path);
         ProcessedHeader { rlp, proof }
     }
diff --git a/crates/primitives/src/processed_types/mod.rs b/crates/primitives/src/processed_types/mod.rs
index 8db889a8..536f5f95 100644
--- a/crates/primitives/src/processed_types/mod.rs
+++ b/crates/primitives/src/processed_types/mod.rs
@@ -12,3 +12,5 @@ pub mod transaction;
 pub mod uint256;
 // TODO: temporary query type for first sync with original flow, will merge with new genric query later
 pub mod v1_query;
+// TODO: will be used in v2
+pub mod block_proofs;
diff --git a/crates/primitives/src/processed_types/mpt.rs b/crates/primitives/src/processed_types/mpt.rs
index ba091b26..29a7b09b 100644
--- a/crates/primitives/src/processed_types/mpt.rs
+++ b/crates/primitives/src/processed_types/mpt.rs
@@ -1,16 +1,32 @@
+use alloy::primitives::Bytes;
 use serde::{Deserialize, Serialize};

 #[derive(Debug, Clone, PartialEq, Serialize,
Deserialize, Eq, Hash)]
 pub struct ProcessedMPTProof {
     pub block_number: u64,
-    pub proof: Vec<String>,
+    pub proof: Vec<Bytes>,
 }

 impl ProcessedMPTProof {
-    pub fn new(block_number: u64, proof: Vec<String>) -> Self {
+    pub fn new(block_number: u64, proof: Vec<Bytes>) -> Self {
         ProcessedMPTProof {
             block_number,
             proof,
         }
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use std::fs;
+
+    use super::*;
+
+    #[test]
+    fn test_mpt_proof() {
+        let processed_string = fs::read_to_string("fixtures/processed/mpt.json").unwrap();
+        let processed_mpt: ProcessedMPTProof = serde_json::from_str(&processed_string).unwrap();
+        assert_eq!(processed_mpt.block_number, 5244634);
+        assert_eq!(processed_mpt.proof.len(), 8);
+    }
+}
diff --git a/crates/primitives/src/processed_types/receipt.rs b/crates/primitives/src/processed_types/receipt.rs
index 1a60b2dd..589e10c7 100644
--- a/crates/primitives/src/processed_types/receipt.rs
+++ b/crates/primitives/src/processed_types/receipt.rs
@@ -1,17 +1,18 @@
 //! This module defines the `ProcessedReceipt` struct and its corresponding `ProcessedReceiptInFelts` struct.

 use crate::utils::tx_index_to_tx_key;
+use alloy::primitives::Bytes;
 use serde::{Deserialize, Serialize};

 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
 pub struct ProcessedReceipt {
     pub key: String,
     pub block_number: u64,
-    pub proof: Vec<String>,
+    pub proof: Vec<Bytes>,
 }

 impl ProcessedReceipt {
-    pub fn new(index: u64, block_number: u64, proof: Vec<String>) -> Self {
+    pub fn new(index: u64, block_number: u64, proof: Vec<Bytes>) -> Self {
         let key = tx_index_to_tx_key(index);
         Self {
             key,
diff --git a/crates/primitives/src/processed_types/storage.rs b/crates/primitives/src/processed_types/storage.rs
index 2dbab118..bd618250 100644
--- a/crates/primitives/src/processed_types/storage.rs
+++ b/crates/primitives/src/processed_types/storage.rs
@@ -1,24 +1,25 @@
 //! This module defines the `ProcessedStorage` struct and its corresponding `ProcessedStorageInFelts` struct.

 use super::mpt::ProcessedMPTProof;
-use alloy_primitives::keccak256;
+use alloy::primitives::{keccak256, Address, StorageKey, B256};
 use serde::{Deserialize, Serialize};

 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
 pub struct ProcessedStorage {
-    pub address: String,
-    pub slot: String,
-    pub storage_key: String,
+    pub address: Address,
+    pub slot: B256,
+    pub storage_key: StorageKey,
     pub proofs: Vec<ProcessedMPTProof>,
 }

 impl ProcessedStorage {
-    pub fn new(address: String, slot: String, proofs: Vec<ProcessedMPTProof>) -> Self {
-        let storage_key = keccak256(&slot).to_string();
+    pub fn new(address: Address, slot: B256, proofs: Vec<ProcessedMPTProof>) -> Self {
+        // TODO: to be more accurate, this is the storage trie leaf; slot == storage key
+        let storage_trie_leaf = keccak256(slot);
         ProcessedStorage {
             address,
             slot,
-            storage_key,
+            storage_key: storage_trie_leaf,
             proofs,
         }
     }
diff --git a/crates/primitives/src/processed_types/transaction.rs b/crates/primitives/src/processed_types/transaction.rs
index 2959ee27..7e309b8f 100644
--- a/crates/primitives/src/processed_types/transaction.rs
+++ b/crates/primitives/src/processed_types/transaction.rs
@@ -1,17 +1,18 @@
 //! The transaction module contains the ProcessedTransaction struct and its conversion to ProcessedTransactionInFelts.
use crate::utils::tx_index_to_tx_key;
+use alloy::primitives::Bytes;
 use serde::{Deserialize, Serialize};

 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
 pub struct ProcessedTransaction {
     pub key: String,
     pub block_number: u64,
-    pub proof: Vec<String>,
+    pub proof: Vec<Bytes>,
 }

 impl ProcessedTransaction {
-    pub fn new(index: u64, block_number: u64, proof: Vec<String>) -> Self {
+    pub fn new(index: u64, block_number: u64, proof: Vec<Bytes>) -> Self {
         let key = tx_index_to_tx_key(index);
         Self {
             key,
diff --git a/crates/primitives/src/processed_types/uint256.rs b/crates/primitives/src/processed_types/uint256.rs
index d691582d..3a3fac16 100644
--- a/crates/primitives/src/processed_types/uint256.rs
+++ b/crates/primitives/src/processed_types/uint256.rs
@@ -1,7 +1,7 @@
 //! This module contains the `Uint256` type, which is a 256-bit unsigned integer.
 //! This is compatible with Cairo `uint256` type.

-use alloy_primitives::{hex::FromHex, B256};
+use alloy::primitives::{hex::FromHex, B256};
 use anyhow::Result;
 use serde::{Deserialize, Serialize};
 use serde_with::serde_as;
diff --git a/crates/primitives/src/serde.rs b/crates/primitives/src/serde.rs
new file mode 100644
index 00000000..ead944b5
--- /dev/null
+++ b/crates/primitives/src/serde.rs
@@ -0,0 +1,36 @@
+use alloy::hex;
+use serde::de::{self, Visitor};
+use serde::{Deserializer, Serializer};
+use std::fmt;
+
+pub fn serialize_hex<S>(bytes: &Vec<u8>, serializer: S) -> Result<S::Ok, S::Error>
+where
+    S: Serializer,
+{
+    let hex_string = hex::encode(bytes);
+    serializer.serialize_str(&hex_string)
+}
+
+pub fn deserialize_hex<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    struct HexVisitor;
+
+    impl<'de> Visitor<'de> for HexVisitor {
+        type Value = Vec<u8>;
+
+        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+            formatter.write_str("a hex-encoded string")
+        }
+
+        fn visit_str<E>(self, value: &str) -> Result<Vec<u8>, E>
+        where
+            E: de::Error,
+        {
+            hex::decode(value).map_err(de::Error::custom)
+        }
+    }
+
+    deserializer.deserialize_str(HexVisitor)
+}
diff --git a/crates/primitives/src/task.rs b/crates/primitives/src/task.rs
index 26faf0c5..fb2a343b 100644
--- a/crates/primitives/src/task.rs
+++ b/crates/primitives/src/task.rs
@@ -1,5 +1,7 @@
 //! Task is a unit of work that can be executed by the processor/pre-processor.
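For reference, the `serialize_hex`/`deserialize_hex` helpers added in `serde.rs` above are meant to be wired in through serde field attributes, exactly as `ProcessedHeader` does for its `rlp` field. A minimal usage sketch with a hypothetical `Wrapper` struct, assuming the crate is consumed as `hdp_primitives` (note that alloy's `hex::encode` emits no `0x` prefix):

```rust
use hdp_primitives::serde::{deserialize_hex, serialize_hex};
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Wrapper {
    // Stored as raw bytes in memory, hex-encoded on the wire.
    #[serde(serialize_with = "serialize_hex", deserialize_with = "deserialize_hex")]
    payload: Vec<u8>,
}

// serde_json::to_string(&Wrapper { payload: vec![0x12, 0x34] })
// would produce {"payload":"1234"}, and deserializing the same JSON
// turns the hex string back into the original bytes.
```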
+use alloy::primitives::B256;
+
 use crate::datalake::task::DatalakeCompute;
 use crate::module::Module;

@@ -11,7 +13,7 @@ pub enum TaskEnvelope {
 }

 impl TaskEnvelope {
-    pub fn commit(&self) -> String {
+    pub fn commit(&self) -> B256 {
         match self {
             TaskEnvelope::DatalakeCompute(task) => task.commit(),
             TaskEnvelope::Module(module) => module.commit(),
diff --git a/crates/primitives/src/utils.rs b/crates/primitives/src/utils.rs
index baef2ece..fe2eddbd 100644
--- a/crates/primitives/src/utils.rs
+++ b/crates/primitives/src/utils.rs
@@ -1,5 +1,5 @@
-use alloy_primitives::hex::{self};
-use alloy_primitives::{FixedBytes, U256};
+use alloy::primitives::hex::{self};
+use alloy::primitives::{FixedBytes, U256};
 use anyhow::Result;

 /// Convert a `FixedBytes<32>` which originally encoded from utf8 string into original utf8 string value
@@ -47,7 +47,7 @@ pub fn tx_index_to_tx_key(tx_index: u64) -> String {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use alloy_primitives::{hex::FromHex, FixedBytes};
+    use alloy::primitives::{hex::FromHex, FixedBytes};

     #[test]
     fn test_bytes32_to_str() {
diff --git a/crates/provider/Cargo.toml b/crates/provider/Cargo.toml
index 01cc323c..e9398f81 100644
--- a/crates/provider/Cargo.toml
+++ b/crates/provider/Cargo.toml
@@ -10,10 +10,22 @@ version.workspace = true
 anyhow.workspace = true
 reqwest.workspace = true
 serde_json.workspace = true
-serde.workspace = true
 hdp-primitives.workspace = true
-alloy-primitives.workspace = true
 tokio.workspace = true
 tracing.workspace = true
 eth-trie-proofs.workspace = true
 futures.workspace = true
+thiserror.workspace = true
+alloy.workspace = true
+itertools.workspace = true
+
+[dev-dependencies]
+criterion = { version = "0.4", features = [
+    "async",
+    "async_futures",
+    "html_reports",
+] }
+
+[[bench]]
+name = "provider_benchmark"
+harness = false
diff --git a/crates/provider/README.md b/crates/provider/README.md
new file mode 100644
index 00000000..f1ca34d3
--- /dev/null
+++ b/crates/provider/README.md
@@ -0,0 +1,15 @@
+# Provider
+
+## `EvmProvider`
+
+For the datalake compiler, `EvmProvider` needs to fetch:
+
+- Headers and MMR proofs: a large range of block headers, with their MMR proofs from the Herodotus Indexer.
+- Accounts and account proofs: a large range of account data, with the matching MPT proofs from `eth_getProof`.
+- Storages and storage proofs: a large range of storage slot values, with the matching MPT proofs from `eth_getProof`.
+- Txs and tx proofs: for a specific block number, a large set of transaction indexes and their MPT proofs.
+- Receipts and receipt proofs: for a specific block number, a large set of receipt indexes and their MPT proofs.
+
+For the module compiler, `EvmProvider` needs to fetch:
+
+All of the above, starting from a fetch key as the entry point.
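For reference, here is a minimal sketch of driving `EvmProvider` by hand, mirroring the calls the benchmark below exercises; the RPC endpoint is a placeholder, and errors are simply unwrapped as in the benchmark:

```rust
use alloy::primitives::address;
use hdp_provider::evm::provider::EvmProvider;
use reqwest::Url;

#[tokio::main]
async fn main() {
    // Sepolia (chain id 11155111); any archive RPC endpoint should work here.
    let rpc_url = Url::parse("https://example-sepolia-rpc.invalid").unwrap();
    let provider = EvmProvider::new_with_url(rpc_url, 11155111);

    // Headers plus MMR proofs over an 11-block range, step 1.
    provider
        .get_range_of_header_proofs(6127485, 6127485 + 10, 1)
        .await
        .unwrap();

    // Account data plus account MPT proofs from eth_getProof over the same range.
    let target = address!("7f2c6f930306d3aa736b3a6c6a98f512f74036d4");
    provider
        .get_range_of_account_proofs(6127485, 6127485 + 10, 1, target)
        .await
        .unwrap();
}
```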
diff --git a/crates/provider/benches/README.md b/crates/provider/benches/README.md
new file mode 100644
index 00000000..a5f07c28
--- /dev/null
+++ b/crates/provider/benches/README.md
@@ -0,0 +1,21 @@
+# `EvmProvider` Benchmark
+
+## Hardware Specifications
+
+- **Processor**: Apple M2
+- **Memory**: 32 GB
+- **Operating System**: macOS
+
+## RPC Specifications
+
+- Used an Alchemy free (non-paid) plan RPC URL
+
+## Benchmark Results
+
+| Benchmark                             | Time (ms)       | Iterations | Notes          |
+| ------------------------------------- | --------------- | ---------- | -------------- |
+| get_10_header_proofs                  | 1667.7 - 1720.4 | 10         | Block Range 10 |
+| get_10_account_proofs                 | 343.19 - 403.63 | 10         | Block Range 10 |
+| get_10_storage_proofs                 | 331.28 - 385.67 | 10         | Block Range 10 |
+| get_tx_with_proof_from_block          | 458.63 - 552.80 | 10         | --             |
+| get_tx_receipt_with_proof_from_block  | 2090.4 - 2692.8 | 10         | --             |
diff --git a/crates/provider/benches/provider_benchmark.rs b/crates/provider/benches/provider_benchmark.rs
new file mode 100644
index 00000000..1e53a4ad
--- /dev/null
+++ b/crates/provider/benches/provider_benchmark.rs
@@ -0,0 +1,101 @@
+use alloy::primitives::{address, B256};
+use criterion::{criterion_group, criterion_main, Bencher, Criterion};
+use hdp_provider::evm::provider::EvmProvider; // Adjust this import path according to your project structure
+use reqwest::Url;
+use tokio::runtime::Runtime;
+
+// Note: this is a non-paid Alchemy plan RPC URL
+const SEPOLIA_RPC_URL: &str =
+    "https://eth-sepolia.g.alchemy.com/v2/xar76cftwEtqTBWdF4ZFy9n8FLHAETDv";
+
+fn benchmark_header(b: &mut Bencher) {
+    let provider = EvmProvider::new_with_url(Url::parse(SEPOLIA_RPC_URL).unwrap(), 11155111);
+    let rt = Runtime::new().unwrap();
+
+    b.iter(|| {
+        rt.block_on(async {
+            provider
+                .get_range_of_header_proofs(6127485, 6127485 + 10, 1)
+                .await
+                .unwrap();
+        });
+    });
+}
+
+fn benchmark_account(b: &mut Bencher) {
+    let provider = EvmProvider::new_with_url(Url::parse(SEPOLIA_RPC_URL).unwrap(), 11155111);
+    let target_address = address!("7f2c6f930306d3aa736b3a6c6a98f512f74036d4");
+    let rt = Runtime::new().unwrap();
+
+    b.iter(|| {
+        rt.block_on(async {
+            provider
+                .get_range_of_account_proofs(6127485, 6127485 + 10, 1, target_address)
+                .await
+                .unwrap();
+        });
+    });
+}
+
+fn benchmark_storage(b: &mut Bencher) {
+    let provider = EvmProvider::new_with_url(Url::parse(SEPOLIA_RPC_URL).unwrap(), 11155111);
+    let target_address = address!("75CeC1db9dCeb703200EAa6595f66885C962B920");
+    let storage_key = B256::ZERO;
+    let rt = Runtime::new().unwrap();
+
+    b.iter(|| {
+        rt.block_on(async {
+            provider
+                .get_range_of_storage_proofs(6127485, 6127485 + 10, 1, target_address, storage_key)
+                .await
+                .unwrap();
+        });
+    });
+}
+
+fn benchmark_transaction(b: &mut Bencher) {
+    let provider = EvmProvider::new_with_url(Url::parse(SEPOLIA_RPC_URL).unwrap(), 11155111);
+    let rt = Runtime::new().unwrap();
+
+    b.iter(|| {
+        rt.block_on(async {
+            provider
+                .get_tx_with_proof_from_block(6127485, 0, 23, 1)
+                .await
+                .unwrap();
+        });
+    });
+}
+
+fn benchmark_transaction_receipt(b: &mut Bencher) {
+    let provider = EvmProvider::new_with_url(Url::parse(SEPOLIA_RPC_URL).unwrap(), 11155111);
+    let rt = Runtime::new().unwrap();
+
+    b.iter(|| {
+        rt.block_on(async {
+            provider
+                .get_tx_receipt_with_proof_from_block(6127485, 0, 23, 1)
+                .await
+                .unwrap();
+        });
+    });
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("get_10_header_proofs", benchmark_header);
+    c.bench_function("get_10_account_proofs", benchmark_account);
+
c.bench_function("get_10_storage_proofs", benchmark_storage); + c.bench_function("get_tx_with_proof_from_block", benchmark_transaction); + c.bench_function( + "get_tx_receipt_with_proof_from_block", + benchmark_transaction_receipt, + ); +} + +criterion_group! { + name = benches; + config = Criterion::default().sample_size(10).measurement_time(std::time::Duration::new(10, 0)); + targets = criterion_benchmark +} + +criterion_main!(benches); diff --git a/crates/provider/src/evm/mod.rs b/crates/provider/src/evm/mod.rs index 7508e9cf..4ba06f17 100644 --- a/crates/provider/src/evm/mod.rs +++ b/crates/provider/src/evm/mod.rs @@ -1,616 +1,2 @@ -use alloy_primitives::Bytes; -use anyhow::Result; -use eth_trie_proofs::{tx_receipt_trie::TxReceiptsMptHandler, tx_trie::TxsMptHandler}; -use rpc_provider::{ - FetchedAccountProof, FetchedStorageAccountProof, FetchedTransactionProof, - FetchedTransactionReceiptProof, HeaderProvider, TrieProofProvider, -}; -use serde::Serialize; -use std::{ - collections::{HashMap, HashSet}, - time::Instant, - vec, -}; - -use tokio::sync::mpsc; -use tracing::{error, info}; - -use hdp_primitives::processed_types::{ - account::ProcessedAccount, header::ProcessedHeader, mmr::MMRMeta, mpt::ProcessedMPTProof, - receipt::ProcessedReceipt, storage::ProcessedStorage, transaction::ProcessedTransaction, -}; - -use crate::key::{ - AccountProviderKey, FetchKeyEnvelope, HeaderProviderKey, StorageProviderKey, TxProviderKey, - TxReceiptProviderKey, -}; - -pub type RlpEncodedValue = String; -pub type MPTProof = Vec; -/// `StoredHeader` is a tuple of RLP encoded header and MMR proof and element_index and mmr_id. -pub type StoredHeader = (RlpEncodedValue, MPTProof, u64, u64); -/// `StoredHeader` is a map of block number to a tuple of RLP encoded header and MMR proof and element_index and mmr_id. -pub type StoredHeaders = HashMap; - -pub mod rpc_provider; - -// For more information swagger doc: https://rs-indexer.api.herodotus.cloud/swagger -const HERODOTUS_RS_INDEXER_URL: &str = "https://rs-indexer.api.herodotus.cloud/accumulators"; - -/// [`AbstractProvider`] abstracts the fetching of data from the RPC and memory. -/// It uses a [`InMemoryProvider`] and a [`RpcProvider`] to fetch data. -/// -/// but handle requests so that it would not make duplicate requests -pub struct AbstractProvider { - /// Fetch data from the RPC - trie_proof_provider: TrieProofProvider, - /// Fetch block headers and MMR data from the Herodotus indexer. - header_provider: HeaderProvider, -} - -#[derive(Clone)] -pub struct AbstractProviderConfig { - pub rpc_url: &'static str, - pub chain_id: u64, - pub rpc_chunk_size: u64, -} - -/// Provider should fetch all the proofs and rlp values from given keys. -#[derive(Serialize, Debug)] -pub struct ProcessedBlockProofs { - pub mmr_meta: MMRMeta, - pub headers: Vec, - pub accounts: Vec, - pub storages: Vec, - pub transactions: Vec, - pub transaction_receipts: Vec, -} - -impl AbstractProvider { - pub fn new(config: AbstractProviderConfig) -> Self { - Self { - trie_proof_provider: TrieProofProvider::new(config.rpc_url, config.rpc_chunk_size), - header_provider: HeaderProvider::new(HERODOTUS_RS_INDEXER_URL, config.chain_id), - } - } - - /// This is the public entry point of provider. 
- pub async fn fetch_proofs_from_keys( - &self, - fetch_keys: HashSet, - ) -> Result { - // categorize fetch keys - let mut target_keys_for_header = vec![]; - let mut target_keys_for_account = vec![]; - let mut target_keys_for_storage = vec![]; - let mut target_keys_for_tx = vec![]; - let mut target_keys_for_tx_receipt = vec![]; - for key in fetch_keys { - match key { - FetchKeyEnvelope::Header(header_key) => { - target_keys_for_header.push(header_key); - } - FetchKeyEnvelope::Account(account_key) => { - target_keys_for_account.push(account_key); - } - FetchKeyEnvelope::Storage(storage_key) => { - target_keys_for_storage.push(storage_key); - } - FetchKeyEnvelope::Tx(tx_key) => { - target_keys_for_tx.push(tx_key); - } - FetchKeyEnvelope::TxReceipt(tx_receipt_key) => { - target_keys_for_tx_receipt.push(tx_receipt_key); - } - } - } - - // fetch proofs using keys and construct result - let (headers, mmr_meta) = self - .fetch_headers_from_keys(&target_keys_for_header) - .await?; - let accounts = self - .get_accounts_from_keys(&target_keys_for_account) - .await?; - let storages = self - .get_storages_from_keys(&target_keys_for_storage) - .await?; - let transactions = self.get_txs_from_keys(&target_keys_for_tx).await?; - let transaction_receipts = self - .get_tx_receipts_from_keys(&target_keys_for_tx_receipt) - .await?; - - Ok(ProcessedBlockProofs { - mmr_meta, - headers, - accounts, - storages, - transactions, - transaction_receipts, - }) - } - - pub async fn fetch_headers_from_keys( - &self, - keys: &[HeaderProviderKey], - ) -> Result<(Vec, MMRMeta)> { - let mut result_headers: Vec = vec![]; - let start_fetch = Instant::now(); - - let start_block = keys.iter().map(|x| x.block_number).min().unwrap(); - let end_block = keys.iter().map(|x| x.block_number).max().unwrap(); - - let mmr_data = self - .header_provider - .get_sequencial_headers_and_mmr_from_indexer(start_block, end_block) - .await; - - match mmr_data { - Ok(mmr) => { - info!("Successfully fetched MMR data from indexer"); - let duration = start_fetch.elapsed(); - info!("Time taken (fetch from Indexer): {:?}", duration); - for block_proof in &mmr.1 { - result_headers.push(ProcessedHeader::new( - block_proof.1.rlp_block_header.value.clone(), - block_proof.1.element_index, - block_proof.1.siblings_hashes.clone(), - )); - } - Ok((result_headers, mmr.0.into())) - } - Err(e) => { - let duration = start_fetch.elapsed(); - info!("Time taken (during from Indexer): {:?}", duration); - error!( - "Something went wrong while fetching MMR data from indexer: {}", - e - ); - Err(e) - } - } - } - - pub async fn get_accounts_from_keys( - &self, - keys: &[AccountProviderKey], - ) -> Result> { - let start_fetch = Instant::now(); - - // group by address - let mut address_to_block_range: HashMap> = HashMap::new(); - for key in keys { - let block_range = address_to_block_range - .entry(key.address.to_string()) - .or_default(); - block_range.push(key.block_number); - } - // loop through each address and fetch accounts - let mut accounts = vec![]; - for (address, block_range) in address_to_block_range { - let (rpc_sender, mut rx) = mpsc::channel::(32); - - self.trie_proof_provider - .get_account_proofs(rpc_sender, block_range, &address) - .await; - - let mut account_proofs: Vec = vec![]; - - while let Some(proof) = rx.recv().await { - account_proofs.push(ProcessedMPTProof::new( - proof.block_number, - proof.account_proof, - )); - } - accounts.push(ProcessedAccount::new(address, account_proofs)); - } - let duration = start_fetch.elapsed(); - info!("Time taken 
(Account Fetch): {:?}", duration); - - Ok(accounts) - } - - pub async fn get_storages_from_keys( - &self, - keys: &[StorageProviderKey], - ) -> Result> { - let start_fetch = Instant::now(); - - // group by address and slot - let mut address_slot_to_block_range: HashMap<(String, String), Vec> = HashMap::new(); - for key in keys { - let block_range = address_slot_to_block_range - .entry((key.address.to_string(), key.key.to_string())) - .or_default(); - block_range.push(key.block_number); - } - // loop through each address and fetch storages - let mut storages = vec![]; - for ((address, slot), block_range) in address_slot_to_block_range { - let (rpc_sender, mut rx) = mpsc::channel::(32); - - self.trie_proof_provider - .get_storage_proofs(rpc_sender, block_range, &address, slot.clone()) - .await; - - let mut storage_proofs: Vec = vec![]; - - while let Some(proof) = rx.recv().await { - storage_proofs.push(ProcessedMPTProof::new( - proof.block_number, - proof.storage_proof, - )); - } - storages.push(ProcessedStorage::new(address, slot, storage_proofs)); - } - let duration = start_fetch.elapsed(); - info!("Time taken (Storage Fetch): {:?}", duration); - - Ok(storages) - } - - pub async fn get_txs_from_keys( - &self, - keys: &[TxProviderKey], - ) -> Result> { - let start_fetch = Instant::now(); - // group by block number - let mut block_to_tx_range: HashMap> = HashMap::new(); - for key in keys { - let tx_range = block_to_tx_range.entry(key.block_number).or_default(); - tx_range.push(key.tx_index); - } - - let mut transactions = vec![]; - for (block_number, tx_range) in block_to_tx_range { - let mut txs_mpt_handler = TxsMptHandler::new(self.trie_proof_provider.url).unwrap(); - txs_mpt_handler - .build_tx_tree_from_block(block_number) - .await - .unwrap(); - // let txs = txs_mpt_handler.get_elements().unwrap(); - - for tx_index in tx_range { - let proof = txs_mpt_handler - .get_proof(tx_index) - .unwrap() - .into_iter() - .map(|x| Bytes::from(x).to_string()) - .collect::>(); - // let consensus_tx = txs[tx_index as usize].clone(); - // let rlp = Bytes::from(consensus_tx.rlp_encode()).to_string(); - transactions.push(ProcessedTransaction::new(tx_index, block_number, proof)); - } - } - let duration = start_fetch.elapsed(); - info!("Time taken (Transaction Fetch): {:?}", duration); - Ok(transactions) - } - - pub async fn get_tx_receipts_from_keys( - &self, - keys: &[TxReceiptProviderKey], - ) -> Result> { - let start_fetch = Instant::now(); - // group by block number - let mut block_to_tx_receipt_range: HashMap> = HashMap::new(); - for key in keys { - let tx_receipt_range = block_to_tx_receipt_range - .entry(key.block_number) - .or_default(); - tx_receipt_range.push(key.tx_index); - } - - let mut transaction_receipts = vec![]; - for (block_number, tx_receipt_range) in block_to_tx_receipt_range { - let mut tx_reciepts_mpt_handler = - TxReceiptsMptHandler::new(self.trie_proof_provider.url).unwrap(); - - tx_reciepts_mpt_handler - .build_tx_receipts_tree_from_block(block_number) - .await - .unwrap(); - //let tx_receipts = tx_reciepts_mpt_handler.get_elements().unwrap(); - - for tx_receipt_index in tx_receipt_range { - let proof = tx_reciepts_mpt_handler - .get_proof(tx_receipt_index) - .unwrap() - .into_iter() - .map(|x| Bytes::from(x).to_string()) - .collect::>(); - // let consensus_tx_receipt = tx_receipts[tx_receipt_index as usize].clone(); - // let rlp = Bytes::from(consensus_tx_receipt.rlp_encode()).to_string(); - transaction_receipts.push(ProcessedReceipt::new( - tx_receipt_index, - block_number, - 
proof, - )); - } - } - - let duration = start_fetch.elapsed(); - info!("Time taken (Transaction Receipt Fetch): {:?}", duration); - Ok(transaction_receipts) - } - - // TODO: wip - pub async fn get_sequencial_full_header_with_proof( - &self, - start_block: u64, - end_block: u64, - ) -> Result<(StoredHeaders, MMRMeta)> { - //? A map of block numbers to a boolean indicating whether the block was fetched. - let mut blocks_map: HashMap = HashMap::new(); - - // Fetch MMR data and header data from Herodotus indexer - let start_fetch = Instant::now(); - - let mmr_data = self - .header_provider - .get_sequencial_headers_and_mmr_from_indexer(start_block, end_block) - .await; - - match mmr_data { - Ok(mmr) => { - info!("Successfully fetched MMR data from indexer"); - let duration = start_fetch.elapsed(); - info!("Time taken (fetch from Indexer): {:?}", duration); - for block_proof in &mmr.1 { - blocks_map.insert( - *block_proof.0, - ( - block_proof.1.rlp_block_header.value.clone(), - block_proof.1.siblings_hashes.clone(), - block_proof.1.element_index, - mmr.0.mmr_id, - ), - ); - } - Ok((blocks_map, mmr.0.into())) - } - Err(e) => { - let duration = start_fetch.elapsed(); - info!("Time taken (during from Indexer): {:?}", duration); - error!( - "Something went wrong while fetching MMR data from indexer: {}", - e - ); - Err(e) - } - } - } - - // // Unoptimized version of get_rlp_header, just for testing purposes - // pub async fn get_rlp_header(&mut self, block_number: u64) -> RlpEncodedValue { - // match self.memory.get_rlp_header(block_number) { - // Some(header) => header, - // None => { - // let header_rpc = self - // .trie_proof_provider - // .get_block_by_number(block_number) - // .await - // .unwrap(); - // let block_header = HeaderPrimitive::from(&header_rpc); - // let rlp_encoded = block_header.rlp_encode(); - // self.memory.set_header(block_number, rlp_encoded.clone()); - - // rlp_encoded - // } - // } - // } - - // Get account with proof in given range of blocks - // This need to be used for block sampled datalake - pub async fn get_range_account_with_proof( - &mut self, - block_range_start: u64, - block_range_end: u64, - increment: u64, - address: String, - ) -> Result> { - let start_fetch = Instant::now(); - - let target_block_range: Vec = (block_range_start..=block_range_end) - .step_by(increment as usize) - .collect(); - - let (rpc_sender, mut rx) = mpsc::channel::(32); - - self.trie_proof_provider - .get_account_proofs(rpc_sender, target_block_range, &address) - .await; - - let mut result = HashMap::new(); - - while let Some(proof) = rx.recv().await { - result.insert(proof.block_number, proof); - } - - let duration = start_fetch.elapsed(); - info!("Time taken (Account Fetch): {:?}", duration); - - Ok(result) - } - - // Get storage with proof in given range of blocks - // This need to be used for block sampled datalake - pub async fn get_range_storage_with_proof( - &mut self, - block_range_start: u64, - block_range_end: u64, - increment: u64, - address: String, - slot: String, - ) -> Result> { - let start_fetch = Instant::now(); - //? A map of block numbers to a boolean indicating whether the block was fetched. 
- let target_block_range: Vec = (block_range_start..=block_range_end) - .step_by(increment as usize) - .collect(); - - let (rpc_sender, mut rx) = mpsc::channel::(32); - self.trie_proof_provider - .get_storage_proofs(rpc_sender, target_block_range, &address, slot) - .await; - - let mut result = HashMap::new(); - - while let Some(proof) = rx.recv().await { - result.insert(proof.block_number, proof); - } - let duration = start_fetch.elapsed(); - info!("Time taken (Storage Fetch): {:?}", duration); - - Ok(result) - } - - /// Fetches the encoded transaction with proof from the MPT trie for the given block number. - /// The transaction is fetched from the MPT trie and the proof is generated from the MPT trie. - pub async fn get_tx_with_proof_from_block( - &self, - target_block: u64, - start_index: u64, - end_index: u64, - incremental: u64, - ) -> Result> { - let mut tx_with_proof = vec![]; - let mut txs_mpt_handler = TxsMptHandler::new(self.trie_proof_provider.url).unwrap(); - txs_mpt_handler - .build_tx_tree_from_block(target_block) - .await - .unwrap(); - let txs = txs_mpt_handler.get_elements().unwrap(); - - let target_tx_index_range = (start_index..end_index).step_by(incremental as usize); - for tx_index in target_tx_index_range { - let proof = txs_mpt_handler - .get_proof(tx_index) - .unwrap() - .into_iter() - .map(|x| Bytes::from(x).to_string()) - .collect::>(); - let consensus_tx = txs[tx_index as usize].clone(); - let rlp = Bytes::from(consensus_tx.rlp_encode()).to_string(); - let tx_type = consensus_tx.0.tx_type() as u8; - let fetched_result = FetchedTransactionProof { - block_number: target_block, - tx_index, - encoded_transaction: rlp, - transaction_proof: proof, - tx_type, - }; - tx_with_proof.push(fetched_result); - } - - Ok(tx_with_proof) - } - - /// Fetches the encoded transaction receipt with proof from the MPT trie for the given block number. - /// The transaction receipt is fetched from the MPT trie and the proof is generated from the MPT trie. 
- pub async fn get_tx_receipt_with_proof_from_block( - &self, - target_block: u64, - start_index: u64, - end_index: u64, - incremental: u64, - ) -> Result> { - let mut tx_receipt_with_proof = vec![]; - let mut tx_reciepts_mpt_handler = - TxReceiptsMptHandler::new(self.trie_proof_provider.url).unwrap(); - - tx_reciepts_mpt_handler - .build_tx_receipts_tree_from_block(target_block) - .await - .unwrap(); - let tx_receipts = tx_reciepts_mpt_handler.get_elements().unwrap(); - let target_tx_receipt_index_range = (start_index..end_index).step_by(incremental as usize); - - for tx_receipt_index in target_tx_receipt_index_range { - let proof = tx_reciepts_mpt_handler - .get_proof(tx_receipt_index) - .unwrap() - .into_iter() - .map(|x| Bytes::from(x).to_string()) - .collect::>(); - let consensus_tx_receipt = tx_receipts[tx_receipt_index as usize].clone(); - let rlp = Bytes::from(consensus_tx_receipt.rlp_encode()).to_string(); - let tx_receipt_type = consensus_tx_receipt.0.tx_type() as u8; - tx_receipt_with_proof.push(FetchedTransactionReceiptProof { - block_number: target_block, - tx_index: tx_receipt_index, - encoded_receipt: rlp, - receipt_proof: proof, - tx_type: tx_receipt_type, - }); - } - - Ok(tx_receipt_with_proof) - } -} - -#[cfg(test)] -mod tests { - use super::*; - //use alloy_primitives::{hex, keccak256}; - - // fn rlp_string_to_block_hash(rlp_string: &str) -> String { - // keccak256(hex::decode(rlp_string).unwrap()).to_string() - // } - - // Non-paid personal alchemy endpoint - const SEPOLIA_RPC_URL: &str = - "https://eth-sepolia.g.alchemy.com/v2/xar76cftwEtqTBWdF4ZFy9n8FLHAETDv"; - - // #[tokio::test] - // async fn test_provider_get_rlp_header() { - // let config = AbstractProviderConfig { - // rpc_url: SEPOLIA_RPC_URL, - // chain_id: 11155111, - // rpc_chunk_size: 40, - // }; - // let mut provider = AbstractProvider::new(config); - // let rlp_header = provider.get_rlp_header(0).await; - // let block_hash = rlp_string_to_block_hash(&rlp_header); - // assert_eq!( - // block_hash, - // "0x25a5cc106eea7138acab33231d7160d69cb777ee0c2c553fcddf5138993e6dd9" - // ); - // let rlp_header = provider.get_rlp_header(5521772).await; - // let block_hash = rlp_string_to_block_hash(&rlp_header); - // assert_eq!( - // block_hash, - // "0xe72515bc74912f67912a64a458e6f2cd2742f8dfe0666e985749483dab0b7b9a" - // ); - // let rlp_header = provider.get_rlp_header(487680).await; - // let block_hash = rlp_string_to_block_hash(&rlp_header); - // assert_eq!( - // block_hash, - // "0xf494127d30817d04b634eae9f6139d8155ee4c78ba60a35bd7be187378e93d6e" - // ); - // } - - #[tokio::test] - async fn get_block_range_from_nonce_range_non_constant() { - let config = AbstractProviderConfig { - rpc_url: SEPOLIA_RPC_URL, - chain_id: 11155111, - rpc_chunk_size: 40, - }; - let provider = AbstractProvider::new(config); - let block_range = provider - .get_tx_with_proof_from_block(5530433, 10, 100, 1) - .await - .unwrap(); - - assert_eq!(block_range.len(), 90); - assert_eq!(block_range[0].encoded_transaction,"0xf873830beeeb84faa6fd50830148209447b854ad2ddb01cfee0b07f4e2da0ac50277b1168806f05b59d3b20000808401546d72a06af2b103dfb7bccc757d575bc11c38f2ecd1a22ca2fcf95a602119582c607927a047329735997e3357dfd7d63eda024d35f7012855aa12ba210f9ed311a517b5e6"); - - let block_range = provider - .get_tx_with_proof_from_block(5530433, 10, 100, 3) - .await - .unwrap(); - - assert_eq!(block_range.len(), 30); - } -} +pub mod provider; +pub mod rpc; diff --git a/crates/provider/src/evm/provider.rs b/crates/provider/src/evm/provider.rs new file mode 100644 
index 00000000..ffcde5dd
--- /dev/null
+++ b/crates/provider/src/evm/provider.rs
@@ -0,0 +1,582 @@
+use alloy::{
+    primitives::{Address, BlockNumber, Bytes, ChainId, StorageKey, TxIndex},
+    rpc::types::EIP1186AccountProofResponse,
+    transports::{RpcError, TransportErrorKind},
+};
+use eth_trie_proofs::{
+    tx_receipt_trie::TxReceiptsMptHandler, tx_trie::TxsMptHandler, EthTrieError,
+};
+use hdp_primitives::{
+    block::header::{MMRMetaFromNewIndexer, MMRProofFromNewIndexer},
+    processed_types::block_proofs::ProcessedBlockProofs,
+};
+use itertools::Itertools;
+use reqwest::Url;
+use std::{
+    collections::{HashMap, HashSet},
+    time::Instant,
+};
+use thiserror::Error;
+use tracing::info;
+
+use crate::{
+    indexer::{Indexer, IndexerError},
+    key::FetchKeyEnvelope,
+    types::{FetchedTransactionProof, FetchedTransactionReceiptProof},
+};
+
+use super::rpc::{RpcProvider, RpcProviderError};
+
+/// The optimal max number of requests to send in parallel when using a non-paid Alchemy RPC URL
+const DEFAULT_MAX_REQUESTS: u64 = 100;
+
+/// Error from [`EvmProvider`]
+#[derive(Error, Debug)]
+pub enum ProviderError {
+    /// Error when the query is invalid
+    #[error("Out of bound: requested index: {0}, length: {1}")]
+    OutOfBoundRequestError(u64, u64),
+
+    /// Error when the MMR meta is mismatched among the range of requested blocks
+    #[error("MMR meta mismatch among range of requested blocks")]
+    MismatchedMMRMeta,
+
+    /// Error from the [`Indexer`]
+    #[error("Failed from indexer: {0}")]
+    IndexerError(#[from] IndexerError),
+
+    /// Error from [`RpcProvider`]
+    #[error("Failed to get proofs: {0}")]
+    RpcProviderError(#[from] RpcProviderError),
+
+    /// Error from [`eth_trie_proofs`]
+    #[error("EthTrieError: {0}")]
+    EthTrieError(#[from] eth_trie_proofs::EthTrieError),
+}
+
+/// EVM provider
+///
+/// This provider is responsible for fetching proofs from the EVM chain.
+/// It uses the RPC provider to fetch account, storage, and transaction proofs
+/// from the EVM chain, and the indexer to fetch header proofs.
+///
+/// Benchmarks live [here](../../benches/provider_benchmark.rs).
+#[derive(Clone)]
+pub struct EvmProvider {
+    /// Account and storage trie provider
+    rpc_provider: RpcProvider,
+    /// Header provider
+    header_provider: Indexer,
+    /// Transaction trie provider URL
+    tx_provider_url: Url,
+}
+
+/// EVM provider configuration
+pub struct EvmProviderConfig {
+    /// RPC url
+    pub rpc_url: Url,
+    /// Chain id
+    pub chain_id: u64,
+    /// Max number of requests to send in parallel.
+    ///
+    /// Defaults to 100; for an archive node, around 1000 is recommended.
+    /// This affects the fetch speed of account and storage proofs.
+    pub max_requests: u64,
+}
+
+impl EvmProvider {
+    pub fn new(config: EvmProviderConfig) -> Self {
+        let rpc_provider = RpcProvider::new(config.rpc_url.clone(), config.max_requests);
+        let header_provider = Indexer::new(config.chain_id);
+
+        Self {
+            rpc_provider,
+            header_provider,
+            tx_provider_url: config.rpc_url,
+        }
+    }
+
+    pub fn new_with_url(url: Url, chain_id: ChainId) -> Self {
+        let rpc_provider = RpcProvider::new(url.clone(), DEFAULT_MAX_REQUESTS);
+        let header_provider = Indexer::new(chain_id);
+
+        Self {
+            rpc_provider,
+            header_provider,
+            tx_provider_url: url,
+        }
+    }
+
+    #[allow(unused)]
+    // TODO: not implemented yet, pending sync with the module compiler
+    pub async fn fetch_proofs_from_keys(
+        &self,
+        fetch_keys: HashSet<FetchKeyEnvelope>,
+    ) -> Result<ProcessedBlockProofs, ProviderError> {
+        todo!("Implement fetch_proofs_from_keys")
+    }
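A minimal usage sketch for the constructors above; the endpoint URL is a placeholder and the block numbers are the Sepolia ones used elsewhere in this crate's tests, so treat this as illustrative rather than part of the patch:

```rust
use hdp_provider::evm::provider::EvmProvider;
use reqwest::Url;

#[tokio::main]
async fn main() {
    // Hypothetical endpoint; any Sepolia RPC URL works here.
    let url = Url::parse("https://example-sepolia-rpc.invalid").unwrap();
    // 11155111 is the Sepolia chain id; DEFAULT_MAX_REQUESTS (100) applies.
    let provider = EvmProvider::new_with_url(url, 11155111);

    // Fetch header proofs for an 11-block range with increment 1.
    let (mmr_meta, headers) = provider
        .get_range_of_header_proofs(6127485, 6127495, 1)
        .await
        .unwrap();
    println!("mmr id: {}, headers fetched: {}", mmr_meta.mmr_id, headers.len());
}
```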
+    /// Fetches the header proofs for the given block range.
+    /// Both the header proofs and the MMR meta are fetched from the indexer.
+    ///
+    /// Return:
+    /// - MMR meta
+    /// - Header proofs mapped by block number
+    pub async fn get_range_of_header_proofs(
+        &self,
+        from_block: BlockNumber,
+        to_block: BlockNumber,
+        increment: u64,
+    ) -> Result<
+        (
+            MMRMetaFromNewIndexer,
+            HashMap<BlockNumber, MMRProofFromNewIndexer>,
+        ),
+        ProviderError,
+    > {
+        let start_fetch = Instant::now();
+
+        let target_blocks_batch: Vec<Vec<BlockNumber>> =
+            self._chunk_block_range(from_block, to_block, increment);
+
+        let mut fetched_headers_proofs_with_blocks_map = HashMap::new();
+        let mut mmr = None;
+
+        for target_blocks in target_blocks_batch {
+            let (start_block, end_block) =
+                (target_blocks[0], target_blocks[target_blocks.len() - 1]);
+
+            let indexer_response = self
+                .header_provider
+                .get_headers_proof(start_block, end_block)
+                .await?;
+
+            // validate that the MMR meta is consistent across the requested range
+            match mmr {
+                None => {
+                    mmr = Some(indexer_response.mmr_meta);
+                }
+                Some(ref existing_mmr) if existing_mmr != &indexer_response.mmr_meta => {
+                    return Err(ProviderError::MismatchedMMRMeta);
+                }
+                _ => {}
+            }
+            fetched_headers_proofs_with_blocks_map.extend(indexer_response.headers);
+        }
+
+        let duration = start_fetch.elapsed();
+        info!("Time taken (Headers Proofs Fetch): {:?}", duration);
+
+        Ok((mmr.unwrap(), fetched_headers_proofs_with_blocks_map))
+    }
+
+    /// Fetches the account proofs for the given block range.
+    /// The account proofs are fetched from the RPC provider.
+    ///
+    /// Return:
+    /// - Account proofs mapped by block number
+    pub async fn get_range_of_account_proofs(
+        &self,
+        from_block: BlockNumber,
+        to_block: BlockNumber,
+        increment: u64,
+        address: Address,
+    ) -> Result<HashMap<BlockNumber, EIP1186AccountProofResponse>, ProviderError> {
+        let start_fetch = Instant::now();
+
+        let target_blocks_batch: Vec<Vec<BlockNumber>> =
+            self._chunk_block_range(from_block, to_block, increment);
+
+        let mut fetched_accounts_proofs_with_blocks_map = HashMap::new();
+        for target_blocks in target_blocks_batch {
+            fetched_accounts_proofs_with_blocks_map.extend(
+                self.rpc_provider
+                    .get_account_proofs(target_blocks, address)
+                    .await?,
+            );
+        }
+
+        let duration = start_fetch.elapsed();
+        info!("Time taken (Account Proofs Fetch): {:?}", duration);
+
+        Ok(fetched_accounts_proofs_with_blocks_map)
+    }
+
+    /// Chunks the block range into smaller ranges of at most 800 blocks.
+    /// This avoids fetching too many blocks at once from the RPC provider.
+    fn _chunk_block_range(
+        &self,
+        from_block: BlockNumber,
+        to_block: BlockNumber,
+        increment: u64,
+    ) -> Vec<Vec<BlockNumber>> {
+        (from_block..=to_block)
+            .step_by(increment as usize)
+            .chunks(800)
+            .into_iter()
+            .map(|chunk| chunk.collect())
+            .collect()
+    }
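To make `_chunk_block_range` concrete: a 2000-block range with increment 1 yields chunk sizes of 800, 800, and 400. A standalone sketch of the same `itertools` pipeline:

```rust
use itertools::Itertools;

fn main() {
    let (from_block, to_block, increment) = (6127485u64, 6127485u64 + 1999, 1u64);
    let chunks: Vec<Vec<u64>> = (from_block..=to_block)
        .step_by(increment as usize)
        .chunks(800)
        .into_iter()
        .map(|chunk| chunk.collect())
        .collect();
    // 2000 blocks split into chunks of at most 800: [800, 800, 400].
    let sizes: Vec<usize> = chunks.iter().map(|c| c.len()).collect();
    assert_eq!(sizes, vec![800, 800, 400]);
}
```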
+    /// Fetches the storage proofs for the given block range.
+    /// The storage proofs are fetched from the RPC provider.
+    ///
+    /// Return:
+    /// - Storage proofs mapped by block number
+    pub async fn get_range_of_storage_proofs(
+        &self,
+        from_block: BlockNumber,
+        to_block: BlockNumber,
+        increment: u64,
+        address: Address,
+        storage_slot: StorageKey,
+    ) -> Result<HashMap<BlockNumber, EIP1186AccountProofResponse>, ProviderError> {
+        let start_fetch = Instant::now();
+
+        let target_blocks_batch: Vec<Vec<BlockNumber>> =
+            self._chunk_block_range(from_block, to_block, increment);
+
+        let mut processed_accounts = HashMap::new();
+        for target_blocks in target_blocks_batch {
+            processed_accounts.extend(
+                self.rpc_provider
+                    .get_storage_proofs(target_blocks, address, storage_slot)
+                    .await?,
+            );
+        }
+
+        let duration = start_fetch.elapsed();
+        info!("Time taken (Storage Proofs Fetch): {:?}", duration);
+
+        Ok(processed_accounts)
+    }
+
+    /// Fetches the encoded transaction with proof from the MPT trie for the given block number.
+    /// The transaction is fetched from the MPT trie and the proof is generated from the MPT trie.
+    ///
+    /// Return:
+    /// - Transaction proofs for the target block
+    pub async fn get_tx_with_proof_from_block(
+        &self,
+        target_block: BlockNumber,
+        start_index: TxIndex,
+        end_index: TxIndex,
+        incremental: u64,
+    ) -> Result<Vec<FetchedTransactionProof>, ProviderError> {
+        let start_fetch = Instant::now();
+
+        let mut fetched_transaction_proofs = vec![];
+        let mut tx_trie_provider = TxsMptHandler::new(self.tx_provider_url.clone())?;
+
+        loop {
+            let trie_response = tx_trie_provider
+                .build_tx_tree_from_block(target_block)
+                .await;
+
+            match trie_response {
+                Ok(_) => break,
+                Err(EthTrieError::RPC(RpcError::Transport(TransportErrorKind::HttpError(
+                    http_error,
+                )))) if http_error.status == 429 => {
+                    // rate limited (HTTP 429): back off briefly, then retry
+                    tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
+                    continue;
+                }
+                Err(err) => return Err(ProviderError::EthTrieError(err)),
+            }
+        }
+
+        let fetched_transactions = tx_trie_provider.get_elements()?;
+        let tx_length = fetched_transactions.len() as u64;
+        let target_tx_index_range = (start_index..end_index).step_by(incremental as usize);
+        for tx_index in target_tx_index_range {
+            // validate out of bound request
+            if tx_index >= tx_length {
+                return Err(ProviderError::OutOfBoundRequestError(tx_index, tx_length));
+            }
+
+            let tx_trie_proof = tx_trie_provider
+                .get_proof(tx_index)
+                .unwrap()
+                .into_iter()
+                .map(Bytes::from)
+                .collect::<Vec<Bytes>>();
+
+            let consensus_tx = fetched_transactions[tx_index as usize].clone();
+            fetched_transaction_proofs.push(FetchedTransactionProof::new(
+                target_block,
+                tx_index,
+                consensus_tx.rlp_encode(),
+                tx_trie_proof,
+                consensus_tx.0.tx_type(),
+            ));
+        }
+
+        let duration = start_fetch.elapsed();
+        info!("Time taken (Transactions Proofs Fetch): {:?}", duration);
+
+        Ok(fetched_transaction_proofs)
+    }
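Both transaction methods validate requested indices against the trie length before proving, which is what the error tests at the bottom of this file rely on (a 93-transaction block rejects index 93). A trivial sketch of that bound check in isolation, with a hypothetical `check_bound` helper mirroring `OutOfBoundRequestError`:

```rust
// Sketch only: mirrors the `tx_index >= tx_length` check above; not part of the crate.
fn check_bound(tx_index: u64, tx_length: u64) -> Result<(), String> {
    if tx_index >= tx_length {
        // Same message shape as ProviderError::OutOfBoundRequestError(tx_index, tx_length).
        return Err(format!(
            "Out of bound: requested index: {tx_index}, length: {tx_length}"
        ));
    }
    Ok(())
}

fn main() {
    assert!(check_bound(92, 93).is_ok());
    assert!(check_bound(93, 93).is_err());
}
```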
+    /// Fetches the transaction receipts with proof from the MPT trie for the given block number.
+    /// The transaction receipts are fetched from the MPT trie and the proof is generated from the MPT trie.
+    ///
+    /// Return:
+    /// - Transaction receipt proofs for the target block
+    pub async fn get_tx_receipt_with_proof_from_block(
+        &self,
+        target_block: BlockNumber,
+        start_index: TxIndex,
+        end_index: TxIndex,
+        incremental: u64,
+    ) -> Result<Vec<FetchedTransactionReceiptProof>, ProviderError> {
+        let start_fetch = Instant::now();
+
+        let mut fetched_transaction_receipts_proofs = vec![];
+        let mut tx_receipt_trie_provider = TxReceiptsMptHandler::new(self.tx_provider_url.clone())?;
+
+        loop {
+            let trie_response = tx_receipt_trie_provider
+                .build_tx_receipts_tree_from_block(target_block)
+                .await;
+
+            match trie_response {
+                Ok(_) => break,
+                Err(EthTrieError::RPC(RpcError::Transport(TransportErrorKind::HttpError(
+                    http_error,
+                )))) if http_error.status == 429 => {
+                    // rate limited (HTTP 429): back off briefly, then retry
+                    tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
+                    continue;
+                }
+                Err(err) => return Err(ProviderError::EthTrieError(err)),
+            }
+        }
+
+        let fetched_transaction_receipts = tx_receipt_trie_provider.get_elements()?;
+        let tx_receipt_length = fetched_transaction_receipts.len() as u64;
+        let target_tx_index_range = (start_index..end_index).step_by(incremental as usize);
+        for tx_index in target_tx_index_range {
+            // validate out of bound request
+            if tx_index >= tx_receipt_length {
+                return Err(ProviderError::OutOfBoundRequestError(
+                    tx_index,
+                    tx_receipt_length,
+                ));
+            }
+
+            let tx_receipt_trie_proof = tx_receipt_trie_provider
+                .get_proof(tx_index)
+                .unwrap()
+                .into_iter()
+                .map(Bytes::from)
+                .collect::<Vec<Bytes>>();
+
+            let consensus_tx_receipt = fetched_transaction_receipts[tx_index as usize].clone();
+            fetched_transaction_receipts_proofs.push(FetchedTransactionReceiptProof::new(
+                target_block,
+                tx_index,
+                consensus_tx_receipt.rlp_encode(),
+                tx_receipt_trie_proof,
+                consensus_tx_receipt.0.tx_type(),
+            ));
+        }
+
+        let duration = start_fetch.elapsed();
+        info!(
+            "Time taken (Transaction Receipts Proofs Fetch): {:?}",
+            duration
+        );
+
+        Ok(fetched_transaction_receipts_proofs)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use alloy::primitives::{address, B256};
+
+    const SEPOLIA_RPC_URL: &str =
+        "https://eth-sepolia.g.alchemy.com/v2/xar76cftwEtqTBWdF4ZFy9n8FLHAETDv";
+
+    #[ignore = "too many requests, recommend to run locally"]
+    #[tokio::test]
+    async fn test_get_2000_range_of_account_proofs() -> Result<(), ProviderError> {
+        let start_time = Instant::now();
+        let provider = EvmProvider::new_with_url(Url::parse(SEPOLIA_RPC_URL).unwrap(), 11155111);
+        let target_address = address!("7f2c6f930306d3aa736b3a6c6a98f512f74036d4");
+        let response = provider
+            .get_range_of_account_proofs(6127485, 6127485 + 2000 - 1, 1, target_address)
+            .await;
+        assert!(response.is_ok());
+        let length = response.unwrap().len();
+        assert_eq!(length, 2000);
+        let duration = start_time.elapsed();
+        println!("Time taken (Account Fetch): {:?}", duration);
+        Ok(())
+    }
+
+    #[ignore = "too many requests, recommend to run locally"]
+    #[tokio::test]
+    async fn test_get_2000_range_of_storage_proofs() -> Result<(), ProviderError> {
+        let start_time = Instant::now();
+        let provider = EvmProvider::new_with_url(Url::parse(SEPOLIA_RPC_URL).unwrap(), 11155111);
+        let target_address = address!("75CeC1db9dCeb703200EAa6595f66885C962B920");
+        let result = provider
+            .get_range_of_storage_proofs(6127485, 6127485 + 2000 - 1, 1, target_address, B256::ZERO)
+            .await;
+        assert!(result.is_ok());
+        let length = result.unwrap().len();
+        assert_eq!(length, 2000);
+        let duration = start_time.elapsed();
+        println!("Time taken (Storage Fetch): 
{:?}", duration); + Ok(()) + } + + #[ignore = "too many requests, recommend to run locally"] + #[tokio::test] + async fn test_get_2000_range_of_header_proofs() -> Result<(), ProviderError> { + let start_time = Instant::now(); + let provider = EvmProvider::new_with_url(Url::parse(SEPOLIA_RPC_URL).unwrap(), 11155111); + let (_meta, header_response) = provider + .get_range_of_header_proofs(6127485, 6127485 + 2000 - 1, 1) + .await?; + assert_eq!(header_response.len(), 2000); + // assert_eq!(meta.mmr_id, 26); + let duration = start_time.elapsed(); + println!("Time taken (Header Fetch): {:?}", duration); + Ok(()) + } + + #[tokio::test] + async fn test_get_parallel_4_all_tx_with_proof_from_block() { + let provider = EvmProvider::new_with_url(Url::parse(SEPOLIA_RPC_URL).unwrap(), 11155111); + + let task1 = { + let provider = provider.clone(); + tokio::spawn(async move { + provider + .get_tx_with_proof_from_block(6127485, 0, 23, 1) + .await + }) + }; + + let task2 = { + let provider = provider.clone(); + tokio::spawn(async move { + provider + .get_tx_with_proof_from_block(6127486, 0, 20, 1) + .await + }) + }; + + let task3 = { + let provider = provider.clone(); + tokio::spawn(async move { + provider + .get_tx_with_proof_from_block(6127487, 1, 1 + 29, 1) + .await + }) + }; + + let task4 = { + let provider = provider.clone(); + tokio::spawn(async move { + provider + .get_tx_with_proof_from_block(6127488, 5, 5 + 75, 1) + .await + }) + }; + + let (result1, result2, result3, result4) = + tokio::try_join!(task1, task2, task3, task4).unwrap(); + // validate result 1 + assert_eq!(result1.unwrap().len(), 23); + // validate result 2 + assert_eq!(result2.unwrap().len(), 20); + // validate result 3 + assert_eq!(result3.unwrap().len(), 29); + // validate result 4 + assert_eq!(result4.unwrap().len(), 75); + } + + #[tokio::test] + async fn test_get_parallel_4_all_tx_receipt_with_proof_from_block() { + let provider = EvmProvider::new_with_url(Url::parse(SEPOLIA_RPC_URL).unwrap(), 11155111); + let task1 = { + let provider = provider.clone(); + tokio::spawn(async move { + provider + .get_tx_receipt_with_proof_from_block(6127485, 0, 23, 1) + .await + }) + }; + + let task2 = { + let provider = provider.clone(); + tokio::spawn(async move { + provider + .get_tx_receipt_with_proof_from_block(6127486, 0, 20, 1) + .await + }) + }; + + let task3 = { + let provider = provider.clone(); + tokio::spawn(async move { + provider + .get_tx_receipt_with_proof_from_block(6127487, 1, 30, 1) + .await + }) + }; + + let task4 = { + let provider = provider.clone(); + tokio::spawn(async move { + provider + .get_tx_receipt_with_proof_from_block(6127488, 5, 80, 1) + .await + }) + }; + + let (result1, result2, result3, result4) = + tokio::try_join!(task1, task2, task3, task4).unwrap(); + + // validate result 1 + assert_eq!(result1.unwrap().len(), 23); + // validate result 2 + assert_eq!(result2.unwrap().len(), 20); + // validate result 3 + assert_eq!(result3.unwrap().len(), 29); + // validate result 4 + assert_eq!(result4.unwrap().len(), 75); + } + + #[tokio::test] + async fn test_error_get_tx_with_proof_from_block() { + let provider = EvmProvider::new_with_url(Url::parse(SEPOLIA_RPC_URL).unwrap(), 11155111); + let response = provider + .get_tx_with_proof_from_block(6127485, 0, 2000, 1) + .await; + assert!(response.is_err()); + assert!(matches!( + response, + Err(ProviderError::OutOfBoundRequestError(93, 93)) + )); + } + + #[tokio::test] + async fn test_error_get_tx_receipt_with_proof_from_block() { + let provider = 
EvmProvider::new_with_url(Url::parse(SEPOLIA_RPC_URL).unwrap(), 11155111);
+        let response = provider
+            .get_tx_receipt_with_proof_from_block(6127485, 0, 2000, 1)
+            .await;
+        assert!(response.is_err());
+        assert!(matches!(
+            response,
+            Err(ProviderError::OutOfBoundRequestError(93, 93))
+        ));
+    }
+}
diff --git a/crates/provider/src/evm/rpc.rs b/crates/provider/src/evm/rpc.rs
new file mode 100644
index 00000000..613c2bc2
--- /dev/null
+++ b/crates/provider/src/evm/rpc.rs
@@ -0,0 +1,316 @@
+use std::{
+    collections::{HashMap, HashSet},
+    sync::Arc,
+    time::{Duration, Instant},
+};
+
+use alloy::{
+    primitives::{Address, BlockNumber, StorageKey},
+    providers::{Provider, RootProvider},
+    rpc::types::EIP1186AccountProofResponse,
+    transports::{
+        http::{Client, Http},
+        RpcError, TransportErrorKind,
+    },
+};
+use futures::future::join_all;
+use reqwest::Url;
+use thiserror::Error;
+use tokio::sync::{
+    mpsc::{self, Sender},
+    RwLock,
+};
+use tracing::debug;
+
+/// Error from [`RpcProvider`]
+#[derive(Error, Debug)]
+pub enum RpcProviderError {
+    #[error("Failed to send proofs over the mpsc channel")]
+    MpscError(
+        #[from]
+        tokio::sync::mpsc::error::SendError<(
+            BlockNumber,
+            alloy::rpc::types::EIP1186AccountProofResponse,
+        )>,
+    ),
+}
+
+/// RPC provider for fetching data from an Ethereum RPC endpoint.
+/// It is a wrapper around the alloy provider, using `eth_getProof` to fetch account and storage proofs.
+///
+/// How to use:
+/// ```rust
+/// use reqwest::Url;
+/// use hdp_provider::evm::rpc::RpcProvider;
+/// use alloy::primitives::Address;
+///
+/// async fn call_provider(url: Url, chunk_size: u64, block_range_start: u64, block_range_end: u64, increment: u64, address: Address) {
+///     let provider = RpcProvider::new(url, chunk_size);
+///     let target_block_range = (block_range_start..=block_range_end).collect::<Vec<u64>>();
+///     let result = provider.get_account_proofs(target_block_range, address).await;
+///     match result {
+///         Ok(proofs) => println!("Fetched proofs: {:?}", proofs),
+///         Err(e) => eprintln!("Error fetching proofs: {:?}", e),
+///     }
+/// }
+/// ```
+#[derive(Clone)]
+pub struct RpcProvider {
+    provider: RootProvider<Http<Client>>,
+    chunk_size: u64,
+}
+
+impl RpcProvider {
+    pub fn new(rpc_url: Url, chunk_size: u64) -> Self {
+        let provider = RootProvider::new_http(rpc_url);
+        Self {
+            provider,
+            chunk_size,
+        }
+    }
+
+    /// Get account proofs for the given vector of blocks
+    pub async fn get_account_proofs(
+        &self,
+        blocks: Vec<BlockNumber>,
+        address: Address,
+    ) -> Result<HashMap<BlockNumber, EIP1186AccountProofResponse>, RpcProviderError> {
+        self.get_proofs(blocks, address, None).await
+    }
+
+    /// Get storage proofs for the given vector of blocks and slot
+    pub async fn get_storage_proofs(
+        &self,
+        block_range: Vec<BlockNumber>,
+        address: Address,
+        storage_key: StorageKey,
+    ) -> Result<HashMap<BlockNumber, EIP1186AccountProofResponse>, RpcProviderError> {
+        self.get_proofs(block_range, address, Some(storage_key))
+            .await
+    }
+
+    /// Generalized function to get proofs (account or storage) for the given vector of blocks
+    async fn get_proofs(
+        &self,
+        blocks: Vec<BlockNumber>,
+        address: Address,
+        storage_key: Option<StorageKey>,
+    ) -> Result<HashMap<BlockNumber, EIP1186AccountProofResponse>, RpcProviderError> {
+        let start_fetch = Instant::now();
+
+        let (rpc_sender, mut rx) = mpsc::channel::<(BlockNumber, EIP1186AccountProofResponse)>(32);
+        self.spawn_proof_fetcher(rpc_sender, blocks, address, storage_key);
+
+        let mut fetched_proofs = HashMap::new();
+        while let Some((block_number, proof)) = rx.recv().await {
+            fetched_proofs.insert(block_number, proof);
+        }
+        let duration = start_fetch.elapsed();
+        debug!("RPC| Time taken (Fetch): {:?}", duration);
+
+        Ok(fetched_proofs)
+    }
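The channel fan-in used by `get_proofs` can be sketched in isolation: a spawned producer sends `(block_number, value)` pairs and the caller drains the receiver into a map. Here `(u64, String)` stands in for `(BlockNumber, EIP1186AccountProofResponse)`; this is a placeholder sketch, not the crate's API:

```rust
use std::collections::HashMap;
use tokio::sync::mpsc;

#[tokio::main]
async fn main() {
    // Channel capacity 32, matching get_proofs above.
    let (tx, mut rx) = mpsc::channel::<(u64, String)>(32);

    // Producer: stands in for spawn_proof_fetcher.
    tokio::spawn(async move {
        for block in 100u64..105 {
            tx.send((block, format!("proof-{block}"))).await.unwrap();
        }
        // Dropping `tx` closes the channel, which ends the drain loop below.
    });

    // Consumer: drain until every sender is dropped, as get_proofs does.
    let mut fetched = HashMap::new();
    while let Some((block, proof)) = rx.recv().await {
        fetched.insert(block, proof);
    }
    assert_eq!(fetched.len(), 5);
}
```

Note that the drain terminates only when all sender clones are dropped, which is why the fetch task takes ownership of `rpc_sender` and its per-future clones.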
+    /// Spawns a task to fetch proofs (account or storage) in parallel, `chunk_size` blocks at a time
+    fn spawn_proof_fetcher(
+        &self,
+        rpc_sender: Sender<(BlockNumber, EIP1186AccountProofResponse)>,
+        blocks: Vec<BlockNumber>,
+        address: Address,
+        storage_key: Option<StorageKey>,
+    ) {
+        let chunk_size = self.chunk_size;
+        let provider_clone = self.provider.clone();
+        let target_blocks_length = blocks.len();
+
+        debug!("Fetching proofs for {} chunk size: {}", address, chunk_size);
+
+        tokio::spawn(async move {
+            let mut try_count = 0;
+            let blocks_map = Arc::new(RwLock::new(HashSet::<BlockNumber>::new()));
+
+            while blocks_map.read().await.len() < target_blocks_length {
+                try_count += 1;
+                if try_count > 50 {
+                    panic!("❗️❗️❗️ Too many retries, failed to fetch all blocks")
+                }
+                let fetched_blocks_clone = blocks_map.read().await.clone();
+
+                let blocks_to_fetch: Vec<BlockNumber> = blocks
+                    .iter()
+                    .filter(|block_number| !fetched_blocks_clone.contains(block_number))
+                    .take(chunk_size as usize)
+                    .cloned()
+                    .collect();
+
+                let fetch_futures = blocks_to_fetch
+                    .into_iter()
+                    .map(|block_number| {
+                        let fetched_blocks_clone = blocks_map.clone();
+                        let rpc_sender = rpc_sender.clone();
+                        let provider_clone = provider_clone.clone();
+                        async move {
+                            let proof =
+                                fetch_proof(&provider_clone, address, block_number, storage_key)
+                                    .await;
+                            handle_proof_result(
+                                proof,
+                                block_number,
+                                fetched_blocks_clone,
+                                rpc_sender,
+                            )
+                            .await;
+                        }
+                    })
+                    .collect::<Vec<_>>();
+
+                join_all(fetch_futures).await;
+            }
+        });
+    }
+}
+
+/// Fetches a proof (account or storage) for the given block number
+async fn fetch_proof(
+    provider: &RootProvider<Http<Client>>,
+    address: Address,
+    block_number: BlockNumber,
+    storage_key: Option<StorageKey>,
+) -> Result<EIP1186AccountProofResponse, RpcError<TransportErrorKind>> {
+    match storage_key {
+        Some(key) => {
+            provider
+                .get_proof(address, vec![key])
+                .block_id(block_number.into())
+                .await
+        }
+        None => {
+            provider
+                .get_proof(address, vec![])
+                .block_id(block_number.into())
+                .await
+        }
+    }
+}
+
+/// Handles the result of a proof fetch operation
+async fn handle_proof_result(
+    proof: Result<EIP1186AccountProofResponse, RpcError<TransportErrorKind>>,
+    block_number: BlockNumber,
+    blocks_map: Arc<RwLock<HashSet<BlockNumber>>>,
+    rpc_sender: Sender<(BlockNumber, EIP1186AccountProofResponse)>,
+) {
+    match proof {
+        Ok(proof) => {
+            let mut blocks_identifier = blocks_map.write().await;
+            rpc_sender
+                .send((block_number, proof))
+                .await
+                .map_err(RpcProviderError::MpscError)
+                .unwrap();
+            blocks_identifier.insert(block_number);
+        }
+        Err(e) => {
+            // On a retryable error (HTTP 429), back off before the outer loop retries this block.
+            if let Some(backoff) = handle_error(e) {
+                let mut delay = backoff;
+                while delay <= 4 {
+                    tokio::time::sleep(Duration::from_secs(delay)).await;
+                    delay *= 2;
+                }
+            }
+        }
+    }
+}
+
+/// Classifies errors: returns an initial backoff (in seconds) for retryable errors, `None` otherwise
+fn handle_error(e: RpcError<TransportErrorKind>) -> Option<u64> {
+    match e {
+        RpcError::Transport(TransportErrorKind::HttpError(http_error))
+            if http_error.status == 429 =>
+        {
+            Some(1)
+        }
+        _ => None,
+    }
+}
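`handle_error` only flags HTTP 429 as retryable; every other error is dropped and the block is picked up again by the outer loop's bookkeeping. A self-contained sketch of the same classify-then-back-off flow, with a hypothetical `classify` standing in for `handle_error` (milliseconds here just to keep the sketch fast):

```rust
use std::time::Duration;

// Hypothetical classifier: Some(initial backoff) when the error is retryable.
fn classify(status: u16) -> Option<u64> {
    // Mirrors handle_error above: only 429 (rate limit) is retryable.
    (status == 429).then_some(1)
}

#[tokio::main]
async fn main() {
    if let Some(mut delay) = classify(429) {
        // Exponential backoff: 1, 2, 4 units, then give up and let the caller retry.
        while delay <= 4 {
            tokio::time::sleep(Duration::from_millis(delay)).await;
            delay *= 2;
        }
    }
}
```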
+#[cfg(test)]
+mod tests {
+    use alloy::primitives::{address, b256, B256, U256};
+
+    use super::*;
+
+    // Non-paid personal alchemy endpoint
+    const SEPOLIA_RPC_URL: &str =
+        "https://eth-sepolia.g.alchemy.com/v2/a-w72ZvoUS0dfMD_LBPAuRzHOlQEhi_m";
+
+    #[tokio::test]
+    async fn test_get_100_range_storage_with_proof_by_storage_key() {
+        let start_time = Instant::now();
+        let rpc_url = Url::parse(SEPOLIA_RPC_URL).unwrap();
+        let provider = RpcProvider::new(rpc_url, 100);
+        let block_range_start = 6127485;
+        let block_range_end = 6127584;
+        let target_block_range =
+            (block_range_start..=block_range_end).collect::<Vec<BlockNumber>>();
+        let target_address = address!("75CeC1db9dCeb703200EAa6595f66885C962B920");
+        let target_key = b256!("3c2b98cf472a02b84793a789af8876a73167e29a1a4f8bdbcb51dbfef0a75d7b");
+        let result = provider
+            .get_storage_proofs(target_block_range, target_address, target_key)
+            .await;
+        assert!(result.is_ok());
+        let result = result.unwrap();
+        let length = result.len();
+        assert_eq!(length, 100);
+        let value = result.get(&6127485).unwrap();
+        assert_eq!(value.storage_proof[0].key.0, target_key);
+        assert_eq!(value.storage_proof[0].value, U256::from(20000000000000u64));
+        let duration = start_time.elapsed();
+        println!("Time taken (Storage Fetch): {:?}", duration);
+    }
+
+    #[tokio::test]
+    async fn test_get_100_range_storage_with_proof_by_storage_slot() {
+        let start_time = Instant::now();
+        let rpc_url = Url::parse(SEPOLIA_RPC_URL).unwrap();
+        let provider = RpcProvider::new(rpc_url, 100);
+        let block_range_start = 6127485;
+        let block_range_end = 6127584;
+        let target_block_range =
+            (block_range_start..=block_range_end).collect::<Vec<BlockNumber>>();
+        let target_address = address!("75CeC1db9dCeb703200EAa6595f66885C962B920");
+        let target_slot = B256::from(U256::from(1));
+        let result = provider
+            .get_storage_proofs(target_block_range, target_address, target_slot)
+            .await;
+        assert!(result.is_ok());
+        let result = result.unwrap();
+        let length = result.len();
+        assert_eq!(length, 100);
+        let value = result.get(&6127485).unwrap();
+        assert_eq!(value.storage_proof[0].key.0, target_slot);
+        assert_eq!(value.storage_proof[0].value, U256::from(20000000000000u64));
+        let duration = start_time.elapsed();
+        println!("Time taken (Storage Fetch): {:?}", duration);
+    }
+
+    #[tokio::test]
+    async fn test_get_100_range_account_with_proof() {
+        let start_time = Instant::now();
+        let rpc_url = Url::parse(SEPOLIA_RPC_URL).unwrap();
+        let provider = RpcProvider::new(rpc_url, 100);
+        let block_range_start = 6127485;
+        let block_range_end = 6127584;
+        let target_block_range =
+            (block_range_start..=block_range_end).collect::<Vec<BlockNumber>>();
+        let target_address = address!("7f2c6f930306d3aa736b3a6c6a98f512f74036d4");
+
+        let result = provider
+            .get_account_proofs(target_block_range, target_address)
+            .await;
+        assert!(result.is_ok());
+        let length = result.unwrap().len();
+        assert_eq!(length, 100);
+        let duration = start_time.elapsed();
+        println!("Time taken (Account Fetch): {:?}", duration);
+    }
+}
diff --git a/crates/provider/src/evm/rpc_provider.rs b/crates/provider/src/evm/rpc_provider.rs
deleted file mode 100644
index df7260ae..00000000
--- a/crates/provider/src/evm/rpc_provider.rs
+++ /dev/null
@@ -1,464 +0,0 @@
-use std::{
-    collections::{HashMap, HashSet},
-    sync::Arc,
-    vec,
-};
-
-use anyhow::{anyhow, bail, Result};
-use futures::future::join_all;
-use reqwest::{header, Client};
-use serde_json::{from_value, json, Value};
-
-use hdp_primitives::block::{
-    account::{Account, AccountProofFromRpc},
-    header::{MMRFromNewIndexer, MMRMetaFromNewIndexer, MMRProofFromNewIndexer},
-};
-use tokio::sync::{mpsc::Sender, RwLock};
-use tracing::debug;
-
-#[derive(Debug, Clone)]
-pub struct FetchedAccountProof {
-    pub block_number: u64,
-    pub encoded_account: String,
-    pub account_proof: Vec<String>,
-}
-
-/// Fetched storage and account proof and its value
-#[derive(Debug, Clone)]
-pub struct FetchedStorageAccountProof {
-    pub block_number: u64,
-    pub encoded_account: String,
-    pub account_proof: Vec<String>,
-    pub storage_value: String,
-    pub storage_proof: Vec<String>,
-}
-
-#[derive(Debug, Clone)]
-pub struct FetchedTransactionProof {
-    pub block_number: u64,
-    pub tx_index: u64,
-    pub encoded_transaction: String,
-    pub transaction_proof: Vec<String>,
-    pub tx_type: u8,
-}
-
-#[derive(Debug, Clone)]
-pub struct FetchedTransactionReceiptProof {
-    pub 
block_number: u64, - pub tx_index: u64, - pub encoded_receipt: String, - pub receipt_proof: Vec, - pub tx_type: u8, -} - -pub struct HeaderProvider { - client: Client, - pub url: &'static str, - chain_id: u64, -} - -impl HeaderProvider { - pub fn new(rpc_url: &'static str, chain_id: u64) -> Self { - Self { - client: Client::new(), - url: rpc_url, - chain_id, - } - } - - // TODO: result should not chunked - pub async fn get_sequencial_headers_and_mmr_from_indexer( - &self, - from_block: u64, - to_block: u64, - ) -> Result<(MMRMetaFromNewIndexer, HashMap)> { - let query_params = vec![ - ("deployed_on_chain".to_string(), self.chain_id.to_string()), - ("accumulates_chain".to_string(), self.chain_id.to_string()), - ("hashing_function".to_string(), "poseidon".to_string()), - ("contract_type".to_string(), "AGGREGATOR".to_string()), - ( - "from_block_number_inclusive".to_string(), - from_block.to_string(), - ), - ( - "to_block_number_inclusive".to_string(), - to_block.to_string(), - ), - ("is_meta_included".to_string(), "true".to_string()), - ("is_whole_tree".to_string(), "true".to_string()), - ("is_rlp_included".to_string(), "true".to_string()), - ("is_pure_rlp".to_string(), "true".to_string()), - ]; - - let url = format!("{}/proofs", &self.url); - - let response = self - .client - .get(url) - .header(header::CONTENT_TYPE, "application/json") - .query(&query_params) - .send() - .await - .map_err(|e| anyhow!("Failed to send request: {}", e))?; - - // Check if the response status is success - if !response.status().is_success() { - bail!( - "rs-indexer request failed with status: {}", - response.status() - ); - } - - // Parse the response body as JSON - let rpc_response: Value = response - .json() - .await - .map_err(|e| anyhow!("Failed to parse response: {}", e))?; - - let mmr_from_indexer: MMRFromNewIndexer = from_value(rpc_response)?; - - if mmr_from_indexer.data.is_empty() { - bail!( - "No MMR data found for block numbers: {} - {}", - from_block, - to_block - ); - } else if mmr_from_indexer.data.len() > 1 { - bail!( - "More than one MMR data found for block numbers: {} - {}", - from_block, - to_block - ); - } else { - // As we are requesting for one tree, we expect only one tree to be returned - // sort the proofs by block number - // TODO: This sorting should be done in the indexer side - let mut mmr_from_indexer_map: HashMap = HashMap::new(); - for proof in &mmr_from_indexer.data[0].proofs { - mmr_from_indexer_map.insert(proof.block_number, proof.clone()); - } - - Ok((mmr_from_indexer.data[0].meta.clone(), mmr_from_indexer_map)) - } - } -} - -pub struct TrieProofProvider { - client: Client, - pub url: &'static str, - chunk_size: u64, -} - -impl TrieProofProvider { - pub fn new(rpc_url: &'static str, chunk_size: u64) -> Self { - Self { - client: Client::new(), - url: rpc_url, - chunk_size, - } - } - - pub async fn get_account_proofs( - &self, - rpc_sender: Sender, - block_numbers: Vec, - address: &str, - ) { - let url = self.url; - let address = address.to_string(); - let chunk_size = self.chunk_size; - - debug!( - "Fetching account proofs for {} chunk size: {}", - address, chunk_size - ); - - tokio::spawn(async move { - let mut try_count = 0; - let blocks_map = Arc::new(RwLock::new(HashSet::::new())); - - while blocks_map.read().await.len() < block_numbers.len() { - try_count += 1; - if try_count > 50 { - panic!("❗️❗️❗️ Too many retries, failed to fetch all blocks") - } - let fetched_blocks_clone = blocks_map.read().await.clone(); - let blocks_to_fetch: Vec = block_numbers - .iter() - 
.filter(|block_number| !fetched_blocks_clone.contains(*block_number)) - .take(chunk_size as usize) - .cloned() - .collect(); - - let fetch_futures = blocks_to_fetch - .iter() - .map(|block_number| { - let fetched_blocks_clone = blocks_map.clone(); - let rpc_sender = rpc_sender.clone(); - let address = address.clone(); - async move { - let account_from_rpc = TrieProofProvider::new(url, chunk_size) - .get_proof(*block_number, &address, None) - .await; - match account_from_rpc { - Ok(account_from_rpc) => { - let mut blocks_identifier = fetched_blocks_clone.write().await; - let account = Account::from(&account_from_rpc); - let encoded_account = account.rlp_encode(); - let account_proof = account_from_rpc.account_proof; - let mpt_proof = FetchedAccountProof { - block_number: *block_number, - encoded_account, - account_proof, - }; - rpc_sender.send(mpt_proof).await.unwrap(); - blocks_identifier.insert(*block_number); - } - Err(_) => { - // println!( - // "Failed to fetch block number: {}, error: {}", - // block_number, e - // ); - } - } - } - }) - .collect::>(); - - join_all(fetch_futures).await; - } - }); - } - - pub async fn get_storage_proofs( - &self, - rpc_sender: Sender, - block_numbers: Vec, - address: &str, - slot: String, - ) { - let url = self.url; - let address = address.to_string(); - let chunk_size = self.chunk_size; - - tokio::spawn(async move { - let mut try_count = 0; - let blocks_map = Arc::new(RwLock::new(HashSet::::new())); - - while blocks_map.read().await.len() < block_numbers.len() { - try_count += 1; - if try_count > 50 { - panic!("❗️❗️❗️ Too many retries, failed to fetch all blocks") - } - let fetched_blocks_clone = blocks_map.read().await.clone(); - let blocks_to_fetch: Vec = block_numbers - .iter() - .filter(|block_number| !fetched_blocks_clone.contains(*block_number)) - .take(chunk_size as usize) - .cloned() - .collect(); - - // println!("Fetching blocks: {:?}", blocks_to_fetch); - - let fetch_futures = blocks_to_fetch - .iter() - .map(|block_number| { - let fetched_blocks_clone = blocks_map.clone(); - let rpc_sender = rpc_sender.clone(); - let address = address.clone(); - let slot = slot.clone(); - async move { - let account_from_rpc = TrieProofProvider::new(url, chunk_size) - .get_proof(*block_number, &address, Some(vec![slot.clone()])) - .await; - match account_from_rpc { - Ok(account_from_rpc) => { - let mut blocks_identifier = fetched_blocks_clone.write().await; - let account = Account::from(&account_from_rpc); - let encoded_account = account.rlp_encode(); - let storage = &account_from_rpc.storage_proof[0]; - let storage_value = storage.value.clone(); - let storage_proof = - account_from_rpc.storage_proof[0].proof.clone(); - let account_proof = account_from_rpc.account_proof; - let mpt_proof = FetchedStorageAccountProof { - block_number: *block_number, - encoded_account, - account_proof, - storage_value, - storage_proof, - }; - rpc_sender.send(mpt_proof).await.unwrap(); - blocks_identifier.insert(*block_number); - } - Err(_) => { - // println!( - // "Failed to fetch block number: {}, error: {}", - // block_number, e - // ); - } - } - } - }) - .collect::>(); - - join_all(fetch_futures).await; - } - }); - } - - async fn get_proof( - &self, - block_number: u64, - address: &str, - storage_keys: Option>, - ) -> Result { - let storage_key_param = storage_keys.unwrap_or_default(); - - let target_num = if block_number == u64::MAX { - "latest".to_string() - } else { - format!("0x{:x}", block_number) - }; - - let rpc_request: Value = json!({ - "jsonrpc": "2.0", - "method": 
"eth_getProof", - "params": [ - address, - storage_key_param, - target_num, - ], - "id": 1, - }); - - let response = self - .client - .post(self.url) - .header(header::CONTENT_TYPE, "application/json") - .json(&rpc_request) - .send() - .await - .map_err(|e| anyhow!("Failed to send request: {}", e))?; - - // Check if the response status is success - if !response.status().is_success() { - bail!( - "RPC request `eth_getProof` failed with status: {}", - response.status() - ); - } - - // Parse the response body as JSON - let rpc_response: Value = response - .json() - .await - .map_err(|e| anyhow!("Failed to parse response: {}", e))?; - let result = &rpc_response["result"]; - - let account_from_rpc: AccountProofFromRpc = from_value(result.clone())?; - - // Error handling for empty proof (no account found) - if account_from_rpc.account_proof.is_empty() { - bail!( - "No account found for address {} in blocknumber {}", - address, - block_number - ); - } - - // For now we only request for one storage key - if !storage_key_param.is_empty() && account_from_rpc.storage_proof[0].proof.is_empty() { - bail!( - "No storage proof found for address {} in blocknumber {}", - address, - block_number - ); - } - - Ok(account_from_rpc) - } -} - -#[cfg(test)] -mod tests { - use std::str::FromStr; - - use alloy_primitives::{FixedBytes, U256}; - use hdp_primitives::block::account::Account; - - use super::*; - - const HERODOTUS_RS_INDEXER_URL: &str = "https://rs-indexer.api.herodotus.cloud/accumulators"; - - #[tokio::test] - async fn test_get_sepolia_sequencial_headers_and_mmr_from_indexer() { - let rpc_provider = HeaderProvider::new(HERODOTUS_RS_INDEXER_URL, 11155111); - - let block_header = rpc_provider - .get_sequencial_headers_and_mmr_from_indexer(4952200, 4952229) - .await - .unwrap(); - - let _ = &block_header.0; - // assert_eq!(mmr_meta.mmr_id, 2); - let length = block_header.1.len(); - assert_eq!(length, 30); - let block_4952200 = block_header.1.get(&4952200).unwrap(); - assert_eq!(block_4952200.block_number, 4952200); - - let block_4952229 = block_header.1.get(&4952229).unwrap(); - assert_eq!(block_4952229.block_number, 4952229); - } - - #[tokio::test] - async fn test_get_mainnet_sequencial_headers_and_mmr_from_indexer() { - let rpc_provider = HeaderProvider::new(HERODOTUS_RS_INDEXER_URL, 1); - - let block_header = rpc_provider - .get_sequencial_headers_and_mmr_from_indexer(4952200, 4952229) - .await - .unwrap(); - - let mmr_meta = &block_header.0; - assert_eq!(mmr_meta.mmr_id, 6); - let length = block_header.1.len(); - assert_eq!(length, 30); - let block_4952200 = block_header.1.get(&4952200).unwrap(); - assert_eq!(block_4952200.block_number, 4952200); - - let block_4952229 = block_header.1.get(&4952229).unwrap(); - assert_eq!(block_4952229.block_number, 4952229); - } - - // Non-paid personal alchemy endpoint - const SEPOLIA_RPC_URL: &str = - "https://eth-sepolia.g.alchemy.com/v2/a-w72ZvoUS0dfMD_LBPAuRzHOlQEhi_m"; - - const SEPOLIA_TARGET_ADDRESS: &str = "0x7f2c6f930306d3aa736b3a6c6a98f512f74036d4"; - - #[tokio::test] - async fn test_rpc_get_proof() { - let rpc_provider = TrieProofProvider::new(SEPOLIA_RPC_URL, 40); - - let account_from_rpc = rpc_provider - .get_proof(4952229, SEPOLIA_TARGET_ADDRESS, None) - .await - .unwrap(); - let account: Account = Account::from(&account_from_rpc); - let expected_account = Account::new( - 6789, - U256::from_str_radix("41694965332469803456", 10).unwrap(), - FixedBytes::from_str( - "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", - ) - .unwrap(), - 
FixedBytes::from_str(
-                "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
-            )
-            .unwrap(),
-        );
-        assert_eq!(account, expected_account);
-    }
-}
diff --git a/crates/provider/src/indexer.rs b/crates/provider/src/indexer.rs
new file mode 100644
index 00000000..2bd7bc23
--- /dev/null
+++ b/crates/provider/src/indexer.rs
@@ -0,0 +1,202 @@
+use alloy::primitives::{BlockNumber, ChainId};
+use hdp_primitives::block::header::{
+    MMRDataFromNewIndexer, MMRFromNewIndexer, MMRMetaFromNewIndexer, MMRProofFromNewIndexer,
+};
+use reqwest::Client;
+use serde_json::{from_value, Value};
+use std::collections::HashMap;
+use thiserror::Error;
+use tracing::error;
+
+pub const HERODOTUS_RS_INDEXER_URL: &str =
+    "https://rs-indexer.api.herodotus.cloud/accumulators/proofs";
+
+/// Error from [`Indexer`]
+#[derive(Error, Debug)]
+pub enum IndexerError {
+    /// The block range provided is invalid.
+    #[error("Invalid block range")]
+    InvalidBlockRange,
+
+    /// Failed to send a request using [`reqwest`].
+    #[error("Failed to send request")]
+    ReqwestError(#[from] reqwest::Error),
+
+    /// Failed to parse the response using [`serde_json`].
+    #[error("Failed to parse response")]
+    SerdeJsonError(#[from] serde_json::Error),
+
+    /// Validation error with a detailed message.
+    #[error("Validation error: {0}")]
+    ValidationError(String),
+
+    /// Failed to get headers proof with a detailed message.
+    #[error("Failed to get headers proof: {0}")]
+    GetHeadersProofError(String),
+}
+
+/// Indexer client for fetching MMR and header proofs from the Herodotus indexer.
+///
+/// For more information, see: https://rs-indexer.api.herodotus.cloud/swagger
+///
+/// How to use:
+/// ```rust
+/// use hdp_provider::indexer::{Indexer, IndexerError};
+///
+/// async fn call_indexer(chain_id: u64, block_range_start: u64, block_range_end: u64) -> Result<(), IndexerError> {
+///     let indexer = Indexer::new(chain_id);
+///     let response = indexer.get_headers_proof(block_range_start, block_range_end).await?;
+///     Ok(())
+/// }
+/// ```
+#[derive(Clone)]
+pub struct Indexer {
+    client: Client,
+    chain_id: u64,
+}
+
+#[derive(Debug)]
+pub struct IndexerHeadersProofResponse {
+    pub mmr_meta: MMRMetaFromNewIndexer,
+    pub headers: HashMap<BlockNumber, MMRProofFromNewIndexer>,
+}
+
+impl IndexerHeadersProofResponse {
+    pub fn new(mmr_data: MMRDataFromNewIndexer) -> Self {
+        let mmr_meta = mmr_data.meta;
+        let headers = mmr_data
+            .proofs
+            .into_iter()
+            .map(|block| (block.block_number, block))
+            .collect();
+        Self { mmr_meta, headers }
+    }
+}
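A sketch of consuming `IndexerHeadersProofResponse` above, assuming a successful `get_headers_proof` call against a range the indexer has already accumulated (the Sepolia chain id and block numbers mirror this crate's tests):

```rust
use hdp_provider::indexer::Indexer;

#[tokio::main]
async fn main() {
    // 11155111 = Sepolia, as in the tests below.
    let indexer = Indexer::new(11155111);
    let response = indexer.get_headers_proof(6127485, 6127494).await.unwrap();
    // Proofs are keyed by block number for O(1) lookup.
    let proof = response.headers.get(&6127485).unwrap();
    assert_eq!(proof.block_number, 6127485);
    println!("mmr id: {}", response.mmr_meta.mmr_id);
}
```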
+    pub async fn get_headers_proof(
+        &self,
+        from_block: BlockNumber,
+        to_block: BlockNumber,
+    ) -> Result<IndexerHeadersProofResponse, IndexerError> {
+        // validate from_block and to_block
+        if from_block > to_block {
+            return Err(IndexerError::InvalidBlockRange);
+        }
+
+        let target_length = (to_block - from_block + 1) as usize;
+
+        let response = self
+            .client
+            .get(HERODOTUS_RS_INDEXER_URL)
+            .query(&self._query(from_block, to_block, self.chain_id))
+            .send()
+            .await
+            .map_err(IndexerError::ReqwestError)?;
+
+        // validate status
+        if response.status().is_success() {
+            let body: Value = response.json().await.map_err(IndexerError::ReqwestError)?;
+            let parsed_mmr: MMRFromNewIndexer =
+                from_value(body).map_err(IndexerError::SerdeJsonError)?;
+
+            // validate that the indexer returned exactly one MMR
+            if parsed_mmr.data.is_empty() {
+                return Err(IndexerError::ValidationError("No MMR found".to_string()));
+            }
+            if parsed_mmr.data.len() > 1 {
+                return Err(IndexerError::ValidationError(
+                    "MMR length should be 1".to_string(),
+                ));
+            }
+
+            // validate header response length
+            if parsed_mmr.data[0].proofs.len() != target_length {
+                return Err(IndexerError::ValidationError(
+                    "Indexer did not return the requested number of headers".to_string(),
+                ));
+            }
+
+            let mmr_data = parsed_mmr.data[0].clone();
+            Ok(IndexerHeadersProofResponse::new(mmr_data))
+        } else {
+            error!(
+                "Failed to get headers proof from rs-indexer: {}",
+                response.status()
+            );
+            Err(IndexerError::GetHeadersProofError(
+                response.text().await.map_err(IndexerError::ReqwestError)?,
+            ))
+        }
+    }
+
+    /// Build the query parameters for the rs-indexer accumulators endpoint;
+    /// both block bounds are inclusive.
+    fn _query(
+        &self,
+        from_block: BlockNumber,
+        to_block: BlockNumber,
+        chain_id: ChainId,
+    ) -> Vec<(String, String)> {
+        vec![
+            ("deployed_on_chain".to_string(), chain_id.to_string()),
+            ("accumulates_chain".to_string(), chain_id.to_string()),
+            ("hashing_function".to_string(), "poseidon".to_string()),
+            ("contract_type".to_string(), "AGGREGATOR".to_string()),
+            (
+                "from_block_number_inclusive".to_string(),
+                from_block.to_string(),
+            ),
+            (
+                "to_block_number_inclusive".to_string(),
+                to_block.to_string(),
+            ),
+            ("is_meta_included".to_string(), "true".to_string()),
+            ("is_whole_tree".to_string(), "true".to_string()),
+            ("is_rlp_included".to_string(), "true".to_string()),
+            ("is_pure_rlp".to_string(), "true".to_string()),
+        ]
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[tokio::test]
+    async fn test_get_headers_proof() -> Result<(), IndexerError> {
+        let indexer = Indexer::new(11155111);
+        let response = indexer.get_headers_proof(1, 1).await?;
+        assert_eq!(response.headers.len(), 1);
+        Ok(())
+    }
+
+    #[tokio::test]
+    async fn test_get_headers_proof_multiple_blocks() -> Result<(), IndexerError> {
+        let indexer = Indexer::new(11155111);
+        let response = indexer.get_headers_proof(0, 10).await?;
+        assert_eq!(response.headers.len(), 11);
+        Ok(())
+    }
+
+    #[tokio::test]
+    async fn test_invalid_query() {
+        let indexer = Indexer::new(11155111);
+        let response = indexer.get_headers_proof(10, 1).await;
+        assert!(matches!(response, Err(IndexerError::InvalidBlockRange)));
+    }
+}
diff --git a/crates/provider/src/key.rs b/crates/provider/src/key.rs
index 10c8f949..39cb7979 100644
--- a/crates/provider/src/key.rs
+++ b/crates/provider/src/key.rs
@@ -1,8 +1,11 @@
 //! Provider keys for fetching data from memoizer and rpc.
+//! Only used in the context of the Module Compiler.
+//!
+//! TODO: need to sync with how bootloader will emit the keys
 
 use std::str::FromStr;
 
-use alloy_primitives::{Address, BlockNumber, ChainId, StorageKey};
+use alloy::primitives::{Address, BlockNumber, ChainId, StorageKey};
 
 macro_rules! impl_hash_for_provider_key {
     // Match a struct with an identifier and any number of fields.
diff --git a/crates/provider/src/lib.rs b/crates/provider/src/lib.rs
index b7c97d41..28705bc0 100644
--- a/crates/provider/src/lib.rs
+++ b/crates/provider/src/lib.rs
@@ -1,4 +1,4 @@
-#![deny(unused_crate_dependencies)]
-
 pub mod evm;
+pub mod indexer;
 pub mod key;
+pub mod types;
diff --git a/crates/provider/src/types.rs b/crates/provider/src/types.rs
new file mode 100644
index 00000000..b8eacb33
--- /dev/null
+++ b/crates/provider/src/types.rs
@@ -0,0 +1,63 @@
+//! Types for the provider crate: `FetchedTransactionProof` and
+//! `FetchedTransactionReceiptProof`.
+//!
+//! These types bind an encoded transaction or receipt to its block number,
+//! transaction index, and proof.
+
+use alloy::{
+    consensus::TxType,
+    primitives::{BlockNumber, Bytes, TxIndex},
+};
+
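+/// An encoded transaction bound to its block number, index within the block, and proof.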
+#[derive(Debug, Clone)]
+pub struct FetchedTransactionProof {
+    pub block_number: BlockNumber,
+    pub tx_index: TxIndex,
+    pub encoded_transaction: Vec<u8>,
+    pub transaction_proof: Vec<Bytes>,
+    pub tx_type: TxType,
+}
+
+impl FetchedTransactionProof {
+    pub fn new(
+        block_number: BlockNumber,
+        tx_index: TxIndex,
+        encoded_transaction: Vec<u8>,
+        transaction_proof: Vec<Bytes>,
+        tx_type: TxType,
+    ) -> Self {
+        Self {
+            block_number,
+            tx_index,
+            encoded_transaction,
+            transaction_proof,
+            tx_type,
+        }
+    }
+}
+
+/// An encoded receipt bound to its block number, index within the block, and proof.
+#[derive(Debug, Clone)]
+pub struct FetchedTransactionReceiptProof {
+    pub block_number: BlockNumber,
+    pub tx_index: TxIndex,
+    pub encoded_receipt: Vec<u8>,
+    pub receipt_proof: Vec<Bytes>,
+    pub tx_type: TxType,
+}
+
+impl FetchedTransactionReceiptProof {
+    pub fn new(
+        block_number: BlockNumber,
+        tx_index: TxIndex,
+        encoded_receipt: Vec<u8>,
+        receipt_proof: Vec<Bytes>,
+        tx_type: TxType,
+    ) -> Self {
+        Self {
+            block_number,
+            tx_index,
+            encoded_receipt,
+            receipt_proof,
+            tx_type,
+        }
+    }
+}
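+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // A minimal construction sketch: the bytes below are placeholders, not real
+    // RLP-encoded transaction data or proof nodes; values are illustrative only.
+    #[test]
+    fn construct_fetched_transaction_proof() {
+        let proof = FetchedTransactionProof::new(
+            4952229,
+            0,
+            vec![0x01, 0x02, 0x03],
+            vec![Bytes::from_static(&[0xde, 0xad, 0xbe, 0xef])],
+            TxType::Eip1559,
+        );
+        assert_eq!(proof.block_number, 4952229);
+        assert_eq!(proof.tx_type, TxType::Eip1559);
+    }
+}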