Skip to content

Commit

Permalink
Merge pull request #134 from HerodotusDev/starknet
Browse files Browse the repository at this point in the history
feat: chain id refactoring
  • Loading branch information
rkdud007 authored Sep 3, 2024
2 parents 34f9455 + 90c3ca5 commit 2cbf18b
Show file tree
Hide file tree
Showing 31 changed files with 814 additions and 288 deletions.
12 changes: 7 additions & 5 deletions .env.example
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
# Note that CHAIN_ID and RPC_URL are both required for fetch data
CHAIN_ID=11155111
RPC_URL=https://goerli.infura.io/v3/your-infura-api-key
# Note that RPC_URL_{CHAIN_ID} is required to fetch data
RPC_URL_ETHEREUM_SEPOLIA=https://sepolia.infura.io/v3/your-infura-api-key
# this value is optional
RPC_CHUNK_SIZE_ETHEREUM_SEPOLIA=2000

# Optional
RPC_CHUNK_SIZE=2000
DRY_RUN_CAIRO_PATH= # path for dry run cairo
SOUND_RUN_CAIRO_PATH= # path for sound run cairo
SOUND_RUN_CAIRO_PATH= # path for sound run cairo
SAVE_FETCH_KEYS_FILE= # path for dry run output file
12 changes: 4 additions & 8 deletions cli/src/cli.rs
Original file line number Diff line number Diff line change
Expand Up @@ -93,8 +93,6 @@ pub async fn process_entry_run(args: ProcessArgs) -> Result<()> {

pub async fn module_entry_run(args: RunModuleArgs) -> Result<()> {
let config = hdp_run::HdpRunConfig::init(
args.rpc_url,
args.chain_id,
args.dry_run_cairo_file,
args.sound_run_cairo_file,
args.program_input_file,
Expand All @@ -120,8 +118,6 @@ pub async fn module_entry_run(args: RunModuleArgs) -> Result<()> {

pub async fn datalake_entry_run(args: RunDatalakeArgs) -> Result<()> {
let config = hdp_run::HdpRunConfig::init(
args.rpc_url,
args.chain_id,
None,
args.sound_run_cairo_file,
args.program_input_file,
Expand All @@ -132,26 +128,28 @@ pub async fn datalake_entry_run(args: RunDatalakeArgs) -> Result<()> {
);
let parsed_datalake = match args.datalake {
DataLakeCommands::BlockSampled {
chain_id,
block_range_start,
block_range_end,
sampled_property,
increment,
} => DatalakeEnvelope::BlockSampled(BlockSampledDatalake::new(
11155111,
chain_id,
block_range_start,
block_range_end,
increment,
sampled_property,
)),
DataLakeCommands::TransactionsInBlock {
chain_id,
target_block,
sampled_property,
start_index,
end_index,
increment,
included_types,
} => DatalakeEnvelope::TransactionsInBlock(TransactionsInBlockDatalake::new(
11155111,
chain_id,
target_block,
sampled_property,
start_index,
Expand All @@ -175,8 +173,6 @@ pub async fn entry_run(args: RunArgs) -> Result<()> {
let parsed: SubmitBatchQuery = serde_json::from_str(&request_context)
.expect("Invalid format of request. Cannot parse it.");
let config = hdp_run::HdpRunConfig::init(
args.rpc_url,
Some(parsed.destination_chain_id),
args.dry_run_cairo_file,
args.sound_run_cairo_file,
args.program_input_file,
Expand Down
7 changes: 0 additions & 7 deletions cli/src/commands/run.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
use crate::commands::Parser;
use starknet::providers::Url;
use std::path::PathBuf;

#[derive(Parser, Debug)]
Expand All @@ -8,12 +7,6 @@ pub struct RunArgs {
#[arg(short, long)]
pub request_file: PathBuf,

/// The RPC URL to fetch the data.
///
/// Can be overwritten by `RPC_URL` environment variable.
#[arg(long)]
pub rpc_url: Option<Url>,

/// dry run contract bootloader program.
/// only used for module task
#[arg(long)]
Expand Down
14 changes: 6 additions & 8 deletions cli/src/commands/run_datalake.rs
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
use std::path::PathBuf;

use alloy::primitives::{BlockNumber, ChainId, TxIndex};
use alloy::primitives::{BlockNumber, TxIndex};
use clap::{arg, command, Parser, Subcommand};
use hdp::primitives::{
aggregate_fn::{AggregationFunction, FunctionContext},
task::datalake::{
block_sampled::BlockSampledCollection,
transactions::{IncludedTypes, TransactionsCollection},
},
ChainId,
};
use starknet::providers::Url;

#[derive(Parser, Debug)]
pub struct RunDatalakeArgs {
Expand All @@ -23,12 +23,6 @@ pub struct RunDatalakeArgs {
#[command(subcommand)]
pub datalake: DataLakeCommands,

/// The RPC URL to fetch the datalake
pub rpc_url: Option<Url>,

/// The chain id to fetch the datalake
pub chain_id: Option<ChainId>,

/// Path to save program input file after pre-processing.
///
/// This will be input data for cairo program
Expand Down Expand Up @@ -63,6 +57,8 @@ pub enum DataLakeCommands {
#[command(arg_required_else_help = true)]
#[command(short_flag = 's')]
BlockSampled {
/// Chain id
chain_id: ChainId,
/// Block number range start (inclusive)
block_range_start: BlockNumber,
/// Block number range end (inclusive)
Expand All @@ -77,6 +73,8 @@ pub enum DataLakeCommands {
#[command(arg_required_else_help = true)]
#[command(short_flag = 't')]
TransactionsInBlock {
/// Chain id
chain_id: ChainId,
/// Target block number
target_block: BlockNumber,
/// Sampled property
Expand Down
17 changes: 1 addition & 16 deletions cli/src/commands/run_module.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,5 @@
use std::path::PathBuf;

use alloy::primitives::ChainId;
use clap::{arg, Parser};
use starknet::providers::Url;
use std::path::PathBuf;

#[derive(Parser, Debug)]
pub struct RunModuleArgs {
Expand Down Expand Up @@ -30,18 +27,6 @@ pub struct RunModuleArgs {
#[arg(long)]
pub save_fetch_keys_file: Option<PathBuf>,

/// The RPC URL to fetch the data.
///
/// Can be overwritten by `RPC_URL` environment variable.
#[arg(long)]
pub rpc_url: Option<Url>,

/// The chain id to fetch the data.
///
/// Can be overwritten by `CHAIN_ID` environment variable
#[arg(long)]
pub chain_id: Option<ChainId>,

/// dry run contract bootloader program.
/// only used for module task
#[arg(long)]
Expand Down
56 changes: 17 additions & 39 deletions cli/src/interactive.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
use alloy::{primitives::U256, transports::http::reqwest::Url};
use alloy::primitives::U256;
use anyhow::bail;
use hdp::hdp_run;
use hdp::preprocessor::module_registry::ModuleRegistry;
use hdp::primitives::ChainId;
use hdp::primitives::{
aggregate_fn::{integer::Operator, FunctionContext},
task::{
Expand Down Expand Up @@ -30,11 +31,11 @@ pub async fn run_interactive() -> anyhow::Result<()> {
println!("Welcome to Herodotus Data Processor interactive CLI! 🛰️");
println!(
r"
_ _ ____ ____
| | | | | _ \ | _ \
_ _ ____ ____
| | | | | _ \ | _ \
| |_| | | | | | | |_) |
| _ | | |_| | | __/
|_| |_| |____/ |_|
| _ | | |_| | | __/
|_| |_| |____/ |_|
"
);

Expand All @@ -60,11 +61,10 @@ pub async fn run_interactive() -> anyhow::Result<()> {
DatalakeType::BlockSampled => {
// ================== Block Sampled Datalake Fields ==================
// 0. Chain ID
let chain_id: u64 = inquire::Text::new("Chain ID")
let chain_id: String = inquire::Text::new("Chain ID")
.with_help_message("What is the chain ID? (Enter to set default)")
.with_default("11155111")
.prompt()?
.parse()?;
.with_default("ETHEREUM_SEPOLIA")
.prompt()?;
// 1. Block range start
let block_range_start: u64 = inquire::Text::new("Block range start")
.with_help_message(
Expand Down Expand Up @@ -138,7 +138,7 @@ pub async fn run_interactive() -> anyhow::Result<()> {
}
};
let block_sampled_datalake = BlockSampledDatalake::new(
chain_id,
ChainId::from_str(&chain_id)?,
block_range_start,
block_range_end,
increment,
Expand All @@ -148,11 +148,10 @@ pub async fn run_interactive() -> anyhow::Result<()> {
}
DatalakeType::TransactionsInBlock => {
// 0. Chain ID
let chain_id: u64 = inquire::Text::new("Chain ID")
let chain_id: String = inquire::Text::new("Chain ID")
.with_help_message("What is the chain ID? (Enter to set default)")
.with_default("11155111")
.prompt()?
.parse()?;
.with_default("ETHEREUM_SEPOLIA")
.prompt()?;
let target_block: u64 = inquire::Text::new("Enter target block number")
.with_help_message(
"What block you target to get transactions? (Enter to set default)",
Expand Down Expand Up @@ -225,7 +224,7 @@ pub async fn run_interactive() -> anyhow::Result<()> {
}
};
let transactions_datalake = TransactionsInBlockDatalake::new(
chain_id,
ChainId::from_str(&chain_id)?,
target_block,
TransactionsCollection::from_str(&sampled_property)?,
start_index,
Expand Down Expand Up @@ -317,29 +316,10 @@ pub async fn run_interactive() -> anyhow::Result<()> {
.with_default(true)
.prompt()?;
if allow_run {
let rpc_url: Option<Url> = match inquire::Text::new("Enter RPC URL: ")
.with_help_message("Skip if you have it in your .env file")
.prompt()
{
Ok(url) => match url.as_str() {
"" => None,
_ => Some(url.parse()?),
},
Err(_) => None,
};
let chain_id: Option<u64> = match inquire::Text::new("Enter Chain ID: ")
.with_help_message("Skip if you have it in your .env file")
.prompt()
{
Ok(chain_id) => match chain_id.as_str() {
"" => None,
_ => Some(chain_id.parse()?),
},
Err(_) => None,
};
println!("Make sure to position correct rpc url related env variables.");

let output_file: PathBuf = inquire::Text::new("Enter Output file path: ")
.with_default("output.json")
let output_file: PathBuf = inquire::Text::new("Enter Batch proof file path: ")
.with_default("batch.json")
.prompt()?
.into();
let cairo_input: PathBuf = inquire::Text::new("Enter Cairo input file path:")
Expand All @@ -351,8 +331,6 @@ pub async fn run_interactive() -> anyhow::Result<()> {
.prompt()?
.into();
let config = hdp_run::HdpRunConfig::init(
rpc_url,
chain_id,
None,
None,
cairo_input,
Expand Down
31 changes: 0 additions & 31 deletions hdp-config.example.toml

This file was deleted.

Loading

0 comments on commit 2cbf18b

Please sign in to comment.