diff --git a/.gitignore b/.gitignore index 537b79b0..9f5f379b 100644 --- a/.gitignore +++ b/.gitignore @@ -65,6 +65,4 @@ tools/ # Ignore specific directories and files hdp-cairo/ -*.pie - -*.zip \ No newline at end of file +*.pie \ No newline at end of file diff --git a/cli/src/cli.rs b/cli/src/cli.rs index b9f7ecec..d094b208 100644 --- a/cli/src/cli.rs +++ b/cli/src/cli.rs @@ -23,7 +23,7 @@ use hdp::{ use tracing::info; use tracing_subscriber::{EnvFilter, FmtSubscriber}; -pub async fn run() -> anyhow::Result<()> { +pub async fn hdp_cli_run() -> anyhow::Result<()> { let start_run = std::time::Instant::now(); let cli = init_cli()?; match cli.command { @@ -80,7 +80,7 @@ pub async fn module_entry_run(args: RunModuleArgs) -> Result<()> { // TODO: for now, we only support one task if its a module let tasks = vec![TaskEnvelope::Module(module)]; - hdp_run(&config, tasks).await?; + hdp::run(&config, tasks).await?; Ok(()) } @@ -130,7 +130,7 @@ pub async fn datalake_entry_run(args: RunDatalakeArgs) -> Result<()> { Computation::new(args.aggregate_fn_id, args.aggregate_fn_ctx), ))]; - hdp_run(&config, tasks).await?; + hdp::run(&config, tasks).await?; Ok(()) } @@ -168,6 +168,6 @@ pub async fn entry_run(args: RunArgs) -> Result<()> { } } } - hdp_run(&config, task_envelopes).await?; + hdp::run(&config, task_envelopes).await?; Ok(()) } diff --git a/cli/src/interactive.rs b/cli/src/interactive.rs index a055a9e5..249ac850 100644 --- a/cli/src/interactive.rs +++ b/cli/src/interactive.rs @@ -371,7 +371,7 @@ pub async fn run_interactive() -> anyhow::Result<()> { Some(pie_file), ); - hdp_run(&config, tasks).await? + hdp::run(&config, tasks).await? 
} Ok(()) } diff --git a/cli/src/main.rs b/cli/src/main.rs index 5b9e2351..f54946ab 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -1,4 +1,4 @@ #[tokio::main] async fn main() -> anyhow::Result<()> { - hdp_cli::cli::run().await + hdp_cli::cli::hdp_cli_run().await } diff --git a/examples/private-input-module/src/main.rs b/examples/private-input-module/src/main.rs index f5ee0dfa..b58194ba 100644 --- a/examples/private-input-module/src/main.rs +++ b/examples/private-input-module/src/main.rs @@ -1,5 +1,5 @@ use hdp::{ - hdp_run::{self, HdpRunConfig}, + hdp_run::HdpRunConfig, preprocessor::module_registry::ModuleRegistry, primitives::task::{ module::{ModuleInput, Visibility}, @@ -38,5 +38,5 @@ async fn main() { ..Default::default() }; - hdp_run::hdp_run(&hdp_run_config, tasks).await.unwrap(); + hdp::run(&hdp_run_config, tasks).await.unwrap(); } diff --git a/hdp/src/cairo_runner/dry_run.rs b/hdp/src/cairo_runner/dry_run.rs index 9d56b046..0234ae76 100644 --- a/hdp/src/cairo_runner/dry_run.rs +++ b/hdp/src/cairo_runner/dry_run.rs @@ -72,7 +72,7 @@ impl DryRunner { // parse output to return dry run result let dry_run_result = self.parse_run(&PathBuf::from(DRY_CAIRO_RUN_OUTPUT_FILE))?; - info!("Dry-runner executed successfully"); + info!("dry-runner executed successfully"); Ok(dry_run_result) } diff --git a/hdp/src/cairo_runner/run.rs b/hdp/src/cairo_runner/run.rs index 430451a8..f0ef3e71 100644 --- a/hdp/src/cairo_runner/run.rs +++ b/hdp/src/cairo_runner/run.rs @@ -76,7 +76,7 @@ impl Runner { let output = self._run(input_file_path, pie_file_path)?; let cairo_run_output = self.parse_run(output, &PathBuf::from(SOUND_CAIRO_RUN_OUTPUT_FILE))?; - info!("Cairo run output: {:#?}", cairo_run_output); + info!("cairo run output: {:#?}", cairo_run_output); Ok(RunResult { pie_path: pie_file_path.to_owned(), @@ -93,7 +93,7 @@ impl Runner { let number_of_steps = Regex::new(r"Number of steps: (\d+)").unwrap(); if let Some(number_of_steps_caps) = number_of_steps.captures(&output) { 
let number_of_steps = number_of_steps_caps[1].parse::<usize>()?; - info!("Number of steps: {:#?}", number_of_steps); + info!("number of steps: {:#?}", number_of_steps); let cairo_run_output_from_file = fs::read_to_string(cairo_run_output_path) .expect("Failed to read cairo run output file"); let cairo_run_output: CairoRunOutput = diff --git a/hdp/src/hdp_run.rs b/hdp/src/hdp_run.rs index 9a88be5b..2c119295 100644 --- a/hdp/src/hdp_run.rs +++ b/hdp/src/hdp_run.rs @@ -110,7 +110,7 @@ impl HdpRunConfig { /// - `pre_processor_output_file`: The path to the file where the preprocessor output will be saved. (Optional) /// - `output_file`: The path to the file where the output will be saved. (Optional) /// - `cairo_pie_file`: The path to the file where the cairo pie will be saved. (Optional) -pub async fn hdp_run(hdp_run_config: &HdpRunConfig, tasks: Vec<TaskEnvelope>) -> Result<()> { +pub async fn run(hdp_run_config: &HdpRunConfig, tasks: Vec<TaskEnvelope>) -> Result<()> { let compiler_config = CompilerConfig { dry_run_program_path: hdp_run_config.dry_run_program_path.clone(), provider_config: hdp_run_config.evm_provider.clone(), @@ -125,13 +125,13 @@ pub async fn hdp_run(hdp_run_config: &HdpRunConfig, tasks: Vec<TaskEnvelope>) -> fs::write(&hdp_run_config.pre_processor_output_file, input_string) .map_err(|e| anyhow::anyhow!("Unable to write input file: {}", e))?; info!( - "Finished pre processing the data, saved the input file in {}", + "finished pre processing the data, saved the input file in {}", &hdp_run_config.pre_processor_output_file.display() ); if hdp_run_config.processor_output_file.is_none() && hdp_run_config.cairo_pie_file.is_none() { Ok(()) } else { - info!("Starting processing the data... "); + info!("starting processing the data... 
"); let output_file_path = &hdp_run_config .processor_output_file .clone() @@ -152,7 +152,7 @@ pub async fn hdp_run(hdp_run_config: &HdpRunConfig, tasks: Vec<TaskEnvelope>) -> .map_err(|e| anyhow::anyhow!("Unable to write output file: {}", e))?; info!( - "Finished processing the data, saved the output file in {} and pie file in {}", + "finished processing the data, saved the output file in {} and pie file in {}", output_file_path.display(), pie_file_path.display() ); diff --git a/hdp/src/lib.rs b/hdp/src/lib.rs index 0a1c6307..0c64bec9 100644 --- a/hdp/src/lib.rs +++ b/hdp/src/lib.rs @@ -6,4 +6,4 @@ pub mod primitives; pub mod processor; pub mod provider; -pub use hdp_run::hdp_run; +pub use hdp_run::run; diff --git a/hdp/src/preprocessor/compile/datalake/mod.rs b/hdp/src/preprocessor/compile/datalake/mod.rs index 9433726f..54f343f4 100644 --- a/hdp/src/preprocessor/compile/datalake/mod.rs +++ b/hdp/src/preprocessor/compile/datalake/mod.rs @@ -12,7 +12,7 @@ impl Compilable for DatalakeCompute { &self, compile_config: &CompilerConfig, ) -> Result { - info!("Target task: {:#?}", self); + info!("target task: {:#?}", self); let aggregation_fn = &self.compute.aggregate_fn_id; let fn_context = &self.compute.aggregate_fn_ctx; let provider = EvmProvider::new(compile_config.provider_config.clone()); diff --git a/hdp/src/preprocessor/module_registry.rs b/hdp/src/preprocessor/module_registry.rs index 7dde989b..bc4cdb36 100644 --- a/hdp/src/preprocessor/module_registry.rs +++ b/hdp/src/preprocessor/module_registry.rs @@ -37,6 +37,9 @@ pub enum ModuleRegistryError { #[error("Module class source error: {0}")] ClassSourceError(String), + + #[error("Type conversion error: {0}")] + TypeConversionError(String), } pub struct ModuleRegistry { @@ -63,10 +66,14 @@ impl ModuleRegistry { ) -> Result { let program_hash = program_hash.map(|program_hash| FieldElement::from_hex_be(&program_hash).unwrap()); - let module_inputs = module_inputs + let module_inputs: Result<Vec<ModuleInput>, _> = module_inputs .into_iter() - 
.map(|input| ModuleInput::from_str(&input).unwrap()) + .map(|input| ModuleInput::from_str(&input)) .collect(); + + let module_inputs = + module_inputs.map_err(|e| ModuleRegistryError::TypeConversionError(e.to_string()))?; + self.get_extended_module_from_class_source(program_hash, local_class_path, module_inputs) .await } @@ -97,7 +104,7 @@ impl ModuleRegistry { let program_hash = casm.compiled_class_hash(); let converted_hash = FieldElement::from_bytes_be(&program_hash.to_bytes_be()).unwrap(); - info!("Program Hash: {:#?}", converted_hash); + info!("program Hash: {:#?}", converted_hash); let module = Module { program_hash: converted_hash, @@ -123,7 +130,7 @@ impl ModuleRegistry { })?)?; info!( - "Contract class fetched successfully from local path: {:?}", + "contract class fetched successfully from local path: {:?}", local_class_path ); Ok(casm) @@ -136,7 +143,7 @@ impl ModuleRegistry { let program_hash_hex = format!("{:#x}", program_hash); info!( - "Fetching contract class from module registry... program_hash: {}", + "fetching contract class from module registry... 
program_hash: {}", program_hash_hex ); @@ -155,13 +162,13 @@ impl ModuleRegistry { let response_text = response.text().await.expect("cannot get response"); let casm: CasmContractClass = serde_json::from_str(&response_text)?; info!( - "Contract class fetched successfully from program_hash: {:?}", + "contract class fetched successfully from program_hash: {:?}", program_hash ); Ok(casm) } else { Err(ModuleRegistryError::ClassSourceError( - "Failed to fetch contract class".to_string(), + "failed to fetch contract class".to_string(), )) } } diff --git a/hdp/src/provider/evm/from_keys.rs b/hdp/src/provider/evm/from_keys.rs index 3b02daf9..ec634bfc 100644 --- a/hdp/src/provider/evm/from_keys.rs +++ b/hdp/src/provider/evm/from_keys.rs @@ -200,7 +200,7 @@ impl EvmProvider { } let duration = start_fetch.elapsed(); - info!("Time taken (Headers Proofs Fetch): {:?}", duration); + info!("time taken (Headers Proofs Fetch): {:?}", duration); if !mmrs.is_empty() { let vec_mmrs = mmrs.into_iter().collect::>(); @@ -256,7 +256,7 @@ impl EvmProvider { fetched_accounts_proofs.insert(ProcessedAccount::new(address, account_mpt_proofs)); } let duration = start_fetch.elapsed(); - info!("Time taken (Accounts Proofs Fetch): {:?}", duration); + info!("time taken (Accounts Proofs Fetch): {:?}", duration); Ok(fetched_accounts_proofs) } @@ -322,7 +322,7 @@ impl EvmProvider { )); } let duration = start_fetch.elapsed(); - info!("Time taken (Storage Proofs Fetch): {:?}", duration); + info!("time taken (Storage Proofs Fetch): {:?}", duration); Ok((fetched_accounts_proofs, fetched_storage_proofs)) } @@ -372,7 +372,7 @@ impl EvmProvider { } } let duration = start_fetch.elapsed(); - info!("Time taken (Transaction Fetch): {:?}", duration); + info!("time taken (Transaction Fetch): {:?}", duration); Ok(fetched_transactions) } @@ -425,7 +425,7 @@ impl EvmProvider { } } let duration = start_fetch.elapsed(); - info!("Time taken (Transaction Receipts Fetch): {:?}", duration); + info!("time taken (Transaction 
Receipts Fetch): {:?}", duration); Ok(fetched_transaction_receipts) } } diff --git a/hdp/src/provider/evm/provider.rs b/hdp/src/provider/evm/provider.rs index 045f7d78..e13ee261 100644 --- a/hdp/src/provider/evm/provider.rs +++ b/hdp/src/provider/evm/provider.rs @@ -135,7 +135,7 @@ impl EvmProvider { } let duration = start_fetch.elapsed(); - info!("Time taken (Headers Proofs Fetch): {:?}", duration); + info!("time taken (Headers Proofs Fetch): {:?}", duration); if !mmrs.is_empty() { Ok((mmrs, fetched_headers_proofs_with_blocks_map)) } else { @@ -170,7 +170,7 @@ impl EvmProvider { } let duration = start_fetch.elapsed(); - info!("Time taken (Account Proofs Fetch): {:?}", duration); + info!("time taken (Account Proofs Fetch): {:?}", duration); Ok(fetched_accounts_proofs_with_blocks_map) } @@ -261,7 +261,7 @@ impl EvmProvider { } let duration = start_fetch.elapsed(); - info!("Time taken (Storage Proofs Fetch): {:?}", duration); + info!("time taken (Storage Proofs Fetch): {:?}", duration); Ok(processed_accounts) } @@ -328,7 +328,7 @@ impl EvmProvider { } let duration = start_fetch.elapsed(); - info!("Time taken (Transactions Proofs Fetch): {:?}", duration); + info!("time taken (Transactions Proofs Fetch): {:?}", duration); Ok(fetched_transaction_proofs) } @@ -399,7 +399,7 @@ impl EvmProvider { let duration = start_fetch.elapsed(); info!( - "Time taken (Transaction Receipts Proofs Fetch): {:?}", + "time taken (Transaction Receipts Proofs Fetch): {:?}", duration );