Skip to content

Commit

Permalink
Merge pull request #123 from HerodotusDev/private-input
Browse files Browse the repository at this point in the history
  • Loading branch information
rkdud007 authored Aug 13, 2024
2 parents 97a2774 + b169daf commit ceb33b3
Show file tree
Hide file tree
Showing 45 changed files with 1,176 additions and 5,325 deletions.
5 changes: 3 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,8 @@
# Generated by Cargo
Cargo.lock

# Build directory for `cargo build`
build/
# Cairo build artifacts
build/*.json

# Binaries produced by `cargo install`
bin/
Expand Down Expand Up @@ -66,3 +66,4 @@ tools/
# Ignore specific directories and files
hdp-cairo/
*.pie
*.zip
9 changes: 9 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 3 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[workspace]
resolver = "2"
members = ["cli", "hdp"]
members = ["cli", "examples/private-input-module", "hdp"]

[workspace.package]
version = "0.4.0"
Expand All @@ -9,7 +9,7 @@ license-file = "LICENSE"
authors = ["Pia <pia@herodotus.dev>"]
repository = "https://github.com/HerodotusDev/hdp"
homepage = "https://herodotus.dev/"
exclude = ["benches/", "tests/", "fixtures/"]
exclude = ["benches/", "fixtures/", "examples/"]
keywords = ["blockchain", "ethereum", "rust", "data-processor", "storage-proof"]
categories = [
"command-line-interface",
Expand All @@ -20,6 +20,7 @@ categories = [

[workspace.dependencies]
hdp = { path = "hdp" }
hdp-cli = { path = "cli" }
tokio = { version = "1", features = ["full"] }
tempfile = "3.10.1"
alloy-merkle-tree = { version = "0.6.0" }
Expand Down
14 changes: 14 additions & 0 deletions build/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
### Pedersen Program Hash

```json
{
"HDP_PROGRAM_HASH": "0x7f4e04ae49045719567040fd49a42283c63f50c9241abdadd23e96f7d9bda8c",
"DRY_RUN_PROGRAM_HASH": "0x48ac124e876e38ec61c5cd1543930e8211d17be84fd37e6c65da472f6801529"
}
```

### Solidity Contract

```
0x17e6E8e650e96B0cE39FB389B372E122C68F5a41
```
173 changes: 173 additions & 0 deletions cli/src/cli.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,173 @@
use std::{env, fs};

use crate::commands::run::RunArgs;
use crate::commands::run_datalake::DataLakeCommands;
use crate::{
commands::{run_datalake::RunDatalakeArgs, run_module::RunModuleArgs, HDPCli, HDPCliCommands},
interactive,
};
use anyhow::Result;
use clap::Parser;
use hdp::primitives::request::{SubmitBatchQuery, Task};
use hdp::{
hdp_run,
preprocessor::module_registry::ModuleRegistry,
primitives::task::{
datalake::{
block_sampled::BlockSampledDatalake, compute::Computation, envelope::DatalakeEnvelope,
transactions::TransactionsInBlockDatalake, DatalakeCompute,
},
TaskEnvelope,
},
};
use tracing::info;
use tracing_subscriber::{EnvFilter, FmtSubscriber};

/// Entry point for the HDP CLI: initializes logging and argument parsing,
/// dispatches to the selected subcommand, and logs total elapsed time.
pub async fn hdp_cli_run() -> anyhow::Result<()> {
    let started_at = std::time::Instant::now();
    let cli = init_cli()?;
    // Each subcommand is fully handled by its own entry function.
    match cli.command {
        HDPCliCommands::Start => interactive::run_interactive().await?,
        HDPCliCommands::RunDatalake(args) => datalake_entry_run(args).await?,
        HDPCliCommands::RunModule(args) => module_entry_run(args).await?,
        HDPCliCommands::Run(args) => entry_run(args).await?,
    }
    info!("HDP Cli Finished in: {:?}", started_at.elapsed());
    Ok(())
}

/// Initialize the CLI: load `.env`, configure tracing from `RUST_LOG`
/// (defaulting to `"info"` when unset), and parse command-line arguments.
///
/// # Errors
/// Returns an error if a global tracing subscriber has already been set.
fn init_cli() -> Result<HDPCli> {
    dotenv::dotenv().ok();
    // Fall back to "info" when RUST_LOG is unset or not valid unicode.
    let rust_log = env::var("RUST_LOG").unwrap_or_else(|_| "info".to_string());
    let subscriber = FmtSubscriber::builder()
        .with_env_filter(EnvFilter::new(&rust_log))
        .finish();
    // Propagate instead of panicking: this function already returns Result,
    // and a previously-installed subscriber is a recoverable condition.
    tracing::subscriber::set_global_default(subscriber)?;
    info!("running on log level: {}", rust_log);
    Ok(HDPCli::parse())
}

/// Run a single module task from CLI arguments: build the run config,
/// resolve the module from its class source, and execute it via `hdp::run`.
pub async fn module_entry_run(args: RunModuleArgs) -> Result<()> {
    let config = hdp_run::HdpRunConfig::init(
        args.rpc_url,
        args.chain_id,
        Some(args.dry_run_cairo_file),
        args.sound_run_cairo_file,
        args.preprocessor_output_file,
        args.save_fetch_keys_file,
        args.output_file,
        args.cairo_pie_file,
    );
    let registry = ModuleRegistry::new();
    let module = registry
        .get_extended_module_from_class_source_string(
            args.program_hash,
            args.local_class_path,
            args.module_inputs,
        )
        .await?;
    // TODO: module runs currently support exactly one task.
    hdp::run(&config, vec![TaskEnvelope::Module(module)]).await?;
    Ok(())
}

/// Run a single datalake-compute task from CLI arguments.
///
/// Builds the run config (datalake runs pass no dry-run Cairo file and no
/// fetch-keys file), translates the CLI datalake subcommand into a
/// `DatalakeEnvelope`, and executes the resulting task through `hdp::run`.
pub async fn datalake_entry_run(args: RunDatalakeArgs) -> Result<()> {
    let config = hdp_run::HdpRunConfig::init(
        args.rpc_url,
        args.chain_id,
        None,
        args.sound_run_cairo_file,
        args.preprocessor_output_file,
        None,
        args.output_file,
        args.cairo_pie_file,
    );
    // NOTE(review): the datalake itself is pinned to Sepolia (11155111)
    // regardless of `args.chain_id`, which only feeds the run config above —
    // confirm this is intentional rather than an oversight. Named once here
    // instead of repeating the magic number in each arm.
    let sepolia_chain_id = 11155111;
    let parsed_datalake = match args.datalake {
        DataLakeCommands::BlockSampled {
            block_range_start,
            block_range_end,
            sampled_property,
            increment,
        } => DatalakeEnvelope::BlockSampled(BlockSampledDatalake::new(
            sepolia_chain_id,
            block_range_start,
            block_range_end,
            increment,
            sampled_property,
        )),
        DataLakeCommands::TransactionsInBlock {
            target_block,
            sampled_property,
            start_index,
            end_index,
            increment,
            included_types,
        } => DatalakeEnvelope::TransactionsInBlock(TransactionsInBlockDatalake::new(
            sepolia_chain_id,
            target_block,
            sampled_property,
            start_index,
            end_index,
            increment,
            included_types,
        )),
    };
    let tasks = vec![TaskEnvelope::DatalakeCompute(DatalakeCompute::new(
        parsed_datalake,
        Computation::new(args.aggregate_fn_id, args.aggregate_fn_ctx),
    ))];

    hdp::run(&config, tasks).await?;
    Ok(())
}

pub async fn entry_run(args: RunArgs) -> Result<()> {
let request_context =
fs::read_to_string(args.request_file).expect("No request file exist in the path");
let parsed: SubmitBatchQuery = serde_json::from_str(&request_context)
.expect("Invalid format of request. Cannot parse it.");
let config = hdp_run::HdpRunConfig::init(
args.rpc_url,
Some(parsed.destination_chain_id),
args.dry_run_cairo_file,
args.sound_run_cairo_file,
args.preprocessor_output_file,
None,
args.output_file,
args.cairo_pie_file,
);
let module_registry = ModuleRegistry::new();
let mut task_envelopes = Vec::new();
for task in parsed.tasks {
match task {
Task::DatalakeCompute(task) => {
task_envelopes.push(TaskEnvelope::DatalakeCompute(task));
}
Task::Module(task) => {
let module = module_registry
.get_extended_module_from_class_source(
Some(task.program_hash),
None,
task.inputs,
)
.await?;
task_envelopes.push(TaskEnvelope::Module(module));
}
}
}
hdp::run(&config, task_envelopes).await?;
Ok(())
}
Loading

0 comments on commit ceb33b3

Please sign in to comment.