chore(starknet_committer_and_os_cli): moved committer logic into separate folder
amosStarkware committed Feb 2, 2025
1 parent 148a9ef commit da0e82a
Showing 29 changed files with 211 additions and 135 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/committer_ci.yml
@@ -43,7 +43,7 @@ jobs:
with:
credentials_json: ${{ secrets.COMMITER_PRODUCTS_EXT_WRITER_JSON }}
- uses: 'google-github-actions/setup-gcloud@v2'
- run: echo "BENCH_INPUT_FILES_PREFIX=$(cat ./crates/starknet_committer_and_os_cli/src/tests/flow_test_files_prefix)" >> $GITHUB_ENV
- run: echo "BENCH_INPUT_FILES_PREFIX=$(cat ./crates/starknet_committer_and_os_cli/src/committer_cli/tests/flow_test_files_prefix)" >> $GITHUB_ENV
- run: gcloud storage cp -r gs://committer-testing-artifacts/$BENCH_INPUT_FILES_PREFIX/* ./crates/starknet_committer_and_os_cli/test_inputs
- run: cargo test -p starknet_committer_and_os_cli --release -- --include-ignored test_regression

@@ -65,7 +65,7 @@ jobs:
with:
credentials_json: ${{ secrets.COMMITER_PRODUCTS_EXT_WRITER_JSON }}
- uses: 'google-github-actions/setup-gcloud@v2'
- run: echo "OLD_BENCH_INPUT_FILES_PREFIX=$(cat ./crates/starknet_committer_and_os_cli/src/tests/flow_test_files_prefix)" >> $GITHUB_ENV
- run: echo "OLD_BENCH_INPUT_FILES_PREFIX=$(cat ./crates/starknet_committer_and_os_cli/src/committer_cli/tests/flow_test_files_prefix)" >> $GITHUB_ENV
- run: gcloud storage cp -r gs://committer-testing-artifacts/$OLD_BENCH_INPUT_FILES_PREFIX/* ./crates/starknet_committer_and_os_cli/test_inputs

# List the existing benchmarks.
@@ -83,7 +83,7 @@
- uses: actions/checkout@v4
with:
clean: false
- run: echo "NEW_BENCH_INPUT_FILES_PREFIX=$(cat ./crates/starknet_committer_and_os_cli/src/tests/flow_test_files_prefix)" >> $GITHUB_ENV
- run: echo "NEW_BENCH_INPUT_FILES_PREFIX=$(cat ./crates/starknet_committer_and_os_cli/src/committer_cli/tests/flow_test_files_prefix)" >> $GITHUB_ENV

# Input files didn't change.
- if: env.OLD_BENCH_INPUT_FILES_PREFIX == env.NEW_BENCH_INPUT_FILES_PREFIX
12 changes: 6 additions & 6 deletions crates/starknet_committer_and_os_cli/benches/committer_bench.rs
@@ -2,9 +2,9 @@

// This file is for benchmarking the committer flow.
// The input files for the different benchmarks are downloaded from GCS, using the prefix stored in
// starknet_committer_and_os_cli/src/tests/flow_test_files_prefix. In order to update them, generate
// a new random prefix (the hash of the initial new commit can be used) and update it in the
// mentioned file. Then upload the new files to GCS with this new prefix (run e.g.,
// starknet_committer_and_os_cli/src/committer_cli/tests/flow_test_files_prefix. In order to
// update them, generate a new random prefix (the hash of the initial new commit can be used) and
// update it in the mentioned file. Then upload the new files to GCS with this new prefix (run e.g.,
// gcloud storage cp LOCAL_FILE gs://committer-testing-artifacts/NEW_PREFIX/tree_flow_inputs.json).

use std::collections::HashMap;
@@ -13,9 +13,9 @@ use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use starknet_committer::block_committer::input::StarknetStorageValue;
use starknet_committer::hash_function::hash::TreeHashFunctionImpl;
use starknet_committer::patricia_merkle_tree::tree::OriginalSkeletonStorageTrieConfig;
use starknet_committer_and_os_cli::commands::commit;
use starknet_committer_and_os_cli::parse_input::read::parse_input;
use starknet_committer_and_os_cli::tests::utils::parse_from_python::TreeFlowInput;
use starknet_committer_and_os_cli::committer_cli::commands::commit;
use starknet_committer_and_os_cli::committer_cli::parse_input::read::parse_input;
use starknet_committer_and_os_cli::committer_cli::tests::utils::parse_from_python::TreeFlowInput;
use starknet_patricia::patricia_merkle_tree::external_test_utils::tree_computation_flow;
use starknet_patricia::patricia_merkle_tree::node_data::leaf::LeafModifications;
use starknet_patricia::patricia_merkle_tree::types::NodeIndex;
6 changes: 6 additions & 0 deletions crates/starknet_committer_and_os_cli/src/committer_cli.rs
@@ -0,0 +1,6 @@
pub mod block_hash;
pub mod commands;
pub mod filled_tree_output;
pub mod parse_input;
pub mod run_committer_cli;
pub mod tests;
@@ -5,10 +5,10 @@ use tracing::level_filters::LevelFilter;
use tracing_subscriber::reload::Handle;
use tracing_subscriber::Registry;

use crate::filled_tree_output::filled_forest::SerializedForest;
use crate::parse_input::cast::InputImpl;
use crate::parse_input::raw_input::RawInput;
use crate::parse_input::read::{load_input, write_to_file};
use crate::committer_cli::filled_tree_output::filled_forest::SerializedForest;
use crate::committer_cli::parse_input::cast::InputImpl;
use crate::committer_cli::parse_input::raw_input::RawInput;
use crate::shared_utils::read::{load_input, write_to_file};

pub async fn parse_and_commit(
input_path: String,
@@ -14,7 +14,7 @@ use starknet_patricia::hash::hash_trait::HashOutput;
use starknet_patricia::storage::errors::DeserializationError;
use starknet_patricia::storage::storage_trait::{StorageKey, StorageValue};

use crate::parse_input::raw_input::RawInput;
use crate::committer_cli::parse_input::raw_input::RawInput;

pub type InputImpl = Input<ConfigImpl>;

@@ -0,0 +1,14 @@
use starknet_patricia::storage::errors::DeserializationError;

use crate::committer_cli::parse_input::cast::InputImpl;
use crate::committer_cli::parse_input::raw_input::RawInput;

#[cfg(test)]
#[path = "read_test.rs"]
pub mod read_test;

type DeserializationResult<T> = Result<T, DeserializationError>;

pub fn parse_input(input: &str) -> DeserializationResult<InputImpl> {
serde_json::from_str::<RawInput>(input)?.try_into()
}
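
Note: below is a minimal usage sketch of the relocated parse_input, not part of this commit; the input file name and the printed messages are illustrative assumptions.

use starknet_committer_and_os_cli::committer_cli::parse_input::read::parse_input;

fn main() {
    // Hypothetical input file; in the real flow the JSON is supplied by the CLI caller.
    let raw_json = std::fs::read_to_string("committer_input.json")
        .expect("failed to read input file");

    // parse_input deserializes the JSON into a RawInput and converts it into InputImpl.
    match parse_input(&raw_json) {
        Ok(_input) => println!("Successfully parsed committer input."),
        Err(_error) => panic!("Failed to parse committer input."),
    }
}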
@@ -0,0 +1,99 @@
use clap::{Parser, Subcommand};
use starknet_api::block_hash::block_hash_calculator::{
calculate_block_commitments,
calculate_block_hash,
};
use tracing::info;
use tracing::level_filters::LevelFilter;
use tracing_subscriber::reload::Handle;
use tracing_subscriber::Registry;

use crate::committer_cli::block_hash::{BlockCommitmentsInput, BlockHashInput};
use crate::committer_cli::commands::parse_and_commit;
use crate::committer_cli::tests::python_tests::PythonTest;
use crate::shared_utils::read::{load_input, read_input, write_to_file};
use crate::shared_utils::types::IoArgs;

#[derive(Parser, Debug)]
pub struct CommitterCliCommand {
#[clap(subcommand)]
command: Command,
}

#[derive(Debug, Subcommand)]
enum Command {
/// Calculates the block hash.
BlockHash {
#[clap(flatten)]
io_args: IoArgs,
},
/// Calculates commitments needed for the block hash.
BlockHashCommitments {
#[clap(flatten)]
io_args: IoArgs,
},
/// Given previous state tree skeleton and a state diff, computes the new commitment.
Commit {
#[clap(flatten)]
io_args: IoArgs,
},
PythonTest {
#[clap(flatten)]
io_args: IoArgs,

/// Test name.
#[clap(long)]
test_name: String,
},
}

pub async fn run_committer_cli(
committer_command: CommitterCliCommand,
log_filter_handle: Handle<LevelFilter, Registry>,
) {
info!("Starting committer-cli with command: \n{:?}", committer_command);
match committer_command.command {
Command::Commit { io_args: IoArgs { input_path, output_path } } => {
parse_and_commit(input_path, output_path, log_filter_handle).await;
}

Command::PythonTest { io_args: IoArgs { input_path, output_path }, test_name } => {
// Create PythonTest from test_name.
let test = PythonTest::try_from(test_name)
.unwrap_or_else(|error| panic!("Failed to create PythonTest: {}", error));
let input = read_input(input_path);

// Run relevant test.
let output = test
.run(Some(&input))
.await
.unwrap_or_else(|error| panic!("Failed to run test: {}", error));

// Write test's output.
write_to_file(&output_path, &output);
}

Command::BlockHash { io_args: IoArgs { input_path, output_path } } => {
let block_hash_input: BlockHashInput = load_input(input_path);
info!("Successfully loaded block hash input.");
let block_hash =
calculate_block_hash(block_hash_input.header, block_hash_input.block_commitments)
.unwrap_or_else(|error| panic!("Failed to calculate block hash: {}", error));
write_to_file(&output_path, &block_hash);
info!("Successfully computed block hash {:?}.", block_hash);
}

Command::BlockHashCommitments { io_args: IoArgs { input_path, output_path } } => {
let commitments_input: BlockCommitmentsInput = load_input(input_path);
info!("Successfully loaded block hash commitment input.");
let commitments = calculate_block_commitments(
&commitments_input.transactions_data,
&commitments_input.state_diff,
commitments_input.l1_da_mode,
&commitments_input.starknet_version,
);
write_to_file(&output_path, &commitments);
info!("Successfully computed block hash commitment: \n{:?}", commitments);
}
}
}
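
Note: a minimal sketch, not part of this commit, of driving the new nested committer command from Rust, assuming a Tokio runtime as in main.rs; the argv values, file paths, and the IoArgs flag names (--input-path / --output-path, taken from the old main.rs definition) are assumptions.

use clap::Parser;
use starknet_committer_and_os_cli::committer_cli::run_committer_cli::{
    run_committer_cli,
    CommitterCliCommand,
};
use starknet_committer_and_os_cli::tracing_utils::configure_tracing;

#[tokio::main]
async fn main() {
    // Equivalent to invoking the committer subcommand from the shell; the first element
    // stands in for argv[0] and is ignored by clap.
    let command = CommitterCliCommand::parse_from([
        "committer",
        "commit",
        "--input-path",
        "input.json",
        "--output-path",
        "output.json",
    ]);

    // As in main.rs, the reload handle allows changing the log level at runtime.
    let log_filter_handle = configure_tracing();
    run_committer_cli(command, log_filter_handle).await;
}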
@@ -42,11 +42,11 @@ use tracing::{debug, error, info, warn};

use super::utils::objects::{get_thin_state_diff, get_transaction_output_for_hash, get_tx_data};
use super::utils::parse_from_python::TreeFlowInput;
use crate::filled_tree_output::filled_forest::SerializedForest;
use crate::parse_input::cast::InputImpl;
use crate::parse_input::read::parse_input;
use crate::tests::utils::parse_from_python::parse_input_single_storage_tree_flow_test;
use crate::tests::utils::random_structs::DummyRandomValue;
use crate::committer_cli::filled_tree_output::filled_forest::SerializedForest;
use crate::committer_cli::parse_input::cast::InputImpl;
use crate::committer_cli::parse_input::read::parse_input;
use crate::committer_cli::tests::utils::parse_from_python::parse_input_single_storage_tree_flow_test;
use crate::committer_cli::tests::utils::random_structs::DummyRandomValue;

// Enum representing different Python tests.
pub enum PythonTest {
@@ -11,17 +11,17 @@ use starknet_patricia::patricia_merkle_tree::external_test_utils::single_tree_fl
use tempfile::NamedTempFile;

use super::utils::parse_from_python::parse_input_single_storage_tree_flow_test;
use crate::commands::commit;
use crate::parse_input::read::parse_input;
use crate::tests::utils::parse_from_python::TreeFlowInput;
use crate::committer_cli::commands::commit;
use crate::committer_cli::parse_input::read::parse_input;
use crate::committer_cli::tests::utils::parse_from_python::TreeFlowInput;

// TODO(Aner, 20/06/2024): these tests need to be fixed to be run correctly in the CI:
// 1. Fix the test to measure cpu_time and not wall_time.
// 2. Fix the max time threshold to be the expected time for the benchmark test.
const MAX_TIME_FOR_SINGLE_TREE_BECHMARK_TEST: f64 = 5.0;
const MAX_TIME_FOR_COMMITTER_FLOW_BECHMARK_TEST: f64 = 5.0;
const SINGLE_TREE_FLOW_INPUT: &str = include_str!("../../test_inputs/tree_flow_inputs.json");
const FLOW_TEST_INPUT: &str = include_str!("../../test_inputs/committer_flow_inputs.json");
const SINGLE_TREE_FLOW_INPUT: &str = include_str!("../../../test_inputs/tree_flow_inputs.json");
const FLOW_TEST_INPUT: &str = include_str!("../../../test_inputs/committer_flow_inputs.json");
const OUTPUT_PATH: &str = "benchmark_output.txt";
const EXPECTED_NUMBER_OF_FILES: usize = 100;

@@ -10,8 +10,8 @@ use starknet_patricia::patricia_merkle_tree::types::NodeIndex;
use starknet_patricia::storage::map_storage::MapStorage;
use starknet_patricia::storage::storage_trait::{StorageKey, StorageValue};

use crate::parse_input::cast::add_unique;
use crate::parse_input::raw_input::RawStorageEntry;
use crate::committer_cli::parse_input::cast::add_unique;
use crate::committer_cli::parse_input::raw_input::RawStorageEntry;

pub struct TreeFlowInput {
pub leaf_modifications: LeafModifications<StarknetStorageValue>,
8 changes: 3 additions & 5 deletions crates/starknet_committer_and_os_cli/src/lib.rs
@@ -1,6 +1,4 @@
pub mod block_hash;
pub mod commands;
pub mod filled_tree_output;
pub mod parse_input;
pub mod tests;
pub mod committer_cli;
pub mod os_cli;
pub mod shared_utils;
pub mod tracing_utils;
109 changes: 20 additions & 89 deletions crates/starknet_committer_and_os_cli/src/main.rs
@@ -1,119 +1,50 @@
use clap::{Args, Parser, Subcommand};
use starknet_api::block_hash::block_hash_calculator::{
calculate_block_commitments,
calculate_block_hash,
use starknet_committer_and_os_cli::committer_cli::run_committer_cli::{
run_committer_cli,
CommitterCliCommand,
};
use starknet_committer_and_os_cli::block_hash::{BlockCommitmentsInput, BlockHashInput};
use starknet_committer_and_os_cli::commands::parse_and_commit;
use starknet_committer_and_os_cli::parse_input::read::{load_input, read_input, write_to_file};
use starknet_committer_and_os_cli::tests::python_tests::PythonTest;
use starknet_committer_and_os_cli::os_cli::run_os_cli::{run_os_cli, OsCliCommand};
use starknet_committer_and_os_cli::tracing_utils::configure_tracing;
use tracing::info;

/// Committer CLI.
/// Committer and OS CLI.
#[derive(Debug, Parser)]
#[clap(name = "committer-cli", version)]
pub struct CommitterCliArgs {
#[clap(name = "committer-and-os-cli", version)]
struct CliArgs {
#[clap(flatten)]
global_options: GlobalOptions,

#[clap(subcommand)]
command: Command,
}

#[derive(Debug, Args)]
pub struct IoArgs {
/// File path to input.
#[clap(long, short = 'i')]
input_path: String,

/// File path to output.
#[clap(long, short = 'o', default_value = "stdout")]
output_path: String,
command: CommitterOrOsCommand,
}

#[derive(Debug, Subcommand)]
enum Command {
/// Calculates the block hash.
BlockHash {
#[clap(flatten)]
io_args: IoArgs,
},
/// Calculates commitments needed for the block hash.
BlockHashCommitments {
#[clap(flatten)]
io_args: IoArgs,
},
/// Given previous state tree skeleton and a state diff, computes the new commitment.
Commit {
#[clap(flatten)]
io_args: IoArgs,
},
PythonTest {
#[clap(flatten)]
io_args: IoArgs,

/// Test name.
#[clap(long)]
test_name: String,
},
enum CommitterOrOsCommand {
/// Run Committer CLI.
Committer(CommitterCliCommand),
/// Run OS CLI.
OS(OsCliCommand),
}

#[derive(Debug, Args)]
struct GlobalOptions {}

#[tokio::main]
/// Main entry point of the committer CLI.
/// Main entry point of the committer & OS CLI.
async fn main() {
// Initialize the logger. The log_filter_handle is used to change the log level. The
// default log level is INFO.
let log_filter_handle = configure_tracing();

let args = CommitterCliArgs::parse();
info!("Starting committer-cli with args: \n{:?}", args);
let args = CliArgs::parse();
info!("Starting committer & OS cli with args: \n{:?}", args);

match args.command {
Command::Commit { io_args: IoArgs { input_path, output_path } } => {
parse_and_commit(input_path, output_path, log_filter_handle).await;
}

Command::PythonTest { io_args: IoArgs { input_path, output_path }, test_name } => {
// Create PythonTest from test_name.
let test = PythonTest::try_from(test_name)
.unwrap_or_else(|error| panic!("Failed to create PythonTest: {}", error));
let input = read_input(input_path);

// Run relevant test.
let output = test
.run(Some(&input))
.await
.unwrap_or_else(|error| panic!("Failed to run test: {}", error));

// Write test's output.
write_to_file(&output_path, &output);
CommitterOrOsCommand::Committer(command) => {
run_committer_cli(command, log_filter_handle).await;
}

Command::BlockHash { io_args: IoArgs { input_path, output_path } } => {
let block_hash_input: BlockHashInput = load_input(input_path);
info!("Successfully loaded block hash input.");
let block_hash =
calculate_block_hash(block_hash_input.header, block_hash_input.block_commitments)
.unwrap_or_else(|error| panic!("Failed to calculate block hash: {}", error));
write_to_file(&output_path, &block_hash);
info!("Successfully computed block hash {:?}.", block_hash);
}

Command::BlockHashCommitments { io_args: IoArgs { input_path, output_path } } => {
let commitments_input: BlockCommitmentsInput = load_input(input_path);
info!("Successfully loaded block hash commitment input.");
let commitments = calculate_block_commitments(
&commitments_input.transactions_data,
&commitments_input.state_diff,
commitments_input.l1_da_mode,
&commitments_input.starknet_version,
);
write_to_file(&output_path, &commitments);
info!("Successfully computed block hash commitment: \n{:?}", commitments);
CommitterOrOsCommand::OS(command) => {
run_os_cli(command, log_filter_handle).await;
}
}
}