diff --git a/Cargo.lock b/Cargo.lock index 60048fc1..2ced3dc0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1914,8 +1914,8 @@ dependencies = [ [[package]] name = "evm_arithmetization" -version = "0.1.3" -source = "git+https://github.com/fractal-zkp/zk_evm.git?branch=feat/partial_trie_builder#d29b17148194782e900473460a1ac16315a29448" +version = "0.2.0" +source = "git+https://github.com/0xPolygonZero/zk_evm.git?tag=v0.4.0#46eb449a5a97438ade3f22e2555d7f266b54b290" dependencies = [ "anyhow", "bytes", @@ -2947,8 +2947,8 @@ checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" [[package]] name = "mpt_trie" -version = "0.2.1" -source = "git+https://github.com/fractal-zkp/zk_evm.git?branch=feat/partial_trie_builder#d29b17148194782e900473460a1ac16315a29448" +version = "0.3.0" +source = "git+https://github.com/0xPolygonZero/zk_evm.git?tag=v0.4.0#46eb449a5a97438ade3f22e2555d7f266b54b290" dependencies = [ "bytes", "enum-as-inner", @@ -3690,8 +3690,8 @@ dependencies = [ [[package]] name = "proof_gen" -version = "0.1.3" -source = "git+https://github.com/fractal-zkp/zk_evm.git?branch=feat/partial_trie_builder#d29b17148194782e900473460a1ac16315a29448" +version = "0.2.0" +source = "git+https://github.com/0xPolygonZero/zk_evm.git?tag=v0.4.0#46eb449a5a97438ade3f22e2555d7f266b54b290" dependencies = [ "ethereum-types", "evm_arithmetization", @@ -4927,8 +4927,8 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "trace_decoder" -version = "0.3.1" -source = "git+https://github.com/fractal-zkp/zk_evm.git?branch=feat/partial_trie_builder#d29b17148194782e900473460a1ac16315a29448" +version = "0.4.0" +source = "git+https://github.com/0xPolygonZero/zk_evm.git?tag=v0.4.0#46eb449a5a97438ade3f22e2555d7f266b54b290" dependencies = [ "bytes", "ciborium", diff --git a/Cargo.toml b/Cargo.toml index a776f6a1..2dd5caf4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,10 +32,10 @@ alloy = { git = "https://github.com/alloy-rs/alloy", features = [ # zk-evm dependencies plonky2 = "0.2.2" -evm_arithmetization = { git = "https://github.com/fractal-zkp/zk_evm.git", branch = "feat/partial_trie_builder" } -mpt_trie = { git = "https://github.com/fractal-zkp/zk_evm.git", branch = "feat/partial_trie_builder" } -trace_decoder = { git = "https://github.com/fractal-zkp/zk_evm.git", branch = "feat/partial_trie_builder" } -proof_gen = { git = "https://github.com/fractal-zkp/zk_evm.git", branch = "feat/partial_trie_builder" } +evm_arithmetization = { git = "https://github.com/0xPolygonZero/zk_evm.git", tag = "v0.4.0" } +mpt_trie = { git = "https://github.com/0xPolygonZero/zk_evm.git", tag = "v0.4.0" } +trace_decoder = { git = "https://github.com/0xPolygonZero/zk_evm.git", tag = "v0.4.0" } +proof_gen = { git = "https://github.com/0xPolygonZero/zk_evm.git", tag = "v0.4.0" } [workspace.package] edition = "2021" diff --git a/leader/src/cli.rs b/leader/src/cli.rs index bc318fac..48b2e537 100644 --- a/leader/src/cli.rs +++ b/leader/src/cli.rs @@ -19,7 +19,7 @@ pub(crate) struct Cli { pub(crate) prover_state_config: CliProverStateConfig, } -#[derive(Subcommand)] +#[derive(Subcommand, Clone)] pub(crate) enum Command { /// Reads input from stdin and writes output to stdout. Stdio { @@ -35,22 +35,35 @@ pub(crate) enum Command { // The Jerigon RPC URL. #[arg(long, short = 'u', value_hint = ValueHint::Url)] rpc_url: Url, - /// The block number for which to generate a proof. - #[arg(short, long)] - block_number: u64, + /// The block interval for which to generate a proof. 
+        #[arg(long, short = 'i')]
+        block_interval: String,
         /// The checkpoint block number.
         #[arg(short, long, default_value_t = 0)]
         checkpoint_block_number: u64,
         /// The previous proof output.
         #[arg(long, short = 'f', value_hint = ValueHint::FilePath)]
         previous_proof: Option<PathBuf>,
-        /// If provided, write the generated proof to this file instead of
+        /// If provided, write the generated proofs to this directory instead of
         /// stdout.
         #[arg(long, short = 'o', value_hint = ValueHint::FilePath)]
-        proof_output_path: Option<PathBuf>,
+        proof_output_dir: Option<PathBuf>,
         /// If true, save the public inputs to disk on error.
         #[arg(short, long, default_value_t = false)]
         save_inputs_on_error: bool,
+        /// Network block time in milliseconds. This value is used
+        /// to determine the blockchain node polling interval.
+        #[arg(short, long, env = "ZERO_BIN_BLOCK_TIME", default_value_t = 2000)]
+        block_time: u64,
+        /// Keep intermediate proofs. Default action is to
+        /// delete them after the final proof is generated.
+        #[arg(
+            short,
+            long,
+            env = "ZERO_BIN_KEEP_INTERMEDIATE_PROOFS",
+            default_value_t = false
+        )]
+        keep_intermediate_proofs: bool,
         /// Backoff in milliseconds for request retries
         #[arg(long, default_value_t = 0)]
         backoff: u64,
diff --git a/leader/src/client.rs b/leader/src/client.rs
index ff128c48..1941e5db 100644
--- a/leader/src/client.rs
+++ b/leader/src/client.rs
@@ -1,70 +1,118 @@
-use std::{
-    fs::{create_dir_all, File},
-    io::Write,
-    path::PathBuf,
-    sync::Arc,
-};
+use std::io::Write;
+use std::path::PathBuf;
 
 use alloy::transports::http::reqwest::Url;
+use anyhow::Result;
+use common::block_interval::BlockInterval;
+use common::fs::generate_block_proof_file_name;
 use paladin::runtime::Runtime;
-use proof_gen::types::PlonkyProofIntern;
-use rpc::retry::build_http_retry_provider;
+use proof_gen::proof_types::GeneratedBlockProof;
+use rpc::{retry::build_http_retry_provider, RpcType};
+use tracing::{error, info, warn};
 
-/// The main function for the jerigon mode.
-#[allow(clippy::too_many_arguments)]
-pub(crate) async fn rpc_main(
-    rpc_type: &str,
-    rpc_url: Url,
+#[derive(Debug)]
+pub struct RpcParams {
+    pub rpc_url: Url,
+    pub rpc_type: RpcType,
+    pub backoff: u64,
+    pub max_retries: u32,
+}
+
+#[derive(Debug, Default)]
+pub struct ProofParams {
+    pub checkpoint_block_number: u64,
+    pub previous_proof: Option<GeneratedBlockProof>,
+    pub proof_output_dir: Option<PathBuf>,
+    pub save_inputs_on_error: bool,
+    pub keep_intermediate_proofs: bool,
+}
+
+/// The main function for the client.
+pub(crate) async fn client_main(
     runtime: Runtime,
-    block_number: u64,
-    checkpoint_block_number: u64,
-    previous: Option<PlonkyProofIntern>,
-    proof_output_path_opt: Option<PathBuf>,
-    save_inputs_on_error: bool,
-    backoff: u64,
-    max_retries: u32,
-) -> anyhow::Result<()> {
-    let prover_input = match rpc_type {
-        "jerigon" => {
-            rpc::jerigon::prover_input(
-                build_http_retry_provider(rpc_url, backoff, max_retries),
-                block_number.into(),
-                checkpoint_block_number.into(),
-            )
-            .await?
-        }
-        "native" => {
-            rpc::native::prover_input(
-                Arc::new(build_http_retry_provider(rpc_url, backoff, max_retries)),
-                block_number.into(),
-                checkpoint_block_number.into(),
-            )
-            .await?
-        }
-        _ => unreachable!(),
-    };
+    rpc_params: RpcParams,
+    block_interval: BlockInterval,
+    mut params: ProofParams,
+) -> Result<()> {
+    let prover_input = rpc::prover_input(
+        build_http_retry_provider(
+            rpc_params.rpc_url,
+            rpc_params.backoff,
+            rpc_params.max_retries,
+        ),
+        block_interval,
+        params.checkpoint_block_number.into(),
+        rpc_params.rpc_type,
+    )
+    .await?;
+
+    if cfg!(feature = "test_only") {
+        info!("All proof witnesses have been generated successfully.");
+    } else {
+        info!("All proofs have been generated successfully.");
+    }
 
-    let proof = prover_input
-        .prove(&runtime, previous, save_inputs_on_error)
+    // If `keep_intermediate_proofs` is not set we only keep the last block
+    // proof from the interval. It contains all the necessary information to
+    // verify the whole sequence.
+    let proved_blocks = prover_input
+        .prove(
+            &runtime,
+            params.previous_proof.take(),
+            params.save_inputs_on_error,
+            params.proof_output_dir.clone(),
+        )
         .await;
     runtime.close().await?;
+    let proved_blocks = proved_blocks?;
 
-    let proof = serde_json::to_vec(&proof?.intern)?;
-    write_proof(proof, proof_output_path_opt)
-}
-
-fn write_proof(proof: Vec<u8>, proof_output_path_opt: Option<PathBuf>) -> anyhow::Result<()> {
-    match proof_output_path_opt {
-        Some(p) => {
-            if let Some(parent) = p.parent() {
-                create_dir_all(parent)?;
-            }
-
-            let mut f = File::create(p)?;
-            f.write_all(&proof)?;
+    if params.keep_intermediate_proofs {
+        if params.proof_output_dir.is_some() {
+            // All proof files (including intermediary) are written to disk and kept
+            warn!("Skipping cleanup, intermediate proof files are kept");
+        } else {
+            // Output all proofs to stdout
+            std::io::stdout().write_all(&serde_json::to_vec(
+                &proved_blocks
+                    .into_iter()
+                    .filter_map(|(_, block)| block)
+                    .collect::<Vec<_>>(),
+            )?)?;
+        }
+    } else if let Some(proof_output_dir) = params.proof_output_dir.as_ref() {
+        // Remove intermediary proof files
+        proved_blocks
+            .into_iter()
+            .rev()
+            .skip(1)
+            .map(|(block_number, _)| {
+                generate_block_proof_file_name(&proof_output_dir.to_str(), block_number)
+            })
+            .for_each(|path| {
+                if let Err(e) = std::fs::remove_file(path) {
+                    error!("Failed to remove intermediate proof file: {e}");
+                }
+            });
+    } else {
+        // Output only last proof to stdout
+        if let Some(last_block) = proved_blocks
+            .into_iter()
+            .filter_map(|(_, block)| block)
+            .last()
+        {
+            std::io::stdout().write_all(&serde_json::to_vec(&last_block)?)?;
         }
-        None => std::io::stdout().write_all(&proof)?,
     }
     Ok(())
 }
+
+impl From<super::cli::Command> for RpcType {
+    fn from(command: super::cli::Command) -> Self {
+        match command {
+            super::cli::Command::Native { .. } => RpcType::Native,
+            super::cli::Command::Jerigon { ..
} => RpcType::Jerigon, + _ => panic!("Unsupported command type"), + } + } +} diff --git a/leader/src/main.rs b/leader/src/main.rs index e3fd850f..dbe25d6e 100644 --- a/leader/src/main.rs +++ b/leader/src/main.rs @@ -4,6 +4,7 @@ use std::{fs::File, path::PathBuf}; use anyhow::Result; use clap::Parser; use cli::Command; +use client::RpcParams; use common::block_interval::BlockInterval; use dotenvy::dotenv; use ops::register; @@ -11,7 +12,7 @@ use paladin::runtime::Runtime; use proof_gen::proof_types::GeneratedBlockProof; use tracing::info; -use crate::jerigon::{jerigon_main, ProofParams}; +use crate::client::{client_main, ProofParams}; use crate::utils::get_package_version; mod cli; @@ -62,7 +63,7 @@ async fn main() -> Result<()> { let runtime = Runtime::from_config(&args.paladin, register()).await?; - match args.command { + match args.command.clone() { Command::Stdio { previous_proof, save_inputs_on_error, @@ -97,30 +98,16 @@ async fn main() -> Result<()> { keep_intermediate_proofs, backoff, max_retries, - } => { - let previous_proof = get_previous_proof(previous_proof)?; - - client::rpc_main( - "jerigon", - rpc_url, - runtime, - block_number, - checkpoint_block_number, - previous_proof, - proof_output_path, - save_inputs_on_error, - backoff, - max_retries, - ) - .await?; } - Command::Native { + | Command::Native { rpc_url, - block_number, + block_interval, checkpoint_block_number, previous_proof, - proof_output_path, + proof_output_dir, save_inputs_on_error, + block_time, + keep_intermediate_proofs, backoff, max_retries, } => { @@ -136,8 +123,14 @@ async fn main() -> Result<()> { } info!("Proving interval {block_interval}"); - jerigon_main( + client_main( runtime, + RpcParams { + rpc_url, + rpc_type: args.command.into(), + backoff, + max_retries, + }, block_interval, ProofParams { checkpoint_block_number, @@ -146,8 +139,6 @@ async fn main() -> Result<()> { save_inputs_on_error, keep_intermediate_proofs, }, - backoff, - max_retries, ) .await?; } diff --git a/rpc/src/jerigon.rs b/rpc/src/jerigon.rs index 67998cb2..a2fc380c 100644 --- a/rpc/src/jerigon.rs +++ b/rpc/src/jerigon.rs @@ -1,58 +1,100 @@ -use alloy::{providers::Provider, rpc::types::eth::BlockId, transports::Transport}; +use alloy::{ + primitives::B256, + providers::Provider, + rpc::types::eth::{BlockId, BlockNumberOrTag, BlockTransactionsKind}, + transports::Transport, +}; use anyhow::Context as _; -use itertools::{Either, Itertools as _}; +use common::block_interval::BlockInterval; +use futures::StreamExt as _; +use prover::BlockProverInput; use prover::ProverInput; use serde::Deserialize; use serde_json::json; -use trace_decoder::trace_protocol::{BlockTrace, BlockTraceTriePreImages, TxnInfo}; +use trace_decoder::trace_protocol::{ + BlockTrace, BlockTraceTriePreImages, CombinedPreImages, TrieCompact, TxnInfo, +}; use super::fetch_other_block_data; -#[derive(Deserialize, Debug)] -#[serde(rename_all = "snake_case")] -#[allow(clippy::large_enum_variant)] -enum ZeroTrace { - Result(TxnInfo), - BlockWitness(BlockTraceTriePreImages), +/// Transaction traces retrieved from Erigon zeroTracer. +#[derive(Debug, Deserialize)] +pub struct ZeroTxResult { + #[serde(rename(deserialize = "txHash"))] + pub tx_hash: alloy::primitives::TxHash, + pub result: TxnInfo, } -/// Fetches the prover input for the given BlockId. -pub async fn prover_input( +/// Block witness retrieved from Erigon zeroTracer. 
+#[derive(Debug, Deserialize)]
+pub struct ZeroBlockWitness(TrieCompact);
+
+pub async fn block_prover_input<ProviderT, TransportT>(
     provider: ProviderT,
     target_block_id: BlockId,
-    checkpoint_block_id: BlockId,
-) -> anyhow::Result<ProverInput>
+    checkpoint_state_trie_root: B256,
+) -> anyhow::Result<BlockProverInput>
 where
     ProviderT: Provider<TransportT>,
     TransportT: Transport + Clone,
 {
     // Grab trace information
-    /////////////////////////
-    let traces = provider
-        .raw_request::<_, Vec<ZeroTrace>>(
+    let tx_results = provider
+        .raw_request::<_, Vec<ZeroTxResult>>(
             "debug_traceBlockByNumber".into(),
             (target_block_id, json!({"tracer": "zeroTracer"})),
         )
         .await?;
 
-    let (txn_info, mut pre_images) =
-        traces
-            .into_iter()
-            .partition_map::<Vec<_>, Vec<_>, _, _, _>(|it| match it {
-                ZeroTrace::Result(it) => Either::Left(it),
-                ZeroTrace::BlockWitness(it) => Either::Right(it),
-            });
+    // Grab block witness info (packed as combined trie pre-images)
+    let block_witness = provider
+        .raw_request::<_, ZeroBlockWitness>("eth_getWitness".into(), vec![target_block_id])
+        .await?;
 
-    let other_data = fetch_other_block_data(provider, target_block_id, checkpoint_block_id).await?;
+    let other_data =
+        fetch_other_block_data(provider, target_block_id, checkpoint_state_trie_root).await?;
 
     // Assemble
-    ///////////
-    Ok(ProverInput {
+    Ok(BlockProverInput {
         block_trace: BlockTrace {
-            trie_pre_images: pre_images.pop().context("trace had no BlockWitness")?,
-            code_db: None,
-            txn_info,
+            trie_pre_images: BlockTraceTriePreImages::Combined(CombinedPreImages {
+                compact: block_witness.0,
+            }),
+            txn_info: tx_results.into_iter().map(|it| it.result).collect(),
+            code_db: Default::default(),
         },
         other_data,
     })
 }
+
+/// Obtain the prover input for a given block interval
+pub async fn prover_input<ProviderT, TransportT>(
+    provider: ProviderT,
+    block_interval: BlockInterval,
+    checkpoint_block_id: BlockId,
+) -> anyhow::Result<ProverInput>
+where
+    ProviderT: Provider<TransportT>,
+    TransportT: Transport + Clone,
+{
+    // Grab interval checkpoint block state trie
+    let checkpoint_state_trie_root = provider
+        .get_block(checkpoint_block_id, BlockTransactionsKind::Hashes)
+        .await?
+        .context("block does not exist")?
+        .header
+        .state_root;
+
+    let mut block_proofs = Vec::new();
+    let mut block_interval = block_interval.into_bounded_stream()?;
+
+    while let Some(block_num) = block_interval.next().await {
+        let block_id = BlockId::Number(BlockNumberOrTag::Number(block_num));
+        let block_prover_input =
+            block_prover_input(&provider, block_id, checkpoint_state_trie_root).await?;
+        block_proofs.push(block_prover_input);
+    }
+    Ok(ProverInput {
+        blocks: block_proofs,
+    })
+}
diff --git a/rpc/src/lib.rs b/rpc/src/lib.rs
index 9fbbe122..b67e36bf 100644
--- a/rpc/src/lib.rs
+++ b/rpc/src/lib.rs
@@ -1,85 +1,83 @@
-use alloy::primitives::B256;
-use alloy::rpc::types::eth::BlockNumberOrTag;
 use alloy::{
+    primitives::B256,
     providers::Provider,
-    rpc::types::eth::{Block, BlockId, Withdrawal},
+    rpc::types::eth::{BlockId, BlockNumberOrTag, BlockTransactionsKind, Withdrawal},
     transports::Transport,
 };
 use anyhow::Context as _;
+use clap::ValueEnum;
 use common::block_interval::BlockInterval;
 use evm_arithmetization::proof::{BlockHashes, BlockMetadata};
 use futures::{StreamExt as _, TryStreamExt as _};
-use prover::{BlockProverInput, ProverInput};
-use serde::Deserialize;
-use serde_json::json;
-use trace_decoder::{
-    trace_protocol::{
-        BlockTrace, BlockTraceTriePreImages, CombinedPreImages, TrieCompact, TxnInfo,
-    },
-    types::{BlockLevelData, OtherBlockData},
-};
+use prover::ProverInput;
+use trace_decoder::types::{BlockLevelData, OtherBlockData};
 
-/// Transaction traces retrieved from Erigon zeroTracer.
-#[derive(Debug, Deserialize)]
-pub struct ZeroTxResult {
-    #[serde(rename(deserialize = "txHash"))]
-    pub tx_hash: alloy::primitives::TxHash,
-    pub result: TxnInfo,
-}
+mod compat;
+pub mod jerigon;
+pub mod native;
+pub mod retry;
 
-/// Block witness retrieved from Erigon zeroTracer.
-#[derive(Debug, Deserialize)]
-pub struct ZeroBlockWitness(TrieCompact);
+use compat::Compat;
 
-/// When [fetching a block over RPC](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getblockbynumber),
-/// we can choose the transaction format, between:
-/// - Full JSON.
-/// - Just the hash.
-///
-/// We only need the latter.
-const BLOCK_WITH_FULL_TRANSACTIONS: bool = false;
+/// The RPC type.
+#[derive(ValueEnum, Clone, Debug)]
+pub enum RpcType {
+    Jerigon,
+    Native,
+}
 
-/// Retrieve block information from the provider
-pub async fn get_block<ProviderT, TransportT>(
-    provider: &mut ProviderT,
-    target_block_id: BlockId,
-    full_transaction_data: bool,
-) -> anyhow::Result<Block>
+/// Obtain the prover input for a given block interval
+pub async fn prover_input<ProviderT, TransportT>(
+    provider: ProviderT,
+    block_interval: BlockInterval,
+    checkpoint_block_id: BlockId,
+    rpc_type: RpcType,
+) -> anyhow::Result<ProverInput>
 where
     ProviderT: Provider<TransportT>,
     TransportT: Transport + Clone,
 {
-    provider
-        .get_block(target_block_id, full_transaction_data)
+    // Grab interval checkpoint block state trie
+    let checkpoint_state_trie_root = provider
+        .get_block(checkpoint_block_id, BlockTransactionsKind::Hashes)
         .await?
-        .context("block does not exist")
+        .context("block does not exist")?
+        .header
+        .state_root;
+
+    let mut block_proofs = Vec::new();
+    let mut block_interval = block_interval.into_bounded_stream()?;
+
+    while let Some(block_num) = block_interval.next().await {
+        let block_id = BlockId::Number(BlockNumberOrTag::Number(block_num));
+        let block_prover_input = match rpc_type {
+            RpcType::Jerigon => {
+                jerigon::block_prover_input(&provider, block_id, checkpoint_state_trie_root).await?
+            }
+            RpcType::Native => {
+                native::block_prover_input(&provider, block_id, checkpoint_state_trie_root).await?
+            }
+        };
+
+        block_proofs.push(block_prover_input);
+    }
+    Ok(ProverInput {
+        blocks: block_proofs,
+    })
 }
 
-pub async fn block_prover_input<ProviderT, TransportT>(
+/// Fetches other block data
+async fn fetch_other_block_data<ProviderT, TransportT>(
     provider: ProviderT,
     target_block_id: BlockId,
     checkpoint_state_trie_root: B256,
-) -> anyhow::Result<BlockProverInput>
+) -> anyhow::Result<OtherBlockData>
 where
     ProviderT: Provider<TransportT>,
     TransportT: Transport + Clone,
 {
-    // Grab trace information
-    let tx_results = provider
-        .raw_request::<_, Vec<ZeroTxResult>>(
-            "debug_traceBlockByNumber".into(),
-            (target_block_id, json!({"tracer": "zeroTracer"})),
-        )
-        .await?;
-
-    // Grab block witness info (packed as combined trie pre-images)
-    let block_witness = provider
-        .raw_request::<_, ZeroBlockWitness>("eth_getWitness".into(), vec![target_block_id])
-        .await?;
-
-    // Grab block info
     let target_block = provider
-        .get_block(target_block_id, BLOCK_WITH_FULL_TRANSACTIONS)
+        .get_block(target_block_id, BlockTransactionsKind::Hashes)
         .await?
         .context("target block does not exist")?;
     let target_block_number = target_block
@@ -101,7 +99,7 @@ where
             let provider = &provider;
             async move {
                 let block = provider
-                    .get_block(n.into(), BLOCK_WITH_FULL_TRANSACTIONS)
+                    .get_block(n.into(), BlockTransactionsKind::Hashes)
                     .await
                     .context("couldn't get block")?
                     .context("no such block")?;
@@ -115,133 +113,48 @@ where
         .await
         .context("couldn't fill previous hashes")?;
 
-    // Assemble
-    Ok(BlockProverInput {
-        block_trace: BlockTrace {
-            trie_pre_images: BlockTraceTriePreImages::Combined(CombinedPreImages {
-                compact: block_witness.0,
-            }),
-            txn_info: tx_results.into_iter().map(|it| it.result).collect(),
-            code_db: Default::default(),
-        },
-        other_data: OtherBlockData {
-            b_data: BlockLevelData {
-                b_meta: BlockMetadata {
-                    block_beneficiary: target_block.header.miner.compat(),
-                    block_timestamp: target_block.header.timestamp.into(),
-                    block_number: target_block_number.into(),
-                    block_difficulty: target_block.header.difficulty.into(),
-                    block_random: target_block
-                        .header
-                        .mix_hash
-                        .context("target block is missing field `mix_hash`")?
-                        .compat(),
-                    block_gaslimit: target_block.header.gas_limit.into(),
-                    block_chain_id: chain_id.into(),
-                    block_base_fee: target_block
-                        .header
-                        .base_fee_per_gas
-                        .context("target block is missing field `base_fee_per_gas`")?
-                        .into(),
-                    block_gas_used: target_block.header.gas_used.into(),
-                    block_bloom: target_block.header.logs_bloom.compat(),
-                },
-                b_hashes: BlockHashes {
-                    prev_hashes: prev_hashes.map(|it| it.compat()).into(),
-                    cur_hash: target_block
-                        .header
-                        .hash
-                        .context("target block is missing field `hash`")?
-                        .compat(),
-                },
-                withdrawals: target_block
-                    .withdrawals
-                    .into_iter()
-                    .flatten()
-                    .map(
-                        |Withdrawal {
-                             address, amount, ..
-                         }| { (address.compat(), amount.into()) },
-                    )
-                    .collect(),
+    let other_data = OtherBlockData {
+        b_data: BlockLevelData {
+            b_meta: BlockMetadata {
+                block_beneficiary: target_block.header.miner.compat(),
+                block_timestamp: target_block.header.timestamp.into(),
+                block_number: target_block_number.into(),
+                block_difficulty: target_block.header.difficulty.into(),
+                block_random: target_block
+                    .header
+                    .mix_hash
+                    .context("target block is missing field `mix_hash`")?
+                    .compat(),
+                block_gaslimit: target_block.header.gas_limit.into(),
+                block_chain_id: chain_id.into(),
+                block_base_fee: target_block
+                    .header
+                    .base_fee_per_gas
+                    .context("target block is missing field `base_fee_per_gas`")?
+ .into(), + block_gas_used: target_block.header.gas_used.into(), + block_bloom: target_block.header.logs_bloom.compat(), }, - checkpoint_state_trie_root: checkpoint_state_trie_root.compat(), + b_hashes: BlockHashes { + prev_hashes: prev_hashes.map(|it| it.compat()).into(), + cur_hash: target_block + .header + .hash + .context("target block is missing field `hash`")? + .compat(), + }, + withdrawals: target_block + .withdrawals + .into_iter() + .flatten() + .map( + |Withdrawal { + address, amount, .. + }| { (address.compat(), amount.into()) }, + ) + .collect(), }, - }) -} - -/// Obtain the prover input for a given block interval -pub async fn prover_input( - mut provider: ProviderT, - block_interval: BlockInterval, - checkpoint_block_id: BlockId, -) -> anyhow::Result -where - ProviderT: Provider, - TransportT: Transport + Clone, -{ - // Grab interval checkpoint block state trie - let checkpoint_state_trie_root = get_block( - &mut provider, - checkpoint_block_id, - BLOCK_WITH_FULL_TRANSACTIONS, - ) - .await? - .header - .state_root; - - let mut block_proofs = Vec::new(); - let mut block_interval = block_interval.into_bounded_stream()?; - - while let Some(block_num) = block_interval.next().await { - let block_id = BlockId::Number(BlockNumberOrTag::Number(block_num)); - let block_prover_input = - block_prover_input(&provider, block_id, checkpoint_state_trie_root).await?; - block_proofs.push(block_prover_input); - } - Ok(ProverInput { - blocks: block_proofs, - }) -} - -trait Compat { - fn compat(self) -> Out; -} - -impl Compat<__compat_primitive_types::H160> for alloy::primitives::Address { - fn compat(self) -> __compat_primitive_types::H160 { - let alloy::primitives::Address(alloy::primitives::FixedBytes(arr)) = self; - __compat_primitive_types::H160(arr) - } -} - -impl Compat<__compat_primitive_types::H256> for alloy::primitives::B256 { - fn compat(self) -> __compat_primitive_types::H256 { - let alloy::primitives::FixedBytes(arr) = self; - __compat_primitive_types::H256(arr) - } -} - -impl Compat<[__compat_primitive_types::U256; 8]> for alloy::primitives::Bloom { - fn compat(self) -> [__compat_primitive_types::U256; 8] { - let alloy::primitives::Bloom(alloy::primitives::FixedBytes(src)) = self; - // have u8 * 256 - // want U256 * 8 - // (no unsafe, no unstable) - let mut chunks = src.chunks_exact(32); - let dst = core::array::from_fn(|_ix| { - // This is a bit spicy because we're going from an uninterpeted array of bytes - // to wide integers, but we trust this `From` impl to do the right thing - __compat_primitive_types::U256::from( - <[u8; 32]>::try_from(chunks.next().unwrap()).unwrap(), - ) - }); - assert_eq!(chunks.len(), 0); - dst - } -} - -#[test] -fn bloom() { - let _did_not_panic = alloy::primitives::Bloom::ZERO.compat(); + checkpoint_state_trie_root: checkpoint_state_trie_root.compat(), + }; + Ok(other_data) } diff --git a/rpc/src/main.rs b/rpc/src/main.rs index 87aa7d0f..45575df8 100644 --- a/rpc/src/main.rs +++ b/rpc/src/main.rs @@ -1,13 +1,9 @@ -use std::{io, sync::Arc}; +use std::io; -<<<<<<< HEAD -use alloy::{providers::RootProvider, rpc::types::eth::BlockId}; +use alloy::rpc::types::eth::BlockId; use clap::{Parser, ValueHint}; use common::block_interval::BlockInterval; -======= -use clap::{Parser, ValueEnum, ValueHint}; -use rpc::retry::build_http_retry_provider; ->>>>>>> 7cd3d62 (Introduce native tracer support) +use rpc::{retry::build_http_retry_provider, RpcType}; use tracing_subscriber::{prelude::*, EnvFilter}; use url::Url; @@ -15,7 +11,6 @@ use url::Url; pub enum Cli { 
/// Fetch and generate prover input from the RPC endpoint Fetch { -<<<<<<< HEAD // Starting block of interval to fetch #[arg(short, long)] start_block: u64, @@ -25,71 +20,49 @@ pub enum Cli { /// The RPC URL. #[arg(short = 'u', long, value_hint = ValueHint::Url)] rpc_url: Url, - /// The checkpoint block number. If not provided, - /// block before the `start_block` is the checkpoint - #[arg(short, long)] - checkpoint_block_number: Option, -======= - /// The RPC URL - #[arg(short = 'u', long, value_hint = ValueHint::Url)] - rpc_url: Url, /// The RPC Tracer Type #[arg(short = 't', long, default_value = "jerigon")] rpc_type: RpcType, - /// The block number + /// The checkpoint block number. If not provided, + /// block before the `start_block` is the checkpoint #[arg(short, long)] - block_number: u64, - /// The checkpoint block number - #[arg(short, long, default_value_t = 0)] - checkpoint_block_number: u64, + checkpoint_block_number: Option, /// Backoff in milliseconds for request retries #[arg(long, default_value_t = 0)] backoff: u64, /// The maximum number of retries #[arg(long, default_value_t = 0)] max_retries: u32, ->>>>>>> 7cd3d62 (Introduce native tracer support) }, } -/// The RPC type. -#[derive(ValueEnum, Clone)] -pub enum RpcType { - Jerigon, - Native, -} - impl Cli { /// Execute the cli command. pub async fn execute(self) -> anyhow::Result<()> { match self { Self::Fetch { + start_block, + end_block, rpc_url, rpc_type, - block_number, checkpoint_block_number, backoff, max_retries, } => { - let prover_input = match rpc_type { - RpcType::Jerigon => { - rpc::jerigon::prover_input( - build_http_retry_provider(rpc_url, backoff, max_retries), - block_number.into(), - checkpoint_block_number.into(), - ) - .await? - } - RpcType::Native => { - rpc::native::prover_input( - Arc::new(build_http_retry_provider(rpc_url, backoff, max_retries)), - block_number.into(), - checkpoint_block_number.into(), - ) - .await? 
-                }
-            };
-            serde_json::to_writer_pretty(io::stdout(), &prover_input)?;
+                let checkpoint_block_number =
+                    checkpoint_block_number.unwrap_or((start_block - 1).into());
+                let block_interval = BlockInterval::Range(start_block..end_block + 1);
+
+                // Retrieve prover input from the Erigon node
+                let prover_input = rpc::prover_input(
+                    build_http_retry_provider(rpc_url, backoff, max_retries),
+                    block_interval,
+                    checkpoint_block_number,
+                    rpc_type,
+                )
+                .await?;
+
+                serde_json::to_writer_pretty(io::stdout(), &prover_input.blocks)?;
             }
         }
         Ok(())
@@ -107,29 +80,5 @@ async fn main() -> anyhow::Result<()> {
         )
         .init();
 
-<<<<<<< HEAD
-    let Args::Fetch {
-        start_block,
-        end_block,
-        rpc_url,
-        checkpoint_block_number,
-    } = Args::parse();
-
-    let checkpoint_block_number = checkpoint_block_number.unwrap_or((start_block - 1).into());
-    let block_interval = BlockInterval::Range(start_block..end_block + 1);
-
-    // Retrieve prover input from the Erigon node
-    let prover_input = rpc::prover_input(
-        RootProvider::new_http(rpc_url),
-        block_interval,
-        checkpoint_block_number,
-    )
-    .await?;
-
-    serde_json::to_writer_pretty(io::stdout(), &prover_input.blocks)?;
-
-=======
-    Cli::parse().execute().await?;
->>>>>>> 7cd3d62 (Introduce native tracer support)
-    Ok(())
+    Cli::parse().execute().await
 }
diff --git a/rpc/src/native/mod.rs b/rpc/src/native/mod.rs
index 2dbd2107..75de3d5d 100644
--- a/rpc/src/native/mod.rs
+++ b/rpc/src/native/mod.rs
@@ -1,13 +1,14 @@
-use std::{collections::HashMap, sync::Arc};
+use std::collections::HashMap;
 
 use alloy::{
+    primitives::B256,
     providers::Provider,
     rpc::types::eth::{BlockId, BlockTransactionsKind},
     transports::Transport,
 };
 use anyhow::Context as _;
 use futures::try_join;
-use prover::ProverInput;
+use prover::BlockProverInput;
 use trace_decoder::trace_protocol::BlockTrace;
 
 mod state;
@@ -16,21 +17,21 @@ mod txn;
 type CodeDb = HashMap<__compat_primitive_types::H256, Vec<u8>>;
 
 /// Fetches the prover input for the given BlockId.
-pub async fn prover_input<ProviderT, TransportT>(
-    provider: Arc<ProviderT>,
+pub async fn block_prover_input<ProviderT, TransportT>(
+    provider: &ProviderT,
     block_number: BlockId,
-    checkpoint_block_number: BlockId,
-) -> anyhow::Result<ProverInput>
+    checkpoint_state_trie_root: B256,
+) -> anyhow::Result<BlockProverInput>
 where
     ProviderT: Provider<TransportT>,
     TransportT: Transport + Clone,
 {
     let (block_trace, other_data) = try_join!(
         process_block_trace(&provider, block_number),
-        crate::fetch_other_block_data(&provider, block_number, checkpoint_block_number,)
+        crate::fetch_other_block_data(&provider, block_number, checkpoint_state_trie_root,)
     )?;
 
-    Ok(ProverInput {
+    Ok(BlockProverInput {
         block_trace,
         other_data,
     })