diff --git a/crates/derive/src/stages/batch_queue.rs b/crates/derive/src/stages/batch_queue.rs
index 4000af67d..a1318b0ca 100644
--- a/crates/derive/src/stages/batch_queue.rs
+++ b/crates/derive/src/stages/batch_queue.rs
@@ -143,9 +143,10 @@ where
         // any undecided ones.
         let mut remaining = Vec::new();
         for i in 0..self.batches.len() {
-            let batch = &self.batches[i];
+            let batch = &mut self.batches[i];
             let validity =
                 batch.check_batch(&self.cfg, &self.l1_blocks, parent, &mut self.fetcher).await;
+
             match validity {
                 BatchValidity::Future => {
                     remaining.push(batch.clone());
@@ -234,7 +235,8 @@ where
             panic!("Cannot add batch without an origin");
         }
         let origin = self.origin.ok_or_else(|| anyhow!("cannot add batch with missing origin"))?;
-        let data = BatchWithInclusionBlock { inclusion_block: origin, batch };
+        let mut data =
+            BatchWithInclusionBlock { inclusion_block: origin, batch, ..Default::default() };
         // If we drop the batch, validation logs the drop reason with WARN level.
         if data.check_batch(&self.cfg, &self.l1_blocks, parent, &mut self.fetcher).await.is_drop() {
             return Ok(());
@@ -669,10 +671,11 @@ mod tests {
         };
         let res = bq.next_batch(parent).await.unwrap_err();
         let logs = trace_store.get_by_level(Level::INFO);
-        assert_eq!(logs.len(), 2);
+        assert_eq!(logs.len(), 3);
         let str = alloc::format!("Advancing batch queue origin: {:?}", origin);
         assert!(logs[0].contains(&str));
-        assert!(logs[1].contains("Deriving next batch for epoch: 16988980031808077784"));
+        assert!(logs[1].contains("Checking batch validity with inclusion block"));
+        assert!(logs[2].contains("Deriving next batch for epoch: 16988980031808077784"));
         let warns = trace_store.get_by_level(Level::WARN);
         assert_eq!(warns.len(), 1);
         assert!(warns[0].contains("span batch has no new blocks after safe head"));
diff --git a/crates/derive/src/types/batch/mod.rs b/crates/derive/src/types/batch/mod.rs
index 68243d756..cc780d92f 100644
--- a/crates/derive/src/types/batch/mod.rs
+++ b/crates/derive/src/types/batch/mod.rs
@@ -26,12 +26,14 @@ mod single_batch;
 pub use single_batch::SingleBatch;
 
 /// A batch with its inclusion block.
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
 pub struct BatchWithInclusionBlock {
     /// The inclusion block
     pub inclusion_block: BlockInfo,
     /// The batch
     pub batch: Batch,
+    /// Whether or not the batch has been validated yet.
+    pub validity: Option<BatchValidity>,
 }
 
 impl BatchWithInclusionBlock {
@@ -41,13 +43,18 @@ impl BatchWithInclusionBlock {
     /// In case of only a single L1 block, the decision whether a batch is valid may have to stay
     /// undecided.
     pub async fn check_batch(
-        &self,
+        &mut self,
         cfg: &RollupConfig,
         l1_blocks: &[BlockInfo],
         l2_safe_head: L2BlockInfo,
         fetcher: &mut BF,
     ) -> BatchValidity {
-        match &self.batch {
+        if let Some(BatchValidity::Accept) = self.validity {
+            return BatchValidity::Accept;
+        }
+
+        tracing::info!(target: "batch", "Checking batch validity with inclusion block (hash: {})", self.inclusion_block.hash);
+        let result = match &self.batch {
             Batch::Single(single_batch) => {
                 single_batch.check_batch(cfg, l1_blocks, l2_safe_head, &self.inclusion_block)
             }
@@ -56,7 +63,9 @@
                     .check_batch(cfg, l1_blocks, l2_safe_head, &self.inclusion_block, fetcher)
                     .await
             }
-        }
+        };
+        self.validity = Some(result);
+        result
     }
 }
 
@@ -69,6 +78,12 @@ pub enum Batch {
     Span(SpanBatch),
 }
 
+impl Default for Batch {
+    fn default() -> Self {
+        Self::Single(Default::default())
+    }
+}
+
 impl Batch {
     /// Returns the timestamp for the batch.
     pub fn timestamp(&self) -> u64 {
diff --git a/crates/derive/src/types/batch/validity.rs b/crates/derive/src/types/batch/validity.rs
index 0e074983a..435be6372 100644
--- a/crates/derive/src/types/batch/validity.rs
+++ b/crates/derive/src/types/batch/validity.rs
@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
 
 /// Batch Validity
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub enum BatchValidity {
     /// The batch is invalid now and in the future, unless we reorg
     Drop,