Create codec mod #22

Merged: 11 commits, Jan 3, 2025
Changes from 5 commits

1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
target/
data/
optimism/
tags
3 changes: 3 additions & 0 deletions bin/client/justfile
@@ -119,6 +119,9 @@ run-client-native block_number l1_rpc l1_beacon_rpc l2_rpc rollup_node_rpc rollu
# Move to the workspace root
cd $(git rev-parse --show-toplevel)

rm -rf ./data
mkdir ./data

echo "Running host program with native client program..."
cargo r --bin hokulea-host -- \
--l1-head $L1_HEAD \
34 changes: 12 additions & 22 deletions bin/host/src/eigenda_fetcher/mod.rs
@@ -8,6 +8,7 @@ use alloy_rlp::Decodable;
use anyhow::{anyhow, Result};
use core::panic;
use hokulea_eigenda::BlobInfo;
use hokulea_eigenda::EigenDABlobData;
use hokulea_eigenda::BLOB_ENCODING_VERSION_0;
use hokulea_proof::hint::{ExtendedHint, ExtendedHintType};
use kona_host::{blobs::OnlineBlobProvider, fetcher::Fetcher, kv::KeyValueStore};
@@ -141,45 +142,34 @@ where

if hint_type == ExtendedHintType::EigenDACommitment {
let cert = hint_data;
info!(target: "fetcher_with_eigenda_support", "Fetching eigenda commitment cert: {:?}", cert);
trace!(target: "fetcher_with_eigenda_support", "Fetching eigenda commitment cert: {:?}", cert);
// Fetch the blob sidecar from the blob provider.
let rollup_data = self
.eigenda_blob_provider
.fetch_eigenda_blob(&cert)
.await
.map_err(|e| anyhow!("Failed to fetch eigenda blob: {e}"))?;

// Acquire a lock on the key-value store and set the preimages.
let mut kv_write_lock = self.kv_store.write().await;

// skip the first 4 bytes: the 0x01010000 prefix at the beginning is metadata
let rollup_data_len = rollup_data.len() as u32;
let item_slice = cert.as_ref();
let cert_blob_info = BlobInfo::decode(&mut &item_slice[4..]).unwrap();

// Todo ensure data_length is always power of 2. Proxy made mistake
// TODO: ensure data_length is always a power of 2. The proxy made a mistake:
// it should return a cert whose data_length is measured in symbols (i.e. units of 32 bytes),
// but it currently returns a number of bytes. We need to fix the proxy and this code later.
let data_size = cert_blob_info.blob_header.data_length as u64;
let blob_length: u64 = data_size / 32;

// encode to become raw blob
let codec_rollup_data = helpers::convert_by_padding_empty_byte(rollup_data.as_ref());
let codec_rollup_data_len = codec_rollup_data.len() as u32;

let mut raw_blob = vec![0u8; data_size as usize];
let eigenda_blob = EigenDABlobData::encode(rollup_data.as_ref());

if 32 + codec_rollup_data_len as u64 > data_size {
return Err(anyhow!("data size is less than reconstructed data codec_rollup_data_len {} data_size {}", codec_rollup_data_len, data_size));
if eigenda_blob.blob.len() != data_size as usize {
return Err(
anyhow!("data size from cert does not equal to reconstructed data codec_rollup_data_len {} data_size {}",
eigenda_blob.blob.len(), data_size));
}

// blob header
// https://github.com/Layr-Labs/eigenda/blob/f8b0d31d65b29e60172507074922668f4ca89420/api/clients/codecs/default_blob_codec.go#L25
// raw blob the immediate data just before taking IFFT
raw_blob[1] = BLOB_ENCODING_VERSION_0;
raw_blob[2..6].copy_from_slice(&rollup_data_len.to_be_bytes());

// encode length as uint32
raw_blob[32..(32 + codec_rollup_data_len as usize)].copy_from_slice(&codec_rollup_data);

// Write all the field elements to the key-value store.
// The preimage oracle key for each field element is the keccak256 hash of
// `abi.encodePacked(cert.KZGCommitment, uint256(i))`
@@ -189,7 +179,7 @@ where
blob_key[..32].copy_from_slice(cert_blob_info.blob_header.commitment.x.as_ref());
blob_key[32..64].copy_from_slice(cert_blob_info.blob_header.commitment.y.as_ref());

info!("cert_blob_info blob_length {:?}", blob_length);
trace!("cert_blob_info blob_length {:?}", blob_length);

for i in 0..blob_length {
blob_key[88..].copy_from_slice(i.to_be_bytes().as_ref());
@@ -201,7 +191,7 @@
)?;
kv_write_lock.set(
PreimageKey::new(*blob_key_hash, PreimageKeyType::GlobalGeneric).into(),
raw_blob[(i as usize) << 5..(i as usize + 1) << 5].to_vec(),
eigenda_blob.blob[(i as usize) << 5..(i as usize + 1) << 5].to_vec(),
)?;
}

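Note on the key derivation above: the following is a minimal sketch, not the fetcher code itself, of how the per-field-element preimage keys appear to be assembled. The hashing line sits in a collapsed part of the hunk, so this follows the comment in the diff: a 96-byte buffer holding commitment.x, commitment.y, and the field-element index encoded big-endian in the trailing bytes, hashed with keccak256. With the proxy's current behavior, a cert whose data_length is 128 bytes gives blob_length = 128 / 32 = 4, i.e. four such keys.

use alloy_primitives::keccak256;

// Illustrative only: derive the preimage key for the i-th 32-byte field element
// of an EigenDA blob, mirroring the blob_key layout used in the fetcher
// (bytes 0..32 = commitment.x, 32..64 = commitment.y, 88..96 = i as a big-endian u64).
fn field_element_key(commitment_x: &[u8; 32], commitment_y: &[u8; 32], i: u64) -> [u8; 32] {
    let mut blob_key = [0u8; 96];
    blob_key[..32].copy_from_slice(commitment_x);
    blob_key[32..64].copy_from_slice(commitment_y);
    blob_key[88..].copy_from_slice(&i.to_be_bytes());
    // keccak256 returns a 32-byte hash (B256); deref copies it out as [u8; 32]
    *keccak256(blob_key)
}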
4 changes: 2 additions & 2 deletions crates/eigenda/src/eigenda_blobs.rs
@@ -48,8 +48,8 @@ where
// Otherwise, ignore blob and recurse next.
match next_data.decode() {
Ok(d) => Ok(d),
Err(_) => {
warn!(target: "blob-source", "Failed to decode blob data, skipping");
Err(e) => {
warn!(target: "blob-source", "Failed to decode blob data, skipping {}", e);
panic!()
}
}
95 changes: 83 additions & 12 deletions crates/eigenda/src/eigenda_data.rs
@@ -1,33 +1,34 @@
use crate::BLOB_ENCODING_VERSION_0;
use alloc::vec;
use alloy_primitives::Bytes;
use bytes::buf::Buf;

use kona_derive::errors::BlobDecodingError;

use rust_kzg_bn254::helpers;

#[derive(Default, Clone, Debug)]
/// Represents the data structure for EigenDA Blob.
pub struct EigenDABlobData {
/// The calldata
pub(crate) blob: Bytes,
pub blob: Bytes,
}

impl EigenDABlobData {
/// Decodes the blob into raw byte data.
/// Decodes the blob into raw byte data. This is the inverse of the encode function below.
/// Returns a [BlobDecodingError] if the blob is invalid.
pub(crate) fn decode(&self) -> Result<Bytes, BlobDecodingError> {
if self.blob.len() < 32 {
pub fn decode(&self) -> Result<Bytes, BlobDecodingError> {
let blob = &self.blob;
if blob.len() < 32 {
return Err(BlobDecodingError::InvalidLength);
}

info!(target: "eigenda-datasource", "padded_eigenda_blob {:?}", self.blob);
info!(target: "eigenda-datasource", "padded_eigenda_blob {:?}", blob);

// see https://github.com/Layr-Labs/eigenda/blob/f8b0d31d65b29e60172507074922668f4ca89420/api/clients/codecs/default_blob_codec.go#L44
let content_size = self.blob.slice(2..6).get_u32();
let content_size = blob.slice(2..6).get_u32();
info!(target: "eigenda-datasource", "content_size {:?}", content_size);

// the first 32 Bytes are reserved as the header field element
let codec_data = self.blob.slice(32..);
let codec_data = blob.slice(32..);

// rust kzg bn254 impl already
let blob_content =
@@ -37,11 +38,46 @@ impl EigenDABlobData {
if blob_content.len() < content_size as usize {
return Err(BlobDecodingError::InvalidLength);
}
// might insert a FFT here,

// take data
Ok(blob_content.slice(..content_size as usize))
}

/// Encodes an opaque rollup data array into an EigenDABlobData.
/// The resulting EigenDABlobData contains a 32-byte header followed by a transformation of the input data.
/// Byte 0 of the header is always 0, to comply with the bn254 field element constraint.
/// Byte 1 of the header is the proxy encoding version.
/// Bytes 2..6 of the header store the length of the input rollup data as a big-endian u32.
/// The payload is built by prepending an empty byte to every 31-byte chunk of the rollup data.
/// This matches exactly the eigenda proxy implementation, whose logic is in
/// <https://github.com/Layr-Labs/eigenda/blob/master/encoding/utils/codec/codec.go#L12>
///
/// The length of (header + payload) produced by the encode function is always a power of 2;
/// the eigenda proxy does not impose such a constraint.
/// TODO: it is possible to remove the power-of-2 constraint so that the client does not
/// rely on the data_length from the eigenda cert. That might save some communication rounds
/// between host and client.
pub fn encode(rollup_data: &[u8]) -> Self {
let rollup_data_size = rollup_data.len() as u32;

// pad the rollup data: a zero byte is prepended to every 31-byte chunk to form the blob payload
let codec_rollup_data = helpers::convert_by_padding_empty_byte(rollup_data.as_ref());

let blob_payload_size = codec_rollup_data.len();

let blob_size = blob_payload_size + 32;
let blob_size = blob_size.next_power_of_two();

let mut raw_blob = vec![0u8; blob_size as usize];

raw_blob[1] = BLOB_ENCODING_VERSION_0;
raw_blob[2..6].copy_from_slice(&rollup_data_size.to_be_bytes());

// copy the padded payload right after the 32-byte header
raw_blob[32..(32 + blob_payload_size as usize)].copy_from_slice(&codec_rollup_data);

Self {
blob: Bytes::from(raw_blob),
}
}
}
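
To make the layout described in the encode doc comment concrete, here is a standalone sketch of the round trip. It is illustrative only: the 31-byte padding is re-implemented locally rather than calling rust_kzg_bn254::helpers, and BLOB_ENCODING_VERSION_0 is assumed to be 0. For a 4-byte payload the padded body is 5 bytes, so header (32) + body (5) = 37 rounds up to a 64-byte blob.

// Sketch of the encode layout under the assumptions stated above.
fn sketch_encode(rollup_data: &[u8]) -> Vec<u8> {
    const VERSION: u8 = 0; // assumed value of BLOB_ENCODING_VERSION_0

    // Prepend a zero byte to every 31-byte chunk so each 32-byte field element
    // stays below the bn254 field modulus.
    let mut payload = Vec::new();
    for chunk in rollup_data.chunks(31) {
        payload.push(0u8);
        payload.extend_from_slice(chunk);
    }

    let blob_size = (32 + payload.len()).next_power_of_two();
    let mut blob = vec![0u8; blob_size];
    blob[1] = VERSION; // header byte 1: encoding version
    blob[2..6].copy_from_slice(&(rollup_data.len() as u32).to_be_bytes()); // bytes 2..6: length, big-endian
    blob[32..32 + payload.len()].copy_from_slice(&payload); // body starts after the 32-byte header
    blob
}

// Inverse direction, mirroring decode(): read the content size from header bytes 2..6,
// drop the header, strip the leading padding byte from every 32-byte chunk, and
// truncate to the original length.
fn sketch_decode(blob: &[u8]) -> Vec<u8> {
    let content_size = u32::from_be_bytes(blob[2..6].try_into().unwrap()) as usize;
    let mut out = Vec::new();
    for chunk in blob[32..].chunks(32) {
        out.extend_from_slice(&chunk[1..]);
    }
    out.truncate(content_size);
    out
}

// sketch_decode(&sketch_encode(&[1, 2, 3, 4])) yields vec![1, 2, 3, 4],
// and the encoded blob is 64 bytes long.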

#[cfg(test)]
@@ -90,4 +126,39 @@ mod tests {
assert!(result.is_err());
assert_eq!(result.unwrap_err(), BlobDecodingError::InvalidLength);
}

#[test]
fn test_encode_and_decode_success() {
let rollup_data = vec![1, 2, 3, 4];
let eigenda_blob = EigenDABlobData::encode(&rollup_data);
let data_len = eigenda_blob.blob.len();
assert!(data_len.is_power_of_two());

let result = eigenda_blob.decode();
assert!(result.is_ok());
assert_eq!(result.unwrap(), Bytes::from(rollup_data));
}

#[test]
fn test_encode_and_decode_success_empty() {
let rollup_data = vec![];
let eigenda_blob = EigenDABlobData::encode(&rollup_data);
let data_len = eigenda_blob.blob.len();
// 32 is eigenda blob header size
assert!(data_len == 32);

let result = eigenda_blob.decode();
assert!(result.is_ok());
assert_eq!(result.unwrap(), Bytes::from(rollup_data));
}

#[test]
fn test_encode_and_decode_error_invalid_length() {
let rollup_data = vec![1, 2, 3, 4];
let mut eigenda_blob = EigenDABlobData::encode(&rollup_data);
eigenda_blob.blob.truncate(33);
let result = eigenda_blob.decode();
assert!(result.is_err());
assert_eq!(result.unwrap_err(), BlobDecodingError::InvalidLength);
}
}