Create codec mod (#22)
* move codec logic into one file

* fix lint

* remove codec and add comments, make the blob field of EigenDABlobData public

* fix lint

* fix doc

* fix comments, fix proxy bug

* rm unneeded deps

* rm unneeded test

* fix power of 2 bug

* fix lint

* add todo
bxue-l2 authored Jan 3, 2025
1 parent 6da009d commit 9cce574
Showing 11 changed files with 101 additions and 79 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
target/
data/
optimism/
tags
1 change: 0 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default.

3 changes: 3 additions & 0 deletions bin/client/justfile
@@ -119,6 +119,9 @@ run-client-native block_number l1_rpc l1_beacon_rpc l2_rpc rollup_node_rpc rollu
# Move to the workspace root
cd $(git rev-parse --show-toplevel)

rm -rf ./data
mkdir ./data

echo "Running host program with native client program..."
cargo r --bin hokulea-host -- \
--l1-head $L1_HEAD \
3 changes: 0 additions & 3 deletions bin/host/Cargo.toml
@@ -26,8 +26,5 @@ async-trait.workspace = true
tokio = { workspace = true, features = ["full"] }
clap = { workspace = true, features = ["derive", "env"] }

# Cryptography
rust-kzg-bn254.workspace = true

[dev-dependencies]
proptest.workspace = true
41 changes: 14 additions & 27 deletions bin/host/src/eigenda_fetcher/mod.rs
@@ -8,14 +8,14 @@ use alloy_rlp::Decodable;
use anyhow::{anyhow, Result};
use core::panic;
use hokulea_eigenda::BlobInfo;
use hokulea_eigenda::BLOB_ENCODING_VERSION_0;
use hokulea_eigenda::EigenDABlobData;
use hokulea_eigenda::BYTES_PER_FIELD_ELEMENT;
use hokulea_proof::hint::{ExtendedHint, ExtendedHintType};
use kona_host::{blobs::OnlineBlobProvider, fetcher::Fetcher, kv::KeyValueStore};
use kona_preimage::{PreimageKey, PreimageKeyType};
use rust_kzg_bn254::helpers;
use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::{error, info, trace, warn};
use tracing::{error, trace, warn};

/// The [FetcherWithEigenDASupport] struct wraps and extends kona's [Fetcher] struct with the ability
/// to fetch preimages from EigenDA.
@@ -141,45 +141,32 @@ where

if hint_type == ExtendedHintType::EigenDACommitment {
let cert = hint_data;
info!(target: "fetcher_with_eigenda_support", "Fetching eigenda commitment cert: {:?}", cert);
trace!(target: "fetcher_with_eigenda_support", "Fetching eigenda commitment cert: {:?}", cert);
// Fetch the blob sidecar from the blob provider.
let rollup_data = self
.eigenda_blob_provider
.fetch_eigenda_blob(&cert)
.await
.map_err(|e| anyhow!("Failed to fetch eigenda blob: {e}"))?;

// Acquire a lock on the key-value store and set the preimages.
let mut kv_write_lock = self.kv_store.write().await;

// decode from the fourth byte onward, since the leading 0x01010000 is metadata
let rollup_data_len = rollup_data.len() as u32;
let item_slice = cert.as_ref();
let cert_blob_info = BlobInfo::decode(&mut &item_slice[4..]).unwrap();

// Todo ensure data_length is always power of 2. Proxy made mistake
let data_size = cert_blob_info.blob_header.data_length as u64;
let blob_length: u64 = data_size / 32;

// encode to become raw blob
let codec_rollup_data = helpers::convert_by_padding_empty_byte(rollup_data.as_ref());
let codec_rollup_data_len = codec_rollup_data.len() as u32;
// The proxy should return a cert whose data_length is measured in symbols (i.e. 32-byte field elements)
let blob_length = cert_blob_info.blob_header.data_length as u64;
warn!("blob length: {:?}", blob_length);

let mut raw_blob = vec![0u8; data_size as usize];
let eigenda_blob = EigenDABlobData::encode(rollup_data.as_ref());

if 32 + codec_rollup_data_len as u64 > data_size {
return Err(anyhow!("data size is less than reconstructed data codec_rollup_data_len {} data_size {}", codec_rollup_data_len, data_size));
if eigenda_blob.blob.len() != blob_length as usize * BYTES_PER_FIELD_ELEMENT {
return Err(
anyhow!("data size from cert does not equal to reconstructed data codec_rollup_data_len {} blob size {}",
eigenda_blob.blob.len(), blob_length as usize * BYTES_PER_FIELD_ELEMENT));
}

// blob header
// https://github.com/Layr-Labs/eigenda/blob/f8b0d31d65b29e60172507074922668f4ca89420/api/clients/codecs/default_blob_codec.go#L25
// raw blob the immediate data just before taking IFFT
raw_blob[1] = BLOB_ENCODING_VERSION_0;
raw_blob[2..6].copy_from_slice(&rollup_data_len.to_be_bytes());

// encode length as uint32
raw_blob[32..(32 + codec_rollup_data_len as usize)].copy_from_slice(&codec_rollup_data);

// Write all the field elements to the key-value store.
// The preimage oracle key for each field element is the keccak256 hash of
// `abi.encodePacked(cert.KZGCommitment, uint256(i))`
@@ -189,7 +176,7 @@ where
blob_key[..32].copy_from_slice(cert_blob_info.blob_header.commitment.x.as_ref());
blob_key[32..64].copy_from_slice(cert_blob_info.blob_header.commitment.y.as_ref());

info!("cert_blob_info blob_length {:?}", blob_length);
trace!("cert_blob_info blob_length {:?}", blob_length);

for i in 0..blob_length {
blob_key[88..].copy_from_slice(i.to_be_bytes().as_ref());
@@ -201,7 +188,7 @@
)?;
kv_write_lock.set(
PreimageKey::new(*blob_key_hash, PreimageKeyType::GlobalGeneric).into(),
raw_blob[(i as usize) << 5..(i as usize + 1) << 5].to_vec(),
eigenda_blob.blob[(i as usize) << 5..(i as usize + 1) << 5].to_vec(),
)?;
}

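For context, the preimage key written in the hunk above is derived from the certificate's KZG commitment and the field-element index. The sketch below is illustrative only — the helper name and standalone form are not part of this commit — and it assumes the two commitment coordinates are available as 32-byte arrays:

```rust
// Minimal sketch of the blob-key derivation used when writing field elements:
// a 96-byte buffer holds the G1 commitment (x || y) followed by the index,
// and its keccak256 hash becomes the preimage key.
use alloy_primitives::{keccak256, B256};

fn field_element_key(commitment_x: &[u8; 32], commitment_y: &[u8; 32], index: u64) -> B256 {
    let mut blob_key = [0u8; 96];
    blob_key[..32].copy_from_slice(commitment_x);
    blob_key[32..64].copy_from_slice(commitment_y);
    // bytes 64..88 stay zero; the big-endian index fills the final 8 bytes,
    // matching `abi.encodePacked(commitment, uint256(i))`
    blob_key[88..].copy_from_slice(&index.to_be_bytes());
    keccak256(blob_key)
}
```

Each 32-byte slice of the encoded blob is then stored under a `PreimageKeyType::GlobalGeneric` key built from this hash, as the hunk above shows.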
2 changes: 2 additions & 0 deletions crates/eigenda/src/constant.rs
@@ -3,3 +3,5 @@
pub const BLOB_ENCODING_VERSION_0: u8 = 0x0;
/// TODO: make it part of rollup config
pub const STALE_GAP: u64 = 100;
/// Number of bytes per field element on bn254
pub const BYTES_PER_FIELD_ELEMENT: usize = 32;
4 changes: 2 additions & 2 deletions crates/eigenda/src/eigenda_blobs.rs
@@ -48,8 +48,8 @@
// Otherwise, ignore blob and recurse next.
match next_data.decode() {
Ok(d) => Ok(d),
Err(_) => {
warn!(target: "blob-source", "Failed to decode blob data, skipping");
Err(e) => {
warn!(target: "blob-source", "Failed to decode blob data, skipping {}", e);
panic!()
}
}
110 changes: 71 additions & 39 deletions crates/eigenda/src/eigenda_data.rs
@@ -1,33 +1,34 @@
use crate::{BLOB_ENCODING_VERSION_0, BYTES_PER_FIELD_ELEMENT};
use alloc::vec;
use alloy_primitives::Bytes;
use bytes::buf::Buf;

use kona_derive::errors::BlobDecodingError;

use rust_kzg_bn254::helpers;

#[derive(Default, Clone, Debug)]
/// Represents the data structure for EigenDA Blob.
pub struct EigenDABlobData {
/// The calldata
pub(crate) blob: Bytes,
pub blob: Bytes,
}

impl EigenDABlobData {
/// Decodes the blob into raw byte data.
/// Decodes the blob into raw byte data. Reverse of the encode function below
/// Returns a [BlobDecodingError] if the blob is invalid.
pub(crate) fn decode(&self) -> Result<Bytes, BlobDecodingError> {
if self.blob.len() < 32 {
pub fn decode(&self) -> Result<Bytes, BlobDecodingError> {
let blob = &self.blob;
if blob.len() < 32 {
return Err(BlobDecodingError::InvalidLength);
}

info!(target: "eigenda-datasource", "padded_eigenda_blob {:?}", self.blob);
info!(target: "eigenda-datasource", "padded_eigenda_blob {:?}", blob);

// see https://github.com/Layr-Labs/eigenda/blob/f8b0d31d65b29e60172507074922668f4ca89420/api/clients/codecs/default_blob_codec.go#L44
let content_size = self.blob.slice(2..6).get_u32();
let content_size = blob.slice(2..6).get_u32();
info!(target: "eigenda-datasource", "content_size {:?}", content_size);

// the first 32 Bytes are reserved as the header field element
let codec_data = self.blob.slice(32..);
let codec_data = blob.slice(32..);

// rust kzg bn254 impl already
let blob_content =
@@ -37,56 +38,87 @@ impl EigenDABlobData {
if blob_content.len() < content_size as usize {
return Err(BlobDecodingError::InvalidLength);
}
// might insert a FFT here,

// take data
Ok(blob_content.slice(..content_size as usize))
}

/// The encode function converts an opaque rollup data array into an EigenDABlobData.
/// EigenDABlobData contains a 32-byte header followed by a transformation of the input data.
/// Byte 0 of the header is always 0, to satisfy the bn254 field element constraint.
/// Byte 1 of the header is the proxy encoding version.
/// Bytes 2..6 of the header store the length of the input rollup data in big endian.
/// The payload is prepared by inserting an empty byte for every 31 bytes of rollup data.
/// This matches exactly the eigenda proxy implementation, whose logic is in
/// <https://github.com/Layr-Labs/eigenda/blob/master/encoding/utils/codec/codec.go#L12>
///
/// The length of (header + payload) produced by the encode function is always a multiple of 32.
/// The eigenda proxy does not impose such a constraint.
pub fn encode(rollup_data: &[u8]) -> Self {
let rollup_data_size = rollup_data.len() as u32;

// encode to become raw blob
let codec_rollup_data = helpers::convert_by_padding_empty_byte(rollup_data);

let blob_payload_size = codec_rollup_data.len();

// the first field element contains the header
let blob_size = blob_payload_size + BYTES_PER_FIELD_ELEMENT;

// round up to the closest multiple of 32
let blob_size = blob_size.div_ceil(BYTES_PER_FIELD_ELEMENT) * BYTES_PER_FIELD_ELEMENT;

let mut raw_blob = vec![0u8; blob_size as usize];

raw_blob[1] = BLOB_ENCODING_VERSION_0;
raw_blob[2..6].copy_from_slice(&rollup_data_size.to_be_bytes());

// encode length as uint32
raw_blob[BYTES_PER_FIELD_ELEMENT..(BYTES_PER_FIELD_ELEMENT + blob_payload_size as usize)]
.copy_from_slice(&codec_rollup_data);

Self {
blob: Bytes::from(raw_blob),
}
}
}

#[cfg(test)]
mod tests {
use crate::BLOB_ENCODING_VERSION_0;

use super::*;
use alloc::vec;
use alloy_primitives::Bytes;
use kona_derive::errors::BlobDecodingError;

fn generate_blob_data(content: &[u8]) -> EigenDABlobData {
let mut blob = vec![0; 32];
blob[1] = BLOB_ENCODING_VERSION_0;
blob[2..6].copy_from_slice(&(content.len() as u32).to_be_bytes());
blob.extend_from_slice(&helpers::convert_by_padding_empty_byte(content));
EigenDABlobData {
blob: Bytes::from(blob),
}
}

#[test]
fn test_decode_success() {
let content = vec![1, 2, 3, 4];
let data = generate_blob_data(&content);
let result = data.decode();
fn test_encode_and_decode_success() {
let rollup_data = vec![1, 2, 3, 4];
let eigenda_blob = EigenDABlobData::encode(&rollup_data);
let data_len = eigenda_blob.blob.len();
assert!(data_len % BYTES_PER_FIELD_ELEMENT == 0);

let result = eigenda_blob.decode();
assert!(result.is_ok());
assert_eq!(result.unwrap(), Bytes::from(content));
assert_eq!(result.unwrap(), Bytes::from(rollup_data));
}

#[test]
fn test_decode_success_empty() {
let content = vec![];
let data = generate_blob_data(&content);
let result = data.decode();
fn test_encode_and_decode_success_empty() {
let rollup_data = vec![];
let eigenda_blob = EigenDABlobData::encode(&rollup_data);
let data_len = eigenda_blob.blob.len();
// 32 is eigenda blob header size
assert!(data_len == 32);

let result = eigenda_blob.decode();
assert!(result.is_ok());
assert_eq!(result.unwrap(), Bytes::from(content));
assert_eq!(result.unwrap(), Bytes::from(rollup_data));
}

#[test]
fn test_decode_error_invalid_length() {
let data = EigenDABlobData {
blob: Bytes::from(vec![0; 31]), // one byte short of having a full header
};
let result = data.decode();
fn test_encode_and_decode_error_invalid_length() {
let rollup_data = vec![1, 2, 3, 4];
let mut eigenda_blob = EigenDABlobData::encode(&rollup_data);
eigenda_blob.blob.truncate(33);
let result = eigenda_blob.decode();
assert!(result.is_err());
assert_eq!(result.unwrap_err(), BlobDecodingError::InvalidLength);
}
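As a worked example of the size arithmetic described in the `encode` doc comment, the hypothetical helper below (not part of this commit) computes the blob size produced for a given rollup-data length; the assertions mirror the tests above:

```rust
/// Illustrative only: the blob size `encode` produces for a given input length.
/// Every chunk of up to 31 raw bytes gains one padding byte, a 32-byte header
/// field element is prepended, and the total is rounded up to a multiple of 32.
fn expected_blob_size(rollup_data_len: usize) -> usize {
    // convert_by_padding_empty_byte emits one extra byte per (up to) 31-byte chunk
    let padded_payload = rollup_data_len + rollup_data_len.div_ceil(31);
    // add the header, then round up to the next multiple of 32
    (padded_payload + 32).div_ceil(32) * 32
}

fn main() {
    assert_eq!(expected_blob_size(0), 32); // header only, as in the empty-input test
    assert_eq!(expected_blob_size(4), 64); // one payload field element + header
    assert_eq!(expected_blob_size(62), 96); // two payload field elements + header
}
```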
1 change: 1 addition & 0 deletions crates/eigenda/src/lib.rs
@@ -32,4 +32,5 @@ pub use certificate::BlobInfo;

mod constant;
pub use constant::BLOB_ENCODING_VERSION_0;
pub use constant::BYTES_PER_FIELD_ELEMENT;
pub use constant::STALE_GAP;
12 changes: 6 additions & 6 deletions crates/proof/src/eigenda_provider.rs
@@ -2,7 +2,7 @@ use alloc::boxed::Box;
use alloc::sync::Arc;
use alloy_primitives::{keccak256, Bytes};
use async_trait::async_trait;
use hokulea_eigenda::{BlobInfo, EigenDABlobProvider};
use hokulea_eigenda::{BlobInfo, EigenDABlobProvider, BYTES_PER_FIELD_ELEMENT};
use kona_preimage::{errors::PreimageOracleError, CommsClient, PreimageKey, PreimageKeyType};

use kona_proof::errors::OracleProviderError;
@@ -51,7 +51,9 @@ impl<T: CommsClient + Sync + Send> EigenDABlobProvider for OracleEigenDAProvider
let cert_blob_info = BlobInfo::decode(&mut &item_slice[4..]).unwrap();
info!("cert_blob_info {:?}", cert_blob_info);

let mut blob: Vec<u8> = vec![0; cert_blob_info.blob_header.data_length as usize];
// data_length is measured in field elements; multiply to get the number of bytes
let mut blob: Vec<u8> =
vec![0; cert_blob_info.blob_header.data_length as usize * BYTES_PER_FIELD_ELEMENT];

// 96 because our g1 commitment has 64 bytes in v1
// why 96, the original 4844 has bytes length of 80 (it has 48 bytes for commitment)
@@ -62,10 +64,8 @@
let mut blob_key = [0u8; 96];

// In eigenDA terminology, length describes the number of field element, size describes
// number of bytes. In eigenda proxy memstore mode, the datalength is wronly assigned to
// be the bytes lenght. We need to resolve it later.
// For now, we internally divides 32. ToDo
let data_length = cert_blob_info.blob_header.data_length as u64 / 32;
// number of bytes.
let data_length = cert_blob_info.blob_header.data_length as u64;

info!("cert_blob_info.blob_header.data_length {:?}", data_length);

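To make the unit change concrete: `data_length` now counts field elements, so the provider's buffer holds `data_length * BYTES_PER_FIELD_ELEMENT` bytes and is filled 32 bytes at a time. The simplified sketch below is not the crate's code; the closure stands in for the oracle lookup and is hypothetical:

```rust
// Illustrative reassembly loop: `fetch_field_element` is a stand-in for the
// oracle query keyed by the commitment and the field-element index.
const BYTES_PER_FIELD_ELEMENT: usize = 32;

fn reassemble_blob(
    data_length: usize,
    mut fetch_field_element: impl FnMut(usize) -> [u8; BYTES_PER_FIELD_ELEMENT],
) -> Vec<u8> {
    let mut blob = vec![0u8; data_length * BYTES_PER_FIELD_ELEMENT];
    for i in 0..data_length {
        blob[i * BYTES_PER_FIELD_ELEMENT..(i + 1) * BYTES_PER_FIELD_ELEMENT]
            .copy_from_slice(&fetch_field_element(i));
    }
    blob
}
```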
2 changes: 1 addition & 1 deletion op-devnet.docker-compose.yml.patch
@@ -18,7 +18,7 @@ index adcaea8f4..5c5e2e8ee 100644
- --port=3100
- --log.level=debug
- --generic-commitment="${ALTDA_GENERIC_DA}"
+ image: ghcr.io/layr-labs/eigenda-proxy:v1.6.1
+ image: ghcr.io/layr-labs/eigenda-proxy:main # TODO update image to v1.6.2 once this PR is released https://github.com/Layr-Labs/hokulea/pull/22
+ environment:
+ EIGENDA_PROXY_ADDR: 0.0.0.0
+ EIGENDA_PROXY_PORT: 3100
