diff --git a/Cargo.lock b/Cargo.lock
index 8966d8f..3a0b9e0 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1947,12 +1947,20 @@ name = "hokulea-client"
 version = "0.1.0"
 dependencies = [
  "alloy-consensus",
+ "alloy-primitives",
+ "alloy-rlp",
+ "ark-bn254",
+ "ark-ff 0.5.0",
+ "async-trait",
+ "hokulea-eigenda",
  "hokulea-proof",
  "kona-client",
  "kona-driver",
  "kona-executor",
  "kona-preimage",
  "kona-proof",
+ "num",
+ "rust-kzg-bn254",
  "tracing",
 ]
 
@@ -3631,8 +3639,7 @@ checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18"
 
 [[package]]
 name = "rust-kzg-bn254"
 version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "acdae4058a9f604acf7023d99d931d6f30261fff93787bcfd1f1ccfc725b701c"
+source = "git+https://github.com/Layr-Labs/rust-kzg-bn254?rev=4ad14ea4ce9473e13ed6437140fcbbff3a8ccce1#4ad14ea4ce9473e13ed6437140fcbbff3a8ccce1"
 dependencies = [
  "ark-bn254",
  "ark-ec",
@@ -3644,6 +3651,7 @@ dependencies = [
  "crossbeam-channel",
  "directories",
  "hex-literal",
+ "itertools 0.13.0",
  "num-bigint",
  "num-traits",
  "num_cpus",
@@ -3651,6 +3659,7 @@ dependencies = [
  "rayon",
  "sha2",
  "sys-info",
+ "thiserror 2.0.11",
  "ureq",
 ]
 
diff --git a/Cargo.toml b/Cargo.toml
index 1aef03e..8d5e1c7 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -63,13 +63,17 @@ reqwest = "0.12.12"
 async-trait = "0.1.85"
 linked_list_allocator = "0.10.5"
 bytes = "1.9.0"
+num = "0.4"
 
 # General
 sha2 = { version = "0.10.8", default-features = false }
 c-kzg = { version = "2.0.0", default-features = false }
 anyhow = { version = "1.0.95", default-features = false }
 thiserror = { version = "2.0.9", default-features = false }
-rust-kzg-bn254 = { version = "0.2.1", default-features = false }
+rust-kzg-bn254 = { git = "https://github.com/Layr-Labs/rust-kzg-bn254", rev = "4ad14ea4ce9473e13ed6437140fcbbff3a8ccce1", default-features = false }
+
+ark-bn254 = "0.5.0"
+ark-ff = { version = "0.5.0", features = ["parallel"] }
 
 # Tracing
 tracing-loki = "0.2.5"
diff --git a/README.md b/README.md
index 2ee09cc..b4a5d15 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,9 @@
 
 Hokulea is a library to provide the altda providers for a derivation pipeline built with [kona](https://github.com/anton-rs/kona) to understand eigenDA blobs, following the [kona book](https://anton-rs.github.io/kona/sdk/pipeline/providers.html#implementing-a-custom-data-availability-provider) recommendation (also see this [comment](https://github.com/anton-rs/kona/pull/862#issuecomment-2515038089)).
 
+### Download SRS points
+The Hokulea host currently computes a challenge proof that validates the correctness of the EigenDA blob against the provided KZG commitment. This computation requires the host to have access to sufficient KZG SRS points.
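+
+For reference, the client-side witness code in this PR loads the SRS points from fixed relative paths; the sketch below quotes the `KZG::setup` call it makes, with paths and sizes copied from `bin/client/src/witness.rs`:
+
+```rust
+// Paths at which the downloaded SRS point files are expected to live.
+// All arguments are copied verbatim from the client witness code.
+let kzg = rust_kzg_bn254::kzg::KZG::setup(
+    "resources/g1.32mb.point",     // G1 SRS points
+    "",
+    "resources/g2.point.powerOf2", // G2 SRS points
+    268435456,
+    1024,
+).expect("failed to load SRS points");
+```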
+
 ### Running against devnet
 
 First start the devnet:
@@ -17,4 +20,4 @@ cd bin/client
 just run-client-native-against-devnet
 ```
 
-![](./hokulea.jpeg)
\ No newline at end of file
+![](./hokulea.jpeg)
diff --git a/bin/client/Cargo.toml b/bin/client/Cargo.toml
index 9f113b0..1d5114b 100644
--- a/bin/client/Cargo.toml
+++ b/bin/client/Cargo.toml
@@ -5,6 +5,8 @@ edition = "2021"
 
 [dependencies]
 alloy-consensus.workspace = true
+alloy-primitives.workspace = true
+alloy-rlp.workspace = true
 
 kona-client.workspace = true
 kona-preimage.workspace = true
@@ -13,5 +15,13 @@ kona-driver.workspace = true
 kona-executor.workspace = true
 
 hokulea-proof.workspace = true
+hokulea-eigenda.workspace = true
 
 tracing.workspace = true
+async-trait.workspace = true
+rust-kzg-bn254.workspace = true
+num.workspace = true
+
+ark-bn254.workspace = true
+
+ark-ff.workspace = true
diff --git a/bin/client/src/cached_eigenda_provider.rs b/bin/client/src/cached_eigenda_provider.rs
new file mode 100644
index 0000000..14b0189
--- /dev/null
+++ b/bin/client/src/cached_eigenda_provider.rs
@@ -0,0 +1,136 @@
+use alloy_primitives::Bytes;
+use alloy_rlp::Decodable;
+use async_trait::async_trait;
+use kona_preimage::errors::PreimageOracleError;
+use kona_preimage::CommsClient;
+
+use hokulea_eigenda::BlobInfo;
+use hokulea_eigenda::EigenDABlobProvider;
+use hokulea_proof::eigenda_provider::OracleEigenDAProvider;
+use kona_proof::errors::OracleProviderError;
+
+use crate::witness::EigenDABlobWitness;
+
+use num::BigUint;
+use rust_kzg_bn254::blob::Blob;
+use rust_kzg_bn254::kzg::KZG;
+
+/// CachedOracleEigenDAProvider is a wrapper around OracleEigenDAProvider. Its intended use
+/// case is to record every EigenDA blob fetched during the derivation pipeline, so that it
+/// can compute and cache the KZG witnesses, which can then be verified inside a ZKVM by
+/// checking the point opening at the random Fiat-Shamir evaluation index.
+#[derive(Debug, Clone)]
+pub struct CachedOracleEigenDAProvider<T: CommsClient> {
+    /// The preimage oracle client.
+    oracle: OracleEigenDAProvider<T>,
+    /// KZG proof witness
+    witness: EigenDABlobWitness,
+}
+
+impl<T: CommsClient> CachedOracleEigenDAProvider<T> {
+    /// Constructs a new oracle-backed EigenDA provider.
+    pub fn new(oracle: OracleEigenDAProvider<T>, witness: EigenDABlobWitness) -> Self {
+        Self { oracle, witness }
+    }
+}
+
+#[async_trait]
+impl<T: CommsClient + Sync + Send> EigenDABlobProvider for CachedOracleEigenDAProvider<T> {
+    type Error = OracleProviderError;
+
+    async fn get_blob(&mut self, cert: &Bytes) -> Result<Bytes, Self::Error> {
+        let blob = self.oracle.get_blob(cert).await?;
+        let cert_blob_info = match BlobInfo::decode(&mut &cert[4..]) {
+            Ok(c) => c,
+            Err(_) => {
+                return Err(OracleProviderError::Preimage(PreimageOracleError::Other(
+                    "does not contain header".into(),
+                )))
+            }
+        };
+
+        let output = self.compute_witness(&blob)?;
+        // make sure the locally computed commitment matches the commitment returned by the provider
+        if output[..32] != cert_blob_info.blob_header.commitment.x[..]
+            || output[32..64] != cert_blob_info.blob_header.commitment.y[..]
+        {
+            return Err(OracleProviderError::Preimage(PreimageOracleError::Other(
+                "proxy commitment is different from locally computed commitment".into(),
+            )));
+        };
+
+        let commitment = Bytes::copy_from_slice(&output[..64]);
+
+        let kzg_proof = Bytes::copy_from_slice(&output[64..128]);
+
+        // push data into witness
+        self.witness
+            .write(blob.clone().into(), commitment, kzg_proof.into());
+
+        Ok(blob)
+    }
+}
+
+// nitro code https://github.com/Layr-Labs/nitro/blob/14f09745b74321f91d1f702c3e7bb5eb7d0e49ce/arbitrator/prover/src/kzgbn254.rs#L141
+// could refactor in the future, such that both host and client can compute the proof
+impl<T: CommsClient> CachedOracleEigenDAProvider<T> {
+    /// Returns the commitment and proof as a flat byte array so that the host can reuse the code
+    fn compute_witness(&mut self, blob: &[u8]) -> Result<Vec<u8>, OracleProviderError> {
+        // TODO remove the need for G2 access
+        // Add a command line option to specify where the G1 and G2 point files are located.
+        // In the future, it might make sense to let the proxy return this value,
+        // instead of computing it locally.
+        let mut kzg = KZG::setup(
+            "resources/g1.32mb.point",
+            "",
+            "resources/g2.point.powerOf2",
+            268435456,
+            1024,
+        )
+        .map_err(|_| {
+            OracleProviderError::Preimage(PreimageOracleError::Other(
+                "failed to setup KZG from SRS points".into(),
+            ))
+        })?;
+
+        let input = Blob::new(blob);
+        let input_poly = input.to_polynomial_eval_form();
+
+        kzg.data_setup_custom(1, input.len().try_into().unwrap())
+            .unwrap();
+
+        let mut commitment_and_proof = vec![0u8; 0];
+
+        let commitment = kzg.commit_eval_form(&input_poly).map_err(|_| {
+            OracleProviderError::Preimage(PreimageOracleError::Other("kzg.commit_eval_form".into()))
+        })?;
+
+        // TODO the library should return the bytes, or provide a helper
+        // for the conversion, for both the proof and the commitment
+        let commitment_x_bigint: BigUint = commitment.x.into();
+        let commitment_y_bigint: BigUint = commitment.y.into();
+
+        self.append_left_padded_biguint_be(&mut commitment_and_proof, &commitment_x_bigint);
+        self.append_left_padded_biguint_be(&mut commitment_and_proof, &commitment_y_bigint);
+
+        let proof = kzg.compute_blob_proof(&input, &commitment).map_err(|_| {
+            OracleProviderError::Preimage(PreimageOracleError::Other(
+                "kzg.compute_blob_proof".into(),
+            ))
+        })?;
+        let proof_x_bigint: BigUint = proof.x.into();
+        let proof_y_bigint: BigUint = proof.y.into();
+
+        self.append_left_padded_biguint_be(&mut commitment_and_proof, &proof_x_bigint);
+        self.append_left_padded_biguint_be(&mut commitment_and_proof, &proof_y_bigint);
+
+        Ok(commitment_and_proof)
+    }
+
+    /// Left-pads the big-endian bytes of `biguint` with zeros to 32 bytes and appends them to `vec`.
+    pub fn append_left_padded_biguint_be(&self, vec: &mut Vec<u8>, biguint: &BigUint) {
+        let bytes = biguint.to_bytes_be();
+        let padding = 32 - bytes.len();
+        vec.extend(std::iter::repeat(0).take(padding));
+        vec.extend_from_slice(&bytes);
+    }
+}
diff --git a/bin/client/src/lib.rs b/bin/client/src/lib.rs
index bf5d889..d9fa77e 100644
--- a/bin/client/src/lib.rs
+++ b/bin/client/src/lib.rs
@@ -22,6 +22,9 @@ use tracing::{error, info};
 
 use hokulea_proof::eigenda_provider::OracleEigenDAProvider;
 
+pub mod cached_eigenda_provider;
+pub mod witness;
+
 #[inline]
 pub async fn run<P, H>(oracle_client: P, hint_client: H) -> Result<(), FaultProofProgramError>
 where
diff --git a/bin/client/src/witness.rs b/bin/client/src/witness.rs
new file mode 100644
index 0000000..f721e09
--- /dev/null
+++ b/bin/client/src/witness.rs
@@ -0,0 +1,81 @@
+use alloc::vec::Vec;
+use alloy_primitives::Bytes;
+use ark_bn254::{Fq, G1Affine};
+use ark_ff::PrimeField;
+use rust_kzg_bn254::blob::Blob;
+use rust_kzg_bn254::kzg::KZG;
+use tracing::info;
+
+#[derive(Debug, Clone, Default)]
+pub struct EigenDABlobWitness {
+    pub eigenda_blobs: Vec<Bytes>,
+    pub commitments: Vec<Bytes>,
+    pub proofs: Vec<Bytes>,
+}
+
+impl EigenDABlobWitness {
+    pub fn new() -> Self {
+        EigenDABlobWitness {
+            eigenda_blobs: Vec::new(),
+            commitments: Vec::new(),
+            proofs: Vec::new(),
+        }
+    }
+
+    pub fn write(&mut self, blob: Bytes, commitment: Bytes, proof: Bytes) {
+        self.eigenda_blobs.push(blob);
+        self.commitments.push(commitment);
+        self.proofs.push(proof);
+        info!("added a blob");
+    }
+
+    pub fn verify(&self) -> bool {
+        // TODO we should not need this many G1 and G2 points for KZG verification;
+        // improve the KZG library instead
+        let kzg = match KZG::setup(
+            "resources/g1.32mb.point",
+            "",
+            "resources/g2.point.powerOf2",
+            268435456,
+            1024,
+        ) {
+            Ok(k) => k,
+            Err(e) => panic!("cannot setup kzg {}", e),
+        };
+
+        info!("lib_blobs len {:?}", self.eigenda_blobs.len());
+
+        // transform to rust-kzg-bn254 input types
+        // TODO the library should do the parsing and return a result
+        let lib_blobs: Vec<Blob> = self.eigenda_blobs.iter().map(|b| Blob::new(b)).collect();
+        let lib_commitments: Vec<G1Affine> = self
+            .commitments
+            .iter()
+            .map(|c| {
+                let x = Fq::from_be_bytes_mod_order(&c[..32]);
+                let y = Fq::from_be_bytes_mod_order(&c[32..64]);
+                G1Affine::new(x, y)
+            })
+            .collect();
+        let lib_proofs: Vec<G1Affine> = self
+            .proofs
+            .iter()
+            .map(|p| {
+                let x = Fq::from_be_bytes_mod_order(&p[..32]);
+                let y = Fq::from_be_bytes_mod_order(&p[32..64]);
+
+                G1Affine::new(x, y)
+            })
+            .collect();
+        let pairing_result = kzg
+            .verify_blob_kzg_proof_batch(&lib_blobs, &lib_commitments, &lib_proofs)
+            .unwrap();
+
+        //info!("lib_blobs {:?}", lib_blobs);
+        //info!("lib_commitments {:?}", lib_commitments);
+        //info!("lib_proofs {:?}", lib_proofs);
+        //info!("pairing_result {:?}", pairing_result);
+
+        pairing_result
+    }
+}
diff --git a/bin/host/src/eigenda_fetcher/mod.rs b/bin/host/src/eigenda_fetcher/mod.rs
index a0a1dc6..daae6e3 100644
--- a/bin/host/src/eigenda_fetcher/mod.rs
+++ b/bin/host/src/eigenda_fetcher/mod.rs
@@ -156,15 +156,15 @@ where
             let cert_blob_info = BlobInfo::decode(&mut &item_slice[4..]).unwrap();
 
             // Proxy should return a cert whose data_length measured in symbol (i.e. 32 Bytes)
-            let blob_length = cert_blob_info.blob_header.data_length as u64;
-            warn!("blob length: {:?}", blob_length);
+            let data_length = cert_blob_info.blob_header.data_length as u64;
+            warn!("data length: {:?}", data_length);
 
             let eigenda_blob = EigenDABlobData::encode(rollup_data.as_ref());
 
-            if eigenda_blob.blob.len() != blob_length as usize * BYTES_PER_FIELD_ELEMENT {
+            if eigenda_blob.blob.len() != data_length as usize * BYTES_PER_FIELD_ELEMENT {
                 return Err(
                     anyhow!("data size from cert does not equal to reconstructed data codec_rollup_data_len {} blob size {}",
-                        eigenda_blob.blob.len(), blob_length as usize * BYTES_PER_FIELD_ELEMENT));
+                        eigenda_blob.blob.len(), data_length as usize * BYTES_PER_FIELD_ELEMENT));
             }
 
             // Write all the field elements to the key-value store.
@@ -176,9 +176,9 @@ where
             blob_key[..32].copy_from_slice(cert_blob_info.blob_header.commitment.x.as_ref());
             blob_key[32..64].copy_from_slice(cert_blob_info.blob_header.commitment.y.as_ref());
 
-            trace!("cert_blob_info blob_length {:?}", blob_length);
+            trace!("cert_blob_info data_length {:?}", data_length);
 
-            for i in 0..blob_length {
+            for i in 0..data_length {
                 blob_key[88..].copy_from_slice(i.to_be_bytes().as_ref());
                 let blob_key_hash = keccak256(blob_key.as_ref());
 
@@ -192,12 +192,11 @@ where
                 )?;
             }
 
-            // TODO proof is at the random point, but we need to figure out where to generate
-            //
+            // TODO currently the proof is only computed on the client side when cached_eigenda_provider
+            // is used. We can add this back if the host needs to produce the proof.
             // Write the KZG Proof as the last element, needed for ZK
-            //blob_key[88..].copy_from_slice((blob_length).to_be_bytes().as_ref());
+            //blob_key[88..].copy_from_slice((data_length).to_be_bytes().as_ref());
             //let blob_key_hash = keccak256(blob_key.as_ref());
-
             //kv_write_lock.set(
             //    PreimageKey::new(*blob_key_hash, PreimageKeyType::Keccak256).into(),
             //    blob_key.into(),
@@ -205,7 +204,7 @@ where
             // proof to be done
             //kv_write_lock.set(
             //    PreimageKey::new(*blob_key_hash, PreimageKeyType::GlobalGeneric).into(),
-            //    [1, 2, 3].to_vec(),
+            //    output[64..].to_vec(),
             //)?;
         } else {
             panic!("Invalid hint type: {hint_type}. FetcherWithEigenDASupport.prefetch only supports EigenDACommitment hints.");
diff --git a/crates/eigenda/src/traits.rs b/crates/eigenda/src/traits.rs
index 5fad440..c8db2c6 100644
--- a/crates/eigenda/src/traits.rs
+++ b/crates/eigenda/src/traits.rs
@@ -13,7 +13,4 @@ pub trait EigenDABlobProvider {
 
     /// Fetches a blob.
     async fn get_blob(&mut self, cert: &Bytes) -> Result<Bytes, Self::Error>;
-
-    /// Fetches an element from a blob.
-    async fn get_element(&mut self, cert: &Bytes, element: &Bytes) -> Result<Bytes, Self::Error>;
 }
diff --git a/crates/proof/src/eigenda_provider.rs b/crates/proof/src/eigenda_provider.rs
index 5c62f23..8f0a4b7 100644
--- a/crates/proof/src/eigenda_provider.rs
+++ b/crates/proof/src/eigenda_provider.rs
@@ -100,27 +100,4 @@ impl<T: CommsClient + Sync + Send> EigenDABlobProvider for OracleEigenDAProvider<T>
 
         Ok(blob.into())
     }
-
-    async fn get_element(&mut self, cert: &Bytes, element: &Bytes) -> Result<Bytes, Self::Error> {
-        self.oracle
-            .write(&ExtendedHintType::EigenDACommitment.encode_with(&[cert]))
-            .await
-            .map_err(OracleProviderError::Preimage)?;
-
-        let cert_point_key = Bytes::copy_from_slice(&[cert.to_vec(), element.to_vec()].concat());
-
-        self.oracle
-            .write(&ExtendedHintType::EigenDACommitment.encode_with(&[&cert_point_key]))
-            .await
-            .map_err(OracleProviderError::Preimage)?;
-        let data = self
-            .oracle
-            .get(PreimageKey::new(
-                *keccak256(cert_point_key),
-                PreimageKeyType::GlobalGeneric,
-            ))
-            .await
-            .map_err(OracleProviderError::Preimage)?;
-        Ok(data.into())
-    }
 }
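
For reviewers, a minimal sketch of how the pieces added in this diff are meant to fit together, assuming the `bin/client` package is importable as `hokulea_client` (it has a `lib.rs` exposing the new modules) and that `CachedOracleEigenDAProvider` keeps the generic `CommsClient` parameter restored above. This is illustrative only and not part of the patch:

```rust
use hokulea_client::cached_eigenda_provider::CachedOracleEigenDAProvider;
use hokulea_client::witness::EigenDABlobWitness;
use hokulea_proof::eigenda_provider::OracleEigenDAProvider;
use kona_preimage::CommsClient;

/// Wrap the existing oracle-backed EigenDA provider so that every blob fetched
/// during derivation also has its KZG commitment recomputed and a blob proof
/// cached in the witness (see `get_blob` in `cached_eigenda_provider.rs`).
fn with_witness_cache<T: CommsClient + Sync + Send>(
    oracle_provider: OracleEigenDAProvider<T>,
) -> CachedOracleEigenDAProvider<T> {
    // The witness starts empty; `get_blob` pushes (blob, commitment, proof)
    // triples into it as the derivation pipeline requests EigenDA blobs.
    // `EigenDABlobWitness::verify` can later batch-verify all cached proofs.
    let witness = EigenDABlobWitness::new();
    CachedOracleEigenDAProvider::new(oracle_provider, witness)
}
```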