update inclusion proof
Signed-off-by: Gustavo Inacio <[email protected]>
gusinacio committed Nov 4, 2024
1 parent 560957b commit 0883044
Showing 4 changed files with 90 additions and 36 deletions.
2 changes: 1 addition & 1 deletion crates/header-accumulator/src/epoch.rs
@@ -84,7 +84,7 @@ impl TryFrom<Vec<ExtHeaderRecord>> for Epoch {

impl From<Epoch> for EpochAccumulator {
fn from(value: Epoch) -> Self {
let vec: Vec<HeaderRecord> = value.data.into_iter().collect();
let vec: Vec<HeaderRecord> = value.data.to_vec();
EpochAccumulator::from(vec)
}
}
6 changes: 6 additions & 0 deletions crates/header-accumulator/src/errors.rs
@@ -41,6 +41,12 @@ pub enum EraValidateError {
InvalidBlockRange(u64, u64),
#[error("Epoch is in post merge: {0}")]
EpochPostMerge(usize),

#[error("Header block number ({block_number}) is different than expected ({expected_number})")]
HeaderMismatch {
expected_number: u64,
block_number: u64,
},
}

impl From<ProtosError> for EraValidateError {
86 changes: 66 additions & 20 deletions crates/header-accumulator/src/inclusion_proof.rs
@@ -1,23 +1,45 @@
use std::ops::Range;
use crate::{epoch::MAX_EPOCH_SIZE, errors::EraValidateError, Epoch};

use crate::{epoch::MAX_EPOCH_SIZE, errors::EraValidateError, types::ExtHeaderRecord, Epoch};

use alloy_primitives::FixedBytes;
use ethportal_api::{
types::execution::{
accumulator::{EpochAccumulator, HeaderRecord},
accumulator::EpochAccumulator,
header_with_proof::{BlockHeaderProof, HeaderWithProof, PreMergeAccumulatorProof},
},
Header,
};
use firehose_protos::ethereum_v2::Block;
use tree_hash::Hash256;
use trin_validation::{
accumulator::PreMergeAccumulator, header_validator::HeaderValidator,
historical_roots_acc::HistoricalRootsAccumulator,
};

type InclusionProof = [Hash256; 15];
#[derive(Clone)]
pub struct InclusionProof {
block_number: u64,
proof: [Hash256; 15],
}

impl InclusionProof {
pub fn with_header(self, header: Header) -> Result<ProovableHeader, EraValidateError> {
if self.block_number != header.number {
Err(EraValidateError::HeaderMismatch {
expected_number: self.block_number,
block_number: header.number,
})
} else {
Ok(ProovableHeader {
proof: self,
header,
})
}
}
}

impl From<InclusionProof> for PreMergeAccumulatorProof {
fn from(value: InclusionProof) -> Self {
Self { proof: value.proof }
}
}
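
As a point of reference, here is a minimal sketch (not part of this commit) of how the new with_header check is meant to be used. It assumes the crate exports generate_inclusion_proof, Epoch, and EraValidateError as the test below does, and that ethportal_api::Header is the header type:

use ethportal_api::Header;
use header_accumulator::{generate_inclusion_proof, Epoch, EraValidateError};

// Generate a proof for one header and bind it to that header.
// with_header consumes the proof and returns EraValidateError::HeaderMismatch
// if the header's block number differs from the one the proof was built for.
fn prove_and_pair(header: Header, epoch: Epoch) -> Result<(), EraValidateError> {
    let proof = generate_inclusion_proof(header.clone(), epoch)?;
    let provable = proof.with_header(header)?;
    // `provable` can now be collected into a Vec and passed to verify_inclusion_proofs.
    let _ = provable;
    Ok(())
}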

/// generates an inclusion proof over headers, given blocks between `start_block` and `end_block`
///
@@ -33,21 +55,26 @@ pub fn generate_inclusion_proofs(
) -> Result<Vec<InclusionProof>, EraValidateError> {
// We need to load blocks from an entire epoch to be able to generate inclusion proofs
// First compute epoch accumulators and the Merkle tree for all the epochs of interest
// let mut epoch_accumulators = Vec::new();
// let mut headers: Vec<Header> = Vec::new();
let mut inclusion_proof_vec: Vec<InclusionProof> = Vec::new();
let epoch_list: Vec<_> = epochs.iter().map(|epoch| epoch.number()).collect();
let accumulators: Vec<_> = epochs
.into_iter()
.map(|epoch| (epoch.number(), EpochAccumulator::from(epoch)))
.collect();

for header in headers_to_prove {
let block_epoch = (header.number / MAX_EPOCH_SIZE as u64) as usize;
let epoch = epochs

let accumulator = accumulators
.iter()
.find(|epoch| epoch.number() == block_epoch)
.find(|epoch| epoch.0 == block_epoch)
.map(|epoch| &epoch.1)
.ok_or(EraValidateError::EpochNotFoundInProvidedList {
block_epoch,
epoch_list: epoch_list.clone(),
})?;
inclusion_proof_vec.push(generate_inclusion_proof(header, epoch.clone())?);

inclusion_proof_vec.push(do_generate_inclusion_proof(&header, accumulator)?);
}

Ok(inclusion_proof_vec)
@@ -57,18 +84,29 @@ pub fn generate_inclusion_proof(
header: Header,
epoch: Epoch,
) -> Result<InclusionProof, EraValidateError> {
let block_epoch = (header.number / MAX_EPOCH_SIZE as u64) as usize;
let block_number = header.number;
let block_epoch = (block_number / MAX_EPOCH_SIZE as u64) as usize;
if block_epoch != epoch.number() {
return Err(EraValidateError::EpochNotMatchForHeader {
epoch_number: epoch.number(),
block_number: header.number,
block_number,
block_epoch,
});
}

let epoch_accumulator = EpochAccumulator::from(epoch);
do_generate_inclusion_proof(&header, &epoch_accumulator)
}

PreMergeAccumulator::construct_proof(&header, &epoch_accumulator)
fn do_generate_inclusion_proof(
header: &Header,
epoch_accumulator: &EpochAccumulator,
) -> Result<InclusionProof, EraValidateError> {
PreMergeAccumulator::construct_proof(header, epoch_accumulator)
.map(|proof| InclusionProof {
proof,
block_number: header.number,
})
.map_err(|_| EraValidateError::ProofGenerationFailure)
}

@@ -80,28 +118,36 @@ pub fn generate_inclusion_proof(
/// * `inclusion_proof` - The inclusion proof generated from [`generate_inclusion_proof`].
pub fn verify_inclusion_proofs(
pre_merge_accumulator_file: Option<PreMergeAccumulator>,
block_proofs: Vec<(Block, InclusionProof)>,
header_proofs: Vec<ProovableHeader>,
) -> Result<(), EraValidateError> {
let pre_merge_acc = pre_merge_accumulator_file.unwrap_or_default();
let header_validator = HeaderValidator {
pre_merge_acc,
historical_roots_acc: HistoricalRootsAccumulator::default(),
};

for (block, proof) in block_proofs {
let header = Header::try_from(&block)?;
verify_inclusion_proof(&header_validator, header, proof)?;
for proovable_header in header_proofs {
verify_inclusion_proof(
&header_validator,
proovable_header.header,
proovable_header.proof,
)?;
}

Ok(())
}

pub struct ProovableHeader {
header: Header,
proof: InclusionProof,
}

pub fn verify_inclusion_proof(
header_validator: &HeaderValidator,
header: Header,
proof: InclusionProof,
) -> Result<(), EraValidateError> {
let proof = BlockHeaderProof::PreMergeAccumulatorProof(PreMergeAccumulatorProof { proof });
let proof = BlockHeaderProof::PreMergeAccumulatorProof(proof.into());

let hwp = HeaderWithProof { header, proof };

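
Before the test changes below, a condensed sketch of the updated end-to-end flow (not part of this commit). It assumes the headers of a full pre-merge epoch are already available as ExtHeaderRecords, that Epoch's TryFrom error is EraValidateError, and that the crate re-exports the items used here:

use ethportal_api::Header;
use header_accumulator::{
    generate_inclusion_proofs, verify_inclusion_proofs, Epoch, EraValidateError, ExtHeaderRecord,
};

// Prove a subset of an epoch's headers and verify the resulting proofs.
fn prove_and_verify(
    epoch_headers: Vec<ExtHeaderRecord>, // all headers of one pre-merge epoch
    headers_to_prove: Vec<Header>,       // the headers to generate proofs for
) -> Result<(), EraValidateError> {
    let epoch: Epoch = epoch_headers.try_into()?;
    let proofs = generate_inclusion_proofs(vec![epoch], headers_to_prove.clone())?;

    // Bind each proof to its header; with_header fails on a block-number mismatch.
    let provable_headers = headers_to_prove
        .into_iter()
        .zip(proofs)
        .map(|(header, proof)| proof.with_header(header))
        .collect::<Result<Vec<_>, _>>()?;

    // None falls back to the default PreMergeAccumulator.
    verify_inclusion_proofs(None, provable_headers)
}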
32 changes: 17 additions & 15 deletions crates/header-accumulator/tests/inclusion_proof.rs
@@ -1,13 +1,11 @@
use firehose_protos::ethereum_v2::Block;
use flat_files_decoder::{decoder::decode_flat_files, decompression::Decompression};
use header_accumulator::{
generate_inclusion_proof, verify_inclusion_proof, EraValidateError, ExtHeaderRecord,
generate_inclusion_proofs, verify_inclusion_proofs, Epoch, EraValidateError, ExtHeaderRecord,
};

#[test]
fn test_inclusion_proof() -> Result<(), EraValidateError> {
let mut headers: Vec<ExtHeaderRecord> = Vec::new();
let mut all_blocks: Vec<Block> = Vec::new(); // Vector to hold all blocks

for flat_file_number in (0..=8200).step_by(100) {
let file_name = format!(
@@ -22,7 +20,7 @@ fn test_inclusion_proof() -> Result<(), EraValidateError> {
.map(|block| ExtHeaderRecord::try_from(block).unwrap())
.collect::<Vec<ExtHeaderRecord>>(),
);
all_blocks.extend(blocks); // Extend the all_blocks vector with the decoded blocks
// all_blocks.extend(blocks); // Extend the all_blocks vector with the decoded blocks
}
Err(e) => {
eprintln!("error: {:?}", e);
@@ -33,24 +31,28 @@ fn test_inclusion_proof() -> Result<(), EraValidateError> {

let start_block = 301;
let end_block = 402;
let inclusion_proof = generate_inclusion_proofs(headers, start_block, end_block)
let headers_to_proof: Vec<_> = headers[start_block..end_block]
.iter()
.map(|ext| ext.full_header.as_ref().unwrap().clone())
.collect();
let epoch: Epoch = headers.try_into().unwrap();

let inclusion_proof = generate_inclusion_proofs(vec![epoch], headers_to_proof.clone())
.unwrap_or_else(|e| {
println!("Error occurred: {}", e);
// Handle the error, e.g., by exiting the program or returning a default value
std::process::exit(1); // Exiting the program, for example
});
assert_eq!(
inclusion_proof.len() as usize,
(end_block - start_block + 1) as usize
);
assert_eq!(inclusion_proof.len(), headers_to_proof.len());

// Verify inclusion proof
let proof_blocks: Vec<Block> = all_blocks[start_block as usize..=end_block as usize].to_vec();
assert!(verify_inclusion_proof(proof_blocks, None, inclusion_proof.clone()).is_ok());
let proof_headers = headers_to_proof
.into_iter()
.zip(inclusion_proof)
.map(|(header, proof)| proof.with_header(header))
.collect::<Result<Vec<_>, _>>()?;

// verify if inclusion proof fails on not proven blocks
let proof_blocks: Vec<Block> = all_blocks[302..=403].to_vec();
assert!(verify_inclusion_proof(proof_blocks, None, inclusion_proof.clone()).is_err());
// Verify inclusion proof
assert!(verify_inclusion_proofs(None, proof_headers).is_ok());

Ok(())
}
