Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

clean up inclusion proof #33

Merged
merged 8 commits into from
Nov 28, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 17 additions & 16 deletions crates/header-accumulator/examples/inclusion_proof.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
use std::{fs::File, io::BufReader};

use firehose_protos::EthBlock as Block;
use flat_files_decoder::{read_blocks_from_reader, Compression};
use header_accumulator::{
generate_inclusion_proof, verify_inclusion_proof, EraValidateError, ExtHeaderRecord,
generate_inclusion_proofs, verify_inclusion_proofs, Epoch, EraValidateError, ExtHeaderRecord,
};

fn create_test_reader(path: &str) -> BufReader<File> {
Expand All @@ -12,7 +11,6 @@ fn create_test_reader(path: &str) -> BufReader<File> {

fn main() -> Result<(), EraValidateError> {
let mut headers: Vec<ExtHeaderRecord> = Vec::new();
let mut all_blocks: Vec<Block> = Vec::new();

for flat_file_number in (0..=8200).step_by(100) {
let file = format!(
Expand All @@ -27,7 +25,6 @@ fn main() -> Result<(), EraValidateError> {
.map(|block| ExtHeaderRecord::try_from(block).unwrap())
.collect::<Vec<ExtHeaderRecord>>(),
);
all_blocks.extend(blocks);
}
Err(e) => {
eprintln!("error: {:?}", e);
Expand All @@ -38,23 +35,27 @@ fn main() -> Result<(), EraValidateError> {

let start_block = 301;
let end_block = 402;
let inclusion_proof =
generate_inclusion_proof(headers, start_block, end_block).unwrap_or_else(|e| {
let headers_to_prove: Vec<_> = headers[start_block..end_block]
.iter()
.map(|ext| ext.full_header.as_ref().unwrap().clone())
.collect();
let epoch: Epoch = headers.try_into().unwrap();

let inclusion_proof = generate_inclusion_proofs(vec![epoch], headers_to_prove.clone())
.unwrap_or_else(|e| {
println!("Error occurred: {}", e);
std::process::exit(1);
});
assert_eq!(
inclusion_proof.len() as usize,
(end_block - start_block + 1) as usize
);
assert_eq!(inclusion_proof.len(), headers_to_prove.len());

// Verify inclusion proof
let proof_blocks: Vec<Block> = all_blocks[start_block as usize..=end_block as usize].to_vec();
assert!(verify_inclusion_proof(proof_blocks, None, inclusion_proof.clone()).is_ok());
let proof_headers = headers_to_prove
.into_iter()
.zip(inclusion_proof)
.map(|(header, proof)| proof.with_header(header))
.collect::<Result<Vec<_>, _>>()?;

// Verify if inclusion proof fails on not proven blocks
let proof_blocks: Vec<Block> = all_blocks[302..=403].to_vec();
assert!(verify_inclusion_proof(proof_blocks, None, inclusion_proof.clone()).is_err());
// Verify inclusion proof
assert!(verify_inclusion_proofs(None, proof_headers).is_ok());

println!("Inclusion proof verified successfully!");

Expand Down
10 changes: 9 additions & 1 deletion crates/header-accumulator/src/epoch.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use std::array::IntoIter;

use alloy_primitives::map::HashSet;
use ethportal_api::types::execution::accumulator::HeaderRecord;
use ethportal_api::types::execution::accumulator::{EpochAccumulator, HeaderRecord};

use crate::{errors::EraValidateError, types::ExtHeaderRecord};

Expand Down Expand Up @@ -30,6 +30,7 @@ pub const MERGE_BLOCK: u64 = 15537394;
/// 0 must start from block 0 to block 8191.
///
/// All blocks must be at the same epoch
#[derive(Clone)]
pub struct Epoch {
number: usize,
data: Box<[HeaderRecord; MAX_EPOCH_SIZE]>,
Expand Down Expand Up @@ -81,6 +82,13 @@ impl TryFrom<Vec<ExtHeaderRecord>> for Epoch {
}
}

impl From<Epoch> for EpochAccumulator {
fn from(value: Epoch) -> Self {
let vec: Vec<HeaderRecord> = value.data.to_vec();
EpochAccumulator::from(vec)
}
}

impl Epoch {
pub fn number(&self) -> usize {
self.number
Expand Down
19 changes: 19 additions & 0 deletions crates/header-accumulator/src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,19 @@ pub enum EraValidateError {
#[error("Era accumulator mismatch")]
EraAccumulatorMismatch,

#[error("Block epoch {block_epoch} (block number {block_number}) could not be proven with provided epoch {epoch_number}.")]
EpochNotMatchForHeader {
epoch_number: usize,
block_number: u64,
block_epoch: usize,
},

#[error("Expected epoch {block_epoch} was not found in the provided epoch list. Epochs provided: {epoch_list:?}.")]
EpochNotFoundInProvidedList {
block_epoch: usize,
epoch_list: Vec<usize>,
},

#[error("Error generating inclusion proof")]
ProofGenerationFailure,
#[error("Error validating inclusion proof")]
Expand All @@ -28,6 +41,12 @@ pub enum EraValidateError {
InvalidBlockRange(u64, u64),
#[error("Epoch is in post merge: {0}")]
EpochPostMerge(usize),

#[error("Header block number ({block_number}) is different than expected ({expected_number})")]
HeaderMismatch {
expected_number: u64,
block_number: u64,
},
}

impl From<ProtosError> for EraValidateError {
Expand Down
205 changes: 134 additions & 71 deletions crates/header-accumulator/src/inclusion_proof.rs
Original file line number Diff line number Diff line change
@@ -1,106 +1,169 @@
use crate::{epoch::MAX_EPOCH_SIZE, errors::EraValidateError, types::ExtHeaderRecord};
use crate::{epoch::MAX_EPOCH_SIZE, errors::EraValidateError, Epoch};

use alloy_primitives::FixedBytes;
use ethportal_api::{
types::execution::{
accumulator::{EpochAccumulator, HeaderRecord},
header_with_proof::{BlockHeaderProof, HeaderWithProof, PreMergeAccumulatorProof},
accumulator::EpochAccumulator,
header_with_proof::{
BlockHeaderProof, HeaderWithProof as PortalHeaderWithProof, PreMergeAccumulatorProof,
},
},
Header,
};
use firehose_protos::EthBlock as Block;
use tree_hash::Hash256;
use trin_validation::{
accumulator::PreMergeAccumulator, header_validator::HeaderValidator,
historical_roots_acc::HistoricalRootsAccumulator,
};

/// generates an inclusion proof over headers, given blocks between `start_block` and `end_block`
const PROOF_SIZE: usize = 15;

/// A proof that contains the block number
#[derive(Clone)]
pub struct InclusionProof {
block_number: u64,
proof: [Hash256; PROOF_SIZE],
}

impl InclusionProof {
/// Takes a header and turns the proof into a provable header
pub fn with_header(self, header: Header) -> Result<HeaderWithProof, EraValidateError> {
if self.block_number != header.number {
Err(EraValidateError::HeaderMismatch {
expected_number: self.block_number,
block_number: header.number,
})
} else {
Ok(HeaderWithProof {
proof: self,
header,
})
}
}
}

impl From<InclusionProof> for PreMergeAccumulatorProof {
fn from(value: InclusionProof) -> Self {
Self { proof: value.proof }
}
}

/// Generates inclusion proofs for headers, given a list of epochs that contain
/// the headers to be proven
///
/// # Arguments
///
/// * `ext_headers`- A mutable [`Vec<ExtHeaderRecord>`]. The Vector can be any size, however, it must be in chunks of 8192 blocks to work properly
/// to function without error
/// * `start_block` - The starting point of blocks that are to be included in the proofs. This interval is inclusive.
/// * `end_epoch` - The ending point of blocks that are to be included in the proofs. This interval is inclusive.
pub fn generate_inclusion_proof(
mut ext_headers: Vec<ExtHeaderRecord>,
start_block: u64,
end_block: u64,
) -> Result<Vec<[Hash256; 15]>, EraValidateError> {
if start_block > end_block {
return Err(EraValidateError::InvalidBlockRange(start_block, end_block));
}
/// * `epochs` - A list of epochs [`Vec<Epoch>`].
/// * `headers_to_prove` - A list of headers [`Vec<Header>`].
pub fn generate_inclusion_proofs(
epochs: Vec<Epoch>,
headers_to_prove: Vec<Header>,
) -> Result<Vec<InclusionProof>, EraValidateError> {
suchapalaver marked this conversation as resolved.
Show resolved Hide resolved
let mut inclusion_proof_vec: Vec<InclusionProof> = Vec::with_capacity(headers_to_prove.len());
let epoch_list: Vec<_> = epochs.iter().map(|epoch| epoch.number()).collect();
let accumulators: Vec<_> = epochs
.into_iter()
.map(|epoch| (epoch.number(), EpochAccumulator::from(epoch)))
.collect();

// Compute the epoch accumulator for the blocks
// The epochs start on a multiple of 8192 blocks, so we need to round down to the nearest 8192
let epoch_start = start_block / MAX_EPOCH_SIZE as u64;

// The epochs end on a multiple of 8192 blocks, so we need to round up to the nearest 8192
let epoch_end = ((end_block as f32) / MAX_EPOCH_SIZE as f32).ceil() as u64;

// We need to load blocks from an entire epoch to be able to generate inclusion proofs
// First compute epoch accumulators and the Merkle tree for all the epochs of interest
let mut epoch_accumulators = Vec::new();
let mut inclusion_proof_vec: Vec<[FixedBytes<32>; 15]> = Vec::new();
let mut headers: Vec<Header> = Vec::new();

for _ in epoch_start..epoch_end {
let epoch_headers: Vec<ExtHeaderRecord> = ext_headers.drain(0..MAX_EPOCH_SIZE).collect();
let header_records: Vec<HeaderRecord> = epoch_headers.iter().map(Into::into).collect();
let tmp_headers: Vec<Header> = epoch_headers
.into_iter()
.map(ExtHeaderRecord::try_into)
.collect::<Result<_, _>>()?;
headers.extend(tmp_headers);
epoch_accumulators.push(EpochAccumulator::from(header_records));
}
for header in headers_to_prove {
let block_epoch = (header.number / MAX_EPOCH_SIZE as u64) as usize;

let accumulator = accumulators
.iter()
.find(|epoch| epoch.0 == block_epoch)
.map(|epoch| &epoch.1)
.ok_or(EraValidateError::EpochNotFoundInProvidedList {
block_epoch,
epoch_list: epoch_list.clone(),
})?;

for block_idx in start_block..=end_block {
let epoch = block_idx / MAX_EPOCH_SIZE as u64;
let epoch_acc = epoch_accumulators[epoch as usize].clone();
let header = headers[block_idx as usize].clone();
inclusion_proof_vec.push(
PreMergeAccumulator::construct_proof(&header, &epoch_acc)
.map_err(|_| EraValidateError::ProofGenerationFailure)?,
);
inclusion_proof_vec.push(do_generate_inclusion_proof(&header, accumulator)?);
}

Ok(inclusion_proof_vec)
}

/// verifies an inclusion proof generate by [`generate_inclusion_proof`]
/// Generates an inclusion proof for the header, given the epoch that contains
/// the header to be proven
///
/// * `blocks`- A [`Vec<Block>`]. The blocks included in the inclusion proof interval, set in `start_block` and `end_block` of [`generate_inclusion_proof`]
/// * `pre_merge_accumulator_file`- An instance of [`PreMergeAccumulator`] which is a file that maintains a record of historical epoch
/// it is used to verify canonical-ness of headers accumulated from the `blocks`
/// * `inclusion_proof` - The inclusion proof generated from [`generate_inclusion_proof`].
pub fn verify_inclusion_proof(
blocks: Vec<Block>,
/// Returns an error if the header is not inside the epoch.
///
/// # Arguments
///
/// * `header`- Header to be proven
/// * `epoch` - Epoch in which the header is located
pub fn generate_inclusion_proof(
header: Header,
epoch: Epoch,
) -> Result<InclusionProof, EraValidateError> {
let block_number = header.number;
let block_epoch = (block_number / MAX_EPOCH_SIZE as u64) as usize;
if block_epoch != epoch.number() {
return Err(EraValidateError::EpochNotMatchForHeader {
epoch_number: epoch.number(),
block_number,
block_epoch,
});
}

let epoch_accumulator = EpochAccumulator::from(epoch);
do_generate_inclusion_proof(&header, &epoch_accumulator)
}

fn do_generate_inclusion_proof(
header: &Header,
anirudh2 marked this conversation as resolved.
Show resolved Hide resolved
epoch_accumulator: &EpochAccumulator,
) -> Result<InclusionProof, EraValidateError> {
PreMergeAccumulator::construct_proof(header, epoch_accumulator)
.map(|proof| InclusionProof {
proof,
block_number: header.number,
})
.map_err(|_| EraValidateError::ProofGenerationFailure)
}

/// Verifies a list of provable headers
///
/// * `pre_merge_accumulator_file` - An optional instance of [`PreMergeAccumulator`],
/// which is a file that maintains a record of historical epochs; it is used to
/// verify the canonical-ness of headers accumulated from the `blocks`
/// * `header_proofs`- A [`Vec<HeaderWithProof>`].
pub fn verify_inclusion_proofs(
pre_merge_accumulator_file: Option<PreMergeAccumulator>,
inclusion_proof: Vec<[Hash256; 15]>,
header_proofs: Vec<HeaderWithProof>,
) -> Result<(), EraValidateError> {
let pre_merge_acc = pre_merge_accumulator_file.unwrap_or_default();

let header_validator = HeaderValidator {
pre_merge_acc,
historical_roots_acc: HistoricalRootsAccumulator::default(),
};

for (block_idx, _) in blocks.iter().enumerate() {
let bhp = BlockHeaderProof::PreMergeAccumulatorProof(PreMergeAccumulatorProof {
proof: inclusion_proof[block_idx],
});

let hwp = HeaderWithProof {
header: Header::try_from(&blocks[block_idx])?,
proof: bhp,
};

header_validator
.validate_header_with_proof(&hwp)
.map_err(|_| EraValidateError::ProofValidationFailure)?;
for provable_header in header_proofs {
verify_inclusion_proof(&header_validator, provable_header)?;
}

Ok(())
}

/// A header with an inclusion proof attached
pub struct HeaderWithProof {
header: Header,
proof: InclusionProof,
}

/// Verifies if a proof is contained in the header validator
pub fn verify_inclusion_proof(
header_validator: &HeaderValidator,
provable_header: HeaderWithProof,
) -> Result<(), EraValidateError> {
let proof = BlockHeaderProof::PreMergeAccumulatorProof(provable_header.proof.into());

let hwp = PortalHeaderWithProof {
header: provable_header.header,
proof,
};

header_validator
.validate_header_with_proof(&hwp)
.map_err(|_| EraValidateError::ProofValidationFailure)
}
Loading