Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Tests for Segment Proving Without Keccak Tables #648

Merged
merged 23 commits into from
Oct 2, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions evm_arithmetization/src/arithmetic/mod.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
use ethereum_types::U256;
use plonky2::field::types::PrimeField64;
use serde::{Deserialize, Serialize};

use self::columns::{
INPUT_REGISTER_0, INPUT_REGISTER_1, INPUT_REGISTER_2, OPCODE_COL, OUTPUT_REGISTER,
Expand All @@ -24,7 +25,7 @@ pub(crate) mod columns;
///
/// `Shl` and `Shr` are handled differently, by leveraging `Mul` and `Div`
/// respectively.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub(crate) enum BinaryOperator {
Add,
Mul,
Expand Down Expand Up @@ -114,7 +115,7 @@ impl BinaryOperator {

/// An enum representing different ternary operations.
#[allow(clippy::enum_variant_names)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub(crate) enum TernaryOperator {
AddMod,
MulMod,
Expand Down
14 changes: 10 additions & 4 deletions evm_arithmetization/src/cpu/kernel/interpreter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,7 @@ pub(crate) struct Interpreter<F: RichField> {
/// halt_context
pub(crate) halt_context: Option<usize>,
/// Counts the number of appearances of each opcode. For debugging purposes.
#[allow(unused)]
pub(crate) opcode_count: [usize; 0x100],
pub(crate) opcode_count: HashMap<Operation, usize>,
jumpdest_table: HashMap<usize, BTreeSet<usize>>,
/// `true` if we are currently carrying out a jumpdest analysis.
pub(crate) is_jumpdest_analysis: bool,
Expand Down Expand Up @@ -179,7 +178,7 @@ impl<F: RichField> Interpreter<F> {
// while the label `halt` is the halting label in the kernel.
halt_offsets: vec![DEFAULT_HALT_OFFSET, KERNEL.global_labels["halt_final"]],
halt_context: None,
opcode_count: [0; 256],
opcode_count: HashMap::new(),
jumpdest_table: HashMap::new(),
is_jumpdest_analysis: false,
clock: 0,
Expand Down Expand Up @@ -210,7 +209,7 @@ impl<F: RichField> Interpreter<F> {
generation_state: state.soft_clone(),
halt_offsets: vec![halt_offset],
halt_context: Some(halt_context),
opcode_count: [0; 256],
opcode_count: HashMap::new(),
jumpdest_table: HashMap::new(),
is_jumpdest_analysis: true,
clock: 0,
Expand Down Expand Up @@ -429,6 +428,10 @@ impl<F: RichField> Interpreter<F> {
self.max_cpu_len_log
}

pub(crate) fn reset_opcode_counts(&mut self) {
self.opcode_count = HashMap::new();
}

pub(crate) fn code(&self) -> &MemorySegmentState {
// The context is 0 if we are in kernel mode.
&self.generation_state.memory.contexts[(1 - self.is_kernel() as usize) * self.context()]
Expand Down Expand Up @@ -662,6 +665,9 @@ impl<F: RichField> State<F> for Interpreter<F> {

let op = decode(registers, opcode)?;

// Increment the opcode count
*self.opcode_count.entry(op).or_insert(0) += 1;

fill_op_flag(op, &mut row);

self.fill_stack_fields(&mut row)?;
Expand Down
77 changes: 77 additions & 0 deletions evm_arithmetization/src/fixed_recursive_verifier.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2997,3 +2997,80 @@ where
circuit.verifier_only.circuit_digest.elements.len()
+ (1 << circuit.common.config.fri_config.cap_height) * NUM_HASH_OUT_ELTS
}

/// Tests exercising segment proof generation for payloads that trigger no
/// Keccak operations, verifying that the prover handles such segments
/// end-to-end (generation, proving, and root verification).
#[cfg(test)]
#[cfg(not(feature = "cdk_erigon"))]
mod tests {
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::PoseidonGoldilocksConfig;
use plonky2::timed;

use super::*;
use crate::testing_utils::{empty_payload, init_logger};
use crate::witness::operation::Operation;

type F = GoldilocksField;
const D: usize = 2;
type C = PoseidonGoldilocksConfig;

// NOTE(review): `#[ignore]`d presumably because building all recursive
// circuits is expensive; run explicitly with `cargo test -- --ignored`.
#[test]
#[ignore]
fn test_segment_proof_generation_without_keccak() -> anyhow::Result<()> {
init_logger();

let all_stark = AllStark::<F, D>::default();
let config = StarkConfig::standard_fast_config();

// Generate a dummy payload for testing
let payload = empty_payload()?;
let max_cpu_len_log = Some(7);
let mut segment_iterator = SegmentDataIterator::<F>::new(&payload, max_cpu_len_log);
let (_, mut segment_data) = segment_iterator.next().unwrap()?;

// The empty payload must not have executed any Keccak opcode; this is the
// precondition that makes this a "without Keccak tables" test.
let opcode_counts = &segment_data.opcode_counts;
assert!(!opcode_counts.contains_key(&Operation::KeccakGeneral));

let timing = &mut TimingTree::new(
"Segment Proof Generation Without Keccak Test",
log::Level::Info,
);
// Process and prove segment
// NOTE(review): the ranges below are per-table circuit degree bounds —
// confirm against the table ordering in `AllRecursiveCircuits::new`.
let all_circuits = timed!(
timing,
log::Level::Info,
"Create all recursive circuits",
AllRecursiveCircuits::<F, C, D>::new(
&all_stark,
&[16..17, 8..9, 7..8, 4..9, 8..9, 4..7, 17..18, 17..18, 17..18],
&config,
)
);

let segment_proof = timed!(
timing,
log::Level::Info,
"Prove segment",
all_circuits.prove_segment(
&all_stark,
&config,
payload.trim(),
&mut segment_data,
timing,
None,
)?
);

// Verify the generated segment proof
timed!(
timing,
log::Level::Info,
"Verify segment proof",
all_circuits.verify_root(segment_proof.proof_with_pvs.intern.clone())?
);

// Print timing details
timing.print();

Ok(())
}
}
11 changes: 11 additions & 0 deletions evm_arithmetization/src/generation/segments.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
//! Module defining the logic around proof segmentation into chunks,
//! which allows what is commonly known as zk-continuations.

use std::collections::HashMap;

use anyhow::Result;
use plonky2::hash::hash_types::RichField;
use serde::{Deserialize, Serialize};
Expand All @@ -11,6 +13,7 @@ use crate::cpu::kernel::interpreter::{set_registers_and_run, ExtraSegmentData, I
use crate::generation::state::State;
use crate::generation::{collect_debug_tries, debug_inputs, ErrorWithTries, GenerationInputs};
use crate::witness::memory::MemoryState;
use crate::witness::operation::Operation;
use crate::witness::state::RegistersState;

/// Structure holding the data needed to initialize a segment.
Expand All @@ -29,6 +32,8 @@ pub struct GenerationSegmentData {
pub(crate) extra_data: ExtraSegmentData,
/// Log of the maximal cpu length.
pub(crate) max_cpu_len_log: Option<usize>,
/// Counts the number of appearances of each opcode. For debugging purposes.
pub(crate) opcode_counts: HashMap<Operation, usize>,
}

impl GenerationSegmentData {
Expand Down Expand Up @@ -77,6 +82,7 @@ fn build_segment_data<F: RichField>(
access_lists_ptrs: interpreter.generation_state.access_lists_ptrs.clone(),
state_ptrs: interpreter.generation_state.state_ptrs.clone(),
},
opcode_counts: interpreter.opcode_count.clone(),
}
}

Expand Down Expand Up @@ -133,6 +139,9 @@ impl<F: RichField> SegmentDataIterator<F> {

let segment_index = segment_data.segment_index;

// Reset opcode counts before executing the segment
self.interpreter.reset_opcode_counts();

// Run the interpreter to get `registers_after` and the partial data for the
// next segment.
let execution_result =
Expand All @@ -147,6 +156,8 @@ impl<F: RichField> SegmentDataIterator<F> {
));

segment_data.registers_after = updated_registers;
segment_data.opcode_counts = self.interpreter.opcode_count.clone();

Ok(Some(Box::new((segment_data, partial_segment_data))))
} else {
let inputs = &self.interpreter.get_generation_state().inputs;
Expand Down
3 changes: 2 additions & 1 deletion evm_arithmetization/src/logic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use serde::{Deserialize, Serialize};
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::evaluation_frame::StarkEvaluationFrame;
use starky::lookup::{Column, Filter};
Expand Down Expand Up @@ -118,7 +119,7 @@ pub(crate) struct LogicStark<F, const D: usize> {
}

/// Logic operations.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub(crate) enum Op {
And,
Or,
Expand Down
56 changes: 54 additions & 2 deletions evm_arithmetization/src/testing_utils.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
//! A set of utility functions and constants to be used by `evm_arithmetization`
//! unit and integration tests.

use anyhow::Result;
use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV};
use ethereum_types::{BigEndianHash, H256, U256};
use ethereum_types::{Address, BigEndianHash, H256, U256};
use hex_literal::hex;
use keccak_hash::keccak;
use mpt_trie::{
Expand All @@ -12,7 +13,9 @@ use mpt_trie::{

pub use crate::cpu::kernel::cancun_constants::*;
pub use crate::cpu::kernel::constants::global_exit_root::*;
use crate::{generation::mpt::AccountRlp, proof::BlockMetadata, util::h2u};
use crate::generation::TrieInputs;
use crate::proof::TrieRoots;
use crate::{generation::mpt::AccountRlp, proof::BlockMetadata, util::h2u, GenerationInputs};

pub const EMPTY_NODE_HASH: H256 = H256(hex!(
"56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"
Expand Down Expand Up @@ -161,3 +164,52 @@ pub fn scalable_contract_from_storage(storage_trie: &HashedPartialTrie) -> Accou
..Default::default()
}
}

/// Builds a minimal `GenerationInputs` payload: empty tries, no transactions,
/// no receipts, and default-valued block metadata.
///
/// Intended for tests that need a valid-but-trivial payload (e.g. proving a
/// segment that executes no Keccak operations). Since nothing is executed,
/// the post-state trie root equals the pre-state (checkpoint) root.
///
/// # Errors
///
/// Currently infallible; returns `Result` for call-site uniformity with
/// other payload constructors.
pub fn empty_payload() -> Result<GenerationInputs> {
    // Default block metadata; only chain id and block number are non-zero.
    let block_metadata = BlockMetadata {
        block_beneficiary: Address::zero(),
        block_timestamp: U256::zero(),
        block_number: U256::one(),
        block_difficulty: U256::zero(),
        block_random: H256::zero(),
        block_gaslimit: U256::zero(),
        block_chain_id: U256::one(),
        block_base_fee: U256::zero(),
        ..Default::default()
    };

    // Empty pre-state trie; hash it once before moving it into `tries_before`
    // (avoids the redundant clones of the original implementation).
    let state_trie_before = HashedPartialTrie::from(crate::Node::Empty);
    let checkpoint_state_trie_root = state_trie_before.hash();

    // Tries without any transactions, receipts, or storage.
    let tries_before = TrieInputs {
        state_trie: state_trie_before,
        storage_tries: Vec::new(),
        transactions_trie: HashedPartialTrie::from(crate::Node::Empty),
        receipts_trie: HashedPartialTrie::from(crate::Node::Empty),
    };

    // No execution takes place, so the expected post-state root is exactly
    // the checkpoint root computed above.
    let trie_roots_after = TrieRoots {
        state_root: checkpoint_state_trie_root,
        transactions_root: tries_before.transactions_trie.hash(),
        receipts_root: tries_before.receipts_trie.hash(),
    };

    // Assemble the inputs; all remaining fields take their defaults.
    let inputs = GenerationInputs {
        tries: tries_before,
        trie_roots_after,
        checkpoint_state_trie_root,
        block_metadata,
        ..Default::default()
    };

    Ok(inputs)
}
3 changes: 2 additions & 1 deletion evm_arithmetization/src/witness/operation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ use ethereum_types::{BigEndianHash, U256};
use itertools::Itertools;
use keccak_hash::keccak;
use plonky2::hash::hash_types::RichField;
use serde::{Deserialize, Serialize};

use super::state::KERNEL_CONTEXT;
use super::transition::Transition;
Expand Down Expand Up @@ -29,7 +30,7 @@ use crate::witness::util::{
};
use crate::{arithmetic, logic};

#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub(crate) enum Operation {
Iszero,
Not,
Expand Down
Loading