This repository has been archived by the owner on Feb 21, 2024. It is now read-only.

Commit

Update starky and leverage it as dependency for plonky2_evm (0xPolygonZero#1503)

* Update prover logic

* Add helper method for CTL data

* Some cleanup

* Update some methods

* Fix

* Some more fixes

* More tweaks

* Final

* Leverage starky crate

* Additional tweaks

* Cleanup

* More cleanup

* Fix

* Cleanup imports

* Fix

* Final tweaks

* Cleanup and hide behind debug_assertions attribute

* Clippy

* Fix no-std

* Make wasm compatible

* Doc and remove todo

* API cleanup and remove TODO

* Add Debug impls

* Add documentation for public items

* Feature-gate alloc imports

* Import method from starky instead

* Add simple crate and module documentation

* Apply comments

* Add lib level documentation

* Add test without lookups

* Fix starks without logup

* Cleanup

* Some more cleanup

* Fix get_challenges for non-lookup STARKs

* Add additional config methods and tests

* Apply comments

* More comments
Nashtare authored Feb 13, 2024
1 parent b6fec06 commit 3ec1bfd
Showing 82 changed files with 2,295 additions and 3,823 deletions.
9 changes: 7 additions & 2 deletions evm/Cargo.toml
@@ -27,8 +27,9 @@ num-bigint = "0.4.3"
once_cell = "1.13.0"
pest = "2.1.3"
pest_derive = "2.1.0"
plonky2 = { path = "../plonky2", default-features = false, features = ["timing"] }
plonky2 = { path = "../plonky2", features = ["timing"] }
plonky2_util = { path = "../util" }
starky = { path = "../starky" }
rand = "0.8.5"
rand_chacha = "0.3.1"
rlp = "0.5.1"
@@ -51,7 +52,11 @@ sha2 = "0.10.6"
[features]
default = ["parallel"]
asmtools = ["hex"]
parallel = ["plonky2/parallel", "plonky2_maybe_rayon/parallel"]
parallel = [
"plonky2/parallel",
"plonky2_maybe_rayon/parallel",
"starky/parallel"
]

[[bin]]
name = "assemble"
9 changes: 6 additions & 3 deletions evm/src/all_stark.rs
@@ -3,15 +3,17 @@ use core::ops::Deref;
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use starky::config::StarkConfig;
use starky::cross_table_lookup::{CrossTableLookup, TableIdx, TableWithColumns};
use starky::evaluation_frame::StarkFrame;
use starky::stark::Stark;

use crate::arithmetic::arithmetic_stark;
use crate::arithmetic::arithmetic_stark::ArithmeticStark;
use crate::byte_packing::byte_packing_stark::{self, BytePackingStark};
use crate::config::StarkConfig;
use crate::cpu::cpu_stark;
use crate::cpu::cpu_stark::CpuStark;
use crate::cpu::membus::NUM_GP_CHANNELS;
use crate::cross_table_lookup::{CrossTableLookup, TableIdx, TableWithColumns};
use crate::keccak::keccak_stark;
use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_sponge::columns::KECCAK_RATE_BYTES;
@@ -21,7 +23,6 @@ use crate::logic;
use crate::logic::LogicStark;
use crate::memory::memory_stark;
use crate::memory::memory_stark::MemoryStark;
use crate::stark::Stark;

/// Structure containing all STARKs and the cross-table lookups.
#[derive(Clone)]
@@ -66,6 +67,8 @@ impl<F: RichField + Extendable<D>, const D: usize> AllStark<F, D> {
}
}

pub type EvmStarkFrame<T, U, const N: usize> = StarkFrame<T, U, N, 0>;

/// Associates STARK tables with a unique index.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Table {
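
The all_stark.rs hunk above introduces the EvmStarkFrame alias on top of starky's StarkFrame. The sketch below is illustrative only, not part of the commit: it assumes starky's StarkEvaluationFrame accessors (from_values, get_local_values, get_next_values, get_public_inputs) and a hypothetical 3-column row layout, to show what the trailing 0 in the alias pins down, namely that EVM tables carry no public inputs.

// Illustrative sketch, not from the commit. Assumes the `StarkFrame` /
// `StarkEvaluationFrame` API exported by the starky crate imported above.
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;
use starky::evaluation_frame::{StarkEvaluationFrame, StarkFrame};

// Same shape as the alias added in all_stark.rs: the final `0` fixes the
// number of public inputs for every EVM table to zero.
type EvmStarkFrame<T, U, const N: usize> = StarkFrame<T, U, N, 0>;

fn main() {
    type F = GoldilocksField;
    // A hypothetical 3-column table: a local row and a next row, no public inputs.
    let local = [F::ONE, F::TWO, F::ZERO];
    let next = [F::TWO, F::ONE, F::ZERO];
    let frame: EvmStarkFrame<F, F, 3> = StarkFrame::from_values(&local, &next, &[]);

    // Constraint evaluators read rows through the trait accessors.
    assert_eq!(frame.get_local_values()[0], F::ONE);
    assert_eq!(frame.get_next_values()[1], F::ONE);
    assert!(frame.get_public_inputs().is_empty());
}
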
16 changes: 8 additions & 8 deletions evm/src/arithmetic/addcy.rs
@@ -22,10 +22,10 @@ use plonky2::field::types::{Field, PrimeField64};
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};

use crate::arithmetic::columns::*;
use crate::arithmetic::utils::u256_to_array;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};

/// Generate row for ADD, SUB, GT and LT operations.
pub(crate) fn generate<F: PrimeField64>(
@@ -263,10 +263,10 @@ mod tests {
use plonky2::field::types::{Field, Sample};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
use starky::constraint_consumer::ConstraintConsumer;

use super::*;
use crate::arithmetic::columns::NUM_ARITH_COLUMNS;
use crate::constraint_consumer::ConstraintConsumer;

// TODO: Should be able to refactor this test to apply to all operations.
#[test]
@@ -284,14 +284,14 @@
lv[IS_LT] = F::ZERO;
lv[IS_GT] = F::ZERO;

let mut constrant_consumer = ConstraintConsumer::new(
let mut constraint_consumer = ConstraintConsumer::new(
vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
F::ONE,
F::ONE,
F::ONE,
);
eval_packed_generic(&lv, &mut constrant_consumer);
for &acc in &constrant_consumer.constraint_accs {
eval_packed_generic(&lv, &mut constraint_consumer);
for &acc in &constraint_consumer.accumulators() {
assert_eq!(acc, F::ZERO);
}
}
@@ -324,14 +324,14 @@

generate(&mut lv, op_filter, left_in, right_in);

let mut constrant_consumer = ConstraintConsumer::new(
let mut constraint_consumer = ConstraintConsumer::new(
vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
F::ONE,
F::ONE,
F::ONE,
);
eval_packed_generic(&lv, &mut constrant_consumer);
for &acc in &constrant_consumer.constraint_accs {
eval_packed_generic(&lv, &mut constraint_consumer);
for &acc in &constraint_consumer.accumulators() {
assert_eq!(acc, F::ZERO);
}

23 changes: 14 additions & 9 deletions evm/src/arithmetic/arithmetic_stark.rs
@@ -9,18 +9,18 @@ use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::util::transpose;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::cross_table_lookup::TableWithColumns;
use starky::evaluation_frame::StarkEvaluationFrame;
use starky::lookup::{Column, Filter, Lookup};
use starky::stark::Stark;
use static_assertions::const_assert;

use super::columns::{op_flags, NUM_ARITH_COLUMNS};
use super::shift;
use crate::all_stark::Table;
use crate::all_stark::{EvmStarkFrame, Table};
use crate::arithmetic::columns::{NUM_SHARED_COLS, RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS};
use crate::arithmetic::{addcy, byte, columns, divmod, modular, mul, Operation};
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::TableWithColumns;
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::lookup::{Column, Filter, Lookup};
use crate::stark::Stark;

/// Creates a vector of `Columns` to link the 16-bit columns of the arithmetic table,
/// split into groups of N_LIMBS at a time in `regs`, with the corresponding 32-bit
@@ -190,12 +190,13 @@ impl<F: RichField, const D: usize> ArithmeticStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticStark<F, D> {
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_ARITH_COLUMNS>
type EvaluationFrame<FE, P, const D2: usize> = EvmStarkFrame<P, FE, NUM_ARITH_COLUMNS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_ARITH_COLUMNS>;
type EvaluationFrameTarget =
EvmStarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, NUM_ARITH_COLUMNS>;

fn eval_packed_generic<FE, P, const D2: usize>(
&self,
@@ -320,6 +321,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
filter_columns: vec![None; NUM_SHARED_COLS],
}]
}

fn requires_ctls(&self) -> bool {
true
}
}

#[cfg(test)]
@@ -330,11 +335,11 @@ mod tests {
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

use super::{columns, ArithmeticStark};
use crate::arithmetic::columns::OUTPUT_REGISTER;
use crate::arithmetic::*;
use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

#[test]
fn degree() -> Result<()> {
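
The arithmetic_stark.rs hunks above move the Stark trait impl and its tests onto starky's types, including the test helpers test_stark_low_degree and test_stark_circuit_constraints now imported from starky::stark_testing. The snippet below is a hedged sketch of how those helpers are typically driven; it is not copied from the collapsed test bodies, and it assumes ArithmeticStark implements Default, that both helpers take the STARK by value and return anyhow::Result<()>, and that the generic parameters are supplied in the order shown. The test name is hypothetical.

// Hedged sketch, not copied from the collapsed test bodies in the diff.
// Assumptions: `ArithmeticStark: Default`, helper signatures as described in
// the lead-in above, and placement inside the test module of arithmetic_stark.rs.
use anyhow::Result;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

use super::ArithmeticStark; // as in the test module shown above

#[test]
fn low_degree_and_circuit_constraints() -> Result<()> {
    const D: usize = 2;
    type C = PoseidonGoldilocksConfig;
    type F = <C as GenericConfig<D>>::F;
    type S = ArithmeticStark<F, D>;

    // Native constraint polynomials stay within the configured degree bound.
    test_stark_low_degree::<F, S, D>(S::default())?;
    // Recursive (circuit) constraints agree with the native evaluation.
    test_stark_circuit_constraints::<F, C, S, D>(S::default())
}
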
8 changes: 4 additions & 4 deletions evm/src/arithmetic/byte.rs
@@ -69,11 +69,11 @@ use plonky2::field::types::{Field, PrimeField64};
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use static_assertions::const_assert;

use crate::arithmetic::columns::*;
use crate::arithmetic::utils::u256_to_array;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};

// Give meaningful names to the columns of AUX_INPUT_REGISTER_0 that
// we're using
@@ -480,14 +480,14 @@ mod tests {
let out_byte = val.byte(31 - i) as u64;
verify_output(&lv, out_byte);

let mut constrant_consumer = ConstraintConsumer::new(
let mut constraint_consumer = ConstraintConsumer::new(
vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
F::ONE,
F::ONE,
F::ONE,
);
eval_packed(&lv, &mut constrant_consumer);
for &acc in &constrant_consumer.constraint_accs {
eval_packed(&lv, &mut constraint_consumer);
for &acc in &constraint_consumer.accumulators() {
assert_eq!(acc, F::ZERO);
}
}
103 changes: 54 additions & 49 deletions evm/src/arithmetic/divmod.rs
@@ -11,13 +11,13 @@ use plonky2::field::types::PrimeField64;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};

use crate::arithmetic::columns::*;
use crate::arithmetic::modular::{
generate_modular_op, modular_constr_poly, modular_constr_poly_ext_circuit,
};
use crate::arithmetic::utils::*;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};

/// Generates the output and auxiliary values for modular operations,
/// assuming the input, modular and output limbs are already set.
@@ -215,10 +215,10 @@ mod tests {
use plonky2::field::types::{Field, Sample};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
use starky::constraint_consumer::ConstraintConsumer;

use super::*;
use crate::arithmetic::columns::NUM_ARITH_COLUMNS;
use crate::constraint_consumer::ConstraintConsumer;

const N_RND_TESTS: usize = 1000;
const MODULAR_OPS: [usize; 2] = [IS_MOD, IS_DIV];
@@ -247,7 +247,7 @@ GoldilocksField::ONE,
GoldilocksField::ONE,
);
eval_packed(&lv, &nv, &mut constraint_consumer);
for &acc in &constraint_consumer.constraint_accs {
for &acc in &constraint_consumer.accumulators() {
assert_eq!(acc, GoldilocksField::ZERO);
}
}
@@ -306,7 +306,7 @@ GoldilocksField::ZERO,
GoldilocksField::ZERO,
);
eval_packed(&lv, &nv, &mut constraint_consumer);
for &acc in &constraint_consumer.constraint_accs {
for &acc in &constraint_consumer.accumulators() {
assert_eq!(acc, GoldilocksField::ZERO);
}
}
@@ -321,52 +321,57 @@

for op_filter in MODULAR_OPS {
for _i in 0..N_RND_TESTS {
// set inputs to random values and the modulus to zero;
// the output is defined to be zero when modulus is zero.
let mut lv = [F::default(); NUM_ARITH_COLUMNS]
.map(|_| F::from_canonical_u16(rng.gen::<u16>()));
let mut nv = [F::default(); NUM_ARITH_COLUMNS]
.map(|_| F::from_canonical_u16(rng.gen::<u16>()));

// Reset operation columns, then select one
for op in MODULAR_OPS {
lv[op] = F::ZERO;
for corrupt_constraints in [false, true] {
// set inputs to random values and the modulus to zero;
// the output is defined to be zero when modulus is zero.
let mut lv = [F::default(); NUM_ARITH_COLUMNS]
.map(|_| F::from_canonical_u16(rng.gen::<u16>()));
let mut nv = [F::default(); NUM_ARITH_COLUMNS]
.map(|_| F::from_canonical_u16(rng.gen::<u16>()));

// Reset operation columns, then select one
for op in MODULAR_OPS {
lv[op] = F::ZERO;
}
// Since SHR uses the logic for DIV, `IS_SHR` should also be set to 0 here.
lv[IS_SHR] = F::ZERO;
lv[op_filter] = F::ONE;

let input0 = U256::from(rng.gen::<[u8; 32]>());
let input1 = U256::zero();

generate(&mut lv, &mut nv, op_filter, input0, input1, U256::zero());

// check that the correct output was generated
assert!(lv[OUTPUT_REGISTER].iter().all(|&c| c == F::ZERO));

let mut constraint_consumer = ConstraintConsumer::new(
vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
GoldilocksField::ONE,
GoldilocksField::ZERO,
GoldilocksField::ZERO,
);
eval_packed(&lv, &nv, &mut constraint_consumer);

if corrupt_constraints {
// Corrupt one output limb by setting it to a non-zero value.
let random_oi = OUTPUT_REGISTER.start + rng.gen::<usize>() % N_LIMBS;
lv[random_oi] = F::from_canonical_u16(rng.gen_range(1..u16::MAX));

eval_packed(&lv, &nv, &mut constraint_consumer);

// Check that at least one of the constraints was non-zero.
assert!(constraint_consumer
.accumulators()
.iter()
.any(|&acc| acc != F::ZERO));
} else {
assert!(constraint_consumer
.accumulators()
.iter()
.all(|&acc| acc == F::ZERO));
}
}
// Since SHR uses the logic for DIV, `IS_SHR` should also be set to 0 here.
lv[IS_SHR] = F::ZERO;
lv[op_filter] = F::ONE;

let input0 = U256::from(rng.gen::<[u8; 32]>());
let input1 = U256::zero();

generate(&mut lv, &mut nv, op_filter, input0, input1, U256::zero());

// check that the correct output was generated
assert!(lv[OUTPUT_REGISTER].iter().all(|&c| c == F::ZERO));

let mut constraint_consumer = ConstraintConsumer::new(
vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
GoldilocksField::ONE,
GoldilocksField::ZERO,
GoldilocksField::ZERO,
);
eval_packed(&lv, &nv, &mut constraint_consumer);
assert!(constraint_consumer
.constraint_accs
.iter()
.all(|&acc| acc == F::ZERO));

// Corrupt one output limb by setting it to a non-zero value
let random_oi = OUTPUT_REGISTER.start + rng.gen::<usize>() % N_LIMBS;
lv[random_oi] = F::from_canonical_u16(rng.gen_range(1..u16::MAX));

eval_packed(&lv, &nv, &mut constraint_consumer);

// Check that at least one of the constraints was non-zero
assert!(constraint_consumer
.constraint_accs
.iter()
.any(|&acc| acc != F::ZERO));
}
}
}

