Add keccak256 hasher for transcript (#2)
* Add keccak256 hasher for transcript

* Fix keccak256 common point prefix

* Remove unnecessary hasher_* variables
kilic authored Jan 17, 2023
1 parent 4b34aa2 commit 342d07a
Showing 3 changed files with 240 additions and 2 deletions.
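
Not part of the commit itself: a minimal usage sketch of the new transcript types, assuming halo2_proofs (this fork) and halo2curves as dependencies and the pasta Vesta curve (Eq/EqAffine) that the new test below also uses. The prover writes points and scalars into a Keccak256Write transcript and squeezes challenges from the running Keccak256 state; the verifier replays the same messages through Keccak256Read and derives identical challenges.

    use group::{Curve, Group};
    use halo2_proofs::transcript::{
        Challenge255, Keccak256Read, Keccak256Write, Transcript, TranscriptRead,
        TranscriptReadBuffer, TranscriptWrite, TranscriptWriterBuffer,
    };
    use halo2curves::pasta::{Eq as Vesta, EqAffine, Fp};

    fn main() {
        // Prover side: absorb a point and a scalar, then squeeze a challenge.
        let point = Vesta::generator().to_affine();
        let scalar = Fp::from(42u64);
        let mut transcript =
            Keccak256Write::<Vec<u8>, EqAffine, Challenge255<EqAffine>>::init(Vec::new());
        transcript.write_point(point).unwrap();
        transcript.write_scalar(scalar).unwrap();
        let prover_challenge = transcript.squeeze_challenge_scalar::<()>();
        let proof = transcript.finalize();

        // Verifier side: replay the same messages and re-derive the challenge.
        let mut transcript =
            Keccak256Read::<_, EqAffine, Challenge255<EqAffine>>::init(&proof[..]);
        assert_eq!(transcript.read_point().unwrap(), point);
        assert_eq!(transcript.read_scalar().unwrap(), scalar);
        let verifier_challenge = transcript.squeeze_challenge_scalar::<()>();
        assert_eq!(*prover_challenge, *verifier_challenge);
    }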
1 change: 1 addition & 0 deletions halo2_proofs/Cargo.toml
@@ -52,6 +52,7 @@ halo2curves = { git = 'https://github.com/privacy-scaling-explorations/halo2curv
rand_core = { version = "0.6", default-features = false }
tracing = "0.1"
blake2b_simd = "1"
sha3 = "0.9.1"

# Developer tooling dependencies
plotters = { version = "0.3.0", optional = true }
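
The only dependency added is the sha3 crate, whose Keccak256 type backs the new transcript below via the Digest trait (new, update, clone, finalize). A minimal, standalone sketch of that API, independent of this repository:

    use std::convert::TryInto;

    use sha3::{Digest, Keccak256};

    fn keccak256(data: &[u8]) -> [u8; 32] {
        let mut hasher = Keccak256::new();
        hasher.update(data);
        // finalize() yields a 32-byte digest; the transcript instead keeps the hasher
        // around as a growing state and only finalizes clones of it.
        hasher.finalize().as_slice().try_into().unwrap()
    }

    fn main() {
        // e.g. hash the domain-separation string that the transcript absorbs first
        println!("{:02x?}", keccak256(b"Halo2-Transcript"));
    }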
42 changes: 40 additions & 2 deletions halo2_proofs/src/poly/multiopen_test.rs
@@ -13,8 +13,9 @@ mod test {
};
use crate::poly::{Coeff, Polynomial};
use crate::transcript::{
self, Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, TranscriptRead,
TranscriptReadBuffer, TranscriptWrite, TranscriptWriterBuffer,
self, Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, Keccak256Read,
Keccak256Write, TranscriptRead, TranscriptReadBuffer, TranscriptWrite,
TranscriptWriterBuffer,
};
use ff::Field;
use group::{Curve, Group};
@@ -59,6 +60,43 @@ mod test {
>(verifier_params, &proof[..], true);
}

#[test]
fn test_roundtrip_ipa_keccak() {
use crate::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA};
use crate::poly::ipa::multiopen::{ProverIPA, VerifierIPA};
use crate::poly::ipa::strategy::AccumulatorStrategy;
use halo2curves::pasta::{Ep, EqAffine, Fp};

const K: u32 = 4;

let params = ParamsIPA::<EqAffine>::new(K);

let proof = create_proof::<
IPACommitmentScheme<EqAffine>,
ProverIPA<_>,
_,
Keccak256Write<_, _, Challenge255<_>>,
>(&params);

let verifier_params = params.verifier_params();

verify::<
IPACommitmentScheme<EqAffine>,
VerifierIPA<_>,
_,
Keccak256Read<_, _, Challenge255<_>>,
AccumulatorStrategy<_>,
>(verifier_params, &proof[..], false);

verify::<
IPACommitmentScheme<EqAffine>,
VerifierIPA<_>,
_,
Keccak256Read<_, _, Challenge255<_>>,
AccumulatorStrategy<_>,
>(verifier_params, &proof[..], true);
}

#[test]
fn test_roundtrip_gwc() {
use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG};
199 changes: 199 additions & 0 deletions halo2_proofs/src/transcript.rs
@@ -3,6 +3,7 @@

use blake2b_simd::{Params as Blake2bParams, State as Blake2bState};
use group::ff::PrimeField;
use sha3::{Digest, Keccak256};
use std::convert::TryInto;

use halo2curves::{Coordinates, CurveAffine, FieldExt};
@@ -19,6 +20,23 @@ const BLAKE2B_PREFIX_POINT: u8 = 1;
/// Prefix to a prover's message containing a scalar
const BLAKE2B_PREFIX_SCALAR: u8 = 2;

/// Prefix to a prover's message soliciting a challenge
const KECCAK256_PREFIX_CHALLENGE: u8 = 0;

/// First prefix to a prover's message soliciting a challenge
/// Not included in the growing state!
const KECCAK256_PREFIX_CHALLENGE_LO: u8 = 10;

/// Second prefix to a prover's message soliciting a challenge
/// Not included in the growing state!
const KECCAK256_PREFIX_CHALLENGE_HI: u8 = 11;

/// Prefix to a prover's message containing a curve point
const KECCAK256_PREFIX_POINT: u8 = 1;

/// Prefix to a prover's message containing a scalar
const KECCAK256_PREFIX_SCALAR: u8 = 2;

/// Generic transcript view (from either the prover or verifier's perspective)
pub trait Transcript<C: CurveAffine, E: EncodedChallenge<C>> {
/// Squeeze an encoded verifier challenge from the transcript.
@@ -88,6 +106,14 @@ pub struct Blake2bRead<R: Read, C: CurveAffine, E: EncodedChallenge<C>> {
_marker: PhantomData<(C, E)>,
}

/// Keccak256 hash function reader for EVM compatibility
#[derive(Debug, Clone)]
pub struct Keccak256Read<R: Read, C: CurveAffine, E: EncodedChallenge<C>> {
state: Keccak256,
reader: R,
_marker: PhantomData<(C, E)>,
}

impl<R: Read, C: CurveAffine> TranscriptReadBuffer<R, C, Challenge255<C>>
for Blake2bRead<R, C, Challenge255<C>>
{
@@ -104,6 +130,21 @@ impl<R: Read, C: CurveAffine> TranscriptReadBuffer<R, C, Challenge255<C>>
}
}

impl<R: Read, C: CurveAffine> TranscriptReadBuffer<R, C, Challenge255<C>>
for Keccak256Read<R, C, Challenge255<C>>
{
/// Initialize a transcript given an input buffer.
fn init(reader: R) -> Self {
let mut state = Keccak256::new();
state.update(b"Halo2-Transcript");
Keccak256Read {
state,
reader,
_marker: PhantomData,
}
}
}

impl<R: Read, C: CurveAffine> TranscriptRead<C, Challenge255<C>>
for Blake2bRead<R, C, Challenge255<C>>
{
@@ -133,6 +174,35 @@ impl<R: Read, C: CurveAffine> TranscriptRead<C, Challenge255<C>>
}
}

impl<R: Read, C: CurveAffine> TranscriptRead<C, Challenge255<C>>
for Keccak256Read<R, C, Challenge255<C>>
{
fn read_point(&mut self) -> io::Result<C> {
let mut compressed = C::Repr::default();
self.reader.read_exact(compressed.as_mut())?;
let point: C = Option::from(C::from_bytes(&compressed)).ok_or_else(|| {
io::Error::new(io::ErrorKind::Other, "invalid point encoding in proof")
})?;
self.common_point(point)?;

Ok(point)
}

fn read_scalar(&mut self) -> io::Result<C::Scalar> {
let mut data = <C::Scalar as PrimeField>::Repr::default();
self.reader.read_exact(data.as_mut())?;
let scalar: C::Scalar = Option::from(C::Scalar::from_repr(data)).ok_or_else(|| {
io::Error::new(
io::ErrorKind::Other,
"invalid field element encoding in proof",
)
})?;
self.common_scalar(scalar)?;

Ok(scalar)
}
}

impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>>
for Blake2bRead<R, C, Challenge255<C>>
{
@@ -165,6 +235,48 @@ impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>>
}
}

impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>>
for Keccak256Read<R, C, Challenge255<C>>
{
fn squeeze_challenge(&mut self) -> Challenge255<C> {
self.state.update(&[KECCAK256_PREFIX_CHALLENGE]);

let mut state_lo = self.state.clone();
let mut state_hi = self.state.clone();
state_lo.update(&[KECCAK256_PREFIX_CHALLENGE_LO]);
state_hi.update(&[KECCAK256_PREFIX_CHALLENGE_HI]);
let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap();
let result_hi: [u8; 32] = state_hi.finalize().as_slice().try_into().unwrap();

let mut t = result_lo.to_vec();
t.extend_from_slice(&result_hi[..]);
let result: [u8; 64] = t.as_slice().try_into().unwrap();

Challenge255::<C>::new(&result)
}

fn common_point(&mut self, point: C) -> io::Result<()> {
self.state.update(&[KECCAK256_PREFIX_POINT]);
let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
io::Error::new(
io::ErrorKind::Other,
"cannot write points at infinity to the transcript",
)
})?;
self.state.update(coords.x().to_repr().as_ref());
self.state.update(coords.y().to_repr().as_ref());

Ok(())
}

fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
self.state.update(&[KECCAK256_PREFIX_SCALAR]);
self.state.update(scalar.to_repr().as_ref());

Ok(())
}
}
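
For reference (not part of the diff): the challenge derivation above can be reproduced with sha3 alone. The running state first absorbs the one-byte challenge prefix, is then forked into a LO copy and a HI copy that each absorb their marker byte, and the two 32-byte digests are concatenated into the 64 bytes handed to Challenge255::new. A sketch using the constant values from this file:

    use sha3::{Digest, Keccak256};

    // Mirrors squeeze_challenge in Keccak256Read/Keccak256Write: `state` keeps growing
    // across calls (it retains the CHALLENGE prefix), while the LO/HI markers are only
    // absorbed by the finalized forks, never by the running state.
    fn squeeze_64_bytes(state: &mut Keccak256) -> [u8; 64] {
        state.update(&[0u8]); // KECCAK256_PREFIX_CHALLENGE
        let mut state_lo = state.clone();
        let mut state_hi = state.clone();
        state_lo.update(&[10u8]); // KECCAK256_PREFIX_CHALLENGE_LO
        state_hi.update(&[11u8]); // KECCAK256_PREFIX_CHALLENGE_HI
        let mut out = [0u8; 64];
        out[..32].copy_from_slice(state_lo.finalize().as_slice());
        out[32..].copy_from_slice(state_hi.finalize().as_slice());
        out
    }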

/// We will replace BLAKE2b with an algebraic hash function in a later version.
#[derive(Debug, Clone)]
pub struct Blake2bWrite<W: Write, C: CurveAffine, E: EncodedChallenge<C>> {
@@ -173,9 +285,18 @@ pub struct Blake2bWrite<W: Write, C: CurveAffine, E: EncodedChallenge<C>> {
_marker: PhantomData<(C, E)>,
}

/// Keccak256 hash function writer for EVM compatibility
#[derive(Debug, Clone)]
pub struct Keccak256Write<W: Write, C: CurveAffine, E: EncodedChallenge<C>> {
state: Keccak256,
writer: W,
_marker: PhantomData<(C, E)>,
}

impl<W: Write, C: CurveAffine> TranscriptWriterBuffer<W, C, Challenge255<C>>
for Blake2bWrite<W, C, Challenge255<C>>
{
/// Initialize a transcript given an output buffer.
fn init(writer: W) -> Self {
Blake2bWrite {
state: Blake2bParams::new()
@@ -193,6 +314,27 @@ impl<W: Write, C: CurveAffine> TranscriptWriterBuffer<W, C, Challenge255<C>>
}
}

impl<W: Write, C: CurveAffine> TranscriptWriterBuffer<W, C, Challenge255<C>>
for Keccak256Write<W, C, Challenge255<C>>
{
/// Initialize a transcript given an output buffer.
fn init(writer: W) -> Self {
let mut state = Keccak256::new();
state.update(b"Halo2-Transcript");
Keccak256Write {
state,
writer,
_marker: PhantomData,
}
}

/// Conclude the interaction and return the output buffer (writer).
fn finalize(self) -> W {
// TODO: handle outstanding scalars? see issue #138
self.writer
}
}

impl<W: Write, C: CurveAffine> TranscriptWrite<C, Challenge255<C>>
for Blake2bWrite<W, C, Challenge255<C>>
{
@@ -208,6 +350,21 @@ impl<W: Write, C: CurveAffine> TranscriptWrite<C, Challenge255<C>>
}
}

impl<W: Write, C: CurveAffine> TranscriptWrite<C, Challenge255<C>>
for Keccak256Write<W, C, Challenge255<C>>
{
fn write_point(&mut self, point: C) -> io::Result<()> {
self.common_point(point)?;
let compressed = point.to_bytes();
self.writer.write_all(compressed.as_ref())
}
fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
self.common_scalar(scalar)?;
let data = scalar.to_repr();
self.writer.write_all(data.as_ref())
}
}

impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
for Blake2bWrite<W, C, Challenge255<C>>
{
@@ -240,6 +397,48 @@ impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
}
}

impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
for Keccak256Write<W, C, Challenge255<C>>
{
fn squeeze_challenge(&mut self) -> Challenge255<C> {
self.state.update(&[KECCAK256_PREFIX_CHALLENGE]);

let mut state_lo = self.state.clone();
let mut state_hi = self.state.clone();
state_lo.update(&[KECCAK256_PREFIX_CHALLENGE_LO]);
state_hi.update(&[KECCAK256_PREFIX_CHALLENGE_HI]);
let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap();
let result_hi: [u8; 32] = state_hi.finalize().as_slice().try_into().unwrap();

let mut t = result_lo.to_vec();
t.extend_from_slice(&result_hi[..]);
let result: [u8; 64] = t.as_slice().try_into().unwrap();

Challenge255::<C>::new(&result)
}

fn common_point(&mut self, point: C) -> io::Result<()> {
self.state.update(&[KECCAK256_PREFIX_POINT]);
let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
io::Error::new(
io::ErrorKind::Other,
"cannot write points at infinity to the transcript",
)
})?;
self.state.update(coords.x().to_repr().as_ref());
self.state.update(coords.y().to_repr().as_ref());

Ok(())
}

fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
self.state.update(&[KECCAK256_PREFIX_SCALAR]);
self.state.update(scalar.to_repr().as_ref());

Ok(())
}
}

/// The scalar representation of a verifier challenge.
///
/// The `Type` type can be used to scope the challenge to a specific context, or
