From d457988b531ba971a70e74a51388b2d3e03e142d Mon Sep 17 00:00:00 2001 From: dante <45801863+alexander-camuto@users.noreply.github.com> Date: Fri, 8 Nov 2024 02:55:12 +0000 Subject: [PATCH] feat: lookupless sqrt and rsqrt + no range check div and recip --- src/bindings/python.rs | 10 - src/circuit/ops/hybrid.rs | 61 ++-- src/circuit/ops/layouts.rs | 555 +++++++++++++++++++++++++++--------- src/circuit/ops/lookup.rs | 12 - src/commands.rs | 3 - src/execute.rs | 53 ++-- src/graph/model.rs | 15 +- src/graph/node.rs | 14 +- src/graph/utilities.rs | 13 +- src/lib.rs | 3 - tests/assets/model.compiled | Bin 1819 -> 1818 bytes tests/assets/proof.json | 2 +- tests/integration_tests.rs | 30 +- 13 files changed, 505 insertions(+), 266 deletions(-) diff --git a/src/bindings/python.rs b/src/bindings/python.rs index 59fa4aa26..06b0d9f78 100644 --- a/src/bindings/python.rs +++ b/src/bindings/python.rs @@ -180,9 +180,6 @@ struct PyRunArgs { /// list[tuple[str, int]]: Hand-written parser for graph variables, eg. batch_size=1 pub variables: Vec<(String, usize)>, #[pyo3(get, set)] - /// bool: Rebase the scale using lookup table for division instead of using a range check - pub div_rebasing: bool, - #[pyo3(get, set)] /// bool: Should constants with 0.0 fraction be rebased to scale 0 pub rebase_frac_zero_constants: bool, #[pyo3(get, set)] @@ -227,7 +224,6 @@ impl From for RunArgs { output_visibility: py_run_args.output_visibility, param_visibility: py_run_args.param_visibility, variables: py_run_args.variables, - div_rebasing: py_run_args.div_rebasing, rebase_frac_zero_constants: py_run_args.rebase_frac_zero_constants, check_mode: py_run_args.check_mode, commitment: Some(py_run_args.commitment.into()), @@ -252,7 +248,6 @@ impl Into for RunArgs { output_visibility: self.output_visibility, param_visibility: self.param_visibility, variables: self.variables, - div_rebasing: self.div_rebasing, rebase_frac_zero_constants: self.rebase_frac_zero_constants, check_mode: self.check_mode, commitment: self.commitment.into(), @@ -878,8 +873,6 @@ fn gen_settings( /// max_logrows: int /// Optional max logrows to use for calibration /// -/// only_range_check_rebase: bool -/// Check ranges when rebasing /// /// Returns /// ------- @@ -894,7 +887,6 @@ fn gen_settings( scales = None, scale_rebase_multiplier = DEFAULT_SCALE_REBASE_MULTIPLIERS.split(",").map(|x| x.parse().unwrap()).collect(), max_logrows = None, - only_range_check_rebase = DEFAULT_ONLY_RANGE_CHECK_REBASE.parse().unwrap(), ))] fn calibrate_settings( py: Python, @@ -906,7 +898,6 @@ fn calibrate_settings( scales: Option>, scale_rebase_multiplier: Vec, max_logrows: Option, - only_range_check_rebase: bool, ) -> PyResult> { pyo3_asyncio::tokio::future_into_py(py, async move { crate::execute::calibrate( @@ -917,7 +908,6 @@ fn calibrate_settings( lookup_safety_margin, scales, scale_rebase_multiplier, - only_range_check_rebase, max_logrows, ) .await diff --git a/src/circuit/ops/hybrid.rs b/src/circuit/ops/hybrid.rs index 2bf6b2c11..a32674af6 100644 --- a/src/circuit/ops/hybrid.rs +++ b/src/circuit/ops/hybrid.rs @@ -16,7 +16,16 @@ pub enum HybridOp { Ln { scale: utils::F32, }, - + Rsqrt { + input_scale: utils::F32, + output_scale: utils::F32, + }, + Exp { + scale: utils::F32, + }, + Sqrt { + scale: utils::F32, + }, RoundHalfToEven { scale: utils::F32, legs: usize, @@ -39,7 +48,6 @@ pub enum HybridOp { }, Div { denom: utils::F32, - use_range_check_for_int: bool, }, ReduceMax { axes: Vec, @@ -116,6 +124,15 @@ impl Op for Hybrid fn as_string(&self) -> String { match self { + 
HybridOp::Exp { scale } => format!("EXP(scale={})", scale), + HybridOp::Rsqrt { + input_scale, + output_scale, + } => format!( + "RSQRT (input_scale={}, output_scale={})", + input_scale, output_scale + ), + HybridOp::Sqrt { scale } => format!("SQRT(scale={})", scale), HybridOp::Ln { scale } => format!("LN(scale={})", scale), HybridOp::RoundHalfToEven { scale, legs } => { format!("ROUND_HALF_TO_EVEN(scale={}, legs={})", scale, legs) @@ -133,13 +150,7 @@ impl Op for Hybrid "RECIP (input_scale={}, output_scale={})", input_scale, output_scale ), - HybridOp::Div { - denom, - use_range_check_for_int, - } => format!( - "DIV (denom={}, use_range_check_for_int={})", - denom, use_range_check_for_int - ), + HybridOp::Div { denom } => format!("DIV (denom={})", denom), HybridOp::SumPool { padding, stride, @@ -194,6 +205,22 @@ impl Op for Hybrid values: &[ValTensor], ) -> Result>, CircuitError> { Ok(Some(match self { + HybridOp::Rsqrt { + input_scale, + output_scale, + } => layouts::rsqrt( + config, + region, + values[..].try_into()?, + *input_scale, + *output_scale, + )?, + HybridOp::Exp { scale } => { + layouts::exp(config, region, values[..].try_into()?, *scale)? + } + HybridOp::Sqrt { scale } => { + layouts::sqrt(config, region, values[..].try_into()?, *scale)? + } HybridOp::Ln { scale } => layouts::ln(config, region, values[..].try_into()?, *scale)?, HybridOp::RoundHalfToEven { scale, legs } => { layouts::round_half_to_even(config, region, values[..].try_into()?, *scale, *legs)? @@ -233,13 +260,9 @@ impl Op for Hybrid integer_rep_to_felt(input_scale.0 as i128), integer_rep_to_felt(output_scale.0 as i128), )?, - HybridOp::Div { - denom, - use_range_check_for_int, - .. - } => { - if denom.0.fract() == 0.0 && *use_range_check_for_int { - layouts::loop_div( + HybridOp::Div { denom, .. } => { + if denom.0.fract() == 0.0 { + layouts::div( config, region, values[..].try_into()?, @@ -330,9 +353,9 @@ impl Op for Hybrid | HybridOp::ReduceArgMax { .. } | HybridOp::OneHot { .. } | HybridOp::ReduceArgMin { .. } => 0, - HybridOp::Softmax { output_scale, .. } | HybridOp::Recip { output_scale, .. } => { - multiplier_to_scale(output_scale.0 as f64) - } + HybridOp::Softmax { output_scale, .. } + | HybridOp::Recip { output_scale, .. } + | HybridOp::Rsqrt { output_scale, .. } => multiplier_to_scale(output_scale.0 as f64), HybridOp::Ln { scale: output_scale, } => 4 * multiplier_to_scale(output_scale.0 as f64), diff --git a/src/circuit/ops/layouts.rs b/src/circuit/ops/layouts.rs index 85d878bcf..5e0809368 100644 --- a/src/circuit/ops/layouts.rs +++ b/src/circuit/ops/layouts.rs @@ -29,41 +29,70 @@ use crate::{ use super::*; use crate::circuit::ops::lookup::LookupOp; -/// Same as div but splits the division into N parts -pub(crate) fn loop_div( +/// Calculate the L1 distance between two tensors. 
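+///
+/// Together with `is_closest_to` below, this is the primitive that replaces the previous
+/// range-check / `loop_div` rebasing: the division, reciprocal, sqrt, ln and exp layouts in
+/// this patch all argue that the claimed output is closer (in L1 distance) to the target
+/// value than its immediate neighbours.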
+/// ``` +/// use ezkl::tensor::Tensor; +/// use ezkl::fieldutils::IntegerRep; +/// use ezkl::circuit::ops::layouts::l1_distance; +/// use halo2curves::bn256::Fr as Fp; +/// use ezkl::circuit::region::RegionCtx; +/// use ezkl::circuit::region::RegionSettings; +/// use ezkl::circuit::BaseConfig; +/// use ezkl::tensor::ValTensor; +/// use ezkl::circuit::layouts::dot; +/// use ezkl::circuit::layouts::l1_distance; +/// let dummy_config = BaseConfig::dummy(12, 2); +/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2)); +/// let x = ValTensor::from_integer_rep_tensor(Tensor::::new( +/// Some(&[1, 2, 3, 2, 3, 4, 3, 4, 5]), +/// &[3, 3], +/// ).unwrap()); +/// let k = ValTensor::from_integer_rep_tensor(Tensor::::new( +/// Some(&[1, 2, 3, 1, 2, 3, 1, 2, 3]), +/// &[3, 3], +/// ).unwrap()); +/// let result = l1_distance::(&dummy_config, &mut dummy_region, &[x, k]).unwrap(); +/// let expected = Tensor::::new(Some(&[0, 0, 0, 1, 1, 1, 2, 2, 2]), &[3, 3]).unwrap(); +/// assert_eq!(result.int_evals().unwrap(), expected); +/// ``` +pub fn l1_distance( config: &BaseConfig, region: &mut RegionCtx, - value: &[ValTensor; 1], - divisor: F, + values: &[ValTensor; 2], ) -> Result, CircuitError> { - if divisor == F::ONE { - return Ok(value[0].clone()); - } + let diff = pairwise(config, region, values, BaseOp::Sub)?; + let abs_diff = abs(config, region, &[diff])?; - // if integer val is divisible by 2, we can use a faster method and div > F::S - let mut divisor = divisor; - let mut num_parts = 1; + Ok(abs_diff) +} - while felt_to_integer_rep(divisor) % 2 == 0 - && felt_to_integer_rep(divisor) > (2_i128.pow(F::S - 4)) - { - divisor = integer_rep_to_felt(felt_to_integer_rep(divisor) / 2); - num_parts += 1; - } +/// Determines if from a set of 3 tensors the 1st is closest to a reference tensor. 
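+/// # Example
+/// Illustrative sketch only (marked `ignore`): it reuses the dummy config/region set up in
+/// the `l1_distance` doctest above, and the concrete values below are hypothetical.
+/// ```ignore
+/// // 2 is strictly closer to the reference 1 than 5 or 9 are, so the enforced
+/// // equality between the comparison result and 1 holds.
+/// let x0 = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(Some(&[2]), &[1]).unwrap());
+/// let x1 = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(Some(&[5]), &[1]).unwrap());
+/// let x2 = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(Some(&[9]), &[1]).unwrap());
+/// let reference = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(Some(&[1]), &[1]).unwrap());
+/// is_closest_to::<Fp>(&dummy_config, &mut dummy_region, &[x0, x1, x2], &[reference]).unwrap();
+/// ```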
+pub fn is_closest_to( + config: &BaseConfig, + region: &mut RegionCtx, + values: &[ValTensor; 3], + reference: &[ValTensor; 1], +) -> Result<(), CircuitError> { + let l1_distance_0 = l1_distance(config, region, &[values[0].clone(), reference[0].clone()])?; + let l1_distance_1 = l1_distance(config, region, &[values[1].clone(), reference[0].clone()])?; + let l1_distance_2 = l1_distance(config, region, &[values[2].clone(), reference[0].clone()])?; - let output = div(config, region, value, divisor)?; - if num_parts == 1 { - return Ok(output); - } + let is_closest_to_0 = less(config, region, &[l1_distance_0.clone(), l1_distance_1])?; + let is_closest_to_1 = less(config, region, &[l1_distance_0, l1_distance_2])?; - let divisor_int = 2_i128.pow(num_parts - 1); - let divisor_felt = integer_rep_to_felt(divisor_int); - if divisor_int <= 2_i128.pow(F::S - 3) { - div(config, region, &[output], divisor_felt) - } else { - // keep splitting the divisor until it satisfies the condition - loop_div(config, region, &[output], divisor_felt) - } + let is_closest_to = and(config, region, &[is_closest_to_0, is_closest_to_1])?; + + let mut comparison_unit = create_constant_tensor(integer_rep_to_felt(1), is_closest_to.len()); + + comparison_unit.reshape(is_closest_to.dims())?; + // assigned unit + let assigned_unit = region.assign(&config.custom_gates.inputs[1], &comparison_unit)?; + region.increment(assigned_unit.len()); + + // assert that the result is 1 + enforce_equality(config, region, &[is_closest_to, assigned_unit])?; + + Ok(()) } /// Div accumulated layout @@ -80,8 +109,6 @@ pub(crate) fn div( let input = value[0].clone(); let input_dims = input.dims(); - let range_check_bracket = felt_to_integer_rep(div) / 2; - let divisor = create_constant_tensor(div, 1); let divisor = region.assign(&config.custom_gates.inputs[1], &divisor)?; @@ -117,18 +144,47 @@ pub(crate) fn div( BaseOp::Mult, )?; - let diff_with_input = pairwise( + // take the claimed output and subtract 1 + let one = create_constant_tensor(F::ONE, 1); + let one = region.assign(&config.custom_gates.inputs[1], &one)?; + + let claimed_output_minus_one = pairwise( config, region, - &[product.clone(), input.clone()], + &[claimed_output.clone(), one.clone()], BaseOp::Sub, )?; - range_check( + let claimed_output_minus_one_product = pairwise( config, region, - &[diff_with_input], - &(-range_check_bracket, range_check_bracket), + &[claimed_output_minus_one.clone(), divisor.clone()], + BaseOp::Mult, + )?; + + let claimed_output_plus_one = pairwise( + config, + region, + &[claimed_output.clone(), one.clone()], + BaseOp::Add, + )?; + + let claimed_output_plus_one_product = pairwise( + config, + region, + &[claimed_output_plus_one.clone(), divisor.clone()], + BaseOp::Mult, + )?; + + is_closest_to( + config, + region, + &[ + product, + claimed_output_minus_one_product, + claimed_output_plus_one_product, + ], + &[input.clone()], )?; Ok(claimed_output) @@ -145,19 +201,12 @@ pub(crate) fn recip( let input = value[0].clone(); let input_dims = input.dims(); - let integer_input_scale = felt_to_integer_rep(input_scale); - let integer_output_scale = felt_to_integer_rep(output_scale); + let one = create_constant_tensor(F::ONE, 1); + let one = region.assign(&config.custom_gates.inputs[0], &one)?; - // range_check_bracket is min of input_scale * output_scale and 2^F::S - 3 - let range_check_len = std::cmp::min(integer_output_scale, 2_i128.pow(F::S - 4)); - - let input_scale_ratio = if range_check_len > 0 { - integer_rep_to_felt(integer_input_scale * integer_output_scale / 
range_check_len) - } else { - F::ONE - }; - - let range_check_bracket = range_check_len / 2; + let unit_scale = create_constant_tensor(output_scale * input_scale, 1); + let unit_scale = region.assign(&config.custom_gates.inputs[1], &unit_scale)?; + region.increment(1); let is_assigned = !input.any_unknowns()?; @@ -191,9 +240,35 @@ pub(crate) fn recip( BaseOp::Mult, )?; - // divide by input_scale - let rebased_div = loop_div(config, region, &[product], input_scale_ratio)?; + let claimed_output_minus_one = pairwise( + config, + region, + &[claimed_output.clone(), one.clone()], + BaseOp::Sub, + )?; + + let claimed_output_minus_one_product = pairwise( + config, + region, + &[claimed_output_minus_one.clone(), input.clone()], + BaseOp::Mult, + )?; + + let claimed_output_plus_one = pairwise( + config, + region, + &[claimed_output.clone(), one.clone()], + BaseOp::Add, + )?; + + let claimed_output_plus_one_product = pairwise( + config, + region, + &[claimed_output_plus_one.clone(), input.clone()], + BaseOp::Mult, + )?; + // divide by input_scale let zero_inverse_val = tensor::ops::nonlinearities::zero_recip(felt_to_integer_rep(output_scale) as f64)[0]; let zero_inverse = create_constant_tensor(integer_rep_to_felt(zero_inverse_val), 1); @@ -209,24 +284,178 @@ pub(crate) fn recip( &[equal_zero_mask.clone(), equal_inverse_mask], )?; - let unit_scale = create_constant_tensor(integer_rep_to_felt(range_check_len), 1); + // now add the unit mask to the rebased_div - let unit_mask = pairwise(config, region, &[equal_zero_mask, unit_scale], BaseOp::Mult)?; + is_closest_to( + config, + region, + &[ + product, + claimed_output_minus_one_product, + claimed_output_plus_one_product, + ], + &[unit_scale], + )?; - // now add the unit mask to the rebased_div - let rebased_offset_div = pairwise(config, region, &[rebased_div, unit_mask], BaseOp::Add)?; + Ok(claimed_output) +} - // at most the error should be in the original unit scale's range - range_check( +/// Square root accumulated layout +/// # Example +/// ``` +/// use ezkl::tensor::Tensor; +/// use ezkl::fieldutils::IntegerRep; +/// use ezkl::circuit::ops::layouts::sqrt; +/// use halo2curves::bn256::Fr as Fp; +/// use ezkl::circuit::region::RegionCtx; +/// use ezkl::circuit::region::RegionSettings; +/// use ezkl::circuit::BaseConfig; +/// use ezkl::tensor::ValTensor; +/// use ezkl::circuit::layouts::dot; +/// use ezkl::circuit::layouts::sqrt; +/// let dummy_config = BaseConfig::dummy(12, 2); +/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2)); +/// let x = ValTensor::from_integer_rep_tensor(Tensor::::new( +/// Some(&[1, 2, 3, 2, 3, 4, 3, 4, 5]), +/// &[3, 3], +/// ).unwrap()); +/// let result = sqrt::(&dummy_config, &mut dummy_region, &[x], 1.0).unwrap(); +/// let expected = Tensor::::new(Some(&[1, 1, 1, 1, 1, 2, 1, 2, 2]), &[3, 3]).unwrap(); +/// assert_eq!(result.int_evals().unwrap(), expected); +/// ``` + +pub fn sqrt( + config: &BaseConfig, + region: &mut RegionCtx, + value: &[ValTensor; 1], + input_scale: utils::F32, +) -> Result, CircuitError> { + let input = value[0].clone(); + let input_dims = input.dims(); + + let one = create_constant_tensor(F::ONE, 1); + let one = region.assign(&config.custom_gates.inputs[0], &one)?; + + let unit_scale = create_constant_tensor(integer_rep_to_felt(input_scale.0 as IntegerRep), 1); + let unit_scale = region.assign(&config.custom_gates.inputs[1], &unit_scale)?; + region.increment(1); + + let is_assigned = !input.any_unknowns()?; + + let mut claimed_output: ValTensor = if is_assigned { + 
let input_evals = input.int_evals()?; + tensor::ops::nonlinearities::sqrt(&input_evals, input_scale.0 as f64) + .par_iter() + .map(|x| Value::known(integer_rep_to_felt(*x))) + .collect::>>() + .into() + } else { + Tensor::new( + Some(&vec![Value::::unknown(); input.len()]), + &[input.len()], + )? + .into() + }; + claimed_output.reshape(input_dims)?; + let claimed_output = region.assign(&config.custom_gates.output, &claimed_output)?; + region.increment(claimed_output.len()); + + // this is now of scale 2 * scale + let product = pairwise( + config, + region, + &[claimed_output.clone(), claimed_output.clone()], + BaseOp::Mult, + )?; + + let claimed_output_minus_one = pairwise( + config, + region, + &[claimed_output.clone(), one.clone()], + BaseOp::Sub, + )?; + + let claimed_output_minus_one_product = pairwise( + config, + region, + &[ + claimed_output_minus_one.clone(), + claimed_output_minus_one.clone(), + ], + BaseOp::Mult, + )?; + + let claimed_output_plus_one = pairwise( + config, + region, + &[claimed_output.clone(), one.clone()], + BaseOp::Add, + )?; + + let claimed_output_plus_one_product = pairwise( + config, + region, + &[ + claimed_output_plus_one.clone(), + claimed_output_plus_one.clone(), + ], + BaseOp::Mult, + )?; + + // rescaled input + let rescaled_input = pairwise(config, region, &[input.clone(), unit_scale], BaseOp::Mult)?; + + is_closest_to( config, region, - &[rebased_offset_div], - &(range_check_bracket, 3 * range_check_bracket), + &[ + product, + claimed_output_minus_one_product, + claimed_output_plus_one_product, + ], + &[rescaled_input], )?; Ok(claimed_output) } +/// Reciprocal square root accumulated layout +/// # Example +/// ``` +/// use ezkl::tensor::Tensor; +/// use ezkl::fieldutils::IntegerRep; +/// use ezkl::circuit::ops::layouts::rsqrt; +/// use halo2curves::bn256::Fr as Fp; +/// use ezkl::circuit::region::RegionCtx; +/// use ezkl::circuit::region::RegionSettings; +/// use ezkl::circuit::BaseConfig; +/// use ezkl::tensor::ValTensor; +/// use ezkl::circuit::layouts::dot; +/// use ezkl::circuit::layouts::rsqrt; +/// let dummy_config = BaseConfig::dummy(12, 2); +/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2)); +/// let x = ValTensor::from_integer_rep_tensor(Tensor::::new( +/// Some(&[1, 2, 3, 2, 3, 4, 3, 4, 5]), +/// &[3, 3], +/// ).unwrap()); +/// let result = rsqrt::(&dummy_config, &mut dummy_region, &[x], 1.0).unwrap(); +/// let expected = Tensor::::new(Some(&[1, 1, 1, 1, 1, 1, 1, 1, 1]), &[3, 3]).unwrap(); +/// assert_eq!(result.int_evals().unwrap(), expected); +/// ``` +pub fn rsqrt( + config: &BaseConfig, + region: &mut RegionCtx, + value: &[ValTensor; 1], + input_scale: utils::F32, + output_scale: utils::F32, +) -> Result, CircuitError> { + let sqrt = sqrt(config, region, value, input_scale)?; + let felt_output_scale = integer_rep_to_felt(output_scale.0 as IntegerRep); + let felt_input_scale = integer_rep_to_felt(input_scale.0 as IntegerRep); + + recip(config, region, &[sqrt], felt_input_scale, felt_output_scale) +} + /// Dot product of two tensors. 
/// ``` /// use ezkl::tensor::Tensor; @@ -1805,6 +2034,10 @@ pub fn sum( region: &mut RegionCtx, values: &[ValTensor; 1], ) -> Result, CircuitError> { + if values[0].len() == 1 { + return Ok(values[0].clone()); + } + region.flush()?; // time this entire function run let global_start = instant::Instant::now(); @@ -3102,7 +3335,7 @@ pub fn sumpool( last_elem.reshape(&[&[batch_size, image_channels], shape].concat())?; if normalized { - last_elem = loop_div(config, region, &[last_elem], F::from(kernel_len as u64))?; + last_elem = div(config, region, &[last_elem], F::from(kernel_len as u64))?; } Ok(last_elem) } @@ -4547,13 +4780,10 @@ pub fn ln( // first generate the claimed val let mut input = values[0].clone(); - - println!("input {}", input.show()); - let scale_as_felt = integer_rep_to_felt(scale.0.round() as IntegerRep); let assigned_triple_scaled_as_felt_tensor = region.assign( - &config.custom_gates.inputs[1], + &config.custom_gates.output, &create_constant_tensor(scale_as_felt * scale_as_felt * scale_as_felt, 1), )?; @@ -4639,27 +4869,6 @@ pub fn ln( &LookupOp::PowersOfTwo { scale }, )?; - // assert that the original input is closest to the claimed output than the prior power of 2 and the next power of 2 - let distance_to_prior = pairwise( - config, - region, - &[input.clone(), prior_pow2.clone()], - BaseOp::Sub, - )?; - - // now take abs of the distance - let distance_to_prior_l1 = abs(config, region, &[distance_to_prior.clone()])?; - - let distance_to_next = pairwise( - config, - region, - &[input.clone(), next_pow2.clone()], - BaseOp::Sub, - )?; - - // now take abs of the distance - let distance_to_next_l1 = abs(config, region, &[distance_to_next.clone()])?; - let distance_to_claimed = pairwise( config, region, @@ -4667,51 +4876,15 @@ pub fn ln( BaseOp::Sub, )?; - // now take abs of the distance - let distance_to_claimed_l1 = abs(config, region, &[distance_to_claimed.clone()])?; - - // can be less than or equal because we round up - let is_distance_to_prior_less = less_equal( - config, - region, - &[distance_to_claimed_l1.clone(), distance_to_prior_l1.clone()], - )?; - - // should be striclty less because we round up - let is_distance_to_next_less = less( - config, - region, - &[distance_to_claimed_l1, distance_to_next_l1.clone()], - )?; - - let is_distance_to_prior_less_and_distance_to_next_less = and( - config, - region, - &[ - is_distance_to_prior_less.clone(), - is_distance_to_next_less.clone(), - ], - )?; - - let mut comparison_unit = create_constant_tensor( - integer_rep_to_felt(1), - is_distance_to_prior_less_and_distance_to_next_less.len(), - ); - - comparison_unit.reshape(is_distance_to_prior_less_and_distance_to_next_less.dims())?; - - // assigned unit - let assigned_unit = region.assign(&config.custom_gates.inputs[1], &comparison_unit)?; - region.increment(assigned_unit.len()); - - // assert that the values are truthy - enforce_equality( + is_closest_to( config, region, &[ - is_distance_to_prior_less_and_distance_to_next_less, - assigned_unit.clone(), + pow2_of_claimed_output.clone(), + prior_pow2.clone(), + next_pow2.clone(), ], + &[input.clone()], )?; // get a linear interpolation now @@ -4720,7 +4893,7 @@ pub fn ln( let sign_of_distance_to_claimed_is_positive = equals( config, region, - &[sign_of_distance_to_claimed.clone(), assigned_unit.clone()], + &[sign_of_distance_to_claimed.clone(), unit.clone()], )?; let sign_of_distance_to_claimed_is_negative = not( @@ -4833,6 +5006,134 @@ pub fn ln( pairwise(config, region, &[claimed_output, ln2_tensor], BaseOp::Mult) } +/// 
Exponential layout +/// # Arguments +/// * `config` - BaseConfig +/// * `region` - RegionCtx +/// * `values` - &[ValTensor; 1] +/// * `scale` - utils::F32 +/// # Returns +/// * ValTensor +/// # Example +/// ``` +/// use ezkl::tensor::Tensor; +/// use ezkl::fieldutils::IntegerRep; +/// use ezkl::circuit::ops::layouts::exp; +/// use ezkl::tensor::val::ValTensor; +/// use halo2curves::bn256::Fr as Fp; +/// use ezkl::circuit::region::RegionCtx; +/// use ezkl::circuit::region::RegionSettings; +/// use ezkl::circuit::BaseConfig; +/// let dummy_config = BaseConfig::dummy(12, 2); +/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2)); +/// let x = ValTensor::from_integer_rep_tensor(Tensor::::new( +/// Some(&[3, 2, 3, 1]), +/// &[1, 1, 2, 2], +/// ).unwrap()); +/// let result = exp::(&dummy_config, &mut dummy_region, &[x], 2.0.into()).unwrap(); +/// let expected = Tensor::::new(Some(&[9, 4, 9, 1]), &[1, 1, 2, 2]).unwrap(); +/// assert_eq!(result.int_evals().unwrap(), expected); +/// ``` +pub fn exp( + config: &BaseConfig, + region: &mut RegionCtx, + values: &[ValTensor; 1], + scale: utils::F32, +) -> Result, CircuitError> { + // first generate the claimed val + + let mut input = values[0].clone(); + let scale_as_felt: F = integer_rep_to_felt(scale.0.round() as IntegerRep); + + let assigned_triple_scaled_as_felt_tensor = region.assign( + &config.custom_gates.output, + &create_constant_tensor(scale_as_felt * scale_as_felt * scale_as_felt, 1), + )?; + + let unit = create_constant_tensor(integer_rep_to_felt(1), 1); + let unit = region.assign(&config.custom_gates.inputs[1], &unit)?; + + region.increment(1); + + // 2. assign the image + if !input.all_prev_assigned() { + input = region.assign(&config.custom_gates.inputs[0], &input)?; + // don't need to increment because the claimed output is assigned to output and incremented accordingly + } + + let is_assigned = !input.any_unknowns()?; + + let mut claimed_output: ValTensor = if is_assigned { + let input_evals = input.int_evals()?; + // returns an integer with the base 2 logarithm + tensor::ops::nonlinearities::exp(&input_evals.clone(), scale.0 as f64) + .par_iter() + .map(|x| Value::known(integer_rep_to_felt(*x))) + .collect::>>() + .into() + } else { + Tensor::new( + Some(&vec![Value::::unknown(); input.len()]), + &[input.len()], + )? 
+ .into() + }; + claimed_output.reshape(input.dims())?; + region.assign(&config.custom_gates.output, &claimed_output)?; + region.increment(claimed_output.len()); + + let ln_claimed_output = nonlinearity( + config, + region, + &[claimed_output.clone()], + &LookupOp::Ln { scale }, + )?; + + let claimed_output_minus_one = pairwise( + config, + region, + &[claimed_output.clone(), unit.clone()], + BaseOp::Sub, + )?; + + let ln_claimed_output_minus_one = nonlinearity( + config, + region, + &[claimed_output_minus_one], + &LookupOp::Ln { scale }, + )?; + + let claimed_output_plus_one = + pairwise(config, region, &[claimed_output.clone(), unit], BaseOp::Add)?; + + let ln_claimed_output_plus_one = nonlinearity( + config, + region, + &[claimed_output_plus_one], + &LookupOp::Ln { scale }, + )?; + + let rescaled_input = pairwise( + config, + region, + &[input.clone(), assigned_triple_scaled_as_felt_tensor], + BaseOp::Mult, + )?; + + is_closest_to( + config, + region, + &[ + ln_claimed_output.clone(), + ln_claimed_output_minus_one.clone(), + ln_claimed_output_plus_one.clone(), + ], + &[rescaled_input.clone()], + )?; + + Ok(claimed_output) +} + /// round layout /// # Arguments /// * `config` - BaseConfig @@ -5498,7 +5799,7 @@ pub(crate) fn percent let percent = pairwise(config, region, &[input, inv_denom], BaseOp::Mult)?; // rebase the percent to 2x the scale - loop_div(config, region, &[percent], input_felt_scale) + div(config, region, &[percent], input_felt_scale) } /// Applies softmax @@ -5628,7 +5929,7 @@ pub fn range_check_percent "is_odd".to_string(), LookupOp::Div { denom } => format!("div_{}", denom), LookupOp::Sigmoid { scale } => format!("sigmoid_{}", scale), - LookupOp::Sqrt { scale } => format!("sqrt_{}", scale), - LookupOp::Rsqrt { scale } => format!("rsqrt_{}", scale), LookupOp::Erf { scale } => format!("erf_{}", scale), LookupOp::Exp { scale } => format!("exp_{}", scale), LookupOp::Cos { scale } => format!("cos_{}", scale), @@ -100,12 +96,6 @@ impl LookupOp { LookupOp::Sigmoid { scale } => { Ok::<_, TensorError>(tensor::ops::nonlinearities::sigmoid(&x, scale.into())) } - LookupOp::Sqrt { scale } => { - Ok::<_, TensorError>(tensor::ops::nonlinearities::sqrt(&x, scale.into())) - } - LookupOp::Rsqrt { scale } => { - Ok::<_, TensorError>(tensor::ops::nonlinearities::rsqrt(&x, scale.into())) - } LookupOp::Erf { scale } => { Ok::<_, TensorError>(tensor::ops::nonlinearities::erffunc(&x, scale.into())) } @@ -174,9 +164,7 @@ impl Op for Lookup LookupOp::Pow { a, scale } => format!("POW(scale={}, exponent={})", scale, a), LookupOp::Div { denom, .. 
} => format!("DIV(denom={})", denom), LookupOp::Sigmoid { scale } => format!("SIGMOID(scale={})", scale), - LookupOp::Sqrt { scale } => format!("SQRT(scale={})", scale), LookupOp::Erf { scale } => format!("ERF(scale={})", scale), - LookupOp::Rsqrt { scale } => format!("RSQRT(scale={})", scale), LookupOp::Exp { scale } => format!("EXP(scale={})", scale), LookupOp::Tan { scale } => format!("TAN(scale={})", scale), LookupOp::ATan { scale } => format!("ATAN(scale={})", scale), diff --git a/src/commands.rs b/src/commands.rs index 2e0a2247e..03f57d501 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -474,9 +474,6 @@ pub enum Commands { /// max logrows to use for calibration, 26 is the max public SRS size #[arg(long, value_hint = clap::ValueHint::Other)] max_logrows: Option, - // whether to only range check rebases (instead of trying both range check and lookup) - #[arg(long, default_value = DEFAULT_ONLY_RANGE_CHECK_REBASE, action = clap::ArgAction::SetTrue)] - only_range_check_rebase: Option, }, /// Generates a dummy SRS diff --git a/src/execute.rs b/src/execute.rs index 68fa87bc9..47295df57 100644 --- a/src/execute.rs +++ b/src/execute.rs @@ -140,7 +140,6 @@ pub async fn run(command: Commands) -> Result { scales, scale_rebase_multiplier, max_logrows, - only_range_check_rebase, } => calibrate( model.unwrap_or(DEFAULT_MODEL.into()), data.unwrap_or(DEFAULT_DATA.into()), @@ -149,7 +148,6 @@ pub async fn run(command: Commands) -> Result { lookup_safety_margin, scales, scale_rebase_multiplier, - only_range_check_rebase.unwrap_or(DEFAULT_ONLY_RANGE_CHECK_REBASE.parse().unwrap()), max_logrows, ) .await @@ -671,10 +669,10 @@ pub(crate) async fn get_srs_cmd( let srs_uri = format!("{}{}", PUBLIC_SRS_URL, k); let mut reader = Cursor::new(fetch_srs(&srs_uri).await?); // check the SRS - let pb = init_spinner(); - pb.set_message("Validating SRS (this may take a while) ..."); + let pb = init_spinner(); + pb.set_message("Validating SRS (this may take a while) ..."); let params = ParamsKZG::::read(&mut reader)?; - pb.finish_with_message("SRS validated."); + pb.finish_with_message("SRS validated."); info!("Saving SRS to disk..."); let computed_srs_path = get_srs_path(k, srs_path.clone(), commitment); @@ -682,7 +680,10 @@ pub(crate) async fn get_srs_cmd( let mut buffer = BufWriter::with_capacity(*EZKL_BUF_CAPACITY, &mut file); params.write(&mut buffer)?; - info!("Saved SRS to {}.", computed_srs_path.as_os_str().to_str().unwrap_or("disk")); + info!( + "Saved SRS to {}.", + computed_srs_path.as_os_str().to_str().unwrap_or("disk") + ); info!("SRS downloaded"); } else { @@ -728,7 +729,7 @@ pub(crate) async fn gen_witness( None }; - let mut input = circuit.load_graph_input(&data).await?; + let mut input = circuit.load_graph_input(&data).await?; #[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))] let mut input = circuit.load_graph_input(&data)?; @@ -968,7 +969,6 @@ pub(crate) async fn calibrate( lookup_safety_margin: f64, scales: Option>, scale_rebase_multiplier: Vec, - only_range_check_rebase: bool, max_logrows: Option, ) -> Result { use log::error; @@ -1004,12 +1004,6 @@ pub(crate) async fn calibrate( (11..14).collect::>() }; - let div_rebasing = if only_range_check_rebase { - vec![false] - } else { - vec![true, false] - }; - let mut found_params: Vec = vec![]; // 2 x 2 grid @@ -1047,12 +1041,6 @@ pub(crate) async fn calibrate( .map(|(a, b)| (*a, *b)) .collect::>(); - let range_grid = range_grid - .iter() - .cartesian_product(div_rebasing.iter()) - .map(|(a, b)| (*a, *b)) - .collect::>(); - let mut 
forward_pass_res = HashMap::new(); let pb = init_bar(range_grid.len() as u64); @@ -1061,30 +1049,23 @@ pub(crate) async fn calibrate( let mut num_failed = 0; let mut num_passed = 0; - for (((input_scale, param_scale), scale_rebase_multiplier), div_rebasing) in range_grid { + for ((input_scale, param_scale), scale_rebase_multiplier) in range_grid { pb.set_message(format!( - "i-scale: {}, p-scale: {}, rebase-(x): {}, div-rebase: {}, fail: {}, pass: {}", + "i-scale: {}, p-scale: {}, rebase-(x): {}, fail: {}, pass: {}", input_scale.to_string().blue(), param_scale.to_string().blue(), - scale_rebase_multiplier.to_string().blue(), - div_rebasing.to_string().yellow(), + scale_rebase_multiplier.to_string().yellow(), num_failed.to_string().red(), num_passed.to_string().green() )); - let key = ( - input_scale, - param_scale, - scale_rebase_multiplier, - div_rebasing, - ); + let key = (input_scale, param_scale, scale_rebase_multiplier); forward_pass_res.insert(key, vec![]); let local_run_args = RunArgs { input_scale, param_scale, scale_rebase_multiplier, - div_rebasing, lookup_range: (IntegerRep::MIN, IntegerRep::MAX), ..settings.run_args.clone() }; @@ -1188,7 +1169,6 @@ pub(crate) async fn calibrate( let found_run_args = RunArgs { input_scale: new_settings.run_args.input_scale, param_scale: new_settings.run_args.param_scale, - div_rebasing: new_settings.run_args.div_rebasing, lookup_range: new_settings.run_args.lookup_range, logrows: new_settings.run_args.logrows, scale_rebase_multiplier: new_settings.run_args.scale_rebase_multiplier, @@ -1296,7 +1276,6 @@ pub(crate) async fn calibrate( best_params.run_args.input_scale, best_params.run_args.param_scale, best_params.run_args.scale_rebase_multiplier, - best_params.run_args.div_rebasing, )) .ok_or("no params found")? 
.iter() @@ -2022,7 +2001,7 @@ pub(crate) fn mock_aggregate( } } // proof aggregation - let pb = { + let pb = { let pb = init_spinner(); pb.set_message("Aggregating (may take a while)..."); pb @@ -2033,7 +2012,7 @@ pub(crate) fn mock_aggregate( let prover = halo2_proofs::dev::MockProver::run(logrows, &circuit, vec![circuit.instances()]) .map_err(|e| ExecutionError::MockProverError(e.to_string()))?; prover.verify().map_err(ExecutionError::VerifyError)?; - pb.finish_with_message("Done."); + pb.finish_with_message("Done."); Ok(String::new()) } @@ -2127,7 +2106,7 @@ pub(crate) fn aggregate( } // proof aggregation - let pb = { + let pb = { let pb = init_spinner(); pb.set_message("Aggregating (may take a while)..."); pb @@ -2276,7 +2255,7 @@ pub(crate) fn aggregate( ); snark.save(&proof_path)?; - pb.finish_with_message("Done."); + pb.finish_with_message("Done."); Ok(snark) } diff --git a/src/graph/model.rs b/src/graph/model.rs index eb89344ad..941a52900 100644 --- a/src/graph/model.rs +++ b/src/graph/model.rs @@ -915,20 +915,9 @@ impl Model { if scales.contains_key(&i) { let scale_diff = n.out_scale - scales[&i]; n.opkind = if scale_diff > 0 { - RebaseScale::rebase( - n.opkind, - scales[&i], - n.out_scale, - 1, - run_args.div_rebasing, - ) + RebaseScale::rebase(n.opkind, scales[&i], n.out_scale, 1) } else { - RebaseScale::rebase_up( - n.opkind, - scales[&i], - n.out_scale, - run_args.div_rebasing, - ) + RebaseScale::rebase_up(n.opkind, scales[&i], n.out_scale) }; n.out_scale = scales[&i]; } diff --git a/src/graph/node.rs b/src/graph/node.rs index 34b1fbdb1..3e151c472 100644 --- a/src/graph/node.rs +++ b/src/graph/node.rs @@ -120,7 +120,6 @@ impl RebaseScale { global_scale: crate::Scale, op_out_scale: crate::Scale, scale_rebase_multiplier: u32, - div_rebasing: bool, ) -> SupportedOp { if (op_out_scale > (global_scale * scale_rebase_multiplier as i32)) && !inner.is_constant() @@ -137,7 +136,6 @@ impl RebaseScale { multiplier, rebase_op: HybridOp::Div { denom: crate::circuit::utils::F32((multiplier) as f32), - use_range_check_for_int: !div_rebasing, }, original_scale: op.original_scale, }) @@ -148,7 +146,6 @@ impl RebaseScale { multiplier, rebase_op: HybridOp::Div { denom: crate::circuit::utils::F32(multiplier as f32), - use_range_check_for_int: !div_rebasing, }, original_scale: op_out_scale, }) @@ -163,7 +160,6 @@ impl RebaseScale { inner: SupportedOp, target_scale: crate::Scale, op_out_scale: crate::Scale, - div_rebasing: bool, ) -> SupportedOp { if (op_out_scale < (target_scale)) && !inner.is_constant() && !inner.is_input() { let multiplier = scale_to_multiplier(op_out_scale - target_scale); @@ -176,7 +172,6 @@ impl RebaseScale { original_scale: op.original_scale, rebase_op: HybridOp::Div { denom: crate::circuit::utils::F32((multiplier) as f32), - use_range_check_for_int: !div_rebasing, }, }) } else { @@ -187,7 +182,6 @@ impl RebaseScale { original_scale: op_out_scale, rebase_op: HybridOp::Div { denom: crate::circuit::utils::F32(multiplier as f32), - use_range_check_for_int: !div_rebasing, }, }) } @@ -595,13 +589,7 @@ impl Node { let mut out_scale = opkind.out_scale(in_scales.clone())?; // rescale the inputs if necessary to get consistent fixed points, we select the largest scale (highest precision) let global_scale = scales.get_max(); - opkind = RebaseScale::rebase( - opkind, - global_scale, - out_scale, - scales.rebase_multiplier, - run_args.div_rebasing, - ); + opkind = RebaseScale::rebase(opkind, global_scale, out_scale, scales.rebase_multiplier); out_scale = opkind.out_scale(in_scales)?; 
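A minimal sketch (not part of the patch) of the arithmetic behind the rebasing `Div { denom }` op above, which the new `div` layout verifies with the closest-value argument instead of a range check. `closest_quotient_holds` is a hypothetical name, and `scale_to_multiplier(s)` is assumed to be `2^s` as elsewhere in ezkl:

// The prover claims q for x / d; the circuit multiplies q, q - 1 and q + 1 by d and
// requires q * d to have the strictly smallest L1 distance to x, pinning q to the
// nearest quotient (the comparisons in `is_closest_to` are strict).
fn closest_quotient_holds(x: i128, d: i128, q: i128) -> bool {
    let dist = |c: i128| (x - c * d).abs();
    dist(q) < dist(q - 1) && dist(q) < dist(q + 1)
}

For example, rebasing from out_scale 14 to a global scale of 7 gives denom = 2^7 = 128; with x = 1000 only q = 8 passes (|1000 - 1024| = 24), while q = 7 fails against its neighbour q = 8 (|1000 - 896| = 104).
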
diff --git a/src/graph/utilities.rs b/src/graph/utilities.rs index f2379b273..6d505bf30 100644 --- a/src/graph/utilities.rs +++ b/src/graph/utilities.rs @@ -842,12 +842,17 @@ pub fn new_op_from_onnx( "Sigmoid" => SupportedOp::Nonlinear(LookupOp::Sigmoid { scale: scale_to_multiplier(input_scales[0]).into(), }), - "Sqrt" => SupportedOp::Nonlinear(LookupOp::Sqrt { - scale: scale_to_multiplier(input_scales[0]).into(), - }), - "Rsqrt" => SupportedOp::Nonlinear(LookupOp::Rsqrt { + "Sqrt" => SupportedOp::Hybrid(HybridOp::Sqrt { scale: scale_to_multiplier(input_scales[0]).into(), }), + "Rsqrt" => { + let in_scale = input_scales[0]; + let max_scale = std::cmp::max(scales.get_max(), in_scale); + SupportedOp::Hybrid(HybridOp::Rsqrt { + input_scale: (scale_to_multiplier(in_scale) as f32).into(), + output_scale: (scale_to_multiplier(max_scale) as f32).into(), + }) + } "Exp" => SupportedOp::Nonlinear(LookupOp::Exp { scale: scale_to_multiplier(input_scales[0]).into(), }), diff --git a/src/lib.rs b/src/lib.rs index 788d42f8a..39a0bee07 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -309,8 +309,6 @@ pub struct RunArgs { all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "false") )] - /// Rebase the scale using lookup table for division instead of using a range check - pub div_rebasing: bool, /// Should constants with 0.0 fraction be rebased to scale 0 #[cfg_attr( all(feature = "ezkl", not(target_arch = "wasm32")), @@ -352,7 +350,6 @@ impl Default for RunArgs { input_visibility: Visibility::Private, output_visibility: Visibility::Public, param_visibility: Visibility::Private, - div_rebasing: false, rebase_frac_zero_constants: false, check_mode: CheckMode::UNSAFE, commitment: None, diff --git a/tests/assets/model.compiled b/tests/assets/model.compiled index 93fa3cb50a625276c8790730d97b4407c203a875..9a0ae76a5d21dde36c5349ed44234089a6e5e9ad 100644 GIT binary patch delta 12 TcmbQuH;ZqB2kT}})=P{499aY- delta 12 TcmbQmH=A#R2kT~c*2|0l98m-! 
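A minimal sketch (not part of the patch) of the check behind the `HybridOp::Sqrt` / `HybridOp::Rsqrt` mapping in the utilities.rs hunk above; `closest_square_holds` is a hypothetical name, and the scales follow ezkl's usual power-of-two convention (`scale_to_multiplier(s) = 2^s`):

// sqrt: the claimed root q is accepted iff q * q is strictly closer to the rescaled
// input (input * input_scale) than (q - 1)^2 and (q + 1)^2 are; rsqrt then feeds that
// root through the recip layout at output_scale, so neither step needs a lookup table.
fn closest_square_holds(x_rescaled: i128, q: i128) -> bool {
    let dist = |c: i128| (x_rescaled - c * c).abs();
    dist(q) < dist(q - 1) && dist(q) < dist(q + 1)
}

For input 5 at unit scale, q = 2 passes (|5 - 4| = 1) and q = 3 fails (|5 - 9| = 4), matching the last entry of the sqrt doctest earlier in this patch.
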
diff --git a/tests/assets/proof.json b/tests/assets/proof.json index 8763e1b18..3a29496b4 100644 --- a/tests/assets/proof.json +++ b/tests/assets/proof.json @@ -1 +1 @@ -{"protocol":null,"instances":[["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]],"proof":[28,134,252,102,88,38,51,189,29,82,241,102,213,71,193,128,93,63,131,208,154,125,160,183,214,179,10,64,127,94,195,36,13,4,111,78,138,235,165,211,151,59,74,255,164,81,114,1,121,117,146,21,172,221,63,103,101,210,125,47,162,199,197,54,14,120,91,63,180,131,207,15,59,113,30,248,252,76,116,125,143,167,122,76,38,54,209,159,242,182,190,139,255,74,151,247,39,188,79,77,180,139,23,99,101,17,126,142,34,64,188,174,114,159,58,31,157,168,195,160,157,48,91,185,90,210,43,114,34,29,122,43,34,157,110,167,210,75,238,165,19,171,196,127,36,148,2,212,11,226,140,97,108,30,30,236,230,97,223,183,43,231,93,64,130,132,154,125,163,48,190,250,252,152,5,194,146,52,11,11,72,223,138,206,57,22,44,144,114,234,84,68,6,90,119,120,210,126,102,3,2,179,3,128,119,53,71,219,157,151,85,52,102,242,58,169,65,232,20,125,100,150,133,100,30,44,110,13,63,93,215,134,108,251,45,70,86,76,197,224,210,208,4,254,80,117,212,39,32,54,154,72,137,162,17,254,47,21,131,120,184,84,222,202,250,167,205,126,144,98,4,143,167,24,84,49,10,88,238,144,214,247,218,28,231,255,240,243,4,223,207,76,47,36,54,205,123,251,152,163,30,218,122,245,132,121,121,110,120,55,72,53,242,235,20,192,199,243,109,30,1,31,150,46,221,38,105,211,58,122,170,248,177,123,42,247,87,117,106,150,175,20,114,138,65,5,80,226,217,17,160,74,4,129,156,37,233,202,48,94,252,159,194,219,41,111,179,142,177,190,14,115,222,11,73,204,65,222,224,207,114,187,14,15,9,207,169,136,53,34,166,195,225,22,236,10,200,246,61,4,236,31,71,161,12,17,126,135,26,197,8,101,142,82,231,57,44,76,64,86,37,222,181,85,166,186,2,138,108,70,116,45,60,86,220,44,23,240,162,185,141,196,147,50,163,42,197,7,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,42,29,23,75,127,138,84,57,241,193,71,212,213,184,25,163,131,79,55,28,182,52,178,65,193,214,211,84,24,52,155,247,21,200,242,170,146,244,46,164,38,166,5,201,19,214,103,89,20,8,5,173,157,189,211,53,137,20,32,222,97,102,44,188,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,40,200,161,104,221,3,86,139,87,93,227,76,127,3,162,81,55,178,72,104,251,37,165,185,69,179,56,140,107,124,200,207,28,162,35,216,101,253,219,75,52,201,32,69,
195,105,215,12,172,132,126,166,18,220,122,113,125,199,17,184,87,109,160,16,2,72,124,98,208,195,51,167,27,61,38,169,242,200,176,253,167,62,206,106,14,188,106,34,150,198,197,231,179,227,122,176,7,24,213,57,235,1,160,189,64,220,131,244,109,240,157,255,44,247,114,196,200,190,43,186,18,125,12,137,104,93,198,205,44,69,237,46,160,43,107,13,26,200,21,135,85,168,59,237,84,176,240,123,34,91,239,102,168,179,198,116,56,222,131,86,30,242,112,193,187,242,14,110,226,21,152,149,66,86,15,196,51,28,78,148,173,221,34,138,145,67,210,96,246,163,242,232,27,242,161,5,48,207,33,157,85,233,127,37,188,123,98,58,252,5,119,244,29,114,219,17,176,62,208,129,67,248,16,21,33,105,108,97,11,220,84,155,196,81,75,71,144,189,240,99,70,19,98,147,183,136,39,224,246,16,167,191,129,236,113,130,32,23,36,143,218,236,117,187,26,171,247,65,107,239,248,46,11,136,186,12,223,210,243,233,37,2,225,201,230,129,118,228,27,32,65,104,138,159,211,143,156,47,198,24,29,219,23,82,4,202,242,144,81,106,234,179,115,8,89,37,240,243,159,214,37,230,118,107,20,172,116,186,225,209,139,188,224,89,166,148,9,162,105,64,153,161,56,169,125,208,199,219,141,138,121,143,28,152,245,8,68,143,246,140,153,131,21,97,51,56,193,59,136,183,181,143,112,240,82,117,81,30,9,111,124,231,214,73,37,230,118,107,20,172,116,186,225,209,139,188,224,89,166,148,9,162,105,64,153,161,56,169,125,208,199,219,141,138,121,143,28,152,245,8,68,143,246,140,153,131,21,97,51,56,193,59,136,183,181,143,112,240,82,117,81,30,9,111,124,231,214,73,27,83,206,137,32,176,48,101,101,182,234,189,139,136,28,68,243,193,242,113,211,5,255,164,110,227,185,126,162,211,124,146,26,53,76,88,252,243,81,77,95,101,91,13,34,163,143,158,141,178,158,100,213,79,47,111,168,171,205,38,221,202,137,44,27,83,206,137,32,176,48,101,101,182,234,189,139,136,28,68,243,193,242,113,211,5,255,164,110,227,185,126,162,211,124,146,26,53,76,88,252,243,81,77,95,101,91,13,34,163,143,158,141,178,158,100,213,79,47,111,168,171,205,38,221,202,137,44,5,91,192,202,233,232,200,52,124,207,67,114,112,176,31,38,167,143,83,23,136,45,190,126,35,243,177,182,165,107,128,26,9,110,32,37,236,214,236,59,114,104,119,57,72,175,129,44,131,17,114,228,170,73,93,39,182,51,219,116,195,254,230,76,42,248,155,167,242,117,235,253,49,207,28,57,14,195,64,217,241,157,47,162,242,213,64,254,197,247,204,47,232,64,237,22,38,161,131,43,43,102,48,114,95,58,53,67,53,157,217,247,215,105,179,122,75,34,153,32,162,102,27,32,29,20,236,76,44,92,207,30,140,63,240,209,19,59,253,151,217,54,11,114,141,30,35,219,32,197,141,206,227,162,208,13,34,229,65,74,36,166,113,222,239,96,187,22,32,181,156,120,226,25,125,48,113,59,171,28,227,236,86,233,213,245,65,122,60,21,149,135,34,226,44,174,153,87,201,64,68,76,158,97,101,98,220,47,172,211,157,118,57,60,248,229,230,202,194,152,139,1,22,191,10,59,161,86,189,26,94,253,192,101,2,86,53,201,41,162,222,118,168,52,66,179,33,64,44,246,248,234,153,114,93,162,6,81,90,158,38,73,1,23,209,101,169,91,248,38,1,173,66,225,49,136,26,164,91,215,25,246,31,10,197,12,217,186,26,112,59,251,54,35,206,68,146,81,76,250,235,213,242,102,21,41,63,110,244,212,236,27,135,158,7,67,183,111,4,38,22,229,202,170,187,25,234,98,33,220,61,48,97,104,51,60,175,2,21,227,9,41,252,1,82,176,119,15,128,145,81,232,1,38,40,57,171,7,51,155,175,90,106,158,110,156,167,20,59,66,185,179,224,191,218,25,177,91,168,231,242,70,110,213,7,107,198,18,246,193,248,23,112,163,98,90,103,184,192,135,160,62,79,8,31,121,55,202,68,146,133,158,193,0,78,6,33,67,63,128,136,93,5,163,82,139,224,252,54,132,5,216,230,172,152,59,195,228,103,149,111,58,44,126,170,55,232,180,9,52,159,44,20,107,243,166,128,248,35,5,244,232,1
82,213,137,160,228,193,157,107,32,5,86,66,202,126,87,14,86,40,0,217,190,70,169,25,91,94,91,221,242,210,212,93,108,205,30,217,144,233,210,4,30,102,57,105,129,236,48,227,141,202,2,30,196,63,158,80,158,123,134,106,115,254,237,135,163,139,240,186,22,87,10,225,76,236,41,4,158,48,235,210,57,229,30,53,216,176,178,6,78,250,69,84,238,139,135,187,199,183,239,104,174,134,0,197,147,92,249,72,56,29,44,158,177,84,28,143,77,83,210,146,90,72,89,55,237,83,239,108,102,104,70,64,62,118,98,197,188,14,62,54,244,243,118,208,156,145,0,64,175,26,201,155,8,5,138,0,8,104,254,148,167,242,225,61,98,115,178,60,33,223,233,190,101,47,117,34,45,55,13,86,158,212,148,118,8,229,116,218,56,34,3,65,209,86,23,211,102,67,201,185,0,231,51,62,255,127,0,95,98,206,37,181,26,49,0,47,188,209,166,16,68,137,116,112,14,44,185,78,131,198,192,130,123,239,26,157,226,25,234,125,195,194,18,141,33,111,5,90,17,186,221,220,197,116,27,51,173,6,87,70,0,205,69,252,233,74,176,161,211,215,38,12,5,102,29,35,171,198,135,24,139,255,154,123,244,34,153,9,4,104,85,119,199,198,150,139,108,224,98,203,221,188,226,127,13,30,12,181,147,22,241,252,119,243,54,143,150,150,207,139,176,28,189,251,67,25,222,169,102,213,224,209,234,199,177,195,11,8,47,38,178,39,110,76,109,91,82,181,195,146,226,233,201,213,183,185,33,149,215,18,4,123,87,84,206,46,237,89,159,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,66,41,242,149,59,82,128,171,201,177,64,223,60,62,69,148,190,17,34,206,82,48,26,247,16,81,50,106,62,77,223,10,95,38,157,50,172,29,55,107,41,196,20,139,172,188,117,17,16,95,151,142,42,97,240,201,146,1,234,225,224,30,99,10,95,38,157,50,172,29,55,107,41,196,20,139,172,188,117,17,16,95,151,142,42,97,240,201,146,1,234,225,224,30,99,42,215,170,245,164,227,15,4,170,15,22,169,180,120,15,247,114,226,231,137,116,224,96,199,74,197,179,126,37,129,85,69,37,131,172,145,176,76,93,197,41,245,182,138,9,62,36,39,202,23,115,219,66,22,240,222,190,126,146,141,62,13,25,208,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,21,218,152,155,163,91,203,145,180,15,32,160,100,169,203,69,204,115,68,207,228,18,23,50,182,251,55,111,23,68,120,12,145,237,151,211,232,33,130,50,15,125,172,159,106,255,250,139,117,199,137,106,186,62,221,131,177,140,17,14,90,210,75,10,188,14,235,14,46,1,194,87,242,158,203,35,55,163,209,52,123,13,129,216,75,85,248,213,252,31,118,187,9,156,116,20,201,95,212,148,192,247,136,237,2,13,132,150,43,49,228,99,20,78,44,183,171,108,253,213,121,84,205,246,85,2,155,33,77,146,166,135,161,206,69,96,83,92,230,109,243,141,41,66,163,218,219,55,133,38,231,166,247,203,210,247,175,3,161,22,138,234,176,66,237,124,175,187,71,81,189,212,85,127,145,147,47,186,113,99,246,193,5,71,186,205,188,191,144,177,53,22,48,6,117,128,52,178,69,111,35,82,194,171,216,156,91,169,42,13,143,22,250,118,168,171,118,232,128,33,233,169,85,43,227,57,166,226,117,104,141,247,17,8,152,1,61,146,16,143,232,20,111,2,181,185,155,141,204,48,22,231,16,93,190,9,7,87,142,126,243,27,44,80,10,174,65,229,23,189,125,226,11,165,28,250,195,147,170,124,156,152,21,8,204,121,202,16,17,165,247,241,167,214,214,156,234,175,2,140,124,183,3,34,15,151,99,177,158,135,189,243,170,225,125,132,47,248,160,5,127,37,75,84,211,242,89,249,152,105,93,249,175,163,63,221,53,81,212,5,248,158,203,82,206,242,43,159,96,198,134,15,194,119,208,189,140,66,51,138,91,234,185,77,29,101,5,185,241,118,123,44,247,192,160,130,67,2,187,48,0,239,239,8,232,191,55,254,73,21,165,68,7,40,48,223,51,121,210,11,117,114,4,51,238,105,178,155,97,206,128,200,174,26,209,41,79,246,89,174,203,59,196,85,138,209,195,120,79,189,42,68,239,3,164,57,180,90,
7,246,47,214,231,133,136,193,136,16,201,157,119,0,211,127,218,15,188,48,218,203,53,29,191,58,228,166,38,227,98,124,52,178,74,185,16,68,33,126,66,45,98,2,161,255,158,130,112,169,40,73,101,169,2,69,108,131,32,226,36,236,101,84,83,151,237,28,111,87,3,247,74,47,247,118,125,95,16,140,234,36,64,229,36,85,165,162,249,199,32,114,171,44,214,2,74,248,224,124,80,75,62,80,35,45,229,135,197,121,140,97,63,144,115,228,196,170,226,230,34,20,48,136,254,204,214,141,174,4,87,92,201,29,49,11,215,41,121,193,144,248,32,123,194,1,105,96,69,35,228,229,213,178,54,197,79,97,70,186,37,254,252,249,205,122,77,75,90,31,223,243,244,16,193,43,154,165,176,142,23,87,146,193,241,40,154,55,5,14,53,224,52,17,228,248,222,206,26,89,225,37,102,189,239,195,46,183,229,176,236,4,113,206,194,53,10,61,129,73,47,143,35,80,153,87,105,103,197,202,103,72,5,37,209,227,21,24,181,58,181,145,163,200,55,176,43,79,92,50,221,6,26,161,221,168,208,223,4,144,85,251,154,159,52,16,23,130,137,78,79,97,79,126,144,87,12,254,18,225,8,158,134,59,166,30,160,34,22,230,251,131,113,125,35,246,66,27,153,101,55,196,167,150,184,144,53,77,193,14,233,162,149,155,84,105,53,20,226,187,249,74,207,12,178,130,196,40,199,22,71,157,253,128,26,192,53,9,136,73,137,126,255,125,202,158,207,201,245,9,137,57,250,133,13,236,60,134,145,26,244,31,223,243,244,16,193,43,154,165,176,142,23,87,146,193,241,40,154,55,5,14,53,224,52,17,228,248,222,206,26,89,225,27,153,101,55,196,167,150,184,144,53,77,193,14,233,162,149,155,84,105,53,20,226,187,249,74,207,12,178,130,196,40,199,22,71,157,253,128,26,192,53,9,136,73,137,126,255,125,202,158,207,201,245,9,137,57,250,133,13,236,60,134,145,26,244,31,223,243,244,16,193,43,154,165,176,142,23,87,146,193,241,40,154,55,5,14,53,224,52,17,228,248,222,206,26,89,225,26,70,183,7,230,115,219,30,228,244,235,84,56,195,146,63,174,206,236,83,12,199,169,8,88,168,134,107,29,90,231,246,6,245,209,248,63,244,161,238,169,150,82,65,10,146,52,112,61,254,115,49,212,216,39,84,137,253,236,89,24,227,145,168,31,223,243,244,16,193,43,154,165,176,142,23,87,146,193,241,40,154,55,5,14,53,224,52,17,228,248,222,206,26,89,225,26,70,183,7,230,115,219,30,228,244,235,84,56,195,146,63,174,206,236,83,12,199,169,8,88,168,134,107,29,90,231,246,6,245,209,248,63,244,161,238,169,150,82,65,10,146,52,112,61,254,115,49,212,216,39,84,137,253,236,89,24,227,145,168,31,223,243,244,16,193,43,154,165,176,142,23,87,146,193,241,40,154,55,5,14,53,224,52,17,228,248,222,206,26,89,225,39,62,153,9,32,62,187,180,175,212,38,130,202,74,101,80,13,106,40,130,78,1,115,189,77,89,66,158,95,38,238,233,23,147,43,128,118,178,68,93,92,9,75,226,131,111,53,134,120,236,222,39,224,145,131,102,30,208,19,247,251,30,166,245,18,40,152,37,160,226,112,246,247,90,47,216,172,185,199,4,203,143,229,170,31,231,32,137,163,228,164,187,80,228,153,209,23,69,68,203,80,190,148,80,220,126,253,185,224,197,6,213,42,215,249,226,23,57,38,8,98,160,212,78,31,128,23,82],"hex_proof":"0x1c86fc66582633bd1d52f166d547c1805d3f83d09a7da0b7d6b30a407f5ec3240d046f4e8aeba5d3973b4affa451720179759215acdd3f6765d27d2fa2c7c5360e785b3fb483cf0f3b711ef8fc4c747d8fa77a4c2636d19ff2b6be8bff4a97f727bc4f4db48b176365117e8e2240bcae729f3a1f9da8c3a09d305bb95ad22b72221d7a2b229d6ea7d24beea513abc47f249402d40be28c616c1e1eece661dfb72be75d4082849a7da330befafc9805c292340b0b48df8ace39162c9072ea5444065a7778d27e660302b30380773547db9d97553466f23aa941e8147d649685641e2c6e0d3f5dd7866cfb2d46564cc5e0d2d004fe5075d42720369a4889a211fe2f158378b854decafaa7cd7e9062048fa71854310a58ee90d6f7da1ce7fff0f304dfcf4c2f2436cd7bfb98a31eda7af58479796e78374835f2eb14c0c7f36d1e011f962edd2669d33a7aaaf8b17b2af7
57756a96af14728a410550e2d911a04a04819c25e9ca305efc9fc2db296fb38eb1be0e73de0b49cc41dee0cf72bb0e0f09cfa9883522a6c3e116ec0ac8f63d04ec1f47a10c117e871ac508658e52e7392c4c405625deb555a6ba028a6c46742d3c56dc2c17f0a2b98dc49332a32ac5071dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea392a1d174b7f8a5439f1c147d4d5b819a3834f371cb634b241c1d6d35418349bf715c8f2aa92f42ea426a605c913d66759140805ad9dbdd335891420de61662cbc1dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea391dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea391dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea391dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea3928c8a168dd03568b575de34c7f03a25137b24868fb25a5b945b3388c6b7cc8cf1ca223d865fddb4b34c92045c369d70cac847ea612dc7a717dc711b8576da01002487c62d0c333a71b3d26a9f2c8b0fda73ece6a0ebc6a2296c6c5e7b3e37ab00718d539eb01a0bd40dc83f46df09dff2cf772c4c8be2bba127d0c89685dc6cd2c45ed2ea02b6b0d1ac8158755a83bed54b0f07b225bef66a8b3c67438de83561ef270c1bbf20e6ee215989542560fc4331c4e94addd228a9143d260f6a3f2e81bf2a10530cf219d55e97f25bc7b623afc0577f41d72db11b03ed08143f8101521696c610bdc549bc4514b4790bdf06346136293b78827e0f610a7bf81ec71822017248fdaec75bb1aabf7416beff82e0b88ba0cdfd2f3e92502e1c9e68176e41b2041688a9fd38f9c2fc6181ddb175204caf290516aeab373085925f0f39fd625e6766b14ac74bae1d18bbce059a69409a2694099a138a97dd0c7db8d8a798f1c98f508448ff68c998315613338c13b88b7b58f70f05275511e096f7ce7d64925e6766b14ac74bae1d18bbce059a69409a2694099a138a97dd0c7db8d8a798f1c98f508448ff68c998315613338c13b88b7b58f70f05275511e096f7ce7d6491b53ce8920b0306565b6eabd8b881c44f3c1f271d305ffa46ee3b97ea2d37c921a354c58fcf3514d5f655b0d22a38f9e8db29e64d54f2f6fa8abcd26ddca892c1b53ce8920b0306565b6eabd8b881c44f3c1f271d305ffa46ee3b97ea2d37c921a354c58fcf3514d5f655b0d22a38f9e8db29e64d54f2f6fa8abcd26ddca892c055bc0cae9e8c8347ccf437270b01f26a78f5317882dbe7e23f3b1b6a56b801a096e2025ecd6ec3b7268773948af812c831172e4aa495d27b633db74c3fee64c2af89ba7f275ebfd31cf1c390ec340d9f19d2fa2f2d540fec5f7cc2fe840ed1626a1832b2b6630725f3a3543359dd9f7d769b37a4b229920a2661b201d14ec4c2c5ccf1e8c3ff0d1133bfd97d9360b728d1e23db20c58dcee3a2d00d22e5414a24a671deef60bb1620b59c78e2197d30713bab1ce3ec56e9d5f5417a3c15958722e22cae9957c940444c9e616562dc2facd39d76393cf8e5e6cac2988b0116bf0a3ba156bd1a5efdc065025635c929a2de76a83442b321402cf6f8ea99725da206515a9e26490117d165a95bf82601ad42e131881aa45bd719f61f0ac50cd9ba1a703bfb3623ce4492514cfaebd5f26615293f6ef4d4ec1b879e0743b76f042616e5caaabb19ea6221dc3d306168333caf0215e30929fc0152b0770f809151e801262839ab07339baf5a6a9e6e9ca7143b42b9b3e0bfda19b15ba8e7f2466ed5076bc612f6c1f81770a3625a67b8c087a03e4f081f7937ca4492859ec1004e0621433f80885d05a3528be0fc368405d8e6ac983bc3e467956f3a2c7eaa37e8b409349f2c146bf3a680f82305f4e8b6d589a0e4c19d6b20055642ca7e570e562800d9be46a9195b5e5bddf2d2d45d6ccd1ed990e9d2041e66396981ec30e38dca021ec43f9e509e7b866a73feed87a38bf0ba16570ae14cec29049e30ebd239e51e35d8b0b2064efa4554ee8b87bbc7b7ef68ae8600c5935cf948381d2c9eb1541c8f4d53d2925a485937ed53ef6c666846403e7662c5bc0e3e36f4f376d09c910040af1ac99b08058a000868fe94a7f2e13d6273b23c21dfe9be652f75222d370d569ed4947608e574da38220341d15617d36643c9b900e7333eff7f005f62ce25b51a31002fbcd1a610448974700e2cb94e83c6c0827bef1a9de219ea7dc3c2128
d216f055a11badddcc5741b33ad06574600cd45fce94ab0a1d3d7260c05661d23abc687188bff9a7bf422990904685577c7c6968b6ce062cbddbce27f0d1e0cb59316f1fc77f3368f9696cf8bb01cbdfb4319dea966d5e0d1eac7b1c30b082f26b2276e4c6d5b52b5c392e2e9c9d5b7b92195d712047b5754ce2eed599f0f0000000000000000000000000000000000000000000000000000000000000000194229f2953b5280abc9b140df3c3e4594be1122ce52301af71051326a3e4ddf0a5f269d32ac1d376b29c4148bacbc7511105f978e2a61f0c99201eae1e01e630a5f269d32ac1d376b29c4148bacbc7511105f978e2a61f0c99201eae1e01e632ad7aaf5a4e30f04aa0f16a9b4780ff772e2e78974e060c74ac5b37e258155452583ac91b04c5dc529f5b68a093e2427ca1773db4216f0debe7e928d3e0d19d000000000000000000000000000000000000000000000000000000000000000000415da989ba35bcb91b40f20a064a9cb45cc7344cfe4121732b6fb376f1744780c91ed97d3e82182320f7dac9f6afffa8b75c7896aba3edd83b18c110e5ad24b0abc0eeb0e2e01c257f29ecb2337a3d1347b0d81d84b55f8d5fc1f76bb099c7414c95fd494c0f788ed020d84962b31e463144e2cb7ab6cfdd57954cdf655029b214d92a687a1ce4560535ce66df38d2942a3dadb378526e7a6f7cbd2f7af03a1168aeab042ed7cafbb4751bdd4557f91932fba7163f6c10547bacdbcbf90b135163006758034b2456f2352c2abd89c5ba92a0d8f16fa76a8ab76e88021e9a9552be339a6e275688df7110898013d92108fe8146f02b5b99b8dcc3016e7105dbe0907578e7ef31b2c500aae41e517bd7de20ba51cfac393aa7c9c981508cc79ca1011a5f7f1a7d6d69ceaaf028c7cb703220f9763b19e87bdf3aae17d842ff8a0057f254b54d3f259f998695df9afa33fdd3551d405f89ecb52cef22b9f60c6860fc277d0bd8c42338a5beab94d1d6505b9f1767b2cf7c0a0824302bb3000efef08e8bf37fe4915a544072830df3379d20b75720433ee69b29b61ce80c8ae1ad1294ff659aecb3bc4558ad1c3784fbd2a44ef03a439b45a07f62fd6e78588c18810c99d7700d37fda0fbc30dacb351dbf3ae4a626e3627c34b24ab91044217e422d6202a1ff9e8270a9284965a902456c8320e224ec65545397ed1c6f5703f74a2ff7767d5f108cea2440e52455a5a2f9c72072ab2cd6024af8e07c504b3e50232de587c5798c613f9073e4c4aae2e622143088feccd68dae04575cc91d310bd72979c190f8207bc20169604523e4e5d5b236c54f6146ba25fefcf9cd7a4d4b5a1fdff3f410c12b9aa5b08e175792c1f1289a37050e35e03411e4f8dece1a59e12566bdefc32eb7e5b0ec0471cec2350a3d81492f8f235099576967c5ca67480525d1e31518b53ab591a3c837b02b4f5c32dd061aa1dda8d0df049055fb9a9f34101782894e4f614f7e90570cfe12e1089e863ba61ea02216e6fb83717d23f6421b996537c4a796b890354dc10ee9a2959b54693514e2bbf94acf0cb282c428c716479dfd801ac035098849897eff7dca9ecfc9f5098939fa850dec3c86911af41fdff3f410c12b9aa5b08e175792c1f1289a37050e35e03411e4f8dece1a59e11b996537c4a796b890354dc10ee9a2959b54693514e2bbf94acf0cb282c428c716479dfd801ac035098849897eff7dca9ecfc9f5098939fa850dec3c86911af41fdff3f410c12b9aa5b08e175792c1f1289a37050e35e03411e4f8dece1a59e11a46b707e673db1ee4f4eb5438c3923faeceec530cc7a90858a8866b1d5ae7f606f5d1f83ff4a1eea99652410a9234703dfe7331d4d8275489fdec5918e391a81fdff3f410c12b9aa5b08e175792c1f1289a37050e35e03411e4f8dece1a59e11a46b707e673db1ee4f4eb5438c3923faeceec530cc7a90858a8866b1d5ae7f606f5d1f83ff4a1eea99652410a9234703dfe7331d4d8275489fdec5918e391a81fdff3f410c12b9aa5b08e175792c1f1289a37050e35e03411e4f8dece1a59e1273e9909203ebbb4afd42682ca4a65500d6a28824e0173bd4d59429e5f26eee917932b8076b2445d5c094be2836f358678ecde27e09183661ed013f7fb1ea6f512289825a0e270f6f75a2fd8acb9c704cb8fe5aa1fe72089a3e4a4bb50e499d1174544cb50be9450dc7efdb9e0c506d52ad7f9e21739260862a0d44e1f801752","transcript_type":"EVM","split":null,"pretty_public_inputs":{"rescaled_inputs":[],"inputs":[],"processed_inputs":[],"processed_params":[],"processed_outputs":[],"rescaled_outputs":[["0","0","0","0"]],"outputs":[["0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x000
0000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000"]]},"timestamp":1731011005876,"commitment":"KZG","version":"source - no compatibility guaranteed"} \ No newline at end of file +{"protocol":null,"instances":[["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]],"proof":[16,178,128,209,204,30,244,41,71,89,39,70,84,216,25,244,26,144,252,30,70,73,247,224,225,175,168,251,32,227,238,14,9,9,144,140,201,242,109,156,193,133,216,166,135,87,127,112,168,197,246,133,223,244,65,129,33,115,104,102,208,5,182,167,13,200,134,205,243,137,56,129,147,59,252,203,170,49,212,77,226,12,234,76,14,168,96,228,183,135,132,119,246,15,54,221,3,94,196,94,177,190,221,76,114,193,208,203,254,182,83,31,235,0,221,104,90,198,29,139,57,0,17,95,67,155,147,70,7,126,56,81,186,2,151,141,195,237,8,96,152,139,206,175,230,45,233,196,206,21,61,37,159,187,210,168,228,45,125,55,8,235,32,207,192,223,129,22,64,245,128,124,147,200,9,108,146,123,48,134,39,69,194,154,143,81,245,50,44,197,50,41,36,232,3,253,145,47,144,58,240,28,193,164,219,98,230,206,0,101,4,208,77,206,144,101,134,121,250,99,209,103,32,220,30,251,129,27,166,30,136,174,45,240,168,7,65,81,66,78,190,17,96,128,7,56,71,166,227,194,21,113,115,58,254,133,36,230,74,8,221,64,1,168,80,182,98,44,106,250,42,209,115,131,120,189,220,33,208,136,5,200,184,63,75,142,10,227,1,120,251,207,249,100,198,117,112,201,245,85,15,207,130,85,46,60,200,79,146,231,111,191,8,50,230,73,44,146,107,18,39,24,217,3,63,196,9,118,8,187,2,78,188,212,228,199,111,6,21,58,209,159,189,34,60,160,225,172,123,37,51,68,7,37,11,249,218,117,171,118,148,220,65,158,110,201,205,80,138,123,144,65,161,135,216,60,212,91,120,36,102,225,29,168,9,207,169,136,53,34,166,195,225,22,236,10,200,246,61,4,236,31,71,161,12,17,126,135,26,197,8,101,142,82,231,57,44,76,64,86,37,222,181,85,166,186,2,138,108,70,116,45,60,86,220,44,23,240,162,185,141,196,147,50,163,42,197,7,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,42,29,23,75,127,138,84,57,241,193,71,212,213,184,25,163,131,79,55,28,182,52,178,65,193,214,211,84,24,52,155,247,21,200,242,170,146,244,46,164,38,166,5,201,19,214,103,89,20,8,5,173,157,189,211,53,137,20,32,222,97,102,44,188,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,6,79,187,167,218,29,37,122,130,180,149,74,200,4,40,115,228,202,34,97,216,186,156
,64,20,85,241,190,91,55,75,71,43,182,74,221,220,191,113,159,122,169,131,23,23,147,37,56,214,212,117,149,174,157,254,40,50,43,1,221,42,90,72,144,14,166,34,138,19,243,49,221,13,199,245,69,141,38,15,148,244,182,16,220,92,217,91,162,183,188,112,112,106,67,216,18,32,91,53,132,250,210,26,67,119,35,177,249,227,59,19,124,232,80,157,228,194,244,139,11,127,141,4,226,228,92,177,158,3,251,182,48,12,241,26,88,90,220,247,162,5,103,140,105,144,242,55,221,138,225,163,27,19,31,71,161,210,225,31,166,21,4,119,4,152,87,160,76,109,10,163,107,228,164,207,45,130,71,188,200,77,104,191,17,231,37,131,218,115,80,137,108,19,18,100,219,97,222,69,218,10,55,148,112,203,34,198,55,235,175,104,4,146,28,230,138,43,32,25,84,248,190,46,59,43,170,58,186,221,114,168,43,168,106,49,202,162,128,182,48,29,26,114,129,106,95,159,73,103,58,165,154,110,133,174,57,25,153,220,191,21,30,45,224,55,138,130,242,59,65,100,11,108,254,53,79,251,107,44,172,162,22,55,82,233,121,255,220,6,100,31,232,17,137,126,229,249,181,196,16,90,178,204,41,171,216,93,158,138,139,107,103,16,30,169,123,122,132,200,244,46,77,77,33,5,107,108,183,134,197,181,97,35,43,53,91,126,226,75,198,126,97,183,20,41,254,76,242,31,219,86,64,23,131,96,145,87,150,14,41,25,248,186,94,243,137,155,20,209,229,137,113,248,187,48,52,56,144,55,204,245,115,12,182,46,77,77,33,5,107,108,183,134,197,181,97,35,43,53,91,126,226,75,198,126,97,183,20,41,254,76,242,31,219,86,64,23,131,96,145,87,150,14,41,25,248,186,94,243,137,155,20,209,229,137,113,248,187,48,52,56,144,55,204,245,115,12,182,32,137,56,59,234,158,87,9,239,192,46,41,150,42,237,92,19,131,157,51,131,59,186,150,236,39,165,3,172,90,135,136,42,200,214,170,151,18,146,16,81,108,185,127,129,77,203,153,128,208,10,82,31,55,7,191,207,127,79,34,19,89,237,168,32,137,56,59,234,158,87,9,239,192,46,41,150,42,237,92,19,131,157,51,131,59,186,150,236,39,165,3,172,90,135,136,42,200,214,170,151,18,146,16,81,108,185,127,129,77,203,153,128,208,10,82,31,55,7,191,207,127,79,34,19,89,237,168,13,35,128,106,215,129,224,23,238,230,115,74,99,25,6,218,73,224,234,184,117,224,128,224,205,236,99,104,218,112,220,237,41,5,224,195,72,126,128,41,149,22,184,67,21,180,74,124,214,6,152,128,179,128,240,109,99,79,142,180,5,65,104,166,28,76,168,201,204,141,244,42,208,122,171,167,180,195,63,161,37,250,217,142,60,175,147,239,34,66,106,79,11,62,159,195,6,22,69,58,66,112,138,96,146,89,234,137,58,33,41,228,146,253,71,155,207,134,150,28,189,225,27,122,54,61,49,57,7,251,83,74,30,83,116,117,119,120,164,149,50,155,77,52,174,4,77,24,126,113,37,190,230,227,249,64,255,162,33,95,6,104,61,4,2,84,187,238,74,96,97,81,89,152,208,54,82,10,198,153,249,84,118,51,242,228,45,99,174,35,252,93,12,0,127,241,0,57,66,205,34,203,10,54,149,40,136,1,198,204,87,84,105,230,68,76,98,94,38,34,114,184,121,77,14,152,133,187,72,186,243,181,228,33,149,240,144,89,42,251,149,218,235,56,120,242,225,135,203,90,182,149,153,217,93,54,21,22,102,6,39,253,214,104,127,195,94,40,218,41,91,114,223,190,98,140,31,169,235,196,109,143,213,119,185,184,62,109,8,119,145,68,165,125,241,55,106,64,132,242,237,200,136,241,108,205,90,89,216,38,61,169,78,95,158,200,101,119,63,82,9,183,50,72,238,200,166,129,25,199,179,165,121,129,175,118,245,134,253,202,243,149,215,218,110,142,68,197,108,46,194,21,39,240,158,98,158,49,62,218,150,208,200,100,26,141,221,183,234,94,59,140,86,167,249,231,153,102,99,156,185,19,64,26,24,47,41,254,204,175,60,72,79,32,114,59,232,111,189,15,230,214,233,144,8,14,11,241,9,234,176,242,58,210,67,164,20,247,41,174,113,99,19,5,141,142,52,144,86,177,212,31,157,1,93,167,113,171,50,87,227,164,248,66,23,37,23,82,39,157,250,1
21,194,142,240,246,10,90,28,90,15,223,51,70,3,209,3,163,60,120,214,24,138,62,199,75,142,3,198,166,39,58,245,198,223,16,160,3,180,178,178,72,173,254,187,161,72,117,9,233,74,141,66,14,248,52,152,202,103,163,210,70,38,83,60,15,240,122,51,49,46,130,207,61,253,46,76,197,223,63,225,117,77,207,129,29,235,150,122,189,41,113,66,180,21,69,59,186,213,44,203,73,237,176,184,151,72,181,149,250,52,51,236,119,241,135,227,193,203,115,22,107,129,140,213,168,35,206,175,95,84,124,174,100,49,45,121,191,155,121,44,71,167,179,243,96,142,170,155,169,101,179,96,118,190,63,175,2,21,224,194,107,117,217,205,168,27,230,125,109,160,83,94,69,163,94,216,83,50,53,169,116,18,241,64,49,166,213,107,155,22,43,0,21,61,177,155,146,235,37,241,124,57,132,231,19,77,240,119,17,112,82,78,126,156,231,248,30,229,143,33,44,31,5,154,241,187,76,9,197,71,149,201,194,16,208,85,43,225,229,26,97,34,87,84,147,95,55,108,115,26,156,201,75,22,89,86,47,120,58,60,125,184,2,219,81,217,237,75,177,234,123,10,161,247,133,226,179,197,252,146,126,33,170,194,220,30,228,243,62,176,105,5,144,198,56,6,233,78,141,190,70,28,146,94,36,226,162,116,102,178,81,3,166,118,11,31,16,34,135,69,122,166,237,8,43,51,45,82,13,18,239,251,170,201,249,46,248,121,224,49,190,159,147,106,105,194,146,199,223,15,194,86,155,166,176,115,174,228,233,171,136,148,243,241,128,138,138,212,136,36,22,118,138,192,193,135,207,191,101,192,211,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,145,224,174,95,210,170,163,17,109,66,39,169,177,174,134,77,253,20,220,53,97,152,34,3,59,124,162,205,38,39,11,11,204,118,199,231,10,147,81,181,167,247,6,2,184,253,131,177,161,98,134,32,20,79,58,61,134,50,134,194,203,99,80,11,204,118,199,231,10,147,81,181,167,247,6,2,184,253,131,177,161,98,134,32,20,79,58,61,134,50,134,194,203,99,80,37,243,176,96,212,214,105,111,199,70,95,65,187,32,103,187,51,173,59,221,246,214,136,37,143,2,210,108,40,75,169,122,25,126,29,18,121,255,100,169,233,144,100,34,57,80,125,247,134,148,221,221,112,213,115,66,252,152,122,220,174,210,218,133,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,29,117,179,229,40,175,239,246,166,222,58,157,203,213,202,227,142,15,167,104,207,207,189,121,56,111,161,174,205,217,35,84,1,255,99,81,244,247,204,117,116,191,120,156,33,130,254,27,2,153,246,235,71,163,253,226,197,93,182,191,45,147,114,186,17,57,82,21,48,29,90,63,154,5,157,76,107,15,37,9,56,228,220,253,159,118,20,148,213,59,155,241,102,195,24,142,5,176,219,62,183,40,51,55,228,249,152,198,234,161,176,142,225,74,218,32,145,5,20,108,241,158,120,26,24,107,5,191,12,155,165,93,16,92,186,14,56,145,186,45,104,103,219,210,65,211,7,83,203,93,110,92,225,150,252,233,188,175,86,15,27,246,237,16,75,26,134,69,250,106,88,213,113,41,250,185,55,227,139,102,225,245,236,245,98,173,238,1,112,4,111,22,45,104,165,200,110,53,194,138,194,62,224,252,154,93,77,43,203,99,31,175,180,171,45,152,239,176,21,89,185,111,175,240,20,184,72,97,139,252,221,52,86,18,125,243,220,141,123,166,62,160,154,123,19,133,251,214,54,155,243,140,34,31,225,67,47,25,77,83,109,232,152,8,146,92,166,49,4,73,248,68,88,163,232,176,149,97,113,80,14,72,86,71,90,160,62,121,43,145,156,196,47,170,198,209,45,192,71,145,76,163,147,58,142,228,195,38,255,133,130,105,241,19,131,109,157,83,64,5,16,61,2,9,51,129,102,108,187,121,237,223,250,208,218,187,42,93,1,120,214,148,147,43,64,253,72,228,167,203,216,183,29,125,89,160,192,102,200,98,170,111,240,81,128,47,25,124,156,1,211,243,174,108,199,86,206,199,25,39,118,156,246,229,45,30,254,131,56,116,245,162,198,42,251,81,122,219,121,200,45,254,254,95,2,133,115,224,237,215,236,68,134,239,139,199,
37,117,198,147,35,61,126,65,46,9,46,220,7,11,227,170,143,172,177,2,116,188,129,85,224,235,65,203,175,253,113,179,2,103,169,103,12,128,66,243,83,54,98,27,154,73,200,30,172,226,67,233,124,137,171,67,234,34,184,101,1,151,215,188,20,116,41,230,236,27,46,144,47,165,58,139,212,65,245,127,137,205,74,186,226,167,82,138,78,96,194,44,150,183,122,244,8,55,113,109,181,144,164,227,87,68,99,14,206,248,244,247,253,99,183,175,144,202,89,45,127,9,2,242,56,121,195,111,5,19,250,251,127,253,154,238,174,56,187,141,182,217,109,45,62,32,228,203,247,111,69,154,221,66,143,196,172,63,153,55,10,82,59,52,153,65,168,91,220,253,74,148,214,219,172,219,67,193,31,67,247,109,147,12,92,97,19,235,160,253,118,189,11,178,115,143,76,15,110,71,146,161,214,7,137,40,255,85,26,63,47,234,160,110,36,111,110,246,194,68,101,3,76,60,19,162,116,58,179,53,171,129,162,137,157,46,77,114,50,228,27,139,164,221,195,100,244,157,205,143,235,36,191,63,180,215,21,129,119,236,221,237,51,107,130,230,84,200,72,108,180,160,148,164,171,123,130,100,168,77,160,230,241,152,117,6,170,224,44,233,76,81,74,178,214,197,124,242,210,189,199,81,78,0,11,88,112,13,106,21,165,79,83,244,54,65,195,118,119,52,36,155,145,89,85,22,25,96,211,149,135,81,97,85,9,234,114,248,131,127,190,225,7,207,144,192,180,7,43,253,174,49,41,97,192,140,220,174,63,135,127,40,125,145,151,159,43,105,177,193,152,8,213,35,10,69,88,56,192,74,41,187,113,237,11,178,115,143,76,15,110,71,146,161,214,7,137,40,255,85,26,63,47,234,160,110,36,111,110,246,194,68,101,3,76,60,36,155,145,89,85,22,25,96,211,149,135,81,97,85,9,234,114,248,131,127,190,225,7,207,144,192,180,7,43,253,174,49,41,97,192,140,220,174,63,135,127,40,125,145,151,159,43,105,177,193,152,8,213,35,10,69,88,56,192,74,41,187,113,237,11,178,115,143,76,15,110,71,146,161,214,7,137,40,255,85,26,63,47,234,160,110,36,111,110,246,194,68,101,3,76,60,7,173,230,112,102,232,113,192,149,183,170,19,200,71,64,113,158,205,187,146,106,190,22,188,180,97,75,125,8,143,187,160,35,180,89,37,3,106,76,249,71,94,124,19,8,174,35,2,44,131,41,191,176,117,77,115,121,135,171,74,110,130,112,111,11,178,115,143,76,15,110,71,146,161,214,7,137,40,255,85,26,63,47,234,160,110,36,111,110,246,194,68,101,3,76,60,7,173,230,112,102,232,113,192,149,183,170,19,200,71,64,113,158,205,187,146,106,190,22,188,180,97,75,125,8,143,187,160,35,180,89,37,3,106,76,249,71,94,124,19,8,174,35,2,44,131,41,191,176,117,77,115,121,135,171,74,110,130,112,111,11,178,115,143,76,15,110,71,146,161,214,7,137,40,255,85,26,63,47,234,160,110,36,111,110,246,194,68,101,3,76,60,15,177,232,145,186,235,247,8,213,216,127,172,237,170,30,182,248,82,178,244,9,163,234,205,61,146,134,124,77,140,74,71,24,98,69,37,141,5,76,169,232,150,253,95,60,17,85,111,118,139,104,4,138,75,75,238,249,144,25,62,200,5,211,87,32,163,25,184,198,219,224,70,7,174,32,8,122,92,10,33,218,126,217,175,155,80,149,14,177,228,202,128,29,214,37,9,32,28,164,11,15,21,52,225,17,40,229,140,240,236,76,172,92,237,35,236,34,207,10,101,191,148,128,196,195,252,99,241],"hex_proof":"0x10b280d1cc1ef4294759274654d819f41a90fc1e4649f7e0e1afa8fb20e3ee0e0909908cc9f26d9cc185d8a687577f70a8c5f685dff4418121736866d005b6a70dc886cdf3893881933bfccbaa31d44de20cea4c0ea860e4b7878477f60f36dd035ec45eb1bedd4c72c1d0cbfeb6531feb00dd685ac61d8b3900115f439b9346077e3851ba02978dc3ed0860988bceafe62de9c4ce153d259fbbd2a8e42d7d3708eb20cfc0df811640f5807c93c8096c927b30862745c29a8f51f5322cc5322924e803fd912f903af01cc1a4db62e6ce006504d04dce90658679fa63d16720dc1efb811ba61e88ae2df0a8074151424ebe116080073847a6e3c21571733afe8524e64a08dd4001a850b6622c6afa2ad1738378bddc21d08805c8b83f4b8e0ae30178fbcff964c67
570c9f5550fcf82552e3cc84f92e76fbf0832e6492c926b122718d9033fc4097608bb024ebcd4e4c76f06153ad19fbd223ca0e1ac7b25334407250bf9da75ab7694dc419e6ec9cd508a7b9041a187d83cd45b782466e11da809cfa9883522a6c3e116ec0ac8f63d04ec1f47a10c117e871ac508658e52e7392c4c405625deb555a6ba028a6c46742d3c56dc2c17f0a2b98dc49332a32ac5071dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea392a1d174b7f8a5439f1c147d4d5b819a3834f371cb634b241c1d6d35418349bf715c8f2aa92f42ea426a605c913d66759140805ad9dbdd335891420de61662cbc1dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea391dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea391dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea391dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea39064fbba7da1d257a82b4954ac8042873e4ca2261d8ba9c401455f1be5b374b472bb64adddcbf719f7aa9831717932538d6d47595ae9dfe28322b01dd2a5a48900ea6228a13f331dd0dc7f5458d260f94f4b610dc5cd95ba2b7bc70706a43d812205b3584fad21a437723b1f9e33b137ce8509de4c2f48b0b7f8d04e2e45cb19e03fbb6300cf11a585adcf7a205678c6990f237dd8ae1a31b131f47a1d2e11fa6150477049857a04c6d0aa36be4a4cf2d8247bcc84d68bf11e72583da7350896c131264db61de45da0a379470cb22c637ebaf6804921ce68a2b201954f8be2e3b2baa3abadd72a82ba86a31caa280b6301d1a72816a5f9f49673aa59a6e85ae391999dcbf151e2de0378a82f23b41640b6cfe354ffb6b2caca2163752e979ffdc06641fe811897ee5f9b5c4105ab2cc29abd85d9e8a8b6b67101ea97b7a84c8f42e4d4d21056b6cb786c5b561232b355b7ee24bc67e61b71429fe4cf21fdb56401783609157960e2919f8ba5ef3899b14d1e58971f8bb3034389037ccf5730cb62e4d4d21056b6cb786c5b561232b355b7ee24bc67e61b71429fe4cf21fdb56401783609157960e2919f8ba5ef3899b14d1e58971f8bb3034389037ccf5730cb62089383bea9e5709efc02e29962aed5c13839d33833bba96ec27a503ac5a87882ac8d6aa97129210516cb97f814dcb9980d00a521f3707bfcf7f4f221359eda82089383bea9e5709efc02e29962aed5c13839d33833bba96ec27a503ac5a87882ac8d6aa97129210516cb97f814dcb9980d00a521f3707bfcf7f4f221359eda80d23806ad781e017eee6734a631906da49e0eab875e080e0cdec6368da70dced2905e0c3487e80299516b84315b44a7cd6069880b380f06d634f8eb4054168a61c4ca8c9cc8df42ad07aaba7b4c33fa125fad98e3caf93ef22426a4f0b3e9fc30616453a42708a609259ea893a2129e492fd479bcf86961cbde11b7a363d313907fb534a1e5374757778a495329b4d34ae044d187e7125bee6e3f940ffa2215f06683d040254bbee4a6061515998d036520ac699f9547633f2e42d63ae23fc5d0c007ff1003942cd22cb0a3695288801c6cc575469e6444c625e262272b8794d0e9885bb48baf3b5e42195f090592afb95daeb3878f2e187cb5ab69599d95d361516660627fdd6687fc35e28da295b72dfbe628c1fa9ebc46d8fd577b9b83e6d08779144a57df1376a4084f2edc888f16ccd5a59d8263da94e5f9ec865773f5209b73248eec8a68119c7b3a57981af76f586fdcaf395d7da6e8e44c56c2ec21527f09e629e313eda96d0c8641a8dddb7ea5e3b8c56a7f9e79966639cb913401a182f29feccaf3c484f20723be86fbd0fe6d6e990080e0bf109eab0f23ad243a414f729ae716313058d8e349056b1d41f9d015da771ab3257e3a4f84217251752279dfa79c28ef0f60a5a1c5a0fdf334603d103a33c78d6188a3ec74b8e03c6a6273af5c6df10a003b4b2b248adfebba1487509e94a8d420ef83498ca67a3d24626533c0ff07a33312e82cf3dfd2e4cc5df3fe1754dcf811deb967abd297142b415453bbad52ccb49edb0b89748b595fa3433ec77f187e3c1cb73166b818cd5a823ceaf5f547cae64312d79bf9b792c47a7b3f3608eaa9ba965b36076be3faf0215e0c26b75d9cda81be67d6da0535e45a35ed8533235a97412f14031a6d56b9b162b00153db19b92eb25f17c3984e7134df0771170524e7e9c
e7f81ee58f212c1f059af1bb4c09c54795c9c210d0552be1e51a61225754935f376c731a9cc94b1659562f783a3c7db802db51d9ed4bb1ea7b0aa1f785e2b3c5fc927e21aac2dc1ee4f33eb0690590c63806e94e8dbe461c925e24e2a27466b25103a6760b1f102287457aa6ed082b332d520d12effbaac9f92ef879e031be9f936a69c292c7df0fc2569ba6b073aee4e9ab8894f3f1808a8ad4882416768ac0c187cfbf65c0d300000000000000000000000000000000000000000000000000000000000000000d91e0ae5fd2aaa3116d4227a9b1ae864dfd14dc35619822033b7ca2cd26270b0bcc76c7e70a9351b5a7f70602b8fd83b1a1628620144f3a3d863286c2cb63500bcc76c7e70a9351b5a7f70602b8fd83b1a1628620144f3a3d863286c2cb635025f3b060d4d6696fc7465f41bb2067bb33ad3bddf6d688258f02d26c284ba97a197e1d1279ff64a9e990642239507df78694dddd70d57342fc987adcaed2da8500000000000000000000000000000000000000000000000000000000000000001d75b3e528afeff6a6de3a9dcbd5cae38e0fa768cfcfbd79386fa1aecdd9235401ff6351f4f7cc7574bf789c2182fe1b0299f6eb47a3fde2c55db6bf2d9372ba11395215301d5a3f9a059d4c6b0f250938e4dcfd9f761494d53b9bf166c3188e05b0db3eb7283337e4f998c6eaa1b08ee14ada209105146cf19e781a186b05bf0c9ba55d105cba0e3891ba2d6867dbd241d30753cb5d6e5ce196fce9bcaf560f1bf6ed104b1a8645fa6a58d57129fab937e38b66e1f5ecf562adee0170046f162d68a5c86e35c28ac23ee0fc9a5d4d2bcb631fafb4ab2d98efb01559b96faff014b848618bfcdd3456127df3dc8d7ba63ea09a7b1385fbd6369bf38c221fe1432f194d536de89808925ca6310449f84458a3e8b0956171500e4856475aa03e792b919cc42faac6d12dc047914ca3933a8ee4c326ff858269f113836d9d534005103d02093381666cbb79eddffad0dabb2a5d0178d694932b40fd48e4a7cbd8b71d7d59a0c066c862aa6ff051802f197c9c01d3f3ae6cc756cec71927769cf6e52d1efe833874f5a2c62afb517adb79c82dfefe5f028573e0edd7ec4486ef8bc72575c693233d7e412e092edc070be3aa8facb10274bc8155e0eb41cbaffd71b30267a9670c8042f35336621b9a49c81eace243e97c89ab43ea22b8650197d7bc147429e6ec1b2e902fa53a8bd441f57f89cd4abae2a7528a4e60c22c96b77af40837716db590a4e35744630ecef8f4f7fd63b7af90ca592d7f0902f23879c36f0513fafb7ffd9aeeae38bb8db6d96d2d3e20e4cbf76f459add428fc4ac3f99370a523b349941a85bdcfd4a94d6dbacdb43c11f43f76d930c5c6113eba0fd76bd0bb2738f4c0f6e4792a1d6078928ff551a3f2feaa06e246f6ef6c24465034c3c13a2743ab335ab81a2899d2e4d7232e41b8ba4ddc364f49dcd8feb24bf3fb4d7158177ecdded336b82e654c8486cb4a094a4ab7b8264a84da0e6f1987506aae02ce94c514ab2d6c57cf2d2bdc7514e000b58700d6a15a54f53f43641c3767734249b915955161960d3958751615509ea72f8837fbee107cf90c0b4072bfdae312961c08cdcae3f877f287d91979f2b69b1c19808d5230a455838c04a29bb71ed0bb2738f4c0f6e4792a1d6078928ff551a3f2feaa06e246f6ef6c24465034c3c249b915955161960d3958751615509ea72f8837fbee107cf90c0b4072bfdae312961c08cdcae3f877f287d91979f2b69b1c19808d5230a455838c04a29bb71ed0bb2738f4c0f6e4792a1d6078928ff551a3f2feaa06e246f6ef6c24465034c3c07ade67066e871c095b7aa13c84740719ecdbb926abe16bcb4614b7d088fbba023b45925036a4cf9475e7c1308ae23022c8329bfb0754d737987ab4a6e82706f0bb2738f4c0f6e4792a1d6078928ff551a3f2feaa06e246f6ef6c24465034c3c07ade67066e871c095b7aa13c84740719ecdbb926abe16bcb4614b7d088fbba023b45925036a4cf9475e7c1308ae23022c8329bfb0754d737987ab4a6e82706f0bb2738f4c0f6e4792a1d6078928ff551a3f2feaa06e246f6ef6c24465034c3c0fb1e891baebf708d5d87facedaa1eb6f852b2f409a3eacd3d92867c4d8c4a47186245258d054ca9e896fd5f3c11556f768b68048a4b4beef990193ec805d35720a319b8c6dbe04607ae20087a5c0a21da7ed9af9b50950eb1e4ca801dd62509201ca40b0f1534e11128e58cf0ec4cac5ced23ec22cf0a65bf9480c4c3fc63f1","transcript_type":"EVM","split":null,"pretty_public_inputs":{"rescaled_inputs":[],"inputs":[],"processed_inputs":[],"processed_params":[],"processed_outputs":[],"rescaled_outputs":[["0","0","0","0"]],"outputs":[["0x000000000000000000000000000000000000000000000000000000000000
0000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000"]]},"timestamp":1731034409977,"commitment":"KZG","version":"source - no compatibility guaranteed"} \ No newline at end of file diff --git a/tests/integration_tests.rs b/tests/integration_tests.rs index f7bfb8801..d1a9ac8c8 100644 --- a/tests/integration_tests.rs +++ b/tests/integration_tests.rs @@ -556,15 +556,7 @@ mod native_tests { test_dir.close().unwrap(); } - #(#[test_case(TESTS[N])])* - fn accuracy_measurement_div_rebase_(test: &str) { - crate::native_tests::init_binary(); - crate::native_tests::setup_py_env(); - let test_dir = TempDir::new(test).unwrap(); - let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test); - accuracy_measurement(path, test.to_string(), "private", "private", "public", 1, "accuracy", 2.6, true); - test_dir.close().unwrap(); - } + #(#[test_case(TESTS[N])])* fn accuracy_measurement_public_outputs_(test: &str) { @@ -572,7 +564,7 @@ mod native_tests { crate::native_tests::setup_py_env(); let test_dir = TempDir::new(test).unwrap(); let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test); - accuracy_measurement(path, test.to_string(), "private", "private", "public", 1, "accuracy", 2.6, false); + accuracy_measurement(path, test.to_string(), "private", "private", "public", 1, "accuracy", 2.6); test_dir.close().unwrap(); } @@ -582,7 +574,7 @@ mod native_tests { crate::native_tests::setup_py_env(); let test_dir = TempDir::new(test).unwrap(); let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test); - accuracy_measurement(path, test.to_string(), "private", "fixed", "private", 1, "accuracy", 2.6 , false); + accuracy_measurement(path, test.to_string(), "private", "fixed", "private", 1, "accuracy", 2.6 ); test_dir.close().unwrap(); } @@ -592,7 +584,7 @@ mod native_tests { crate::native_tests::setup_py_env(); let test_dir = TempDir::new(test).unwrap(); let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test); - accuracy_measurement(path, test.to_string(), "public", "private", "private", 1, "accuracy", 2.6, false); + accuracy_measurement(path, test.to_string(), "public", "private", "private", 1, "accuracy", 2.6); test_dir.close().unwrap(); } @@ -603,7 +595,7 @@ mod native_tests { crate::native_tests::setup_py_env(); let test_dir = TempDir::new(test).unwrap(); let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test); - accuracy_measurement(path, test.to_string(), "private", "private", "public", 1, "resources", 3.1, false); + accuracy_measurement(path, test.to_string(), "private", "private", "public", 1, "resources", 3.1); test_dir.close().unwrap(); } @@ -1466,7 +1458,6 @@ mod native_tests { cal_target, scales_to_use, 2, - false, &mut tolerance, Commitments::KZG, 2, @@ -1607,12 +1598,11 @@ mod native_tests { cal_target: &str, scales_to_use: Option>, num_inner_columns: usize, - div_rebasing: bool, tolerance: &mut f32, commitment: Commitments, lookup_safety_margin: usize, ) { - let mut args = vec![ + let args = vec![ "gen-settings".to_string(), "-M".to_string(), format!("{}/{}/network.onnx", test_dir, example_name), @@ -1629,10 +1619,6 @@ mod native_tests { format!("--commitment={}", commitment), ]; - if div_rebasing { - args.push("--div-rebasing".to_string()); - }; - let status = 
Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR)) .args(args) .status() @@ -1731,7 +1717,6 @@ mod native_tests { batch_size: usize, cal_target: &str, target_perc: f32, - div_rebasing: bool, ) { gen_circuit_settings_and_witness( test_dir, @@ -1743,7 +1728,6 @@ mod native_tests { cal_target, None, 2, - div_rebasing, &mut 0.0, Commitments::KZG, 2, @@ -2027,7 +2011,6 @@ mod native_tests { target_str, scales_to_use, num_inner_columns, - false, &mut 0.0, commitment, lookup_safety_margin, @@ -2459,7 +2442,6 @@ mod native_tests { // we need the accuracy Some(vec![4]), 1, - false, &mut 0.0, Commitments::KZG, 2,