diff --git a/Cargo.lock b/Cargo.lock index cacfc06327d..eb058008359 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2836,6 +2836,7 @@ name = "noirc_evaluator" version = "0.31.0" dependencies = [ "acvm", + "bn254_blackbox_solver", "chrono", "fxhash", "im", diff --git a/acvm-repo/acir/src/circuit/black_box_functions.rs b/acvm-repo/acir/src/circuit/black_box_functions.rs index 419c0266b69..aadee59f507 100644 --- a/acvm-repo/acir/src/circuit/black_box_functions.rs +++ b/acvm-repo/acir/src/circuit/black_box_functions.rs @@ -82,43 +82,10 @@ pub enum BlackBoxFunc { /// /// [grumpkin]: https://hackmd.io/@aztec-network/ByzgNxBfd#2-Grumpkin---A-curve-on-top-of-BN-254-for-SNARK-efficient-group-operations SchnorrVerify, - - /// Calculates a Pedersen commitment to the inputs. - /// - /// Computes a Pedersen commitment of the inputs using generators of the - /// embedded curve - /// - input: vector of (witness, 254) - /// - output: 2 witnesses representing the x,y coordinates of the resulting - /// Grumpkin point - /// - domain separator: a constant public value (a field element) that you - /// can use so that the commitment also depends on the domain separator. - /// Noir uses 0 as domain separator. - /// - /// The backend should handle proper conversion between the inputs being ACIR - /// field elements and the scalar field of the embedded curve. In the case of - /// Aztec's Barretenberg, the latter is bigger than the ACIR field so it is - /// straightforward. The Pedersen generators are managed by the proving - /// system. - /// - /// The commitment is expected to be additively homomorphic + /// Deprecated. To be removed with a sync from aztec-packages PedersenCommitment, - - /// Calculates a Pedersen hash to the inputs. - /// - /// Computes a Pedersen hash of the inputs and their number, using - /// generators of the embedded curve - /// - input: vector of (witness, 254) - /// - output: the x-coordinate of the pedersen commitment of the - /// 'prepended input' (see below) - /// - domain separator: a constant public value (a field element) that you - /// can use so that the hash also depends on the domain separator. Noir - /// uses 0 as domain separator. - /// - /// In Barretenberg, PedersenHash is doing the same as PedersenCommitment, - /// except that it prepends the inputs with their length. This is expected - /// to not be additively homomorphic. + /// Deprecated. To be removed with a sync from aztec-packages PedersenHash, - /// Verifies a ECDSA signature over the secp256k1 curve. 
/// - inputs: /// - x coordinate of public key as 32 bytes @@ -242,8 +209,6 @@ impl BlackBoxFunc { BlackBoxFunc::SchnorrVerify => "schnorr_verify", BlackBoxFunc::Blake2s => "blake2s", BlackBoxFunc::Blake3 => "blake3", - BlackBoxFunc::PedersenCommitment => "pedersen_commitment", - BlackBoxFunc::PedersenHash => "pedersen_hash", BlackBoxFunc::EcdsaSecp256k1 => "ecdsa_secp256k1", BlackBoxFunc::MultiScalarMul => "multi_scalar_mul", BlackBoxFunc::EmbeddedCurveAdd => "embedded_curve_add", @@ -262,6 +227,8 @@ impl BlackBoxFunc { BlackBoxFunc::BigIntToLeBytes => "bigint_to_le_bytes", BlackBoxFunc::Poseidon2Permutation => "poseidon2_permutation", BlackBoxFunc::Sha256Compression => "sha256_compression", + BlackBoxFunc::PedersenCommitment => "deprecated pedersen commitment", + BlackBoxFunc::PedersenHash => "deprecated pedersen hash", } } @@ -272,8 +239,6 @@ impl BlackBoxFunc { "schnorr_verify" => Some(BlackBoxFunc::SchnorrVerify), "blake2s" => Some(BlackBoxFunc::Blake2s), "blake3" => Some(BlackBoxFunc::Blake3), - "pedersen_commitment" => Some(BlackBoxFunc::PedersenCommitment), - "pedersen_hash" => Some(BlackBoxFunc::PedersenHash), "ecdsa_secp256k1" => Some(BlackBoxFunc::EcdsaSecp256k1), "ecdsa_secp256r1" => Some(BlackBoxFunc::EcdsaSecp256r1), "multi_scalar_mul" => Some(BlackBoxFunc::MultiScalarMul), @@ -292,6 +257,8 @@ impl BlackBoxFunc { "bigint_to_le_bytes" => Some(BlackBoxFunc::BigIntToLeBytes), "poseidon2_permutation" => Some(BlackBoxFunc::Poseidon2Permutation), "sha256_compression" => Some(BlackBoxFunc::Sha256Compression), + "deprecated pedersen commitment" => Some(BlackBoxFunc::PedersenCommitment), + "deprecated pedersen hash" => Some(BlackBoxFunc::PedersenHash), _ => None, } } diff --git a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index 362e9ba5936..09b39964813 100644 --- a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -54,11 +54,13 @@ pub enum BlackBoxFuncCall { message: Vec, output: Witness, }, + /// Deprecated. To be removed with a sync from aztec-packages PedersenCommitment { inputs: Vec, domain_separator: u32, outputs: (Witness, Witness), }, + /// Deprecated. To be removed with a sync from aztec-packages PedersenHash { inputs: Vec, domain_separator: u32, @@ -189,8 +191,6 @@ impl BlackBoxFuncCall { BlackBoxFuncCall::Blake2s { .. } => BlackBoxFunc::Blake2s, BlackBoxFuncCall::Blake3 { .. } => BlackBoxFunc::Blake3, BlackBoxFuncCall::SchnorrVerify { .. } => BlackBoxFunc::SchnorrVerify, - BlackBoxFuncCall::PedersenCommitment { .. } => BlackBoxFunc::PedersenCommitment, - BlackBoxFuncCall::PedersenHash { .. } => BlackBoxFunc::PedersenHash, BlackBoxFuncCall::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, BlackBoxFuncCall::EcdsaSecp256r1 { .. } => BlackBoxFunc::EcdsaSecp256r1, BlackBoxFuncCall::MultiScalarMul { .. } => BlackBoxFunc::MultiScalarMul, @@ -206,6 +206,8 @@ impl BlackBoxFuncCall { BlackBoxFuncCall::BigIntToLeBytes { .. } => BlackBoxFunc::BigIntToLeBytes, BlackBoxFuncCall::Poseidon2Permutation { .. } => BlackBoxFunc::Poseidon2Permutation, BlackBoxFuncCall::Sha256Compression { .. } => BlackBoxFunc::Sha256Compression, + BlackBoxFuncCall::PedersenCommitment { .. } => BlackBoxFunc::PedersenCommitment, + BlackBoxFuncCall::PedersenHash { .. } => BlackBoxFunc::PedersenHash, } } @@ -219,8 +221,6 @@ impl BlackBoxFuncCall { | BlackBoxFuncCall::SHA256 { inputs, .. } | BlackBoxFuncCall::Blake2s { inputs, .. 
} | BlackBoxFuncCall::Blake3 { inputs, .. } - | BlackBoxFuncCall::PedersenCommitment { inputs, .. } - | BlackBoxFuncCall::PedersenHash { inputs, .. } | BlackBoxFuncCall::BigIntFromLeBytes { inputs, .. } | BlackBoxFuncCall::Poseidon2Permutation { inputs, .. } => inputs.to_vec(), @@ -318,6 +318,8 @@ impl BlackBoxFuncCall { inputs.push(*key_hash); inputs } + BlackBoxFuncCall::PedersenCommitment { .. } => todo!(), + BlackBoxFuncCall::PedersenHash { .. } => todo!(), } } @@ -339,9 +341,7 @@ impl BlackBoxFuncCall { | BlackBoxFuncCall::XOR { output, .. } | BlackBoxFuncCall::SchnorrVerify { output, .. } | BlackBoxFuncCall::EcdsaSecp256k1 { output, .. } - | BlackBoxFuncCall::PedersenHash { output, .. } | BlackBoxFuncCall::EcdsaSecp256r1 { output, .. } => vec![*output], - BlackBoxFuncCall::PedersenCommitment { outputs, .. } => vec![outputs.0, outputs.1], BlackBoxFuncCall::MultiScalarMul { outputs, .. } | BlackBoxFuncCall::EmbeddedCurveAdd { outputs, .. } => { vec![outputs.0, outputs.1, outputs.2] @@ -356,6 +356,8 @@ impl BlackBoxFuncCall { vec![] } BlackBoxFuncCall::BigIntToLeBytes { outputs, .. } => outputs.to_vec(), + BlackBoxFuncCall::PedersenCommitment { .. } => todo!(), + BlackBoxFuncCall::PedersenHash { .. } => todo!(), } } } @@ -421,6 +423,14 @@ fn get_outputs_string(outputs: &[Witness]) -> String { impl std::fmt::Display for BlackBoxFuncCall { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + BlackBoxFuncCall::PedersenCommitment { .. } => { + return write!(f, "BLACKBOX::Deprecated") + } + BlackBoxFuncCall::PedersenHash { .. } => return write!(f, "BLACKBOX::Deprecated"), + _ => (), + } + let uppercase_name = self.name().to_uppercase(); write!(f, "BLACKBOX::{uppercase_name} ")?; // INPUTS @@ -440,13 +450,7 @@ impl std::fmt::Display for BlackBoxFuncCall { write!(f, "]")?; - // SPECIFIC PARAMETERS - match self { - BlackBoxFuncCall::PedersenCommitment { domain_separator, .. 
} => { - write!(f, " domain_separator: {domain_separator}") - } - _ => write!(f, ""), - } + write!(f, "") } } diff --git a/acvm-repo/acir/tests/test_program_serialization.rs b/acvm-repo/acir/tests/test_program_serialization.rs index dfcb1a8bb86..84a9aa719f2 100644 --- a/acvm-repo/acir/tests/test_program_serialization.rs +++ b/acvm-repo/acir/tests/test_program_serialization.rs @@ -100,33 +100,6 @@ fn multi_scalar_mul_circuit() { assert_eq!(bytes, expected_serialization) } -#[test] -fn pedersen_circuit() { - let pedersen = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::PedersenCommitment { - inputs: vec![FunctionInput { witness: Witness(1), num_bits: FieldElement::max_num_bits() }], - outputs: (Witness(2), Witness(3)), - domain_separator: 0, - }); - - let circuit: Circuit = Circuit { - current_witness_index: 4, - opcodes: vec![pedersen], - private_parameters: BTreeSet::from([Witness(1)]), - return_values: PublicInputs(BTreeSet::from_iter(vec![Witness(2), Witness(3)])), - ..Circuit::default() - }; - let program = Program { functions: vec![circuit], unconstrained_functions: vec![] }; - - let bytes = Program::serialize_program(&program); - - let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 74, 73, 10, 0, 0, 4, 180, 29, 252, 255, 193, 66, 40, - 76, 77, 179, 34, 20, 36, 136, 237, 83, 245, 101, 107, 79, 65, 94, 253, 214, 217, 255, 239, - 192, 1, 43, 124, 181, 238, 113, 0, 0, 0, - ]; - assert_eq!(bytes, expected_serialization) -} - #[test] fn schnorr_verify_circuit() { let public_key_x = diff --git a/acvm-repo/acvm/src/compiler/transformers/csat.rs b/acvm-repo/acvm/src/compiler/transformers/csat.rs index f2a3cc2c84e..19cc18ca7f3 100644 --- a/acvm-repo/acvm/src/compiler/transformers/csat.rs +++ b/acvm-repo/acvm/src/compiler/transformers/csat.rs @@ -432,7 +432,7 @@ fn fits_in_one_identity(expr: &Expression, width: usize) -> boo return true; } - // We now know that we have a single mul term. We also know that the mul term must match up with two other terms + // We now know that we have a single mul term. We also know that the mul term must match up with at least one of the other terms // A polynomial whose mul terms are non zero which do not match up with two terms in the fan-in cannot fit into one opcode // An example of this is: Axy + Bx + Cy + ... // Notice how the bivariate monomial xy has two univariate monomials with their respective coefficients @@ -461,7 +461,25 @@ fn fits_in_one_identity(expr: &Expression, width: usize) -> boo } } - found_x & found_y + // If the multiplication is a squaring then we must assign the two witnesses to separate wires and so we + // can never get a zero contribution to the width. + let multiplication_is_squaring = mul_term.1 == mul_term.2; + + let mul_term_width_contribution = if !multiplication_is_squaring && (found_x & found_y) { + // Both witnesses involved in the multiplication exist elsewhere in the expression. + // They both do not contribute to the width of the expression as this would be double-counting + // due to their appearance in the linear terms. + 0 + } else if found_x || found_y { + // One of the witnesses involved in the multiplication exists elsewhere in the expression. + // The multiplication then only contributes 1 new witness to the width. + 1 + } else { + // Worst case scenario, the multiplication is using completely unique witnesses so has a contribution of 2. 
+ 2 + }; + + mul_term_width_contribution + expr.linear_combinations.len() <= width } #[cfg(test)] @@ -573,4 +591,20 @@ mod tests { let contains_b = got_optimized_opcode_a.linear_combinations.iter().any(|(_, w)| *w == b); assert!(contains_b); } + + #[test] + fn recognize_expr_with_single_shared_witness_which_fits_in_single_identity() { + // Regression test for an expression which Zac found which should have been preserved but + // was being split into two expressions. + let expr = Expression { + mul_terms: vec![(-FieldElement::from(555u128), Witness(8), Witness(10))], + linear_combinations: vec![ + (FieldElement::one(), Witness(10)), + (FieldElement::one(), Witness(11)), + (-FieldElement::one(), Witness(13)), + ], + q_c: FieldElement::zero(), + }; + assert!(fits_in_one_identity(&expr, 4)); + } } diff --git a/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/acvm-repo/acvm/src/pwg/blackbox/mod.rs index 8bda9221d8a..b3064c47d82 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -7,7 +7,7 @@ use acvm_blackbox_solver::{blake2s, blake3, keccak256, keccakf1600, sha256}; use self::{ aes128::solve_aes128_encryption_opcode, bigint::AcvmBigIntSolver, - hash::solve_poseidon2_permutation_opcode, pedersen::pedersen_hash, + hash::solve_poseidon2_permutation_opcode, }; use super::{insert_value, OpcodeNotSolvable, OpcodeResolutionError}; @@ -18,7 +18,6 @@ pub(crate) mod bigint; mod embedded_curve_ops; mod hash; mod logic; -mod pedersen; mod range; mod signature; pub(crate) mod utils; @@ -27,7 +26,6 @@ use embedded_curve_ops::{embedded_curve_add, multi_scalar_mul}; // Hash functions should eventually be exposed for external consumers. use hash::{solve_generic_256_hash_opcode, solve_sha_256_permutation_opcode}; use logic::{and, xor}; -use pedersen::pedersen; pub(crate) use range::solve_range_opcode; use signature::{ ecdsa::{secp256k1_prehashed, secp256r1_prehashed}, @@ -127,12 +125,6 @@ pub(crate) fn solve( message, *output, ), - BlackBoxFuncCall::PedersenCommitment { inputs, domain_separator, outputs } => { - pedersen(backend, initial_witness, inputs, *domain_separator, *outputs) - } - BlackBoxFuncCall::PedersenHash { inputs, domain_separator, output } => { - pedersen_hash(backend, initial_witness, inputs, *domain_separator, *output) - } BlackBoxFuncCall::EcdsaSecp256k1 { public_key_x, public_key_y, @@ -187,5 +179,7 @@ pub(crate) fn solve( BlackBoxFuncCall::Poseidon2Permutation { inputs, outputs, len } => { solve_poseidon2_permutation_opcode(backend, initial_witness, inputs, outputs, *len) } + BlackBoxFuncCall::PedersenCommitment { .. } => todo!("Deprecated BlackBox"), + BlackBoxFuncCall::PedersenHash { .. 
} => todo!("Deprecated BlackBox"), } } diff --git a/acvm-repo/acvm/src/pwg/blackbox/pedersen.rs b/acvm-repo/acvm/src/pwg/blackbox/pedersen.rs deleted file mode 100644 index f64a3a79465..00000000000 --- a/acvm-repo/acvm/src/pwg/blackbox/pedersen.rs +++ /dev/null @@ -1,47 +0,0 @@ -use acir::{ - circuit::opcodes::FunctionInput, - native_types::{Witness, WitnessMap}, - AcirField, -}; - -use crate::{ - pwg::{insert_value, witness_to_value, OpcodeResolutionError}, - BlackBoxFunctionSolver, -}; - -pub(super) fn pedersen( - backend: &impl BlackBoxFunctionSolver, - initial_witness: &mut WitnessMap, - inputs: &[FunctionInput], - domain_separator: u32, - outputs: (Witness, Witness), -) -> Result<(), OpcodeResolutionError> { - let scalars: Result, _> = - inputs.iter().map(|input| witness_to_value(initial_witness, input.witness)).collect(); - let scalars: Vec<_> = scalars?.into_iter().cloned().collect(); - - let (res_x, res_y) = backend.pedersen_commitment(&scalars, domain_separator)?; - - insert_value(&outputs.0, res_x, initial_witness)?; - insert_value(&outputs.1, res_y, initial_witness)?; - - Ok(()) -} - -pub(super) fn pedersen_hash( - backend: &impl BlackBoxFunctionSolver, - initial_witness: &mut WitnessMap, - inputs: &[FunctionInput], - domain_separator: u32, - output: Witness, -) -> Result<(), OpcodeResolutionError> { - let scalars: Result, _> = - inputs.iter().map(|input| witness_to_value(initial_witness, input.witness)).collect(); - let scalars: Vec<_> = scalars?.into_iter().cloned().collect(); - - let res = backend.pedersen_hash(&scalars, domain_separator)?; - - insert_value(&output, res, initial_witness)?; - - Ok(()) -} diff --git a/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts b/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts index cfd5523b79f..aaa82f8f1e5 100644 --- a/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts +++ b/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts @@ -75,16 +75,6 @@ it('successfully processes complex brillig foreign call opcodes', async () => { expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); -it('successfully executes a Pedersen opcode', async function () { - const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/pedersen'); - - const solvedWitness: WitnessMap = await executeCircuit(bytecode, initialWitnessMap, () => { - throw Error('unexpected oracle'); - }); - - expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); -}); - it('successfully executes a MultiScalarMul opcode', async () => { const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/multi_scalar_mul'); diff --git a/acvm-repo/acvm_js/test/node/execute_circuit.test.ts b/acvm-repo/acvm_js/test/node/execute_circuit.test.ts index 1e3517e8814..120ad0fa738 100644 --- a/acvm-repo/acvm_js/test/node/execute_circuit.test.ts +++ b/acvm-repo/acvm_js/test/node/execute_circuit.test.ts @@ -76,17 +76,6 @@ it('successfully processes complex brillig foreign call opcodes', async () => { expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); -it('successfully executes a Pedersen opcode', async function () { - this.timeout(10000); - const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/pedersen'); - - const solvedWitness: WitnessMap = await executeCircuit(bytecode, initialWitnessMap, () => { - throw Error('unexpected oracle'); - }); - - expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); -}); - it('successfully executes a MultiScalarMul opcode', async () => { const { bytecode, 
initialWitnessMap, expectedWitnessMap } = await import('../shared/multi_scalar_mul'); @@ -117,25 +106,6 @@ it('successfully executes a MemoryOp opcode', async () => { expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); }); -it('successfully executes 500 pedersen circuits', async function () { - this.timeout(100000); - - // Pedersen opcodes used to have a large upfront cost due to generator calculation - // so we'd need to pass around the blackbox solver in JS to avoid redoing this work. - // - // This test now shows that we don't need to do this anymore without a performance regression. - - const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/pedersen'); - - for (let i = 0; i < 500; i++) { - const solvedWitness = await executeCircuit(bytecode, initialWitnessMap, () => { - throw Error('unexpected oracle'); - }); - - expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); - } -}); - /** * Below are all the same tests as above but using `executeProgram` * TODO: also add a couple tests for executing multiple circuits diff --git a/acvm-repo/acvm_js/test/shared/pedersen.ts b/acvm-repo/acvm_js/test/shared/pedersen.ts deleted file mode 100644 index 6e3ec403d65..00000000000 --- a/acvm-repo/acvm_js/test/shared/pedersen.ts +++ /dev/null @@ -1,13 +0,0 @@ -// See `pedersen_circuit` integration test in `acir/tests/test_program_serialization.rs`. -export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 74, 73, 10, 0, 0, 4, 180, 29, 252, 255, 193, 66, 40, 76, 77, 179, 34, 20, 36, - 136, 237, 83, 245, 101, 107, 79, 65, 94, 253, 214, 217, 255, 239, 192, 1, 43, 124, 181, 238, 113, 0, 0, 0, -]); - -export const initialWitnessMap = new Map([[1, '0x0000000000000000000000000000000000000000000000000000000000000001']]); - -export const expectedWitnessMap = new Map([ - [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], - [2, '0x083e7911d835097629f0067531fc15cafd79a89beecb39903f69572c636f4a5a'], - [3, '0x1a7f5efaad7f315c25a918f30cc8d7333fccab7ad7c90f14de81bcc528f9935d'], -]); diff --git a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs index 0ee3a252840..869017f52ee 100644 --- a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs +++ b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs @@ -14,16 +14,6 @@ pub trait BlackBoxFunctionSolver { signature: &[u8; 64], message: &[u8], ) -> Result; - fn pedersen_commitment( - &self, - inputs: &[F], - domain_separator: u32, - ) -> Result<(F, F), BlackBoxResolutionError>; - fn pedersen_hash( - &self, - inputs: &[F], - domain_separator: u32, - ) -> Result; fn multi_scalar_mul( &self, points: &[F], @@ -67,20 +57,6 @@ impl BlackBoxFunctionSolver for StubbedBlackBoxSolver { ) -> Result { Err(Self::fail(BlackBoxFunc::SchnorrVerify)) } - fn pedersen_commitment( - &self, - _inputs: &[F], - _domain_separator: u32, - ) -> Result<(F, F), BlackBoxResolutionError> { - Err(Self::fail(BlackBoxFunc::PedersenCommitment)) - } - fn pedersen_hash( - &self, - _inputs: &[F], - _domain_separator: u32, - ) -> Result { - Err(Self::fail(BlackBoxFunc::PedersenHash)) - } fn multi_scalar_mul( &self, _points: &[F], diff --git a/acvm-repo/bn254_blackbox_solver/benches/criterion.rs b/acvm-repo/bn254_blackbox_solver/benches/criterion.rs index cbcb75a3291..e7917fa1adc 100644 --- a/acvm-repo/bn254_blackbox_solver/benches/criterion.rs +++ b/acvm-repo/bn254_blackbox_solver/benches/criterion.rs @@ -13,22 +13,6 @@ fn bench_poseidon2(c: &mut Criterion) { 
c.bench_function("poseidon2", |b| b.iter(|| poseidon2_permutation(black_box(&inputs), 4))); } -fn bench_pedersen_commitment(c: &mut Criterion) { - let inputs = [FieldElement::one(); 2]; - - c.bench_function("pedersen_commitment", |b| { - b.iter(|| Bn254BlackBoxSolver.pedersen_commitment(black_box(&inputs), 0)) - }); -} - -fn bench_pedersen_hash(c: &mut Criterion) { - let inputs = [FieldElement::one(); 2]; - - c.bench_function("pedersen_hash", |b| { - b.iter(|| Bn254BlackBoxSolver.pedersen_hash(black_box(&inputs), 0)) - }); -} - fn bench_schnorr_verify(c: &mut Criterion) { let pub_key_x = FieldElement::from_hex( "0x04b260954662e97f00cab9adb773a259097f7a274b83b113532bce27fa3fb96a", @@ -62,7 +46,7 @@ fn bench_schnorr_verify(c: &mut Criterion) { criterion_group!( name = benches; config = Criterion::default().sample_size(40).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None))); - targets = bench_poseidon2, bench_pedersen_commitment, bench_pedersen_hash, bench_schnorr_verify + targets = bench_poseidon2, bench_schnorr_verify ); criterion_main!(benches); diff --git a/acvm-repo/bn254_blackbox_solver/src/generator/generators.rs b/acvm-repo/bn254_blackbox_solver/src/generator/generators.rs index f89d582d167..bb51426b33b 100644 --- a/acvm-repo/bn254_blackbox_solver/src/generator/generators.rs +++ b/acvm-repo/bn254_blackbox_solver/src/generator/generators.rs @@ -38,7 +38,7 @@ fn default_generators() -> &'static [Affine; NUM_DEFAULT_GEN /// index-addressable generators. /// /// [hash_to_curve]: super::hash_to_curve::hash_to_curve -pub(crate) fn derive_generators( +pub fn derive_generators( domain_separator_bytes: &[u8], num_generators: u32, starting_index: u32, diff --git a/acvm-repo/bn254_blackbox_solver/src/lib.rs b/acvm-repo/bn254_blackbox_solver/src/lib.rs index 08e0fb66a6d..ec69c3797f6 100644 --- a/acvm-repo/bn254_blackbox_solver/src/lib.rs +++ b/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -10,8 +10,8 @@ mod pedersen; mod poseidon2; mod schnorr; -use ark_ec::AffineRepr; pub use embedded_curve_ops::{embedded_curve_add, multi_scalar_mul}; +pub use generator::generators::derive_generators; pub use poseidon2::poseidon2_permutation; // Temporary hack, this ensure that we always use a bn254 field here @@ -40,31 +40,6 @@ impl BlackBoxFunctionSolver for Bn254BlackBoxSolver { )) } - fn pedersen_commitment( - &self, - inputs: &[FieldElement], - domain_separator: u32, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - let inputs: Vec = inputs.iter().map(|input| input.into_repr()).collect(); - let result = pedersen::commitment::commit_native_with_index(&inputs, domain_separator); - let res_x = - FieldElement::from_repr(*result.x().expect("should not commit to point at infinity")); - let res_y = - FieldElement::from_repr(*result.y().expect("should not commit to point at infinity")); - Ok((res_x, res_y)) - } - - fn pedersen_hash( - &self, - inputs: &[FieldElement], - domain_separator: u32, - ) -> Result { - let inputs: Vec = inputs.iter().map(|input| input.into_repr()).collect(); - let result = pedersen::hash::hash_with_index(&inputs, domain_separator); - let result = FieldElement::from_repr(result); - Ok(result) - } - fn multi_scalar_mul( &self, points: &[FieldElement], diff --git a/acvm-repo/brillig/src/black_box.rs b/acvm-repo/brillig/src/black_box.rs index 3887092a8c2..2b39e279aa8 100644 --- a/acvm-repo/brillig/src/black_box.rs +++ b/acvm-repo/brillig/src/black_box.rs @@ -61,13 +61,13 @@ pub enum BlackBoxOp { signature: HeapVector, 
result: MemoryAddress, }, - /// Calculates a Pedersen commitment to the inputs. + /// Deprecated. To be removed with a sync from aztec-packages PedersenCommitment { inputs: HeapVector, domain_separator: MemoryAddress, output: HeapArray, }, - /// Calculates a Pedersen hash to the inputs. + /// Deprecated. To be removed with a sync from aztec-packages PedersenHash { inputs: HeapVector, domain_separator: MemoryAddress, diff --git a/acvm-repo/brillig_vm/src/black_box.rs b/acvm-repo/brillig_vm/src/black_box.rs index 2053f4e7c86..544963b00db 100644 --- a/acvm-repo/brillig_vm/src/black_box.rs +++ b/acvm-repo/brillig_vm/src/black_box.rs @@ -232,41 +232,6 @@ pub(crate) fn evaluate_black_box ); Ok(()) } - BlackBoxOp::PedersenCommitment { inputs, domain_separator, output } => { - let inputs: Vec = read_heap_vector(memory, inputs) - .iter() - .map(|x| *x.extract_field().unwrap()) - .collect(); - let domain_separator: u32 = - memory.read(*domain_separator).try_into().map_err(|_| { - BlackBoxResolutionError::Failed( - BlackBoxFunc::PedersenCommitment, - "Invalid signature length".to_string(), - ) - })?; - let (x, y) = solver.pedersen_commitment(&inputs, domain_separator)?; - memory.write_slice( - memory.read_ref(output.pointer), - &[MemoryValue::new_field(x), MemoryValue::new_field(y)], - ); - Ok(()) - } - BlackBoxOp::PedersenHash { inputs, domain_separator, output } => { - let inputs: Vec = read_heap_vector(memory, inputs) - .iter() - .map(|x| *x.extract_field().unwrap()) - .collect(); - let domain_separator: u32 = - memory.read(*domain_separator).try_into().map_err(|_| { - BlackBoxResolutionError::Failed( - BlackBoxFunc::PedersenCommitment, - "Invalid signature length".to_string(), - ) - })?; - let hash = solver.pedersen_hash(&inputs, domain_separator)?; - memory.write(*output, MemoryValue::new_field(hash)); - Ok(()) - } BlackBoxOp::BigIntAdd { lhs, rhs, output } => { let lhs = memory.read(*lhs).try_into().unwrap(); let rhs = memory.read(*rhs).try_into().unwrap(); @@ -378,6 +343,8 @@ pub(crate) fn evaluate_black_box Ok(()) } + BlackBoxOp::PedersenCommitment { .. } => todo!("Deprecated Blackbox"), + BlackBoxOp::PedersenHash { .. } => todo!("Deprecated Blackbox"), } } @@ -392,8 +359,6 @@ fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { BlackBoxOp::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, BlackBoxOp::EcdsaSecp256r1 { .. } => BlackBoxFunc::EcdsaSecp256r1, BlackBoxOp::SchnorrVerify { .. } => BlackBoxFunc::SchnorrVerify, - BlackBoxOp::PedersenCommitment { .. } => BlackBoxFunc::PedersenCommitment, - BlackBoxOp::PedersenHash { .. } => BlackBoxFunc::PedersenHash, BlackBoxOp::MultiScalarMul { .. } => BlackBoxFunc::MultiScalarMul, BlackBoxOp::EmbeddedCurveAdd { .. } => BlackBoxFunc::EmbeddedCurveAdd, BlackBoxOp::BigIntAdd { .. } => BlackBoxFunc::BigIntAdd, @@ -405,6 +370,8 @@ fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { BlackBoxOp::Poseidon2Permutation { .. } => BlackBoxFunc::Poseidon2Permutation, BlackBoxOp::Sha256Compression { .. } => BlackBoxFunc::Sha256Compression, BlackBoxOp::ToRadix { .. } => unreachable!("ToRadix is not an ACIR BlackBoxFunc"), + BlackBoxOp::PedersenCommitment { .. } => BlackBoxFunc::PedersenCommitment, + BlackBoxOp::PedersenHash { .. 
} => BlackBoxFunc::PedersenHash, } } diff --git a/acvm-repo/brillig_vm/src/lib.rs b/acvm-repo/brillig_vm/src/lib.rs index 862162ddccf..01f45bf653c 100644 --- a/acvm-repo/brillig_vm/src/lib.rs +++ b/acvm-repo/brillig_vm/src/lib.rs @@ -500,9 +500,14 @@ impl<'a, F: AcirField, B: BlackBoxFunctionSolver> VM<'a, F, B> { match output { ForeignCallParam::Array(values) => { if values.len() != *size { - return Err("Foreign call result array doesn't match expected size".to_string()); + // foreign call returning flattened values into a nested type, so the sizes do not match + let destination = self.memory.read_ref(*pointer_index); + let return_type = value_type; + let mut flatten_values_idx = 0; //index of values read from flatten_values + self.write_slice_of_values_to_memory(destination, &output.fields(), &mut flatten_values_idx, return_type)?; + } else { + self.write_values_to_memory_slice(*pointer_index, values, value_types)?; } - self.write_values_to_memory_slice(*pointer_index, values, value_types)?; } _ => { return Err("Function result size does not match brillig bytecode size".to_string()); diff --git a/compiler/noirc_evaluator/Cargo.toml b/compiler/noirc_evaluator/Cargo.toml index aa30eef9156..72a52b43741 100644 --- a/compiler/noirc_evaluator/Cargo.toml +++ b/compiler/noirc_evaluator/Cargo.toml @@ -12,6 +12,7 @@ license.workspace = true noirc_frontend.workspace = true noirc_errors.workspace = true acvm.workspace = true +bn254_blackbox_solver.workspace = true fxhash.workspace = true iter-extended.workspace = true thiserror.workspace = true diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index 367cdbe4973..c62365162ba 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -137,39 +137,6 @@ pub(crate) fn convert_black_box_call( ) } } - - BlackBoxFunc::PedersenCommitment => { - if let ( - [message, BrilligVariable::SingleAddr(domain_separator)], - [BrilligVariable::BrilligArray(result_array)], - ) = (function_arguments, function_results) - { - let message_vector = convert_array_or_vector(brillig_context, message, bb_func); - brillig_context.black_box_op_instruction(BlackBoxOp::PedersenCommitment { - inputs: message_vector.to_heap_vector(), - domain_separator: domain_separator.address, - output: result_array.to_heap_array(), - }); - } else { - unreachable!("ICE: Pedersen expects one array argument, a register for the domain separator, and one array result") - } - } - BlackBoxFunc::PedersenHash => { - if let ( - [message, BrilligVariable::SingleAddr(domain_separator)], - [BrilligVariable::SingleAddr(result)], - ) = (function_arguments, function_results) - { - let message_vector = convert_array_or_vector(brillig_context, message, bb_func); - brillig_context.black_box_op_instruction(BlackBoxOp::PedersenHash { - inputs: message_vector.to_heap_vector(), - domain_separator: domain_separator.address, - output: result.address, - }); - } else { - unreachable!("ICE: Pedersen hash expects one array argument, a register for the domain separator, and one register result") - } - } BlackBoxFunc::SchnorrVerify => { if let ( [BrilligVariable::SingleAddr(public_key_x), BrilligVariable::SingleAddr(public_key_y), BrilligVariable::BrilligArray(signature), message], @@ -424,6 +391,8 @@ pub(crate) fn convert_black_box_call( unreachable!("ICE: AES128Encrypt expects three array arguments, one array result") } } + 
BlackBoxFunc::PedersenCommitment => todo!("Deprecated Blackbox"), + BlackBoxFunc::PedersenHash => todo!("Deprecated Blackbox"), } } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index 9785e073be9..a0bf89fff0d 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -158,20 +158,7 @@ pub(crate) mod tests { ) -> Result { Ok(true) } - fn pedersen_commitment( - &self, - _inputs: &[FieldElement], - _domain_separator: u32, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Ok((2_u128.into(), 3_u128.into())) - } - fn pedersen_hash( - &self, - _inputs: &[FieldElement], - _domain_separator: u32, - ) -> Result { - Ok(6_u128.into()) - } + fn multi_scalar_mul( &self, _points: &[FieldElement], diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index b258905d657..a595584b376 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -347,24 +347,6 @@ impl DebugShow { result ); } - BlackBoxOp::PedersenCommitment { inputs, domain_separator, output } => { - debug_println!( - self.enable_debug_trace, - " PEDERSEN {} {} -> {}", - inputs, - domain_separator, - output - ); - } - BlackBoxOp::PedersenHash { inputs, domain_separator, output } => { - debug_println!( - self.enable_debug_trace, - " PEDERSEN_HASH {} {} -> {}", - inputs, - domain_separator, - output - ); - } BlackBoxOp::SchnorrVerify { public_key_x, public_key_y, @@ -462,6 +444,8 @@ impl DebugShow { output ); } + BlackBoxOp::PedersenCommitment { .. } => todo!("Deprecated Blackbox"), + BlackBoxOp::PedersenHash { .. 
} => todo!("Deprecated Blackbox"), } } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 56b869fbf6b..e09f95508de 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -1215,31 +1215,6 @@ impl AcirContext { ) -> Result, RuntimeError> { // Separate out any arguments that should be constants let (constant_inputs, constant_outputs) = match name { - BlackBoxFunc::PedersenCommitment | BlackBoxFunc::PedersenHash => { - // The last argument of pedersen is the domain separator, which must be a constant - let domain_var = match inputs.pop() { - Some(domain_var) => domain_var.into_var()?, - None => { - return Err(RuntimeError::InternalError(InternalError::MissingArg { - name: "pedersen call".to_string(), - arg: "domain separator".to_string(), - call_stack: self.get_call_stack(), - })) - } - }; - - let domain_constant = match self.vars[&domain_var].as_constant() { - Some(domain_constant) => domain_constant, - None => { - return Err(RuntimeError::InternalError(InternalError::NotAConstant { - name: "domain separator".to_string(), - call_stack: self.get_call_stack(), - })) - } - }; - - (vec![*domain_constant], Vec::new()) - } BlackBoxFunc::Poseidon2Permutation => { // The last argument is the state length, which must be a constant let state_len = match inputs.pop() { diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index 9d271f7cd9c..6a1118de059 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -224,16 +224,6 @@ impl GeneratedAcir { output: outputs[0], } } - BlackBoxFunc::PedersenCommitment => BlackBoxFuncCall::PedersenCommitment { - inputs: inputs[0].clone(), - outputs: (outputs[0], outputs[1]), - domain_separator: constant_inputs[0].to_u128() as u32, - }, - BlackBoxFunc::PedersenHash => BlackBoxFuncCall::PedersenHash { - inputs: inputs[0].clone(), - output: outputs[0], - domain_separator: constant_inputs[0].to_u128() as u32, - }, BlackBoxFunc::EcdsaSecp256k1 => { BlackBoxFuncCall::EcdsaSecp256k1 { // 32 bytes for each public key co-ordinate @@ -371,6 +361,8 @@ impl GeneratedAcir { .expect("Compiler should generate correct size inputs"), outputs: outputs.try_into().expect("Compiler should generate correct size outputs"), }, + BlackBoxFunc::PedersenCommitment => todo!("Deprecated Blackbox"), + BlackBoxFunc::PedersenHash => todo!("Deprecated Blackbox"), }; self.push_opcode(AcirOpcode::BlackBoxFuncCall(black_box_func_call)); @@ -649,9 +641,7 @@ fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option { | BlackBoxFunc::Keccak256 | BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s - | BlackBoxFunc::Blake3 - | BlackBoxFunc::PedersenCommitment - | BlackBoxFunc::PedersenHash => None, + | BlackBoxFunc::Blake3 => None, BlackBoxFunc::Keccakf1600 => Some(25), // The permutation takes a fixed number of inputs, but the inputs length depends on the proving system implementation. 
@@ -687,6 +677,8 @@ fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option { // FromLeBytes takes a variable array of bytes as input BlackBoxFunc::BigIntFromLeBytes => None, + BlackBoxFunc::PedersenCommitment => todo!(), + BlackBoxFunc::PedersenHash => todo!(), } } @@ -709,11 +701,6 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option { BlackBoxFunc::Poseidon2Permutation => None, BlackBoxFunc::Sha256Compression => Some(8), - // Pedersen commitment returns a point - BlackBoxFunc::PedersenCommitment => Some(2), - - // Pedersen hash returns a field - BlackBoxFunc::PedersenHash => Some(1), // Can only apply a range constraint to one // witness at a time. @@ -743,6 +730,8 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option { // AES encryption returns a variable number of outputs BlackBoxFunc::AES128Encrypt => None, + BlackBoxFunc::PedersenCommitment => todo!(), + BlackBoxFunc::PedersenHash => todo!(), } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs b/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs index 68ece87c7c7..a063a7ff268 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs @@ -16,11 +16,12 @@ pub(crate) struct FunctionInserter<'f> { pub(crate) function: &'f mut Function, values: HashMap, + const_arrays: HashMap, ValueId>, } impl<'f> FunctionInserter<'f> { pub(crate) fn new(function: &'f mut Function) -> FunctionInserter<'f> { - Self { function, values: HashMap::default() } + Self { function, values: HashMap::default(), const_arrays: HashMap::default() } } /// Resolves a ValueId to its new, updated value. @@ -34,10 +35,17 @@ impl<'f> FunctionInserter<'f> { super::value::Value::Array { array, typ } => { let array = array.clone(); let typ = typ.clone(); - let new_array = array.iter().map(|id| self.resolve(*id)).collect(); - let new_id = self.function.dfg.make_array(new_array, typ); - self.values.insert(value, new_id); - new_id + let new_array: im::Vector = + array.iter().map(|id| self.resolve(*id)).collect(); + if self.const_arrays.get(&new_array) == Some(&value) { + value + } else { + let new_array_clone = new_array.clone(); + let new_id = self.function.dfg.make_array(new_array, typ); + self.values.insert(value, new_id); + self.const_arrays.insert(new_array_clone, new_id); + new_id + } } _ => value, }, diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index e21deb9ef79..8f881b86e47 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -67,6 +67,7 @@ pub(crate) enum Intrinsic { AsField, AsWitness, IsUnconstrained, + DerivePedersenGenerators, } impl std::fmt::Display for Intrinsic { @@ -92,6 +93,7 @@ impl std::fmt::Display for Intrinsic { Intrinsic::AsField => write!(f, "as_field"), Intrinsic::AsWitness => write!(f, "as_witness"), Intrinsic::IsUnconstrained => write!(f, "is_unconstrained"), + Intrinsic::DerivePedersenGenerators => write!(f, "derive_pedersen_generators"), } } } @@ -120,7 +122,8 @@ impl Intrinsic { | Intrinsic::StrAsBytes | Intrinsic::FromField | Intrinsic::AsField - | Intrinsic::IsUnconstrained => false, + | Intrinsic::IsUnconstrained + | Intrinsic::DerivePedersenGenerators => false, // Some black box functions have side-effects Intrinsic::BlackBox(func) => matches!( @@ -155,6 +158,7 @@ impl Intrinsic { "as_field" => Some(Intrinsic::AsField), "as_witness" => Some(Intrinsic::AsWitness), 
"is_unconstrained" => Some(Intrinsic::IsUnconstrained), + "derive_pedersen_generators" => Some(Intrinsic::DerivePedersenGenerators), other => BlackBoxFunc::lookup(other).map(Intrinsic::BlackBox), } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 74e5653c7ba..a9e3570ba0f 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -2,6 +2,7 @@ use fxhash::FxHashMap as HashMap; use std::{collections::VecDeque, rc::Rc}; use acvm::{acir::AcirField, acir::BlackBoxFunc, BlackBoxResolutionError, FieldElement}; +use bn254_blackbox_solver::derive_generators; use iter_extended::vecmap; use num_bigint::BigUint; @@ -295,6 +296,13 @@ pub(super) fn simplify_call( } Intrinsic::AsWitness => SimplifyResult::None, Intrinsic::IsUnconstrained => SimplifyResult::None, + Intrinsic::DerivePedersenGenerators => { + if let Some(Type::Array(_, len)) = ctrl_typevars.unwrap().first() { + simplify_derive_generators(dfg, arguments, *len as u32) + } else { + unreachable!("Derive Pedersen Generators must return an array"); + } + } } } @@ -468,8 +476,6 @@ fn simplify_black_box_func( BlackBoxFunc::MultiScalarMul | BlackBoxFunc::SchnorrVerify - | BlackBoxFunc::PedersenCommitment - | BlackBoxFunc::PedersenHash | BlackBoxFunc::EmbeddedCurveAdd => { // Currently unsolvable here as we rely on an implementation in the backend. SimplifyResult::None @@ -495,6 +501,8 @@ fn simplify_black_box_func( } BlackBoxFunc::Sha256Compression => SimplifyResult::None, //TODO(Guillaume) BlackBoxFunc::AES128Encrypt => SimplifyResult::None, + BlackBoxFunc::PedersenCommitment => todo!("Deprecated Blackbox"), + BlackBoxFunc::PedersenHash => todo!("Deprecated Blackbox"), } } @@ -626,3 +634,47 @@ fn simplify_signature( _ => SimplifyResult::None, } } + +fn simplify_derive_generators( + dfg: &mut DataFlowGraph, + arguments: &[ValueId], + num_generators: u32, +) -> SimplifyResult { + if arguments.len() == 2 { + let domain_separator_string = dfg.get_array_constant(arguments[0]); + let starting_index = dfg.get_numeric_constant(arguments[1]); + if let (Some(domain_separator_string), Some(starting_index)) = + (domain_separator_string, starting_index) + { + let domain_separator_bytes = domain_separator_string + .0 + .iter() + .map(|&x| dfg.get_numeric_constant(x).unwrap().to_u128() as u8) + .collect::>(); + let generators = derive_generators( + &domain_separator_bytes, + num_generators, + starting_index.try_to_u32().expect("argument is declared as u32"), + ); + let is_infinite = dfg.make_constant(FieldElement::zero(), Type::bool()); + let mut results = Vec::new(); + for gen in generators { + let x_big: BigUint = gen.x.into(); + let x = FieldElement::from_be_bytes_reduce(&x_big.to_bytes_be()); + let y_big: BigUint = gen.y.into(); + let y = FieldElement::from_be_bytes_reduce(&y_big.to_bytes_be()); + results.push(dfg.make_constant(x, Type::field())); + results.push(dfg.make_constant(y, Type::field())); + results.push(is_infinite); + } + let len = results.len(); + let result = + dfg.make_array(results.into(), Type::Array(vec![Type::field()].into(), len)); + SimplifyResult::SimplifiedTo(result) + } else { + SimplifyResult::None + } + } else { + unreachable!("Unexpected number of arguments to derive_generators"); + } +} diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 58f70ba9192..c7ce3aaa155 100644 --- 
a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -1379,28 +1379,28 @@ mod test { fn should_not_merge_incorrectly_to_false() { // Regression test for #1792 // Tests that it does not simplify a true constraint an always-false constraint - // fn main f1 { - // b0(): - // v4 = call pedersen([Field 0], u32 0) - // v5 = array_get v4, index Field 0 - // v6 = cast v5 as u32 - // v8 = mod v6, u32 2 - // v9 = cast v8 as u1 - // v10 = allocate - // store Field 0 at v10 - // jmpif v9 then: b1, else: b2 - // b1(): - // v14 = add v5, Field 1 - // store v14 at v10 - // jmp b3() - // b3(): - // v12 = eq v9, u1 1 - // constrain v12 - // return - // b2(): - // store Field 0 at v10 - // jmp b3() - // } + // acir(inline) fn main f1 { + // b0(v0: [u8; 2]): + // v4 = call keccak256(v0, u8 2) + // v5 = array_get v4, index u8 0 + // v6 = cast v5 as u32 + // v8 = truncate v6 to 1 bits, max_bit_size: 32 + // v9 = cast v8 as u1 + // v10 = allocate + // store u8 0 at v10 + // jmpif v9 then: b2, else: b3 + // b2(): + // v12 = cast v5 as Field + // v13 = add v12, Field 1 + // store v13 at v10 + // jmp b4() + // b4(): + // constrain v9 == u1 1 + // return + // b3(): + // store u8 0 at v10 + // jmp b4() + // } let main_id = Id::test_new(1); let mut builder = FunctionBuilder::new("main".into(), main_id); @@ -1409,20 +1409,18 @@ mod test { let b2 = builder.insert_block(); let b3 = builder.insert_block(); - let element_type = Rc::new(vec![Type::field()]); - let array_type = Type::Array(element_type.clone(), 1); - - let zero = builder.field_constant(0_u128); - let zero_array = builder.array_constant(im::Vector::unit(zero), array_type); - let i_zero = builder.numeric_constant(0_u128, Type::unsigned(32)); - let pedersen = builder - .import_intrinsic_id(Intrinsic::BlackBox(acvm::acir::BlackBoxFunc::PedersenCommitment)); - let v4 = builder.insert_call( - pedersen, - vec![zero_array, i_zero], - vec![Type::Array(element_type, 2)], - )[0]; - let v5 = builder.insert_array_get(v4, zero, Type::field()); + let element_type = Rc::new(vec![Type::unsigned(8)]); + let array_type = Type::Array(element_type.clone(), 2); + let array = builder.add_parameter(array_type); + + let zero = builder.numeric_constant(0_u128, Type::unsigned(8)); + let two = builder.numeric_constant(2_u128, Type::unsigned(8)); + + let keccak = + builder.import_intrinsic_id(Intrinsic::BlackBox(acvm::acir::BlackBoxFunc::Keccak256)); + let v4 = + builder.insert_call(keccak, vec![array, two], vec![Type::Array(element_type, 32)])[0]; + let v5 = builder.insert_array_get(v4, zero, Type::unsigned(8)); let v6 = builder.insert_cast(v5, Type::unsigned(32)); let i_two = builder.numeric_constant(2_u128, Type::unsigned(32)); let v8 = builder.insert_binary(v6, BinaryOp::Mod, i_two); @@ -1435,7 +1433,9 @@ mod test { builder.switch_to_block(b1); let one = builder.field_constant(1_u128); - let v14 = builder.insert_binary(v5, BinaryOp::Add, one); + let v5b = builder.insert_cast(v5, Type::field()); + let v13: Id = builder.insert_binary(v5b, BinaryOp::Add, one); + let v14 = builder.insert_cast(v13, Type::unsigned(8)); builder.insert_store(v10, v14); builder.terminate_with_jmp(b3, vec![]); @@ -1449,8 +1449,9 @@ mod test { builder.insert_constrain(v12, v_true, None); builder.terminate_with_return(vec![]); - let ssa = builder.finish().flatten_cfg(); - let main = ssa.main(); + let ssa = builder.finish(); + let flattened_ssa = ssa.flatten_cfg(); + let main = flattened_ssa.main(); // Now assert that there is not an always-false 
constraint after flattening: let mut constrain_count = 0; diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs index 6db76996747..c9a6b7bf9c3 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ -159,7 +159,8 @@ impl Context { | Intrinsic::AsField | Intrinsic::AsSlice | Intrinsic::AsWitness - | Intrinsic::IsUnconstrained => false, + | Intrinsic::IsUnconstrained + | Intrinsic::DerivePedersenGenerators => false, }, // We must assume that functions contain a side effect as we cannot inspect more deeply. diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs index 6ca7eb74e9d..fd7a1a06fc8 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs @@ -233,6 +233,7 @@ fn slice_capacity_change( | Intrinsic::FromField | Intrinsic::AsField | Intrinsic::AsWitness - | Intrinsic::IsUnconstrained => SizeChange::None, + | Intrinsic::IsUnconstrained + | Intrinsic::DerivePedersenGenerators => SizeChange::None, } } diff --git a/compiler/noirc_frontend/src/elaborator/lints.rs b/compiler/noirc_frontend/src/elaborator/lints.rs index b86912940eb..af6f4cdb42f 100644 --- a/compiler/noirc_frontend/src/elaborator/lints.rs +++ b/compiler/noirc_frontend/src/elaborator/lints.rs @@ -137,7 +137,7 @@ pub(super) fn unconstrained_function_args( function_args .iter() .filter_map(|(typ, _, span)| { - if type_contains_mutable_reference(typ) { + if !typ.is_valid_for_unconstrained_boundary() { Some(TypeCheckError::ConstrainedReferenceToUnconstrained { span: *span }) } else { None @@ -153,17 +153,13 @@ pub(super) fn unconstrained_function_return( ) -> Option { if return_type.contains_slice() { Some(TypeCheckError::UnconstrainedSliceReturnToConstrained { span }) - } else if type_contains_mutable_reference(return_type) { + } else if !return_type.is_valid_for_unconstrained_boundary() { Some(TypeCheckError::UnconstrainedReferenceToConstrained { span }) } else { None } } -fn type_contains_mutable_reference(typ: &Type) -> bool { - matches!(&typ.follow_bindings(), Type::MutableReference(_)) -} - /// Only entrypoint functions require a `pub` visibility modifier applied to their return types. /// /// Application of `pub` to other functions is not meaningful and is a mistake. 
diff --git a/compiler/noirc_frontend/src/elaborator/mod.rs b/compiler/noirc_frontend/src/elaborator/mod.rs index 91d6ba71927..dc99ceae2f0 100644 --- a/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/compiler/noirc_frontend/src/elaborator/mod.rs @@ -1302,9 +1302,12 @@ impl<'context> Elaborator<'context> { for (local_module, id, func) in &mut function_set.functions { self.local_module = *local_module; + let was_in_contract = self.in_contract; + self.in_contract = self.module_id().module(self.def_maps).is_contract; self.recover_generics(|this| { this.define_function_meta(func, *id, false); }); + self.in_contract = was_in_contract; } } diff --git a/compiler/noirc_frontend/src/elaborator/patterns.rs b/compiler/noirc_frontend/src/elaborator/patterns.rs index e337726b579..4d07009e064 100644 --- a/compiler/noirc_frontend/src/elaborator/patterns.rs +++ b/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -11,11 +11,10 @@ use crate::{ }, hir_def::{ expr::{HirIdent, ImplKind}, - function::FunctionBody, stmt::HirPattern, }, macros_api::{HirExpression, Ident, Path, Pattern}, - node_interner::{DefinitionId, DefinitionKind, DependencyId, ExprId, GlobalId, TraitImplKind}, + node_interner::{DefinitionId, DefinitionKind, ExprId, GlobalId, TraitImplKind}, Shared, StructType, Type, TypeBindings, }; @@ -415,16 +414,6 @@ impl<'context> Elaborator<'context> { match self.interner.definition(hir_ident.id).kind { DefinitionKind::Function(id) => { if let Some(current_item) = self.current_item { - // Lazily evaluate functions found within globals if necessary. - // Otherwise if we later attempt to evaluate the global it will - // see an empty function body. - if matches!(current_item, DependencyId::Global(_)) { - let meta = self.interner.function_meta(&id); - - if matches!(&meta.function_body, FunctionBody::Unresolved(..)) { - self.elaborate_function(id); - } - } self.interner.add_function_dependency(current_item, id); } } diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index 46e8db8f5ff..a1302fd15ff 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -340,7 +340,7 @@ impl<'interner> TypeChecker<'interner> { // Check that we are not passing a mutable reference from a constrained runtime to an unconstrained runtime if is_current_func_constrained && is_unconstrained_call { for (typ, _, _) in args.iter() { - if matches!(&typ.follow_bindings(), Type::MutableReference(_)) { + if !typ.is_valid_for_unconstrained_boundary() { self.errors.push(TypeCheckError::ConstrainedReferenceToUnconstrained { span }); } } diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index ce36a22cf88..772558ec31a 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -788,6 +788,48 @@ impl Type { } } + /// Returns true if a value of this type can safely pass between constrained and + /// unconstrained functions (and vice-versa). + pub(crate) fn is_valid_for_unconstrained_boundary(&self) -> bool { + match self { + Type::FieldElement + | Type::Integer(_, _) + | Type::Bool + | Type::Unit + | Type::Constant(_) + | Type::Slice(_) + | Type::TypeVariable(_, _) + | Type::NamedGeneric(_, _) + | Type::Function(_, _, _) + | Type::FmtString(_, _) + | Type::Error => true, + + Type::MutableReference(_) + | Type::Forall(_, _) + | Type::Quoted(_) + | Type::TraitAsType(..) 
=> false, + + Type::Alias(alias, generics) => { + let alias = alias.borrow(); + alias.get_type(generics).is_valid_for_unconstrained_boundary() + } + + Type::Array(length, element) => { + length.is_valid_for_unconstrained_boundary() + && element.is_valid_for_unconstrained_boundary() + } + Type::String(length) => length.is_valid_for_unconstrained_boundary(), + Type::Tuple(elements) => { + elements.iter().all(|elem| elem.is_valid_for_unconstrained_boundary()) + } + Type::Struct(definition, generics) => definition + .borrow() + .get_fields(generics) + .into_iter() + .all(|(_, field)| field.is_valid_for_unconstrained_boundary()), + } + } + /// Returns the number of `Forall`-quantified type variables on this type. /// Returns 0 if this is not a Type::Forall pub fn generic_count(&self) -> usize { diff --git a/compiler/noirc_frontend/src/monomorphization/errors.rs b/compiler/noirc_frontend/src/monomorphization/errors.rs index 2db570540d6..df61c138c02 100644 --- a/compiler/noirc_frontend/src/monomorphization/errors.rs +++ b/compiler/noirc_frontend/src/monomorphization/errors.rs @@ -6,6 +6,7 @@ use crate::hir::comptime::InterpreterError; pub enum MonomorphizationError { UnknownArrayLength { location: Location }, TypeAnnotationsNeeded { location: Location }, + InternalError { message: &'static str, location: Location }, InterpreterError(InterpreterError), } @@ -13,6 +14,7 @@ impl MonomorphizationError { fn location(&self) -> Location { match self { MonomorphizationError::UnknownArrayLength { location } + | MonomorphizationError::InternalError { location, .. } | MonomorphizationError::TypeAnnotationsNeeded { location } => *location, MonomorphizationError::InterpreterError(error) => error.get_location(), } @@ -36,6 +38,7 @@ impl MonomorphizationError { } MonomorphizationError::TypeAnnotationsNeeded { .. } => "Type annotations needed", MonomorphizationError::InterpreterError(error) => return (&error).into(), + MonomorphizationError::InternalError { message, .. } => message, }; let location = self.location(); diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 821ae7e7c4c..a95fc0e7ace 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -889,7 +889,11 @@ impl<'interner> Monomorphizer<'interner> { DefinitionKind::Local(_) => match self.lookup_captured_expr(ident.id) { Some(expr) => expr, None => { - let ident = self.local_ident(&ident)?.unwrap(); + let Some(ident) = self.local_ident(&ident)? 
else { + let location = self.interner.id_location(expr_id); + let message = "ICE: Variable not found during monomorphization"; + return Err(MonomorphizationError::InternalError { location, message }); + }; ast::Expression::Ident(ident) } }, @@ -1271,19 +1275,19 @@ impl<'interner> Monomorphizer<'interner> { } "modulus_le_bits" => { let bits = FieldElement::modulus().to_radix_le(2); - Some(self.modulus_array_literal(bits, IntegerBitSize::One, location)) + Some(self.modulus_slice_literal(bits, IntegerBitSize::One, location)) } "modulus_be_bits" => { let bits = FieldElement::modulus().to_radix_be(2); - Some(self.modulus_array_literal(bits, IntegerBitSize::One, location)) + Some(self.modulus_slice_literal(bits, IntegerBitSize::One, location)) } "modulus_be_bytes" => { let bytes = FieldElement::modulus().to_bytes_be(); - Some(self.modulus_array_literal(bytes, IntegerBitSize::Eight, location)) + Some(self.modulus_slice_literal(bytes, IntegerBitSize::Eight, location)) } "modulus_le_bytes" => { let bytes = FieldElement::modulus().to_bytes_le(); - Some(self.modulus_array_literal(bytes, IntegerBitSize::Eight, location)) + Some(self.modulus_slice_literal(bytes, IntegerBitSize::Eight, location)) } _ => None, }; @@ -1292,7 +1296,7 @@ impl<'interner> Monomorphizer<'interner> { None } - fn modulus_array_literal( + fn modulus_slice_literal( &self, bytes: Vec, arr_elem_bits: IntegerBitSize, @@ -1306,10 +1310,9 @@ impl<'interner> Monomorphizer<'interner> { Expression::Literal(Literal::Integer((byte as u128).into(), int_type.clone(), location)) }); - let typ = Type::Array(bytes_as_expr.len() as u32, Box::new(int_type)); - + let typ = Type::Slice(Box::new(int_type)); let arr_literal = ArrayLiteral { typ, contents: bytes_as_expr }; - Expression::Literal(Literal::Array(arr_literal)) + Expression::Literal(Literal::Slice(arr_literal)) } fn queue_function( diff --git a/noir_stdlib/src/embedded_curve_ops.nr b/noir_stdlib/src/embedded_curve_ops.nr index cd8c421e136..8e768b97479 100644 --- a/noir_stdlib/src/embedded_curve_ops.nr +++ b/noir_stdlib/src/embedded_curve_ops.nr @@ -52,6 +52,14 @@ struct EmbeddedCurveScalar { hi: Field, } +impl EmbeddedCurveScalar { + #[field(bn254)] + fn from_field(scalar: Field) -> EmbeddedCurveScalar { + let (a,b) = crate::field::bn254::decompose(scalar); + EmbeddedCurveScalar { lo: a, hi: b } + } +} + // Computes a multi scalar multiplication over the embedded curve. // For bn254, We have Grumpkin and Baby JubJub. // For bls12-381, we have JubJub and Bandersnatch. 
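The new `EmbeddedCurveScalar::from_field` above leans on `std::field::bn254::decompose` to split a full-width field element into the `lo`/`hi` limbs that the embedded-curve MSM consumes. Below is a minimal standalone sketch of that split, assuming `decompose` returns the low 128 bits and the remaining high bits; the helper is illustrative only and is not the stdlib or solver implementation.

```rust
// Illustrative only: split a wide scalar into 128-bit `lo`/`hi` limbs,
// mirroring the (lo, hi) pair stored in EmbeddedCurveScalar.
// Assumes `decompose` performs a low-128-bit / high-bits split.
// (Uses the num-bigint crate.)
use num_bigint::BigUint;

fn decompose_sketch(scalar: &BigUint) -> (BigUint, BigUint) {
    let mask = (BigUint::from(1u8) << 128u32) - BigUint::from(1u8);
    let lo = scalar & &mask;   // low 128 bits
    let hi = scalar >> 128u32; // everything above bit 127
    (lo, hi)
}

fn main() {
    let scalar = BigUint::parse_bytes(b"123456789abcdef0123456789abcdef0ff", 16).unwrap();
    let (lo, hi) = decompose_sketch(&scalar);
    // Recomposition is lossless: hi * 2^128 + lo == scalar.
    assert_eq!((&hi << 128u32) | &lo, scalar);
    println!("lo = {lo:x}, hi = {hi:x}");
}
```

In the stdlib change that follows, each such `(lo, hi)` pair is fed to `multi_scalar_mul` as one scalar.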
diff --git a/noir_stdlib/src/hash.nr b/noir_stdlib/src/hash.nr index 6c295d127ab..b72c1ecba8f 100644 --- a/noir_stdlib/src/hash.nr +++ b/noir_stdlib/src/hash.nr @@ -5,7 +5,7 @@ mod poseidon2; use crate::default::Default; use crate::uint128::U128; use crate::sha256::{digest, sha256_var}; -use crate::embedded_curve_ops::EmbeddedCurvePoint; +use crate::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul}; #[foreign(sha256)] // docs:start:sha256 @@ -28,15 +28,22 @@ pub fn blake3(input: [u8; N]) -> [u8; 32] // docs:start:pedersen_commitment pub fn pedersen_commitment(input: [Field; N]) -> EmbeddedCurvePoint { // docs:end:pedersen_commitment - pedersen_commitment_with_separator(input, 0) + let value = pedersen_commitment_with_separator(input, 0); + if (value.x == 0) & (value.y == 0) { + EmbeddedCurvePoint { x: 0, y: 0, is_infinite: true } + } else { + EmbeddedCurvePoint { x: value.x, y: value.y, is_infinite: false } + } } -#[foreign(pedersen_commitment)] -pub fn __pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> [Field; 2] {} - pub fn pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> EmbeddedCurvePoint { - let values = __pedersen_commitment_with_separator(input, separator); - EmbeddedCurvePoint { x: values[0], y: values[1], is_infinite: false } + let mut points = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N]; + for i in 0..N { + points[i] = EmbeddedCurveScalar::from_field(input[i]); + } + let generators = derive_generators("DEFAULT_DOMAIN_SEPARATOR".as_bytes(), separator); + let values = multi_scalar_mul(generators, points); + EmbeddedCurvePoint { x: values[0], y: values[1], is_infinite: values[2] as bool } } // docs:start:pedersen_hash @@ -46,8 +53,25 @@ pub fn pedersen_hash(input: [Field; N]) -> Field pedersen_hash_with_separator(input, 0) } -#[foreign(pedersen_hash)] -pub fn pedersen_hash_with_separator(input: [Field; N], separator: u32) -> Field {} +#[field(bn254)] +fn derive_generators(domain_separator_bytes: [u8; M], starting_index: u32) -> [EmbeddedCurvePoint; N] { + crate::assert_constant(domain_separator_bytes); + crate::assert_constant(starting_index); + __derive_generators(domain_separator_bytes, starting_index) +} + +#[builtin(derive_pedersen_generators)] +#[field(bn254)] +fn __derive_generators(domain_separator_bytes: [u8; M], starting_index: u32) -> [EmbeddedCurvePoint; N] {} + +pub fn pedersen_hash_with_separator(input: [Field; N], separator: u32) -> Field { + let v1 = pedersen_commitment(input); + let length_generator :[EmbeddedCurvePoint;1] = derive_generators("pedersen_hash_length".as_bytes(), separator); + multi_scalar_mul( + [length_generator[0], v1], + [EmbeddedCurveScalar { lo: N as Field, hi: 0 }, EmbeddedCurveScalar { lo: 1, hi: 0 }] + )[0] +} pub fn hash_to_field(inputs: [Field]) -> Field { let mut sum = 0; diff --git a/test_programs/compile_failure/regression_5008/Nargo.toml b/test_programs/compile_failure/regression_5008/Nargo.toml new file mode 100644 index 00000000000..920c00660cf --- /dev/null +++ b/test_programs/compile_failure/regression_5008/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_5008" +type = "bin" +authors = [""] +compiler_version = ">=0.28.0" + +[dependencies] diff --git a/test_programs/compile_failure/regression_5008/src/main.nr b/test_programs/compile_failure/regression_5008/src/main.nr new file mode 100644 index 00000000000..6d9645ee6eb --- /dev/null +++ b/test_programs/compile_failure/regression_5008/src/main.nr @@ -0,0 +1,17 @@ +struct Bar { + value: Field, +} + 
+struct Foo{ + bar: &mut Bar, +} + +impl Foo { + unconstrained fn crash_fn(self) {} +} + +fn main() { + let foo = Foo { bar: &mut Bar { value: 0 } }; + + foo.crash_fn(); +} diff --git a/test_programs/compile_success_contract/recursive_method/Nargo.toml b/test_programs/compile_success_contract/recursive_method/Nargo.toml new file mode 100644 index 00000000000..8142e5b3278 --- /dev/null +++ b/test_programs/compile_success_contract/recursive_method/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "recursive_method" +type = "contract" +authors = [""] + +[dependencies] diff --git a/test_programs/compile_success_contract/recursive_method/src/main.nr b/test_programs/compile_success_contract/recursive_method/src/main.nr new file mode 100644 index 00000000000..6fd4bf3338d --- /dev/null +++ b/test_programs/compile_success_contract/recursive_method/src/main.nr @@ -0,0 +1,6 @@ +contract Foo { + #[recursive] + fn contract_entrypoint() -> pub Field { + 1 + } +} diff --git a/test_programs/compile_success_empty/intrinsic_die/src/main.nr b/test_programs/compile_success_empty/intrinsic_die/src/main.nr index c6e269c155d..17aaf02c283 100644 --- a/test_programs/compile_success_empty/intrinsic_die/src/main.nr +++ b/test_programs/compile_success_empty/intrinsic_die/src/main.nr @@ -1,6 +1,5 @@ // This test checks that we perform dead-instruction-elimination on intrinsic functions. fn main(x: Field) { - let hash = std::hash::pedersen_commitment([x]); let g1_x = 0x0000000000000000000000000000000000000000000000000000000000000001; let g1_y = 0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c; let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: g1_x, y: g1_y, is_infinite: false }; diff --git a/test_programs/execution_success/modulus/src/main.nr b/test_programs/execution_success/modulus/src/main.nr index c7d6a2e2c7d..1627cc0dba2 100644 --- a/test_programs/execution_success/modulus/src/main.nr +++ b/test_programs/execution_success/modulus/src/main.nr @@ -3,6 +3,11 @@ fn main(bn254_modulus_be_bytes: [u8; 32], bn254_modulus_be_bits: [u1; 254]) { // NOTE: The constraints used in this circuit will only work when testing nargo with the plonk bn254 backend assert(modulus_size == 254); + assert_reverse( + std::field::modulus_be_bytes(), + std::field::modulus_le_bytes() + ); + let modulus_be_byte_array = std::field::modulus_be_bytes(); for i in 0..32 { assert(modulus_be_byte_array[i] == bn254_modulus_be_bytes[i]); @@ -21,3 +26,9 @@ fn main(bn254_modulus_be_bytes: [u8; 32], bn254_modulus_be_bits: [u1; 254]) { assert(modulus_le_bits[i] == bn254_modulus_be_bits[253 - i]); } } + +fn assert_reverse(forwards: [u8], backwards: [u8]) { + for i in 0..32 { + assert_eq(forwards[i], backwards[31 - i]); + } +} diff --git a/test_programs/noir_test_success/fuzzer_checks/Nargo.toml b/test_programs/noir_test_success/fuzzer_checks/Nargo.toml new file mode 100644 index 00000000000..cd09d0d344d --- /dev/null +++ b/test_programs/noir_test_success/fuzzer_checks/Nargo.toml @@ -0,0 +1,5 @@ +[package] +name = "fuzzer_checks" +type = "bin" +authors = [""] +[dependencies] diff --git a/test_programs/noir_test_success/fuzzer_checks/src/main.nr b/test_programs/noir_test_success/fuzzer_checks/src/main.nr new file mode 100644 index 00000000000..2b928db092e --- /dev/null +++ b/test_programs/noir_test_success/fuzzer_checks/src/main.nr @@ -0,0 +1,6 @@ + +#[test(should_fail_with = "42 is not allowed")] +fn finds_magic_value(x: u32) { + let x = x as u64; + assert(2 * x != 42, "42 is not allowed"); +} diff --git 
a/test_programs/noir_test_success/regression_4561/src/main.nr b/test_programs/noir_test_success/regression_4561/src/main.nr index 70c447b49af..ad40941ff51 100644 --- a/test_programs/noir_test_success/regression_4561/src/main.nr +++ b/test_programs/noir_test_success/regression_4561/src/main.nr @@ -42,3 +42,37 @@ fn two_nested_return() { OracleMock::mock("two_nested_return").returns((0, [1, 2, 3, 4, 5, 6], 7, [1, 2, 3, 4, 5, 6])); assert_eq(two_nested_return_unconstrained(), (0, [[1, 2, 3], [4, 5, 6]], 7, [[1, 2, 3], [4, 5, 6]])); } + +#[oracle(foo_return)] +unconstrained fn foo_return() -> (Field, TReturn, TestTypeFoo) {} +unconstrained fn foo_return_unconstrained() -> (Field, TReturn, TestTypeFoo) { + foo_return() +} + +struct TestTypeFoo { + a: Field, + b: [[[Field; 3]; 4]; 2], + c: [TReturnElem; 2], + d: TReturnElem, +} + +#[test] +fn complexe_struct_return() { + OracleMock::mock("foo_return").returns( + ( + 0, [1, 2, 3, 4, 5, 6], 7, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], [1, 2, 3, 4, 5, 6] + ) + ); + let foo_x = foo_return_unconstrained(); + assert_eq((foo_x.0, foo_x.1), (0, [[1, 2, 3], [4, 5, 6]])); + assert_eq(foo_x.2.a, 7); + assert_eq( + foo_x.2.b, [ + [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]], [[13, 14, 15], [16, 17, 18], [19, 20, 21], [22, 23, 24]] + ] + ); + let a: TReturnElem = [1, 2, 3]; + let b: TReturnElem = [4, 5, 6]; + assert_eq(foo_x.2.c, [a, b]); + assert_eq(foo_x.2.d, a); +} diff --git a/tooling/fuzzer/src/dictionary/mod.rs b/tooling/fuzzer/src/dictionary/mod.rs new file mode 100644 index 00000000000..bf2ab87be29 --- /dev/null +++ b/tooling/fuzzer/src/dictionary/mod.rs @@ -0,0 +1,124 @@ +//! This module defines how to build a dictionary of values which are likely to correspond +//! to significant inputs during fuzzing by inspecting the [Program] being fuzzed. +//! +//! This dictionary can be fed into the fuzzer's [strategy][proptest::strategy::Strategy] in order to bias it towards +//! generating these values to ensure they get proper coverage. +use std::collections::HashSet; + +use acvm::{ + acir::{ + circuit::{ + brillig::{BrilligBytecode, BrilligInputs}, + directives::Directive, + opcodes::{BlackBoxFuncCall, FunctionInput}, + Circuit, Opcode, Program, + }, + native_types::Expression, + }, + brillig_vm::brillig::Opcode as BrilligOpcode, + AcirField, +}; + +/// Constructs a [HashSet] of values pulled from a [Program] which are likely to correspond +/// to significant inputs during fuzzing.
+pub(super) fn build_dictionary_from_program(program: &Program) -> HashSet { + let constrained_dictionaries = program.functions.iter().map(build_dictionary_from_circuit); + let unconstrained_dictionaries = + program.unconstrained_functions.iter().map(build_dictionary_from_unconstrained_function); + let dictionaries = constrained_dictionaries.chain(unconstrained_dictionaries); + + let mut constants: HashSet = HashSet::new(); + for dictionary in dictionaries { + constants.extend(dictionary); + } + constants +} + +fn build_dictionary_from_circuit(circuit: &Circuit) -> HashSet { + let mut constants: HashSet = HashSet::new(); + + fn insert_expr(dictionary: &mut HashSet, expr: &Expression) { + let quad_coefficients = expr.mul_terms.iter().map(|(k, _, _)| *k); + let linear_coefficients = expr.linear_combinations.iter().map(|(k, _)| *k); + let coefficients = linear_coefficients.chain(quad_coefficients); + + dictionary.extend(coefficients.clone()); + dictionary.insert(expr.q_c); + + // We divide the constant term by any coefficients in the expression to aid solving constraints such as `2 * x - 4 == 0`. + let scaled_constants = coefficients.map(|coefficient| expr.q_c / coefficient); + dictionary.extend(scaled_constants); + } + + fn insert_array_len(dictionary: &mut HashSet, array: &[T]) { + let array_length = array.len() as u128; + dictionary.insert(F::from(array_length)); + dictionary.insert(F::from(array_length - 1)); + } + + for opcode in &circuit.opcodes { + match opcode { + Opcode::AssertZero(expr) + | Opcode::Call { predicate: Some(expr), .. } + | Opcode::MemoryOp { predicate: Some(expr), .. } + | Opcode::Directive(Directive::ToLeRadix { a: expr, .. }) => { + insert_expr(&mut constants, expr) + } + + Opcode::MemoryInit { init, .. } => insert_array_len(&mut constants, init), + + Opcode::BrilligCall { inputs, predicate, .. } => { + for input in inputs { + match input { + BrilligInputs::Single(expr) => insert_expr(&mut constants, expr), + BrilligInputs::Array(exprs) => { + exprs.iter().for_each(|expr| insert_expr(&mut constants, expr)); + insert_array_len(&mut constants, exprs); + } + BrilligInputs::MemoryArray(_) => (), + } + } + if let Some(predicate) = predicate { + insert_expr(&mut constants, predicate) + } + } + + Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { + input: FunctionInput { num_bits, .. }, + }) => { + let field = 1u128.wrapping_shl(*num_bits); + constants.insert(F::from(field)); + constants.insert(F::from(field - 1)); + } + _ => (), + } + } + + constants +} + +fn build_dictionary_from_unconstrained_function( + function: &BrilligBytecode, +) -> HashSet { + let mut constants: HashSet = HashSet::new(); + + for opcode in &function.bytecode { + match opcode { + BrilligOpcode::Cast { bit_size, .. } => { + let field = 1u128.wrapping_shl(*bit_size); + constants.insert(F::from(field)); + constants.insert(F::from(field - 1)); + } + BrilligOpcode::Const { bit_size, value, .. } => { + constants.insert(*value); + + let field = 1u128.wrapping_shl(*bit_size); + constants.insert(F::from(field)); + constants.insert(F::from(field - 1)); + } + _ => (), + } + } + + constants +} diff --git a/tooling/fuzzer/src/lib.rs b/tooling/fuzzer/src/lib.rs index 42dccc1dc83..28d7353f35a 100644 --- a/tooling/fuzzer/src/lib.rs +++ b/tooling/fuzzer/src/lib.rs @@ -4,9 +4,11 @@ //! Code is used under the MIT license. 
use acvm::{blackbox_solver::StubbedBlackBoxSolver, FieldElement}; +use dictionary::build_dictionary_from_program; use noirc_abi::InputMap; use proptest::test_runner::{TestCaseError, TestError, TestRunner}; +mod dictionary; mod strategies; mod types; @@ -37,7 +39,8 @@ impl FuzzedExecutor { /// Fuzzes the provided program. pub fn fuzz(&self) -> FuzzTestResult { - let strategy = strategies::arb_input_map(&self.program.abi); + let dictionary = build_dictionary_from_program(&self.program.bytecode); + let strategy = strategies::arb_input_map(&self.program.abi, dictionary); let run_result: Result<(), TestError> = self.runner.clone().run(&strategy, |input_map| { diff --git a/tooling/fuzzer/src/strategies/mod.rs b/tooling/fuzzer/src/strategies/mod.rs index f5b03953ba8..46187a28d5b 100644 --- a/tooling/fuzzer/src/strategies/mod.rs +++ b/tooling/fuzzer/src/strategies/mod.rs @@ -5,28 +5,22 @@ use proptest::prelude::*; use acvm::{AcirField, FieldElement}; use noirc_abi::{input_parser::InputValue, Abi, AbiType, InputMap, Sign}; -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashSet}; use uint::UintStrategy; mod int; mod uint; -proptest::prop_compose! { - pub(super) fn arb_field_from_integer(bit_size: u32)(value: u128)-> FieldElement { - let width = (bit_size % 128).clamp(1, 127); - let max_value = 2u128.pow(width) - 1; - let value = value % max_value; - FieldElement::from(value) - } -} - -pub(super) fn arb_value_from_abi_type(abi_type: &AbiType) -> SBoxedStrategy { +pub(super) fn arb_value_from_abi_type( + abi_type: &AbiType, + dictionary: HashSet, +) -> SBoxedStrategy { match abi_type { AbiType::Field => vec(any::(), 32) .prop_map(|bytes| InputValue::Field(FieldElement::from_be_bytes_reduce(&bytes))) .sboxed(), AbiType::Integer { width, sign } if sign == &Sign::Unsigned => { - UintStrategy::new(*width as usize) + UintStrategy::new(*width as usize, dictionary) .prop_map(|uint| InputValue::Field(uint.into())) .sboxed() } @@ -55,7 +49,7 @@ pub(super) fn arb_value_from_abi_type(abi_type: &AbiType) -> SBoxedStrategy { let length = *length as usize; - let elements = vec(arb_value_from_abi_type(typ), length..=length); + let elements = vec(arb_value_from_abi_type(typ, dictionary), length..=length); elements.prop_map(InputValue::Vec).sboxed() } @@ -63,7 +57,9 @@ pub(super) fn arb_value_from_abi_type(abi_type: &AbiType) -> SBoxedStrategy { let fields: Vec> = fields .iter() - .map(|(name, typ)| (Just(name.clone()), arb_value_from_abi_type(typ)).sboxed()) + .map(|(name, typ)| { + (Just(name.clone()), arb_value_from_abi_type(typ, dictionary.clone())).sboxed() + }) .collect(); fields @@ -75,17 +71,23 @@ pub(super) fn arb_value_from_abi_type(abi_type: &AbiType) -> SBoxedStrategy { - let fields: Vec<_> = fields.iter().map(arb_value_from_abi_type).collect(); + let fields: Vec<_> = + fields.iter().map(|typ| arb_value_from_abi_type(typ, dictionary.clone())).collect(); fields.prop_map(InputValue::Vec).sboxed() } } } -pub(super) fn arb_input_map(abi: &Abi) -> BoxedStrategy { +pub(super) fn arb_input_map( + abi: &Abi, + dictionary: HashSet, +) -> BoxedStrategy { let values: Vec<_> = abi .parameters .iter() - .map(|param| (Just(param.name.clone()), arb_value_from_abi_type(¶m.typ))) + .map(|param| { + (Just(param.name.clone()), arb_value_from_abi_type(¶m.typ, dictionary.clone())) + }) .collect(); values diff --git a/tooling/fuzzer/src/strategies/uint.rs b/tooling/fuzzer/src/strategies/uint.rs index 5021e832b97..94610dbc829 100644 --- a/tooling/fuzzer/src/strategies/uint.rs +++ 
b/tooling/fuzzer/src/strategies/uint.rs @@ -1,3 +1,6 @@ +use std::collections::HashSet; + +use acvm::{AcirField, FieldElement}; use proptest::{ strategy::{NewTree, Strategy}, test_runner::TestRunner, @@ -13,9 +16,12 @@ use rand::Rng; pub struct UintStrategy { /// Bit size of uint (e.g. 128) bits: usize, - + /// A set of fixture values to bias generation towards + fixtures: Vec, /// The weight for edge cases (+/- 3 around 0 and max possible value) edge_weight: usize, + /// The weight for fixtures + fixtures_weight: usize, /// The weight for purely random values random_weight: usize, } @@ -24,8 +30,15 @@ impl UintStrategy { /// Create a new strategy. /// # Arguments /// * `bits` - Size of uint in bits - pub fn new(bits: usize) -> Self { - Self { bits, edge_weight: 10usize, random_weight: 50usize } + /// * `fixtures` - Set of `FieldElements` representing values which the fuzzer weights towards testing. + pub fn new(bits: usize, fixtures: HashSet) -> Self { + Self { + bits, + fixtures: fixtures.into_iter().collect(), + edge_weight: 10usize, + fixtures_weight: 40usize, + random_weight: 50usize, + } } fn generate_edge_tree(&self, runner: &mut TestRunner) -> NewTree { @@ -37,6 +50,22 @@ impl UintStrategy { Ok(proptest::num::u128::BinarySearch::new(start)) } + fn generate_fixtures_tree(&self, runner: &mut TestRunner) -> NewTree { + // Fall back to random cases if there are no fixtures. + if self.fixtures.is_empty() { + return self.generate_random_tree(runner); + } + + // Generate value tree from fixture. + let fixture = &self.fixtures[runner.rng().gen_range(0..self.fixtures.len())]; + if fixture.num_bits() <= self.bits as u32 { + return Ok(proptest::num::u128::BinarySearch::new(fixture.to_u128())); + } + + // If the fixture doesn't fit within the uint's bit size, generate a random value. + self.generate_random_tree(runner) + } + fn generate_random_tree(&self, runner: &mut TestRunner) -> NewTree { let rng = runner.rng(); let start = rng.gen_range(0..=self.type_max()); @@ -57,11 +86,12 @@ impl Strategy for UintStrategy { type Tree = proptest::num::u128::BinarySearch; type Value = u128; fn new_tree(&self, runner: &mut TestRunner) -> NewTree { - let total_weight = self.random_weight + self.edge_weight; + let total_weight = self.random_weight + self.fixtures_weight + self.edge_weight; let bias = runner.rng().gen_range(0..total_weight); - // randomly select one of 2 strategies + // randomly select one of 3 strategies match bias { x if x < self.edge_weight => self.generate_edge_tree(runner), + x if x < self.edge_weight + self.fixtures_weight => self.generate_fixtures_tree(runner), _ => self.generate_random_tree(runner), } } diff --git a/tooling/lsp/src/solver.rs b/tooling/lsp/src/solver.rs index 0fcac73b905..3c2d7499880 100644 --- a/tooling/lsp/src/solver.rs +++ b/tooling/lsp/src/solver.rs @@ -16,14 +16,6 @@ impl BlackBoxFunctionSolver for WrapperSolver { self.0.schnorr_verify(public_key_x, public_key_y, signature, message) } - fn pedersen_commitment( - &self, - inputs: &[acvm::FieldElement], - domain_separator: u32, - ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { - self.0.pedersen_commitment(inputs, domain_separator) - } - fn multi_scalar_mul( &self, points: &[acvm::FieldElement], @@ -36,14 +28,6 @@ impl BlackBoxFunctionSolver for WrapperSolver { self.0.multi_scalar_mul(points, scalars_lo, scalars_hi) } - fn pedersen_hash( - &self, - inputs: &[acvm::FieldElement], - domain_separator: u32, - ) -> Result { - self.0.pedersen_hash(inputs, domain_separator) - } - fn ec_add( &self, input1_x: &acvm::FieldElement, diff
--git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index 43c277ba03e..f2da161267d 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -64,392 +64,291 @@ const IGNORED_BRILLIG_TESTS: [&str; 11] = [ const IGNORED_NEW_FEATURE_TESTS: [&str; 3] = ["macros", "wildcard_type", "type_definition_annotation"]; -fn generate_execution_success_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "execution_success"; +fn read_test_cases( + test_data_dir: &Path, + test_sub_dir: &str, +) -> impl Iterator { let test_data_dir = test_data_dir.join(test_sub_dir); - let test_case_dirs = fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); - for test_dir in test_case_dirs { + test_case_dirs.into_iter().map(|dir| { let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); + dir.file_name().into_string().expect("Directory can't be converted to string"); if test_name.contains('-') { panic!( "Invalid test directory: {test_name}. Cannot include `-`, please convert to `_`" ); - }; - let test_dir = &test_dir.path(); - - let brillig_ignored = - if IGNORED_BRILLIG_TESTS.contains(&test_name.as_str()) { "\n#[ignore]" } else { "" }; - let new_features_ignored = if IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { - "\n#[ignore]" - } else { - "" - }; - - write!( - test_file, - r#" -#[test]{new_features_ignored} -fn execution_success_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute").arg("--force").arg("--use-legacy"); - - cmd.assert().success(); -}} + } + (test_name, dir.path()) + }) +} +fn generate_test_case( + test_file: &mut File, + test_type: &str, + test_name: &str, + test_dir: &std::path::Display, + test_content: &str, +) { + write!( + test_file, + r#" #[test] -fn execution_success_{test_name}() {{ +fn {test_type}_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute").arg("--force"); - - cmd.assert().success(); + let mut nargo = Command::cargo_bin("nargo").unwrap(); + nargo.arg("--program-dir").arg(test_program_dir); + {test_content} }} +"# + ) + .expect("Could not write templated test file."); +} -#[test]{brillig_ignored} -fn execution_success_{test_name}_brillig() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute").arg("--force").arg("--force-brillig"); +fn generate_execution_success_tests(test_file: &mut File, test_data_dir: &Path) { + let test_type = "execution_success"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); - cmd.assert().success(); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + generate_test_case( + test_file, + test_type, + &test_name, + &test_dir, + r#" + nargo.arg("execute").arg("--force"); + + nargo.assert().success();"#, + ); + + if !IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + r#" + nargo.arg("execute").arg("--force").arg("--use-legacy"); + + nargo.assert().success();"#, + ); + } + + if 
!IGNORED_BRILLIG_TESTS.contains(&test_name.as_str()) { + generate_test_case( + test_file, + test_type, + &format!("{test_name}_brillig"), + &test_dir, + r#" + nargo.arg("execute").arg("--force").arg("--force-brillig"); + + nargo.assert().success();"#, + ); + } } } fn generate_execution_failure_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "execution_failure"; - let test_data_dir = test_data_dir.join(test_sub_dir); - - let test_case_dirs = - fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); - - for test_dir in test_case_dirs { - let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); - if test_name.contains('-') { - panic!( - "Invalid test directory: {test_name}. Cannot include `-`, please convert to `_`" - ); - }; - let test_dir = &test_dir.path(); + let test_type = "execution_failure"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); - write!( + generate_test_case( test_file, + test_type, + &test_name, + &test_dir, r#" -#[test] -fn execution_failure_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute").arg("--force").arg("--use-legacy"); - - cmd.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not()); -}} - -#[test] -fn execution_failure_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute").arg("--force"); + nargo.arg("execute").arg("--force"); + + nargo.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not());"#, + ); - cmd.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not()); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + r#" + nargo.arg("execute").arg("--force").arg("--use-legacy"); + + nargo.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not());"#, + ); } } fn generate_noir_test_success_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "noir_test_success"; - let test_data_dir = test_data_dir.join(test_sub_dir); - - let test_case_dirs = - fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); + let test_type = "noir_test_success"; + let test_cases = read_test_cases(test_data_dir, "noir_test_success"); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); - for test_dir in test_case_dirs { - let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); - if test_name.contains('-') { - panic!( - "Invalid test directory: {test_name}. 
Cannot include `-`, please convert to `_`" - ); - }; - let test_dir = &test_dir.path(); - - write!( + generate_test_case( test_file, + test_type, + &test_name, + &test_dir, r#" -#[test] -fn noir_test_success_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); + nargo.arg("test"); + + nargo.assert().success();"#, + ); - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("test").arg("--use-legacy"); - - cmd.assert().success(); -}} - -#[test] -fn noir_test_success_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("test"); - - cmd.assert().success(); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + r#" + nargo.arg("test").arg("--use-legacy"); + + nargo.assert().success();"#, + ); } } fn generate_noir_test_failure_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "noir_test_failure"; - let test_data_dir = test_data_dir.join(test_sub_dir); - - let test_case_dirs = - fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); - - for test_dir in test_case_dirs { - let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); - if test_name.contains('-') { - panic!( - "Invalid test directory: {test_name}. Cannot include `-`, please convert to `_`" - ); - }; - let test_dir = &test_dir.path(); - - write!( + let test_type = "noir_test_failure"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); + generate_test_case( test_file, + test_type, + &test_name, + &test_dir, r#" -#[test] -fn noir_test_failure_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("test").arg("--use-legacy"); - - cmd.assert().failure(); -}} - -#[test] -fn noir_test_failure_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("test"); + nargo.arg("test"); + + nargo.assert().failure();"#, + ); - cmd.assert().failure(); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + r#" + nargo.arg("test").arg("--use-legacy"); + + nargo.assert().failure();"#, + ); } } fn generate_compile_success_empty_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "compile_success_empty"; - let test_data_dir = test_data_dir.join(test_sub_dir); - - let test_case_dirs = - fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); - - for test_dir in test_case_dirs { - let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); - if test_name.contains('-') { - panic!( - "Invalid test directory: {test_name}. 
Cannot include `-`, please convert to `_`" - ); - }; - let test_dir = &test_dir.path(); - - let new_feature_ignored = if IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { - "\n#[ignore]" - } else { - "" - }; - - write!( + let test_type = "compile_success_empty"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); + + let assert_zero_opcodes = r#" + let output = nargo.output().expect("Failed to execute command"); + + if !output.status.success() {{ + panic!("`nargo info` failed with: {}", String::from_utf8(output.stderr).unwrap_or_default()); + }} + + // `compile_success_empty` tests should be able to compile down to an empty circuit. + let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap_or_else(|e| {{ + panic!("JSON was not well-formatted {:?}\n\n{:?}", e, std::str::from_utf8(&output.stdout)) + }}); + let num_opcodes = &json["programs"][0]["functions"][0]["acir_opcodes"]; + assert_eq!(num_opcodes.as_u64().expect("number of opcodes should fit in a u64"), 0); + "#; + + generate_test_case( test_file, - r#" -#[test]{new_feature_ignored} -fn compile_success_empty_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("info"); - cmd.arg("--json"); - cmd.arg("--force"); - cmd.arg("--use-legacy"); - - let output = cmd.output().expect("Failed to execute command"); - - if !output.status.success() {{ - panic!("`nargo info` failed with: {{}}", String::from_utf8(output.stderr).unwrap_or_default()); - }} - - // `compile_success_empty` tests should be able to compile down to an empty circuit. - let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap_or_else(|e| {{ - panic!("JSON was not well-formatted {{:?}}\n\n{{:?}}", e, std::str::from_utf8(&output.stdout)) - }}); - let num_opcodes = &json["programs"][0]["functions"][0]["acir_opcodes"]; - assert_eq!(num_opcodes.as_u64().expect("number of opcodes should fit in a u64"), 0); -}} - -#[test] -fn compile_success_empty_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("info"); - cmd.arg("--json"); - cmd.arg("--force"); - - let output = cmd.output().expect("Failed to execute command"); - - if !output.status.success() {{ - panic!("`nargo info` failed with: {{}}", String::from_utf8(output.stderr).unwrap_or_default()); - }} - - // `compile_success_empty` tests should be able to compile down to an empty circuit. 
- let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap_or_else(|e| {{ - panic!("JSON was not well-formatted {{:?}}\n\n{{:?}}", e, std::str::from_utf8(&output.stdout)) - }}); - let num_opcodes = &json["programs"][0]["functions"][0]["acir_opcodes"]; - assert_eq!(num_opcodes.as_u64().expect("number of opcodes should fit in a u64"), 0); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + test_type, + &test_name, + &test_dir, + &format!( + r#" + nargo.arg("info").arg("--json").arg("--force"); + + {assert_zero_opcodes}"#, + ), + ); + + if !IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + &format!( + r#" + nargo.arg("info").arg("--json").arg("--force").arg("--use-legacy"); + + {assert_zero_opcodes}"#, + ), + ); + } } } fn generate_compile_success_contract_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "compile_success_contract"; - let test_data_dir = test_data_dir.join(test_sub_dir); - - let test_case_dirs = - fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); - - for test_dir in test_case_dirs { - let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); - if test_name.contains('-') { - panic!( - "Invalid test directory: {test_name}. Cannot include `-`, please convert to `_`" - ); - }; - let test_dir = &test_dir.path(); + let test_type = "compile_success_contract"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); - write!( + generate_test_case( test_file, + test_type, + &test_name, + &test_dir, r#" -#[test] -fn compile_success_contract_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("compile").arg("--force").arg("--use-legacy"); + nargo.arg("compile").arg("--force"); + + nargo.assert().success();"#, + ); - cmd.assert().success(); -}} -#[test] -fn compile_success_contract_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("compile").arg("--force"); - - cmd.assert().success(); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + r#" + nargo.arg("compile").arg("--force").arg("--use-legacy"); + + nargo.assert().success();"#, + ); } } fn generate_compile_failure_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "compile_failure"; - let test_data_dir = test_data_dir.join(test_sub_dir); - - let test_case_dirs = - fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); + let test_type = "compile_failure"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); - for test_dir in test_case_dirs { - let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); - if test_name.contains('-') { - panic!( - "Invalid test directory: {test_name}. 
Cannot include `-`, please convert to `_`" - ); - }; - let test_dir = &test_dir.path(); - - let new_feature_ignored = if IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { - "\n#[ignore]" - } else { - "" - }; - - write!( + generate_test_case( test_file, - r#" -#[test]{new_feature_ignored} -fn compile_failure_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("compile").arg("--force").arg("--use-legacy"); - - cmd.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not()); -}} -#[test] -fn compile_failure_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("compile").arg("--force"); - - cmd.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not()); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + test_type, + &test_name, + &test_dir, + r#"nargo.arg("compile").arg("--force"); + + nargo.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not());"#, + ); + + if !IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + r#" + nargo.arg("compile").arg("--force").arg("--use-legacy"); + + nargo.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not());"#, + ); + } } } diff --git a/tooling/nargo_fmt/src/visitor/item.rs b/tooling/nargo_fmt/src/visitor/item.rs index a5d042dc71e..3cfee4f46ad 100644 --- a/tooling/nargo_fmt/src/visitor/item.rs +++ b/tooling/nargo_fmt/src/visitor/item.rs @@ -44,7 +44,7 @@ impl super::FmtVisitor<'_> { if !func.def.generics.is_empty() { let full_span = name_span.end()..params_open; - let start = name_span.end(); + let start = self.span_before(full_span.clone(), Token::Less).start(); let end = self.span_after(full_span, Token::Greater).start(); let generics = func.def.generics; diff --git a/tooling/nargo_fmt/tests/expected/fn.nr b/tooling/nargo_fmt/tests/expected/fn.nr index 3d231cd3f7f..961e67faf1c 100644 --- a/tooling/nargo_fmt/tests/expected/fn.nr +++ b/tooling/nargo_fmt/tests/expected/fn.nr @@ -61,3 +61,7 @@ fn main( ) {} pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) {} + +fn whitespace_before_generics(foo: T) {} + +fn more_whitespace_before_generics(foo: T) {} diff --git a/tooling/nargo_fmt/tests/input/fn.nr b/tooling/nargo_fmt/tests/input/fn.nr index 1c6d201fa39..03806b0fef9 100644 --- a/tooling/nargo_fmt/tests/input/fn.nr +++ b/tooling/nargo_fmt/tests/input/fn.nr @@ -44,3 +44,8 @@ fn main( ) {} pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) {} + +fn whitespace_before_generics < T > (foo: T) {} + +fn more_whitespace_before_generics < +T > (foo: T) {}
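For context on the user-facing effect of the stdlib hunks earlier in this diff: `pedersen_commitment` and `pedersen_hash` keep their public signatures in `hash.nr`, but are now implemented in Noir on top of `multi_scalar_mul` over derived generators rather than the removed Pedersen black-box opcodes. A small usage sketch follows; the wrapper function name and the way the two results are combined are illustrative only, not part of the patch.

```noir
fn commit_and_hash(x: Field, y: Field) -> Field {
    // Same stdlib entry points as before this change; only their implementation
    // moved from black-box opcodes into Noir code built on multi_scalar_mul.
    let commitment = std::hash::pedersen_commitment([x, y]);
    let digest = std::hash::pedersen_hash([x, y]);

    // The commitment is an embedded-curve point; the hash is a single field element.
    // Combine them only to show both results being consumed.
    commitment.x + digest
}
```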