Merge pull request #775 from dusk-network/mocello/fix_clippy
Fix clippy warnings
moCello authored Oct 17, 2023
2 parents d3bcc6a + 1b63afc commit 29c9500
Showing 10 changed files with 66 additions and 72 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changed

- Update `criterion` dev-dependency to 0.5
- Fix clippy warnings [#774]

## [0.16.0] - 2023-10-11

@@ -515,6 +516,7 @@ is necessary since `rkyv/validation` was required as a bound.
- Proof system module.

<!-- ISSUES -->
[#774]: https://github.com/dusk-network/plonk/issues/774
[#763]: https://github.com/dusk-network/plonk/issues/763
[#760]: https://github.com/dusk-network/plonk/issues/760
[#752]: https://github.com/dusk-network/plonk/pull/752
2 changes: 1 addition & 1 deletion src/commitment_scheme/kzg10/key.rs
@@ -117,7 +117,7 @@ impl CommitKey {
pub fn from_slice(bytes: &[u8]) -> Result<CommitKey, Error> {
let powers_of_g = bytes
.chunks(G1Affine::SIZE)
.map(|chunk| G1Affine::from_slice(chunk))
.map(G1Affine::from_slice)
.collect::<Result<Vec<G1Affine>, dusk_bytes::Error>>()?;

Ok(CommitKey { powers_of_g })
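The change above is the usual fix for clippy's `redundant_closure` lint: a closure that only forwards its argument is replaced by the function itself. A minimal, self-contained sketch of the same pattern; `parse_u16` is a made-up stand-in for `G1Affine::from_slice`:

```rust
// Hypothetical stand-in for `G1Affine::from_slice`: parse one 2-byte chunk.
fn parse_u16(chunk: &[u8]) -> Result<u16, String> {
    let arr: [u8; 2] = chunk
        .try_into()
        .map_err(|_| "chunk must be exactly 2 bytes".to_string())?;
    Ok(u16::from_le_bytes(arr))
}

fn parse_all(bytes: &[u8]) -> Result<Vec<u16>, String> {
    bytes
        .chunks(2)
        // Before: .map(|chunk| parse_u16(chunk))
        // The closure only forwards its argument, so pass the function directly.
        .map(parse_u16)
        .collect()
}

fn main() {
    assert_eq!(parse_all(&[1, 0, 2, 0]).unwrap(), vec![1, 2]);
}
```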
2 changes: 1 addition & 1 deletion src/commitment_scheme/kzg10/srs.rs
@@ -65,7 +65,7 @@ impl PublicParameters {
}

// we update the degree to match the required one (n + 6)
max_degree = max_degree + Self::ADDED_BLINDING_DEGREE;
max_degree += Self::ADDED_BLINDING_DEGREE;

// Generate the secret scalar x
let x = BlsScalar::random(&mut rng);
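This hunk is clippy's `assign_op_pattern` suggestion: `x = x + y` becomes `x += y`. A small sketch of the pattern; the value 6 is taken from the "(n + 6)" comment above, but the constant name and context here are simplified stand-ins:

```rust
// Simplified stand-in for `PublicParameters::ADDED_BLINDING_DEGREE`.
const ADDED_BLINDING_DEGREE: usize = 6;

fn padded_degree(requested: usize) -> usize {
    let mut max_degree = requested;
    // Before: max_degree = max_degree + ADDED_BLINDING_DEGREE;
    max_degree += ADDED_BLINDING_DEGREE;
    max_degree
}

fn main() {
    assert_eq!(padded_degree(10), 16);
}
```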
24 changes: 13 additions & 11 deletions src/composer.rs
@@ -56,7 +56,7 @@ pub trait Composer: Sized + Index<Witness, Output = BlsScalar> {
///
/// This shouldn't be used directly; instead, use [`Self::initialized`]
#[deprecated(
since = "13.0",
since = "0.13.0",
note = "this function is meant for internal use. call `initialized` instead"
)]
fn uninitialized() -> Self;
@@ -66,14 +66,14 @@ pub trait Composer: Sized + Index<Witness, Output = BlsScalar> {

/// Allocate a witness value into the composer and return its index.
#[deprecated(
since = "13.0",
since = "0.13.0",
note = "this function is meant for internal use. call `append_witness` instead"
)]
fn append_witness_internal(&mut self, witness: BlsScalar) -> Witness;

/// Append a new width-4 poly gate/constraint.
#[deprecated(
since = "13.0",
since = "0.13.0",
note = "this function is meant for internal use. call `append_custom_gate` instead"
)]
fn append_custom_gate_internal(&mut self, constraint: Constraint);
@@ -364,10 +364,10 @@ pub trait Composer: Sized + Index<Witness, Output = BlsScalar> {
.left(wnaf_round.x_beta)
.right(wnaf_round.y_beta)
.constant(wnaf_round.xy_beta)
.a(wnaf_round.acc_x.into())
.b(wnaf_round.acc_y.into())
.o(wnaf_round.xy_alpha.into())
.d(wnaf_round.accumulated_bit.into());
.a(wnaf_round.acc_x)
.b(wnaf_round.acc_y)
.o(wnaf_round.xy_alpha)
.d(wnaf_round.accumulated_bit);

self.append_custom_gate(constraint)
}
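Dropping the `.into()` calls on values that are already of the expected type looks like clippy's `useless_conversion` lint. A toy sketch of the pattern; `Witness` and `Constraint` here are placeholders, not the crate's real builder API:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct Witness(usize);

#[derive(Debug, Default)]
struct Constraint {
    a: Option<Witness>,
}

impl Constraint {
    fn a(mut self, witness: Witness) -> Self {
        self.a = Some(witness);
        self
    }
}

fn main() {
    let acc_x = Witness(3);
    // Before: Constraint::default().a(acc_x.into())
    // `acc_x` already is a `Witness`, so the conversion is a no-op.
    let constraint = Constraint::default().a(acc_x);
    assert_eq!(constraint.a, Some(Witness(3)));
}
```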
@@ -989,16 +989,18 @@ pub trait Composer: Sized + Index<Witness, Output = BlsScalar> {

// last constraint is zeroed as it is reserved for the genesis quad or
// padding
constraints.last_mut().map(|c| *c = Constraint::new());
if let Some(c) = constraints.last_mut() {
*c = Constraint::new();
}

// the accumulators count is a function to the number of quads. hence,
// this optional gate will not cause different circuits depending on the
// witness because this computation is bound to the constant bits count
// alone.
if let Some(accumulator) = accumulators.last() {
constraints
.last_mut()
.map(|c| c.set_witness(WiredWitness::D, *accumulator));
if let Some(c) = constraints.last_mut() {
c.set_witness(WiredWitness::D, *accumulator);
}
}

constraints
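Rewriting `Option::map` used only for its side effect as `if let` addresses clippy's `option_map_unit_fn` lint, as both hunks above do. A minimal sketch with a placeholder `Constraint` type:

```rust
#[derive(Clone, Copy, Debug, Default, PartialEq)]
struct Constraint(u64);

impl Constraint {
    fn new() -> Self {
        Self::default()
    }
}

fn zero_last(constraints: &mut [Constraint]) {
    // Before: constraints.last_mut().map(|c| *c = Constraint::new());
    // The closure returns `()`, so clippy prefers an explicit `if let`.
    if let Some(c) = constraints.last_mut() {
        *c = Constraint::new();
    }
}

fn main() {
    let mut constraints = vec![Constraint(1), Constraint(2)];
    zero_last(&mut constraints);
    assert_eq!(constraints, vec![Constraint(1), Constraint(0)]);
}
```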
6 changes: 3 additions & 3 deletions src/composer/compiler.rs
@@ -65,7 +65,7 @@ impl Compiler {
where
C: Circuit,
{
compress::CompressedCircuit::from_circuit::<C>(compress::Version::V2)
compress::CompressedCircuit::from_circuit::<C>(true)
}

/// Generates a [Prover] and [Verifier] from a buffer created by
@@ -91,7 +91,7 @@ impl Compiler {
let (commit, opening) = pp.trim(n)?;

let (prover, verifier) =
Self::preprocess(label, commit, opening, &builder)?;
Self::preprocess(label, commit, opening, builder)?;

Ok((prover, verifier))
}
@@ -405,7 +405,7 @@ impl Compiler {
label.clone(),
prover_key,
commit_key,
verifier_key.clone(),
verifier_key,
size,
constraints,
);
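Passing `builder` instead of `&builder` drops a borrow clippy considers needless (`needless_borrow`), and removing `verifier_key.clone()` similarly avoids cloning a value that can simply be moved. A tiny sketch of the most common form of the borrow lint, with placeholder types:

```rust
// Placeholder for the circuit builder; only the borrow pattern matters here.
struct Builder {
    constraints: usize,
}

fn preprocess(builder: &Builder) -> usize {
    builder.constraints
}

fn compile(builder: &Builder) -> usize {
    // Before: preprocess(&builder) -- `builder` is already a `&Builder`, so
    // the extra `&` only builds a `&&Builder` that gets auto-dereferenced.
    preprocess(builder)
}

fn main() {
    let builder = Builder { constraints: 42 };
    assert_eq!(compile(&builder), 42);
}
```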
74 changes: 30 additions & 44 deletions src/composer/compiler/compress.rs
@@ -45,46 +45,33 @@ pub struct CompressedPolynomial {
pub q_variable_group_add: usize,
}

#[derive(
Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, MsgPacker,
)]
pub enum Version {
V1,
V2,
}

impl Version {
pub fn into_scalars(self) -> HashMap<BlsScalar, usize> {
match self {
Version::V1 => {
[BlsScalar::zero(), BlsScalar::one(), -BlsScalar::one()]
.into_iter()
.enumerate()
.map(|(i, s)| (s, i))
.collect()
}
Version::V2 => {
let mut scalars = Self::V1.into_scalars();
// assert we don't override a previously inserted constant
for s in hades::constants() {
let len = scalars.len();
scalars.entry(s).or_insert(len);
}
for r in hades::mds() {
for s in r {
let len = scalars.len();
scalars.entry(s).or_insert(len);
}
}
scalars
fn scalar_map(hades_optimization: bool) -> HashMap<BlsScalar, usize> {
let mut scalars: HashMap<BlsScalar, usize> = {
[BlsScalar::zero(), BlsScalar::one(), -BlsScalar::one()]
.into_iter()
.enumerate()
.map(|(i, s)| (s, i))
.collect()
};
if hades_optimization {
// assert we don't override a previously inserted constant
for s in hades::constants() {
let len = scalars.len();
scalars.entry(s).or_insert(len);
}
for r in hades::mds() {
for s in r {
let len = scalars.len();
scalars.entry(s).or_insert(len);
}
}
}
scalars
}

#[derive(Debug, Clone, PartialEq, Eq, MsgPacker)]
pub struct CompressedCircuit {
version: Version,
hades_optimization: bool,
public_inputs: Vec<usize>,
witnesses: usize,
scalars: Vec<[u8; BlsScalar::SIZE]>,
@@ -93,16 +80,16 @@ pub struct CompressedCircuit {
}

impl CompressedCircuit {
pub fn from_circuit<C>(version: Version) -> Result<Vec<u8>, Error>
pub fn from_circuit<C>(hades_optimization: bool) -> Result<Vec<u8>, Error>
where
C: Circuit,
{
let mut builder = Builder::initialized();
C::default().circuit(&mut builder)?;
Ok(Self::from_builder(version, builder))
Ok(Self::from_builder(hades_optimization, builder))
}

pub fn from_builder(version: Version, builder: Builder) -> Vec<u8> {
pub fn from_builder(hades_optimization: bool, builder: Builder) -> Vec<u8> {
let mut public_inputs: Vec<_> =
builder.public_inputs.keys().copied().collect();
public_inputs.sort();
@@ -111,7 +98,7 @@ impl CompressedCircuit {
let polynomials = builder.constraints;

let constraints = polynomials.into_iter();
let mut scalars = version.into_scalars();
let mut scalars = scalar_map(hades_optimization);
let base_scalars_len = scalars.len();
let mut polynomials = HashMap::new();
let constraints = constraints
@@ -193,7 +180,7 @@ impl CompressedCircuit {
.for_each(|(s, i)| scalars[i] = s.to_bytes());

// clear the scalars that can be determiniscally reconstructed from the
// version
// scalar_map
let scalars = scalars.split_off(base_scalars_len);

let polynomials_map = polynomials;
@@ -204,7 +191,7 @@ impl CompressedCircuit {
.for_each(|(p, i)| polynomials[i] = p);

let compressed = Self {
version,
hades_optimization,
public_inputs,
witnesses,
scalars,
@@ -230,7 +217,7 @@ impl CompressedCircuit {
let (
_,
Self {
version,
hades_optimization,
public_inputs,
witnesses,
scalars,
@@ -240,10 +227,9 @@ impl CompressedCircuit {
) = Self::unpack(&compressed)
.map_err(|_| Error::InvalidCompressedCircuit)?;

let version_scalars_map = version.into_scalars();
let mut version_scalars =
vec![BlsScalar::zero(); version_scalars_map.len()];
version_scalars_map
let scalar_map = scalar_map(hades_optimization);
let mut version_scalars = vec![BlsScalar::zero(); scalar_map.len()];
scalar_map
.into_iter()
.for_each(|(s, i)| version_scalars[i] = s);
for s in scalars {
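The larger change in this file is a refactor rather than a lint fix: the two-variant `Version` enum becomes a plain `hades_optimization: bool`, and the `into_scalars` method becomes the free `scalar_map` function. A simplified sketch of that shape; `u64` stands in for `BlsScalar` and the extra constants are made up, not the real Hades values:

```rust
use std::collections::HashMap;

fn scalar_map(hades_optimization: bool) -> HashMap<u64, usize> {
    // Base table: the same few constants, always indexed in the same order.
    let mut scalars: HashMap<u64, usize> = [0u64, 1, u64::MAX]
        .into_iter()
        .enumerate()
        .map(|(i, s)| (s, i))
        .collect();
    if hades_optimization {
        // Extra constants are appended without overriding earlier entries,
        // so the indices assigned by the base table stay stable.
        for s in [7u64, 11, 13] {
            let len = scalars.len();
            scalars.entry(s).or_insert(len);
        }
    }
    scalars
}

fn main() {
    assert_eq!(scalar_map(false).len(), 3);
    assert_eq!(scalar_map(true).len(), 6);
}
```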
5 changes: 2 additions & 3 deletions src/composer/prover.rs
@@ -87,7 +87,7 @@ impl Prover {
for i in 0..hiding_degree + 1 {
let blinding_scalar = BlsScalar::random(&mut *rng);

w_vec_inverse[i] = w_vec_inverse[i] - blinding_scalar;
w_vec_inverse[i] -= blinding_scalar;
w_vec_inverse.push(blinding_scalar);
}

@@ -206,8 +206,7 @@ impl Prover {
let prover_key = ProverKey::from_slice(prover_key)?;

// Safety: checked len
let commit_key =
unsafe { CommitKey::from_slice_unchecked(&commit_key) };
let commit_key = unsafe { CommitKey::from_slice_unchecked(commit_key) };

let verifier_key = VerifierKey::from_slice(verifier_key)?;

5 changes: 2 additions & 3 deletions src/fft/evaluations.rs
@@ -49,8 +49,7 @@ impl Evaluations {
bytes.extend(
self.evals
.iter()
.map(|scalar| scalar.to_bytes().to_vec())
.flatten(),
.flat_map(|scalar| scalar.to_bytes().to_vec()),
);

bytes
@@ -62,7 +61,7 @@ impl Evaluations {
let domain = EvaluationDomain::from_reader(&mut buffer)?;
let evals = buffer
.chunks(BlsScalar::SIZE)
.map(|chunk| BlsScalar::from_slice(chunk))
.map(BlsScalar::from_slice)
.collect::<Result<Vec<BlsScalar>, dusk_bytes::Error>>()?;
Ok(Evaluations::from_vec_and_domain(evals, domain))
}
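Collapsing `.map(..).flatten()` into `.flat_map(..)` is clippy's `map_flatten` lint. A minimal sketch, with `u32` and its little-endian bytes standing in for `BlsScalar` serialization:

```rust
fn to_var_bytes(evals: &[u32]) -> Vec<u8> {
    evals
        .iter()
        // Before: .map(|scalar| scalar.to_le_bytes().to_vec()).flatten()
        // `flat_map` expresses the same thing with a single adaptor.
        .flat_map(|scalar| scalar.to_le_bytes().to_vec())
        .collect()
}

fn main() {
    assert_eq!(to_var_bytes(&[1, 2]), vec![1, 0, 0, 0, 2, 0, 0, 0]);
}
```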
11 changes: 5 additions & 6 deletions src/fft/polynomial.rs
@@ -136,16 +136,15 @@ impl Polynomial {
pub fn to_var_bytes(&self) -> Vec<u8> {
self.coeffs
.iter()
.map(|item| item.to_bytes().to_vec())
.flatten()
.flat_map(|item| item.to_bytes().to_vec())
.collect()
}

/// Generate a Polynomial from a slice of bytes.
pub fn from_slice(bytes: &[u8]) -> Result<Polynomial, Error> {
let coeffs = bytes
.chunks(BlsScalar::SIZE)
.map(|chunk| BlsScalar::from_slice(chunk))
.map(BlsScalar::from_slice)
.collect::<Result<Vec<BlsScalar>, dusk_bytes::Error>>()?;

Ok(Polynomial { coeffs })
@@ -198,7 +197,7 @@ impl<'a, 'b> Add<&'a Polynomial> for &'b Polynomial {
}
}

impl<'a, 'b> AddAssign<&'a Polynomial> for Polynomial {
impl<'a> AddAssign<&'a Polynomial> for Polynomial {
fn add_assign(&mut self, other: &'a Polynomial) {
if self.is_zero() {
self.coeffs.truncate(0);
@@ -219,7 +218,7 @@ impl<'a, 'b> AddAssign<&'a Polynomial> for Polynomial {
}
}

impl<'a, 'b> AddAssign<(BlsScalar, &'a Polynomial)> for Polynomial {
impl<'a> AddAssign<(BlsScalar, &'a Polynomial)> for Polynomial {
fn add_assign(&mut self, (f, other): (BlsScalar, &'a Polynomial)) {
if self.is_zero() {
self.coeffs.truncate(0);
@@ -285,7 +284,7 @@ impl<'a, 'b> Sub<&'a Polynomial> for &'b Polynomial {
}
}

impl<'a, 'b> SubAssign<&'a Polynomial> for Polynomial {
impl<'a> SubAssign<&'a Polynomial> for Polynomial {
#[inline]
fn sub_assign(&mut self, other: &'a Polynomial) {
if self.is_zero() {
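Besides the `flat_map` and `from_slice` changes mirrored from the other files, the `impl<'a, 'b>` headers here declared a `'b` lifetime the `AddAssign`/`SubAssign` impls never use; removing it silences the unused-lifetime warning (presumably clippy's `extra_unused_lifetimes`, or the equivalent rustc lint). A toy version of the cleaned-up impl, with a simplified `Poly` in place of `Polynomial`:

```rust
use std::ops::AddAssign;

// Toy stand-in for the crate's `Polynomial`.
#[derive(Debug, PartialEq)]
struct Poly {
    coeffs: Vec<i64>,
}

// Before: impl<'a, 'b> AddAssign<&'a Poly> for Poly { .. }
// `'b` was declared but never used, so it is dropped.
impl<'a> AddAssign<&'a Poly> for Poly {
    fn add_assign(&mut self, other: &'a Poly) {
        if self.coeffs.len() < other.coeffs.len() {
            self.coeffs.resize(other.coeffs.len(), 0);
        }
        for (a, b) in self.coeffs.iter_mut().zip(other.coeffs.iter()) {
            *a += b;
        }
    }
}

fn main() {
    let mut p = Poly { coeffs: vec![1, 2] };
    p += &Poly { coeffs: vec![3, 4, 5] };
    assert_eq!(p.coeffs, vec![4, 6, 5]);
}
```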
7 changes: 7 additions & 0 deletions src/runtime.rs
@@ -15,6 +15,7 @@ use crate::debugger::Debugger;

/// Runtime events
#[derive(Debug, Clone, Copy)]
#[allow(clippy::large_enum_variant)]
pub enum RuntimeEvent {
/// A witness was appended to the constraint system
WitnessAppended {
@@ -41,6 +42,12 @@ pub struct Runtime {
debugger: Debugger,
}

impl Default for Runtime {
fn default() -> Self {
Self::new()
}
}

impl Runtime {
/// Create a new PLONK runtime
#[allow(unused_variables)]
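Adding an `impl Default for Runtime` that forwards to `new()` is the standard answer to clippy's `new_without_default` lint, which fires on public types with an argument-less `new` but no `Default` impl. A minimal sketch; the field is a placeholder (the real struct holds a `Debugger`):

```rust
pub struct Runtime {
    // Placeholder field; the real `Runtime` holds a `Debugger`.
    events: Vec<String>,
}

impl Default for Runtime {
    fn default() -> Self {
        Self::new()
    }
}

impl Runtime {
    /// Create a new PLONK runtime.
    pub fn new() -> Self {
        Self { events: Vec::new() }
    }
}

fn main() {
    let runtime = Runtime::default();
    assert!(runtime.events.is_empty());
}
```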
