diff --git a/.github/workflows/production.yml b/.github/workflows/production.yml
index 0203b9349..cebe9fc5d 100644
--- a/.github/workflows/production.yml
+++ b/.github/workflows/production.yml
@@ -35,10 +35,10 @@ jobs:
run: yarn
- name: Compile contracts
- run: yarn compile:sol
+ run: yarn compile:contracts
- name: Build libraries
- run: yarn build:js
+ run: yarn build:libraries
- name: Run Prettier
run: yarn prettier
@@ -72,6 +72,9 @@ jobs:
- name: Setup Circom
run: wget https://github.com/iden3/circom/releases/latest/download/circom-linux-amd64 && sudo mv ./circom-linux-amd64 /usr/bin/circom && sudo chmod +x /usr/bin/circom
+ - name: Install Nargo
+ uses: noir-lang/noirup@v0.1.3
+
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
diff --git a/.github/workflows/pull-requests.yml b/.github/workflows/pull-requests.yml
index 08df69724..c6c6c2e17 100644
--- a/.github/workflows/pull-requests.yml
+++ b/.github/workflows/pull-requests.yml
@@ -33,10 +33,10 @@ jobs:
run: yarn
- name: Compile contracts
- run: yarn compile:sol
+ run: yarn compile:contracts
- name: Build libraries
- run: yarn build:js
+ run: yarn build:libraries
- name: Run Prettier
run: yarn prettier
@@ -64,6 +64,9 @@ jobs:
- name: Setup Circom
run: wget https://github.com/iden3/circom/releases/latest/download/circom-linux-amd64 && sudo mv ./circom-linux-amd64 /usr/bin/circom && sudo chmod +x /usr/bin/circom
+ - name: Install Nargo
+ uses: noir-lang/noirup@v0.1.3
+
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
diff --git a/README.md b/README.md
index f99200140..48c4b0979 100644
--- a/README.md
+++ b/README.md
@@ -86,6 +86,34 @@
|
+
+
+
+ @zk-kit/eddsa-poseidon
+
+
+ (docs)
+
+ |
+
+
+
+
+
+ |
+
+
+
+
+
+ |
+
+
+
+
+
+ |
+
diff --git a/package.json b/package.json
index 5c7f00fad..a1216580c 100644
--- a/package.json
+++ b/package.json
@@ -8,9 +8,9 @@
"bugs": "https://github.com/privacy-scaling-explorations/zk-kit/issues",
"private": true,
"scripts": {
- "build": "yarn build:js && yarn compile:sol",
- "build:js": "yarn workspaces foreach --no-private run build",
- "compile:sol": "yarn workspaces foreach run compile",
+ "build": "yarn build:libraries && yarn compile:contracts",
+ "build:libraries": "yarn workspaces foreach --no-private run build",
+ "compile:contracts": "yarn workspaces foreach run compile",
"test": "yarn test:libraries && yarn test:contracts && yarn test:circuits",
"test:libraries": "jest --coverage",
"test:circuits": "yarn workspace @zk-kit/circuits test",
diff --git a/packages/circuits/README.md b/packages/circuits/README.md
index 7ac1fb160..958cb0224 100644
--- a/packages/circuits/README.md
+++ b/packages/circuits/README.md
@@ -31,15 +31,24 @@
| This package offers a collection of reusable circuits designed for integration into other projects or protocols, promoting code modularization within the zero-knowledge ecosystem. |
| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+> [!IMPORTANT]
+> Installation of [Circom](https://docs.circom.io/getting-started/installation/) and [Nargo](https://noir-lang.org/getting_started/nargo_installation) is required for circuit tests.
+
## Circuits
- Circom:
- [PoseidonProof](./circom/poseidon-proof.circom): It proves the possession of a Poseidon pre-image without revealing the pre-image itself.
- [BinaryMerkleRoot](./circom/binary-merkle-root.circom): It calculates the root of a binary Merkle tree using a provided proof-of-membership.
+- Noir:
+ - [Sparse Merkle Tree PoseidonBN254](./noir/crates/smt_bn254/src/lib.nr): A reusable library of functions related to Sparse Merkle Trees based on the JS implementation of [@zk-kit/smt](../smt). The library uses the Poseidon hash to implement the following functions:
+ - verifying membership and non-membership proofs
+ - adding a new entry to a SMT
+ - updating an entry of an SMT
+ - deleting an existing entry from an SMT
## 🛠 Install
-### npm or yarn
+### Using NPM or Yarn (Circom circuits)
Install the `@zk-kit/circuits` package with npm:
@@ -52,3 +61,12 @@ or yarn:
```bash
yarn add @zk-kit/circuits
```
+
+### Using Nargo (Noir circuits)
+
+In your Nargo.toml file, add the following dependency:
+
+```toml
+[dependencies]
+smt_bn254 = { tag = "v0.1.0", git = "https://github.com/privacy-scaling-explorations/zk-kit", directory = "packages/circuits/noir/crates/smt_bn254" }
+```
diff --git a/packages/circuits/noir/.gitkeep b/packages/circuits/noir/.gitkeep
deleted file mode 100644
index e69de29bb..000000000
diff --git a/packages/circuits/noir/Nargo.toml b/packages/circuits/noir/Nargo.toml
new file mode 100644
index 000000000..60e8a4b40
--- /dev/null
+++ b/packages/circuits/noir/Nargo.toml
@@ -0,0 +1,2 @@
+[workspace]
+members = ["crates/smt_bn254"]
\ No newline at end of file
diff --git a/packages/circuits/noir/crates/smt_bn254/Nargo.toml b/packages/circuits/noir/crates/smt_bn254/Nargo.toml
new file mode 100644
index 000000000..bace4ce60
--- /dev/null
+++ b/packages/circuits/noir/crates/smt_bn254/Nargo.toml
@@ -0,0 +1,5 @@
+[package]
+name = "smt_bn254"
+authors = ["fabianschu"]
+type = "lib"
+compiler_version = ">=0.19.3"
diff --git a/packages/circuits/noir/crates/smt_bn254/src/lib.nr b/packages/circuits/noir/crates/smt_bn254/src/lib.nr
new file mode 100644
index 000000000..ca7a714bd
--- /dev/null
+++ b/packages/circuits/noir/crates/smt_bn254/src/lib.nr
@@ -0,0 +1,246 @@
+use dep::std::option::Option;
+
+mod utils;
+
+global TREE_DEPTH: u32 = 256;
+
+/**
+ * Verifies a membership or a non-membership proof, ie it calculates the tree root
+ * based on an entry or matching entry and all siblings and compares that calculated root
+ * with the root that is passed to this function.
+ * @param entry Contains key and value of an entry: [key, value]
+ * @param matching_entry Contains [key, value] of a matching entry only for non-membership proofs
+ * @param siblings Contains array of siblings of entry / matching_entry
+ * @param root The expected root of the tree
+ */
+pub fn verify(entry: [Field; 2], matching_entry: [Option<Field>; 2], siblings: [Field; TREE_DEPTH], root: Field) {
+ let mut calculcated_root: Field = 0;
+ let path = utils::key_to_path(entry[0]);
+ // if there is no matching_entry it is a membership proof
+ // if there is a matching_entry it is a non_membership proof
+ if matching_entry[0].is_none() | matching_entry[1].is_none() {
+ // membership proof: the root is calculated based on the entry, the siblings,
+ // and the path determined by the key of entry through consecutive hashing
+ calculcated_root = utils::calculcate_root(entry, siblings, path);
+ } else {
+ // non-membership proof: the root is calculated based on the matching_entry, the siblings
+ // and the path that is determined by the key of entry. This makes sure that matching_entry is in fact
+ // a matching entry for entry meaning that it shares the same first bits as path
+ calculcated_root = utils::calculcate_root([matching_entry[0].unwrap(), matching_entry[1].unwrap()], siblings, path);
+ }
+ assert(calculcated_root == root);
+}
+
+/**
+ * Adds a NEW entry to an existing tree. Based on the siblings first validates the correctness of
+ * the old root. Then uses the new entry and the siblings to calculate the new tree root.
+ * NOTE: this function doesn't validate if the key for the new entry already exists in the tree, ie
+ * if the operation is actually an update. For this operation there is a separate function.
+ * @param entry Contains key and value of an entry: [key, value]
+ * @param old_root The root of the tree before the new entry is added
+ * @param siblings Contains array of siblings of entry / matching_entry
+ * @returns The new root after the addition
+ */
+pub fn add(new_entry: [Field; 2], old_root: Field, siblings: [Field; TREE_DEPTH]) -> Field {
+ // if the root node is zero the first leaf is added to the tree in which case
+ // the new root equals H(k,v,1)
+ // otherwise the correctness of the old root is validated based on the siblings after which
+ // the new root is calculated and returned
+ if (old_root == 0) {
+ utils::hash(new_entry[0], new_entry[1], true)
+ } else {
+ let (old, new) = utils::calculate_two_roots(new_entry, siblings);
+ assert(old == old_root);
+ new
+ }
+}
+
+/**
+ * Deletes an existing entry from a tree. Based on the siblings first does a membership proof
+ * of that existing entry and then calculates the new root (without the entry).
+ * @param entry Contains key and value of the to-be-deleted entry: [key, value]
+ * @param old_root The root of the tree if the entry is still included
+ * @param siblings Contains array of siblings of entry
+ * @returns The new root after the deletion
+ */
+pub fn delete(entry: [Field; 2], old_root: Field, siblings: [Field; TREE_DEPTH]) -> Field {
+ // proves membership of entry in the old root, then calculates and returns the new root
+ let (new, old) = utils::calculate_two_roots(entry, siblings);
+ assert(old == old_root);
+ new
+}
+
+/**
+ * Updates the value of an existing entry in a tree. Based on the siblings it
+ * first verifies the membership of the old entry. Then recalculates the new root.
+ * @param new_value The new value to be added (instead of old_entry[1])
+ * @param old_entry Contains key and value of the entry to be updated: [key, value]
+ * @param old_root The root of the tree before the update
+ * @param siblings Contains an array of siblings of old_entry
+ * @returns The new root after the update
+ */
+pub fn update(new_value: Field, old_entry: [Field; 2], old_root: Field, siblings: [Field; TREE_DEPTH]) -> Field {
+ let key = old_entry[0];
+ let old_value = old_entry[1];
+ // both the old entry and new entry share the same key that is used to calculate the path
+ let path = utils::key_to_path(key);
+ // old_parent is a container to temporarily store the nodes that ultimately lead to the OLD root
+ let mut old_parent: Field = utils::hash(key, old_value, true);
+ // new_parent is a container to temporarily store the nodes that ultimately lead to the NEW root
+ let mut new_parent: Field = utils::hash(key, new_value, true);
+ // starting from the bottom of the tree, for each level it checks whether there is a sibling and if
+ // that is the case, it hashes the two containers with the sibling and updates the containers with the
+ // resulting hashes until the uppermost level is reached aka the root node
+ for i in 0..TREE_DEPTH {
+ let sibling = siblings[i];
+ if sibling != 0 {
+ if path[i] == 0 {
+ new_parent = utils::hash(new_parent, sibling, false);
+ old_parent = utils::hash(old_parent, sibling, false);
+ } else {
+ new_parent = utils::hash(sibling, new_parent, false);
+ old_parent = utils::hash(sibling, old_parent, false);
+ }
+ }
+ }
+ assert(old_parent == old_root);
+ new_parent
+}
+
+/*
+Visual representations of the trees used in the tests for reference
+
+The big tree corresponds to the tree that is used for
+testing in @zk-kit/smt:
+
+big_tree_root: 46574...31272
+├── 1: 78429...40557
+│ ├── 1
+│ ├── v: 17150...90784
+│ └── k: 20438...35547
+└── 0:
+ ├── 1: 74148...2867
+ │ ├── 1: 89272...68433 || This leaf
+ │ │ ├── 1 || is missing
+ │ │ ├── v: 85103...45170 || for the
+ │ │ └── k: 84596...08785 || small_tree_root
+ │ └── 0: 18126...22196
+ │ ├── 1
+ │ ├── v: 13761...25802
+ │ └── k: 13924...78098
+ └── 0: 79011...20495
+ ├── 1
+ ├── v: 10223...67791
+ └── k: 18746...38844
+
+The small tree lacks one leaf as indicated in the previous
+tree and looks as follows:
+
+small_tree_root: 35328...54128
+├── 1: 78429...40557
+│ ├── 1
+│ ├── v: 17150...90784
+│ └── k: 20438...35547
+└── 0:
+ ├── 1: 18126...22196
+ │ ├── 1
+ │ ├── v: 13761...25802
+ │ └── k: 13924...78098
+ └── 0: 79011...20495
+ ├── 1
+ ├── v: 10223...67791
+ └── k: 18746...38844
+*/
+
+#[test]
+fn test_verify_membership_proof() {
+ let small_tree_root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
+ let key = 18746990989203767017840856832962652635369613415011636432610873672704085238844;
+ let value = 10223238458026721676606706894638558676629446348345239719814856822628482567791;
+ let entry = [key, value];
+ let matching_entry = [Option::none(), Option::none()];
+ let mut siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
+ siblings[254] = 18126944477260144816572365299295230808286197301459941187567621915186392922196;
+ siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
+ verify(entry, matching_entry, siblings, small_tree_root);
+}
+
+#[test]
+fn test_verify_non_membership_proof() {
+ let small_tree_root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
+ let key = 8459688297517826598613412977307486050019239051864711035321718508109192087854;
+ let value = 8510347201346963732943571140849185725417245763047403804445415726302354045170;
+ let entry = [key, value];
+ let matching_entry = [
+ Option::some(13924553918840562069536446401916499801909138643922241340476956069386532478098),
+ Option::some(13761779908325789083343687318102407319424329800042729673292939195255502025802)
+ ];
+ let mut siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
+ siblings[254] = 14443001516360873457302534246953033880503978184674311810335857314606403404583;
+ siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
+ verify(entry, matching_entry, siblings, small_tree_root);
+}
+
+#[test]
+fn test_add_first_element() {
+ let key = 20438969296305830531522370305156029982566273432331621236661483041446048135547;
+ let value = 17150136040889237739751319962368206600863150289695545292530539263327413090784;
+ let entry = [key, value];
+ let siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
+ let zero_node = 0;
+ assert(add(entry, zero_node, siblings) == 7842913321420301106140788486336995496832503825951977327575501561489697540557);
+}
+
+#[test]
+fn test_add_element_to_one_element_tree() {
+ let key = 8459688297517826598613412977307486050019239051864711035321718508109192087854;
+ let value = 8510347201346963732943571140849185725417245763047403804445415726302354045170;
+ let entry = [key, value];
+ let old_root = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
+ let mut siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
+ siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
+ assert(add(entry, old_root, siblings) == 6309163561753770186763792861087421800063032915545949912480764922611421686766);
+}
+
+#[test]
+fn test_add_element_to_existing_tree() {
+ let key = 8459688297517826598613412977307486050019239051864711035321718508109192087854;
+ let value = 8510347201346963732943571140849185725417245763047403804445415726302354045170;
+ let entry = [key, value];
+ let small_tree_root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
+ let mut siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
+ siblings[253] = 18126944477260144816572365299295230808286197301459941187567621915186392922196;
+ siblings[254] = 14443001516360873457302534246953033880503978184674311810335857314606403404583;
+ siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
+ let big_tree_root = 4657474665007910823901096287220097081233671466281873230928277896829046731272;
+ assert(add(entry, small_tree_root, siblings) == big_tree_root);
+}
+
+#[test]
+fn test_delete() {
+ let key = 8459688297517826598613412977307486050019239051864711035321718508109192087854;
+ let value = 8510347201346963732943571140849185725417245763047403804445415726302354045170;
+ let entry = [key, value];
+ let big_tree_root = 4657474665007910823901096287220097081233671466281873230928277896829046731272;
+ let mut siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
+ siblings[253] = 18126944477260144816572365299295230808286197301459941187567621915186392922196;
+ siblings[254] = 14443001516360873457302534246953033880503978184674311810335857314606403404583;
+ siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
+ let small_tree_root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
+ assert(delete(entry, big_tree_root, siblings) == small_tree_root);
+}
+
+#[test]
+fn test_update() {
+ let key = 8459688297517826598613412977307486050019239051864711035321718508109192087854;
+ let old_value = 8510347201346963732943571140849185725417245763047403804445415726302354045169;
+ let new_value = 8510347201346963732943571140849185725417245763047403804445415726302354045170;
+ let old_entry = [key, old_value];
+ let old_root = 4202917944688591919039016743999516589372052081571553696755434379850460220435;
+ let mut siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
+ siblings[253] = 18126944477260144816572365299295230808286197301459941187567621915186392922196;
+ siblings[254] = 14443001516360873457302534246953033880503978184674311810335857314606403404583;
+ siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
+ let big_tree_root = 4657474665007910823901096287220097081233671466281873230928277896829046731272;
+ assert(update(new_value, old_entry, old_root, siblings) == big_tree_root);
+}
\ No newline at end of file
diff --git a/packages/circuits/noir/crates/smt_bn254/src/utils.nr b/packages/circuits/noir/crates/smt_bn254/src/utils.nr
new file mode 100644
index 000000000..c9e5c48e8
--- /dev/null
+++ b/packages/circuits/noir/crates/smt_bn254/src/utils.nr
@@ -0,0 +1,118 @@
+use dep::std::hash::poseidon;
+use crate::TREE_DEPTH;
+
+/*
+ * Transforms the key into a big endian array of bits so that when determining the position
+ * of a tree entry starting from the root node, the first array element to look at is the last.
+ * @param key The key of a tree entry
+ * @returns The path that determines the position of a key in the tree
+ */
+pub fn key_to_path(key: Field) -> [u1] {
+ key.to_be_bits(TREE_DEPTH)
+}
+
+/*
+ * Calculates the poseidon bn254 hash. If a leaf node is created, the number 1 is appended to
+ * the hashed values as follows: H(k,v,1).
+ * @param left The left element of the hashing pair
+ * @param right The right element of the hashing pair
+ * @param is_leaf Whether what is created is a leaf node or not
+ * @returns The poseidon hash
+ */
+pub fn hash(left: Field, right: Field, is_leaf: bool) -> Field {
+ if (is_leaf) {
+ poseidon::bn254::hash_3([left, right, 1])
+ } else {
+ poseidon::bn254::hash_2([left, right])
+ }
+}
+
+
+/*
+ * Calculates the root for a given tree entry based on the passed array of siblings and the passed path.
+ * @param entry The key and value of an entry [k, v]
+ * @param siblings Contains the siblings from bottom to top
+ * @param path The position of the entry in the tree as represented by bits from bottom to top
+ * @returns The calculated root node
+ */
+pub fn calculcate_root(entry: [Field; 2], siblings: [Field; TREE_DEPTH], path: [u1]) -> Field {
+ // serves as container for hashes and is initialized to be the leaf node
+ let mut node = hash(entry[0], entry[1], true);
+ // iterates over the list of siblings until the first sibling is found
+ // arbitrarily assigns the sibling to be the left and the node to be the
+ // right element of the hashing pair unless the path indicates the opposite
+ // order in which case the order is changed. The new hash is stored in the container
+ // until the root node is reached and returned.
+ for i in 0..TREE_DEPTH {
+ let sibling = siblings[i];
+ if sibling != 0 {
+ let mut left = sibling;
+ let mut right = node;
+ if path[i] == 0 {
+ left = node;
+ right = sibling;
+ }
+ node = hash(left, right, false);
+ }
+ }
+ node
+}
+
+/*
+ * Calculates two roots for a given leaf entry based on the passed array of siblings: one root
+ * for if the leaf entry was included in the tree and one for if the leaf entry was not included
+ * in the tree. This is useful for efficiently proving the membership of leaf entries for a
+ * tree while simultaneously modifying the tree.
+ * @param entry The key and value of an entry [k, v]
+ * @param siblings Contains the siblings from bottom to top
+ * @returns Two root nodes: the first one doesn't include entry, the second does
+ */
+pub fn calculate_two_roots(entry: [Field; 2], siblings: [Field; TREE_DEPTH]) -> (Field, Field) {
+ let path = key_to_path(entry[0]);
+ // long_path_node is a container for hashes to derive the root node for the tree that
+ // includes the entry
+ let mut long_path_node = hash(entry[0], entry[1], true);
+ // short_path_node is a container for hashes to derive the root node for the tree that
+ // doesn't include the entry
+ let mut short_path_node: Field = 0;
+ // iterate over the levels of the tree from bottom to top
+ for i in 0..TREE_DEPTH {
+ let sibling = siblings[i];
+ // After the first sibling is found, the processes are started to calculate the two root nodes.
+ // The calculation of the root node that includes the entry is comparable to `calculcate_root`.
+ // To calc the root node that doesn't include entry, the first sibling is put into the container
+ // and starting from each SUBSEQUENT iteration it is hashed with its sibling and the resulting hash
+ // again stored in the container until the root is reached
+ if sibling != 0 {
+ if siblings[i - 1] == 0 {
+ short_path_node = siblings[i];
+ }
+ if path[i] == 0 {
+ long_path_node = hash(long_path_node, sibling, false);
+ if(short_path_node != sibling) {
+ short_path_node = hash(short_path_node, sibling, false);
+ }
+ } else {
+ long_path_node = hash(sibling, long_path_node, false);
+ if(short_path_node != sibling) {
+ short_path_node = hash(sibling, short_path_node, false);
+ }
+ }
+ }
+ }
+ (short_path_node, long_path_node)
+}
+
+#[test]
+fn test_hash_leaf_node() {
+ let key = 20438969296305830531522370305156029982566273432331621236661483041446048135547;
+ let value = 17150136040889237739751319962368206600863150289695545292530539263327413090784;
+ assert(hash(key, value, true) == 7842913321420301106140788486336995496832503825951977327575501561489697540557);
+}
+
+#[test]
+fn test_hash_node() {
+ let left = 7901139023013500965671892970738327280683439536483910503527659926438417204955;
+ let right = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
+ assert(hash(left, right, false) == 4657474665007910823901096287220097081233671466281873230928277896829046731272);
+}
diff --git a/packages/circuits/package.json b/packages/circuits/package.json
index 6e0f284c9..614f836d5 100644
--- a/packages/circuits/package.json
+++ b/packages/circuits/package.json
@@ -7,7 +7,6 @@
"circom/**/*.circom",
"!circom/main",
"!circom/test",
- "noir",
"LICENSE",
"README.md"
],
@@ -16,7 +15,7 @@
"scripts": {
"circom:compile": "circomkit compile",
"circom:setup": "circomkit setup",
- "test": "mocha"
+ "test": "mocha && cd noir && nargo test"
},
"dependencies": {
"circomlib": "^2.0.5"
diff --git a/packages/eddsa-poseidon/LICENSE b/packages/eddsa-poseidon/LICENSE
new file mode 100644
index 000000000..4377091ec
--- /dev/null
+++ b/packages/eddsa-poseidon/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Ethereum Foundation
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/packages/eddsa-poseidon/README.md b/packages/eddsa-poseidon/README.md
new file mode 100644
index 000000000..c18b6d8f1
--- /dev/null
+++ b/packages/eddsa-poseidon/README.md
@@ -0,0 +1,144 @@
+
+
+ EdDSA Poseidon
+
+ A JavaScript EdDSA library for secure signing and verification using Poseidon and the Baby Jubjub elliptic curve.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+| This package offers a simplified JavaScript codebase essential for creating and validating digital signatures using EdDSA and Poseidon. It's built upon the Baby Jubjub elliptic curve, ensuring seamless integration with [Circom](https://github.com/iden3/circom) and enhancing the developer experience. |
+| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+
+- Super lightweight: [**~33kB**](https://bundlephobia.com/package/@zk-kit/eddsa-poseidon) (minified)
+- Compatible with browsers and NodeJS
+- TS type support
+- Comprehensive code [documentation](https://zkkit.pse.dev/modules/_zk_kit_eddsa_poseidon.html)
+- Full test coverage
+
+👾 Would you like to try it now? Explore it now on [Ceditor](https://ceditor.cedoor.dev/52787e4ad57d2f2076648d509efc3448)!
+
+> [!WARNING]
+> This library has **not** been audited.
+
+## 🛠 Install
+
+### npm or yarn
+
+Install the `@zk-kit/eddsa-poseidon` package and its peer dependencies with npm:
+
+```bash
+npm i @zk-kit/eddsa-poseidon
+```
+
+or yarn:
+
+```bash
+yarn add @zk-kit/eddsa-poseidon
+```
+
+### CDN
+
+You can also load it using a `script` tag using [unpkg](https://unpkg.com/):
+
+```html
+
+```
+
+or [JSDelivr](https://www.jsdelivr.com/):
+
+```html
+
+```
+
+## 📜 Usage
+
+\# **derivePublicKey**(privateKey: _BigNumberish_): _Point\_
+
+```typescript
+import { derivePublicKey } from "@zk-kit/eddsa-poseidon"
+
+const privateKey = "secret"
+const publicKey = derivePublicKey(privateKey)
+
+console.log(publicKey)
+/*
+[
+ '17191193026255111087474416516591393721975640005415762645730433950079177536248',
+ '13751717961795090314625781035919035073474308127816403910435238282697898234143'
+]
+*/
+```
+
+\# **signMessage**(privateKey: _BigNumberish_, message: _BigNumberish_): _Signature\_
+
+```typescript
+import { derivePublicKey, signMessage } from "@zk-kit/eddsa-poseidon"
+
+const privateKey = "secret"
+const publicKey = derivePublicKey(privateKey)
+
+const message = "message"
+const signature = signMessage(privateKey, message)
+
+console.log(signature)
+/*
+{
+ R8: [
+ '12949573675545142400102669657964360005184873166024880859462384824349649539693',
+ '18253636630408169174294927826710424418689461166073329946402765380454102840608'
+ ],
+ S: '701803947557694254685424075312408605924670918868054593580245088593184746870'
+}
+*/
+```
+
+\# **verifySignature**(message: _BigNumberish_, signature: _Signature_, publicKey: _Point_): _boolean_
+
+```typescript
+import { derivePublicKey, signMessage, verifySignature } from "@zk-kit/eddsa-poseidon"
+
+const privateKey = "secret"
+const publicKey = derivePublicKey(privateKey)
+
+const message = "message"
+const signature = signMessage(privateKey, message)
+
+const response = verifySignature(message, signature, publicKey)
+
+console.log(response) // true
+```
diff --git a/packages/eddsa-poseidon/build.tsconfig.json b/packages/eddsa-poseidon/build.tsconfig.json
new file mode 100644
index 000000000..2d4a1d6da
--- /dev/null
+++ b/packages/eddsa-poseidon/build.tsconfig.json
@@ -0,0 +1,8 @@
+{
+ "extends": "../../tsconfig.json",
+ "compilerOptions": {
+ "baseUrl": ".",
+ "declarationDir": "dist/types"
+ },
+ "include": ["src"]
+}
diff --git a/packages/eddsa-poseidon/package.json b/packages/eddsa-poseidon/package.json
new file mode 100644
index 000000000..65ec53e8b
--- /dev/null
+++ b/packages/eddsa-poseidon/package.json
@@ -0,0 +1,46 @@
+{
+ "name": "@zk-kit/eddsa-poseidon",
+ "version": "0.2.0",
+ "description": "A JavaScript EdDSA library for secure signing and verification using Poseidon and the Baby Jubjub elliptic curve.",
+ "license": "MIT",
+ "iife": "dist/index.js",
+ "unpkg": "dist/index.min.js",
+ "jsdelivr": "dist/index.min.js",
+ "main": "dist/index.node.js",
+ "exports": {
+ "import": "./dist/index.mjs",
+ "require": "./dist/index.node.js",
+ "types": "./dist/types/index.d.ts"
+ },
+ "types": "dist/types/index.d.ts",
+ "files": [
+ "dist/",
+ "src/",
+ "LICENSE",
+ "README.md"
+ ],
+ "repository": "https://github.com/privacy-scaling-explorations/zk-kit",
+ "homepage": "https://github.com/privacy-scaling-explorations/zk-kit/tree/main/packages/eddsa-poseidon",
+ "bugs": {
+ "url": "https://github.com/privacy-scaling-explorations/zk-kit.git/issues"
+ },
+ "scripts": {
+ "build": "rimraf dist && rollup -c rollup.config.ts --configPlugin typescript && yarn build:iife",
+ "build:iife": "rollup -c rollup.iife.config.ts --configPlugin typescript",
+ "prepublishOnly": "yarn build"
+ },
+ "publishConfig": {
+ "access": "public"
+ },
+ "devDependencies": {
+ "@rollup/plugin-commonjs": "^25.0.7",
+ "@rollup/plugin-node-resolve": "^15.2.3",
+ "blake-hash": "2.0.0",
+ "circomlibjs": "0.0.8",
+ "poseidon-lite": "0.2.0",
+ "rollup-plugin-cleanup": "^3.2.1",
+ "rollup-plugin-polyfill-node": "^0.13.0",
+ "rollup-plugin-terser": "^7.0.2",
+ "rollup-plugin-typescript2": "^0.31.2"
+ }
+}
diff --git a/packages/eddsa-poseidon/rollup.config.ts b/packages/eddsa-poseidon/rollup.config.ts
new file mode 100644
index 000000000..7d2994bd6
--- /dev/null
+++ b/packages/eddsa-poseidon/rollup.config.ts
@@ -0,0 +1,32 @@
+import commonjs from "@rollup/plugin-commonjs"
+import { nodeResolve } from "@rollup/plugin-node-resolve"
+import fs from "fs"
+import cleanup from "rollup-plugin-cleanup"
+import typescript from "rollup-plugin-typescript2"
+
+const pkg = JSON.parse(fs.readFileSync("./package.json", "utf8"))
+const banner = `/**
+ * @module ${pkg.name}
+ * @version ${pkg.version}
+ * @file ${pkg.description}
+ * @copyright Ethereum Foundation ${new Date().getFullYear()}
+ * @license ${pkg.license}
+ * @see [Github]{@link ${pkg.homepage}}
+*/`
+
+export default {
+ input: "src/index.ts",
+ output: [
+ { file: pkg.exports.require, format: "cjs", banner },
+ { file: pkg.exports.import, format: "es", banner }
+ ],
+ external: [],
+ plugins: [
+ typescript({ tsconfig: "./build.tsconfig.json", useTsconfigDeclarationDir: true }),
+ commonjs(),
+ nodeResolve({
+ preferBuiltins: true
+ }),
+ cleanup({ comments: "jsdoc" })
+ ]
+}
diff --git a/packages/eddsa-poseidon/rollup.iife.config.ts b/packages/eddsa-poseidon/rollup.iife.config.ts
new file mode 100644
index 000000000..f7936620a
--- /dev/null
+++ b/packages/eddsa-poseidon/rollup.iife.config.ts
@@ -0,0 +1,47 @@
+import commonjs from "@rollup/plugin-commonjs"
+import { nodeResolve } from "@rollup/plugin-node-resolve"
+import fs from "fs"
+import nodePolyfills from "rollup-plugin-polyfill-node"
+import cleanup from "rollup-plugin-cleanup"
+import { terser } from "rollup-plugin-terser"
+import typescript from "rollup-plugin-typescript2"
+
+const pkg = JSON.parse(fs.readFileSync("./package.json", "utf8"))
+const banner = `/**
+ * @module ${pkg.name}
+ * @version ${pkg.version}
+ * @file ${pkg.description}
+ * @copyright Ethereum Foundation ${new Date().getFullYear()}
+ * @license ${pkg.license}
+ * @see [Github]{@link ${pkg.homepage}}
+*/`
+
+const name = pkg.name.split("/")[1].replace(/[-/]./g, (x: string) => x.toUpperCase()[1])
+
+export default {
+ input: "src/index.ts",
+ output: [
+ {
+ file: pkg.iife,
+ name,
+ format: "iife",
+ banner
+ },
+ {
+ file: pkg.unpkg,
+ name,
+ format: "iife",
+ plugins: [terser({ output: { preamble: banner } })]
+ }
+ ],
+ external: [],
+ plugins: [
+ typescript({ tsconfig: "./build.tsconfig.json", useTsconfigDeclarationDir: true }),
+ commonjs(),
+ nodeResolve({
+ preferBuiltins: true
+ }),
+ nodePolyfills({ include: null }),
+ cleanup({ comments: "jsdoc" })
+ ]
+}
diff --git a/packages/eddsa-poseidon/src/babyjub.ts b/packages/eddsa-poseidon/src/babyjub.ts
new file mode 100644
index 000000000..749fb3ee5
--- /dev/null
+++ b/packages/eddsa-poseidon/src/babyjub.ts
@@ -0,0 +1,96 @@
+import Field from "./field"
+import * as scalar from "./scalar"
+import { Point } from "./types"
+
+// Spec: https://eips.ethereum.org/EIPS/eip-2494
+
+// 'r' is the alt_bn128 prime order.
+export const r = BigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617")
+
+// 'F' (F_r) is the prime finite field with r elements.
+export const Fr = new Field(r)
+
+// Base8 is the base point used to generate other points on the curve.
+export const Base8: Point = [
+ Fr.e(BigInt("5299619240641551281634865583518297030282874472190772894086521144482721001553")),
+ Fr.e(BigInt("16950150798460657717958625567821834550301663161624707787222815936182638968203"))
+]
+
+// Let E be the twisted Edwards elliptic curve defined over 'F_r'
+// described by the equation 'ax^2 + y^2 = 1 + dx^2y^2'.
+
+// 'a' and 'd' are the parameters of the equation:
+const a = Fr.e(BigInt("168700"))
+const d = Fr.e(BigInt("168696"))
+
+// We call Baby Jubjub the curve 'E(F_r)', that is, the subgroup of 'F_r'-rational points of 'E'.
+
+// 'order' is order of the elliptic curve 'E'.
+export const order = BigInt("21888242871839275222246405745257275088614511777268538073601725287587578984328")
+export const subOrder = scalar.shiftRight(order, BigInt(3))
+
+/**
+ * Performs point addition on the Baby Jubjub elliptic curve,
+ * calculating a third point from two given points.
+ * Let P1 = (x1, y1) and P2 = (x2, y2) be two arbitrary points of the curve.
+ * Then P1 + P2 = (x3, y3) is calculated in the following way:
+ * x3 = (x1*y2 + y1*x2)/(1 + d*x1*x2*y1*y2)
+ * y3 = (y1*y2 - a*x1*x2)/(1 - d*x1*x2*y1*y2)
+ * @param p1 - First point on the curve.
+ * @param p2 - Second point on the curve.
+ * @returns Resultant third point on the curve.
+ */
+export function addPoint(p1: Point, p2: Point): Point {
+ // beta = x1*y2
+ const beta = Fr.mul(p1[0], p2[1])
+ // gamma = y1*x2
+ const gamma = Fr.mul(p1[1], p2[0])
+ // delta = (y1-(a*x1))*(x2+y2)
+ const delta = Fr.mul(Fr.sub(p1[1], Fr.mul(a, p1[0])), Fr.add(p2[0], p2[1]))
+
+ // x1*x2*y1*y2
+ const tau = Fr.mul(beta, gamma)
+ // d*x1*x2*y1*y2
+ const dtau = Fr.mul(d, tau)
+
+ // x3 = (x1*y2 + y1*x2)/(1 + d*x1*x2*y1*y2)
+ const p3x = Fr.div(Fr.add(beta, gamma), Fr.add(Fr.one, dtau))
+ // y3 = (y1*y2 - a*x1*x2)/(1 - d*x1*x2*y1*y2)
+ const p3y = Fr.div(Fr.add(delta, Fr.sub(Fr.mul(a, beta), gamma)), Fr.sub(Fr.one, dtau))
+
+ return [p3x, p3y]
+}
+
+/**
+ * Performs a scalar multiplication by starting from the 'base' point and 'adding'
+ * it to itself 'e' times.
+ * @param base - The base point used as a starting point.
+ * @param e - A secret number representing the private key.
+ * @returns The resulting point representing the public key.
+ */
+export function mulPointEscalar(base: Point, e: bigint): Point {
+ let res: Point = [Fr.e(BigInt(0)), Fr.e(BigInt(1))]
+ let rem: bigint = e
+ let exp: Point = base
+
+ while (!scalar.isZero(rem)) {
+ if (scalar.isOdd(rem)) {
+ res = addPoint(res, exp)
+ }
+
+ exp = addPoint(exp, exp)
+ rem = scalar.shiftRight(rem, BigInt(1))
+ }
+
+ return res
+}
+
+export function inCurve(p: Point) {
+ p[0] = BigInt(p[0])
+ p[1] = BigInt(p[1])
+
+ const x2 = Fr.square(p[0])
+ const y2 = Fr.square(p[1])
+
+ return Fr.eq(Fr.add(Fr.mul(a, x2), y2), Fr.add(Fr.one, Fr.mul(Fr.mul(x2, y2), d)))
+}
diff --git a/packages/eddsa-poseidon/src/blake.ts b/packages/eddsa-poseidon/src/blake.ts
new file mode 100644
index 000000000..9b258ea8e
--- /dev/null
+++ b/packages/eddsa-poseidon/src/blake.ts
@@ -0,0 +1,10 @@
+// @ts-ignore
+import { Blake512 } from "blake-hash/lib"
+
+export default function hash(message: Buffer): Buffer {
+ const engine = new Blake512()
+
+ engine.update(message)
+
+ return engine.digest()
+}
diff --git a/packages/eddsa-poseidon/src/eddsa-poseidon.ts b/packages/eddsa-poseidon/src/eddsa-poseidon.ts
new file mode 100644
index 000000000..cc85c4625
--- /dev/null
+++ b/packages/eddsa-poseidon/src/eddsa-poseidon.ts
@@ -0,0 +1,108 @@
+import { poseidon5 } from "poseidon-lite/poseidon5"
+import * as babyjub from "./babyjub"
+import blake from "./blake"
+import Field from "./field"
+import * as scalar from "./scalar"
+import { BigNumberish, Point, Signature } from "./types"
+import * as utils from "./utils"
+
+/**
+ * Derives a public key from a given private key using the
+ * {@link https://eips.ethereum.org/EIPS/eip-2494|Baby Jubjub} elliptic curve.
+ * This function utilizes the Baby Jubjub elliptic curve for cryptographic operations.
+ * The private key should be securely stored and managed, and it should never be exposed
+ * or transmitted in an unsecured manner.
+ * @param privateKey - The private key used for generating the public key.
+ * @returns The derived public key.
+ */
+export function derivePublicKey(privateKey: BigNumberish): Point {
+ // Convert the private key to buffer.
+ privateKey = utils.checkPrivateKey(privateKey)
+
+ const hash = blake(privateKey)
+
+ const s = utils.leBuff2int(utils.pruneBuffer(hash.slice(0, 32)))
+
+ const publicKey = babyjub.mulPointEscalar(babyjub.Base8, scalar.shiftRight(s, BigInt(3)))
+
+ // Convert the public key values to strings so that it can easily be exported as a JSON.
+ return [publicKey[0].toString(), publicKey[1].toString()]
+}
+
+/**
+ * Signs a message using the provided private key, employing Poseidon hashing and
+ * EdDSA with the Baby Jubjub elliptic curve.
+ * @param privateKey - The private key used to sign the message.
+ * @param message - The message to be signed.
+ * @returns The signature object, containing properties relevant to EdDSA signatures, such as 'R8' and 'S' values.
+ */
+export function signMessage(privateKey: BigNumberish, message: BigNumberish): Signature {
+ // Convert the private key to buffer.
+ privateKey = utils.checkPrivateKey(privateKey)
+
+ // Convert the message to big integer.
+ message = utils.checkMessage(message)
+
+ const hash = blake(privateKey)
+
+ const sBuff = utils.pruneBuffer(hash.slice(0, 32))
+ const s = utils.leBuff2int(sBuff)
+ const A = babyjub.mulPointEscalar(babyjub.Base8, scalar.shiftRight(s, BigInt(3)))
+
+ const msgBuff = utils.leInt2Buff(message)
+
+ const rBuff = blake(Buffer.concat([hash.slice(32, 64), msgBuff]))
+
+ const Fr = new Field(babyjub.subOrder)
+ const r = Fr.e(utils.leBuff2int(rBuff))
+
+ const R8 = babyjub.mulPointEscalar(babyjub.Base8, r)
+ const hm = poseidon5([R8[0], R8[1], A[0], A[1], message])
+ const S = Fr.add(r, Fr.mul(hm, s))
+
+ // Convert the signature values to strings so that it can easily be exported as a JSON.
+ return {
+ R8: [R8[0].toString(), R8[1].toString()],
+ S: S.toString()
+ }
+}
+
+/**
+ * Verifies an EdDSA signature using the Baby Jubjub elliptic curve and Poseidon hash function.
+ * @param message - The original message that was signed.
+ * @param signature - The EdDSA signature to be verified.
+ * @param publicKey - The public key associated with the private key used to sign the message.
+ * @returns Returns true if the signature is valid and corresponds to the message and public key, false otherwise.
+ */
+export function verifySignature(message: BigNumberish, signature: Signature, publicKey: Point): boolean {
+ if (
+ !utils.isPoint(publicKey) ||
+ !utils.isSignature(signature) ||
+ !babyjub.inCurve(signature.R8) ||
+ !babyjub.inCurve(publicKey) ||
+ BigInt(signature.S) >= babyjub.subOrder
+ ) {
+ return false
+ }
+
+ // Convert the message to big integer.
+ message = utils.checkMessage(message)
+
+ // Convert the signature values to big integers for calculations.
+ const _signature: Signature = {
+ R8: [BigInt(signature.R8[0]), BigInt(signature.R8[1])],
+ S: BigInt(signature.S)
+ }
+ // Convert the public key values to big integers for calculations.
+ const _publicKey: Point = [BigInt(publicKey[0]), BigInt(publicKey[1])]
+
+ const hm = poseidon5([signature.R8[0], signature.R8[1], publicKey[0], publicKey[1], message])
+
+ const pLeft = babyjub.mulPointEscalar(babyjub.Base8, BigInt(signature.S))
+ let pRight = babyjub.mulPointEscalar(_publicKey, scalar.mul(hm, BigInt(8)))
+
+ pRight = babyjub.addPoint(_signature.R8, pRight)
+
+ // Return true if the points match.
+ return babyjub.Fr.eq(BigInt(pLeft[0]), pRight[0]) && babyjub.Fr.eq(pLeft[1], pRight[1])
+}
diff --git a/packages/eddsa-poseidon/src/field.ts b/packages/eddsa-poseidon/src/field.ts
new file mode 100644
index 000000000..660bdcd74
--- /dev/null
+++ b/packages/eddsa-poseidon/src/field.ts
@@ -0,0 +1,59 @@
+export default class Field {
+ one = BigInt(1)
+ zero = BigInt(0)
+
+ _order: bigint
+
+ constructor(order: bigint) {
+ this._order = order
+ }
+
+ e(res: bigint): bigint {
+ return res >= this._order ? res % this._order : res
+ }
+
+ mul(a: bigint, b: bigint): bigint {
+ return (a * b) % this._order
+ }
+
+ sub(a: bigint, b: bigint): bigint {
+ return a >= b ? a - b : this._order - b + a
+ }
+
+ add(a: bigint, b: bigint): bigint {
+ const res = a + b
+
+ return res >= this._order ? res - this._order : res
+ }
+
+ inv(a: bigint): bigint {
+ let t = this.zero
+ let r = this._order
+ let newt = this.one
+ let newr = a % this._order
+
+ while (newr) {
+ const q = r / newr
+ ;[t, newt] = [newt, t - q * newt]
+ ;[r, newr] = [newr, r - q * newr]
+ }
+
+ if (t < this.zero) {
+ t += this._order
+ }
+
+ return t
+ }
+
+ div(a: bigint, b: bigint): bigint {
+ return this.mul(a, this.inv(b))
+ }
+
+ eq(a: bigint, b: bigint): boolean {
+ return a === b
+ }
+
+ square(a: bigint): bigint {
+ return (a * a) % this._order
+ }
+}
diff --git a/packages/eddsa-poseidon/src/index.ts b/packages/eddsa-poseidon/src/index.ts
new file mode 100644
index 000000000..1ce366761
--- /dev/null
+++ b/packages/eddsa-poseidon/src/index.ts
@@ -0,0 +1,2 @@
+export * from "./eddsa-poseidon"
+export * from "./types"
diff --git a/packages/eddsa-poseidon/src/scalar.ts b/packages/eddsa-poseidon/src/scalar.ts
new file mode 100644
index 000000000..e77d7f02d
--- /dev/null
+++ b/packages/eddsa-poseidon/src/scalar.ts
@@ -0,0 +1,15 @@
+export function isZero(a: bigint): boolean {
+ return !a
+}
+
+export function isOdd(a: bigint): boolean {
+ return (a & BigInt(1)) === BigInt(1)
+}
+
+export function shiftRight(a: bigint, n: bigint): bigint {
+ return a >> n
+}
+
+export function mul(a: bigint, b: bigint): bigint {
+ return a * b
+}
diff --git a/packages/eddsa-poseidon/src/types/index.ts b/packages/eddsa-poseidon/src/types/index.ts
new file mode 100644
index 000000000..eefb42be3
--- /dev/null
+++ b/packages/eddsa-poseidon/src/types/index.ts
@@ -0,0 +1,10 @@
+export type BigNumber = bigint | string
+
+export type BigNumberish = BigNumber | number | Buffer
+
+export type Point<N = BigNumber> = [N, N]
+
+export type Signature<N = BigNumber> = {
+    R8: Point<N>
+    S: N
+}
diff --git a/packages/eddsa-poseidon/src/utils.ts b/packages/eddsa-poseidon/src/utils.ts
new file mode 100644
index 000000000..9e652ecc9
--- /dev/null
+++ b/packages/eddsa-poseidon/src/utils.ts
@@ -0,0 +1,128 @@
+import { BigNumber, BigNumberish, Point, Signature } from "./types"
+
+export function pruneBuffer(buff: Buffer): Buffer {
+ buff[0] &= 0xf8
+ buff[31] &= 0x7f
+ buff[31] |= 0x40
+
+ return buff
+}
+
+function isStringifiedBigint(s: BigNumber | string): boolean {
+ try {
+ BigInt(s)
+
+ return true
+ } catch (e) {
+ return false
+ }
+}
+
+export function isHexadecimal(s: string) {
+ return /^(0x|0X)[0-9a-fA-F]+$/.test(s)
+}
+
+export function isBigNumberish(value: BigNumberish): boolean {
+ return (
+ typeof value === "number" ||
+ typeof value === "bigint" ||
+ (typeof value === "string" && isStringifiedBigint(value)) ||
+ (typeof value === "string" && isHexadecimal(value)) ||
+ Buffer.isBuffer(value)
+ )
+}
+
+export function isPoint(point: Point): boolean {
+ return Array.isArray(point) && point.length === 2 && isStringifiedBigint(point[0]) && isStringifiedBigint(point[1])
+}
+
+export function isSignature(signature: Signature): boolean {
+ return (
+ typeof signature === "object" &&
+ Object.prototype.hasOwnProperty.call(signature, "R8") &&
+ Object.prototype.hasOwnProperty.call(signature, "S") &&
+ isPoint(signature.R8) &&
+ isStringifiedBigint(signature.S)
+ )
+}
+
+export function int2hex(n: bigint) {
+ let hex = n.toString(16)
+
+ // Ensure even length.
+ if (hex.length % 2 !== 0) {
+ hex = `0${hex}`
+ }
+
+ return hex
+}
+
+export function bigNumberish2Buff(value: BigNumberish): Buffer {
+ if (
+ typeof value === "number" ||
+ typeof value === "bigint" ||
+ (typeof value === "string" && isStringifiedBigint(value))
+ ) {
+ const hex = int2hex(BigInt(value))
+
+ return Buffer.from(hex, "hex")
+ }
+
+ return value as Buffer
+}
+
+export function buff2int(buffer: Buffer): bigint {
+ return BigInt(`0x${buffer.toString("hex")}`)
+}
+
+export function bigNumberish2BigNumber(value: BigNumberish): bigint {
+ if (
+ typeof value === "number" ||
+ typeof value === "bigint" ||
+ (typeof value === "string" && isStringifiedBigint(value)) ||
+ (typeof value === "string" && isHexadecimal(value))
+ ) {
+ return BigInt(value)
+ }
+
+ return buff2int(value as Buffer)
+}
+
+export function leBuff2int(buffer: Buffer): bigint {
+ return BigInt(`0x${buffer.reverse().toString("hex")}`)
+}
+
+export function leInt2Buff(n: bigint): Buffer {
+ const hex = int2hex(n)
+
+ // Allocate buffer of the desired size, filled with zeros.
+ const buffer = Buffer.alloc(32, 0)
+
+ Buffer.from(hex, "hex").reverse().copy(buffer)
+
+ return buffer
+}
+
+export function checkPrivateKey(privateKey: BigNumberish): Buffer {
+ if (isBigNumberish(privateKey)) {
+ return bigNumberish2Buff(privateKey)
+ }
+
+ if (typeof privateKey !== "string") {
+ throw TypeError("Invalid private key type. Supported types: number, bigint, buffer, string.")
+ }
+
+ return Buffer.from(privateKey)
+}
+
+export function checkMessage(message: BigNumberish): bigint {
+ if (isBigNumberish(message)) {
+ return bigNumberish2BigNumber(message)
+ }
+
+ if (typeof message !== "string") {
+ throw TypeError("Invalid message type. Supported types: number, bigint, buffer, string.")
+ }
+
+ return buff2int(Buffer.from(message))
+}
diff --git a/packages/eddsa-poseidon/tests/index.test.ts b/packages/eddsa-poseidon/tests/index.test.ts
new file mode 100644
index 000000000..ccd178705
--- /dev/null
+++ b/packages/eddsa-poseidon/tests/index.test.ts
@@ -0,0 +1,200 @@
+import { eddsa } from "circomlibjs"
+import crypto from "crypto"
+import { derivePublicKey, signMessage, verifySignature } from "../src"
+
+describe("EdDSAPoseidon", () => {
+ const privateKey = "secret"
+ const message = BigInt(2)
+
+ it("Should derive a public key from a private key (string)", async () => {
+ const publicKey = derivePublicKey(privateKey)
+
+ const circomlibPublicKey = eddsa.prv2pub(privateKey)
+
+ expect(publicKey[0]).toBe(circomlibPublicKey[0].toString())
+ expect(publicKey[1]).toBe(circomlibPublicKey[1].toString())
+ })
+
+ it("Should derive a public key from a private key (hexadecimal)", async () => {
+ const privateKey = "0x12"
+
+ const publicKey = derivePublicKey(privateKey)
+
+ const circomlibPublicKey = eddsa.prv2pub(Buffer.from(privateKey.slice(2), "hex"))
+
+ expect(publicKey[0]).toBe(circomlibPublicKey[0].toString())
+ expect(publicKey[1]).toBe(circomlibPublicKey[1].toString())
+ })
+
+ it("Should derive a public key from a private key (buffer)", async () => {
+ const privateKey = Buffer.from("secret")
+
+ const publicKey = derivePublicKey(privateKey)
+
+ const circomlibPublicKey = eddsa.prv2pub(privateKey)
+
+ expect(publicKey[0]).toBe(circomlibPublicKey[0].toString())
+ expect(publicKey[1]).toBe(circomlibPublicKey[1].toString())
+ })
+
+ it("Should derive a public key from a private key (bigint)", async () => {
+ const privateKey = BigInt(22)
+
+ const publicKey = derivePublicKey(privateKey)
+
+ const circomlibPublicKey = eddsa.prv2pub(Buffer.from(privateKey.toString(16), "hex"))
+
+ expect(publicKey[0]).toBe(circomlibPublicKey[0].toString())
+ expect(publicKey[1]).toBe(circomlibPublicKey[1].toString())
+ })
+
+ it("Should derive a public key from a private key (number)", async () => {
+ const privateKey = 22
+
+ const publicKey = derivePublicKey(privateKey)
+
+ const circomlibPublicKey = eddsa.prv2pub(Buffer.from(privateKey.toString(16), "hex"))
+
+ expect(publicKey[0]).toBe(circomlibPublicKey[0].toString())
+ expect(publicKey[1]).toBe(circomlibPublicKey[1].toString())
+ })
+
+ it("Should throw an error if the secret type is not supported", async () => {
+ const privateKey = true
+
+ const fun = () => derivePublicKey(privateKey as any)
+
+ expect(fun).toThrow("Invalid private key type.")
+ })
+
+ it("Should sign a message (bigint)", async () => {
+ const signature = signMessage(privateKey, message)
+
+ const circomlibSignature = eddsa.signPoseidon(privateKey, message)
+
+ expect(signature.R8[0]).toBe(circomlibSignature.R8[0].toString())
+ expect(signature.R8[1]).toBe(circomlibSignature.R8[1].toString())
+ expect(signature.S).toBe(circomlibSignature.S.toString())
+ })
+
+ it("Should sign a message (number)", async () => {
+ const message = 22
+
+ const signature = signMessage(privateKey, message)
+
+ const circomlibSignature = eddsa.signPoseidon(privateKey, BigInt(message))
+
+ expect(signature.R8[0]).toBe(circomlibSignature.R8[0].toString())
+ expect(signature.R8[1]).toBe(circomlibSignature.R8[1].toString())
+ expect(signature.S).toBe(circomlibSignature.S.toString())
+ })
+
+ it("Should sign a message (hexadecimal)", async () => {
+ const message = "0x12"
+
+ const signature = signMessage(privateKey, message)
+
+ const circomlibSignature = eddsa.signPoseidon(privateKey, BigInt(message))
+
+ expect(signature.R8[0]).toBe(circomlibSignature.R8[0].toString())
+ expect(signature.R8[1]).toBe(circomlibSignature.R8[1].toString())
+ expect(signature.S).toBe(circomlibSignature.S.toString())
+ })
+
+ it("Should sign a message (buffer)", async () => {
+ const message = Buffer.from("message")
+
+ const signature = signMessage(privateKey, message)
+
+ const circomlibSignature = eddsa.signPoseidon(privateKey, BigInt(`0x${message.toString("hex")}`))
+
+ expect(signature.R8[0]).toBe(circomlibSignature.R8[0].toString())
+ expect(signature.R8[1]).toBe(circomlibSignature.R8[1].toString())
+ expect(signature.S).toBe(circomlibSignature.S.toString())
+ })
+
+ it("Should sign a message (string)", async () => {
+ const message = "message"
+
+ const signature = signMessage(privateKey, message)
+
+ const circomlibSignature = eddsa.signPoseidon(privateKey, BigInt(`0x${Buffer.from(message).toString("hex")}`))
+
+ expect(signature.R8[0]).toBe(circomlibSignature.R8[0].toString())
+ expect(signature.R8[1]).toBe(circomlibSignature.R8[1].toString())
+ expect(signature.S).toBe(circomlibSignature.S.toString())
+ })
+
+ it("Should throw an error if the message type is not supported", async () => {
+ const message = true
+
+ const fun = () => signMessage(privateKey, message as any)
+
+ expect(fun).toThrow("Invalid message type.")
+ })
+
+ it("Should verify a signature", async () => {
+ const publicKey = derivePublicKey(privateKey)
+ const signature = signMessage(privateKey, message)
+
+ expect(verifySignature(message, signature, publicKey)).toBeTruthy()
+ })
+
+ it("Should not verify a signature if the public key is malformed", async () => {
+ const publicKey = derivePublicKey(privateKey)
+ const signature = signMessage(privateKey, message)
+
+ publicKey[1] = 3 as any
+
+ expect(verifySignature(message, signature, publicKey)).toBeFalsy()
+ })
+
+ it("Should not verify a signature if the signature is malformed", async () => {
+ const publicKey = derivePublicKey(privateKey)
+ const signature = signMessage(privateKey, message)
+
+ signature.S = 3 as any
+
+ expect(verifySignature(message, signature, publicKey)).toBeFalsy()
+ })
+
+ it("Should not verify a signature if the signature is not on the curve", async () => {
+ const publicKey = derivePublicKey(privateKey)
+ const signature = signMessage(privateKey, message)
+
+ signature.R8[1] = BigInt(3).toString()
+
+ expect(verifySignature(message, signature, publicKey)).toBeFalsy()
+ })
+
+ it("Should not verify a signature if the public key is not on the curve", async () => {
+ const publicKey = derivePublicKey(privateKey)
+ const signature = signMessage(privateKey, message)
+
+ publicKey[1] = BigInt(3).toString()
+
+ expect(verifySignature(message, signature, publicKey)).toBeFalsy()
+ })
+
+ it("Should not verify a signature S value exceeds the predefined sub order", async () => {
+ const publicKey = derivePublicKey(privateKey)
+ const signature = signMessage(privateKey, message)
+
+ signature.S = "3421888242871839275222246405745257275088614511777268538073601725287587578984328"
+
+ expect(verifySignature(message, signature, publicKey)).toBeFalsy()
+ })
+
+ it("Should derive a public key from N random private keys", async () => {
+ for (let i = 0, len = 10; i < len; i += 1) {
+ const privateKey = crypto.randomBytes(32)
+
+ const publicKey = derivePublicKey(privateKey)
+
+ const circomlibPublicKey = eddsa.prv2pub(privateKey)
+
+ expect(publicKey[0]).toBe(circomlibPublicKey[0].toString())
+ expect(publicKey[1]).toBe(circomlibPublicKey[1].toString())
+ }
+ })
+})
diff --git a/packages/eddsa-poseidon/tsconfig.json b/packages/eddsa-poseidon/tsconfig.json
new file mode 100644
index 000000000..81e592a16
--- /dev/null
+++ b/packages/eddsa-poseidon/tsconfig.json
@@ -0,0 +1,4 @@
+{
+ "extends": "../../tsconfig.json",
+ "include": ["src", "tests", "rollup.config.ts", "rollup.iife.config.ts"]
+}
diff --git a/packages/eddsa-poseidon/typedoc.json b/packages/eddsa-poseidon/typedoc.json
new file mode 100644
index 000000000..77a471c91
--- /dev/null
+++ b/packages/eddsa-poseidon/typedoc.json
@@ -0,0 +1,3 @@
+{
+ "entryPoints": ["src/index.ts"]
+}
diff --git a/packages/imt.sol/contracts/internal/InternalBinaryIMT.sol b/packages/imt.sol/contracts/internal/InternalBinaryIMT.sol
index 3588aec2d..273f89554 100644
--- a/packages/imt.sol/contracts/internal/InternalBinaryIMT.sol
+++ b/packages/imt.sol/contracts/internal/InternalBinaryIMT.sol
@@ -16,6 +16,15 @@ struct BinaryIMTData {
bool useDefaultZeroes;
}
+error ValueGreaterThanSnarkScalarField();
+error DepthNotSupported();
+error WrongDefaultZeroIndex();
+error TreeIsFull();
+error NewLeafCannotEqualOldLeaf();
+error LeafDoesNotExist();
+error LeafIndexOutOfRange();
+error WrongMerkleProofPath();
+
/// @title Incremental binary Merkle tree.
/// @dev The incremental tree allows to calculate the root hash each time a leaf is added, ensuring
/// the integrity of the tree.
@@ -88,7 +97,7 @@ library InternalBinaryIMT {
if (index == 30) return Z_30;
if (index == 31) return Z_31;
if (index == 32) return Z_32;
- revert("IncrementalBinaryTree: defaultZero bad index");
+ revert WrongDefaultZeroIndex();
}
/// @dev Initializes a tree.
@@ -96,8 +105,11 @@ library InternalBinaryIMT {
/// @param depth: Depth of the tree.
/// @param zero: Zero value to be used.
function _init(BinaryIMTData storage self, uint256 depth, uint256 zero) internal {
- require(zero < SNARK_SCALAR_FIELD, "BinaryIMT: leaf must be < SNARK_SCALAR_FIELD");
- require(depth > 0 && depth <= MAX_DEPTH, "BinaryIMT: tree depth must be between 1 and 32");
+ if (zero >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (depth <= 0 || depth > MAX_DEPTH) {
+ revert DepthNotSupported();
+ }
self.depth = depth;
@@ -114,7 +126,9 @@ library InternalBinaryIMT {
}
function _initWithDefaultZeroes(BinaryIMTData storage self, uint256 depth) internal {
- require(depth > 0 && depth <= MAX_DEPTH, "BinaryIMT: tree depth must be between 1 and 32");
+ if (depth <= 0 || depth > MAX_DEPTH) {
+ revert DepthNotSupported();
+ }
self.depth = depth;
self.useDefaultZeroes = true;
@@ -128,8 +142,11 @@ library InternalBinaryIMT {
function _insert(BinaryIMTData storage self, uint256 leaf) internal returns (uint256) {
uint256 depth = self.depth;
- require(leaf < SNARK_SCALAR_FIELD, "BinaryIMT: leaf must be < SNARK_SCALAR_FIELD");
- require(self.numberOfLeaves < 2 ** depth, "BinaryIMT: tree is full");
+ if (leaf >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (self.numberOfLeaves >= 2 ** depth) {
+ revert TreeIsFull();
+ }
uint256 index = self.numberOfLeaves;
uint256 hash = leaf;
@@ -168,9 +185,13 @@ library InternalBinaryIMT {
uint256[] calldata proofSiblings,
uint8[] calldata proofPathIndices
) internal {
- require(newLeaf != leaf, "BinaryIMT: new leaf cannot be the same as the old one");
- require(newLeaf < SNARK_SCALAR_FIELD, "BinaryIMT: new leaf must be < SNARK_SCALAR_FIELD");
- require(_verify(self, leaf, proofSiblings, proofPathIndices), "BinaryIMT: leaf is not part of the tree");
+ if (newLeaf == leaf) {
+ revert NewLeafCannotEqualOldLeaf();
+ } else if (newLeaf >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (!_verify(self, leaf, proofSiblings, proofPathIndices)) {
+ revert LeafDoesNotExist();
+ }
uint256 depth = self.depth;
uint256 hash = newLeaf;
@@ -197,7 +218,10 @@ library InternalBinaryIMT {
++i;
}
}
- require(updateIndex < self.numberOfLeaves, "BinaryIMT: leaf index out of range");
+
+ if (updateIndex >= self.numberOfLeaves) {
+ revert LeafIndexOutOfRange();
+ }
self.root = hash;
}
@@ -228,19 +252,22 @@ library InternalBinaryIMT {
uint256[] calldata proofSiblings,
uint8[] calldata proofPathIndices
) internal view returns (bool) {
- require(leaf < SNARK_SCALAR_FIELD, "BinaryIMT: leaf must be < SNARK_SCALAR_FIELD");
uint256 depth = self.depth;
- require(
- proofPathIndices.length == depth && proofSiblings.length == depth,
- "BinaryIMT: length of path is not correct"
- );
+
+ if (leaf >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (proofPathIndices.length != depth || proofSiblings.length != depth) {
+ revert WrongMerkleProofPath();
+ }
uint256 hash = leaf;
for (uint8 i = 0; i < depth; ) {
- require(proofSiblings[i] < SNARK_SCALAR_FIELD, "BinaryIMT: sibling node must be < SNARK_SCALAR_FIELD");
-
- require(proofPathIndices[i] == 1 || proofPathIndices[i] == 0, "BinaryIMT: path index is neither 0 nor 1");
+ if (proofSiblings[i] >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (proofPathIndices[i] != 1 && proofPathIndices[i] != 0) {
+ revert WrongMerkleProofPath();
+ }
if (proofPathIndices[i] == 0) {
hash = PoseidonT3.hash([hash, proofSiblings[i]]);
diff --git a/packages/imt.sol/contracts/internal/InternalLeanIMT.sol b/packages/imt.sol/contracts/internal/InternalLeanIMT.sol
index 30fb0733e..be0b695d9 100644
--- a/packages/imt.sol/contracts/internal/InternalLeanIMT.sol
+++ b/packages/imt.sol/contracts/internal/InternalLeanIMT.sol
@@ -23,12 +23,13 @@ error LeafCannotBeZero();
error LeafAlreadyExists();
error LeafDoesNotExist();
-// The LeanIMT is an optimized version of the BinaryIMT.
-// This implementation eliminates the use of zeroes, and make the tree depth dynamic.
-// When a node doesn't have the right child, instead of using a zero hash as in the BinaryIMT,
-// the node's value becomes that of its left child. Furthermore, rather than utilizing a static tree depth,
-// it is updated based on the number of leaves in the tree. This approach
-// results in the calculation of significantly fewer hashes, making the tree more efficient.
+/// @title Lean Incremental binary Merkle tree.
+/// @dev The LeanIMT is an optimized version of the BinaryIMT.
+/// This implementation eliminates the use of zeroes, and make the tree depth dynamic.
+/// When a node doesn't have the right child, instead of using a zero hash as in the BinaryIMT,
+/// the node's value becomes that of its left child. Furthermore, rather than utilizing a static tree depth,
+/// it is updated based on the number of leaves in the tree. This approach
+/// results in the calculation of significantly fewer hashes, making the tree more efficient.
library InternalLeanIMT {
/// @dev Inserts a new leaf into the incremental merkle tree.
/// The function ensures that the leaf is valid according to the
diff --git a/packages/imt.sol/contracts/internal/InternalQuinaryIMT.sol b/packages/imt.sol/contracts/internal/InternalQuinaryIMT.sol
index 27e89546c..93f60732c 100644
--- a/packages/imt.sol/contracts/internal/InternalQuinaryIMT.sol
+++ b/packages/imt.sol/contracts/internal/InternalQuinaryIMT.sol
@@ -15,6 +15,14 @@ struct QuinaryIMTData {
mapping(uint256 => uint256[5]) lastSubtrees; // Caching these values is essential to efficient appends.
}
+error ValueGreaterThanSnarkScalarField();
+error DepthNotSupported();
+error TreeIsFull();
+error NewLeafCannotEqualOldLeaf();
+error LeafDoesNotExist();
+error LeafIndexOutOfRange();
+error WrongMerkleProofPath();
+
/// @title Incremental quinary Merkle tree.
/// @dev The incremental tree allows to calculate the root hash each time a leaf is added, ensuring
/// the integrity of the tree.
@@ -24,8 +32,11 @@ library InternalQuinaryIMT {
/// @param depth: Depth of the tree.
/// @param zero: Zero value to be used.
function _init(QuinaryIMTData storage self, uint256 depth, uint256 zero) internal {
- require(zero < SNARK_SCALAR_FIELD, "QuinaryIMT: leaf must be < SNARK_SCALAR_FIELD");
- require(depth > 0 && depth <= MAX_DEPTH, "QuinaryIMT: tree depth must be between 1 and 32");
+ if (zero >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (depth <= 0 || depth > MAX_DEPTH) {
+ revert DepthNotSupported();
+ }
self.depth = depth;
@@ -56,8 +67,11 @@ library InternalQuinaryIMT {
function _insert(QuinaryIMTData storage self, uint256 leaf) internal {
uint256 depth = self.depth;
- require(leaf < SNARK_SCALAR_FIELD, "QuinaryIMT: leaf must be < SNARK_SCALAR_FIELD");
- require(self.numberOfLeaves < 5 ** depth, "QuinaryIMT: tree is full");
+ if (leaf >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (self.numberOfLeaves >= 5 ** depth) {
+ revert TreeIsFull();
+ }
uint256 index = self.numberOfLeaves;
uint256 hash = leaf;
@@ -101,9 +115,13 @@ library InternalQuinaryIMT {
uint256[4][] calldata proofSiblings,
uint8[] calldata proofPathIndices
) internal {
- require(newLeaf != leaf, "QuinaryIMT: new leaf cannot be the same as the old one");
- require(newLeaf < SNARK_SCALAR_FIELD, "QuinaryIMT: new leaf must be < SNARK_SCALAR_FIELD");
- require(_verify(self, leaf, proofSiblings, proofPathIndices), "QuinaryIMT: leaf is not part of the tree");
+ if (newLeaf == leaf) {
+ revert NewLeafCannotEqualOldLeaf();
+ } else if (newLeaf >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (!_verify(self, leaf, proofSiblings, proofPathIndices)) {
+ revert LeafDoesNotExist();
+ }
uint256 depth = self.depth;
uint256 hash = newLeaf;
@@ -136,7 +154,10 @@ library InternalQuinaryIMT {
++i;
}
}
- require(updateIndex < self.numberOfLeaves, "QuinaryIMT: leaf index out of range");
+
+ if (updateIndex >= self.numberOfLeaves) {
+ revert LeafIndexOutOfRange();
+ }
self.root = hash;
}
@@ -167,22 +188,22 @@ library InternalQuinaryIMT {
uint256[4][] calldata proofSiblings,
uint8[] calldata proofPathIndices
) internal view returns (bool) {
- require(leaf < SNARK_SCALAR_FIELD, "QuinaryIMT: leaf must be < SNARK_SCALAR_FIELD");
uint256 depth = self.depth;
- require(
- proofPathIndices.length == depth && proofSiblings.length == depth,
- "QuinaryIMT: length of path is not correct"
- );
+
+ if (leaf >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (proofPathIndices.length != depth || proofSiblings.length != depth) {
+ revert WrongMerkleProofPath();
+ }
uint256 hash = leaf;
for (uint8 i = 0; i < depth; ) {
uint256[5] memory nodes;
- require(
- proofPathIndices[i] >= 0 && proofPathIndices[i] < 5,
- "QuinaryIMT: path index is not between 0 and 4"
- );
+ if (proofPathIndices[i] < 0 || proofPathIndices[i] >= 5) {
+ revert WrongMerkleProofPath();
+ }
for (uint8 j = 0; j < 5; ) {
if (j < proofPathIndices[i]) {
diff --git a/packages/imt.sol/test/BinaryIMT.ts b/packages/imt.sol/test/BinaryIMT.ts
index 2055848d8..ef11ac4c0 100644
--- a/packages/imt.sol/test/BinaryIMT.ts
+++ b/packages/imt.sol/test/BinaryIMT.ts
@@ -3,16 +3,18 @@ import { expect } from "chai"
import { poseidon } from "circomlibjs"
import { run } from "hardhat"
import { poseidon2 } from "poseidon-lite"
-import { BinaryIMTTest } from "../typechain-types"
+import { BinaryIMT, BinaryIMTTest } from "../typechain-types"
describe("BinaryIMT", () => {
const SNARK_SCALAR_FIELD = BigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617")
+ let binaryIMT: BinaryIMT
let binaryIMTTest: BinaryIMTTest
let jsBinaryIMT: JSBinaryIMT
beforeEach(async () => {
- const { contract } = await run("deploy:imt-test", { library: "BinaryIMT", logs: false })
+ const { library, contract } = await run("deploy:imt-test", { library: "BinaryIMT", logs: false })
+ binaryIMT = library
binaryIMTTest = contract
jsBinaryIMT = new JSBinaryIMT(poseidon2, 16, 0, 2)
})
@@ -21,7 +23,7 @@ describe("BinaryIMT", () => {
it("Should not create a tree with a depth > 32", async () => {
const transaction = binaryIMTTest.init(33)
- await expect(transaction).to.be.revertedWith("BinaryIMT: tree depth must be between 1 and 32")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "DepthNotSupported")
})
it("Should create a tree", async () => {
@@ -46,7 +48,7 @@ describe("BinaryIMT", () => {
it("Should not insert a leaf if its value is > SNARK_SCALAR_FIELD", async () => {
const transaction = binaryIMTTest.insert(SNARK_SCALAR_FIELD)
- await expect(transaction).to.be.revertedWith("BinaryIMT: leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should insert a leaf in a tree", async () => {
@@ -112,7 +114,7 @@ describe("BinaryIMT", () => {
const transaction = binaryIMTTest.insert(3)
- await expect(transaction).to.be.revertedWith("BinaryIMT: tree is full")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "TreeIsFull")
})
})
@@ -123,7 +125,7 @@ describe("BinaryIMT", () => {
const transaction = binaryIMTTest.update(1, 1, [0, 1], [0, 1])
- await expect(transaction).to.be.revertedWith("BinaryIMT: new leaf cannot be the same as the old one")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "NewLeafCannotEqualOldLeaf")
})
it("Should not update a leaf if its new value is > SNARK_SCALAR_FIELD", async () => {
@@ -132,7 +134,7 @@ describe("BinaryIMT", () => {
const transaction = binaryIMTTest.update(1, SNARK_SCALAR_FIELD, [0, 1], [0, 1])
- await expect(transaction).to.be.revertedWith("BinaryIMT: new leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should not update a leaf if its original value is > SNARK_SCALAR_FIELD", async () => {
@@ -141,7 +143,7 @@ describe("BinaryIMT", () => {
const transaction = binaryIMTTest.update(SNARK_SCALAR_FIELD, 2, [0, 1], [0, 1])
- await expect(transaction).to.be.revertedWith("BinaryIMT: leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should not update a leaf if the path indices are wrong", async () => {
@@ -162,7 +164,7 @@ describe("BinaryIMT", () => {
pathIndices
)
- await expect(transaction).to.be.revertedWith("BinaryIMT: path index is neither 0 nor 1")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "WrongMerkleProofPath")
})
it("Should not update a leaf if the old leaf is wrong", async () => {
@@ -181,7 +183,7 @@ describe("BinaryIMT", () => {
pathIndices
)
- await expect(transaction).to.be.revertedWith("BinaryIMT: leaf is not part of the tree")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "LeafDoesNotExist")
})
it("Should update a leaf", async () => {
@@ -244,7 +246,7 @@ describe("BinaryIMT", () => {
pathIndices
)
- await expect(transaction).to.be.revertedWith("BinaryIMT: leaf index out of range")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "LeafIndexOutOfRange")
})
})
@@ -252,7 +254,7 @@ describe("BinaryIMT", () => {
it("Should not remove a leaf if its value is > SNARK_SCALAR_FIELD", async () => {
const transaction = binaryIMTTest.remove(SNARK_SCALAR_FIELD, [0, 1], [0, 1])
- await expect(transaction).to.be.revertedWith("BinaryIMT: leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should not remove a leaf that does not exist", async () => {
@@ -270,7 +272,7 @@ describe("BinaryIMT", () => {
pathIndices
)
- await expect(transaction).to.be.revertedWith("BinaryIMT: leaf is not part of the tree")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "LeafDoesNotExist")
})
it("Should remove a leaf", async () => {
diff --git a/packages/imt.sol/test/QuinaryIMT.ts b/packages/imt.sol/test/QuinaryIMT.ts
index 18d9be383..674a6ba08 100644
--- a/packages/imt.sol/test/QuinaryIMT.ts
+++ b/packages/imt.sol/test/QuinaryIMT.ts
@@ -2,16 +2,18 @@ import { IMT as JSQuinaryIMT } from "@zk-kit/imt"
import { expect } from "chai"
import { run } from "hardhat"
import { poseidon5 } from "poseidon-lite"
-import { QuinaryIMTTest } from "../typechain-types"
+import { QuinaryIMT, QuinaryIMTTest } from "../typechain-types"
describe("QuinaryIMT", () => {
const SNARK_SCALAR_FIELD = BigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617")
+ let quinaryIMT: QuinaryIMT
let quinaryIMTTest: QuinaryIMTTest
let jsQuinaryIMT: JSQuinaryIMT
beforeEach(async () => {
- const { contract } = await run("deploy:imt-test", { library: "QuinaryIMT", arity: 5, logs: false })
+ const { library, contract } = await run("deploy:imt-test", { library: "QuinaryIMT", arity: 5, logs: false })
+ quinaryIMT = library
quinaryIMTTest = contract
jsQuinaryIMT = new JSQuinaryIMT(poseidon5, 16, 0, 5)
})
@@ -20,7 +22,7 @@ describe("QuinaryIMT", () => {
it("Should not create a tree with a depth > 32", async () => {
const transaction = quinaryIMTTest.init(33)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: tree depth must be between 1 and 32")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "DepthNotSupported")
})
it("Should create a tree", async () => {
@@ -36,7 +38,7 @@ describe("QuinaryIMT", () => {
it("Should not insert a leaf if its value is > SNARK_SCALAR_FIELD", async () => {
const transaction = quinaryIMTTest.insert(SNARK_SCALAR_FIELD)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should insert a leaf in a tree", async () => {
@@ -75,7 +77,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.insert(3)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: tree is full")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "TreeIsFull")
})
})
@@ -86,7 +88,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.update(1, 1, [[0, 1, 2, 3]], [0])
- await expect(transaction).to.be.revertedWith("QuinaryIMT: new leaf cannot be the same as the old one")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "NewLeafCannotEqualOldLeaf")
})
it("Should not update a leaf if its new value is > SNARK_SCALAR_FIELD", async () => {
@@ -95,7 +97,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.update(1, SNARK_SCALAR_FIELD, [[0, 1, 2, 3]], [0])
- await expect(transaction).to.be.revertedWith("QuinaryIMT: new leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should not update a leaf if its original value is > SNARK_SCALAR_FIELD", async () => {
@@ -104,7 +106,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.update(SNARK_SCALAR_FIELD, 2, [[0, 1, 2, 3]], [0])
- await expect(transaction).to.be.revertedWith("QuinaryIMT: leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should not update a leaf if the path indices are wrong", async () => {
@@ -120,7 +122,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.update(1, 2, siblings, pathIndices)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: path index is not between 0 and 4")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "WrongMerkleProofPath")
})
it("Should not update a leaf if the old leaf is wrong", async () => {
@@ -134,7 +136,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.update(2, 3, siblings, pathIndices)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: leaf is not part of the tree")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "LeafDoesNotExist")
})
it("Should update a leaf", async () => {
@@ -187,7 +189,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.update(0, leaf, siblings, pathIndices)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: leaf index out of range")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "LeafIndexOutOfRange")
})
})
@@ -195,7 +197,7 @@ describe("QuinaryIMT", () => {
it("Should not remove a leaf if its value is > SNARK_SCALAR_FIELD", async () => {
const transaction = quinaryIMTTest.remove(SNARK_SCALAR_FIELD, [[0, 1, 2, 3]], [0])
- await expect(transaction).to.be.revertedWith("QuinaryIMT: leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should not remove a leaf that does not exist", async () => {
@@ -209,7 +211,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.remove(2, siblings, pathIndices)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: leaf is not part of the tree")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "LeafDoesNotExist")
})
it("Should remove a leaf", async () => {
diff --git a/yarn.lock b/yarn.lock
index 614d0c04c..710c2ee35 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2779,7 +2779,7 @@ __metadata:
languageName: node
linkType: hard
-"@jridgewell/sourcemap-codec@npm:^1.4.10, @jridgewell/sourcemap-codec@npm:^1.4.13, @jridgewell/sourcemap-codec@npm:^1.4.14":
+"@jridgewell/sourcemap-codec@npm:^1.4.10, @jridgewell/sourcemap-codec@npm:^1.4.13, @jridgewell/sourcemap-codec@npm:^1.4.14, @jridgewell/sourcemap-codec@npm:^1.4.15":
version: 1.4.15
resolution: "@jridgewell/sourcemap-codec@npm:1.4.15"
checksum: b881c7e503db3fc7f3c1f35a1dd2655a188cc51a3612d76efc8a6eb74728bef5606e6758ee77423e564092b4a518aba569bbb21c9bac5ab7a35b0c6ae7e344c8
@@ -3305,6 +3305,41 @@ __metadata:
languageName: node
linkType: hard
+"@rollup/plugin-commonjs@npm:^25.0.7":
+ version: 25.0.7
+ resolution: "@rollup/plugin-commonjs@npm:25.0.7"
+ dependencies:
+ "@rollup/pluginutils": ^5.0.1
+ commondir: ^1.0.1
+ estree-walker: ^2.0.2
+ glob: ^8.0.3
+ is-reference: 1.2.1
+ magic-string: ^0.30.3
+ peerDependencies:
+ rollup: ^2.68.0||^3.0.0||^4.0.0
+ peerDependenciesMeta:
+ rollup:
+ optional: true
+ checksum: 052e11839a9edc556eda5dcc759ab816dcc57e9f0f905a1e6e14fff954eaa6b1e2d0d544f5bd18d863993c5eba43d8ac9c19d9bb53b1c3b1213f32cfc9d50b2e
+ languageName: node
+ linkType: hard
+
+"@rollup/plugin-inject@npm:^5.0.4":
+ version: 5.0.5
+ resolution: "@rollup/plugin-inject@npm:5.0.5"
+ dependencies:
+ "@rollup/pluginutils": ^5.0.1
+ estree-walker: ^2.0.2
+ magic-string: ^0.30.3
+ peerDependencies:
+ rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0
+ peerDependenciesMeta:
+ rollup:
+ optional: true
+ checksum: 22cb772fd6f7178308b2ece95cdde5f8615f6257197832166294552a7e4c0d3976dc996cbfa6470af3151d8b86c00091aa93da5f4db6ec563f11b6db29fd1b63
+ languageName: node
+ linkType: hard
+
"@rollup/plugin-json@npm:^5.0.1":
version: 5.0.2
resolution: "@rollup/plugin-json@npm:5.0.2"
@@ -3319,7 +3354,7 @@ __metadata:
languageName: node
linkType: hard
-"@rollup/plugin-node-resolve@npm:^15.0.2":
+"@rollup/plugin-node-resolve@npm:^15.0.2, @rollup/plugin-node-resolve@npm:^15.2.3":
version: 15.2.3
resolution: "@rollup/plugin-node-resolve@npm:15.2.3"
dependencies:
@@ -4401,6 +4436,22 @@ __metadata:
languageName: unknown
linkType: soft
+"@zk-kit/eddsa-poseidon@workspace:packages/eddsa-poseidon":
+ version: 0.0.0-use.local
+ resolution: "@zk-kit/eddsa-poseidon@workspace:packages/eddsa-poseidon"
+ dependencies:
+ "@rollup/plugin-commonjs": ^25.0.7
+ "@rollup/plugin-node-resolve": ^15.2.3
+ blake-hash: 2.0.0
+ circomlibjs: 0.0.8
+ poseidon-lite: 0.2.0
+ rollup-plugin-cleanup: ^3.2.1
+ rollup-plugin-polyfill-node: ^0.13.0
+ rollup-plugin-terser: ^7.0.2
+ rollup-plugin-typescript2: ^0.31.2
+ languageName: unknown
+ linkType: soft
+
"@zk-kit/groth16@0.4.0, @zk-kit/groth16@workspace:packages/groth16":
version: 0.0.0-use.local
resolution: "@zk-kit/groth16@workspace:packages/groth16"
@@ -5493,6 +5544,18 @@ __metadata:
languageName: node
linkType: hard
+"blake-hash@npm:2.0.0, blake-hash@npm:^2.0.0":
+ version: 2.0.0
+ resolution: "blake-hash@npm:2.0.0"
+ dependencies:
+ node-addon-api: ^3.0.0
+ node-gyp: latest
+ node-gyp-build: ^4.2.2
+ readable-stream: ^3.6.0
+ checksum: a0d9a8f3953b986d3b30a741a6c000dedcc9a03b1318f52cc01ae62d18829ba6cb1a4d8cbe74785abfdc952a21db410984523bd457764aca716162cfd3ca8ea4
+ languageName: node
+ linkType: hard
+
"blake-hash@npm:^1.1.0":
version: 1.1.1
resolution: "blake-hash@npm:1.1.1"
@@ -5505,18 +5568,6 @@ __metadata:
languageName: node
linkType: hard
-"blake-hash@npm:^2.0.0":
- version: 2.0.0
- resolution: "blake-hash@npm:2.0.0"
- dependencies:
- node-addon-api: ^3.0.0
- node-gyp: latest
- node-gyp-build: ^4.2.2
- readable-stream: ^3.6.0
- checksum: a0d9a8f3953b986d3b30a741a6c000dedcc9a03b1318f52cc01ae62d18829ba6cb1a4d8cbe74785abfdc952a21db410984523bd457764aca716162cfd3ca8ea4
- languageName: node
- linkType: hard
-
"blake2b-wasm@git+https://github.com/jbaylina/blake2b-wasm.git":
version: 2.1.0
resolution: "blake2b-wasm@https://github.com/jbaylina/blake2b-wasm.git#commit=0d5f024b212429c7f50a7f533aa3a2406b5b42b3"
@@ -6450,7 +6501,7 @@ __metadata:
languageName: node
linkType: hard
-"circomlibjs@npm:^0.0.8":
+"circomlibjs@npm:0.0.8, circomlibjs@npm:^0.0.8":
version: 0.0.8
resolution: "circomlibjs@npm:0.0.8"
dependencies:
@@ -13458,6 +13509,15 @@ __metadata:
languageName: node
linkType: hard
+"magic-string@npm:^0.30.3":
+ version: 0.30.5
+ resolution: "magic-string@npm:0.30.5"
+ dependencies:
+ "@jridgewell/sourcemap-codec": ^1.4.15
+ checksum: da10fecff0c0a7d3faf756913ce62bd6d5e7b0402be48c3b27bfd651b90e29677e279069a63b764bcdc1b8ecdcdb898f29a5c5ec510f2323e8d62ee057a6eb18
+ languageName: node
+ linkType: hard
+
"make-dir@npm:^1.0.0":
version: 1.3.0
resolution: "make-dir@npm:1.3.0"
@@ -15193,7 +15253,7 @@ __metadata:
languageName: node
linkType: hard
-"poseidon-lite@npm:^0.2.0":
+"poseidon-lite@npm:0.2.0, poseidon-lite@npm:^0.2.0":
version: 0.2.0
resolution: "poseidon-lite@npm:0.2.0"
checksum: c47c6fd0a29a78ca1f7cf6ccb8b0c4f4e72930d944e63425e36f60c15d37fb0aeca30b8a22a30640ed68d631142282c0b8308da83b1a2b2bb92b87f5a2432c93
@@ -16091,6 +16151,17 @@ __metadata:
languageName: node
linkType: hard
+"rollup-plugin-polyfill-node@npm:^0.13.0":
+ version: 0.13.0
+ resolution: "rollup-plugin-polyfill-node@npm:0.13.0"
+ dependencies:
+ "@rollup/plugin-inject": ^5.0.4
+ peerDependencies:
+ rollup: ^1.20.0 || ^2.0.0 || ^3.0.0 || ^4.0.0
+ checksum: 73c5b9086955afa108c940c13205fab4cece149d020a3faa696c5711bbb391d11aecd4c913ad2cc5ac24f9d43a4969ad8d087d085dd8d423dece45b6be4039bb
+ languageName: node
+ linkType: hard
+
"rollup-plugin-terser@npm:^7.0.2":
version: 7.0.2
resolution: "rollup-plugin-terser@npm:7.0.2"
|