Date: Sat, 2 Dec 2023 10:29:57 +0000
Subject: [PATCH 17/35] docs(eddsa-poseidon): update README.md
---
packages/eddsa-poseidon/README.md | 73 ++++++++++++++++++++++++++++++-
1 file changed, 72 insertions(+), 1 deletion(-)
diff --git a/packages/eddsa-poseidon/README.md b/packages/eddsa-poseidon/README.md
index 9a2f5be2f..de45a2511 100644
--- a/packages/eddsa-poseidon/README.md
+++ b/packages/eddsa-poseidon/README.md
@@ -2,7 +2,7 @@
EdDSA Poseidon
- A JavaScript EdDSA library for secure signing and verification using the Baby Jubjub elliptic curve.
+ A JavaScript EdDSA library for secure signing and verification using Poseidon and the Baby Jubjub elliptic curve.
@@ -41,6 +41,18 @@
+| This package offers a simplified JavaScript codebase essential for creating and validating digital signatures using EdDSA and Poseidon. It's built upon the Baby Jubjub elliptic curve, ensuring seamless integration with [Circom](https://github.com/iden3/circom) and enhancing the developer experience. |
+| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+
+- Super lightweight: [**~33kB**](https://bundlephobia.com/package/@zk-kit/eddsa-poseidon@0.1.0) (minified)
+- Compatible with browsers and NodeJS
+- TS type support
+- Comprehensive code [documentation](https://zkkit.pse.dev/modules/_zk_kit_eddsa_poseidon.html)
+- Full test coverage
+
+> [!WARNING]
+> This library has **not** been audited.
+
## 🛠 Install
### npm or yarn
@@ -56,3 +68,62 @@ or yarn:
```bash
yarn add @zk-kit/eddsa-poseidon
```
+
+## 📜 Usage
+
+\# **derivePublicKey**(privateKey: _BigNumberish_): _Point\_
+
+```typescript
+import { derivePublicKey } from "@zk-kit/eddsa-poseidon"
+
+const privateKey = "secret"
+const publicKey = derivePublicKey(privateKey)
+
+console.log(publicKey)
+/*
+[
+ '17191193026255111087474416516591393721975640005415762645730433950079177536248',
+ '13751717961795090314625781035919035073474308127816403910435238282697898234143'
+]
+*/
+```
+
+\# **signMessage**(privateKey: _BigNumberish_, message: _BigNumberish_): _Signature\_
+
+```typescript
+import { derivePublicKey, signMessage } from "@zk-kit/eddsa-poseidon"
+
+const privateKey = "secret"
+const publicKey = derivePublicKey(privateKey)
+
+const message = "message"
+const signature = signMessage(privateKey, message)
+
+console.log(signature)
+/*
+{
+ R8: [
+ '12949573675545142400102669657964360005184873166024880859462384824349649539693',
+ '18253636630408169174294927826710424418689461166073329946402765380454102840608'
+ ],
+ S: '701803947557694254685424075312408605924670918868054593580245088593184746870'
+}
+*/
+```
+
+
+\# **verifySignature**(message: _BigNumberish_, signature: _Signature_, publicKey: _Point_): _boolean_
+
+```typescript
+import { derivePublicKey, signMessage, verifySignature } from "@zk-kit/eddsa-poseidon"
+
+const privateKey = "secret"
+const publicKey = derivePublicKey(privateKey)
+
+const message = "message"
+const signature = signMessage(privateKey, message)
+
+const response = verifySignature(message, signature, publicKey)
+
+console.log(response) // true
+```
From d1a576e584a3ab001ccf29fac7a1e84744444c88 Mon Sep 17 00:00:00 2001
From: cedoor
Date: Sat, 2 Dec 2023 10:31:04 +0000
Subject: [PATCH 18/35] chore(eddsa-poseidon): update package description
---
packages/eddsa-poseidon/package.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/packages/eddsa-poseidon/package.json b/packages/eddsa-poseidon/package.json
index 772501184..549ef3832 100644
--- a/packages/eddsa-poseidon/package.json
+++ b/packages/eddsa-poseidon/package.json
@@ -1,7 +1,7 @@
{
"name": "@zk-kit/eddsa-poseidon",
"version": "0.1.0",
- "description": "A JavaScript EdDSA library for secure signing and verification using the Baby Jubjub elliptic curve.",
+ "description": "A JavaScript EdDSA library for secure signing and verification using Poseidon and the Baby Jubjub elliptic curve.",
"license": "MIT",
"main": "dist/index.node.js",
"exports": {
From 7c424a51ef4aca158a663031890250b5d5048015 Mon Sep 17 00:00:00 2001
From: cedoor
Date: Sat, 2 Dec 2023 10:31:23 +0000
Subject: [PATCH 19/35] style(eddsa-poseidon): update function name
---
packages/eddsa-poseidon/src/eddsa-poseidon.ts | 4 +--
packages/eddsa-poseidon/tests/index.test.ts | 28 +++++++++----------
2 files changed, 16 insertions(+), 16 deletions(-)
diff --git a/packages/eddsa-poseidon/src/eddsa-poseidon.ts b/packages/eddsa-poseidon/src/eddsa-poseidon.ts
index 6ab1c05a9..7c1a8096d 100644
--- a/packages/eddsa-poseidon/src/eddsa-poseidon.ts
+++ b/packages/eddsa-poseidon/src/eddsa-poseidon.ts
@@ -7,7 +7,7 @@ import { BigNumberish, Point, Signature } from "./types"
import * as utils from "./utils"
/**
- * Generates a public key from a given private key using the
+ * Derives a public key from a given private key using the
* {@link https://eips.ethereum.org/EIPS/eip-2494|Baby Jubjub} elliptic curve.
* This function utilizes the Baby Jubjub elliptic curve for cryptographic operations.
* The private key should be securely stored and managed, and it should never be exposed
@@ -15,7 +15,7 @@ import * as utils from "./utils"
* @param privateKey - The private key used for generating the public key.
* @returns The derived public key.
*/
-export function generatePublicKey(privateKey: BigNumberish | string): Point {
+export function derivePublicKey(privateKey: BigNumberish): Point {
// Convert the private key to buffer.
privateKey = utils.checkPrivateKey(privateKey)
diff --git a/packages/eddsa-poseidon/tests/index.test.ts b/packages/eddsa-poseidon/tests/index.test.ts
index 3fe01b52d..ccd178705 100644
--- a/packages/eddsa-poseidon/tests/index.test.ts
+++ b/packages/eddsa-poseidon/tests/index.test.ts
@@ -1,13 +1,13 @@
import { eddsa } from "circomlibjs"
import crypto from "crypto"
-import { generatePublicKey, signMessage, verifySignature } from "../src"
+import { derivePublicKey, signMessage, verifySignature } from "../src"
describe("EdDSAPoseidon", () => {
const privateKey = "secret"
const message = BigInt(2)
it("Should derive a public key from a private key (string)", async () => {
- const publicKey = generatePublicKey(privateKey)
+ const publicKey = derivePublicKey(privateKey)
const circomlibPublicKey = eddsa.prv2pub(privateKey)
@@ -18,7 +18,7 @@ describe("EdDSAPoseidon", () => {
it("Should derive a public key from a private key (hexadecimal)", async () => {
const privateKey = "0x12"
- const publicKey = generatePublicKey(privateKey)
+ const publicKey = derivePublicKey(privateKey)
const circomlibPublicKey = eddsa.prv2pub(Buffer.from(privateKey.slice(2), "hex"))
@@ -29,7 +29,7 @@ describe("EdDSAPoseidon", () => {
it("Should derive a public key from a private key (buffer)", async () => {
const privateKey = Buffer.from("secret")
- const publicKey = generatePublicKey(privateKey)
+ const publicKey = derivePublicKey(privateKey)
const circomlibPublicKey = eddsa.prv2pub(privateKey)
@@ -40,7 +40,7 @@ describe("EdDSAPoseidon", () => {
it("Should derive a public key from a private key (bigint)", async () => {
const privateKey = BigInt(22)
- const publicKey = generatePublicKey(privateKey)
+ const publicKey = derivePublicKey(privateKey)
const circomlibPublicKey = eddsa.prv2pub(Buffer.from(privateKey.toString(16), "hex"))
@@ -51,7 +51,7 @@ describe("EdDSAPoseidon", () => {
it("Should derive a public key from a private key (number)", async () => {
const privateKey = 22
- const publicKey = generatePublicKey(privateKey)
+ const publicKey = derivePublicKey(privateKey)
const circomlibPublicKey = eddsa.prv2pub(Buffer.from(privateKey.toString(16), "hex"))
@@ -62,7 +62,7 @@ describe("EdDSAPoseidon", () => {
it("Should throw an error if the secret type is not supported", async () => {
const privateKey = true
- const fun = () => generatePublicKey(privateKey as any)
+ const fun = () => derivePublicKey(privateKey as any)
expect(fun).toThrow("Invalid private key type.")
})
@@ -134,14 +134,14 @@ describe("EdDSAPoseidon", () => {
})
it("Should verify a signature", async () => {
- const publicKey = generatePublicKey(privateKey)
+ const publicKey = derivePublicKey(privateKey)
const signature = signMessage(privateKey, message)
expect(verifySignature(message, signature, publicKey)).toBeTruthy()
})
it("Should not verify a signature if the public key is malformed", async () => {
- const publicKey = generatePublicKey(privateKey)
+ const publicKey = derivePublicKey(privateKey)
const signature = signMessage(privateKey, message)
publicKey[1] = 3 as any
@@ -150,7 +150,7 @@ describe("EdDSAPoseidon", () => {
})
it("Should not verify a signature if the signature is malformed", async () => {
- const publicKey = generatePublicKey(privateKey)
+ const publicKey = derivePublicKey(privateKey)
const signature = signMessage(privateKey, message)
signature.S = 3 as any
@@ -159,7 +159,7 @@ describe("EdDSAPoseidon", () => {
})
it("Should not verify a signature if the signature is not on the curve", async () => {
- const publicKey = generatePublicKey(privateKey)
+ const publicKey = derivePublicKey(privateKey)
const signature = signMessage(privateKey, message)
signature.R8[1] = BigInt(3).toString()
@@ -168,7 +168,7 @@ describe("EdDSAPoseidon", () => {
})
it("Should not verify a signature if the public key is not on the curve", async () => {
- const publicKey = generatePublicKey(privateKey)
+ const publicKey = derivePublicKey(privateKey)
const signature = signMessage(privateKey, message)
publicKey[1] = BigInt(3).toString()
@@ -177,7 +177,7 @@ describe("EdDSAPoseidon", () => {
})
it("Should not verify a signature S value exceeds the predefined sub order", async () => {
- const publicKey = generatePublicKey(privateKey)
+ const publicKey = derivePublicKey(privateKey)
const signature = signMessage(privateKey, message)
signature.S = "3421888242871839275222246405745257275088614511777268538073601725287587578984328"
@@ -189,7 +189,7 @@ describe("EdDSAPoseidon", () => {
for (let i = 0, len = 10; i < len; i += 1) {
const privateKey = crypto.randomBytes(32)
- const publicKey = generatePublicKey(privateKey)
+ const publicKey = derivePublicKey(privateKey)
const circomlibPublicKey = eddsa.prv2pub(privateKey)
From 3619d8182587e78f672cac60ec8c52120e27eb62 Mon Sep 17 00:00:00 2001
From: cedoor
Date: Sat, 2 Dec 2023 10:34:23 +0000
Subject: [PATCH 20/35] chore(eddsa-poseidon): remove comments
---
packages/eddsa-poseidon/src/field1.ts | 10 ----------
packages/eddsa-poseidon/src/scalar.ts | 8 --------
2 files changed, 18 deletions(-)
diff --git a/packages/eddsa-poseidon/src/field1.ts b/packages/eddsa-poseidon/src/field1.ts
index 0457b3baf..945e1d57f 100644
--- a/packages/eddsa-poseidon/src/field1.ts
+++ b/packages/eddsa-poseidon/src/field1.ts
@@ -9,14 +9,6 @@ export default class Field1 {
}
e(res: bigint): bigint {
- // if (res < 0) {
- // let nres = -res
-
- // if (nres >= this._order) nres %= this._order
-
- // return this._order - nres
- // }
-
return res >= this._order ? res % this._order : res
}
@@ -35,8 +27,6 @@ export default class Field1 {
}
inv(a: bigint): bigint {
- // if (!a) throw new Error("Division by zero")
-
let t = this.zero
let r = this._order
let newt = this.one
diff --git a/packages/eddsa-poseidon/src/scalar.ts b/packages/eddsa-poseidon/src/scalar.ts
index 8e54d4eb5..e77d7f02d 100644
--- a/packages/eddsa-poseidon/src/scalar.ts
+++ b/packages/eddsa-poseidon/src/scalar.ts
@@ -1,5 +1,3 @@
-// const hexLen = [0, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4]
-
export function isZero(a: bigint): boolean {
return !a
}
@@ -12,12 +10,6 @@ export function shiftRight(a: bigint, n: bigint): bigint {
return a >> n
}
-// export function bitLength(a: bigint): number {
-// const aS = a.toString(16)
-
-// return (aS.length - 1) * 4 + hexLen[parseInt(aS[0], 16)]
-// }
-
export function mul(a: bigint, b: bigint): bigint {
return a * b
}
From 460db7456b4838174163c2c2d30008d4def41cd3 Mon Sep 17 00:00:00 2001
From: cedoor
Date: Sat, 2 Dec 2023 11:15:14 +0000
Subject: [PATCH 21/35] chore(eddsa-poseidon): add iife bundle with polyfills
---
packages/eddsa-poseidon/package.json | 17 ++++--
packages/eddsa-poseidon/rollup.config.ts | 1 +
packages/eddsa-poseidon/rollup.iife.config.ts | 47 +++++++++++++++
packages/eddsa-poseidon/tsconfig.json | 2 +-
yarn.lock | 57 ++++++++++++++-----
5 files changed, 103 insertions(+), 21 deletions(-)
create mode 100644 packages/eddsa-poseidon/rollup.iife.config.ts
diff --git a/packages/eddsa-poseidon/package.json b/packages/eddsa-poseidon/package.json
index 549ef3832..35020e0c0 100644
--- a/packages/eddsa-poseidon/package.json
+++ b/packages/eddsa-poseidon/package.json
@@ -3,10 +3,14 @@
"version": "0.1.0",
"description": "A JavaScript EdDSA library for secure signing and verification using Poseidon and the Baby Jubjub elliptic curve.",
"license": "MIT",
+ "iife": "dist/index.js",
+ "unpkg": "dist/index.min.js",
+ "jsdelivr": "dist/index.min.js",
"main": "dist/index.node.js",
"exports": {
"import": "./dist/index.mjs",
- "require": "./dist/index.node.js"
+ "require": "./dist/index.node.js",
+ "types": "./dist/types/index.d.ts"
},
"types": "dist/types/index.d.ts",
"files": [
@@ -21,8 +25,8 @@
"url": "https://github.com/privacy-scaling-explorations/zk-kit.git/issues"
},
"scripts": {
- "build:watch": "rollup -c rollup.config.ts -w --configPlugin typescript",
- "build": "rimraf dist && rollup -c rollup.config.ts --configPlugin typescript",
+ "build": "rimraf dist && rollup -c rollup.config.ts --configPlugin typescript && yarn build:iife",
+ "build:iife": "rollup -c rollup.iife.config.ts --configPlugin typescript",
"prepublishOnly": "yarn build"
},
"publishConfig": {
@@ -31,11 +35,12 @@
"devDependencies": {
"@rollup/plugin-commonjs": "^25.0.7",
"@rollup/plugin-node-resolve": "^15.2.3",
+ "blake-hash": "2.0.0",
"circomlibjs": "0.0.8",
+ "poseidon-lite": "0.2.0",
"rollup-plugin-cleanup": "^3.2.1",
+ "rollup-plugin-polyfill-node": "^0.13.0",
+ "rollup-plugin-terser": "^7.0.2",
"rollup-plugin-typescript2": "^0.31.2"
- },
- "dependencies": {
- "poseidon-lite": "^0.2.0"
}
}
diff --git a/packages/eddsa-poseidon/rollup.config.ts b/packages/eddsa-poseidon/rollup.config.ts
index 0b77ecf79..7d2994bd6 100644
--- a/packages/eddsa-poseidon/rollup.config.ts
+++ b/packages/eddsa-poseidon/rollup.config.ts
@@ -20,6 +20,7 @@ export default {
{ file: pkg.exports.require, format: "cjs", banner },
{ file: pkg.exports.import, format: "es", banner }
],
+ external: [],
plugins: [
typescript({ tsconfig: "./build.tsconfig.json", useTsconfigDeclarationDir: true }),
commonjs(),
diff --git a/packages/eddsa-poseidon/rollup.iife.config.ts b/packages/eddsa-poseidon/rollup.iife.config.ts
new file mode 100644
index 000000000..f7936620a
--- /dev/null
+++ b/packages/eddsa-poseidon/rollup.iife.config.ts
@@ -0,0 +1,47 @@
+import commonjs from "@rollup/plugin-commonjs"
+import { nodeResolve } from "@rollup/plugin-node-resolve"
+import fs from "fs"
+import nodePolyfills from "rollup-plugin-polyfill-node"
+import cleanup from "rollup-plugin-cleanup"
+import { terser } from "rollup-plugin-terser"
+import typescript from "rollup-plugin-typescript2"
+
+const pkg = JSON.parse(fs.readFileSync("./package.json", "utf8"))
+const banner = `/**
+ * @module ${pkg.name}
+ * @version ${pkg.version}
+ * @file ${pkg.description}
+ * @copyright Ethereum Foundation ${new Date().getFullYear()}
+ * @license ${pkg.license}
+ * @see [Github]{@link ${pkg.homepage}}
+*/`
+
+const name = pkg.name.split("/")[1].replace(/[-/]./g, (x: string) => x.toUpperCase()[1])
+
+export default {
+ input: "src/index.ts",
+ output: [
+ {
+ file: pkg.iife,
+ name,
+ format: "iife",
+ banner
+ },
+ {
+ file: pkg.unpkg,
+ name,
+ format: "iife",
+ plugins: [terser({ output: { preamble: banner } })]
+ }
+ ],
+ external: [],
+ plugins: [
+ typescript({ tsconfig: "./build.tsconfig.json", useTsconfigDeclarationDir: true }),
+ commonjs(),
+ nodeResolve({
+ preferBuiltins: true
+ }),
+ nodePolyfills({ include: null }),
+ cleanup({ comments: "jsdoc" })
+ ]
+}
diff --git a/packages/eddsa-poseidon/tsconfig.json b/packages/eddsa-poseidon/tsconfig.json
index 71510a096..81e592a16 100644
--- a/packages/eddsa-poseidon/tsconfig.json
+++ b/packages/eddsa-poseidon/tsconfig.json
@@ -1,4 +1,4 @@
{
"extends": "../../tsconfig.json",
- "include": ["src", "tests", "rollup.config.ts"]
+ "include": ["src", "tests", "rollup.config.ts", "rollup.iife.config.ts"]
}
diff --git a/yarn.lock b/yarn.lock
index 443913e6d..710c2ee35 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3324,6 +3324,22 @@ __metadata:
languageName: node
linkType: hard
+"@rollup/plugin-inject@npm:^5.0.4":
+ version: 5.0.5
+ resolution: "@rollup/plugin-inject@npm:5.0.5"
+ dependencies:
+ "@rollup/pluginutils": ^5.0.1
+ estree-walker: ^2.0.2
+ magic-string: ^0.30.3
+ peerDependencies:
+ rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0
+ peerDependenciesMeta:
+ rollup:
+ optional: true
+ checksum: 22cb772fd6f7178308b2ece95cdde5f8615f6257197832166294552a7e4c0d3976dc996cbfa6470af3151d8b86c00091aa93da5f4db6ec563f11b6db29fd1b63
+ languageName: node
+ linkType: hard
+
"@rollup/plugin-json@npm:^5.0.1":
version: 5.0.2
resolution: "@rollup/plugin-json@npm:5.0.2"
@@ -4426,9 +4442,11 @@ __metadata:
dependencies:
"@rollup/plugin-commonjs": ^25.0.7
"@rollup/plugin-node-resolve": ^15.2.3
+ blake-hash: 2.0.0
circomlibjs: 0.0.8
- poseidon-lite: ^0.2.0
+ poseidon-lite: 0.2.0
rollup-plugin-cleanup: ^3.2.1
+ rollup-plugin-polyfill-node: ^0.13.0
rollup-plugin-terser: ^7.0.2
rollup-plugin-typescript2: ^0.31.2
languageName: unknown
@@ -5526,6 +5544,18 @@ __metadata:
languageName: node
linkType: hard
+"blake-hash@npm:2.0.0, blake-hash@npm:^2.0.0":
+ version: 2.0.0
+ resolution: "blake-hash@npm:2.0.0"
+ dependencies:
+ node-addon-api: ^3.0.0
+ node-gyp: latest
+ node-gyp-build: ^4.2.2
+ readable-stream: ^3.6.0
+ checksum: a0d9a8f3953b986d3b30a741a6c000dedcc9a03b1318f52cc01ae62d18829ba6cb1a4d8cbe74785abfdc952a21db410984523bd457764aca716162cfd3ca8ea4
+ languageName: node
+ linkType: hard
+
"blake-hash@npm:^1.1.0":
version: 1.1.1
resolution: "blake-hash@npm:1.1.1"
@@ -5538,18 +5568,6 @@ __metadata:
languageName: node
linkType: hard
-"blake-hash@npm:^2.0.0":
- version: 2.0.0
- resolution: "blake-hash@npm:2.0.0"
- dependencies:
- node-addon-api: ^3.0.0
- node-gyp: latest
- node-gyp-build: ^4.2.2
- readable-stream: ^3.6.0
- checksum: a0d9a8f3953b986d3b30a741a6c000dedcc9a03b1318f52cc01ae62d18829ba6cb1a4d8cbe74785abfdc952a21db410984523bd457764aca716162cfd3ca8ea4
- languageName: node
- linkType: hard
-
"blake2b-wasm@git+https://github.com/jbaylina/blake2b-wasm.git":
version: 2.1.0
resolution: "blake2b-wasm@https://github.com/jbaylina/blake2b-wasm.git#commit=0d5f024b212429c7f50a7f533aa3a2406b5b42b3"
@@ -15235,7 +15253,7 @@ __metadata:
languageName: node
linkType: hard
-"poseidon-lite@npm:^0.2.0":
+"poseidon-lite@npm:0.2.0, poseidon-lite@npm:^0.2.0":
version: 0.2.0
resolution: "poseidon-lite@npm:0.2.0"
checksum: c47c6fd0a29a78ca1f7cf6ccb8b0c4f4e72930d944e63425e36f60c15d37fb0aeca30b8a22a30640ed68d631142282c0b8308da83b1a2b2bb92b87f5a2432c93
@@ -16133,6 +16151,17 @@ __metadata:
languageName: node
linkType: hard
+"rollup-plugin-polyfill-node@npm:^0.13.0":
+ version: 0.13.0
+ resolution: "rollup-plugin-polyfill-node@npm:0.13.0"
+ dependencies:
+ "@rollup/plugin-inject": ^5.0.4
+ peerDependencies:
+ rollup: ^1.20.0 || ^2.0.0 || ^3.0.0 || ^4.0.0
+ checksum: 73c5b9086955afa108c940c13205fab4cece149d020a3faa696c5711bbb391d11aecd4c913ad2cc5ac24f9d43a4969ad8d087d085dd8d423dece45b6be4039bb
+ languageName: node
+ linkType: hard
+
"rollup-plugin-terser@npm:^7.0.2":
version: 7.0.2
resolution: "rollup-plugin-terser@npm:7.0.2"
From c77a6033540bafcde95c1fea7aace386c62002d0 Mon Sep 17 00:00:00 2001
From: cedoor
Date: Sat, 2 Dec 2023 11:16:27 +0000
Subject: [PATCH 22/35] docs(eddsa-poseidon): add cdn references to readme
---
packages/eddsa-poseidon/README.md | 17 +++++++++++++++--
1 file changed, 15 insertions(+), 2 deletions(-)
diff --git a/packages/eddsa-poseidon/README.md b/packages/eddsa-poseidon/README.md
index de45a2511..11c7dacb0 100644
--- a/packages/eddsa-poseidon/README.md
+++ b/packages/eddsa-poseidon/README.md
@@ -42,7 +42,7 @@
| This package offers a simplified JavaScript codebase essential for creating and validating digital signatures using EdDSA and Poseidon. It's built upon the Baby Jubjub elliptic curve, ensuring seamless integration with [Circom](https://github.com/iden3/circom) and enhancing the developer experience. |
-| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
- Super lightweight: [**~33kB**](https://bundlephobia.com/package/@zk-kit/eddsa-poseidon@0.1.0) (minified)
- Compatible with browsers and NodeJS
@@ -69,6 +69,20 @@ or yarn:
yarn add @zk-kit/eddsa-poseidon
```
+### CDN
+
+You can also load it using a `script` tag using [unpkg](https://unpkg.com/):
+
+```html
+
+```
+
+or [JSDelivr](https://www.jsdelivr.com/):
+
+```html
+
+```
+
## 📜 Usage
\# **derivePublicKey**(privateKey: _BigNumberish_): _Point\_
@@ -111,7 +125,6 @@ console.log(signature)
*/
```
-
\# **verifySignature**(message: _BigNumberish_, signature: _Signature_, publicKey: _Point_): _boolean_
```typescript
From 53772d46d839cd0a825408bd43180deeda62a092 Mon Sep 17 00:00:00 2001
From: cedoor
Date: Sat, 2 Dec 2023 11:16:54 +0000
Subject: [PATCH 23/35] chore(eddsa-poseidon): v0.2.0
---
packages/eddsa-poseidon/package.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/packages/eddsa-poseidon/package.json b/packages/eddsa-poseidon/package.json
index 35020e0c0..65ec53e8b 100644
--- a/packages/eddsa-poseidon/package.json
+++ b/packages/eddsa-poseidon/package.json
@@ -1,6 +1,6 @@
{
"name": "@zk-kit/eddsa-poseidon",
- "version": "0.1.0",
+ "version": "0.2.0",
"description": "A JavaScript EdDSA library for secure signing and verification using Poseidon and the Baby Jubjub elliptic curve.",
"license": "MIT",
"iife": "dist/index.js",
From b67c32503d4c0613615015076df5fde4b89bb77c Mon Sep 17 00:00:00 2001
From: Cedoor
Date: Sat, 2 Dec 2023 11:34:07 +0000
Subject: [PATCH 24/35] docs: update README.md
---
packages/eddsa-poseidon/README.md | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/packages/eddsa-poseidon/README.md b/packages/eddsa-poseidon/README.md
index 11c7dacb0..c18b6d8f1 100644
--- a/packages/eddsa-poseidon/README.md
+++ b/packages/eddsa-poseidon/README.md
@@ -44,12 +44,14 @@
| This package offers a simplified JavaScript codebase essential for creating and validating digital signatures using EdDSA and Poseidon. It's built upon the Baby Jubjub elliptic curve, ensuring seamless integration with [Circom](https://github.com/iden3/circom) and enhancing the developer experience. |
| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
-- Super lightweight: [**~33kB**](https://bundlephobia.com/package/@zk-kit/eddsa-poseidon@0.1.0) (minified)
+- Super lightweight: [**~33kB**](https://bundlephobia.com/package/@zk-kit/eddsa-poseidon) (minified)
- Compatible with browsers and NodeJS
- TS type support
- Comprehensive code [documentation](https://zkkit.pse.dev/modules/_zk_kit_eddsa_poseidon.html)
- Full test coverage
+👾 Would you like to try it now? Explore it on [Ceditor](https://ceditor.cedoor.dev/52787e4ad57d2f2076648d509efc3448)!
+
> [!WARNING]
> This library has **not** been audited.
From d1b4244bef6d9242665225086a3d1d7eee1cfa1d Mon Sep 17 00:00:00 2001
From: cedoor
Date: Sat, 2 Dec 2023 11:38:40 +0000
Subject: [PATCH 25/35] docs(eddsa-poseidon): add package to root readme
---
README.md | 28 ++++++++++++++++++++++++++++
1 file changed, 28 insertions(+)
diff --git a/README.md b/README.md
index f99200140..48c4b0979 100644
--- a/README.md
+++ b/README.md
@@ -86,6 +86,34 @@
|
+
+
+
+ @zk-kit/eddsa-poseidon
+
+
+ (docs)
+
+ |
+
+
+
+
+
+ |
+
+
+
+
+
+ |
+
+
+
+
+
+ |
+
From 221dacaea7f323b986d1690de06472708bc06817 Mon Sep 17 00:00:00 2001
From: cedoor
Date: Sat, 2 Dec 2023 11:46:28 +0000
Subject: [PATCH 26/35] chore: update npm build scripts
---
package.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/package.json b/package.json
index 1db55cece..a1216580c 100644
--- a/package.json
+++ b/package.json
@@ -8,7 +8,7 @@
"bugs": "https://github.com/privacy-scaling-explorations/zk-kit/issues",
"private": true,
"scripts": {
- "build": "yarn build:js && yarn compile:sol",
+ "build": "yarn build:libraries && yarn compile:contracts",
"build:libraries": "yarn workspaces foreach --no-private run build",
"compile:contracts": "yarn workspaces foreach run compile",
"test": "yarn test:libraries && yarn test:contracts && yarn test:circuits",
From 45f1c2b3144ac7dd8f7d3a380bfac336714f627e Mon Sep 17 00:00:00 2001
From: Fabian Scherer <48454910+fabianschu@users.noreply.github.com>
Date: Sat, 2 Dec 2023 16:40:04 +0100
Subject: [PATCH 27/35] chore: adds tree visualisations
---
.../circuits/noir/crates/smt_bn254/src/lib.nr | 73 +++++++++++++++----
1 file changed, 58 insertions(+), 15 deletions(-)
diff --git a/packages/circuits/noir/crates/smt_bn254/src/lib.nr b/packages/circuits/noir/crates/smt_bn254/src/lib.nr
index f5cba72ee..e78a973f2 100644
--- a/packages/circuits/noir/crates/smt_bn254/src/lib.nr
+++ b/packages/circuits/noir/crates/smt_bn254/src/lib.nr
@@ -50,9 +50,54 @@ pub fn update(new_value: Field, old_entry: [Field; 2], old_root: Field, siblings
new_parent
}
+/*
+Visual representations of the trees used in the tests for reference
+
+The big tree corresponds to the tree that is used for
+testing in @zk-kit/smt:
+
+big_tree_root: 46574...31272
+├── 1: 78429...40557
+│ ├── 1
+│ ├── v: 17150...90784
+│ └── k: 20438...35547
+└── 0:
+ ├── 1: 74148...2867
+ │ ├── 1: 89272...68433 || This leaf
+ │ │ ├── 1 || is missing
+ │ │ ├── v: 85103...45170 || for the
+ │ │ └── k: 84596...08785 || small_tree_root
+ │ └── 0: 18126...22196
+ │ ├── 1
+ │ ├── v: 13761...25802
+ │ └── k: 13924...78098
+ └── 0: 79011...20495
+ ├── 1
+ ├── v: 10223...67791
+ └── k: 18746...38844
+
+The small tree lacks one leaf as indicated in the previous
+tree and looks as follows:
+
+small_tree_root: 35328...54128
+├── 1: 78429...40557
+│ ├── 1
+│ ├── v: 17150...90784
+│ └── k: 20438...35547
+└── 0:
+ ├── 1: 18126...22196
+ │ ├── 1
+ │ ├── v: 13761...25802
+ │ └── k: 13924...78098
+ └── 0: 79011...20495
+ ├── 1
+ ├── v: 10223...67791
+ └── k: 18746...38844
+*/
+
#[test]
fn test_verify_membership_proof() {
- let root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
+ let small_tree_root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
let key = 18746990989203767017840856832962652635369613415011636432610873672704085238844;
let value = 10223238458026721676606706894638558676629446348345239719814856822628482567791;
let entry = [key, value];
@@ -60,12 +105,12 @@ fn test_verify_membership_proof() {
let mut siblings: [Field; 256] = [0; 256];
siblings[254] = 18126944477260144816572365299295230808286197301459941187567621915186392922196;
siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
- verify(entry, matching_entry, siblings, root);
+ verify(entry, matching_entry, siblings, small_tree_root);
}
#[test]
fn test_verify_non_membership_proof() {
- let root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
+ let small_tree_root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
let key = 8459688297517826598613412977307486050019239051864711035321718508109192087854;
let value = 8510347201346963732943571140849185725417245763047403804445415726302354045170;
let entry = [key, value];
@@ -76,7 +121,7 @@ fn test_verify_non_membership_proof() {
let mut siblings: [Field; 256] = [0; 256];
siblings[254] = 14443001516360873457302534246953033880503978184674311810335857314606403404583;
siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
- verify(entry, matching_entry, siblings, root);
+ verify(entry, matching_entry, siblings, small_tree_root);
}
#[test]
@@ -89,7 +134,6 @@ fn test_add_first_element() {
assert(add(entry, zero_node, siblings) == 7842913321420301106140788486336995496832503825951977327575501561489697540557);
}
-
#[test]
fn test_add_element_to_one_element_tree() {
let key = 8459688297517826598613412977307486050019239051864711035321718508109192087854;
@@ -98,8 +142,7 @@ fn test_add_element_to_one_element_tree() {
let old_root = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
let mut siblings: [Field; 256] = [0; 256];
siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
- let expected_new_root = 6309163561753770186763792861087421800063032915545949912480764922611421686766;
- assert(add(entry, old_root, siblings) == expected_new_root);
+ assert(add(entry, old_root, siblings) == 6309163561753770186763792861087421800063032915545949912480764922611421686766);
}
#[test]
@@ -107,13 +150,13 @@ fn test_add_element_to_existing_tree() {
let key = 8459688297517826598613412977307486050019239051864711035321718508109192087854;
let value = 8510347201346963732943571140849185725417245763047403804445415726302354045170;
let entry = [key, value];
- let old_root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
+ let small_tree_root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
let mut siblings: [Field; 256] = [0; 256];
siblings[253] = 18126944477260144816572365299295230808286197301459941187567621915186392922196;
siblings[254] = 14443001516360873457302534246953033880503978184674311810335857314606403404583;
siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
- let expected_new_root = 4657474665007910823901096287220097081233671466281873230928277896829046731272;
- assert(add(entry, old_root, siblings) == expected_new_root);
+ let big_tree_root = 4657474665007910823901096287220097081233671466281873230928277896829046731272;
+ assert(add(entry, small_tree_root, siblings) == big_tree_root);
}
#[test]
@@ -121,13 +164,13 @@ fn test_delete() {
let key = 8459688297517826598613412977307486050019239051864711035321718508109192087854;
let value = 8510347201346963732943571140849185725417245763047403804445415726302354045170;
let entry = [key, value];
- let old_root = 4657474665007910823901096287220097081233671466281873230928277896829046731272;
+ let big_tree_root = 4657474665007910823901096287220097081233671466281873230928277896829046731272;
let mut siblings: [Field; 256] = [0; 256];
siblings[253] = 18126944477260144816572365299295230808286197301459941187567621915186392922196;
siblings[254] = 14443001516360873457302534246953033880503978184674311810335857314606403404583;
siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
- let expected_new_root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
- assert(delete(entry, old_root, siblings) == expected_new_root);
+ let small_tree_root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
+ assert(delete(entry, big_tree_root, siblings) == small_tree_root);
}
#[test]
@@ -141,6 +184,6 @@ fn test_update() {
siblings[253] = 18126944477260144816572365299295230808286197301459941187567621915186392922196;
siblings[254] = 14443001516360873457302534246953033880503978184674311810335857314606403404583;
siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
- let expected_new_root = 4657474665007910823901096287220097081233671466281873230928277896829046731272;
- assert(update(new_value, old_entry, old_root, siblings) == expected_new_root);
+ let big_tree_root = 4657474665007910823901096287220097081233671466281873230928277896829046731272;
+ assert(update(new_value, old_entry, old_root, siblings) == big_tree_root);
}
\ No newline at end of file
From 2b38a9de10597329d0e0b3c2dd9c917e420430e9 Mon Sep 17 00:00:00 2001
From: Fabian Scherer <48454910+fabianschu@users.noreply.github.com>
Date: Sat, 2 Dec 2023 18:19:14 +0100
Subject: [PATCH 28/35] refactor: tree depth as global var
---
.../circuits/noir/crates/smt_bn254/src/lib.nr | 26 ++++++++++---------
.../noir/crates/smt_bn254/src/utils.nr | 11 ++++----
2 files changed, 20 insertions(+), 17 deletions(-)
diff --git a/packages/circuits/noir/crates/smt_bn254/src/lib.nr b/packages/circuits/noir/crates/smt_bn254/src/lib.nr
index e78a973f2..1e4a1f33a 100644
--- a/packages/circuits/noir/crates/smt_bn254/src/lib.nr
+++ b/packages/circuits/noir/crates/smt_bn254/src/lib.nr
@@ -2,7 +2,9 @@ use dep::std::option::Option;
mod utils;
-pub fn verify(entry: [Field; 2], matching_entry: [Option; 2], siblings: [Field; 256], root: Field) {
+global TREE_DEPTH: u32 = 256;
+
+pub fn verify(entry: [Field; 2], matching_entry: [Option; 2], siblings: [Field; TREE_DEPTH], root: Field) {
let mut calculcated_root: Field = 0;
if matching_entry[0].is_none() | matching_entry[1].is_none() {
calculcated_root = utils::calculcate_root(entry, siblings);
@@ -12,7 +14,7 @@ pub fn verify(entry: [Field; 2], matching_entry: [Option; 2], siblings: [
assert(calculcated_root == root);
}
-pub fn add(entry: [Field; 2], old_root: Field, siblings: [Field; 256]) -> Field {
+pub fn add(entry: [Field; 2], old_root: Field, siblings: [Field; TREE_DEPTH]) -> Field {
if (old_root == 0) {
utils::hash(entry[0], entry[1], true)
} else {
@@ -22,19 +24,19 @@ pub fn add(entry: [Field; 2], old_root: Field, siblings: [Field; 256]) -> Field
}
}
-pub fn delete(entry: [Field; 2], old_root: Field, siblings: [Field; 256]) -> Field {
+pub fn delete(entry: [Field; 2], old_root: Field, siblings: [Field; TREE_DEPTH]) -> Field {
let (new, old) = utils::calculate_two_roots(entry, siblings);
assert(old == old_root);
new
}
-pub fn update(new_value: Field, old_entry: [Field; 2], old_root: Field, siblings: [Field; 256]) -> Field {
+pub fn update(new_value: Field, old_entry: [Field; 2], old_root: Field, siblings: [Field; TREE_DEPTH]) -> Field {
let key = old_entry[0];
let old_value = old_entry[1];
let mut old_parent: Field = utils::hash(key, old_value, true);
let mut new_parent: Field = utils::hash(key, new_value, true);
let path = utils::key_to_path(key);
- for i in 0..256 {
+ for i in 0..TREE_DEPTH {
let sibling = siblings[i];
if sibling != 0 {
if path[i] == 0 {
@@ -102,7 +104,7 @@ fn test_verify_membership_proof() {
let value = 10223238458026721676606706894638558676629446348345239719814856822628482567791;
let entry = [key, value];
let matching_entry = [Option::none(), Option::none()];
- let mut siblings: [Field; 256] = [0; 256];
+ let mut siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
siblings[254] = 18126944477260144816572365299295230808286197301459941187567621915186392922196;
siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
verify(entry, matching_entry, siblings, small_tree_root);
@@ -118,7 +120,7 @@ fn test_verify_non_membership_proof() {
Option::some(13924553918840562069536446401916499801909138643922241340476956069386532478098),
Option::some(13761779908325789083343687318102407319424329800042729673292939195255502025802)
];
- let mut siblings: [Field; 256] = [0; 256];
+ let mut siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
siblings[254] = 14443001516360873457302534246953033880503978184674311810335857314606403404583;
siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
verify(entry, matching_entry, siblings, small_tree_root);
@@ -129,7 +131,7 @@ fn test_add_first_element() {
let key = 20438969296305830531522370305156029982566273432331621236661483041446048135547;
let value = 17150136040889237739751319962368206600863150289695545292530539263327413090784;
let entry = [key, value];
- let siblings: [Field; 256] = [0; 256];
+ let siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
let zero_node = 0;
assert(add(entry, zero_node, siblings) == 7842913321420301106140788486336995496832503825951977327575501561489697540557);
}
@@ -140,7 +142,7 @@ fn test_add_element_to_one_element_tree() {
let value = 8510347201346963732943571140849185725417245763047403804445415726302354045170;
let entry = [key, value];
let old_root = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
- let mut siblings: [Field; 256] = [0; 256];
+ let mut siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
assert(add(entry, old_root, siblings) == 6309163561753770186763792861087421800063032915545949912480764922611421686766);
}
@@ -151,7 +153,7 @@ fn test_add_element_to_existing_tree() {
let value = 8510347201346963732943571140849185725417245763047403804445415726302354045170;
let entry = [key, value];
let small_tree_root = 3532809757480436997969526334543526996242857122876262144596246439822675654128;
- let mut siblings: [Field; 256] = [0; 256];
+ let mut siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
siblings[253] = 18126944477260144816572365299295230808286197301459941187567621915186392922196;
siblings[254] = 14443001516360873457302534246953033880503978184674311810335857314606403404583;
siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
@@ -165,7 +167,7 @@ fn test_delete() {
let value = 8510347201346963732943571140849185725417245763047403804445415726302354045170;
let entry = [key, value];
let big_tree_root = 4657474665007910823901096287220097081233671466281873230928277896829046731272;
- let mut siblings: [Field; 256] = [0; 256];
+ let mut siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
siblings[253] = 18126944477260144816572365299295230808286197301459941187567621915186392922196;
siblings[254] = 14443001516360873457302534246953033880503978184674311810335857314606403404583;
siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
@@ -180,7 +182,7 @@ fn test_update() {
let new_value = 8510347201346963732943571140849185725417245763047403804445415726302354045170;
let old_entry = [key, old_value];
let old_root = 4202917944688591919039016743999516589372052081571553696755434379850460220435;
- let mut siblings: [Field; 256] = [0; 256];
+ let mut siblings: [Field; TREE_DEPTH] = [0; TREE_DEPTH];
siblings[253] = 18126944477260144816572365299295230808286197301459941187567621915186392922196;
siblings[254] = 14443001516360873457302534246953033880503978184674311810335857314606403404583;
siblings[255] = 7842913321420301106140788486336995496832503825951977327575501561489697540557;
diff --git a/packages/circuits/noir/crates/smt_bn254/src/utils.nr b/packages/circuits/noir/crates/smt_bn254/src/utils.nr
index 0c578f096..65ccda432 100644
--- a/packages/circuits/noir/crates/smt_bn254/src/utils.nr
+++ b/packages/circuits/noir/crates/smt_bn254/src/utils.nr
@@ -1,7 +1,8 @@
use dep::std::hash::poseidon;
+use crate::TREE_DEPTH;
pub fn key_to_path(key: Field) -> [u1] {
- key.to_be_bits(256)
+ key.to_be_bits(TREE_DEPTH)
}
pub fn hash(left: Field, right: Field, is_leaf: bool) -> Field {
@@ -12,10 +13,10 @@ pub fn hash(left: Field, right: Field, is_leaf: bool) -> Field {
}
}
-pub fn calculcate_root(entry: [Field; 2], siblings: [Field; 256]) -> Field {
+pub fn calculcate_root(entry: [Field; 2], siblings: [Field; TREE_DEPTH]) -> Field {
let path = key_to_path(entry[0]);
let mut node = hash(entry[0], entry[1], true);
- for i in 0..256 {
+ for i in 0..TREE_DEPTH {
let sibling = siblings[i];
if sibling != 0 {
let mut left = sibling;
@@ -31,11 +32,11 @@ pub fn calculcate_root(entry: [Field; 2], siblings: [Field; 256]) -> Field {
node
}
-pub fn calculate_two_roots(entry: [Field; 2], siblings: [Field; 256]) -> (Field, Field) {
+pub fn calculate_two_roots(entry: [Field; 2], siblings: [Field; TREE_DEPTH]) -> (Field, Field) {
let path = key_to_path(entry[0]);
let mut long_path_node = hash(entry[0], entry[1], true);
let mut short_path_node: Field = 0;
- for i in 0..256 {
+ for i in 0..TREE_DEPTH {
let sibling = siblings[i];
if sibling != 0 {
if siblings[i - 1] == 0 {
From 9650c9f09f2bc79cc20f9e5ce7e4bc88637b7fb3 Mon Sep 17 00:00:00 2001
From: Fabian Scherer <48454910+fabianschu@users.noreply.github.com>
Date: Sat, 2 Dec 2023 18:27:31 +0100
Subject: [PATCH 29/35] fix: use entry key for path in non-membership proof
---
packages/circuits/noir/crates/smt_bn254/src/lib.nr | 5 +++--
packages/circuits/noir/crates/smt_bn254/src/utils.nr | 3 +--
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/packages/circuits/noir/crates/smt_bn254/src/lib.nr b/packages/circuits/noir/crates/smt_bn254/src/lib.nr
index 1e4a1f33a..c85721c59 100644
--- a/packages/circuits/noir/crates/smt_bn254/src/lib.nr
+++ b/packages/circuits/noir/crates/smt_bn254/src/lib.nr
@@ -6,10 +6,11 @@ global TREE_DEPTH: u32 = 256;
pub fn verify(entry: [Field; 2], matching_entry: [Option; 2], siblings: [Field; TREE_DEPTH], root: Field) {
let mut calculcated_root: Field = 0;
+ let path = utils::key_to_path(entry[0]);
if matching_entry[0].is_none() | matching_entry[1].is_none() {
- calculcated_root = utils::calculcate_root(entry, siblings);
+ calculcated_root = utils::calculcate_root(entry, siblings, path);
} else {
- calculcated_root = utils::calculcate_root([matching_entry[0].unwrap(), matching_entry[1].unwrap()], siblings);
+ calculcated_root = utils::calculcate_root([matching_entry[0].unwrap(), matching_entry[1].unwrap()], siblings, path);
}
assert(calculcated_root == root);
}
diff --git a/packages/circuits/noir/crates/smt_bn254/src/utils.nr b/packages/circuits/noir/crates/smt_bn254/src/utils.nr
index 65ccda432..0d6ca80bd 100644
--- a/packages/circuits/noir/crates/smt_bn254/src/utils.nr
+++ b/packages/circuits/noir/crates/smt_bn254/src/utils.nr
@@ -13,8 +13,7 @@ pub fn hash(left: Field, right: Field, is_leaf: bool) -> Field {
}
}
-pub fn calculcate_root(entry: [Field; 2], siblings: [Field; TREE_DEPTH]) -> Field {
- let path = key_to_path(entry[0]);
+pub fn calculcate_root(entry: [Field; 2], siblings: [Field; TREE_DEPTH], path: [u1]) -> Field {
let mut node = hash(entry[0], entry[1], true);
for i in 0..TREE_DEPTH {
let sibling = siblings[i];
From 0e1a24e53bd58624bfcd1d8135b5598591d8c331 Mon Sep 17 00:00:00 2001
From: Fabian Scherer <48454910+fabianschu@users.noreply.github.com>
Date: Sat, 2 Dec 2023 20:52:17 +0100
Subject: [PATCH 30/35] docs: adds comments
---
.../circuits/noir/crates/smt_bn254/src/lib.nr | 62 +++++++++++++++++--
.../noir/crates/smt_bn254/src/utils.nr | 54 ++++++++++++++--
2 files changed, 107 insertions(+), 9 deletions(-)
diff --git a/packages/circuits/noir/crates/smt_bn254/src/lib.nr b/packages/circuits/noir/crates/smt_bn254/src/lib.nr
index c85721c59..ca7a714bd 100644
--- a/packages/circuits/noir/crates/smt_bn254/src/lib.nr
+++ b/packages/circuits/noir/crates/smt_bn254/src/lib.nr
@@ -4,39 +4,93 @@ mod utils;
global TREE_DEPTH: u32 = 256;
+/**
+ * Verifies a membership or a non-membership proof, ie it calculates the tree root
+ * based on an entry or matching entry and all siblings and compares that calculated root
+ * with the root that is passed to this function.
+ * @param entry Contains key and value of an entry: [key, value]
+ * @param matching_entry Contains [key, value] of a matching entry only for non-membership proofs
+ * @param siblings Contains array of siblings of entry / matching_entry
+ * @param root The expected root of the tree
+ */
pub fn verify(entry: [Field; 2], matching_entry: [Option; 2], siblings: [Field; TREE_DEPTH], root: Field) {
let mut calculcated_root: Field = 0;
let path = utils::key_to_path(entry[0]);
+ // if there is no matching_entry it is a membership proof
+ // if there is a matching_entry it is a non_membership proof
if matching_entry[0].is_none() | matching_entry[1].is_none() {
+ // membership proof: the root is calculated based on the entry, the siblings,
+ // and the path determined by the key of entry through consecutive hashing
calculcated_root = utils::calculcate_root(entry, siblings, path);
} else {
+ // non-membership proof: the root is calculated based on the matching_entry, the siblings
+ // and the path that is determined by the key of entry. This makes sure that matching_entry is in fact
+ // a matching entry for entry meaning that it shares the same first bits as path
calculcated_root = utils::calculcate_root([matching_entry[0].unwrap(), matching_entry[1].unwrap()], siblings, path);
}
assert(calculcated_root == root);
}
-pub fn add(entry: [Field; 2], old_root: Field, siblings: [Field; TREE_DEPTH]) -> Field {
+/**
+ * Adds a NEW entry to an existing tree. Based on the siblings first validates the correctness of
+ * the old root. Then uses the new entry and the siblings to calculate the new tree root.
+ * NOTE: this function doesn't validate if the key for the new entry already exists in the tree, ie
+ * if the operation is actually an update. For this operation there is a separate function.
+ * @param new_entry Contains key and value of the new entry: [key, value]
+ * @param old_root The root of the tree before the new entry is added
+ * @param siblings Contains array of siblings of entry / matching_entry
+ * @returns The new root after the addition
+ */
+pub fn add(new_entry: [Field; 2], old_root: Field, siblings: [Field; TREE_DEPTH]) -> Field {
+ // if the root node is zero the first leaf is added to the tree in which case
+ // the new root equals H(k,v,1)
+ // otherwise the correctness of the old root is validated based on the siblings after which
+ // the new root is calculated and returned
if (old_root == 0) {
- utils::hash(entry[0], entry[1], true)
+ utils::hash(new_entry[0], new_entry[1], true)
} else {
- let (old, new) = utils::calculate_two_roots(entry, siblings);
+ let (old, new) = utils::calculate_two_roots(new_entry, siblings);
assert(old == old_root);
new
}
}
+/**
+ * Deletes an existing entry from a tree. Based on the siblings first does a membership proof
+ * of that existing entry and then calculates the new root (without the entry).
+ * @param entry Contains key and value of the to-be-deleted entry: [key, value]
+ * @param old_root The root of the tree if the entry is still included
+ * @param siblings Contains array of siblings of entry
+ * @returns The new root after the deletion
+ */
pub fn delete(entry: [Field; 2], old_root: Field, siblings: [Field; TREE_DEPTH]) -> Field {
+ // proves membership of entry in the old root, then calculates and returns the new root
let (new, old) = utils::calculate_two_roots(entry, siblings);
assert(old == old_root);
new
}
+/**
+ * Updates the value of an existing entry in a tree. Based on the siblings it
+ * first verifies the membership of the old entry. Then it recalculates the new root.
+ * @param new_value The new value to be added (instead of old_entry[1])
+ * @param old_entry Contains key and value of the entry to be updated: [key, value]
+ * @param old_root The root of the tree before the update
+ * @param siblings Contains an array of siblings of old_entry
+ * @returns The new root after the update
+ */
pub fn update(new_value: Field, old_entry: [Field; 2], old_root: Field, siblings: [Field; TREE_DEPTH]) -> Field {
let key = old_entry[0];
let old_value = old_entry[1];
+ // both the old entry and new entry share the same key that is used to calculate the path
+ let path = utils::key_to_path(key);
+ // old_parent is a container to temporarily store the nodes that ultimately lead to the OLD root
let mut old_parent: Field = utils::hash(key, old_value, true);
+ // new_parent is a container to temporarily store the nodes that ultimately lead to the NEW root
let mut new_parent: Field = utils::hash(key, new_value, true);
- let path = utils::key_to_path(key);
+ // starting from the bottom of the tree, for each level it checks whether there is a sibling and if
+ // that is the case, it hashes the two containers with the sibling and updates the containers with the
+ // resulting hashes until the uppermost level is reached aka the root node
for i in 0..TREE_DEPTH {
let sibling = siblings[i];
if sibling != 0 {
diff --git a/packages/circuits/noir/crates/smt_bn254/src/utils.nr b/packages/circuits/noir/crates/smt_bn254/src/utils.nr
index 0d6ca80bd..c9e5c48e8 100644
--- a/packages/circuits/noir/crates/smt_bn254/src/utils.nr
+++ b/packages/circuits/noir/crates/smt_bn254/src/utils.nr
@@ -1,10 +1,24 @@
use dep::std::hash::poseidon;
use crate::TREE_DEPTH;
+/*
+ * Transforms the key into a big endian array of bits so that when determining the position
+ * of a tree entry starting from the root node, the first array element to look at is the last.
+ * @param key The key of a tree entry
+ * @returns The path that determines the position of a key in the tree
+ */
pub fn key_to_path(key: Field) -> [u1] {
key.to_be_bits(TREE_DEPTH)
}
+/*
+ * Calculates the poseidon bn254 hash. If a leaf node is created, the number 1 is appended to
+ * the hashed values as follows: H(k,v,1).
+ * @param left The left element of the hashing pair
+ * @param right The right element of the hashing pair
+ * @param is_leaf Whether what is created is a leaf node or not
+ * @returns The poseidon hash
+ */
pub fn hash(left: Field, right: Field, is_leaf: bool) -> Field {
if (is_leaf) {
poseidon::bn254::hash_3([left, right, 1])
@@ -13,15 +27,28 @@ pub fn hash(left: Field, right: Field, is_leaf: bool) -> Field {
}
}
+
+/*
+ * Calculates the root for a given tree entry based on the passed array of siblings and the passed path.
+ * @param entry The key and value of an entry [k, v]
+ * @param siblings Contains the siblings from bottom to top
+ * @param path The position of the entry in the tree as represented by bits from bottom to top
+ * @returns The calculated root node
+ */
pub fn calculcate_root(entry: [Field; 2], siblings: [Field; TREE_DEPTH], path: [u1]) -> Field {
+ // serves as container for hashes and is initialized to be the leaf node
let mut node = hash(entry[0], entry[1], true);
+ // iterates over the list of siblings until the first sibling is found
+ // arbitrarily assigns the sibling to be the left and the node to be the
+ // right element of the hashing pair unless the path indicates the opposite
+ // order in which case the order is changed. The new hash is stored in the container
+ // until the root node is reached and returned.
for i in 0..TREE_DEPTH {
let sibling = siblings[i];
if sibling != 0 {
let mut left = sibling;
let mut right = node;
- let own_position = path[i];
- if own_position == 0 {
+ if path[i] == 0 {
left = node;
right = sibling;
}
@@ -31,19 +58,36 @@ pub fn calculcate_root(entry: [Field; 2], siblings: [Field; TREE_DEPTH], path: [
node
}
+/*
+ * Calculates two roots for a given leaf entry based on the passed array of siblings: one root
+ * for if the leaf entry was included in the tree and one for if the leaf entry was not included
+ * in the tree. This is useful for efficiently proving the membership of leaf entries for a
+ * tree while simultaneously modifying the tree.
+ * @param entry The key and value of an entry [k, v]
+ * @param siblings Contains the siblings from bottom to top
+ * @returns Two root nodes: the first one doesn't include entry, the second does
+ */
pub fn calculate_two_roots(entry: [Field; 2], siblings: [Field; TREE_DEPTH]) -> (Field, Field) {
let path = key_to_path(entry[0]);
+ // long_path_node is a container for hashes to derive the root node for the tree that
+ // includes the entry
let mut long_path_node = hash(entry[0], entry[1], true);
+ // short_path_node is a container for hashes to derive the root node for the tree that
+ // doesn't include the entry
let mut short_path_node: Field = 0;
+ // iterate over the levels of the tree from bottom to top
for i in 0..TREE_DEPTH {
let sibling = siblings[i];
+ // After the first sibling is found, the processes are started to calculate the two root nodes.
+ // The calculation of the root node that includes the entry is comparable to `calculate_root`.
+ // To calculate the root node that doesn't include entry, the first sibling is put into the container
+ // and starting from each SUBSEQUENT iteration it is hashed with its sibling and the resulting hash
+ // again stored in the container until the root is reached
if sibling != 0 {
if siblings[i - 1] == 0 {
short_path_node = siblings[i];
}
- let sibling = siblings[i];
- let own_position = path[i];
- if own_position == 0 {
+ if path[i] == 0 {
long_path_node = hash(long_path_node, sibling, false);
if(short_path_node != sibling) {
short_path_node = hash(short_path_node, sibling, false);
From 32d0aa469a92e95e9f2d650c3dee7c1e34dc3dfa Mon Sep 17 00:00:00 2001
From: Fabian Scherer <48454910+fabianschu@users.noreply.github.com>
Date: Sat, 2 Dec 2023 21:11:51 +0100
Subject: [PATCH 31/35] docs: add to README
---
packages/circuits/README.md | 15 +++++++++++++++
1 file changed, 15 insertions(+)
diff --git a/packages/circuits/README.md b/packages/circuits/README.md
index 7ac1fb160..a8c406cbe 100644
--- a/packages/circuits/README.md
+++ b/packages/circuits/README.md
@@ -36,6 +36,12 @@
- Circom:
- [PoseidonProof](./circom/poseidon-proof.circom): It proves the possession of a Poseidon pre-image without revealing the pre-image itself.
- [BinaryMerkleRoot](./circom/binary-merkle-root.circom): It calculates the root of a binary Merkle tree using a provided proof-of-membership.
+- Noir:
+ - [Sparse Merkle Tree PoseidonBN254](./noir/crates/smt_bn254/src/lib.nr): A reusable library of functions related to Sparse Merkle Trees based on the JS implementation of [@zk-kit/smt](../smt). The library uses the Poseidon hash to implement the following functions:
+ - verifying membership and non-membership proofs
+ - adding a new entry to a SMT
+ - updating an entry of an SMT
+ - deleting an existing entry from an SMT
## 🛠 Install
@@ -52,3 +58,12 @@ or yarn:
```bash
yarn add @zk-kit/circuits
```
+
+### Using Nargo (for Noir circuits)
+
+In your Nargo.toml file, add the following dependency:
+
+```toml
+[dependencies]
+smt_bn254 = { tag = "v0.1.0", git = "https://github.com/privacy-scaling-explorations/zk-kit", directory="crates/smt_bn254" }
+```
From 072335f285245ebf7e72ef2c50c4ca0e6298d12e Mon Sep 17 00:00:00 2001
From: Fabian Scherer <48454910+fabianschu@users.noreply.github.com>
Date: Sat, 2 Dec 2023 21:50:08 +0100
Subject: [PATCH 32/35] docs: correct link for installation via Nargo
---
packages/circuits/README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/packages/circuits/README.md b/packages/circuits/README.md
index a8c406cbe..01dc94e1a 100644
--- a/packages/circuits/README.md
+++ b/packages/circuits/README.md
@@ -65,5 +65,5 @@ In your Nargo.toml file, add the following dependency:
```toml
[dependencies]
-smt_bn254 = { tag = "v0.1.0", git = "https://github.com/privacy-scaling-explorations/zk-kit", directory="crates/smt_bn254" }
+smt_bn254 = { tag = "v0.1.0", git = "https://github.com/privacy-scaling-explorations/zk-kit/packages/circuits/noir", directory="crates/smt_bn254" }
```
From 23314a7b123fa2ce2ca53d266f9b24478bc2fc36 Mon Sep 17 00:00:00 2001
From: cedoor
Date: Sat, 2 Dec 2023 22:38:04 +0000
Subject: [PATCH 33/35] docs(eddsa-poseidon): add doc to baby jubjub functions
---
packages/eddsa-poseidon/src/babyjub.ts | 79 ++++++++++++++-----
packages/eddsa-poseidon/src/eddsa-poseidon.ts | 6 +-
.../src/{field1.ts => field.ts} | 2 +-
3 files changed, 63 insertions(+), 24 deletions(-)
rename packages/eddsa-poseidon/src/{field1.ts => field.ts} (97%)
diff --git a/packages/eddsa-poseidon/src/babyjub.ts b/packages/eddsa-poseidon/src/babyjub.ts
index c85b21bb1..749fb3ee5 100644
--- a/packages/eddsa-poseidon/src/babyjub.ts
+++ b/packages/eddsa-poseidon/src/babyjub.ts
@@ -1,36 +1,75 @@
-import Field1 from "./field1"
+import Field from "./field"
import * as scalar from "./scalar"
import { Point } from "./types"
-export const F = new Field1(BigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617"))
+// Spec: https://eips.ethereum.org/EIPS/eip-2494
+// 'r' is the alt_bn128 prime order.
+export const r = BigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617")
+
+// 'F' (F_r) is the prime finite field with r elements.
+export const Fr = new Field(r)
+
+// Base8 is the base point used to generate other points on the curve.
export const Base8: Point = [
- F.e(BigInt("5299619240641551281634865583518297030282874472190772894086521144482721001553")),
- F.e(BigInt("16950150798460657717958625567821834550301663161624707787222815936182638968203"))
+ Fr.e(BigInt("5299619240641551281634865583518297030282874472190772894086521144482721001553")),
+ Fr.e(BigInt("16950150798460657717958625567821834550301663161624707787222815936182638968203"))
]
+// Let E be the twisted Edwards elliptic curve defined over 'F_r'
+// described by the equation 'ax^2 + y^2 = 1 + dx^2y^2'.
+
+// 'a' and 'd' are the parameters of the equation:
+const a = Fr.e(BigInt("168700"))
+const d = Fr.e(BigInt("168696"))
+
+// We call Baby Jubjub the curve 'E(F_r)', that is, the subgroup of 'F_r'-rational points of 'E'.
+
+// 'order' is order of the elliptic curve 'E'.
export const order = BigInt("21888242871839275222246405745257275088614511777268538073601725287587578984328")
export const subOrder = scalar.shiftRight(order, BigInt(3))
-const A = F.e(BigInt("168700"))
-const D = F.e(BigInt("168696"))
-
-export function addPoint(a: Point, b: Point): Point {
- const beta = F.mul(a[0], b[1])
- const gamma = F.mul(a[1], b[0])
- const delta = F.mul(F.sub(a[1], F.mul(A, a[0])), F.add(b[0], b[1]))
+/**
+ * Performs point addition on the Baby Jubjub elliptic curve,
+ * calculating a third point from two given points.
+ * Let P1 = (x1, y1) and P2 = (x2, y2) be two arbitrary points of the curve.
+ * Then P1 + P2 = (x3, y3) is calculated in the following way:
+ * x3 = (x1*y2 + y1*x2)/(1 + d*x1*x2*y1*y2)
+ * y3 = (y1*y2 - a*x1*x2)/(1 - d*x1*x2*y1*y2)
+ * @param p1 - First point on the curve.
+ * @param p2 - Second point on the curve.
+ * @returns Resultant third point on the curve.
+ */
+export function addPoint(p1: Point, p2: Point): Point {
+ // beta = x1*y2
+ const beta = Fr.mul(p1[0], p2[1])
+ // gamma = y1*x2
+ const gamma = Fr.mul(p1[1], p2[0])
+ // delta = (y1-(a*x1))*(x2+y2)
+ const delta = Fr.mul(Fr.sub(p1[1], Fr.mul(a, p1[0])), Fr.add(p2[0], p2[1]))
- const tau = F.mul(beta, gamma)
- const dtau = F.mul(D, tau)
+ // x1*x2*y1*y2
+ const tau = Fr.mul(beta, gamma)
+ // d*x1*x2*y1*y2
+ const dtau = Fr.mul(d, tau)
- const x = F.div(F.add(beta, gamma), F.add(F.one, dtau))
- const y = F.div(F.add(delta, F.sub(F.mul(A, beta), gamma)), F.sub(F.one, dtau))
+ // x3 = (x1*y2 + y1*x2)/(1 + d*x1*x2*y1*y2)
+ const p3x = Fr.div(Fr.add(beta, gamma), Fr.add(Fr.one, dtau))
+ // y3 = (y1*y2 - a*x1*x2)/(1 - d*x1*x2*y1*y2)
+ const p3y = Fr.div(Fr.add(delta, Fr.sub(Fr.mul(a, beta), gamma)), Fr.sub(Fr.one, dtau))
- return [x, y]
+ return [p3x, p3y]
}
+/**
+ * Performs a scalar multiplication by starting from the 'base' point and 'adding'
+ * it to itself 'e' times.
+ * @param base - The base point used as a starting point.
+ * @param e - A secret number representing the private key.
+ * @returns The resulting point representing the public key.
+ */
export function mulPointEscalar(base: Point, e: bigint): Point {
- let res: Point = [F.e(BigInt(0)), F.e(BigInt(1))]
+ let res: Point = [Fr.e(BigInt(0)), Fr.e(BigInt(1))]
let rem: bigint = e
let exp: Point = base
@@ -50,8 +89,8 @@ export function inCurve(p: Point) {
p[0] = BigInt(p[0])
p[1] = BigInt(p[1])
- const x2 = F.square(p[0])
- const y2 = F.square(p[1])
+ const x2 = Fr.square(p[0])
+ const y2 = Fr.square(p[1])
- return F.eq(F.add(F.mul(A, x2), y2), F.add(F.one, F.mul(F.mul(x2, y2), D)))
+ return Fr.eq(Fr.add(Fr.mul(a, x2), y2), Fr.add(Fr.one, Fr.mul(Fr.mul(x2, y2), d)))
}
diff --git a/packages/eddsa-poseidon/src/eddsa-poseidon.ts b/packages/eddsa-poseidon/src/eddsa-poseidon.ts
index 7c1a8096d..cc85c4625 100644
--- a/packages/eddsa-poseidon/src/eddsa-poseidon.ts
+++ b/packages/eddsa-poseidon/src/eddsa-poseidon.ts
@@ -1,7 +1,7 @@
import { poseidon5 } from "poseidon-lite/poseidon5"
import * as babyjub from "./babyjub"
import blake from "./blake"
-import Field1 from "./field1"
+import Field from "./field"
import * as scalar from "./scalar"
import { BigNumberish, Point, Signature } from "./types"
import * as utils from "./utils"
@@ -53,7 +53,7 @@ export function signMessage(privateKey: BigNumberish, message: BigNumberish): Si
const rBuff = blake(Buffer.concat([hash.slice(32, 64), msgBuff]))
- const Fr = new Field1(babyjub.subOrder)
+ const Fr = new Field(babyjub.subOrder)
const r = Fr.e(utils.leBuff2int(rBuff))
const R8 = babyjub.mulPointEscalar(babyjub.Base8, r)
@@ -104,5 +104,5 @@ export function verifySignature(message: BigNumberish, signature: Signature, pub
pRight = babyjub.addPoint(_signature.R8, pRight)
// Return true if the points match.
- return babyjub.F.eq(BigInt(pLeft[0]), pRight[0]) && babyjub.F.eq(pLeft[1], pRight[1])
+ return babyjub.Fr.eq(BigInt(pLeft[0]), pRight[0]) && babyjub.Fr.eq(pLeft[1], pRight[1])
}
diff --git a/packages/eddsa-poseidon/src/field1.ts b/packages/eddsa-poseidon/src/field.ts
similarity index 97%
rename from packages/eddsa-poseidon/src/field1.ts
rename to packages/eddsa-poseidon/src/field.ts
index 945e1d57f..660bdcd74 100644
--- a/packages/eddsa-poseidon/src/field1.ts
+++ b/packages/eddsa-poseidon/src/field.ts
@@ -1,4 +1,4 @@
-export default class Field1 {
+export default class Field {
one = BigInt(1)
zero = BigInt(0)
From a5995692747485ae1bdc3e264f27a74d13572161 Mon Sep 17 00:00:00 2001
From: cedoor
Date: Mon, 4 Dec 2023 10:52:11 +0000
Subject: [PATCH 34/35] chore(circuits): fix noir conf
---
.github/workflows/production.yml | 3 +++
packages/circuits/README.md | 7 +++++--
packages/circuits/noir/.gitkeep | 0
packages/circuits/package.json | 1 -
4 files changed, 8 insertions(+), 3 deletions(-)
delete mode 100644 packages/circuits/noir/.gitkeep
diff --git a/.github/workflows/production.yml b/.github/workflows/production.yml
index 0203b9349..e244b2ebb 100644
--- a/.github/workflows/production.yml
+++ b/.github/workflows/production.yml
@@ -72,6 +72,9 @@ jobs:
- name: Setup Circom
run: wget https://github.com/iden3/circom/releases/latest/download/circom-linux-amd64 && sudo mv ./circom-linux-amd64 /usr/bin/circom && sudo chmod +x /usr/bin/circom
+ - name: Install Nargo
+ uses: noir-lang/noirup@v0.1.3
+
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
diff --git a/packages/circuits/README.md b/packages/circuits/README.md
index 01dc94e1a..958cb0224 100644
--- a/packages/circuits/README.md
+++ b/packages/circuits/README.md
@@ -31,6 +31,9 @@
| This package offers a collection of reusable circuits designed for integration into other projects or protocols, promoting code modularization within the zero-knowledge ecosystem. |
| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+> [!IMPORTANT]
+> Installation of [Circom](https://docs.circom.io/getting-started/installation/) and [Nargo](https://noir-lang.org/getting_started/nargo_installation) required for circuit tests.
+
## Circuits
- Circom:
@@ -45,7 +48,7 @@
## 🛠 Install
-### npm or yarn
+### Using NPM or Yarn (Circom circuits)
Install the `@zk-kit/circuits` package with npm:
@@ -59,7 +62,7 @@ or yarn:
yarn add @zk-kit/circuits
```
-### Using Nargo (for Noir circuits)
+### Using Nargo (Noir circuits)
In your Nargo.toml file, add the following dependency:
diff --git a/packages/circuits/noir/.gitkeep b/packages/circuits/noir/.gitkeep
deleted file mode 100644
index e69de29bb..000000000
diff --git a/packages/circuits/package.json b/packages/circuits/package.json
index 56dd92ee5..614f836d5 100644
--- a/packages/circuits/package.json
+++ b/packages/circuits/package.json
@@ -7,7 +7,6 @@
"circom/**/*.circom",
"!circom/main",
"!circom/test",
- "noir",
"LICENSE",
"README.md"
],
From bced68b395e95acba15fb85f6e9ba199c01183d4 Mon Sep 17 00:00:00 2001
From: cedoor
Date: Mon, 4 Dec 2023 13:15:20 +0000
Subject: [PATCH 35/35] refactor(imt.sol): update solidity errors with revert
re #91
---
packages/imt.sol/contracts/BinaryIMT.sol | 65 +++++++++++++++------
packages/imt.sol/contracts/LeanIMT.sol | 13 +++--
packages/imt.sol/contracts/QuinaryIMT.sol | 69 +++++++++++++++--------
packages/imt.sol/test/BinaryIMT.ts | 22 ++++----
packages/imt.sol/test/QuinaryIMT.ts | 22 ++++----
5 files changed, 120 insertions(+), 71 deletions(-)
diff --git a/packages/imt.sol/contracts/BinaryIMT.sol b/packages/imt.sol/contracts/BinaryIMT.sol
index 0204ec15c..06c2a6a1f 100644
--- a/packages/imt.sol/contracts/BinaryIMT.sol
+++ b/packages/imt.sol/contracts/BinaryIMT.sol
@@ -15,6 +15,15 @@ struct BinaryIMTData {
bool useDefaultZeroes;
}
+error ValueGreaterThanSnarkScalarField();
+error DepthNotSupported();
+error WrongDefaultZeroIndex();
+error TreeIsFull();
+error NewLeafCannotEqualOldLeaf();
+error LeafDoesNotExist();
+error LeafIndexOutOfRange();
+error WrongMerkleProofPath();
+
/// @title Incremental binary Merkle tree.
/// @dev The incremental tree allows to calculate the root hash each time a leaf is added, ensuring
/// the integrity of the tree.
@@ -91,7 +100,8 @@ library BinaryIMT {
if (index == 30) return Z_30;
if (index == 31) return Z_31;
if (index == 32) return Z_32;
- revert("IncrementalBinaryTree: defaultZero bad index");
+
+ revert WrongDefaultZeroIndex();
}
/// @dev Initializes a tree.
@@ -99,8 +109,11 @@ library BinaryIMT {
/// @param depth: Depth of the tree.
/// @param zero: Zero value to be used.
function init(BinaryIMTData storage self, uint256 depth, uint256 zero) public {
- require(zero < SNARK_SCALAR_FIELD, "BinaryIMT: leaf must be < SNARK_SCALAR_FIELD");
- require(depth > 0 && depth <= MAX_DEPTH, "BinaryIMT: tree depth must be between 1 and 32");
+ if (zero >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (depth <= 0 || depth > MAX_DEPTH) {
+ revert DepthNotSupported();
+ }
self.depth = depth;
@@ -117,7 +130,9 @@ library BinaryIMT {
}
function initWithDefaultZeroes(BinaryIMTData storage self, uint256 depth) public {
- require(depth > 0 && depth <= MAX_DEPTH, "BinaryIMT: tree depth must be between 1 and 32");
+ if (depth <= 0 || depth > MAX_DEPTH) {
+ revert DepthNotSupported();
+ }
self.depth = depth;
self.useDefaultZeroes = true;
@@ -131,8 +146,11 @@ library BinaryIMT {
function insert(BinaryIMTData storage self, uint256 leaf) public returns (uint256) {
uint256 depth = self.depth;
- require(leaf < SNARK_SCALAR_FIELD, "BinaryIMT: leaf must be < SNARK_SCALAR_FIELD");
- require(self.numberOfLeaves < 2 ** depth, "BinaryIMT: tree is full");
+ if (leaf >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (self.numberOfLeaves >= 2 ** depth) {
+ revert TreeIsFull();
+ }
uint256 index = self.numberOfLeaves;
uint256 hash = leaf;
@@ -155,6 +173,7 @@ library BinaryIMT {
self.root = hash;
self.numberOfLeaves += 1;
+
return hash;
}
@@ -171,9 +190,13 @@ library BinaryIMT {
uint256[] calldata proofSiblings,
uint8[] calldata proofPathIndices
) public {
- require(newLeaf != leaf, "BinaryIMT: new leaf cannot be the same as the old one");
- require(newLeaf < SNARK_SCALAR_FIELD, "BinaryIMT: new leaf must be < SNARK_SCALAR_FIELD");
- require(verify(self, leaf, proofSiblings, proofPathIndices), "BinaryIMT: leaf is not part of the tree");
+ if (newLeaf == leaf) {
+ revert NewLeafCannotEqualOldLeaf();
+ } else if (newLeaf >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (!verify(self, leaf, proofSiblings, proofPathIndices)) {
+ revert LeafDoesNotExist();
+ }
uint256 depth = self.depth;
uint256 hash = newLeaf;
@@ -200,7 +223,10 @@ library BinaryIMT {
++i;
}
}
- require(updateIndex < self.numberOfLeaves, "BinaryIMT: leaf index out of range");
+
+ if (updateIndex >= self.numberOfLeaves) {
+ revert LeafIndexOutOfRange();
+ }
self.root = hash;
}
@@ -231,19 +257,22 @@ library BinaryIMT {
uint256[] calldata proofSiblings,
uint8[] calldata proofPathIndices
) private view returns (bool) {
- require(leaf < SNARK_SCALAR_FIELD, "BinaryIMT: leaf must be < SNARK_SCALAR_FIELD");
uint256 depth = self.depth;
- require(
- proofPathIndices.length == depth && proofSiblings.length == depth,
- "BinaryIMT: length of path is not correct"
- );
+
+ if (leaf >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (proofPathIndices.length != depth || proofSiblings.length != depth) {
+ revert WrongMerkleProofPath();
+ }
uint256 hash = leaf;
for (uint8 i = 0; i < depth; ) {
- require(proofSiblings[i] < SNARK_SCALAR_FIELD, "BinaryIMT: sibling node must be < SNARK_SCALAR_FIELD");
-
- require(proofPathIndices[i] == 1 || proofPathIndices[i] == 0, "BinaryIMT: path index is neither 0 nor 1");
+ if (proofSiblings[i] >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (proofPathIndices[i] != 1 && proofPathIndices[i] != 0) {
+ revert WrongMerkleProofPath();
+ }
if (proofPathIndices[i] == 0) {
hash = PoseidonT3.hash([hash, proofSiblings[i]]);
diff --git a/packages/imt.sol/contracts/LeanIMT.sol b/packages/imt.sol/contracts/LeanIMT.sol
index 9e6186a61..f250d73c0 100644
--- a/packages/imt.sol/contracts/LeanIMT.sol
+++ b/packages/imt.sol/contracts/LeanIMT.sol
@@ -22,12 +22,13 @@ error LeafCannotBeZero();
error LeafAlreadyExists();
error LeafDoesNotExist();
-// The LeanIMT is an optimized version of the BinaryIMT.
-// This implementation eliminates the use of zeroes, and make the tree depth dynamic.
-// When a node doesn't have the right child, instead of using a zero hash as in the BinaryIMT,
-// the node's value becomes that of its left child. Furthermore, rather than utilizing a static tree depth,
-// it is updated based on the number of leaves in the tree. This approach
-// results in the calculation of significantly fewer hashes, making the tree more efficient.
+/// @title Lean Incremental binary Merkle tree.
+/// @dev The LeanIMT is an optimized version of the BinaryIMT.
+/// This implementation eliminates the use of zeroes, and make the tree depth dynamic.
+/// When a node doesn't have the right child, instead of using a zero hash as in the BinaryIMT,
+/// the node's value becomes that of its left child. Furthermore, rather than utilizing a static tree depth,
+/// it is updated based on the number of leaves in the tree. This approach
+/// results in the calculation of significantly fewer hashes, making the tree more efficient.
library LeanIMT {
uint256 public constant SNARK_SCALAR_FIELD =
21888242871839275222246405745257275088548364400416034343698204186575808495617;
diff --git a/packages/imt.sol/contracts/QuinaryIMT.sol b/packages/imt.sol/contracts/QuinaryIMT.sol
index c12352302..2a00d2991 100644
--- a/packages/imt.sol/contracts/QuinaryIMT.sol
+++ b/packages/imt.sol/contracts/QuinaryIMT.sol
@@ -14,6 +14,14 @@ struct QuinaryIMTData {
mapping(uint256 => uint256[5]) lastSubtrees; // Caching these values is essential to efficient appends.
}
+error ValueGreaterThanSnarkScalarField();
+error DepthNotSupported();
+error TreeIsFull();
+error NewLeafCannotEqualOldLeaf();
+error LeafDoesNotExist();
+error LeafIndexOutOfRange();
+error WrongMerkleProofPath();
+
/// @title Incremental quinary Merkle tree.
/// @dev The incremental tree allows to calculate the root hash each time a leaf is added, ensuring
/// the integrity of the tree.
@@ -27,8 +35,11 @@ library QuinaryIMT {
/// @param depth: Depth of the tree.
/// @param zero: Zero value to be used.
function init(QuinaryIMTData storage self, uint256 depth, uint256 zero) public {
- require(zero < SNARK_SCALAR_FIELD, "QuinaryIMT: leaf must be < SNARK_SCALAR_FIELD");
- require(depth > 0 && depth <= MAX_DEPTH, "QuinaryIMT: tree depth must be between 1 and 32");
+ if (zero >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (depth <= 0 || depth > MAX_DEPTH) {
+ revert DepthNotSupported();
+ }
self.depth = depth;
@@ -59,8 +70,11 @@ library QuinaryIMT {
function insert(QuinaryIMTData storage self, uint256 leaf) public {
uint256 depth = self.depth;
- require(leaf < SNARK_SCALAR_FIELD, "QuinaryIMT: leaf must be < SNARK_SCALAR_FIELD");
- require(self.numberOfLeaves < 5 ** depth, "QuinaryIMT: tree is full");
+ if (leaf >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (self.numberOfLeaves >= 5 ** depth) {
+ revert TreeIsFull();
+ }
uint256 index = self.numberOfLeaves;
uint256 hash = leaf;
@@ -104,9 +118,13 @@ library QuinaryIMT {
uint256[4][] calldata proofSiblings,
uint8[] calldata proofPathIndices
) public {
- require(newLeaf != leaf, "QuinaryIMT: new leaf cannot be the same as the old one");
- require(newLeaf < SNARK_SCALAR_FIELD, "QuinaryIMT: new leaf must be < SNARK_SCALAR_FIELD");
- require(verify(self, leaf, proofSiblings, proofPathIndices), "QuinaryIMT: leaf is not part of the tree");
+ if (newLeaf == leaf) {
+ revert NewLeafCannotEqualOldLeaf();
+ } else if (newLeaf >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (!verify(self, leaf, proofSiblings, proofPathIndices)) {
+ revert LeafDoesNotExist();
+ }
uint256 depth = self.depth;
uint256 hash = newLeaf;
@@ -139,7 +157,10 @@ library QuinaryIMT {
++i;
}
}
- require(updateIndex < self.numberOfLeaves, "QuinaryIMT: leaf index out of range");
+
+ if (updateIndex >= self.numberOfLeaves) {
+ revert LeafIndexOutOfRange();
+ }
self.root = hash;
}
@@ -170,38 +191,36 @@ library QuinaryIMT {
uint256[4][] calldata proofSiblings,
uint8[] calldata proofPathIndices
) private view returns (bool) {
- require(leaf < SNARK_SCALAR_FIELD, "QuinaryIMT: leaf must be < SNARK_SCALAR_FIELD");
uint256 depth = self.depth;
- require(
- proofPathIndices.length == depth && proofSiblings.length == depth,
- "QuinaryIMT: length of path is not correct"
- );
+
+ if (leaf >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ } else if (proofPathIndices.length != depth || proofSiblings.length != depth) {
+ revert WrongMerkleProofPath();
+ }
uint256 hash = leaf;
for (uint8 i = 0; i < depth; ) {
uint256[5] memory nodes;
- require(
- proofPathIndices[i] >= 0 && proofPathIndices[i] < 5,
- "QuinaryIMT: path index is not between 0 and 4"
- );
+ if (proofPathIndices[i] < 0 || proofPathIndices[i] >= 5) {
+ revert WrongMerkleProofPath();
+ }
for (uint8 j = 0; j < 5; ) {
if (j < proofPathIndices[i]) {
- require(
- proofSiblings[i][j] < SNARK_SCALAR_FIELD,
- "QuinaryIMT: sibling node must be < SNARK_SCALAR_FIELD"
- );
+ if (proofSiblings[i][j] >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ }
nodes[j] = proofSiblings[i][j];
} else if (j == proofPathIndices[i]) {
nodes[j] = hash;
} else {
- require(
- proofSiblings[i][j - 1] < SNARK_SCALAR_FIELD,
- "QuinaryIMT: sibling node must be < SNARK_SCALAR_FIELD"
- );
+ if (proofSiblings[i][j - 1] >= SNARK_SCALAR_FIELD) {
+ revert ValueGreaterThanSnarkScalarField();
+ }
nodes[j] = proofSiblings[i][j - 1];
}
diff --git a/packages/imt.sol/test/BinaryIMT.ts b/packages/imt.sol/test/BinaryIMT.ts
index bdb28a78e..77ae8a673 100644
--- a/packages/imt.sol/test/BinaryIMT.ts
+++ b/packages/imt.sol/test/BinaryIMT.ts
@@ -22,7 +22,7 @@ describe("BinaryIMT", () => {
it("Should not create a tree with a depth > 32", async () => {
const transaction = binaryIMTTest.init(33)
- await expect(transaction).to.be.revertedWith("BinaryIMT: tree depth must be between 1 and 32")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "DepthNotSupported")
})
it("Should create a tree", async () => {
@@ -49,7 +49,7 @@ describe("BinaryIMT", () => {
const transaction = binaryIMTTest.insert(leaf)
- await expect(transaction).to.be.revertedWith("BinaryIMT: leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should insert a leaf in a tree", async () => {
@@ -115,7 +115,7 @@ describe("BinaryIMT", () => {
const transaction = binaryIMTTest.insert(3)
- await expect(transaction).to.be.revertedWith("BinaryIMT: tree is full")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "TreeIsFull")
})
})
@@ -126,7 +126,7 @@ describe("BinaryIMT", () => {
const transaction = binaryIMTTest.update(1, 1, [0, 1], [0, 1])
- await expect(transaction).to.be.revertedWith("BinaryIMT: new leaf cannot be the same as the old one")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "NewLeafCannotEqualOldLeaf")
})
it("Should not update a leaf if its new value is > SNARK_SCALAR_FIELD", async () => {
@@ -137,7 +137,7 @@ describe("BinaryIMT", () => {
const transaction = binaryIMTTest.update(1, newLeaf, [0, 1], [0, 1])
- await expect(transaction).to.be.revertedWith("BinaryIMT: new leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should not update a leaf if its original value is > SNARK_SCALAR_FIELD", async () => {
@@ -148,7 +148,7 @@ describe("BinaryIMT", () => {
const transaction = binaryIMTTest.update(oldLeaf, 2, [0, 1], [0, 1])
- await expect(transaction).to.be.revertedWith("BinaryIMT: leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should not update a leaf if the path indices are wrong", async () => {
@@ -169,7 +169,7 @@ describe("BinaryIMT", () => {
pathIndices
)
- await expect(transaction).to.be.revertedWith("BinaryIMT: path index is neither 0 nor 1")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "WrongMerkleProofPath")
})
it("Should not update a leaf if the old leaf is wrong", async () => {
@@ -188,7 +188,7 @@ describe("BinaryIMT", () => {
pathIndices
)
- await expect(transaction).to.be.revertedWith("BinaryIMT: leaf is not part of the tree")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "LeafDoesNotExist")
})
it("Should update a leaf", async () => {
@@ -251,7 +251,7 @@ describe("BinaryIMT", () => {
pathIndices
)
- await expect(transaction).to.be.revertedWith("BinaryIMT: leaf index out of range")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "LeafIndexOutOfRange")
})
})
@@ -261,7 +261,7 @@ describe("BinaryIMT", () => {
const transaction = binaryIMTTest.remove(leaf, [0, 1], [0, 1])
- await expect(transaction).to.be.revertedWith("BinaryIMT: leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should not remove a leaf that does not exist", async () => {
@@ -279,7 +279,7 @@ describe("BinaryIMT", () => {
pathIndices
)
- await expect(transaction).to.be.revertedWith("BinaryIMT: leaf is not part of the tree")
+ await expect(transaction).to.be.revertedWithCustomError(binaryIMT, "LeafDoesNotExist")
})
it("Should remove a leaf", async () => {
diff --git a/packages/imt.sol/test/QuinaryIMT.ts b/packages/imt.sol/test/QuinaryIMT.ts
index c6c7c874f..8d570f389 100644
--- a/packages/imt.sol/test/QuinaryIMT.ts
+++ b/packages/imt.sol/test/QuinaryIMT.ts
@@ -21,7 +21,7 @@ describe("QuinaryIMT", () => {
it("Should not create a tree with a depth > 32", async () => {
const transaction = quinaryIMTTest.init(33)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: tree depth must be between 1 and 32")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "DepthNotSupported")
})
it("Should create a tree", async () => {
@@ -39,7 +39,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.insert(leaf)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should insert a leaf in a tree", async () => {
@@ -78,7 +78,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.insert(3)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: tree is full")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "TreeIsFull")
})
})
@@ -89,7 +89,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.update(1, 1, [[0, 1, 2, 3]], [0])
- await expect(transaction).to.be.revertedWith("QuinaryIMT: new leaf cannot be the same as the old one")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "NewLeafCannotEqualOldLeaf")
})
it("Should not update a leaf if its new value is > SNARK_SCALAR_FIELD", async () => {
@@ -100,7 +100,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.update(1, newLeaf, [[0, 1, 2, 3]], [0])
- await expect(transaction).to.be.revertedWith("QuinaryIMT: new leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should not update a leaf if its original value is > SNARK_SCALAR_FIELD", async () => {
@@ -111,7 +111,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.update(oldLeaf, 2, [[0, 1, 2, 3]], [0])
- await expect(transaction).to.be.revertedWith("QuinaryIMT: leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should not update a leaf if the path indices are wrong", async () => {
@@ -127,7 +127,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.update(1, 2, siblings, pathIndices)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: path index is not between 0 and 4")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "WrongMerkleProofPath")
})
it("Should not update a leaf if the old leaf is wrong", async () => {
@@ -141,7 +141,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.update(2, 3, siblings, pathIndices)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: leaf is not part of the tree")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "LeafDoesNotExist")
})
it("Should update a leaf", async () => {
@@ -194,7 +194,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.update(0, leaf, siblings, pathIndices)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: leaf index out of range")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "LeafIndexOutOfRange")
})
})
@@ -204,7 +204,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.remove(leaf, [[0, 1, 2, 3]], [0])
- await expect(transaction).to.be.revertedWith("QuinaryIMT: leaf must be < SNARK_SCALAR_FIELD")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "ValueGreaterThanSnarkScalarField")
})
it("Should not remove a leaf that does not exist", async () => {
@@ -218,7 +218,7 @@ describe("QuinaryIMT", () => {
const transaction = quinaryIMTTest.remove(2, siblings, pathIndices)
- await expect(transaction).to.be.revertedWith("QuinaryIMT: leaf is not part of the tree")
+ await expect(transaction).to.be.revertedWithCustomError(quinaryIMT, "LeafDoesNotExist")
})
it("Should remove a leaf", async () => {
|