diff --git a/askeladd-dvm-marketplace/src/app/components/InternalProgram.tsx b/askeladd-dvm-marketplace/src/app/components/InternalProgram.tsx new file mode 100644 index 0000000..6128958 --- /dev/null +++ b/askeladd-dvm-marketplace/src/app/components/InternalProgram.tsx @@ -0,0 +1,494 @@ +import { NDKEvent, NDKKind } from '@nostr-dev-kit/ndk'; +import { Event as NostrEvent, Relay, SimplePool } from 'nostr-tools'; +import React, { useEffect, useMemo, useState } from 'react'; +import { ContractUploadType, IGenerateZKPRequestDVM, JobResultProver, KIND_JOB_REQUEST, KIND_JOB_RESULT, ProgramInternalContractName } from '@/types'; +import { useFetchEvents } from '@/hooks/useFetchEvents'; +import { ASKELADD_RELAY } from '@/constants/relay'; +import init, { verify_stark_proof, verify_stark_proof_wide_fibo, prove_and_verify, stark_proof_wide_fibo, prove_stark_proof_poseidon, verify_stark_proof_poseidon, prove_and_verify_fib, verify_stark_proof_fib } from "../../pkg" +import { useNostrContext } from '@/context/NostrContext'; +// Define the props for the component +interface TagsCardProps { + event?: NDKEvent | NostrEvent; // Array of array of strings + zkp_request?: IGenerateZKPRequestDVM +} +const InternalProgram: React.FC = ({ event, zkp_request }) => { + const { fetchEvents, fetchEventsTools, setupSubscriptionNostr } = useFetchEvents() + const { ndk, pool } = useNostrContext() + const [form, setForm] = useState({}) + const program = zkp_request?.program; + + const [isOpenForm, setIsOpenForm] = useState(false) + const [logSize, setLogSize] = useState(5); + const [claim, setClaim] = useState(443693538); + const [publicKey, setPublicKey] = useState(); + const [jobId, setJobId] = useState(); + const [error, setError] = useState() + const [starkProof, setStarkProof] = useState() + const [jobEventResult, setJobEventResult] = useState() + const [seeTag, setSeeTag] = useState(false) + const [proof, setProof] = useState(null); + const [isLoading, setIsLoading] = useState(false); + const [isInitialized, setIsInitialized] = useState(false); + const [isFetchJob, setIsFetchJob] = useState(false); + const [isLoadingJobResult, setIsLoadingJobResult] = useState(false); + const [isWaitingJob, setIsWaitingJob] = useState(false); + const [timestampJob, setTimestampJob] = useState(); + const [proofStatus, setProofStatus] = useState< + "idle" | "pending" | "received" | "verified" + >("idle"); + const [selectedEvent, setSelectedEvent] = useState() + + let eventIdRequest = useMemo(() => { + return jobId + }, [jobId]) + + // Init wasm module to run_fibonacci_verify + useEffect(() => { + init() + .then(() => setIsInitialized(true)) + .catch((error) => { + console.error("Failed to initialize WASM module:", error); + + }); + }, []); + + useEffect(() => { + // const pool = new SimplePool(); + if (pool) { + runSubscriptionEvent(pool) + } + if (!jobId && !jobEventResult) { + timeoutWaitingForJobResult() + } + }, [jobId, jobEventResult, pool]) + + + const runSubscriptionEvent = (pool: SimplePool, pubkey?: string) => { + + // WebSocket connection setup + // const ws = new WebSocket([ASKELADD_RELAY[0]]); // Replace with your Nostr relay URL + + // ws.onopen = () => { + // // Subscribe to specific events, adjust filters as needed + // ws.send(JSON.stringify({ + // "req": "EVENTS", + // // "filter": { + // // "#e": ["3a5f5b4..."] // Your event criteria here + // // } + // })); + // }; + + // ws.onmessage = (event) => { + // const data = JSON.parse(event.data); + // if (data) { + // if (!jobId) return; + // if (pubkey && data?.pubkey 
== pubkey) { + // setJobId(data?.id) + // } + // // setEvents(currentEvents => [...currentEvents, data]); + // } + // }; + + // ws.onerror = (error) => { + // console.error("WebSocket error:", error); + // }; + + let poolSubscription = pool.subscribeMany( + ASKELADD_RELAY, + [ + // { + // kinds: [KIND_JOB_REQUEST as NDKKind], + // // since:timestampJob + // // authors: pubkey ? [pubkey] : [] + // }, + { + kinds: [KIND_JOB_RESULT as NDKKind], + // since:timestampJob + }, + ], + { + onevent(event) { + // if (event?.kind == KIND_JOB_REQUEST) { + // if (!jobId) return; + // if (pubkey && event?.pubkey == pubkey) { + // setJobId(event?.id) + // } + // poolSubscription.close(); + // } + if (event?.kind == KIND_JOB_RESULT) { + if (!jobId) return; + let id = jobId ?? eventIdRequest; + if (id && !jobEventResult) { + console.log("Event job result received: ", event?.id); + console.log("event job content result include job: ", id); + let isIncludedJobId = event?.content?.includes(jobId) + let jobEventResultFind = event?.content?.includes(jobId) + console.log("isIncludedJobId", isIncludedJobId); + if (isIncludedJobId) { + console.log("Event JOB_RESULT find", jobEventResultFind); + getDataOfEvent(event); + setJobEventResult(event) + } + } + poolSubscription.close(); + } + }, + onclose: () => { + poolSubscription.close() + }, + oneose() { + poolSubscription.close() + } + } + ) + } + + + const timeoutWaitingForJobResult = async () => { + console.log("waiting timeout job result") + setTimeout(() => { + waitingForJobResult() + }, 5000); + } + + /** Effect to fetch the job result when a job request is sent */ + const waitingForJobResult = async () => { + if (jobEventResult && jobId) return; + fetchEventsProof() + setIsLoading(false); + setIsWaitingJob(false) + } + + const fetchEventsProof = async () => { + console.log("fetch events job result proof") + // if(jobEventResult && jobId)return; + setIsFetchJob(false); + setIsLoadingJobResult(true); + const { events } = await fetchEventsTools({ + kind: KIND_JOB_RESULT, + // since: timestampJob, + // search: jobId + // search: `#${jobId}`, + }) + console.log("events job result", events); + if (!events) return; + let lastEvent = events[events?.length - 1] + if (!lastEvent) return; + let id = jobId ?? eventIdRequest; + if (jobEventResult && jobEventResult?.id == id && proof && proofStatus != "pending") return; + if (id && !jobEventResult) { + let jobEventResultFind = events?.find((e) => e?.content?.includes(id)) + console.log("jobEventResultFind", jobEventResultFind); + let filterJob = events?.filter((e) => e?.id?.includes(id)) + // console.log("filterJob", filterJob); + if (jobEventResultFind?.id) { + console.log("Event JOB_RESULT find", jobEventResultFind); + getDataOfEvent(jobEventResultFind); + setJobEventResult(jobEventResultFind) + } + } + } + + const getDataOfEvent = (lastEvent?: NDKEvent | NostrEvent) => { + if (!lastEvent || !lastEvent?.content) return; + setSelectedEvent(lastEvent); + setProof(lastEvent?.content?.toString()) + const jobProofSerialize: any = JSON.parse(lastEvent?.content) + console.log('jobProofSerialize serialize', jobProofSerialize); + const proofSerialize = jobProofSerialize?.response?.proof; + console.log('proof serialize', proofSerialize); + setStarkProof(proofSerialize); + setProofStatus("received"); + return proofSerialize + } + + const fetchJobRequest = async (pubkey?: string) => { + const { events } = await fetchEventsTools({ + kind: KIND_JOB_REQUEST, + since: timestampJob, + // authors: pubkey ? 
[pubkey] : [] + }); + console.log("events job request", events); + if (!events) return; + const lastEvent = events[0] + if (!lastEvent?.id) return; + const lastEventId = lastEvent?.id; + if (pubkey && pubkey == lastEvent?.pubkey) { + console.log("lastEventId", lastEventId) + setJobId(lastEventId); + eventIdRequest = lastEventId; + setIsWaitingJob(true) + } + } + + + /** Submit job with JOB_REQUEST 5600 + * - Use extension NIP-7 + * - Default public key demo + * - NDK generate key or import later +*/ + const submitJob = async () => { + try { + + /** Todo better check */ + if (!isLoading && !isOpenForm && Object.entries(form).length == 0) return; + setIsLoading(true); + setIsFetchJob(false); + setJobId(undefined) + setProofStatus("pending"); + setProof(null); + setJobEventResult(undefined); + setError(undefined); + let tags: string[][] = [ + // ['param', 'log_size', logSize.toString()], + // ['param', 'claim', claim.toString()], + // ['output', 'text/json'] + ]; + + const inputs: Map = new Map(); + { + Object.entries(form).map(([key, value]) => { + inputs.set(key, value as string) + } + ) + } + + for (let [key, value] of inputs) { + tags.push(["param", key, value]) + } + console.log("inputs", Object.fromEntries(inputs)) + const content = JSON.stringify({ + request: form, + program: { + contract_name: zkp_request?.program?.contract_name, + internal_contract_name: zkp_request?.program?.internal_contract_name, + contract_reached: zkp_request?.program?.contract_reached, + inputs: Object.fromEntries(inputs), + inputs_types: undefined, + inputs_encrypted: undefined + } + }) + // Define the timestamp before which you want to fetch events + setTimestampJob(new Date().getTime()) + console.log("inputs", inputs) + console.log("content", content) + /** Use Nostr extension to send event */ + const pool = new SimplePool(); + if (typeof window !== "undefined" && window.nostr) { + const pubkey = await window.nostr.getPublicKey(); + let created_at = new Date().getTime(); + setPublicKey(pubkey) + const event = await window.nostr.signEvent({ + pubkey: pubkey, + created_at: created_at, + kind: 5600, + tags: tags, + content: content + }) // takes an event object, adds `id`, `pubkey` and `sig` and returns it + // Setup job request to fetch job id + + /** @TODO why the event id is not return? 
+ * - get the last event and fetch job_id event + * - check if events is sent with subscription + * + */ + // let eventID = await relay.publish(event as EventNostr); + const eventID = await Promise.any(pool.publish(ASKELADD_RELAY, event as NostrEvent)); + console.log("eventID", eventID[0]) + await fetchJobRequest(pubkey) + setIsWaitingJob(true); + await timeoutWaitingForJobResult() + + } else { + + /** @TODO flow is user doesn't have NIP-07 extension */ + // let { result, event } = await sendNote({ content, tags, kind: 5600 }) + // console.log("event", event) + // if (event?.sig) { + // setJobId(event?.sig); + // } + // setIsWaitingJob(true) + /** NDK event + * Generate or import private key after + */ + } + } catch (e) { + } finally { + setIsLoading(false); + } + + }; + + const verifyProofHandler = async () => { + try { + if (proof) { + setIsLoading(true); + const inputs: Map = new Map(); + { + Object.entries(form).map(([key, value]) => { + inputs.set(key, value as string) + } + ) + } + + if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName.WideFibonnaciProvingRequest) { + let log_n_instances = inputs.get("log_n_instances"); + let log_fibonacci_size = inputs.get("log_fibonacci_size"); + if (!log_n_instances && !log_fibonacci_size) return; + const prove_result = stark_proof_wide_fibo(Number(log_fibonacci_size), Number(log_n_instances)); + console.log("wide fibo prove_result", prove_result); + const serialised_proof_from_nostr_event = JSON.stringify(starkProof); + console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); + const verify_result = verify_stark_proof_wide_fibo(Number(log_fibonacci_size), Number(log_n_instances), serialised_proof_from_nostr_event); + console.log("verify result", verify_result); + console.log("verify message", verify_result.message); + console.log("verify success", verify_result.success); + if (verify_result?.success) { + console.log("is success verify result") + setProofStatus("verified"); + } else { + setError(verify_result?.message) + } + } + else if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName?.PoseidonProvingRequest) { + + let log_n_instances = inputs.get("log_n_instances"); + if (!log_n_instances) return; + const prove_result = prove_stark_proof_poseidon(Number(log_n_instances)); + console.log("poseidon prove_result", prove_result); + const serialised_proof_from_nostr_event = JSON.stringify(starkProof); + console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); + const verify_result = verify_stark_proof_poseidon(Number(log_n_instances), serialised_proof_from_nostr_event); + console.log("verify result", verify_result); + console.log("verify message", verify_result.message); + console.log("verify success", verify_result.success); + if (verify_result?.success) { + console.log("is success verify result") + setProofStatus("verified"); + } else { + setError(verify_result?.message) + } + } + else if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName.FibonnacciProvingRequest) { + const prove_result = prove_and_verify_fib(logSize, claim); + console.log("prove_result", prove_result); + const serialised_proof_from_nostr_event = JSON.stringify(starkProof); + console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); + const verify_result = verify_stark_proof_fib(logSize, claim, serialised_proof_from_nostr_event); + console.log("verify result", verify_result); + console.log("verify message", 
verify_result.message); + console.log("verify success", verify_result.success); + if (verify_result?.success) { + console.log("is success verify result") + setProofStatus("verified"); + } else { + setError(verify_result?.message) + } + } + + setIsLoading(false); + setIsFetchJob(true) + } + } catch (e) { + console.log("Verify error", e); + } finally { + setIsLoading(false); + setIsFetchJob(true) + + } + }; + + const date: string | undefined = event?.created_at ? new Date(event?.created_at).toDateString() : undefined + + const params = Object.fromEntries(zkp_request?.program?.inputs?.entries() ?? []) + + // Handle changes in form inputs + const handleChange = (e: React.ChangeEvent) => { + const { name, value } = e.target; + setForm(prev => ({ + ...prev, + [name]: value + })); + console.log("form", form) + }; + + return ( +
+ {program?.event_id && +

Event id: {zkp_request?.program?.event_id}

+ } +

{zkp_request?.program?.contract_name?.toString()}

+

Deployed: {zkp_request?.program?.contract_reached == ContractUploadType.InternalAskeladd && "Internal Program"}

+ {isLoading &&
} + + {isOpenForm && + +
+ {Object.entries(form).map(([key, value]) => ( +

{`${key}: ${value}`}

+ ))} + + {Object.entries(params).map((e, i) => { + return ( +
+

{e?.[1]}

+ +
+ ) + })} +
+ } + + {jobId && ( +
+

Job ID: {jobId}

+

Status: {proofStatus}

+ + {error &&

Error: {error}

} + {proof && ( +
+

Proof received:

+
+                                {proof}
+                            
+ {starkProof && ( +

+ Proof of work nonce: {starkProof?.commitment_scheme_proof?.proof_of_work?.nonce} +

+ )} + +
+ )} +
+ )} + + + +
+ ) +}; + +export default InternalProgram; diff --git a/askeladd-dvm-marketplace/src/app/components/ProgramCard.tsx b/askeladd-dvm-marketplace/src/app/components/ProgramCard.tsx index 2924be4..7150512 100644 --- a/askeladd-dvm-marketplace/src/app/components/ProgramCard.tsx +++ b/askeladd-dvm-marketplace/src/app/components/ProgramCard.tsx @@ -4,17 +4,23 @@ import React, { useEffect, useMemo, useState } from 'react'; import { ContractUploadType, IGenerateZKPRequestDVM, JobResultProver, KIND_JOB_REQUEST, KIND_JOB_RESULT, ProgramInternalContractName } from '@/types'; import { useFetchEvents } from '@/hooks/useFetchEvents'; import { ASKELADD_RELAY } from '@/constants/relay'; -import init, { verify_stark_proof, verify_stark_proof_wide_fibo, prove_and_verify, stark_proof_wide_fibo, prove_stark_proof_poseidon, verify_stark_proof_poseidon } from "../../pkg" +import init, { verify_stark_proof, verify_stark_proof_wide_fibo, prove_and_verify, stark_proof_wide_fibo, prove_stark_proof_poseidon, verify_stark_proof_poseidon, prove_and_verify_fib, verify_stark_proof_fib } from "../../pkg" import { useNostrContext } from '@/context/NostrContext'; // Define the props for the component interface TagsCardProps { event?: NDKEvent | NostrEvent; // Array of array of strings - program?: IGenerateZKPRequestDVM + zkp_request?: IGenerateZKPRequestDVM } -const ProgramCard: React.FC = ({ event, program }) => { +const ProgramCard: React.FC = ({ event, zkp_request }) => { + // console.log("zkp_request config", zkp_request) const { fetchEvents, fetchEventsTools, setupSubscriptionNostr } = useFetchEvents() const { ndk, pool } = useNostrContext() - + const inputs = zkp_request?.program?.inputs + const [form, setForm] = useState(zkp_request?.request ? zkp_request?.request : inputs ? inputs : {}) + // const [form, setForm] = useState(zkp_request?.request ? zkp_request?.request : inputs ? Object.fromEntries(inputs) : {}) + const [requestTemplate, setRequestTemplate] = useState(zkp_request?.request ? zkp_request?.request : inputs ? Object.fromEntries(inputs) : {}) + // const [requestValue, setRequetValue] = useState(inputs ? inputs Object.fromEntries(inputs) : {}) + const [requestValue, setRequetValue] = useState(inputs ? 
inputs : {}) const [isOpenForm, setIsOpenForm] = useState(false) const [logSize, setLogSize] = useState(5); const [claim, setClaim] = useState(443693538); @@ -36,6 +42,9 @@ const ProgramCard: React.FC = ({ event, program }) => { >("idle"); const [selectedEvent, setSelectedEvent] = useState() + + const program = zkp_request?.program; + const contract_reached = zkp_request?.program?.contract_reached; let eventIdRequest = useMemo(() => { return jobId }, [jobId]) @@ -52,94 +61,94 @@ const ProgramCard: React.FC = ({ event, program }) => { useEffect(() => { // const pool = new SimplePool(); - if (pool) { - runSubscriptionEvent(pool) - } + // if (pool) { + // runSubscriptionEvent(pool) + // } if (!jobId && !jobEventResult) { timeoutWaitingForJobResult() } }, [jobId, jobEventResult, pool]) - const runSubscriptionEvent = (pool: SimplePool, pubkey?: string) => { - - // WebSocket connection setup - // const ws = new WebSocket([ASKELADD_RELAY[0]]); // Replace with your Nostr relay URL - - // ws.onopen = () => { - // // Subscribe to specific events, adjust filters as needed - // ws.send(JSON.stringify({ - // "req": "EVENTS", - // // "filter": { - // // "#e": ["3a5f5b4..."] // Your event criteria here - // // } - // })); - // }; - - // ws.onmessage = (event) => { - // const data = JSON.parse(event.data); - // if (data) { - // if (!jobId) return; - // if (pubkey && data?.pubkey == pubkey) { - // setJobId(data?.id) - // } - // // setEvents(currentEvents => [...currentEvents, data]); - // } - // }; - - // ws.onerror = (error) => { - // console.error("WebSocket error:", error); - // }; - - let poolSubscription = pool.subscribeMany( - ASKELADD_RELAY, - [ - // { - // kinds: [KIND_JOB_REQUEST as NDKKind], - // // since:timestampJob - // // authors: pubkey ? [pubkey] : [] - // }, - { - kinds: [KIND_JOB_RESULT as NDKKind], - // since:timestampJob - }, - ], - { - onevent(event) { - // if (event?.kind == KIND_JOB_REQUEST) { - // if (!jobId) return; - // if (pubkey && event?.pubkey == pubkey) { - // setJobId(event?.id) - // } - // poolSubscription.close(); - // } - if (event?.kind == KIND_JOB_RESULT) { - if (!jobId) return; - let id = jobId ?? 
eventIdRequest; - if (id && !jobEventResult) { - console.log("Event job result received: ", event?.id); - console.log("event job content result include job: ", id); - let isIncludedJobId = event?.content?.includes(jobId) - let jobEventResultFind = event?.content?.includes(jobId) - console.log("isIncludedJobId", isIncludedJobId); - if (isIncludedJobId) { - console.log("Event JOB_RESULT find", jobEventResultFind); - getDataOfEvent(event); - setJobEventResult(event) - } - } - poolSubscription.close(); - } - }, - onclose: () => { - poolSubscription.close() - }, - oneose() { - poolSubscription.close() - } - } - ) - } + // const runSubscriptionEvent = (pool: SimplePool, pubkey?: string) => { + + // // WebSocket connection setup + // // const ws = new WebSocket([ASKELADD_RELAY[0]]); // Replace with your Nostr relay URL + + // // ws.onopen = () => { + // // // Subscribe to specific events, adjust filters as needed + // // ws.send(JSON.stringify({ + // // "req": "EVENTS", + // // // "filter": { + // // // "#e": ["3a5f5b4..."] // Your event criteria here + // // // } + // // })); + // // }; + + // // ws.onmessage = (event) => { + // // const data = JSON.parse(event.data); + // // if (data) { + // // if (!jobId) return; + // // if (pubkey && data?.pubkey == pubkey) { + // // setJobId(data?.id) + // // } + // // // setEvents(currentEvents => [...currentEvents, data]); + // // } + // // }; + + // // ws.onerror = (error) => { + // // console.error("WebSocket error:", error); + // // }; + + // let poolSubscription = pool.subscribeMany( + // ASKELADD_RELAY, + // [ + // // { + // // kinds: [KIND_JOB_REQUEST as NDKKind], + // // // since:timestampJob + // // // authors: pubkey ? [pubkey] : [] + // // }, + // { + // kinds: [KIND_JOB_RESULT as NDKKind], + // // since:timestampJob + // }, + // ], + // { + // onevent(event) { + // // if (event?.kind == KIND_JOB_REQUEST) { + // // if (!jobId) return; + // // if (pubkey && event?.pubkey == pubkey) { + // // setJobId(event?.id) + // // } + // // poolSubscription.close(); + // // } + // if (event?.kind == KIND_JOB_RESULT) { + // if (!jobId) return; + // let id = jobId ?? 
eventIdRequest; + // if (id && !jobEventResult) { + // console.log("Event job result received: ", event?.id); + // console.log("event job content result include job: ", id); + // let isIncludedJobId = event?.content?.includes(jobId) + // let jobEventResultFind = event?.content?.includes(jobId) + // console.log("isIncludedJobId", isIncludedJobId); + // if (isIncludedJobId) { + // console.log("Event JOB_RESULT find", jobEventResultFind); + // getDataOfEvent(event); + // setJobEventResult(event) + // } + // } + // poolSubscription.close(); + // } + // }, + // onclose: () => { + // poolSubscription.close() + // }, + // oneose() { + // poolSubscription.close() + // } + // } + // ) + // } const timeoutWaitingForJobResult = async () => { @@ -168,7 +177,7 @@ const ProgramCard: React.FC = ({ event, program }) => { // search: jobId // search: `#${jobId}`, }) - console.log("events job result", events); + // console.log("events job result", events); if (!events) return; let lastEvent = events[events?.length - 1] if (!lastEvent) return; @@ -237,35 +246,44 @@ const ProgramCard: React.FC = ({ event, program }) => { setProof(null); setJobEventResult(undefined); setError(undefined); - const tags = [ - ['param', 'log_size', logSize.toString()], - ['param', 'claim', claim.toString()], - ['output', 'text/json'] + let tags: string[][] = [ + // ['param', 'log_size', logSize.toString()], + // ['param', 'claim', claim.toString()], + // ['output', 'text/json'] ]; const inputs: Map = new Map(); { Object.entries(form).map(([key, value]) => { - inputs.set(key, value as string) + + if (!requestValue[key]) { + inputs.set(key, value as string) + } + } ) } + + for (let [key, value] of inputs) { + tags.push(["param", key, value]) + } console.log("inputs", Object.fromEntries(inputs)) const content = JSON.stringify({ - request: form, + // request: form, + request: Object.fromEntries(inputs), program: { - contract_name: program?.program_params?.contract_name, - internal_contract_name: program?.program_params?.internal_contract_name, - contract_reached: program?.program_params?.contract_reached, + contract_name: zkp_request?.program?.contract_name, + internal_contract_name: zkp_request?.program?.internal_contract_name, + contract_reached: zkp_request?.program?.contract_reached, inputs: Object.fromEntries(inputs), inputs_types: undefined, inputs_encrypted: undefined } }) + console.log("content", content) // Define the timestamp before which you want to fetch events setTimestampJob(new Date().getTime()) console.log("inputs", inputs) - console.log("content", content) /** Use Nostr extension to send event */ const pool = new SimplePool(); if (typeof window !== "undefined" && window.nostr) { @@ -289,9 +307,9 @@ const ProgramCard: React.FC = ({ event, program }) => { // let eventID = await relay.publish(event as EventNostr); const eventID = await Promise.any(pool.publish(ASKELADD_RELAY, event as NostrEvent)); console.log("eventID", eventID[0]) - await fetchJobRequest(pubkey) - setIsWaitingJob(true); - await timeoutWaitingForJobResult() + // await fetchJobRequest(pubkey) + // setIsWaitingJob(true); + // await timeoutWaitingForJobResult() } else { @@ -324,12 +342,17 @@ const ProgramCard: React.FC = ({ event, program }) => { } ) } - if (program?.program_params?.internal_contract_name == ProgramInternalContractName.FibonnacciProvingRequest) { - const prove_result = prove_and_verify(logSize, claim); - console.log("prove_result", prove_result); + + + if (zkp_request?.program?.internal_contract_name == 
ProgramInternalContractName.WideFibonnaciProvingRequest) { + let log_n_instances = inputs.get("log_n_instances"); + let log_fibonacci_size = inputs.get("log_fibonacci_size"); + if (!log_n_instances && !log_fibonacci_size) return; + const prove_result = stark_proof_wide_fibo(Number(log_fibonacci_size), Number(log_n_instances)); + console.log("wide fibo prove_result", prove_result); const serialised_proof_from_nostr_event = JSON.stringify(starkProof); console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); - const verify_result = verify_stark_proof(logSize, claim, serialised_proof_from_nostr_event); + const verify_result = verify_stark_proof_wide_fibo(Number(log_fibonacci_size), Number(log_n_instances), serialised_proof_from_nostr_event); console.log("verify result", verify_result); console.log("verify message", verify_result.message); console.log("verify success", verify_result.success); @@ -340,16 +363,15 @@ const ProgramCard: React.FC = ({ event, program }) => { setError(verify_result?.message) } } + else if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName?.PoseidonProvingRequest) { - else if (program?.program_params?.internal_contract_name == ProgramInternalContractName.WideFibonnaciProvingRequest) { let log_n_instances = inputs.get("log_n_instances"); - let log_fibonacci_size = inputs.get("log_fibonacci_size"); - if (!log_n_instances && !log_fibonacci_size) return; - const prove_result = stark_proof_wide_fibo(Number(log_fibonacci_size), Number(log_n_instances)); - console.log("wide fibo prove_result", prove_result); + if (!log_n_instances) return; + const prove_result = prove_stark_proof_poseidon(Number(log_n_instances)); + console.log("poseidon prove_result", prove_result); const serialised_proof_from_nostr_event = JSON.stringify(starkProof); console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); - const verify_result = verify_stark_proof_wide_fibo(Number(log_fibonacci_size), Number(log_n_instances), serialised_proof_from_nostr_event); + const verify_result = verify_stark_proof_poseidon(Number(log_n_instances), serialised_proof_from_nostr_event); console.log("verify result", verify_result); console.log("verify message", verify_result.message); console.log("verify success", verify_result.success); @@ -360,15 +382,12 @@ const ProgramCard: React.FC = ({ event, program }) => { setError(verify_result?.message) } } - else if (program?.program_params?.internal_contract_name == ProgramInternalContractName?.PoseidonProvingRequest) { - - let log_n_instances = inputs.get("log_n_instances"); - if (!log_n_instances) return; - const prove_result = prove_stark_proof_poseidon(Number(log_n_instances)); - console.log("poseidon prove_result", prove_result); + else if (zkp_request?.program?.internal_contract_name == ProgramInternalContractName.FibonnacciProvingRequest) { + const prove_result = prove_and_verify_fib(logSize, claim); + console.log("prove_result", prove_result); const serialised_proof_from_nostr_event = JSON.stringify(starkProof); console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); - const verify_result = verify_stark_proof_poseidon(Number(log_n_instances), serialised_proof_from_nostr_event); + const verify_result = verify_stark_proof_fib(logSize, claim, serialised_proof_from_nostr_event); console.log("verify result", verify_result); console.log("verify message", verify_result.message); console.log("verify success", verify_result.success); @@ -393,28 +412,40 @@ const 
ProgramCard: React.FC = ({ event, program }) => { }; const date: string | undefined = event?.created_at ? new Date(event?.created_at).toDateString() : undefined + const params = zkp_request?.program?.inputs ?? [] + // Handle changes in form inputs + const handleChange = (e: React.ChangeEvent) => { + const { name, value } = e.target; + setForm((prev: any) => ({ + ...prev, + [name]: value + })); + console.log("form", form) + }; - const params = Object.fromEntries(program?.program_params?.inputs?.entries() ?? []) - const [form, setForm] = useState({}) // Handle changes in form inputs - const handleChange = (e: React.ChangeEvent) => { + const handleRequestChange = (e: React.ChangeEvent) => { const { name, value } = e.target; - setForm(prev => ({ + setRequestTemplate((prev: any) => ({ ...prev, [name]: value })); console.log("form", form) }; - const program_params = program?.program_params; + return (
- {program_params?.event_id && -

Event id: {program?.program_params?.event_id}

+ {program?.event_id && +

Event id: {zkp_request?.program?.event_id}

} -

{program?.program_params?.contract_name?.toString()}

-

Deployed: {program?.program_params?.contract_reached == ContractUploadType.InternalAskeladd && "Internal Program"}

+

{zkp_request?.program?.contract_name?.toString()}

+

Deployed: {zkp_request?.program?.contract_reached == ContractUploadType.InternalAskeladd ? "Internal Program" + + : contract_reached == ContractUploadType.Ipfs + && "Ipfs" + }

{isLoading &&
}
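
For readability, here is a minimal TypeScript sketch (illustrative only, not part of this patch) of the request-building step that both InternalProgram.tsx and ProgramCard.tsx perform in submitJob: each form input becomes a ["param", key, value] tag, and the same inputs are embedded in the JSON content alongside the program params. The helper name buildJobRequest and the ProgramParams type are assumptions made for the sketch; KIND_JOB_REQUEST, the tag layout, and the content shape come from the code above.

import { KIND_JOB_REQUEST } from "@/types";

// Assumed shape for the sketch; the patch reads these fields off zkp_request.program.
type ProgramParams = {
  contract_name?: string;
  internal_contract_name?: string;
  contract_reached?: string;
};

// Build the unsigned kind-5600 job request payload from the flat form state.
function buildJobRequest(form: Record<string, string>, program: ProgramParams) {
  const inputs = new Map<string, string>(Object.entries(form));
  // One ["param", key, value] tag per input, as submitJob does above.
  const tags: string[][] = [...inputs].map(([key, value]) => ["param", key, value]);
  const content = JSON.stringify({
    request: Object.fromEntries(inputs),
    program: {
      contract_name: program.contract_name,
      internal_contract_name: program.internal_contract_name,
      contract_reached: program.contract_reached,
      inputs: Object.fromEntries(inputs),
      inputs_types: undefined,
      inputs_encrypted: undefined,
    },
  });
  // The caller signs { kind: KIND_JOB_REQUEST, tags, content, created_at, pubkey }
  // with the NIP-07 extension (window.nostr.signEvent) and publishes it to
  // ASKELADD_RELAY via SimplePool, as submitJob does in the components above.
  return { kind: KIND_JOB_REQUEST, tags, content };
}
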
diff --git a/askeladd-dvm-marketplace/src/app/globals.css b/askeladd-dvm-marketplace/src/app/globals.css index 03cf15e..8681616 100644 --- a/askeladd-dvm-marketplace/src/app/globals.css +++ b/askeladd-dvm-marketplace/src/app/globals.css @@ -385,4 +385,34 @@ main { border-radius: 0.25rem; transition: all 0.3s ease; text-shadow: none; +} + +.secondary-button { + /* width: 100%; */ + background: var(--neon-blue); + color: #000000; + border: 1px solid #000000; + box-shadow: 0 0 8px var(--neon-blue); + text-transform: uppercase; + letter-spacing: 1px; + font-weight: bold; + padding: 0.5rem 1rem; + border-radius: 0.25rem; + transition: all 0.3s ease; + text-shadow: none; +} + +.basic-button { + /* width: 100%; */ + background: var(--neon-blue); + color: #000000; + border: 1px solid #000000; + box-shadow: 0 0 2px var(--neon-blue); + text-transform: uppercase; + letter-spacing: 1px; + font-weight: bold; + padding: 0.25rem 0.5rem; + border-radius: 0.25rem; + transition: all 0.3s ease; + text-shadow: none; } \ No newline at end of file diff --git a/askeladd-dvm-marketplace/src/app/launch-program/page.tsx b/askeladd-dvm-marketplace/src/app/launch-program/page.tsx new file mode 100644 index 0000000..3077a99 --- /dev/null +++ b/askeladd-dvm-marketplace/src/app/launch-program/page.tsx @@ -0,0 +1,463 @@ +"use client"; + +import { useState, useEffect, useMemo } from "react"; +import { NDKEvent, NDKKind } from '@nostr-dev-kit/ndk'; +import { useSendNote } from "@/hooks/useSendNote"; +import { useFetchEvents } from "@/hooks/useFetchEvents"; +import { APPLICATION_PUBKEY_DVM, ASKELADD_RELAY } from "@/constants/relay"; +import { Event as EventNostr, SimplePool } from "nostr-tools"; +import { ASKELADD_KINDS, ConfigHandle, ContractUploadType, IGenerateZKPRequestDVM, IProgramParams, KIND_JOB_ADD_PROGRAM } from "@/types"; +import EventCard from "../components/EventCard"; +import { generateContentAndTags } from "../utils/generateAppHandler"; +import { HowItWork } from "../components/description"; +import { PROGRAM_INTERAL_REQUEST } from "@/constants/program"; + +export default function LaunchProgram() { + const [publicKey, setPublicKey] = useState(); + const [appKind, setAppKind] = useState(ASKELADD_KINDS.KIND_JOB_REQUEST) + const [configKind, setConfigKind] = useState(ConfigHandle.ALL_KIND) + const [jobId, setJobId] = useState(); + const [error, setError] = useState() + const [lastConfig, setLastConfig] = useState() + const [events, setEvents] = useState([]) + const [proofStatus, setProofStatus] = useState< + "idle" | "pending" | "received" | "verified" + >("idle"); + const [isLoading, setIsLoading] = useState(false); + const [isInitialized, setIsInitialized] = useState(false); + const [isNeedLoadEvents, setIsNeedLoadEvents] = useState(true); + const [isAdmin, setIsAdmin] = useState(false); + const [timestampJob, setTimestampJob] = useState(); + const { fetchEvents, fetchEventsTools, setupSubscriptionNostr } = useFetchEvents() + const { sendNote, publishNote } = useSendNote() + const [logSize, setLogSize] = useState(5); + const [claim, setClaim] = useState(443693538); + const [inputIndex, setInputsIndex] = useState(0) + const [isOpenForm, setIsOpenForm] = useState(false) + const [formState, setForm] = useState({}) + + const form = useMemo(() => { + return formState + }, [formState]) + const [formType, setFormType] = useState({}) + const [formEncrypted, setFormEncrypted] = useState({}) + const [programParam, setProgramParam] = useState({ + pubkey_app: undefined, + event_id: undefined, + unique_id: undefined, + 
inputs: new Map(), + contract_name: undefined, + contract_reached: undefined, + internal_contract_name: undefined + }) + // Handle changes in form inputs + const handleChange = (e: React.ChangeEvent) => { + const { name, value } = e.target; + setForm((prev: any) => ({ + ...prev, + [name]: value + })); + + // setFormEncrypted(prev => ({ + // ...prev, + // [value]: false + // })); + + setFormType((prev: any) => ({ + ...prev, + [name]: "String" + })); + console.log("form", form) + console.log("form encrypted", formEncrypted) + console.log("form type", formType) + }; + + // Handle changes in form inputs + const handleInputType = (e: React.ChangeEvent) => { + const { name, value } = e.target; + setFormType((prev: any) => ({ + ...prev, + [name]: value + })); + console.log("form type", form) + }; + + // Handle changes in form inputs + const handleInputEncrypted = (e: React.ChangeEvent) => { + const { name, value } = e.target; + setFormEncrypted((prev: any) => ({ + ...prev, + [name]: value + })); + console.log("formEncrypted", formEncrypted) + }; + + useEffect(() => { + if (isNeedLoadEvents || !isInitialized) { + fetchEventsApp() + setIsNeedLoadEvents(false) + setIsInitialized(true) + } + }, [isNeedLoadEvents]) + + useEffect(() => { + + if (publicKey) { + + if (process.env.NEXT_PUBLIC_DVM_PUBKEY && process.env.NEXT_PUBLIC_DVM_PUBKEY != publicKey) { + setIsAdmin(true) + } + } + }, [publicKey]) + + const fetchEventsApp = async () => { + console.log("fetch events config"); + const { events } = await fetchEventsTools({ + kinds: [NDKKind.AppHandler], + limit: 100, + }) + console.log("events config NIP-89", events); + setLastConfig(events[0]) + setEvents(events); + setIsNeedLoadEvents(false) + } + + + /** Connect you */ + const connectExtension = async () => { + try { + + if (typeof window !== "undefined" && window.nostr) { + const pubkey = await window.nostr.getPublicKey(); + let created_at = new Date().getTime(); + setPublicKey(pubkey) + return pubkey; + } + + } catch (e) { + console.log("connect extension error", e) + } finally { + setIsLoading(false); + } + + }; + + /** Submit Recommended App Handler for: + * JOB_REQUEST 5700: Laucn hrpgraom + * JOB_RESULT 6600: Result + */ + const submitJob = async () => { + try { + setIsLoading(true); + setJobId(undefined) + setProofStatus("pending"); + setError(undefined); + + submitProgram() + setIsNeedLoadEvents(true) + + } catch (e) { + } finally { + setIsLoading(false); + + } + + }; + + const uploadWasm = async () => { + try { + setIsLoading(true); + setJobId(undefined) + setProofStatus("pending"); + setError(undefined); + } catch (e) { + } finally { + setIsLoading(false); + + } + + }; + + const mockProgram = async () => { + /** Todo better check */ + if (!isLoading && !isOpenForm && Object.entries(form).length == 0) return; + setIsLoading(true); + setJobId(undefined) + setProofStatus("pending"); + setError(undefined); + const tags = [ + ['param', 'log_size', logSize.toString()], + ['param', 'claim', claim.toString()], + ['output', 'text/json'] + ]; + + } + const submitProgram = async () => { + try { + setIsLoading(true); + setProofStatus("pending"); + setLastConfig(undefined); + setError(undefined); + console.log("formEncrypted", formEncrypted) + + let tags: string[][] = [] + const inputs: Map = new Map(); + { + Object.entries(form).map(([key, value]) => { + // inputs.set(key, value as string) + inputs.set(value as string, "") + } + ) + } + + const inputs_encrypted: Map = new Map(); + Object.entries(formEncrypted).map(([key, value]) => { + 
inputs_encrypted.set(key, value as string) + } + ) + for (let [key, value] of inputs_encrypted) { + tags.push(["param_encrypted", key, value]) + } + console.log("inputs_encrypted", Object.fromEntries(inputs_encrypted)) + + const inputs_types: Map = new Map(); + { + Object.entries(formType).map(([key, value]) => { + inputs_types.set(key, value as string) + } + ) + } + for (let [key, value] of inputs_encrypted) { + tags.push(["param_encrypted", key, value]) + } + + for (let [key, value] of inputs) { + tags.push(["param", key, value, inputs_encrypted.get(key) ?? "false", inputs_types.get(key) ?? "String"]) + } + + const content = JSON.stringify({ + // request: form as any, + // request: form, + request: Object.fromEntries(inputs), + program: { + contract_name: programParam?.contract_name ?? "test", + // internal_contract_name: programParam?.internal_contract_name ?? "test", + contract_reached: programParam?.contract_reached ?? ContractUploadType.Ipfs, + inputs: Object.fromEntries(inputs), + // inputs_types: Object.fromEntries(inputs), + // inputs_encrypted: Object.fromEntries(inputs_encrypted), + // tags: tags + } + }) + + console.log("tags", tags) + console.log("content", content) + setTimestampJob(new Date().getTime()) + /** Use Nostr extension to send event */ + const pool = new SimplePool(); + let pubkey; + if (typeof window !== "undefined" && window.nostr) { + let pubkey = await connectExtension() + + console.log("pubkey", pubkey) + if (!pubkey) return; + if (!content) return; + + let created_at = new Date().getTime(); + const event = await window.nostr.signEvent({ + pubkey: pubkey, + created_at: created_at, + kind: KIND_JOB_ADD_PROGRAM, + tags: tags, + content: content + }) // takes an event object, adds `id`, `pubkey` and `sig` and returns it + // // Setup job request to fetch job id + + // // let eventID = await relay.publish(event as EventNostr); + const eventID = await Promise.any(pool.publish(ASKELADD_RELAY, event as EventNostr)); + console.log("eventID", eventID[0]) + setIsNeedLoadEvents(true) + + } else { + + /** @TODO flow is user doesn't have NIP-07 extension */ + // let { result, event } = await sendNote({ content, tags, kind: 5600 }) + // console.log("event", event) + // if (event?.sig) { + // setJobId(event?.sig); + // } + // setIsWaitingJob(true) + /** NDK event + * Generate or import private key after + */ + } + } catch (e) { + } finally { + setIsLoading(false); + } + + }; + + const handleLoadFormEncrypted = () => { + console.log("form load key") + Object.entries(form).map(([key, value]) => { + setFormEncrypted({ ...formEncrypted, [value as string]: false }) + } + ) + + } + + const handleAllInputsEncrypted = () => { + Object.entries(form).map(([key, value]) => { + setFormEncrypted({ ...formEncrypted, [value as string]: true }) + } + ) + + } + + return ( +
+
+
+
+ + + +
+

Askeladd DVM

+

Launch program

+ +
+
+

Program param

+ { + programParam.pubkey_app = e.target.value + }} + > + +
+

Inputs

+ {form && Object.entries(form).map(([key, value], i) => { + return ( +
+

{`${key}`}

+

{`Name: ${value}`}

+
+ + +
+ +
+ ) + })} + +
+ +
+

Inputs encrypted

+ + + {formEncrypted && Object.entries(formEncrypted).map(([key, value], i) => { + return ( +
+

{`${key}: ${value}`}

+ +
+ + {formEncrypted && formEncrypted[key] == false ? + <> + + setFormEncrypted({ ...formEncrypted, [key]: true })} + /> + + : + <> + + setFormEncrypted({ ...formEncrypted, [key]: false })} + /> + + } +
+
+ ) + })} +
+ +
+ + <> + + + +
+ {isLoading &&
} +
+ {/* */} + {/* */} + {/* */} + +
+
+
+ ); +} diff --git a/askeladd-dvm-marketplace/src/app/page.tsx b/askeladd-dvm-marketplace/src/app/page.tsx index 9f0ca9f..f4d740e 100644 --- a/askeladd-dvm-marketplace/src/app/page.tsx +++ b/askeladd-dvm-marketplace/src/app/page.tsx @@ -4,13 +4,15 @@ import { useState, useEffect, useMemo } from "react"; import { NDKEvent, NDKKind } from '@nostr-dev-kit/ndk'; import { useNostrContext } from "@/context/NostrContext"; import { useSendNote } from "@/hooks/useSendNote"; -import { JobResultProver, KIND_JOB_REQUEST, KIND_JOB_RESULT } from "@/types"; -import init, { verify_stark_proof, prove_and_verify } from "../pkg/stwo_wasm"; +import { JobResultProver, KIND_JOB_REQUEST, KIND_JOB_RESULT, ProgramInternalContractName } from "@/types"; +import init, { verify_stark_proof, prove_and_verify, prove_and_verify_fib, verify_stark_proof_fib, stark_proof_wide_fibo, verify_stark_proof_wide_fibo } from "../pkg/stwo_wasm"; import { useFetchEvents } from "@/hooks/useFetchEvents"; import { ASKELADD_RELAY } from "@/constants/relay"; import { Relay } from 'nostr-tools/relay'; import { Event as EventNostr, SimplePool } from "nostr-tools"; export default function Home() { + const [log_n_instances, setLogNInstances] = useState(0); + const [log_fibonnacci_size, setLogFibonnacciSize] = useState(5); const [logSize, setLogSize] = useState(5); const [claim, setClaim] = useState(443693538); const [publicKey, setPublicKey] = useState(); @@ -69,6 +71,7 @@ export default function Home() { useEffect(() => { if (jobId && !jobEventResult) { waitingForJobResult() + timeoutWaitingForJobResult() } }, [jobId, isFetchJob, jobEventResult]) @@ -131,51 +134,68 @@ export default function Home() { setJobEventResult(undefined); setError(undefined); const tags = [ - ['param', 'log_size', logSize.toString()], - ['param', 'claim', claim.toString()], + ['param', 'log_n_instances', log_n_instances.toString()], + ['param', 'log_fibonnacci_size', log_fibonnacci_size.toString()], ['output', 'text/json'] ]; + // const tags = [ + // ['param', 'log_size', logSize.toString()], + // ['param', 'claim', claim.toString()], + // ['output', 'text/json'] + // ]; const tags_values = [ - ['param', 'log_size', logSize.toString()], - ['param', 'claim', claim.toString()], + ['param', 'log_n_instances', log_n_instances.toString()], + ['param', 'log_fibonnacci_size', log_fibonnacci_size.toString()], + // ['param', 'claim', claim.toString()], + // ['param', 'log_size', logSize.toString()], + // ['param', 'claim', claim.toString()], ]; - const inputs:Map= new Map(); + const inputs: Map = new Map(); - for(let tag of tags_values) { + for (let tag of tags_values) { inputs.set(tag[1], tag[2]) } - console.log("inputs",Object.fromEntries(inputs)) + console.log("inputs", Object.fromEntries(inputs)) const content = JSON.stringify({ request: { - log_size: logSize.toString(), - claim: claim.toString() + // log_size: logSize.toString(), + log_n_instances: log_n_instances.toString(), + log_fibonnacci_size: log_fibonnacci_size.toString(), + // claim: claim.toString() }, - program:{ - contract_name:"FibonnacciProvingRequest", - internal_contract_name:"FibonnacciProvingRequest", - contract_reached:"InternalAskeladd", + program: { + // contract_name: "PoseidonProvingRequest", + // internal_contract_name: "PoseidonProvingRequest", + contract_name: ProgramInternalContractName.WideFibonnaciProvingRequest.toString(), + internal_contract_name: ProgramInternalContractName.WideFibonnaciProvingRequest.toString(), + // internal_contract_name: "PoseidonProvingRequest", + + // 
contract_name:"FibonnacciProvingRequest", + // internal_contract_name:"FibonnacciProvingRequest", + contract_reached: "InternalAskeladd", // inputs:JSON.stringify(Object.fromEntries(inputs)), - inputs:Object.fromEntries(inputs), + inputs: Object.fromEntries(inputs), // inputs:tags } }) // Define the timestamp before which you want to fetch events // setTimestampJob(new Date().getTime() / 1000) setTimestampJob(new Date().getTime()) - console.log("inputs",inputs) - console.log("content",content) + console.log("inputs", inputs) + console.log("content", content) // return ; /** Use Nostr extension to send event */ const pool = new SimplePool(); const poolJob = new SimplePool(); const relay = await Relay.connect(ASKELADD_RELAY[0]) if (typeof window !== "undefined" && window.nostr) { - + const pubkey = await window.nostr.getPublicKey(); + console.log("pubkey",pubkey) let created_at = new Date().getTime(); setPublicKey(pubkey) const event = await window.nostr.signEvent({ @@ -303,11 +323,29 @@ export default function Home() { try { if (proof) { setIsLoading(true); - const prove_result = prove_and_verify(logSize, claim); - console.log("prove_result", prove_result); + + /** Change Poseidon to default */ + // const prove_result = prove_and_verify(log_n_instances); + // console.log("prove_result", prove_result); + // const serialised_proof_from_nostr_event = JSON.stringify(starkProof); + // console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); + // const verify_result = verify_stark_proof(logSize, serialised_proof_from_nostr_event); + // console.log("verify result", verify_result); + // console.log("verify message", verify_result.message); + // console.log("verify success", verify_result.success); + // if (verify_result?.success) { + // console.log("is success verify result") + // setProofStatus("verified"); + // } else { + // setError(verify_result?.message) + // } + + if (!log_n_instances && !log_fibonnacci_size) return; + const prove_result = stark_proof_wide_fibo(Number(log_fibonnacci_size), Number(log_n_instances)); + console.log("wide fibo prove_result", prove_result); const serialised_proof_from_nostr_event = JSON.stringify(starkProof); console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); - const verify_result = verify_stark_proof(logSize, claim, serialised_proof_from_nostr_event); + const verify_result = verify_stark_proof_wide_fibo(Number(log_fibonnacci_size), Number(log_n_instances), serialised_proof_from_nostr_event); console.log("verify result", verify_result); console.log("verify message", verify_result.message); console.log("verify success", verify_result.success); @@ -318,20 +356,22 @@ export default function Home() { setError(verify_result?.message) } - /** @TODO fix ERROR verify loop between all stark proof*/ - for (let event of events) { - const jobProofSerialize: JobResultProver = JSON.parse(event?.content) - const proofSerialize = jobProofSerialize?.response?.proof; - const verify_result = verify_stark_proof(logSize, claim, JSON.stringify(proofSerialize)); - if (verify_result?.success) { - console.log("loop verify result", verify_result.message); - console.log("loop verify success", verify_result.success); - console.log("is success verify result") - setProofStatus("verified"); - } else { - // setError(verify_result?.message) - } - } + /** FIB default */ + // const prove_result = prove_and_verify_fib(logSize, claim); + // console.log("prove_result", prove_result); + // const serialised_proof_from_nostr_event = 
JSON.stringify(starkProof); + // console.log("serialised_proof_from_nostr_event", serialised_proof_from_nostr_event); + // const verify_result = verify_stark_proof_fib(logSize, claim, serialised_proof_from_nostr_event); + // console.log("verify result", verify_result); + // console.log("verify message", verify_result.message); + // console.log("verify success", verify_result.success); + // if (verify_result?.success) { + // console.log("is success verify result") + // setProofStatus("verified"); + // } else { + // setError(verify_result?.message) + // } + setIsLoading(false); setIsFetchJob(true) } @@ -355,7 +395,42 @@ export default function Home() {

Censorship resistant global proving network

Verifiable computation for DVMs

+ + {/*

Prove poseidon

*/} +

Wide Fibonacci

+
+ + setLogFibonnacciSize(Number(e.target.value))} + className="w-full bg-black text-neon-green px-3 py-2 rounded border-neon-green border-2" + /> +
+ +
+ + setLogNInstances(Number(e.target.value))} + className="w-full bg-black text-neon-green px-3 py-2 rounded border-neon-green border-2" + /> +
+ + {/*
+ + setClaim(Number(e.target.value))} + className="w-full bg-black text-neon-green px-3 py-2 rounded border-neon-green border-2" + /> +
*/} + + + {/*
setClaim(Number(e.target.value))} className="w-full bg-black text-neon-green px-3 py-2 rounded border-neon-green border-2" /> -
+
*/}

Askeladd DVM

@@ -134,14 +160,27 @@ export default function StwoProgramMarketplace() {

Check the STWO Prover ready to use!

-
{internalProgram?.map((p, i) => { +
{internalProgram?.map((p, i) => { return ( - + ) })}
+
+ +
+
{events?.map((e, i) => { + console.log("e program", e) + const p: IGenerateZKPRequestDVM = JSON.parse(e.content) + console.log("p", p) + + return ( + + ) + })} +
Prove your claims and conquer the Nostr realm! diff --git a/askeladd-dvm-marketplace/src/constants/program.ts b/askeladd-dvm-marketplace/src/constants/program.ts index 2a9c89c..73c5262 100644 --- a/askeladd-dvm-marketplace/src/constants/program.ts +++ b/askeladd-dvm-marketplace/src/constants/program.ts @@ -27,28 +27,14 @@ program_map_multi_fibo.set("1","claims"); export const PROGRAM_INTERAL_REQUEST:IGenerateZKPRequestDVM[] = [ - { - // Fibonnaci - request: { - log_size:0, - claim:0 - }, - program_params: { - contract_name:ProgramInternalContractName.FibonnacciProvingRequest.toString(), - internal_contract_name:ProgramInternalContractName.FibonnacciProvingRequest, - contract_reached:ContractUploadType.InternalAskeladd, - inputs:program_map_fibo - } - - }, - + { // Wide Fibonnaci request: { log_fibonacci_size:0, log_n_instances:0 }, - program_params: { + program: { contract_name:ProgramInternalContractName.WideFibonnaciProvingRequest.toString(), internal_contract_name:ProgramInternalContractName.WideFibonnaciProvingRequest, contract_reached:ContractUploadType.InternalAskeladd, @@ -63,7 +49,7 @@ export const PROGRAM_INTERAL_REQUEST:IGenerateZKPRequestDVM[] = [ request: { log_n_instances:0 }, - program_params: { + program: { contract_name:ProgramInternalContractName.PoseidonProvingRequest.toString(), internal_contract_name:ProgramInternalContractName.PoseidonProvingRequest, contract_reached:ContractUploadType.InternalAskeladd, @@ -72,19 +58,35 @@ export const PROGRAM_INTERAL_REQUEST:IGenerateZKPRequestDVM[] = [ }, - { - // Multi Fibonnaci - request: { - log_sizes:0, - claims:0 - }, - program_params: { - contract_name:ProgramInternalContractName.MultiFibonnacciProvingRequest.toString(), - internal_contract_name:ProgramInternalContractName.MultiFibonnacciProvingRequest, - contract_reached:ContractUploadType.InternalAskeladd, - inputs:program_map_multi_fibo - } - - }, + // { + // // Fibonnaci + // request: { + // log_size:0, + // claim:0 + // }, + // program: { + // contract_name:ProgramInternalContractName.FibonnacciProvingRequest.toString(), + // internal_contract_name:ProgramInternalContractName.FibonnacciProvingRequest, + // contract_reached:ContractUploadType.InternalAskeladd, + // inputs:program_map_fibo + // } + + // }, + + + // { + // // Multi Fibonnaci + // request: { + // log_sizes:0, + // claims:0 + // }, + // program: { + // contract_name:ProgramInternalContractName.MultiFibonnacciProvingRequest.toString(), + // internal_contract_name:ProgramInternalContractName.MultiFibonnacciProvingRequest, + // contract_reached:ContractUploadType.InternalAskeladd, + // inputs:program_map_multi_fibo + // } + + // }, ] \ No newline at end of file diff --git a/askeladd-dvm-marketplace/src/pkg/stwo_wasm.d.ts b/askeladd-dvm-marketplace/src/pkg/stwo_wasm.d.ts index 5486370..d93e0c8 100644 --- a/askeladd-dvm-marketplace/src/pkg/stwo_wasm.d.ts +++ b/askeladd-dvm-marketplace/src/pkg/stwo_wasm.d.ts @@ -1,55 +1,59 @@ /* tslint:disable */ /* eslint-disable */ /** -* @param {number} log_fibonacci_size -* @param {number} log_n_instances +* @param {Uint32Array} log_sizes +* @param {Uint32Array} claims_int * @returns {StwoResult} */ -export function stark_proof_wide_fibo(log_fibonacci_size: number, log_n_instances: number): StwoResult; +export function stark_proof_multi_fibo(log_sizes: Uint32Array, claims_int: Uint32Array): StwoResult; /** -* @param {number} log_fibonacci_size * @param {number} log_n_instances -* @param {string} stark_proof_str * @returns {StwoResult} */ -export function 
verify_stark_proof_wide_fibo(log_fibonacci_size: number, log_n_instances: number, stark_proof_str: string): StwoResult; -/** -* @param {number} log_n_instances -* @returns {StwoResult} -*/ -export function prove_stark_proof_poseidon(log_n_instances: number): StwoResult; +export function prove_and_verify(log_n_instances: number): StwoResult; /** * @param {number} log_n_instances * @param {string} stark_proof_str * @returns {StwoResult} */ -export function verify_stark_proof_poseidon(log_n_instances: number, stark_proof_str: string): StwoResult; +export function verify_stark_proof(log_n_instances: number, stark_proof_str: string): StwoResult; /** * @param {number} log_size * @param {number} claim * @returns {StwoResult} */ -export function prove_and_verify(log_size: number, claim: number): StwoResult; +export function prove_and_verify_fib(log_size: number, claim: number): StwoResult; /** * @param {number} log_size * @param {number} claim * @param {string} stark_proof_str * @returns {StwoResult} */ -export function verify_stark_proof(log_size: number, claim: number, stark_proof_str: string): StwoResult; +export function verify_stark_proof_fib(log_size: number, claim: number, stark_proof_str: string): StwoResult; /** -* @param {Uint32Array} log_sizes -* @param {Uint32Array} claims_int +* @param {number} log_n_instances * @returns {StwoResult} */ -export function stark_proof_multi_fibo(log_sizes: Uint32Array, claims_int: Uint32Array): StwoResult; +export function prove_stark_proof_poseidon(log_n_instances: number): StwoResult; /** -* @param {Uint32Array} log_sizes -* @param {Uint32Array} claims_int +* @param {number} log_n_instances * @param {string} stark_proof_str * @returns {StwoResult} */ -export function verify_stark_proof_multi_fibo(log_sizes: Uint32Array, claims_int: Uint32Array, stark_proof_str: string): StwoResult; +export function verify_stark_proof_poseidon(log_n_instances: number, stark_proof_str: string): StwoResult; +/** +* @param {number} log_fibonacci_size +* @param {number} log_n_instances +* @returns {StwoResult} +*/ +export function stark_proof_wide_fibo(log_fibonacci_size: number, log_n_instances: number): StwoResult; +/** +* @param {number} log_fibonacci_size +* @param {number} log_n_instances +* @param {string} stark_proof_str +* @returns {StwoResult} +*/ +export function verify_stark_proof_wide_fibo(log_fibonacci_size: number, log_n_instances: number, stark_proof_str: string): StwoResult; /** */ export class StwoResult { @@ -66,21 +70,22 @@ export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembl export interface InitOutput { readonly memory: WebAssembly.Memory; - readonly stark_proof_wide_fibo: (a: number, b: number) => number; - readonly verify_stark_proof_wide_fibo: (a: number, b: number, c: number, d: number) => number; - readonly prove_stark_proof_poseidon: (a: number) => number; - readonly verify_stark_proof_poseidon: (a: number, b: number, c: number) => number; + readonly stark_proof_multi_fibo: (a: number, b: number, c: number, d: number) => number; readonly __wbg_stworesult_free: (a: number) => void; readonly stworesult_success: (a: number) => number; readonly stworesult_message: (a: number, b: number) => void; - readonly prove_and_verify: (a: number, b: number) => number; - readonly verify_stark_proof: (a: number, b: number, c: number, d: number) => number; - readonly stark_proof_multi_fibo: (a: number, b: number, c: number, d: number) => number; - readonly verify_stark_proof_multi_fibo: (a: number, b: number, c: number, d: number, e: 
number, f: number) => number; + readonly prove_and_verify: (a: number) => number; + readonly verify_stark_proof: (a: number, b: number, c: number) => number; + readonly prove_and_verify_fib: (a: number, b: number) => number; + readonly verify_stark_proof_fib: (a: number, b: number, c: number, d: number) => number; + readonly prove_stark_proof_poseidon: (a: number) => number; + readonly verify_stark_proof_poseidon: (a: number, b: number, c: number) => number; + readonly stark_proof_wide_fibo: (a: number, b: number) => number; + readonly verify_stark_proof_wide_fibo: (a: number, b: number, c: number, d: number) => number; readonly __wbindgen_malloc: (a: number, b: number) => number; - readonly __wbindgen_realloc: (a: number, b: number, c: number, d: number) => number; readonly __wbindgen_add_to_stack_pointer: (a: number) => number; readonly __wbindgen_free: (a: number, b: number, c: number) => void; + readonly __wbindgen_realloc: (a: number, b: number, c: number, d: number) => number; } export type SyncInitInput = BufferSource | WebAssembly.Module; diff --git a/askeladd-dvm-marketplace/src/pkg/stwo_wasm.js b/askeladd-dvm-marketplace/src/pkg/stwo_wasm.js index bf6de10..abf3f1b 100644 --- a/askeladd-dvm-marketplace/src/pkg/stwo_wasm.js +++ b/askeladd-dvm-marketplace/src/pkg/stwo_wasm.js @@ -17,17 +17,54 @@ function getStringFromWasm0(ptr, len) { ptr = ptr >>> 0; return cachedTextDecoder.decode(getUint8Memory0().subarray(ptr, ptr + len)); } + +let cachedUint32Memory0 = null; + +function getUint32Memory0() { + if (cachedUint32Memory0 === null || cachedUint32Memory0.byteLength === 0) { + cachedUint32Memory0 = new Uint32Array(wasm.memory.buffer); + } + return cachedUint32Memory0; +} + +let WASM_VECTOR_LEN = 0; + +function passArray32ToWasm0(arg, malloc) { + const ptr = malloc(arg.length * 4, 4) >>> 0; + getUint32Memory0().set(arg, ptr / 4); + WASM_VECTOR_LEN = arg.length; + return ptr; +} /** -* @param {number} log_fibonacci_size -* @param {number} log_n_instances +* @param {Uint32Array} log_sizes +* @param {Uint32Array} claims_int * @returns {StwoResult} */ -export function stark_proof_wide_fibo(log_fibonacci_size, log_n_instances) { - const ret = wasm.stark_proof_wide_fibo(log_fibonacci_size, log_n_instances); +export function stark_proof_multi_fibo(log_sizes, claims_int) { + const ptr0 = passArray32ToWasm0(log_sizes, wasm.__wbindgen_malloc); + const len0 = WASM_VECTOR_LEN; + const ptr1 = passArray32ToWasm0(claims_int, wasm.__wbindgen_malloc); + const len1 = WASM_VECTOR_LEN; + const ret = wasm.stark_proof_multi_fibo(ptr0, len0, ptr1, len1); return StwoResult.__wrap(ret); } -let WASM_VECTOR_LEN = 0; +let cachedInt32Memory0 = null; + +function getInt32Memory0() { + if (cachedInt32Memory0 === null || cachedInt32Memory0.byteLength === 0) { + cachedInt32Memory0 = new Int32Array(wasm.memory.buffer); + } + return cachedInt32Memory0; +} +/** +* @param {number} log_n_instances +* @returns {StwoResult} +*/ +export function prove_and_verify(log_n_instances) { + const ret = wasm.prove_and_verify(log_n_instances); + return StwoResult.__wrap(ret); +} const cachedTextEncoder = (typeof TextEncoder !== 'undefined' ? 
new TextEncoder('utf-8') : { encode: () => { throw Error('TextEncoder not available') } } ); @@ -83,113 +120,81 @@ function passStringToWasm0(arg, malloc, realloc) { return ptr; } /** -* @param {number} log_fibonacci_size * @param {number} log_n_instances * @param {string} stark_proof_str * @returns {StwoResult} */ -export function verify_stark_proof_wide_fibo(log_fibonacci_size, log_n_instances, stark_proof_str) { +export function verify_stark_proof(log_n_instances, stark_proof_str) { const ptr0 = passStringToWasm0(stark_proof_str, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); const len0 = WASM_VECTOR_LEN; - const ret = wasm.verify_stark_proof_wide_fibo(log_fibonacci_size, log_n_instances, ptr0, len0); + const ret = wasm.verify_stark_proof(log_n_instances, ptr0, len0); return StwoResult.__wrap(ret); } /** -* @param {number} log_n_instances +* @param {number} log_size +* @param {number} claim * @returns {StwoResult} */ -export function prove_stark_proof_poseidon(log_n_instances) { - const ret = wasm.prove_stark_proof_poseidon(log_n_instances); +export function prove_and_verify_fib(log_size, claim) { + const ret = wasm.prove_and_verify_fib(log_size, claim); return StwoResult.__wrap(ret); } /** -* @param {number} log_n_instances +* @param {number} log_size +* @param {number} claim * @param {string} stark_proof_str * @returns {StwoResult} */ -export function verify_stark_proof_poseidon(log_n_instances, stark_proof_str) { +export function verify_stark_proof_fib(log_size, claim, stark_proof_str) { const ptr0 = passStringToWasm0(stark_proof_str, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); const len0 = WASM_VECTOR_LEN; - const ret = wasm.verify_stark_proof_poseidon(log_n_instances, ptr0, len0); + const ret = wasm.verify_stark_proof_fib(log_size, claim, ptr0, len0); return StwoResult.__wrap(ret); } -let cachedInt32Memory0 = null; - -function getInt32Memory0() { - if (cachedInt32Memory0 === null || cachedInt32Memory0.byteLength === 0) { - cachedInt32Memory0 = new Int32Array(wasm.memory.buffer); - } - return cachedInt32Memory0; -} /** -* @param {number} log_size -* @param {number} claim +* @param {number} log_n_instances * @returns {StwoResult} */ -export function prove_and_verify(log_size, claim) { - const ret = wasm.prove_and_verify(log_size, claim); +export function prove_stark_proof_poseidon(log_n_instances) { + const ret = wasm.prove_stark_proof_poseidon(log_n_instances); return StwoResult.__wrap(ret); } /** -* @param {number} log_size -* @param {number} claim +* @param {number} log_n_instances * @param {string} stark_proof_str * @returns {StwoResult} */ -export function verify_stark_proof(log_size, claim, stark_proof_str) { +export function verify_stark_proof_poseidon(log_n_instances, stark_proof_str) { const ptr0 = passStringToWasm0(stark_proof_str, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); const len0 = WASM_VECTOR_LEN; - const ret = wasm.verify_stark_proof(log_size, claim, ptr0, len0); + const ret = wasm.verify_stark_proof_poseidon(log_n_instances, ptr0, len0); return StwoResult.__wrap(ret); } -let cachedUint32Memory0 = null; - -function getUint32Memory0() { - if (cachedUint32Memory0 === null || cachedUint32Memory0.byteLength === 0) { - cachedUint32Memory0 = new Uint32Array(wasm.memory.buffer); - } - return cachedUint32Memory0; -} - -function passArray32ToWasm0(arg, malloc) { - const ptr = malloc(arg.length * 4, 4) >>> 0; - getUint32Memory0().set(arg, ptr / 4); - WASM_VECTOR_LEN = arg.length; - return ptr; -} /** -* @param {Uint32Array} log_sizes -* @param {Uint32Array} 
claims_int +* @param {number} log_fibonacci_size +* @param {number} log_n_instances * @returns {StwoResult} */ -export function stark_proof_multi_fibo(log_sizes, claims_int) { - const ptr0 = passArray32ToWasm0(log_sizes, wasm.__wbindgen_malloc); - const len0 = WASM_VECTOR_LEN; - const ptr1 = passArray32ToWasm0(claims_int, wasm.__wbindgen_malloc); - const len1 = WASM_VECTOR_LEN; - const ret = wasm.stark_proof_multi_fibo(ptr0, len0, ptr1, len1); +export function stark_proof_wide_fibo(log_fibonacci_size, log_n_instances) { + const ret = wasm.stark_proof_wide_fibo(log_fibonacci_size, log_n_instances); return StwoResult.__wrap(ret); } /** -* @param {Uint32Array} log_sizes -* @param {Uint32Array} claims_int +* @param {number} log_fibonacci_size +* @param {number} log_n_instances * @param {string} stark_proof_str * @returns {StwoResult} */ -export function verify_stark_proof_multi_fibo(log_sizes, claims_int, stark_proof_str) { - const ptr0 = passArray32ToWasm0(log_sizes, wasm.__wbindgen_malloc); +export function verify_stark_proof_wide_fibo(log_fibonacci_size, log_n_instances, stark_proof_str) { + const ptr0 = passStringToWasm0(stark_proof_str, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); const len0 = WASM_VECTOR_LEN; - const ptr1 = passArray32ToWasm0(claims_int, wasm.__wbindgen_malloc); - const len1 = WASM_VECTOR_LEN; - const ptr2 = passStringToWasm0(stark_proof_str, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); - const len2 = WASM_VECTOR_LEN; - const ret = wasm.verify_stark_proof_multi_fibo(ptr0, len0, ptr1, len1, ptr2, len2); + const ret = wasm.verify_stark_proof_wide_fibo(log_fibonacci_size, log_n_instances, ptr0, len0); return StwoResult.__wrap(ret); } diff --git a/askeladd-dvm-marketplace/src/pkg/stwo_wasm_bg.wasm b/askeladd-dvm-marketplace/src/pkg/stwo_wasm_bg.wasm index b9320c2..a9bca5c 100644 Binary files a/askeladd-dvm-marketplace/src/pkg/stwo_wasm_bg.wasm and b/askeladd-dvm-marketplace/src/pkg/stwo_wasm_bg.wasm differ diff --git a/askeladd-dvm-marketplace/src/pkg/stwo_wasm_bg.wasm.d.ts b/askeladd-dvm-marketplace/src/pkg/stwo_wasm_bg.wasm.d.ts index 64f42bd..e25573d 100644 --- a/askeladd-dvm-marketplace/src/pkg/stwo_wasm_bg.wasm.d.ts +++ b/askeladd-dvm-marketplace/src/pkg/stwo_wasm_bg.wasm.d.ts @@ -1,18 +1,19 @@ /* tslint:disable */ /* eslint-disable */ export const memory: WebAssembly.Memory; -export function stark_proof_wide_fibo(a: number, b: number): number; -export function verify_stark_proof_wide_fibo(a: number, b: number, c: number, d: number): number; -export function prove_stark_proof_poseidon(a: number): number; -export function verify_stark_proof_poseidon(a: number, b: number, c: number): number; +export function stark_proof_multi_fibo(a: number, b: number, c: number, d: number): number; export function __wbg_stworesult_free(a: number): void; export function stworesult_success(a: number): number; export function stworesult_message(a: number, b: number): void; -export function prove_and_verify(a: number, b: number): number; -export function verify_stark_proof(a: number, b: number, c: number, d: number): number; -export function stark_proof_multi_fibo(a: number, b: number, c: number, d: number): number; -export function verify_stark_proof_multi_fibo(a: number, b: number, c: number, d: number, e: number, f: number): number; +export function prove_and_verify(a: number): number; +export function verify_stark_proof(a: number, b: number, c: number): number; +export function prove_and_verify_fib(a: number, b: number): number; +export function verify_stark_proof_fib(a: 
number, b: number, c: number, d: number): number; +export function prove_stark_proof_poseidon(a: number): number; +export function verify_stark_proof_poseidon(a: number, b: number, c: number): number; +export function stark_proof_wide_fibo(a: number, b: number): number; +export function verify_stark_proof_wide_fibo(a: number, b: number, c: number, d: number): number; export function __wbindgen_malloc(a: number, b: number): number; -export function __wbindgen_realloc(a: number, b: number, c: number, d: number): number; export function __wbindgen_add_to_stack_pointer(a: number): number; export function __wbindgen_free(a: number, b: number, c: number): void; +export function __wbindgen_realloc(a: number, b: number, c: number, d: number): number; diff --git a/askeladd-dvm-marketplace/src/types/index.ts b/askeladd-dvm-marketplace/src/types/index.ts index 1e8b29c..ae39cd4 100644 --- a/askeladd-dvm-marketplace/src/types/index.ts +++ b/askeladd-dvm-marketplace/src/types/index.ts @@ -49,6 +49,7 @@ export interface CommitmentSchemeProof { export enum ASKELADD_KINDS { KIND_JOB_REQUEST = 5600, KIND_JOB_RESULT = 6600, + KIND_JOB_LAUNCH_PROGRAM = 5700, // KIND_SUBMIT_PROGRAM } @@ -82,6 +83,7 @@ export enum ProgramInternalContractName { export enum ContractUploadType { InternalAskeladd = "InternalAskeladd", + Ipfs = "Ipfs", } export interface IProgramParams { @@ -100,5 +102,5 @@ export interface IProgramParams { } export interface IGenerateZKPRequestDVM { request?: any; - program_params?: IProgramParams; + program?: IProgramParams; } \ No newline at end of file diff --git a/crates/cli/src/dvm_customer.rs b/crates/cli/src/dvm_customer.rs index 9838d52..66324e6 100644 --- a/crates/cli/src/dvm_customer.rs +++ b/crates/cli/src/dvm_customer.rs @@ -75,10 +75,19 @@ async fn main() -> Result<(), Box> { contract_reached: ContractUploadType::InternalAskeladd, contract_name: Some("FibonacciProvingRequest".to_owned()), internal_contract_name: Some(ProgramInternalContractName::FibonnacciProvingRequest), + tags: None, }), }; println!("{}", "Job prepared successfully.".green()); + // /// Add poseidon + let settings = Settings::new().expect("Failed to load settings"); + + poseidon_program(customer).await?; + + let mut customer = Customer::new(settings)?; + customer.init().await?; + // ****************************************************** // ****************** SUBMIT JOB ************************ // ****************************************************** @@ -121,14 +130,6 @@ async fn main() -> Result<(), Box> { println!("{}", "└─────────────────────────────────────┘".red()); } - // /// Add poseidon - // let settings = Settings::new().expect("Failed to load settings"); - - // let mut customer = Customer::new(settings)?; - // customer.init().await?; - - // poseidon_program(customer).await?; - Ok(()) } @@ -154,6 +155,7 @@ pub async fn poseidon_program(customer: Customer) -> Result<(), CustomerError> { contract_reached: ContractUploadType::InternalAskeladd, contract_name: Some("PoseidonProvingRequest".to_owned()), internal_contract_name: Some(ProgramInternalContractName::PoseidonProvingRequest), + tags: None, }), }; println!("{}", "Job prepared successfully.".green()); diff --git a/crates/core/src/db.rs b/crates/core/src/db.rs index 1c0e01b..0b35fd9 100644 --- a/crates/core/src/db.rs +++ b/crates/core/src/db.rs @@ -33,9 +33,8 @@ impl Database { CREATE TABLE IF NOT EXISTS stwo_prover_launched ( id TEXT PRIMARY KEY, request_json TEXT NOT NULL, - program_param TEXT NOT NULL, - response_json TEXT, status TEXT NOT NULL, + program 
TEXT NOT NULL, created_at DATETIME DEFAULT CURRENT_TIMESTAMP, updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ); @@ -98,7 +97,7 @@ impl Database { let request_json = serde_json::to_string(request).unwrap(); let program_json = serde_json::to_string(program).unwrap(); self.conn.execute( - "INSERT INTO stwo_prover_launched (id, request_json, status, program) VALUES (?1, ?2, ?3)", + "INSERT INTO stwo_prover_launched (id, request_json, status, program) VALUES (?1, ?2, ?3, ?4)", params![job_id, request_json, RequestStatus::Pending.to_string(), program_json.to_string()], )?; Ok(()) diff --git a/crates/core/src/dvm/mod.rs b/crates/core/src/dvm/mod.rs index c96d63c..5652d71 100644 --- a/crates/core/src/dvm/mod.rs +++ b/crates/core/src/dvm/mod.rs @@ -15,7 +15,7 @@ pub mod constants { pub mod types { use std::collections::HashMap; - use nostr_sdk::EventId; + use nostr_sdk::{EventId, Tag}; use serde::{Deserialize, Serialize}; use serde_json::Value; use stwo_prover::core::prover::StarkProof; @@ -37,9 +37,9 @@ pub mod types { #[derive(Debug, Serialize, Deserialize, Clone)] pub enum ContractUploadType { InternalAskeladd, + Ipfs, // URl, // BackendEndpoint, - // Ipfs, } // Enum for internal_name program on ASKELADD @@ -65,6 +65,7 @@ pub mod types { pub contract_reached: ContractUploadType, pub contract_name: Option<String>, pub internal_contract_name: Option<ProgramInternalContractName>, + pub tags: Option<Vec<Tag>>, // For External program // pub endpoint:Option, } @@ -172,7 +173,7 @@ pub mod types { #[derive(Debug, Serialize, Deserialize, Clone)] pub struct WideFibonnacciProvingRequest { - pub log_fibonacci_size: u32, + pub log_fibonnacci_size: u32, pub log_n_instances: u32, } diff --git a/crates/core/src/dvm/service_provider.rs b/crates/core/src/dvm/service_provider.rs index ccabc49..6b52713 100644 --- a/crates/core/src/dvm/service_provider.rs +++ b/crates/core/src/dvm/service_provider.rs @@ -11,6 +11,7 @@ use crate::config::Settings; use crate::db::{Database, RequestStatus}; use crate::dvm::constants::{JOB_LAUNCH_PROGRAM_KIND, JOB_REQUEST_KIND}; use crate::dvm::types::{GenerateZKPJobRequest, GenerateZKPJobResult, ProgramParams}; +use crate::nostr_utils::extract_params_from_tags; // use crate::nostr_utils::extract_params_from_tags; use crate::prover_service::ProverService; use crate::utils::convert_inputs_to_run_program; @@ -114,16 +115,6 @@ impl ServiceProvider { info!("Subscribed to proving requests, waiting for requests..."); - // Start handling Nostr notifications - self.nostr_client - .handle_notifications(|notification| async { - match self.handle_notification(notification).await { - Ok(result) => Ok(result), - Err(e) => Err(Box::new(e) as Box), - } - }) - .await?; - // Start JOB LAUNCH PROGRAM subscription let launch_program_req_id = SubscriptionId::new(&self.settings.launch_program_req_id); let filter_launch_program = Filter::new() @@ -140,7 +131,9 @@ impl ServiceProvider { .await .map_err(|e| ServiceProviderError::NostrSubscriptionError(e.to_string()))?; - // Start handling LAUNCH_PROGRAM + info!("Subscribed to launch program, waiting for requests..."); + + // Start handling Nostr notifications self.nostr_client .handle_notifications(|notification| async { match self.handle_notification(notification).await { @@ -266,12 +259,18 @@ impl ServiceProvider { proof: response.proof, }; + let tags = vec![ + // Reply tag directly to the JOB_REQUEST + Tag::parse(&["e", &job_id.clone(), "", "reply"]).unwrap(), + ]; + let response_json = serde_json::to_string(&job_result)?; println!("Response JSON: {:?}", response_json); let job_result_event: Event =
EventBuilder::job_result(*event, response_json, 0, None) .unwrap() + .add_tags(tags) .to_event(&self.prover_agent_keys) .unwrap(); @@ -302,42 +301,62 @@ impl ServiceProvider { info!("LAUNCH_PROGRAM request received [{}]", event.id); let job_id = event.id.to_string(); - // let tags = &event.tags; - // let params = extract_params_from_tags(tags); + println!("job_id {:?}", job_id); + + let tags = &event.tags; + let params = extract_params_from_tags(tags); + + println!("params {:?}", params); + + println!("event {:?}", event.content); // Deserialze content - let zkp_request = ServiceProvider::deserialize_zkp_request_data(&event.content.to_owned())?; + // let zkp_request = + // ServiceProvider::deserialize_zkp_request_data(&event.content.to_owned())?; // let params_program: Option = zkp_request.program.clone(); + // println!("zkp_request {:?}", zkp_request); // Request on the content // Check request of the launch_program - let request_str = serde_json::to_string(&zkp_request.request).unwrap(); - // let request_str = serde_json::to_string(&request).unwrap(); - let request_value = serde_json::from_str(&request_str).unwrap(); + let request_str = serde_json::to_string(&event.content).unwrap(); + // let request_str = serde_json::to_string(&zkp_request.request).unwrap(); + let request_value: Value = serde_json::from_str(&request_str).unwrap(); + println!("request_value {:?}", request_value); // TAGS - let program_str = serde_json::to_string(&zkp_request.program).unwrap(); - let program_value = serde_json::from_str(&program_str).unwrap(); - - // Look if this program is already launched and save - if let Some(status) = self.db.get_program_status(&job_id)? { - match status { - RequestStatus::Completed => { - info!("Request {} already processed, skipping", &job_id); - return Ok(()); - } - RequestStatus::Failed => { - info!("Request {} failed before, retrying", &job_id); - } - RequestStatus::Pending => { - info!("Request {} is already pending, skipping", &job_id); - return Ok(()); + // let program_str = serde_json::to_string(&zkp_request.program).unwrap(); + let program_value: Value = serde_json::from_str(&request_str).unwrap(); + println!("program_value {:?}", program_value); + + let zkp_request = + match ServiceProvider::deserialize_zkp_request_data(&event.content.to_owned()) { + Ok(zkp) => zkp, + Err(e) => { + println!("{:?}", e); + return Err(e); } - } - } else { - self.db - .insert_program_launched(&job_id, &request_value, &program_value)?; - } + }; + println!("zkp_request {:?}", zkp_request); + // TODO + // Look if this program is already launched and save + // if let Some(status) = self.db.get_program_status(&job_id)? 
{ + // match status { + // RequestStatus::Completed => { + // info!("Request LAUNCH_PROGRAM {} already processed, skipping", &job_id); + // return Ok(()); + // } + // RequestStatus::Failed => { + // info!("Request LAUNCH_PROGRAM {} failed before, retrying", &job_id); + // } + // RequestStatus::Pending => { + // info!("Request LAUNCH_PROGRAM {} is already pending, skipping", &job_id); + // return Ok(()); + // } + // } + // } else { + // self.db + // .insert_program_launched(&job_id, &request_value, &program_value)?; + // } // Look program param @@ -346,6 +365,23 @@ impl ServiceProvider { // Backend endpoint // WASM program // Maybe other way to do it + + // TODO check + + // Send JOB_RESULT + let response_json = serde_json::to_string(&request_str)?; + println!("Response JSON: {:?}", response_json); + + let job_result_event: Event = EventBuilder::job_result(*event, response_json, 0, None) + .unwrap() + .to_event(&self.prover_agent_keys) + .unwrap(); + + let event_id = self.nostr_client.send_event(job_result_event).await?; + info!( + "LAUNCH PROGRAM response published [{}]", + event_id.to_string() + ); Ok(()) } } diff --git a/crates/core/src/prover_service.rs b/crates/core/src/prover_service.rs index 1701dff..ea5459a 100644 --- a/crates/core/src/prover_service.rs +++ b/crates/core/src/prover_service.rs @@ -4,19 +4,26 @@ use std::fmt; use serde_json::Result as SerdeResult; use stwo_prover::core::backend::simd::fft::MIN_FFT_LOG_SIZE; use stwo_prover::core::circle::M31_CIRCLE_LOG_ORDER; -use stwo_prover::core::fields::m31::{self, BaseField}; +use stwo_prover::core::fields::m31::BaseField; use stwo_prover::core::prover::ProvingError; use stwo_prover::core::vcs::blake2_merkle::Blake2sMerkleHasher; -use stwo_prover::examples::fibonacci::{Fibonacci, MultiFibonacci}; +use stwo_wasm::fibonnaci::Fibonacci; use stwo_wasm::poseidon::{PoseidonStruct, LOG_N_LANES, N_LOG_INSTANCES_PER_ROW}; use stwo_wasm::wide_fibonnacci::WideFibStruct; use thiserror::Error; use crate::dvm::types::{ - ContractUploadType, FibonnacciProvingRequest, FibonnacciProvingResponse, - GenericProvingResponse, MultiFibonnacciProvingRequest, PoseidonProvingRequest, - ProgramInternalContractName, ProgramParams, WideFibonnacciProvingRequest, + ContractUploadType, + FibonnacciProvingRequest, + FibonnacciProvingResponse, + GenericProvingResponse, + PoseidonProvingRequest, + ProgramInternalContractName, + ProgramParams, + WideFibonnacciProvingRequest, + // MultiFibonnacciProvingRequest }; +// use stwo_wasm::fibonnaci::multi_fibonacci::MultiFibonacci; use crate::utils::convert_inputs_to_run_program; #[derive(Error, Debug, Clone)] @@ -84,6 +91,10 @@ impl ProverService { match p.contract_reached { ContractUploadType::InternalAskeladd => { self.internal_program(request, request_str, p) + } + ContractUploadType::Ipfs => { + println!("TODO implement IPFS WASM"); + Err("IPFS_CONTRACT_IN_PROCESS".to_string()) } // => Err(ProverServiceError::NoProgramParam.to_string()), } } else { @@ -106,35 +117,46 @@ impl ProverService { Some(internal_contract) => match internal_contract { ProgramInternalContractName::FibonnacciProvingRequest => { println!("try check request fib"); - let fib_req_res: SerdeResult = - serde_json::from_str(serialized_request); - let fib_req = match fib_req_res.as_ref() { - Ok(req) => req.clone(), - Err(e) => return Err(e.to_string()), - }; - let fib = Fibonacci::new(fib_req.log_size, BaseField::from(fib_req.claim)); - match fib.prove() { - Ok(proof) => Ok(GenericProvingResponse::new(request.clone(), proof)), - Err(e) => 
Err(e.to_string()), - } + // let fib_req_res: SerdeResult = + // serde_json::from_str(serialized_request); + // let fib_req = match fib_req_res.as_ref() { + // Ok(req) => req.clone(), + // Err(e) => return Err(e.to_string()), + // }; + // println!("init fib program"); + + // let fib = Fibonacci::new(fib_req.log_size, BaseField::from(fib_req.claim)); + // println!("try prove"); + + println!("WIP FIX Fibonnacci WASM"); + + Err(ProvingError::ConstraintsNotSatisfied.to_string()) + + // match fib.prove() { + // Ok(proof) => Ok(GenericProvingResponse::new(request.clone(), proof)), + // Err(e) => Err(e.to_string()), + // } } ProgramInternalContractName::MultiFibonnaciProvingRequest => { - let multi_fibo_res: SerdeResult = - serde_json::from_str(serialized_request); - let mul_fib_req = match multi_fibo_res.as_ref() { - Ok(req) => req.clone(), - Err(e) => return Err(e.to_string()), - }; - let claims: Vec = mul_fib_req - .claims - .into_iter() - .map(m31::M31::from_u32_unchecked) - .collect(); - let multi_fibo = MultiFibonacci::new(mul_fib_req.log_sizes, claims); - match multi_fibo.prove() { - Ok(proof) => Ok(GenericProvingResponse::new(request.clone(), proof)), - Err(e) => Err(e.to_string()), - } + println!("WIP FIX Multi Fibonnacci WASM"); + + Err(ProvingError::ConstraintsNotSatisfied.to_string()) + // let multi_fibo_res: SerdeResult = + // serde_json::from_str(serialized_request); + // let mul_fib_req = match multi_fibo_res.as_ref() { + // Ok(req) => req.clone(), + // Err(e) => return Err(e.to_string()), + // }; + // let claims: Vec = mul_fib_req + // .claims + // .into_iter() + // .map(m31::M31::from_u32_unchecked) + // .collect(); + // let multi_fibo = MultiFibonacci::new(mul_fib_req.log_sizes, claims); + // match multi_fibo.prove() { + // Ok(proof) => Ok(GenericProvingResponse::new(request.clone(), proof)), + // Err(e) => Err(e.to_string()), + // } } ProgramInternalContractName::Custom(_) => { println!("Custom internal contract"); @@ -219,7 +241,7 @@ impl ProverService { Err(e) => return Err(e.to_string()), }; let wide_fib = WideFibStruct::new( - wide_fib_req.log_fibonacci_size, + wide_fib_req.log_fibonnacci_size, wide_fib_req.log_n_instances, ); match wide_fib.prove::() { diff --git a/crates/core/src/verifier_service.rs b/crates/core/src/verifier_service.rs index 8e2685b..d9efd97 100644 --- a/crates/core/src/verifier_service.rs +++ b/crates/core/src/verifier_service.rs @@ -1,7 +1,7 @@ use serde::{Deserialize, Serialize}; use stwo_prover::core::fields::m31::BaseField; use stwo_prover::core::prover::VerificationError; -use stwo_prover::examples::fibonacci::Fibonacci; +use stwo_wasm::fibonnaci::Fibonacci; use crate::dvm::types::{FibonnacciProvingResponse, GenericProvingResponse}; // Define an enum to encapsulate possible deserialized types diff --git a/crates/stwo_wasm/Cargo.toml b/crates/stwo_wasm/Cargo.toml index 3de541a..96e3e6e 100644 --- a/crates/stwo_wasm/Cargo.toml +++ b/crates/stwo_wasm/Cargo.toml @@ -15,6 +15,8 @@ wasm-bindgen = "0.2.84" serde = "1.0.204" serde_json = "1.0.121" getrandom = { version = "0.2", features = ["js"] } +num-traits = "0.2.17" +itertools = "0.12.0" [dev-dependencies] wasm-bindgen-test = "0.3.34" diff --git a/crates/stwo_wasm/src/fibonnaci/air.rs b/crates/stwo_wasm/src/fibonnaci/air.rs new file mode 100644 index 0000000..bb4d10c --- /dev/null +++ b/crates/stwo_wasm/src/fibonnaci/air.rs @@ -0,0 +1,186 @@ +use itertools::{zip_eq, Itertools}; +use stwo_prover::core::air::{Air, AirProver, Component, ComponentProver}; +use stwo_prover::core::backend::CpuBackend; +use 
stwo_prover::core::channel::Channel; +use stwo_prover::core::fields::m31::BaseField; +use stwo_prover::core::poly::circle::CircleEvaluation; +use stwo_prover::core::poly::BitReversedOrder; +use stwo_prover::core::prover::VerificationError; +use stwo_prover::core::{ColumnVec, InteractionElements, LookupValues}; +use stwo_prover::trace_generation::registry::ComponentGenerationRegistry; +use stwo_prover::trace_generation::{AirTraceGenerator, AirTraceVerifier, ComponentTraceGenerator}; + +use super::component::{FibonacciComponent, FibonacciInput, FibonacciTraceGenerator}; + +pub struct FibonacciAirGenerator { + pub registry: ComponentGenerationRegistry, +} + +impl FibonacciAirGenerator { + pub fn new(inputs: &FibonacciInput) -> Self { + let mut component_generator = FibonacciTraceGenerator::new(); + component_generator.add_inputs(inputs); + let mut registry = ComponentGenerationRegistry::default(); + registry.register("fibonacci", component_generator); + Self { registry } + } +} + +impl AirTraceVerifier for FibonacciAirGenerator { + fn interaction_elements(&self, _channel: &mut impl Channel) -> InteractionElements { + InteractionElements::default() + } + + fn verify_lookups(&self, _lookup_values: &LookupValues) -> Result<(), VerificationError> { + Ok(()) + } +} + +impl AirTraceGenerator for FibonacciAirGenerator { + fn write_trace(&mut self) -> Vec> { + FibonacciTraceGenerator::write_trace("fibonacci", &mut self.registry) + } + + fn interact( + &self, + _trace: &ColumnVec>, + _elements: &InteractionElements, + ) -> Vec> { + vec![] + } + + fn to_air_prover(&self) -> impl AirProver { + let component_generator = self + .registry + .get_generator::("fibonacci"); + FibonacciAir { + component: component_generator.component(), + } + } + + fn composition_log_degree_bound(&self) -> u32 { + let component_generator = self + .registry + .get_generator::("fibonacci"); + assert!(component_generator.inputs_set(), "Fibonacci input not set."); + component_generator + .component() + .max_constraint_log_degree_bound() + } +} + +#[derive(Clone)] +pub struct FibonacciAir { + pub component: FibonacciComponent, +} + +impl FibonacciAir { + pub fn new(component: FibonacciComponent) -> Self { + Self { component } + } +} + +impl Air for FibonacciAir { + fn components(&self) -> Vec<&dyn Component> { + vec![&self.component] + } +} + +impl AirTraceVerifier for FibonacciAir { + fn interaction_elements(&self, _channel: &mut impl Channel) -> InteractionElements { + InteractionElements::default() + } + + fn verify_lookups(&self, _lookup_values: &LookupValues) -> Result<(), VerificationError> { + Ok(()) + } +} + +impl AirTraceGenerator for FibonacciAir { + fn interact( + &self, + _trace: &ColumnVec>, + _elements: &InteractionElements, + ) -> Vec> { + vec![] + } + + fn to_air_prover(&self) -> impl AirProver { + self.clone() + } + + fn composition_log_degree_bound(&self) -> u32 { + self.component.max_constraint_log_degree_bound() + } +} + +impl AirProver for FibonacciAir { + fn component_provers(&self) -> Vec<&dyn ComponentProver> { + vec![&self.component] + } +} + +#[derive(Clone)] +pub struct MultiFibonacciAir { + pub components: Vec, +} + +impl MultiFibonacciAir { + pub fn new(log_sizes: &[u32], claim: &[BaseField]) -> Self { + let mut components = Vec::new(); + for (log_size, claim) in zip_eq(log_sizes.iter(), claim.iter()) { + components.push(FibonacciComponent::new(*log_size, *claim)); + } + Self { components } + } +} + +impl Air for MultiFibonacciAir { + fn components(&self) -> Vec<&dyn Component> { + self.components + 
.iter() + .map(|c| c as &dyn Component) + .collect_vec() + } +} + +impl AirTraceVerifier for MultiFibonacciAir { + fn interaction_elements(&self, _channel: &mut impl Channel) -> InteractionElements { + InteractionElements::default() + } + + fn verify_lookups(&self, _lookup_values: &LookupValues) -> Result<(), VerificationError> { + Ok(()) + } +} + +impl AirTraceGenerator for MultiFibonacciAir { + fn interact( + &self, + _trace: &ColumnVec>, + _elements: &InteractionElements, + ) -> Vec> { + vec![] + } + + fn to_air_prover(&self) -> impl AirProver { + self.clone() + } + + fn composition_log_degree_bound(&self) -> u32 { + self.components + .iter() + .map(|component| component.max_constraint_log_degree_bound()) + .max() + .unwrap() + } +} + +impl AirProver for MultiFibonacciAir { + fn component_provers(&self) -> Vec<&dyn ComponentProver> { + self.components + .iter() + .map(|c| c as &dyn ComponentProver) + .collect_vec() + } +} diff --git a/crates/stwo_wasm/src/fibonnaci/component.rs b/crates/stwo_wasm/src/fibonnaci/component.rs new file mode 100644 index 0000000..b54cf8d --- /dev/null +++ b/crates/stwo_wasm/src/fibonnaci/component.rs @@ -0,0 +1,231 @@ +use std::ops::Div; + +use num_traits::One; +use stwo_prover::core::air::accumulation::{ + DomainEvaluationAccumulator, PointEvaluationAccumulator, +}; +use stwo_prover::core::air::mask::shifted_mask_points; +use stwo_prover::core::air::{Component, ComponentProver, ComponentTrace}; +use stwo_prover::core::backend::CpuBackend; +use stwo_prover::core::circle::{CirclePoint, Coset}; +use stwo_prover::core::constraints::{coset_vanishing, pair_vanishing}; +use stwo_prover::core::fields::m31::BaseField; +use stwo_prover::core::fields::qm31::SecureField; +use stwo_prover::core::fields::{ExtensionOf, FieldExpOps}; +use stwo_prover::core::pcs::TreeVec; +use stwo_prover::core::poly::circle::{CanonicCoset, CircleEvaluation}; +use stwo_prover::core::poly::BitReversedOrder; +use stwo_prover::core::utils::bit_reverse_index; +use stwo_prover::core::{ColumnVec, InteractionElements, LookupValues}; +use stwo_prover::trace_generation::registry::ComponentGenerationRegistry; +use stwo_prover::trace_generation::{ComponentGen, ComponentTraceGenerator, BASE_TRACE}; + +#[derive(Clone)] +pub struct FibonacciComponent { + pub log_size: u32, + pub claim: BaseField, +} + +impl FibonacciComponent { + pub fn new(log_size: u32, claim: BaseField) -> Self { + Self { log_size, claim } + } + + /// Evaluates the step constraint quotient polynomial on a single point. + /// The step constraint is defined as: + /// mask[0]^2 + mask[1]^2 - mask[2] + fn step_constraint_eval_quotient_by_mask>( + &self, + point: CirclePoint, + mask: &[F; 3], + ) -> F { + let constraint_zero_domain = Coset::subgroup(self.log_size); + let constraint_value = mask[0].square() + mask[1].square() - mask[2]; + let selector = pair_vanishing( + constraint_zero_domain + .at(constraint_zero_domain.size() - 2) + .into_ef(), + constraint_zero_domain + .at(constraint_zero_domain.size() - 1) + .into_ef(), + point, + ); + let num = constraint_value * selector; + let denom = coset_vanishing(constraint_zero_domain, point); + num / denom + } + + /// Evaluates the boundary constraint quotient polynomial on a single point. + fn boundary_constraint_eval_quotient_by_mask>( + &self, + point: CirclePoint, + mask: &[F; 1], + ) -> F { + let constraint_zero_domain = Coset::subgroup(self.log_size); + let p = constraint_zero_domain.at(constraint_zero_domain.size() - 1); + // On (1,0), we should get 1. 
+ // On p, we should get self.claim. + // 1 + y * (self.claim - 1) * p.y^-1 + // TODO(spapini): Cache the constant. + let linear = F::one() + point.y * (self.claim - BaseField::one()) * p.y.inverse(); + + let num = mask[0] - linear; + let denom = pair_vanishing(p.into_ef(), CirclePoint::zero(), point); + num / denom + } +} + +impl Component for FibonacciComponent { + fn n_constraints(&self) -> usize { + 2 + } + + fn max_constraint_log_degree_bound(&self) -> u32 { + // Step constraint is of degree 2. + self.log_size + 1 + } + + fn trace_log_degree_bounds(&self) -> TreeVec> { + TreeVec::new(vec![vec![self.log_size]]) + } + + fn mask_points( + &self, + point: CirclePoint, + ) -> TreeVec>>> { + TreeVec::new(vec![shifted_mask_points( + &vec![vec![0, 1, 2]], + &[CanonicCoset::new(self.log_size)], + point, + )]) + } + + fn evaluate_constraint_quotients_at_point( + &self, + point: CirclePoint, + mask: &TreeVec>>, + evaluation_accumulator: &mut PointEvaluationAccumulator, + _interaction_elements: &InteractionElements, + _lookup_values: &LookupValues, + ) { + evaluation_accumulator.accumulate( + self.step_constraint_eval_quotient_by_mask(point, &mask[0][0][..].try_into().unwrap()), + ); + evaluation_accumulator.accumulate(self.boundary_constraint_eval_quotient_by_mask( + point, + &mask[0][0][..1].try_into().unwrap(), + )); + } +} + +#[derive(Copy, Clone)] +pub struct FibonacciInput { + pub log_size: u32, + pub claim: BaseField, +} + +#[derive(Clone)] +pub struct FibonacciTraceGenerator { + input: Option, +} + +impl ComponentGen for FibonacciTraceGenerator {} + +impl FibonacciTraceGenerator { + #[allow(clippy::new_without_default)] + pub fn new() -> Self { + Self { input: None } + } + + pub fn inputs_set(&self) -> bool { + self.input.is_some() + } +} + +impl ComponentTraceGenerator for FibonacciTraceGenerator { + type Component = FibonacciComponent; + type Inputs = FibonacciInput; + + fn add_inputs(&mut self, inputs: &Self::Inputs) { + assert!(!self.inputs_set(), "Fibonacci input already set."); + self.input = Some(*inputs); + } + + fn write_trace( + component_id: &str, + registry: &mut ComponentGenerationRegistry, + ) -> ColumnVec> { + let trace_generator = registry.get_generator_mut::(component_id); + assert!(trace_generator.inputs_set(), "Fibonacci input not set."); + let trace_domain = CanonicCoset::new(trace_generator.input.unwrap().log_size); + let mut trace = Vec::with_capacity(trace_domain.size()); + + // Fill trace with fibonacci squared. + let mut a = BaseField::one(); + let mut b = BaseField::one(); + for _ in 0..trace_domain.size() { + trace.push(a); + let tmp = a.square() + b.square(); + a = b; + b = tmp; + } + + // Returns as a CircleEvaluation. 
+ vec![CircleEvaluation::new_canonical_ordered(trace_domain, trace)] + } + + fn write_interaction_trace( + &self, + _trace: &ColumnVec<&CircleEvaluation>, + _elements: &InteractionElements, + ) -> ColumnVec> { + vec![] + } + + fn component(&self) -> Self::Component { + assert!(self.inputs_set(), "Fibonacci input not set."); + FibonacciComponent::new(self.input.unwrap().log_size, self.input.unwrap().claim) + } +} + +impl ComponentProver for FibonacciComponent { + fn evaluate_constraint_quotients_on_domain( + &self, + trace: &ComponentTrace<'_, CpuBackend>, + evaluation_accumulator: &mut DomainEvaluationAccumulator, + _interaction_elements: &InteractionElements, + _lookup_values: &LookupValues, + ) { + let poly = &trace.polys[BASE_TRACE][0]; + let trace_domain = CanonicCoset::new(self.log_size); + let trace_eval_domain = CanonicCoset::new(self.log_size + 1).circle_domain(); + let trace_eval = poly.evaluate(trace_eval_domain).bit_reverse(); + + // Step constraint. + let constraint_log_degree_bound = trace_domain.log_size() + 1; + let [mut accum] = evaluation_accumulator.columns([(constraint_log_degree_bound, 2)]); + let constraint_eval_domain = trace_eval_domain; + for (off, point_coset) in [ + (0, constraint_eval_domain.half_coset), + ( + constraint_eval_domain.half_coset.size(), + constraint_eval_domain.half_coset.conjugate(), + ), + ] { + let eval = trace_eval.fetch_eval_on_coset(point_coset.shift(trace_domain.index_at(0))); + let mul = trace_domain.step_size().div(point_coset.step_size); + for (i, point) in point_coset.iter().enumerate() { + let mask = [eval[i], eval[i as isize + mul], eval[i as isize + 2 * mul]]; + let mut res = self.boundary_constraint_eval_quotient_by_mask(point, &[mask[0]]) + * accum.random_coeff_powers[0]; + res += self.step_constraint_eval_quotient_by_mask(point, &mask) + * accum.random_coeff_powers[1]; + accum.accumulate(bit_reverse_index(i + off, constraint_log_degree_bound), res); + } + } + } + + fn lookup_values(&self, _trace: &ComponentTrace<'_, CpuBackend>) -> LookupValues { + LookupValues::default() + } +} diff --git a/crates/stwo_wasm/src/fibonnaci/mod.rs b/crates/stwo_wasm/src/fibonnaci/mod.rs new file mode 100644 index 0000000..7945676 --- /dev/null +++ b/crates/stwo_wasm/src/fibonnaci/mod.rs @@ -0,0 +1,249 @@ +// mod.rs fibonnaci +use self::component::FibonacciComponent; +use crate::StwoResult; +pub mod air; +pub mod component; +pub mod multi_fibonacci; + +use air::FibonacciAir; +use num_traits::One; +// use serde::{Deserialize, Serialize}; +use stwo_prover::core::backend::cpu::CpuCircleEvaluation; +use stwo_prover::core::backend::CpuBackend; +use stwo_prover::core::channel::{Blake2sChannel, Channel}; +use stwo_prover::core::fields::m31::BaseField; +use stwo_prover::core::fields::{FieldExpOps, IntoSlice}; +use stwo_prover::core::pcs::CommitmentSchemeProver; +use stwo_prover::core::poly::circle::{CanonicCoset, CircleEvaluation, PolyOps}; +use stwo_prover::core::poly::BitReversedOrder; +use stwo_prover::core::prover::{ + prove, + // verify, + ProvingError, + StarkProof, + VerificationError, + LOG_BLOWUP_FACTOR, +}; +use stwo_prover::core::vcs::blake2_hash::Blake2sHasher; +use stwo_prover::core::vcs::blake2_merkle::Blake2sMerkleHasher; +use stwo_prover::core::InteractionElements; +use stwo_prover::trace_generation::{ + // commit_and_prove, + commit_and_verify, +}; +// use stwo_prover::core::pcs::{ CommitmentSchemeVerifier}; + +// use stwo_prover::trace_generation::{commit_and_prove, commit_and_verify}; +use wasm_bindgen::prelude::*; + +// use 
num_traits::One; + +// use self::air::{FibonacciAir, MultiFibonacciAir}; + +#[wasm_bindgen] +extern "C" { + // Use `js_namespace` here to bind `console.log(..)` instead of just + // `log(..)` + #[wasm_bindgen(js_namespace = console)] + fn log(s: &str); +} + +macro_rules! console_log { + // Note that this is using the `log` function imported above + ($($t:tt)*) => (log(&format_args!($($t)*).to_string())) +} + +#[derive(Clone)] +pub struct Fibonacci { + pub component: FibonacciComponent, + pub air: FibonacciAir, +} + +impl Fibonacci { + pub fn new(log_size: u32, claim: BaseField) -> Self { + let component = FibonacciComponent::new(log_size, claim); + let air = FibonacciAir { + component: component.clone(), + }; + Self { + component: component.clone(), + air, + } + } + + pub fn get_trace(&self) -> CpuCircleEvaluation<BaseField, BitReversedOrder> { + // Trace. + let trace_domain = CanonicCoset::new(self.component.log_size); + let mut trace = Vec::with_capacity(trace_domain.size()); + + // Fill trace with fibonacci squared. + let mut a = BaseField::one(); + let mut b = BaseField::one(); + for _ in 0..trace_domain.size() { + trace.push(a); + let tmp = a.square() + b.square(); + a = b; + b = tmp; + } + + // Returns as a CircleEvaluation. + CircleEvaluation::new_canonical_ordered(trace_domain, trace) + } + + pub fn prove(&self) -> Result<StarkProof<Blake2sMerkleHasher>, ProvingError> { + println!("channel"); + + let channel = &mut Blake2sChannel::new(Blake2sHasher::hash(BaseField::into_slice(&[self + .component + .claim]))); + println!("twiddles"); + + let twiddles = CpuBackend::precompute_twiddles( + CanonicCoset::new(self.component.log_size) + .circle_domain() + .half_coset, + ); + println!("commitment_scheme"); + + let commitment_scheme = &mut CommitmentSchemeProver::new(LOG_BLOWUP_FACTOR, &twiddles); + println!("get trace"); + + // let trace = self.get_trace(); + + println!("trace_domain"); + // let trace_domain = CanonicCoset::new(self.component.log_size, self.component.claim); + + // let trace = trace + // .into_iter() + // .map(|eval| CpuCircleEvaluation::new_canonical_ordered(trace_domain, eval)) + // .collect_vec(); + + // let trace = self.get_trace(); + // let trace = self.get_trace(); + // let channel = &mut Blake2sChannel::new(Blake2sHasher::hash(BaseField::into_slice(&[self + // .air + // .component + // .claim]))); + + // commit_and_prove(&self.air, channel, trace); + // commit_and_prove(&self.air, channel, vec![trace]); + + let proof = prove( + &[&self.component], + channel, + &InteractionElements::default(), + commitment_scheme, // vec![trace], + ) + .map_err(Err::<StarkProof<Blake2sMerkleHasher>, ProvingError>); + + match proof { + Ok(p) => Ok(p), + Err(_) => Err(ProvingError::ConstraintsNotSatisfied), + } + } + + pub fn verify(&self, proof: StarkProof<Blake2sMerkleHasher>) -> Result<(), VerificationError> { + // let verifier_channel = + // &mut Blake2sChannel::new(Blake2sHasher::hash(BaseField::into_slice(&[]))); + // let commitment_scheme = &mut CommitmentSchemeVerifier::new(); + // verify( + // &[&self.component], + // verifier_channel, + // &InteractionElements::default(), + // commitment_scheme, + // proof, + // ) + + let channel = &mut Blake2sChannel::new(Blake2sHasher::hash(BaseField::into_slice(&[self + .air + .component + .claim]))); + commit_and_verify(proof, &self.air, channel) + } +} + +#[wasm_bindgen] +pub fn prove_and_verify_fib(log_size: u32, claim: u32) -> StwoResult { + console_log!( + "Starting prove_and_verify with log_size: {}, claim: {}", + log_size, + claim + ); + let fib = Fibonacci::new(log_size, BaseField::from(claim)); + + match fib.prove() { + Ok(proof) => {
console_log!("Proof generated successfully"); + let serialized = serde_json::to_string(&proof).unwrap(); + console_log!("Serialized proof: {}", serialized); + + match fib.verify(proof) { + Ok(_) => { + console_log!("Proof verified successfully"); + StwoResult { + success: true, + message: serialized.to_string(), + } + } + Err(e) => { + console_log!("Proof verification failed: {:?}", e); + StwoResult { + success: false, + message: format!("Proof verification failed: {:?}", e), + } + } + } + } + Err(e) => { + console_log!("Proof generation failed: {:?}", e); + StwoResult { + success: false, + message: format!("Proof generation failed: {:?}", e), + } + } + } +} + +#[wasm_bindgen] +pub fn verify_stark_proof_fib(log_size: u32, claim: u32, stark_proof_str: &str) -> StwoResult { + console_log!( + "Starting verify_stark_proof with log_size: {}, claim: {}", + log_size, + claim + ); + console_log!("Received proof string length: {}", stark_proof_str.len()); + + let fib = Fibonacci::new(log_size, BaseField::from(claim)); + + let stark_proof: Result, serde_json::Error> = + serde_json::from_str(stark_proof_str); + + match stark_proof { + Ok(proof) => { + console_log!("Proof deserialized successfully"); + match fib.verify(proof) { + Ok(()) => { + console_log!("Proof verified successfully"); + StwoResult { + success: true, + message: "Proof verified successfully".to_string(), + } + } + Err(e) => { + console_log!("Proof verification failed: {:?}", e); + StwoResult { + success: false, + message: format!("Proof verification failed: {:?}", e), + } + } + } + } + Err(e) => { + console_log!("Failed to deserialize proof: {:?}", e); + StwoResult { + success: false, + message: format!("Failed to deserialize proof: {:?}", e), + } + } + } +} diff --git a/crates/stwo_wasm/src/fibonnaci/multi_fibonacci.rs b/crates/stwo_wasm/src/fibonnaci/multi_fibonacci.rs new file mode 100644 index 0000000..e35ea31 --- /dev/null +++ b/crates/stwo_wasm/src/fibonnaci/multi_fibonacci.rs @@ -0,0 +1,230 @@ +// lib.rs + +use std::iter::zip; + +use stwo_prover::core::air::{Air, Component}; +use stwo_prover::core::backend::cpu::CpuCircleEvaluation; +use stwo_prover::core::fields::m31::{self, BaseField}; +use stwo_prover::core::poly::BitReversedOrder; +use stwo_prover::core::prover::{ProvingError, StarkProof, VerificationError}; +use stwo_prover::core::vcs::blake2_merkle::Blake2sMerkleHasher; +use stwo_prover::examples::wide_fibonacci::component::WideFibComponent; +// use stwo_prover::examples::fibonacci::MultiFibonacci; +// use stwo_prover::core::vcs::blake2_hash::Blake2sHasher; +// use stwo_prover::core::channel::{Blake2sChannel, Channel}; +// use stwo_prover::core::fields::IntoSlice; +use wasm_bindgen::prelude::*; + +use crate::fibonnaci::Fibonacci; +use crate::StwoResult; + +#[wasm_bindgen] +extern "C" { + // Use `js_namespace` here to bind `console.log(..)` instead of just + // `log(..)` + #[wasm_bindgen(js_namespace = console)] + fn log(s: &str); +} + +macro_rules! 
console_log { + // Note that this is using the `log` function imported above + ($($t:tt)*) => (log(&format_args!($($t)*).to_string())) +} + +pub struct MultiFibonacci { + log_sizes: Vec, + claims: Vec, +} +#[derive(Clone)] +pub struct WideFibAir { + pub component: WideFibComponent, +} + +impl Air for WideFibAir { + fn components(&self) -> Vec<&dyn Component> { + vec![&self.component] + } +} + +impl MultiFibonacci { + pub fn new(log_sizes: Vec, claims: Vec) -> Self { + assert!(!log_sizes.is_empty()); + assert_eq!(log_sizes.len(), claims.len()); + Self { log_sizes, claims } + } + + pub fn get_trace(&self) -> Vec> { + zip(&self.log_sizes, &self.claims) + .map(|(log_size, claim)| { + let fib = Fibonacci::new(*log_size, *claim); + fib.get_trace() + }) + .collect() + } + + pub fn prove(&self) -> Result, ProvingError> { + println!("try proof of multi fibo"); + + // let channel = + // &mut Blake2sChannel::new(Blake2sHasher::hash(BaseField::into_slice(&self.claims))); + // let trace = self.get_trace(); + Err(ProvingError::ConstraintsNotSatisfied) + } + + pub fn verify(&self, proof: StarkProof) -> Result<(), VerificationError> { + // println!("try verify proof of multi fibo"); + println!("try verify proof of multi fibo"); + println!("stark proof {:?}", proof); + // println!("stark proof {:?}", proof.commitment_scheme_proof.proof_of_work.nonce); + // let channel = + // &mut Blake2sChannel::new(Blake2sHasher::hash(BaseField::into_slice(&self.claims))); + // commit_and_verify(proof, &self, channel) + Err(VerificationError::OodsNotMatching) + } +} + +#[wasm_bindgen] +pub fn stark_proof_multi_fibo(log_sizes: Vec, claims_int: Vec) -> StwoResult { + let claims: Vec = claims_int + .into_iter() + .map(m31::M31::from_u32_unchecked) + .collect(); + let multi_fibo = MultiFibonacci::new(log_sizes, claims); + + match multi_fibo.prove() { + Ok(proof) => { + console_log!("Proof deserialized successfully"); + match multi_fibo.verify(proof) { + Ok(()) => { + console_log!("Proof verified successfully"); + StwoResult { + success: true, + message: "Proof verified successfully".to_string(), + } + } + Err(e) => { + console_log!("Proof verification failed: {:?}", e); + StwoResult { + success: false, + message: format!("Proof verification failed: {:?}", e), + } + } + } + } + Err(e) => { + console_log!("Failed to deserialize proof: {:?}", e); + StwoResult { + success: false, + message: format!("Failed to deserialize proof: {:?}", e), + } + } + } +} + +// #[wasm_bindgen] +// pub fn stark_proof_multi_fibo(log_sizes: Vec, claims_int: Vec) -> StwoResult { +// let claims: Vec = claims_int +// .into_iter() +// .map(m31::M31::from_u32_unchecked) +// .collect(); +// let multi_fibo = MultiFibonacci::new(log_sizes, claims); + +// match multi_fibo.prove() { +// Ok(proof) => { +// console_log!("Proof deserialized successfully"); +// match multi_fibo.verify(proof) { +// Ok(()) => { +// console_log!("Proof verified successfully"); +// StwoResult { +// success: true, +// message: "Proof verified successfully".to_string(), +// } +// } +// Err(e) => { +// console_log!("Proof verification failed: {:?}", e); +// StwoResult { +// success: false, +// message: format!("Proof verification failed: {:?}", e), +// } +// } +// } +// } +// Err(e) => { +// console_log!("Failed to deserialize proof: {:?}", e); +// StwoResult { +// success: false, +// message: format!("Failed to deserialize proof: {:?}", e), +// } +// } +// } +// } + +// #[wasm_bindgen] +// pub fn verify_stark_proof_multi_fibo( +// log_sizes: Vec, +// claims_int: Vec, +// stark_proof_str: 
&str, +// ) -> StwoResult { +// let claims: Vec = claims_int +// .into_iter() +// .map(m31::M31::from_u32_unchecked) +// .collect(); +// let multi_fibo = MultiFibonacci::new(log_sizes, claims); +// // StwoResult { +// // success: false, +// // message: format!("Proof verification failed: {:?}", "no generic value"), +// // } +// let stark_proof: Result, serde_json::Error> = +// serde_json::from_str(stark_proof_str); +// match multi_fibo.verify(stark_proof.unwrap()) { +// Ok(()) => { +// console_log!("Proof verified successfully"); +// StwoResult { +// success: true, +// message: "Proof verified successfully".to_string(), +// } +// } +// Err(e) => { +// console_log!("Proof verification failed: {:?}", e); +// StwoResult { +// success: false, +// message: format!("Proof verification failed: {:?}", e), +// } +// } +// } +// } + +// #[wasm_bindgen] +pub fn verify_stark_proof_multi_fibo( + log_sizes: Vec, + claims_int: Vec, + stark_proof_str: &str, +) -> StwoResult { + let claims: Vec = claims_int + .into_iter() + .map(m31::M31::from_u32_unchecked) + .collect(); + let multi_fibo = MultiFibonacci::new(log_sizes, claims); + // StwoResult { + // success: false, + // message: format!("Proof verification failed: {:?}", "no generic value"), + // } + let stark_proof: Result, serde_json::Error> = + serde_json::from_str(stark_proof_str); + match multi_fibo.verify(stark_proof.unwrap()) { + Ok(()) => { + console_log!("Proof verified successfully"); + StwoResult { + success: true, + message: "Proof verified successfully".to_string(), + } + } + Err(e) => { + console_log!("Proof verification failed: {:?}", e); + StwoResult { + success: false, + message: format!("Proof verification failed: {:?}", e), + } + } + } +} diff --git a/crates/stwo_wasm/src/lib.rs b/crates/stwo_wasm/src/lib.rs index dfd8f68..d4f6127 100644 --- a/crates/stwo_wasm/src/lib.rs +++ b/crates/stwo_wasm/src/lib.rs @@ -1,12 +1,15 @@ // lib.rs -pub mod multi_fibonacci; pub mod poseidon; pub mod wide_fibonnacci; +// Deprecated program examples on the STWO +// Recreate it internally +pub mod fibonnaci; +// pub mod multi_fibonacci; + +use poseidon::PoseidonStruct; use serde::{Deserialize, Serialize}; -use stwo_prover::core::fields::m31::BaseField; use stwo_prover::core::prover::StarkProof; use stwo_prover::core::vcs::blake2_merkle::Blake2sMerkleHasher; -use stwo_prover::examples::fibonacci::Fibonacci; use wasm_bindgen::prelude::*; #[wasm_bindgen] @@ -43,87 +46,103 @@ impl StwoResult { } #[wasm_bindgen] -pub fn prove_and_verify(log_size: u32, claim: u32) -> StwoResult { +// pub fn prove_and_verify(log_size: u32, claim: u32) -> StwoResult { +pub fn prove_and_verify(log_n_instances: u32) -> StwoResult { console_log!( - "Starting prove_and_verify with log_size: {}, claim: {}", - log_size, - claim + "Starting prove_and_verify with log_n_instances: {}", + log_n_instances, ); - let fib = Fibonacci::new(log_size, BaseField::from(claim)); - match fib.prove() { - Ok(proof) => { - console_log!("Proof generated successfully"); - let serialized = serde_json::to_string(&proof).unwrap(); - console_log!("Serialized proof: {}", serialized); + let poseidon = PoseidonStruct::new(log_n_instances); - match fib.verify(proof) { - Ok(_) => { - console_log!("Proof verified successfully"); - StwoResult { - success: true, - message: serialized.to_string(), + match poseidon { + Err(e) => StwoResult { + success: false, + message: format!("Failed to deserialize proof: {:?}", e), + }, + Ok(p) => match p.prove::() { + Ok(proof) => { + console_log!("Proof generated successfully"); 
+ let serialized = serde_json::to_string(&proof).unwrap(); + console_log!("Serialized proof: {}", serialized); + + match p.verify::(proof) { + Ok(_) => { + console_log!("Proof verified successfully"); + StwoResult { + success: true, + message: serialized.to_string(), + } } - } - Err(e) => { - console_log!("Proof verification failed: {:?}", e); - StwoResult { - success: false, - message: format!("Proof verification failed: {:?}", e), + Err(e) => { + console_log!("Proof verification failed: {:?}", e); + StwoResult { + success: false, + message: format!("Proof verification failed: {:?}", e), + } } } } - } - Err(e) => { - console_log!("Proof generation failed: {:?}", e); - StwoResult { - success: false, - message: format!("Proof generation failed: {:?}", e), + Err(e) => { + console_log!("Proof generation failed: {:?}", e); + StwoResult { + success: false, + message: format!("Proof generation failed: {:?}", e), + } } - } + }, } } #[wasm_bindgen] -pub fn verify_stark_proof(log_size: u32, claim: u32, stark_proof_str: &str) -> StwoResult { +pub fn verify_stark_proof(log_n_instances: u32, stark_proof_str: &str) -> StwoResult { console_log!( - "Starting verify_stark_proof with log_size: {}, claim: {}", - log_size, - claim + "Starting verify_stark_proof with log_n_instances: {}", + log_n_instances ); console_log!("Received proof string length: {}", stark_proof_str.len()); - let fib = Fibonacci::new(log_size, BaseField::from(claim)); + let poseidon = PoseidonStruct::new(log_n_instances); let stark_proof: Result, serde_json::Error> = serde_json::from_str(stark_proof_str); match stark_proof { + Err(e) => { + console_log!("Failed to deserialize proof: {:?}", e); + StwoResult { + success: false, + message: format!("Failed to deserialize proof: {:?}", e), + } + } Ok(proof) => { console_log!("Proof deserialized successfully"); - match fib.verify(proof) { - Ok(()) => { - console_log!("Proof verified successfully"); - StwoResult { - success: true, - message: "Proof verified successfully".to_string(), + + match poseidon { + Ok(p) => match p.verify(proof) { + Ok(()) => { + console_log!("Proof verified successfully"); + StwoResult { + success: true, + message: "Proof verified successfully".to_string(), + } } - } + Err(e) => { + console_log!("Proof verification failed: {:?}", e); + StwoResult { + success: false, + message: format!("Proof verification failed: {:?}", e), + } + } + }, Err(e) => { - console_log!("Proof verification failed: {:?}", e); + console_log!("Failed to deserialize proof: {:?}", e); StwoResult { success: false, - message: format!("Proof verification failed: {:?}", e), + message: format!("Failed to deserialize proof: {:?}", e), } } } } - Err(e) => { - console_log!("Failed to deserialize proof: {:?}", e); - StwoResult { - success: false, - message: format!("Failed to deserialize proof: {:?}", e), - } - } } } diff --git a/crates/stwo_wasm/src/multi_fibonacci.rs b/crates/stwo_wasm/src/multi_fibonacci.rs deleted file mode 100644 index b00ea35..0000000 --- a/crates/stwo_wasm/src/multi_fibonacci.rs +++ /dev/null @@ -1,129 +0,0 @@ -// lib.rs - -use stwo_prover::core::fields::m31::{self, BaseField}; -use stwo_prover::core::prover::StarkProof; -use stwo_prover::core::vcs::blake2_merkle::Blake2sMerkleHasher; -use stwo_prover::examples::fibonacci::MultiFibonacci; -use wasm_bindgen::prelude::*; - -use crate::StwoResult; - -#[wasm_bindgen] -extern "C" { - // Use `js_namespace` here to bind `console.log(..)` instead of just - // `log(..)` - #[wasm_bindgen(js_namespace = console)] - fn log(s: &str); -} - 
-macro_rules! console_log { - // Note that this is using the `log` function imported above - ($($t:tt)*) => (log(&format_args!($($t)*).to_string())) -} - -#[wasm_bindgen] -pub fn stark_proof_multi_fibo(log_sizes: Vec, claims_int: Vec) -> StwoResult { - let claims: Vec = claims_int - .into_iter() - .map(m31::M31::from_u32_unchecked) - .collect(); - let multi_fibo = MultiFibonacci::new(log_sizes, claims); - - match multi_fibo.prove() { - Ok(proof) => { - console_log!("Proof deserialized successfully"); - match multi_fibo.verify(proof) { - Ok(()) => { - console_log!("Proof verified successfully"); - StwoResult { - success: true, - message: "Proof verified successfully".to_string(), - } - } - Err(e) => { - console_log!("Proof verification failed: {:?}", e); - StwoResult { - success: false, - message: format!("Proof verification failed: {:?}", e), - } - } - } - } - Err(e) => { - console_log!("Failed to deserialize proof: {:?}", e); - StwoResult { - success: false, - message: format!("Failed to deserialize proof: {:?}", e), - } - } - } -} - -#[wasm_bindgen] -pub fn verify_stark_proof_multi_fibo( - log_sizes: Vec, - claims_int: Vec, - stark_proof_str: &str, -) -> StwoResult { - let claims: Vec = claims_int - .into_iter() - .map(m31::M31::from_u32_unchecked) - .collect(); - let multi_fibo = MultiFibonacci::new(log_sizes, claims); - // StwoResult { - // success: false, - // message: format!("Proof verification failed: {:?}", "no generic value"), - // } - let stark_proof: Result, serde_json::Error> = - serde_json::from_str(stark_proof_str); - match multi_fibo.verify(stark_proof.unwrap()) { - Ok(()) => { - console_log!("Proof verified successfully"); - StwoResult { - success: true, - message: "Proof verified successfully".to_string(), - } - } - Err(e) => { - console_log!("Proof verification failed: {:?}", e); - StwoResult { - success: false, - message: format!("Proof verification failed: {:?}", e), - } - } - } -} - -// #[wasm_bindgen] -// pub fn verify_stark_proof_multi_fibo( -// log_sizes: Vec, -// claims_int: Vec, -// stark_proof_str: &str, -// ) -> StwoResult { -// let claims: Vec = claims_int -// .into_iter() -// .map(|f| m31::M31::from_u32_unchecked(f)) -// .collect(); -// let multi_fibo = MultiFibonacci::new(log_sizes, claims); -// StwoResult { -// success: false, -// message: format!("Proof verification failed: {:?}", "no generic value"), -// } -// // let stark_proof: Result, serde_json::Error> = -// serde_json::from_str(stark_proof_str); // match multi_fibo.verify(stark_proof.unwrap()) { -// // Ok(()) => { -// // console_log!("Proof verified successfully"); -// // StwoResult { -// // success: true, -// // message: "Proof verified successfully".to_string(), -// // } -// // } -// // Err(e) => { -// // console_log!("Proof verification failed: {:?}", e); -// // StwoResult { -// // success: false, -// // message: format!("Proof verification failed: {:?}", e), -// // } -// // } -// // } -// } diff --git a/crates/stwo_wasm/src/poseidon.rs b/crates/stwo_wasm/src/poseidon.rs index 8954162..d3a72f8 100644 --- a/crates/stwo_wasm/src/poseidon.rs +++ b/crates/stwo_wasm/src/poseidon.rs @@ -18,7 +18,7 @@ use stwo_prover::core::InteractionElements; use stwo_prover::examples::poseidon::{ gen_interaction_trace, gen_trace, - PoseidonAir, + // PoseidonAir, PoseidonComponent, // PoseidonComponent, }; use wasm_bindgen::prelude::*; @@ -29,7 +29,7 @@ pub const N_LOG_INSTANCES_PER_ROW: usize = 3; pub const LOG_N_ROWS: u32 = 8; pub const LOG_EXPAND: u32 = 2; pub const LOG_N_LANES: u32 = 4; -const N_STATE: usize = 16; 
+// const N_STATE: usize = 16; #[wasm_bindgen] extern "C" { @@ -46,7 +46,7 @@ macro_rules! console_log { #[derive(Clone)] pub struct PoseidonStruct { - pub air: PoseidonAir, + pub component: PoseidonComponent, } impl PoseidonStruct { @@ -75,7 +75,7 @@ impl PoseidonStruct { // Draw lookup element. // let lookup_elements = LookupElements::draw(channel); - let lookup_elements = LookupElements::draw(channel, N_STATE * 2); + let lookup_elements = LookupElements::draw(channel); // let component = PoseidonComponent { // Precompute twiddles. @@ -104,15 +104,14 @@ impl PoseidonStruct { lookup_elements, claimed_sum, // claimed_sum, }; - let air = PoseidonAir { component }; + // let air = PoseidonAir { component }; - Ok(Self { air }) + Ok(Self { component }) } pub fn prove(&self) -> Result, ProvingError> { // let (trace, lookup_data) = gen_trace(self.air.component.log_n_rows); // let res = PoseidonStruct::prove_poseidon(self.air.component.log_n_rows); - let res = - PoseidonStruct::prove_poseidon::(self.air.component.log_n_rows); + let res = PoseidonStruct::prove_poseidon::(self.component.log_n_rows); match res { Ok(proof) => Ok(proof), Err(_) => Err(ProvingError::ConstraintsNotSatisfied), @@ -179,7 +178,7 @@ impl PoseidonStruct { tree_builder.commit(channel); // Draw lookup element. - let lookup_elements = LookupElements::draw(channel, N_STATE * 2); + let lookup_elements = LookupElements::draw(channel); // Interaction trace. let (trace, claimed_sum) = gen_interaction_trace(log_n_rows, lookup_data, &lookup_elements); @@ -198,9 +197,9 @@ impl PoseidonStruct { lookup_elements, claimed_sum, }; - let air = PoseidonAir { component }; + // let air = PoseidonAir { component }; let proof = prove( - &air, + &[&component], channel, &InteractionElements::default(), commitment_scheme, @@ -225,7 +224,8 @@ impl PoseidonStruct { let commitment_scheme = &mut CommitmentSchemeVerifier::new(); verify( - &self.air, + // &self.component, + &[&self.component], verifier_channel, &InteractionElements::default(), commitment_scheme,