diff --git a/.env-local b/.env-local index af45ee1f..497f5ff9 100644 --- a/.env-local +++ b/.env-local @@ -8,7 +8,7 @@ NODE_MODE="service" # The nodes address SERVICE_URL="http://localhost:8080" # For CI/CD purpose to automate the staking wallet creation -INITIAL_STAKING_WALLET_BALANCE=1 +INITIAL_STAKING_WALLET_BALANCE=15 # Intial balance for the distribution wallet which will be used to hold the distribution list. INITIAL_DISTRIBUTION_WALLET_BALANCE=1 # Global timers which track the round time, submission window and audit window and call those functions @@ -35,16 +35,13 @@ K2_NODE_URL="https://k2-testnet.koii.live" # registering with the crete-task-cli. This variable supports a comma separated list: # TASKS="id1,id2,id3" # TASK_STAKES="1,1,1" -TASKS="" -TASK_STAKES=1 +TASKS="7jP87G1LJzWmLrr6RqQcA8bH6spZven4RHxGCgbPFzSo" +TASK_STAKES=10 # User can enter as many environment variables as they like below. These can be task # specific variables that are needed for the task to perform it's job. Some examples: # Secrets must follow this convention for task to be able to use it (SECRET_) -SECRET_WEB3_STORAGE_KEY="" -TWITTER_CONSUMER_KEY="" -TWITTER_CONSUMER_SECRET="" -TWITTER_BEARER_TOKEN="" +SECRET_WEB3_STORAGE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkaWQ6ZXRocjoweGY0ODYxMzAzOTdDNTY1QzlDYTRCOTUzZTA2RWQ4NUI4MGRBQzRkYTIiLCJpc3MiOiJ3ZWIzLXN0b3JhZ2UiLCJpYXQiOjE2NjYzNjU1OTk5MDMsIm5hbWUiOiJTb21hIn0.TU-KUFS9vjI9blN5dx6VsLLuIjJnpjPrxDHBvjXQUxw" diff --git a/.gitignore b/.gitignore index daf63eb6..bfe68a21 100644 --- a/.gitignore +++ b/.gitignore @@ -8,5 +8,9 @@ data/* executables/* namespace/* config/* +taskStateInfoKeypair.json +localKOIIDB .env -taskStateInfoKeypair.json \ No newline at end of file +taskStateInfoKeypair.json +linktrees +test/testdb \ No newline at end of file diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..7e476395 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,12 @@ +{ + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "semi": true, + "singleQuote": true, + "trailingComma": "all", + "bracketSpacing": true, + "jsxBracketSameLine": false, + "arrowParens": "avoid", + "endOfLine": "auto" + } diff --git a/README.md b/README.md index d862c15e..636f84bb 100644 --- a/README.md +++ b/README.md @@ -1,59 +1,91 @@ -# K2-Task-Template +# KOII LINKTREE TASK + +LinkTree is a koii task that creates a LinkTree page. You can add links to your favorite websites or social media profiles on your page. + Tasks run following a periodic structure of 'rounds': -![Screenshot_20230307-091958](https://user-images.githubusercontent.com/66934242/223565192-3ecce9c6-0f9a-4a58-8b02-2db19c61141f.png) +executeTask => { + 1. Do the work + 2. Audit the work of other nodes + 3. Pay the rewards or slash stake +} -Each round is set by a specific time period, and nodes participate by uploading data to IPFS, posting CIDs to the K2 settlement layer, and sending messages across REST APIs and WebSockets. -For more information on how the Task Flow works, check out [the runtime environment docs](https://docs.koii.network/microservices-and-tasks/what-are-tasks/gradual-consensus). +## The structure of the LinkTree task -If this is your first time writing a Koii Task, you might want to use the [task organizer](https://www.figma.com/community/file/1220194939977550205/Task-Outline). +## coreLogic.js -## Requirements - - [Node >=16.0.0](https://nodejs.org) - - [Docker compose](https://docs.docker.com/compose/install/docker) +The most important file in any koii task is the coreLogic.js file. 
It is provided in the task template and is where most of the task's functionality is implemented.

-## What's in the template?
-`index.js` is the hub of your app, and ties together the other pieces. This will be the entrypoint when your task runs on Task Nodes

+In the Linktree task's coreLogic, the first function is task(). It calls the linktree_task function from linktree_task.js.
+
+## linktree_task.js
+This function fetches the proofs from the local database, builds a submission object from them, and uploads it to IPFS; IPFS returns a reference CID for the submission, which the function returns.

-`NamespaceWrappers.js` contains the interfaces to make API calls to the core of the task-node. It contains all the necessary functions required to submit and audit the work, as well as the distribution lists
+The task function takes the CID returned by linktree_task and stores it in levelDB.

-`coreLogic.js` is where you'll define your task, audit, and distribution logic, and controls the majority of task functionality. You can of course break out separate features into sub-files and import them into the core logic before web-packing.
+The fetchSubmission function can later be used to read that submission CID back from levelDB.

-## Runtime Options
-There are two ways to run your task when doing development:
+The generateDistributionList function generates a distribution list for rewards based on the submissions made by the nodes. It tallies the votes the nodes have cast on each submission: if the false votes outnumber the true votes, the function slashes 70% of the submitter's stake as a penalty; if the true votes outnumber the false votes, the submission is considered valid and the rewards are distributed (a sketch of this rule appears below).

-1. With Timer ON (see .env-local)- When the timer is ON, IPC calls are made by calculating the average time slots of all the task running your node.
+The submitDistributionList function submits the distribution list generated by generateDistributionList.

-2. With Timer OFF - This allows you to do manual calls to K2 and disables the triggers for round managemnt on K2. Transactions are only accepted during the correct period. Guide for manual calls is in index.js
+The validateNode function is called when a node is selected to validate a submission value. It calls the linktree_validate function from linktree_validate.js.
+
+## linktree_validate.js
+This file verifies the validity of a Linktree CID submission.
+
+The validateDistribution function validates a distribution list submitted by a node for a specific round.
+
+The submitTask function submits the fetched submission CID to K2.

-# Modifying CoreLogic.js
-Task nodes will trigger a set of predefined functions during operation.
+The auditTask function audits a submission for a specific round and confirms whether it is valid.

-There are in total 9 functions in CoreLogic which the you can modify according to your needs:
+The auditDistribution function checks that the distribution list for a specific round is valid.

-1. *task()* - The logic for what your task should do goes here. There is a window in round that is dedicated to do work. The code in task is executed in that window.
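The following is a minimal sketch of the vote-tally and slashing rule described above. It is illustrative only, not the implementation in coreLogic.js's generateDistributionList(); the helper name `tallyCandidate` and the shapes of its inputs are assumptions made for the example.

```js
// Illustrative sketch of the audit-tally rule: more false votes than true
// votes slashes 70% of the candidate's stake; more true votes than false
// votes marks the candidate for a share of the round's bounty.
function tallyCandidate(votes, stake) {
  // Audit triggered but no votes cast: treated as invalid, slash 70%
  if (!votes || votes.length === 0) {
    return { rewarded: false, payout: -(stake * 0.7) };
  }
  const score = votes.reduce((sum, v) => sum + (v.is_valid ? 1 : -1), 0);
  if (score < 0) return { rewarded: false, payout: -(stake * 0.7) }; // slashed
  if (score > 0) return { rewarded: true, payout: 0 }; // shares the bounty later
  return { rewarded: false, payout: 0 }; // tie: neither slashed nor rewarded
}

// Example: two valid votes, one invalid vote, a stake of 10
console.log(
  tallyCandidate([{ is_valid: true }, { is_valid: true }, { is_valid: false }], 10),
);
// -> { rewarded: true, payout: 0 }
```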
+## db_model.js

-2. *fetchSubmission()* - After completing the task , the results/work will be stored somewhere like on IPFS or local levelDB. This function is the place where you can write the logic to fetch that work. It is called in submitTask() function which does the actual submission on K2.

+This file contains all the functions required for storing and retrieving data related to the linktree and node proofs used in the Linktree CID Validation task, as well as the authorized users list:

-3. *submitTask()* - It makes the call to namespace function of task-node using the wrapper.

+- `getLinktree`: retrieves the linktree for a given public key
+- `setLinktree`: sets the linktree associated with the given public key
+- `getAllLinktrees`: retrieves all linktrees stored in the database
+- `getProofs`: retrieves the proofs associated with a given public key
+- `setProofs`: sets the proofs associated with a given public key
+- `getAllProofs`: retrieves all proofs stored in the database
+- `getNodeProofCid`: retrieves the CID associated with a given round of node proofs
+- `setNodeProofCid`: sets the CID associated with a given round of node proofs
+- `getAllNodeProofCids`: retrieves all node proof CIDs stored in the database
+- `getAuthList`: retrieves the list of authorized users
+- `setAuthList`: sets the authorized users list
+- `getAllAuthLists`: retrieves all authorized lists stored in the database
+
+A usage sketch of these helpers appears at the end of this section.
+
+## namespaceWrapper.js
+This file contains the interfaces to make API calls to the core of the task-node. It contains all the necessary functions required to submit and audit the work, as well as the distribution lists.

-4. *generateDistributionList()* - You have full freedom to prepare your reward distributions as you like and the logic for that goes here. We have provided a sample logic that rewards 1 KOII to all the needs who did the correct submission for that round. This function is called in submitDistributionList()
+## index.js

-5. *submitDistributionList()* - makes call to the namesapce function of task-node to upload the list and on succesful upload does the transaction to update the state.
+This is the entry point of the task. It sets the round timers and ties together all the other parts of the task.

-6. *validateNode()* - this function is called to verify the submission value, so based on the value received from the task-state we can vote on the submission.
+## The test folder

-7. *validateDistribution()* - The logic to validate the distribution list goes here and the function will receive the distribution list submitted form task-state.
+This folder contains all the test files for the linktree task.

-8. *auditTask()* - makes call to namespace of task-node to raise an audit against the submission value if the validation fails.
+## routes.js
+
+This file contains the task's API endpoints for the linktree.
+
+
+## Runtime Options
+There are two ways to run your task when doing development:
+
+1. With Timer ON (see .env-local) - When the timer is ON, IPC calls are made by calculating the average time slots of all the tasks running on your node.
+
+2. With Timer OFF - This allows you to make manual calls to K2 and disables the triggers for round management on K2. Transactions are only accepted during the correct period. A guide for manual calls is in index.js.

-9. *auditDistribution()* - makes call to namespace of task-node to raise an audit against the distribution list if the validation fails.

 # Testing and Deploying
-Before you begin this process, be sure to check your code and write unit tests wherever possible to verify individual core logic functions. Testing using the docker container should be mostly used for consensus flows, as it will take longer to rebuild and re-deploy the docker container.
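Before moving on to testing, here is a minimal usage sketch of the db_model.js helpers listed earlier. It assumes it is run from a file alongside db_model.js on a task node where levelDB is available through the namespace wrapper; the public key, linktree payload shape, and CID shown are placeholders, not real values.

```js
// Illustrative usage of the db_model helpers (placeholder keys and payloads).
const db = require('./db_model');

async function example(publicKey) {
  // Store a linktree payload and its proofs under the same public key
  // (the payload shape here is only an example)
  await db.setLinktree(publicKey, {
    uuid: 'example-uuid',
    linktree: [{ label: 'My site', redirectUrl: 'https://example.com' }],
    timestamp: Date.now(),
  });
  await db.setProofs(publicKey, [{ publicKey, signature: '<base58 signature>' }]);

  // Read them back
  console.log('linktree:', await db.getLinktree(publicKey));
  console.log('proofs:', await db.getProofs(publicKey));

  // Per-round node proof CIDs are keyed by round number
  await db.setNodeProofCid(42, '<example CID>');
  console.log('round 42 CID:', await db.getNodeProofCid(42));
}

example('<base58 public key>').catch(console.error);
```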
+Before you begin this process, be sure to check your code and write unit tests wherever possible to verify individual core logic functions. `unitTest.js` file helps you to mock task state parameters that are required in core logic function and test it. Testing using the docker container should be mostly used for consensus flows, as it will take longer to rebuild and re-deploy the docker container. ## Build Before deploying a task, you'll need to build it into a single file executable by running diff --git a/config-task.yml b/config-task.yml index e280f7f8..1c691ce3 100644 --- a/config-task.yml +++ b/config-task.yml @@ -1,13 +1,13 @@ # Name and desciption sof your task -task_name: "test-task" -task_description: "This task is to test out the namespace function" +task_name: "linktree-task" +task_description: "linktree-task" # network value can be DEVELOPMENT , ARWEAVE or IPFS task_executable_network: "DEVELOPMENT" # Provide your web.storage key in case of IPFS otherwise leave blank -secret_web3_storage_key: "" +secret_web3_storage_key: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkaWQ6ZXRocjoweGY0ODYxMzAzOTdDNTY1QzlDYTRCOTUzZTA2RWQ4NUI4MGRBQzRkYTIiLCJpc3MiOiJ3ZWIzLXN0b3JhZ2UiLCJpYXQiOjE2NjYzNjU1OTk5MDMsIm5hbWUiOiJTb21hIn0.TU-KUFS9vjI9blN5dx6VsLLuIjJnpjPrxDHBvjXQUxw" # Path to your executable webpack if the selected network is IPFS otherwise leave blank -task_audit_program: "" +task_audit_program: "/home/soma/code-ie/task-template-linktree/dist/main.js" # Provide your transaction ID in case of ARWEAVE and in case of DEVELOPMENT give your executable name as main otherwise leave blank task_audit_program_id: "main" diff --git a/coreLogic.js b/coreLogic.js index dbae774a..7555473a 100644 --- a/coreLogic.js +++ b/coreLogic.js @@ -1,210 +1,280 @@ -const { namespaceWrapper } = require("./namespaceWrapper"); +const { namespaceWrapper } = require('./namespaceWrapper'); +const linktree_task = require('./linktree_task'); +const linktree_validate = require('./linktree_validate'); const crypto = require('crypto'); +const dataFromCid = require("./helpers/dataFromCid"); +const db = require('./db_model'); -class CoreLogic{ +class CoreLogic { + async task() { + // TODO remove all of the prompts like the following line from the template version -async task() { - // Write the logic to do the work required for submitting the values and optionally store the result in levelDB - - // Below is just a sample of work that a task can do + // run linktree task + console.log('*********task() started*********'); - try{ - - const x = Math.random().toString(); // generate random number and convert to string - const cid = crypto.createHash("sha1").update(x).digest("hex"); // convert to CID - console.log("HASH:", cid); + const proof_cid = await linktree_task(); - // fetching round number to store work accordingly + const round = await namespaceWrapper.getRound(); - if (cid) { - await namespaceWrapper.storeSet("cid", cid); // store CID in levelDB - } -}catch(err){ - console.log("ERROR IN EXECUTING TASK", err); -} - -} -async fetchSubmission(){ - // Write the logic to fetch the submission values here and return the cid string + // TEST For only testing purposes: + // const round = 1000 - // fetching round number to store work accordingly + if (proof_cid) { + await db.setNodeProofCid(round, proof_cid); // store CID in levelDB + console.log('Node Proof CID stored in round', round) + } else { + console.log('CID NOT FOUND'); + } - console.log("IN FETCH SUBMISSION"); + console.log('*********task() completed*********'); + } - 
const round = await namespaceWrapper.getRound(); - // The code below shows how you can fetch your stored value from level DB + async fetchSubmission() { + // The logic to fetch the submission values and return the cid string - const cid = await namespaceWrapper.storeGet("cid"); // retrieves the cid - console.log("CID", cid); - return cid; -} + // fetching round number to store work accordingly -async generateDistributionList(round){ - try{ - console.log("GenerateDistributionList called"); - console.log("I am selected node"); - - // Write the logic to generate the distribution list here by introducing the rules of your choice + console.log('***********IN FETCH SUBMISSION**************'); + // The code below shows how you can fetch your stored value from level DB + + // TEST For only testing purposes: + // const round = 1000 + const round = await namespaceWrapper.getRound(); + + const proof_cid = await db.getNodeProofCid(round - 1); // retrieves the cid + console.log('Linktree proofs CID', proof_cid, "in round", round - 1); + return proof_cid; + } - /* **** SAMPLE LOGIC FOR GENERATING DISTRIBUTION LIST ******/ - - let distributionList = {}; - const taskAccountDataJSON = await namespaceWrapper.getTaskState(); - const submissions = taskAccountDataJSON.submissions[round]; - const submissions_audit_trigger = - taskAccountDataJSON.submissions_audit_trigger[round]; - if (submissions == null) { - console.log("No submisssions found in N-2 round"); - return distributionList; - } else { - const keys = Object.keys(submissions); - const values = Object.values(submissions); - const size = values.length; - console.log("Submissions from last round: ", keys, values, size); - for (let i = 0; i < size; i++) { - const candidatePublicKey = keys[i]; - if (submissions_audit_trigger && submissions_audit_trigger[candidatePublicKey]) { - console.log(submissions_audit_trigger[candidatePublicKey].votes, "distributions_audit_trigger votes "); - const votes = submissions_audit_trigger[candidatePublicKey].votes; - let numOfVotes = 0; - for (let index = 0; index < votes.length; index++) { - if(votes[i].is_valid) - numOfVotes++; - else numOfVotes--; - } - if(numOfVotes < 0) - continue; + async generateDistributionList(round, _dummyTaskState) { + try { + console.log('GenerateDistributionList called'); + console.log('I am selected node'); + console.log('Round', round, 'Task State', _dummyTaskState); + // The logic to generate the distribution list here + + let distributionList = {}; + let distributionCandidates = []; + let taskAccountDataJSON = await namespaceWrapper.getTaskState(); + + if (taskAccountDataJSON == null) taskAccountDataJSON = _dummyTaskState; + + console.log('Task Account Data', taskAccountDataJSON); + + const submissions = taskAccountDataJSON.submissions[round]; + const submissions_audit_trigger = + taskAccountDataJSON.submissions_audit_trigger[round]; + + if (submissions == null) { + + console.log('No submisssions found in N-2 round'); + return distributionList; + + } else { + const keys = Object.keys(submissions); + const values = Object.values(submissions); + const size = values.length; + console.log('Submissions from last round: ', keys, values, size); + + // Logic for slashing the stake of the candidate who has been audited and found to be false + for (let i = 0; i < size; i++) { + const candidatePublicKey = keys[i]; + if ( + submissions_audit_trigger && + submissions_audit_trigger[candidatePublicKey] + ) { + console.log( + 'distributions_audit_trigger votes ', + 
submissions_audit_trigger[candidatePublicKey].votes, + ); + const votes = submissions_audit_trigger[candidatePublicKey].votes; + if (votes.length === 0) { + // slash 70% of the stake as still the audit is triggered but no votes are casted + // Note that the votes are on the basis of the submission value + // to do so we need to fetch the stakes of the candidate from the task state + const stake_list = taskAccountDataJSON.stake_list; + const candidateStake = stake_list[candidatePublicKey]; + const slashedStake = candidateStake * 0.7; + distributionList[candidatePublicKey] = -slashedStake; + console.log('Candidate Stake', candidateStake); + } else { + let numOfVotes = 0; + for (let index = 0; index < votes.length; index++) { + if (votes[index].is_valid) numOfVotes++; + else numOfVotes--; + } + + if (numOfVotes < 0) { + // slash 70% of the stake as the number of false votes are more than the number of true votes + // Note that the votes are on the basis of the submission value + // to do so we need to fetch the stakes of the candidate from the task state + const stake_list = taskAccountDataJSON.stake_list; + const candidateStake = stake_list[candidatePublicKey]; + const slashedStake = candidateStake * 0.7; + distributionList[candidatePublicKey] = -slashedStake; + console.log('Candidate Stake', candidateStake); + } + + if (numOfVotes > 0) { + distributionCandidates.push(candidatePublicKey); + } + } + + } } - distributionList[candidatePublicKey] = 1; } - } - console.log("Distribution List", distributionList); - return distributionList; - }catch(err){ - console.log("ERROR IN GENERATING DISTRIBUTION LIST", err); - } -} + // now distribute the rewards based on the valid submissions + // Here it is assumed that all the nodes doing valid submission gets the same reward -async submitDistributionList(round) { - -// This function just upload your generated dustribution List and do the transaction for that + const reward = + taskAccountDataJSON.bounty_amount_per_round / + distributionCandidates.length; + console.log('REWARD RECEIVED BY EACH NODE', reward); + for (let i = 0; i < distributionCandidates.length; i++) { + distributionList[distributionCandidates[i]] = reward; + } - console.log("SubmitDistributionList called"); + console.log('Distribution List', distributionList); - try{ - - const distributionList = await this.generateDistributionList(round); - - const decider = await namespaceWrapper.uploadDistributionList( - distributionList, round - ); - console.log("DECIDER", decider); - - if (decider) { - const response = await namespaceWrapper.distributionListSubmissionOnChain(round); - console.log("RESPONSE FROM DISTRIBUTION LIST", response); + return distributionList; + } catch (err) { + console.log('ERROR IN GENERATING DISTRIBUTION LIST', err); } - }catch(err){ - console.log("ERROR IN SUBMIT DISTRIBUTION", err); } -} + async submitDistributionList(round) { + // This upload the generated dustribution List -async validateNode(submission_value, round) { - -// Write your logic for the validation of submission value here and return a boolean value in response + console.log('SubmitDistributionList called'); -// The sample logic can be something like mentioned below to validate the submission + try { + const distributionList = await this.generateDistributionList(round); -// try{ + const decider = await namespaceWrapper.uploadDistributionList( + distributionList, + round, + ); + console.log('DECIDER', decider); -console.log("Received submission_value", submission_value, round); -// const generatedValue = 
await namespaceWrapper.storeGet("cid"); -// console.log("GENERATED VALUE", generatedValue); -// if(generatedValue == submission_value){ -// return true; -// }else{ -// return false; -// } -// }catch(err){ -// console.log("ERROR IN VALDIATION", err); -// return false; -// } - -// For succesfull flow we return true for now -return true; -} + if (decider) { + const response = + await namespaceWrapper.distributionListSubmissionOnChain(round); + console.log('RESPONSE FROM DISTRIBUTION LIST', response); + } -async shallowEqual(object1, object2) { - const keys1 = Object.keys(object1); - const keys2 = Object.keys(object2); - if (keys1.length !== keys2.length) { - return false; - } - for (let key of keys1) { - if (object1[key] !== object2[key]) { - return false; + } catch (err) { + console.log('ERROR IN SUBMIT DISTRIBUTION', err); } } - return true; -} - -validateDistribution = async(distributionListSubmitter, round) => { -// Write your logic for the validation of submission value here and return a boolean value in response -// this logic can be same as generation of distribution list function and based on the comparision will final object , decision can be made + // this function is called when a node is selected to validate the submission value + async validateNode(submission_value, round) { + console.log('Received submission_value', submission_value, round); -try{ - console.log("Distribution list Submitter", distributionListSubmitter); - const fetchedDistributionList = JSON.parse(await namespaceWrapper.getDistributionList(distributionListSubmitter,round)); - console.log("FETCHED DISTRIBUTION LIST",fetchedDistributionList); - const generateDistributionList = await this.generateDistributionList(round); - - // compare distribution list + // import the linktree validate module + const vote = await linktree_validate(submission_value, round); + console.log('Vote', vote); + return vote; + } - const parsed = JSON.parse(fetchedDistributionList); - const result = await this.shallowEqual(parsed,generateDistributionList); - console.log("RESULT", result); - return result; -}catch(err){ - console.log("ERROR IN VALIDATING DISTRIBUTION", err); - return false; + async shallowEqual(object1, object2) { + const keys1 = Object.keys(object1); + const keys2 = Object.keys(object2); + if (keys1.length !== keys2.length) { + return false; + } + for (let key of keys1) { + if (object1[key] !== object2[key]) { + return false; + } + } + return true; + } -} + validateDistribution = async ( + distributionListSubmitter, + round, + _dummyDistributionList, + _dummyTaskState, + ) => { + + try { + console.log('Distribution list Submitter', distributionListSubmitter); + const rawDistributionList = await namespaceWrapper.getDistributionList( + distributionListSubmitter, + round, + ); + let fetchedDistributionList; + if (rawDistributionList == null) { + fetchedDistributionList = _dummyDistributionList; + } else { + fetchedDistributionList = JSON.parse(rawDistributionList); + } -} -// Submit Address with distributioon list to K2 -async submitTask(roundNumber) { - console.log("submitTask called with round", roundNumber); - try { - console.log("inside try"); - console.log(await namespaceWrapper.getSlot(), "current slot while calling submit"); - const submission = await this.fetchSubmission(); - console.log("SUBMISSION", submission); - await namespaceWrapper.checkSubmissionAndUpdateRound(submission, roundNumber); - console.log("after the submission call"); - } catch (error) { - console.log("error in submission", error); + 
console.log('FETCHED DISTRIBUTION LIST', fetchedDistributionList); + const generateDistributionList = await this.generateDistributionList( + round, + _dummyTaskState, + ); + + // compare distribution list + + const parsed = fetchedDistributionList; + console.log('compare distribution list', parsed, generateDistributionList); + const result = await this.shallowEqual(parsed, generateDistributionList); + console.log('RESULT', result); + return result; + } catch (err) { + console.log('ERROR IN VALIDATING DISTRIBUTION', err); + return false; + } + }; + // Submit Address with distributioon list to K2 + async submitTask(roundNumber) { + console.log('submitTask called with round', roundNumber); + try { + console.log('inside try'); + console.log( + await namespaceWrapper.getSlot(), + 'current slot while calling submit', + ); + const submission = await this.fetchSubmission(); + console.log('SUBMISSION', submission); + // submit the submission to the K2 + await namespaceWrapper.checkSubmissionAndUpdateRound( + submission, + roundNumber, + ); + console.log('after the submission call'); + } catch (error) { + console.log('error in submission', error); + } } -} -async auditTask(roundNumber) { - console.log("auditTask called with round", roundNumber); - console.log(await namespaceWrapper.getSlot(), "current slot while calling auditTask"); - await namespaceWrapper.validateAndVoteOnNodes(this.validateNode, roundNumber); -} + async auditTask(roundNumber) { -async auditDistribution(roundNumber) { - console.log("auditDistribution called with round", roundNumber); - await namespaceWrapper.validateAndVoteOnDistributionList(this.validateDistribution, roundNumber); -} + console.log('auditTask called with round', roundNumber); + console.log( + await namespaceWrapper.getSlot(), + 'current slot while calling auditTask', + ); + await namespaceWrapper.validateAndVoteOnNodes( + this.validateNode, + roundNumber, + ); + } + async auditDistribution(roundNumber) { + console.log('auditDistribution called with round', roundNumber); + await namespaceWrapper.validateAndVoteOnDistributionList( + this.validateDistribution, + roundNumber, + ); + } } const coreLogic = new CoreLogic(); -module.exports = { - coreLogic -}; \ No newline at end of file +module.exports = coreLogic; diff --git a/dbSharing.js b/dbSharing.js new file mode 100644 index 00000000..f9ed4656 --- /dev/null +++ b/dbSharing.js @@ -0,0 +1,94 @@ +const { app, MAIN_ACCOUNT_PUBKEY, SERVICE_URL, TASK_ID } = require("./init"); +const {default: axios} = require('axios'); +const db = require('./db_model'); +const nacl = require('tweetnacl'); +const bs58 = require('bs58'); + + +const share = async () => { + try { + // find another node + const nodesUrl = `${SERVICE_URL}/nodes/${TASK_ID}`; + + // check if the node is online + const res = await axios.get(nodesUrl); + if (res.status != 200) { + console.error('Error', res.status); + return; + } + + if (!res.data) { + console.error('res has no valid urls'); + return; + } + + let nodeUrlList = res.data.map((e) => { + return e.data.url; + }); + + console.log(nodeUrlList); + + // fetch local linktrees + let allLinktrees = await db.getAllLinktrees(); // TODO + allLinktrees = allLinktrees || '[]'; + + // for each node, get all linktrees? 
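+    // Replication sketch: each peer's full linktree list is fetched below,
+    // every entry's signature is verified, and the local list is only meant
+    // to be updated when an entry is new or carries a newer timestamp than
+    // the locally stored copy (see the TODOs above and below).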
+ // TODO - get only one linktree per node, and compare them + // it will be cleaner to focus on one pubkey, and compare with many nodes (maybe 3 nodes) + for (let url of nodeUrlList) { + console.log(url); + const res = await axios.get(`${url}/task/${TASK_ID}/linktree/all`); + if (res.status != 200) { + console.error('ERROR', res.status); + continue; + } + const payload = res.data; + + + // TODO - there are several things to compare + /* + 1. the list of all linktrees held by each node (the list of public keys) + 2. the linktree data for each public key + 3. the timestamp of each linktree item on each node (we should only download newer data) + */ + + /* + 1. Verify the signature + 2. Only update your db if incoming timestamp > your timestamp or you don't have the data + */ + // TODO - fix by adding linktree comparisons for each publickey - the list shared between nodes should be the public keys of all known linktrees + + // TODO2 - whenever a linktree is not found on this node, it should be added to the db + + // TODO3 - whenever a linktree is found on this node, it should be compared to the one in the db and updated if the timestamp is newer + + if (!payload || payload.length == 0) continue; + for (let i = 0; i < payload.length; i++) { + const value = payload[i].value; + const isVerified = nacl.sign.detached.verify( + new TextEncoder().encode(JSON.stringify(value.data)), + bs58.decode(value.signature), + bs58.decode(value.publicKey) + ); + if (!isVerified) { + console.warn(`${url} is not able to verify the signature`); + continue; + } + let localExistingLinktree = allLinktrees.find((e) => { + e.uuid == linkTreePayload.data.uuid; + }); + if (localExistingLinktree) { + if (localExistingLinktree.data.timestamp < linkTreePayload.data.timestamp) { + allLinktrees.push(linkTreePayload); + } + } else { + allLinktrees.push(linkTreePayload); + } + } + } + } catch (error) { + console.error('Some went wrong:', error); + } + } + +module.exports = { share } \ No newline at end of file diff --git a/db_model.js b/db_model.js new file mode 100644 index 00000000..081c8e15 --- /dev/null +++ b/db_model.js @@ -0,0 +1,230 @@ +const levelup = require('levelup'); +const leveldown = require('leveldown'); +const { namespaceWrapper } = require('./namespaceWrapper'); +const fs = require('fs'); + +// db functions for linktree +const getLinktree = async (publicKey) => { + return new Promise((resolve, reject) => { + namespaceWrapper.levelDB.get(getLinktreeId(publicKey), (err, value) => { + if (err) { + console.error("Error in getLinktree:", err); + resolve(null); + } else { + resolve(JSON.parse(value || "[]")); + } + }); + }); +} + +const setLinktree = async (publicKey, linktree) => { + namespaceWrapper.levelDB.put(getLinktreeId(publicKey), JSON.stringify(linktree)); + return console.log("Linktree set"); +} + +const getAllLinktrees = async (values) => { + return new Promise((resolve, reject) => { + let dataStore = []; + + if (!values) values = true; + namespaceWrapper.levelDB.createReadStream({ + lt: 'linktree~', + gt: `linktree`, + reverse: true, + keys: true, + values: values + }) + .on('data', function (data) { + console.log( data.key.toString(), '=', data.value.toString()) + dataStore.push({ key: data.key.toString(), value: JSON.parse(data.value.toString()) }); + }) + .on('error', function (err) { + console.log('Something went wrong in read linktreesStream!', err); + reject(err); + }) + .on('close', function () { + console.log('Stream closed') + }) + .on('end', function () { + console.log('Stream ended') + 
resolve(dataStore); + }) + }); +} + +// namespaceWrapper.levelDB functions for proofs +const getProofs = async (pubkey) => { + return new Promise((resolve, reject) => { + namespaceWrapper.levelDB.get(getProofsId(pubkey), (err, value) => { + if (err) { + console.error("Error in getProofs:", err); + resolve(null); + } else { + resolve(JSON.parse(value || "[]")); + } + }); + }); +} + +const setProofs = async (pubkey, proofs) => { + namespaceWrapper.levelDB.put(getProofsId(pubkey), JSON.stringify(proofs)); + return console.log("Proofs set"); +} + +const getAllProofs = async () => { + return new Promise((resolve, reject) => { + let dataStore = []; + namespaceWrapper.levelDB.createReadStream({ + gte: 'proofs', + reverse: true, + keys: true, + values: true + }) + .on('data', function (data) { + console.log( data.key.toString(), '=', data.value.toString()) + dataStore.push( JSON.parse(data.value.toString())); + }) + .on('error', function (err) { + console.log('Something went wrong in read proofsStream!', err); + reject(err); + }) + .on('close', function () { + console.log('Stream closed') + }) + .on('end', function () { + console.log('Stream ended') + resolve(dataStore); + }) + }); +} + +// db functions for node proofs +const getNodeProofCid = async (round) => { + return new Promise((resolve, reject) => { + namespaceWrapper.levelDB.get(getNodeProofCidid(round), (err, value) => { + if (err) { + console.error("Error in getNodeProofCid:", err); + resolve(null); + } else { + resolve(value.toString() || "[]"); + } + }); + }); +} + +const setNodeProofCid = async (round, cid) => { + namespaceWrapper.levelDB.put(getNodeProofCidid(round), cid); + return console.log("Node CID set"); +} + +const getAllNodeProofCids = async () => { + return new Promise((resolve, reject) => { + let dataStore = []; + const nodeProofsStream = namespaceWrapper.levelDB.createReadStream({ + gt: 'node_proofs:', + lt: 'node_proofs~', + reverse: true, + keys: true, + values: true + }) + nodeProofsStream + .on('data', function (data) { + console.log( data.key.toString(), '=', data.value.toString()) + dataStore.push({ key: data.key.toString(), value: data.value.toString() }); + }) + .on('error', function (err) { + console.log('Something went wrong in read nodeProofsStream!', err); + reject(err); + }) + .on('close', function () { + console.log('Stream closed') + }) + .on('end', function () { + console.log('Stream ended') + resolve(dataStore); + }) + }); +} + +//db functions fro Auth list +const getAuthList = async (pubkey) => { + return new Promise((resolve, reject) => { + namespaceWrapper.levelDB.get(getAuthListId(pubkey), (err, value) => { + if (err) { + console.error("Error in getAuthList:", err); + resolve(null); + } else { + resolve(JSON.parse(value || "[]")); + } + }); + }); +} + +const setAuthList = async (pubkey) => { + namespaceWrapper.levelDB.put(getAuthListId(pubkey), JSON.stringify(pubkey)); + return console.log("Auth List set"); +} + +const getAllAuthLists = async (values) => { + if (!values) values = true; + return new Promise((resolve, reject) => { + let dataStore = []; + const authListStream = namespaceWrapper.levelDB.createReadStream({ + gt: 'auth_list:', + lt: 'auth_list~', + reverse: true, + keys: true, + values: values + }) + authListStream + .on('data', function (data) { + console.log( data.key.toString(), '=', data.value.toString()) + dataStore.push( JSON.parse(data.value.toString()) ); + }) + .on('error', function (err) { + console.log('Something went wrong in read authListStream!', err); + reject(err); + }) + 
.on('close', function () { + console.log('Stream closed') + }) + .on('end', function () { + console.log('Stream ended') + resolve(dataStore); + }) + }); +} + + + +const getNodeProofCidid = (round) => { + return `node_proofs:${round}`; +} + +const getLinktreeId = (publicKey) => { + return `linktree:${publicKey}`; +} + +const getProofsId = (pubkey) => { + return `proofs:${pubkey}`; +} + +const getAuthListId = (round) => { + return `auth_list:${round}`; +} + +module.exports = { + getLinktree, + setLinktree, + getAllLinktrees, + getProofs, + setProofs, + getAllProofs, + getNodeProofCid, + setNodeProofCid, + getAllNodeProofCids, + getAuthList, + setAuthList, + getAllAuthLists, + getAuthListId +} \ No newline at end of file diff --git a/docker-compose.yaml b/docker-compose.yaml index d28c15ba..33f14c5d 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -19,6 +19,8 @@ services: - redis ports: - '8080:8080' + - '10000:10000' + env_file: .env-local container_name: task_node diff --git a/helpers/createFile.js b/helpers/createFile.js new file mode 100644 index 00000000..c585cf5c --- /dev/null +++ b/helpers/createFile.js @@ -0,0 +1,11 @@ +const fsPromise = require("fs/promises"); + +module.exports = async (path, data) => { + //if (!fs.existsSync('userIndex')) fs.mkdirSync('userIndex'); + + await fsPromise.writeFile(path, JSON.stringify(data), (err) => { + if (err) { + console.error(err); + } + }); +}; diff --git a/helpers/dataFromCid.js b/helpers/dataFromCid.js new file mode 100644 index 00000000..278e0204 --- /dev/null +++ b/helpers/dataFromCid.js @@ -0,0 +1,32 @@ +const axios = require("axios"); +const { Web3Storage, getFilesFromPath } = require("web3.storage"); +const storageClient = new Web3Storage({ + token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkaWQ6ZXRocjoweGY0ODYxMzAzOTdDNTY1QzlDYTRCOTUzZTA2RWQ4NUI4MGRBQzRkYTIiLCJpc3MiOiJ3ZWIzLXN0b3JhZ2UiLCJpYXQiOjE2NjYzNjU1OTk5MDMsIm5hbWUiOiJTb21hIn0.TU-KUFS9vjI9blN5dx6VsLLuIjJnpjPrxDHBvjXQUxw", +}); + +module.exports = async (cid) => { + console.log("CID", cid); + if (storageClient) { + const res = await storageClient.get(cid); + if (!res.ok) { + // voting false + console.log("VOTE FALSE"); + + console.log("SLASH VOTE DUE TO FAKE VALUE"); + //console.log("VOTE", vote); + return false; + } else { + const file = await res.files(); + //console.log("FILE", file); + //console.log("CID", file[0].cid); + const url = `https://${file[0].cid}.ipfs.w3s.link/?filename=${file[0].name}`; + console.log("URL", url); + try { + const output = await axios.get(url); + return output; + } catch (error) { + console.log("ERROR", error); + } + } + } +}; diff --git a/helpers/deleteFile.js b/helpers/deleteFile.js new file mode 100644 index 00000000..1f17d2ef --- /dev/null +++ b/helpers/deleteFile.js @@ -0,0 +1,11 @@ +const fsPromise = require("fs/promises"); + +module.exports = async (path) => { + //if (!fs.existsSync('userIndex')) fs.mkdirSync('userIndex'); + + await fsPromise.unlink(path, (err) => { + if (err) { + console.error(err); + } + }); +}; diff --git a/helpers/getKey.js b/helpers/getKey.js new file mode 100644 index 00000000..66294729 --- /dev/null +++ b/helpers/getKey.js @@ -0,0 +1,3 @@ +module.exports = async (obj, value) => { + return Object.keys(obj).find((key) => obj[key] === value); +}; diff --git a/helpers/hashCompare.js b/helpers/hashCompare.js new file mode 100644 index 00000000..62f21022 --- /dev/null +++ b/helpers/hashCompare.js @@ -0,0 +1,26 @@ +const crypto = require("crypto"); +const { namespaceWrapper } = require("../namespaceWrapper"); + 
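+// Verifies the signed payload via the namespace wrapper, recomputes the
+// sha256 hash of the JSON-encoded `index` argument, and returns true only
+// when the recovered data hash matches that expected hash.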
+module.exports = async (index, signature, publicKey) => { + const hash = await namespaceWrapper.verifySignature(signature, publicKey); + if (hash.error) { + console.error("Could not verify the signatures"); + } + + console.log("DATA HASH", hash.data); + + // comparing the data Hash + const expectedHash = crypto + .createHash("sha256") + .update(JSON.stringify(index)) + .digest("hex"); + + const expectedString = JSON.stringify(expectedHash); + console.log("EXPECTED HASH", expectedString); + + if (hash.data == expectedString) { + return true; + } else { + return false; + } +}; diff --git a/index.js b/index.js index ac613d92..fc79d414 100644 --- a/index.js +++ b/index.js @@ -1,9 +1,32 @@ -const {coreLogic} = require("./coreLogic"); -const { app } = require("./init"); -const { namespaceWrapper } = require("./namespaceWrapper"); - +const coreLogic = require("./coreLogic"); +const dbSharing = require("./dbSharing"); +// const localShim = require("./localTestingShim"); // TEST to enable testing with K2 without round timers, enable this line and line 59 +const { app, MAIN_ACCOUNT_PUBKEY, SERVICE_URL, TASK_ID } = require("./init"); +const express = require('express'); +const { namespaceWrapper, taskNodeAdministered } = require("./namespaceWrapper"); +const {default: axios} = require('axios'); +const bs58 = require('bs58'); +const solanaWeb3 = require('@solana/web3.js'); +const nacl = require('tweetnacl'); +const fs = require('fs'); +const db = require('./db_model'); +const routes = require('./routes'); async function setup() { + const originalConsoleLog = console.log; + + // Create a writable stream to the log file + const logStream = fs.createWriteStream('./namespace/logs.txt', { flags: 'a' }); + + // Overwrite the console.log function to write to the log file + console.log = function (...args) { + originalConsoleLog.apply(console, args); + const message = args.map(arg => (typeof arg === 'object' ? JSON.stringify(arg) : arg)).join(' ') + '\n'; + + // Write the message to the log file + logStream.write(message); + }; + console.log("setup function called"); // Run default setup await namespaceWrapper.defaultTaskSetup(); @@ -30,75 +53,25 @@ async function setup() { } }); + // Code for the data replication among the nodes + setInterval(() => { + dbSharing.share(); + }, 20000); - /* GUIDE TO CALLS K2 FUNCTIONS MANUALLY - - If you wish to do the development by avoiding the timers then you can do the intended calls to K2 - directly using these function calls. - - To disable timers please set the TIMERS flag in task-node ENV to disable - - NOTE : K2 will still have the windows to accept the submission value, audit, so you are expected - to make calls in the intended slots of your round time. 
- - */ - - // Get the task state - //console.log(await namespaceWrapper.getTaskState()); - - //GET ROUND - - // const round = await namespaceWrapper.getRound(); - // console.log("ROUND", round); - - - // Call to do the work for the task - - //await coreLogic.task(); - - // Submission to K2 (Preferablly you should submit the cid received from IPFS) - - - //await coreLogic.submitTask(round - 1); - - // Audit submissions - - //await coreLogic.auditTask(round - 1); - - // upload distribution list to K2 - - //await coreLogic.submitDistributionList(round - 2) - - // Audit distribution list - - //await coreLogic.auditDistribution(round - 2); - - // Payout trigger - - // const responsePayout = await namespaceWrapper.payoutTrigger(); - // console.log("RESPONSE TRIGGER", responsePayout); - - - - + // localShim(); // TEST enable this to run the localShim for testing with K2 without timers +} +if (taskNodeAdministered){ + setup(); } -setup(); if (app) { - // Write your Express Endpoints here. - // For Example - // app.post('/accept-cid', async (req, res) => {}) + app.use(express.json()); + app.use('/', routes) - // Sample API that return your task state - app.get('/taskState', async (req, res) => { - const state = await namespaceWrapper.getTaskState(); - console.log("TASK STATE", state); - res.status(200).json({ taskState: state }) - }) } diff --git a/init.js b/init.js index 054648db..681f7875 100644 --- a/init.js +++ b/init.js @@ -1,7 +1,7 @@ const express = require('express'); -const TASK_NAME = process.argv[2]; +const TASK_NAME = process.argv[2] || "Local"; const TASK_ID = process.argv[3]; -const EXPRESS_PORT = process.argv[4]; +const EXPRESS_PORT = process.argv[4] || 10000; const NODE_MODE = process.argv[5]; const MAIN_ACCOUNT_PUBKEY = process.argv[6]; const SECRET_KEY = process.argv[7]; @@ -11,9 +11,9 @@ const STAKE = Number(process.argv[10]); const app = express(); -console.log('SETTING UP EXPRESS', NODE_MODE); +console.log('SETTING UP EXPRESS'); app.get('/', (req, res) => { - res.send('Hello World!'); + res.send('Linktree task running'); }); app.listen(EXPRESS_PORT, () => { diff --git a/linktree_task.js b/linktree_task.js new file mode 100644 index 00000000..36ececb8 --- /dev/null +++ b/linktree_task.js @@ -0,0 +1,67 @@ +const { namespaceWrapper } = require('./namespaceWrapper'); +const createFile = require('./helpers/createFile.js'); +const deleteFile = require('./helpers/deleteFile'); +const fs = require('fs'); +const { Web3Storage, getFilesFromPath } = require('web3.storage'); +const storageClient = new Web3Storage({ + token: process.env.SECRET_WEB3_STORAGE_KEY || "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkaWQ6ZXRocjoweGY0ODYxMzAzOTdDNTY1QzlDYTRCOTUzZTA2RWQ4NUI4MGRBQzRkYTIiLCJpc3MiOiJ3ZWIzLXN0b3JhZ2UiLCJpYXQiOjE2NjYzNjU1OTk5MDMsIm5hbWUiOiJTb21hIn0.TU-KUFS9vjI9blN5dx6VsLLuIjJnpjPrxDHBvjXQUxw", +}); // TODO remove the default web3.storage key for production +const bs58 = require('bs58'); +const nacl = require('tweetnacl'); +const db = require('./db_model'); +const { Keypair } = require('@solana/web3.js'); // TEST For local testing + +const main = async () => { + console.log('******/ IN Linktree Task FUNCTION /******'); + + // Load node's keypair from the JSON file + const keypair = await namespaceWrapper.getSubmitterAccount(); + + // TEST For local testing, hardcode the keypair + // const keypair = Keypair.generate(); + + // Get linktree list fron localdb + const proofs_list_object = await db.getAllProofs(); + + // Use the node's keypair to sign the linktree list + const 
messageUint8Array = new Uint8Array( + Buffer.from(JSON.stringify(proofs_list_object)), + ); + + const signedMessage = nacl.sign(messageUint8Array, keypair.secretKey); + const signature = signedMessage.slice(0, nacl.sign.signatureLength); + + const submission_value = { + proofs: proofs_list_object, + node_publicKey: keypair.publicKey, + node_signature: bs58.encode(signature), + }; + + // upload the proofs of the linktree on web3.storage + const path = `./Linktree/proofs.json`; + + if (!fs.existsSync('./Linktree')) fs.mkdirSync('./Linktree'); + + console.log('PATH', path); + + await createFile(path, submission_value); + + if (storageClient) { + + const file = await getFilesFromPath(path); + const proof_cid = await storageClient.put(file); + console.log('User Linktrees proof uploaded to IPFS: ', proof_cid); + + // deleting the file from fs once it is uploaded to IPFS + await deleteFile(path); + + return proof_cid; + + } else { + + console.log('NODE DO NOT HAVE ACCESS TO WEB3.STORAGE'); + + } +}; + +module.exports = main; \ No newline at end of file diff --git a/linktree_validate.js b/linktree_validate.js new file mode 100644 index 00000000..9ad740df --- /dev/null +++ b/linktree_validate.js @@ -0,0 +1,130 @@ +const dataFromCid = require("./helpers/dataFromCid"); +const db = require('./db_model'); +const nacl = require('tweetnacl'); +const bs58 = require('bs58'); +const {default: axios} = require('axios'); +const { namespaceWrapper } = require("./namespaceWrapper"); + +module.exports = async (submission_value, round) => { + console.log('******/ Linktree CID VALIDATION Task FUNCTION /******'); + const outputraw = await dataFromCid(submission_value); + const output = outputraw.data; + console.log('OUTPUT', output); + console.log('RESPONSE DATA length', output.proofs[0].LENGTH); + console.log('PUBLIC KEY', output.node_publicKey); + console.log('SIGNATURE', output.node_signature); + + // TODO - can we safely remove this, from a game theory perspective? 
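+  // Two checks follow: verifyNode() confirms the submitting node's signature
+  // over the proofs payload, and verifyLinktrees() re-checks each user's
+  // linktree signature against the copies held by other nodes.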
+ // Check that the node who submitted the proofs is a valid staked node + let isNode = await verifyNode(output.proofs, output.node_signature, output.node_publicKey); + console.log("Is the node's signature on the CID payload correct?", isNode); + + // check each item in the linktrees list and verify that the node is holding that payload, and the signature matches + let isLinktree = await verifyLinktrees(output.proofs); + console.log('IS LINKTREE True?', isLinktree); + + if (isNode && isLinktree) return true; // if both are true, return true + else return false; // if one of them is false, return false +} + +// verify the linktree signature by querying the other node to get it's copy of the linktree +async function verifyLinktrees(proofs_list_object) { + let allSignaturesValid = true; + let AuthUserList = await db.getAllAuthLists(); + console.log('Authenticated Users List:', AuthUserList); + + for (const proofs of proofs_list_object) { + + let publicKey = proofs.value[0].publicKey + + // call other nodes to get the node list + const nodeUrlList = await namespaceWrapper.getNodes(); + + // TEST hardcode the node list + // const nodeUrlList = [ + // "http://localhost:10000", + // ] + + // verify the signature of the linktree for each nodes + for (const nodeUrl of nodeUrlList) { + console.log("cheking linktree on ", nodeUrl) + + // get all linktree in this node + const res = await axios.get(`${url}/task/${TASK_ID}/linktree/get/${publicKey}`); + + // TEST hardcode the node endpoint + // const res = await axios.get(`${nodeUrl}/linktree/get/${publicKey}`); + + // check node's status + if (res.status != 200) { + console.error('ERROR', res.status); + continue; + } + + // get the payload + const linktree = res.data; + + // check if the user's pubkey is on the authlist + if (AuthUserList.hasOwnProperty(linktree.publicKey) ) { + + // TODO write logic to quersy other node and verify registration events + /* + 1. REST API that returns a user's registration proof and accepts :pubkey + 2. 
Add logic here to verify 'proofs' from (1) and then add the user to the AuthUserList + */ + + } else { + + // Verify the signature + const messageUint8Array = new Uint8Array( + Buffer.from(JSON.stringify(linktree.data)), + ); + const signature = linktree.signature; + const publicKey = linktree.publicKey; + const signatureUint8Array = bs58.decode(signature); + const publicKeyUint8Array = bs58.decode(publicKey); + const isSignatureValid = await verifySignature( + messageUint8Array, + signatureUint8Array, + publicKeyUint8Array, + ); + console.log(`IS SIGNATURE ${publicKey} VALID?`, isSignatureValid); + + if (isSignatureValid) { + await db.setAuthList(publicKey) // TODO refactor for direct database usage and read / writes of individual authorized users by pubkey (otherwise full rewrite could risk overwriting another write if running in parallel) + } else { + allSignaturesValid = false; + } + + } + } + } + return allSignaturesValid; +} + +// verifies that a node's signature is valid, and rejects situations where CIDs from IPFS return no data or are not JSON +async function verifyNode(proofs_list_object, signature, publicKey) { + const messageUint8Array = new Uint8Array( + Buffer.from(JSON.stringify(proofs_list_object)), + ); + const signatureUint8Array = bs58.decode(signature); + const publicKeyUint8Array = bs58.decode(publicKey); + + if (!proofs_list_object || !signature || !publicKey) { + console.error('No data received from web3.storage'); + return false; + } + + // verify the node signature + const isSignatureValid = await verifySignature( + messageUint8Array, + signatureUint8Array, + publicKeyUint8Array, + ); + + return isSignatureValid; +}; + +async function verifySignature(message, signature, publicKey) { + return nacl.sign.detached.verify(message, signature, publicKey); +} diff --git a/localTestingShim.js b/localTestingShim.js new file mode 100644 index 00000000..e26e627b --- /dev/null +++ b/localTestingShim.js @@ -0,0 +1,57 @@ +const coreLogic = require("./coreLogic"); +const namespaceWrapper = require("./namespaceWrapper"); + +// TEST Set round +// let round = 1000 +const round = await namespaceWrapper.getRound(); +const localShim = async () => { + + /* GUIDE TO CALLS K2 FUNCTIONS MANUALLY + + If you wish to do the development by avoiding the timers then you can do the intended calls to K2 + directly using these function calls. + + To disable timers please set the TIMERS flag in task-node ENV to disable + + NOTE : K2 will still have the windows to accept the submission value, audit, so you are expected + to make calls in the intended slots of your round time. 
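  A typical manual sequence (mirroring the commented calls below) is:
  task() during the work window, then submitTask(round - 1) and
  auditTask(round - 1), then submitDistributionList(round - 2) and
  auditDistribution(round - 2), and finally the payout trigger.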
+ + */ + + // console.log("*******************TESTING*******************") + // Get the task state + // console.log(await namespaceWrapper.getTaskState()); + + // Get account public key + // console.log(MAIN_ACCOUNT_PUBKEY); + + // GET ROUND + // const round = await namespaceWrapper.getRound(); + // console.log("ROUND", round); + + + // Call to do the work for the task + // await coreLogic.task(); + + // Submission to K2 (Preferablly you should submit the cid received from IPFS) + // await coreLogic.submitTask(round - 1); + + // Audit submissions + // await coreLogic.auditTask(round - 1); + + // upload distribution list to K2 + + //await coreLogic.submitDistributionList(round - 2) + + // Audit distribution list + + //await coreLogic.auditDistribution(round - 2); + + // Payout trigger + + // const responsePayout = await namespaceWrapper.payoutTrigger(); + // console.log("RESPONSE TRIGGER", responsePayout); + +} + +module.exports = localShim; \ No newline at end of file diff --git a/namespaceWrapper-backup.js b/namespaceWrapper-backup.js new file mode 100644 index 00000000..2b2b38af --- /dev/null +++ b/namespaceWrapper-backup.js @@ -0,0 +1,436 @@ +const { default: axios } = require("axios"); +const levelup = require('levelup'); +const leveldown = require('leveldown'); +const BASE_ROOT_URL = "http://localhost:8080/namespace-wrapper"; +const { TASK_ID, MAIN_ACCOUNT_PUBKEY, SECRET_KEY } = require("./init"); +const { Connection, PublicKey, Keypair } = require("@_koi/web3.js"); + +const taskNodeAdministered = !!TASK_ID; +let localLevelDB; +class NamespaceWrapper { + /** + * Namespace wrapper of storeGetAsync + * @param {string} key // Path to get + */ + async storeGet(key) { + if (taskNodeAdministered){ + return await genericHandler("storeGet", key); + } + instantiateLevelDb(); + return new Promise((resolve, reject) => { + localLevelDB.get(key, { asBuffer: false }, (err, value) => { + if (err) { + resolve(null); + } else { + resolve(value); + } + }); + }); + } + /** + * Namespace wrapper over storeSetAsync + * @param {string} key Path to set + * @param {*} value Data to set + */ + async storeSet(key, value) { + if (taskNodeAdministered){ + return await genericHandler("storeSet", key, value); + } + instantiateLevelDb(); + return new Promise((resolve, reject) => { + localLevelDB.put(key, value, {}, (err) => { + if (err) { + reject(err); + } else { + resolve(); + } + }); + }); + + } + /** + * Namespace wrapper over fsPromises methods + * @param {*} method The fsPromise method to call + * @param {*} path Path for the express call + * @param {...any} args Remaining parameters for the FS call + */ + async fs(method, path, ...args) { + return await genericHandler("fs", method, path, ...args); + } + async fsStaking(method, path, ...args) { + return await genericHandler("fsStaking", method, path, ...args); + } + async fsWriteStream(imagepath) { + return await genericHandler("fsWriteStream", imagepath); + } + async fsReadStream(imagepath) { + return await genericHandler("fsReadStream", imagepath); + } + + async getSlot() { + return await genericHandler("getCurrentSlot"); + } + + async submissionOnChain(submitterKeypair, submission) { + return await genericHandler( + "submissionOnChain", + submitterKeypair, + submission + ); + } + + async stakeOnChain( + taskStateInfoPublicKey, + stakingAccKeypair, + stakePotAccount, + stakeAmount + ) { + return await genericHandler( + "stakeOnChain", + taskStateInfoPublicKey, + stakingAccKeypair, + stakePotAccount, + stakeAmount + ); + } + async 
claimReward(stakePotAccount, beneficiaryAccount, claimerKeypair) { + return await genericHandler( + "claimReward", + stakePotAccount, + beneficiaryAccount, + claimerKeypair + ); + } + async sendTransaction(serviceNodeAccount, beneficiaryAccount, amount) { + return await genericHandler( + "sendTransaction", + serviceNodeAccount, + beneficiaryAccount, + amount + ); + } + + async getSubmitterAccount() { + const submitterAccountResp = await genericHandler("getSubmitterAccount"); + return Keypair.fromSecretKey( + Uint8Array.from(Object.values(submitterAccountResp._keypair.secretKey)) + ); + } + + /** + * sendAndConfirmTransaction wrapper that injects mainSystemWallet as the first signer for paying the tx fees + * @param {connection} method // Receive method ["get", "post", "put", "delete"] + * @param {transaction} path // Endpoint path appended to namespace + * @param {Function} callback // Callback function on traffic receive + */ + async sendAndConfirmTransactionWrapper(transaction, signers) { + const blockhash = (await connection.getRecentBlockhash("finalized")) + .blockhash; + transaction.recentBlockhash = blockhash; + transaction.feePayer = new PublicKey(MAIN_ACCOUNT_PUBKEY); + return await genericHandler( + "sendAndConfirmTransactionWrapper", + transaction.serialize({ + requireAllSignatures: false, + verifySignatures: false, + }), + signers + ); + } + + async signArweave(transaction) { + let tx = await genericHandler('signArweave',transaction.toJSON()); + return arweave.transactions.fromRaw(tx); + } + async signEth(transaction) { + return await genericHandler('signEth',transaction); + + } + async getTaskState() { + const response = await genericHandler("getTaskState"); + if(response.error){ + return null + } + return response + + } + + async auditSubmission(candidatePubkey, isValid, voterKeypair, round) { + return await genericHandler( + "auditSubmission", + candidatePubkey, + isValid, + voterKeypair, + round + ); + } + + async distributionListAuditSubmission( + candidatePubkey, + isValid, + voterKeypair, + round + ) { + return await genericHandler( + "distributionListAuditSubmission", + candidatePubkey, + isValid, + round + ); + } + + async getRound() { + return await genericHandler("getRound"); + } + + async nodeSelectionDistributionList() { + return await genericHandler("nodeSelectionDistributionList"); + } + + async payoutTrigger() { + return await genericHandler("payloadTrigger"); + } + + async uploadDistributionList(distributionList, round) { + return await genericHandler("uploadDistributionList", distributionList, round); + } + + async distributionListSubmissionOnChain(round) { + return await genericHandler("distributionListSubmissionOnChain", round); + } + + async payloadTrigger() { + return await genericHandler("payloadTrigger"); + } + + async verifySignature(signedMessage, pubKey) { + return await genericHandler("verifySignedData", signedMessage, pubKey); + } + + async payloadSigning(body) { + return await genericHandler("signData", body); + } + + async checkSubmissionAndUpdateRound(submissionValue = "default", round) { + return await genericHandler( + "checkSubmissionAndUpdateRound", + submissionValue, + round + ); + } + async getProgramAccounts() { + return await genericHandler("getProgramAccounts"); + } + async defaultTaskSetup() { + return await genericHandler("defaultTaskSetup"); + } + async getRpcUrl() { + return await genericHandler("getRpcUrl"); + } + async getNodes(url) { + return await genericHandler("getNodes", url); + } + + // Wrapper for selection of node to prepare 
a distribution list + + async nodeSelectionDistributionList(round) { + return await genericHandler("nodeSelectionDistributionList", round); + } + + async getDistributionList(publicKey,round) { + const response = await genericHandler('getDistributionList', publicKey, round); + if (response.error) { + return null; + } + return response + + } + + async validateAndVoteOnNodes(validate, round) { + // await this.checkVoteStatus(); + console.log("******/ IN VOTING /******"); + const taskAccountDataJSON = await this.getTaskState(); + + console.log( + "Fetching the submissions of N - 1 round", + taskAccountDataJSON.submissions[round] + ); + const submissions = taskAccountDataJSON.submissions[round]; + if (submissions == null) { + console.log("No submisssions found in N-1 round"); + return "No submisssions found in N-1 round"; + } else { + const keys = Object.keys(submissions); + const values = Object.values(submissions); + const size = values.length; + console.log("Submissions from last round: ", keys, values, size); + let isValid + const submitterAccountKeyPair = await this.getSubmitterAccount(); + const submitterPubkey = submitterAccountKeyPair.publicKey.toBase58(); + for (let i = 0; i < size; i++) { + let candidatePublicKey = keys[i]; + console.log("FOR CANDIDATE KEY", candidatePublicKey); + let candidateKeyPairPublicKey = new PublicKey(keys[i]); + if (candidatePublicKey == submitterPubkey) { + console.log("YOU CANNOT VOTE ON YOUR OWN SUBMISSIONS"); + } else { + try { + console.log( + "SUBMISSION VALUE TO CHECK", + values[i].submission_value + ); + isValid = await validate(values[i].submission_value, round); + console.log(`Voting ${isValid} to ${candidatePublicKey}`); + + if (isValid) { + // check for the submissions_audit_trigger , if it exists then vote true on that otherwise do nothing + const submissions_audit_trigger = + taskAccountDataJSON.submissions_audit_trigger[round]; + console.log("SUBMIT AUDIT TRIGGER", submissions_audit_trigger); + // console.log( + // "CANDIDATE PUBKEY CHECK IN AUDIT TRIGGER", + // submissions_audit_trigger[candidatePublicKey] + // ); + if (submissions_audit_trigger && submissions_audit_trigger[candidatePublicKey]) { + console.log("VOTING TRUE ON AUDIT"); + const response = await this.auditSubmission( + candidateKeyPairPublicKey, + isValid, + submitterAccountKeyPair, + round + ); + console.log("RESPONSE FROM AUDIT FUNCTION", response); + } + } else if (isValid == false) { + // Call auditSubmission function and isValid is passed as false + console.log("RAISING AUDIT / VOTING FALSE"); + const response = await this.auditSubmission( + candidateKeyPairPublicKey, + isValid, + submitterAccountKeyPair, + round + ); + console.log("RESPONSE FROM AUDIT FUNCTION", response); + } + } catch (err) { + console.log("ERROR IN ELSE CONDITION", err); + } + } + } + } + } + + + async validateAndVoteOnDistributionList(validateDistribution, round) { + // await this.checkVoteStatus(); + console.log("******/ IN VOTING OF DISTRIBUTION LIST /******"); + const taskAccountDataJSON = await this.getTaskState(); + console.log( + "Fetching the Distribution submissions of N - 2 round", + taskAccountDataJSON.distribution_rewards_submission[round] + ); + const submissions = + taskAccountDataJSON.distribution_rewards_submission[round]; + if (submissions == null) { + console.log("No submisssions found in N-2 round"); + return "No submisssions found in N-2 round"; + } else { + const keys = Object.keys(submissions); + const values = Object.values(submissions); + const size = values.length; + 
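// Validate each candidate's distribution submission and vote; a failed validation raises an audit (vote false) +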
console.log("Distribution Submissions from last round: ", keys, values, size); + let isValid + const submitterAccountKeyPair = await this.getSubmitterAccount(); + const submitterPubkey = submitterAccountKeyPair.publicKey.toBase58(); + + for (let i = 0; i < size; i++) { + let candidatePublicKey = keys[i]; + console.log("FOR CANDIDATE KEY", candidatePublicKey); + let candidateKeyPairPublicKey = new PublicKey(keys[i]); + if (candidatePublicKey == submitterPubkey) { + console.log("YOU CANNOT VOTE ON YOUR OWN DISTRIBUTION SUBMISSIONS"); + } else { + try { + console.log( + "DISTRIBUTION SUBMISSION VALUE TO CHECK", + values[i].submission_value + ); + isValid = await validateDistribution(values[i].submission_value, round); + console.log(`Voting ${isValid} to ${candidatePublicKey}`); + + if (isValid) { + // check for the submissions_audit_trigger , if it exists then vote true on that otherwise do nothing + const distributions_audit_trigger = + taskAccountDataJSON.distributions_audit_trigger[round]; + console.log("SUBMIT DISTRIBUTION AUDIT TRIGGER", distributions_audit_trigger); + // console.log( + // "CANDIDATE PUBKEY CHECK IN AUDIT TRIGGER", + // distributions_audit_trigger[candidatePublicKey] + // ); + if (distributions_audit_trigger && distributions_audit_trigger[candidatePublicKey]) { + console.log("VOTING TRUE ON DISTRIBUTION AUDIT"); + const response = await this.distributionListAuditSubmission( + candidateKeyPairPublicKey, + isValid, + submitterAccountKeyPair, + round + ); + console.log("RESPONSE FROM DISTRIBUTION AUDIT FUNCTION", response); + } + } else if (isValid == false) { + // Call auditSubmission function and isValid is passed as false + console.log("RAISING AUDIT / VOTING FALSE ON DISTRIBUTION"); + const response = await this.distributionListAuditSubmission( + candidateKeyPairPublicKey, + isValid, + submitterAccountKeyPair, + round + ); + console.log("RESPONSE FROM DISTRIBUTION AUDIT FUNCTION", response); + } + } catch (err) { + console.log("ERROR IN ELSE CONDITION FOR DISTRIBUTION", err); + } + } + } + } + } +} + +async function instantiateLevelDb() { + if(!localLevelDB){ + localLevelDB = levelup(leveldown(__dirname + "/taskDB/HjWJmb2gcwwm99VhyNVJZir3ToAJTfUB4j7buWnMMUEP")); + } +} + +async function genericHandler(...args) { + try { + let response = await axios.post(BASE_ROOT_URL, { + args, + taskId: TASK_ID, + secret: SECRET_KEY, + }); + if (response.status == 200) return response.data.response; + else { + console.error(response.status, response.data); + return null; + } + } catch (err) { + console.error(`Error in genericHandler: "${args[0]}"`,err.message); + console.error(err?.response?.data); + return {error:err}; + } +} +let connection; +const namespaceWrapper = new NamespaceWrapper(); +if(taskNodeAdministered){ + namespaceWrapper.getRpcUrl().then((rpcUrl) => { + console.log(rpcUrl, "RPC URL"); + connection = new Connection(rpcUrl, "confirmed"); + }); +} + +module.exports = { + namespaceWrapper, + taskNodeAdministered, +}; diff --git a/namespaceWrapper.js b/namespaceWrapper.js index 631863be..54d6b145 100644 --- a/namespaceWrapper.js +++ b/namespaceWrapper.js @@ -1,15 +1,39 @@ -const { default: axios } = require("axios"); -const BASE_ROOT_URL = "http://localhost:8080/namespace-wrapper"; -const { TASK_ID, MAIN_ACCOUNT_PUBKEY, SECRET_KEY } = require("./init"); -const { Connection, PublicKey, Keypair } = require("@_koi/web3.js"); - +const { default: axios } = require('axios'); +const levelup = require('levelup'); +const leveldown = require('leveldown'); +const BASE_ROOT_URL = 
'http://localhost:8080/namespace-wrapper'; +const { TASK_ID, SECRET_KEY } = require('./init'); +const { Connection, PublicKey, Keypair } = require('@_koi/web3.js'); +const taskNodeAdministered = !!TASK_ID; class NamespaceWrapper { + levelDB; + + constructor() { + if(taskNodeAdministered){ + this.getTaskLevelDBPath().then((path)=>{ + this.levelDB = levelup(leveldown(path)); + }).catch((err)=>{ + console.error(err) + this.levelDB=levelup(leveldown(`../namespace/${TASK_ID}/KOIILevelDB`)) + }) + }else{ + this.levelDB = levelup(leveldown('./localKOIIDB')); + } + } /** * Namespace wrapper of storeGetAsync * @param {string} key // Path to get */ async storeGet(key) { - return await genericHandler("storeGet", key); + return new Promise((resolve, reject) => { + this.levelDB.get(key, { asBuffer: false }, (err, value) => { + if (err) { + reject(err); + } else { + resolve(value); + } + }); + }); } /** * Namespace wrapper over storeSetAsync @@ -17,7 +41,15 @@ class NamespaceWrapper { * @param {*} value Data to set */ async storeSet(key, value) { - return await genericHandler("storeSet", key, value); + return new Promise((resolve, reject) => { + this.levelDB.put(key, value, {}, err => { + if (err) { + reject(err); + } else { + resolve(); + } + }); + }); } /** * Namespace wrapper over fsPromises methods @@ -25,66 +57,80 @@ class NamespaceWrapper { * @param {*} path Path for the express call * @param {...any} args Remaining parameters for the FS call */ - async fs(method, path, ...args) { - return await genericHandler("fs", method, path, ...args); - } - async fsStaking(method, path, ...args) { - return await genericHandler("fsStaking", method, path, ...args); - } - async fsWriteStream(imagepath) { - return await genericHandler("fsWriteStream", imagepath); - } - async fsReadStream(imagepath) { - return await genericHandler("fsReadStream", imagepath); - } + // async fs(method, path, ...args) { + // return await genericHandler('fs', method, path, ...args); + // } + // async fsStaking(method, path, ...args) { + // return await genericHandler('fsStaking', method, path, ...args); + // } + // async fsWriteStream(imagepath) { + // return await genericHandler('fsWriteStream', imagepath); + // } + // async fsReadStream(imagepath) { + // return await genericHandler('fsReadStream', imagepath); + // } async getSlot() { - return await genericHandler("getCurrentSlot"); + return await genericHandler('getCurrentSlot'); } - async submissionOnChain(submitterKeypair, submission) { - return await genericHandler( - "submissionOnChain", - submitterKeypair, - submission - ); + async payloadSigning(body) { + return await genericHandler('signData', body); + } + + + /** + * Namespace wrapper of storeGetAsync + * @param {string} signedMessage // Path to get + */ + + async verifySignature(signedMessage, pubKey) { + return await genericHandler('verifySignedData', signedMessage, pubKey); } + // async submissionOnChain(submitterKeypair, submission) { + // return await genericHandler( + // 'submissionOnChain', + // submitterKeypair, + // submission, + // ); + // } + async stakeOnChain( taskStateInfoPublicKey, stakingAccKeypair, stakePotAccount, - stakeAmount + stakeAmount, ) { return await genericHandler( - "stakeOnChain", + 'stakeOnChain', taskStateInfoPublicKey, stakingAccKeypair, stakePotAccount, - stakeAmount + stakeAmount, ); } async claimReward(stakePotAccount, beneficiaryAccount, claimerKeypair) { return await genericHandler( - "claimReward", + 'claimReward', stakePotAccount, beneficiaryAccount, - claimerKeypair + claimerKeypair, 
); } async sendTransaction(serviceNodeAccount, beneficiaryAccount, amount) { return await genericHandler( - "sendTransaction", + 'sendTransaction', serviceNodeAccount, beneficiaryAccount, - amount + amount, ); } async getSubmitterAccount() { - const submitterAccountResp = await genericHandler("getSubmitterAccount"); + const submitterAccountResp = await genericHandler('getSubmitterAccount'); return Keypair.fromSecretKey( - Uint8Array.from(Object.values(submitterAccountResp._keypair.secretKey)) + Uint8Array.from(Object.values(submitterAccountResp._keypair.secretKey)), ); } @@ -95,39 +141,42 @@ class NamespaceWrapper { * @param {Function} callback // Callback function on traffic receive */ async sendAndConfirmTransactionWrapper(transaction, signers) { - const blockhash = (await connection.getRecentBlockhash("finalized")) + const blockhash = (await connection.getRecentBlockhash('finalized')) .blockhash; transaction.recentBlockhash = blockhash; transaction.feePayer = new PublicKey(MAIN_ACCOUNT_PUBKEY); return await genericHandler( - "sendAndConfirmTransactionWrapper", + 'sendAndConfirmTransactionWrapper', transaction.serialize({ requireAllSignatures: false, verifySignatures: false, }), - signers + signers, ); } - - async signArweave(transaction) { - let tx = await genericHandler('signArweave',transaction.toJSON()); - return arweave.transactions.fromRaw(tx); - } - async signEth(transaction) { - return await genericHandler('signEth',transaction); - } + // async signArweave(transaction) { + // let tx = await genericHandler('signArweave', transaction.toJSON()); + // return arweave.transactions.fromRaw(tx); + // } + // async signEth(transaction) { + // return await genericHandler('signEth', transaction); + // } async getTaskState() { - return await genericHandler("getTaskState"); + const response = await genericHandler('getTaskState'); + if (response.error) { + return null; + } + return response; } async auditSubmission(candidatePubkey, isValid, voterKeypair, round) { return await genericHandler( - "auditSubmission", + 'auditSubmission', candidatePubkey, isValid, - voterKeypair, - round + voterKeypair, + round, ); } @@ -135,102 +184,114 @@ class NamespaceWrapper { candidatePubkey, isValid, voterKeypair, - round + round, ) { return await genericHandler( - "distributionListAuditSubmission", + 'distributionListAuditSubmission', candidatePubkey, isValid, - round + round, ); } async getRound() { - return await genericHandler("getRound"); + return await genericHandler('getRound'); } async nodeSelectionDistributionList() { - return await genericHandler("nodeSelectionDistributionList"); + return await genericHandler('nodeSelectionDistributionList'); } async payoutTrigger() { - return await genericHandler("payloadTrigger"); + return await genericHandler('payloadTrigger'); } async uploadDistributionList(distributionList, round) { - return await genericHandler("uploadDistributionList", distributionList, round); + return await genericHandler( + 'uploadDistributionList', + distributionList, + round, + ); } async distributionListSubmissionOnChain(round) { - return await genericHandler("distributionListSubmissionOnChain", round); + return await genericHandler('distributionListSubmissionOnChain', round); } async payloadTrigger() { - return await genericHandler("payloadTrigger"); + return await genericHandler('payloadTrigger'); } - async checkSubmissionAndUpdateRound(submissionValue = "default", round) { + async checkSubmissionAndUpdateRound(submissionValue = 'default', round) { return await genericHandler( - 
"checkSubmissionAndUpdateRound", + 'checkSubmissionAndUpdateRound', submissionValue, - round + round, ); } async getProgramAccounts() { - return await genericHandler("getProgramAccounts"); + return await genericHandler('getProgramAccounts'); } async defaultTaskSetup() { - return await genericHandler("defaultTaskSetup"); + return await genericHandler('defaultTaskSetup'); } async getRpcUrl() { - return await genericHandler("getRpcUrl"); + return await genericHandler('getRpcUrl'); } async getNodes(url) { - return await genericHandler("getNodes", url); + return await genericHandler('getNodes', url); } // Wrapper for selection of node to prepare a distribution list async nodeSelectionDistributionList(round) { - return await genericHandler("nodeSelectionDistributionList", round); + return await genericHandler('nodeSelectionDistributionList', round); } - async getDistributionList(publicKey,round) { - return await genericHandler("getDistributionList", publicKey, round); + async getDistributionList(publicKey, round) { + const response = await genericHandler( + 'getDistributionList', + publicKey, + round, + ); + if (response.error) { + return null; + } + return response; } async validateAndVoteOnNodes(validate, round) { - // await this.checkVoteStatus(); - console.log("******/ IN VOTING /******"); + + console.log('******/ IN VOTING /******'); const taskAccountDataJSON = await this.getTaskState(); console.log( - "Fetching the submissions of N - 1 round", - taskAccountDataJSON.submissions[round] + 'Fetching the submissions of N - 1 round', + taskAccountDataJSON.submissions[round], ); const submissions = taskAccountDataJSON.submissions[round]; if (submissions == null) { - console.log("No submisssions found in N-1 round"); - return "No submisssions found in N-1 round"; + console.log('No submisssions found in N-1 round'); + return 'No submisssions found in N-1 round'; } else { const keys = Object.keys(submissions); const values = Object.values(submissions); const size = values.length; - console.log("Submissions from last round: ", keys, values, size); - let isValid + console.log('Submissions from last round: ', keys, values, size); + let isValid; const submitterAccountKeyPair = await this.getSubmitterAccount(); const submitterPubkey = submitterAccountKeyPair.publicKey.toBase58(); for (let i = 0; i < size; i++) { let candidatePublicKey = keys[i]; - console.log("FOR CANDIDATE KEY", candidatePublicKey); + console.log('FOR CANDIDATE KEY', candidatePublicKey); let candidateKeyPairPublicKey = new PublicKey(keys[i]); if (candidatePublicKey == submitterPubkey) { - console.log("YOU CANNOT VOTE ON YOUR OWN SUBMISSIONS"); + console.log('YOU CANNOT VOTE ON YOUR OWN SUBMISSIONS'); } else { try { console.log( - "SUBMISSION VALUE TO CHECK", - values[i].submission_value + 'SUBMISSION VALUE TO CHECK', + values[i].submission_value, ); isValid = await validate(values[i].submission_value, round); console.log(`Voting ${isValid} to ${candidatePublicKey}`); @@ -239,115 +300,140 @@ class NamespaceWrapper { // check for the submissions_audit_trigger , if it exists then vote true on that otherwise do nothing const submissions_audit_trigger = taskAccountDataJSON.submissions_audit_trigger[round]; - console.log("SUBMIT AUDIT TRIGGER", submissions_audit_trigger); + console.log('SUBMIT AUDIT TRIGGER', submissions_audit_trigger); // console.log( // "CANDIDATE PUBKEY CHECK IN AUDIT TRIGGER", // submissions_audit_trigger[candidatePublicKey] // ); - if (submissions_audit_trigger && submissions_audit_trigger[candidatePublicKey]) { - 
console.log("VOTING TRUE ON AUDIT"); + if ( + submissions_audit_trigger && + submissions_audit_trigger[candidatePublicKey] + ) { + console.log('VOTING TRUE ON AUDIT'); const response = await this.auditSubmission( candidateKeyPairPublicKey, isValid, submitterAccountKeyPair, - round + round, ); - console.log("RESPONSE FROM AUDIT FUNCTION", response); + console.log('RESPONSE FROM AUDIT FUNCTION', response); } } else if (isValid == false) { // Call auditSubmission function and isValid is passed as false - console.log("RAISING AUDIT / VOTING FALSE"); + console.log('RAISING AUDIT / VOTING FALSE'); const response = await this.auditSubmission( candidateKeyPairPublicKey, isValid, submitterAccountKeyPair, - round + round, ); - console.log("RESPONSE FROM AUDIT FUNCTION", response); + console.log('RESPONSE FROM AUDIT FUNCTION', response); } } catch (err) { - console.log("ERROR IN ELSE CONDITION", err); + console.log('ERROR IN ELSE CONDITION', err); } } } } } - async validateAndVoteOnDistributionList(validateDistribution, round) { // await this.checkVoteStatus(); - console.log("******/ IN VOTING OF DISTRIBUTION LIST /******"); + console.log('******/ IN VOTING OF DISTRIBUTION LIST /******'); const taskAccountDataJSON = await this.getTaskState(); console.log( - "Fetching the Distribution submissions of N - 2 round", - taskAccountDataJSON.distribution_rewards_submission[round] + 'Fetching the Distribution submissions of N - 2 round', + taskAccountDataJSON.distribution_rewards_submission[round], ); const submissions = taskAccountDataJSON.distribution_rewards_submission[round]; if (submissions == null) { - console.log("No submisssions found in N-2 round"); - return "No submisssions found in N-2 round"; + console.log('No submisssions found in N-2 round'); + return 'No submisssions found in N-2 round'; } else { const keys = Object.keys(submissions); const values = Object.values(submissions); const size = values.length; - console.log("Distribution Submissions from last round: ", keys, values, size); - let isValid + console.log( + 'Distribution Submissions from last round: ', + keys, + values, + size, + ); + let isValid; const submitterAccountKeyPair = await this.getSubmitterAccount(); const submitterPubkey = submitterAccountKeyPair.publicKey.toBase58(); for (let i = 0; i < size; i++) { let candidatePublicKey = keys[i]; - console.log("FOR CANDIDATE KEY", candidatePublicKey); + console.log('FOR CANDIDATE KEY', candidatePublicKey); let candidateKeyPairPublicKey = new PublicKey(keys[i]); if (candidatePublicKey == submitterPubkey) { - console.log("YOU CANNOT VOTE ON YOUR OWN DISTRIBUTION SUBMISSIONS"); + console.log('YOU CANNOT VOTE ON YOUR OWN DISTRIBUTION SUBMISSIONS'); } else { try { console.log( - "DISTRIBUTION SUBMISSION VALUE TO CHECK", - values[i].submission_value + 'DISTRIBUTION SUBMISSION VALUE TO CHECK', + values[i].submission_value, + ); + isValid = await validateDistribution( + values[i].submission_value, + round, ); - isValid = await validateDistribution(values[i].submission_value, round); console.log(`Voting ${isValid} to ${candidatePublicKey}`); if (isValid) { // check for the submissions_audit_trigger , if it exists then vote true on that otherwise do nothing const distributions_audit_trigger = taskAccountDataJSON.distributions_audit_trigger[round]; - console.log("SUBMIT DISTRIBUTION AUDIT TRIGGER", distributions_audit_trigger); + console.log( + 'SUBMIT DISTRIBUTION AUDIT TRIGGER', + distributions_audit_trigger, + ); // console.log( // "CANDIDATE PUBKEY CHECK IN AUDIT TRIGGER", // 
distributions_audit_trigger[candidatePublicKey] // ); - if (distributions_audit_trigger && distributions_audit_trigger[candidatePublicKey]) { - console.log("VOTING TRUE ON DISTRIBUTION AUDIT"); + if ( + distributions_audit_trigger && + distributions_audit_trigger[candidatePublicKey] + ) { + console.log('VOTING TRUE ON DISTRIBUTION AUDIT'); const response = await this.distributionListAuditSubmission( candidateKeyPairPublicKey, isValid, submitterAccountKeyPair, - round + round, + ); + console.log( + 'RESPONSE FROM DISTRIBUTION AUDIT FUNCTION', + response, ); - console.log("RESPONSE FROM DISTRIBUTION AUDIT FUNCTION", response); } } else if (isValid == false) { // Call auditSubmission function and isValid is passed as false - console.log("RAISING AUDIT / VOTING FALSE ON DISTRIBUTION"); + console.log('RAISING AUDIT / VOTING FALSE ON DISTRIBUTION'); const response = await this.distributionListAuditSubmission( candidateKeyPairPublicKey, isValid, submitterAccountKeyPair, - round + round, + ); + console.log( + 'RESPONSE FROM DISTRIBUTION AUDIT FUNCTION', + response, ); - console.log("RESPONSE FROM DISTRIBUTION AUDIT FUNCTION", response); } } catch (err) { - console.log("ERROR IN ELSE CONDITION FOR DISTRIBUTION", err); + console.log('ERROR IN ELSE CONDITION FOR DISTRIBUTION', err); } } } } } + async getTaskLevelDBPath() { + return await genericHandler('getTaskLevelDBPath'); + } } async function genericHandler(...args) { @@ -363,18 +449,20 @@ async function genericHandler(...args) { return null; } } catch (err) { - console.log("Error in genericHandler", err); - console.error(err.message); + console.error(`Error in genericHandler: "${args[0]}"`, err.message); console.error(err?.response?.data); - return null; + return { error: err }; } } let connection; const namespaceWrapper = new NamespaceWrapper(); -namespaceWrapper.getRpcUrl().then((rpcUrl) => { - console.log(rpcUrl, "RPC URL"); - connection = new Connection(rpcUrl, "confirmed"); -}); +if (taskNodeAdministered) { + namespaceWrapper.getRpcUrl().then(rpcUrl => { + console.log(rpcUrl, 'RPC URL'); + connection = new Connection(rpcUrl, 'confirmed'); + }); +} module.exports = { namespaceWrapper, -}; + taskNodeAdministered, +}; \ No newline at end of file diff --git a/package.json b/package.json index 91d5f9e3..46d7a651 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,10 @@ "description": "", "main": "index.js", "scripts": { - "test": "echo \"Error: no test specified\" && exit 1", + "test": "node test/unitTest.js", + "test:submit": "node test/test_submitLinktree.js", + "test:getdb": "node test/get_local_leveldb.js", + "status": "node test/check_task-status.js", "start": "node index.js", "webpack": "webpack", "webpack:prod": "webpack --mode production" @@ -14,8 +17,11 @@ "dependencies": { "@_koi/web3.js": "^0.0.6", "@_koii/create-task-cli": "^0.1.12", + "@solana/web3.js": "^1.74.0", "axios": "^0.27.2", "express": "^4.18.1", + "leveldown": "^6.1.1", + "levelup": "^5.1.1", "node-cron": "^3.0.2", "request": "^2.88.2", "web3.storage": "^4.4.0" diff --git a/routes.js b/routes.js new file mode 100644 index 00000000..bda64104 --- /dev/null +++ b/routes.js @@ -0,0 +1,133 @@ +const express = require('express'); +const router = express.Router(); +const db = require('./db_model'); +const fs = require('fs'); +const { namespaceWrapper } = require('./namespaceWrapper'); + +// Middleware to log incoming requests +router.use((req, res, next) => { + console.log(`Incoming ${req.method} request to ${req.originalUrl}`); + next(); +}); + + + + router.get('/taskState', 
async (req, res) => { + const state = await namespaceWrapper.getTaskState(); + console.log("TASK STATE", state); + + res.status(200).json({ taskState: state }) + }) + + // API to register the linktree + router.post('/linktree', async (req, res) => { + const linktree = req.body.payload; + // Check req.body + if (!linktree) { + res.status(400).json({ error: 'Invalid request' }); + return; + } else { + console.log(linktree); + } + + // Use the code below to sign the data payload + let signature = linktree.signature; + let pubkey = linktree.publicKey + + let proof = { + publicKey: pubkey, + signature: signature, + } + console.log('Check Proof:', proof); + // use fs to write the linktree and proof to a file + if (!fs.existsSync('./Linktree')) fs.mkdirSync('./Linktree'); + fs.writeFileSync("./Linktree/" + `linktree_${pubkey}.json`, JSON.stringify(linktree)); + // fs.writeFileSync('proof.json', JSON.stringify(proof)); + await db.setLinktree(pubkey, linktree); + + const round = await namespaceWrapper.getRound(); + // TEST For only testing purposes: + // const round = 1000 + + let proofs = await db.getProofs(pubkey); + proofs = JSON.parse(proofs || '[]'); + proofs.push(proof); + console.log(`${pubkey} Proofs: `, proofs); + await db.setProofs(pubkey, proofs); + + return res.status(200).send({message: 'Proof and linktree registered successfully'}); + }); + + router.get("/logs", async (req, res) => { + const logs = fs.readFileSync("./namespace/logs.txt", "utf8") + res.status(200).send(logs); + }) + // endpoint for specific linktree data by publicKey + router.get('/linktree/get', async (req, res) => { + const log = "Nothing to see here, check /:publicKey to get the linktree" + return res.status(200).send(log); + }); + router.get('/linktree/get/:publicKey', async (req, res) => { + const { publicKey } = req.params; + let linktree = await db.getLinktree(publicKey); + linktree = linktree || '[]'; + return res.status(200).send(linktree); + }); + router.get('/linktree/all', async (req, res) => { + linktree = await db.getAllLinktrees() || '[]'; + return res.status(200).send(linktree); + } + + ); + + router.get('/linktree/list', async (req, res) => { + linktree = await db.getAllLinktrees(true) || '[]'; + return res.status(200).send(linktree); + } + ); + router.get('/proofs/all', async (req, res) => { + linktree = await db.getAllProofs() || '[]'; + return res.status(200).send(linktree); + } + ); + router.get('/proofs/get/:publicKey', async (req, res) => { + const { publicKey } = req.params; + let proof = await db.getProofs(publicKey); + proof = proof || '[]'; + return res.status(200).send(proof); + } + ); + router.get('/node-proof/all', async (req, res) => { + linktree = await db.getAllNodeProofCids() || '[]'; + return res.status(200).send(linktree); + }); + router.get('/node-proof/:round', async (req, res) => { + const { round } = req.params; + let nodeproof = await db.getNodeProofCid(round) || '[]'; + return res.status(200).send(nodeproof); + }); + router.get('/authlist/get/:publicKey', async (req, res) => { + const { publicKey } = req.params; + let authlist = await db.getAuthList(publicKey); + authlist = authlist || '[]'; + return res.status(200).send(authlist); + }); + router.get('/authlist/list', async (req, res) => { + authlist = await db.getAllAuthLists(false) || '[]'; + authlist.forEach((authuser) => { + authuser = authuser.toString().split("auth_list:")[0] + }); + return res.status(200).send(authlist); + }); + router.get('/nodeurl', async (req, res) => { + const nodeUrlList = await 
namespaceWrapper.getNodes(); + return res.status(200).send(nodeUrlList); + }); + // router.post('/register-authlist', async (req, res) => { + // const pubkey = req.body.pubkey; + // await db.setAuthList(pubkey); + // return res.status(200).send({message: 'Authlist registered successfully'}); + // } + // ) + +module.exports = router; \ No newline at end of file diff --git a/test/check_task-status.js b/test/check_task-status.js new file mode 100644 index 00000000..7fde847e --- /dev/null +++ b/test/check_task-status.js @@ -0,0 +1,10 @@ +const { Connection, PublicKey } = require('@_koi/web3.js'); +async function main() { + const connection = new Connection('https://k2-devnet.koii.live'); + const accountInfo = await connection.getAccountInfo( + new PublicKey('HjWJmb2gcwwm99VhyNVJZir3ToAJTfUB4j7buWnMMUEP'), + ); + console.log(JSON.parse(accountInfo.data+"")); +} + +main(); \ No newline at end of file diff --git a/test/test_cidValidation.js b/test/test_cidValidation.js new file mode 100644 index 00000000..90ee3e3c --- /dev/null +++ b/test/test_cidValidation.js @@ -0,0 +1,93 @@ +const dataFromCid = require('../helpers/dataFromCid'); +const nacl = require('tweetnacl'); +const db = require('../db_model'); +const bs58 = require('bs58'); + +let submission_value = "bafybeig5322lknop4u6m4p26jd4bcp7rdbwkuqy3ifeytqrmo2ogia5kwe" +let round = 1000 +async function test_cidValidation(submission_value) { + console.log('******/ TEST Linktree CID VALIDATION Task FUNCTION /******'); + const outputraw = await dataFromCid(submission_value); + const output = outputraw.data; + + const proofs_list_object = output.proofs; + console.log('RESPONSE DATA', proofs_list_object); + const publicKey = output.node_publicKey; + console.log('PUBLIC KEY', publicKey); + const signature = output.node_signature; + console.log('SIGNATURE', signature); + + let isNode = await verifyNode(proofs_list_object, signature, publicKey); + console.log('IS NODE True?', isNode); + + const AuthUserList = []; + let isLinktree = await verifyLinktree(proofs_list_object, AuthUserList); + console.log('Authenticated Users List:', AuthUserList); + console.log('IS LINKTREE True?', isLinktree); + + if (isNode && isLinktree) return true; // if both are true, return true + else return false; // if one of them is false, return false +} + +async function verifyLinktree(proofs_list_object, AuthUserList) { + let allSignaturesValid = true; + for (const proofs of proofs_list_object) { + console.log('proofs.value.publicKey', proofs.value[0].publicKey); + const linktree_object = await db.getLinktree(proofs.value[0].publicKey); + console.log('LINKTREE OBJECT', linktree_object); + const messageUint8Array = new Uint8Array( + Buffer.from(JSON.stringify(linktree_object.data)), + ); + const signature = proofs.value[0].signature; + console.log('SIGNATURE', signature); + const publicKey = proofs.value[0].publicKey; + console.log('PUBLIC KEY', publicKey); + const signatureUint8Array = bs58.decode(signature); + const publicKeyUint8Array = bs58.decode(publicKey); + + // verify the linktree signature + const isSignatureValid = await verifySignature( + messageUint8Array, + signatureUint8Array, + publicKeyUint8Array, + ); + console.log('IS SIGNATURE VALID', isSignatureValid); + + if (isSignatureValid) { + AuthUserList.push(publicKey); + } else { + allSignaturesValid = false; + } + } + return allSignaturesValid; +} + +async function verifyNode(proofs_list_object, signature, publicKey) { + const messageUint8Array = new Uint8Array( + Buffer.from(JSON.stringify(proofs_list_object)), + 
); + const signatureUint8Array = bs58.decode(signature); + const publicKeyUint8Array = bs58.decode(publicKey); + + if (!proofs_list_object || !signature || !publicKey) { + console.error('No data received from web3.storage'); + return false; + } + + // verify the node signature + const isSignatureValid = await verifySignature( + messageUint8Array, + signatureUint8Array, + publicKeyUint8Array, + ); + + return isSignatureValid; +} + +async function verifySignature(message, signature, publicKey) { + return nacl.sign.detached.verify(message, signature, publicKey); +} + +module.exports = test_cidValidation; + +test_cidValidation(submission_value); diff --git a/test/test_dbmodel.js b/test/test_dbmodel.js new file mode 100644 index 00000000..0a64b60a --- /dev/null +++ b/test/test_dbmodel.js @@ -0,0 +1,54 @@ +const dbmodel = require('../db_model'); + +const PublicKey = "test-pubkey1" + +async function testdb() { +const round = 1000; +const pubkey = PublicKey; + + // get linktree + // let linktree = await dbmodel.getLinktree(PublicKey); + // console.log(linktree); + + // get all linktrees + // await dbmodel.getAllLinktrees(); + + // set linktree + // let linktree2 = { + // "name": "test1", + // "description": "test1", + // "avatar": "test1", + // "links": [ + // { + // "name": "test1", + // "url": "test1" + // } + // ] + // } + // await dbmodel.setLinktree(PublicKey, linktree2); + + // set node proofs + // let cid = "testcid" + // await dbmodel.setNodeProofCid(round, cid); + + // get node proofs + // let nodeProofs = await dbmodel.getNodeProofCid(round); + // console.log(nodeProofs); + + // set proofs + // let proofs = { + // publicKey: "test-pubkey1", + // signature: "test-signature1", + // } + // await dbmodel.setProofs(pubkey, proofs); + + // get proofs + // let proofs = await dbmodel.getProofs(round); + // console.log(proofs); + + // get all proofs + // await dbmodel.getAllProofs(); + +} + +testdb() \ No newline at end of file diff --git a/test/test_docker_submitlinktree.js b/test/test_docker_submitlinktree.js new file mode 100644 index 00000000..73376a34 --- /dev/null +++ b/test/test_docker_submitlinktree.js @@ -0,0 +1,71 @@ +const {default: axios} = require('axios'); +const {v4: uuidv4} = require('uuid'); +const bs58 = require('bs58'); +const nacl = require('tweetnacl'); +const fs = require("fs") +const solanaWeb3 = require('@solana/web3.js'); +const crypto = require('crypto'); + +// This test submits linktrees from differnet publicKey to the service and stored in localdb +async function main() { +try { + const keyPair = nacl.sign.keyPair(); + const publicKey = keyPair.publicKey; + const privateKey = keyPair.secretKey; + + const data = { + uuid: uuidv4(), + linktree: [ + { + key: 'official', + label: 'Official Website', + redirectUrl: 'https://spheron.network/', + }, + { + key: 'twitter', + label: 'Twitter', + redirectUrl: 'https://twitter.com/blockchainbalak', + }, + { + key: 'github', + label: 'GitHub', + redirectUrl: 'https://github.com/spheronFdn/', + }, + ], + timestamp: Date.now(), + }; + + const messageUint8Array = new Uint8Array( + Buffer.from(JSON.stringify(data)), + ); + const signedMessage = nacl.sign(messageUint8Array, privateKey); + const signature = signedMessage.slice(0, nacl.sign.signatureLength); + const payload = { + data, + publicKey: bs58.encode(publicKey), + signature: bs58.encode(signature), + }; + + + // Check payload + // console.log(payload); + + await axios + .post('https://k2-tasknet-ports-2.koii.live/task/HjWJmb2gcwwm99VhyNVJZir3ToAJTfUB4j7buWnMMUEP/linktree', 
{payload}) + .then((e) => { + if (e.status != 200) { + console.log(e); + } + console.log(e.data); + }) + .catch((e) => { + console.error(e); + }); +} catch (e) { + console.error(e) +} +} + +main(); + +module.exports = main; \ No newline at end of file diff --git a/test/test_endpoint.js b/test/test_endpoint.js new file mode 100644 index 00000000..e6082df7 --- /dev/null +++ b/test/test_endpoint.js @@ -0,0 +1,20 @@ +const {default: axios} = require('axios'); +const {v4: uuidv4} = require('uuid'); +const bs58 = require('bs58'); +const nacl = require('tweetnacl'); +const fs = require("fs") +try { + axios + .get('http://localhost:10000/get-alllinktree') + .then((e) => { + if (e.status != 200) { + console.log(e); + } + console.log(e.data); + }) + .catch((e) => { + console.error(e); + }); +} catch (e) { + console.error(e) +} \ No newline at end of file diff --git a/test/test_mergedData.js b/test/test_mergedData.js new file mode 100644 index 00000000..17014e4b --- /dev/null +++ b/test/test_mergedData.js @@ -0,0 +1,99 @@ +const dataFromCid = require("../helpers/dataFromCid"); + +async function mergedData() { + // Write the logic to fetch the submission values here and return the cid string + + // fetching round number to store work accordingly + + console.log('***********IN FETCH SUBMISSION**************'); + // The code below shows how you can fetch your stored value from level DB + let cid = 'bafybeiawmee7fohpdm7po7txii22pawjvzy374fhppdruvcckjowxs74ay'; + console.log('Linktree CID', cid); + + // fetch the cid data from IPFS + const outputraw = await dataFromCid(cid); + const output = outputraw.data; + const linktrees_list_object = output.data; + console.log('RESPONSE DATA', linktrees_list_object); + + // compare the linktrees_list_object with the data stored in levelDB + const linktrees_list_object_local = [{ + data: { + uuid: '202400b2-7c8f-420d-8215-7cf0e53dfd76', + linktree: [ + { + key: 'New data', + label: 'New data', + redirectUrl: 'New data', + }, + { + key: 'twitter', + label: 'Twitter', + redirectUrl: 'https://twitter.com/blockchainbalak', + }, + { + key: 'github', + label: 'GitHub', + redirectUrl: 'https://github.com/spheronFdn/', + }, + ], + timestamp: 1680805628220, + }, + publicKey: '7Se5mr1WyfzvXvNPu4f8Ck8WxeByACX3pfuxGQsMgsz5', + signature: + '5LQ8NBP9SFy2N9ePdkUrfqR1P6cyqLP2HjCDxcxYQN9ZxAdNQuH43oQ1MH3HtiDKMUKmqkNkZunkRHkLfg8VJVoZ', + }, + {data: { + uuid: '202400b2-7c8f-420d-8215-7cf0e53dfd76', + linktree: [ + { + key: 'New data', + label: 'New data', + redirectUrl: 'New data', + }, + { + key: 'twitter', + label: 'Twitter', + redirectUrl: 'https://twitter.com/blockchainbalak', + }, + { + key: 'github', + label: 'GitHub', + redirectUrl: 'https://github.com/spheronFdn/', + }, + ], + timestamp: 1680805628220, + }, + publicKey: 'newpublickey', + signature: + '5LQ8NBP9SFy2N9ePdkUrfqR1P6cyqLP2HjCDxcxYQN9ZxAdNQuH43oQ1MH3HtiDKMUKmqkNkZunkRHkLfg8VJVoZ', +}]; + + // if the same key is present in both the objects, the value from the first object will be taken + const mergedData = []; + + linktrees_list_object.forEach((itemCID) => { + // Check if an item with the same publicKey exists in linktrees_list_object_local + const matchingItemIndex = linktrees_list_object_local.findIndex((itemLocal) => itemLocal.publicKey === itemCID.publicKey); + if (matchingItemIndex >= 0) { + // If a matching item is found, compare timestamps + const matchingItemLocal = linktrees_list_object_local[matchingItemIndex]; + if (matchingItemLocal.data.timestamp > itemCID.data.timestamp) { + 
mergedData.push(matchingItemLocal); + // Remove the matching item from linktrees_list_object_local + linktrees_list_object_local.splice(matchingItemIndex, 1); + } else { + mergedData.push(itemCID); + } + } else { + mergedData.push(itemCID); + } + }); + + mergedData.push(...linktrees_list_object_local); + + console.log('mergedData', mergedData); + console.log('mergedData', mergedData[0].data.linktree); +} + +mergedData(); diff --git a/test/test_nacl.js b/test/test_nacl.js new file mode 100644 index 00000000..d1c66b64 --- /dev/null +++ b/test/test_nacl.js @@ -0,0 +1,55 @@ +// test nacl verified + +const nacl = require('tweetnacl'); +const bs58 = require('bs58'); + +async function test_main() { + const submission_value = await generateAndSubmitDistributionList(); + await validate(submission_value); +} + +async function generateAndSubmitDistributionList() { + const keyPair = nacl.sign.keyPair(); + const publicKey = keyPair.publicKey; + const privateKey = keyPair.secretKey; + + const message = { + data: "data", + publicKey: '7AwybFMYogGa8LJ3n9i8QthUs6ybEcanC8UPejM76U7h', + signature: 'P6McSGFMniTdaH5546b8b1xuL91UtjxS9RnXMxBcg8ewuvKuFwijqJHH9BSZnEnqs1niE1xx7DreRVCNqK4ZJSE' + }; + const messageUint8Array = new Uint8Array(Buffer.from(JSON.stringify(message))); + + const signedMessage = nacl.sign(messageUint8Array, privateKey); + const signature = signedMessage.slice(0, nacl.sign.signatureLength); + + const submission_value = { + data: message, + publicKey: bs58.encode(publicKey), + signature: bs58.encode(signature), + } + return submission_value +} + +async function validate(submission_value) { + const output = submission_value + const message = output.data; + console.log("RESPONSE DATA", message); + const publicKey = output.publicKey; + console.log("PUBLIC KEY", publicKey); + const signature = output.signature; + console.log("SIGNATURE", signature); + const messageUint8Array = new Uint8Array(Buffer.from(JSON.stringify(message))); + const signatureUint8Array = bs58.decode(signature); + const publicKeyUint8Array = bs58.decode(publicKey); + + const isSignatureValid = await verifySignature(messageUint8Array, signatureUint8Array, publicKeyUint8Array); + console.log(`Is the signature valid? 
${isSignatureValid}`); +} + +async function verifySignature(message, signature, publicKey) { + return nacl.sign.detached.verify(message, signature, publicKey); +} + +test_main(); + diff --git a/test/test_signature.js b/test/test_signature.js new file mode 100644 index 00000000..e7c0548a --- /dev/null +++ b/test/test_signature.js @@ -0,0 +1,19 @@ +const nacl = require('tweetnacl'); +const {namespaceWrapper} = require('../namespaceWrapper'); + +async function main() { +const keypair = await namespaceWrapper.getSubmitterAccount(); + +// Generate a signature +const message = 'Hello, Solana!'; +const messageBuffer = Buffer.from(message, 'utf8'); +const signature = nacl.sign.detached(messageBuffer, keypair.secretKey); + +console.log('Signature:', Buffer.from(signature).toString('hex')); + +// Verify the signature +const isValid = nacl.sign.detached.verify(messageBuffer, signature, keypair.publicKey); +console.log('Is the signature valid?', isValid); +} + +main() \ No newline at end of file diff --git a/test/test_submitLinktree.js b/test/test_submitLinktree.js new file mode 100644 index 00000000..1088c9fb --- /dev/null +++ b/test/test_submitLinktree.js @@ -0,0 +1,73 @@ +const {default: axios} = require('axios'); +const {v4: uuidv4} = require('uuid'); +const bs58 = require('bs58'); +const nacl = require('tweetnacl'); +const fs = require("fs") +const solanaWeb3 = require('@solana/web3.js'); +const crypto = require('crypto'); + +// This test submits linktrees from differnet publicKey to the service and stored in localdb +async function main() { +try { + const keyPair = nacl.sign.keyPair(); + const publicKey = keyPair.publicKey; + const privateKey = keyPair.secretKey; + // const {publicKey, secretKey} = nacl.sign.keyPair.fromSecretKey( + // new Uint8Array(JSON.parse(fs.readFileSync("./test_wallet.json", 'utf-8'))) + // ); + console.log('publicKey', bs58.encode(publicKey)); + const data = { + uuid: uuidv4(), + linktree: [ + { + key: 'official', + label: 'Official Website', + redirectUrl: 'https://spheron.network/', + }, + { + key: 'twitter', + label: 'Twitter', + redirectUrl: 'https://twitter.com/blockchainbalak', + }, + { + key: 'github', + label: 'GitHub', + redirectUrl: 'https://github.com/spheronFdn/', + }, + ], + timestamp: Date.now(), + } + const messageUint8Array = new Uint8Array( + Buffer.from(JSON.stringify(data)), + ); + const signedMessage = nacl.sign(messageUint8Array, privateKey); + const signature = signedMessage.slice(0, nacl.sign.signatureLength); + const payload = { + data, + publicKey: bs58.encode(publicKey), + signature: bs58.encode(signature), + }; + + // Check payload + // console.log(payload); + + await axios + .post('http://localhost:10000/linktree', {payload}) + .then((e) => { + if (e.status != 200) { + console.log(e); + } + console.log(e.data); + }) + .catch((e) => { + console.error(e); + }); + } + catch (e) { + console.error(e); + } +} + +main(); + +module.exports = main; \ No newline at end of file diff --git a/test/test_wallet.json b/test/test_wallet.json new file mode 100644 index 00000000..bfc5eba2 --- /dev/null +++ b/test/test_wallet.json @@ -0,0 +1 @@ +[158,15,47,49,79,77,203,210,130,53,57,104,184,70,233,89,56,199,75,85,60,200,254,207,246,106,71,37,238,238,120,194,243,101,151,29,124,180,174,229,189,224,88,5,141,139,110,195,51,40,151,44,14,112,71,128,87,69,149,204,80,0,132,162] diff --git a/test/unitTest.js b/test/unitTest.js new file mode 100644 index 00000000..4db6affd --- /dev/null +++ b/test/unitTest.js @@ -0,0 +1,88 @@ +const coreLogic = require('../coreLogic'); 
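+// Exercises coreLogic with a hard-coded dummy task state: the vote is stubbed to true, then generateDistributionList and validateDistribution run for round 1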
+const index = require('../index'); + +async function test_coreLogic() { + // await coreLogic.task(); + + // const submission = await coreLogic.fetchSubmission(); + + // TEST hardcode the submission + // let submission= "bafybeiaipp6owksgigqx73putgxr7qfiuly32isubrqjc4meqwty6xu5xa" + // console.log('SUBMISSION', submission); + + // const vote = await coreLogic.validateNode(submission, 1000); + +let vote = true + console.log('VOTE', vote); + + + const _dummyTaskState = { + stake_list: { + '2NstaKU4kif7uytmS2PQi9P5M5bDLYSF2dhUNFhJbxHL': 20000000000, + '2NstaKU4kif7uytmS2PQi9P5M5bDLYSF2dhUNFhJbxHH': 10000000000, + }, + bounty_amount_per_round: 1000000000, + + submissions: { + 1: { + '2NstaKU4kif7uytmS2PQi9P5M5bDLYSF2dhUNFhJbxHL': { + submission_value: '8164bb07ee54172a184bf35f267bc3f0052a90cd', + slot: 1889700, + round: 1, + }, + '2NstaKU4kif7uytmS2PQi9P5M5bDLYSF2dhUNFhJbxHH': { + submission_value: '8164bb07ee54172a184bf35f267bc3f0052a90cc', + slot: 1890002, + round: 1, + }, + }, + }, + submissions_audit_trigger: { + 1: { + // round number + '2NstaKU4kif7uytmS2PQi9P5M5bDLYSF2dhUNFhJbxHL': { + // Data Submitter (send data to K2) + trigger_by: '2NstaKU4kif7uytmS2PQi9P5M5bDLYSF2dhUNFhJbxHH', // Audit trigger + slot: 1890002, + votes: [ + { + is_valid: false, // Submission is invalid(Slashed) + voter: '2NstaKU4kif7uytmS2PQi9P5M5bDLYSF2dhUNFhJbxHZ', // Voter + slot: 1890003, + }, + ], + + }, + '2NstaKU4kif7uytmS2PQi9P5M5bDLYSF2dhUNFhJbxHH': { + // Data Submitter (send data to K2) + trigger_by: '2NstaKU4kif7uytmS2PQi9P5M5bDLYSF2dhUNFhJbxHL', // Audit trigger + slot: 1890002, + votes: [ + { + is_valid: true, // Submission is valid + voter: '2NstaKU4kif7uytmS2PQi9P5M5bDLYSF2dhUNFhJbxHZ', // Voter + slot: 1890003, + + }] + } + } + }, + }; + if (vote == true) { + console.log('Submission is valid, generating distribution list'); + const distributionList = await coreLogic.generateDistributionList( + 1, + _dummyTaskState, + ); + await coreLogic.validateDistribution( + null, + 1, + distributionList, + _dummyTaskState, + ); + } else { + console.log('Submission is invalid, not generating distribution list'); + } +} + +test_coreLogic(); diff --git a/webpack.config.js b/webpack.config.js index 55585067..dc5f0a1e 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -10,5 +10,8 @@ module.exports={ }, stats:{ moduleTrace:false + }, + node:{ + __dirname: true } } \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index b8e0e278..f13b103e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -217,11 +217,26 @@ resolved "https://registry.npmjs.org/@noble/ed25519/-/ed25519-1.7.1.tgz" integrity sha512-Rk4SkJFaXZiznFyC/t77Q0NKS4FL7TLJJsVG2V2oiEq3kJVeTdxysEe/yRWSpnWMe808XRDJ+VFh5pt/FN5plw== +"@noble/ed25519@^1.7.0": + version "1.7.3" + resolved "https://registry.yarnpkg.com/@noble/ed25519/-/ed25519-1.7.3.tgz#57e1677bf6885354b466c38e2b620c62f45a7123" + integrity sha512-iR8GBkDt0Q3GyaVcIu7mSsVIqnFbkbRzGLWlvhwunacoLwt4J3swfKhfaM6rN6WY+TBGoYT1GtT1mIh2/jGbRQ== + +"@noble/hashes@^1.1.2": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.3.0.tgz#085fd70f6d7d9d109671090ccae1d3bec62554a1" + integrity sha512-ilHEACi9DwqJB0pw7kv+Apvh50jiiSyR/cQ3y4W7lOR5mhvn/50FLUfsnfJz0BDZtl/RR16kXvptiv6q1msYZg== + "@noble/secp256k1@^1.3.0": version "1.7.0" resolved "https://registry.npmjs.org/@noble/secp256k1/-/secp256k1-1.7.0.tgz" integrity sha512-kbacwGSsH/CTout0ZnZWxnW1B+jH/7r/WAAKLBtrRJ/+CUH7lgmQzl3GTrQua3SGKWNSDsS6lmjnDpIJ5Dxyaw== +"@noble/secp256k1@^1.6.3": + version "1.7.1" + resolved 
"https://registry.yarnpkg.com/@noble/secp256k1/-/secp256k1-1.7.1.tgz#b251c70f824ce3ca7f8dc3df08d58f005cc0507c" + integrity sha512-hOUk6AyBFmqVrv7k5WAw/LpszxVbj9gGN4JRkIX52fdFAj1UA61KXmZDvqVEm+pOyec3+fIeZB02LYa/pWOArw== + "@nodelib/fs.scandir@2.1.5": version "2.1.5" resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" @@ -310,6 +325,28 @@ dependencies: buffer "~6.0.3" +"@solana/web3.js@^1.74.0": + version "1.74.0" + resolved "https://registry.yarnpkg.com/@solana/web3.js/-/web3.js-1.74.0.tgz#dbcbeabb830dd7cbbcf5e31404ca79c9785cbf2d" + integrity sha512-RKZyPqizPCxmpMGfpu4fuplNZEWCrhRBjjVstv5QnAJvgln1jgOfgui+rjl1ExnqDnWKg9uaZ5jtGROH/cwabg== + dependencies: + "@babel/runtime" "^7.12.5" + "@noble/ed25519" "^1.7.0" + "@noble/hashes" "^1.1.2" + "@noble/secp256k1" "^1.6.3" + "@solana/buffer-layout" "^4.0.0" + agentkeepalive "^4.2.1" + bigint-buffer "^1.1.5" + bn.js "^5.0.0" + borsh "^0.7.0" + bs58 "^4.0.1" + buffer "6.0.1" + fast-stable-stringify "^1.0.0" + jayson "^3.4.4" + node-fetch "^2.6.7" + rpc-websockets "^7.5.1" + superstruct "^0.14.2" + "@types/bn.js@^4.11.5": version "4.11.6" resolved "https://registry.npmjs.org/@types/bn.js/-/bn.js-4.11.6.tgz" @@ -703,6 +740,18 @@ abort-controller@^3.0.0: dependencies: event-target-shim "^5.0.0" +abstract-leveldown@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-7.2.0.tgz#08d19d4e26fb5be426f7a57004851b39e1795a2e" + integrity sha512-DnhQwcFEaYsvYDnACLZhMmCWd3rkOeEvglpa4q5i/5Jlm3UIsWaxVzuXvDLFCSCWRO3yy2/+V/G7FusFgejnfQ== + dependencies: + buffer "^6.0.3" + catering "^2.0.0" + is-buffer "^2.0.5" + level-concat-iterator "^3.0.0" + level-supports "^2.0.1" + queue-microtask "^1.2.3" + accepts@~1.3.8: version "1.3.8" resolved "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz" @@ -731,6 +780,15 @@ acorn@^8.5.0, acorn@^8.7.1: resolved "https://registry.npmjs.org/acorn/-/acorn-8.8.0.tgz" integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== +agentkeepalive@^4.2.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.3.0.tgz#bb999ff07412653c1803b3ced35e50729830a255" + integrity sha512-7Epl1Blf4Sy37j4v9f9FjICCh4+KAQOyXgHEwlyBiAQLbhKdq/i2QQU3amQalS/wPhdPzDXPL5DMR5bkn+YeWg== + dependencies: + debug "^4.1.0" + depd "^2.0.0" + humanize-ms "^1.2.1" + ajv-keywords@^3.5.2: version "3.5.2" resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz" @@ -925,6 +983,20 @@ bcrypt-pbkdf@^1.0.0: dependencies: tweetnacl "^0.14.3" +bigint-buffer@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/bigint-buffer/-/bigint-buffer-1.1.5.tgz#d038f31c8e4534c1f8d0015209bf34b4fa6dd442" + integrity sha512-trfYco6AoZ+rKhKnxA0hgX0HAbVP/s808/EuDSe2JDzUnCp/xAsli35Orvk67UrTEcwuxZqYZDmfA2RXJgxVvA== + dependencies: + bindings "^1.3.0" + +bindings@^1.3.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" + integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== + dependencies: + file-uri-to-path "1.0.0" + bl@^5.0.0: version "5.0.0" resolved "https://registry.npmjs.org/bl/-/bl-5.0.0.tgz" @@ -1002,6 +1074,15 @@ borsh@^0.6.0: bs58 "^4.0.0" text-encoding-utf-8 "^1.0.2" +borsh@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/borsh/-/borsh-0.7.0.tgz#6e9560d719d86d90dc589bca60ffc8a6c51fec2a" + integrity 
sha512-CLCsZGIBCFnPtkNnieW/a8wmreDmfUtjU2m9yHrzPXIlNbqVs0AQrSatSG6vdNYUqdc83tkQi2eHfF98ubzQLA==
+  dependencies:
+    bn.js "^5.2.0"
+    bs58 "^4.0.0"
+    text-encoding-utf-8 "^1.0.2"
+
 brace-expansion@^1.1.7:
   version "1.1.11"
   resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz"
@@ -1124,6 +1205,11 @@ caseless@~0.12.0:
   resolved "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz"
   integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==
 
+catering@^2.0.0, catering@^2.1.0:
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/catering/-/catering-2.1.1.tgz#66acba06ed5ee28d5286133982a927de9a04b510"
+  integrity sha512-K7Qy8O9p76sL3/3m7/zLKbRkyOlSZAgzEaLhyj2mXS8PsCud2Eo4hAb8aLtZqHh0QGqLcb9dlJSu6lHRVENm1w==
+
 cborg@^1.3.3, cborg@^1.5.4, cborg@^1.6.0, cborg@^1.8.0, cborg@^1.9.4:
   version "1.9.5"
   resolved "https://registry.npmjs.org/cborg/-/cborg-1.9.5.tgz"
@@ -1307,7 +1393,7 @@ debug@^3.2.7:
   dependencies:
     ms "^2.1.1"
 
-debug@^4.0.1, debug@^4.1.1, debug@^4.2.0, debug@^4.3.1:
+debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.2.0, debug@^4.3.1:
   version "4.3.4"
   resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz"
   integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
@@ -1339,6 +1425,14 @@ defaults@^1.0.3:
   dependencies:
     clone "^1.0.2"
 
+deferred-leveldown@^7.0.0:
+  version "7.0.0"
+  resolved "https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-7.0.0.tgz#39802715fda6ec06d0159a8b28bd1c7e2b1cf0bf"
+  integrity sha512-QKN8NtuS3BC6m0B8vAnBls44tX1WXAFATUsJlruyAYbZpysWV3siH6o/i3g9DCHauzodksO60bdj5NazNbjCmg==
+  dependencies:
+    abstract-leveldown "^7.2.0"
+    inherits "^2.0.3"
+
 define-properties@^1.1.3, define-properties@^1.1.4:
   version "1.1.4"
   resolved "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz"
@@ -1357,7 +1451,7 @@ delayed-stream@~1.0.0:
   resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz"
   integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==
 
-depd@2.0.0:
+depd@2.0.0, depd@^2.0.0:
   version "2.0.0"
   resolved "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz"
   integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==
@@ -1893,6 +1987,11 @@ fast-levenshtein@^2.0.6:
   resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz"
   integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==
 
+fast-stable-stringify@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/fast-stable-stringify/-/fast-stable-stringify-1.0.0.tgz#5c5543462b22aeeefd36d05b34e51c78cb86d313"
+  integrity sha512-wpYMUmFu5f00Sm0cj2pfivpmawLZ0NKdviQ4w9zJeR8JVtOpOxHmLaJuj0vxvGqMJQWyP/COUkF75/57OKyRag==
+
 fastest-levenshtein@^1.0.12:
   version "1.0.16"
   resolved "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz"
@@ -1912,6 +2011,11 @@ file-entry-cache@^6.0.1:
   dependencies:
     flat-cache "^3.0.4"
 
+file-uri-to-path@1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd"
+  integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==
+
 files-from-path@^0.2.4:
   version "0.2.6"
   resolved "https://registry.npmjs.org/files-from-path/-/files-from-path-0.2.6.tgz"
@@ -2243,6 +2347,13 @@ http-signature@~1.2.0:
     jsprim "^1.2.2"
     sshpk "^1.7.0"
 
+humanize-ms@^1.2.1:
+  version "1.2.1"
+  resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed"
+  integrity sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==
+  dependencies:
+    ms "^2.0.0"
+
 iconv-lite@0.4.24:
   version "0.4.24"
   resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz"
@@ -2536,6 +2647,11 @@ is-boolean-object@^1.1.0:
     call-bind "^1.0.2"
     has-tostringtag "^1.0.0"
 
+is-buffer@^2.0.5:
+  version "2.0.5"
+  resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191"
+  integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==
+
 is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.6:
   version "1.2.7"
   resolved "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz"
@@ -2931,6 +3047,52 @@ kleur@^3.0.3:
   resolved "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz"
   integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==
 
+level-concat-iterator@^3.0.0:
+  version "3.1.0"
+  resolved "https://registry.yarnpkg.com/level-concat-iterator/-/level-concat-iterator-3.1.0.tgz#5235b1f744bc34847ed65a50548aa88d22e881cf"
+  integrity sha512-BWRCMHBxbIqPxJ8vHOvKUsaO0v1sLYZtjN3K2iZJsRBYtp+ONsY6Jfi6hy9K3+zolgQRryhIn2NRZjZnWJ9NmQ==
+  dependencies:
+    catering "^2.1.0"
+
+level-errors@^3.0.1:
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/level-errors/-/level-errors-3.0.1.tgz#4bed48a33108cd83b0e39fdf9bbd84e96fbbef9f"
+  integrity sha512-tqTL2DxzPDzpwl0iV5+rBCv65HWbHp6eutluHNcVIftKZlQN//b6GEnZDM2CvGZvzGYMwyPtYppYnydBQd2SMQ==
+
+level-iterator-stream@^5.0.0:
+  version "5.0.0"
+  resolved "https://registry.yarnpkg.com/level-iterator-stream/-/level-iterator-stream-5.0.0.tgz#85b3438e1b4c54ce5aa8c0eb973cfb628117df9e"
+  integrity sha512-wnb1+o+CVFUDdiSMR/ZymE2prPs3cjVLlXuDeSq9Zb8o032XrabGEXcTCsBxprAtseO3qvFeGzh6406z9sOTRA==
+  dependencies:
+    inherits "^2.0.4"
+    readable-stream "^3.4.0"
+
+level-supports@^2.0.1:
+  version "2.1.0"
+  resolved "https://registry.yarnpkg.com/level-supports/-/level-supports-2.1.0.tgz#9af908d853597ecd592293b2fad124375be79c5f"
+  integrity sha512-E486g1NCjW5cF78KGPrMDRBYzPuueMZ6VBXHT6gC7A8UYWGiM14fGgp+s/L1oFfDWSPV/+SFkYCmZ0SiESkRKA==
+
+leveldown@^6.1.1:
+  version "6.1.1"
+  resolved "https://registry.yarnpkg.com/leveldown/-/leveldown-6.1.1.tgz#0f0e480fa88fd807abf94c33cb7e40966ea4b5ce"
+  integrity sha512-88c+E+Eizn4CkQOBHwqlCJaTNEjGpaEIikn1S+cINc5E9HEvJ77bqY4JY/HxT5u0caWqsc3P3DcFIKBI1vHt+A==
+  dependencies:
+    abstract-leveldown "^7.2.0"
+    napi-macros "~2.0.0"
+    node-gyp-build "^4.3.0"
+
+levelup@^5.1.1:
+  version "5.1.1"
+  resolved "https://registry.yarnpkg.com/levelup/-/levelup-5.1.1.tgz#9f99699f414ac084a3f8a28fc262a1f49cd7a52c"
+  integrity sha512-0mFCcHcEebOwsQuk00WJwjLI6oCjbBuEYdh/RaRqhjnyVlzqf41T1NnDtCedumZ56qyIh8euLFDqV1KfzTAVhg==
+  dependencies:
+    catering "^2.0.0"
+    deferred-leveldown "^7.0.0"
+    level-errors "^3.0.1"
+    level-iterator-stream "^5.0.0"
+    level-supports "^2.0.1"
+    queue-microtask "^1.2.3"
+
 levn@^0.4.1:
   version "0.4.1"
   resolved "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz"
@@ -3179,7 +3341,7 @@ ms@2.1.2:
   resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz"
   integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
 
-ms@2.1.3, ms@^2.1.1:
+ms@2.1.3, ms@^2.0.0, ms@^2.1.1:
   version "2.1.3"
   resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz"
   integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
@@ -3227,6 +3389,11 @@ nanoid@^3.0.2, nanoid@^3.1.20, nanoid@^3.1.23:
   resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz"
   integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==
 
+napi-macros@~2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b"
+  integrity sha512-A0xLykHtARfueITVDernsAWdtIMbOJgKgcluwENp3AlsKN/PloyO10HtmoqnFAQAcxPkgZN7wdfPfEd0zNGxbg==
+
 native-abort-controller@^1.0.3:
   version "1.0.4"
   resolved "https://registry.npmjs.org/native-abort-controller/-/native-abort-controller-1.0.4.tgz"
@@ -3269,6 +3436,13 @@ node-fetch@2.6.7, node-fetch@^2.6.1, "node-fetch@https://registry.npmjs.org/@ach
   resolved "https://registry.npmjs.org/@achingbrain/node-fetch/-/node-fetch-2.6.7.tgz"
   integrity sha512-iTASGs+HTFK5E4ZqcMsHmeJ4zodyq8L38lZV33jwqcBJYoUt3HjN4+ot+O9/0b+ke8ddE7UgOtVuZN/OkV19/g==
 
+node-fetch@^2.6.7:
+  version "2.6.9"
+  resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.9.tgz#7c7f744b5cc6eb5fd404e0c7a9fec630a55657e6"
+  integrity sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==
+  dependencies:
+    whatwg-url "^5.0.0"
+
 node-forge@^1.2.1:
   version "1.3.1"
   resolved "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz"
@@ -3656,7 +3830,7 @@ qs@~6.5.2:
   resolved "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz"
   integrity sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==
 
-queue-microtask@^1.2.2:
+queue-microtask@^1.2.2, queue-microtask@^1.2.3:
   version "1.2.3"
   resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz"
   integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
@@ -3890,6 +4064,19 @@ rpc-websockets@^7.4.2:
     bufferutil "^4.0.1"
     utf-8-validate "^5.0.2"
 
+rpc-websockets@^7.5.1:
+  version "7.5.1"
+  resolved "https://registry.yarnpkg.com/rpc-websockets/-/rpc-websockets-7.5.1.tgz#e0a05d525a97e7efc31a0617f093a13a2e10c401"
+  integrity sha512-kGFkeTsmd37pHPMaHIgN1LVKXMi0JD782v4Ds9ZKtLlwdTKjn+CxM9A9/gLT2LaOuEcEFGL98h1QWQtlOIdW0w==
+  dependencies:
+    "@babel/runtime" "^7.17.2"
+    eventemitter3 "^4.0.7"
+    uuid "^8.3.2"
+    ws "^8.5.0"
+  optionalDependencies:
+    bufferutil "^4.0.1"
+    utf-8-validate "^5.0.2"
+
 run-parallel@^1.1.9:
   version "1.2.0"
   resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz"
@@ -4354,6 +4541,11 @@ tough-cookie@~2.5.0:
     psl "^1.1.28"
     punycode "^2.1.1"
 
+tr46@~0.0.3:
+  version "0.0.3"
+  resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
+  integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==
+
 trim-newlines@^3.0.0:
   version "3.0.1"
   resolved "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz"
@@ -4629,6 +4821,11 @@ web3.storage@^4.4.0:
     uint8arrays "^3.0.0"
     w3name "^1.0.4"
 
+webidl-conversions@^3.0.0:
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871"
+  integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==
+
 webpack-cli@^4.5.0:
   version "4.10.0"
   resolved "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.10.0.tgz"
@@ -4690,6 +4887,14 @@ webpack@^5.28.0:
     watchpack "^2.4.0"
     webpack-sources "^3.2.3"
 
+whatwg-url@^5.0.0:
+  version "5.0.0"
+  resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d"
+  integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==
+  dependencies:
+    tr46 "~0.0.3"
+    webidl-conversions "^3.0.0"
+
 which-boxed-primitive@^1.0.2:
   version "1.0.2"
   resolved "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz"