From 27608885f39bcfc7e8efe69108f39a3353e10690 Mon Sep 17 00:00:00 2001
From: tmpolaczyk <44604217+tmpolaczyk@users.noreply.github.com>
Date: Wed, 20 Nov 2024 16:50:30 +0100
Subject: [PATCH 1/2] Add smoke tests for BABE (#749)

---
 pnpm-lock.yaml                                |   3 +
 test/package.json                             |   1 +
 .../suites/smoke-test-dancelight/test-babe.ts | 199 ++++++++++++++++++
 3 files changed, 203 insertions(+)
 create mode 100644 test/suites/smoke-test-dancelight/test-babe.ts

diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index fcd081fa3..8a000259e 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -51,6 +51,9 @@ importers:
       '@polkadot/util-crypto':
         specifier: 13.2.3
         version: 13.2.3(@polkadot/util@13.2.3)
+      '@polkadot/wasm-crypto':
+        specifier: ^7.4.1
+        version: 7.4.1(@polkadot/util@13.2.3)(@polkadot/x-randomvalues@13.2.3(@polkadot/util@13.2.3)(@polkadot/wasm-util@7.4.1(@polkadot/util@13.2.3)))
       '@tanssi/api-augment':
         specifier: workspace:*
         version: link:../typescript-api
diff --git a/test/package.json b/test/package.json
index a3b57ed31..5cbb186b6 100644
--- a/test/package.json
+++ b/test/package.json
@@ -35,6 +35,7 @@
     "@polkadot/types-codec": "14.3.1",
     "@polkadot/util": "13.2.3",
     "@polkadot/util-crypto": "13.2.3",
+    "@polkadot/wasm-crypto": "^7.4.1",
     "@tanssi/api-augment": "workspace:*",
     "@types/debug": "4.1.12",
     "@types/node": "22.9.0",
diff --git a/test/suites/smoke-test-dancelight/test-babe.ts b/test/suites/smoke-test-dancelight/test-babe.ts
new file mode 100644
index 000000000..4e52fba19
--- /dev/null
+++ b/test/suites/smoke-test-dancelight/test-babe.ts
@@ -0,0 +1,199 @@
+import { beforeAll, describeSuite, expect } from "@moonwall/cli";
+import { getBlockArray } from "@moonwall/util";
+import { ApiPromise } from "@polkadot/api";
+import { GenericExtrinsic } from "@polkadot/types";
+import { FrameSystemEventRecord } from "@polkadot/types/lookup";
+import { AnyTuple } from "@polkadot/types/types";
+import { hexToU8a, stringToHex } from "@polkadot/util";
+import { sr25519Verify } from "@polkadot/wasm-crypto";
+import Bottleneck from "bottleneck";
+
+const timePeriod = process.env.TIME_PERIOD ? Number(process.env.TIME_PERIOD) : 1 * 60 * 60 * 1000;
+const timeout = Math.max(Math.floor(timePeriod / 12), 5000);
+const hours = (timePeriod / (1000 * 60 * 60)).toFixed(2);
+
+type BlockFilteredRecord = {
+    blockNum: number;
+    blockHash: any;
+    header: any;
+    preHash: any;
+    extrinsics: GenericExtrinsic<AnyTuple>[];
+    events: FrameSystemEventRecord[];
+    logs: any;
+    authorities: any;
+    accountKeys: any;
+};
+
+describeSuite({
+    id: "S20",
+    title: "Smoke tests for BABE, only run on Dancelight chains",
+    foundationMethods: "read_only",
+    testCases: ({ it, context, log }) => {
+        let api: ApiPromise;
+        let blockData: BlockFilteredRecord[];
+
+        beforeAll(async () => {
+            api = context.polkadotJs();
+
+            const blockNumArray = await getBlockArray(api, timePeriod);
+            log(`Collecting ${hours} hours' worth of authors`);
+
+            const getBlockData = async (blockNum: number) => {
+                const blockHash = await api.rpc.chain.getBlockHash(blockNum);
+                const signedBlock = await api.rpc.chain.getBlock(blockHash);
+                const header = signedBlock.block.header;
+                const apiAt = await api.at(blockHash);
+                const preHash = getPreHash(api, header);
+
+                // Get the session keys of all the authorities, because we would otherwise
+                // need to parse the logs here to know which one is the expected author.
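+                // The resulting map is keyed by the validator's account id (serialized
+                // with toJSON) and holds that validator's full session key set, so the
+                // expected author's BABE key can be looked up when verifying the seal
+                // signature in test C02 below.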
+                const authorities = await apiAt.query.session.validators();
+                const accountKeys = new Map(
+                    await Promise.all(
+                        authorities.map(async (validator) => {
+                            const nextKeys = await apiAt.query.session.nextKeys(validator);
+                            return [validator.toJSON(), nextKeys.toJSON()];
+                        })
+                    )
+                );
+
+                return {
+                    blockNum,
+                    blockHash,
+                    header,
+                    preHash,
+                    extrinsics: signedBlock.block.extrinsics,
+                    events: await apiAt.query.system.events(),
+                    logs: signedBlock.block.header.digest.logs,
+                    authorities,
+                    accountKeys,
+                };
+            };
+            const limiter = new Bottleneck({ maxConcurrent: 5, minTime: 100 });
+            blockData = await Promise.all(blockNumArray.map((num) => limiter.schedule(() => getBlockData(num))));
+        }, timeout);
+
+        it({
+            id: "C01",
+            title: "BABE keys are set and validators from logs match validators from pallet",
+            test: async function () {
+                const blockToCheck = (await api.query.babe.epochStart()).toJSON()[1];
+
+                const apiAtSessionChange = await api.at(await api.rpc.chain.getBlockHash(blockToCheck));
+
+                const digestsInSessionChange = (await apiAtSessionChange.query.system.digest()).logs;
+                const filteredDigests = digestsInSessionChange.filter(
+                    (log) => log.isConsensus === true && log.asConsensus[0].toHex() === stringToHex("BABE")
+                );
+                expect(filteredDigests.length).to.eq(1);
+
+                // 0x01 corresponds to the ConsensusLog::NextEpochData enum variant.
+                expect(filteredDigests[0].asConsensus[1].toHex().startsWith("0x01")).to.be.true;
+
+                // Assert that the authorities from the log equal the authorities from the pallet
+                const babeAuthoritiesFromPallet = await api.query.babe.authorities();
+                const babeConsensusLog = api.registry.createType(
+                    "(u8, Vec<(SpConsensusBabeAppPublic, u64)>, [u8; 32])",
+                    filteredDigests[0].asConsensus[1].toHex()
+                );
+
+                expect(babeConsensusLog[1]).to.deep.equal(babeAuthoritiesFromPallet);
+
+                // Get the BABE keys from pallet session
+                const sessionValidators = await api.query.session.validators();
+
+                const babeKeysInPalletSession = [];
+
+                for (const account of sessionValidators) {
+                    const accountKeys = await api.query.session.nextKeys(account);
+                    expect(accountKeys.isSome, `Missing babe key for validator ${account.toJSON()}`).toBeTruthy();
+                    babeKeysInPalletSession.push(accountKeys.unwrap().babe.toHex());
+                }
+
+                // Assert that all validators have BABE keys
+                const babeAuthoritiesSorted = babeAuthoritiesFromPallet.map((x) => x[0].toHex());
+                babeAuthoritiesSorted.sort();
+                babeKeysInPalletSession.sort();
+                expect(babeKeysInPalletSession).to.deep.equal(babeAuthoritiesSorted);
+            },
+        });
+
+        it({
+            id: "C02",
+            title: "BABE author signature valid",
+            test: async function () {
+                const failures = blockData
+                    .map(({ blockNum, preHash, logs, authorities, accountKeys }) => {
+                        const babeLogs = logs.filter(
+                            (log) => log.isPreRuntime === true && log.asPreRuntime[0].toHex() === stringToHex("BABE")
+                        );
+                        expect(babeLogs.length).to.eq(1);
+
+                        const babeLogEnum = api.registry.createType(
+                            "SpConsensusBabeDigestsPreDigest",
+                            babeLogs[0].asPreRuntime[1].toHex()
+                        );
+
+                        expect(babeLogEnum.isSecondaryVRF || babeLogEnum.isPrimary).toBeTruthy();
+                        const babeLog = babeLogEnum.isSecondaryVRF ? babeLogEnum.asSecondaryVRF : babeLogEnum.asPrimary;
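+
+                        // Both Primary and SecondaryVRF pre-digests carry `authorityIndex`,
+                        // an index into the epoch's BABE authority set, which is what the
+                        // lookup below relies on; SecondaryPlain pre-digests are rejected
+                        // by the assertion above.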
+
+                        // Get the expected author from the BABE log and the on-chain authorities
+                        const authorityIndex = babeLog.authorityIndex;
+                        const authoritiesJson = authorities.toJSON();
+                        const expectedAuthor = authoritiesJson[authorityIndex.toNumber()];
+
+                        // Get the block author signature from the seal log
+                        const sealLogs = logs.filter(
+                            (log) => log.isSeal === true && log.asSeal[0].toHex() === stringToHex("BABE")
+                        );
+
+                        expect(sealLogs.length).to.eq(1);
+                        const sealLog = api.registry.createType(
+                            "PolkadotPrimitivesV7ValidatorAppSignature",
+                            sealLogs[0].asSeal[1].toHex()
+                        );
+
+                        // Verify the seal signature against the preHash
+                        const message = hexToU8a(preHash);
+                        const signature = hexToU8a(sealLog.toHex());
+                        const authorKeys = accountKeys.get(expectedAuthor);
+                        expect(
+                            authorKeys && authorKeys.babe,
+                            `Missing babe key for block author: ${expectedAuthor}`
+                        ).toBeTruthy();
+                        const pubKey = hexToU8a(authorKeys.babe);
+
+                        const authorValid = sr25519Verify(signature, message, pubKey);
+
+                        return { blockNum, expectedAuthor, authorValid };
+                    })
+                    .filter(({ authorValid }) => authorValid === false);
+
+                failures.forEach(({ blockNum, expectedAuthor }) => {
+                    log(
+                        `Seal signature at block #${blockNum} does not match the expected author ${expectedAuthor.toString()}`
+                    );
+                });
+
+                expect(
+                    failures.length,
+                    `Please investigate blocks ${failures.map((a) => a.blockNum).join(", ")}: seal signatures do not match the expected authors`
+                ).to.equal(0);
+            },
+        });
+    },
+});
+
+// Given a block header, returns its preHash. This is the hash of the header before adding the seal.
+// The hash of the block header after adding the seal is the block hash.
+function getPreHash(api, header) {
+    const logsNoSeal = header.digest.logs.filter((log) => !log.isSeal);
+    const headerWithoutSeal = api.registry.createType("Header", {
+        parentHash: header.parentHash,
+        number: header.number,
+        stateRoot: header.stateRoot,
+        extrinsicsRoot: header.extrinsicsRoot,
+        digest: {
+            logs: logsNoSeal,
+        },
+    });
+    return headerWithoutSeal.hash.toHex();
+}
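The pre-hash relationship that test C02 relies on can be sanity-checked in isolation: hashing the full header (seal included) reproduces the block hash, while hashing the seal-stripped header yields the payload the author signed. A minimal standalone sketch, assuming a reachable node at ws://127.0.0.1:9944 (the endpoint and block number are illustrative placeholders):

    import { ApiPromise, WsProvider } from "@polkadot/api";

    async function checkPreHash(blockNum: number) {
        const api = await ApiPromise.create({ provider: new WsProvider("ws://127.0.0.1:9944") });
        const blockHash = await api.rpc.chain.getBlockHash(blockNum);
        const header = await api.rpc.chain.getHeader(blockHash);
        // Re-hashing the full header (seal included) must reproduce the block hash...
        console.log(header.hash.toHex() === blockHash.toHex()); // true
        // ...while getPreHash (defined in the patch above) strips the seal first,
        // yielding the pre-seal hash that the BABE author actually signed.
        console.log(getPreHash(api, header));
        await api.disconnect();
    }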
From fa4be1c2c9910d1b930c69d4dc2ec6b218305e26 Mon Sep 17 00:00:00 2001
From: nanocryk <6422796+nanocryk@users.noreply.github.com>
Date: Thu, 21 Nov 2024 11:05:09 +0100
Subject: [PATCH 2/2] Smoke tests for Data Preservers pallet (#751)

* test data preservers deposit
* matching request and witness
* lint
* smoke test for all chains + fail on unknown payment mode
* test profile urls have expected prefix
---
 .../test-data-preservers.ts | 83 +++++++++++++++++++
 1 file changed, 83 insertions(+)
 create mode 100644 test/suites/smoke-test-common-all/test-data-preservers.ts

diff --git a/test/suites/smoke-test-common-all/test-data-preservers.ts b/test/suites/smoke-test-common-all/test-data-preservers.ts
new file mode 100644
index 000000000..f561f91c0
--- /dev/null
+++ b/test/suites/smoke-test-common-all/test-data-preservers.ts
@@ -0,0 +1,83 @@
+import "@tanssi/api-augment";
+import { describeSuite, expect, beforeAll } from "@moonwall/cli";
+import { ApiPromise } from "@polkadot/api";
+
+describeSuite({
+    id: "S16",
+    title: "Verify data preservers consistency",
+    foundationMethods: "read_only",
+    testCases: ({ context, it }) => {
+        let paraApi: ApiPromise;
+
+        beforeAll(async function () {
+            paraApi = context.polkadotJs("para");
+        });
+
+        it({
+            id: "C01",
+            title: "all profiles should have a deposit of either 0 or the value fixed in the runtime",
+            test: async function () {
+                // Add more values if the ProfileDeposit config changes. Keep the previous
+                // values for profiles created before the change.
+                const validDeposits = [0, 11330000000000];
+
+                const entries = await paraApi.query.dataPreservers.profiles.entries();
+
+                for (const [, entry] of entries) {
+                    const deposit = entry.unwrap().deposit.toNumber();
+                    expect(validDeposits.includes(deposit), `Invalid deposit ${deposit}`).to.be.true;
+                }
+            },
+        });
+
+        it({
+            id: "C02",
+            title: "all assigned profiles have an assignment witness matching the request and the wished para id",
+            test: async function () {
+                const entries = await paraApi.query.dataPreservers.profiles.entries();
+
+                for (const [, entryOpt] of entries) {
+                    const entry = entryOpt.unwrap().toJSON() as any;
+
+                    if (entry.assignment == null) {
+                        continue;
+                    }
+
+                    const [paraId, witness] = entry.assignment;
+
+                    if (entry.profile.paraIds.whitelist != null) {
+                        expect(entry.profile.paraIds.whitelist.includes(paraId)).to.be.true;
+                    } else if (entry.profile.paraIds.blacklist != null) {
+                        expect(entry.profile.paraIds.blacklist.includes(paraId)).to.be.false;
+                    }
+
+                    if (entry.profile.assignmentRequest === "Free") {
+                        expect(witness).to.be.eq("Free");
+                    } else if (entry.profile.assignmentRequest.streamPayment != null) {
+                        expect(witness.streamPayment).to.not.be.undefined;
+                    } else {
+                        // Make the test fail on unknown assignment modes.
+                        // This forces us to update this test when we add new modes.
+                        expect.fail("unknown assignment mode");
+                    }
+                }
+            },
+        });
+
+        it({
+            id: "C03",
+            title: "all profiles should have a valid url",
+            test: async function () {
+                const entries = await paraApi.query.dataPreservers.profiles.entries();
+
+                for (const [, entry] of entries) {
+                    const profile = entry.unwrap().profile;
+                    expect(isValidEndpointUrl(profile.url.toHuman()), `Invalid URL ${profile.url}`).to.be.true;
+                }
+            },
+        });
+    },
+});
+
+function isValidEndpointUrl(url: string) {
+    const prefixes = ["/dns4/", "https://", "http://", "wss://", "ws://"];
+
+    return prefixes.some((prefix) => url.startsWith(prefix));
+}
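Note that the URL check is prefix-based only. A quick illustration with hypothetical endpoint values (not taken from the patch):

    console.log(isValidEndpointUrl("wss://rpc.example.net"));            // true
    console.log(isValidEndpointUrl("/dns4/boot.example.net/tcp/443/wss")); // true
    console.log(isValidEndpointUrl("rpc.example.net"));                  // false, bare hostname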