Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/master' into tomasz-generic-aggregate-message-origin
Browse files Browse the repository at this point in the history
  • Loading branch information
tmpolaczyk committed Nov 21, 2024
2 parents 54ec73d + fa4be1c commit d0b70c3
Show file tree
Hide file tree
Showing 4 changed files with 286 additions and 0 deletions.
3 changes: 3 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions test/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
"@polkadot/types-codec": "14.3.1",
"@polkadot/util": "13.2.3",
"@polkadot/util-crypto": "13.2.3",
"@polkadot/wasm-crypto": "^7.4.1",
"@tanssi/api-augment": "workspace:*",
"@types/debug": "4.1.12",
"@types/node": "22.9.0",
Expand Down
83 changes: 83 additions & 0 deletions test/suites/smoke-test-common-all/test-data-preservers.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
import "@tanssi/api-augment";
import { describeSuite, expect, beforeAll } from "@moonwall/cli";
import { ApiPromise } from "@polkadot/api";

// Smoke tests asserting on-chain invariants of the dataPreservers pallet:
// profile deposits, assignment witnesses, and endpoint URL formats.
describeSuite({
    id: "S16",
    title: "Verify data preservers consistency",
    foundationMethods: "read_only",
    testCases: ({ context, it }) => {
        let paraApi: ApiPromise;

        beforeAll(async function () {
            paraApi = context.polkadotJs("para");
        });

        it({
            id: "C01",
            title: "all profiles should have a deposit of either 0 or value fixed in the runtime",
            test: async function () {
                // Add more if we change ProfileDeposit value. Keep previous values for profiles
                // created before the change.
                const validDeposits = [0, 11330000000000];

                const entries = await paraApi.query.dataPreservers.profiles.entries();

                for (const [key, entry] of entries) {
                    // Storage values are Option<RegisteredProfile> (see C03), so unwrap
                    // before reading fields — `.deposit` on the Option itself is undefined.
                    // NOTE(review): toNumber() throws above 2^53; fine for the deposits
                    // listed above, revisit if larger deposits are introduced.
                    const deposit = entry.unwrap().deposit.toNumber();
                    // BUG FIX: the original `expect(...)` had no assertion chained,
                    // so this check could never fail.
                    expect(validDeposits.includes(deposit), `Invalid deposit ${deposit} for profile ${key}`).to.be
                        .true;
                }
            },
        });

        it({
            id: "C02",
            title: "all assigned profile have assignement witness corresponding to request and whished para id",
            test: async function () {
                const entries = await paraApi.query.dataPreservers.profiles.entries();

                for (const [, entry] of entries) {
                    // Storage values are Option<RegisteredProfile> (see C03), so unwrap
                    // first, then use toJSON() because the checks below rely on
                    // plain-object semantics (null checks, string equality).
                    // NOTE(review): field shapes below are inferred from the original
                    // checks — confirm against the runtime metadata.
                    const record = entry.unwrap().toJSON() as {
                        assignment?: [number, string | { streamPayment?: unknown }] | null;
                        profile: {
                            paraIds: { whitelist?: number[] | null; blacklist?: number[] | null };
                            assignmentRequest: string | { streamPayment?: unknown };
                        };
                    };

                    if (record.assignment == null) {
                        continue;
                    }

                    const [para_id, witness] = record.assignment;
                    const profile = record.profile;

                    // BUG FIX: the original `expect(...)` calls had no assertion
                    // chained, so these checks never failed.
                    if (profile.paraIds.whitelist != null) {
                        expect(profile.paraIds.whitelist.includes(para_id), `para ${para_id} not whitelisted`).to.be
                            .true;
                    } else if (profile.paraIds.blacklist != null) {
                        expect(profile.paraIds.blacklist.includes(para_id), `para ${para_id} is blacklisted`).to.be
                            .false;
                    }

                    if (profile.assignmentRequest == "Free") {
                        expect(witness).to.be.eq("Free");
                    } else if (
                        typeof profile.assignmentRequest == "object" &&
                        profile.assignmentRequest.streamPayment != null
                    ) {
                        // BUG FIX: chai's `.undefined` is a property assertion, not a
                        // function — the original `.undefined()` threw at runtime.
                        expect((witness as { streamPayment?: unknown }).streamPayment).to.not.be.undefined;
                    } else {
                        // Make test fail on unknown assignment modes.
                        // This force use to update this test when we add new modes.
                        expect.fail("unknown assignment mode");
                    }
                }
            },
        });

        it({
            id: "C03",
            title: "all profiles should have valid url",
            test: async function () {
                const entries = await paraApi.query.dataPreservers.profiles.entries();

                for (const [, entry] of entries) {
                    const profile = entry.unwrap().profile;
                    // BUG FIX: the message template was missing `$` (it printed the
                    // literal text "{profile.url}"), and the expect had no assertion chained.
                    expect(isValidEndpointUrl(profile.url.toHuman()), `Invalid URL ${profile.url}`).to.be.true;
                }
            },
        });
    },
});

/**
 * Returns true when `url` starts with one of the endpoint schemes accepted
 * for data-preserver profiles: multiaddr dns4, http(s), or ws(s).
 *
 * @param url - endpoint URL in human-readable form
 * @returns whether the URL uses an accepted scheme prefix
 */
function isValidEndpointUrl(url: string): boolean {
    const prefixes = ["/dns4/", "https://", "http://", "wss://", "ws://"];

    return prefixes.some((prefix) => url.startsWith(prefix));
}
199 changes: 199 additions & 0 deletions test/suites/smoke-test-dancelight/test-babe.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,199 @@
import { beforeAll, describeSuite, expect } from "@moonwall/cli";
import { getBlockArray } from "@moonwall/util";
import { ApiPromise } from "@polkadot/api";
import { GenericExtrinsic } from "@polkadot/types";
import { FrameSystemEventRecord } from "@polkadot/types/lookup";
import { AnyTuple } from "@polkadot/types/types";
import { hexToU8a, stringToHex } from "@polkadot/util";
import { sr25519Verify } from "@polkadot/wasm-crypto";
import Bottleneck from "bottleneck";

// Observation window in milliseconds; overridable via TIME_PERIOD, default 1 hour.
const timePeriod = process.env.TIME_PERIOD ? Number(process.env.TIME_PERIOD) : 3_600_000;
// Per-hook timeout: a twelfth of the window, but never below 5 seconds.
const timeout = Math.max(Math.floor(timePeriod / 12), 5000);
// Window length as hours with two decimals, for log output only.
const hours = (timePeriod / 3_600_000).toFixed(2);

// Per-block snapshot consumed by the BABE test cases below.
// NOTE(review): fields without an annotation (blockHash, header, preHash, logs,
// authorities, accountKeys) are implicitly `any` — consider typing them.
// NOTE(review): getBlockData() in this file does not populate blockHash/header
// even though they are declared here — confirm whether they should be optional.
type BlockFilteredRecord = {
    blockNum: number;
    blockHash;
    header;
    // Hash of the header with the seal digest removed (see getPreHash).
    preHash;
    extrinsics: GenericExtrinsic<AnyTuple>[];
    events: FrameSystemEventRecord[];
    // Digest log items of the block header.
    logs;
    // Session validators active at this block.
    authorities;
    // Map from validator id (JSON form) to its session keys (JSON form).
    accountKeys;
};

// Smoke tests for BABE consensus on Dancelight: validates that the BABE
// authority set in digests matches the pallets, and that block seal
// signatures verify against the expected author's BABE key.
describeSuite({
    id: "S20",
    title: "Sample suite that only runs on Dancelight chains",
    foundationMethods: "read_only",
    testCases: ({ it, context, log }) => {
        let api: ApiPromise;
        let blockData: BlockFilteredRecord[];

        beforeAll(async () => {
            api = context.polkadotJs();

            const blockNumArray = await getBlockArray(api, timePeriod);
            log(`Collecting ${hours} hours worth of authors`);

            // Fetch everything the test cases need for one block in a single pass.
            const getBlockData = async (blockNum: number) => {
                const blockHash = await api.rpc.chain.getBlockHash(blockNum);
                const signedBlock = await api.rpc.chain.getBlock(blockHash);
                const header = signedBlock.block.header;
                const apiAt = await api.at(blockHash);
                // Pre-seal header hash: this is what the BABE seal signs.
                const preHash = getPreHash(api, header);

                // Get the session keys from all the authorities because we would need to parse the logs here to know
                // which one is the expected author.
                const authorities = await apiAt.query.session.validators();
                const accountKeys = new Map(
                    await Promise.all(
                        authorities.map(async (validator) => {
                            const nextKeys = await apiAt.query.session.nextKeys(validator);
                            // Keyed by the JSON form so lookups with toJSON()'d authors match.
                            return [validator.toJSON(), nextKeys.toJSON()];
                        })
                    )
                );

                return {
                    blockNum: blockNum,
                    preHash,
                    extrinsics: signedBlock.block.extrinsics,
                    events: await apiAt.query.system.events(),
                    logs: signedBlock.block.header.digest.logs,
                    authorities,
                    accountKeys,
                };
            };
            // Throttle RPC load: at most 5 concurrent fetches, started 100 ms apart.
            const limiter = new Bottleneck({ maxConcurrent: 5, minTime: 100 });
            blockData = await Promise.all(blockNumArray.map((num) => limiter.schedule(() => getBlockData(num))));
        }, timeout);

        it({
            id: "C01",
            title: "BABE keys are set and validators from logs match validators from pallet",
            test: async function () {
                // epochStart() returns the (previous, current) epoch start blocks;
                // index [1] is where the current epoch began (the last session change).
                const blockToCheck = (await api.query.babe.epochStart()).toJSON()[1];

                const apiAtSessionChange = await api.at(await api.rpc.chain.getBlockHash(blockToCheck));

                const digestsInSessionChange = (await apiAtSessionChange.query.system.digest()).logs;
                const filteredDigests = digestsInSessionChange.filter(
                    (log) => log.isConsensus === true && log.asConsensus[0].toHex() == stringToHex("BABE")
                );
                // Exactly one BABE consensus digest is expected at a session change.
                expect(filteredDigests.length).to.eq(1);

                // 0x01 corresponds to ConsensusLog::NextEpochData enum variant.
                expect(filteredDigests[0].asConsensus[1].toHex().startsWith("0x01")).to.be.true;

                // Assert that authorities from log == authorities from pallet
                const babeAuthoritiesFromPallet = await api.query.babe.authorities();
                // Decode the raw payload: (variant byte, authority list, randomness).
                const babeConsensusLog = api.registry.createType(
                    "(u8, Vec<(SpConsensusBabeAppPublic, u64)>, [u8; 32])",
                    filteredDigests[0].asConsensus[1].toHex()
                );

                expect(babeConsensusLog[1]).to.deep.equal(babeAuthoritiesFromPallet);

                // Get babe keys from pallet session
                const sessionValidators = await api.query.session.validators();

                const babeKeysInPalletSession = [];

                for (const account of sessionValidators) {
                    const accountKeys = await api.query.session.nextKeys(account);
                    expect(accountKeys.isSome, `Missing babe key for validator ${account.toJSON()}`).toBeTruthy();
                    babeKeysInPalletSession.push(accountKeys.unwrap().babe.toHex());
                }

                // Assert that all validators have babe keys
                // Sort both sides so the comparison is order-independent.
                const babeAuthoritiesSorted = babeAuthoritiesFromPallet.map((x) => x[0].toHex());
                babeAuthoritiesSorted.sort();
                babeKeysInPalletSession.sort();
                expect(babeKeysInPalletSession).to.deep.equal(babeAuthoritiesSorted);
            },
        });

        it({
            id: "C02",
            title: "BABE author signature valid",
            test: async function () {
                // Collect blocks whose seal signature does NOT verify against the
                // expected author's BABE key.
                const failures = blockData
                    .map(({ blockNum, preHash, logs, authorities, accountKeys }) => {
                        const babeLogs = logs.filter(
                            (log) => log.isPreRuntime === true && log.asPreRuntime[0].toHex() == stringToHex("BABE")
                        );
                        // Every block is expected to carry exactly one BABE pre-runtime digest.
                        expect(babeLogs.length).to.eq(1);

                        const babeLogEnum = api.registry.createType(
                            "SpConsensusBabeDigestsPreDigest",
                            babeLogs[0].asPreRuntime[1].toHex()
                        );

                        // Only primary and secondary-VRF pre-digests are expected here.
                        expect(babeLogEnum.isSecondaryVRF || babeLogEnum.isPrimary).toBeTruthy();
                        const babeLog = babeLogEnum.isSecondaryVRF ? babeLogEnum.asSecondaryVRF : babeLogEnum.asPrimary;

                        // Get expected author from BABE log and on chain authorities
                        const authorityIndex = babeLog.authorityIndex;
                        const orchestratorAuthorities = authorities.toJSON();
                        const expectedAuthor = orchestratorAuthorities[authorityIndex.toNumber()];

                        // Get block author signature from seal log
                        const sealLogs = logs.filter(
                            (log) => log.isSeal === true && log.asSeal[0].toHex() == stringToHex("BABE")
                        );

                        expect(sealLogs.length).to.eq(1);
                        const sealLog = api.registry.createType(
                            "PolkadotPrimitivesV7ValidatorAppSignature",
                            sealLogs[0].asSeal[1].toHex()
                        );

                        // Verify seal signature
                        // The seal signs the pre-seal header hash, not the block hash.
                        const message = hexToU8a(preHash);
                        const signature = hexToU8a(sealLog.toHex());
                        const authorKeys = accountKeys.get(expectedAuthor);
                        expect(
                            authorKeys && authorKeys.babe,
                            `Missing babe key for block author: ${expectedAuthor}`
                        ).toBeTruthy();
                        const pubKey = hexToU8a(authorKeys.babe);

                        const authorValid = sr25519Verify(signature, message, pubKey);

                        return { blockNum, expectedAuthor, authorValid };
                    })
                    .filter(({ authorValid }) => authorValid == false);

                // Log each failing block before asserting, so the output names them all.
                failures.forEach(({ blockNum, expectedAuthor }) => {
                    log(
                        `Author at block #${blockNum} should have been #${expectedAuthor.toString()}, but seal signature does not match`
                    );
                });

                expect(
                    failures.length,
                    `Please investigate blocks ${failures.map((a) => a.blockNum).join(`, `)}; authors `
                ).to.equal(0);
            },
        });
    },
});

// Given a block header, returns its preHash. This is the hash of the header before adding the seal.
// The hash of the block header after adding the seal is the block hash.
function getPreHash(api, header) {
const logsNoSeal = header.digest.logs.filter((log) => !log.isSeal);
const headerWithoutSeal = api.registry.createType("Header", {
parentHash: header.parentHash,
number: header.number,
stateRoot: header.stateRoot,
extrinsicsRoot: header.extrinsicsRoot,
digest: {
logs: logsNoSeal,
},
});
return headerWithoutSeal.hash.toHex();
}

0 comments on commit d0b70c3

Please sign in to comment.