From 71ebe36a8b393bda3fe954391118b682343739cb Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Thu, 5 Sep 2024 21:19:03 -0400 Subject: [PATCH] chore(swingset): add misc analysis tools This is a grab-bag of tools I've assembled over the last few years to analyze slogfiles for various things (mostly timing and leaks). Some in JS, some in Python. I wanted to get them off my local hard drive and into the tree where other folks could benefit from them. * monitor-slog-block-time.py : I use this on a follower node to print a one-line summary of each block from the slogfile * prune-transcripts.js : I run this on a copy of the swingstore DB to delete all the old transcript spans, since most of my analysis doesn't need them, and it reduces the size by about 90%. * follower-run-tools/ : I use these to measure size data about the original and pruned DBs. These tools are not TSC- or eslint-clean: they use @ts-nocheck and an .eslintignore to suppress complaints. --- .eslintignore | 1 + packages/SwingSet/.eslintignore | 1 + .../all-delivery-time-computrons.py | 34 + .../SwingSet/misc-tools/audit-refgraph.py | 115 ++++ packages/SwingSet/misc-tools/block-times.py | 54 ++ .../misc-tools/bootstrap-exhaust-keyids.js | 32 + .../SwingSet/misc-tools/categorize-kvstore.js | 452 ++++++++++++ .../misc-tools/computrons-for-deliveries.py | 54 ++ .../SwingSet/misc-tools/count-empty-blocks.py | 64 ++ .../SwingSet/misc-tools/delivery-summary.py | 85 +++ .../delivery-time-computrons-kernel-worker.py | 21 + .../misc-tools/delivery-time-computrons.py | 44 ++ .../SwingSet/misc-tools/elapsed-per-method.py | 40 +- packages/SwingSet/misc-tools/extract-block.py | 20 +- .../misc-tools/extract-bootstrap-block.py | 18 + .../SwingSet/misc-tools/extract-clist-db.js | 102 +++ .../misc-tools/extract-non-empty-blocks.py | 78 +++ .../SwingSet/misc-tools/find-ephemerals.js | 158 +++++ .../SwingSet/misc-tools/find-provisions.py | 39 ++ .../misc-tools/follower-run-tools/do-clist.py | 75 ++ .../misc-tools/follower-run-tools/process.sh | 71 ++ .../follower-run-tools/size-report.py | 41 ++ packages/SwingSet/misc-tools/make-tiny-vat.js | 33 + .../SwingSet/misc-tools/measure-boyd-gc.py | 4 +- .../misc-tools/monitor-slog-block-time.py | 46 +- .../misc-tools/my-build-mezzanine-db.js | 650 ++++++++++++++++++ .../SwingSet/misc-tools/parse_timestamps.py | 91 ++- packages/SwingSet/misc-tools/perc100.py | 25 + .../SwingSet/misc-tools/prune-transcripts.js | 74 ++ .../SwingSet/misc-tools/push-price-times.js | 26 + .../misc-tools/run-in-xs-supervisor.js | 69 ++ packages/SwingSet/misc-tools/run-in-xs.js | 55 ++ .../misc-tools/scan-abandoned-unreachables.py | 47 ++ .../SwingSet/misc-tools/short-block-times.py | 38 + .../misc-tools/source-file-used-by.js | 372 ++++++++++ packages/SwingSet/misc-tools/summarize-ckv.js | 11 + .../misc-tools/trades-and-writes-per-block.py | 36 + packages/SwingSet/misc-tools/zoe-notifier.js | 80 +++ packages/SwingSet/misc-tools/zoe-vat-done.js | 28 + 39 files changed, 3244 insertions(+), 40 deletions(-) create mode 100644 packages/SwingSet/.eslintignore create mode 100644 packages/SwingSet/misc-tools/all-delivery-time-computrons.py create mode 100644 packages/SwingSet/misc-tools/audit-refgraph.py create mode 100644 packages/SwingSet/misc-tools/block-times.py create mode 100644 packages/SwingSet/misc-tools/bootstrap-exhaust-keyids.js create mode 100644 packages/SwingSet/misc-tools/categorize-kvstore.js create mode 100644 packages/SwingSet/misc-tools/computrons-for-deliveries.py create mode 100644
packages/SwingSet/misc-tools/count-empty-blocks.py create mode 100644 packages/SwingSet/misc-tools/delivery-summary.py create mode 100644 packages/SwingSet/misc-tools/delivery-time-computrons-kernel-worker.py create mode 100644 packages/SwingSet/misc-tools/delivery-time-computrons.py create mode 100644 packages/SwingSet/misc-tools/extract-bootstrap-block.py create mode 100644 packages/SwingSet/misc-tools/extract-clist-db.js create mode 100644 packages/SwingSet/misc-tools/extract-non-empty-blocks.py create mode 100644 packages/SwingSet/misc-tools/find-ephemerals.js create mode 100644 packages/SwingSet/misc-tools/find-provisions.py create mode 100755 packages/SwingSet/misc-tools/follower-run-tools/do-clist.py create mode 100755 packages/SwingSet/misc-tools/follower-run-tools/process.sh create mode 100644 packages/SwingSet/misc-tools/follower-run-tools/size-report.py create mode 100644 packages/SwingSet/misc-tools/make-tiny-vat.js create mode 100644 packages/SwingSet/misc-tools/my-build-mezzanine-db.js create mode 100644 packages/SwingSet/misc-tools/perc100.py create mode 100644 packages/SwingSet/misc-tools/prune-transcripts.js create mode 100644 packages/SwingSet/misc-tools/push-price-times.js create mode 100644 packages/SwingSet/misc-tools/run-in-xs-supervisor.js create mode 100644 packages/SwingSet/misc-tools/run-in-xs.js create mode 100644 packages/SwingSet/misc-tools/scan-abandoned-unreachables.py create mode 100644 packages/SwingSet/misc-tools/short-block-times.py create mode 100644 packages/SwingSet/misc-tools/source-file-used-by.js create mode 100644 packages/SwingSet/misc-tools/summarize-ckv.js create mode 100644 packages/SwingSet/misc-tools/trades-and-writes-per-block.py create mode 100644 packages/SwingSet/misc-tools/zoe-notifier.js create mode 100644 packages/SwingSet/misc-tools/zoe-vat-done.js diff --git a/.eslintignore b/.eslintignore index c684d095051..368a3701958 100644 --- a/.eslintignore +++ b/.eslintignore @@ -1,3 +1,4 @@ # also ignored in packages/cosmic-proto/.eslintignore, but IDE's pick up the root config packages/cosmic-proto/dist packages/cosmic-proto/src/codegen/ +packages/SwingSet/misc-tools/ diff --git a/packages/SwingSet/.eslintignore b/packages/SwingSet/.eslintignore new file mode 100644 index 00000000000..bf17ed83caa --- /dev/null +++ b/packages/SwingSet/.eslintignore @@ -0,0 +1 @@ +misc-tools/ diff --git a/packages/SwingSet/misc-tools/all-delivery-time-computrons.py b/packages/SwingSet/misc-tools/all-delivery-time-computrons.py new file mode 100644 index 00000000000..2574fafb3d7 --- /dev/null +++ b/packages/SwingSet/misc-tools/all-delivery-time-computrons.py @@ -0,0 +1,34 @@ +# +# Given a slogfile on stdin and a vatID, this emits a CSV of every +# delivery (including BOYD and GC actions), with their wallclock time +# and computrons (if any). This can be turned into a scatter chart +# which might show trends like organic GC taking longer over time. It +# ignores replays. 
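+# Example invocation (a sketch with hypothetical filenames; the vatID is
+# argv[1] and the slogfile is read from stdin):
+#   python3 all-delivery-time-computrons.py v7 <chain.slog >v7-deliveries.csv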
+ +import sys, json +from collections import defaultdict +from itertools import count + +cranks = [] # (deliveryNum, wallclock, computrons) +deliveryNum = None +start = None +summary = None + +vatID = sys.argv[1] + +print("crankNum,deliveryNum,elapsed,computrons") +for line in sys.stdin: + d = json.loads(line) + time = d["time"] + stype = d["type"] + if d.get("vatID") != vatID: + continue + if stype == "deliver" and not d["replay"]: + crankNum = d["crankNum"] + deliveryNum = d["deliveryNum"] + start = time + if stype and deliveryNum is not None and stype == "deliver-result" and not d["replay"]: + elapsed = time - start + computrons = d["dr"][2]["compute"] + print("%s,%s,%s,%s" % (crankNum, deliveryNum, elapsed, computrons)) + deliveryNum = None diff --git a/packages/SwingSet/misc-tools/audit-refgraph.py b/packages/SwingSet/misc-tools/audit-refgraph.py new file mode 100644 index 00000000000..788b939a870 --- /dev/null +++ b/packages/SwingSet/misc-tools/audit-refgraph.py @@ -0,0 +1,115 @@ +import sys, json, re +from collections import defaultdict + +# first build an underscore-separated list of all kvStore values +# sqlite3 -separator _ ss.sqlite 'SELECT * FROM kvStore' |sort >all-kv.txt + +# then feed that into stdin + +vatRE = re.compile(r'^(v\d+)\.(.*)') +vcRE = re.compile(r'^vc\.(\d+)\.(.*)') +rcRE = re.compile(r'^vom\.rc\.(.*)') + +class Collection: + def __init__(self, vatID, collectionID): + self.vatID = vatID + self.collectionID = collectionID + self.data = {} + self.meta = {} + self.ordinals = {} + + def __str__(self): + return "Collection(%s.c%d)" % (self.vatID, self.collectionID) + + def add(self, c_key, value): + reachable_vrefs = [] + if c_key in ["|entryCount", "|schemata", "|nextOrdinal"]: + self.meta[c_key] = value + elif c_key.startswith("|"): + # ordinal assignment record + # 'o+d31/35:1' -> 3 + self.ordinals[c_key[1:]] = int(value) + else: + self.data[c_key] = value + print(c_key, value) + data = json.loads(value) + reachable_vrefs.extend(data["slots"]) + return reachable_vrefs + + def audit(self): + assert(int(self.meta["|entryCount"]) == len(self.data)) + + def audit_ordinals(self): + for c_key in self.data: + if c_key.startswith("r"): + # data record, where the key is an ordinal + # 'r0000000003:o+d31/35:1' -> '{"body":"#null","slots":[]}' + pieces = c_key.split(":", maxsplit=1) + dr_ordinal_s = pieces[0][1:] + dr_ordinal = int(dr_ordinal_s) + dr_ordinal_vref = pieces[1] + if dr_ordinal_vref not in self.ordinals: + raise ValueError("%s.c%s vref=%s ordinal=%s" % (self.vatID, self.collectionID, dr_ordinal_vref, dr_ordinal)) + oa_ordinal = self.ordinals[dr_ordinal_vref] + if dr_ordinal != oa_ordinal: + raise ValueError("%s.c%s vref=%s ordinal=%s/%s" % (self.vatID, self.collectionID, dr_ordinal_vref, dr_ordinal, oa_ordinal)) + for oa_vref, oa_ordinal in self.ordinals.items(): + dr_key = "r%010d:%s" % (oa_ordinal, oa_vref) + if dr_key not in self.data: + raise ValueError("%s.c%s vref=%s ordinal=%s" % (self.vatID, self.collectionID, oa_vref, oa_ordinal)) + + +def make_vatID(): + return defaultdict(Collection) + +class Vat: + def __init__(self, vatID): + self.vatID = vatID + self.collections = {} + self.refcounts = {} # vom.rc.$vref: $vref -> reachable_count + self.refs = defaultdict(set) # $vref -> inbound vrefs + + def add_line(self, v_line, value): + if v_line.startswith("vs."): + vs_key = v_line[len("vs."):] + self.add_vatstore(vs_key, value) + def add_vatstore(self, vs_key, value): # vom.rc. or vc.$cid. 
, etc + #print(vatID, vs_key, value) + mo = vcRE.match(vs_key) # vc.$cid. + if mo: + (collectionID, c_key) = mo.groups() + collectionID = int(collectionID) + if collectionID not in self.collections: + self.collections[collectionID] = Collection(self.vatID, collectionID) + c = self.collections[collectionID] + reachable_vrefs = c.add(c_key, value) + for vref in reachable_vrefs: + self.refs[vref].add("collection-%d" % collectionID) + mo = rcRE.search(vs_key) # vom.rc. + if mo: + vref = mo.group(1) + reachable_s = value + self.refcounts[vref] = int(reachable_s) + + + +collections_by_vatID = {} + + +for row in sys.stdin: + key,value = row.strip().split("_", maxsplit=1) + mo = vatRE.search(key) + if mo: + (vatID, rest) = mo.groups() + if vatID not in collections_by_vatID: + collections_by_vatID[vatID] = Vat(vatID) + v = collections_by_vatID[vatID] + v.add_line(rest, value) + +c = collections_by_vatID["v9"].collections[94] +#print(c.data) +#print(c.ordinals) +for v in collections_by_vatID.values(): + for c in v.collections.values(): + c.audit_ordinals() + print("%s ok" % c) diff --git a/packages/SwingSet/misc-tools/block-times.py b/packages/SwingSet/misc-tools/block-times.py new file mode 100644 index 00000000000..8c90c2481a7 --- /dev/null +++ b/packages/SwingSet/misc-tools/block-times.py @@ -0,0 +1,54 @@ +# +# given a slogfile on stdin, emit a CSV of (blockHeight, start, +# elapsed) that measures the swingset time for each block (from +# cosmic-swingset-end-block-start to +# cosmic-swingset-end-block-finish), suitable for pasting into a +# spreadsheet to make a graph + +import sys, json, statistics + +print("height,start,lag,swingset,deliveries,computrons,bridge_inbounds,consensus_time,block_time") +# consensus_time is from cosmic-swingset-after-commit-block to cosmic-swingset-begin-block +# block_time is from cosmic-swingset-end-block-start to cosmic-swingset-end-block-start + +lag = None # time - blockTime, how far was the node behind +start = None # latest cosmic-swingset-end-block-start time +last_after_commit_block = None +last_begin_block = None +deliveries = 0 +computrons = 0 +bridge_inbounds = 0 +consensus_time = 0 +block_time = 0 + +for line in sys.stdin: + d = json.loads(line) + time = d["time"] + dtype = d["type"] + if dtype == "cosmic-swingset-begin-block": + if last_begin_block is not None: + block_time = time - last_begin_block + last_begin_block = time + lag = d["time"] - d["blockTime"] + if dtype == "cosmic-swingset-bridge-inbound": + bridge_inbounds += 1 + if dtype == "cosmic-swingset-end-block-start": + start = time + if dtype == "cosmic-swingset-after-commit-block": + last_after_commit_block = time + if dtype == "cosmic-swingset-begin-block" and last_after_commit_block is not None: + consensus_time = time - last_after_commit_block + if start is not None and dtype == "deliver-result": + deliveries += 1 + if d["dr"][2] and "compute" in d["dr"][2]: + computrons += d["dr"][2]["compute"] + if start is not None and dtype == "cosmic-swingset-end-block-finish": + height = d["blockHeight"] + swingset = time - start # end-block-start to end-block-finish + print("%d,%f,%f,%f,%d,%d,%d,%f,%f" % (height, start, lag, swingset, deliveries, + computrons, bridge_inbounds, + consensus_time, block_time)) + start = None + deliveries = 0 + computrons = 0 + bridge_inbounds = 0 diff --git a/packages/SwingSet/misc-tools/bootstrap-exhaust-keyids.js b/packages/SwingSet/misc-tools/bootstrap-exhaust-keyids.js new file mode 100644 index 00000000000..d82b3680511 --- /dev/null +++ 
b/packages/SwingSet/misc-tools/bootstrap-exhaust-keyids.js @@ -0,0 +1,32 @@ +// @ts-nocheck +/* eslint-disable */ +import { E } from '@endo/eventual-send'; +import { Far } from '@endo/marshal'; +import { defineKind } from '@agoric/vat-data'; + +export function buildRootObject() { + let count = 0; + const makeHolder = defineKind('holder', () => ({ held: 0 }), { + hold: ({ state }, value) => { + count += 1; + state.held = value; + }, + }); + const holder = makeHolder(); + const makeHeld = defineKind('held', () => ({}), {}); + + return Far('root', { + async bootstrap(vats, devices) { + holder.hold(makeHeld()); + holder.hold(makeHeld()); + console.log(`count: ${count}`); + }, + + async more() { + for (let i = 0; i < 100; i++) { + holder.hold(makeHeld()); + } + console.log(`count: ${count}`); + }, + }); +} diff --git a/packages/SwingSet/misc-tools/categorize-kvstore.js b/packages/SwingSet/misc-tools/categorize-kvstore.js new file mode 100644 index 00000000000..14bc621b7d6 --- /dev/null +++ b/packages/SwingSet/misc-tools/categorize-kvstore.js @@ -0,0 +1,452 @@ +// @ts-nocheck +import fs from 'fs'; +import sqlite3 from 'better-sqlite3'; + +function ingestSwingSstore(datafile, swingstore_dbfn) { + const ssdb = sqlite3(swingstore_dbfn); + + // object, not Map, so we can JSON-dump it when we're done + const vats = {}; + function provideVat(vatID) { + if (!vats[vatID]) { + vats[vatID] = { + collections: {}, + kinds: {}, + stats: { + kv: { rows: 0, chars: 0 }, + clist: { rows: 0, chars: 0 }, + vatstore: { rows: 0, chars: 0 }, + vc: { rows: 0, chars: 0 }, + vom: { + total: { rows: 0, chars: 0 }, + data: { rows: 0, chars: 0 }, + exportStatus: { rows: 0, chars: 0 }, + ir: { rows: 0, chars: 0 }, + rc: { + total: { rows: 0, chars: 0 }, + imports: { rows: 0, chars: 0 }, + exports: { + total: { rows: 0, chars: 0 }, + ephemeral: { rows: 0, chars: 0 }, + virtual: { rows: 0, chars: 0 }, + durable: { rows: 0, chars: 0 }, + }, + }, + }, + }, + }; + } + return vats[vatID]; + } + function provideCollection(vat, collectionID) { + if (!vat.collections[collectionID]) { + vat.collections[collectionID] = { entries: 0, ordinals: 0 }; + } + return vat.collections[collectionID]; + } + function provideKind(vat, kindID) { + if (!vat.kinds[kindID]) { + vat.kinds[kindID] = { + exported: 0, + referenced: 0, + recognized: 0, + defined: 0, + }; + } + return vat.kinds[kindID]; + } + + // note: this LIKE ought to be optimized into a SEARCH (prepend + // "EXPLAIN QUERY PLAN" to the query and sqlite will tell you its best + // strategy), but something is inhibiting the optimization, maybe + // the column needs to be indexed as NOCASE. + // https://www.sqlite.org/optoverview.html#the_like_optimization + // const sqlVatNames = ssdb.prepare("SELECT * FROM kvStore WHERE key LIKE 'vat.name%'"); + + const sqlVatNames = ssdb.prepare( + "SELECT * FROM kvStore WHERE key > 'vat.' and key < 'vat/' and key LIKE 'vat.name%'", + ); + const sqlGet = ssdb.prepare('SELECT value FROM kvStore WHERE key=?'); + const sqlAllKV = ssdb.prepare('SELECT * FROM kvStore'); + + // We need to grab the `storeKindIDTable` for each vat first, so we + // can recognize vrefs that describe collections of those types when + // we see them in the vom.rc tables. 
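+  // For example (hypothetical values): if v9's storeKindIDTable were
+  // {"scalarMapStore": 2, "scalarDurableMapStore": 6}, then a later key like
+  // 'v9.vs.vom.rc.o+d6/12' tells us that kindID 6 is a collection type, so
+  // 'o+d6/12' names v9's collection c12 rather than an ordinary durable
+  // object instance.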
+ const allVatIDs = []; + const vatIDToCollectionKindIDs = {}; + + for (const { key, value } of sqlVatNames.iterate()) { + if (key.startsWith('vat.name.')) { + allVatIDs.push(value); + } + } + for (const vatID of JSON.parse(sqlGet.get('vat.dynamicIDs').value)) { + allVatIDs.push(vatID); + } + + for (const vatID of allVatIDs) { + const vat = provideVat(vatID); + const key = `${vatID}.vs.storeKindIDTable`; + const row = sqlGet.get(key); + if (!row) { + continue; // non-liveslots vats (comms) lack storeKindIDTable + } + const table = JSON.parse(row.value); + vat.storeTypeToKindID = table; + vat.kindIDToStoreType = {}; + // the values of this table are integers, but the output of our + // regexp will be strings, so convert them now + const kids = new Set(); + vatIDToCollectionKindIDs[vatID] = kids; + for (const [storeType, kindIDNumber] of Object.entries(table)) { + vat.kindIDToStoreType[`${kindIDNumber}`] = storeType; + kids.add(`${kindIDNumber}`); + } + } + + function gleanCollections(vatID, vref, vat) { + // Liveslots doesn't store the CollectionID->KindID mapping + // directly, so we glean it: if we observe o+dXX/YY, where XX is a + // KindID that matches one of the collection types from + // storeKindIDTable, then YY is a CollectionID. + const r = /^o\+d(\d+)\/(\d+)/.exec(vref); + // we don't anchor with $, because faceted vrefs end with :ZZ + if (r) { + const [kindID, collectionID] = r.slice(1); + if (vatIDToCollectionKindIDs[vatID].has(kindID)) { + const c = provideCollection(vat, collectionID); + if (!c.kindID) { + // console.log(`:gleaned ${vatID} k${kindID}->co${collectionID} from ${vref}`); + c.kindID = kindID; + } + } + } + } + + for (const { key, value } of sqlAllKV.iterate()) { + const r0 = /^(v\d+)\.(.*)/.exec(key); + if (!r0) { + continue; + } + const [vatID, vatsubkey] = r0.slice(1); + const vat = provideVat(vatID); + const stats = vat.stats; + function incstat(stat, newRows = 1) { + stat.rows += newRows; + stat.chars += key.length + value.length; + } + incstat(stats.kv); + + if (vatsubkey.startsWith('c.')) { + if (vatsubkey.startsWith('c.ko')) { + // only count each pair once + incstat(stats.clist); + } else { + incstat(stats.clist, 0); + } + } + + const r1 = /^vs\.(.*)/.exec(vatsubkey); + if (!r1) { + continue; + } + incstat(stats.vatstore); + + const [subkey] = r1.slice(1); + // console.log(vatID, subkey, key); + const r2 = /^vc\.(\d+)\.(.*)$/.exec(subkey); + if (r2) { + incstat(stats.vc); // entries, ordinals, and meta + const [collectionID, csubkey] = r2.slice(1); + // console.log(vatID, collectionID, csubkey, key); + const c = provideCollection(vat, collectionID); + if (csubkey === '|schemata') { + c.schemata = value; + } else if (csubkey === '|entryCount') { + c.entryCount = Number(value); + } else if (csubkey === '|nextOrdinal') { + c.nextOrdinal = Number(value); + } else if (csubkey.startsWith('|')) { + // probably an ordinal mapping + c.ordinals += 1; + } else { + c.entries += 1; + if (!c.sample) { + c.sample = [key, value]; + } + } + } + const r3 = /^vom\.(.*)$/.exec(subkey); + if (r3) { + incstat(stats.vom.total); + const [vsubkey] = r3.slice(1); + if (vsubkey.startsWith('dkind.')) { + // vom.dkind.23.descriptor or .nextID + const [kindID, ksubkey] = /^dkind\.(\d+)\.(.*)$/.exec(vsubkey).slice(1); + const kind = provideKind(vat, kindID); + kind[ksubkey] = value; + } else if (vsubkey.startsWith('es.')) { + // vom.es.o+d11/123 + incstat(stats.vom.exportStatus); + const [vref] = /^es\.(.*)$/.exec(vsubkey).slice(1); + const kindID = /^o\+d(\d+)\//.exec(vref).slice(1); + 
const kind = provideKind(vat, kindID); + // the value has one 'r' or 's' or '_' per facet (for reachable/recognizable/none) + // for now, collapse all of that into a single "exported" count + kind.exported += 1; + gleanCollections(vatID, vref, vat); + } else if (vsubkey.startsWith('rc.')) { + // vom.rc.o+d11/123 + // vom.rc.o-56 + + incstat(stats.vom.rc.total); + if (vsubkey.startsWith('rc.o+')) { + incstat(stats.vom.rc.exports.total); + if (vsubkey.startsWith('rc.o+d')) { + incstat(stats.vom.rc.exports.durable); + } else if (vsubkey.startsWith('rc.o+v')) { + incstat(stats.vom.rc.exports.virtual); + } else { + incstat(stats.vom.rc.exports.ephemeral); + } + } else { + incstat(stats.vom.rc.imports); + } + + if (vsubkey.startsWith('rc.o+d')) { + // only vom.rc.o+d11/123, ignore vom.rc.o-56 + const [vref] = /^rc\.(.*)$/.exec(vsubkey).slice(1); + const kindID = /^o\+d(\d+)\//.exec(vref).slice(1); + const kind = provideKind(vat, kindID); + kind.referenced += 1; + gleanCollections(vatID, vref, vat); + } + } else if (vsubkey.startsWith('ir.')) { + // vom.ir.o+d10/1 + // vom.ir.o-54 + incstat(stats.vom.ir); + if (vsubkey.startsWith('ir.o+d')) { + const [vref] = /^ir\.(.*)$/.exec(vsubkey).slice(1); + const kindID = /^o\+d(\d+)\//.exec(vref).slice(1); + const kind = provideKind(vat, kindID); + kind.recognized += 1; + gleanCollections(vatID, vref, vat); + } + } else { + // vom.$(vref) + incstat(stats.vom.data); + const [vref] = /^(.*)$/.exec(vsubkey).slice(1); + const kindID = /^o\+d(\d+)\//.exec(vref).slice(1); + const kind = provideKind(vat, kindID); + kind.defined += 1; + if (!kind.sample) { + kind.sample = [key, value]; + } + } + } + } + for (const [vatID, vat] of Object.entries(vats)) { + vat.numCollections = vat.collections.length; + } + fs.writeFileSync(datafile, JSON.stringify(vats)); +} + +function manyCollections(datafile) { + const vats = JSON.parse(fs.readFileSync(datafile)); + const counts = []; // [vatID, numCollections] + for (const vatID of Object.keys(vats)) { + const vat = vats[vatID]; + if (!vat.collections) { + continue; + } + counts.push([vatID, Object.keys(vat.collections).length]); + } + counts.sort((a, b) => b[1] - a[1]); // descendng by numCollections + for (const [vatID, numCollections] of counts) { + console.log(vatID.padEnd(5, ' '), numCollections); + } +} + +// v43 has 1077 collections, v9 has 377, v11 has 312, v14 has 174, v16 has 64 + +function extractLabel(schemataCDs) { + const { body } = JSON.parse(schemataCDs); + if (!body.startsWith('#')) { + return body; // close enough + } + const { label } = JSON.parse(body.slice(1)); + return label; +} + +function largeCollections(datafile) { + const vats = JSON.parse(fs.readFileSync(datafile)); + const counts = []; // [vatID, collectionID, length] + for (const vatID of Object.keys(vats)) { + const vat = vats[vatID]; + if (!vat.collections) { + // continue; + } + for (const collectionID of Object.keys(vat.collections)) { + const collection = vat.collections[collectionID]; + counts.push([vatID, collectionID, collection.entries]); + } + } + counts.sort((a, b) => b[2] - a[2]); // descendng by collection.entries + for (const [vatID, collectionID, entries] of counts) { + const collection = vats[vatID].collections[collectionID]; + const label = extractLabel(collection.schemata); + let sample = ''; + if (collection.sample) { + let [key, value] = collection.sample; + key = key.slice(`${vatID}.vs.vc.${collectionID}.`.length); + sample = `${key.padStart(46, ' ')} -> ${value}`; + } + console.log( + vatID.padStart(3, ' '), + 
`c${collectionID}`.padStart(5, ' '), + `${entries}`.padStart(7, ' '), + label.padEnd(30, ' '), + ' ', + sample, + ); + } +} + +// top 6 collections: +// v29 c13 120544 paymentLedger +// v29 c14 120544 paymentRecoverySets +// v29 c16 120544 recovery set +// v46 c14 90408 paymentLedger +// v46 c15 90408 paymentRecoverySets +// v46 c17 90408 recovery set + +function extractVOMTag(descriptorJSON) { + const descriptor = JSON.parse(descriptorJSON); + const { tag } = descriptor; + return tag; +} + +function manyVOMs(datafile) { + const vats = JSON.parse(fs.readFileSync(datafile)); + // 'recognized' here means there's a vom.ir. key, which involves Weak collections + const counts = []; // [vatID, kindID, defined, exported, referenced, recognized] + for (const vatID of Object.keys(vats)) { + const vat = vats[vatID]; + for (const kindID of Object.keys(vat.kinds)) { + const kind = vat.kinds[kindID]; + // console.log(vatID, kindID, kind); + counts.push([ + vatID, + kindID, + kind.defined, + kind.exported, + kind.referenced, + kind.recognized, + ]); + } + } + counts.sort((a, b) => b[2] - a[2]); // descending by numDefined + console.log( + 'vatID'.padEnd(5, ' '), + 'kindID'.padStart(7, ' '), + 'tag'.padEnd(20, ' '), + `defined`.padStart(7, ' '), + `referenced`.padStart(10, ' '), + `recognized`.padStart(10, ' '), + `exported`.padStart(8, ' '), + `sample`, + ); + for (const [ + vatID, + kindID, + defined, + exported, + referenced, + recognized, + ] of counts) { + const kind = vats[vatID].kinds[kindID]; + const tag = extractVOMTag(kind.descriptor || '{"tag":""}'); + let sample = ''; + if (kind.sample) { + const [key, value] = kind.sample; + sample = `${value}`; + if (sample.length > 60) { + sample = `${sample.slice(0, 60)} ..`; + } + } + console.log( + vatID.padEnd(5, ' '), + kindID.padStart(7, ' '), + `${tag}`.padEnd(20, ' '), + `${defined}`.padStart(7, ' '), + `${referenced}`.padStart(10, ' '), + `${recognized}`.padStart(10, ' '), + `${exported}`.padStart(8, ' '), + sample, + ); + } +} + +// top 15: +// vatID kindID tag defined referenced recognized exported sample +// v29 22 quote payment 120556 120544 241088 33 {} +// v46 22 quote payment 90441 90408 180816 30167 {} +// v9 30 zoe Seat publisher 55867 55867 0 0 {"valueDurability": +// v9 31 ZoeSeatKit 55867 55867 0 22 {"currentAllocation": +// v9 32 SeatHandle 54068 0 54068 54068 {} +// v29 11 ExitObject 50570 0 0 50570 {"zcfSeat": +// v29 18 zcfSeat 50570 50570 50570 0 {"proposal": +// v48 11 ExitObject 3684 0 0 3684 {"zcfSeat": +// v48 18 zcfSeat 3684 3684 5422 0 {"proposal": +// v9 11 IST payment 3446 3427 3436 30 {} +// v48 10 SeatHandle 1776 1776 0 1776 {} +// v17 22 USDC_axl payment 510 1 2 475 {} +// v37 11 ExitObject 477 0 0 477 {"zcfSeat": +// v37 18 zcfSeat 477 477 480 0 {"proposal": + +function describeKind(datafile, vatID, kindID) { + const vats = JSON.parse(fs.readFileSync(datafile)); + const kind = vats[vatID].kinds[kindID]; + console.log(`vat: ${vatID}`); + console.log(`kind: ${kindID}`); + console.log(kind); +} + +function describeCollection(datafile, vatID, collectionID) { + const vats = JSON.parse(fs.readFileSync(datafile)); + const vat = vats[vatID]; + const collection = vat.collections[collectionID]; + console.log(`vat: ${vatID}`); + console.log(`collection: ${collectionID}`); + if (collection.kindID) { + const storeType = vat.kindIDToStoreType[collection.kindID]; + console.log(`storeType: ${storeType} (kindID ${collection.kindID})`); + } + console.log(collection); +} + +const [eqdf, datafile, subcommand, ...subargs] = 
process.argv.slice(2); +if (!(eqdf === '--datafile')) { + throw Error( + 'usage: categorize-kvstore.js --datafile df.json SUBCOMMAND SUBARGS..', + ); +} + +if (subcommand === 'ingest') { + ingestSwingSstore(datafile, ...subargs); +} else if (subcommand === 'many-collections') { + manyCollections(datafile, ...subargs); +} else if (subcommand === 'large-collections') { + largeCollections(datafile, ...subargs); +} else if (subcommand === 'many-voms') { + manyVOMs(datafile, ...subargs); +} else if (subcommand === 'describe-kind') { + describeKind(datafile, ...subargs); +} else if (subcommand === 'describe-collection') { + describeCollection(datafile, ...subargs); +} else { + console.log(`unknown subcommand ${subcommand}`); +} + +// v29.c12: zcfSeatToSeatHandle +// 'sagoric1megzytg65cyrgzs6fvzxgrcqvwwl7ugpt62346' diff --git a/packages/SwingSet/misc-tools/computrons-for-deliveries.py b/packages/SwingSet/misc-tools/computrons-for-deliveries.py new file mode 100644 index 00000000000..67b7391f6d6 --- /dev/null +++ b/packages/SwingSet/misc-tools/computrons-for-deliveries.py @@ -0,0 +1,54 @@ +import sys, json, gzip, re, time +from collections import defaultdict +from pprint import pprint + +# Find all deliveries of a certain type and emit a histogram of how +# many computrons they took. If the delivery is supposed to take +# constant time, this might reveal JS-level growth (e.g. `serialize()` +# taking longer because the data it is working with is growing). + +# You'll need to edit this file to change the selection criteria. + +fn = sys.argv[1] +vatID = "v7" +ko = None # "ko258" +method = "serialize" + +crankNum = None + +print("crankNum,computrons") + +opener = gzip.open if fn.endswith(".gz") else open +with opener(sys.argv[1]) as f: + for line in f: + if isinstance(line, bytes): + line = line.decode("utf-8") + if not line.strip(): + continue + + data = json.loads(line.strip()) + type = data["type"] + when = data["time"] + + if type == 'deliver': + if vatID and data["vatID"] != vatID: + continue + kd = data["kd"] + if kd[0] == "message": + target = kd[1] + if ko and target != ko: + continue + msg = kd[2] + methargsCD = msg["methargs"] + result = msg["result"] + if method and method != json.loads(methargsCD["body"])[0]: + continue + crankNum = data["crankNum"] # want this one + #elif kd[0] == "notify": + # method = "(notify)" + # start = when + if crankNum is not None and type == 'deliver-result' and data["crankNum"] == crankNum: + computrons = data["dr"][2]["compute"] + print("%d,%d" % (crankNum,computrons)) + crankNum = None + diff --git a/packages/SwingSet/misc-tools/count-empty-blocks.py b/packages/SwingSet/misc-tools/count-empty-blocks.py new file mode 100644 index 00000000000..e2aad51d490 --- /dev/null +++ b/packages/SwingSet/misc-tools/count-empty-blocks.py @@ -0,0 +1,64 @@ +import os, sys, json, gzip + +# given a slogfile on stdin, emit a block utilization report: how many were +# empty (no computrons spent) vs non-empty + +# given a filename as an argument, print an ETA, then process the file + +eta = None + +if len(sys.argv) > 1: + fn = sys.argv[1] + size = os.stat(fn).st_size + if fn.endswith(".gz"): + eta = 98 * size / 1e9 # on "muon", my MBP(M1) + f = gzip.open(fn, "rb") + else: + eta = 10.34 * size / 1e9 + f = open(fn, "rb") +else: + f = sys.stdin + +if eta: + print("ETA: %d seconds" % eta) + +empty = True +num_blocks = 0 +num_empty = 0 + +for line in f: + try: + d = json.loads(line) + except Exception as e: + print("error parsing JSON", e) + continue + stype = d["type"] + if stype == 
"cosmic-swingset-begin-block": + block_num = d["blockHeight"] + empty = True + if block_num % 1000 == 0: + print("block %d" % block_num, file=sys.stderr) + elif stype == "cosmic-swingset-bootstrap-block-start": + block_num = 0 + empty = True + elif stype == "cosmic-swingset-run-finish": + if d.get("usedBeans", 0): + empty = False + elif stype == "cosmic-swingset-end-block-finish": + num_blocks += 1 + if empty: + num_empty += 1 + + +perc = 100.0 * num_empty / num_blocks +full = num_blocks - num_empty +print("%d empty = %.2f%% of %d total (%d full)" % (num_empty, perc, num_blocks, full)) + +# on muon, run-01 took 60s to process slogfile (800MB .gz, 7.5GB uncompressed) +# run-02: 97s 1,228,000,669 .gz, 11,651,784,879 uncompressed +# that's 78s per 1GB .gz, or 8.23s per 1GB uncompressed + +#run-05 123s 1,244,057,795 .gz = 99s/GB.gz +#run-26 287s 2,951,596,979 .gz (246s with 'grep cosmic-swingset-' preprocessor) +#run-39 383s 3,903,070,407 .gz = 97s/GB.gz +#run-41 2,862,818,061 .gz diff --git a/packages/SwingSet/misc-tools/delivery-summary.py b/packages/SwingSet/misc-tools/delivery-summary.py new file mode 100644 index 00000000000..348787c9b44 --- /dev/null +++ b/packages/SwingSet/misc-tools/delivery-summary.py @@ -0,0 +1,85 @@ +# +# Given a slogfile on stdin, this measures which deliveries happened +# in which blocks, and emits a CSV of (blockHeight, del1, del2, ..) +# with a count of delivery types. It may be useful to get a sense of +# when a chain is doing lots of different things, vvs when it settles +# down into catching up on a backlog that consists of lots of +# instances of the same thing. + +import sys, json +from collections import defaultdict +from itertools import count + +mode = "count" +if len(sys.argv) > 1: + mode = sys.argv[1] +assert mode in ["count","computrons"] + +# blocks[n] = [blocknum, deliveryTypeCounts] +blocks = [] +all_dtypes = defaultdict(int) + +blocknum = None +deliveryTypeCounts = None +dtype = None + +def summarize_delivery(vatID, kd): + if kd[0] == "notify": + return "%s." 
% vatID + if kd[0] == "message": + method = json.loads(kd[2]["methargs"]["body"])[0] + if isinstance(method, dict): + # probably @@asyncIterator + assert method["@qclass"] == "symbol" + method = method["name"] + return "%s.%s" % (vatID, method) + return None # ignore BOYD, GC + +for line in sys.stdin: + d = json.loads(line) + time = d["time"] + stype = d["type"] + if stype == "cosmic-swingset-begin-block": + blocknum = d["blockHeight"] + deliveryTypeCounts = defaultdict(int) + #if blocknum is not None and stype == "deliver": + # dtype = summarize_delivery(d["vatID"], d["kd"]) + # if dtype: + # deliveryTypeCounts[dtype] += 1 + # all_dtypes[dtype] += 1 + if blocknum is not None and stype == "deliver": + dtype = summarize_delivery(d["vatID"], d["kd"]) + crankNum = d["crankNum"] + if dtype and crankNum is not None and stype == "deliver-result": + crankNum = None + computrons = d["dr"][2]["compute"] + increment = 1 if mode == "count" else computrons + deliveryTypeCounts[dtype] += increment + all_dtypes[dtype] += increment + + if blocknum is not None and stype == "cosmic-swingset-end-block-finish": + blocks.append((blocknum, deliveryTypeCounts)) + +batch_size = 100 +if batch_size: + batched = [] + for start in count(0, batch_size): + if start > blocks[-1][0]: + break + batch = defaultdict(int) + for (blocknum, deliveryTypeCounts) in blocks[start:start+batch_size]: + for (dtype,value) in deliveryTypeCounts.items(): + batch[dtype] += value + batched.append((start, batch)) + blocks = batched + +# Using Google Sheets to plot this as a column chart: the left-most +# columns get the distinctive colors, so use reverse=True +# (i.e. descending) so the most common deliveries are distinctive. +# Move the legend to the bottom, otherwise only the right-most column +# names will be visible. + +dtypes = sorted(all_dtypes, key=lambda t: all_dtypes[t], reverse=True) +print(",".join(["height"] + dtypes)) +for (blocknum, deliveries) in blocks: + print(",".join([str(blocknum)] + [str(deliveries[dtype]) for dtype in dtypes])) diff --git a/packages/SwingSet/misc-tools/delivery-time-computrons-kernel-worker.py b/packages/SwingSet/misc-tools/delivery-time-computrons-kernel-worker.py new file mode 100644 index 00000000000..153301cf292 --- /dev/null +++ b/packages/SwingSet/misc-tools/delivery-time-computrons-kernel-worker.py @@ -0,0 +1,21 @@ +# +# Given a slogfile and a vatID (both passed as command-line arguments), this emits a CSV of every +# delivery (including BOYD and GC actions), with their wallclock time, +# computrons (if any), and how much wallclock time was spent in the +# worker, vs in the kernel (syscalls). This can be turned into a +# scatter chart which might show trends like organic GC taking longer +# over time. It ignores replays.
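+# Example invocation (a sketch with hypothetical filenames; the slogfile path
+# and vatID are positional arguments):
+#   python3 delivery-time-computrons-kernel-worker.py chain.slog v7 >v7-timing.csv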
+ +import sys +from parse_timestamps import stream_file + +fn = sys.argv[1] +vatID = sys.argv[2] + +print("blockHeight,blockTime,tx_delivery,crankNum,deliveryNum,total,worker,kernel,pipe,computrons") +for d in stream_file(fn, vatID): + print("%s,%s,%s,%s,%s,%s,%s,%s,%s,%s" % (d.blockheight, d.blocktime, d.tx_delivery, + d.cranknum, d.deliverynum, + d.k_to_k, d.total_worker, d.total_kernel, d.total_pipe, + d.computrons or 0)) + diff --git a/packages/SwingSet/misc-tools/delivery-time-computrons.py b/packages/SwingSet/misc-tools/delivery-time-computrons.py new file mode 100644 index 00000000000..80f9a2b32aa --- /dev/null +++ b/packages/SwingSet/misc-tools/delivery-time-computrons.py @@ -0,0 +1,44 @@ +# +# Given a slogfile on stdin, this emits a CSV that can be used to make +# a horizontal bar chart of deliveries, where the width of each bar is +# either wallclock time or computrons. This can give a sense of which +# deliveries are more expensive than others. + +import sys, json +from collections import defaultdict +from itertools import count + +cranks = [] # (cranknum, wallclock, computrons) +cranknum = None +start = None +summary = None + +def summarize_delivery(vatID, kd): + if kd[0] == "notify": + return "%s." % vatID + if kd[0] == "message": + method = json.loads(kd[2]["methargs"]["body"])[0] + if isinstance(method, dict): + # probably @@asyncIterator + assert method["@qclass"] == "symbol" + method = method["name"] + return "%s.%s" % (vatID, method) + return None # ignore BOYD, GC + +for line in sys.stdin: + d = json.loads(line) + time = d["time"] + stype = d["type"] + if stype == "deliver": + summary = summarize_delivery(d["vatID"], d["kd"]) + cranknum = d["crankNum"] + start = time + if stype and cranknum is not None and stype == "deliver-result": + wallclock = time - start + computrons = d["dr"][2]["compute"] + cranks.append((summary, wallclock, computrons)) + cranknum = None + +print(",".join([""] + [c[0] for c in cranks])) +print(",".join(["wallclock"] + [str(c[1]) for c in cranks])) +print(",".join(["computrons"] + [str(c[2]) for c in cranks])) diff --git a/packages/SwingSet/misc-tools/elapsed-per-method.py b/packages/SwingSet/misc-tools/elapsed-per-method.py index 0fa5168fcdf..739195a35da 100644 --- a/packages/SwingSet/misc-tools/elapsed-per-method.py +++ b/packages/SwingSet/misc-tools/elapsed-per-method.py @@ -3,16 +3,18 @@ from pprint import pprint # Given a slogfile, for each method name, report the number of times -# that message was delivered into a vat, the total time spent on those -# kinds of deliveries, and the average time per delivery. +# that message was delivered into a vat, the total time/computrons +# spent on those kinds of deliveries, and the average time per +# delivery. 
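+# Example invocation (a sketch with hypothetical filenames; the vatID argument
+# is optional and restricts the report to that vat):
+#   python3 elapsed-per-method.py chain.slog v9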
fn = sys.argv[1] vatID = sys.argv[2] if len(sys.argv) > 2 else None start = None method = None -deliveries = defaultdict(lambda: (0, 0.0)) # of (count, elapsed) -total = 0.0 +deliveries = defaultdict(lambda: (0, 0.0, 0)) # of (count, elapsed, computrons) +total_time = 0.0 +total_computrons = 0 opener = gzip.open if fn.endswith(".gz") else open with opener(sys.argv[1]) as f: @@ -23,7 +25,7 @@ continue data = json.loads(line.strip()) - if vatID and data["vatID"] != vat: + if vatID and data.get("vatID") != vatID: continue type = data["type"] when = data["time"] @@ -35,7 +37,13 @@ msg = vd[2] methargsCD = msg["methargs"] result = msg["result"] - method = json.loads(methargsCD["body"])[0] + methodobj = json.loads(methargsCD["body"])[0] + if isinstance(methodobj, str): + method = methodobj + elif isinstance(methodobj, dict) and methodobj["@qclass"] == "symbol": + method = methodobj["name"] + else: + raise ValueError("cannot determine method of %s" % json.dumps(methodobj)) start = when elif vd[0] == "notify": method = "(notify)" @@ -43,18 +51,22 @@ if type == 'deliver-result': if start is not None: elapsed = when - start - (old_count, old_elapsed) = deliveries[method] + computrons = data["dr"][2]["compute"] + (old_count, old_elapsed, old_computrons) = deliveries[method] new_count = old_count + 1 new_elapsed = old_elapsed + elapsed - total += elapsed - deliveries[method] = (new_count, new_elapsed) + total_time += elapsed + new_computrons = old_computrons + computrons + total_computrons += computrons + deliveries[method] = (new_count, new_elapsed, new_computrons) start = None method = None -print("| count| method | total time | avg each |") -print("| ---- | ------------------------------ | ---------- | --------- |") + +print("| count| method | total computrons | total time | avg each |") +print("| ---- | ------------------------------ | ---------------- | ---------- | --------- |") for method in sorted(deliveries, key=lambda method: deliveries[method][1], reverse=True): - (count, elapsed) = deliveries[method] + (count, elapsed, computrons) = deliveries[method] avg = elapsed / count - print("| %3dx | %30s | %2.3f s | %.3f s |" % (count, method, elapsed, avg)) -print("total: %2.3f s" % total) + print("| {:3d}x | {:>30} | {:>16_d} | {:2.3f} s | {:.3f} s |".format(count, method, computrons, elapsed, avg)) +print("total: {:2.3f} s, {:_d} computrons".format(total_time, total_computrons)) diff --git a/packages/SwingSet/misc-tools/extract-block.py b/packages/SwingSet/misc-tools/extract-block.py index fe23d8c239a..149d2805db0 100644 --- a/packages/SwingSet/misc-tools/extract-block.py +++ b/packages/SwingSet/misc-tools/extract-block.py @@ -1,10 +1,14 @@ import sys, gzip, json fn = sys.argv[1] -blocknum = int(sys.argv[2]) -endblocknum = blocknum -if len(sys.argv) > 3: - endblocknum = int(sys.argv[3]) +start = sys.argv[2] +if start == "bootstrap": + end = 1 +else: + start = int(start) + end = start + if len(sys.argv) > 3: + end = int(sys.argv[3]) yes = False opener = gzip.open if fn.endswith(".gz") else open @@ -14,9 +18,13 @@ line = line.decode("utf-8") data = json.loads(line.strip()) type = data["type"] - if type == "cosmic-swingset-begin-block" and data["blockHeight"] == blocknum: + if start == "bootstrap" and type == "cosmic-swingset-bootstrap-block-start": + yes = True + if start != "bootstrap" and type == "cosmic-swingset-begin-block" and data["blockHeight"] == start: + yes = True if yes: sys.stdout.write(line) - if type == "cosmic-swingset-end-block-finish" and data["blockHeight"] ==
endblocknum: + if start == "bootstrap" and type == "cosmic-swingset-bootstrap-block-finish": + break + if type == "cosmic-swingset-end-block-finish" and data["blockHeight"] == end: break diff --git a/packages/SwingSet/misc-tools/extract-bootstrap-block.py b/packages/SwingSet/misc-tools/extract-bootstrap-block.py new file mode 100644 index 00000000000..c3f8ec0a7de --- /dev/null +++ b/packages/SwingSet/misc-tools/extract-bootstrap-block.py @@ -0,0 +1,18 @@ +import sys, gzip, json + +fn = sys.argv[1] +yes = False + +opener = gzip.open if fn.endswith(".gz") else open +with opener(sys.argv[1]) as f: + for line in f: + if isinstance(line, bytes): + line = line.decode("utf-8") + data = json.loads(line.strip()) + type = data["type"] + if type == "cosmic-swingset-bootstrap-block-start": + yes = True + if yes: + sys.stdout.write(line) + if type == "cosmic-swingset-bootstrap-block-finish": + break diff --git a/packages/SwingSet/misc-tools/extract-clist-db.js b/packages/SwingSet/misc-tools/extract-clist-db.js new file mode 100644 index 00000000000..c18e962b9c4 --- /dev/null +++ b/packages/SwingSet/misc-tools/extract-clist-db.js @@ -0,0 +1,102 @@ +#!/usr/bin/env node +// @ts-nocheck +/* eslint no-labels: "off", no-extra-label: "off", no-underscore-dangle: "off" */ +import process from 'process'; +import fs from 'fs'; +import sqlite3 from 'better-sqlite3'; +import yargsParser from 'yargs-parser'; +import '@endo/init/debug.js'; + +const main = rawArgv => { + const { _: args, ...options } = yargsParser(rawArgv.slice(2)); + console.log(args, options); + if (Reflect.ownKeys(options).length > 0 || args.length !== 2) { + const q = str => `'${str.replaceAll("'", String.raw`'\''`)}'`; + console.error( + [ + `Usage: ${rawArgv[1]} /path/to/swingstore.sqlite clist.sqlite`, + 'Extracts c-list entries to a new DB.', + 'note: clist.sqlite must not already exist', + ].join('\n'), + ); + process.exitCode = 1; + return; + } + + const [ssDBPath, clDBPath] = args; + console.log(`ssDB`, ssDBPath); + console.log(`clDBPath`, clDBPath); + if (fs.existsSync(clDBPath)) { + throw Error(`c-list DB path (${clDBPath}) must not already exist`); + } + const ssDB = sqlite3(/** @type {string} */ (ssDBPath)); + const clDB = sqlite3(/** @type {string} */ (clDBPath)); + + clDB.exec(`CREATE TABLE clist ( +vatID STRING, +kref STRING, +vref STRING, +exported BOOL, +durability BOOL, -- NULL for imports, (1: ephemeral, 2: virtual, 3: durable) +reachable BOOL, +recognizable BOOL +)`); + clDB.exec(`CREATE INDEX clist_kref ON clist (kref)`); + clDB.exec(`CREATE INDEX clist_vref ON clist (vref)`); + + const addClist = clDB.prepare(`INSERT INTO clist +(vatID, kref, vref, exported, durability, reachable, recognizable) +VALUES (?, ?, ?, ?, ?, ?, ?) 
+`); + clDB.prepare('BEGIN IMMEDIATE TRANSACTION').run(); + + const getKVs = ssDB.prepare(`SELECT key,value from kvStore`); + for (const row of getKVs.iterate()) { + const { key, value } = row; + const matches = /^(v\d+)\.c\.(ko.*)$/.exec(key); + if (matches) { + const [_, vatID, kref] = matches; + // v9.c.ko123 -> R o+456 + // v9.c.ko123 -> _ o+456 + const rc = value.slice(0, 1); + const vref = value.slice(2); + let reachable = 0; + const recognizable = 1; + if (rc === 'R') { + reachable = 1; + } else if (rc === '_') { + reachable = 0; + } else { + throw Error(`key '${key}', value '${value}', bad refcount '${rc}'`); + } + let durability = null; + let exported = 0; + if (vref.slice(1, 2) === '+') { + exported = 1; + const c = vref.slice(2, 3); + if (c === 'd') { + durability = 3; + } else if (c === 'v') { + durability = 2; + } else { + durability = 1; + } + } + addClist.run( + vatID, + kref, + vref, + exported, + durability, + reachable, + recognizable, + ); + // console.log(vatID, kref, vref, exported, durability, reachable, recognizable); + } + } + clDB.prepare('COMMIT').run(); +}; + +// run-18 took 1m42s (68GB, 17M kvStore entries) to produce 1.2M rows (37MB) + +main(process.argv); diff --git a/packages/SwingSet/misc-tools/extract-non-empty-blocks.py b/packages/SwingSet/misc-tools/extract-non-empty-blocks.py new file mode 100644 index 00000000000..6f0574c2af3 --- /dev/null +++ b/packages/SwingSet/misc-tools/extract-non-empty-blocks.py @@ -0,0 +1,78 @@ +import sys, gzip, json, os + +# find all non-empty blocks in the given slogfile, write to bNNN.slog in the +# current directory + +def unmarshal(s): + if s.startswith("#"): + s = s[1:] + return json.loads(s) + +fn = sys.argv[1] +blocknum = None +capture = False +early_lines = [] +emit_f = None +why = [] + +opener = gzip.open if fn.endswith(".gz") else open +opener = (lambda _: sys.stdin) if fn == "-" else opener +with opener(sys.argv[1]) as f: + for line in f: + if isinstance(line, bytes): + line = line.decode("utf-8") + data = json.loads(line.strip()) + type = data["type"] + if type == "cosmic-swingset-begin-block": + blocknum = data["blockHeight"] + early_lines = [] + capture = True + emit_f = None + why.clear() + if type == "cosmic-swingset-bridge-inbound": + why.append(data["source"]) # bank, provision, wallet + if emit_f: + emit_f.write(line) + if capture: + early_lines.append(line) + if type == "crank-start": + print("open {}".format(blocknum)); + emit_f = open("b{}.slog".format(blocknum), "w") + for l in early_lines: # retroactively interesting + emit_f.write(l) + early_lines.clear() + capture = False + if type == "crank-start" and data["crankType"] == "routing" and data["message"]["type"] == "send": + if data["message"]["target"] == "ko296": + why.append("timer") + if data["message"]["target"] == "ko62": + d1 = unmarshal(data["message"]["msg"]["methargs"]["body"]) + if d1[0] == "inbound" and d1[1][0] == "wallet": + print("d1", d1) + d2 = unmarshal(json.loads(d1[1][1]["spendAction"])["body"]) + print("d2", d2) + if d2["method"] == "executeOffer": + spec = d2["offer"]["invitationSpec"] + if "instance" in spec: + if "publicInvitationMaker" in spec: + why.append(spec["publicInvitationMaker"]) + # "makeWantMintedInvitation": PSM trade (buy IST) + # "makeGiveMintedInvitation": PSM trade (sell IST) + else: + why.append("offer.instance") + elif "callPipe" in spec: + why.append("callPipe") + elif "invitationMakerName" in spec: + why.append(spec["invitationMakerName"]) + else: + why.append("unknown.offer") + if len(early_lines) > 20: + 
capture = False # the current block is probably not interesting, give up + if type == "cosmic-swingset-end-block-finish": + capture = False + if emit_f: + print("close") + emit_f.close() + emit_f = None + whystr = "-".join(why) + os.rename("b{}.slog".format(blocknum), "b{}-{}.slog".format(blocknum, whystr)) diff --git a/packages/SwingSet/misc-tools/find-ephemerals.js b/packages/SwingSet/misc-tools/find-ephemerals.js new file mode 100644 index 00000000000..a3296b22b62 --- /dev/null +++ b/packages/SwingSet/misc-tools/find-ephemerals.js @@ -0,0 +1,158 @@ +#!/usr/bin/env node +// @ts-nocheck +/* eslint no-labels: "off", no-extra-label: "off", no-underscore-dangle: "off" */ +import process from 'process'; +import sqlite3 from 'better-sqlite3'; +import yargsParser from 'yargs-parser'; +import '@endo/init/debug.js'; + +const vatMap = { + v1: 'bootstrap', + v2: 'vatAdmin', + v3: 'comms', + v4: 'vattp', + v5: 'timer', + v6: 'agoricNames', + v7: 'board', + v8: 'priceAuthority', + v9: 'zoe', + v10: 'bridge', + v11: 'provisioning', + // v12 distributed tokens from the pismo era, deleted during vaults/bulldozer bootstrap + v13: 'zcf-mintHolder-BLD', + v14: 'bank', + v15: 'zcf-b1-2371d-econCommitteeCharter', + v16: 'zcf-mintHolder-ATOM', + v17: 'zcf-mintHolder-USDC_axl', + v18: 'zcf-mintHolder-USDC_grv', + v19: 'zcf-mintHolder-USDT_axl', + v20: 'zcf-mintHolder-USDT_grv', + v21: 'zcf-mintHolder-DAI_axl', + v22: 'zcf-mintHolder-DAI_grv', + v23: 'zcf-b1-e9e0b-feeDistributor', + v24: 'zcf-b1-941ef-economicCommittee', + v25: 'zcf-b1-9f877-provisionPool-governor', + v26: 'zcf-b1-9f877-ATOM-USD_price_feed-governor', + v27: 'zcf-b1-9f877-reserve.governor', + v28: 'zcf-b1-db93f-provisionPool', + v29: 'zcf-b1-4522b-ATOM-USD_price_feed', + v30: 'zcf-b1-9f877-psm-IST-USDC_axl.governor', + v31: 'zcf-b1-9f877-psm-IST-USDC_grv.governor', + v32: 'zcf-b1-9f877-psm-IST-USDT_axl.governor', + v33: 'zcf-b1-9f877-psm-IST-USDT_grv.governor', + v34: 'zcf-b1-9f877-psm-IST-DAI_axl.governor', + v35: 'zcf-b1-9f877-psm-IST-DAI_grv.governor', + v36: 'zcf-b1-92c07-reserve', + v37: 'zcf-b1-c25fb-psm-IST-USDC_axl', + v38: 'zcf-b1-c25fb-psm-IST-USDC_grv', + v39: 'zcf-b1-c25fb-psm-IST-USDT_axl', + v40: 'zcf-b1-c25fb-psm-IST-USDT_grv', + v41: 'zcf-b1-c25fb-psm-IST-DAI_axl', + v42: 'zcf-b1-c25fb-psm-IST-DAI_grv', + v43: 'zcf-b1-94e20-walletFactory', + v44: 'zcf-b1-9f877-auctioneer.governor', + v45: 'zcf-b1-a5683-auctioneer', + v46: 'zcf-b1-0b217-scaledPriceAuthority-ATOM', + v47: 'zcf-b1-9f877-vaultFactory.governor', + v48: 'zcf-b1-6c08a-vaultFactory', + v49: 'zcf-b1-78999-voteCounter.1687287341', + v50: 'zcf-b1-78999-voteCounter.1687335085', + v51: 'zcf-b1-78999-voteCounter.1687426440', + v52: 'zcf-b1-78999-voteCounter.1687426867', + v53: 'zcf-b1-78999-voteCounter.1687426949', + v54: 'zcf-b1-78999-voteCounter.1687808052', + v55: 'zcf-b1-78999-voteCounter.1687809028', + v56: 'zcf-b1-78999-voteCounter.1687915582', + v57: 'zcf-b1-78999-voteCounter.1691808218', + v58: 'zcf-b1-78999-voteCounter.1695077916', + v59: 'zcf-b1-78999-voteCounter.1695198706', + v60: 'zcf-b1-51085-kreadCommitteeCharter', + v61: 'zcf-b1-941ef-kreadCommittee', + v62: 'zcf-b1-9f877-KREAd-governor', + v63: 'zcf-b1-853ac-KREAd', + v64: 'zcf-b1-78999-voteCounter.1697649905', + v65: 'zcf-b1-78999-voteCounter.1697651249', + v66: 'zcf-mintHolder-stATOM', + v67: 'zcf-b1-9f877-stATOM-USD_price_feed-governor', + v68: 'zcf-b1-4522b-stATOM-USD_price_feed', + v69: 'zcf-b1-0b217-scaledPriceAuthority-stATOM', + v70: 'zcf-b1-78999-voteCounter.1697751150', + v71: 'zcf-mintHolder-USDC', 
+ v72: 'zcf-b1-9f877-psm-IST-USDC.governor', + v73: 'zcf-b1-c25fb-psm-IST-USDC', + v74: 'zcf-mintHolder-USDT', + v75: 'zcf-b1-9f877-psm-IST-USDT.governor', + v76: 'zcf-b1-c25fb-psm-IST-USDT', + v77: 'zcf-b1-78999-voteCounter.1698944868', + v78: 'zcf-b1-78999-voteCounter.1698944950', + v79: 'zcf-b1-78999-voteCounter.1699049156', + v80: 'zcf-b1-78999-voteCounter.1699215618', + v81: 'zcf-b1-78999-voteCounter.1700540726', + v82: 'zcf-b1-78999-voteCounter.1700638821', + v83: 'zcf-b1-78999-voteCounter.1700638922', + v84: 'zcf-b1-78999-voteCounter.1700857048', + v85: 'zcf-b1-941ef-CrabbleCommittee', + v86: 'zcf-b1-0bfb8a-Crabble-governor', + v87: 'zcf-b1-3af818-Crabble', + v88: 'zcf-b1-78999-voteCounter.1702995108', +}; + +const main = rawArgv => { + const { _: args, ...options } = yargsParser(rawArgv.slice(2)); + if (Reflect.ownKeys(options).length > 0 || args.length < 2) { + const q = str => `'${str.replaceAll("'", String.raw`'\''`)}'`; + console.error( + [ + `Usage: ${rawArgv[1]} /path/to/clist.sqlite VATIDs..`, + 'Identifies ephemeral exports of the given vats.', + ].join('\n'), + ); + process.exitCode = 1; + return; + } + + const [clDBPath, ...vatIDs] = args; + const db = sqlite3(/** @type {string} */ (clDBPath)); + const getEphemeralExports = db.prepare( + `SELECT * FROM clist WHERE vatID=? AND (durability=1 OR durability=2)`, + ); + const getImports = db.prepare( + `SELECT * FROM clist WHERE kref=? AND exported=0`, + ); + const edges = new Map(); // Map<exportingVatID, Map<importingVatID, Set<[importVref, kref, exportVref]>>> + for (const exportingVatID of vatIDs) { + const exMap = new Map(); + edges.set(exportingVatID, exMap); + for (const exportRow of getEphemeralExports.iterate(exportingVatID)) { + if (exportRow.vref === 'o+0') { + continue; // special case, root object is not actually ephemeral + } + const { kref } = exportRow; + for (const importRow of getImports.iterate(kref)) { + const importingVatID = importRow.vatID; + if (!exMap.has(importingVatID)) { + exMap.set(importingVatID, new Set()); + } + exMap.get(importingVatID).add([importRow.vref, kref, exportRow.vref]); + } + } + } + + // report + for (const exportingVatID of [...edges.keys()].sort()) { + const exMap = edges.get(exportingVatID); + for (const importingVatID of [...exMap.keys()].sort()) { + const paths = [...exMap.get(importingVatID)]; + const renderPath = path => `${path[0]}->${path[1]}->${path[2]}`; + let examples = paths.slice(0, 5).map(renderPath).join(', '); + if (paths.length > 5) { + examples += '...'; + } + console.log( + `${importingVatID} -> ${exportingVatID}: (${paths.length}) ${examples}`, + ); + } + } +}; + +main(process.argv); diff --git a/packages/SwingSet/misc-tools/find-provisions.py b/packages/SwingSet/misc-tools/find-provisions.py new file mode 100644 index 00000000000..6f54e6ad4b6 --- /dev/null +++ b/packages/SwingSet/misc-tools/find-provisions.py @@ -0,0 +1,39 @@ +# +# Given a slogfile on stdin, find the block and cranknums of the +# smart-wallet account provisioning events + +import sys, json +from collections import defaultdict +from itertools import count + +targetVatID = "v1" # vat-bootstrap +targetKref = "ko26" +blockNum = None +crankNum = None +blockTime = None + + +print("blockNum,crankNum,blockTime") +#print("crankNum,deliveryNum,elapsed,computrons,follower restart,provision") + +for line in sys.stdin: + d = json.loads(line) + stype = d["type"] + if stype == "cosmic-swingset-begin-block": + blockNum = d["blockHeight"] + blockTime = d["blockTime"] + if stype == "deliver" and not d["replay"]: + vatID = d["vatID"] + kd = d["kd"] + #print(line) + #print(vatID, kd[0],
kd[1]) + if vatID == targetVatID and kd[0] == "message" and kd[1] == targetKref: + body = json.loads(kd[2]["methargs"]["body"]) + method = body[0] + if method == "inbound": + source = body[1][0] + if source == "provision": + crankNum = d["crankNum"] + print("%s,%s,%s" % (blockNum, crankNum, blockTime)) + #print("%s,,,,,0" % crankNum) + diff --git a/packages/SwingSet/misc-tools/follower-run-tools/do-clist.py b/packages/SwingSet/misc-tools/follower-run-tools/do-clist.py new file mode 100755 index 00000000000..7f1ca3b6b73 --- /dev/null +++ b/packages/SwingSet/misc-tools/follower-run-tools/do-clist.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python3 + +# extract the #8400/#8401 counts from kvdata.json +# and the exported-object counts from clist.sqlite + +import os.path, sys, json, sqlite3 + +datafn = sys.argv[1] +clistfn = sys.argv[2] + +with open(datafn) as f: + vats = json.load(f) + +a = 2 + +# total cycles v9.c24 seatHandleToZoeSeatAdmin +# ATOM v29.c12 zcfSeatToSeatHandle +# stATOM v68.c12 +# vaultFactory v48.c12 + +def getC12(name, vatID): + if vatID in vats: + count = vats[vatID]["collections"]["12"]["entries"] + print("{:8s} {:>4s}.c12: {:>7d}".format(name, vatID, count)) + +print("{:8s} {:>4s}.c24: {:>7d}".format("zoe", "v9", vats["v9"]["collections"]["24"]["entries"])) +getC12("vaultF", "v48") +getC12("ATOM", "v29") +getC12("stATOM", "v68") +getC12("stTIA", "v104") +getC12("stOSMO", "v98") +getC12("stkATOM", "v111") + +print() + +# QuotePayments +# ATOM v29.k22 +# scaled-ATOM v46.k22 + +def getK22(name, vatID): + if vatID in vats: + count = vats[vatID]["kinds"]["22"]["defined"] + print("{:12s} {:>4s}.k22: {:>7d}".format(name, vatID, count)) +getK22("ATOM", "v29") +getK22("sPA-ATOM", "v46") +getK22("stATOM", "v68") +getK22("sPA-stATOM", "v69") +getK22("stOSMO", "v98") +getK22("sPA-stOSMO", "v99") +getK22("stTIA", "v104") +getK22("sPA-stTIA", "v105") +getK22("stkATOM", "v111") +getK22("sPA-stkATOM", "v112") + +print() + +assert(os.path.exists(clistfn)) +db = sqlite3.connect(clistfn) +print("exported objects:") +print("v9-zoe", db.execute('SELECT COUNT(*) FROM clist WHERE vatID="v9" AND exported=1').fetchone()[0]) +print("v29-pf-ATOM", db.execute('SELECT COUNT(*) FROM clist WHERE vatID="v29" AND exported=1').fetchone()[0]) +print("v46-sPA-ATOM", db.execute('SELECT COUNT(*) FROM clist WHERE vatID="v46" AND exported=1').fetchone()[0]) +print("v69-sPA-stATOM", db.execute('SELECT COUNT(*) FROM clist WHERE vatID="v69" AND exported=1').fetchone()[0]) +print("v99-sPA-stOSMO", db.execute('SELECT COUNT(*) FROM clist WHERE vatID="v99" AND exported=1').fetchone()[0]) +print("v105-sPA-stTIA", db.execute('SELECT COUNT(*) FROM clist WHERE vatID="v105" AND exported=1').fetchone()[0]) +print("v112-sPA-stkATOM", db.execute('SELECT COUNT(*) FROM clist WHERE vatID="v112" AND exported=1').fetchone()[0]) + +print() +print("imported objects:") +print("v29", db.execute('SELECT COUNT(*) FROM clist WHERE vatID="v29" AND exported=0').fetchone()[0]) +print("v68", db.execute('SELECT COUNT(*) FROM clist WHERE vatID="v68" AND exported=0').fetchone()[0]) +print("v98", db.execute('SELECT COUNT(*) FROM clist WHERE vatID="v98" AND exported=0').fetchone()[0]) +print("v104", db.execute('SELECT COUNT(*) FROM clist WHERE vatID="v104" AND exported=0').fetchone()[0]) +print("v111", db.execute('SELECT COUNT(*) FROM clist WHERE vatID="v111" AND exported=0').fetchone()[0]) + diff --git a/packages/SwingSet/misc-tools/follower-run-tools/process.sh b/packages/SwingSet/misc-tools/follower-run-tools/process.sh new file mode 100755 index
00000000000..5651ccb7b6c --- /dev/null +++ b/packages/SwingSet/misc-tools/follower-run-tools/process.sh @@ -0,0 +1,71 @@ +#!/bin/sh + +# run in the background with: +# cd run-NN +# ../process.sh NN process.out 2>&1 & +# +# if you omit the + +# exit on any error +set -e +# error on undefined variables +set -u +# print command before execution +set -x +# exit on command pipe failure +set -o pipefail + +# $@ is the run number +FULL_COMPRESSED=run-$@-swingstore.sqlite.gz +TMPDB=ss.sqlite +PRUNED=run-$@-swingstore-pruned.sqlite +PRUNED_COMPRESSED=${PRUNED}.gz + +if [ ! -f ${FULL_COMPRESSED} ]; then + echo "missing run-NN-swingstore.sqlite.gz (${FULL_COMPRESSED}), bailing" + exit 1 +fi + +echo "decompressing.." +gzcat ${FULL_COMPRESSED} > ${TMPDB} + +echo "measuring size-full.." +python3 ../size-report.py ${TMPDB} | tee size-full.json + +# this takes several hours +echo "pruning (takes several hours).." +time node ~/stuff/agoric/agoric-sdk/packages/swing-store/misc-tools/prune-transcripts.js ${TMPDB} --vacuum-into ${PRUNED} +rm ${TMPDB} + +echo "measuring size-pruned.." +python3 ../size-report.py ${PRUNED} | tee size-pruned.json + +echo "using categorize-kvstore.js to create kvdata.json.." +node ~/stuff/agoric/agoric-sdk/packages/SwingSet/misc-tools/categorize-kvstore.js --datafile kvdata.json ingest ${PRUNED} + +echo "using extract-clist-db.js to create clist.sqlite.." +node ~/stuff/agoric/agoric-sdk/packages/SwingSet/misc-tools/extract-clist-db.js ${PRUNED} clist.sqlite + +echo "extracting #8400/#8401 counts" +python3 ../do-clist.py kvdata.json clist.sqlite + +echo "extracting whole kvStore" +sqlite3 ${PRUNED} 'select * from kvStore' | sort | gzip > all-kv.txt.gz + +echo "compressing kvdata.json.." +gzip kvdata.json + +echo "compressing clist.sqlite.." +gzip clist.sqlite + +echo "compressing to ${PRUNED_COMPRESSED} .." 
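+# note: gzip removes the uncompressed input once it finishes, so only
+# ${PRUNED_COMPRESSED} remains afterwards; use 'gzip -k' here if the
+# uncompressed pruned DB is still needed locally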
+gzip ${PRUNED} + +echo "Done" diff --git a/packages/SwingSet/misc-tools/follower-run-tools/size-report.py b/packages/SwingSet/misc-tools/follower-run-tools/size-report.py new file mode 100644 index 00000000000..3ca1bf7c03e --- /dev/null +++ b/packages/SwingSet/misc-tools/follower-run-tools/size-report.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 + +import os, sys, json +import sqlite3 + +dbfile = sys.argv[1] +db = sqlite3.connect(dbfile) +def db_exec(stmt): + return db.execute(stmt).fetchone()[0] + +NUM_SNAPSHOTS = "SELECT count(compressedSize) FROM snapshots WHERE compressedSnapshot is not null" +SNAPSHOTS = "SELECT sum(compressedSize) FROM snapshots WHERE compressedSnapshot is not null" + +NUM_KVSTORE = "SELECT count(*) FROM kvStore" +KVSTORE = "SELECT sum(length(key)+length(value)) FROM kvStore" + +NUM_BUNDLES = "SELECT count(bundle) FROM bundles" +BUNDLES = "SELECT sum(length(bundle)) FROM bundles" + +NUM_TRANSCRIPT_SPANS = "SELECT count(*) FROM transcriptSpans" +NUM_TRANSCRIPTS = "SELECT count(*) FROM transcriptItems" +TRANSCRIPTS = "SELECT sum(length(item)) FROM transcriptItems" + +sizes = {} + +sizes["file_size"] = os.stat(dbfile).st_size + +sizes["swingset_snapshot_size"] = db_exec(SNAPSHOTS) # 430ms +sizes["swingset_snapshot_count"] = db_exec(NUM_SNAPSHOTS) # 460ms + +sizes["swingset_kvstore_count"] = db_exec(NUM_KVSTORE) # 95ms +sizes["swingset_kvstore_size"] = db_exec(KVSTORE) # slow + +sizes["swingset_bundle_count"] = db_exec(NUM_BUNDLES) # 21ms +sizes["swingset_bundle_size"] = db_exec(BUNDLES) # 3ms + +sizes["swingset_transcript_span_count"] = db_exec(NUM_TRANSCRIPT_SPANS) # 7ms +sizes["swingset_transcript_count"] = db_exec(NUM_TRANSCRIPTS) # 550ms +sizes["swingset_transcript_size"] = db_exec(TRANSCRIPTS) # slow + +print(json.dumps(sizes, indent=1)) diff --git a/packages/SwingSet/misc-tools/make-tiny-vat.js b/packages/SwingSet/misc-tools/make-tiny-vat.js new file mode 100644 index 00000000000..1c3e22579f0 --- /dev/null +++ b/packages/SwingSet/misc-tools/make-tiny-vat.js @@ -0,0 +1,33 @@ +// @ts-nocheck + +// drop this into agoric-sdk/packages/xsnap/manykeys.js + +import '@endo/init/debug.js'; +import * as proc from 'child_process'; +import * as os from 'os'; +import { xsnap } from '@agoric/xsnap'; + +const snapshot = './tinyvat.snapshot'; +const opts = { + name: 'xsnap test worker', + stderr: 'inherit', + stdout: 'inherit', + spawn: proc.spawn, + os: os.type(), +}; + +async function setup() { + const vat0 = xsnap(opts); + await vat0.evaluate(`globalThis.anchor = { mykey: 4 };`); + await vat0.snapshot(snapshot); + console.log(`wrote snapshot`); + await vat0.close(); +} + +setup().then( + () => console.log(`main complete`), + err => { + console.log(`main error:`); + console.log(err); + }, +); diff --git a/packages/SwingSet/misc-tools/measure-boyd-gc.py b/packages/SwingSet/misc-tools/measure-boyd-gc.py index 6064dd95eeb..c301cb1cce7 100644 --- a/packages/SwingSet/misc-tools/measure-boyd-gc.py +++ b/packages/SwingSet/misc-tools/measure-boyd-gc.py @@ -15,7 +15,7 @@ deliveries = parse_file(fn, vatID) -print("cranknum,deliver_monotime,gc_time") +print("cranknum,deliverynum,deliver_monotime,gc_time") for d in deliveries: if d.vd[0] == "bringOutYourDead": # BOYD does the forced GC between receipt of the delivery and @@ -23,4 +23,4 @@ # results) elapsed = d.firsttime() if elapsed is not None: - print("%d,%.6f,%.6f" % (d.cranknum, d.tx_delivery_monotime, elapsed)) + print("%d,%d,%.6f,%.6f" % (d.cranknum, d.deliverynum, d.tx_delivery_monotime, elapsed)) diff --git 
a/packages/SwingSet/misc-tools/monitor-slog-block-time.py b/packages/SwingSet/misc-tools/monitor-slog-block-time.py index c94bdecc418..3db86c75569 100755 --- a/packages/SwingSet/misc-tools/monitor-slog-block-time.py +++ b/packages/SwingSet/misc-tools/monitor-slog-block-time.py @@ -3,6 +3,9 @@ from collections import defaultdict import subprocess +print("writing to monitor.out") +mf = open("monitor.out", "a", buffering=1) + # run like this: # tail -n 10000 -F chain.slog | python3 monitor-slog-block-time.py [keys.txt] # produces output like: @@ -98,7 +101,15 @@ def load_genesis_keys(fn): def wait_for_block(height): while True: - out = subprocess.check_output(["agd", "status"]).decode() + # TODO: 'agd status' has started failing with messages like: + ## diff: node_modules/.cache/agoric/golang-built.sum.new: No such file or directory + ## golang/cosmos node_modules/.cache/agoric/golang-built.sum has changed + ## mv: cannot stat 'node_modules/.cache/agoric/golang-built.sum.new': No such file or directory + try: + out = subprocess.check_output(["agd", "status"]).decode() + except subprocess.CalledProcessError as e: + time.sleep(1) + continue now = int(json.loads(out)["SyncInfo"]["latest_block_height"]) if now >= height: return @@ -108,7 +119,10 @@ def count_signatures(height): if not do_count_signatures: return (0, 0, 0, "") wait_for_block(height) + #try: out = subprocess.check_output(["agd", "query", "block", str(height)]).decode() + #except subprocess.CalledProcessError as e: + # return (0, 0, 0, "") block = json.loads(out)["block"] sigs = block["last_commit"]["signatures"] twos = len([s for s in sigs if s["block_id_flag"] == 2]) @@ -118,14 +132,19 @@ def count_signatures(height): proposer = block["header"]["proposer_address"] return (twos,threes, rounds, proposer) -head = "- block blockTime lag -> cranks(avg) computrons swingset(avg) + cosmos = proc% (avg) [sigs2/3/tot] rounds proposer" -fmt = " %5d %2d(%4s) %6s -> %4s(%5s) %9d %6s(%6s) + %6s = %4s (%4s) [%3d/%3d/%3d] r%d %s" +head = "- block blockTime lag -> cranks(avg) computrons swingset(avg) + cosmos = proc% (avg) [sigs2/3/tot] rounds proposer" +fmt = " %8d %2d(%4s) %6s -> %4s(%5s) %9d %6s(%6s) + %6s = %4s (%4s) [%3d/%3d/%3d] r%d %s" class Summary: headline_counter = 0 + total_swingset_time = 0 + total_computrons = 0 + total_cranks = 0 + total_blocks = 0 + def summarize(self): if self.headline_counter == 0: - print(head) + print(head, file=mf) self.headline_counter = 20 self.headline_counter -= 1 ( height, cranks, @@ -144,6 +163,12 @@ def summarize(self): avg_swingset_percentage = sum(b[7] for b in recent) / len(recent) moniker = validators.get(sigs[3], "(%s)" % sigs[3][:6]) + self.total_swingset_time += swingset_time + self.total_computrons += computrons + if cranks: + self.total_cranks += cranks + self.total_blocks += 1 + print(fmt % (height, chain_block_time, "%2.1f" % avg_chain_block_time, abbrev(lag), @@ -154,7 +179,13 @@ def summarize(self): perc(proc_frac), abbrev1(100.0 * avg_proc_frac), sigs[0], sigs[1], sigs[0]+sigs[1], sigs[2], moniker, - )) + ), file=mf) + + def finish(self): + print("-- total swingset time: %s" % (abbrev(self.total_swingset_time))) + print("-- total computrons: %d" % self.total_computrons) + print("-- total cranks: %d" % self.total_cranks) + print("-- total blocks: %d" % self.total_blocks) s = Summary() @@ -171,7 +202,7 @@ def summarize(self): if data["type"] == "deliver-result": mr = data["dr"][2]; if mr and "compute" in mr: - computrons += mr["compute"] + computrons += mr["compute"] # TODO: includes replay? 
mismatch block 16543830 if data["type"] in ["cosmic-swingset-begin-block", "cosmic-swingset-end-block-start", "cosmic-swingset-end-block-finish"]: @@ -201,10 +232,11 @@ def summarize(self): if "last-crank" in blocks[height-1]: cranks = blocks[height]["last-crank"] - blocks[height-1]["last-crank"] chain_block_time = blocks[height]["blockTime"] - blocks[height-1]["blockTime"] - sigs = count_signatures(height) + sigs = count_signatures(height) recent_blocks.append([ height, cranks, block_time, proc_frac, cosmos_time, chain_block_time, swingset_time, swingset_percentage, lag, computrons, sigs]) s.summarize() +s.finish() diff --git a/packages/SwingSet/misc-tools/my-build-mezzanine-db.js b/packages/SwingSet/misc-tools/my-build-mezzanine-db.js new file mode 100644 index 00000000000..1da75ab442b --- /dev/null +++ b/packages/SwingSet/misc-tools/my-build-mezzanine-db.js @@ -0,0 +1,650 @@ +/* eslint-disable */ +// @ts-nocheck +// import '@endo/init'; +// import { E } from '@endo/far'; +import fs from 'fs'; +import zlib from 'zlib'; +import process from 'process'; +import readline from 'readline'; + +import sqlite3 from 'better-sqlite3'; + +function makeDB(dbfn) { + const db = sqlite3(dbfn); + const indexes = []; + const sql = { db }; + db.exec(`PRAGMA journal_mode=WAL`); + + db.exec(`CREATE TABLE block ( +blockNum INTEGER, -- or 'bootstrap' +blockTime INTEGER, +compute_time FLOAT, -- cosmic-swingset-{begin,end}-block : includes cosmos-sdk time +swingset_time FLOAT -- cosmic-swingset-end-block-{start,finish} : only swingset time +-- PRIMARY KEY (blockNum) -- INTEGER or STRING, so cannot be PRIMARY KEY +)`); + sql.addBlock = db.prepare( + `INSERT INTO block VALUES (@blockNum, @blockTime, @compute_time, @swingset_time)`, + ); + indexes.push(`CREATE INDEX block_blockNum ON block (blockNum)`); + + db.exec(`CREATE TABLE run ( +blockNum INTEGER, +runNum INTEGER, +time FLOAT, -- cosmic-swingset-run-{start,finish} +usedBeans INTEGER, +remainingBeans INTEGER +-- PRIMARY KEY (blockNum, runNum) -- same +)`); + sql.addRun = db.prepare( + `INSERT INTO run VALUES (@blockNum, @runNum, @time, @usedBeans, @remainingBeans)`, + ); + indexes.push(`CREATE INDEX run_blockNum ON run (blockNum)`); + + ///// all deliveries + db.exec(`CREATE TABLE delivery ( +blockNum INTEGER, +runNum INTEGER, +crankNum INTEGER, +vatID STRING, +deliveryNum INTEGER, +kd_json STRING, +deliver_ok BOOLEAN, +time FLOAT, -- deliver to deliver-result +meterType STRING, +computrons INTEGER +-- PRIMARY KEY (crankNum) +)`); + // we duplicate crankNums: https://github.com/Agoric/agoric-sdk/issues/8264 + // so we can't use PRIMARY KEY (crankNum), but instead add a (non-UNIQUIE) index + indexes.push(`CREATE INDEX delivery_crankNum ON delivery (crankNum)`); + indexes.push( + `CREATE INDEX delivery_blockNum_runNum ON delivery (blockNum, runNum)`, + ); + indexes.push( + `CREATE INDEX delivery_vatID_deliveryNum ON delivery (vatID, deliveryNum)`, + ); + sql.addDelivery = db.prepare( + `INSERT INTO delivery VALUES ( + @blockNum, @runNum, + @crankNum, @vatID, @deliveryNum, + @kd_json, @deliver_ok, @time, + @meterType, @computrons)`, + ); + + // dispatch.deliver, indexed by methodname and result_kpid + db.exec(`CREATE TABLE delivery_message ( +blockNum INTEGER, +runNum INTEGER, +crankNum INTEGER, +vatID STRING, +deliveryNum INTEGER, +target_kref STRING, +methname STRING, +result_kpid STRING +)`); + indexes.push( + `CREATE INDEX delivery_message_methname ON delivery_message (methname)`, + ); + indexes.push( + `CREATE INDEX delivery_message_result ON 
delivery_message (result_kpid)`, + ); + indexes.push( + `CREATE INDEX delivery_message_crankNum ON delivery_message (crankNum)`, + ); + sql.addDispatchDeliver = db.prepare( + `INSERT INTO delivery_message VALUES ( + @blockNum, @runNum, + @crankNum, @vatID, @deliveryNum, + @target_kref, @methname, @result_kpid + )`, + ); + + // dispatch.notify, indexed by kpid + db.exec(`CREATE TABLE delivery_notify ( +blockNum INTEGER, +runNum INTEGER, +crankNum INTEGER, +vatID STRING, +deliveryNum INTEGER, +kpid STRING +)`); + indexes.push(`CREATE INDEX delivery_notify_kpid ON delivery_notify (kpid)`); + sql.addDispatchNotify = db.prepare( + `INSERT INTO delivery_notify VALUES ( + @blockNum, @runNum, + @crankNum, @vatID, @deliveryNum, + @kpid + )`, + ); + + // deliveries which target or mention krefs + db.exec(`CREATE TABLE delivery_mention ( +blockNum INTEGER, +runNum INTEGER, +crankNum INTEGER, +vatID STRING, +deliveryNum INTEGER, +kref STRING +)`); + indexes.push(`CREATE INDEX delivery_mention_kref ON delivery_mention (kref)`); + sql.addDispatchMention = db.prepare( + `INSERT INTO delivery_mention VALUES ( + @blockNum, @runNum, + @crankNum, @vatID, @deliveryNum, + @kref + )`, + ); + + ///// all syscalls + db.exec(`CREATE TABLE syscall ( +blockNum INTEGER, +runNum INTEGER, +crankNum INTEGER, +vatID STRING, +deliveryNum INTEGER, +syscallNum INTEGER, +type STRING, +ksc_json STRING, +result_ok STRING, +result_json STRING +)`); + indexes.push( + `CREATE INDEX syscall_blockNum_runNum ON syscall (blockNum, runNum)`, + ); + indexes.push(`CREATE INDEX syscall_crankNum ON syscall (crankNum)`); + indexes.push( + `CREATE INDEX syscall_vatID_deliveryNum ON syscall (vatID, deliveryNum)`, + ); + sql.addSyscall = db.prepare( + `INSERT INTO syscall VALUES ( +@blockNum, @runNum, @crankNum, @vatID, @deliveryNum, @syscallNum, +@type, @ksc_json, @result_ok, @result_json +)`, + ); + + // syscall.sends, indexed by methodname and result_kpid + db.exec(`CREATE TABLE syscall_message ( +blockNum INTEGER, +runNum INTEGER, +crankNum INTEGER, +vatID STRING, +deliveryNum INTEGER, +syscallNum INTEGER, +target_kref STRING, +methname STRING, +result_kpid STRING +)`); + indexes.push( + `CREATE INDEX syscall_message_methname ON syscall_message (methname)`, + ); + indexes.push( + `CREATE INDEX syscall_message_result ON syscall_message (result_kpid)`, + ); + sql.addSyscallSend = db.prepare( + `INSERT INTO syscall_message VALUES ( + @blockNum, @runNum, + @crankNum, @vatID, @deliveryNum, + @syscallNum, @target_kref, @methname, @result_kpid + )`, + ); + + // syscall.resolve, indexed by kpid + db.exec(`CREATE TABLE syscall_resolve ( +blockNum INTEGER, +runNum INTEGER, +crankNum INTEGER, +vatID STRING, +deliveryNum INTEGER, +syscallNum INTEGER, +kpid STRING +)`); + indexes.push(`CREATE INDEX syscall_resolve_kpid ON syscall_resolve (kpid)`); + sql.addSyscallResolve = db.prepare( + `INSERT INTO syscall_resolve VALUES ( + @blockNum, @runNum, + @crankNum, @vatID, @deliveryNum, + @syscallNum, @kpid + )`, + ); + + // syscall which target or mention a kref + db.exec(`CREATE TABLE syscall_mention ( +blockNum INTEGER, +runNum INTEGER, +crankNum INTEGER, +vatID STRING, +deliveryNum INTEGER, +syscallNum INTEGER, +kref STRING +)`); + indexes.push(`CREATE INDEX syscall_mention_kref ON syscall_mention (kref)`); + sql.addSyscallMention = db.prepare( + `INSERT INTO syscall_mention VALUES ( + @blockNum, @runNum, + @crankNum, @vatID, @deliveryNum, + @syscallNum, @kref + )`, + ); + + ///// current data + db.exec(`CREATE TABLE promise_decider (kpid STRING, decider 
STRING)`); + sql.addPromiseDecider = db.prepare( + `INSERT INTO promise_decider VALUES (@kpid, @decider)`, + ); + + db.exec(`CREATE TABLE promise_subscriber (kpid STRING, subscriber STRING)`); + sql.addPromiseSubscriber = db.prepare( + `INSERT INTO promise_subscriber VALUES (@kpid, @subscriber)`, + ); + + return { db, sql, indexes }; +} + +function copySwingStore(sql, swingstore_dbfn) { + const ssdb = sqlite3(swingstore_dbfn); + const deciders = ssdb.prepare( + "SELECT * FROM kvStore WHERE key LIKE 'kp%.decider'", + ); + for (const row of deciders.iterate()) { + const [kpid, _] = row.key.split('.'); + const decider = row.value; + sql.addPromiseDecider.run({ kpid, decider }); + } + const subscribers = ssdb.prepare( + "SELECT * FROM kvStore WHERE key LIKE 'kp%.subscribers'", + ); + for (const row of subscribers.iterate()) { + const [kpid, _] = row.key.split('.'); + for (const subscriber of row.value.split(',')) { + sql.addPromiseSubscriber.run({ kpid, subscriber }); + } + } +} + +function createIndexes(db, indexes) { + for (const index of indexes) { + db.exec(index); + } +} + +const td = new TextDecoder(); +function decode(a) { + return td.decode(a); +} +const NL = '\n'.codePointAt(0); + +async function* lineReader(s) { + let buffer = new Uint8Array(0); + let offset = 0; + for await (const chunk of s) { + console.log(`chunk is`, chunk?.length); //decode(chunk)); + const leftover = buffer; + const leftover_length = leftover.length - offset; + buffer = new Uint8Array(leftover_length + chunk.length); + //console.log(`leftover: ${offset}+${leftover_length} + buffer.set(leftover.slice(offset), 0); + buffer.set(chunk, leftover_length); + offset = 0; + for (;;) { + const nl = buffer.indexOf(NL, offset); + console.log(`found`, offset, nl, buffer.length); + if (nl === -1) break; + const s = decode(buffer.slice(offset, nl)); + console.log(`s is`, offset, nl, s); + offset = nl + 1; + } + console.log(` getting next chunk`); + } +} + +function extractSmallcaps(methargs_smallcaps) { + const { body, slots } = methargs_smallcaps; + if (body[0] !== '#') { + throw Error('ersatz decoder only handles smallcaps'); + } + const methargs = JSON.parse(body.slice(1)); + const methname = methargs[0]; + return { methname, slots }; +} + +async function processFile(fn, sql, commitAndReopenTransaction) { + console.log(`processFile`, fn); + let s = fs.createReadStream(fn); + if (fn.endsWith('.gz')) { + s = s.pipe(zlib.createGunzip()); + } + const lines = readline.createInterface({ input: s }); + let lino = 0; + let blockNum; + let blockTime; + let blockStartTime; + let blockSwingsetStartTime; + let runStartTime; + let runNum; + let delivery; + let deliveryStartTime; + let syscall; + for await (const line of lines) { + lino += 1; + //if (lino > 10000) break; + const data = JSON.parse(line); + + if (data.replay) { + continue; + } + + if (data.type === 'cosmic-swingset-bootstrap-block-start') { + blockNum = 'bootstrap'; + blockTime = data.blockTime; + blockStartTime = data.time; + runNum = 0; + } + if (data.type === 'cosmic-swingset-bootstrap-block-finish') { + const compute_time = data.time - blockStartTime; + const swingset_time = compute_time; + sql.addBlock.run({ blockNum, blockTime, compute_time, swingset_time }); + blockNum = undefined; + blockTime = undefined; + blockStartTime = undefined; + } + if (data.type === 'cosmic-swingset-begin-block') { + blockNum = data.blockHeight; + blockTime = data.blockTime; + blockStartTime = data.time; + if (blockNum % 1000 === 0) { + console.log(`blockHeight`, blockNum); + } + if 
(blockNum % 20000 === 0) { + console.log(`committing`); + commitAndReopenTransaction(); + } + } + if (data.type === 'cosmic-swingset-end-block-start') { + blockSwingsetStartTime = data.time; + } + if (data.type === 'cosmic-swingset-end-block-finish') { + const compute_time = data.time - blockStartTime; + const swingset_time = data.time - blockSwingsetStartTime; + sql.addBlock.run({ blockNum, blockTime, compute_time, swingset_time }); + blockNum = undefined; + blockTime = undefined; + blockStartTime = undefined; + blockSwingsetStartTime = undefined; + } + + if (data.type === 'cosmic-swingset-run-start') { + runNum = data.runNum; + runStartTime = data.time; + } + if (data.type === 'cosmic-swingset-run-finish') { + const time = data.time - runStartTime; + const { usedBeans, remainingBeans } = data; + sql.addRun.run({ blockNum, runNum, time, usedBeans, remainingBeans }); + runNum = undefined; + runStartTime = undefined; + } + + if (data.type === 'deliver') { + delivery = data; + deliveryStartTime = data.time; + continue; + } + + if (data.type === 'syscall') { + syscall = data; + } + + if (data.type === 'syscall-result') { + const { crankNum, vatID, deliveryNum } = delivery; + const { syscallNum, ksc } = syscall; + syscall = undefined; + const { ksr } = data; + const type = ksc[0]; + const [result_ok_s, result_value_data] = ksr; + const result_ok = result_ok_s === 'ok' ? 1 : 0; // sqlite hates booleans + const ksc_json = JSON.stringify(ksc); + // syscall.invoke gets capdata in result_value + const result_json = JSON.stringify(result_value_data); + sql.addSyscall.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + syscallNum, + type, + ksc_json, + result_ok, + result_json, + }); + if (type === 'send') { + const target_kref = ksc[1]; + const { methargs, result: result_kpid } = ksc[2]; + const { methname, slots } = extractSmallcaps(methargs); + sql.addSyscallSend.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + syscallNum, + target_kref, + methname, + result_kpid, + }); + sql.addSyscallMention.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + syscallNum, + kref: target_kref, + }); + if (result_kpid) { + sql.addSyscallMention.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + syscallNum, + kref: result_kpid, + }); + } + for (const kref of slots) { + sql.addSyscallMention.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + syscallNum, + kref, + }); + } + } + if (type === 'resolve') { + for (const resolution of ksc[2]) { + const [kpid, rejected, data] = resolution; + const { slots } = extractSmallcaps(data); + sql.addSyscallResolve.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + syscallNum, + kpid, + }); + sql.addSyscallMention.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + syscallNum, + kref: kpid, + }); + for (const kref of slots) { + sql.addSyscallMention.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + syscallNum, + kref, + }); + } + } + } + if (type === 'subscribe') { + const kpid = ksc[2]; + sql.addSyscallMention.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + syscallNum, + kref: kpid, + }); + } + + // TODO: track syscall.invoke krefs in both args and results, in syscall_mention + // TODO: track GC mentions in a separate table + // TODO: track syscall.exit mentions in a separate table + } + + if (data.type === 'deliver-result') { + const { kd, crankNum, vatID, deliveryNum } = delivery; + const time = data.time - deliveryStartTime; + delivery = 
undefined; + deliveryStartTime = undefined; + const { dr } = data; + // BOYD dr[1] is a stats object, so skip deliver_error + const [deliver_ok_s, deliver_error, deliver_meter] = dr; + const deliver_ok = deliver_ok_s === 'ok' ? 1 : 0; // sqlite hates booleans + const { meterType, compute: computrons } = deliver_meter || {}; + const kd_json = JSON.stringify(kd); + sql.addDelivery.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + kd_json, + deliver_ok, + time, + meterType, + computrons, + }); + if (kd[0] === 'message') { + const target_kref = kd[1]; + const { methargs, result: result_kpid } = kd[2]; + const { methname, slots } = extractSmallcaps(methargs); + sql.addDispatchDeliver.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + target_kref, + methname, + result_kpid, + }); + sql.addDispatchMention.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + kref: target_kref, + }); + if (result_kpid) { + sql.addDispatchMention.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + kref: result_kpid, + }); + } + for (const kref of slots) { + sql.addDispatchMention.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + kref, + }); + } + } + if (kd[0] === 'notify') { + for (const resolution of kd[1]) { + const [kpid, { state, data: resdata }] = resolution; + sql.addDispatchNotify.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + kpid, + }); + sql.addDispatchMention.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + kref: kpid, + }); + const { slots } = extractSmallcaps(resdata); + for (const kref of slots) { + sql.addDispatchMention.run({ + blockNum, + runNum, + crankNum, + vatID, + deliveryNum, + kref, + }); + } + } + } + } + } +} + +async function run() { + const args = process.argv.slice(2); + const [dbfn, swingstore_dbfn, ...slogfiles] = args; + + const { db, sql, indexes } = makeDB(dbfn); + db.prepare('BEGIN IMMEDIATE TRANSACTION').run(); + + function commitAndReopenTransaction() { + db.prepare('COMMIT').run(); + db.prepare('BEGIN IMMEDIATE TRANSACTION').run(); + } + + console.log(`copying current swingstore data..`); + await copySwingStore(sql, swingstore_dbfn); + + for (const sffn of slogfiles) { + await processFile(sffn, sql, commitAndReopenTransaction); + } + commitAndReopenTransaction(); + + console.log(`creating indexes..`); + createIndexes(db, indexes); + db.prepare('COMMIT').run(); + + console.log(`performing wal_checkpoint`); + db.prepare('PRAGMA wal_checkpoint(FULL)').run(); +} + +run().catch(err => console.log('err', err)); diff --git a/packages/SwingSet/misc-tools/parse_timestamps.py b/packages/SwingSet/misc-tools/parse_timestamps.py index 735456c9989..d0cbc9b8773 100644 --- a/packages/SwingSet/misc-tools/parse_timestamps.py +++ b/packages/SwingSet/misc-tools/parse_timestamps.py @@ -1,4 +1,4 @@ -import json, gzip, re, time +import sys, json, gzip, re, time # Library for use by correlate_timestamps.py and friends, to extract # and correlate timestamps. 
Returns a list of Deliveries, each with @@ -16,7 +16,7 @@ def __init__(self, vsc, num, rx_request): target = vsc[1] msg = vsc[2] methargsCD = msg["methargs"] - method = json.loads(methargsCD["body"])[0] + method = json.loads(methargsCD["body"][1:])[0] result = msg.get("result") self.description = "(%s).%s -> %s" % (target, method, result) elif vsc[0] == "callNow": @@ -85,9 +85,12 @@ def kerneltime(self): return 0 class Delivery: - def __init__(self, vatID, cranknum, vd, when, monotime): + def __init__(self, vatID, cranknum, deliverynum, vd, when, monotime, blockheight, blocktime): self.vatID = vatID self.cranknum = cranknum + self.deliverynum = deliverynum + self.blockheight = blockheight + self.blocktime = blocktime self.syscalls = [] self.consoles = [] self.events = [] # includes both @@ -96,12 +99,13 @@ def __init__(self, vatID, cranknum, vd, when, monotime): self.rx_delivery = None self.tx_result = None self.rx_result = None + self.computrons = None self.vd = vd if vd[0] == "message": target = vd[1] msg = vd[2] methargsCD = msg["methargs"] - method = json.loads(methargsCD["body"])[0] + method = json.loads(methargsCD["body"][1:])[0] result = msg.get("result") self.description = "(%s).%s -> %s" % (target, method, result) elif vd[0] == "notify": @@ -132,6 +136,48 @@ def set_rx_delivery(self, when): self.rx_delivery = when def set_tx_result(self, when): self.tx_result = when + def set_computrons(self, computrons): + self.computrons = computrons + + def complete(self): + # figure out total_pipe, total_worker, total_kernel, total (k->k) + total_pipe = 0 + total_worker = 0 + total_kernel = 0 + if self.rx_delivery: + # k -> w: sending the delivery + total_pipe += (self.rx_delivery - self.tx_delivery) + # else unknown + prev_event = None + for event in self.events: + if not prev_event and self.rx_delivery and event.tx_request: + # work done after receiving delivery, before first syscall + total_worker += (event.tx_request - self.rx_delivery) + if prev_event and prev_event.rx_result and event.tx_request: + # work done between syscalls + total_worker += (event.tx_request - prev_event.rx_result) + prev_event = event + if event.tx_request and event.rx_result: + # from sending a syscall to receiving the result, we + # have pipe send, kernel time, pipe receive + total_pipe += (event.rx_request - event.tx_request) + total_kernel += (event.tx_result - event.rx_request) + total_pipe += (event.rx_result - event.tx_result) + # else unknown + if prev_event and self.tx_result: + # work done after the last syscall, before sending results + total_worker += (self.tx_result - prev_event.rx_result) + # else unknown + if not prev_event and self.tx_result: + # no syscalls, just worker time + total_worker += (self.tx_result - self.rx_delivery) + if self.tx_result: + # sending results to kernel counts towards pipe time + total_pipe += (self.rx_result - self.tx_result) + self.total_pipe = total_pipe + self.total_worker = total_worker + self.total_kernel = total_kernel + self.k_to_k = self.rx_result - self.tx_delivery def firsttime(self): # time between receipt of delivery and (first syscall) or (results) @@ -149,9 +195,12 @@ def pipetime(self): +(self.rx_result - self.tx_result)) return None -def parse_file(fn, vatID): +def stream_file(fn, vatID): deliveries = [] + blockheight = None + blocktime = None opener = gzip.open if fn.endswith(".gz") else open + opener = (lambda fn: sys.stdin) if fn == "-" else opener with opener(fn) as f: for line in f: if isinstance(line, bytes): @@ -163,22 +212,35 @@ def parse_file(fn, 
vatID): if vatID and data.get("vatID") != vatID: continue type = data["type"] + if type == "cosmic-swingset-bootstrap-block-start": + blockheight = 0 + blocktime = data["blockTime"] + if type == 'cosmic-swingset-begin-block': + blockheight = data["blockHeight"] + blocktime = data["blockTime"] + if type == 'cosmic-swingset-end-block-finish': + blockheight = None + blocktime = None + if vatID and data.get("vatID") != vatID: + continue + if data.get("replay") == True: + continue when = data["time"] if type == 'deliver': cranknum = data["crankNum"] deliverynum = data["deliveryNum"] monotime = data["monotime"] - d = Delivery(data.get("vatID"), cranknum, data["vd"], when, monotime) + d = Delivery(data.get("vatID"), cranknum, deliverynum, data["vd"], when, monotime, blockheight, blocktime) deliveries.append(d) - if type == "console": + if type == "console" and deliveries: deliveries[-1].add_console(when) - if type == 'syscall': + if type == 'syscall' and deliveries: syscallNum = data['syscallNum'] deliveries[-1].add_syscall(data['vsc'], syscallNum, when) - if type == 'syscall-result': + if type == 'syscall-result' and deliveries: deliveries[-1].get_syscall(-1).got_result(when) - if type == 'deliver-result': + if type == 'deliver-result' and deliveries: deliveries[-1].set_rx_result(when) dr = data["dr"] mr = dr[2] @@ -196,4 +258,11 @@ def parse_file(fn, vatID): event.set_rx_result(timestamps.pop(0)) if len(timestamps): d.set_tx_result(timestamps.pop(0)) - return deliveries + if mr and "compute" in dr[2]: + deliveries[-1].set_computrons(dr[2]["compute"]) + d.complete() + yield deliveries[-1] + deliveries.pop() + +def parse_file(fn, vatID): + return list(stream_file(fn, vatID)) diff --git a/packages/SwingSet/misc-tools/perc100.py b/packages/SwingSet/misc-tools/perc100.py new file mode 100644 index 00000000000..3bf00b832d0 --- /dev/null +++ b/packages/SwingSet/misc-tools/perc100.py @@ -0,0 +1,25 @@ +import csv +import sys +import itertools +import statistics + +SIZE = 1000 + +print("first_cranknum,deliver_median,gc_p10,gc_p50,gc_p90") +out = [] +with open(sys.argv[1]) as f: + r = csv.reader(f) + headers = next(r) + while True: + rows = list(itertools.islice(r, 0, SIZE)) + cranknums = [int(row[0]) for row in rows] + if len(cranknums) < SIZE: + break + deliver_monotimes = [float(row[1]) for row in rows] + deliver_median = statistics.quantiles(deliver_monotimes, n=2)[0] + gc_times = [float(row[2]) for row in rows] + quantiles = statistics.quantiles(gc_times, n=10) + p10 = quantiles[0] + p50 = quantiles[4] + p90 = quantiles[8] + print("%d,%f,%f,%f,%f" % (cranknums[0],deliver_median,p10,p50,p90)) diff --git a/packages/SwingSet/misc-tools/prune-transcripts.js b/packages/SwingSet/misc-tools/prune-transcripts.js new file mode 100644 index 00000000000..1619c9baab0 --- /dev/null +++ b/packages/SwingSet/misc-tools/prune-transcripts.js @@ -0,0 +1,74 @@ +// @ts-nocheck +import '@endo/init'; +import { performance } from 'perf_hooks'; +import fs from 'fs'; +import sqlite3 from 'better-sqlite3'; + +// Given a swingstore.sqlite filename, prune all the old (historical, +// not-current-span) transcript entries from it. Note that this is destructive. +// +// With --vacuum-into FN, it will then produce a new DB that has all +// the same (pruned) data, but is a lot smaller. 
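+//
+// Example invocation (a sketch; since pruning is destructive, this assumes
+// you are working on a copy of the swingstore, not the DB a running node
+// is using; the paths are placeholders):
+//
+//   node prune-transcripts.js ./copy-of-swingstore.sqlite \
+//     --vacuum-into ./swingstore-pruned.sqlite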
+ +let [dbfn, ...args] = process.argv.slice(2); +let intofn; +if (args.length >= 2) { + if (args[0] === '--vacuum-into') { + intofn = args[1]; + args = args.slice(2); + } +} +if (args.length) { + throw Error(`unknown args: ${args.join(' ')}`); +} + +const ssdb = sqlite3(dbfn); + +let per; +const sql_count = ssdb.prepare( + 'SELECT COUNT(*) FROM transcriptItems WHERE vatID=? AND position Number(a.vatID.slice(1)) - Number(b.vatID.slice(1))); + +for (const data of allVats) { + const { vatID, startPos } = data; + const count = sql_count.get(vatID, startPos); + if (count === 0) { + console.log(`skipping vat ${vatID}, already pruned`); + continue; + } + let expected = '?'; + if (per !== undefined) { + expected = `${Math.floor(1000 * per * startPos) / 1000} s`; + } + console.log(`pruning vat ${vatID}: count = ${count}, eta ${expected}`); + const start = performance.now() / 1000; + sql_del.run(vatID, startPos); + const finish = performance.now() / 1000; + const elapsed = finish - start; + const new_per = elapsed / count; + if (per === undefined) { + per = new_per; + } + per = (per + new_per) / 2; + console.log( + ` took ${Math.floor(1000 * elapsed) / 1000} s, ${Math.floor( + per * 1e6, + )} us/item`, + ); +} +console.log(`pruning complete`); + +if (intofn) { + console.log(`vacuuming, takes a minute or two`); + ssdb.prepare(`VACUUM INTO '${intofn}'`).run(); + console.log(`vacuum complete: ${intofn}`); +} diff --git a/packages/SwingSet/misc-tools/push-price-times.js b/packages/SwingSet/misc-tools/push-price-times.js new file mode 100644 index 00000000000..102832f63d4 --- /dev/null +++ b/packages/SwingSet/misc-tools/push-price-times.js @@ -0,0 +1,26 @@ +// @ts-nocheck +import sqlite3 from 'better-sqlite3'; + +const db = sqlite3('mezzanine.sqlite'); +let one = `SELECT DISTINCT blockNum,runNum FROM delivery WHERE kd_json LIKE '%PushPrice%'`; +// one = one + ' LIMIT 10'; +one = db.prepare(one); +const two = db.prepare( + `SELECT * FROM run WHERE blockNum=@blockNum AND runNum=@runNum`, +); +const times = []; +for (const row of one.iterate()) { + const brow = two.get({ blockNum: row.blockNum, runNum: row.runNum }); + // console.log(brow.blockNum, brow.runNum, brow.time, brow.usedBeans); + times.push(brow.time); +} + +times.sort(); + +const sum = times.reduce((a, b) => a + b); +console.log(`count:`, times.length); +console.log(`avg :`, sum / times.length); +console.log(`med :`, times[Math.floor(0.5 * times.length)]); +console.log(`90p :`, times[Math.floor(0.9 * times.length)]); +console.log(`95p :`, times[Math.floor(0.95 * times.length)]); +console.log(`99p :`, times[Math.floor(0.9 * times.length)]); diff --git a/packages/SwingSet/misc-tools/run-in-xs-supervisor.js b/packages/SwingSet/misc-tools/run-in-xs-supervisor.js new file mode 100644 index 00000000000..ff73dc5c6bc --- /dev/null +++ b/packages/SwingSet/misc-tools/run-in-xs-supervisor.js @@ -0,0 +1,69 @@ +// @ts-nocheck +/* eslint-disable */ +/* global globalThis */ + +import { assert, details as X } from '@endo/errors'; +import { importBundle } from '@endo/import-bundle'; + +const console = { + log(...args) { + print(JSON.stringify(args)); + }, +}; + +const encoder = new TextEncoder(); +const decoder = new TextDecoder(); + +function decode(msg) { + return JSON.parse(decoder.decode(msg)); +} + +function encode(item) { + return encode.encode(JSON.stringify(iterm)).buffer; +} + +const managerPort = { + send: item => { + globalThis.issueCommand(encode(item)); + }, + call: item => { + decode(globalThis.issueCommand(encode(item))); + }, +}; + +const 
workerPort = async msg => { + const [tag, ...args] = msg; + switch (tag) { + case 'importBundle': { + const bundle = args[0]; + const endowments = { + console, + assert, + HandledPromise, + TextEncoder, + TextDecoder, + Base64, + }; + const ns = await importBundle(bundle, { endowments }); + return ['importBundle', 'ok']; + } + default: { + console.log('handleItem: bad tag', tag, args.length); + return ['bad tag', tag]; + } + } +}; + +globalThis.handleCommand = msg => { + // The xsnap handleCommand is sync and expects a report={result} + // object, but doesn't extract .result until the runLoop is idle, + // which lets the real supervisor-subprocess-xsnap.js shoehorn an + // async worker.handleItem() in. Here, we just use issueCommand to + // get the workerPort() result back to the calling process, and give + // an empty one back to handleCommand. + workerPort(decode(msg)).then(result => + globalThis.issueCommand(encode({ result })), + ); + const report = { result: 'done' }; + return report; +}; diff --git a/packages/SwingSet/misc-tools/run-in-xs.js b/packages/SwingSet/misc-tools/run-in-xs.js new file mode 100644 index 00000000000..3e1e808359a --- /dev/null +++ b/packages/SwingSet/misc-tools/run-in-xs.js @@ -0,0 +1,55 @@ +// @ts-nocheck +/* eslint-disable */ +import '@endo/init'; +import { E } from '@endo/eventual-send'; +import { Far } from '@endo/marshal'; +import { makePromiseKit } from '@endo/promise-kit'; +import bundleSource from '@endo/bundle-source'; +import { getLockdownBundle } from '@agoric/xsnap-lockdown'; +import { spawn } from 'child_process'; +import { xsnap, recordXSnap } from '@agoric/xsnap'; +import { type as osType } from 'os'; + +async function run() { + const args = process.argv.slice(2); + if (args.length < 1) { + console.log(`run-in-xs.js entrypoint.js`); + return; + } + const entrypoint = args[0]; + // const bundle = await bundleSource(entrypoint); + + const srcGE = rel => + bundleSource(new URL(rel, import.meta.url).pathname, 'getExport'); + const lockdown = await getLockdownBundle(); + const supervisor = await srcGE('./run-in-xs-supervisor.js'); + const xsnapOpts = { + os: osType(), + spawn, + stdout: 'inherit', + stderr: 'inherit', + debug: false, + }; + const name = 'worker'; + + const encoder = new TextEncoder(); + const decoder = new TextDecoder(); + async function handleCommand(msg) { + // parentLog('handleCommand', { length: msg.byteLength }); + // const tagged = handleUpstream(JSON.parse(decoder.decode(msg))); + console.log(`worker:`, decoder.decode(msg)); + const tagged = []; + return encoder.encode(JSON.stringify(tagged)); + } + + const worker = xsnap({ handleCommand, name, meteringLimit: 0, ...xsnapOpts }); + await worker.evaluate(`(${lockdown.source}\n)()`.trim()); + await worker.evaluate(`(${supervisor.source}\n)()`.trim()); + + // const result = await worker.issueStringCommand(JSON.stringify(['command'])) + + console.log(`closing worker`); + await worker.close().then(_ => undefined); +} + +run(); diff --git a/packages/SwingSet/misc-tools/scan-abandoned-unreachables.py b/packages/SwingSet/misc-tools/scan-abandoned-unreachables.py new file mode 100644 index 00000000000..cbf75b604e1 --- /dev/null +++ b/packages/SwingSet/misc-tools/scan-abandoned-unreachables.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 + +import sys, re + +# Pass the output of `select * from kvStore` into stdin. This will emit a list +# of krefs which are both: +# * not owned by any vat (i.e. they were abandoned by termination or upgrade) +# * not reachable by any vat (i.e. 
they are merely-recognizable == weakly-referenced) +# +# These are the krefs that https://github.com/Agoric/agoric-sdk/issues/7212 is +# about retiring. + +ko_re = re.compile(r"^(ko\d+)\.(\w+)$") +owners = {} +reachable = set() +recognizable = set() +all_krefs = set() + +for line in sys.stdin: + key,value = line.strip().split("|", maxsplit=1) + mo = ko_re.search(key) + if mo: + kref, sub = mo.groups() + all_krefs.add(kref) + if sub == "owner": + owners[kref] = value + if sub == "refCount": + reach_count, recognize_count = map(int, value.split(",")) + if reach_count: + reachable.add(kref) + if recognize_count: + recognizable.add(kref) + +print("%d krefs" % len(all_krefs)) +print("%d owned" % len(owners)) +print("%d recognizable" % len(recognizable)) +print("%d reachable" % len(reachable)) + +abandoned_unreachable = set() +for kref in all_krefs: + if kref not in owners and kref not in reachable: + abandoned_unreachable.add(kref) +print("%d abandoned unreachable" % len(abandoned_unreachable)) +if len(abandoned_unreachable) < 10: + print(abandoned_unreachable) + +# as of run-17 (dec-2023), only ko111 (v9.c.ko111|_ o-54) is in this state diff --git a/packages/SwingSet/misc-tools/short-block-times.py b/packages/SwingSet/misc-tools/short-block-times.py new file mode 100644 index 00000000000..ceeaae05b8e --- /dev/null +++ b/packages/SwingSet/misc-tools/short-block-times.py @@ -0,0 +1,38 @@ +# feed this the output of block-times.py + +# assume the chain performs cycles of (idle, busy, recovering), where +# 'idle' and 'recovering' have computrons=0, 'budy' has elevated block +# times, and 'recovering' has depressed block times. Also assume that +# the bootstrap block (not included in the output of block-times.py) +# is "busy", so we start out in the 'recovering' state. 
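+#
+# usage sketch (this assumes block-times.py emits its CSV on stdout, with
+# at least 'height', 'computrons', and 'block_time' columns, which is what
+# the loop below reads):
+#
+#   python3 block-times.py ... | python3 short-block-times.py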
+ +import sys, csv +(RECOVERING, IDLE, BUSY, POSTBUSY) = ('recovering', 'idle', 'busy', 'post-busy') +state = RECOVERING +enter_height = None + +fields = None +for row in csv.reader(sys.stdin): + if not fields: + fields = row + continue + data = dict(zip(fields, row)) + #print(data) + height = int(data["height"]) + computrons = int(data["computrons"]) + block_time = float(data["block_time"]) + if enter_height is None: + enter_height = height + if computrons > 0: + new_state = BUSY + elif block_time < 5.0: + new_state = RECOVERING + elif block_time > 7.0: + new_state = POSTBUSY + else: + new_state = IDLE + if new_state != state: + spent = height - enter_height + print("b%5d: spent %d in %s, -> %s" % (height, spent, state, new_state)) + state = new_state + enter_height = height diff --git a/packages/SwingSet/misc-tools/source-file-used-by.js b/packages/SwingSet/misc-tools/source-file-used-by.js new file mode 100644 index 00000000000..76638b1ef05 --- /dev/null +++ b/packages/SwingSet/misc-tools/source-file-used-by.js @@ -0,0 +1,372 @@ +// @ts-nocheck +/* eslint-disable */ +import '@endo/init'; + +import fs from 'fs'; +import url from 'url'; +import path from 'path'; +import crypto from 'crypto'; +import zlib from 'zlib'; +import process from 'process'; +import sqlite3 from 'better-sqlite3'; + +import bundleSource from '@endo/bundle-source'; +import { decodeBase64 } from '@endo/base64'; +import { ZipReader } from '@endo/zip'; + +import { makeReadPowers } from '@endo/compartment-mapper/node-powers.js'; +const readPowers0 = makeReadPowers({ fs, url, crypto }); +const loggingRead = async location => { + try { + const result = await readPowers0.read(location); + console.log(`read: ${location}`); + return result; + } catch (e) { + console.log(`err : ${location}`); + throw e; + } +}; +const readPowers = { + ...readPowers0, + // read: loggingRead +}; + +import { makeArchive } from '@endo/compartment-mapper'; + +async function offrun() { + const startFilename = + '/home/node/trees/agoric-sdk/packages/SwingSet/src/vats/timer/vat-timer.js'; + const entry = url.pathToFileURL(path.resolve(startFilename)); + const options = {}; + const compartmentMap = await digestLocation(readPowers, entry, options); + console.log(compartmentMap); +} + +async function showCompartmentMap(archive) { + //const compartmentMapBytes = archive.read('compartment-map.json'); + //const textDecoder = new TextDecoder(); + //const compartmentMapText = textDecoder.decode(compartmentMapBytes); + //const compartmentMap = JSON.parse(compartmentMapText); + + // compartmentMap has an entry for every package involved in the + // import graph, since each package will be loaded into its own + // Compartment. The keys are compartment names, which are also + // fully-qualified version-bearing package names, and the values are + // a description of the compartment, e.g.: + // '@agoric/store-v0.9.0' -> { + // name: '@agoric/store', + // label: '@agoric/store-v0.9.0', + // location: '@agoric/store-v0.9.0', + // modules + // } + // + // 'modules' contains an entry for every external package cited in + // the 'package.json' (for inter-package imports), + // e.g. '@agoric/assert' to: + // { compartment: '@agoric/assert-v.0.6.0', module: './src/index.js' } + // as well as an entry for every module defined within the same + // package, e.g. './src/stores/scalarMapStore.js' to: + // { location: 'src/stores/scalarMapStore.js', parser, sha512 } + // + // and then the zipfile will have members for + // e.g. 
'@agoric/store-v0.9.0/src/stores/scalarMapStore.js' + // but only the subset that are actually used. Note that 'modules' + // can contain unused packages and unused modules. + + //console.log(compartmentMap); + + return; + //return; + for (let [k, v] of Object.entries(compartmentMap.compartments)) { + const { name, modules } = v; + // name: @agoric/assert + // label: @agoric/assert-v0.6.0 + // location: @agoric/assert-v0.6.0 + console.log(`--`, name); + for (let [mk, mv] of Object.entries(modules)) { + // when $name says "import $mk", they get $mv + let path; + if (mv.compartment) { + // from different compartment + console.log(`##`, mk, mv); + console.log(compartmentMap.compartments[mv.compartment]); + path = `${compartmentMap.compartments[mv.compartment].name} / ${mv.module}`; + } else { + // from same compartment + path = `(me) / ${mv.location}`; + } + console.log(` ${mk} -> ${path}`); + } + } +} + +const pkgExceptions = new Map([['@swingset-vat', 'SwingSet']]); + +function pkgToSDKPath(pkg, path) { + if (!pkg.startsWith('@agoric/')) return; + if (pkg === '@agoric/babel-generator') return; + let prefix = `packages/${pkg.slice('@agoric/'.length)}`; + if (pkg === '@agoric/swingset-vat') { + prefix = 'packages/SwingSet'; + } + return `${prefix}/${path}`; +} + +function createDB(dbfn) { + const db = sqlite3(dbfn); + db.prepare('BEGIN IMMEDIATE TRANSACTION').run(); + db.exec(`PRAGMA journal_mode=WAL`); + // these tables hold the snapshot contents more-or-less verbatim + db.exec(`CREATE TABLE uses (deployment STRING, imported STRING)`); + db.exec( + `CREATE UNIQUE INDEX uses_deployment_imported ON uses (deployment, imported)`, + ); + db.exec( + `CREATE UNIQUE INDEX uses_imported_deployment ON uses (imported, deployment)`, + ); + db.prepare('COMMIT').run(); + return db; +} + +function makeDB(dbfn) { + let db; + if (!fs.existsSync(dbfn)) { + db = createDB(dbfn); + } else { + db = sqlite3(dbfn); + } + + const sql = { db }; + sql.addUses = db.prepare( + 'INSERT INTO uses (deployment, imported) VALUES (?,?) ON CONFLICT DO NOTHING', + ); + + sql.getImports = db + .prepare('SELECT imported FROM uses WHERE deployment=? ORDER BY imported') + .pluck(); + + sql.getUsers = db + .prepare('SELECT deployment FROM uses WHERE imported=? 
ORDER BY deployment') + .pluck(); + + return { db, sql }; +} + +function extractMemberNames(archiveBytes) { + // fs.writeFileSync('bundle.zip', archiveBytes); + const archive = new ZipReader(archiveBytes); + + const memberNames = []; + for (let fn of archive.files.keys()) { + //console.log(fn); + if (fn !== 'compartment-map.json') { + const [_, pkg, version, path] = /^(.+)-v([^/]+)\/(.+)$/.exec(fn); + const sdkPath = pkgToSDKPath(pkg, path); + //console.log(pkg, version, path, sdkPath); + if (sdkPath) { + //console.log(sdkPath); + const absPath = `/home/node/trees/agoric-sdk/${sdkPath}`; + if (!fs.existsSync(absPath)) { + throw Error(`cannot find ${pkg} at ${absPath}`); + } + memberNames.push(sdkPath); + } + } + // value is { name, type, mode, date, content: Uint8Array, comment } + } + return memberNames.sort(); +} + +const builtinsToIgnore = [ + 'buffer', + 'child_process', + 'crypto', + 'fs', + 'fs/promises', + 'module', + 'node:assert', + 'node:child_process', + 'node:fs', + 'node:fs/promises', + 'node:module', + 'node:path', + 'node:process', + 'node:stream', + 'node:url', + 'node:util', + 'node:v8', + 'os', + 'path', + 'perf_hooks', + 'process', + 'stream', + 'stream/promises', + 'url', + 'util', + 'v8', + 'vm', + 'zlib', +]; + +async function findContents(entryfn) { + const moduleLocation = url.pathToFileURL(path.resolve(entryfn)); + const modules = {}; + builtinsToIgnore.forEach(name => (modules[name] = null)); // value is unused + const options = { modules, dev: true }; + const archiveBytes = await makeArchive(readPowers, moduleLocation, options); + return extractMemberNames(archiveBytes); +} + +async function OFFfindContents(entryfn) { + const bundle = await bundleSource(entryfn); + const archiveBytes = decodeBase64(bundle.endoZipBase64); + return extractMemberNames(archiveBytes); +} + +async function addEntryPoint(dbfn, name, entryfn) { + const { db, sql } = makeDB(dbfn); + const uses = await findContents(entryfn); + db.prepare('BEGIN IMMEDIATE TRANSACTION').run(); + for (let used of uses) { + sql.addUses.run(name, used); + } + db.prepare('COMMIT').run(); + console.log(`${name}: uses ${uses.length} files`); + //console.log(uses); +} + +const usualSuspects = [ + // chain upgrade requires a staker governance action and chain halt/upgrade/restart + + // the real entrypoint is packages/cosmic-swingset/src/entrypoint.js + // but https://github.com/endojs/endo/issues/1596 stumbles on + // anylogger-agoric.js and SwingSet/src/index.js . 
Workaround: + // comment out the 'export *' lines from index.js and start from + // chain-main.js + ['chain', 'packages/cosmic-swingset/src/chain-main.js'], + + // upgrading the kernel happens upon chain upgrade + ['kernel', 'packages/SwingSet/src/kernel.js'], + + // supervisor/lockdown are delivered with chain upgrades, sampled + // upon vat creation/upgrade, and retained for the remainder of that + // incarnation + ['xsnap-supervisor', 'packages/swingset-xsnap-supervisor/lib/entry.js'], + ['xsnap-lockdown', 'packages/xsnap-lockdown/lib/ses-boot.js'], + + // swingset built-in bundles: upgrade by installing new bundle and + // arranging to call controller.upgradeStaticVat() + ['vat-comms', 'packages/SwingSet/src/vats/comms/index.js'], + // vat-network is not automatically included by swingset, see packages/vats/src/vat-network.js + ['vat-timer', 'packages/SwingSet/src/vats/timer/vat-timer.js'], + ['vat-vat-admin', 'packages/SwingSet/src/vats/vat-admin/vat-vat-admin.js'], + ['vat-vattp', 'packages/SwingSet/src/vats/vattp/vat-vattp.js'], + + // decentral-main-vaults-config.json + ['vat-bootstrap', 'packages/vats/src/core/boot-chain.js'], + + // from decentral-core-config.json + ['vat-agoricNames', 'packages/vats/src/vat-agoricNames.js'], + ['centralSupply', 'packages/vats/src/centralSupply.js'], + ['mintHolder', 'packages/vats/src/mintHolder.js'], + ['zcf', 'packages/zoe/contractFacet.js'], + ['vat-bank', 'packages/vats/src/vat-bank.js'], + ['vat-board', 'packages/vats/src/vat-board.js'], + ['vat-bridge', 'packages/vats/src/vat-bridge.js'], + ['vat-mints', 'packages/vats/src/vat-mints.js'], + ['vat-priceAuthority', 'packages/vats/src/vat-priceAuthority.js'], + ['provisionPool', 'packages/vats/src/provisionPool.js'], + ['vat-provisioning', 'packages/vats/src/vat-provisioning.js'], + ['vat-sharing', 'packages/vats/src/vat-sharing.js'], + ['walletFactory', 'packages/smart-wallet/src/walletFactory.js'], + ['vat-zoe', 'packages/vats/src/vat-zoe.js'], + + // proposals from decentral-main-vaults-config.json + // + // @agoric/vats/scripts/init-core.js + // @agoric/inter-protocol/scripts/init-core.js + // ??? 
these are a few manually-located bundles used by this bit + ['bundle-contractGovernor', 'packages/governance/src/contractGovernor.js'], + ['bundle-committee', 'packages/governance/src/committee.js'], + ['bundle-binaryVoteCounter', 'packages/governance/src/binaryVoteCounter.js'], + ['bundle-auctioneer', 'packages/inter-protocol/src/auction/auctioneer.js'], + [ + 'bundle-vaultFactory', + 'packages/inter-protocol/src/vaultFactory/vaultFactory.js', + ], + ['bundle-feeDistributor', 'packages/inter-protocol/src/feeDistributor.js'], + ['bundle-reserve', 'packages/inter-protocol/src/reserve/assetReserve.js'], + + // add-collateral-core.js + [ + 'bundle-scaledPriceAuthority', + 'packages/zoe/src/contracts/scaledPriceAuthority.js', + ], + ['bundle-psm', 'packages/inter-protocol/src/psm/psm.js'], + // ['bundle-mintHolder', 'packages/vats/src/mintHolder.js'], // see above + + // price-feed-core.js + [ + 'bundle-fluxAggregator', + 'packages/inter-protocol/src/price/fluxAggregatorContract.js', + ], + + // invite-committee-core.js + [ + 'bundle-econCommitteeCharter.js', + 'packages/inter-protocol/src/econCommitteeCharter.js', + ], +]; + +async function addUsual(dbfn) { + // run from top of agoric-sdk + for (let [name, entryfn] of usualSuspects) { + await addEntryPoint(dbfn, name, entryfn); + } +} + +async function findDeployments(dbfn, sdkfn) { + const { db, sql } = makeDB(dbfn); + for (let deployment of sql.getUsers.iterate(sdkfn)) { + console.log(deployment); + } +} + +async function findImports(dbfn, deployment) { + const { db, sql } = makeDB(dbfn); + for (let sdkfn of sql.getImports.iterate(deployment)) { + console.log(sdkfn); + } +} + +// source-file-used-by.js add vat-timer packages/SwingSet/src/vats/timer/vat-timer.js +// source-file-used-by.js add vat-board packages/vats/src/vat-board.js +// source-file-used-by.js add-usual +// source-file-used-by.js uses vat-board +// source-file-used-by.js used-by packages/store/src/stores/scalarMapStore.js + +async function run() { + const dbfn = 'uses.sqlite'; + //const name = 'vat-timer'; + //const entryfn = '/home/node/trees/agoric-sdk/packages/SwingSet/src/vats/timer/vat-timer.js'; + const [cmd] = process.argv.slice(2); + if (cmd === 'add') { + const [_cmd, name, entryfn] = process.argv.slice(2); + if (!entryfn || !fs.existsSync(entryfn) || !entryfn.endsWith('.js')) { + throw Error(` must exist and be .js`); + } + await addEntryPoint('uses.sqlite', name, entryfn); + } else if (cmd === 'add-usual') { + await addUsual(dbfn); + } else if (cmd === 'uses') { + const [_cmd, deployment] = process.argv.slice(2); + await findImports(dbfn, deployment); + } else if (cmd === 'used-by') { + const [_cmd, sdkfn] = process.argv.slice(2); + await findDeployments(dbfn, sdkfn); + } else { + throw Error(` must be 'add'`); + } +} + +run().catch(err => console.log('err', err)); diff --git a/packages/SwingSet/misc-tools/summarize-ckv.js b/packages/SwingSet/misc-tools/summarize-ckv.js new file mode 100644 index 00000000000..7b84d15f8ee --- /dev/null +++ b/packages/SwingSet/misc-tools/summarize-ckv.js @@ -0,0 +1,11 @@ +// @ts-nocheck +import fs from 'fs'; + +const [datafn] = process.argv.slice(2); +const stats = { kv: { rows: 0, chars: 0 } }; +const data = JSON.parse(fs.readFileSync(datafn)); +for (const [vatID, vat] of Object.entries(data)) { + stats.kv.rows += vat.stats.kv.rows; + stats.kv.chars += vat.stats.kv.chars; +} +console.log(JSON.stringify(stats)); diff --git a/packages/SwingSet/misc-tools/trades-and-writes-per-block.py 
b/packages/SwingSet/misc-tools/trades-and-writes-per-block.py new file mode 100644 index 00000000000..16ba2faa59e --- /dev/null +++ b/packages/SwingSet/misc-tools/trades-and-writes-per-block.py @@ -0,0 +1,36 @@ +# +# Given a slogfile on stdin, this measures which deliveries happened +# in which blocks, and emits a CSV of (blockHeight, bridges, write-metrics, write-wallet) + +import sys, json +from collections import defaultdict + +write_metrics = 0 +write_wallet = 0 +bridges = 0 + +blocknum = None + +print("blocknum,bridges,write-metrics,write-wallet") + +for line in sys.stdin: + d = json.loads(line) + time = d["time"] + stype = d["type"] + if stype == "syscall" and d["ksc"][0] == "invoke" and d["ksc"][2] == "callOutbound": + msg = json.loads(d["ksc"][3]["body"]) + if msg[0] == "storage": + key = msg[1]["key"] + if key == "published.psm.IST.AUSD.metrics": + write_metrics += 1 + if key.startswith("published.wallet."): + write_wallet += 1 + if stype == "cosmic-swingset-bridge-inbound": + bridges += 1 + if stype == "cosmic-swingset-begin-block": + blocknum = d["blockHeight"] + if stype == "cosmic-swingset-end-block-finish": + print("%d,%d,%d,%d" % (blocknum, bridges, write_metrics, write_wallet)) + bridges = 0 + write_metrics = 0 + write_wallet = 0 diff --git a/packages/SwingSet/misc-tools/zoe-notifier.js b/packages/SwingSet/misc-tools/zoe-notifier.js new file mode 100644 index 00000000000..b0506d7016e --- /dev/null +++ b/packages/SwingSet/misc-tools/zoe-notifier.js @@ -0,0 +1,80 @@ +// @ts-nocheck +import sqlite3 from 'better-sqlite3'; + +const db = sqlite3('mezzanine.sqlite'); +const kpids = db + .prepare( + `SELECT pd.kpid FROM promise_decider AS pd, promise_subscriber AS ps WHERE pd.kpid=ps.kpid AND pd.decider='v9' AND ps.subscriber='v43'`, + ) + .pluck() + .all(); + +const two = db + .prepare( + `SELECT crankNum FROM delivery_mention WHERE kref=? ORDER BY crankNum LIMIT 1`, + ) + .pluck(); +const three = db.prepare( + `SELECT * FROM delivery WHERE crankNum=? ORDER BY crankNum LIMIT 1`, +); +const four = db.prepare(`SELECT * FROM delivery_message WHERE crankNum=?`); +const five = db + .prepare( + `SELECT crankNum FROM syscall_mention WHERE kref=? ORDER BY crankNum LIMIT 1`, + ) + .pluck(); +const six = db.prepare(`SELECT * FROM delivery_message WHERE crankNum=?`); + +for (const kpid of kpids) { + // console.log(kpid); + // the kpid is probably the result of a E(xx).getUpdateSince() call, delivered in.. + const cn = two.get(kpid); + // console.log(cn); + const gUS = three.get(cn); + // console.log(gUS); + const d = four.get(cn); + // console.log(d); + if (d.methname !== 'getUpdateSince') { + console.log(`-- oops ${kpid}, not a getUpdateSince`); + console.log(d); + continue; + } + // the getUpdateSince() was sent to a Notifier, with kref.. + const { target_kref } = d; + // console.log(`target_kref:`, target_kref); + // .. which was first created by a syscall.resolve in crank.. + const resolveCN = five.get(target_kref); + // console.log(`resolveCN:`, resolveCN); + // .. 
which was in response to a method named: + const create = six.get(resolveCN); + // console.log(create); + if (create.vatID !== 'v9' || create.methname !== 'getCurrentAmountNotifier') { + console.log(`-- oops ${kpid}, not v9-zoe~.getCurrentAmountNotifier`); + } + console.log(`ok ${kpid}`); +} + +/* + + +// DISTINCT blockNum,runNum FROM delivery WHERE kd_json LIKE '%PushPrice%'`; +//one = one + ' LIMIT 10'; +one = db.prepare(one); +let two = db.prepare(`SELECT * FROM run WHERE blockNum=@blockNum AND runNum=@runNum`); +const times = []; +for (const row of one.iterate()) { + const brow = two.get({ blockNum: row.blockNum, runNum: row.runNum }); + //console.log(brow.blockNum, brow.runNum, brow.time, brow.usedBeans); + times.push(brow.time); +} + +times.sort(); + +const sum = times.reduce((a,b) => a+b); +console.log(`count:`, times.length); +console.log(`avg :`, sum / times.length); +console.log(`med :`, times[Math.floor(0.5 * times.length)]); +console.log(`90p :`, times[Math.floor(0.9 * times.length)]); +console.log(`95p :`, times[Math.floor(0.95 * times.length)]); +console.log(`99p :`, times[Math.floor(0.90 * times.length)]); +*/ diff --git a/packages/SwingSet/misc-tools/zoe-vat-done.js b/packages/SwingSet/misc-tools/zoe-vat-done.js new file mode 100644 index 00000000000..c9aec92cce8 --- /dev/null +++ b/packages/SwingSet/misc-tools/zoe-vat-done.js @@ -0,0 +1,28 @@ +// @ts-nocheck +import sqlite3 from 'better-sqlite3'; + +const db = sqlite3('mezzanine.sqlite'); +const kpids = db + .prepare( + `SELECT pd.kpid FROM promise_decider AS pd, promise_subscriber AS ps WHERE pd.kpid=ps.kpid AND pd.decider='v2' AND ps.subscriber='v9'`, + ) + .pluck() + .all(); + +const two = db + .prepare( + `SELECT crankNum FROM delivery_mention WHERE kref=? ORDER BY crankNum LIMIT 1`, + ) + .pluck(); +const three = db.prepare(`SELECT * FROM delivery_message WHERE crankNum=?`); + +for (const kpid of kpids) { + // console.log(kpid); + // the kpid is probably the result of a E(adminNode).done() call, delivered in.. + const cn = two.get(kpid); + const create = three.get(cn); + if (create.vatID !== 'v2' || create.methname !== 'done') { + console.log(`-- oops ${kpid}, not v2-vatAdmin~.done`); + } + console.log(`ok ${kpid}`); +}
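+
+// Usage sketch: like zoe-notifier.js above, this expects a mezzanine.sqlite
+// (e.g. as built by my-build-mezzanine-db.js) in the current directory and
+// takes no arguments:
+//
+//   node zoe-vat-done.js
+//
+// For every kpid decided by v2 (vat-admin) and subscribed to by v9 (zoe), it
+// looks up the first delivery that mentions the kpid, prints "-- oops" when
+// that delivery was not an E(adminNode).done() send on v2, and otherwise
+// reports "ok <kpid>".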