Add benchmark for bytecode execution #3437

Open · wants to merge 17 commits into master
Changes from all commits
1 change: 1 addition & 0 deletions packages/evm/src/evm.ts
@@ -972,6 +972,7 @@ export class EVM implements EVMInterface {
selfdestruct: opts.selfdestruct ?? new Set(),
isStatic: opts.isStatic,
blobVersionedHashes: opts.blobVersionedHashes,
createdAddresses: opts.createdAddresses,
})

return this.runInterpreter(message, { pc: opts.pc })
4 changes: 4 additions & 0 deletions packages/evm/src/types.ts
@@ -93,6 +93,10 @@ export interface EVMRunCodeOpts extends EVMRunOpts {
* The initial program counter. Defaults to `0`
*/
pc?: number
/**
* Addresses created in the current context. Used for EIP-6780
*/
createdAddresses?: Set<PrefixedHexString>
}

/**
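For illustration only (not part of this diff), a minimal sketch of how a caller might thread the new `createdAddresses` option into `runCode`; the zero address and the tiny SELFDESTRUCT snippet below are placeholders chosen for the example:

```
import { createEVM } from '@ethereumjs/evm'
import { hexToBytes, PrefixedHexString } from '@ethereumjs/util'

const evm = await createEVM()

// Placeholder: pretend this address was created earlier in the same transaction,
// so EIP-6780 would still allow SELFDESTRUCT to fully remove it.
const created = new Set<PrefixedHexString>(['0x0000000000000000000000000000000000000000'])

await evm.runCode({
  code: hexToBytes('0x6000ff'), // PUSH1 0x00, SELFDESTRUCT (illustrative bytecode)
  gasLimit: BigInt(0xffff),
  createdAddresses: created,
})
```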
6 changes: 6 additions & 0 deletions packages/vm/DEVELOPER.md
@@ -208,6 +208,12 @@ Then:

To define the number of samples to be run, pass in a number like so: `npm run benchmarks -- mainnetBlocks:10`

To benchmark individual bytecode against an empty state:
`node --max-old-space-size=4096 ./benchmarks/run.js benchmarks bytecode:10 -b 60FF60005261FFFF600020`

To benchmark individual bytecode against a prestate loaded from a JSON file:
`node --max-old-space-size=4096 ./benchmarks/run.js benchmarks bytecode:10 -b 60FF60005261FFFF600020 -p prestate.json`
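For reference, the example bytecode `60FF60005261FFFF600020` disassembles as follows: it stores `0xFF` at memory offset 0 and then hashes the first `0xFFFF` bytes of memory, so memory expansion and `KECCAK256` dominate the cost:

```
60 FF    PUSH1 0xFF
60 00    PUSH1 0x00
52       MSTORE
61 FFFF  PUSH2 0xFFFF
60 00    PUSH1 0x00
20       KECCAK256
```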

If you want to take a more detailed look and find bottlenecks, you can use [0x](https://github.com/davidmarkclements/0x):

```
46 changes: 46 additions & 0 deletions packages/vm/benchmarks/bytecode.ts
@@ -0,0 +1,46 @@
import { readFileSync } from 'fs'
import { Common, Hardfork, Mainnet, StateManagerInterface } from '@ethereumjs/common'
import { createEVM } from '@ethereumjs/evm'
import { SimpleStateManager } from '@ethereumjs/statemanager'
import { createZeroAddress, hexToBytes, PrefixedHexString } from '@ethereumjs/util'
import { Bench } from 'tinybench'
import { getPreState } from './util.js'

export async function bytecode(numSamples?: number, bytecode?: string, preState?: string) {
const common = new Common({ chain: Mainnet, hardfork: Hardfork.Cancun })

let stateManager: StateManagerInterface = new SimpleStateManager()

if (preState) {
let preStateData = JSON.parse(readFileSync(preState, 'utf8'))
stateManager = await getPreState(preStateData, common)
}

let evm = await createEVM({ stateManager, common })
const bytecodeHex = hexToBytes(`0x${bytecode}`)
const gasLimit = BigInt(0xffff)

const bench = new Bench({
time: 100,
teardown: async () => {
await evm.stateManager.clearStorage(createZeroAddress())
},
})

for (let i = 0; i < (numSamples ?? 1); i++) {
bench.add('bytecode sample ' + i.toString(), async () => {
await evm.runCode({
code: bytecodeHex,
gasLimit: gasLimit,
createdAddresses: new Set<PrefixedHexString>(),
})
})
}

await bench.warmup() // make results more reliable, ref: https://github.com/tinylibs/tinybench/pull/50
await bench.run()

return bench.table()
}
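As a usage sketch (assuming the module layout above), the helper can also be called directly from a script instead of going through `run.ts`; the sample bytecode is the one from DEVELOPER.md:

```
import { bytecode } from './bytecode.js'

// Five samples against an empty SimpleStateManager-backed state.
const results = await bytecode(5, '60FF60005261FFFF600020')
console.table(results) // rows come from tinybench's bench.table()
```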
20 changes: 13 additions & 7 deletions packages/vm/benchmarks/mainnetBlocks.ts
@@ -4,6 +4,7 @@ import { createVM, runBlock as runBlockVM, VM } from '@ethereumjs/vm'
import Benchmark from 'benchmark'
import { readFileSync } from 'fs'
import { getBlockchain, getPreState, verifyResult } from './util.js'
import { Bench } from 'tinybench'

const BLOCK_FIXTURE = 'benchmarks/fixture/blocks-prestate.json'

@@ -16,7 +17,7 @@ const runBlock = async (vm: VM, block: Block, receipts: any) => {
verifyResult(block, receipts)
}

-export async function mainnetBlocks(suite?: Benchmark.Suite, numSamples?: number) {
export async function mainnetBlocks(numSamples?: number) {
let data = JSON.parse(readFileSync(BLOCK_FIXTURE, 'utf8'))
if (!Array.isArray(data)) data = [data]
console.log(`Total number of blocks in data set: ${data.length}`)
@@ -27,6 +28,10 @@ export async function mainnetBlocks(suite?: Benchmark.Suite, numSamples?: number

const common = new Common({ chain: Mainnet, hardfork: Hardfork.MuirGlacier })

const bench = new Bench({
time: 10000,
})

for (const blockData of data) {
const block = createBlockFromRPC(blockData.block, [], { common })
const blockNumber = Number(block.header.number)
@@ -44,12 +49,13 @@ export async function mainnetBlocks(suite?: Benchmark.Suite, numSamples?: number
const blockchain = getBlockchain(blockhashes) as any
const vm = await createVM({ stateManager, common, blockchain })

-    if (suite) {
-      suite.add(`Block ${blockNumber}`, async () => {
-        await runBlock(vm, block, receipts)
-      })
-    } else {
+    bench.add('block ' + blockNumber.toString(), async () => {
      await runBlock(vm, block, receipts)
-    }
+    })
  }

await bench.warmup()
await bench.run()

return bench.table()
}
94 changes: 63 additions & 31 deletions packages/vm/benchmarks/run.ts
@@ -1,3 +1,6 @@
import { bytecode } from './bytecode.js'
import * as yargs from 'yargs'
import { hideBin } from 'yargs/helpers'
import Benchmark from 'benchmark'
import { BenchmarksType } from './util.js'
import { mainnetBlocks } from './mainnetBlocks.js'
@@ -7,59 +10,88 @@ const BENCHMARKS: BenchmarksType = {
mainnetBlocks: {
function: mainnetBlocks,
},
-}
-
-const onCycle = (event: Benchmark.Event) => {
-  console.log(String(event.target))
+  bytecode: {
+    function: bytecode,
+  },
}

async function main() {
-  const args = process.argv
// Argument parsing
const args = yargs
.default(hideBin(process.argv))
.command('benchmarks <benchmarks>', 'Run benchmarks', (yargs) => {
yargs.positional('benchmarks', {
describe: `Name(s) of benchmarks to run: BENCHMARK_NAME[:NUM_SAMPLES][,BENCHMARK_NAME[:NUM_SAMPLES]]. Benchmarks available: ${Object.keys(
BENCHMARKS,
).join(', ')}`,
type: 'string',
required: true,
})
})
.option('bytecode', {
alias: 'b',
describe: 'Bytecode to run',
type: 'string',
})
.option('preState', {
alias: 'p',
describe: 'File containing prestate to load',
type: 'string',
})
.option('csv', {
alias: 'c',
describe: 'Output results as CSV',
type: 'boolean',
default: false,
})
.help()
.parse()

// Input validation
-  if (args.length < 4) {
-    console.log(
-      'Please provide at least one benchmark name when running a benchmark or doing profiling.'
-    )
-    console.log(
-      'Usage: npm run benchmarks|profiling -- BENCHMARK_NAME[:NUM_SAMPLES][,BENCHMARK_NAME[:NUM_SAMPLES]]'
-    )
-    console.log(`Benchmarks available: ${Object.keys(BENCHMARKS).join(', ')}`)
-    return process.exit(1)
-  }
const benchmarksStr = (args as any).benchmarks
const bytecode = (args as any).bytecode
const preState = (args as any).preState
const csv = (args as any).csv

// Initialization
-  let suite
-  // Choose between benchmarking or profiling (with 0x)
-  if (args[2] === 'benchmarks') {
-    console.log('Benchmarking started...')
-    suite = new Benchmark.Suite()
-  } else {
-    console.log('Profiling started...')
if (!benchmarksStr) {
console.log('No benchmarks to run, exiting...')
return
}

-  const benchmarks = args[3].split(',')

// Benchmark execution
const benchmarks = benchmarksStr.split(',')
for (const benchmark of benchmarks) {
const [name, numSamples] = benchmark.split(':')

if (name in BENCHMARKS) {
-      console.log(`Running '${name}':`)
-      await BENCHMARKS[name].function(suite, Number(numSamples))
if (!csv) {
console.log(`Running '${name}':`)
console.log(` Number of samples: ${numSamples}`)
console.log(` Bytecode: ${bytecode}`)
console.log(` Prestate: ${preState}`)
}
const results = await BENCHMARKS[name].function(Number(numSamples), bytecode, preState)
if (csv) {
console.log('Task Name,Average Time (ns),Margin,Samples')
for (const result of results) {
console.log(
`${result['Task Name']},${result['Average Time (ns)']},${result['Margin']},${result['Samples']}`,
)
}
} else {
console.table(results)
}
} else {
console.log(`Benchmark with name ${name} doesn't exist, skipping...`)
}
}

-  if (suite) {
-    suite.on('cycle', onCycle).run()
if (!csv) {
console.log('Benchmark run finished.')
}
}

main()
.then(() => {
console.log('Benchmark run finished.')
process.exit(0)
})
.catch((e: Error) => {
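For reference (illustrative flags, matching the options defined above), a CSV run could look like the following; only the header row is shown, since actual timings depend on the machine:

```
node --max-old-space-size=4096 ./benchmarks/run.js benchmarks bytecode:3 -b 60FF60005261FFFF600020 --csv
Task Name,Average Time (ns),Margin,Samples
```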