chore: update the performance tests to use @chainsafe/benchmark (#7373)
nazarhussain authored Jan 31, 2025
1 parent f9d080c commit c43b070
Showing 76 changed files with 938 additions and 906 deletions.
15 changes: 8 additions & 7 deletions .benchrc.yaml
@@ -1,10 +1,11 @@
# Mocha opts
extension: ["ts"]
colors: true
node-option:
- "loader=ts-node/register"

# benchmark opts
threshold: 3
maxMs: 60_000
maxMs: 60000
minRuns: 10
# Default is set to 0.005, which is too low considering the benchmark setup we have
# Changing it to 0.05 which is 5/100, so 5% difference of moving average among run times
convergeFactor: 0.075 # 7.5 / 100
triggerGC: false
sort: true
convergence: linear
averageCalculation: clean-outliers
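
For context, these are the runner-level defaults that the perf tests changed below inherit. A minimal sketch of a test file that would run under them, using only the describe/bench API imported elsewhere in this commit (the suite name and workload are hypothetical):

import {bench, describe} from "@chainsafe/benchmark";

describe("example perf suite", () => {
  // Runner defaults (maxMs, minRuns, convergeFactor, ...) come from .benchrc.yaml;
  // individual benchmarks can still pass their own options object.
  bench({
    id: "sum 1..1000",
    fn: () => {
      let total = 0;
      for (let i = 1; i <= 1000; i++) total += i;
      if (total !== 500500) throw Error("unexpected sum");
    },
  });
});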
8 changes: 0 additions & 8 deletions biome.jsonc
@@ -255,14 +255,6 @@
}
}
},
// Dependencies still using mocha
{
"include": ["packages/**/test/perf/**/*.test.ts", "packages/state-transition/test/utils/beforeValueMocha.ts"],
"javascript": {
// These are used by mocha
"globals": ["describe", "it", "before", "after"]
}
},
{
"include": [
// These files are using mix cases e.g. `engine_newPayloadV4`
4 changes: 1 addition & 3 deletions package.json
@@ -48,9 +48,8 @@
},
"devDependencies": {
"@actions/core": "^1.10.1",
"@dapplion/benchmark": "^0.2.4",
"@chainsafe/benchmark": "^1.2.3",
"@biomejs/biome": "^1.9.3",
"@types/mocha": "^10.0.6",
"@types/node": "^20.12.8",
"@vitest/browser": "^2.0.4",
"@vitest/coverage-v8": "^2.0.4",
@@ -61,7 +60,6 @@
"jsdom": "^23.0.1",
"lerna": "^7.3.0",
"libp2p": "1.4.3",
"mocha": "^10.2.0",
"node-gyp": "^9.4.0",
"npm-run-all": "^4.1.5",
"path-browserify": "^1.0.1",
3 changes: 2 additions & 1 deletion packages/api/test/perf/compileRouteUrlFormater.test.ts
@@ -1,7 +1,8 @@
import {bench, describe} from "@chainsafe/benchmark";
import {compileRouteUrlFormatter} from "../../src/utils/urlFormat.js";

describe("route parse", () => {
it.skip("Benchmark compileRouteUrlFormatter", () => {
bench.skip("Benchmark compileRouteUrlFormatter", () => {
const path = "/eth/v1/validator/:name/attester/:epoch";
const args = {epoch: 5, name: "HEAD"};

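
The change above is the simplest form of the migration: mocha's it/it.skip callbacks become bench/bench.skip. A hedged sketch of the pattern, assuming the same (name, fn) signature shown for bench.skip above (names and bodies are illustrative):

import {bench, describe} from "@chainsafe/benchmark";

describe("skip pattern", () => {
  // Registered and measured by the benchmark runner
  bench("measured case", () => {
    JSON.stringify({epoch: 5, name: "HEAD"});
  });

  // Registered but skipped, mirroring it.skip in the old mocha-based setup
  bench.skip("temporarily disabled case", () => {
    throw Error("never executed");
  });
});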
5 changes: 2 additions & 3 deletions packages/beacon-node/test/e2e/sync/finalizedSync.test.ts
@@ -5,8 +5,7 @@ import {ChainConfig} from "@lodestar/config";
import {TimestampFormatCode} from "@lodestar/logger";
import {SLOTS_PER_EPOCH} from "@lodestar/params";
import {phase0} from "@lodestar/types";
import {assert} from "chai";
import {afterEach, describe, it, vi} from "vitest";
import {afterEach, describe, expect, it, vi} from "vitest";
import {ChainEvent} from "../../../src/chain/index.js";
import {waitForEvent} from "../../utils/events/resolver.js";
import {LogLevel, TestLoggerOpts, testLogger} from "../../utils/logger.js";
@@ -114,7 +113,7 @@ describe("sync / finalized sync", () => {
await waitForSynced;
loggerNodeB.info("Node B synced to Node A, received head block", {slot: head.message.slot});
} catch (_e) {
assert.fail("Failed to sync to other node in time");
expect.fail("Failed to sync to other node in time");
}
});
});
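
This file only swaps chai's assert.fail for vitest's chai-compatible expect.fail. A minimal sketch of the same pattern outside this e2e test (the condition is a stand-in):

import {describe, expect, it} from "vitest";

describe("assert.fail -> expect.fail", () => {
  it("fails with an explicit message when a condition is not met", () => {
    const synced = true; // stand-in for the real sync check
    if (!synced) {
      expect.fail("Failed to sync to other node in time");
    }
  });
});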
@@ -1,4 +1,4 @@
import {itBench} from "@dapplion/benchmark";
import {beforeAll, bench, describe} from "@chainsafe/benchmark";
import {generatePerfTestCachedStatePhase0, numValidators} from "../../../../../../state-transition/test/perf/util.js";
import {getPubkeysForIndices} from "../../../../../src/api/impl/validator/utils.js";
import {linspace} from "../../../../../src/util/numpy.js";
@@ -20,15 +20,14 @@ import {linspace} from "../../../../../src/util/numpy.js";
describe("api / impl / validator", () => {
let state: ReturnType<typeof generatePerfTestCachedStatePhase0>;

before(function () {
this.timeout(60 * 1000);
beforeAll(() => {
state = generatePerfTestCachedStatePhase0();
});

const reqCounts = process.env.CI ? [1000] : [1, 100, 1000];

for (const reqCount of reqCounts) {
itBench({
bench({
id: `getPubkeys - index2pubkey - req ${reqCount} vs - ${numValidators} vc`,
noThreshold: true,
fn: () => {
@@ -42,7 +41,7 @@ describe("api / impl / validator", () => {

// 7.17 ms / op (1000)
for (const reqCount of reqCounts) {
itBench({
bench({
id: `getPubkeys - validatorsArr - req ${reqCount} vs - ${numValidators} vc`,
// Only track regressions for 1000 in CI to ensure performance does not degrade
noThreshold: reqCount < 1000,
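
The setup hook above drops mocha's function-style before with this.timeout(60 * 1000) in favour of beforeAll. Where a longer setup budget is still needed, this commit passes the timeout as beforeAll's second argument (see the getAttestationsForBlock setup further down). A sketch of that pattern with a hypothetical workload:

import {beforeAll, bench, describe} from "@chainsafe/benchmark";

describe("expensive one-time setup", () => {
  let values: number[];

  // Timeout is the optional second argument instead of mocha's this.timeout()
  beforeAll(() => {
    values = Array.from({length: 1_000_000}, (_, i) => i);
  }, 60 * 1000);

  bench({
    id: "sum over precomputed values",
    fn: () => {
      let total = 0;
      for (const v of values) total += v;
      if (total < 0) throw Error("unreachable");
    },
  });
});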
12 changes: 6 additions & 6 deletions packages/beacon-node/test/perf/bls/bls.test.ts
@@ -1,4 +1,5 @@
import crypto from "node:crypto";
import {bench, describe} from "@chainsafe/benchmark";
import {
PublicKey,
SecretKey,
@@ -8,7 +9,6 @@ import {
verify,
verifyMultipleAggregateSignatures,
} from "@chainsafe/blst";
import {itBench} from "@dapplion/benchmark";
import {linspace} from "../../../src/util/numpy.js";

describe("BLS ops", () => {
@@ -60,15 +60,15 @@ describe("BLS ops", () => {
}

// Note: getSet() caches the value, does not re-compute every time
itBench({id: "BLS verify - blst", beforeEach: () => getSet(0)}, (set) => {
bench({id: "BLS verify - blst", beforeEach: () => getSet(0)}, (set) => {
const isValid = verify(set.message, set.publicKey, Signature.fromBytes(set.signature));
if (!isValid) throw Error("Invalid");
});

// An aggregate and proof object has 3 signatures.
// We may want to bundle up to 32 sets in a single batch.
for (const count of [3, 8, 32, 64, 128]) {
itBench({
bench({
id: `BLS verifyMultipleSignatures ${count} - blst`,
beforeEach: () => linspace(0, count - 1).map((i) => getSet(i)),
fn: (sets) => {
@@ -88,7 +88,7 @@
// ideally we want to track 700_000, 1_400_000, 2_100_000 validators but it takes too long
for (const numValidators of [10_000, 100_000]) {
const signatures = linspace(0, numValidators - 1).map((i) => getSet(i % 256).signature);
itBench({
bench({
id: `BLS deserializing ${numValidators} signatures`,
fn: () => {
for (const signature of signatures) {
@@ -103,7 +103,7 @@
// We may want to bundle up to 32 sets in a single batch.
// TODO: figure out why it does not work with 256 or more
for (const count of [3, 8, 32, 64, 128]) {
itBench({
bench({
id: `BLS verifyMultipleSignatures - same message - ${count} - blst`,
beforeEach: () => linspace(0, count - 1).map((i) => getSetSameMessage(i)),
fn: (sets) => {
@@ -118,7 +118,7 @@

// Attestations in Mainnet contain 128 max on average
for (const count of [32, 128]) {
itBench({
bench({
id: `BLS aggregatePubkeys ${count} - blst`,
beforeEach: () => linspace(0, count - 1).map((i) => getKeypair(i).publicKey),
fn: (pubkeys) => {
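
As in the BLS benchmarks above, the option-object form carries over unchanged: beforeEach produces a fresh input for every run and its return value is passed to fn, while the noThreshold flag used in the getPubkeys benchmarks earlier opts a case out of regression thresholds. A small sketch with a hypothetical workload:

import {bench, describe} from "@chainsafe/benchmark";

describe("per-run setup", () => {
  bench({
    id: "reverse a fresh 1k array",
    // Re-created before every run so fn never reuses a mutated input
    beforeEach: () => Array.from({length: 1000}, (_, i) => i),
    // Too noisy to gate CI on, so no regression threshold (mirrors noThreshold above)
    noThreshold: true,
    fn: (arr) => {
      arr.reverse();
      if (arr.length !== 1000) throw Error("unexpected length");
    },
  });
});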
@@ -1,5 +1,5 @@
import {beforeAll, bench, describe} from "@chainsafe/benchmark";
import {BitArray, toHexString} from "@chainsafe/ssz";
import {itBench} from "@dapplion/benchmark";
import {DataAvailabilityStatus, ExecutionStatus, ForkChoice, IForkChoiceStore, ProtoArray} from "@lodestar/fork-choice";
import {HISTORICAL_ROOTS_LIMIT, SLOTS_PER_EPOCH} from "@lodestar/params";
import {
@@ -30,57 +30,30 @@ describe(`getAttestationsForBlock vc=${vc}`, () => {
let protoArray: ProtoArray;
let forkchoice: ForkChoice;

before(function () {
this.timeout(5 * 60 * 1000); // Generating the states for the first time is very slow
beforeAll(
() => {
originalState = generatePerfTestCachedStateAltair({goBackOneSlot: true, vc});

originalState = generatePerfTestCachedStateAltair({goBackOneSlot: true, vc});

const {blockHeader, checkpoint} = computeAnchorCheckpoint(originalState.config, originalState);
// TODO figure out why getBlockRootAtSlot(originalState, justifiedSlot) is not the same to justifiedCheckpoint.root
const finalizedEpoch = originalState.finalizedCheckpoint.epoch;
const finalizedCheckpoint = {
epoch: finalizedEpoch,
root: getBlockRootAtSlot(originalState, computeStartSlotAtEpoch(finalizedEpoch)),
};
const justifiedEpoch = originalState.currentJustifiedCheckpoint.epoch;
const justifiedCheckpoint = {
epoch: justifiedEpoch,
root: getBlockRootAtSlot(originalState, computeStartSlotAtEpoch(justifiedEpoch)),
};

protoArray = ProtoArray.initialize(
{
slot: blockHeader.slot,
parentRoot: toHexString(blockHeader.parentRoot),
stateRoot: toHexString(blockHeader.stateRoot),
blockRoot: toHexString(checkpoint.root),

justifiedEpoch: justifiedCheckpoint.epoch,
justifiedRoot: toHexString(justifiedCheckpoint.root),
finalizedEpoch: finalizedCheckpoint.epoch,
finalizedRoot: toHexString(finalizedCheckpoint.root),
unrealizedJustifiedEpoch: justifiedCheckpoint.epoch,
unrealizedJustifiedRoot: toHexString(justifiedCheckpoint.root),
unrealizedFinalizedEpoch: finalizedCheckpoint.epoch,
unrealizedFinalizedRoot: toHexString(finalizedCheckpoint.root),
executionPayloadBlockHash: null,
executionStatus: ExecutionStatus.PreMerge,

timeliness: false,
dataAvailabilityStatus: DataAvailabilityStatus.PreData,
},
originalState.slot
);
const {blockHeader, checkpoint} = computeAnchorCheckpoint(originalState.config, originalState);
// TODO figure out why getBlockRootAtSlot(originalState, justifiedSlot) is not the same to justifiedCheckpoint.root
const finalizedEpoch = originalState.finalizedCheckpoint.epoch;
const finalizedCheckpoint = {
epoch: finalizedEpoch,
root: getBlockRootAtSlot(originalState, computeStartSlotAtEpoch(finalizedEpoch)),
};
const justifiedEpoch = originalState.currentJustifiedCheckpoint.epoch;
const justifiedCheckpoint = {
epoch: justifiedEpoch,
root: getBlockRootAtSlot(originalState, computeStartSlotAtEpoch(justifiedEpoch)),
};

for (let slot = computeStartSlotAtEpoch(finalizedCheckpoint.epoch); slot < originalState.slot; slot++) {
const epoch = computeEpochAtSlot(slot);
protoArray.onBlock(
protoArray = ProtoArray.initialize(
{
slot,
blockRoot: toHexString(getBlockRootAtSlot(originalState, slot)),
parentRoot: toHexString(getBlockRootAtSlot(originalState, slot - 1)),
stateRoot: toHexString(originalState.stateRoots.get(slot % HISTORICAL_ROOTS_LIMIT)),
targetRoot: toHexString(getBlockRootAtSlot(originalState, computeStartSlotAtEpoch(epoch))),
slot: blockHeader.slot,
parentRoot: toHexString(blockHeader.parentRoot),
stateRoot: toHexString(blockHeader.stateRoot),
blockRoot: toHexString(checkpoint.root),

justifiedEpoch: justifiedCheckpoint.epoch,
justifiedRoot: toHexString(justifiedCheckpoint.root),
finalizedEpoch: finalizedCheckpoint.epoch,
@@ -91,36 +64,64 @@ describe(`getAttestationsForBlock vc=${vc}`, () => {
unrealizedFinalizedRoot: toHexString(finalizedCheckpoint.root),
executionPayloadBlockHash: null,
executionStatus: ExecutionStatus.PreMerge,

timeliness: false,
dataAvailabilityStatus: DataAvailabilityStatus.PreData,
},
slot
originalState.slot
);
}

let totalBalance = 0;
for (let i = 0; i < originalState.epochCtx.effectiveBalanceIncrements.length; i++) {
totalBalance += originalState.epochCtx.effectiveBalanceIncrements[i];
}
for (let slot = computeStartSlotAtEpoch(finalizedCheckpoint.epoch); slot < originalState.slot; slot++) {
const epoch = computeEpochAtSlot(slot);
protoArray.onBlock(
{
slot,
blockRoot: toHexString(getBlockRootAtSlot(originalState, slot)),
parentRoot: toHexString(getBlockRootAtSlot(originalState, slot - 1)),
stateRoot: toHexString(originalState.stateRoots.get(slot % HISTORICAL_ROOTS_LIMIT)),
targetRoot: toHexString(getBlockRootAtSlot(originalState, computeStartSlotAtEpoch(epoch))),
justifiedEpoch: justifiedCheckpoint.epoch,
justifiedRoot: toHexString(justifiedCheckpoint.root),
finalizedEpoch: finalizedCheckpoint.epoch,
finalizedRoot: toHexString(finalizedCheckpoint.root),
unrealizedJustifiedEpoch: justifiedCheckpoint.epoch,
unrealizedJustifiedRoot: toHexString(justifiedCheckpoint.root),
unrealizedFinalizedEpoch: finalizedCheckpoint.epoch,
unrealizedFinalizedRoot: toHexString(finalizedCheckpoint.root),
executionPayloadBlockHash: null,
executionStatus: ExecutionStatus.PreMerge,
timeliness: false,
dataAvailabilityStatus: DataAvailabilityStatus.PreData,
},
slot
);
}

const fcStore: IForkChoiceStore = {
currentSlot: originalState.slot,
justified: {
checkpoint: {...justifiedCheckpoint, rootHex: toHexString(justifiedCheckpoint.root)},
balances: originalState.epochCtx.effectiveBalanceIncrements,
totalBalance,
},
unrealizedJustified: {
checkpoint: {...justifiedCheckpoint, rootHex: toHexString(justifiedCheckpoint.root)},
balances: originalState.epochCtx.effectiveBalanceIncrements,
},
finalizedCheckpoint: {...finalizedCheckpoint, rootHex: toHexString(finalizedCheckpoint.root)},
unrealizedFinalizedCheckpoint: {...finalizedCheckpoint, rootHex: toHexString(finalizedCheckpoint.root)},
justifiedBalancesGetter: () => originalState.epochCtx.effectiveBalanceIncrements,
equivocatingIndices: new Set(),
};
forkchoice = new ForkChoice(originalState.config, fcStore, protoArray);
});
let totalBalance = 0;
for (let i = 0; i < originalState.epochCtx.effectiveBalanceIncrements.length; i++) {
totalBalance += originalState.epochCtx.effectiveBalanceIncrements[i];
}

const fcStore: IForkChoiceStore = {
currentSlot: originalState.slot,
justified: {
checkpoint: {...justifiedCheckpoint, rootHex: toHexString(justifiedCheckpoint.root)},
balances: originalState.epochCtx.effectiveBalanceIncrements,
totalBalance,
},
unrealizedJustified: {
checkpoint: {...justifiedCheckpoint, rootHex: toHexString(justifiedCheckpoint.root)},
balances: originalState.epochCtx.effectiveBalanceIncrements,
},
finalizedCheckpoint: {...finalizedCheckpoint, rootHex: toHexString(finalizedCheckpoint.root)},
unrealizedFinalizedCheckpoint: {...finalizedCheckpoint, rootHex: toHexString(finalizedCheckpoint.root)},
justifiedBalancesGetter: () => originalState.epochCtx.effectiveBalanceIncrements,
equivocatingIndices: new Set(),
};
forkchoice = new ForkChoice(originalState.config, fcStore, protoArray);
},
5 * 60 * 1000
);

// notSeenSlots should be >=1
for (const [notSeenSlots, numMissedVotes, numBadVotes] of [
@@ -129,7 +130,7 @@ describe(`getAttestationsForBlock vc=${vc}`, () => {
// notSeenSlots=2 means the previous block slot is missed
[2, 1, 10],
]) {
itBench({
bench({
id: `notSeenSlots=${notSeenSlots} numMissedVotes=${numMissedVotes} numBadVotes=${numBadVotes}`,
before: () => {
const state = originalState.clone();
@@ -181,7 +182,7 @@ describe.skip("getAttestationsForBlock aggregationBits intersectValues vs get",
const aggregationBits = BitArray.fromBoolArray(Array.from({length: committeeLen}, () => true));
const notSeenValidatorIndices = Array.from({length: committeeLen}, (_, i) => i);

itBench({
bench({
id: "aggregationBits.intersectValues()",
fn: () => {
for (let i = 0; i < runsFactor; i++) {
Expand All @@ -191,7 +192,7 @@ describe.skip("getAttestationsForBlock aggregationBits intersectValues vs get",
runsFactor,
});

itBench({
bench({
id: "aggregationBits.get()",
fn: () => {
for (let i = 0; i < runsFactor; i++) {
Expand All @@ -203,7 +204,7 @@ describe.skip("getAttestationsForBlock aggregationBits intersectValues vs get",
runsFactor,
});

itBench({
bench({
id: "aggregationBits.get() with push()",
fn: () => {
for (let i = 0; i < runsFactor; i++) {
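
The last benchmarks above also show two more options carried over from the old runner: before runs once for its benchmark (here cloning the state), and runsFactor tells the runner how many units of work each timed fn call performs so reported times stay per-operation. A hedged sketch of both together, with a hypothetical workload:

import {bench, describe} from "@chainsafe/benchmark";

describe("before + runsFactor", () => {
  const runsFactor = 100;
  let data: Uint8Array;

  bench({
    id: "xor 64 KiB buffer",
    // One-time setup for this benchmark only
    before: () => {
      data = new Uint8Array(64 * 1024).fill(0xab);
    },
    fn: () => {
      // fn does `runsFactor` units of work per timed call
      for (let i = 0; i < runsFactor; i++) {
        for (let j = 0; j < data.length; j++) data[j] ^= 0xff;
      }
    },
    runsFactor,
  });
});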