From ef99ed711437f8b86941fed6eb8b92010c91df3a Mon Sep 17 00:00:00 2001 From: twoeths Date: Wed, 4 Sep 2024 08:20:35 +0700 Subject: [PATCH 1/7] fix: toPubkeyHex (#7065) * feat: add toPubkeyHex util * feat: consume toPubkeyHex * fix: use toPubkeyHex instead of toHex * chore: remove redundant comments --- packages/api/src/builder/routes.ts | 3 +- .../validator/keymanager/keystoreCache.ts | 6 +- .../validator/slashingProtection/export.ts | 5 +- .../cli/src/cmds/validator/voluntaryExit.ts | 4 +- packages/flare/src/cmds/selfSlashAttester.ts | 4 +- packages/flare/src/cmds/selfSlashProposer.ts | 4 +- .../src/cache/syncCommitteeCache.ts | 5 +- packages/utils/src/bytes/browser.ts | 71 ++++++++++++------- packages/utils/src/bytes/index.ts | 18 ++++- packages/utils/src/bytes/nodejs.ts | 16 +++++ packages/utils/test/unit/bytes.test.ts | 21 +++++- .../src/services/attestationDuties.ts | 7 +- packages/validator/src/services/block.ts | 5 +- .../validator/src/services/blockDuties.ts | 9 ++- packages/validator/src/services/indices.ts | 5 +- .../src/services/syncCommitteeDuties.ts | 4 +- .../validator/src/services/validatorStore.ts | 9 +-- .../validator/src/slashingProtection/index.ts | 7 +- .../interchange/formats/completeV4.ts | 3 +- .../interchange/formats/v5.ts | 3 +- 20 files changed, 138 insertions(+), 71 deletions(-) diff --git a/packages/api/src/builder/routes.ts b/packages/api/src/builder/routes.ts index 297c4fc5e9a9..3d74101bb046 100644 --- a/packages/api/src/builder/routes.ts +++ b/packages/api/src/builder/routes.ts @@ -13,6 +13,7 @@ import { } from "@lodestar/types"; import {ForkName, isForkBlobs} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; +import {toPubkeyHex} from "@lodestar/utils"; import {Endpoint, RouteDefinitions, Schema} from "../utils/index.js"; import {MetaHeader, VersionCodec, VersionMeta} from "../utils/metadata.js"; @@ -105,7 +106,7 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions ({ - params: {slot, parent_hash: toHexString(parentHash), pubkey: toHexString(proposerPubKey)}, + params: {slot, parent_hash: toHexString(parentHash), pubkey: toPubkeyHex(proposerPubKey)}, }), parseReq: ({params}) => ({ slot: params.slot, diff --git a/packages/cli/src/cmds/validator/keymanager/keystoreCache.ts b/packages/cli/src/cmds/validator/keymanager/keystoreCache.ts index 85b1702892ee..2997a6b6b113 100644 --- a/packages/cli/src/cmds/validator/keymanager/keystoreCache.ts +++ b/packages/cli/src/cmds/validator/keymanager/keystoreCache.ts @@ -3,7 +3,7 @@ import path from "node:path"; import {Keystore} from "@chainsafe/bls-keystore"; import {SecretKey} from "@chainsafe/blst"; import {SignerLocal, SignerType} from "@lodestar/validator"; -import {fromHex, toHex} from "@lodestar/utils"; +import {fromHex, toHex, toPubkeyHex} from "@lodestar/utils"; import {writeFile600Perm} from "../../../util/file.js"; import {lockFilepath, unlockFilepath} from "../../../util/lockfile.js"; import {LocalKeystoreDefinition} from "./interface.js"; @@ -42,9 +42,9 @@ export async function loadKeystoreCache( const secretKey = SecretKey.fromBytes(secretKeyBytes); const publicKey = secretKey.toPublicKey().toBytes(); - if (toHex(publicKey) !== toHex(fromHex(k.pubkey))) { + if (toPubkeyHex(publicKey) !== toPubkeyHex(fromHex(k.pubkey))) { throw new Error( - `Keystore ${k.uuid} does not match the expected pubkey. expected=${toHex(fromHex(k.pubkey))}, found=${toHex( + `Keystore ${k.uuid} does not match the expected pubkey. 
expected=${toPubkeyHex(fromHex(k.pubkey))}, found=${toHex( publicKey )}` ); diff --git a/packages/cli/src/cmds/validator/slashingProtection/export.ts b/packages/cli/src/cmds/validator/slashingProtection/export.ts index 7d1a4f8e6e2f..c18b020f5782 100644 --- a/packages/cli/src/cmds/validator/slashingProtection/export.ts +++ b/packages/cli/src/cmds/validator/slashingProtection/export.ts @@ -1,8 +1,7 @@ import path from "node:path"; -import {toHexString} from "@chainsafe/ssz"; import {InterchangeFormatVersion} from "@lodestar/validator"; import {getNodeLogger} from "@lodestar/logger/node"; -import {CliCommand} from "@lodestar/utils"; +import {CliCommand, toPubkeyHex} from "@lodestar/utils"; import {YargsError, ensure0xPrefix, isValidatePubkeyHex, writeFile600Perm} from "../../../util/index.js"; import {parseLoggerArgs} from "../../../util/logger.js"; import {GlobalArgs} from "../../../options/index.js"; @@ -86,7 +85,7 @@ export const exportCmd: CliCommand toHexString(pubkey) === pubkeyHex); + const existingPubkey = allPubkeys.find((pubkey) => toPubkeyHex(pubkey) === pubkeyHex); if (!existingPubkey) { logger.warn("Pubkey not found in slashing protection db", {pubkey: pubkeyHex}); } else { diff --git a/packages/cli/src/cmds/validator/voluntaryExit.ts b/packages/cli/src/cmds/validator/voluntaryExit.ts index 279076b619f2..a399a763662d 100644 --- a/packages/cli/src/cmds/validator/voluntaryExit.ts +++ b/packages/cli/src/cmds/validator/voluntaryExit.ts @@ -8,7 +8,7 @@ import { } from "@lodestar/state-transition"; import {createBeaconConfig, BeaconConfig} from "@lodestar/config"; import {phase0, ssz, ValidatorIndex, Epoch} from "@lodestar/types"; -import {CliCommand, fromHex, toHex} from "@lodestar/utils"; +import {CliCommand, fromHex, toPubkeyHex} from "@lodestar/utils"; import {externalSignerPostSignature, SignableMessageType, Signer, SignerType} from "@lodestar/validator"; import {ApiClient, getClient} from "@lodestar/api"; import {ensure0xPrefix, YargsError, wrapError} from "../../util/index.js"; @@ -209,7 +209,7 @@ async function resolveValidatorIndexes(client: ApiClient, signersToExit: SignerP const validators = (await client.beacon.getStateValidators({stateId: "head", validatorIds: pubkeys})).value(); - const dataByPubkey = new Map(validators.map((item) => [toHex(item.validator.pubkey), item])); + const dataByPubkey = new Map(validators.map((item) => [toPubkeyHex(item.validator.pubkey), item])); return signersToExit.map(({signer, pubkey}) => { const item = dataByPubkey.get(pubkey); diff --git a/packages/flare/src/cmds/selfSlashAttester.ts b/packages/flare/src/cmds/selfSlashAttester.ts index e29a956a9306..a37c6c765bd3 100644 --- a/packages/flare/src/cmds/selfSlashAttester.ts +++ b/packages/flare/src/cmds/selfSlashAttester.ts @@ -4,7 +4,7 @@ import {AttesterSlashing, phase0, ssz} from "@lodestar/types"; import {config as chainConfig} from "@lodestar/config/default"; import {createBeaconConfig, BeaconConfig} from "@lodestar/config"; import {DOMAIN_BEACON_ATTESTER, MAX_VALIDATORS_PER_COMMITTEE} from "@lodestar/params"; -import {CliCommand, toHexString} from "@lodestar/utils"; +import {CliCommand, toPubkeyHex} from "@lodestar/utils"; import {computeSigningRoot} from "@lodestar/state-transition"; import {deriveSecretKeys, SecretKeysArgs, secretKeysOptions} from "../util/deriveSecretKeys.js"; @@ -90,7 +90,7 @@ export async function selfSlashAttesterHandler(args: SelfSlashArgs): Promise; @@ -82,7 +83,7 @@ function computeSyncCommitteeIndices( for (const pubkey of pubkeys) { const validatorIndex = 
pubkey2index.get(pubkey); if (validatorIndex === undefined) { - throw Error(`SyncCommittee pubkey is unknown ${toHexString(pubkey)}`); + throw Error(`SyncCommittee pubkey is unknown ${toPubkeyHex(pubkey)}`); } validatorIndices.push(validatorIndex); diff --git a/packages/utils/src/bytes/browser.ts b/packages/utils/src/bytes/browser.ts index e6c04f79835f..f610e2912c04 100644 --- a/packages/utils/src/bytes/browser.ts +++ b/packages/utils/src/bytes/browser.ts @@ -1,26 +1,20 @@ +// "0".charCodeAt(0) = 48 +const CHAR_CODE_0 = 48; +// "x".charCodeAt(0) = 120 +const CHAR_CODE_X = 120; + export function toHex(bytes: Uint8Array): string { const charCodes = new Array(bytes.length * 2 + 2); - charCodes[0] = 48; - charCodes[1] = 120; - - for (let i = 0; i < bytes.length; i++) { - const byte = bytes[i]; - const first = (byte & 0xf0) >> 4; - const second = byte & 0x0f; + charCodes[0] = CHAR_CODE_0; + charCodes[1] = CHAR_CODE_X; - // "0".charCodeAt(0) = 48 - // "a".charCodeAt(0) = 97 => delta = 87 - charCodes[2 + 2 * i] = first < 10 ? first + 48 : first + 87; - charCodes[2 + 2 * i + 1] = second < 10 ? second + 48 : second + 87; - } + bytesIntoCharCodes(bytes, charCodes); return String.fromCharCode(...charCodes); } const rootCharCodes = new Array(32 * 2 + 2); -// "0".charCodeAt(0) -rootCharCodes[0] = 48; -// "x".charCodeAt(0) -rootCharCodes[1] = 120; +rootCharCodes[0] = CHAR_CODE_0; +rootCharCodes[1] = CHAR_CODE_X; /** * Convert a Uint8Array, length 32, to 0x-prefixed hex string @@ -30,17 +24,24 @@ export function toRootHex(root: Uint8Array): string { throw Error(`Expect root to be 32 bytes, got ${root.length}`); } - for (let i = 0; i < root.length; i++) { - const byte = root[i]; - const first = (byte & 0xf0) >> 4; - const second = byte & 0x0f; + bytesIntoCharCodes(root, rootCharCodes); + return String.fromCharCode(...rootCharCodes); +} - // "0".charCodeAt(0) = 48 - // "a".charCodeAt(0) = 97 => delta = 87 - rootCharCodes[2 + 2 * i] = first < 10 ? first + 48 : first + 87; - rootCharCodes[2 + 2 * i + 1] = second < 10 ? second + 48 : second + 87; +const pubkeyCharCodes = new Array(48 * 2 + 2); +pubkeyCharCodes[0] = CHAR_CODE_0; +pubkeyCharCodes[1] = CHAR_CODE_X; + +/** + * Convert a Uint8Array, length 48, to 0x-prefixed hex string + */ +export function toPubkeyHex(pubkey: Uint8Array): string { + if (pubkey.length !== CHAR_CODE_0) { + throw Error(`Expect pubkey to be 48 bytes, got ${pubkey.length}`); } - return String.fromCharCode(...rootCharCodes); + + bytesIntoCharCodes(pubkey, pubkeyCharCodes); + return String.fromCharCode(...pubkeyCharCodes); } export function fromHex(hex: string): Uint8Array { @@ -64,3 +65,23 @@ export function fromHex(hex: string): Uint8Array { } return bytes; } + +/** + * Populate charCodes from bytes. Note that charCodes index 0 and 1 ("0x") are not populated. + */ +function bytesIntoCharCodes(bytes: Uint8Array, charCodes: number[]): void { + if (bytes.length * 2 + 2 !== charCodes.length) { + throw Error(`Expect charCodes to be of length ${bytes.length * 2 + 2}, got ${charCodes.length}`); + } + + for (let i = 0; i < bytes.length; i++) { + const byte = bytes[i]; + const first = (byte & 0xf0) >> 4; + const second = byte & 0x0f; + + // "0".charCodeAt(0) = 48 + // "a".charCodeAt(0) = 97 => delta = 87 + charCodes[2 + 2 * i] = first < 10 ? first + 48 : first + 87; + charCodes[2 + 2 * i + 1] = second < 10 ? 
second + 48 : second + 87; + } +} diff --git a/packages/utils/src/bytes/index.ts b/packages/utils/src/bytes/index.ts index fe6a9fc40e54..a079764738b7 100644 --- a/packages/utils/src/bytes/index.ts +++ b/packages/utils/src/bytes/index.ts @@ -1,14 +1,26 @@ -import {toHex as browserToHex, toRootHex as browserToRootHex, fromHex as browserFromHex} from "./browser.js"; -import {toHex as nodeToHex, toRootHex as nodeToRootHex, fromHex as nodeFromHex} from "./nodejs.js"; +import { + toHex as browserToHex, + toRootHex as browserToRootHex, + fromHex as browserFromHex, + toPubkeyHex as browserToPubkeyHex, +} from "./browser.js"; +import { + toHex as nodeToHex, + toRootHex as nodeToRootHex, + fromHex as nodeFromHex, + toPubkeyHex as nodeToPubkeyHex, +} from "./nodejs.js"; let toHex = browserToHex; let toRootHex = browserToRootHex; +let toPubkeyHex = browserToPubkeyHex; let fromHex = browserFromHex; if (typeof Buffer !== "undefined") { toHex = nodeToHex; toRootHex = nodeToRootHex; + toPubkeyHex = nodeToPubkeyHex; fromHex = nodeFromHex; } -export {toHex, toRootHex, fromHex}; +export {toHex, toRootHex, toPubkeyHex, fromHex}; diff --git a/packages/utils/src/bytes/nodejs.ts b/packages/utils/src/bytes/nodejs.ts index 7f0fe50d2a4a..636f49bd8e76 100644 --- a/packages/utils/src/bytes/nodejs.ts +++ b/packages/utils/src/bytes/nodejs.ts @@ -27,6 +27,22 @@ export function toRootHex(root: Uint8Array): string { return `0x${rootBuf.toString("hex")}`; } +// Shared buffer to convert pubkey to hex +let pubkeyBuf: Buffer | undefined; + +export function toPubkeyHex(pubkey: Uint8Array): string { + if (pubkey.length !== 48) { + throw Error(`Expect pubkey to be 48 bytes, got ${pubkey.length}`); + } + + if (pubkeyBuf === undefined) { + pubkeyBuf = Buffer.alloc(48); + } + + pubkeyBuf.set(pubkey); + return `0x${pubkeyBuf.toString("hex")}`; +} + export function fromHex(hex: string): Uint8Array { const b = Buffer.from(hex.replace("0x", ""), "hex"); return new Uint8Array(b.buffer, b.byteOffset, b.length); diff --git a/packages/utils/test/unit/bytes.test.ts b/packages/utils/test/unit/bytes.test.ts index aefa3e240954..05789b839cfd 100644 --- a/packages/utils/test/unit/bytes.test.ts +++ b/packages/utils/test/unit/bytes.test.ts @@ -1,5 +1,5 @@ import {describe, it, expect} from "vitest"; -import {intToBytes, bytesToInt, toHex, fromHex, toHexString, toRootHex} from "../../src/index.js"; +import {intToBytes, bytesToInt, toHex, fromHex, toHexString, toRootHex, toPubkeyHex} from "../../src/index.js"; describe("intToBytes", () => { const zeroedArray = (length: number): number[] => Array.from({length}, () => 0); @@ -80,6 +80,25 @@ describe("toRootHex", () => { } }); +describe("toPubkeyHex", () => { + const testCases: {input: Uint8Array; output: string}[] = [ + { + input: new Uint8Array(Array.from({length: 48}, (_, i) => i)), + output: "0x000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f", + }, + { + input: new Uint8Array(Array.from({length: 48}, () => 0)), + output: "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + }, + ]; + + for (const {input, output} of testCases) { + it(`should convert root to hex string ${output}`, () => { + expect(toPubkeyHex(input)).toBe(output); + }); + } +}); + describe("fromHex", () => { const testCases: {input: string; output: Buffer | Uint8Array}[] = [ { diff --git a/packages/validator/src/services/attestationDuties.ts b/packages/validator/src/services/attestationDuties.ts index ea82bf0a4c72..83838afe1492 100644 
--- a/packages/validator/src/services/attestationDuties.ts +++ b/packages/validator/src/services/attestationDuties.ts @@ -1,6 +1,5 @@ -import {toHexString} from "@chainsafe/ssz"; import {SLOTS_PER_EPOCH} from "@lodestar/params"; -import {sleep} from "@lodestar/utils"; +import {sleep, toPubkeyHex} from "@lodestar/utils"; import {computeEpochAtSlot, isAggregatorFromCommitteeLength, isStartSlotOfEpoch} from "@lodestar/state-transition"; import {BLSSignature, Epoch, Slot, ValidatorIndex, RootHex} from "@lodestar/types"; import {ApiClient, routes} from "@lodestar/api"; @@ -97,7 +96,7 @@ export class AttestationDutiesService { removeDutiesForKey(pubkey: PubkeyHex): void { for (const [epoch, attDutiesAtEpoch] of this.dutiesByIndexByEpoch) { for (const [vIndex, attDutyAndProof] of attDutiesAtEpoch.dutiesByIndex) { - if (toHexString(attDutyAndProof.duty.pubkey) === pubkey) { + if (toPubkeyHex(attDutyAndProof.duty.pubkey) === pubkey) { attDutiesAtEpoch.dutiesByIndex.delete(vIndex); if (attDutiesAtEpoch.dutiesByIndex.size === 0) { this.dutiesByIndexByEpoch.delete(epoch); @@ -244,7 +243,7 @@ export class AttestationDutiesService { const attesterDuties = res.value(); const {dependentRoot} = res.meta(); const relevantDuties = attesterDuties.filter((duty) => { - const pubkeyHex = toHexString(duty.pubkey); + const pubkeyHex = toPubkeyHex(duty.pubkey); return this.validatorStore.hasVotingPubkey(pubkeyHex) && this.validatorStore.isDoppelgangerSafe(pubkeyHex); }); diff --git a/packages/validator/src/services/block.ts b/packages/validator/src/services/block.ts index 7792ef85be95..c3acf19c1669 100644 --- a/packages/validator/src/services/block.ts +++ b/packages/validator/src/services/block.ts @@ -1,4 +1,3 @@ -import {toHexString} from "@chainsafe/ssz"; import { BLSPubkey, Slot, @@ -15,7 +14,7 @@ import { } from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; import {ForkPreBlobs, ForkBlobs, ForkSeq, ForkExecution, ForkName} from "@lodestar/params"; -import {extendError, prettyBytes, prettyWeiToEth} from "@lodestar/utils"; +import {extendError, prettyBytes, prettyWeiToEth, toPubkeyHex} from "@lodestar/utils"; import {ApiClient, routes} from "@lodestar/api"; import {IClock, LoggerVc} from "../util/index.js"; import {PubkeyHex} from "../types.js"; @@ -110,7 +109,7 @@ export class BlockProposingService { /** Produce a block at the given slot for pubkey */ private async createAndPublishBlock(pubkey: BLSPubkey, slot: Slot): Promise { - const pubkeyHex = toHexString(pubkey); + const pubkeyHex = toPubkeyHex(pubkey); const logCtx = {slot, validator: prettyBytes(pubkeyHex)}; // Wrap with try catch here to re-use `logCtx` diff --git a/packages/validator/src/services/blockDuties.ts b/packages/validator/src/services/blockDuties.ts index 3282987f5d9e..d0e16f60e816 100644 --- a/packages/validator/src/services/blockDuties.ts +++ b/packages/validator/src/services/blockDuties.ts @@ -1,8 +1,7 @@ -import {toHexString} from "@chainsafe/ssz"; import {computeEpochAtSlot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {BLSPubkey, Epoch, RootHex, Slot} from "@lodestar/types"; import {ApiClient, routes} from "@lodestar/api"; -import {sleep} from "@lodestar/utils"; +import {sleep, toPubkeyHex} from "@lodestar/utils"; import {ChainConfig} from "@lodestar/config"; import {IClock, differenceHex, LoggerVc} from "../util/index.js"; import {PubkeyHex} from "../types.js"; @@ -67,7 +66,7 @@ export class BlockDutiesService { if (dutyAtEpoch) { for (const proposer of dutyAtEpoch.data) { if (proposer.slot 
=== slot) { - publicKeys.set(toHexString(proposer.pubkey), proposer.pubkey); + publicKeys.set(toPubkeyHex(proposer.pubkey), proposer.pubkey); } } } @@ -78,7 +77,7 @@ export class BlockDutiesService { removeDutiesForKey(pubkey: PubkeyHex): void { for (const blockDutyAtEpoch of this.proposers.values()) { blockDutyAtEpoch.data = blockDutyAtEpoch.data.filter((proposer) => { - return toHexString(proposer.pubkey) !== pubkey; + return toPubkeyHex(proposer.pubkey) !== pubkey; }); } } @@ -187,7 +186,7 @@ export class BlockDutiesService { const proposerDuties = res.value(); const {dependentRoot} = res.meta(); const relevantDuties = proposerDuties.filter((duty) => { - const pubkeyHex = toHexString(duty.pubkey); + const pubkeyHex = toPubkeyHex(duty.pubkey); return this.validatorStore.hasVotingPubkey(pubkeyHex) && this.validatorStore.isDoppelgangerSafe(pubkeyHex); }); diff --git a/packages/validator/src/services/indices.ts b/packages/validator/src/services/indices.ts index c6ef40b473e5..ec5155322918 100644 --- a/packages/validator/src/services/indices.ts +++ b/packages/validator/src/services/indices.ts @@ -1,6 +1,5 @@ -import {toHexString} from "@chainsafe/ssz"; import {ValidatorIndex} from "@lodestar/types"; -import {Logger, MapDef} from "@lodestar/utils"; +import {Logger, MapDef, toPubkeyHex} from "@lodestar/utils"; import {ApiClient, routes} from "@lodestar/api"; import {batchItems} from "../util/index.js"; import {Metrics} from "../metrics.js"; @@ -135,7 +134,7 @@ export class IndicesService { const status = statusToSimpleStatusMapping(validator.status); allValidatorStatuses.set(status, allValidatorStatuses.getOrDefault(status) + 1); - const pubkeyHex = toHexString(validator.validator.pubkey); + const pubkeyHex = toPubkeyHex(validator.validator.pubkey); if (!this.pubkey2index.has(pubkeyHex)) { this.logger.info("Validator seen on beacon chain", { validatorIndex: validator.index, diff --git a/packages/validator/src/services/syncCommitteeDuties.ts b/packages/validator/src/services/syncCommitteeDuties.ts index dd663528f751..ea448add15ec 100644 --- a/packages/validator/src/services/syncCommitteeDuties.ts +++ b/packages/validator/src/services/syncCommitteeDuties.ts @@ -1,4 +1,3 @@ -import {toHexString} from "@chainsafe/ssz"; import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, SYNC_COMMITTEE_SUBNET_SIZE} from "@lodestar/params"; import { computeEpochAtSlot, @@ -10,6 +9,7 @@ import { import {ChainForkConfig} from "@lodestar/config"; import {BLSSignature, Epoch, Slot, SyncPeriod, ValidatorIndex} from "@lodestar/types"; import {ApiClient, routes} from "@lodestar/api"; +import {toPubkeyHex} from "@lodestar/utils"; import {IClock, LoggerVc} from "../util/index.js"; import {PubkeyHex} from "../types.js"; import {Metrics} from "../metrics.js"; @@ -287,7 +287,7 @@ export class SyncCommitteeDutiesService { // Using `alreadyWarnedReorg` avoids excessive logs. 
// TODO: Use memory-efficient toHexString() - const pubkeyHex = toHexString(duty.pubkey); + const pubkeyHex = toPubkeyHex(duty.pubkey); dutiesByIndex.set(validatorIndex, {duty: {pubkey: pubkeyHex, validatorIndex, subnets}}); } diff --git a/packages/validator/src/services/validatorStore.ts b/packages/validator/src/services/validatorStore.ts index fa9d855aa24a..c6130f1fab95 100644 --- a/packages/validator/src/services/validatorStore.ts +++ b/packages/validator/src/services/validatorStore.ts @@ -42,6 +42,7 @@ import { SignedAggregateAndProof, } from "@lodestar/types"; import {routes} from "@lodestar/api"; +import {toPubkeyHex} from "@lodestar/utils"; import {ISlashingProtection} from "../slashingProtection/index.js"; import {PubkeyHex} from "../types.js"; import {externalSignerPostSignature, SignableMessageType, SignableMessage} from "../util/externalSignerClient.js"; @@ -723,7 +724,7 @@ export class ValidatorStore { regAttributes: {feeRecipient: Eth1Address; gasLimit: number}, slot: Slot ): Promise { - const pubkeyHex = typeof pubkeyMaybeHex === "string" ? pubkeyMaybeHex : toHexString(pubkeyMaybeHex); + const pubkeyHex = typeof pubkeyMaybeHex === "string" ? pubkeyMaybeHex : toPubkeyHex(pubkeyMaybeHex); const {feeRecipient, gasLimit} = regAttributes; const regFullKey = `${feeRecipient}-${gasLimit}`; const validatorData = this.validators.get(pubkeyHex); @@ -748,7 +749,7 @@ export class ValidatorStore { signableMessage: SignableMessage ): Promise { // TODO: Refactor indexing to not have to run toHexString() on the pubkey every time - const pubkeyHex = typeof pubkey === "string" ? pubkey : toHexString(pubkey); + const pubkeyHex = typeof pubkey === "string" ? pubkey : toPubkeyHex(pubkey); const signer = this.validators.get(pubkeyHex)?.signer; if (!signer) { @@ -787,7 +788,7 @@ export class ValidatorStore { private getSignerAndPubkeyHex(pubkey: BLSPubkeyMaybeHex): [Signer, string] { // TODO: Refactor indexing to not have to run toHexString() on the pubkey every time - const pubkeyHex = typeof pubkey === "string" ? pubkey : toHexString(pubkey); + const pubkeyHex = typeof pubkey === "string" ? pubkey : toPubkeyHex(pubkey); const signer = this.validators.get(pubkeyHex)?.signer; if (!signer) { throw Error(`Validator pubkey ${pubkeyHex} not known`); @@ -813,7 +814,7 @@ export class ValidatorStore { } private assertDoppelgangerSafe(pubKey: PubkeyHex | BLSPubkey): void { - const pubkeyHex = typeof pubKey === "string" ? pubKey : toHexString(pubKey); + const pubkeyHex = typeof pubKey === "string" ? 
pubKey : toPubkeyHex(pubKey); if (!this.isDoppelgangerSafe(pubkeyHex)) { throw new Error(`Doppelganger state for key ${pubkeyHex} is not safe`); } diff --git a/packages/validator/src/slashingProtection/index.ts b/packages/validator/src/slashingProtection/index.ts index dedbccf6cf94..bc57b0e51c13 100644 --- a/packages/validator/src/slashingProtection/index.ts +++ b/packages/validator/src/slashingProtection/index.ts @@ -1,6 +1,5 @@ -import {toHexString} from "@chainsafe/ssz"; import {BLSPubkey, Epoch, Root} from "@lodestar/types"; -import {Logger} from "@lodestar/utils"; +import {Logger, toPubkeyHex} from "@lodestar/utils"; import {LodestarValidatorDatabaseController} from "../types.js"; import {uniqueVectorArr} from "../slashingProtection/utils.js"; import {BlockBySlotRepository, SlashingProtectionBlockService} from "./block/index.js"; @@ -63,7 +62,7 @@ export class SlashingProtection implements ISlashingProtection { async importInterchange(interchange: Interchange, genesisValidatorsRoot: Root, logger?: Logger): Promise { const {data} = parseInterchange(interchange, genesisValidatorsRoot); for (const validator of data) { - logger?.info("Importing slashing protection", {pubkey: toHexString(validator.pubkey)}); + logger?.info("Importing slashing protection", {pubkey: toPubkeyHex(validator.pubkey)}); await this.blockService.importBlocks(validator.pubkey, validator.signedBlocks); await this.attestationService.importAttestations(validator.pubkey, validator.signedAttestations); } @@ -77,7 +76,7 @@ export class SlashingProtection implements ISlashingProtection { ): Promise { const validatorData: InterchangeLodestar["data"] = []; for (const pubkey of pubkeys) { - logger?.info("Exporting slashing protection", {pubkey: toHexString(pubkey)}); + logger?.info("Exporting slashing protection", {pubkey: toPubkeyHex(pubkey)}); validatorData.push({ pubkey, signedBlocks: await this.blockService.exportBlocks(pubkey), diff --git a/packages/validator/src/slashingProtection/interchange/formats/completeV4.ts b/packages/validator/src/slashingProtection/interchange/formats/completeV4.ts index 26d7f44f2e83..66aa31c52194 100644 --- a/packages/validator/src/slashingProtection/interchange/formats/completeV4.ts +++ b/packages/validator/src/slashingProtection/interchange/formats/completeV4.ts @@ -1,5 +1,6 @@ /* eslint-disable @typescript-eslint/naming-convention */ import {fromHexString, toHexString} from "@chainsafe/ssz"; +import {toPubkeyHex} from "@lodestar/utils"; import {InterchangeLodestar} from "../types.js"; import {fromOptionalHexString, numToString, toOptionalHexString} from "../../utils.js"; @@ -93,7 +94,7 @@ export function serializeInterchangeCompleteV4({ genesis_validators_root: toHexString(genesisValidatorsRoot), }, data: data.map((validator) => ({ - pubkey: toHexString(validator.pubkey), + pubkey: toPubkeyHex(validator.pubkey), signed_blocks: validator.signedBlocks.map((block) => ({ slot: numToString(block.slot), signing_root: toOptionalHexString(block.signingRoot), diff --git a/packages/validator/src/slashingProtection/interchange/formats/v5.ts b/packages/validator/src/slashingProtection/interchange/formats/v5.ts index c70dc84b1ed0..1c7f67b706a5 100644 --- a/packages/validator/src/slashingProtection/interchange/formats/v5.ts +++ b/packages/validator/src/slashingProtection/interchange/formats/v5.ts @@ -1,5 +1,6 @@ /* eslint-disable @typescript-eslint/naming-convention */ import {fromHexString, toHexString} from "@chainsafe/ssz"; +import {toPubkeyHex} from "@lodestar/utils"; import {InterchangeLodestar} 
from "../types.js"; import {fromOptionalHexString, numToString, toOptionalHexString} from "../../utils.js"; @@ -88,7 +89,7 @@ export function serializeInterchangeV5({data, genesisValidatorsRoot}: Interchang genesis_validators_root: toHexString(genesisValidatorsRoot), }, data: data.map((validator) => ({ - pubkey: toHexString(validator.pubkey), + pubkey: toPubkeyHex(validator.pubkey), signed_blocks: validator.signedBlocks.map((block) => ({ slot: numToString(block.slot), signing_root: toOptionalHexString(block.signingRoot), From 681bdcd775990d788c8674d6376b18778f6f588f Mon Sep 17 00:00:00 2001 From: twoeths Date: Wed, 4 Sep 2024 08:21:12 +0700 Subject: [PATCH 2/7] fix: improve performance of getExpectedWithdrawals (#7045) * fix: improve performance of getExpectedWithdrawals * chore: use isPostElectra variable * chore: check pre-capella --- .../src/block/processWithdrawals.ts | 47 +++++++++++++--- packages/state-transition/src/util/electra.ts | 55 +------------------ 2 files changed, 41 insertions(+), 61 deletions(-) diff --git a/packages/state-transition/src/block/processWithdrawals.ts b/packages/state-transition/src/block/processWithdrawals.ts index b06209167be3..185ddd80eb32 100644 --- a/packages/state-transition/src/block/processWithdrawals.ts +++ b/packages/state-transition/src/block/processWithdrawals.ts @@ -7,6 +7,7 @@ import { MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP, FAR_FUTURE_EPOCH, MIN_ACTIVATION_BALANCE, + MAX_EFFECTIVE_BALANCE, } from "@lodestar/params"; import {toRootHex} from "@lodestar/utils"; @@ -14,9 +15,9 @@ import {CachedBeaconStateCapella, CachedBeaconStateElectra} from "../types.js"; import { decreaseBalance, getValidatorMaxEffectiveBalance, + hasEth1WithdrawalCredential, + hasExecutionWithdrawalCredential, isCapellaPayloadHeader, - isFullyWithdrawableValidator, - isPartiallyWithdrawableValidator, } from "../util/index.js"; export function processWithdrawals( @@ -24,6 +25,8 @@ export function processWithdrawals( state: CachedBeaconStateCapella | CachedBeaconStateElectra, payload: capella.FullOrBlindedExecutionPayload ): void { + // partialWithdrawalsCount is withdrawals coming from EL since electra (EIP-7002) + // TODO - electra: may switch to executionWithdrawalsCount const {withdrawals: expectedWithdrawals, partialWithdrawalsCount} = getExpectedWithdrawals(fork, state); const numWithdrawals = expectedWithdrawals.length; @@ -86,16 +89,33 @@ export function getExpectedWithdrawals( sampledValidators: number; partialWithdrawalsCount: number; } { + if (fork < ForkSeq.capella) { + throw new Error(`getExpectedWithdrawals not supported at forkSeq=${fork} < ForkSeq.capella`); + } + const epoch = state.epochCtx.epoch; let withdrawalIndex = state.nextWithdrawalIndex; const {validators, balances, nextWithdrawalValidatorIndex} = state; const withdrawals: capella.Withdrawal[] = []; + const isPostElectra = fork >= ForkSeq.electra; - if (fork >= ForkSeq.electra) { + if (isPostElectra) { const stateElectra = state as CachedBeaconStateElectra; - for (const withdrawal of stateElectra.pendingPartialWithdrawals.getAllReadonly()) { + // MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP = 8, PENDING_PARTIAL_WITHDRAWALS_LIMIT: 134217728 so we should only call getAllReadonly() if it makes sense + // pendingPartialWithdrawals comes from EIP-7002 smart contract where it takes fee so it's more likely than not validator is in correct condition to withdraw + // also we may break early if withdrawableEpoch > epoch + const allPendingPartialWithdrawals = + stateElectra.pendingPartialWithdrawals.length <= 
MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP + ? stateElectra.pendingPartialWithdrawals.getAllReadonly() + : null; + + // EIP-7002: Execution layer triggerable withdrawals + for (let i = 0; i < stateElectra.pendingPartialWithdrawals.length; i++) { + const withdrawal = allPendingPartialWithdrawals + ? allPendingPartialWithdrawals[i] + : stateElectra.pendingPartialWithdrawals.getReadonly(i); if (withdrawal.withdrawableEpoch > epoch || withdrawals.length === MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP) { break; } @@ -121,6 +141,7 @@ export function getExpectedWithdrawals( } } + // partialWithdrawalsCount is withdrawals coming from EL since electra (EIP-7002) const partialWithdrawalsCount = withdrawals.length; const bound = Math.min(validators.length, MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP); let n = 0; @@ -132,13 +153,18 @@ export function getExpectedWithdrawals( const validator = validators.getReadonly(validatorIndex); const balance = balances.get(validatorIndex); + const {withdrawableEpoch, withdrawalCredentials, effectiveBalance} = validator; + const hasWithdrawableCredentials = isPostElectra + ? hasExecutionWithdrawalCredential(withdrawalCredentials) + : hasEth1WithdrawalCredential(withdrawalCredentials); // early skip for balance = 0 as its now more likely that validator has exited/slahed with // balance zero than not have withdrawal credentials set - if (balance === 0) { + if (balance === 0 || !hasWithdrawableCredentials) { continue; } - if (isFullyWithdrawableValidator(fork, validator, balance, epoch)) { + // capella full withdrawal + if (withdrawableEpoch <= epoch) { withdrawals.push({ index: withdrawalIndex, validatorIndex, @@ -146,12 +172,17 @@ export function getExpectedWithdrawals( amount: BigInt(balance), }); withdrawalIndex++; - } else if (isPartiallyWithdrawableValidator(fork, validator, balance)) { + } else if ( + effectiveBalance === + (isPostElectra ? 
getValidatorMaxEffectiveBalance(withdrawalCredentials) : MAX_EFFECTIVE_BALANCE) && + balance > effectiveBalance + ) { + // capella partial withdrawal withdrawals.push({ index: withdrawalIndex, validatorIndex, address: validator.withdrawalCredentials.subarray(12), - amount: BigInt(balance - getValidatorMaxEffectiveBalance(validator.withdrawalCredentials)), + amount: BigInt(balance - effectiveBalance), }); withdrawalIndex++; } diff --git a/packages/state-transition/src/util/electra.ts b/packages/state-transition/src/util/electra.ts index 63f74bc96cc9..ac34da6407de 100644 --- a/packages/state-transition/src/util/electra.ts +++ b/packages/state-transition/src/util/electra.ts @@ -1,17 +1,8 @@ -import { - COMPOUNDING_WITHDRAWAL_PREFIX, - FAR_FUTURE_EPOCH, - ForkSeq, - MAX_EFFECTIVE_BALANCE, - MIN_ACTIVATION_BALANCE, -} from "@lodestar/params"; -import {ValidatorIndex, phase0, ssz} from "@lodestar/types"; +import {COMPOUNDING_WITHDRAWAL_PREFIX, FAR_FUTURE_EPOCH, MIN_ACTIVATION_BALANCE} from "@lodestar/params"; +import {ValidatorIndex, ssz} from "@lodestar/types"; import {CachedBeaconStateElectra} from "../types.js"; -import {getValidatorMaxEffectiveBalance} from "./validator.js"; import {hasEth1WithdrawalCredential} from "./capella.js"; -type ValidatorInfo = Pick; - export function hasCompoundingWithdrawalCredential(withdrawalCredentials: Uint8Array): boolean { return withdrawalCredentials[0] === COMPOUNDING_WITHDRAWAL_PREFIX; } @@ -22,48 +13,6 @@ export function hasExecutionWithdrawalCredential(withdrawalCredentials: Uint8Arr ); } -export function isFullyWithdrawableValidator( - fork: ForkSeq, - validatorCredential: ValidatorInfo, - balance: number, - epoch: number -): boolean { - const {withdrawableEpoch, withdrawalCredentials} = validatorCredential; - - if (fork < ForkSeq.capella) { - throw new Error(`isFullyWithdrawableValidator not supported at forkSeq=${fork} < ForkSeq.capella`); - } - const hasWithdrawableCredentials = - fork >= ForkSeq.electra - ? hasExecutionWithdrawalCredential(withdrawalCredentials) - : hasEth1WithdrawalCredential(withdrawalCredentials); - - return hasWithdrawableCredentials && withdrawableEpoch <= epoch && balance > 0; -} - -export function isPartiallyWithdrawableValidator( - fork: ForkSeq, - validatorCredential: ValidatorInfo, - balance: number -): boolean { - const {effectiveBalance, withdrawalCredentials} = validatorCredential; - - if (fork < ForkSeq.capella) { - throw new Error(`isPartiallyWithdrawableValidator not supported at forkSeq=${fork} < ForkSeq.capella`); - } - const hasWithdrawableCredentials = - fork >= ForkSeq.electra - ? hasExecutionWithdrawalCredential(withdrawalCredentials) - : hasEth1WithdrawalCredential(withdrawalCredentials); - - const validatorMaxEffectiveBalance = - fork >= ForkSeq.electra ? 
getValidatorMaxEffectiveBalance(withdrawalCredentials) : MAX_EFFECTIVE_BALANCE; - const hasMaxEffectiveBalance = effectiveBalance === validatorMaxEffectiveBalance; - const hasExcessBalance = balance > validatorMaxEffectiveBalance; - - return hasWithdrawableCredentials && hasMaxEffectiveBalance && hasExcessBalance; -} - export function switchToCompoundingValidator(state: CachedBeaconStateElectra, index: ValidatorIndex): void { const validator = state.validators.get(index); From 4e22884db58ac119fdb0e59424841c92d4322ae9 Mon Sep 17 00:00:00 2001 From: twoeths Date: Wed, 4 Sep 2024 08:21:48 +0700 Subject: [PATCH 3/7] fix: improve regen state (#7033) * fix: improve regen state * fix: check for null block returned from db * feat: track state.hashTreeRoot() in regen.getState() * fix: transfer cache when regen state * fix: add caller as label to regenGetState metrics --- packages/beacon-node/src/chain/regen/regen.ts | 70 +++++++++++++++---- .../stateCache/persistentCheckpointsCache.ts | 4 ++ .../src/metrics/metrics/lodestar.ts | 28 ++++++++ .../state-transition/src/stateTransition.ts | 1 + 4 files changed, 91 insertions(+), 12 deletions(-) diff --git a/packages/beacon-node/src/chain/regen/regen.ts b/packages/beacon-node/src/chain/regen/regen.ts index 409c12c77b21..04cf5b40b494 100644 --- a/packages/beacon-node/src/chain/regen/regen.ts +++ b/packages/beacon-node/src/chain/regen/regen.ts @@ -1,5 +1,5 @@ import {fromHexString} from "@chainsafe/ssz"; -import {phase0, Slot, RootHex, BeaconBlock} from "@lodestar/types"; +import {phase0, Slot, RootHex, BeaconBlock, SignedBeaconBlock} from "@lodestar/types"; import { CachedBeaconStateAllForks, computeEpochAtSlot, @@ -8,6 +8,7 @@ import { DataAvailableStatus, processSlots, stateTransition, + StateHashTreeRootSource, } from "@lodestar/state-transition"; import {IForkChoice, ProtoBlock} from "@lodestar/fork-choice"; import {Logger, toRootHex} from "@lodestar/utils"; @@ -145,7 +146,7 @@ export class StateRegenerator implements IStateRegeneratorInternal { */ async getState( stateRoot: RootHex, - _rCaller: RegenCaller, + caller: RegenCaller, opts?: StateCloneOpts, // internal option, don't want to expose to external caller allowDiskReload = false @@ -156,6 +157,13 @@ export class StateRegenerator implements IStateRegeneratorInternal { return cachedStateCtx; } + // in block gossip validation (getPreState() call), dontTransferCache is specified as true because we only want to transfer cache in verifyBlocksStateTransitionOnly() + // but here we want to process blocks as fast as possible so force to transfer cache in this case + if (opts && allowDiskReload) { + // if there is no `opts` specified, it already means "false" + opts.dontTransferCache = false; + } + // Otherwise we have to use the fork choice to traverse backwards, block by block, // searching the state caches // then replay blocks forward to the desired stateRoot @@ -166,6 +174,8 @@ export class StateRegenerator implements IStateRegeneratorInternal { const blocksToReplay = [block]; let state: CachedBeaconStateAllForks | null = null; const {checkpointStateCache} = this.modules; + + const getSeedStateTimer = this.modules.metrics?.regenGetState.getSeedState.startTimer({caller}); // iterateAncestorBlocks only returns ancestor blocks, not the block itself for (const b of this.modules.forkChoice.iterateAncestorBlocks(block.blockRoot)) { state = this.modules.blockStateCache.get(b.stateRoot, opts); @@ -181,6 +191,7 @@ export class StateRegenerator implements IStateRegeneratorInternal { } blocksToReplay.push(b); 
} + getSeedStateTimer?.(); if (state === null) { throw new RegenError({ @@ -188,19 +199,50 @@ export class StateRegenerator implements IStateRegeneratorInternal { }); } + const blockCount = blocksToReplay.length; const MAX_EPOCH_TO_PROCESS = 5; - if (blocksToReplay.length > MAX_EPOCH_TO_PROCESS * SLOTS_PER_EPOCH) { + if (blockCount > MAX_EPOCH_TO_PROCESS * SLOTS_PER_EPOCH) { throw new RegenError({ code: RegenErrorCode.TOO_MANY_BLOCK_PROCESSED, stateRoot, }); } - const replaySlots = blocksToReplay.map((b) => b.slot).join(","); - this.modules.logger.debug("Replaying blocks to get state", {stateRoot, replaySlots}); - for (const b of blocksToReplay.reverse()) { - const block = await this.modules.db.block.get(fromHexString(b.blockRoot)); - if (!block) { + this.modules.metrics?.regenGetState.blockCount.observe({caller}, blockCount); + + const replaySlots = new Array(blockCount); + const blockPromises = new Array>(blockCount); + + const protoBlocksAsc = blocksToReplay.reverse(); + for (const [i, protoBlock] of protoBlocksAsc.entries()) { + replaySlots[i] = protoBlock.slot; + blockPromises[i] = this.modules.db.block.get(fromHexString(protoBlock.blockRoot)); + } + + const logCtx = {stateRoot, replaySlots: replaySlots.join(",")}; + this.modules.logger.debug("Replaying blocks to get state", logCtx); + + const loadBlocksTimer = this.modules.metrics?.regenGetState.loadBlocks.startTimer({caller}); + const blockOrNulls = await Promise.all(blockPromises); + loadBlocksTimer?.(); + + const blocksByRoot = new Map(); + for (const [i, blockOrNull] of blockOrNulls.entries()) { + // checking early here helps prevent unneccessary state transition below + if (blockOrNull === null) { + throw new RegenError({ + code: RegenErrorCode.BLOCK_NOT_IN_DB, + blockRoot: protoBlocksAsc[i].blockRoot, + }); + } + blocksByRoot.set(protoBlocksAsc[i].blockRoot, blockOrNull); + } + + const stateTransitionTimer = this.modules.metrics?.regenGetState.stateTransition.startTimer({caller}); + for (const b of protoBlocksAsc) { + const block = blocksByRoot.get(b.blockRoot); + // just to make compiler happy, we checked in the above for loop already + if (block === undefined) { throw new RegenError({ code: RegenErrorCode.BLOCK_NOT_IN_DB, blockRoot: b.blockRoot, @@ -224,7 +266,12 @@ export class StateRegenerator implements IStateRegeneratorInternal { this.modules.metrics ); + const hashTreeRootTimer = this.modules.metrics?.stateHashTreeRootTime.startTimer({ + source: StateHashTreeRootSource.regenState, + }); const stateRoot = toRootHex(state.hashTreeRoot()); + hashTreeRootTimer?.(); + if (b.stateRoot !== stateRoot) { throw new RegenError({ slot: b.slot, @@ -238,9 +285,6 @@ export class StateRegenerator implements IStateRegeneratorInternal { // also with allowDiskReload flag, we "reload" it to the state cache too this.modules.blockStateCache.add(state); } - - // this avoids keeping our node busy processing blocks - await nextEventLoop(); } catch (e) { throw new RegenError({ code: RegenErrorCode.STATE_TRANSITION_ERROR, @@ -248,7 +292,9 @@ export class StateRegenerator implements IStateRegeneratorInternal { }); } } - this.modules.logger.debug("Replayed blocks to get state", {stateRoot, replaySlots}); + stateTransitionTimer?.(); + + this.modules.logger.debug("Replayed blocks to get state", {...logCtx, stateSlot: state.slot}); return state; } diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts index b315beba46d9..823f066abcd6 100644 --- 
a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts @@ -229,6 +229,10 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { newCachedState.commit(); const stateRoot = toRootHex(newCachedState.hashTreeRoot()); timer?.(); + + // load all cache in order for consumers (usually regen.getState()) to process blocks faster + newCachedState.validators.getAllReadonlyValues(); + newCachedState.balances.getAll(); this.logger.debug("Reload: cached state load successful", { ...logMeta, stateSlot: newCachedState.slot, diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts index a0cf0a185c2f..55d43922d936 100644 --- a/packages/beacon-node/src/metrics/metrics/lodestar.ts +++ b/packages/beacon-node/src/metrics/metrics/lodestar.ts @@ -1413,6 +1413,34 @@ export function createLodestarMetrics( help: "UnhandledPromiseRejection total count", }), + // regen.getState metrics + regenGetState: { + blockCount: register.histogram<{caller: RegenCaller}>({ + name: "lodestar_regen_get_state_block_count", + help: "Block count in regen.getState", + labelNames: ["caller"], + buckets: [4, 8, 16, 32, 64], + }), + getSeedState: register.histogram<{caller: RegenCaller}>({ + name: "lodestar_regen_get_state_get_seed_state_seconds", + help: "Duration of get seed state in regen.getState", + labelNames: ["caller"], + buckets: [0.1, 0.5, 1, 2, 3, 4], + }), + loadBlocks: register.histogram<{caller: RegenCaller}>({ + name: "lodestar_regen_get_state_load_blocks_seconds", + help: "Duration of load blocks in regen.getState", + labelNames: ["caller"], + buckets: [0.1, 0.5, 1, 2, 3, 4], + }), + stateTransition: register.histogram<{caller: RegenCaller}>({ + name: "lodestar_regen_get_state_state_transition_seconds", + help: "Duration of state transition in regen.getState", + labelNames: ["caller"], + buckets: [0.1, 0.5, 1, 2, 3, 4], + }), + }, + // Precompute next epoch transition precomputeNextEpochTransition: { count: register.counter<{result: string}>({ diff --git a/packages/state-transition/src/stateTransition.ts b/packages/state-transition/src/stateTransition.ts index 40e87c8d07d2..3b97f19282a4 100644 --- a/packages/state-transition/src/stateTransition.ts +++ b/packages/state-transition/src/stateTransition.ts @@ -54,6 +54,7 @@ export enum StateHashTreeRootSource { blockTransition = "block_transition", prepareNextSlot = "prepare_next_slot", prepareNextEpoch = "prepare_next_epoch", + regenState = "regen_state", computeNewStateRoot = "compute_new_state_root", } From fe6c4acbfcc6ee3027b1c4833706b415417e448e Mon Sep 17 00:00:00 2001 From: twoeths Date: Thu, 5 Sep 2024 08:34:43 +0700 Subject: [PATCH 4/7] feat: archive state using BufferPool if provided (#7042) * fix: archive state using BufferPool if provided * chore: fix comment --- .../src/chain/archiver/archiveStates.ts | 14 ++++- .../beacon-node/src/chain/archiver/index.ts | 2 +- packages/beacon-node/src/chain/chain.ts | 6 +- packages/beacon-node/src/chain/interface.ts | 2 + .../beacon-node/src/chain/serializeState.ts | 33 +++++++++++ .../stateCache/persistentCheckpointsCache.ts | 59 ++++++------------- .../src/metrics/metrics/lodestar.ts | 11 ++-- packages/beacon-node/src/util/bufferPool.ts | 20 ++++--- .../test/unit/util/bufferPool.test.ts | 10 ++-- 9 files changed, 94 insertions(+), 63 deletions(-) create mode 100644 packages/beacon-node/src/chain/serializeState.ts diff --git 
a/packages/beacon-node/src/chain/archiver/archiveStates.ts b/packages/beacon-node/src/chain/archiver/archiveStates.ts index 2231cd3ff513..53f2033d80e8 100644 --- a/packages/beacon-node/src/chain/archiver/archiveStates.ts +++ b/packages/beacon-node/src/chain/archiver/archiveStates.ts @@ -6,6 +6,8 @@ import {CheckpointWithHex} from "@lodestar/fork-choice"; import {IBeaconDb} from "../../db/index.js"; import {IStateRegenerator} from "../regen/interface.js"; import {getStateSlotFromBytes} from "../../util/multifork.js"; +import {serializeState} from "../serializeState.js"; +import {AllocSource, BufferPool} from "../../util/bufferPool.js"; /** * Minimum number of epochs between single temp archived states @@ -30,7 +32,8 @@ export class StatesArchiver { private readonly regen: IStateRegenerator, private readonly db: IBeaconDb, private readonly logger: Logger, - private readonly opts: StatesArchiverOpts + private readonly opts: StatesArchiverOpts, + private readonly bufferPool?: BufferPool | null ) {} /** @@ -95,8 +98,13 @@ export class StatesArchiver { await this.db.stateArchive.putBinary(slot, finalizedStateOrBytes); this.logger.verbose("Archived finalized state bytes", {epoch: finalized.epoch, slot, root: rootHex}); } else { - // state - await this.db.stateArchive.put(finalizedStateOrBytes.slot, finalizedStateOrBytes); + // serialize state using BufferPool if provided + await serializeState( + finalizedStateOrBytes, + AllocSource.ARCHIVE_STATE, + (stateBytes) => this.db.stateArchive.putBinary(finalizedStateOrBytes.slot, stateBytes), + this.bufferPool + ); // don't delete states before the finalized state, auto-prune will take care of it this.logger.verbose("Archived finalized state", { epoch: finalized.epoch, diff --git a/packages/beacon-node/src/chain/archiver/index.ts b/packages/beacon-node/src/chain/archiver/index.ts index ee0711e05e4b..294c2281e19b 100644 --- a/packages/beacon-node/src/chain/archiver/index.ts +++ b/packages/beacon-node/src/chain/archiver/index.ts @@ -48,7 +48,7 @@ export class Archiver { opts: ArchiverOpts ) { this.archiveBlobEpochs = opts.archiveBlobEpochs; - this.statesArchiver = new StatesArchiver(chain.regen, db, logger, opts); + this.statesArchiver = new StatesArchiver(chain.regen, db, logger, opts, chain.bufferPool); this.prevFinalized = chain.forkChoice.getFinalizedCheckpoint(); this.jobQueue = new JobItemQueue<[CheckpointWithHex], void>(this.processFinalizedCheckpoint, { maxLength: PROCESS_FINALIZED_CHECKPOINT_QUEUE_LEN, diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index b410a1e84655..8dbb49798538 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -119,6 +119,7 @@ export class BeaconChain implements IBeaconChain { readonly config: BeaconConfig; readonly logger: Logger; readonly metrics: Metrics | null; + readonly bufferPool: BufferPool | null; readonly anchorStateLatestBlockSlot: Slot; @@ -272,6 +273,9 @@ export class BeaconChain implements IBeaconChain { const blockStateCache = this.opts.nHistoricalStates ? new FIFOBlockStateCache(this.opts, {metrics}) : new BlockStateCacheImpl({metrics}); + this.bufferPool = this.opts.nHistoricalStates + ? new BufferPool(anchorState.type.tree_serializedSize(anchorState.node), metrics) + : null; const checkpointStateCache = this.opts.nHistoricalStates ? 
new PersistentCheckpointStateCache( { @@ -280,7 +284,7 @@ export class BeaconChain implements IBeaconChain { clock, shufflingCache: this.shufflingCache, blockStateCache, - bufferPool: new BufferPool(anchorState.type.tree_serializedSize(anchorState.node), metrics), + bufferPool: this.bufferPool, datastore: fileDataStore ? // debug option if we want to investigate any issues with the DB new FileCPStateDatastore() diff --git a/packages/beacon-node/src/chain/interface.ts b/packages/beacon-node/src/chain/interface.ts index ca13dc604ea0..5185662eaa4f 100644 --- a/packages/beacon-node/src/chain/interface.ts +++ b/packages/beacon-node/src/chain/interface.ts @@ -30,6 +30,7 @@ import {IEth1ForBlockProduction} from "../eth1/index.js"; import {IExecutionEngine, IExecutionBuilder} from "../execution/index.js"; import {Metrics} from "../metrics/metrics.js"; import {IClock} from "../util/clock.js"; +import {BufferPool} from "../util/bufferPool.js"; import {ChainEventEmitter} from "./emitter.js"; import {IStateRegenerator, RegenCaller} from "./regen/index.js"; import {IBlsVerifier} from "./bls/index.js"; @@ -86,6 +87,7 @@ export interface IBeaconChain { readonly config: BeaconConfig; readonly logger: Logger; readonly metrics: Metrics | null; + readonly bufferPool: BufferPool | null; /** The initial slot that the chain is started with */ readonly anchorStateLatestBlockSlot: Slot; diff --git a/packages/beacon-node/src/chain/serializeState.ts b/packages/beacon-node/src/chain/serializeState.ts new file mode 100644 index 000000000000..cbb2ecd18cff --- /dev/null +++ b/packages/beacon-node/src/chain/serializeState.ts @@ -0,0 +1,33 @@ +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {AllocSource, BufferPool} from "../util/bufferPool.js"; + +type ProcessStateBytesFn = (stateBytes: Uint8Array) => Promise; + +/* + * Serialize state using the BufferPool if provided. 
+ */ +export async function serializeState( + state: CachedBeaconStateAllForks, + source: AllocSource, + processFn: ProcessStateBytesFn, + bufferPool?: BufferPool | null +): Promise { + const size = state.type.tree_serializedSize(state.node); + let stateBytes: Uint8Array | null = null; + if (bufferPool) { + const bufferWithKey = bufferPool.alloc(size, source); + if (bufferWithKey) { + stateBytes = bufferWithKey.buffer; + const dataView = new DataView(stateBytes.buffer, stateBytes.byteOffset, stateBytes.byteLength); + state.serializeToBytes({uint8Array: stateBytes, dataView}, 0); + } + } + + if (!stateBytes) { + // we already have metrics in BufferPool so no need to do it here + stateBytes = state.serialize(); + } + + return processFn(stateBytes); + // release the buffer back to the pool automatically +} diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts index 823f066abcd6..190b79e58cd6 100644 --- a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts @@ -8,8 +8,9 @@ import {INTERVALS_PER_SLOT} from "@lodestar/params"; import {Metrics} from "../../metrics/index.js"; import {IClock} from "../../util/clock.js"; import {ShufflingCache} from "../shufflingCache.js"; -import {BufferPool, BufferWithKey} from "../../util/bufferPool.js"; +import {AllocSource, BufferPool, BufferWithKey} from "../../util/bufferPool.js"; import {StateCloneOpts} from "../regen/interface.js"; +import {serializeState} from "../serializeState.js"; import {MapTracker} from "./mapMetrics.js"; import {CPStateDatastore, DatastoreKey, datastoreKeyToCheckpoint} from "./datastore/index.js"; import {CheckpointHex, CacheItemType, CheckpointStateCache, BlockStateCache} from "./types.js"; @@ -29,7 +30,7 @@ type PersistentCheckpointStateCacheModules = { shufflingCache: ShufflingCache; datastore: CPStateDatastore; blockStateCache: BlockStateCache; - bufferPool?: BufferPool; + bufferPool?: BufferPool | null; }; /** checkpoint serialized as a string */ @@ -106,7 +107,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { private readonly datastore: CPStateDatastore; private readonly shufflingCache: ShufflingCache; private readonly blockStateCache: BlockStateCache; - private readonly bufferPool?: BufferPool; + private readonly bufferPool?: BufferPool | null; constructor( { @@ -698,19 +699,20 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { // persist and do not update epochIndex this.metrics?.statePersistSecFromSlot.observe(this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 0); const cpPersist = {epoch: epoch, root: fromHexString(rootHex)}; - { - const timer = this.metrics?.stateSerializeDuration.startTimer(); - // automatically free the buffer pool after this scope - using stateBytesWithKey = this.serializeState(state); - let stateBytes = stateBytesWithKey?.buffer; - if (stateBytes == null) { - // fallback logic to use regular way to get state ssz bytes - this.metrics?.persistedStateAllocCount.inc(); - stateBytes = state.serialize(); - } - timer?.(); - persistedKey = await this.datastore.write(cpPersist, stateBytes); - } + // It's not sustainable to allocate ~240MB for each state every epoch, so we use buffer pool to reuse the memory. 
+ // As monitored on holesky as of Jan 2024: + // - This does not increase heap allocation while gc time is the same + // - It helps stabilize persist time and save ~300ms in average (1.5s vs 1.2s) + // - It also helps the state reload to save ~500ms in average (4.3s vs 3.8s) + // - Also `serializeState.test.ts` perf test shows a lot of differences allocating ~240MB once vs per state serialization + const timer = this.metrics?.stateSerializeDuration.startTimer(); + persistedKey = await serializeState( + state, + AllocSource.PERSISTENT_CHECKPOINTS_CACHE_STATE, + (stateBytes) => this.datastore.write(cpPersist, stateBytes), + this.bufferPool + ); + timer?.(); persistCount++; this.logger.verbose("Pruned checkpoint state from memory and persisted to disk", { ...logMeta, @@ -767,29 +769,6 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { }); } - /* - * It's not sustainable to allocate ~240MB for each state every epoch, so we use buffer pool to reuse the memory. - * As monitored on holesky as of Jan 2024: - * - This does not increase heap allocation while gc time is the same - * - It helps stabilize persist time and save ~300ms in average (1.5s vs 1.2s) - * - It also helps the state reload to save ~500ms in average (4.3s vs 3.8s) - * - Also `serializeState.test.ts` perf test shows a lot of differences allocating ~240MB once vs per state serialization - */ - private serializeState(state: CachedBeaconStateAllForks): BufferWithKey | null { - const size = state.type.tree_serializedSize(state.node); - if (this.bufferPool) { - const bufferWithKey = this.bufferPool.alloc(size); - if (bufferWithKey) { - const stateBytes = bufferWithKey.buffer; - const dataView = new DataView(stateBytes.buffer, stateBytes.byteOffset, stateBytes.byteLength); - state.serializeToBytes({uint8Array: stateBytes, dataView}, 0); - return bufferWithKey; - } - } - - return null; - } - /** * Serialize validators to bytes leveraging the buffer pool to save memory allocation. 
* - As monitored on holesky as of Jan 2024, it helps save ~500ms state reload time (4.3s vs 3.8s) @@ -800,7 +779,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { const type = state.type.fields.validators; const size = type.tree_serializedSize(state.validators.node); if (this.bufferPool) { - const bufferWithKey = this.bufferPool.alloc(size); + const bufferWithKey = this.bufferPool.alloc(size, AllocSource.PERSISTENT_CHECKPOINTS_CACHE_VALIDATORS); if (bufferWithKey) { const validatorsBytes = bufferWithKey.buffer; const dataView = new DataView(validatorsBytes.buffer, validatorsBytes.byteOffset, validatorsBytes.byteLength); diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts index 55d43922d936..737a900e5f64 100644 --- a/packages/beacon-node/src/metrics/metrics/lodestar.ts +++ b/packages/beacon-node/src/metrics/metrics/lodestar.ts @@ -18,6 +18,7 @@ import {LodestarMetadata} from "../options.js"; import {RegistryMetricCreator} from "../utils/registryMetricCreator.js"; import {OpSource} from "../validatorMonitor.js"; import {CacheItemType} from "../../chain/stateCache/types.js"; +import {AllocSource} from "../../util/bufferPool.js"; export type LodestarMetrics = ReturnType; @@ -1165,13 +1166,15 @@ export function createLodestarMetrics( name: "lodestar_buffer_pool_length", help: "Buffer pool length", }), - hits: register.counter({ + hits: register.counter<{source: AllocSource}>({ name: "lodestar_buffer_pool_hits_total", help: "Total number of buffer pool hits", + labelNames: ["source"], }), - misses: register.counter({ + misses: register.counter<{source: AllocSource}>({ name: "lodestar_buffer_pool_misses_total", help: "Total number of buffer pool misses", + labelNames: ["source"], }), grows: register.counter({ name: "lodestar_buffer_pool_grows_total", @@ -1271,10 +1274,6 @@ export function createLodestarMetrics( name: "lodestar_cp_state_cache_persisted_state_remove_count", help: "Total number of persisted states removed", }), - persistedStateAllocCount: register.counter({ - name: "lodestar_cp_state_cache_persisted_state_alloc_count", - help: "Total number time to allocate memory for persisted state", - }), }, balancesCache: { diff --git a/packages/beacon-node/src/util/bufferPool.ts b/packages/beacon-node/src/util/bufferPool.ts index f9e18a6d64a5..e3cf10fa88b3 100644 --- a/packages/beacon-node/src/util/bufferPool.ts +++ b/packages/beacon-node/src/util/bufferPool.ts @@ -5,6 +5,12 @@ import {Metrics} from "../metrics/metrics.js"; */ const GROW_RATIO = 1.1; +export enum AllocSource { + PERSISTENT_CHECKPOINTS_CACHE_VALIDATORS = "persistent_checkpoints_cache_validators", + PERSISTENT_CHECKPOINTS_CACHE_STATE = "persistent_checkpoints_cache_state", + ARCHIVE_STATE = "archive_state", +} + /** * A simple implementation to manage a single buffer. * This is initially used for state serialization at every epoch and for state reload. @@ -36,24 +42,24 @@ export class BufferPool { * If the buffer is already in use, return null. * Grow the buffer if the requested size is larger than the current buffer. */ - alloc(size: number): BufferWithKey | null { - return this.doAlloc(size, false); + alloc(size: number, source: AllocSource): BufferWithKey | null { + return this.doAlloc(size, source, false); } /** * Same to alloc() but the buffer is not zeroed. 
*/ - allocUnsafe(size: number): BufferWithKey | null { - return this.doAlloc(size, true); + allocUnsafe(size: number, source: AllocSource): BufferWithKey | null { + return this.doAlloc(size, source, true); } - private doAlloc(size: number, isUnsafe = false): BufferWithKey | null { + private doAlloc(size: number, source: AllocSource, isUnsafe = false): BufferWithKey | null { if (this.inUse) { - this.metrics?.misses.inc(); + this.metrics?.misses.inc({source}); return null; } this.inUse = true; - this.metrics?.hits.inc(); + this.metrics?.hits.inc({source}); this.currentKey += 1; if (size > this.buffer.length) { this.metrics?.grows.inc(); diff --git a/packages/beacon-node/test/unit/util/bufferPool.test.ts b/packages/beacon-node/test/unit/util/bufferPool.test.ts index 2c789c19f74d..ff66504ae65f 100644 --- a/packages/beacon-node/test/unit/util/bufferPool.test.ts +++ b/packages/beacon-node/test/unit/util/bufferPool.test.ts @@ -1,12 +1,12 @@ import {describe, it, expect} from "vitest"; -import {BufferPool} from "../../../src/util/bufferPool.js"; +import {AllocSource, BufferPool} from "../../../src/util/bufferPool.js"; describe("BufferPool", () => { const pool = new BufferPool(100); it("should increase length", () => { expect(pool.length).toEqual(110); - using mem = pool.alloc(200); + using mem = pool.alloc(200, AllocSource.PERSISTENT_CHECKPOINTS_CACHE_STATE); if (mem === null) { throw Error("Expected non-null mem"); } @@ -15,15 +15,15 @@ describe("BufferPool", () => { it("should not allow alloc if in use", () => { { - using mem = pool.alloc(20); + using mem = pool.alloc(20, AllocSource.PERSISTENT_CHECKPOINTS_CACHE_STATE); if (mem === null) { throw Error("Expected non-null mem"); } // in the same scope we can't allocate again - expect(pool.alloc(20)).toEqual(null); + expect(pool.alloc(20, AllocSource.PERSISTENT_CHECKPOINTS_CACHE_STATE)).toEqual(null); } // out of the scope we can allocate again - expect(pool.alloc(20)).not.toEqual(null); + expect(pool.alloc(20, AllocSource.PERSISTENT_CHECKPOINTS_CACHE_STATE)).not.toEqual(null); }); }); From b05c93e1471083d10fefda20c11514fd3afe94df Mon Sep 17 00:00:00 2001 From: Nico Flaig Date: Thu, 5 Sep 2024 02:35:10 +0100 Subject: [PATCH 5/7] feat: include more details in validator attestation logs (#7064) --- packages/validator/src/services/attestation.ts | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/packages/validator/src/services/attestation.ts b/packages/validator/src/services/attestation.ts index fc43b603c6b2..927bd3d92bb4 100644 --- a/packages/validator/src/services/attestation.ts +++ b/packages/validator/src/services/attestation.ts @@ -2,7 +2,7 @@ import {toHexString} from "@chainsafe/ssz"; import {BLSSignature, phase0, Slot, ssz, Attestation, SignedAggregateAndProof} from "@lodestar/types"; import {ForkSeq} from "@lodestar/params"; import {computeEpochAtSlot, isAggregatorFromCommitteeLength} from "@lodestar/state-transition"; -import {sleep} from "@lodestar/utils"; +import {prettyBytes, sleep} from "@lodestar/utils"; import {ApiClient, routes} from "@lodestar/api"; import {ChainForkConfig} from "@lodestar/config"; import {IClock, LoggerVc} from "../util/index.js"; @@ -242,7 +242,11 @@ export class AttestationService { } else { (await this.api.beacon.submitPoolAttestations({signedAttestations})).assertOk(); } - this.logger.info("Published attestations", {...logCtx, count: signedAttestations.length}); + this.logger.info("Published attestations", { + ...logCtx, + head: prettyBytes(headRootHex), + count: 
signedAttestations.length, + }); this.metrics?.publishedAttestations.inc(signedAttestations.length); } catch (e) { // Note: metric counts only 1 since we don't know how many signedAttestations are invalid @@ -286,7 +290,8 @@ export class AttestationService { slot: attestation.slot, }); const aggregate = res.value(); - this.metrics?.numParticipantsInAggregate.observe(aggregate.aggregationBits.getTrueBitIndexes().length); + const participants = aggregate.aggregationBits.getTrueBitIndexes().length; + this.metrics?.numParticipantsInAggregate.observe(participants); const signedAggregateAndProofs: SignedAggregateAndProof[] = []; @@ -316,7 +321,11 @@ export class AttestationService { } else { (await this.api.validator.publishAggregateAndProofs({signedAggregateAndProofs})).assertOk(); } - this.logger.info("Published aggregateAndProofs", {...logCtx, count: signedAggregateAndProofs.length}); + this.logger.info("Published aggregateAndProofs", { + ...logCtx, + participants, + count: signedAggregateAndProofs.length, + }); this.metrics?.publishedAggregates.inc(signedAggregateAndProofs.length); } catch (e) { this.logger.error("Error publishing aggregateAndProofs", logCtx, e as Error); From 0e79d29720185e180ff684162ffba188313e8df2 Mon Sep 17 00:00:00 2001 From: NC <17676176+ensi321@users.noreply.github.com> Date: Mon, 9 Sep 2024 14:35:40 -0700 Subject: [PATCH 6/7] feat: rename getValidatorMaxEffectiveBalance (#7070) * Rename getValidatorMaxEffectiveBalance * Lint --- packages/state-transition/src/block/processWithdrawals.ts | 5 ++--- packages/state-transition/src/util/genesis.ts | 4 ++-- packages/state-transition/src/util/validator.ts | 4 ++-- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/state-transition/src/block/processWithdrawals.ts b/packages/state-transition/src/block/processWithdrawals.ts index 185ddd80eb32..d4dfd47b4d94 100644 --- a/packages/state-transition/src/block/processWithdrawals.ts +++ b/packages/state-transition/src/block/processWithdrawals.ts @@ -14,7 +14,7 @@ import {toRootHex} from "@lodestar/utils"; import {CachedBeaconStateCapella, CachedBeaconStateElectra} from "../types.js"; import { decreaseBalance, - getValidatorMaxEffectiveBalance, + getMaxEffectiveBalance, hasEth1WithdrawalCredential, hasExecutionWithdrawalCredential, isCapellaPayloadHeader, @@ -173,8 +173,7 @@ export function getExpectedWithdrawals( }); withdrawalIndex++; } else if ( - effectiveBalance === - (isPostElectra ? getValidatorMaxEffectiveBalance(withdrawalCredentials) : MAX_EFFECTIVE_BALANCE) && + effectiveBalance === (isPostElectra ? 
getMaxEffectiveBalance(withdrawalCredentials) : MAX_EFFECTIVE_BALANCE) && balance > effectiveBalance ) { // capella partial withdrawal diff --git a/packages/state-transition/src/util/genesis.ts b/packages/state-transition/src/util/genesis.ts index 02bcef00bb55..54507d0ef235 100644 --- a/packages/state-transition/src/util/genesis.ts +++ b/packages/state-transition/src/util/genesis.ts @@ -18,7 +18,7 @@ import {EpochCacheImmutableData} from "../cache/epochCache.js"; import {processDeposit} from "../block/processDeposit.js"; import {increaseBalance} from "../index.js"; import {computeEpochAtSlot} from "./epoch.js"; -import {getActiveValidatorIndices, getValidatorMaxEffectiveBalance} from "./validator.js"; +import {getActiveValidatorIndices, getMaxEffectiveBalance} from "./validator.js"; import {getTemporaryBlockHeader} from "./blockRoot.js"; import {newFilledArray} from "./array.js"; import {getNextSyncCommittee} from "./syncCommittee.js"; @@ -195,7 +195,7 @@ export function applyDeposits( const balance = balancesArr[i]; const effectiveBalance = Math.min( balance - (balance % EFFECTIVE_BALANCE_INCREMENT), - getValidatorMaxEffectiveBalance(validator.withdrawalCredentials) + getMaxEffectiveBalance(validator.withdrawalCredentials) ); validator.effectiveBalance = effectiveBalance; diff --git a/packages/state-transition/src/util/validator.ts b/packages/state-transition/src/util/validator.ts index 728f14587fde..ebad21d9d25c 100644 --- a/packages/state-transition/src/util/validator.ts +++ b/packages/state-transition/src/util/validator.ts @@ -75,7 +75,7 @@ export function getConsolidationChurnLimit(epochCtx: EpochCache): number { return getBalanceChurnLimit(epochCtx) - getActivationExitChurnLimit(epochCtx); } -export function getValidatorMaxEffectiveBalance(withdrawalCredentials: Uint8Array): number { +export function getMaxEffectiveBalance(withdrawalCredentials: Uint8Array): number { // Compounding withdrawal credential only available since Electra if (hasCompoundingWithdrawalCredential(withdrawalCredentials)) { return MAX_EFFECTIVE_BALANCE_ELECTRA; @@ -85,7 +85,7 @@ export function getValidatorMaxEffectiveBalance(withdrawalCredentials: Uint8Arra } export function getActiveBalance(state: CachedBeaconStateElectra, validatorIndex: ValidatorIndex): number { - const validatorMaxEffectiveBalance = getValidatorMaxEffectiveBalance( + const validatorMaxEffectiveBalance = getMaxEffectiveBalance( state.validators.getReadonly(validatorIndex).withdrawalCredentials ); From cbc00c7c16384fe9de82e6f4670792245efd0452 Mon Sep 17 00:00:00 2001 From: Matthew Keil Date: Mon, 9 Sep 2024 17:37:50 -0400 Subject: [PATCH 7/7] feat: add util to diff ssz objects (#7041) --- packages/utils/src/diff.ts | 232 ++++++++++++++++++++++++++++++++++++ packages/utils/src/index.ts | 1 + 2 files changed, 233 insertions(+) create mode 100644 packages/utils/src/diff.ts diff --git a/packages/utils/src/diff.ts b/packages/utils/src/diff.ts new file mode 100644 index 000000000000..204989016b46 --- /dev/null +++ b/packages/utils/src/diff.ts @@ -0,0 +1,232 @@ +/* eslint-disable no-console */ +import fs from "node:fs"; + +const primitiveTypeof = ["number", "string", "bigint", "boolean"]; +export type BufferType = Uint8Array | Uint32Array; +export type PrimitiveType = number | string | bigint | boolean | BufferType; +export type DiffableCollection = Record; +export type Diffable = PrimitiveType | Array | DiffableCollection; + +export interface Diff { + objectPath: string; + errorMessage?: string; + val1: Diffable; + val2: Diffable; +} + +export 
function diffUint8Array(val1: Uint8Array, val2: PrimitiveType, objectPath: string): Diff[] { + if (!(val2 instanceof Uint8Array)) { + return [ + { + objectPath, + errorMessage: `val1${objectPath} is a Uint8Array, but val2${objectPath} is not`, + val1, + val2, + }, + ]; + } + const hex1 = Buffer.from(val1).toString("hex"); + const hex2 = Buffer.from(val2).toString("hex"); + if (hex1 !== hex2) { + return [ + { + objectPath, + val1: `0x${hex1}`, + val2: `0x${hex2}`, + }, + ]; + } + return []; +} + +export function diffUint32Array(val1: Uint32Array, val2: PrimitiveType, objectPath: string): Diff[] { + if (!(val2 instanceof Uint32Array)) { + return [ + { + objectPath, + errorMessage: `val1${objectPath} is a Uint32Array, but val2${objectPath} is not`, + val1, + val2, + }, + ]; + } + const diffs: Diff[] = []; + val1.forEach((value, index) => { + const value2 = val2[index]; + if (value !== value2) { + diffs.push({ + objectPath: `${objectPath}[${index}]`, + val1: `0x${value.toString(16).padStart(8, "0")}`, + val2: value2 ? `0x${val2[index].toString(16).padStart(8, "0")}` : "undefined", + }); + } + }); + return diffs; +} + +function diffPrimitiveValue(val1: PrimitiveType, val2: PrimitiveType, objectPath: string): Diff[] { + if (val1 instanceof Uint8Array) { + return diffUint8Array(val1, val2, objectPath); + } + if (val1 instanceof Uint32Array) { + return diffUint32Array(val1, val2, objectPath); + } + + const diff = {objectPath, val1, val2} as Diff; + const type1 = typeof val1; + if (!primitiveTypeof.includes(type1)) { + diff.errorMessage = `val1${objectPath} is not a supported type`; + } + const type2 = typeof val2; + if (!primitiveTypeof.includes(type2)) { + diff.errorMessage = `val2${objectPath} is not a supported type`; + } + if (type1 !== type2) { + diff.errorMessage = `val1${objectPath} is not the same type as val2${objectPath}`; + } + if (val1 !== val2) { + return [diff]; + } + return []; +} + +function isPrimitiveValue(val: unknown): val is PrimitiveType { + if (Array.isArray(val)) return false; + if (typeof val === "object") { + return val instanceof Uint8Array || val instanceof Uint32Array; + } + return true; +} + +function isDiffable(val: unknown): val is Diffable { + return !(typeof val === "function" || typeof val === "symbol" || typeof val === "undefined" || val === null); +} + +export function getDiffs(val1: Diffable, val2: Diffable, objectPath: string): Diff[] { + if (isPrimitiveValue(val1)) { + if (!isPrimitiveValue(val2)) { + return [ + { + objectPath, + errorMessage: `val1${objectPath} is a primitive value and val2${objectPath} is not`, + val1, + val2, + }, + ]; + } + return diffPrimitiveValue(val1, val2, objectPath); + } + + const isArray = Array.isArray(val1); + let errorMessage: string | undefined; + if (isArray && !Array.isArray(val2)) { + errorMessage = `val1${objectPath} is an array and val2${objectPath} is not`; + } else if (typeof val1 === "object" && typeof val2 !== "object") { + errorMessage = `val1${objectPath} is a nested object and val2${objectPath} is not`; + } + if (errorMessage) { + return [ + { + objectPath, + errorMessage, + val1, + val2, + }, + ]; + } + + const diffs: Diff[] = []; + for (const [index, value] of Object.entries(val1)) { + if (!isDiffable(value)) { + diffs.push({objectPath, val1, val2, errorMessage: `val1${objectPath} is not Diffable`}); + continue; + } + const value2 = (val2 as DiffableCollection)[index]; + if (!isDiffable(value2)) { + diffs.push({objectPath, val1, val2, errorMessage: `val2${objectPath} is not Diffable`}); + continue; + } + const 
innerPath = isArray ? `${objectPath}[${index}]` : `${objectPath}.${index}`; + diffs.push(...getDiffs(value, value2, innerPath)); + } + return diffs; +} + +/** + * Find the different values on complex, nested objects. Outputs the path through the object to + * each value that does not match from val1 and val2. Optionally can output the values that differ. + * + * For objects that differ greatly, can write to a file instead of the terminal for analysis + * + * ## Example + * ```ts + * const obj1 = { + * key1: { + * key2: [ + * { key3: 1 }, + * { key3: new Uint8Array([1, 2, 3]) } + * ] + * }, + * key4: new Uint32Array([1, 2, 3]), + * key5: 362436 + * }; + * + * const obj2 = { + * key1: { + * key2: [ + * { key3: 1 }, + * { key3: new Uint8Array([1, 2, 4]) } + * ] + * }, + * key4: new Uint32Array([1, 2, 4]) + * key5: true + * }; + * + * diffObjects(obj1, obj2, true); + * + * + * ``` + * + * ## Output + * ```sh + * val.key1.key2[1].key3 + * - 0x010203 + * - 0x010204 + * val.key4[2] + * - 0x00000003 + * - 0x00000004 + * val.key5 + * val1.key5 is not the same type as val2.key5 + * - 362436 + * - true + * ``` + */ +export function diff(val1: unknown, val2: unknown, outputValues = false, filename?: string): void { + if (!isDiffable(val1)) { + console.log("val1 is not Diffable"); + return; + } + if (!isDiffable(val2)) { + console.log("val2 is not Diffable"); + return; + } + const diffs = getDiffs(val1, val2, ""); + let output = ""; + if (diffs.length) { + diffs.forEach((diff) => { + let diffOutput = `value${diff.objectPath}`; + if (diff.errorMessage) { + diffOutput += `\n ${diff.errorMessage}`; + } + if (outputValues) { + diffOutput += `\n - ${diff.val1.toString()}\n - ${diff.val2.toString()}\n`; + } + output += `${diffOutput}\n`; + }); + if (filename) { + fs.writeFileSync(filename, output); + } else { + console.log(output); + } + } +} diff --git a/packages/utils/src/index.ts b/packages/utils/src/index.ts index 13ea1ffb7e69..4e0be0c592a1 100644 --- a/packages/utils/src/index.ts +++ b/packages/utils/src/index.ts @@ -4,6 +4,7 @@ export * from "./base64.js"; export * from "./bytes.js"; export * from "./bytes/index.js"; export * from "./command.js"; +export * from "./diff.js"; export * from "./err.js"; export * from "./errors.js"; export * from "./format.js";
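
A minimal usage sketch for the diff util introduced in the last patch. The import path follows the `packages/utils/src/index.ts` export added above; the two state-like objects are made up purely for illustration and are not part of the change.

```ts
import {diff, getDiffs} from "@lodestar/utils";

// Hypothetical objects to compare; any mix of primitives, arrays,
// Uint8Array and Uint32Array fields is supported by the util.
const expected = {slot: 100, stateRoot: new Uint8Array([1, 2, 3]), balances: new Uint32Array([32, 32])};
const actual = {slot: 100, stateRoot: new Uint8Array([1, 2, 4]), balances: new Uint32Array([32, 31])};

// Print every mismatching path, including the differing values
diff(expected, actual, true);

// Or collect the structured diffs programmatically
const diffs = getDiffs(expected, actual, "");
for (const d of diffs) {
  console.log(d.objectPath, d.errorMessage ?? "");
}
```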
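
For the buffer pool changes earlier in the series, a caller-side sketch of how `alloc()` is now attributed to an `AllocSource` label, assuming the common pattern of falling back to a one-off allocation when the pool is busy (as the existing `serializeValidators` code does). `withPooledBuffer` is a hypothetical helper, not part of the patch, and the import path is indicative only.

```ts
import {BufferPool, AllocSource} from "./util/bufferPool.js";

// Serialize some bytes through the pool when it is free, falling back to a
// one-off allocation when the pool is already in use.
function withPooledBuffer(pool: BufferPool | undefined, size: number, write: (buf: Uint8Array) => void): void {
  if (pool) {
    // Hits and misses are now recorded per AllocSource label on the pool metrics
    using mem = pool.alloc(size, AllocSource.ARCHIVE_STATE);
    if (mem) {
      write(mem.buffer);
      return;
    }
  }
  // Pool in use (or not configured): pay the one-off allocation cost
  write(new Uint8Array(size));
}
```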