From 20546b0ebd524842ed0ffd2cd086fce59ae6b084 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Mon, 28 Jul 2025 22:48:58 -0300 Subject: [PATCH 1/5] feat: Proposer invalidates previous block if needed If the previous pending block on L1 has invalid attestations, the proposer for the next slot invalidates it as part of its multicall. Next step is for the prover to invalidate the last block in the epoch it wants to prove. --- .../archiver/src/archiver/archiver.ts | 26 ++- .../archiver/src/archiver/validation.test.ts | 26 ++- .../archiver/src/archiver/validation.ts | 23 +- .../archiver/src/test/mock_l2_block_source.ts | 10 +- .../ethereum/src/contracts/empire_base.ts | 2 + .../ethereum/src/contracts/rollup.test.ts | 110 +++++++++ yarn-project/ethereum/src/contracts/rollup.ts | 85 ++++++- .../ethereum/src/test/rollup_cheat_codes.ts | 15 +- yarn-project/ethereum/src/utils.ts | 2 +- .../src/publisher/sequencer-publisher.ts | 218 ++++++++++++++---- .../src/sequencer/sequencer.ts | 66 ++++-- yarn-project/stdlib/src/block/l2_block.ts | 2 + .../stdlib/src/block/l2_block_source.ts | 44 +++- .../stdlib/src/interfaces/archiver.test.ts | 8 +- .../stdlib/src/interfaces/archiver.ts | 4 +- .../txe/src/state_machine/archiver.ts | 12 +- 16 files changed, 560 insertions(+), 93 deletions(-) create mode 100644 yarn-project/ethereum/src/contracts/rollup.test.ts diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index b3530950c502..3c9ff89b3354 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -10,6 +10,7 @@ import { } from '@aztec/ethereum'; import { maxBigint } from '@aztec/foundation/bigint'; import { Buffer16, Buffer32 } from '@aztec/foundation/buffer'; +import { pick } from '@aztec/foundation/collection'; import type { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { type Logger, createLogger } from '@aztec/foundation/log'; @@ -87,7 +88,7 @@ import { InitialBlockNumberNotSequentialError, NoBlobBodiesFoundError } from './ import { ArchiverInstrumentation } from './instrumentation.js'; import type { InboxMessage } from './structs/inbox_message.js'; import type { PublishedL2Block } from './structs/published.js'; -import { validateBlockAttestations } from './validation.js'; +import { type ValidateBlockResult, validateBlockAttestations } from './validation.js'; /** * Helper interface to combine all sources this archiver implementation provides. @@ -119,6 +120,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem private l1BlockNumber: bigint | undefined; private l1Timestamp: bigint | undefined; + private pendingChainValidationStatus: ValidateBlockResult = { valid: true }; private initialSyncComplete: boolean = false; public readonly tracer: Tracer; @@ -356,10 +358,11 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem // We only do this if rollup cant prune on the next submission. Otherwise we will end up // re-syncing the blocks we have just unwound above. We also dont do this if the last block is invalid, // since the archiver will rightfully refuse to sync up to it. 
- if (!rollupCanPrune && !rollupStatus.lastBlockIsInvalid) { + if (!rollupCanPrune && !rollupStatus.lastBlockValidationResult.valid) { await this.checkForNewBlocksBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber); } + this.pendingChainValidationStatus = rollupStatus.lastBlockValidationResult; this.instrumentation.updateL1BlockHeight(currentL1BlockNumber); } @@ -617,7 +620,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem provenArchive, pendingBlockNumber: Number(pendingBlockNumber), pendingArchive, - lastBlockIsInvalid: false, + lastBlockValidationResult: { valid: true } as ValidateBlockResult, }; this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, { localPendingBlockNumber, @@ -793,16 +796,19 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem for (const block of publishedBlocks) { const isProven = block.block.number <= provenBlockNumber; - if (!isProven && !(await validateBlockAttestations(block, this.epochCache, this.l1constants, this.log))) { + rollupStatus.lastBlockValidationResult = isProven + ? { valid: true } + : await validateBlockAttestations(block, this.epochCache, this.l1constants, this.log); + + if (!rollupStatus.lastBlockValidationResult.valid) { this.log.warn(`Skipping block ${block.block.number} due to invalid attestations`, { blockHash: block.block.hash(), l1BlockNumber: block.l1.blockNumber, + ...pick(rollupStatus.lastBlockValidationResult, 'reason'), }); - rollupStatus.lastBlockIsInvalid = true; continue; } - rollupStatus.lastBlockIsInvalid = false; validBlocks.push(block); this.log.debug(`Ingesting new L2 block ${block.block.number} with ${block.block.body.txEffects.length} txs`, { blockHash: block.block.hash(), @@ -1200,6 +1206,14 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem return this.store.getDebugFunctionName(address, selector); } + getPendingChainValidationStatus(): Promise { + return Promise.resolve(this.pendingChainValidationStatus); + } + + isPendingChainInvalid(): Promise { + return Promise.resolve(this.pendingChainValidationStatus.valid === false); + } + async getL2Tips(): Promise { const [latestBlockNumber, provenBlockNumber] = await Promise.all([ this.getBlockNumber(), diff --git a/yarn-project/archiver/src/archiver/validation.test.ts b/yarn-project/archiver/src/archiver/validation.test.ts index bc630f258c09..3bc51dcee5cc 100644 --- a/yarn-project/archiver/src/archiver/validation.test.ts +++ b/yarn-project/archiver/src/archiver/validation.test.ts @@ -45,7 +45,8 @@ describe('validateBlockAttestations', () => { const block = await makeBlock([], []); const result = await validateBlockAttestations(block, epochCache, constants, logger); - expect(result).toBe(true); + expect(result.valid).toBe(true); + expect(result.block).toBe(block); expect(epochCache.getCommitteeForEpoch).toHaveBeenCalledWith(0n); }); @@ -53,7 +54,8 @@ describe('validateBlockAttestations', () => { const block = await makeBlock(signers, committee); const result = await validateBlockAttestations(block, epochCache, constants, logger); - expect(result).toBe(true); + expect(result.valid).toBe(true); + expect(result.block).toBe(block); expect(epochCache.getCommitteeForEpoch).toHaveBeenCalledWith(0n); }); }); @@ -73,19 +75,33 @@ describe('validateBlockAttestations', () => { const badSigner = Secp256k1Signer.random(); const block = await makeBlock([...signers, badSigner], [...committee, badSigner.address]); const result = await 
validateBlockAttestations(block, epochCache, constants, logger); - expect(result).toBe(false); + expect(result.valid).toBe(false); + if (!result.valid) { + expect(result.reason).toBe('invalid-attestation'); + expect(result.block).toBe(block); + expect(result.committee).toEqual(committee); + if (result.reason === 'invalid-attestation') { + expect(result.invalidIndex).toBe(5); // The bad signer is at index 5 + } + } }); it('returns false if insufficient attestations', async () => { const block = await makeBlock(signers.slice(0, 2), committee); const result = await validateBlockAttestations(block, epochCache, constants, logger); - expect(result).toBe(false); + expect(result.valid).toBe(false); + if (!result.valid) { + expect(result.reason).toBe('insufficient-attestations'); + expect(result.block).toBe(block); + expect(result.committee).toEqual(committee); + } }); it('returns true if all attestations are valid and sufficient', async () => { const block = await makeBlock(signers.slice(0, 4), committee); const result = await validateBlockAttestations(block, epochCache, constants, logger); - expect(result).toBe(true); + expect(result.valid).toBe(true); + expect(result.block).toBe(block); }); }); }); diff --git a/yarn-project/archiver/src/archiver/validation.ts b/yarn-project/archiver/src/archiver/validation.ts index da55170132c7..f013e78be1db 100644 --- a/yarn-project/archiver/src/archiver/validation.ts +++ b/yarn-project/archiver/src/archiver/validation.ts @@ -1,18 +1,24 @@ import type { EpochCache } from '@aztec/epoch-cache'; import type { Logger } from '@aztec/foundation/log'; -import { type PublishedL2Block, getAttestationsFromPublishedL2Block } from '@aztec/stdlib/block'; +import { + type PublishedL2Block, + type ValidateBlockResult, + getAttestationsFromPublishedL2Block, +} from '@aztec/stdlib/block'; import { type L1RollupConstants, getEpochAtSlot } from '@aztec/stdlib/epoch-helpers'; +export type { ValidateBlockResult }; + /** * Validates the attestations submitted for the given block. * Returns true if the attestations are valid and sufficient, false otherwise. */ export async function validateBlockAttestations( - publishedBlock: Pick, + publishedBlock: PublishedL2Block, epochCache: EpochCache, constants: Pick, logger?: Logger, -): Promise { +): Promise { const attestations = getAttestationsFromPublishedL2Block(publishedBlock); const { block } = publishedBlock; const blockHash = await block.hash().then(hash => hash.toString()); @@ -33,17 +39,18 @@ export async function validateBlockAttestations( if (!committee || committee.length === 0) { // Q: Should we accept blocks with no committee? logger?.warn(`No committee found for epoch ${epoch} at slot ${slot}. 
Accepting block without validation.`, logData); - return true; + return { valid: true, block: publishedBlock }; } const committeeSet = new Set(committee.map(member => member.toString())); const requiredAttestationCount = Math.floor((committee.length * 2) / 3) + 1; - for (const attestation of attestations) { + for (let i = 0; i < attestations.length; i++) { + const attestation = attestations[i]; const signer = attestation.getSender().toString(); if (!committeeSet.has(signer)) { logger?.warn(`Attestation from non-committee member ${signer} at slot ${slot}`, { committee }); - return false; + return { valid: false, reason: 'invalid-attestation', invalidIndex: i, block: publishedBlock, committee }; } } @@ -53,9 +60,9 @@ export async function validateBlockAttestations( actualAttestations: attestations.length, ...logData, }); - return false; + return { valid: false, reason: 'insufficient-attestations', block: publishedBlock, committee }; } logger?.debug(`Block attestations validated successfully for block ${block.number} at slot ${slot}`, logData); - return true; + return { valid: true, block: publishedBlock }; } diff --git a/yarn-project/archiver/src/test/mock_l2_block_source.ts b/yarn-project/archiver/src/test/mock_l2_block_source.ts index aa66e3f1eee4..d15906f27219 100644 --- a/yarn-project/archiver/src/test/mock_l2_block_source.ts +++ b/yarn-project/archiver/src/test/mock_l2_block_source.ts @@ -5,7 +5,7 @@ import type { Fr } from '@aztec/foundation/fields'; import { createLogger } from '@aztec/foundation/log'; import type { FunctionSelector } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { L2Block, L2BlockHash, type L2BlockSource, type L2Tips } from '@aztec/stdlib/block'; +import { L2Block, L2BlockHash, type L2BlockSource, type L2Tips, type ValidateBlockResult } from '@aztec/stdlib/block'; import type { ContractClassPublic, ContractDataSource, ContractInstanceWithAddress } from '@aztec/stdlib/contract'; import { EmptyL1RollupConstants, type L1RollupConstants, getSlotRangeForEpoch } from '@aztec/stdlib/epoch-helpers'; import { type BlockHeader, TxHash, TxReceipt, TxStatus } from '@aztec/stdlib/tx'; @@ -271,4 +271,12 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { syncImmediate(): Promise { return Promise.resolve(); } + + isPendingChainInvalid(): Promise { + return Promise.resolve(false); + } + + getPendingChainValidationStatus(): Promise { + return Promise.resolve({ valid: true }); + } } diff --git a/yarn-project/ethereum/src/contracts/empire_base.ts b/yarn-project/ethereum/src/contracts/empire_base.ts index 750dfafc993c..95ebfb812391 100644 --- a/yarn-project/ethereum/src/contracts/empire_base.ts +++ b/yarn-project/ethereum/src/contracts/empire_base.ts @@ -1,3 +1,4 @@ +import type { EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; import { EmpireBaseAbi } from '@aztec/l1-artifacts/EmpireBaseAbi'; @@ -6,6 +7,7 @@ import { type Hex, type TypedDataDefinition, encodeFunctionData } from 'viem'; import type { L1TxRequest } from '../l1_tx_utils.js'; export interface IEmpireBase { + get address(): EthAddress; getRoundInfo( rollupAddress: Hex, round: bigint, diff --git a/yarn-project/ethereum/src/contracts/rollup.test.ts b/yarn-project/ethereum/src/contracts/rollup.test.ts new file mode 100644 index 000000000000..a5b0ce8a6405 --- /dev/null +++ b/yarn-project/ethereum/src/contracts/rollup.test.ts @@ -0,0 +1,110 @@ +import { getPublicClient } from 
'@aztec/ethereum'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { Fr } from '@aztec/foundation/fields'; +import { type Logger, createLogger } from '@aztec/foundation/log'; +import { RollupAbi } from '@aztec/l1-artifacts/RollupAbi'; + +import type { Anvil } from '@viem/anvil'; +import type { Abi } from 'viem'; +import { type PrivateKeyAccount, privateKeyToAccount } from 'viem/accounts'; +import { foundry } from 'viem/chains'; + +import { DefaultL1ContractsConfig } from '../config.js'; +import { deployL1Contracts } from '../deploy_l1_contracts.js'; +import { EthCheatCodes } from '../test/eth_cheat_codes.js'; +import { startAnvil } from '../test/start_anvil.js'; +import type { ViemClient } from '../types.js'; +import { RollupContract } from './rollup.js'; + +describe('Rollup', () => { + let anvil: Anvil; + let rpcUrl: string; + let privateKey: PrivateKeyAccount; + let logger: Logger; + let publicClient: ViemClient; + let cheatCodes: EthCheatCodes; + + let vkTreeRoot: Fr; + let protocolContractTreeRoot: Fr; + let rollupAddress: `0x${string}`; + let rollup: RollupContract; + + beforeAll(async () => { + logger = createLogger('ethereum:test:rollup'); + // this is the 6th address that gets funded by the junk mnemonic + privateKey = privateKeyToAccount('0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba'); + vkTreeRoot = Fr.random(); + protocolContractTreeRoot = Fr.random(); + + ({ anvil, rpcUrl } = await startAnvil()); + + publicClient = getPublicClient({ l1RpcUrls: [rpcUrl], l1ChainId: 31337 }); + cheatCodes = new EthCheatCodes([rpcUrl]); + + const deployed = await deployL1Contracts([rpcUrl], privateKey, foundry, logger, { + ...DefaultL1ContractsConfig, + salt: undefined, + vkTreeRoot, + protocolContractTreeRoot, + genesisArchiveRoot: Fr.random(), + realVerifier: false, + }); + + rollupAddress = deployed.l1ContractAddresses.rollupAddress.toString(); + rollup = new RollupContract(publicClient, rollupAddress); + }); + + afterAll(async () => { + await cheatCodes.setIntervalMining(0); + await anvil?.stop().catch(err => createLogger('cleanup').error(err)); + }); + + describe('makePendingBlockNumberOverride', () => { + it('creates state override that correctly overrides pending block number', async () => { + const testProvenBlockNumber = 42n; + const testPendingBlockNumber = 100n; + const newPendingBlockNumber = 150; + + // Set storage directly using cheat codes + // The storage slot stores both values: pending (high 128 bits) | proven (low 128 bits) + const storageSlot = RollupContract.stfStorageSlot; + const packedValue = (testPendingBlockNumber << 128n) | testProvenBlockNumber; + await cheatCodes.store(EthAddress.fromString(rollupAddress), BigInt(storageSlot), packedValue); + + // Verify the values were set correctly by calling the getters directly + const provenBlockNumber = await rollup.getProvenBlockNumber(); + const pendingBlockNumber = await rollup.getBlockNumber(); + + expect(provenBlockNumber).toBe(testProvenBlockNumber); + expect(pendingBlockNumber).toBe(testPendingBlockNumber); + + // Create the override + const stateOverride = await rollup.makePendingBlockNumberOverride(newPendingBlockNumber); + + // Test the override using simulateContract + const { result: overriddenPendingBlockNumber } = await publicClient.simulateContract({ + address: rollupAddress, + abi: RollupAbi as Abi, + functionName: 'getPendingBlockNumber', + stateOverride, + }); + + // The overridden value should be the new pending block number + 
expect(overriddenPendingBlockNumber).toBe(BigInt(newPendingBlockNumber)); + + // Verify that the proven block number is preserved in the override + const { result: overriddenProvenBlockNumber } = await publicClient.simulateContract({ + address: rollupAddress, + abi: RollupAbi as Abi, + functionName: 'getProvenBlockNumber', + stateOverride, + }); + + expect(overriddenProvenBlockNumber).toBe(testProvenBlockNumber); + + // Verify the actual storage hasn't changed + const actualPendingBlockNumber = await rollup.getBlockNumber(); + expect(actualPendingBlockNumber).toBe(testPendingBlockNumber); + }); + }); +}); diff --git a/yarn-project/ethereum/src/contracts/rollup.ts b/yarn-project/ethereum/src/contracts/rollup.ts index 7e63130b52b3..062b2eb68440 100644 --- a/yarn-project/ethereum/src/contracts/rollup.ts +++ b/yarn-project/ethereum/src/contracts/rollup.ts @@ -5,13 +5,23 @@ import { RollupAbi } from '@aztec/l1-artifacts/RollupAbi'; import { RollupStorage } from '@aztec/l1-artifacts/RollupStorage'; import { SlasherAbi } from '@aztec/l1-artifacts/SlasherAbi'; -import { type Account, type GetContractReturnType, type Hex, encodeFunctionData, getAddress, getContract } from 'viem'; +import { + type Account, + type GetContractReturnType, + type Hex, + type StateOverride, + encodeFunctionData, + getAddress, + getContract, + hexToBigInt, + keccak256, +} from 'viem'; import { getPublicClient } from '../client.js'; import type { DeployL1ContractsReturnType } from '../deploy_l1_contracts.js'; import type { L1ContractAddresses } from '../l1_contract_addresses.js'; import type { L1ReaderConfig } from '../l1_reader.js'; -import type { L1TxUtils } from '../l1_tx_utils.js'; +import type { L1TxRequest, L1TxUtils } from '../l1_tx_utils.js'; import type { ViemClient } from '../types.js'; import { formatViemError } from '../utils.js'; import { SlashingProposerContract } from './slashing_proposer.js'; @@ -87,6 +97,8 @@ export type ViemAppendOnlyTreeSnapshot = { export class RollupContract { private readonly rollup: GetContractReturnType; + private static cachedStfStorageSlot: Hex | undefined; + static get checkBlobStorageSlot(): bigint { const asString = RollupStorage.find(storage => storage.label === 'checkBlob')?.slot; if (asString === undefined) { @@ -95,6 +107,10 @@ export class RollupContract { return BigInt(asString); } + static get stfStorageSlot(): Hex { + return (RollupContract.cachedStfStorageSlot ??= keccak256(Buffer.from('aztec.stf.storage', 'utf-8'))); + } + static getFromL1ContractsValues(deployL1ContractsValues: DeployL1ContractsReturnType) { const { l1Client, @@ -498,6 +514,7 @@ export class RollupContract { archive: Buffer, account: `0x${string}` | Account, slotDuration: bigint | number, + opts: { forcePendingBlockNumber?: number } = {}, ): Promise<{ slot: bigint; blockNumber: bigint; timeOfNextL1Slot: bigint }> { if (typeof slotDuration === 'number') { slotDuration = BigInt(slotDuration); @@ -515,6 +532,7 @@ export class RollupContract { functionName: 'canProposeAtTime', args: [timeOfNextL1Slot, `0x${archive.toString('hex')}`, who], account, + stateOverride: await this.makePendingBlockNumberOverride(opts.forcePendingBlockNumber), }); return { slot, blockNumber, timeOfNextL1Slot }; @@ -523,6 +541,69 @@ export class RollupContract { } } + /** + * Returns a state override that sets the pending block number to the specified value. Useful for simulations. + * Requires querying the current state of the contract to get the current proven block number, as they are both + * stored in the same slot. 
If the argument is undefined, it returns an empty override. + */ + public async makePendingBlockNumberOverride(forcePendingBlockNumber: number | undefined): Promise { + if (forcePendingBlockNumber === undefined) { + return []; + } + const slot = RollupContract.stfStorageSlot; + const currentValue = await this.client.getStorageAt({ address: this.address, slot }); + const currentProvenBlockNumber = currentValue ? hexToBigInt(currentValue) & ((1n << 128n) - 1n) : 0n; + const newValue = (BigInt(forcePendingBlockNumber) << 128n) | currentProvenBlockNumber; + return [ + { + address: this.address, + stateDiff: [{ slot, value: `0x${newValue.toString(16).padStart(64, '0')}` }], + }, + ]; + } + + /** Creates a request to Rollup#invalidateBadAttestation to be simulated or sent */ + public getInvalidateBadAttestationRequest( + blockNumber: number, + attestations: ViemCommitteeAttestation[], + committee: EthAddress[], + invalidIndex: number, + ): L1TxRequest { + return { + to: this.address, + data: encodeFunctionData({ + abi: RollupAbi, + functionName: 'invalidateBadAttestation', + args: [ + BigInt(blockNumber), + RollupContract.packAttestations(attestations), + committee.map(addr => addr.toString()), + BigInt(invalidIndex), + ], + }), + }; + } + + /** Creates a request to Rollup#invalidateInsufficientAttestations to be simulated or sent */ + public getInvalidateInsufficientAttestationsRequest( + blockNumber: number, + attestations: ViemCommitteeAttestation[], + committee: EthAddress[], + ): L1TxRequest { + return { + to: this.address, + data: encodeFunctionData({ + abi: RollupAbi, + functionName: 'invalidateInsufficientAttestations', + args: [ + BigInt(blockNumber), + RollupContract.packAttestations(attestations), + committee.map(addr => addr.toString()), + ], + }), + }; + } + /** Calls getHasSubmitted directly. Returns whether the given prover has submitted a proof with the given length for the given epoch. 
*/ public getHasSubmittedProof(epochNumber: number, numberOfBlocksInEpoch: number, prover: Hex | EthAddress) { if (prover instanceof EthAddress) { diff --git a/yarn-project/ethereum/src/test/rollup_cheat_codes.ts b/yarn-project/ethereum/src/test/rollup_cheat_codes.ts index 333b6a4ebed0..28e798385331 100644 --- a/yarn-project/ethereum/src/test/rollup_cheat_codes.ts +++ b/yarn-project/ethereum/src/test/rollup_cheat_codes.ts @@ -5,7 +5,15 @@ import { createLogger } from '@aztec/foundation/log'; import type { TestDateProvider } from '@aztec/foundation/timer'; import { RollupAbi } from '@aztec/l1-artifacts/RollupAbi'; -import { type GetContractReturnType, type Hex, createPublicClient, fallback, getContract, http, keccak256 } from 'viem'; +import { + type GetContractReturnType, + type Hex, + createPublicClient, + fallback, + getContract, + hexToBigInt, + http, +} from 'viem'; import { foundry } from 'viem/chains'; import { EthCheatCodes } from './eth_cheat_codes.js'; @@ -174,10 +182,7 @@ export class RollupCheatCodes { // @note @LHerskind this is heavily dependent on the storage layout and size of values // The rollupStore is a struct and if the size of elements or the struct changes, this can break - - // Convert string to bytes and then compute keccak256 - const storageSlot = keccak256(Buffer.from('aztec.stf.storage', 'utf-8')); - const provenBlockNumberSlot = BigInt(storageSlot); + const provenBlockNumberSlot = hexToBigInt(RollupContract.stfStorageSlot); // Need to pack it as a single 32 byte word const newValue = (BigInt(tipsBefore.pending) << 128n) | BigInt(blockNumber); diff --git a/yarn-project/ethereum/src/utils.ts b/yarn-project/ethereum/src/utils.ts index 1f285f0273db..89d22e9d6ff8 100644 --- a/yarn-project/ethereum/src/utils.ts +++ b/yarn-project/ethereum/src/utils.ts @@ -50,7 +50,7 @@ export function extractEvent< return event; } -function tryExtractEvent< +export function tryExtractEvent< const TAbi extends Abi | readonly unknown[], TEventName extends ContractEventName, TEventType = DecodeEventLogReturnType, diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts index b04acb754b7a..98afd75a6b8c 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts @@ -20,29 +20,24 @@ import { type ViemHeader, type ViemStateReference, formatViemError, + tryExtractEvent, } from '@aztec/ethereum'; import type { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs'; import { sumBigint } from '@aztec/foundation/bigint'; import { toHex as toPaddedHex } from '@aztec/foundation/bigint-buffer'; import { EthAddress } from '@aztec/foundation/eth-address'; +import type { Fr } from '@aztec/foundation/fields'; import { createLogger } from '@aztec/foundation/log'; import { DateProvider, Timer } from '@aztec/foundation/timer'; import { EmpireBaseAbi, ErrorsAbi, RollupAbi } from '@aztec/l1-artifacts'; -import { CommitteeAttestation } from '@aztec/stdlib/block'; +import { CommitteeAttestation, type ValidateBlockResult } from '@aztec/stdlib/block'; import { ConsensusPayload, SignatureDomainSeparator, getHashedSignaturePayload } from '@aztec/stdlib/p2p'; import type { L1PublishBlockStats } from '@aztec/stdlib/stats'; import { type ProposedBlockHeader, StateReference, TxHash } from '@aztec/stdlib/tx'; import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; import pick from 'lodash.pick'; 
-import { - type TransactionReceipt, - type TypedDataDefinition, - encodeFunctionData, - getAbiItem, - toEventSelector, - toHex, -} from 'viem'; +import { type TransactionReceipt, type TypedDataDefinition, encodeFunctionData, toHex } from 'viem'; import type { PublisherConfig, TxSenderConfig } from './config.js'; import { SequencerPublisherMetrics } from './sequencer-publisher-metrics.js'; @@ -70,12 +65,26 @@ export enum SignalType { type GetSlashPayloadCallBack = (slotNumber: bigint) => Promise; -const Actions = ['propose', 'governance-signal', 'slashing-signal'] as const; +const Actions = [ + 'invalidate-by-invalid-attestation', + 'invalidate-by-insufficient-attestations', + 'propose', + 'governance-signal', + 'slashing-signal', +] as const; export type Action = (typeof Actions)[number]; -// Sorting for actions such that proposals always go first +// Sorting for actions such that invalidations go first, then proposals, and last votes const compareActions = (a: Action, b: Action) => Actions.indexOf(b) - Actions.indexOf(a); +export type InvalidateBlockRequest = { + request: L1TxRequest; + reason: 'invalid-attestation' | 'insufficient-attestations'; + gasUsed: bigint; + blockNumber: number; + forcePendingBlockNumber: number; +}; + interface RequestWithExpiry { action: Action; request: L1TxRequest; @@ -302,12 +311,16 @@ export class SequencerPublisher { * @param tipArchive - The archive to check * @returns The slot and block number if it is possible to propose, undefined otherwise */ - public canProposeAtNextEthBlock(tipArchive: Buffer, msgSender: EthAddress) { + public canProposeAtNextEthBlock( + tipArchive: Fr, + msgSender: EthAddress, + opts: { forcePendingBlockNumber?: number } = {}, + ) { // TODO: #14291 - should loop through multiple keys to check if any of them can propose const ignoredErrors = ['SlotAlreadyInChain', 'InvalidProposer', 'InvalidArchive']; return this.rollupContract - .canProposeAtNextEthBlock(tipArchive, msgSender.toString(), this.ethereumSlotDuration) + .canProposeAtNextEthBlock(tipArchive.toBuffer(), msgSender.toString(), this.ethereumSlotDuration, opts) .catch(err => { if (err instanceof FormattedViemError && ignoredErrors.find(e => err.message.includes(e))) { this.log.warn(`Failed canProposeAtTime check with ${ignoredErrors.find(e => err.message.includes(e))}`, { @@ -325,7 +338,10 @@ export class SequencerPublisher { * It will throw if the block header is invalid. * @param header - The block header to validate */ - public async validateBlockHeader(header: ProposedBlockHeader) { + public async validateBlockHeader( + header: ProposedBlockHeader, + opts?: { forcePendingBlockNumber: number | undefined }, + ) { const flags = { ignoreDA: true, ignoreSignatures: true }; const args = [ @@ -347,18 +363,96 @@ export class SequencerPublisher { data: encodeFunctionData({ abi: RollupAbi, functionName: 'validateHeaderWithAttestations', args }), from: MULTI_CALL_3_ADDRESS, }, - { - time: ts + 1n, - }, + { time: ts + 1n }, [ - { - address: MULTI_CALL_3_ADDRESS, - balance, - }, + { address: MULTI_CALL_3_ADDRESS, balance }, + ...(await this.rollupContract.makePendingBlockNumberOverride(opts?.forcePendingBlockNumber)), ], ); } + /** + * Simulate making a call to invalidate a block with invalid attestations. Returns undefined if no need to invalidate. 
+ * @param block - The block to invalidate and the criteria for invalidation (as returned by the archiver) + */ + public async simulateInvalidateBlock( + validationResult: ValidateBlockResult, + ): Promise { + if (validationResult.valid) { + return undefined; + } + + const request = this.getInvalidateBlockRequest(validationResult); + const { reason, block } = validationResult; + const blockNumber = block.block.number; + const logData = { ...block.block.toBlockInfo(), reason }; + this.log.debug(`Simulating invalidate block ${blockNumber}`, logData); + + try { + const { gasUsed } = await this.l1TxUtils.simulate(request, undefined, undefined, ErrorsAbi); + this.log.verbose(`Simulation for invalidate block ${blockNumber} succeeded`, { ...logData, request, gasUsed }); + + return { request, gasUsed, blockNumber, forcePendingBlockNumber: blockNumber - 1, reason }; + } catch (err) { + const viemError = formatViemError(err); + + // If the error is due to the block not being in the pending chain, and it was indeed removed by someone else, + // we can safely ignore it and return undefined so we go ahead with block building. + if (viemError.message?.includes('Rollup__BlockNotInPendingChain')) { + this.log.verbose( + `Simulation for invalidate block ${blockNumber} failed due to block not being in pending chain`, + { ...logData, request, error: viemError.message }, + ); + const latestPendingBlockNumber = await this.rollupContract.getBlockNumber(); + if (latestPendingBlockNumber < blockNumber) { + this.log.verbose(`Block number ${blockNumber} has already been invalidated`, { ...logData }); + return undefined; + } else { + this.log.error( + `Simulation for invalidate ${blockNumber} failed and it is still in pending chain`, + viemError, + logData, + ); + throw new Error(`Failed to simulate invalidate block ${blockNumber} while it is still in pending chain`, { + cause: viemError, + }); + } + } + + // Otherwise, throw. We cannot build the next block if we cannot invalidate the previous one. 
+ this.log.error(`Simulation for invalidate block ${blockNumber} failed`, viemError, logData); + throw new Error(`Failed to simulate invalidate block ${blockNumber}`, { cause: viemError }); + } + } + + private getInvalidateBlockRequest(validationResult: ValidateBlockResult) { + if (validationResult.valid) { + throw new Error('Cannot invalidate a valid block'); + } + + const { block, committee, reason } = validationResult; + const logData = { ...block.block.toBlockInfo(), reason }; + this.log.debug(`Simulating invalidate block ${block.block.number}`, logData); + + if (reason === 'invalid-attestation') { + return this.rollupContract.getInvalidateBadAttestationRequest( + block.block.number, + block.attestations.map(a => a.toViem()), + committee, + validationResult.invalidIndex, + ); + } else if (reason === 'insufficient-attestations') { + return this.rollupContract.getInvalidateInsufficientAttestationsRequest( + block.block.number, + block.attestations.map(a => a.toViem()), + committee, + ); + } else { + const _: never = reason; + throw new Error(`Unknown reason for invalidation`); + } + } + /** * @notice Will simulate `propose` to make sure that the block is valid for submission * @@ -374,6 +468,7 @@ export class SequencerPublisher { digest: Buffer.alloc(32), attestations: [], }, + options: { forcePendingBlockNumber?: number }, ): Promise { const ts = BigInt((await this.l1TxUtils.getBlock()).timestamp + this.ethereumSlotDuration); @@ -414,7 +509,7 @@ export class SequencerPublisher { blobInput, ] as const; - await this.simulateProposeTx(args, ts); + await this.simulateProposeTx(args, ts, options); return ts; } @@ -481,9 +576,7 @@ export class SequencerPublisher { result && result.receipt && result.receipt.status === 'success' && - result.receipt.logs.find( - log => log.topics[0] === toEventSelector(getAbiItem({ abi: EmpireBaseAbi, name: 'SignalCast' })), - ); + tryExtractEvent(result.receipt.logs, base.address.toString(), EmpireBaseAbi, 'SignalCast'); const logData = { ...result, slotNumber, round, payload: payload.toString() }; if (!success) { @@ -559,7 +652,7 @@ export class SequencerPublisher { block: L2Block, attestations?: CommitteeAttestation[], txHashes?: TxHash[], - opts: { txTimeoutAt?: Date } = {}, + opts: { txTimeoutAt?: Date; forcePendingBlockNumber?: number } = {}, ): Promise { const proposedBlockHeader = block.header.toPropose(); @@ -584,23 +677,50 @@ export class SequencerPublisher { // This means that we can avoid the simulation issues in later checks. // By simulation issue, I mean the fact that the block.timestamp is equal to the last block, not the next, which // make time consistency checks break. - ts = await this.validateBlockForSubmission(block, { - digest: digest.toBuffer(), - attestations: attestations ?? [], - }); + const attestationData = { digest: digest.toBuffer(), attestations: attestations ?? [] }; + ts = await this.validateBlockForSubmission(block, attestationData, opts); } catch (err: any) { - this.log.error(`Block validation failed. ${err instanceof Error ? err.message : 'No error message'}`, undefined, { + this.log.error(`Block validation failed. ${err instanceof Error ? 
err.message : 'No error message'}`, err, { ...block.getStats(), slotNumber: block.header.globalVariables.slotNumber.toBigInt(), + forcePendingBlockNumber: opts.forcePendingBlockNumber, }); throw err; } - this.log.debug(`Submitting propose transaction`); + this.log.debug(`Enqueuing block propose transaction`, { ...block.toBlockInfo(), ...opts }); await this.addProposeTx(block, proposeTxArgs, opts, ts); return true; } + public enqueueInvalidateBlock(request: InvalidateBlockRequest | undefined, opts: { txTimeoutAt?: Date } = {}) { + if (!request) { + return; + } + + const logData = { ...pick(request, 'gasUsed', 'blockNumber'), opts }; + this.log.debug(`Enqueuing invalidate block`, logData); + this.addRequest({ + action: `invalidate-by-${request.reason}`, + request: request.request, + gasConfig: { gasLimit: request.gasUsed, txTimeoutAt: opts.txTimeoutAt }, + lastValidL2Slot: this.getCurrentL2Slot() + 2n, + checkSuccess: (req, result) => { + const success = + result && + result.receipt && + result.receipt.status === 'success' && + tryExtractEvent(result.receipt.logs, this.rollupContract.address, RollupAbi, 'BlockInvalidated'); + if (!success) { + this.log.warn(`Invalidate block ${request.blockNumber} failed`, { ...result, ...logData }); + } else { + this.log.info(`Invalidate block ${request.blockNumber} succeeded`, { ...result, ...logData }); + } + return !!success; + }, + }); + } + /** * Calling `interrupt` will cause any in progress call to `publishRollup` to return `false` asap. * Be warned, the call may return false even if the tx subsequently gets successfully mined. @@ -618,7 +738,11 @@ export class SequencerPublisher { this.l1TxUtils.restart(); } - private async prepareProposeTx(encodedData: L1ProcessArgs, timestamp: bigint) { + private async prepareProposeTx( + encodedData: L1ProcessArgs, + timestamp: bigint, + options: { forcePendingBlockNumber?: number }, + ) { if (!this.l1TxUtils.client.account) { throw new Error('L1 TX utils needs to be initialized with an account wallet.'); } @@ -671,7 +795,7 @@ export class SequencerPublisher { blobInput, ] as const; - const { rollupData, simulationResult } = await this.simulateProposeTx(args, timestamp); + const { rollupData, simulationResult } = await this.simulateProposeTx(args, timestamp, options); return { args, blobEvaluationGas, rollupData, simulationResult }; } @@ -698,6 +822,7 @@ export class SequencerPublisher { `0x${string}`, ], timestamp: bigint, + options: { forcePendingBlockNumber?: number }, ) { const rollupData = encodeFunctionData({ abi: RollupAbi, @@ -705,6 +830,13 @@ export class SequencerPublisher { args, }); + // override the pending block number if requested + const forcePendingBlockNumberStateDiff = ( + options.forcePendingBlockNumber + ? await this.rollupContract.makePendingBlockNumberOverride(options.forcePendingBlockNumber) + : [] + ).flatMap(override => override.stateDiff ?? 
[]); + const simulationResult = await this.l1TxUtils .simulate( { @@ -723,10 +855,8 @@ export class SequencerPublisher { address: this.rollupContract.address, // @note we override checkBlob to false since blobs are not part simulate() stateDiff: [ - { - slot: toPaddedHex(RollupContract.checkBlobStorageSlot, true), - value: toPaddedHex(0n, true), - }, + { slot: toPaddedHex(RollupContract.checkBlobStorageSlot, true), value: toPaddedHex(0n, true) }, + ...forcePendingBlockNumberStateDiff, ], }, ], @@ -747,12 +877,16 @@ export class SequencerPublisher { private async addProposeTx( block: L2Block, encodedData: L1ProcessArgs, - opts: { txTimeoutAt?: Date } = {}, + opts: { txTimeoutAt?: Date; forcePendingBlockNumber?: number } = {}, timestamp: bigint, ): Promise { const timer = new Timer(); const kzg = Blob.getViemKzgInstance(); - const { rollupData, simulationResult, blobEvaluationGas } = await this.prepareProposeTx(encodedData, timestamp); + const { rollupData, simulationResult, blobEvaluationGas } = await this.prepareProposeTx( + encodedData, + timestamp, + opts, + ); const startBlock = await this.l1TxUtils.getBlockNumber(); const gasLimit = this.l1TxUtils.bumpGasLimit( BigInt(Math.ceil((Number(simulationResult.gasUsed) * 64) / 63)) + @@ -786,9 +920,7 @@ export class SequencerPublisher { const success = receipt && receipt.status === 'success' && - receipt.logs.find( - log => log.topics[0] === toEventSelector(getAbiItem({ abi: RollupAbi, name: 'L2BlockProposed' })), - ); + tryExtractEvent(receipt.logs, this.rollupContract.address, RollupAbi, 'L2BlockProposed'); if (success) { const endBlock = receipt.blockNumber; const inclusionBlocks = Number(endBlock - startBlock); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 5c89258f680c..6afa31ded655 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -1,7 +1,7 @@ import type { L2Block } from '@aztec/aztec.js'; import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import { FormattedViemError, NoCommitteeError, type ViemPublicClient } from '@aztec/ethereum'; -import { omit } from '@aztec/foundation/collection'; +import { omit, pick } from '@aztec/foundation/collection'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { createLogger } from '@aztec/foundation/log'; @@ -11,7 +11,7 @@ import type { TypedEventEmitter } from '@aztec/foundation/types'; import type { P2P } from '@aztec/p2p'; import type { SlasherClient } from '@aztec/slasher'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { CommitteeAttestation, L2BlockSource } from '@aztec/stdlib/block'; +import type { CommitteeAttestation, L2BlockSource, ValidateBlockResult } from '@aztec/stdlib/block'; import { type L1RollupConstants, getSlotAtTimestamp } from '@aztec/stdlib/epoch-helpers'; import { Gas } from '@aztec/stdlib/gas'; import { @@ -48,7 +48,12 @@ import type { ValidatorClient } from '@aztec/validator-client'; import EventEmitter from 'node:events'; import type { GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; -import { type Action, type SequencerPublisher, SignalType } from '../publisher/sequencer-publisher.js'; +import { + type Action, + type InvalidateBlockRequest, + type SequencerPublisher, + SignalType, +} from '../publisher/sequencer-publisher.js'; import type { SequencerConfig } from './config.js'; 
import { SequencerMetrics } from './metrics.js'; import { SequencerTimetable, SequencerTooSlowError } from './timetable.js'; @@ -302,6 +307,8 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter TypedEventEmitter TypedEventEmitter TypedEventEmitter TypedEventEmitter { - // TODO(palla/sigs): We need to simulate the previous block being removed if invalid! - await this.publisher.validateBlockHeader(proposalHeader); + await this.publisher.validateBlockHeader(proposalHeader, invalidateBlock); const blockNumber = newGlobalVariables.blockNumber; const slot = proposalHeader.slotNumber.toBigInt(); @@ -613,7 +629,7 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter TypedEventEmitter { // Publishes new block to the network and awaits the tx to be mined this.setState(SequencerState.PUBLISHING_BLOCK, block.header.globalVariables.slotNumber.toBigInt()); @@ -757,8 +774,13 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter { const syncedBlocks = await Promise.all([ this.worldState.status().then(({ syncSummary }) => ({ @@ -783,9 +812,11 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter p2p.syncedToL2Block), this.l1ToL2MessageSource.getL2Tips().then(t => t.latest), this.l2BlockSource.getL1Timestamp(), + this.l2BlockSource.getPendingChainValidationStatus(), ] as const); - const [worldState, l2BlockSource, p2p, l1ToL2MessageSource, l1Timestamp] = syncedBlocks; + const [worldState, l2BlockSource, p2p, l1ToL2MessageSource, l1Timestamp, pendingChainValidationStatus] = + syncedBlocks; // The archiver reports 'undefined' hash for the genesis block // because it doesn't have access to world state to compute it (facepalm) @@ -817,10 +848,11 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter; + /** + * Returns whether the latest block in the pending chain on L1 is invalid (ie its attestations are incorrect). + * Note that invalid blocks do not get synced, so the latest block returned by the block source is always a valid one. + */ + isPendingChainInvalid(): Promise; + + /** + * Returns the status of the pending chain validation. + * This includes whether the chain is valid, and if not, the reason for invalidation. + */ + getPendingChainValidationStatus(): Promise; + /** Force a sync. */ syncImmediate(): Promise; } +/** Result type for validating a block attestations */ +export type ValidateBlockResult = + | { valid: true; block?: PublishedL2Block } + | { valid: false; block: PublishedL2Block; committee: EthAddress[]; reason: 'insufficient-attestations' } + | { + valid: false; + block: PublishedL2Block; + committee: EthAddress[]; + reason: 'invalid-attestation'; + invalidIndex: number; + }; + +export const ValidateBlockResultSchema = z.union([ + z.object({ valid: z.literal(true), block: PublishedL2Block.schema.optional() }), + z.object({ + valid: z.literal(false), + block: PublishedL2Block.schema, + committee: z.array(schemas.EthAddress), + reason: z.literal('insufficient-attestations'), + }), + z.object({ + valid: z.literal(false), + block: PublishedL2Block.schema, + committee: z.array(schemas.EthAddress), + reason: z.literal('invalid-attestation'), + invalidIndex: z.number(), + }), +]) satisfies ZodFor; + /** * L2BlockSource that emits events upon pending / proven chain changes. * see L2BlockSourceEvents for the events emitted. 
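A minimal consumer sketch of the new `ValidateBlockResult` union and `getPendingChainValidationStatus` added above (the `decideNextAction` helper below is illustrative only and not part of this patch; the `L2BlockSource` method and the union shape are taken from the interface changes in l2_block_source.ts). Narrowing on the `reason` discriminant is what lets a caller choose between the two invalidation requests exposed on the rollup contract wrapper:

import type { L2BlockSource } from '@aztec/stdlib/block';

// Hypothetical helper: inspects the pending chain status reported by the block source
// before deciding how to act on the next slot.
async function decideNextAction(
  source: L2BlockSource,
): Promise<'propose' | 'invalidate-bad-attestation' | 'invalidate-insufficient-attestations'> {
  const status = await source.getPendingChainValidationStatus();
  if (status.valid) {
    // Pending chain tip has valid attestations: build directly on top of it.
    return 'propose';
  }
  // On the invalid branches the union exposes `block`, `committee`, and the failure `reason`.
  if (status.reason === 'invalid-attestation') {
    // `status.invalidIndex` identifies the offending attestation within the published block.
    return 'invalidate-bad-attestation';
  }
  return 'invalidate-insufficient-attestations';
}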
diff --git a/yarn-project/stdlib/src/interfaces/archiver.test.ts b/yarn-project/stdlib/src/interfaces/archiver.test.ts index f7db1ffd1040..6089516f52a7 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.test.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.test.ts @@ -10,7 +10,7 @@ import { FunctionSelector } from '../abi/function_selector.js'; import { AztecAddress } from '../aztec-address/index.js'; import { CommitteeAttestation, L2BlockHash } from '../block/index.js'; import { L2Block } from '../block/l2_block.js'; -import type { L2Tips } from '../block/l2_block_source.js'; +import type { L2Tips, ValidateBlockResult } from '../block/l2_block_source.js'; import type { PublishedL2Block } from '../block/published_l2_block.js'; import { getContractClassFromArtifact } from '../contract/contract_class.js'; import { @@ -250,6 +250,12 @@ describe('ArchiverApiSchema', () => { class MockArchiver implements ArchiverApi { constructor(private artifact: ContractArtifact) {} + isPendingChainInvalid(): Promise { + return Promise.resolve(false); + } + getPendingChainValidationStatus(): Promise { + return Promise.resolve({ valid: true }); + } syncImmediate() { return Promise.resolve(); } diff --git a/yarn-project/stdlib/src/interfaces/archiver.ts b/yarn-project/stdlib/src/interfaces/archiver.ts index 843688948700..022046a7bf56 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.ts @@ -3,7 +3,7 @@ import type { ApiSchemaFor } from '@aztec/foundation/schemas'; import { z } from 'zod'; import { L2Block } from '../block/l2_block.js'; -import { type L2BlockSource, L2TipsSchema } from '../block/l2_block_source.js'; +import { type L2BlockSource, L2TipsSchema, ValidateBlockResultSchema } from '../block/l2_block_source.js'; import { PublishedL2Block } from '../block/published_l2_block.js'; import { ContractClassPublicSchema, @@ -75,4 +75,6 @@ export const ArchiverApiSchema: ApiSchemaFor = { getL1Constants: z.function().args().returns(L1RollupConstantsSchema), getL1Timestamp: z.function().args().returns(schemas.BigInt), syncImmediate: z.function().args().returns(z.void()), + isPendingChainInvalid: z.function().args().returns(z.boolean()), + getPendingChainValidationStatus: z.function().args().returns(ValidateBlockResultSchema), }; diff --git a/yarn-project/txe/src/state_machine/archiver.ts b/yarn-project/txe/src/state_machine/archiver.ts index 0d2bc566bfe0..3f32e304fdeb 100644 --- a/yarn-project/txe/src/state_machine/archiver.ts +++ b/yarn-project/txe/src/state_machine/archiver.ts @@ -2,7 +2,7 @@ import { ArchiverStoreHelper, KVArchiverDataStore, type PublishedL2Block } from import type { EthAddress } from '@aztec/foundation/eth-address'; import type { AztecAsyncKVStore } from '@aztec/kv-store'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { L2Block, L2Tips } from '@aztec/stdlib/block'; +import type { L2Block, L2BlockSource, L2Tips, ValidateBlockResult } from '@aztec/stdlib/block'; import type { ContractInstanceWithAddress } from '@aztec/stdlib/contract'; import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; import type { BlockHeader } from '@aztec/stdlib/tx'; @@ -11,7 +11,7 @@ import type { UInt64 } from '@aztec/stdlib/types'; // We are extending the ArchiverDataStoreHelper here because it provides most of the endpoints needed by the // node for reading from and writing to state, without needing any of the extra overhead that the Archiver itself // requires (i.e. 
an L1 client) -export class TXEArchiver extends ArchiverStoreHelper { +export class TXEArchiver extends ArchiverStoreHelper implements L2BlockSource { constructor(db: AztecAsyncKVStore) { super(new KVArchiverDataStore(db, 9999)); } @@ -134,4 +134,12 @@ export class TXEArchiver extends ArchiverStoreHelper { public getL1Timestamp(): Promise { throw new Error('TXE Archiver does not implement "getL1Timestamp"'); } + + public isPendingChainInvalid(): Promise { + return Promise.resolve(false); + } + + public getPendingChainValidationStatus(): Promise { + return Promise.resolve({ valid: true }); + } } From 6d2586d7fdb8b784e026263f333ac86983ace67f Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Tue, 29 Jul 2025 11:31:26 -0300 Subject: [PATCH 2/5] Add integration test --- .../archiver/src/archiver/archiver.test.ts | 13 +- .../integration_l1_publisher.test.ts | 279 +++++++++++------- .../integration_l1_publisher/write_json.ts | 74 +++++ yarn-project/ethereum/src/contracts/rollup.ts | 4 +- .../src/publisher/sequencer-publisher.ts | 17 +- .../src/sequencer/sequencer.test.ts | 2 + .../stdlib/src/interfaces/archiver.test.ts | 10 + 7 files changed, 276 insertions(+), 123 deletions(-) rename yarn-project/end-to-end/src/{integration => integration_l1_publisher}/integration_l1_publisher.test.ts (69%) create mode 100644 yarn-project/end-to-end/src/integration_l1_publisher/write_json.ts diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index b4faecd0d16c..f94584f9ac68 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -402,7 +402,7 @@ describe('Archiver', () => { const badBlock2BlobHashes = await makeVersionedBlobHashes(badBlock2); const badBlock2Blobs = await makeBlobsFromBlock(badBlock2); - // Return the archive root for the bad block 2 when queried + // Return the archive root for the bad block 2 when L1 is queried mockRollupRead.archiveAt.mockImplementation((args: readonly [bigint]) => Promise.resolve((args[0] === 2n ? 
badBlock2 : blocks[Number(args[0] - 1n)]).archive.root.toString()), ); @@ -423,6 +423,14 @@ describe('Archiver', () => { await archiver.start(true); latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(1); + expect(await archiver.getPendingChainValidationStatus()).toEqual( + expect.objectContaining({ + valid: false, + reason: 'invalid-attestation', + invalidIndex: 0, + committee, + }), + ); // Now we go for another loop, where a proper block 2 is proposed with correct attestations // IRL there would be an "Invalidated" event, but we are not currently relying on it @@ -453,6 +461,9 @@ describe('Archiver', () => { expect(block2.block.number).toEqual(2); expect(block2.block.archive.root.toString()).toEqual(blocks[1].archive.root.toString()); expect(block2.attestations.length).toEqual(3); + + // With a valid pending chain validation status + expect(await archiver.getPendingChainValidationStatus()).toEqual(expect.objectContaining({ valid: true })); }, 10_000); it('skip event search if no changes found', async () => { diff --git a/yarn-project/end-to-end/src/integration/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/integration_l1_publisher/integration_l1_publisher.test.ts similarity index 69% rename from yarn-project/end-to-end/src/integration/integration_l1_publisher.test.ts rename to yarn-project/end-to-end/src/integration_l1_publisher/integration_l1_publisher.test.ts index df9d37c3fda3..f916e2fb01ad 100644 --- a/yarn-project/end-to-end/src/integration/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/integration_l1_publisher/integration_l1_publisher.test.ts @@ -1,4 +1,4 @@ -import type { ArchiveSource } from '@aztec/archiver'; +import type { ArchiveSource, L1PublishedData } from '@aztec/archiver'; import { getConfigEnvVars } from '@aztec/aztec-node'; import { AztecAddress, Fr, GlobalVariables, type L2Block, createLogger } from '@aztec/aztec.js'; import { BatchedBlob, Blob } from '@aztec/blob-lib'; @@ -6,6 +6,7 @@ import { createBlobSinkClient } from '@aztec/blob-sink/client'; import { GENESIS_ARCHIVE_ROOT, MAX_NULLIFIERS_PER_TX, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants'; import { EpochCache } from '@aztec/epoch-cache'; import { + type DeployL1ContractsArgs, type ExtendedViemWalletClient, GovernanceProposerContract, type L1ContractAddresses, @@ -15,11 +16,12 @@ import { createExtendedL1Client, } from '@aztec/ethereum'; import { L1TxUtilsWithBlobs } from '@aztec/ethereum/l1-tx-utils-with-blobs'; -import { EthCheatCodesWithState, startAnvil } from '@aztec/ethereum/test'; +import { EthCheatCodesWithState, RollupCheatCodes, startAnvil } from '@aztec/ethereum/test'; import { range } from '@aztec/foundation/array'; -import { timesParallel } from '@aztec/foundation/collection'; +import { Buffer32 } from '@aztec/foundation/buffer'; +import { times, timesParallel } from '@aztec/foundation/collection'; import { SecretValue } from '@aztec/foundation/config'; -import { SHA256Trunc, sha256ToField } from '@aztec/foundation/crypto'; +import { SHA256Trunc, Secp256k1Signer, sha256ToField } from '@aztec/foundation/crypto'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; import { hexToBuffer } from '@aztec/foundation/string'; @@ -31,9 +33,10 @@ import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import { protocolContractTreeRoot } from '@aztec/protocol-contracts'; import { buildBlockWithCleanDB } from 
'@aztec/prover-client/block-factory'; import { SequencerPublisher, SignalType } from '@aztec/sequencer-client'; -import type { L2Tips } from '@aztec/stdlib/block'; +import { type CommitteeAttestation, type L2Tips, PublishedL2Block } from '@aztec/stdlib/block'; import { GasFees, GasSettings } from '@aztec/stdlib/gas'; -import { fr, makeBloatedProcessedTx } from '@aztec/stdlib/testing'; +import { orderAttestations } from '@aztec/stdlib/p2p'; +import { fr, makeBloatedProcessedTx, makeBlockAttestationFromBlock } from '@aztec/stdlib/testing'; import type { BlockHeader, ProcessedTx } from '@aztec/stdlib/tx'; import { type MerkleTreeAdminDatabase, @@ -44,7 +47,6 @@ import { import { beforeEach, describe, expect, it, jest } from '@jest/globals'; import type { Anvil } from '@viem/anvil'; -import { writeFile } from 'fs/promises'; import { type MockProxy, mock } from 'jest-mock-extended'; import { type Address, @@ -60,6 +62,11 @@ import { foundry } from 'viem/chains'; import { sendL1ToL2Message } from '../fixtures/l1_to_l2_messaging.js'; import { setupL1Contracts } from '../fixtures/utils.js'; +import { writeJson } from './write_json.js'; + +// To update the test data, run "export AZTEC_GENERATE_TEST_DATA=1" in shell and run the tests again +// If you have issues with RPC_URL, it is likely that you need to set the RPC_URL in the shell as well +// If running ANVIL locally, you can use ETHEREUM_HOSTS="http://0.0.0.0:8545" // Accounts 4 and 5 of Anvil default startup with mnemonic: 'test test test test test test test test test test test junk' const sequencerPK = '0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a'; @@ -101,34 +108,36 @@ describe('L1Publisher integration', () => { let coinbase: EthAddress; let feeRecipient: AztecAddress; let version: number; + let validators: Secp256k1Signer[]; + let committee: EthAddress[] | undefined; + let proposer: EthAddress | undefined; + let dateProvider: TestDateProvider; let ethCheatCodes: EthCheatCodesWithState; + let rollupCheatCodes: RollupCheatCodes; let worldStateSynchronizer: ServerWorldStateSynchronizer; + let epochCache: EpochCache; let rpcUrl: string; let anvil: Anvil; - // To update the test data, run "export AZTEC_GENERATE_TEST_DATA=1" in shell and run the tests again - // If you have issues with RPC_URL, it is likely that you need to set the RPC_URL in the shell as well - // If running ANVIL locally, you can use ETHEREUM_HOSTS="http://0.0.0.0:8545" - const AZTEC_GENERATE_TEST_DATA = !!process.env.AZTEC_GENERATE_TEST_DATA; - const progressTimeBySlot = async (slotsToJump = 1n) => { const currentTime = (await l1Client.getBlock()).timestamp; const currentSlot = await rollup.getSlotNumber(); const timestamp = await rollup.getTimestampForSlot(currentSlot + slotsToJump); if (timestamp > currentTime) { - await ethCheatCodes.warp(Number(timestamp), { resetBlockInterval: true }); + await ethCheatCodes.warp(Number(timestamp), { resetBlockInterval: true, updateDateProvider: dateProvider }); } }; - beforeEach(async () => { + const setup = async (deployL1ContractsArgs: Partial = {}) => { ({ rpcUrl, anvil } = await startAnvil()); config.l1RpcUrls = [rpcUrl]; deployerAccount = privateKeyToAccount(deployerPK); ({ l1ContractAddresses, l1Client } = await setupL1Contracts(config.l1RpcUrls, deployerAccount, logger, { aztecTargetCommitteeSize: 0, + ...deployL1ContractsArgs, })); ethCheatCodes = new EthCheatCodesWithState(config.l1RpcUrls); @@ -136,6 +145,8 @@ describe('L1Publisher integration', () => { rollupAddress = 
getAddress(l1ContractAddresses.rollupAddress.toString()); outboxAddress = getAddress(l1ContractAddresses.outboxAddress.toString()); + rollupCheatCodes = new RollupCheatCodes(ethCheatCodes, l1ContractAddresses); + // Set up contract instances rollup = new RollupContract(l1Client, l1ContractAddresses.rollupAddress); outbox = getContract({ @@ -144,6 +155,8 @@ describe('L1Publisher integration', () => { client: l1Client, }); + dateProvider = new TestDateProvider(); + builderDb = await NativeWorldStateService.tmp(EthAddress.fromString(rollupAddress)); blocks = []; blockSource = mock({ @@ -187,7 +200,6 @@ describe('L1Publisher integration', () => { worldStateSynchronizer = new ServerWorldStateSynchronizer(builderDb, blockSource, worldStateConfig); await worldStateSynchronizer.start(); - const dateProvider = new TestDateProvider(); const sequencerL1Client = createExtendedL1Client(config.l1RpcUrls, sequencerPK, foundry); const l1TxUtils = new L1TxUtilsWithBlobs(sequencerL1Client, logger, dateProvider, config); const rollupContract = new RollupContract(sequencerL1Client, l1ContractAddresses.rollupAddress.toString()); @@ -200,7 +212,7 @@ describe('L1Publisher integration', () => { sequencerL1Client, l1ContractAddresses.governanceProposerAddress.toString(), ); - const epochCache = await EpochCache.create(l1ContractAddresses.rollupAddress, config, { dateProvider }); + epochCache = await EpochCache.create(l1ContractAddresses.rollupAddress, config, { dateProvider }); const blobSinkClient = createBlobSinkClient(); publisher = new SequencerPublisher( @@ -238,9 +250,12 @@ describe('L1Publisher integration', () => { baseFee = new GasFees(0, await rollup.getManaBaseFeeAt(ts, true)); // We jump two epochs such that the committee can be setup. - const timeToJump = (await rollup.getEpochDuration()) * 2n; - await progressTimeBySlot(timeToJump); - }); + await rollupCheatCodes.advanceToEpoch(2n, { updateDateProvider: dateProvider }); + await rollupCheatCodes.setupEpoch(); + + ({ committee } = await epochCache.getCommittee()); + ({ currentProposer: proposer } = await epochCache.getProposerAttesterAddressInCurrentOrNextSlot()); + }; afterEach(async () => { await anvil.stop(); @@ -263,82 +278,41 @@ describe('L1Publisher integration', () => { ({ msgHash }) => msgHash, ); - /** - * Creates a json object that can be used to test the solidity contract. - * The json object must be put into - */ - const writeJson = async ( - fileName: string, - block: L2Block, - l1ToL2Content: Fr[], - blobs: Blob[], - batchedBlob: BatchedBlob, - recipientAddress: AztecAddress, - deployerAddress: `0x${string}`, - ): Promise => { - if (!AZTEC_GENERATE_TEST_DATA) { - return; - } - // Path relative to the package.json in the end-to-end folder - const path = `../../l1-contracts/test/fixtures/${fileName}.json`; - - const asHex = (value: Fr | Buffer | EthAddress | AztecAddress, size = 64) => { - const buffer = Buffer.isBuffer(value) ? value : value.toBuffer(); - return `0x${buffer.toString('hex').padStart(size, '0')}`; - }; - - const jsonObject = { - populate: { - l1ToL2Content: l1ToL2Content.map(asHex), - recipient: asHex(recipientAddress.toField()), - sender: deployerAddress, - }, - messages: { - l2ToL1Messages: block.body.txEffects.flatMap(txEffect => txEffect.l2ToL1Msgs).map(asHex), - }, - block: { - // The json formatting in forge is a bit brittle, so we convert Fr to a number in the few values below. - // This should not be a problem for testing as long as the values are not larger than u32. 
- archive: asHex(block.archive.root), - blobCommitments: Blob.getPrefixedEthBlobCommitments(blobs), - batchedBlobInputs: batchedBlob.getEthBlobEvaluationInputs(), - blockNumber: block.number, - body: `0x${block.body.toBuffer().toString('hex')}`, - header: { - lastArchiveRoot: asHex(block.header.lastArchive.root), - contentCommitment: { - blobsHash: asHex(block.header.contentCommitment.blobsHash), - inHash: asHex(block.header.contentCommitment.inHash), - outHash: asHex(block.header.contentCommitment.outHash), - }, - slotNumber: Number(block.header.globalVariables.slotNumber), - timestamp: Number(block.header.globalVariables.timestamp), - coinbase: asHex(block.header.globalVariables.coinbase, 40), - feeRecipient: asHex(block.header.globalVariables.feeRecipient), - gasFees: { - feePerDaGas: Number(block.header.globalVariables.gasFees.feePerDaGas), - feePerL2Gas: Number(block.header.globalVariables.gasFees.feePerL2Gas), - }, - totalManaUsed: block.header.totalManaUsed.toNumber(), - }, - headerHash: asHex(block.header.toPropose().hash()), - numTxs: block.body.txEffects.length, - }, - }; - - const output = JSON.stringify(jsonObject, null, 2); - await writeFile(path, output, 'utf8'); - }; - const buildBlock = async (globalVariables: GlobalVariables, txs: ProcessedTx[], l1ToL2Messages: Fr[]) => { await worldStateSynchronizer.syncImmediate(); - const tempFork = await worldStateSynchronizer.fork(); + const tempFork = await worldStateSynchronizer.fork(globalVariables.blockNumber - 1); const block = await buildBlockWithCleanDB(txs, globalVariables, l1ToL2Messages, tempFork); await tempFork.close(); return block; }; + const buildSingleBlock = async (opts: { l1ToL2Messages?: Fr[]; blockNumber?: number } = {}) => { + const l1ToL2Messages = opts.l1ToL2Messages ?? new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(Fr.ZERO); + + const txs = await Promise.all([makeProcessedTx(0x1000), makeProcessedTx(0x2000)]); + const ts = (await l1Client.getBlock()).timestamp; + const slot = await rollup.getSlotAt(ts + BigInt(config.ethereumSlotDuration)); + const timestamp = await rollup.getTimestampForSlot(slot); + const globalVariables = new GlobalVariables( + new Fr(chainId), + new Fr(version), + opts.blockNumber ?? 1, + new Fr(slot), + timestamp, + coinbase, + feeRecipient, + new GasFees(0, await rollup.getManaBaseFeeAt(timestamp, true)), + ); + const block = await buildBlock(globalVariables, txs, l1ToL2Messages); + blockSource.getL1ToL2Messages.mockResolvedValueOnce(l1ToL2Messages); + return block; + }; + describe('block building', () => { + beforeEach(async () => { + await setup(); + }); + const buildL2ToL1MsgTreeRoot = (l2ToL1MsgsArray: Fr[]) => { const treeHeight = Math.ceil(Math.log2(l2ToL1MsgsArray.length)); const tree = new StandardTree( @@ -533,32 +507,113 @@ describe('L1Publisher integration', () => { ); }); - describe('error handling', () => { - const buildSingleBlock = async (opts: { l1ToL2Messages?: Fr[] } = {}) => { - const archiveInRollup = await rollup.archive(); - expect(hexToBuffer(archiveInRollup.toString())).toEqual(new Fr(GENESIS_ARCHIVE_ROOT).toBuffer()); - - const l1ToL2Messages = opts.l1ToL2Messages ?? 
new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(Fr.ZERO); - - const txs = await Promise.all([makeProcessedTx(0x1000), makeProcessedTx(0x2000)]); - const ts = (await l1Client.getBlock()).timestamp; - const slot = await rollup.getSlotAt(ts + BigInt(config.ethereumSlotDuration)); - const timestamp = await rollup.getTimestampForSlot(slot); - const globalVariables = new GlobalVariables( - new Fr(chainId), - new Fr(version), - 1, // block number - new Fr(slot), - timestamp, - coinbase, - feeRecipient, - new GasFees(0, await rollup.getManaBaseFeeAt(timestamp, true)), - ); - const block = await buildBlock(globalVariables, txs, l1ToL2Messages); - blockSource.getL1ToL2Messages.mockResolvedValueOnce(l1ToL2Messages); - return block; + describe('with attestations', () => { + beforeEach(async () => { + validators = [new Secp256k1Signer(Buffer32.fromString(sequencerPK)), ...times(2, Secp256k1Signer.random)]; + await setup({ + aztecTargetCommitteeSize: 3, + initialValidators: validators.map(v => v.address).map(address => ({ attester: address, withdrawer: address })), + }); + }); + + const expectPublishBlock = async (block: L2Block, attestations: CommitteeAttestation[]) => { + await publisher.enqueueProposeL2Block(block, attestations); + const result = await publisher.sendRequests(); + expect(result!.successfulActions).toEqual(['propose']); + expect(result!.failedActions).toEqual([]); }; + it('publishes a block with attestations', async () => { + const block = await buildSingleBlock(); + + const blockAttestations = validators.map(v => makeBlockAttestationFromBlock(block, v)); + const attestations = orderAttestations(blockAttestations, committee!); + + const canPropose = await publisher.canProposeAtNextEthBlock(new Fr(GENESIS_ARCHIVE_ROOT), proposer!); + expect(canPropose?.slot).toEqual(block.header.getSlot()); + await publisher.validateBlockHeader(block.header.toPropose()); + + await expectPublishBlock(block, attestations); + }); + + it('fails to publish a block without the proposer attestation', async () => { + const block = await buildSingleBlock(); + const blockAttestations = validators.map(v => makeBlockAttestationFromBlock(block, v)); + + // Reverse attestations to break proposer attestation + const attestations = orderAttestations(blockAttestations, committee!).reverse(); + + const canPropose = await publisher.canProposeAtNextEthBlock(new Fr(GENESIS_ARCHIVE_ROOT), proposer!); + expect(canPropose?.slot).toEqual(block.header.getSlot()); + await publisher.validateBlockHeader(block.header.toPropose()); + + await expect(publisher.enqueueProposeL2Block(block, attestations)).rejects.toThrow( + /ValidatorSelection__InvalidCommitteeCommitment/, + ); + }); + + it('publishes a block invalidating the previous one', async () => { + const badBlock = await buildSingleBlock(); + + // Publish the first invalid block + const badBlockAttestations = validators + .filter(v => v.address.equals(proposer!)) + .map(v => makeBlockAttestationFromBlock(badBlock, v)); + const badAttestations = orderAttestations(badBlockAttestations, committee!); + + await expectPublishBlock(badBlock, badAttestations); + await progressTimeBySlot(); + + logger.warn(`Published bad block ${badBlock.number} with archive root ${badBlock.archive.root}`); + + // Update the current proposer + ({ currentProposer: proposer } = await epochCache.getProposerAttesterAddressInCurrentOrNextSlot()); + + // Prepare for invalidating the previous one and publish the same block with proper attestations + const block = await buildSingleBlock({ blockNumber: 1 }); + 
expect(block.number).toEqual(badBlock.number); + const blockAttestations = validators.map(v => makeBlockAttestationFromBlock(block, v)); + const attestations = orderAttestations(blockAttestations, committee!); + + // Check we can invalidate the block + logger.warn('Checking simulate invalidate block'); + const invalidateRequest = await publisher.simulateInvalidateBlock({ + valid: false, + committee: committee!, + block: new PublishedL2Block(block, {} as L1PublishedData, badAttestations), + reason: 'insufficient-attestations', + }); + expect(invalidateRequest).toBeDefined(); + const forcePendingBlockNumber = invalidateRequest?.forcePendingBlockNumber; + expect(forcePendingBlockNumber).toEqual(0); + + // We cannot propose directly, we need to assume the previous block is invalidated + const genesis = new Fr(GENESIS_ARCHIVE_ROOT); + logger.warn(`Checking can propose at next eth block on top of genesis ${genesis}`); + expect(await publisher.canProposeAtNextEthBlock(genesis, proposer!)).toBeUndefined(); + const canPropose = await publisher.canProposeAtNextEthBlock(genesis, proposer!, { forcePendingBlockNumber }); + expect(canPropose?.slot).toEqual(block.header.getSlot()); + + // Same for validation + logger.warn('Checking validate block header'); + await expect(publisher.validateBlockHeader(block.header.toPropose())).rejects.toThrow(/Rollup__InvalidArchive/); + await publisher.validateBlockHeader(block.header.toPropose(), { forcePendingBlockNumber }); + + // Invalidate and propose + logger.warn('Enqueuing requests to invalidate and propose the block'); + publisher.enqueueInvalidateBlock(invalidateRequest); + await publisher.enqueueProposeL2Block(block, attestations, undefined, { forcePendingBlockNumber }); + const result = await publisher.sendRequests(); + expect(result!.successfulActions).toEqual(['invalidate-by-insufficient-attestations', 'propose']); + expect(result!.failedActions).toEqual([]); + }); + }); + + describe('error handling', () => { + beforeEach(async () => { + await setup(); + }); + it(`succeeds proposing new block when vote fails`, async () => { const block = await buildSingleBlock(); publisher.registerSlashPayloadGetter(() => Promise.resolve(EthAddress.random())); @@ -591,7 +646,7 @@ describe('L1Publisher integration', () => { expect(loggerErrorSpy).toHaveBeenNthCalledWith( 2, expect.stringMatching('Rollup__InvalidInHash'), - undefined, + expect.anything(), expect.objectContaining({ blockNumber: 1 }), ); }); diff --git a/yarn-project/end-to-end/src/integration_l1_publisher/write_json.ts b/yarn-project/end-to-end/src/integration_l1_publisher/write_json.ts new file mode 100644 index 000000000000..fddea9438043 --- /dev/null +++ b/yarn-project/end-to-end/src/integration_l1_publisher/write_json.ts @@ -0,0 +1,74 @@ +import { AztecAddress, Fr, type L2Block } from '@aztec/aztec.js'; +import { BatchedBlob, Blob } from '@aztec/blob-lib'; +import { EthAddress } from '@aztec/foundation/eth-address'; + +import { writeFile } from 'fs/promises'; + +const AZTEC_GENERATE_TEST_DATA = !!process.env.AZTEC_GENERATE_TEST_DATA; + +/** + * Creates a json object that can be used to test the solidity contract. 
+ * The json object must be put into + */ +export async function writeJson( + fileName: string, + block: L2Block, + l1ToL2Content: Fr[], + blobs: Blob[], + batchedBlob: BatchedBlob, + recipientAddress: AztecAddress, + deployerAddress: `0x${string}`, +): Promise { + if (!AZTEC_GENERATE_TEST_DATA) { + return; + } + // Path relative to the package.json in the end-to-end folder + const path = `../../l1-contracts/test/fixtures/${fileName}.json`; + + const asHex = (value: Fr | Buffer | EthAddress | AztecAddress, size = 64) => { + const buffer = Buffer.isBuffer(value) ? value : value.toBuffer(); + return `0x${buffer.toString('hex').padStart(size, '0')}`; + }; + + const jsonObject = { + populate: { + l1ToL2Content: l1ToL2Content.map(asHex), + recipient: asHex(recipientAddress.toField()), + sender: deployerAddress, + }, + messages: { + l2ToL1Messages: block.body.txEffects.flatMap(txEffect => txEffect.l2ToL1Msgs).map(asHex), + }, + block: { + // The json formatting in forge is a bit brittle, so we convert Fr to a number in the few values below. + // This should not be a problem for testing as long as the values are not larger than u32. + archive: asHex(block.archive.root), + blobCommitments: Blob.getPrefixedEthBlobCommitments(blobs), + batchedBlobInputs: batchedBlob.getEthBlobEvaluationInputs(), + blockNumber: block.number, + body: `0x${block.body.toBuffer().toString('hex')}`, + header: { + lastArchiveRoot: asHex(block.header.lastArchive.root), + contentCommitment: { + blobsHash: asHex(block.header.contentCommitment.blobsHash), + inHash: asHex(block.header.contentCommitment.inHash), + outHash: asHex(block.header.contentCommitment.outHash), + }, + slotNumber: Number(block.header.globalVariables.slotNumber), + timestamp: Number(block.header.globalVariables.timestamp), + coinbase: asHex(block.header.globalVariables.coinbase, 40), + feeRecipient: asHex(block.header.globalVariables.feeRecipient), + gasFees: { + feePerDaGas: Number(block.header.globalVariables.gasFees.feePerDaGas), + feePerL2Gas: Number(block.header.globalVariables.gasFees.feePerL2Gas), + }, + totalManaUsed: block.header.totalManaUsed.toNumber(), + }, + headerHash: asHex(block.header.toPropose().hash()), + numTxs: block.body.txEffects.length, + }, + }; + + const output = JSON.stringify(jsonObject, null, 2); + await writeFile(path, output, 'utf8'); +} diff --git a/yarn-project/ethereum/src/contracts/rollup.ts b/yarn-project/ethereum/src/contracts/rollup.ts index 062b2eb68440..db025d40c54e 100644 --- a/yarn-project/ethereum/src/contracts/rollup.ts +++ b/yarn-project/ethereum/src/contracts/rollup.ts @@ -563,7 +563,7 @@ export class RollupContract { } /** Creates a request to Rollup#invalidateBadAttestation to be simulated or sent */ - public getInvalidateBadAttestationRequest( + public buildInvalidateBadAttestationRequest( blockNumber: number, attestations: ViemCommitteeAttestation[], committee: EthAddress[], @@ -585,7 +585,7 @@ export class RollupContract { } /** Creates a request to Rollup#invalidateInsufficientAttestations to be simulated or sent */ - public getInvalidateInsufficientAttestationsRequest( + public buildInvalidateInsufficientAttestationsRequest( blockNumber: number, attestations: ViemCommitteeAttestation[], committee: EthAddress[], diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts index 98afd75a6b8c..13837190edd2 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts +++ 
b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts @@ -66,11 +66,11 @@ export enum SignalType { type GetSlashPayloadCallBack = (slotNumber: bigint) => Promise; const Actions = [ - 'invalidate-by-invalid-attestation', - 'invalidate-by-insufficient-attestations', 'propose', 'governance-signal', 'slashing-signal', + 'invalidate-by-invalid-attestation', + 'invalidate-by-insufficient-attestations', ] as const; export type Action = (typeof Actions)[number]; @@ -382,7 +382,7 @@ export class SequencerPublisher { return undefined; } - const request = this.getInvalidateBlockRequest(validationResult); + const request = this.buildInvalidateBlockRequest(validationResult); const { reason, block } = validationResult; const blockNumber = block.block.number; const logData = { ...block.block.toBlockInfo(), reason }; @@ -425,7 +425,7 @@ export class SequencerPublisher { } } - private getInvalidateBlockRequest(validationResult: ValidateBlockResult) { + private buildInvalidateBlockRequest(validationResult: ValidateBlockResult) { if (validationResult.valid) { throw new Error('Cannot invalidate a valid block'); } @@ -435,14 +435,14 @@ export class SequencerPublisher { this.log.debug(`Simulating invalidate block ${block.block.number}`, logData); if (reason === 'invalid-attestation') { - return this.rollupContract.getInvalidateBadAttestationRequest( + return this.rollupContract.buildInvalidateBadAttestationRequest( block.block.number, block.attestations.map(a => a.toViem()), committee, validationResult.invalidIndex, ); } else if (reason === 'insufficient-attestations') { - return this.rollupContract.getInvalidateInsufficientAttestationsRequest( + return this.rollupContract.buildInvalidateInsufficientAttestationsRequest( block.block.number, block.attestations.map(a => a.toViem()), committee, @@ -678,6 +678,7 @@ export class SequencerPublisher { // By simulation issue, I mean the fact that the block.timestamp is equal to the last block, not the next, which // make time consistency checks break. const attestationData = { digest: digest.toBuffer(), attestations: attestations ?? [] }; + // TODO(palla): Check whether we're validating twice, once here and once within addProposeTx, since we call simulateProposeTx in both places. ts = await this.validateBlockForSubmission(block, attestationData, opts); } catch (err: any) { this.log.error(`Block validation failed. ${err instanceof Error ? err.message : 'No error message'}`, err, { @@ -832,7 +833,7 @@ export class SequencerPublisher { // override the pending block number if requested const forcePendingBlockNumberStateDiff = ( - options.forcePendingBlockNumber + options.forcePendingBlockNumber !== undefined ? await this.rollupContract.makePendingBlockNumberOverride(options.forcePendingBlockNumber) : [] ).flatMap(override => override.stateDiff ?? []); @@ -942,7 +943,7 @@ export class SequencerPublisher { return true; } else { this.metrics.recordFailedTx('process'); - this.log.error(`Rollup process tx failed. ${errorMsg ?? 'No error message'}`, undefined, { + this.log.error(`Rollup process tx failed: ${errorMsg ?? 
'no error message'}`, undefined, { ...block.getStats(), receipt, txHash: receipt.transactionHash, diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index a6667ce62901..85e8007367a1 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -236,6 +236,8 @@ describe('sequencer', () => { getBlockNumber: mockFn().mockResolvedValue(lastBlockNumber), getL2Tips: mockFn().mockResolvedValue({ latest: { number: lastBlockNumber, hash } }), getL1Timestamp: mockFn().mockResolvedValue(1000n), + isPendingChainInvalid: mockFn().mockResolvedValue(false), + getPendingChainValidationStatus: mockFn().mockResolvedValue({ valid: true }), }); l1ToL2MessageSource = mock({ diff --git a/yarn-project/stdlib/src/interfaces/archiver.test.ts b/yarn-project/stdlib/src/interfaces/archiver.test.ts index 6089516f52a7..f75917a3332f 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.test.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.test.ts @@ -245,6 +245,16 @@ describe('ArchiverApiSchema', () => { const result = await context.client.getL1Timestamp(); expect(result).toBe(1n); }); + + it('getPendingChainValidationStatus', async () => { + const result = await context.client.getPendingChainValidationStatus(); + expect(result).toEqual({ valid: true }); + }); + + it('isPendingChainInvalid', async () => { + const result = await context.client.isPendingChainInvalid(); + expect(result).toBe(false); + }); }); class MockArchiver implements ArchiverApi { From e172b602305d37e1413e6d103d9048dc5348584c Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Tue, 29 Jul 2025 15:36:06 -0300 Subject: [PATCH 3/5] Add e2e test --- .../epochs_invalidate_block.test.ts | 152 ++++++++++++++++++ yarn-project/sequencer-client/src/config.ts | 4 + .../src/sequencer/sequencer.ts | 17 +- .../src/sequencer/timetable.ts | 13 +- yarn-project/stdlib/src/interfaces/configs.ts | 3 + .../validator-client/src/validator.ts | 13 +- 6 files changed, 188 insertions(+), 14 deletions(-) create mode 100644 yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.test.ts diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.test.ts new file mode 100644 index 000000000000..0855389de3b4 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.test.ts @@ -0,0 +1,152 @@ +import type { AztecNodeService } from '@aztec/aztec-node'; +import { type Logger, retryUntil } from '@aztec/aztec.js'; +import { type ExtendedViemWalletClient, type Operator, RollupContract } from '@aztec/ethereum'; +import { asyncMap } from '@aztec/foundation/async-map'; +import { times } from '@aztec/foundation/collection'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { bufferToHex } from '@aztec/foundation/string'; +import { RollupAbi } from '@aztec/l1-artifacts'; +import type { SpamContract } from '@aztec/noir-test-contracts.js/Spam'; + +import { jest } from '@jest/globals'; +import { privateKeyToAccount } from 'viem/accounts'; + +import { type EndToEndContext, getPrivateKeyFromIndex } from '../fixtures/utils.js'; +import { EpochsTestContext } from './epochs_test.js'; + +jest.setTimeout(1000 * 60 * 10); + +const NODE_COUNT = 3; +const VALIDATOR_COUNT = 3; + +// This test validates the scenario where: +// 1. 
A sequencer posts a block without all necessary attestations + // 2. The next proposer sees the invalid block and invalidates it as part of publishing a new block + // 3. All nodes sync the block with correct attestations +describe('e2e_epochs/epochs_invalidate_block', () => { + let context: EndToEndContext; + let logger: Logger; + let l1Client: ExtendedViemWalletClient; + let rollupContract: RollupContract; + + let test: EpochsTestContext; + let validators: (Operator & { privateKey: `0x${string}` })[]; + let nodes: AztecNodeService[]; + let contract: SpamContract; + + beforeEach(async () => { + validators = times(VALIDATOR_COUNT, i => { + const privateKey = bufferToHex(getPrivateKeyFromIndex(i + 3)!); + const attester = EthAddress.fromString(privateKeyToAccount(privateKey).address); + return { attester, withdrawer: attester, privateKey }; + }); + + // Set up the context with the given set of validators, a mocked gossipsub network, and no anvil test watcher. + test = await EpochsTestContext.setup({ + numberOfAccounts: 1, + initialValidators: validators, + mockGossipSubNetwork: true, + disableAnvilTestWatcher: true, + aztecProofSubmissionEpochs: 1024, + startProverNode: false, + aztecTargetCommitteeSize: VALIDATOR_COUNT, + }); + + ({ context, logger, l1Client } = test); + rollupContract = new RollupContract(l1Client, test.rollup.address); + + // Halt block building in the initial aztec node + logger.warn(`Stopping sequencer in initial aztec node.`); + await context.sequencer!.stop(); + + // Start the validator nodes + logger.warn(`Initial setup complete. Starting ${NODE_COUNT} validator nodes.`); + const validatorNodes = validators.slice(0, NODE_COUNT); + nodes = await asyncMap(validatorNodes, ({ privateKey }) => + test.createValidatorNode([privateKey], { dontStartSequencer: true, minTxsPerBlock: 1, maxTxsPerBlock: 1 }), + ); + logger.warn(`Started ${NODE_COUNT} validator nodes.`, { validators: validatorNodes.map(v => v.attester) }); + + // Register spam contract for sending txs. 
+ contract = await test.registerSpamContract(context.wallet); + logger.warn(`Test setup completed.`, { validators: validators.map(v => v.attester.toString()) }); + }); + + afterEach(async () => { + jest.restoreAllMocks(); + await test.teardown(); + }); + + it('invalidates a block published without sufficient attestations', async () => { + const sequencers = nodes.map(node => node.getSequencer()!); + const initialBlockNumber = await nodes[0].getBlockNumber(); + + // Configure all sequencers to skip collecting attestations before starting + logger.warn('Configuring all sequencers to skip attestation collection'); + sequencers.forEach(sequencer => { + sequencer.updateSequencerConfig({ skipCollectingAttestations: true }); + }); + + // Send a transaction so the sequencer builds a block + logger.warn('Sending transaction to trigger block building'); + const sentTx = contract.methods.spam(1, 1n, false).send(); + + // Disable skipCollectingAttestations after the first block is mined + test.monitor.once('l2-block', ({ l2BlockNumber }) => { + logger.warn(`Disabling skipCollectingAttestations after L2 block ${l2BlockNumber} has been mined`); + sequencers.forEach(sequencer => { + sequencer.updateSequencerConfig({ skipCollectingAttestations: false }); + }); + }); + + // Start all sequencers + await Promise.all(sequencers.map(s => s.start())); + logger.warn(`Started all sequencers with skipCollectingAttestations=true`); + + // Create a filter for BlockInvalidated events + const blockInvalidatedFilter = await l1Client.createContractEventFilter({ + address: rollupContract.address, + abi: RollupAbi, + eventName: 'BlockInvalidated', + fromBlock: 1n, + toBlock: 'latest', + }); + + // The next proposer should invalidate the previous block and publish a new one + logger.warn('Waiting for next proposer to invalidate the previous block'); + + // Wait for the BlockInvalidated event + const blockInvalidatedEvents = await retryUntil( + async () => { + const events = await l1Client.getFilterLogs({ filter: blockInvalidatedFilter }); + return events.length > 0 ? 
events : undefined; + }, + 'BlockInvalidated event', + test.L2_SLOT_DURATION_IN_S * 5, + 0.1, + ); + + // Verify the BlockInvalidated event was emitted + const [event] = blockInvalidatedEvents; + logger.warn(`BlockInvalidated event emitted`, { event }); + expect(event.args.blockNumber).toBeGreaterThan(initialBlockNumber); + + // Wait for all nodes to sync the new block + logger.warn('Waiting for all nodes to sync'); + await retryUntil( + async () => { + const blockNumbers = await Promise.all(nodes.map(node => node.getBlockNumber())); + logger.info(`Node synced block numbers: ${blockNumbers.join(', ')}`); + return blockNumbers.every(bn => bn > initialBlockNumber); + }, + 'Node sync check', + test.L2_SLOT_DURATION_IN_S * 5, + 0.5, + ); + + // Verify the transaction was eventually included + const receipt = await sentTx.wait({ timeout: 30 }); + expect(receipt.status).toBe('success'); + logger.warn(`Transaction included in block ${receipt.blockNumber}`); + }); +}); diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index 5140fc284ea2..034cc3072b01 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -120,6 +120,10 @@ export const sequencerConfigMappings: ConfigMappingsType = { fakeProcessingDelayPerTxMs: { description: 'Used for testing to introduce a fake delay after processing each tx', }, + skipCollectingAttestations: { + description: 'Whether to skip collecting attestations from validators and only use self-attestations.', + ...booleanConfigHelper(false), + }, ...pickConfigMappings(p2pConfigMappings, ['txPublicSetupAllowList']), }; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 6afa31ded655..f05fe3fd77be 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -144,6 +144,9 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter TypedEventEmitter TypedEventEmitter TypedEventEmitter; diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 1697e40be3c8..17bf6195fa07 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -520,6 +520,13 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) await this.p2pClient.broadcastProposal(proposal); } + async collectOwnAttestations(proposal: BlockProposal): Promise { + const slot = proposal.payload.header.slotNumber.toBigInt(); + const inCommittee = await this.epochCache.filterInCommittee(slot, this.keyStore.getAddresses()); + this.log.debug(`Collecting ${inCommittee.length} self-attestations for slot ${slot}`, { inCommittee }); + return this.doAttestToProposal(proposal, inCommittee); + } + async collectAttestations(proposal: BlockProposal, required: number, deadline: Date): Promise { // Wait and poll the p2pClient's attestation pool for this block until we have enough attestations const slot = proposal.payload.header.slotNumber.toBigInt(); @@ -532,11 +539,9 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) throw new AttestationTimeoutError(0, required, slot); } - const proposalId = proposal.archive.toString(); - // adds attestations for all of my addresses locally - const inCommittee = await this.epochCache.filterInCommittee(slot, this.keyStore.getAddresses()); - 
await this.doAttestToProposal(proposal, inCommittee); + await this.collectOwnAttestations(proposal); + const proposalId = proposal.archive.toString(); const myAddresses = this.keyStore.getAddresses(); let attestations: BlockAttestation[] = []; From 3a1d6563b33193226834731605fa167c02cb76d4 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Thu, 31 Jul 2025 10:11:44 -0300 Subject: [PATCH 4/5] Fix archiver --- yarn-project/archiver/src/archiver/archiver.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 3c9ff89b3354..eeec0ba83894 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -358,7 +358,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem // We only do this if rollup cant prune on the next submission. Otherwise we will end up // re-syncing the blocks we have just unwound above. We also dont do this if the last block is invalid, // since the archiver will rightfully refuse to sync up to it. - if (!rollupCanPrune && !rollupStatus.lastBlockValidationResult.valid) { + if (!rollupCanPrune && rollupStatus.lastBlockValidationResult.valid) { await this.checkForNewBlocksBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber); } From 7269059b03c2b2c56ad6c48a433dfeb1e71f979d Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Thu, 31 Jul 2025 11:27:49 -0300 Subject: [PATCH 5/5] Fix unit test --- yarn-project/validator-client/src/validator.test.ts | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index e8457c342557..0e72b2bdd1de 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -52,13 +52,9 @@ describe('ValidatorClient', () => { p2pClient.getAttestationsForSlot.mockImplementation(() => Promise.resolve([])); p2pClient.handleAuthRequestFromPeer.mockResolvedValue(StatusMessage.random()); blockBuilder = mock(); - blockBuilder.getConfig.mockReturnValue({ - l1GenesisTime: 1n, - slotDuration: 24, - l1ChainId: 1, - rollupVersion: 1, - }); + blockBuilder.getConfig.mockReturnValue({ l1GenesisTime: 1n, slotDuration: 24, l1ChainId: 1, rollupVersion: 1 }); epochCache = mock(); + epochCache.filterInCommittee.mockImplementation((_slot, addresses) => Promise.resolve(addresses)); blockSource = mock(); l1ToL2MessageSource = mock(); txProvider = mock();