From f62a635522c0a93a61ab13d974c5159d9745a589 Mon Sep 17 00:00:00 2001 From: owen Date: Wed, 11 Mar 2026 01:04:58 +0000 Subject: [PATCH 1/3] mv sim to tile and add ssz support to simulator --- Cargo.lock | 20 +- crates/common/src/api/builder_api.rs | 4 +- crates/common/src/config.rs | 6 + crates/common/src/simulator.rs | 107 +++- crates/relay/src/auctioneer/context.rs | 41 +- crates/relay/src/auctioneer/get_header.rs | 13 +- crates/relay/src/auctioneer/mod.rs | 60 +- .../relay/src/auctioneer/simulator/manager.rs | 369 ------------ crates/relay/src/auctioneer/simulator/mod.rs | 144 ----- crates/relay/src/auctioneer/submit_block.rs | 84 ++- crates/relay/src/auctioneer/types.rs | 12 +- crates/relay/src/bid_decoder/tile.rs | 40 +- crates/relay/src/lib.rs | 8 +- crates/relay/src/main.rs | 27 +- .../src/{auctioneer => }/simulator/client.rs | 133 ++-- crates/relay/src/simulator/mod.rs | 97 +++ crates/relay/src/simulator/tile.rs | 567 ++++++++++++++++++ crates/relay/src/spine/messages.rs | 23 + crates/relay/src/spine/mod.rs | 8 + crates/simulator/Cargo.toml | 2 + crates/simulator/src/main.rs | 10 + crates/simulator/src/ssz_server.rs | 63 ++ crates/simulator/src/validation/mod.rs | 14 +- 23 files changed, 1181 insertions(+), 671 deletions(-) delete mode 100644 crates/relay/src/auctioneer/simulator/manager.rs delete mode 100644 crates/relay/src/auctioneer/simulator/mod.rs rename crates/relay/src/{auctioneer => }/simulator/client.rs (66%) create mode 100644 crates/relay/src/simulator/mod.rs create mode 100644 crates/relay/src/simulator/tile.rs create mode 100644 crates/simulator/src/ssz_server.rs diff --git a/Cargo.lock b/Cargo.lock index 64ccd6bab..f252b5289 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -944,7 +944,7 @@ version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -955,7 +955,7 @@ 
checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", "once_cell_polyfill", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -3540,7 +3540,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976" dependencies = [ "data-encoding", - "syn 1.0.109", + "syn 2.0.111", ] [[package]] @@ -3828,7 +3828,7 @@ dependencies = [ "libc", "option-ext", "redox_users 0.5.2", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -4261,7 +4261,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -5731,10 +5731,12 @@ dependencies = [ "alloy-signer-local", "alloy-sol-types", "async-trait", + "axum 0.8.7", "bytes", "clap", "dashmap 5.5.3", "derive_more 2.1.0", + "ethereum_ssz", "futures", "helix-common", "helix-types", @@ -7675,7 +7677,7 @@ version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -12496,7 +12498,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.11.0", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -13821,7 +13823,7 @@ dependencies = [ "getrandom 0.3.4", "once_cell", "rustix 1.1.2", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -15343,7 +15345,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] diff --git a/crates/common/src/api/builder_api.rs b/crates/common/src/api/builder_api.rs index 0918c9505..97f480770 
100644 --- a/crates/common/src/api/builder_api.rs +++ b/crates/common/src/api/builder_api.rs @@ -81,13 +81,13 @@ impl<'a> From<&'a InclusionListWithKey> for (&'a InclusionListWithMetadata, &'a } } -#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, Encode, Decode)] pub struct InclusionListTxWithMetadata { pub hash: B256, pub bytes: Transaction, } -#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, Encode, Decode)] pub struct InclusionListWithMetadata { pub txs: Vec, } diff --git a/crates/common/src/config.rs b/crates/common/src/config.rs index 6b06ea160..8b043ec58 100644 --- a/crates/common/src/config.rs +++ b/crates/common/src/config.rs @@ -101,6 +101,7 @@ impl RelayConfig { reg_workers: vec![], tcp_bid_submissions_tile: 2, decoder: vec![4], + simulator: 5, }, gossip_payload_on_header: false, api_port: 4040, @@ -151,6 +152,8 @@ pub struct CoresConfig { pub reg_workers: Vec, pub tcp_bid_submissions_tile: usize, pub decoder: Vec, + #[serde(default)] + pub simulator: usize, } impl Default for WebsiteConfig { @@ -258,6 +261,9 @@ pub struct SimulatorConfig { /// roughly number of cores on simulator #[serde(default = "default_usize::<32>")] pub max_concurrent_tasks: usize, + /// If set, use the SSZ binary endpoint at this URL instead of JSON-RPC + #[serde(default)] + pub ssz_url: Option, } fn default_namespace() -> String { diff --git a/crates/common/src/simulator.rs b/crates/common/src/simulator.rs index 24854a90b..b61abb952 100644 --- a/crates/common/src/simulator.rs +++ b/crates/common/src/simulator.rs @@ -1,6 +1,59 @@ -use alloy_primitives::B256; +use std::sync::Arc; + +use alloy_primitives::{B256, Bytes}; +use helix_types::{ + BidTrace, BlobsBundle, BlsSignatureBytes, ExecutionPayload, ExecutionRequests, + SignedBidSubmission, +}; +use ssz_derive::{Decode, Encode}; use thiserror::Error; +use crate::{ValidatorPreferences, 
api::builder_api::InclusionListWithMetadata}; + +/// Wire format of `signed_bid_submission` in `SimRequest`. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +#[repr(u8)] +pub enum SubmissionFormat { + /// Uncompressed SSZ `SignedBidSubmission` (current default). + #[default] + FullSsz = 0, + /// Uncompressed SSZ `DehydratedBidSubmission`. + DehydratedSsz = 1, +} + +impl ssz::Encode for SubmissionFormat { + fn is_ssz_fixed_len() -> bool { + true + } + fn ssz_fixed_len() -> usize { + 1 + } + fn ssz_bytes_len(&self) -> usize { + 1 + } + fn ssz_append(&self, buf: &mut Vec) { + buf.push(*self as u8); + } +} + +impl ssz::Decode for SubmissionFormat { + fn is_ssz_fixed_len() -> bool { + true + } + fn ssz_fixed_len() -> usize { + 1 + } + fn from_ssz_bytes(bytes: &[u8]) -> Result { + match bytes { + [0] => Ok(Self::FullSsz), + [1] => Ok(Self::DehydratedSsz), + _ => { + Err(ssz::DecodeError::BytesInvalid(format!("unknown SubmissionFormat: {bytes:?}"))) + } + } + } +} + const UNKNOWN_ANCESTOR: &str = "unknown ancestor"; const PARENT_NOT_FOUND: &str = "parent block not found"; const MISSING_TRIE_NODE: &str = "missing trie node"; @@ -31,6 +84,9 @@ pub enum BlockSimError { #[error("simulation dropped")] SimulationDropped, + + #[error("hydration miss: simulator cache does not have required transactions/blobs")] + HydrationMiss, } impl BlockSimError { @@ -74,6 +130,55 @@ impl BlockSimError { } } +#[derive(Debug, Clone, Encode, Decode)] +pub struct SimRequest { + pub apply_blacklist: bool, + pub registered_gas_limit: u64, + pub parent_beacon_block_root: B256, + pub inclusion_list: InclusionListWithMetadata, + pub format: SubmissionFormat, + pub signed_bid_submission: Bytes, +} + +// TODO: refactor this in a SignedBidSubmission + extra fields +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct BlockSimRequest { + #[serde(with = "serde_utils::quoted_u64")] + pub registered_gas_limit: u64, + pub message: BidTrace, + pub execution_payload: 
ExecutionPayload, + pub signature: BlsSignatureBytes, + pub proposer_preferences: ValidatorPreferences, + pub blobs_bundle: Option>, + pub execution_requests: Option>, + pub parent_beacon_block_root: Option, + pub inclusion_list: Option, + pub apply_blacklist: bool, +} + +impl BlockSimRequest { + pub fn new( + registered_gas_limit: u64, + block: &SignedBidSubmission, + proposer_preferences: ValidatorPreferences, + parent_beacon_block_root: Option, + inclusion_list: Option, + ) -> Self { + Self { + registered_gas_limit, + message: block.bid_trace().clone(), + execution_payload: block.execution_payload_ref().clone(), + signature: *block.signature(), + apply_blacklist: proposer_preferences.filtering.is_regional(), + proposer_preferences, + blobs_bundle: Some(block.blobs_bundle().clone()), + execution_requests: Some(block.execution_requests_ref().clone()), + parent_beacon_block_root, + inclusion_list, + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/relay/src/auctioneer/context.rs b/crates/relay/src/auctioneer/context.rs index 32e4450d4..a80a32ce7 100644 --- a/crates/relay/src/auctioneer/context.rs +++ b/crates/relay/src/auctioneer/context.rs @@ -1,6 +1,9 @@ use std::{ ops::{Deref, DerefMut}, - sync::{Arc, atomic::Ordering}, + sync::{ + Arc, + atomic::{AtomicBool, Ordering}, + }, time::Instant, }; @@ -25,10 +28,13 @@ use crate::{ bid_adjustor::BidAdjustor, bid_sorter::BidSorter, block_merger::BlockMerger, - simulator::manager::{SimulationResult, SimulatorManager}, types::{PayloadEntry, PendingPayload, SubmissionRef}, }, - spine::{HelixSpineProducers, messages::SubmissionResultWithRef}, + simulator::{BlockMergeResponse, SimInboundPayload, tile::SimulationResult}, + spine::{ + HelixSpineProducers, + messages::{SubmissionResultWithRef, ToSimKind, ToSimMsg}, + }, }; // Context that is only valid for a given slot @@ -41,7 +47,6 @@ pub struct SlotContext { pub version: FxHashMap, pub hydration_cache: HydrationCache, pub payloads: FxHashMap, - pub 
sim_manager: SimulatorManager, pub block_merger: BlockMerger, } @@ -56,6 +61,9 @@ pub struct Context { pub completed_dry_run: bool, pub future_results: Arc>, pub auctioneer_handle: AuctioneerHandle, + pub sim_inbound: Arc>, + pub accept_optimistic: Arc, + pub failsafe_triggered: Arc, } const EXPECTED_PAYLOADS_PER_SLOT: usize = 5000; @@ -67,7 +75,9 @@ impl Context { pub fn new( chain_info: ChainInfo, config: RelayConfig, - sim_manager: SimulatorManager, + sim_inbound: Arc>, + accept_optimistic: Arc, + failsafe_triggered: Arc, db: DbHandle, bid_sorter: BidSorter, cache: LocalCache, @@ -87,7 +97,6 @@ impl Context { let block_merger = BlockMerger::new(0, chain_info.clone(), cache.clone(), config.clone()); let slot_context = SlotContext { - sim_manager, bid_slot: Slot::new(0), pending_payload: None, bid_sorter, @@ -114,6 +123,9 @@ impl Context { completed_dry_run: false, future_results, auctioneer_handle, + sim_inbound, + accept_optimistic, + failsafe_triggered, } } @@ -130,10 +142,7 @@ impl Context { already_sent: bool, producers: &mut HelixSpineProducers, ) { - let (id, result) = result; - - let paused_until = result.as_ref().and_then(|r| r.paused_until); - self.sim_manager.handle_task_response(id, paused_until); + let (_id, result) = result; let Some(result) = result else { return; @@ -171,7 +180,7 @@ impl Context { let reason = err.to_string(); let bid_slot = result.submission.slot(); - let failsafe_triggered = self.sim_manager.failsafe_triggered.clone(); + let failsafe_triggered = self.failsafe_triggered.clone(); self.db.db_demote_builder( bid_slot.as_u64(), @@ -198,7 +207,7 @@ impl Context { } } - pub fn on_new_slot(&mut self, bid_slot: Slot) { + pub fn on_new_slot(&mut self, bid_slot: Slot, producers: &mut HelixSpineProducers) { self.bid_slot = bid_slot; if let Some(pending) = self.pending_payload.take() { let _ = pending @@ -223,7 +232,13 @@ impl Context { self.version.clear(); self.hydration_cache.clear(); - self.sim_manager.on_new_slot(bid_slot.as_u64()); + + 
producers.produce(ToSimMsg { + kind: ToSimKind::NewSlot, + ix: 0, + bid_slot: bid_slot.as_u64(), + }); + self.block_merger.on_new_slot(bid_slot.as_u64()); self.bid_adjustor.on_new_slot(bid_slot.as_u64()); self.auctioneer_handle.clear_inflight_payloads(); diff --git a/crates/relay/src/auctioneer/get_header.rs b/crates/relay/src/auctioneer/get_header.rs index 3562dc27a..72851f4b4 100644 --- a/crates/relay/src/auctioneer/get_header.rs +++ b/crates/relay/src/auctioneer/get_header.rs @@ -12,6 +12,7 @@ use crate::{ context::Context, types::{GetHeaderResult, SlotData}, }, + spine::HelixSpineProducers, }; impl Context { @@ -20,12 +21,18 @@ impl Context { params: GetHeaderParams, slot_data: &SlotData, res_tx: oneshot::Sender, + producers: &mut HelixSpineProducers, ) { assert_eq!(params.slot, self.bid_slot.as_u64(), "params should already be validated!"); - let _ = res_tx.send(self.get_header(params.parent_hash, slot_data)); + let _ = res_tx.send(self.get_header(params.parent_hash, slot_data, producers)); } - fn get_header(&mut self, parent_hash: B256, slot_data: &SlotData) -> GetHeaderResult { + fn get_header( + &mut self, + parent_hash: B256, + slot_data: &SlotData, + producers: &mut HelixSpineProducers, + ) -> GetHeaderResult { let Some(best_block_hash) = self.bid_sorter.get_header(&parent_hash) else { warn!(%parent_hash, "no bids for this fork"); return Err(ProposerApiError::NoBidPrepared); @@ -50,7 +57,7 @@ impl Context { .with_label_values(&[strategy]) .observe(start.elapsed().as_micros() as f64); - self.store_data_and_sim(sim_request, adjusted_bid.clone(), true); + self.store_data_and_sim(sim_request, adjusted_bid.clone(), true, producers); if is_adjustable_slot { return Ok(adjusted_bid); diff --git a/crates/relay/src/auctioneer/mod.rs b/crates/relay/src/auctioneer/mod.rs index d79be596d..a12d29e1c 100644 --- a/crates/relay/src/auctioneer/mod.rs +++ b/crates/relay/src/auctioneer/mod.rs @@ -5,7 +5,6 @@ mod context; mod get_header; mod get_payload; mod handle; -mod 
simulator; mod submit_block; mod types; mod validation; @@ -13,7 +12,7 @@ mod worker; use std::{ cmp::Ordering, - sync::Arc, + sync::{Arc, atomic::AtomicBool}, time::{Duration, Instant}, }; @@ -33,7 +32,6 @@ use helix_common::{ use helix_database::handle::DbHandle; use helix_types::Slot; use rustc_hash::FxHashMap; -pub use simulator::*; use tracing::{debug, error, info, trace, warn}; pub use types::{ Event, GetPayloadResultData, PayloadBidData, PayloadEntry, SlotData, Submission, @@ -56,7 +54,21 @@ use crate::{ api::{FutureBidSubmissionResult, builder::error::BuilderApiError, proposer::ProposerApiError}, auctioneer::types::PendingPayload, housekeeper::PayloadAttributesUpdate, - spine::{HelixSpineProducers, messages::DecodedSubmission}, + simulator::{SimInboundPayload, SimOutboundPayload}, + spine::{ + HelixSpineProducers, + messages::{DecodedSubmission, FromSimMsg}, + }, +}; +pub use crate::{ + auctioneer::{ + bid_adjustor::{BidAdjustor, DefaultBidAdjustor}, + bid_sorter::BidSorter, + block_merger::get_mergeable_orders, + context::{Context, send_submission_result}, + types::{InternalBidSubmission, InternalBidSubmissionHeader, SubmissionRef}, + }, + simulator::{SimulatorRequest, SimulatorTile, client::SimulatorClient, *}, }; pub struct Auctioneer { @@ -65,6 +77,7 @@ pub struct Auctioneer { tel: Telemetry, event_rx: crossbeam_channel::Receiver, decoded: Arc>, + sim_outbound: Arc>, } impl Auctioneer { @@ -76,19 +89,22 @@ impl Auctioneer { bid_sorter: BidSorter, local_cache: LocalCache, bid_adjustor: B, - event_tx: crossbeam_channel::Sender, event_rx: crossbeam_channel::Receiver, id: usize, future_results: Arc>, decoded: Arc>, auctioneer_handle: AuctioneerHandle, + sim_inbound: Arc>, + sim_outbound: Arc>, + accept_optimistic: Arc, + failsafe_triggered: Arc, ) -> Self { - let sim_manager = SimulatorManager::new(config.simulators.clone(), event_tx.clone()); - let ctx = Context::new( chain_info, config, - sim_manager, + sim_inbound, + accept_optimistic, + 
failsafe_triggered, db, bid_sorter, local_cache, @@ -102,6 +118,7 @@ impl Auctioneer { tel: Telemetry::new(format!("auctioneer_{id}")), event_rx, decoded, + sim_outbound, } } } @@ -124,6 +141,20 @@ impl Tile for Auctioneer { } }); + adapter.consume(|msg: FromSimMsg, producers| { + let Some(payload) = self.sim_outbound.get(msg.ix) else { + tracing::error!(?msg, "sim outbound payload not found"); + return; + }; + let event = match payload.as_ref() { + SimOutboundPayload::SimResult(sim_result) => Event::SimResult(sim_result.clone()), + SimOutboundPayload::MergeResult(merge_result) => { + Event::MergeResult(merge_result.clone()) + } + }; + self.state.step(event, &mut self.ctx, &mut self.tel, producers); + }); + self.tel.telemetry(&self.event_rx); } @@ -226,7 +257,7 @@ impl State { ); } - ctx.on_new_slot(bid_slot); + ctx.on_new_slot(bid_slot, producers); (registration_data, FxHashMap::default(), il) } }; @@ -271,7 +302,7 @@ impl State { "gap in slot data received (sort)" ); - ctx.on_new_slot(bid_slot); + ctx.on_new_slot(bid_slot, producers); // another relay delivered the payload *self = Self::process_slot_data( bid_slot, @@ -307,7 +338,7 @@ impl State { "new slot while broacasting different block, was the slot missed?"); } - ctx.on_new_slot(bid_slot); + ctx.on_new_slot(bid_slot, producers); *self = Self::process_slot_data( bid_slot, FxHashMap::default(), @@ -319,11 +350,6 @@ impl State { } }, - // simulator sync status - (_, Event::SimulatorSync { id, is_synced }) => { - ctx.sim_manager.handle_sync_status(id, is_synced); - } - // late sim result (State::Broadcasting { .. } | State::Slot { .. 
}, Event::SimResult(result)) => { ctx.handle_simulation_result(result, false, producers); @@ -373,7 +399,7 @@ impl State { warn!(req =% params.pubkey, this =% slot_data.registration_data.entry.registration.message.pubkey, "get header for mismatched proposer"); let _ = res_tx.send(Err(ProposerApiError::NoBidPrepared)); } else { - ctx.handle_get_header(params, slot_data, res_tx) + ctx.handle_get_header(params, slot_data, res_tx, producers) } trace!("finished processing"); diff --git a/crates/relay/src/auctioneer/simulator/manager.rs b/crates/relay/src/auctioneer/simulator/manager.rs deleted file mode 100644 index 0468e8a70..000000000 --- a/crates/relay/src/auctioneer/simulator/manager.rs +++ /dev/null @@ -1,369 +0,0 @@ -use std::{ - self, - sync::{ - Arc, - atomic::{AtomicBool, Ordering}, - }, - time::{Duration, Instant}, -}; - -use flux::timing::Nanos; -use helix_common::{ - SimulatorConfig, SubmissionTrace, bid_submission::OptimisticVersion, is_local_dev, - metrics::SimulatorMetrics, record_submission_step, simulator::BlockSimError, spawn_tracked, -}; -use helix_types::{BlsPublicKeyBytes, SignedBidSubmission, SubmissionVersion}; -use tracing::{debug, error, info, warn}; - -use crate::auctioneer::{ - simulator::{BlockMergeRequest, SimulatorRequest, client::SimulatorClient}, - types::{Event, SubmissionRef}, -}; - -pub(crate) const SIMULATOR_REQUEST_TIMEOUT: Duration = Duration::from_secs(20); - -#[derive(Default)] -struct LocalTelemetry { - sims_reqs: usize, - sims_sent_immediately: usize, - sims_reqs_dropped: usize, - stale_sim_reqs: usize, - // waiting to be sent - max_pending: usize, - // waiting for result - max_in_flight: usize, - merge_reqs: usize, - dropped_merge_reqs: usize, -} - -// Sim id / Simulation Result, so we can use this for merging requests -pub type SimulationResult = (usize, Option); -pub struct SimulationResultInner { - pub result: Result<(), BlockSimError>, - pub submission_ref: SubmissionRef, - // TODO: move up - pub paused_until: Option, - pub 
submission: SignedBidSubmission, - pub trace: SubmissionTrace, - pub optimistic_version: OptimisticVersion, - pub version: SubmissionVersion, -} - -// TODO: -// - avoid sending blobs, and validate them here on a blocking task -// - send only block deltas -// - use SSZ not json -pub struct SimulatorManager { - simulators: Vec, - requests: PendingRquests, - priority_requests: PendingRquests, - last_bid_slot: u64, - local_telemetry: LocalTelemetry, - sim_result_tx: crossbeam_channel::Sender, - /// If we have any synced simulator - accept_optimistic: bool, - /// If we failed to demote a builder in the DB - pub failsafe_triggered: Arc, -} - -impl SimulatorManager { - pub fn new( - configs: Vec, - sim_result_tx: crossbeam_channel::Sender, - ) -> Self { - let client = - reqwest::ClientBuilder::new().timeout(SIMULATOR_REQUEST_TIMEOUT).build().unwrap(); - - let simulators: Vec<_> = configs - .into_iter() - .map(|config| SimulatorClient::new(client.clone(), config)) - .collect(); - - let requests = PendingRquests::with_capacity(200); - let priority_requests = PendingRquests::with_capacity(30); - - if !is_local_dev() { - spawn_tracked!({ - let sync_tx = sim_result_tx.clone(); - let simulators = simulators.clone(); - async move { - loop { - for (id, simulator) in simulators.iter().enumerate() { - let is_synced = simulator.is_synced().await.unwrap_or(false); - if sync_tx.try_send(Event::SimulatorSync { id, is_synced }).is_err() { - error!("failed to send sync result to sim manager"); - } - SimulatorMetrics::simulator_sync(simulator.endpoint(), is_synced); - } - - tokio::time::sleep(Duration::from_secs(1)).await; - } - } - }); - } - - Self { - simulators, - requests, - priority_requests, - - last_bid_slot: 0, - local_telemetry: LocalTelemetry::default(), - sim_result_tx, - - accept_optimistic: true, - failsafe_triggered: Arc::new(AtomicBool::new(false)), - } - } - - pub fn can_process_optimistic_submission(&self) -> bool { - self.accept_optimistic && 
!self.failsafe_triggered.load(Ordering::Relaxed) - } - - pub fn handle_sync_status(&mut self, id: usize, is_synced: bool) { - self.simulators[id].is_synced = is_synced; - let new = self.simulators.iter().any(|s| s.can_simulate_light()); - let prev = self.accept_optimistic; - if new != prev { - warn!(prev, new, "changing accept_optimistic simulation status"); - } - self.accept_optimistic = new; - } - - pub fn handle_sim_request(&mut self, req: SimulatorRequest, fast_track: bool) { - assert_eq!(req.bid_slot(), self.last_bid_slot); - - self.local_telemetry.sims_reqs += 1; - if let Some(id) = self.next_sim_client() { - self.local_telemetry.sims_sent_immediately += 1; - self.spawn_sim(id, req) - } else if fast_track { - self.priority_requests.store(req, &mut self.local_telemetry) - } else { - self.requests.store(req, &mut self.local_telemetry) - } - } - - pub fn handle_merge_request(&mut self, req: BlockMergeRequest) { - self.local_telemetry.merge_reqs += 1; - if let Some(id) = self.next_merge_client() { - let client = &mut self.simulators[id]; - let to_send = client.merge_request_builder(); - client.pending += 1; - - self.local_telemetry.max_in_flight = - self.local_telemetry.max_in_flight.max(client.pending); - let timer = SimulatorMetrics::block_merge_timer(client.endpoint()); - let tx = self.sim_result_tx.clone(); - spawn_tracked!(async move { - debug!(bid_slot = %req.bid_slot, block_hash = %req.block_hash, "sending merge request"); - let res = SimulatorClient::do_merge_request(&req, to_send).await; - if res.is_ok() { - timer.stop_and_record(); - } else { - timer.stop_and_discard(); - } - SimulatorMetrics::block_merge_status(res.is_ok()); - - let result = (id, res); - - let _ = tx.try_send(Event::MergeResult(result)); - }); - } else { - self.local_telemetry.dropped_merge_reqs += 1; - warn!("no client available for merging! 
Dropping request"); - } - } - - pub fn handle_task_response(&mut self, id: usize, paused_until: Option) { - let sim = &mut self.simulators[id]; - sim.pending = sim.pending.saturating_sub(1); - sim.paused_until = sim.paused_until.max(paused_until); // keep highest pause - - if let Some(id) = self.next_sim_client() && - let Some(req) = self.priority_requests.next_req().or(self.requests.next_req()) - { - self.spawn_sim(id, req); - } - } - - pub fn spawn_sim(&mut self, id: usize, req: SimulatorRequest) { - const PAUSE_DURATION: Duration = Duration::from_secs(60); - - let client = &mut self.simulators[id]; - let (to_send, sim_method) = client.sim_request_builder(req.submission.fork_name()); - client.pending += 1; - - self.local_telemetry.max_in_flight = self.local_telemetry.max_in_flight.max(client.pending); - let timer = SimulatorMetrics::timer(client.endpoint()); - let tx = self.sim_result_tx.clone(); - spawn_tracked!(async move { - let start_sim = Nanos::now(); - let block_hash = req.submission.block_hash(); - debug!(%block_hash, "sending simulation request"); - - let optimistic_version = req.optimistic_version(); - SimulatorMetrics::sim_count(optimistic_version.is_optimistic()); - let mut res = - SimulatorClient::do_sim_request(&req.request, req.is_top_bid, sim_method, to_send) - .await; - let time = timer.stop_and_record(); - - debug!(%block_hash, time_secs = time, ?res, "simulation completed"); - - let paused_until = if let Err(err) = res.as_ref() { - SimulatorMetrics::sim_status(false); - if err.is_temporary() { Some(Instant::now() + PAUSE_DURATION) } else { None } - } else { - SimulatorMetrics::sim_status(true); - None - }; - - if let Some(got) = req.tx_root { - let expected = req.submission.transactions_root(); - - if expected != got { - res = Err(BlockSimError::InvalidTxRoot { got, expected }) - } - } - - record_submission_step("simulation", start_sim.elapsed()); - - let result = ( - id, - Some(SimulationResultInner { - submission_ref: req.submission_ref, - 
result: res, - paused_until, - submission: req.submission, - trace: req.trace, - optimistic_version, - version: req.version, - }), - ); - - let _ = tx.try_send(Event::SimResult(result)); - }); - } - - fn next_sim_client(&self) -> Option { - self.simulators - .iter() - .enumerate() - .filter(|(_, s)| s.can_simulate()) - .min_by_key(|(_, s)| s.pending) - .map(|(i, _)| i) - } - - fn next_merge_client(&self) -> Option { - self.simulators - .iter() - .enumerate() - .filter(|(_, s)| s.can_merge()) - .min_by_key(|(_, s)| s.pending) - .map(|(i, _)| i) - } - - pub fn on_new_slot(&mut self, bid_slot: u64) { - if self.last_bid_slot > 0 { - self.report(); - } - - self.last_bid_slot = bid_slot; - self.requests.clear(bid_slot); - self.priority_requests.clear(bid_slot); - let now = Instant::now(); - for s in self.simulators.iter_mut() { - if s.paused_until.is_some_and(|until| until < now) { - s.paused_until = None; - } - } - } - - fn report(&mut self) { - let tel = std::mem::take(&mut self.local_telemetry); - - SimulatorMetrics::sim_mananger_count("sims_sent_immediately", tel.sims_sent_immediately); - SimulatorMetrics::sim_mananger_count("sims_reqs_dropped", tel.sims_reqs_dropped); - SimulatorMetrics::sim_mananger_count("stale_sim_reqs", tel.stale_sim_reqs); - SimulatorMetrics::sim_manager_gauge("max_pending", tel.max_pending); - SimulatorMetrics::sim_manager_gauge("max_in_flight", tel.max_in_flight); - SimulatorMetrics::sim_mananger_count("merge_reqs", tel.merge_reqs); - SimulatorMetrics::sim_mananger_count("dropped_merge_reqs", tel.dropped_merge_reqs); - - info!( - bid_slot = self.last_bid_slot, - sims_reqs = tel.sims_reqs, - sims_sent_immediately = tel.sims_sent_immediately, - sims_reqs_dropped = tel.sims_reqs_dropped, - stale_sim_reqs = tel.stale_sim_reqs, - max_pending = tel.max_pending, - max_in_flight = tel.max_in_flight, - merge_reqs = tel.merge_reqs, - dropped_merge_reqs = tel.dropped_merge_reqs, - "sim manager telemetry" - ) - } -} - -/// Pending requests, we only keep 
the last one for each builder -struct PendingRquests { - map: Vec, - sort_keys: Vec<(u8, u64)>, - builder_pubkeys: Vec, -} - -impl PendingRquests { - fn with_capacity(capacity: usize) -> Self { - Self { - map: Vec::with_capacity(capacity), - builder_pubkeys: Vec::with_capacity(capacity), - sort_keys: Vec::with_capacity(capacity), - } - } - - fn store(&mut self, req: SimulatorRequest, local_telemetry: &mut LocalTelemetry) { - if let Some((i, _)) = - self.builder_pubkeys.iter_mut().enumerate().find(|(_, r)| **r == *req.builder_pubkey()) - { - if req.on_receive_ns() > self.map[i].on_receive_ns() { - self.sort_keys[i] = req.sort_key(); - self.builder_pubkeys[i] = *req.builder_pubkey(); - self.map[i] = req; - } - - local_telemetry.sims_reqs_dropped += 1; - } else { - self.sort_keys.push(req.sort_key()); - self.builder_pubkeys.push(*req.builder_pubkey()); - self.map.push(req); - } - - local_telemetry.max_pending = local_telemetry.max_pending.max(self.map.len()) - } - - fn next_req(&mut self) -> Option { - let i = self.sort_keys.iter().enumerate().max_by_key(|(_, r)| *r).map(|(i, _)| i)?; - - self.sort_keys.swap_remove(i); - self.builder_pubkeys.swap_remove(i); - Some(self.map.swap_remove(i)) - } - - /// Clear backlog of simulations from the previous bid slot, this closes all optimistic - /// submissions and non-optimistic ones which have timed out - fn clear(&mut self, bid_slot: u64) { - let mut i = 0; - - while i < self.map.len() { - let req = &self.map[i]; - if req.bid_slot() < bid_slot { - self.sort_keys.swap_remove(i); - self.builder_pubkeys.swap_remove(i); - self.map.swap_remove(i); - } else { - i += 1; - } - } - } -} diff --git a/crates/relay/src/auctioneer/simulator/mod.rs b/crates/relay/src/auctioneer/simulator/mod.rs deleted file mode 100644 index f53dfb583..000000000 --- a/crates/relay/src/auctioneer/simulator/mod.rs +++ /dev/null @@ -1,144 +0,0 @@ -use std::{collections::HashMap, sync::Arc}; - -use alloy_primitives::{Address, B256, U256}; -use helix_common::{ 
- SubmissionTrace, ValidatorPreferences, api::builder_api::InclusionListWithMetadata, - bid_submission::OptimisticVersion, simulator::BlockSimError, -}; -use helix_types::{ - BidTrace, BlobsBundle, BlsPublicKeyBytes, BlsSignatureBytes, BuilderInclusionResult, - ExecutionPayload, ExecutionRequests, MergeableOrderWithOrigin, MergedBlockTrace, - SignedBidSubmission, SubmissionVersion, -}; - -use crate::auctioneer::types::SubmissionRef; - -pub mod client; -pub mod manager; - -// TODO: refactor this in a SignedBidSubmission + extra fields -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct BlockSimRequest { - #[serde(with = "serde_utils::quoted_u64")] - pub registered_gas_limit: u64, - pub message: BidTrace, - pub execution_payload: ExecutionPayload, - pub signature: BlsSignatureBytes, - pub proposer_preferences: ValidatorPreferences, - pub blobs_bundle: Option>, - pub execution_requests: Option>, - pub parent_beacon_block_root: Option, - pub inclusion_list: Option, - pub apply_blacklist: bool, -} - -impl BlockSimRequest { - pub fn new( - registered_gas_limit: u64, - block: &SignedBidSubmission, - proposer_preferences: ValidatorPreferences, - parent_beacon_block_root: Option, - inclusion_list: Option, - ) -> Self { - Self { - registered_gas_limit, - message: block.bid_trace().clone(), - execution_payload: block.execution_payload_ref().clone(), - signature: *block.signature(), - apply_blacklist: proposer_preferences.filtering.is_regional(), - proposer_preferences, - blobs_bundle: Some(block.blobs_bundle().clone()), - execution_requests: Some(block.execution_requests_ref().clone()), - parent_beacon_block_root, - inclusion_list, - } - } -} - -#[derive(Debug, Clone, serde::Serialize)] -pub struct BlockMergeRequestRef<'a> { - /// The original payload value - pub original_value: U256, - pub proposer_fee_recipient: Address, - pub execution_payload: &'a ExecutionPayload, - pub parent_beacon_block_root: Option, - pub merging_data: &'a 
[MergeableOrderWithOrigin], - pub trace: MergedBlockTrace, -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct BlockMergeRequest { - pub bid_slot: u64, - /// The serialized request - pub request: serde_json::Value, - /// The block hash of the execution payload - pub block_hash: B256, -} - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -pub struct JsonRpcError { - pub message: String, -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -pub struct BlockSimRpcResponse { - pub error: Option, -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -#[serde(untagged)] -pub enum RpcResult { - Ok { result: T }, - Err { error: JsonRpcError }, -} - -pub type BlockMergeResult = (usize, Result); - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct BlockMergeResponse { - pub base_block_hash: B256, - pub execution_payload: ExecutionPayload, - pub execution_requests: ExecutionRequests, - /// Versioned hashes of the appended blob transactions. 
- pub appended_blobs: Vec, - /// Total value for the proposer - pub proposer_value: U256, - pub builder_inclusions: HashMap, - pub trace: MergedBlockTrace, -} - -pub struct SimulatorRequest { - pub is_optimistic: bool, - pub request: BlockSimRequest, - pub is_top_bid: bool, - pub version: SubmissionVersion, - pub submission: SignedBidSubmission, - pub submission_ref: SubmissionRef, - pub trace: SubmissionTrace, - // only Some for dehydrated submissions - pub tx_root: Option, -} - -impl SimulatorRequest { - pub fn on_receive_ns(&self) -> u64 { - self.trace.receive_ns.0 - } - - // TODO: use a "score" eg how close to top bid even if below - pub fn sort_key(&self) -> (u8, u64) { - let top = if self.is_top_bid { 1 } else { 0 }; - (top, u64::MAX - self.on_receive_ns()) - } - - pub fn bid_slot(&self) -> u64 { - self.request.message.slot - } - - pub fn builder_pubkey(&self) -> &BlsPublicKeyBytes { - &self.request.message.builder_pubkey - } - - pub fn optimistic_version(&self) -> OptimisticVersion { - if self.is_optimistic { OptimisticVersion::V1 } else { OptimisticVersion::NotOptimistic } - } -} diff --git a/crates/relay/src/auctioneer/submit_block.rs b/crates/relay/src/auctioneer/submit_block.rs index 080da5a54..9332b9dac 100644 --- a/crates/relay/src/auctioneer/submit_block.rs +++ b/crates/relay/src/auctioneer/submit_block.rs @@ -2,11 +2,14 @@ use std::sync::atomic::Ordering; use alloy_primitives::{Address, B256, U256}; use flux::timing::Nanos; +use flux::spine::SpineProducers; use helix_common::{ self, BuilderInfo, SubmissionTrace, + api::builder_api::InclusionListWithMetadata, bid_submission::OptimisticVersion, metrics::{BID_ADJUSTMENT_LATENCY, HYDRATION_CACHE_HITS}, record_submission_step, + simulator::{SimRequest, SubmissionFormat}, }; use helix_types::{ BidAdjustmentData, BlockValidationError, MergeableOrdersWithPref, SignedBidSubmission, @@ -19,11 +22,14 @@ use crate::{ auctioneer::{ bid_adjustor::BidAdjustor, context::{Context, send_submission_result}, - 
simulator::{BlockSimRequest, SimulatorRequest, manager::SimulationResult}, types::{PayloadEntry, SlotData, Submission, SubmissionData, SubmissionRef}, }, housekeeper::PayloadAttributesUpdate, - spine::HelixSpineProducers, + simulator::{SimInboundPayload, SimulatorRequest, tile::SimulationResult}, + spine::{ + HelixSpineProducers, + messages::{ToSimKind, ToSimMsg}, + }, }; impl Context { @@ -58,11 +64,11 @@ impl Context { .with_label_values(&[strategy]) .observe(start.elapsed().as_micros()); - self.store_data_and_sim(sim_request, adjusted_block, true); + self.store_data_and_sim(sim_request, adjusted_block, true, producers); } } - self.store_data_and_sim(req, entry, false); + self.store_data_and_sim(req, entry, false, producers); if self.config.block_merging_config.is_enabled && let Some(data) = merging_data @@ -73,7 +79,7 @@ impl Context { if is_top_bid { self.block_merger.update_base_block(base_block); } - self.request_merged_block(); + self.request_merged_block(producers); } } @@ -116,7 +122,7 @@ impl Context { if is_top_bid { self.block_merger.update_base_block(*result.submission.block_hash()); } - self.request_merged_block(); + self.request_merged_block(producers); if need_send_result { let block_hash = *result.submission.block_hash(); @@ -194,20 +200,20 @@ impl Context { record_submission_step("validated", start_val.elapsed()); trace!("validated"); - let (optimistic_version, is_top_bid) = - if self.sim_manager.can_process_optimistic_submission() && - self.should_process_optimistically(&submission, &builder_info, slot_data) - { - let is_top_bid = self.bid_sorter.sort( - submission_data.version, - &submission, - &mut submission_data.trace, - true, - ); - (OptimisticVersion::V1, is_top_bid) - } else { - (OptimisticVersion::NotOptimistic, false) - }; + let (optimistic_version, is_top_bid) = if self.accept_optimistic.load(Ordering::Relaxed) && + !self.failsafe_triggered.load(Ordering::Relaxed) && + self.should_process_optimistically(&submission, &builder_info, 
slot_data) + { + let is_top_bid = self.bid_sorter.sort( + submission_data.version, + &submission, + &mut submission_data.trace, + true, + ); + (OptimisticVersion::V1, is_top_bid) + } else { + (OptimisticVersion::NotOptimistic, false) + }; let merging_data = submission_data.merging_data.map(|data| MergeData { is_top_bid, @@ -229,6 +235,7 @@ impl Context { is_top_bid, trace: submission_data.trace, bid_adjustment_data: submission_data.bid_adjustment_data, + sim_bytes: submission_data.sim_bytes, }; Ok((validated, optimistic_version, merging_data)) @@ -240,18 +247,30 @@ impl Context { slot_data: &SlotData, is_optimistic: bool, ) -> (SimulatorRequest, PayloadEntry) { - let request = BlockSimRequest::new( - slot_data.registration_data.entry.registration.message.gas_limit, - &validated.submission, - slot_data.registration_data.entry.preferences.clone(), - validated.payload_attributes.parent_beacon_block_root, - slot_data.il.clone(), - ); + let request = SimRequest { + registered_gas_limit: slot_data.registration_data.entry.registration.message.gas_limit, + apply_blacklist: slot_data.registration_data.entry.preferences.filtering.is_regional(), + parent_beacon_block_root: validated + .payload_attributes + .parent_beacon_block_root + .unwrap_or_default(), + inclusion_list: slot_data + .il + .clone() + .unwrap_or(InclusionListWithMetadata { txs: vec![] }), + format: validated.sim_bytes.as_ref().map(|(_, f)| *f).unwrap_or_default(), + signed_bid_submission: match validated.sim_bytes { + Some((bytes, _)) => alloy_primitives::Bytes(bytes), + None => ssz::Encode::as_ssz_bytes(&validated.submission).into(), + }, + }; let req = SimulatorRequest { is_optimistic, submission_ref: validated.submission_ref, request, + builder_pubkey: *validated.submission.builder_public_key(), + bid_slot: validated.submission.slot().as_u64(), is_top_bid: validated.is_top_bid, submission: validated.submission.clone(), trace: validated.trace, @@ -277,6 +296,7 @@ impl Context { req: SimulatorRequest, entry: 
PayloadEntry, fast_track: bool, + producers: &mut HelixSpineProducers, ) { let is_adjusted = entry.is_adjusted(); let block_hash = *req.submission.block_hash(); @@ -291,7 +311,9 @@ impl Context { self.db.store_block_submission(sub_clone, req.trace, opt_version, is_adjusted, live_ts); - self.sim_manager.handle_sim_request(req, fast_track); + let ix = + self.sim_inbound.push(SimInboundPayload::SimRequest { req: Box::new(req), fast_track }); + producers.produce(ToSimMsg { kind: ToSimKind::Request, ix, bid_slot: 0 }); } fn should_process_optimistically( @@ -317,9 +339,10 @@ impl Context { false } - fn request_merged_block(&mut self) { + fn request_merged_block(&mut self, producers: &mut HelixSpineProducers) { if let Some(merge_request) = self.block_merger.fetch_merge_request() { - self.sim_manager.handle_merge_request(merge_request); + let ix = self.sim_inbound.push(SimInboundPayload::MergeRequest(merge_request)); + producers.produce(ToSimMsg { kind: ToSimKind::Request, ix, bid_slot: 0 }); } } } @@ -333,6 +356,7 @@ pub struct ValidatedData<'a> { pub is_top_bid: bool, pub trace: SubmissionTrace, pub bid_adjustment_data: Option, + pub sim_bytes: Option<(bytes::Bytes, SubmissionFormat)>, } pub struct MergeData { diff --git a/crates/relay/src/auctioneer/types.rs b/crates/relay/src/auctioneer/types.rs index 333eae443..662962721 100644 --- a/crates/relay/src/auctioneer/types.rs +++ b/crates/relay/src/auctioneer/types.rs @@ -14,6 +14,7 @@ use helix_common::{ proposer_api::GetHeaderParams, }, metrics::BID_CREATION_LATENCY, + simulator::SubmissionFormat, }; use helix_tcp_types::{BidSubmissionFlags, BidSubmissionHeader}; use helix_types::{ @@ -40,10 +41,11 @@ use crate::{ HEADER_API_KEY, HEADER_API_TOKEN, HEADER_HYDRATE, HEADER_IS_MERGEABLE, HEADER_MERGE_TYPE, HEADER_SEQUENCE, HEADER_WITH_ADJUSTMENTS, proposer::ProposerApiError, }, - auctioneer::{BlockMergeResult, simulator::manager::SimulationResult}, + auctioneer::BlockMergeResult, bid_decoder::{Encoding, SubmissionType}, 
gossip::BroadcastPayloadParams, housekeeper::PayloadAttributesUpdate, + simulator::tile::SimulationResult, }; #[derive(Clone, Copy, Debug)] @@ -200,6 +202,9 @@ pub struct SubmissionData { pub version: SubmissionVersion, pub withdrawals_root: B256, pub trace: SubmissionTrace, + /// Decompressed bytes to forward verbatim to the simulator, avoiding re-encoding. + /// `None` means the auctioneer must encode from `submission`. + pub sim_bytes: Option<(bytes::Bytes, SubmissionFormat)>, } impl Deref for SubmissionData { @@ -515,10 +520,6 @@ pub enum Event { }, GossipPayload(BroadcastPayloadParams), SimResult(SimulationResult), - SimulatorSync { - id: usize, - is_synced: bool, - }, MergeResult(BlockMergeResult), } @@ -531,7 +532,6 @@ impl Event { Event::GetPayload { .. } => "GetPayload", Event::GossipPayload(_) => "GossipPayload", Event::SimResult(_) => "SimResult", - Event::SimulatorSync { .. } => "SimulatorSync", Event::MergeResult(_) => "MergeResult", } } diff --git a/crates/relay/src/bid_decoder/tile.rs b/crates/relay/src/bid_decoder/tile.rs index a29f67516..2cf0a170c 100644 --- a/crates/relay/src/bid_decoder/tile.rs +++ b/crates/relay/src/bid_decoder/tile.rs @@ -9,7 +9,7 @@ use flux::{ use flux_utils::{DCache, SharedVector}; use helix_common::{ RelayConfig, SubmissionTrace, chain_info::ChainInfo, local_cache::LocalCache, - record_submission_step, + record_submission_step, simulator::SubmissionFormat, }; use helix_types::{ BidAdjustmentData, BlockMergingData, BlsPublicKeyBytes, MergeableOrdersWithPref, @@ -158,7 +158,7 @@ impl DecoderTile { tracing::Span::current().record("id", tracing::field::display(header.id)); record_submission_step("worker_recv", sent_at.elapsed()); trace!("received by worker"); - let (submission, withdrawals_root, version, merging_data, bid_adjustment_data) = + let (submission, withdrawals_root, version, merging_data, bid_adjustment_data, sim_bytes) = Self::try_handle_block_submission( cache, chain_info, @@ -209,6 +209,7 @@ impl DecoderTile { 
         bid_adjustment_data,
         withdrawals_root,
         trace,
+        sim_bytes,
     };
 
     Ok((submission_data, tracing::Span::current()))
@@ -225,7 +226,14 @@
         buffer: &mut Vec,
         trace: &mut SubmissionTrace,
     ) -> Result<
-        (Submission, B256, SubmissionVersion, Option, Option),
+        (
+            Submission,
+            B256,
+            SubmissionVersion,
+            Option,
+            Option,
+            Option<(bytes::Bytes, SubmissionFormat)>,
+        ),
         BuilderApiError,
     > {
         let mut decoder = SubmissionDecoder::new(header.compression, header.encoding);
@@ -270,13 +278,25 @@
             decode_dehydrated(&mut decoder, body, trace, chain_info, &flags)?
         } else if with_mergeable_data {
             decode_merge(&mut decoder, body, trace, chain_info, &flags)?
+
         } else {
             decode_default(&mut decoder, body, trace, chain_info, &flags)?
         };
 
+        // For plain SSZ full submissions, capture the decompressed bytes so the
+        // auctioneer can forward them to the simulator without re-encoding.
+        let sim_bytes = if !is_dehydrated &&
+            !with_mergeable_data &&
+            matches!(header.encoding, crate::bid_decoder::Encoding::Ssz)
+        {
+            Some((body.clone(), SubmissionFormat::FullSsz))
+        } else {
+            None
+        };
+
         let withdrawals_root = submission.withdrawal_root();
         let version = SubmissionVersion::new(trace.receive_ns.0, header.sequence_number);
 
-        Ok((submission, withdrawals_root, version, merging_data, bid_adjustment_data))
+        Ok((submission, withdrawals_root, version, merging_data, bid_adjustment_data, sim_bytes))
     }
 }
diff --git a/crates/relay/src/lib.rs b/crates/relay/src/lib.rs
index 333069dd0..53a16ec11 100644
--- a/crates/relay/src/lib.rs
+++ b/crates/relay/src/lib.rs
@@ -5,6 +5,7 @@ mod bid_decoder;
 mod gossip;
 mod housekeeper;
 mod network;
+mod simulator;
 mod spine;
 mod tcp_bid_recv;
 mod website;
@@ -26,14 +27,15 @@ pub use crate::{
         start_api_service,
     },
     auctioneer::{
-        Auctioneer, AuctioneerHandle, BidSorter, BlockSimRequest, Context, Event, PayloadEntry,
-        RegWorker, RegWorkerHandle, SimulatorClient, SimulatorManager, SimulatorRequest, SlotData,
-        SubmissionPayload, SubmissionRef,
+        Auctioneer, AuctioneerHandle, BidSorter, Context, Event, InternalBidSubmission,
+        PayloadEntry, RegWorker, RegWorkerHandle, SimulatorClient, SimulatorRequest, SimulatorTile,
+        SlotData, SubmissionPayload, SubmissionRef,
     },
     beacon::start_beacon_client,
     bid_decoder::{DecoderTile, SubmissionDataWithSpan},
     housekeeper::start_housekeeper,
     network::RelayNetworkManager,
+    simulator::{SimInboundPayload, SimOutboundPayload},
     spine::HelixSpine,
     tcp_bid_recv::{
         BidSubmissionFlags, BidSubmissionHeader, BidSubmissionResponse, BidSubmissionTcpListener,
diff --git a/crates/relay/src/main.rs b/crates/relay/src/main.rs
index 963ce90e1..1725ab46b 100644
--- a/crates/relay/src/main.rs
+++ b/crates/relay/src/main.rs
@@ -27,9 +27,10 @@ use helix_common::{
 use helix_relay::{
     Api, Auctioneer, AuctioneerHandle, BidSorter, BidSubmissionTcpListener, DbHandle, DecoderTile,
     DefaultBidAdjustor,
FutureBidSubmissionResult, HelixSpine, RegWorker, RegWorkerHandle, - RelayNetworkManager, S3PayloadSaver, SubmissionDataWithSpan, WebsiteService, - spawn_tokio_monitoring, start_admin_service, start_api_service, start_beacon_client, - start_db_service, start_housekeeper, + RelayNetworkManager, S3PayloadSaver, SimInboundPayload, SimOutboundPayload, + SimulatorTile, SubmissionDataWithSpan, WebsiteService, spawn_tokio_monitoring, + start_admin_service, start_api_service, start_beacon_client, start_db_service, + start_housekeeper, }; use helix_types::BlsKeypair; use tikv_jemallocator::Jemalloc; @@ -227,6 +228,21 @@ async fn run(instance_id: String, config: RelayConfig, keypair: BlsKeypair) -> e ), ); + let sim_inbound = Arc::new(SharedVector::::with_capacity( + MAX_SUBMISSIONS_PER_SLOT, + )); + let sim_outbound = Arc::new(SharedVector::::with_capacity( + MAX_SUBMISSIONS_PER_SLOT, + )); + + let (accept_optimistic, failsafe_triggered, sim_tile) = SimulatorTile::create( + config.simulators.clone(), + sim_inbound.clone(), + sim_outbound.clone(), + ); + let sim_core = config.cores.simulator; + attach_tile(sim_tile, spine, TileConfig::new(sim_core, ThreadPriority::OSDefault)); + let auctioneer_core = config.cores.auctioneer; let auctioneer = Auctioneer::new( chain_info.as_ref().clone(), @@ -235,12 +251,15 @@ async fn run(instance_id: String, config: RelayConfig, keypair: BlsKeypair) -> e BidSorter::new(top_bid_tx), local_cache.as_ref().clone(), DefaultBidAdjustor {}, - event_tx, event_rx, auctioneer_core, future_results, decoded, auctioneer_handle, + sim_inbound, + sim_outbound, + accept_optimistic, + failsafe_triggered, ); attach_tile( auctioneer, diff --git a/crates/relay/src/auctioneer/simulator/client.rs b/crates/relay/src/simulator/client.rs similarity index 66% rename from crates/relay/src/auctioneer/simulator/client.rs rename to crates/relay/src/simulator/client.rs index f6003c0eb..bbe3b0a5b 100644 --- a/crates/relay/src/auctioneer/simulator/client.rs +++ 
b/crates/relay/src/simulator/client.rs @@ -1,7 +1,8 @@ -use std::time::Instant; - use alloy_primitives::{Address, U256}; -use helix_common::{SimulatorConfig, simulator::BlockSimError}; +use helix_common::{ + SimulatorConfig, + simulator::{BlockSimError, BlockSimRequest, SimRequest}, +}; use helix_types::ForkName; use reqwest::{ RequestBuilder, @@ -9,14 +10,27 @@ use reqwest::{ }; use serde::de::DeserializeOwned; use serde_json::{Value, json}; +use ssz::Encode; use tracing::{debug, error}; -use crate::auctioneer::{ - JsonRpcError, - simulator::{ - BlockMergeRequest, BlockMergeResponse, BlockSimRequest, BlockSimRpcResponse, RpcResult, - }, -}; +use crate::simulator::{BlockMergeRequest, BlockMergeResponse}; + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +struct JsonRpcError { + message: String, +} + +#[derive(Debug, serde::Deserialize)] +struct BlockSimRpcResponse { + error: Option, +} + +#[derive(serde::Serialize, serde::Deserialize, Debug)] +#[serde(untagged)] +enum RpcResult { + Ok { result: T }, + Err { error: JsonRpcError }, +} #[derive(Clone)] pub struct SimulatorClient { @@ -24,69 +38,41 @@ pub struct SimulatorClient { pub config: SimulatorConfig, pub sim_method_v4: String, pub sim_method_v5: String, - pub is_synced: bool, - /// For certain errors we pause sims for some time to allow time for the node to recover - // TODO: can we get these errors even if the node is reporting that it's synced? 
- pub paused_until: Option, - /// Current number of pending tasks (validation or merging) - pub pending: usize, + /// If set, use SSZ binary endpoint instead of JSON-RPC for simulations + pub ssz_url: Option, } impl SimulatorClient { pub fn new(client: reqwest::Client, config: SimulatorConfig) -> Self { let sim_method_v4 = format!("{}_validateBuilderSubmissionV4", config.namespace); let sim_method_v5 = format!("{}_validateBuilderSubmissionV5", config.namespace); - Self { - client, - config, - sim_method_v4, - sim_method_v5, - is_synced: false, - paused_until: None, - pending: 0, - } + let ssz_url = config.ssz_url.clone(); + Self { client, config, sim_method_v4, sim_method_v5, ssz_url } } pub fn endpoint(&self) -> &str { &self.config.url } - /// A lighter check to decide whether we should accept optimistic submissions - pub fn can_simulate_light(&self) -> bool { - self.is_synced && - match self.paused_until { - Some(until) => Instant::now() > until, - None => true, - } + pub fn ssz_request_builder(&self) -> Option { + self.ssz_url.as_ref().map(|url| self.client.post(format!("{url}/validate"))) } - pub fn can_simulate(&self) -> bool { - self.can_simulate_light() && self.pending < self.config.max_concurrent_tasks + pub fn sim_request_builder(&self, fork: ForkName) -> (RequestBuilder, &str) { + let method = if fork == ForkName::Fulu { &self.sim_method_v5 } else { &self.sim_method_v4 }; + (self.client.post(&self.config.url), method) } - pub fn can_merge(&self) -> bool { - self.can_simulate() && self.config.is_merging_simulator - } - - pub fn sim_request_builder(&self, fork: ForkName) -> (RequestBuilder, String) { - let mut sim_method = &self.sim_method_v4; - if fork == ForkName::Fulu { - sim_method = &self.sim_method_v5; - } - - (self.client.post(&self.config.url), sim_method.clone()) - } - - pub async fn do_sim_request( + pub async fn do_json_sim_request( request: &BlockSimRequest, is_top_bid: bool, - sim_method: String, + sim_method: &str, to_send: RequestBuilder, ) 
-> Result<(), BlockSimError> { let mut headers = HeaderMap::new(); if is_top_bid { headers.insert("X-High-Priority", HeaderValue::from_static("true")); - }; + } let rpc_payload = json!({ "jsonrpc": "2.0", @@ -95,9 +81,11 @@ impl SimulatorClient { "params": [request] }); - let to_send = to_send.headers(headers).json(&rpc_payload); - - let res = match Self::rpc_request::(to_send).await { + let res = match Self::rpc_request::( + to_send.headers(headers).json(&rpc_payload), + ) + .await + { Ok(res) => res, Err(err) => { error!(%err, "failed rpc simulation"); @@ -112,7 +100,38 @@ impl SimulatorClient { Ok(()) } - pub async fn balance_request(&self, address: &Address) -> Result { + pub async fn do_sim_request( + ssz_req: &SimRequest, + is_top_bid: bool, + to_send: RequestBuilder, + ) -> Result<(), BlockSimError> { + Self::ssz_request(to_send.body(ssz_req.as_ssz_bytes()), is_top_bid).await + } + + async fn ssz_request(to_send: RequestBuilder, is_top_bid: bool) -> Result<(), BlockSimError> { + let mut headers = HeaderMap::new(); + headers.insert("Content-Type", HeaderValue::from_static("application/octet-stream")); + if is_top_bid { + headers.insert("X-High-Priority", HeaderValue::from_static("true")); + } + + let res = match to_send.headers(headers).send().await { + Ok(r) => r, + Err(err) => { + error!(%err, "failed ssz simulation"); + return Err(BlockSimError::RpcError); + } + }; + + match res.status().as_u16() { + 200 => Ok(()), + 400 => Err(BlockSimError::BlockValidationFailed(res.text().await.unwrap_or_default())), + 424 => Err(BlockSimError::HydrationMiss), + _ => Err(BlockSimError::RpcError), + } + } + + pub async fn balance_request(&self, address: &Address) -> Result { let rpc_payload = json!({ "jsonrpc": "2.0", "id": "1", @@ -121,12 +140,9 @@ impl SimulatorClient { }); let to_send = self.client.post(&self.config.url).json(&rpc_payload); - match Self::rpc_request::>(to_send) - .await - .map_err(|e| JsonRpcError { message: e.to_string() })? 
- { + match Self::rpc_request::>(to_send).await.map_err(|e| e.to_string())? { RpcResult::Ok { result } => Ok(result), - RpcResult::Err { error } => Err(error), + RpcResult::Err { error } => Err(error.message), } } @@ -206,6 +222,7 @@ mod test { namespace: "relay".into(), is_merging_simulator: false, max_concurrent_tasks: 1, + ssz_url: None, }); let builder_address = super::Address::from_hex("0xD9d3A3f47a56a987A8119b15C994Bc126337dd27").unwrap(); diff --git a/crates/relay/src/simulator/mod.rs b/crates/relay/src/simulator/mod.rs new file mode 100644 index 000000000..b85a3d808 --- /dev/null +++ b/crates/relay/src/simulator/mod.rs @@ -0,0 +1,97 @@ +use std::collections::HashMap; + +use alloy_primitives::{Address, B256, U256}; +use helix_common::{ + SubmissionTrace, + bid_submission::OptimisticVersion, + simulator::{BlockSimError, SimRequest}, +}; +use helix_types::{ + BlsPublicKeyBytes, BuilderInclusionResult, ExecutionPayload, ExecutionRequests, + MergeableOrderWithOrigin, MergedBlockTrace, SignedBidSubmission, SubmissionVersion, +}; + +use crate::auctioneer::SubmissionRef; + +pub mod client; +pub mod tile; + +pub use tile::SimulatorTile; + +#[derive(Debug, Clone, serde::Serialize)] +pub struct BlockMergeRequestRef<'a> { + /// The original payload value + pub original_value: U256, + pub proposer_fee_recipient: Address, + pub execution_payload: &'a ExecutionPayload, + pub parent_beacon_block_root: Option, + pub merging_data: &'a [MergeableOrderWithOrigin], + pub trace: MergedBlockTrace, +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct BlockMergeRequest { + pub bid_slot: u64, + /// The serialized request + pub request: serde_json::Value, + /// The block hash of the execution payload + pub block_hash: B256, +} + +pub type BlockMergeResult = (usize, Result); + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct BlockMergeResponse { + pub base_block_hash: B256, + pub execution_payload: ExecutionPayload, + pub 
execution_requests: ExecutionRequests, + /// Versioned hashes of the appended blob transactions. + pub appended_blobs: Vec, + /// Total value for the proposer + pub proposer_value: U256, + pub builder_inclusions: HashMap, + pub trace: MergedBlockTrace, +} + +#[derive(Clone)] +pub struct SimulatorRequest { + pub is_optimistic: bool, + pub request: SimRequest, + pub is_top_bid: bool, + pub bid_slot: u64, + pub builder_pubkey: BlsPublicKeyBytes, + pub version: SubmissionVersion, + pub submission: SignedBidSubmission, + pub submission_ref: SubmissionRef, + pub trace: SubmissionTrace, + // only Some for dehydrated submissions + pub tx_root: Option, +} + +/// Large payload stored in `SharedVector` for auctioneer → sim tile transfer. +pub enum SimInboundPayload { + SimRequest { req: Box, fast_track: bool }, + MergeRequest(BlockMergeRequest), +} + +/// Large payload stored in `SharedVector` for sim tile → auctioneer transfer. +pub enum SimOutboundPayload { + SimResult(crate::simulator::tile::SimulationResult), + MergeResult(BlockMergeResult), +} + +impl SimulatorRequest { + pub fn on_receive_ns(&self) -> u64 { + self.trace.receive_ns.0 + } + + // TODO: use a "score" eg how close to top bid even if below + pub fn sort_key(&self) -> (u8, u64) { + let top = if self.is_top_bid { 1 } else { 0 }; + (top, u64::MAX - self.on_receive_ns()) + } + + pub fn optimistic_version(&self) -> OptimisticVersion { + if self.is_optimistic { OptimisticVersion::V1 } else { OptimisticVersion::NotOptimistic } + } +} diff --git a/crates/relay/src/simulator/tile.rs b/crates/relay/src/simulator/tile.rs new file mode 100644 index 000000000..5741dec48 --- /dev/null +++ b/crates/relay/src/simulator/tile.rs @@ -0,0 +1,567 @@ +use std::{ + self, + sync::{ + Arc, + atomic::{AtomicBool, Ordering}, + }, + time::{Duration, Instant}, +}; + +use flux::{ + spine::SpineProducers as _, + tile::{Tile, TileName}, +}; +use flux_utils::SharedVector; +use helix_common::{ + SimulatorConfig, SubmissionTrace, + 
bid_submission::OptimisticVersion, + is_local_dev, + metrics::SimulatorMetrics, + record_submission_step, + simulator::{BlockSimError, BlockSimRequest, SubmissionFormat}, + spawn_tracked, + validator_preferences::{Filtering, ValidatorPreferences}, +}; +use helix_types::{BlsPublicKeyBytes, SignedBidSubmission, SubmissionVersion}; +use ssz::Encode as _; +use tracing::{debug, error, info, warn}; + +use crate::{ + HelixSpine, + simulator::{ + BlockMergeRequest, SimInboundPayload, SimOutboundPayload, client::SimulatorClient, + }, + spine::{ + HelixSpineProducers, + messages::{FromSimMsg, ToSimKind, ToSimMsg}, + }, +}; + +pub struct SimulatorTile { + simulators: Vec, + /// Indices of simulators with an SSZ endpoint — static after construction. + ssz_sim_indices: Vec, + requests: PendingRequests, + priority_requests: PendingRequests, + last_bid_slot: u64, + local_telemetry: LocalTelemetry, + /// Internal channel: async tasks notify the sim tile when work completes. + task_tx: crossbeam_channel::Sender, + rx: crossbeam_channel::Receiver, + sim_inbound: Arc>, + sim_outbound: Arc>, + /// If we have any synced simulator + pub accept_optimistic: Arc, + /// If we failed to demote a builder in the DB + pub failsafe_triggered: Arc, +} + +impl Tile for SimulatorTile { + fn loop_body(&mut self, adapter: &mut flux::spine::SpineAdapter) { + // Process internal task-completion events (async tasks → sim tile). + // Collect first to release the borrow on self.rx before calling &mut self methods. + let events: Vec = self.rx.try_iter().collect(); + for event in events { + match event { + SimTileInternalEvent::TaskDone { id, paused_until, result_ix } => { + self.handle_task_response(id, paused_until, result_ix, &mut adapter.producers); + } + SimTileInternalEvent::SyncStatus { id, is_synced } => { + self.handle_sync_status(id, is_synced); + } + } + } + + // Consume inbound spine messages from the auctioneer. 
+ adapter.consume(|msg: ToSimMsg, _producers| match msg.kind { + ToSimKind::Request => match self.sim_inbound.get(msg.ix) { + Some(payload) => match payload.as_ref() { + SimInboundPayload::SimRequest { req, fast_track } => { + self.handle_sim_request((**req).clone(), *fast_track); + } + SimInboundPayload::MergeRequest(req) => { + self.handle_merge_request(req.clone()); + } + }, + None => error!(?msg, "sim inbound payload not found"), + }, + ToSimKind::NewSlot => { + self.on_new_slot(msg.bid_slot); + } + }); + } + + fn name(&self) -> TileName { + TileName::from_str_truncate("simulator") + } +} + +impl SimulatorTile { + pub fn create( + configs: Vec, + sim_inbound: Arc>, + sim_outbound: Arc>, + ) -> (Arc, Arc, Self) { + let (task_tx, rx) = crossbeam_channel::bounded(512); + + let client = + reqwest::ClientBuilder::new().timeout(SIMULATOR_REQUEST_TIMEOUT).build().unwrap(); + + let simulators: Vec<_> = configs + .into_iter() + .map(|config| SimEntry::new(SimulatorClient::new(client.clone(), config))) + .collect(); + + let requests = PendingRequests::with_capacity(200); + let priority_requests = PendingRequests::with_capacity(30); + + if !is_local_dev() { + let clients: Vec = + simulators.iter().map(|e| e.client.clone()).collect(); + spawn_tracked!({ + let sync_tx = task_tx.clone(); + async move { + loop { + for (id, simulator) in clients.iter().enumerate() { + let is_synced = simulator.is_synced().await.unwrap_or(false); + if sync_tx + .try_send(SimTileInternalEvent::SyncStatus { id, is_synced }) + .is_err() + { + error!("failed to send sync status to sim tile"); + } + SimulatorMetrics::simulator_sync(simulator.endpoint(), is_synced); + } + + tokio::time::sleep(Duration::from_secs(1)).await; + } + } + }); + } + + let accept_optimistic = Arc::new(AtomicBool::new(true)); + let failsafe_triggered = Arc::new(AtomicBool::new(false)); + + let ssz_sim_indices: Vec = simulators + .iter() + .enumerate() + .filter(|(_, s)| s.client.ssz_url.is_some()) + .map(|(i, _)| i) + 
.collect(); + + let tile = Self { + simulators, + ssz_sim_indices, + requests, + priority_requests, + last_bid_slot: 0, + local_telemetry: LocalTelemetry::default(), + task_tx, + rx, + sim_inbound, + sim_outbound, + accept_optimistic: accept_optimistic.clone(), + failsafe_triggered: failsafe_triggered.clone(), + }; + + (accept_optimistic, failsafe_triggered, tile) + } + + fn handle_sync_status(&mut self, id: usize, is_synced: bool) { + self.simulators[id].is_synced = is_synced; + let new = self.simulators.iter().any(|s| s.can_simulate_light()); + let prev = self.accept_optimistic.load(Ordering::Relaxed); + if new != prev { + warn!(prev, new, "changing accept_optimistic simulation status"); + } + self.accept_optimistic.store(new, Ordering::Relaxed); + } + + fn handle_sim_request(&mut self, req: crate::simulator::SimulatorRequest, fast_track: bool) { + assert_eq!(req.bid_slot, self.last_bid_slot); + + self.local_telemetry.sims_reqs += 1; + + let sim_id = self.select_simulator(&req.builder_pubkey); + + if let Some(id) = sim_id { + self.local_telemetry.sims_sent_immediately += 1; + self.spawn_sim(id, req) + } else if fast_track { + self.priority_requests.store(req, &mut self.local_telemetry) + } else { + self.requests.store(req, &mut self.local_telemetry) + } + } + + fn handle_merge_request(&mut self, req: BlockMergeRequest) { + self.local_telemetry.merge_reqs += 1; + if let Some(id) = self.next_client(|s| s.can_merge()) { + let sim = &mut self.simulators[id]; + let to_send = sim.client.merge_request_builder(); + sim.pending += 1; + + self.local_telemetry.max_in_flight = + self.local_telemetry.max_in_flight.max(sim.pending); + let timer = SimulatorMetrics::block_merge_timer(sim.client.endpoint()); + let task_tx = self.task_tx.clone(); + let sim_outbound = self.sim_outbound.clone(); + spawn_tracked!(async move { + debug!(bid_slot = %req.bid_slot, block_hash = %req.block_hash, "sending merge request"); + let res = SimulatorClient::do_merge_request(&req, to_send).await; + 
if res.is_ok() { + timer.stop_and_record(); + } else { + timer.stop_and_discard(); + } + SimulatorMetrics::block_merge_status(res.is_ok()); + + let result_ix = sim_outbound.push(SimOutboundPayload::MergeResult((id, res))); + let _ = task_tx.try_send(SimTileInternalEvent::TaskDone { + id, + paused_until: None, + result_ix, + }); + }); + } else { + self.local_telemetry.dropped_merge_reqs += 1; + warn!("no client available for merging! Dropping request"); + } + } + + fn handle_task_response( + &mut self, + id: usize, + paused_until: Option, + result_ix: usize, + producers: &mut HelixSpineProducers, + ) { + let sim = &mut self.simulators[id]; + sim.pending = sim.pending.saturating_sub(1); + sim.paused_until = sim.paused_until.max(paused_until); // keep highest pause + + producers.produce(FromSimMsg { ix: result_ix }); + + if let Some(id) = self.next_client(|s| s.can_simulate()) && + let Some(req) = self.priority_requests.next_req().or(self.requests.next_req()) + { + self.spawn_sim(id, req); + } + } + + fn spawn_sim(&mut self, id: usize, req: crate::simulator::SimulatorRequest) { + const PAUSE_DURATION: Duration = Duration::from_secs(60); + + let sim = &mut self.simulators[id]; + let dispatch = if let Some(url) = &sim.client.ssz_url { + SimDispatch::Ssz { + to_send: sim.client.client.post(format!("{url}/validate")), + ssz_url: url.clone(), + http: sim.client.client.clone(), + } + } else { + let (builder, method) = sim.client.sim_request_builder(req.submission.fork_name()); + SimDispatch::Json { to_send: builder, method: method.to_owned() } + }; + sim.pending += 1; + + self.local_telemetry.max_in_flight = self.local_telemetry.max_in_flight.max(sim.pending); + let timer = SimulatorMetrics::timer(sim.client.endpoint()); + let task_tx = self.task_tx.clone(); + let sim_outbound = self.sim_outbound.clone(); + spawn_tracked!(async move { + let start_sim = Instant::now(); + let block_hash = req.submission.block_hash(); + debug!(%block_hash, "sending simulation request"); + + 
let optimistic_version = req.optimistic_version(); + SimulatorMetrics::sim_count(optimistic_version.is_optimistic()); + let (mut res, ssz_retry) = match dispatch { + SimDispatch::Ssz { to_send, ssz_url, http } => { + let res = + SimulatorClient::do_sim_request(&req.request, req.is_top_bid, to_send) + .await; + (res, Some((ssz_url, http))) + } + SimDispatch::Json { to_send, method } => { + let filtering = if req.request.apply_blacklist { + Filtering::Regional + } else { + Filtering::Global + }; + let json_req = BlockSimRequest::new( + req.request.registered_gas_limit, + &req.submission, + ValidatorPreferences { filtering, ..Default::default() }, + Some(req.request.parent_beacon_block_root), + Some(req.request.inclusion_list.clone()), + ); + let res = SimulatorClient::do_json_sim_request( + &json_req, + req.is_top_bid, + &method, + to_send, + ) + .await; + (res, None) + } + }; + + // On cache miss, retry with full uncompressed SSZ so the simulator + // can process the submission without a hydration cache entry. 
+ if matches!(res, Err(BlockSimError::HydrationMiss)) { + debug!(%block_hash, "hydration miss — retrying with full SSZ"); + if let Some((ssz_url, http)) = ssz_retry { + let to_send = http.post(format!("{ssz_url}/validate")); + let mut retry_req = req.request.clone(); + retry_req.signed_bid_submission = req.submission.as_ssz_bytes().into(); + retry_req.format = SubmissionFormat::FullSsz; + res = + SimulatorClient::do_sim_request(&retry_req, req.is_top_bid, to_send).await; + } else { + res = Err(BlockSimError::RpcError); + } + } + + let time = timer.stop_and_record(); + + debug!(%block_hash, time_secs = time, ?res, "simulation completed"); + + let paused_until = if let Err(err) = res.as_ref() { + SimulatorMetrics::sim_status(false); + if err.is_temporary() { Some(Instant::now() + PAUSE_DURATION) } else { None } + } else { + SimulatorMetrics::sim_status(true); + None + }; + + if let Some(got) = req.tx_root { + let expected = req.submission.transactions_root(); + + if expected != got { + res = Err(BlockSimError::InvalidTxRoot { got, expected }) + } + } + + record_submission_step("simulation", start_sim.elapsed()); + + let inner = SimulationResultInner { + submission_ref: req.submission_ref, + result: res, + submission: req.submission, + trace: req.trace, + optimistic_version, + version: req.version, + }; + + let result_ix = sim_outbound.push(SimOutboundPayload::SimResult((id, Some(inner)))); + let _ = + task_tx.try_send(SimTileInternalEvent::TaskDone { id, paused_until, result_ix }); + }); + } + + /// Selection priority: + /// 1. Sticky sim with SSZ endpoint (state locality + binary protocol) + /// 2. Any SSZ-capable sim, least pending (binary protocol) + /// 3. 
Any sim, least pending (JSON-RPC fallback; stickiness irrelevant without SSZ) + fn select_simulator(&self, builder_pubkey: &BlsPublicKeyBytes) -> Option { + if !self.ssz_sim_indices.is_empty() { + let sticky = + self.ssz_sim_indices[sticky_sim_index(self.ssz_sim_indices.len(), builder_pubkey)]; + if self.simulators[sticky].can_simulate() { + return Some(sticky); + } + if let Some(id) = self + .ssz_sim_indices + .iter() + .filter(|&&i| self.simulators[i].can_simulate()) + .min_by_key(|&&i| self.simulators[i].pending) + .copied() + { + return Some(id); + } + } + self.next_client(|s| s.can_simulate()) + } + + fn next_client(&self, pred: impl Fn(&SimEntry) -> bool) -> Option { + self.simulators + .iter() + .enumerate() + .filter(|(_, s)| pred(s)) + .min_by_key(|(_, s)| s.pending) + .map(|(i, _)| i) + } + + fn on_new_slot(&mut self, bid_slot: u64) { + if self.last_bid_slot > 0 { + self.report(); + } + + self.last_bid_slot = bid_slot; + self.requests.clear(bid_slot); + self.priority_requests.clear(bid_slot); + let now = Instant::now(); + for s in self.simulators.iter_mut() { + if s.paused_until.is_some_and(|until| until < now) { + s.paused_until = None; + } + } + } + + fn report(&mut self) { + let tel = std::mem::take(&mut self.local_telemetry); + + SimulatorMetrics::sim_mananger_count("sims_sent_immediately", tel.sims_sent_immediately); + SimulatorMetrics::sim_mananger_count("sims_reqs_dropped", tel.sims_reqs_dropped); + SimulatorMetrics::sim_mananger_count("stale_sim_reqs", tel.stale_sim_reqs); + SimulatorMetrics::sim_manager_gauge("max_pending", tel.max_pending); + SimulatorMetrics::sim_manager_gauge("max_in_flight", tel.max_in_flight); + SimulatorMetrics::sim_mananger_count("merge_reqs", tel.merge_reqs); + SimulatorMetrics::sim_mananger_count("dropped_merge_reqs", tel.dropped_merge_reqs); + + info!( + bid_slot = self.last_bid_slot, + sims_reqs = tel.sims_reqs, + sims_sent_immediately = tel.sims_sent_immediately, + sims_reqs_dropped = tel.sims_reqs_dropped, + 
stale_sim_reqs = tel.stale_sim_reqs, + max_pending = tel.max_pending, + max_in_flight = tel.max_in_flight, + merge_reqs = tel.merge_reqs, + dropped_merge_reqs = tel.dropped_merge_reqs, + "sim manager telemetry" + ) + } +} + +struct SimEntry { + client: SimulatorClient, + is_synced: bool, + /// For certain errors we pause sims for some time to allow time for the node to recover + paused_until: Option, + /// Current number of pending tasks (validation or merging) + pending: usize, +} + +impl SimEntry { + fn new(client: SimulatorClient) -> Self { + Self { client, is_synced: false, paused_until: None, pending: 0 } + } + + /// A lighter check to decide whether we should accept optimistic submissions + fn can_simulate_light(&self) -> bool { + self.is_synced && + match self.paused_until { + Some(until) => Instant::now() > until, + None => true, + } + } + + fn can_simulate(&self) -> bool { + self.can_simulate_light() && self.pending < self.client.config.max_concurrent_tasks + } + + fn can_merge(&self) -> bool { + self.can_simulate() && self.client.config.is_merging_simulator + } +} + +pub(crate) const SIMULATOR_REQUEST_TIMEOUT: Duration = Duration::from_secs(20); + +#[derive(Default)] +struct LocalTelemetry { + sims_reqs: usize, + sims_sent_immediately: usize, + sims_reqs_dropped: usize, + stale_sim_reqs: usize, + // waiting to be sent + max_pending: usize, + // waiting for result + max_in_flight: usize, + merge_reqs: usize, + dropped_merge_reqs: usize, +} + +// Sim id / Simulation Result, so we can use this for merging requests +pub type SimulationResult = (usize, Option); +#[derive(Clone)] +pub struct SimulationResultInner { + pub result: Result<(), BlockSimError>, + pub submission_ref: crate::auctioneer::SubmissionRef, + pub submission: SignedBidSubmission, + pub trace: SubmissionTrace, + pub optimistic_version: OptimisticVersion, + pub version: SubmissionVersion, +} + +enum SimDispatch { + Ssz { to_send: reqwest::RequestBuilder, ssz_url: String, http: reqwest::Client 
}, + Json { to_send: reqwest::RequestBuilder, method: String }, +} + +/// Internal-only events: async task → sim tile (not tile-to-tile). +pub(super) enum SimTileInternalEvent { + TaskDone { id: usize, paused_until: Option, result_ix: usize }, + SyncStatus { id: usize, is_synced: bool }, +} + +/// Jump consistent hash — maps a builder pubkey to a simulator index with +/// minimal reassignment when the set size changes. +fn sticky_sim_index(num_simulators: usize, builder_pubkey: &BlsPublicKeyBytes) -> usize { + if num_simulators <= 1 { + return 0; + } + let key = u64::from_le_bytes(builder_pubkey.0[..8].try_into().unwrap()); + jump_hash(key, num_simulators) +} + +/// Stateless consistent hash — minimises slot reassignment as `n` changes. +/// +fn jump_hash(mut key: u64, n: usize) -> usize { + let mut b: i64 = -1; + let mut j: i64 = 0; + while j < n as i64 { + b = j; + key = key.wrapping_mul(2862933555777941757).wrapping_add(1); + j = ((b + 1) as f64 * ((1u64 << 31) as f64) / ((key >> 33) + 1) as f64) as i64; + } + b as usize +} + +/// Pending requests, we only keep the last one for each builder. +struct PendingRequests { + reqs: Vec, +} + +impl PendingRequests { + fn with_capacity(capacity: usize) -> Self { + Self { reqs: Vec::with_capacity(capacity) } + } + + fn store( + &mut self, + req: crate::simulator::SimulatorRequest, + local_telemetry: &mut LocalTelemetry, + ) { + if let Some(i) = self.reqs.iter().position(|r| r.builder_pubkey == req.builder_pubkey) { + if req.on_receive_ns() > self.reqs[i].on_receive_ns() { + self.reqs[i] = req; + } + local_telemetry.sims_reqs_dropped += 1; + } else { + self.reqs.push(req); + } + local_telemetry.max_pending = local_telemetry.max_pending.max(self.reqs.len()); + } + + fn next_req(&mut self) -> Option { + let i = self.reqs.iter().enumerate().max_by_key(|(_, r)| r.sort_key()).map(|(i, _)| i)?; + Some(self.reqs.swap_remove(i)) + } + + /// Clear backlog of simulations from the previous bid slot. 
+ fn clear(&mut self, bid_slot: u64) { + self.reqs.retain(|r| r.bid_slot >= bid_slot); + } +} diff --git a/crates/relay/src/spine/messages.rs b/crates/relay/src/spine/messages.rs index c9808f250..35110bc12 100644 --- a/crates/relay/src/spine/messages.rs +++ b/crates/relay/src/spine/messages.rs @@ -62,3 +62,26 @@ impl SubmissionResultWithRef { pub struct DecodedSubmission { pub ix: usize, } + +/// Auctioneer → SimulatorTile: spine signal for a new sim/merge request or slot transition. +#[derive(Debug, Clone, Copy)] +pub struct ToSimMsg { + pub kind: ToSimKind, + /// Index into `SharedVector` (unused for `NewSlot`). + pub ix: usize, + /// Slot number; only meaningful for `NewSlot`. + pub bid_slot: u64, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ToSimKind { + /// SimRequest or MergeRequest stored at `ix`. + Request, + NewSlot, +} + +/// SimulatorTile → Auctioneer: index into `SharedVector`. +#[derive(Debug, Clone, Copy)] +pub struct FromSimMsg { + pub ix: usize, +} diff --git a/crates/relay/src/spine/mod.rs b/crates/relay/src/spine/mod.rs index 7a0281776..4ad5f6cff 100644 --- a/crates/relay/src/spine/mod.rs +++ b/crates/relay/src/spine/mod.rs @@ -15,4 +15,12 @@ pub struct HelixSpine { #[queue(size(2usize.pow(16)))] pub decoded: SpineQueue, + + /// Auctioneer → SimulatorTile. + #[queue(size(2usize.pow(16)))] + pub to_sim: SpineQueue, + + /// SimulatorTile → Auctioneer. 
+ #[queue(size(2usize.pow(16)))] + pub from_sim: SpineQueue, } diff --git a/crates/simulator/Cargo.toml b/crates/simulator/Cargo.toml index f5953e9ba..7f70731e6 100644 --- a/crates/simulator/Cargo.toml +++ b/crates/simulator/Cargo.toml @@ -16,6 +16,8 @@ alloy-signer.workspace = true alloy-signer-local.workspace = true alloy-sol-types.workspace = true async-trait.workspace = true +axum.workspace = true +ethereum_ssz.workspace = true bytes.workspace = true clap.workspace = true dashmap.workspace = true diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs index 15e28e10d..aaff51317 100644 --- a/crates/simulator/src/main.rs +++ b/crates/simulator/src/main.rs @@ -1,6 +1,7 @@ mod block_merging; mod common; mod inclusion; +mod ssz_server; mod state_recorder; mod validation; @@ -65,6 +66,11 @@ fn main() { ctx.modules.merge_configured(block_merging_api.into_rpc())?; } + if let Some(port) = args.sim_ssz_port { + let api = validation_api.clone(); + tokio::spawn(crate::ssz_server::run(api, port)); + } + ctx.modules.merge_configured(validation_api.into_rpc())?; if args.enable_inclusion_ext { @@ -131,6 +137,10 @@ struct CliExt { #[arg(long)] pub validate_merged_blocks: bool, + + /// If set, start an SSZ binary validation endpoint on this port + #[arg(long)] + pub sim_ssz_port: Option, } impl From for BlockMergingConfig { diff --git a/crates/simulator/src/ssz_server.rs b/crates/simulator/src/ssz_server.rs new file mode 100644 index 000000000..41f2dd421 --- /dev/null +++ b/crates/simulator/src/ssz_server.rs @@ -0,0 +1,63 @@ +use alloy_rpc_types::beacon::relay::{BuilderBlockValidationRequestV5, SignedBidSubmissionV5}; +use axum::{Router, extract::State, http::StatusCode, response::IntoResponse, routing::post}; +use helix_common::simulator::{SimRequest, SubmissionFormat}; +use ssz::Decode; +use tokio::net::TcpListener; +use tracing::error; + +use crate::validation::{ + BlockSubmissionValidationApiServer, ExtendedValidationRequestV5, ValidationApi, +}; + +pub 
async fn run(api: ValidationApi, port: u16) { + let router = Router::new().route("/validate", post(handler)).with_state(api); + let listener = match TcpListener::bind(("0.0.0.0", port)).await { + Ok(l) => l, + Err(e) => { + error!(%e, port, "failed to bind SSZ sim server"); + return; + } + }; + if let Err(e) = axum::serve(listener, router).await { + error!(%e, "SSZ sim server exited"); + } +} + +async fn handler(State(api): State, body: axum::body::Bytes) -> impl IntoResponse { + let req = match SimRequest::from_ssz_bytes(&body) { + Ok(r) => r, + Err(e) => return (StatusCode::BAD_REQUEST, format!("ssz decode: {e:?}")).into_response(), + }; + + let signed_bid_submission = match req.format { + SubmissionFormat::FullSsz => { + match SignedBidSubmissionV5::from_ssz_bytes(&req.signed_bid_submission) { + Ok(s) => s, + Err(e) => { + return (StatusCode::BAD_REQUEST, format!("signed bid submission decode: {e:?}")) + .into_response() + } + } + } + SubmissionFormat::DehydratedSsz => { + // Simulator-side hydration cache not yet implemented. + // Return 424 so the relay retries with full SSZ bytes. 
+ return StatusCode::FAILED_DEPENDENCY.into_response(); + } + }; + + let ext = ExtendedValidationRequestV5 { + base: BuilderBlockValidationRequestV5 { + request: signed_bid_submission, + registered_gas_limit: req.registered_gas_limit, + parent_beacon_block_root: req.parent_beacon_block_root, + }, + inclusion_list: Some(req.inclusion_list), + apply_blacklist: req.apply_blacklist, + }; + + match api.validate_builder_submission_v5(ext).await { + Ok(()) => StatusCode::OK.into_response(), + Err(e) => (StatusCode::BAD_REQUEST, e.message().to_string()).into_response(), + } +} diff --git a/crates/simulator/src/validation/mod.rs b/crates/simulator/src/validation/mod.rs index 476dcbf37..83dd434fa 100644 --- a/crates/simulator/src/validation/mod.rs +++ b/crates/simulator/src/validation/mod.rs @@ -18,6 +18,7 @@ use alloy_rpc_types::{ }; use async_trait::async_trait; use dashmap::DashSet; +use helix_common::api::builder_api::InclusionListWithMetadata; use jsonrpsee::{core::RpcResult, proc_macros::rpc, types::ErrorObject}; use reth_ethereum::{ Block, EthPrimitives, Receipt, TransactionSigned, @@ -51,7 +52,6 @@ use tracing::{info, warn}; use crate::{ common::{RethConsensus, RethPayloadValidator, RethProvider}, - inclusion::types::InclusionList, validation::error::{GetParentError, ValidationApiError}, }; @@ -147,7 +147,7 @@ impl ValidationApi { message: BidTrace, _registered_gas_limit: u64, apply_blacklist: bool, - inclusion_list: Option, + inclusion_list: Option, ) -> Result<(), ValidationApiError> { self.validate_message_against_header(block.sealed_header(), &message)?; @@ -236,7 +236,7 @@ impl ValidationApi { &self, block: &RecoveredBlock, post_state: State, - inclusion_list: &InclusionList, + inclusion_list: &InclusionListWithMetadata, ) -> Result<(), ValidationApiError> where DB: Database + Debug, @@ -250,9 +250,7 @@ impl ValidationApi { // collect which inclusion‐list hashes appeared in the block let mut included_hashes = HashSet::new(); for tx in block.body().transactions() 
{ - if let Some(req) = - inclusion_list.txs.iter().find(|t| t.hash.as_slice() == tx.tx_hash().as_slice()) - { + if let Some(req) = inclusion_list.txs.iter().find(|t| t.hash == *tx.tx_hash()) { included_hashes.insert(req.hash); } } @@ -698,7 +696,7 @@ pub struct ExtendedValidationRequestV4 { #[serde(flatten)] pub base: BuilderBlockValidationRequestV4, - pub inclusion_list: Option, + pub inclusion_list: Option, #[serde(default)] pub apply_blacklist: bool, @@ -710,7 +708,7 @@ pub struct ExtendedValidationRequestV5 { #[serde(flatten)] pub base: BuilderBlockValidationRequestV5, - pub inclusion_list: Option, + pub inclusion_list: Option, #[serde(default)] pub apply_blacklist: bool, From 5a9317d8fdf6dc54287b105eafdbd35c8b605f00 Mon Sep 17 00:00:00 2001 From: owen Date: Wed, 11 Mar 2026 18:12:49 +0000 Subject: [PATCH 2/3] fix names --- crates/common/src/config.rs | 50 +++++++------- crates/common/src/local_cache.rs | 38 +++++------ crates/common/src/simulator.rs | 6 +- crates/common/src/task.rs | 4 +- crates/data-api/src/api.rs | 16 ++--- crates/data-api/src/error.rs | 14 ++-- crates/data-api/src/stats.rs | 12 ++-- crates/database/src/lib.rs | 8 +-- crates/relay/src/api/builder/error.rs | 52 +++++++------- crates/relay/src/api/builder/submit_block.rs | 4 +- crates/relay/src/api/middleware/tracking.rs | 4 +- crates/relay/src/api/proposer/error.rs | 68 +++++++++---------- crates/relay/src/api/proposer/get_payload.rs | 4 +- crates/relay/src/auctioneer/bid_adjustor.rs | 6 +- crates/relay/src/auctioneer/bid_sorter.rs | 4 +- crates/relay/src/auctioneer/block_merger.rs | 31 +++++---- crates/relay/src/auctioneer/context.rs | 8 +-- crates/relay/src/auctioneer/get_payload.rs | 14 ++-- crates/relay/src/auctioneer/handle.rs | 2 +- crates/relay/src/auctioneer/mod.rs | 38 +++++------ crates/relay/src/auctioneer/submit_block.rs | 39 ++++++----- crates/relay/src/auctioneer/types.rs | 12 ++-- crates/relay/src/auctioneer/worker.rs | 6 +- crates/relay/src/bid_decoder/decoder.rs | 4 +- 
crates/relay/src/bid_decoder/tile.rs | 12 ++-- .../src/housekeeper/chain_event_updater.rs | 6 +- crates/relay/src/housekeeper/housekeeper.rs | 12 ++-- .../relay/src/housekeeper/primev_service.rs | 10 +-- crates/relay/src/lib.rs | 6 +- crates/relay/src/main.rs | 17 ++--- crates/relay/src/network/messages.rs | 7 +- crates/relay/src/simulator/client.rs | 27 ++++---- crates/relay/src/simulator/mod.rs | 24 +++---- crates/relay/src/simulator/tile.rs | 58 ++++++++-------- crates/simulator/src/block_merging/error.rs | 38 +++++------ crates/simulator/src/block_merging/mod.rs | 4 +- .../src/inclusion/inclusion_producer.rs | 8 +-- crates/simulator/src/inclusion/mod.rs | 1 - crates/simulator/src/inclusion/types.rs | 24 ------- crates/simulator/src/ssz_server.rs | 11 +-- crates/simulator/src/validation/error.rs | 24 +++---- crates/simulator/src/validation/mod.rs | 12 ++-- crates/tcp-types/src/lib.rs | 14 ++-- crates/types/src/blobs.rs | 8 +-- crates/types/src/block_merging.rs | 4 +- crates/types/src/execution_payload.rs | 14 ++-- crates/types/src/hydration.rs | 28 ++++---- 47 files changed, 401 insertions(+), 412 deletions(-) delete mode 100644 crates/simulator/src/inclusion/types.rs diff --git a/crates/common/src/config.rs b/crates/common/src/config.rs index 8b043ec58..e72b42131 100644 --- a/crates/common/src/config.rs +++ b/crates/common/src/config.rs @@ -415,29 +415,33 @@ impl RouterConfig { } // Replace BuilderApi, ProposerApi, DataApi with their real routes - self.replace_condensed_with_real(Route::BuilderApi, &[ - Route::GetValidators, - Route::SubmitBlock, - Route::GetTopBid, - Route::GetInclusionList, - ]); - - self.replace_condensed_with_real(Route::ProposerApi, &[ - Route::Status, - Route::RegisterValidators, - Route::GetHeader, - Route::GetPayload, - Route::GetPayloadV2, - ]); - - self.replace_condensed_with_real(Route::DataApi, &[ - Route::ProposerPayloadDelivered, - Route::ProposerHeaderDelivered, - Route::BuilderBidsReceived, - Route::ValidatorRegistration, - 
Route::DataAdjustments, - Route::MergedBlocks, - ]); + self.replace_condensed_with_real( + Route::BuilderApi, + &[Route::GetValidators, Route::SubmitBlock, Route::GetTopBid, Route::GetInclusionList], + ); + + self.replace_condensed_with_real( + Route::ProposerApi, + &[ + Route::Status, + Route::RegisterValidators, + Route::GetHeader, + Route::GetPayload, + Route::GetPayloadV2, + ], + ); + + self.replace_condensed_with_real( + Route::DataApi, + &[ + Route::ProposerPayloadDelivered, + Route::ProposerHeaderDelivered, + Route::BuilderBidsReceived, + Route::ValidatorRegistration, + Route::DataAdjustments, + Route::MergedBlocks, + ], + ); } pub fn enable_relay_network(&mut self) { diff --git a/crates/common/src/local_cache.rs b/crates/common/src/local_cache.rs index 3c6fb60dd..13c0f8196 100644 --- a/crates/common/src/local_cache.rs +++ b/crates/common/src/local_cache.rs @@ -69,17 +69,17 @@ pub enum AuctioneerError { impl IntoResponse for AuctioneerError { fn into_response(self) -> Response { let code = match self { - AuctioneerError::UnexpectedValueType | - AuctioneerError::CryptoError(_) | - AuctioneerError::FromUtf8Error(_) | - AuctioneerError::ParseIntError(_) | - AuctioneerError::FromHexError(_) | - AuctioneerError::PastSlotAlreadyDelivered | - AuctioneerError::AnotherPayloadAlreadyDeliveredForSlot | - AuctioneerError::SszDeserializeError(_) | - AuctioneerError::SliceConversionError(_) | - AuctioneerError::ExecutionPayloadNotFound | - AuctioneerError::BuilderNotFound { .. } => StatusCode::BAD_REQUEST, + AuctioneerError::UnexpectedValueType + | AuctioneerError::CryptoError(_) + | AuctioneerError::FromUtf8Error(_) + | AuctioneerError::ParseIntError(_) + | AuctioneerError::FromHexError(_) + | AuctioneerError::PastSlotAlreadyDelivered + | AuctioneerError::AnotherPayloadAlreadyDeliveredForSlot + | AuctioneerError::SszDeserializeError(_) + | AuctioneerError::SliceConversionError(_) + | AuctioneerError::ExecutionPayloadNotFound + | AuctioneerError::BuilderNotFound { .. 
} => StatusCode::BAD_REQUEST, }; (code, self.to_string()).into_response() @@ -250,16 +250,16 @@ impl LocalCache { registration: &SignedValidatorRegistration, ) -> bool { if let Some(existing_entry) = - self.validator_registration_cache.get(®istration.message.pubkey) && - existing_entry.registration_info.registration.message.timestamp >= - registration + self.validator_registration_cache.get(®istration.message.pubkey) + && existing_entry.registration_info.registration.message.timestamp + >= registration .message .timestamp - .saturating_sub(VALIDATOR_REGISTRATION_UPDATE_INTERVAL) && - existing_entry.registration_info.registration.message.fee_recipient == - registration.message.fee_recipient && - existing_entry.registration_info.registration.message.gas_limit == - registration.message.gas_limit + .saturating_sub(VALIDATOR_REGISTRATION_UPDATE_INTERVAL) + && existing_entry.registration_info.registration.message.fee_recipient + == registration.message.fee_recipient + && existing_entry.registration_info.registration.message.gas_limit + == registration.message.gas_limit { return false; } diff --git a/crates/common/src/simulator.rs b/crates/common/src/simulator.rs index b61abb952..c35ac64ab 100644 --- a/crates/common/src/simulator.rs +++ b/crates/common/src/simulator.rs @@ -131,7 +131,7 @@ impl BlockSimError { } #[derive(Debug, Clone, Encode, Decode)] -pub struct SimRequest { +pub struct SszValidationRequest { pub apply_blacklist: bool, pub registered_gas_limit: u64, pub parent_beacon_block_root: B256, @@ -142,7 +142,7 @@ pub struct SimRequest { // TODO: refactor this in a SignedBidSubmission + extra fields #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct BlockSimRequest { +pub struct JsonValidationRequest { #[serde(with = "serde_utils::quoted_u64")] pub registered_gas_limit: u64, pub message: BidTrace, @@ -156,7 +156,7 @@ pub struct BlockSimRequest { pub apply_blacklist: bool, } -impl BlockSimRequest { +impl JsonValidationRequest { pub fn new( 
registered_gas_limit: u64, block: &SignedBidSubmission, diff --git a/crates/common/src/task.rs b/crates/common/src/task.rs index 5e48b3341..76febb4ad 100644 --- a/crates/common/src/task.rs +++ b/crates/common/src/task.rs @@ -117,8 +117,8 @@ impl Cores { } fn remove(&mut self, thread: ThreadId) { - if let Some(core) = self.by_id.remove(&thread) && - let Some(count) = self.counts.get_mut(&core) + if let Some(core) = self.by_id.remove(&thread) + && let Some(count) = self.counts.get_mut(&core) { *count -= 1 } diff --git a/crates/data-api/src/api.rs b/crates/data-api/src/api.rs index 2b78c9e33..d89930e5e 100644 --- a/crates/data-api/src/api.rs +++ b/crates/data-api/src/api.rs @@ -183,10 +183,10 @@ impl DataApi { Extension(cache): Extension, Query(mut params): Query, ) -> Result { - if params.slot.is_none() && - params.block_hash.is_none() && - params.block_number.is_none() && - params.builder_pubkey.is_none() + if params.slot.is_none() + && params.block_hash.is_none() + && params.block_number.is_none() + && params.builder_pubkey.is_none() { return Err(DataApiError::MissingFilter); } @@ -234,10 +234,10 @@ impl DataApi { Extension(cache): Extension, Query(mut params): Query, ) -> Result { - if params.slot.is_none() && - params.block_hash.is_none() && - params.block_number.is_none() && - params.builder_pubkey.is_none() + if params.slot.is_none() + && params.block_hash.is_none() + && params.block_number.is_none() + && params.builder_pubkey.is_none() { return Err(DataApiError::MissingFilter); } diff --git a/crates/data-api/src/error.rs b/crates/data-api/src/error.rs index ee9832440..d7e8ae357 100644 --- a/crates/data-api/src/error.rs +++ b/crates/data-api/src/error.rs @@ -27,13 +27,13 @@ pub enum DataApiError { impl IntoResponse for DataApiError { fn into_response(self) -> Response { let code = match self { - DataApiError::SlotAndCursor | - DataApiError::MissingFilter | - DataApiError::LimitReached { .. } | - DataApiError::ValidatorRegistrationNotFound { .. 
} | - DataApiError::BlockNumberNotSupported | - DataApiError::OrderByNotSupported | - DataApiError::BuilderPubkeyNotSupported => StatusCode::BAD_REQUEST, + DataApiError::SlotAndCursor + | DataApiError::MissingFilter + | DataApiError::LimitReached { .. } + | DataApiError::ValidatorRegistrationNotFound { .. } + | DataApiError::BlockNumberNotSupported + | DataApiError::OrderByNotSupported + | DataApiError::BuilderPubkeyNotSupported => StatusCode::BAD_REQUEST, DataApiError::InternalServerError => StatusCode::INTERNAL_SERVER_ERROR, }; diff --git a/crates/data-api/src/stats.rs b/crates/data-api/src/stats.rs index 4ec8ad679..5d096383c 100644 --- a/crates/data-api/src/stats.rs +++ b/crates/data-api/src/stats.rs @@ -155,12 +155,12 @@ impl Expiry> for _val: &Vec, _now: Instant, ) -> Option { - if key.slot.is_none() && - key.cursor.is_none() && - key.block_hash.is_none() && - key.block_number.is_none() && - key.proposer_pubkey.is_none() && - key.builder_pubkey.is_none() + if key.slot.is_none() + && key.cursor.is_none() + && key.block_hash.is_none() + && key.block_number.is_none() + && key.proposer_pubkey.is_none() + && key.builder_pubkey.is_none() { Some(Duration::from_secs(12)) } else { diff --git a/crates/database/src/lib.rs b/crates/database/src/lib.rs index 0cd1b58db..66c5363ee 100644 --- a/crates/database/src/lib.rs +++ b/crates/database/src/lib.rs @@ -99,8 +99,8 @@ async fn load_known_validators_with_snapshot( cache: &local_cache::LocalCache, snapshot_dir: Option<&std::path::Path>, ) { - if let Some(dir) = snapshot_dir && - let Some(set) = snapshot::try_load_known_validators(dir).await + if let Some(dir) = snapshot_dir + && let Some(set) = snapshot::try_load_known_validators(dir).await { info!(count = set.len(), "using known_validators snapshot"); *cache.known_validators_cache.write() = set; @@ -120,8 +120,8 @@ async fn load_validator_registrations_with_snapshot( cache: &local_cache::LocalCache, snapshot_dir: Option<&std::path::Path>, ) { - if let Some(dir) = 
snapshot_dir && - let Some(entries) = snapshot::try_load_validator_registrations(dir).await + if let Some(dir) = snapshot_dir + && let Some(entries) = snapshot::try_load_validator_registrations(dir).await { let count = entries.len(); info!(count, "using validator_registrations snapshot"); diff --git a/crates/relay/src/api/builder/error.rs b/crates/relay/src/api/builder/error.rs index 27284deb5..af5ebc870 100644 --- a/crates/relay/src/api/builder/error.rs +++ b/crates/relay/src/api/builder/error.rs @@ -83,25 +83,25 @@ impl IntoResponse for &BuilderApiError { impl BuilderApiError { pub fn http_status(&self) -> StatusCode { match self { - BuilderApiError::JsonDecodeError(_) | - BuilderApiError::IOError(_) | - BuilderApiError::SszDecode(_) | - BuilderApiError::PayloadDecode | - BuilderApiError::BidValidation(_) | - BuilderApiError::ProposerDutyNotFound | - BuilderApiError::HydrationError(_) | - BuilderApiError::SigError(_) | - BuilderApiError::SimOnNextSlot | - BuilderApiError::MergeableOrdersNotFound(_) | - BuilderApiError::InvalidBuilderPubkey(_, _) | - BuilderApiError::DeliveringPayload { .. } => StatusCode::BAD_REQUEST, - - BuilderApiError::InvalidApiKey | - BuilderApiError::UntrustedBuilderOnDehydratedPayload => StatusCode::UNAUTHORIZED, - - BuilderApiError::InternalError | - BuilderApiError::AuctioneerError(_) | - BuilderApiError::DatabaseError(_) => StatusCode::INTERNAL_SERVER_ERROR, + BuilderApiError::JsonDecodeError(_) + | BuilderApiError::IOError(_) + | BuilderApiError::SszDecode(_) + | BuilderApiError::PayloadDecode + | BuilderApiError::BidValidation(_) + | BuilderApiError::ProposerDutyNotFound + | BuilderApiError::HydrationError(_) + | BuilderApiError::SigError(_) + | BuilderApiError::SimOnNextSlot + | BuilderApiError::MergeableOrdersNotFound(_) + | BuilderApiError::InvalidBuilderPubkey(_, _) + | BuilderApiError::DeliveringPayload { .. 
} => StatusCode::BAD_REQUEST, + + BuilderApiError::InvalidApiKey + | BuilderApiError::UntrustedBuilderOnDehydratedPayload => StatusCode::UNAUTHORIZED, + + BuilderApiError::InternalError + | BuilderApiError::AuctioneerError(_) + | BuilderApiError::DatabaseError(_) => StatusCode::INTERNAL_SERVER_ERROR, BuilderApiError::BlockSimulation(err) => match err { BlockSimError::Timeout | BlockSimError::SimulationDropped => { @@ -121,13 +121,13 @@ impl BuilderApiError { #[allow(clippy::match_like_matches_macro)] pub fn should_report(&self) -> bool { match self { - Self::DeliveringPayload { .. } | - Self::ProposerDutyNotFound | - Self::BidValidation(BlockValidationError::OutOfSequence { .. }) | - Self::BidValidation(BlockValidationError::AlreadyProcessingNewerPayload) | - Self::BidValidation(BlockValidationError::SubmissionForWrongSlot { .. }) | - Self::BidValidation(BlockValidationError::PrevRandaoMismatch { .. }) | - Self::SimOnNextSlot => false, + Self::DeliveringPayload { .. } + | Self::ProposerDutyNotFound + | Self::BidValidation(BlockValidationError::OutOfSequence { .. }) + | Self::BidValidation(BlockValidationError::AlreadyProcessingNewerPayload) + | Self::BidValidation(BlockValidationError::SubmissionForWrongSlot { .. }) + | Self::BidValidation(BlockValidationError::PrevRandaoMismatch { .. 
}) + | Self::SimOnNextSlot => false, _ => true, } diff --git a/crates/relay/src/api/builder/submit_block.rs b/crates/relay/src/api/builder/submit_block.rs index af4990ea6..6aff6cca5 100644 --- a/crates/relay/src/api/builder/submit_block.rs +++ b/crates/relay/src/api/builder/submit_block.rs @@ -100,8 +100,8 @@ impl BuilderApi { } fn observe_client_to_server_latency(headers: &HeaderMap, receive_ns: u64) { - if let Some(send_ts) = headers.get(HEADER_SEND_TS) && - let Some(send_ts) = send_ts.to_str().ok().and_then(Nanos::from_rfc3339) + if let Some(send_ts) = headers.get(HEADER_SEND_TS) + && let Some(send_ts) = send_ts.to_str().ok().and_then(Nanos::from_rfc3339) { SUB_CLIENT_TO_SERVER_LATENCY .with_label_values(&["http"]) diff --git a/crates/relay/src/api/middleware/tracking.rs b/crates/relay/src/api/middleware/tracking.rs index 91b7178e6..63c137237 100644 --- a/crates/relay/src/api/middleware/tracking.rs +++ b/crates/relay/src/api/middleware/tracking.rs @@ -140,8 +140,8 @@ async fn do_request(mut req: Request, next: Next, stats: Arc) .headers() .get(CONTENT_LENGTH) .and_then(|h| h.to_str().ok()) - .and_then(|s| s.parse::().ok()) && - len > MAX_PAYLOAD_LENGTH + .and_then(|s| s.parse::().ok()) + && len > MAX_PAYLOAD_LENGTH { return StatusCode::PAYLOAD_TOO_LARGE.into_response(); } diff --git a/crates/relay/src/api/proposer/error.rs b/crates/relay/src/api/proposer/error.rs index d70087228..48615db78 100644 --- a/crates/relay/src/api/proposer/error.rs +++ b/crates/relay/src/api/proposer/error.rs @@ -134,43 +134,43 @@ impl IntoResponse for ProposerApiError { fn into_response(self) -> Response { let code = match self { - ProposerApiError::NoBidPrepared | - ProposerApiError::GetHeaderRequestTooLate { .. } => StatusCode::NO_CONTENT, - - ProposerApiError::HyperError(_) | - ProposerApiError::AxumError(_) | - ProposerApiError::ToStrError(_) | - ProposerApiError::UnexpectedProposerIndex { .. 
} | - ProposerApiError::NoValidatorsCouldBeRegistered | - ProposerApiError::InvalidFork | - ProposerApiError::SerdeDecodeError(_) | - ProposerApiError::ProposerNotRegistered | - ProposerApiError::TimestampTooEarly { .. } | - ProposerApiError::TimestampTooFarInTheFuture { .. } | - ProposerApiError::RequestWrongSlot { .. } | - ProposerApiError::SlotTooNew | - ProposerApiError::GetPayloadRequestTooLate { .. } | - ProposerApiError::BlindedBlockAndPayloadHeaderMismatch | - ProposerApiError::UnsupportedBeaconChainVersion | - ProposerApiError::BeaconClientError(_) | - ProposerApiError::DatabaseError(_) | - ProposerApiError::AuctioneerError(_) | - ProposerApiError::EmptyRequest | - ProposerApiError::BlindedBlobsBundleLengthMismatch | - ProposerApiError::InternalSlotMismatchesWithSlotDuty { .. } | - ProposerApiError::InvalidBlindedBlockSlot { .. } | - ProposerApiError::BlobKzgCommitmentsMismatch | - ProposerApiError::SszError(_) | - ProposerApiError::SszDecodeError(_) | - ProposerApiError::SigError(_) | - ProposerApiError::DeliveringPayload | - ProposerApiError::GetPayloadAlreadyReceived | - ProposerApiError::RequestForPastSlot { .. } => StatusCode::BAD_REQUEST, + ProposerApiError::NoBidPrepared + | ProposerApiError::GetHeaderRequestTooLate { .. } => StatusCode::NO_CONTENT, + + ProposerApiError::HyperError(_) + | ProposerApiError::AxumError(_) + | ProposerApiError::ToStrError(_) + | ProposerApiError::UnexpectedProposerIndex { .. } + | ProposerApiError::NoValidatorsCouldBeRegistered + | ProposerApiError::InvalidFork + | ProposerApiError::SerdeDecodeError(_) + | ProposerApiError::ProposerNotRegistered + | ProposerApiError::TimestampTooEarly { .. } + | ProposerApiError::TimestampTooFarInTheFuture { .. } + | ProposerApiError::RequestWrongSlot { .. } + | ProposerApiError::SlotTooNew + | ProposerApiError::GetPayloadRequestTooLate { .. 
} + | ProposerApiError::BlindedBlockAndPayloadHeaderMismatch + | ProposerApiError::UnsupportedBeaconChainVersion + | ProposerApiError::BeaconClientError(_) + | ProposerApiError::DatabaseError(_) + | ProposerApiError::AuctioneerError(_) + | ProposerApiError::EmptyRequest + | ProposerApiError::BlindedBlobsBundleLengthMismatch + | ProposerApiError::InternalSlotMismatchesWithSlotDuty { .. } + | ProposerApiError::InvalidBlindedBlockSlot { .. } + | ProposerApiError::BlobKzgCommitmentsMismatch + | ProposerApiError::SszError(_) + | ProposerApiError::SszDecodeError(_) + | ProposerApiError::SigError(_) + | ProposerApiError::DeliveringPayload + | ProposerApiError::GetPayloadAlreadyReceived + | ProposerApiError::RequestForPastSlot { .. } => StatusCode::BAD_REQUEST, ProposerApiError::InvalidApiKey => StatusCode::UNAUTHORIZED, - ProposerApiError::InternalServerError | - ProposerApiError::NoExecutionPayloadFound => StatusCode::INTERNAL_SERVER_ERROR, + ProposerApiError::InternalServerError + | ProposerApiError::NoExecutionPayloadFound => StatusCode::INTERNAL_SERVER_ERROR, ProposerApiError::ServiceUnavailableError => StatusCode::SERVICE_UNAVAILABLE, diff --git a/crates/relay/src/api/proposer/get_payload.rs b/crates/relay/src/api/proposer/get_payload.rs index bad1487bd..1a5324d6d 100644 --- a/crates/relay/src/api/proposer/get_payload.rs +++ b/crates/relay/src/api/proposer/get_payload.rs @@ -454,8 +454,8 @@ impl ProposerApi { if let Some(until_slot_start) = until_slot_start { info!("waiting until slot start t=0: {} ms", until_slot_start.as_millis()); sleep(until_slot_start).await; - } else if let Some(since_slot_start) = since_slot_start && - since_slot_start.as_millis() > GET_PAYLOAD_REQUEST_CUTOFF_MS as u128 + } else if let Some(since_slot_start) = since_slot_start + && since_slot_start.as_millis() > GET_PAYLOAD_REQUEST_CUTOFF_MS as u128 { return Err(ProposerApiError::GetPayloadRequestTooLate { cutoff: GET_PAYLOAD_REQUEST_CUTOFF_MS as u64, diff --git 
a/crates/relay/src/auctioneer/bid_adjustor.rs b/crates/relay/src/auctioneer/bid_adjustor.rs index 17aa73d1f..519c20ad0 100644 --- a/crates/relay/src/auctioneer/bid_adjustor.rs +++ b/crates/relay/src/auctioneer/bid_adjustor.rs @@ -1,4 +1,4 @@ -use crate::auctioneer::{PayloadEntry, SimulatorRequest, types::SlotData}; +use crate::auctioneer::{PayloadEntry, ValidationRequest, types::SlotData}; pub trait BidAdjustor: Send + Sync + 'static { fn try_apply_adjustments( @@ -6,7 +6,7 @@ pub trait BidAdjustor: Send + Sync + 'static { bid: &PayloadEntry, slot_data: &SlotData, is_dry_run: bool, - ) -> Option<(PayloadEntry, SimulatorRequest, bool, &str)>; + ) -> Option<(PayloadEntry, ValidationRequest, bool, &str)>; fn on_new_slot(&mut self, bid_slot: u64); } @@ -19,7 +19,7 @@ impl BidAdjustor for DefaultBidAdjustor { _bid: &PayloadEntry, _slot_data: &SlotData, _is_dry_run: bool, - ) -> Option<(PayloadEntry, SimulatorRequest, bool, &str)> { + ) -> Option<(PayloadEntry, ValidationRequest, bool, &str)> { None } diff --git a/crates/relay/src/auctioneer/bid_sorter.rs b/crates/relay/src/auctioneer/bid_sorter.rs index b522fc531..ea7ecc750 100644 --- a/crates/relay/src/auctioneer/bid_sorter.rs +++ b/crates/relay/src/auctioneer/bid_sorter.rs @@ -286,8 +286,8 @@ impl BidSorter { continue; } - if let Some((curr, _)) = &state.curr_bid && - *curr == demoted + if let Some((curr, _)) = &state.curr_bid + && *curr == demoted { state.traverse_update_top_bid(self.curr_bid_slot, None, false, &self.top_bid_tx); } diff --git a/crates/relay/src/auctioneer/block_merger.rs b/crates/relay/src/auctioneer/block_merger.rs index a8dfb53a5..2968b689a 100644 --- a/crates/relay/src/auctioneer/block_merger.rs +++ b/crates/relay/src/auctioneer/block_merger.rs @@ -27,7 +27,7 @@ use tracing::{debug, error, info, trace, warn}; use zstd::zstd_safe::WriteBuf; use crate::auctioneer::{ - BlockMergeRequest, BlockMergeRequestRef, BlockMergeResponse, PayloadBidData, + BlockMergeRequestRef, BlockMergeResponse, 
MergeRequest, PayloadBidData, submit_block::MergeData, types::PayloadEntry, }; @@ -230,7 +230,7 @@ impl BlockMerger { } } - pub fn fetch_merge_request(&mut self) -> Option { + pub fn fetch_merge_request(&mut self) -> Option { trace!("fetching merge request"); self.fetch_merge_request_count += 1; if !self.should_request_merge() { @@ -289,7 +289,7 @@ impl BlockMerger { trace!(count = self.trimmed_orders_buf.len(), "found orders"); self.found_orders_count += 1; - let merge_request = BlockMergeRequest { + let merge_request = MergeRequest { bid_slot: base_block.slot, request: json!(BlockMergeRequestRef { original_value: base_block.value, @@ -360,8 +360,8 @@ impl BlockMerger { original_tx_count: original_payload.execution_payload.transactions.len(), merged_tx_count: response.execution_payload.transactions.len(), original_blob_count: original_payload.blobs_bundle.blobs.len(), - merged_blob_count: original_payload.blobs_bundle.blobs.len() + - response.appended_blobs.len(), + merged_blob_count: original_payload.blobs_bundle.blobs.len() + + response.appended_blobs.len(), builder_inclusions: response.builder_inclusions, trace, }); @@ -411,13 +411,13 @@ impl BlockMerger { fn should_request_merge(&self) -> bool { let start_time = Instant::now(); - let has_new_data = self.best_mergeable_orders.has_new_orders() || - (self.best_mergeable_orders.has_orders() && self.has_new_base_block); + let has_new_data = self.best_mergeable_orders.has_new_orders() + || (self.best_mergeable_orders.has_orders() && self.has_new_base_block); if !has_new_data { return false; } - let res = utcnow_ms().saturating_sub(self.last_merge_request_time_ms) >= - MERGE_REQUEST_INTERVAL_MS; + let res = utcnow_ms().saturating_sub(self.last_merge_request_time_ms) + >= MERGE_REQUEST_INTERVAL_MS; record_step("should_request_merge", start_time.elapsed()); res } @@ -679,11 +679,14 @@ fn blobs_bundle_to_hashmap( .into_iter() .zip(bundle.iter_blobs()) .map(|(versioned_hash, (blob, commitment, proofs))| { - 
(versioned_hash, BlobWithMetadata { - commitment: *commitment, - proofs: proofs.to_vec(), - blob: blob.clone(), - }) + ( + versioned_hash, + BlobWithMetadata { + commitment: *commitment, + proofs: proofs.to_vec(), + blob: blob.clone(), + }, + ) }) .collect() } diff --git a/crates/relay/src/auctioneer/context.rs b/crates/relay/src/auctioneer/context.rs index a80a32ce7..df4ad0366 100644 --- a/crates/relay/src/auctioneer/context.rs +++ b/crates/relay/src/auctioneer/context.rs @@ -30,7 +30,7 @@ use crate::{ block_merger::BlockMerger, types::{PayloadEntry, PendingPayload, SubmissionRef}, }, - simulator::{BlockMergeResponse, SimInboundPayload, tile::SimulationResult}, + simulator::{BlockMergeResponse, SimRequest, tile::ValidationResult}, spine::{ HelixSpineProducers, messages::{SubmissionResultWithRef, ToSimKind, ToSimMsg}, @@ -61,7 +61,7 @@ pub struct Context { pub completed_dry_run: bool, pub future_results: Arc>, pub auctioneer_handle: AuctioneerHandle, - pub sim_inbound: Arc>, + pub sim_inbound: Arc>, pub accept_optimistic: Arc, pub failsafe_triggered: Arc, } @@ -75,7 +75,7 @@ impl Context { pub fn new( chain_info: ChainInfo, config: RelayConfig, - sim_inbound: Arc>, + sim_inbound: Arc>, accept_optimistic: Arc, failsafe_triggered: Arc, db: DbHandle, @@ -138,7 +138,7 @@ impl Context { /// 2. 
Store simulation to DB pub fn handle_simulation_result( &mut self, - result: SimulationResult, + result: ValidationResult, already_sent: bool, producers: &mut HelixSpineProducers, ) { diff --git a/crates/relay/src/auctioneer/get_payload.rs b/crates/relay/src/auctioneer/get_payload.rs index ac3d4b1da..1132c0774 100644 --- a/crates/relay/src/auctioneer/get_payload.rs +++ b/crates/relay/src/auctioneer/get_payload.rs @@ -153,13 +153,13 @@ impl Context { slot_data: &SlotData, ) -> Result<(GetPayloadResponse, VersionedSignedProposal), ProposerApiError> { match blinded { - SignedBlindedBeaconBlock::Altair(_) | - SignedBlindedBeaconBlock::Base(_) | - SignedBlindedBeaconBlock::Bellatrix(_) | - SignedBlindedBeaconBlock::Capella(_) | - SignedBlindedBeaconBlock::Deneb(_) | - SignedBlindedBeaconBlock::Electra(_) | - SignedBlindedBeaconBlock::Gloas(_) => { + SignedBlindedBeaconBlock::Altair(_) + | SignedBlindedBeaconBlock::Base(_) + | SignedBlindedBeaconBlock::Bellatrix(_) + | SignedBlindedBeaconBlock::Capella(_) + | SignedBlindedBeaconBlock::Deneb(_) + | SignedBlindedBeaconBlock::Electra(_) + | SignedBlindedBeaconBlock::Gloas(_) => { Err(ProposerApiError::UnsupportedBeaconChainVersion) } SignedBlindedBeaconBlock::Fulu(blinded_block) => { diff --git a/crates/relay/src/auctioneer/handle.rs b/crates/relay/src/auctioneer/handle.rs index e72c029ea..c452bce14 100644 --- a/crates/relay/src/auctioneer/handle.rs +++ b/crates/relay/src/auctioneer/handle.rs @@ -105,7 +105,7 @@ impl AuctioneerHandle { .is_err() { tracing::error!("failed to send get_payload to auctioneer"); - return Err(ChannelFull) + return Err(ChannelFull); } } Err(err) => { diff --git a/crates/relay/src/auctioneer/mod.rs b/crates/relay/src/auctioneer/mod.rs index a12d29e1c..add363f2c 100644 --- a/crates/relay/src/auctioneer/mod.rs +++ b/crates/relay/src/auctioneer/mod.rs @@ -54,7 +54,7 @@ use crate::{ api::{FutureBidSubmissionResult, builder::error::BuilderApiError, proposer::ProposerApiError}, 
auctioneer::types::PendingPayload, housekeeper::PayloadAttributesUpdate, - simulator::{SimInboundPayload, SimOutboundPayload}, + simulator::{SimRequest, SimResult}, spine::{ HelixSpineProducers, messages::{DecodedSubmission, FromSimMsg}, @@ -68,7 +68,7 @@ pub use crate::{ context::{Context, send_submission_result}, types::{InternalBidSubmission, InternalBidSubmissionHeader, SubmissionRef}, }, - simulator::{SimulatorRequest, SimulatorTile, client::SimulatorClient, *}, + simulator::{SimulatorTile, ValidationRequest, client::SimulatorClient, *}, }; pub struct Auctioneer { @@ -77,7 +77,7 @@ pub struct Auctioneer { tel: Telemetry, event_rx: crossbeam_channel::Receiver, decoded: Arc>, - sim_outbound: Arc>, + sim_outbound: Arc>, } impl Auctioneer { @@ -94,8 +94,8 @@ impl Auctioneer { future_results: Arc>, decoded: Arc>, auctioneer_handle: AuctioneerHandle, - sim_inbound: Arc>, - sim_outbound: Arc>, + sim_inbound: Arc>, + sim_outbound: Arc>, accept_optimistic: Arc, failsafe_triggered: Arc, ) -> Self { @@ -147,10 +147,8 @@ impl Tile for Auctioneer { return; }; let event = match payload.as_ref() { - SimOutboundPayload::SimResult(sim_result) => Event::SimResult(sim_result.clone()), - SimOutboundPayload::MergeResult(merge_result) => { - Event::MergeResult(merge_result.clone()) - } + SimResult::Validate(sim_result) => Event::SimResult(sim_result.clone()), + SimResult::Merge(merge_result) => Event::MergeResult(merge_result.clone()), }; self.state.step(event, &mut self.ctx, &mut self.tel, producers); }); @@ -274,8 +272,8 @@ impl State { Ordering::Less => (), Ordering::Equal => { // check fork - if let Some(update) = payload_attributes && - !slot_data.payload_attributes_map.contains_key(&update.parent_hash) + if let Some(update) = payload_attributes + && !slot_data.payload_attributes_map.contains_key(&update.parent_hash) { info!(bid_slot =% slot_data.bid_slot, received =? update.parent_hash, sorting =? 
slot_data.payload_attributes_map.keys(), "sorting for an additional fork"); @@ -328,8 +326,8 @@ impl State { "gap in slot data received (broadcast)" ); - if let Some(attributes) = &payload_attributes && - &attributes.parent_hash != block_hash + if let Some(attributes) = &payload_attributes + && &attributes.parent_hash != block_hash { warn!( maybe_missed_slot =% slot_data.bid_slot, @@ -393,8 +391,8 @@ impl State { // proposer is on a different fork warn!(req =% params.parent_hash, have =? slot_data.payload_attributes_map.keys(), "get header for unknown parent hash"); let _ = res_tx.send(Err(ProposerApiError::NoBidPrepared)); - } else if slot_data.registration_data.entry.registration.message.pubkey != - params.pubkey + } else if slot_data.registration_data.entry.registration.message.pubkey + != params.pubkey { warn!(req =% params.pubkey, this =% slot_data.registration_data.entry.registration.message.pubkey, "get header for mismatched proposer"); let _ = res_tx.send(Err(ProposerApiError::NoBidPrepared)); @@ -443,8 +441,8 @@ impl State { // sim result (State::Sorting(slot_data), Event::SimResult(mut result)) => { let already_sent = - result.1.as_ref().is_some_and(|r| r.submission.slot() == slot_data.bid_slot) && - ctx.sort_simulation_result(&mut result, producers); + result.1.as_ref().is_some_and(|r| r.submission.slot() == slot_data.bid_slot) + && ctx.sort_simulation_result(&mut result, producers); ctx.handle_simulation_result(result, already_sent, producers); } @@ -509,9 +507,9 @@ impl State { // gossiped payload, proposer equivocating? 
(State::Broadcasting { block_hash, slot_data }, Event::GossipPayload(payload)) => { - if *block_hash == payload.execution_payload.execution_payload.block_hash && - slot_data.bid_slot == payload.slot && - slot_data.proposer_pubkey() == &payload.proposer_pub_key + if *block_hash == payload.execution_payload.execution_payload.block_hash + && slot_data.bid_slot == payload.slot + && slot_data.proposer_pubkey() == &payload.proposer_pub_key { debug!("already broadcasting gossip payload"); } else { diff --git a/crates/relay/src/auctioneer/submit_block.rs b/crates/relay/src/auctioneer/submit_block.rs index 9332b9dac..c4a2bfe3b 100644 --- a/crates/relay/src/auctioneer/submit_block.rs +++ b/crates/relay/src/auctioneer/submit_block.rs @@ -9,7 +9,7 @@ use helix_common::{ bid_submission::OptimisticVersion, metrics::{BID_ADJUSTMENT_LATENCY, HYDRATION_CACHE_HITS}, record_submission_step, - simulator::{SimRequest, SubmissionFormat}, + simulator::{SszValidationRequest, SubmissionFormat}, }; use helix_types::{ BidAdjustmentData, BlockValidationError, MergeableOrdersWithPref, SignedBidSubmission, @@ -25,7 +25,7 @@ use crate::{ types::{PayloadEntry, SlotData, Submission, SubmissionData, SubmissionRef}, }, housekeeper::PayloadAttributesUpdate, - simulator::{SimInboundPayload, SimulatorRequest, tile::SimulationResult}, + simulator::{SimRequest, ValidationRequest, tile::ValidationResult}, spine::{ HelixSpineProducers, messages::{ToSimKind, ToSimMsg}, @@ -50,9 +50,9 @@ impl Context { let (req, entry) = self.prep_data_to_store_and_sim(validated, slot_data, is_optimistic); - if !self.completed_dry_run && - entry.is_adjustable() && - self.cache.adjustments_enabled.load(Ordering::Relaxed) + if !self.completed_dry_run + && entry.is_adjustable() + && self.cache.adjustments_enabled.load(Ordering::Relaxed) { let start = Nanos::now(); if let Some((adjusted_block, sim_request, _, strategy)) = @@ -70,8 +70,8 @@ impl Context { self.store_data_and_sim(req, entry, false, producers); - if 
self.config.block_merging_config.is_enabled && - let Some(data) = merging_data + if self.config.block_merging_config.is_enabled + && let Some(data) = merging_data { let base_block = data.block_hash; let is_top_bid = data.is_top_bid; @@ -91,7 +91,7 @@ impl Context { pub(super) fn sort_simulation_result( &mut self, - result: &mut SimulationResult, + result: &mut ValidationResult, producers: &mut HelixSpineProducers, ) -> bool { let Some(result) = &mut result.1 else { @@ -200,9 +200,9 @@ impl Context { record_submission_step("validated", start_val.elapsed()); trace!("validated"); - let (optimistic_version, is_top_bid) = if self.accept_optimistic.load(Ordering::Relaxed) && - !self.failsafe_triggered.load(Ordering::Relaxed) && - self.should_process_optimistically(&submission, &builder_info, slot_data) + let (optimistic_version, is_top_bid) = if self.accept_optimistic.load(Ordering::Relaxed) + && !self.failsafe_triggered.load(Ordering::Relaxed) + && self.should_process_optimistically(&submission, &builder_info, slot_data) { let is_top_bid = self.bid_sorter.sort( submission_data.version, @@ -246,8 +246,8 @@ impl Context { validated: ValidatedData, slot_data: &SlotData, is_optimistic: bool, - ) -> (SimulatorRequest, PayloadEntry) { - let request = SimRequest { + ) -> (ValidationRequest, PayloadEntry) { + let request = SszValidationRequest { registered_gas_limit: slot_data.registration_data.entry.registration.message.gas_limit, apply_blacklist: slot_data.registration_data.entry.preferences.filtering.is_regional(), parent_beacon_block_root: validated @@ -265,7 +265,7 @@ impl Context { }, }; - let req = SimulatorRequest { + let req = ValidationRequest { is_optimistic, submission_ref: validated.submission_ref, request, @@ -293,7 +293,7 @@ impl Context { pub fn store_data_and_sim( &mut self, - req: SimulatorRequest, + req: ValidationRequest, entry: PayloadEntry, fast_track: bool, producers: &mut HelixSpineProducers, @@ -311,8 +311,7 @@ impl Context { 
self.db.store_block_submission(sub_clone, req.trace, opt_version, is_adjusted, live_ts); - let ix = - self.sim_inbound.push(SimInboundPayload::SimRequest { req: Box::new(req), fast_track }); + let ix = self.sim_inbound.push(SimRequest::Validate { req: Box::new(req), fast_track }); producers.produce(ToSimMsg { kind: ToSimKind::Request, ix, bid_slot: 0 }); } @@ -327,8 +326,8 @@ impl Context { } if builder_info.is_optimistic && submission.message().value <= builder_info.collateral { - if slot_data.registration_data.entry.preferences.filtering.is_regional() && - !builder_info.can_process_regional_slot_optimistically() + if slot_data.registration_data.entry.preferences.filtering.is_regional() + && !builder_info.can_process_regional_slot_optimistically() { return false; } @@ -341,7 +340,7 @@ impl Context { fn request_merged_block(&mut self, producers: &mut HelixSpineProducers) { if let Some(merge_request) = self.block_merger.fetch_merge_request() { - let ix = self.sim_inbound.push(SimInboundPayload::MergeRequest(merge_request)); + let ix = self.sim_inbound.push(SimRequest::Merge(merge_request)); producers.produce(ToSimMsg { kind: ToSimKind::Request, ix, bid_slot: 0 }); } } diff --git a/crates/relay/src/auctioneer/types.rs b/crates/relay/src/auctioneer/types.rs index 662962721..1240668b8 100644 --- a/crates/relay/src/auctioneer/types.rs +++ b/crates/relay/src/auctioneer/types.rs @@ -41,11 +41,11 @@ use crate::{ HEADER_API_KEY, HEADER_API_TOKEN, HEADER_HYDRATE, HEADER_IS_MERGEABLE, HEADER_MERGE_TYPE, HEADER_SEQUENCE, HEADER_WITH_ADJUSTMENTS, proposer::ProposerApiError, }, - auctioneer::BlockMergeResult, + auctioneer::MergeResult, bid_decoder::{Encoding, SubmissionType}, gossip::BroadcastPayloadParams, housekeeper::PayloadAttributesUpdate, - simulator::tile::SimulationResult, + simulator::tile::ValidationResult, }; #[derive(Clone, Copy, Debug)] @@ -122,8 +122,8 @@ impl InternalBidSubmissionHeader { ) -> MergeType { match header_map.get(HEADER_MERGE_TYPE) { None => { - if 
sub_type.is_some_and(|sub_type| sub_type == SubmissionType::Merge) || - matches!(header_map.get(HEADER_IS_MERGEABLE), Some(header) if header == HeaderValue::from_static("true")) + if sub_type.is_some_and(|sub_type| sub_type == SubmissionType::Merge) + || matches!(header_map.get(HEADER_IS_MERGEABLE), Some(header) if header == HeaderValue::from_static("true")) { MergeType::Mergeable } else { @@ -519,8 +519,8 @@ pub enum Event { span: tracing::Span, }, GossipPayload(BroadcastPayloadParams), - SimResult(SimulationResult), - MergeResult(BlockMergeResult), + SimResult(ValidationResult), + MergeResult(MergeResult), } impl Event { diff --git a/crates/relay/src/auctioneer/worker.rs b/crates/relay/src/auctioneer/worker.rs index c566116ce..601e0475c 100644 --- a/crates/relay/src/auctioneer/worker.rs +++ b/crates/relay/src/auctioneer/worker.rs @@ -59,9 +59,9 @@ impl Default for Telemetry { Self { work: Default::default(), spin: Default::default(), - next_record: Instant::now() + - Self::REPORT_FREQ + - Duration::from_millis(utcnow_ns() % 10 * 5), // to scatter worker reports + next_record: Instant::now() + + Self::REPORT_FREQ + + Duration::from_millis(utcnow_ns() % 10 * 5), // to scatter worker reports loop_start: Instant::now(), loop_worked: Default::default(), } diff --git a/crates/relay/src/bid_decoder/decoder.rs b/crates/relay/src/bid_decoder/decoder.rs index 3319db576..b0e8479da 100644 --- a/crates/relay/src/bid_decoder/decoder.rs +++ b/crates/relay/src/bid_decoder/decoder.rs @@ -77,8 +77,8 @@ impl Encoding { pub fn from_accept(headers: &HeaderMap) -> Self { match headers.get(ACCEPT) { Some(header) - if header == HeaderValue::from_static(HEADER_SSZ) || - header == HeaderValue::from_static(HEADER_ACCEPT_SSZ) => + if header == HeaderValue::from_static(HEADER_SSZ) + || header == HeaderValue::from_static(HEADER_ACCEPT_SSZ) => { Encoding::Ssz } diff --git a/crates/relay/src/bid_decoder/tile.rs b/crates/relay/src/bid_decoder/tile.rs index 2cf0a170c..6163da289 100644 --- 
a/crates/relay/src/bid_decoder/tile.rs +++ b/crates/relay/src/bid_decoder/tile.rs @@ -250,7 +250,10 @@ impl DecoderTile { let builder_pubkey = decoder.extract_builder_pubkey(body, with_mergeable_data)?; let skip_sigverify = if let Some(expected_pubkey) = expected_pubkey { if builder_pubkey != *expected_pubkey { - return Err(BuilderApiError::InvalidBuilderPubkey(*expected_pubkey, builder_pubkey)); + return Err(BuilderApiError::InvalidBuilderPubkey( + *expected_pubkey, + builder_pubkey, + )); } true @@ -285,9 +288,10 @@ impl DecoderTile { // For plain SSZ full submissions, capture the decompressed bytes so the // auctioneer can forward them to the simulator without re-encoding. - let sim_bytes = if !is_dehydrated && - !with_mergeable_data && - matches!(header.encoding, crate::bid_decoder::Encoding::Ssz) + let sim_bytes = if !is_dehydrated + && !with_mergeable_data + && !with_adjustments + && matches!(header.encoding, crate::bid_decoder::Encoding::Ssz) { Some((body.clone(), SubmissionFormat::FullSsz)) } else { diff --git a/crates/relay/src/housekeeper/chain_event_updater.rs b/crates/relay/src/housekeeper/chain_event_updater.rs index 50fc08a97..d259f6c9a 100644 --- a/crates/relay/src/housekeeper/chain_event_updater.rs +++ b/crates/relay/src/housekeeper/chain_event_updater.rs @@ -110,9 +110,9 @@ impl ChainEventUpdater { mut head_event_rx: broadcast::Receiver, mut payload_attributes_rx: broadcast::Receiver, ) { - let start_instant = Instant::now() + - self.chain_info.clock.duration_to_next_slot().unwrap() + - Duration::from_secs(CUTOFF_TIME); + let start_instant = Instant::now() + + self.chain_info.clock.duration_to_next_slot().unwrap() + + Duration::from_secs(CUTOFF_TIME); let mut timer = interval_at(start_instant, Duration::from_secs(self.chain_info.seconds_per_slot())); diff --git a/crates/relay/src/housekeeper/housekeeper.rs b/crates/relay/src/housekeeper/housekeeper.rs index dac69de58..29d0f86e3 100644 --- a/crates/relay/src/housekeeper/housekeeper.rs +++ 
b/crates/relay/src/housekeeper/housekeeper.rs @@ -141,8 +141,8 @@ impl Housekeeper { loop { let head = self.slots.head(); let timeout = (self.chain_info.clock.start_of(head + 1).unwrap()) - .saturating_sub(utcnow_dur()) + - CUTOFF_TIME; + .saturating_sub(utcnow_dur()) + + CUTOFF_TIME; if let Ok(head_event_result) = tokio::time::timeout(timeout, head_event_rx.recv()).await { @@ -290,8 +290,8 @@ impl Housekeeper { self.slots.update_proposer_duties(head_slot); - if let Some(inclusion_list_service) = self.inclusion_list_service.as_ref() && - let Some(next_duty) = proposer_duties.iter().find(|duty| duty.slot == head_slot) + if let Some(inclusion_list_service) = self.inclusion_list_service.as_ref() + && let Some(next_duty) = proposer_duties.iter().find(|duty| duty.slot == head_slot) { let pub_key = next_duty.pubkey; let inclusion_list_service = inclusion_list_service.clone(); @@ -406,8 +406,8 @@ impl Housekeeper { for builder_pubkey in primev_builders { match auctioneer.get_builder_info(&builder_pubkey) { Some(builder_info) => { - if builder_info.builder_id == Some("PrimevBuilder".to_string()) || - builder_info + if builder_info.builder_id == Some("PrimevBuilder".to_string()) + || builder_info .builder_ids .as_ref() .is_some_and(|v| v.contains(&"PrimevBuilder".to_string())) diff --git a/crates/relay/src/housekeeper/primev_service.rs b/crates/relay/src/housekeeper/primev_service.rs index abe78840f..16ebc0146 100644 --- a/crates/relay/src/housekeeper/primev_service.rs +++ b/crates/relay/src/housekeeper/primev_service.rs @@ -220,9 +220,9 @@ impl EthereumPrimevService { tuples .iter() .map(|token| { - if let ethers::abi::Token::Tuple(values) = token && - values.len() >= 3 && - let ( + if let ethers::abi::Token::Tuple(values) = token + && values.len() >= 3 + && let ( ethers::abi::Token::Bool(vanilla_opted_in), ethers::abi::Token::Bool(avs_opted_in), ethers::abi::Token::Bool(middleware_opted_in), @@ -245,8 +245,8 @@ impl EthereumPrimevService { // Extract the public keys of 
validators that are opted into any Primev service let mut opted_in_validators = Vec::new(); for (index, status) in opted_in_statuses.iter().enumerate() { - if (status.0 || status.1 || status.2) && - let Some(duty) = proposer_duties.get(index) + if (status.0 || status.1 || status.2) + && let Some(duty) = proposer_duties.get(index) { opted_in_validators.push(duty.pubkey); } diff --git a/crates/relay/src/lib.rs b/crates/relay/src/lib.rs index 53a16ec11..002a7a13a 100644 --- a/crates/relay/src/lib.rs +++ b/crates/relay/src/lib.rs @@ -28,14 +28,14 @@ pub use crate::{ }, auctioneer::{ Auctioneer, AuctioneerHandle, BidSorter, Context, Event, InternalBidSubmission, - PayloadEntry, RegWorker, RegWorkerHandle, SimulatorClient, SimulatorRequest, SimulatorTile, - SlotData, SubmissionPayload, SubmissionRef, + PayloadEntry, RegWorker, RegWorkerHandle, SimulatorClient, SimulatorTile, SlotData, + SubmissionPayload, SubmissionRef, ValidationRequest, }, beacon::start_beacon_client, bid_decoder::{DecoderTile, SubmissionDataWithSpan}, housekeeper::start_housekeeper, network::RelayNetworkManager, - simulator::{SimInboundPayload, SimOutboundPayload}, + simulator::{SimRequest, SimResult}, spine::HelixSpine, tcp_bid_recv::{ BidSubmissionFlags, BidSubmissionHeader, BidSubmissionResponse, BidSubmissionTcpListener, diff --git a/crates/relay/src/main.rs b/crates/relay/src/main.rs index 1725ab46b..5bd61217e 100644 --- a/crates/relay/src/main.rs +++ b/crates/relay/src/main.rs @@ -27,10 +27,9 @@ use helix_common::{ use helix_relay::{ Api, Auctioneer, AuctioneerHandle, BidSorter, BidSubmissionTcpListener, DbHandle, DecoderTile, DefaultBidAdjustor, FutureBidSubmissionResult, HelixSpine, RegWorker, RegWorkerHandle, - RelayNetworkManager, S3PayloadSaver, SimInboundPayload, SimOutboundPayload, - SimulatorTile, SubmissionDataWithSpan, WebsiteService, spawn_tokio_monitoring, - start_admin_service, start_api_service, start_beacon_client, start_db_service, - start_housekeeper, + RelayNetworkManager, 
S3PayloadSaver, SimRequest, SimResult, SimulatorTile, + SubmissionDataWithSpan, WebsiteService, spawn_tokio_monitoring, start_admin_service, + start_api_service, start_beacon_client, start_db_service, start_housekeeper, }; use helix_types::BlsKeypair; use tikv_jemallocator::Jemalloc; @@ -228,12 +227,10 @@ async fn run(instance_id: String, config: RelayConfig, keypair: BlsKeypair) -> e ), ); - let sim_inbound = Arc::new(SharedVector::::with_capacity( - MAX_SUBMISSIONS_PER_SLOT, - )); - let sim_outbound = Arc::new(SharedVector::::with_capacity( - MAX_SUBMISSIONS_PER_SLOT, - )); + let sim_inbound = + Arc::new(SharedVector::::with_capacity(MAX_SUBMISSIONS_PER_SLOT)); + let sim_outbound = + Arc::new(SharedVector::::with_capacity(MAX_SUBMISSIONS_PER_SLOT)); let (accept_optimistic, failsafe_triggered, sim_tile) = SimulatorTile::create( config.simulators.clone(), diff --git a/crates/relay/src/network/messages.rs b/crates/relay/src/network/messages.rs index 20455ead7..849a2f443 100644 --- a/crates/relay/src/network/messages.rs +++ b/crates/relay/src/network/messages.rs @@ -243,9 +243,10 @@ mod tests { // Check that old messages still deserialize correctly let old_message = RawNetworkMessage::Other(NetworkMessage::InclusionList( - InclusionListMessage::Local(InclusionListMessageInfo::new(123, InclusionList { - txs: vec![Transaction(Bytes::from([0, 6, 5]))].into(), - })), + InclusionListMessage::Local(InclusionListMessageInfo::new( + 123, + InclusionList { txs: vec![Transaction(Bytes::from([0, 6, 5]))].into() }, + )), )); let serialized = serde_json::to_string(&old_message).unwrap(); let _: NewRawNetworkMessage = serde_json::from_str(&serialized).unwrap(); diff --git a/crates/relay/src/simulator/client.rs b/crates/relay/src/simulator/client.rs index bbe3b0a5b..fa558d6e0 100644 --- a/crates/relay/src/simulator/client.rs +++ b/crates/relay/src/simulator/client.rs @@ -1,7 +1,7 @@ use alloy_primitives::{Address, U256}; use helix_common::{ SimulatorConfig, - 
simulator::{BlockSimError, BlockSimRequest, SimRequest}, + simulator::{BlockSimError, JsonValidationRequest, SszValidationRequest}, }; use helix_types::ForkName; use reqwest::{ @@ -13,7 +13,7 @@ use serde_json::{Value, json}; use ssz::Encode; use tracing::{debug, error}; -use crate::simulator::{BlockMergeRequest, BlockMergeResponse}; +use crate::simulator::{BlockMergeResponse, MergeRequest}; #[derive(Debug, serde::Serialize, serde::Deserialize)] struct JsonRpcError { @@ -64,7 +64,7 @@ impl SimulatorClient { } pub async fn do_json_sim_request( - request: &BlockSimRequest, + request: &JsonValidationRequest, is_top_bid: bool, sim_method: &str, to_send: RequestBuilder, @@ -101,7 +101,7 @@ impl SimulatorClient { } pub async fn do_sim_request( - ssz_req: &SimRequest, + ssz_req: &SszValidationRequest, is_top_bid: bool, to_send: RequestBuilder, ) -> Result<(), BlockSimError> { @@ -159,7 +159,7 @@ impl SimulatorClient { } pub async fn do_merge_request( - request: &BlockMergeRequest, + request: &MergeRequest, to_send: RequestBuilder, ) -> Result { let rpc_payload = json!({ @@ -217,13 +217,16 @@ mod test { #[tokio::test] async fn balance_request() { - let sim_client = super::SimulatorClient::new(reqwest::Client::new(), SimulatorConfig { - url: "http://54.175.81.132:8545".into(), - namespace: "relay".into(), - is_merging_simulator: false, - max_concurrent_tasks: 1, - ssz_url: None, - }); + let sim_client = super::SimulatorClient::new( + reqwest::Client::new(), + SimulatorConfig { + url: "http://54.175.81.132:8545".into(), + namespace: "relay".into(), + is_merging_simulator: false, + max_concurrent_tasks: 1, + ssz_url: None, + }, + ); let builder_address = super::Address::from_hex("0xD9d3A3f47a56a987A8119b15C994Bc126337dd27").unwrap(); let builder_balance = sim_client.balance_request(&builder_address).await; diff --git a/crates/relay/src/simulator/mod.rs b/crates/relay/src/simulator/mod.rs index b85a3d808..2a0800351 100644 --- a/crates/relay/src/simulator/mod.rs +++ 
b/crates/relay/src/simulator/mod.rs @@ -4,7 +4,7 @@ use alloy_primitives::{Address, B256, U256}; use helix_common::{ SubmissionTrace, bid_submission::OptimisticVersion, - simulator::{BlockSimError, SimRequest}, + simulator::{BlockSimError, SszValidationRequest}, }; use helix_types::{ BlsPublicKeyBytes, BuilderInclusionResult, ExecutionPayload, ExecutionRequests, @@ -30,7 +30,7 @@ pub struct BlockMergeRequestRef<'a> { } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct BlockMergeRequest { +pub struct MergeRequest { pub bid_slot: u64, /// The serialized request pub request: serde_json::Value, @@ -38,7 +38,7 @@ pub struct BlockMergeRequest { pub block_hash: B256, } -pub type BlockMergeResult = (usize, Result); +pub type MergeResult = (usize, Result); #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct BlockMergeResponse { @@ -54,9 +54,9 @@ pub struct BlockMergeResponse { } #[derive(Clone)] -pub struct SimulatorRequest { +pub struct ValidationRequest { pub is_optimistic: bool, - pub request: SimRequest, + pub request: SszValidationRequest, pub is_top_bid: bool, pub bid_slot: u64, pub builder_pubkey: BlsPublicKeyBytes, @@ -69,18 +69,18 @@ pub struct SimulatorRequest { } /// Large payload stored in `SharedVector` for auctioneer → sim tile transfer. -pub enum SimInboundPayload { - SimRequest { req: Box, fast_track: bool }, - MergeRequest(BlockMergeRequest), +pub enum SimRequest { + Validate { req: Box, fast_track: bool }, + Merge(MergeRequest), } /// Large payload stored in `SharedVector` for sim tile → auctioneer transfer. 
-pub enum SimOutboundPayload { - SimResult(crate::simulator::tile::SimulationResult), - MergeResult(BlockMergeResult), +pub enum SimResult { + Validate(crate::simulator::tile::ValidationResult), + Merge(MergeResult), } -impl SimulatorRequest { +impl ValidationRequest { pub fn on_receive_ns(&self) -> u64 { self.trace.receive_ns.0 } diff --git a/crates/relay/src/simulator/tile.rs b/crates/relay/src/simulator/tile.rs index 5741dec48..d38444f1d 100644 --- a/crates/relay/src/simulator/tile.rs +++ b/crates/relay/src/simulator/tile.rs @@ -18,7 +18,7 @@ use helix_common::{ is_local_dev, metrics::SimulatorMetrics, record_submission_step, - simulator::{BlockSimError, BlockSimRequest, SubmissionFormat}, + simulator::{BlockSimError, JsonValidationRequest, SubmissionFormat}, spawn_tracked, validator_preferences::{Filtering, ValidatorPreferences}, }; @@ -27,10 +27,8 @@ use ssz::Encode as _; use tracing::{debug, error, info, warn}; use crate::{ - HelixSpine, - simulator::{ - BlockMergeRequest, SimInboundPayload, SimOutboundPayload, client::SimulatorClient, - }, + HelixSpine, SimRequest, ValidationRequest, + simulator::{MergeRequest, SimResult, client::SimulatorClient}, spine::{ HelixSpineProducers, messages::{FromSimMsg, ToSimKind, ToSimMsg}, @@ -48,8 +46,8 @@ pub struct SimulatorTile { /// Internal channel: async tasks notify the sim tile when work completes. task_tx: crossbeam_channel::Sender, rx: crossbeam_channel::Receiver, - sim_inbound: Arc>, - sim_outbound: Arc>, + sim_requests: Arc>, + sim_results: Arc>, /// If we have any synced simulator pub accept_optimistic: Arc, /// If we failed to demote a builder in the DB @@ -74,12 +72,12 @@ impl Tile for SimulatorTile { // Consume inbound spine messages from the auctioneer. 
adapter.consume(|msg: ToSimMsg, _producers| match msg.kind { - ToSimKind::Request => match self.sim_inbound.get(msg.ix) { + ToSimKind::Request => match self.sim_requests.get(msg.ix) { Some(payload) => match payload.as_ref() { - SimInboundPayload::SimRequest { req, fast_track } => { + SimRequest::Validate { req, fast_track } => { self.handle_sim_request((**req).clone(), *fast_track); } - SimInboundPayload::MergeRequest(req) => { + SimRequest::Merge(req) => { self.handle_merge_request(req.clone()); } }, @@ -99,8 +97,8 @@ impl Tile for SimulatorTile { impl SimulatorTile { pub fn create( configs: Vec, - sim_inbound: Arc>, - sim_outbound: Arc>, + sim_requests: Arc>, + sim_results: Arc>, ) -> (Arc, Arc, Self) { let (task_tx, rx) = crossbeam_channel::bounded(512); @@ -158,8 +156,8 @@ impl SimulatorTile { local_telemetry: LocalTelemetry::default(), task_tx, rx, - sim_inbound, - sim_outbound, + sim_requests, + sim_results, accept_optimistic: accept_optimistic.clone(), failsafe_triggered: failsafe_triggered.clone(), }; @@ -177,7 +175,7 @@ impl SimulatorTile { self.accept_optimistic.store(new, Ordering::Relaxed); } - fn handle_sim_request(&mut self, req: crate::simulator::SimulatorRequest, fast_track: bool) { + fn handle_sim_request(&mut self, req: crate::simulator::ValidationRequest, fast_track: bool) { assert_eq!(req.bid_slot, self.last_bid_slot); self.local_telemetry.sims_reqs += 1; @@ -194,7 +192,7 @@ impl SimulatorTile { } } - fn handle_merge_request(&mut self, req: BlockMergeRequest) { + fn handle_merge_request(&mut self, req: MergeRequest) { self.local_telemetry.merge_reqs += 1; if let Some(id) = self.next_client(|s| s.can_merge()) { let sim = &mut self.simulators[id]; @@ -205,7 +203,7 @@ impl SimulatorTile { self.local_telemetry.max_in_flight.max(sim.pending); let timer = SimulatorMetrics::block_merge_timer(sim.client.endpoint()); let task_tx = self.task_tx.clone(); - let sim_outbound = self.sim_outbound.clone(); + let sim_results = self.sim_results.clone(); 
spawn_tracked!(async move { debug!(bid_slot = %req.bid_slot, block_hash = %req.block_hash, "sending merge request"); let res = SimulatorClient::do_merge_request(&req, to_send).await; @@ -216,7 +214,7 @@ impl SimulatorTile { } SimulatorMetrics::block_merge_status(res.is_ok()); - let result_ix = sim_outbound.push(SimOutboundPayload::MergeResult((id, res))); + let result_ix = sim_results.push(SimResult::Merge((id, res))); let _ = task_tx.try_send(SimTileInternalEvent::TaskDone { id, paused_until: None, @@ -242,14 +240,14 @@ impl SimulatorTile { producers.produce(FromSimMsg { ix: result_ix }); - if let Some(id) = self.next_client(|s| s.can_simulate()) && - let Some(req) = self.priority_requests.next_req().or(self.requests.next_req()) + if let Some(id) = self.next_client(|s| s.can_simulate()) + && let Some(req) = self.priority_requests.next_req().or(self.requests.next_req()) { self.spawn_sim(id, req); } } - fn spawn_sim(&mut self, id: usize, req: crate::simulator::SimulatorRequest) { + fn spawn_sim(&mut self, id: usize, req: ValidationRequest) { const PAUSE_DURATION: Duration = Duration::from_secs(60); let sim = &mut self.simulators[id]; @@ -268,7 +266,7 @@ impl SimulatorTile { self.local_telemetry.max_in_flight = self.local_telemetry.max_in_flight.max(sim.pending); let timer = SimulatorMetrics::timer(sim.client.endpoint()); let task_tx = self.task_tx.clone(); - let sim_outbound = self.sim_outbound.clone(); + let sim_results = self.sim_results.clone(); spawn_tracked!(async move { let start_sim = Instant::now(); let block_hash = req.submission.block_hash(); @@ -289,7 +287,7 @@ impl SimulatorTile { } else { Filtering::Global }; - let json_req = BlockSimRequest::new( + let json_req = JsonValidationRequest::new( req.request.registered_gas_limit, &req.submission, ValidatorPreferences { filtering, ..Default::default() }, @@ -354,7 +352,7 @@ impl SimulatorTile { version: req.version, }; - let result_ix = sim_outbound.push(SimOutboundPayload::SimResult((id, Some(inner)))); + 
let result_ix = sim_results.push(SimResult::Validate((id, Some(inner)))); let _ = task_tx.try_send(SimTileInternalEvent::TaskDone { id, paused_until, result_ix }); }); @@ -451,8 +449,8 @@ impl SimEntry { /// A lighter check to decide whether we should accept optimistic submissions fn can_simulate_light(&self) -> bool { - self.is_synced && - match self.paused_until { + self.is_synced + && match self.paused_until { Some(until) => Instant::now() > until, None => true, } @@ -484,7 +482,7 @@ struct LocalTelemetry { } // Sim id / Simulation Result, so we can use this for merging requests -pub type SimulationResult = (usize, Option); +pub type ValidationResult = (usize, Option); #[derive(Clone)] pub struct SimulationResultInner { pub result: Result<(), BlockSimError>, @@ -531,7 +529,7 @@ fn jump_hash(mut key: u64, n: usize) -> usize { /// Pending requests, we only keep the last one for each builder. struct PendingRequests { - reqs: Vec, + reqs: Vec, } impl PendingRequests { @@ -541,7 +539,7 @@ impl PendingRequests { fn store( &mut self, - req: crate::simulator::SimulatorRequest, + req: crate::simulator::ValidationRequest, local_telemetry: &mut LocalTelemetry, ) { if let Some(i) = self.reqs.iter().position(|r| r.builder_pubkey == req.builder_pubkey) { @@ -555,7 +553,7 @@ impl PendingRequests { local_telemetry.max_pending = local_telemetry.max_pending.max(self.reqs.len()); } - fn next_req(&mut self) -> Option { + fn next_req(&mut self) -> Option { let i = self.reqs.iter().enumerate().max_by_key(|(_, r)| r.sort_key()).map(|(i, _)| i)?; Some(self.reqs.swap_remove(i)) } diff --git a/crates/simulator/src/block_merging/error.rs b/crates/simulator/src/block_merging/error.rs index a64d8ac62..17e3f64bf 100644 --- a/crates/simulator/src/block_merging/error.rs +++ b/crates/simulator/src/block_merging/error.rs @@ -62,28 +62,28 @@ pub(crate) enum BlockMergingApiError { impl From for ErrorObject<'static> { fn from(error: BlockMergingApiError) -> Self { match error { - 
BlockMergingApiError::MissingProposerPayment | - BlockMergingApiError::InvalidProposerPayment | - BlockMergingApiError::NoSafeForBuilder(_) | - BlockMergingApiError::NotEnoughGasForPayment(_) | - BlockMergingApiError::InvalidSignatureInBaseBlock | - BlockMergingApiError::BaseBlockBlobLimitExceeded { .. } => { + BlockMergingApiError::MissingProposerPayment + | BlockMergingApiError::InvalidProposerPayment + | BlockMergingApiError::NoSafeForBuilder(_) + | BlockMergingApiError::NotEnoughGasForPayment(_) + | BlockMergingApiError::InvalidSignatureInBaseBlock + | BlockMergingApiError::BaseBlockBlobLimitExceeded { .. } => { invalid_params_rpc_err(error.to_string()) } - BlockMergingApiError::GetParent(_) | - BlockMergingApiError::BlobLimitReached | - BlockMergingApiError::NextEvmEnvFail | - BlockMergingApiError::BlockContext | - BlockMergingApiError::RevenueAllocationReverted | - BlockMergingApiError::ExecutionRequests | - BlockMergingApiError::ZeroRevenueForWinningBuilder | - BlockMergingApiError::ZeroMergedBlockRevenue | - BlockMergingApiError::EmptyBuilderSignerAccount(_) | - BlockMergingApiError::EmptyBuilderSafe(_) | - BlockMergingApiError::NoBalanceInBuilderSafe { .. } | - BlockMergingApiError::BuilderBalanceDeltaMismatch(_) | - BlockMergingApiError::Provider(_) => internal_rpc_err(error.to_string()), + BlockMergingApiError::GetParent(_) + | BlockMergingApiError::BlobLimitReached + | BlockMergingApiError::NextEvmEnvFail + | BlockMergingApiError::BlockContext + | BlockMergingApiError::RevenueAllocationReverted + | BlockMergingApiError::ExecutionRequests + | BlockMergingApiError::ZeroRevenueForWinningBuilder + | BlockMergingApiError::ZeroMergedBlockRevenue + | BlockMergingApiError::EmptyBuilderSignerAccount(_) + | BlockMergingApiError::EmptyBuilderSafe(_) + | BlockMergingApiError::NoBalanceInBuilderSafe { .. 
} + | BlockMergingApiError::BuilderBalanceDeltaMismatch(_) + | BlockMergingApiError::Provider(_) => internal_rpc_err(error.to_string()), BlockMergingApiError::Execution(err) => match err { error @ BlockExecutionError::Validation(_) => { diff --git a/crates/simulator/src/block_merging/mod.rs b/crates/simulator/src/block_merging/mod.rs index ddf340c81..ecf7b44cb 100644 --- a/crates/simulator/src/block_merging/mod.rs +++ b/crates/simulator/src/block_merging/mod.rs @@ -750,8 +750,8 @@ pub(crate) fn prepare_revenues( // We divide the revenue among the different bundle origins. for (origin, origin_revenue) in revenues { // Update the revenue, subtracting part of the payment cost - let actualized_revenue = (origin_revenue.revenue.widening_mul(expected_revenue) / - U512::from(total_revenue)) + let actualized_revenue = (origin_revenue.revenue.widening_mul(expected_revenue) + / U512::from(total_revenue)) .to(); let builder_revenue = distribution_config.merged_builder_split(actualized_revenue); updated_revenues diff --git a/crates/simulator/src/inclusion/inclusion_producer.rs b/crates/simulator/src/inclusion/inclusion_producer.rs index fe8fb4daf..53843b27e 100644 --- a/crates/simulator/src/inclusion/inclusion_producer.rs +++ b/crates/simulator/src/inclusion/inclusion_producer.rs @@ -74,10 +74,10 @@ fn handle_tx_event( } } } - FullTransactionEvent::Queued(tx_hash, _) | - FullTransactionEvent::Mined { tx_hash, .. } | - FullTransactionEvent::Discarded(tx_hash) | - FullTransactionEvent::Invalid(tx_hash) => { + FullTransactionEvent::Queued(tx_hash, _) + | FullTransactionEvent::Mined { tx_hash, .. } + | FullTransactionEvent::Discarded(tx_hash) + | FullTransactionEvent::Invalid(tx_hash) => { // Remove from tx mapping. 
pending_txs.remove(&tx_hash); } diff --git a/crates/simulator/src/inclusion/mod.rs b/crates/simulator/src/inclusion/mod.rs index 2dd3fdd81..23ffa5c99 100644 --- a/crates/simulator/src/inclusion/mod.rs +++ b/crates/simulator/src/inclusion/mod.rs @@ -1,3 +1,2 @@ pub mod api; pub mod inclusion_producer; -pub mod types; diff --git a/crates/simulator/src/inclusion/types.rs b/crates/simulator/src/inclusion/types.rs deleted file mode 100644 index 70605ff25..000000000 --- a/crates/simulator/src/inclusion/types.rs +++ /dev/null @@ -1,24 +0,0 @@ -use alloy_primitives::{Address, B256}; -use bytes::Bytes; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)] -pub struct InclusionList { - pub txs: Vec, -} - -impl InclusionList { - pub const fn _empty() -> Self { - Self { txs: vec![] } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)] -pub struct InclusionListTx { - pub hash: B256, - pub nonce: u64, - pub sender: Address, - pub gas_priority_fee: u64, - pub bytes: Bytes, - pub wait_time: u32, -} diff --git a/crates/simulator/src/ssz_server.rs b/crates/simulator/src/ssz_server.rs index 41f2dd421..e554ca7ee 100644 --- a/crates/simulator/src/ssz_server.rs +++ b/crates/simulator/src/ssz_server.rs @@ -1,6 +1,6 @@ use alloy_rpc_types::beacon::relay::{BuilderBlockValidationRequestV5, SignedBidSubmissionV5}; use axum::{Router, extract::State, http::StatusCode, response::IntoResponse, routing::post}; -use helix_common::simulator::{SimRequest, SubmissionFormat}; +use helix_common::simulator::{SszValidationRequest, SubmissionFormat}; use ssz::Decode; use tokio::net::TcpListener; use tracing::error; @@ -24,7 +24,7 @@ pub async fn run(api: ValidationApi, port: u16) { } async fn handler(State(api): State, body: axum::body::Bytes) -> impl IntoResponse { - let req = match SimRequest::from_ssz_bytes(&body) { + let req = match SszValidationRequest::from_ssz_bytes(&body) { Ok(r) => r, Err(e) => return (StatusCode::BAD_REQUEST, 
format!("ssz decode: {e:?}")).into_response(), }; @@ -34,8 +34,11 @@ async fn handler(State(api): State, body: axum::body::Bytes) -> i match SignedBidSubmissionV5::from_ssz_bytes(&req.signed_bid_submission) { Ok(s) => s, Err(e) => { - return (StatusCode::BAD_REQUEST, format!("signed bid submission decode: {e:?}")) - .into_response() + return ( + StatusCode::BAD_REQUEST, + format!("signed bid submission decode: {e:?}"), + ) + .into_response(); } } } diff --git a/crates/simulator/src/validation/error.rs b/crates/simulator/src/validation/error.rs index b075c6952..6d9f1aced 100644 --- a/crates/simulator/src/validation/error.rs +++ b/crates/simulator/src/validation/error.rs @@ -46,19 +46,19 @@ pub(crate) enum ValidationApiError { impl From for ErrorObject<'static> { fn from(error: ValidationApiError) -> Self { match error { - ValidationApiError::GasLimitMismatch(_) | - ValidationApiError::GasUsedMismatch(_) | - ValidationApiError::ParentHashMismatch(_) | - ValidationApiError::BlockHashMismatch(_) | - ValidationApiError::Blacklist(_) | - ValidationApiError::ProposerPayment | - ValidationApiError::InvalidBlobsBundle | - ValidationApiError::InclusionList | - ValidationApiError::Blob(_) => invalid_params_rpc_err(error.to_string()), + ValidationApiError::GasLimitMismatch(_) + | ValidationApiError::GasUsedMismatch(_) + | ValidationApiError::ParentHashMismatch(_) + | ValidationApiError::BlockHashMismatch(_) + | ValidationApiError::Blacklist(_) + | ValidationApiError::ProposerPayment + | ValidationApiError::InvalidBlobsBundle + | ValidationApiError::InclusionList + | ValidationApiError::Blob(_) => invalid_params_rpc_err(error.to_string()), - ValidationApiError::GetParent(_) | - ValidationApiError::Consensus(_) | - ValidationApiError::Provider(_) => internal_rpc_err(error.to_string()), + ValidationApiError::GetParent(_) + | ValidationApiError::Consensus(_) + | ValidationApiError::Provider(_) => internal_rpc_err(error.to_string()), ValidationApiError::Execution(err) => match err 
{ error @ BlockExecutionError::Validation(_) => { invalid_params_rpc_err(error.to_string()) diff --git a/crates/simulator/src/validation/mod.rs b/crates/simulator/src/validation/mod.rs index 83dd434fa..2de723607 100644 --- a/crates/simulator/src/validation/mod.rs +++ b/crates/simulator/src/validation/mod.rs @@ -428,8 +428,8 @@ impl ValidationApi { &self, mut blobs_bundle: BlobsBundleV1, ) -> Result, ValidationApiError> { - if blobs_bundle.commitments.len() != blobs_bundle.proofs.len() || - blobs_bundle.commitments.len() != blobs_bundle.blobs.len() + if blobs_bundle.commitments.len() != blobs_bundle.proofs.len() + || blobs_bundle.commitments.len() != blobs_bundle.blobs.len() { return Err(ValidationApiError::InvalidBlobsBundle); } @@ -521,8 +521,8 @@ impl ValidationApi { // Check block size as per EIP-7934 (only applies when Osaka hardfork is active) let chain_spec = self.provider.chain_spec(); - if chain_spec.is_osaka_active_at_timestamp(block.timestamp()) && - block.rlp_length() > MAX_RLP_BLOCK_SIZE + if chain_spec.is_osaka_active_at_timestamp(block.timestamp()) + && block.rlp_length() > MAX_RLP_BLOCK_SIZE { return Err(ValidationApiError::Consensus(ConsensusError::BlockTooLarge { rlp_length: block.rlp_length(), @@ -556,8 +556,8 @@ impl ValidationApi { .sealed_header_by_hash(parent_hash)? 
.ok_or_else(|| GetParentError::MissingParentBlock)?; - if latest_header.number().saturating_sub(parent_header.number()) > - self.validation_window + if latest_header.number().saturating_sub(parent_header.number()) + > self.validation_window { return Err(GetParentError::BlockTooOld); } diff --git a/crates/tcp-types/src/lib.rs b/crates/tcp-types/src/lib.rs index 121b1215c..9e8de5418 100644 --- a/crates/tcp-types/src/lib.rs +++ b/crates/tcp-types/src/lib.rs @@ -67,11 +67,15 @@ impl Compression { impl std::fmt::Display for Compression { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", match self { - Compression::None => "NONE", - Compression::Gzip => "GZIP", - Compression::Zstd => "ZSTD", - }) + write!( + f, + "{}", + match self { + Compression::None => "NONE", + Compression::Gzip => "GZIP", + Compression::Zstd => "ZSTD", + } + ) } } diff --git a/crates/types/src/blobs.rs b/crates/types/src/blobs.rs index 23fa5c221..ef6c970d7 100644 --- a/crates/types/src/blobs.rs +++ b/crates/types/src/blobs.rs @@ -67,8 +67,8 @@ impl ssz::Decode for BlobsBundle { let raw = BlobsBundleRaw::from_ssz_bytes(bytes)?; - if raw.proofs.len() == raw.blobs.len() * CELLS_PER_EXT_BLOB && - raw.commitments.len() == raw.blobs.len() + if raw.proofs.len() == raw.blobs.len() * CELLS_PER_EXT_BLOB + && raw.commitments.len() == raw.blobs.len() { Ok(Self { commitments: raw.commitments, proofs: raw.proofs, blobs: raw.blobs }) } else { @@ -113,8 +113,8 @@ impl BlobsBundle { &self, max_blobs_per_block: usize, ) -> Result<(), BlockValidationError> { - if self.commitments.len() != self.blobs.len() || - self.proofs.len() != self.blobs.len() * CELLS_PER_EXT_BLOB + if self.commitments.len() != self.blobs.len() + || self.proofs.len() != self.blobs.len() * CELLS_PER_EXT_BLOB { return Err(BlockValidationError::BlobsError(BlobsError::BundleMismatch { proofs: self.proofs.len(), diff --git a/crates/types/src/block_merging.rs b/crates/types/src/block_merging.rs index 
1ef5fd513..c698f850b 100644 --- a/crates/types/src/block_merging.rs +++ b/crates/types/src/block_merging.rs @@ -79,8 +79,8 @@ pub struct InvalidTxIndex; impl BundleOrder { pub fn validate(&self) -> Result<(), InvalidTxIndex> { - if self.reverting_txs.iter().any(|&i| i >= self.txs.len()) || - self.dropping_txs.iter().any(|&i| i >= self.txs.len()) + if self.reverting_txs.iter().any(|&i| i >= self.txs.len()) + || self.dropping_txs.iter().any(|&i| i >= self.txs.len()) { return Err(InvalidTxIndex); } diff --git a/crates/types/src/execution_payload.rs b/crates/types/src/execution_payload.rs index ea2fab125..c325e234b 100644 --- a/crates/types/src/execution_payload.rs +++ b/crates/types/src/execution_payload.rs @@ -161,13 +161,13 @@ impl ForkVersionDecode for ExecutionPayload { /// SSZ decode with explicit fork variant. fn from_ssz_bytes_by_fork(bytes: &[u8], fork_name: ForkName) -> Result { let builder_bid = match fork_name { - ForkName::Altair | - ForkName::Base | - ForkName::Bellatrix | - ForkName::Capella | - ForkName::Deneb | - ForkName::Electra | - ForkName::Gloas => { + ForkName::Altair + | ForkName::Base + | ForkName::Bellatrix + | ForkName::Capella + | ForkName::Deneb + | ForkName::Electra + | ForkName::Gloas => { return Err(ssz::DecodeError::BytesInvalid(format!( "unsupported fork for ExecutionPayloadHeader: {fork_name}", ))); diff --git a/crates/types/src/hydration.rs b/crates/types/src/hydration.rs index 310d3e13b..229a6beef 100644 --- a/crates/types/src/hydration.rs +++ b/crates/types/src/hydration.rs @@ -27,13 +27,13 @@ pub enum DehydratedBidSubmission { impl ForkVersionDecode for DehydratedBidSubmission { fn from_ssz_bytes_by_fork(bytes: &[u8], fork: ForkName) -> Result { match fork { - ForkName::Base | - ForkName::Altair | - ForkName::Bellatrix | - ForkName::Capella | - ForkName::Deneb | - ForkName::Electra | - ForkName::Gloas => Err(DecodeError::NoMatchingVariant), + ForkName::Base + | ForkName::Altair + | ForkName::Bellatrix + | ForkName::Capella + | 
ForkName::Deneb + | ForkName::Electra + | ForkName::Gloas => Err(DecodeError::NoMatchingVariant), ForkName::Fulu => DehydratedBidSubmissionFulu::from_ssz_bytes(bytes) .map(DehydratedBidSubmission::Fulu), } @@ -136,13 +136,13 @@ impl DehydratedBidSubmissionFuluWithAdjustments { impl ForkVersionDecode for DehydratedBidSubmissionFuluWithAdjustments { fn from_ssz_bytes_by_fork(bytes: &[u8], fork: ForkName) -> Result { match fork { - ForkName::Base | - ForkName::Altair | - ForkName::Bellatrix | - ForkName::Capella | - ForkName::Deneb | - ForkName::Gloas | - ForkName::Electra => Err(DecodeError::NoMatchingVariant), + ForkName::Base + | ForkName::Altair + | ForkName::Bellatrix + | ForkName::Capella + | ForkName::Deneb + | ForkName::Gloas + | ForkName::Electra => Err(DecodeError::NoMatchingVariant), ForkName::Fulu => DehydratedBidSubmissionFuluWithAdjustments::from_ssz_bytes(bytes), } } From 56e415f8293ccf4f8791fbc96ee9bc0471ad5696 Mon Sep 17 00:00:00 2001 From: owen Date: Fri, 13 Mar 2026 10:37:35 +0000 Subject: [PATCH 3/3] send network bytes direct to simulator --- Cargo.lock | 3 + crates/common/Cargo.toml | 3 + crates/common/src/api/builder_api.rs | 2 + crates/common/src/config.rs | 50 +- crates/common/src/decoder.rs | 595 ++++++++++++++++++ crates/common/src/lib.rs | 1 + crates/common/src/local_cache.rs | 38 +- crates/common/src/simulator.rs | 11 +- crates/common/src/task.rs | 4 +- crates/data-api/src/api.rs | 16 +- crates/data-api/src/error.rs | 14 +- crates/data-api/src/stats.rs | 12 +- crates/database/src/lib.rs | 8 +- crates/relay/src/api/builder/api.rs | 2 - crates/relay/src/api/builder/error.rs | 56 +- crates/relay/src/api/builder/submit_block.rs | 4 +- crates/relay/src/api/middleware/tracking.rs | 11 +- crates/relay/src/api/mod.rs | 1 - crates/relay/src/api/proposer/error.rs | 68 +- crates/relay/src/api/proposer/get_header.rs | 14 +- crates/relay/src/api/proposer/get_payload.rs | 6 +- crates/relay/src/auctioneer/bid_sorter.rs | 4 +- 
crates/relay/src/auctioneer/block_merger.rs | 25 +- crates/relay/src/auctioneer/context.rs | 2 +- crates/relay/src/auctioneer/get_payload.rs | 14 +- crates/relay/src/auctioneer/mod.rs | 57 +- crates/relay/src/auctioneer/submit_block.rs | 75 ++- crates/relay/src/auctioneer/types.rs | 66 +- crates/relay/src/auctioneer/worker.rs | 6 +- crates/relay/src/bid_decoder/decoder.rs | 566 ----------------- crates/relay/src/bid_decoder/mod.rs | 4 +- crates/relay/src/bid_decoder/tile.rs | 172 ++--- .../src/housekeeper/chain_event_updater.rs | 6 +- crates/relay/src/housekeeper/housekeeper.rs | 12 +- .../relay/src/housekeeper/primev_service.rs | 10 +- crates/relay/src/lib.rs | 6 +- crates/relay/src/main.rs | 15 +- crates/relay/src/network/messages.rs | 7 +- crates/relay/src/simulator/client.rs | 17 +- crates/relay/src/simulator/mod.rs | 44 +- crates/relay/src/simulator/tile.rs | 83 ++- crates/simulator/src/block_merging/error.rs | 38 +- crates/simulator/src/block_merging/mod.rs | 4 +- .../src/inclusion/inclusion_producer.rs | 8 +- crates/simulator/src/ssz_server.rs | 56 +- crates/simulator/src/validation/error.rs | 24 +- crates/simulator/src/validation/mod.rs | 12 +- crates/tcp-types/src/lib.rs | 43 +- crates/types/src/bid_submission.rs | 297 ++++++++- crates/types/src/blobs.rs | 8 +- crates/types/src/block_merging.rs | 15 +- crates/types/src/execution_payload.rs | 14 +- crates/types/src/hydration.rs | 28 +- 53 files changed, 1462 insertions(+), 1195 deletions(-) create mode 100644 crates/common/src/decoder.rs delete mode 100644 crates/relay/src/bid_decoder/decoder.rs diff --git a/Cargo.lock b/Cargo.lock index f252b5289..9599a3b03 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5567,6 +5567,7 @@ dependencies = [ "ethereum_ssz", "ethereum_ssz_derive", "eyre", + "flate2", "flux", "flux-utils", "futures", @@ -5587,6 +5588,7 @@ dependencies = [ "serde_yaml", "sha2 0.10.9", "ssz_types", + "strum 0.27.2", "teloxide", "thiserror 1.0.69", "tokio", @@ -5598,6 +5600,7 @@ dependencies = [ 
"tree_hash_derive", "url", "uuid 1.19.0", + "zstd 0.13.3", ] [[package]] diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index f2fbe5424..6cc423b4c 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -21,6 +21,7 @@ ethereum_serde_utils.workspace = true ethereum_ssz.workspace = true ethereum_ssz_derive.workspace = true eyre.workspace = true +flate2.workspace = true flux.workspace = true flux-utils.workspace = true futures.workspace = true @@ -41,6 +42,7 @@ serde_json.workspace = true serde_yaml.workspace = true sha2.workspace = true ssz_types.workspace = true +strum.workspace = true teloxide.workspace = true thiserror.workspace = true tokio.workspace = true @@ -52,3 +54,4 @@ tree_hash.workspace = true tree_hash_derive.workspace = true url.workspace = true uuid.workspace = true +zstd.workspace = true diff --git a/crates/common/src/api/builder_api.rs b/crates/common/src/api/builder_api.rs index 97f480770..772335c0b 100644 --- a/crates/common/src/api/builder_api.rs +++ b/crates/common/src/api/builder_api.rs @@ -17,6 +17,8 @@ use tree_hash_derive::TreeHash; use crate::{BuilderValidatorPreferences, api::proposer_api::ValidatorRegistrationInfo}; +pub const MAX_PAYLOAD_LENGTH: usize = 1024 * 1024 * 20; // 20MB + #[derive(Debug, Default, Clone, serde::Serialize, serde::Deserialize)] pub struct BuilderGetValidatorsResponseEntry { pub slot: Slot, diff --git a/crates/common/src/config.rs b/crates/common/src/config.rs index e72b42131..8b043ec58 100644 --- a/crates/common/src/config.rs +++ b/crates/common/src/config.rs @@ -415,33 +415,29 @@ impl RouterConfig { } // Replace BuilderApi, ProposerApi, DataApi with their real routes - self.replace_condensed_with_real( - Route::BuilderApi, - &[Route::GetValidators, Route::SubmitBlock, Route::GetTopBid, Route::GetInclusionList], - ); - - self.replace_condensed_with_real( - Route::ProposerApi, - &[ - Route::Status, - Route::RegisterValidators, - Route::GetHeader, - Route::GetPayload, - 
Route::GetPayloadV2, - ], - ); - - self.replace_condensed_with_real( - Route::DataApi, - &[ - Route::ProposerPayloadDelivered, - Route::ProposerHeaderDelivered, - Route::BuilderBidsReceived, - Route::ValidatorRegistration, - Route::DataAdjustments, - Route::MergedBlocks, - ], - ); + self.replace_condensed_with_real(Route::BuilderApi, &[ + Route::GetValidators, + Route::SubmitBlock, + Route::GetTopBid, + Route::GetInclusionList, + ]); + + self.replace_condensed_with_real(Route::ProposerApi, &[ + Route::Status, + Route::RegisterValidators, + Route::GetHeader, + Route::GetPayload, + Route::GetPayloadV2, + ]); + + self.replace_condensed_with_real(Route::DataApi, &[ + Route::ProposerPayloadDelivered, + Route::ProposerHeaderDelivered, + Route::BuilderBidsReceived, + Route::ValidatorRegistration, + Route::DataAdjustments, + Route::MergedBlocks, + ]); } pub fn enable_relay_network(&mut self) { diff --git a/crates/common/src/decoder.rs b/crates/common/src/decoder.rs new file mode 100644 index 000000000..d583b1af4 --- /dev/null +++ b/crates/common/src/decoder.rs @@ -0,0 +1,595 @@ +use std::{ + io::Read, + time::{Duration, Instant}, +}; + +use axum::response::{IntoResponse, Response}; +use flate2::read::GzDecoder; +use helix_types::{ + BidAdjustmentData, BlockMergingData, Compression, DehydratedBidSubmission, + DehydratedBidSubmissionFuluWithAdjustments, ForkName, ForkVersionDecode, MergeType, + SignedBidSubmission, SignedBidSubmissionWithAdjustments, SignedBidSubmissionWithMergingData, + Submission, +}; +use http::{ + HeaderMap, HeaderValue, StatusCode, + header::{ACCEPT, CONTENT_TYPE}, +}; +use serde::de::DeserializeOwned; +use ssz::Decode; +use ssz_derive::{Decode, Encode}; +use strum::{AsRefStr, EnumString}; +use tracing::{error, trace}; +use zstd::{ + stream::read::Decoder as ZstdDecoder, + zstd_safe::{CONTENTSIZE_ERROR, CONTENTSIZE_UNKNOWN, get_frame_content_size}, +}; + +use crate::{ + api::builder_api::MAX_PAYLOAD_LENGTH, + metrics::{ + BID_DECODING_LATENCY, 
BID_DECOMPRESS_SIZEHINT_REL_ERROR, DECOMPRESSION_LATENCY, + SUBMISSION_BY_COMPRESSION, SUBMISSION_BY_ENCODING, SUBMISSION_COMPRESSED_BYTES, + SUBMISSION_DECOMPRESSED_BYTES, + }, +}; + +#[derive(Debug, thiserror::Error)] +pub enum DecoderError { + #[error("json decode error: {0}")] + JsonDecodeError(#[from] serde_json::Error), + + #[error("ssz decode error: {0:?}")] + SszDecode(ssz::DecodeError), + + #[error("IO error: {0}")] + IOError(#[from] std::io::Error), + + #[error("failed to decode payload")] + PayloadDecode, +} + +impl IntoResponse for DecoderError { + fn into_response(self) -> Response { + (&self).into_response() + } +} + +impl IntoResponse for &DecoderError { + fn into_response(self) -> Response { + (self.http_status(), self.to_string()).into_response() + } +} + +impl From for DecoderError { + fn from(e: ssz::DecodeError) -> Self { + DecoderError::SszDecode(e) + } +} + +impl DecoderError { + pub fn http_status(&self) -> StatusCode { + match self { + DecoderError::JsonDecodeError(_) | + DecoderError::SszDecode(_) | + DecoderError::IOError(_) | + DecoderError::PayloadDecode => StatusCode::BAD_REQUEST, + } + } +} + +pub const HEADER_SUBMISSION_TYPE: &str = "x-submission-type"; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumString, AsRefStr)] +#[strum(serialize_all = "snake_case", ascii_case_insensitive)] +pub enum SubmissionType { + Default, + Merge, + Dehydrated, +} + +impl SubmissionType { + pub fn from_headers(header_map: &HeaderMap) -> Option { + let submission_type = header_map.get(HEADER_SUBMISSION_TYPE)?.to_str().ok()?; + submission_type.parse().ok() + } +} + +#[repr(u8)] +#[derive(Clone, Copy, Debug)] +pub enum Encoding { + Json = 0, + Ssz = 1, +} + +pub const HEADER_SSZ: &str = "application/octet-stream"; +const HEADER_ACCEPT_SSZ: &str = "application/octet-stream;q=1.0,application/json;q=0.9"; + +impl ssz::Encode for Encoding { + fn is_ssz_fixed_len() -> bool { + true + } + fn ssz_fixed_len() -> usize { + 1 + } + fn ssz_bytes_len(&self) -> usize 
{ + 1 + } + fn ssz_append(&self, buf: &mut Vec) { + buf.push(*self as u8); + } +} + +impl ssz::Decode for Encoding { + fn is_ssz_fixed_len() -> bool { + true + } + fn ssz_fixed_len() -> usize { + 1 + } + fn from_ssz_bytes(bytes: &[u8]) -> Result { + match bytes { + [0] => Ok(Encoding::Json), + [1] => Ok(Encoding::Ssz), + _ => Err(ssz::DecodeError::BytesInvalid(format!("invalid Encoding byte: {bytes:?}"))), + } + } +} + +impl Encoding { + pub fn from_content_type(headers: &HeaderMap) -> Self { + match headers.get(CONTENT_TYPE) { + Some(header) if header == HeaderValue::from_static(HEADER_SSZ) => Encoding::Ssz, + _ => Encoding::Json, + } + } + + pub fn from_accept(headers: &HeaderMap) -> Self { + match headers.get(ACCEPT) { + Some(header) + if header == HeaderValue::from_static(HEADER_SSZ) || + header == HeaderValue::from_static(HEADER_ACCEPT_SSZ) => + { + Encoding::Ssz + } + _ => Encoding::Json, + } + } +} + +#[derive(Debug, Clone, Encode, Decode)] +pub struct SubmissionDecoderParams { + pub compression: Compression, + pub encoding: Encoding, + pub merge_type: MergeType, + pub is_dehydrated: bool, + pub with_mergeable_data: bool, + pub with_adjustments: bool, + pub block_merging_dry_run: bool, + pub fork_name: ForkName, +} + +#[derive(Debug)] +pub struct SubmissionDecoder { + compression: Compression, + encoding: Encoding, + merge_type: MergeType, + is_dehydrated: bool, + with_mergeable_data: bool, + with_adjustments: bool, + block_merging_dry_run: bool, + fork_name: ForkName, + + bytes_before_decompress: usize, + bytes_after_decompress: usize, + estimated_decompress: usize, + + decompress_latency: Duration, + decode_latency: Duration, +} + +impl SubmissionDecoder { + pub fn new(params: &SubmissionDecoderParams) -> Self { + Self { + compression: params.compression, + encoding: params.encoding, + merge_type: params.merge_type, + is_dehydrated: params.is_dehydrated, + with_mergeable_data: params.with_mergeable_data, + with_adjustments: params.with_adjustments, + 
block_merging_dry_run: params.block_merging_dry_run, + fork_name: params.fork_name, + bytes_before_decompress: 0, + bytes_after_decompress: 0, + estimated_decompress: 0, + decompress_latency: Default::default(), + decode_latency: Default::default(), + } + } + + pub fn decompress( + &mut self, + payload: &[u8], + buf: &mut Vec, + ) -> Option> { + let start = Instant::now(); + self.bytes_before_decompress = payload.len(); + + match self.compression { + Compression::None => return None, + Compression::Gzip => { + let cap = gzip_size_hint(payload).unwrap_or(payload.len() * 2); + self.estimated_decompress = cap; + buf.clear(); + buf.reserve(cap); + let mut decoder = GzDecoder::new(payload).take(MAX_PAYLOAD_LENGTH as u64); + if let Err(e) = decoder.read_to_end(buf) { + return Some(Err(e.into())); + } + } + Compression::Zstd => { + let cap = zstd_size_hint(payload).unwrap_or(payload.len() * 2); + self.estimated_decompress = cap; + buf.clear(); + buf.reserve(cap); + let inner = match ZstdDecoder::new(payload) { + Ok(d) => d, + Err(e) => return Some(Err(e.into())), + }; + let mut decoder = inner.take(MAX_PAYLOAD_LENGTH as u64); + if let Err(e) = decoder.read_to_end(buf) { + return Some(Err(e.into())); + } + } + } + + self.bytes_after_decompress = buf.len(); + self.decompress_latency = start.elapsed(); + + Some(Ok(())) + } + + pub fn decode( + &mut self, + payload: &[u8], + buf: &mut Vec, + ) -> Result<(Submission, Option, Option), DecoderError> + { + let body: &[u8] = match self.decompress(payload, buf) { + None => payload, + Some(Ok(())) => buf, + Some(Err(e)) => return Err(e), + }; + + if self.is_dehydrated { + self.decode_dehydrated(body) + } else if self.with_mergeable_data { + self.decode_merge(body) + } else { + self.decode_default(body) + } + } + + fn decode_dehydrated( + &mut self, + body: &[u8], + ) -> Result<(Submission, Option, Option), DecoderError> + { + let (submission, bid_adjustment) = if self.with_adjustments { + let sub_with_adjustment: 
DehydratedBidSubmissionFuluWithAdjustments = + self.decode_by_fork(body, self.fork_name)?; + let (sub, adjustment_data) = sub_with_adjustment.split(); + + (sub, Some(adjustment_data)) + } else { + let submission: DehydratedBidSubmission = self.decode_by_fork(body, self.fork_name)?; + + (submission, None) + }; + + let merging_data = match self.merge_type { + MergeType::Mergeable => { + //Should this return an error instead? + error!("mergeable dehydrated submissions are not supported"); + None + } + MergeType::AppendOnly => { + Some(BlockMergingData::append_only(submission.fee_recipient())) + } + MergeType::None => { + if self.block_merging_dry_run { + Some(BlockMergingData::append_only(submission.fee_recipient())) + } else { + None + } + } + }; + + Ok((Submission::Dehydrated(submission), merging_data, bid_adjustment)) + } + + fn decode_merge( + &mut self, + body: &[u8], + ) -> Result<(Submission, Option, Option), DecoderError> + { + let sub_with_merging: SignedBidSubmissionWithMergingData = self._decode(body)?; + let merging_data = match self.merge_type { + MergeType::Mergeable => Some(sub_with_merging.merging_data), + //Handle append-only by creating empty mergeable orders + //this allows builder to switch between append-only and mergeable without changing + // submission alternatively we could reject or ignore append-only here if the + // submission is mergeable? 
+ MergeType::AppendOnly => Some(BlockMergingData { + allow_appending: sub_with_merging.merging_data.allow_appending, + builder_address: sub_with_merging.merging_data.builder_address, + merge_orders: vec![], + }), + MergeType::None => Some(sub_with_merging.merging_data), + }; + Ok((Submission::Full(sub_with_merging.submission), merging_data, None)) + } + + fn decode_default( + &mut self, + body: &[u8], + ) -> Result<(Submission, Option, Option), DecoderError> + { + let (submission, bid_adjustment) = if self.with_adjustments { + let sub_with_adjustment: SignedBidSubmissionWithAdjustments = self._decode(body)?; + let (sub, adjustment_data) = sub_with_adjustment.split(); + + (sub, Some(adjustment_data)) + } else { + let submission: SignedBidSubmission = self._decode(body)?; + + (submission, None) + }; + + let merging_data = match self.merge_type { + MergeType::Mergeable => { + //Should this return an error instead? + error!("mergeable submissions without mergeable data are not supported"); + None + } + MergeType::AppendOnly => { + Some(BlockMergingData::append_only(submission.fee_recipient())) + } + MergeType::None => { + if self.block_merging_dry_run { + Some(BlockMergingData::allow_all( + submission.fee_recipient(), + submission.num_txs(), + )) + } else { + None + } + } + }; + Ok((Submission::Full(submission), merging_data, bid_adjustment)) + } + + // TODO: pass a buffer pool to avoid allocations + fn _decode(&mut self, body: &[u8]) -> Result { + let start = Instant::now(); + let payload: T = match self.encoding { + Encoding::Ssz => T::from_ssz_bytes(body).map_err(DecoderError::SszDecode)?, + Encoding::Json => serde_json::from_slice(body)?, + }; + + self.decode_latency = start.elapsed().saturating_sub(self.decompress_latency); + self.record_metrics(); + + Ok(payload) + } + + pub fn decode_by_fork( + &mut self, + body: &[u8], + fork: ForkName, + ) -> Result { + let start = Instant::now(); + let payload: T = match self.encoding { + Encoding::Ssz => { + 
T::from_ssz_bytes_by_fork(body, fork).map_err(DecoderError::SszDecode)? + } + Encoding::Json => serde_json::from_slice(body)?, + }; + + self.decode_latency = start.elapsed().saturating_sub(self.decompress_latency); + self.record_metrics(); + + Ok(payload) + } + + fn record_metrics(&self) { + let compression_label = self.compression.as_str(); + SUBMISSION_BY_COMPRESSION.with_label_values(&[compression_label]).inc(); + + if self.compression != Compression::None { + SUBMISSION_COMPRESSED_BYTES + .with_label_values(&[compression_label]) + .inc_by(self.bytes_before_decompress as u64); + SUBMISSION_DECOMPRESSED_BYTES + .with_label_values(&[compression_label]) + .inc_by(self.bytes_after_decompress as u64); + DECOMPRESSION_LATENCY + .with_label_values(&[compression_label]) + .observe(self.decompress_latency.as_micros() as f64); + + if self.estimated_decompress > 0 { + let actual = self.bytes_after_decompress as f64; + let estimate = self.estimated_decompress as f64; + let error = (actual - estimate).abs() / actual.max(1.0); + BID_DECOMPRESS_SIZEHINT_REL_ERROR + .with_label_values(&[compression_label]) + .observe(error) + } + } + + let encoding_label = match self.encoding { + Encoding::Json => "json", + Encoding::Ssz => "ssz", + }; + SUBMISSION_BY_ENCODING.with_label_values(&[encoding_label]).inc(); + BID_DECODING_LATENCY + .with_label_values(&[encoding_label]) + .observe(self.decode_latency.as_micros() as f64); + + trace!( + size_compressed = self.bytes_before_decompress, + size_uncompressed = self.bytes_after_decompress, + compression =? self.compression, + decode_latency =? 
self.decode_latency, + "decoded payload" + ); + } +} + +fn zstd_size_hint(buf: &[u8]) -> Option { + match get_frame_content_size(buf) { + Ok(Some(size)) if size != CONTENTSIZE_ERROR && size != CONTENTSIZE_UNKNOWN => { + Some((size as usize).min(MAX_PAYLOAD_LENGTH)) + } + + Ok(_) | Err(_) => None, + } +} + +fn gzip_size_hint(buf: &[u8]) -> Option { + if buf.len() >= 4 { + let isize = u32::from_le_bytes(buf[buf.len() - 4..].try_into().ok()?); + Some((isize as usize).min(MAX_PAYLOAD_LENGTH)) + } else { + None + } +} + +#[cfg(test)] +mod tests { + use alloy_primitives::hex::FromHex; + use helix_types::{ + MergeType, SignedBidSubmission, SignedBidSubmissionWithMergingData, TestRandomSeed, + }; + use ssz::Encode; + + use super::*; + + // #[test] + // fn test_get_builder_pubkey() { + // let expected = + // BlsPublicKeyBytes::from_hex(" + // 0x81f8ed149a60b16f4b22ba759f0a5420caa753768341bb41b27c15eb9b219afa5494f7d7b72d18c1a1b2904c66d2a30c" + // ).unwrap(); + + // let data_json = + // include_bytes!("../../../types/src/testdata/signed-bid-submission-fulu-2.json"); + // let decoder = SubmissionDecoder { + // compression: Compression::Gzip, + // encoding: Encoding::Json, + // bytes_before_decompress: 0, + // bytes_after_decompress: 0, + // estimated_decompress: 0, + // decompress_latency: Default::default(), + // decode_latency: Default::default(), + // }; + + // let pubkey = decoder.extract_builder_pubkey(data_json, false).unwrap(); + // assert_eq!(pubkey, expected); + + // let data_ssz = + // include_bytes!("../../../types/src/testdata/signed-bid-submission-fulu.ssz"); + // let decoder = SubmissionDecoder { + // compression: Compression::Gzip, + // encoding: Encoding::Ssz, + // bytes_before_decompress: 0, + // bytes_after_decompress: 0, + // estimated_decompress: 0, + // decompress_latency: Default::default(), + // decode_latency: Default::default(), + // }; + + // let pubkey = decoder.extract_builder_pubkey(data_ssz, false).unwrap(); + // assert_eq!(pubkey, expected); + // 
} + + // #[test] + // fn test_get_builder_pubkey_merging() { + // let sub = SignedBidSubmission::test_random(); + // let sub = SignedBidSubmissionWithMergingData { + // submission: sub, + // merging_data: Default::default(), + // }; + + // let data_json = serde_json::to_vec(&sub).unwrap(); + // let decoder = SubmissionDecoder { + // compression: Compression::Gzip, + // encoding: Encoding::Json, + // bytes_before_decompress: 0, + // bytes_after_decompress: 0, + // estimated_decompress: 0, + // decompress_latency: Default::default(), + // decode_latency: Default::default(), + // }; + + // let pubkey_json = decoder.extract_builder_pubkey(data_json.as_slice(), true).unwrap(); + + // let data_ssz = sub.as_ssz_bytes(); + // let decoder = SubmissionDecoder { + // compression: Compression::Gzip, + // encoding: Encoding::Ssz, + // bytes_before_decompress: 0, + // bytes_after_decompress: 0, + // estimated_decompress: 0, + // decompress_latency: Default::default(), + // decode_latency: Default::default(), + // }; + + // let pubkey_ssz = decoder.extract_builder_pubkey(data_ssz.as_slice(), true).unwrap(); + + // assert_eq!(pubkey_json, pubkey_ssz) + // } + + #[test] + fn test_submission_type_serialization() { + assert_eq!(SubmissionType::Default.as_ref(), "default"); + assert_eq!(SubmissionType::Merge.as_ref(), "merge"); + assert_eq!(SubmissionType::Dehydrated.as_ref(), "dehydrated"); + } + + #[test] + fn test_submission_type_deserialization() { + assert_eq!("default".parse::().unwrap(), SubmissionType::Default); + assert_eq!("merge".parse::().unwrap(), SubmissionType::Merge); + assert_eq!("dehydrated".parse::().unwrap(), SubmissionType::Dehydrated); + + //Case shouldn't matter + assert_eq!("Default".parse::().unwrap(), SubmissionType::Default); + assert_eq!("Merge".parse::().unwrap(), SubmissionType::Merge); + assert_eq!("Dehydrated".parse::().unwrap(), SubmissionType::Dehydrated); + + // Test that invalid values fail + assert!("invalid".parse::().is_err()); + 
assert!("MergeAppendOnly".parse::().is_err()); // CamelCase should fail + } + + #[test] + fn test_merge_type_serialization() { + assert_eq!(MergeType::Mergeable.as_ref(), "mergeable"); + assert_eq!(MergeType::AppendOnly.as_ref(), "append_only"); + } + + #[test] + fn test_merge_type_deserialization() { + assert_eq!("mergeable".parse::().unwrap(), MergeType::Mergeable); + assert_eq!("append_only".parse::().unwrap(), MergeType::AppendOnly); + + //Case shouldn't matter + assert_eq!("Mergeable".parse::().unwrap(), MergeType::Mergeable); + assert_eq!("Append_Only".parse::().unwrap(), MergeType::AppendOnly); + + // Test that invalid values fail + assert!("invalid".parse::().is_err()); + assert!("AppendOnly".parse::().is_err()); // CamelCase should fail + } +} diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs index 7035e7a6c..dfd0a8529 100644 --- a/crates/common/src/lib.rs +++ b/crates/common/src/lib.rs @@ -8,6 +8,7 @@ pub mod bid_submission; pub mod builder_info; pub mod chain_info; pub mod config; +pub mod decoder; pub mod local_cache; pub mod metrics; pub mod proposer; diff --git a/crates/common/src/local_cache.rs b/crates/common/src/local_cache.rs index 13c0f8196..3c6fb60dd 100644 --- a/crates/common/src/local_cache.rs +++ b/crates/common/src/local_cache.rs @@ -69,17 +69,17 @@ pub enum AuctioneerError { impl IntoResponse for AuctioneerError { fn into_response(self) -> Response { let code = match self { - AuctioneerError::UnexpectedValueType - | AuctioneerError::CryptoError(_) - | AuctioneerError::FromUtf8Error(_) - | AuctioneerError::ParseIntError(_) - | AuctioneerError::FromHexError(_) - | AuctioneerError::PastSlotAlreadyDelivered - | AuctioneerError::AnotherPayloadAlreadyDeliveredForSlot - | AuctioneerError::SszDeserializeError(_) - | AuctioneerError::SliceConversionError(_) - | AuctioneerError::ExecutionPayloadNotFound - | AuctioneerError::BuilderNotFound { .. 
} => StatusCode::BAD_REQUEST, + AuctioneerError::UnexpectedValueType | + AuctioneerError::CryptoError(_) | + AuctioneerError::FromUtf8Error(_) | + AuctioneerError::ParseIntError(_) | + AuctioneerError::FromHexError(_) | + AuctioneerError::PastSlotAlreadyDelivered | + AuctioneerError::AnotherPayloadAlreadyDeliveredForSlot | + AuctioneerError::SszDeserializeError(_) | + AuctioneerError::SliceConversionError(_) | + AuctioneerError::ExecutionPayloadNotFound | + AuctioneerError::BuilderNotFound { .. } => StatusCode::BAD_REQUEST, }; (code, self.to_string()).into_response() @@ -250,16 +250,16 @@ impl LocalCache { registration: &SignedValidatorRegistration, ) -> bool { if let Some(existing_entry) = - self.validator_registration_cache.get(®istration.message.pubkey) - && existing_entry.registration_info.registration.message.timestamp - >= registration + self.validator_registration_cache.get(®istration.message.pubkey) && + existing_entry.registration_info.registration.message.timestamp >= + registration .message .timestamp - .saturating_sub(VALIDATOR_REGISTRATION_UPDATE_INTERVAL) - && existing_entry.registration_info.registration.message.fee_recipient - == registration.message.fee_recipient - && existing_entry.registration_info.registration.message.gas_limit - == registration.message.gas_limit + .saturating_sub(VALIDATOR_REGISTRATION_UPDATE_INTERVAL) && + existing_entry.registration_info.registration.message.fee_recipient == + registration.message.fee_recipient && + existing_entry.registration_info.registration.message.gas_limit == + registration.message.gas_limit { return false; } diff --git a/crates/common/src/simulator.rs b/crates/common/src/simulator.rs index c35ac64ab..79727955b 100644 --- a/crates/common/src/simulator.rs +++ b/crates/common/src/simulator.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use alloy_primitives::{B256, Bytes}; +use alloy_primitives::B256; use helix_types::{ BidTrace, BlobsBundle, BlsSignatureBytes, ExecutionPayload, ExecutionRequests, 
SignedBidSubmission, @@ -8,7 +8,10 @@ use helix_types::{ use ssz_derive::{Decode, Encode}; use thiserror::Error; -use crate::{ValidatorPreferences, api::builder_api::InclusionListWithMetadata}; +use crate::{ + ValidatorPreferences, api::builder_api::InclusionListWithMetadata, + decoder::SubmissionDecoderParams, +}; /// Wire format of `signed_bid_submission` in `SimRequest`. #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] @@ -136,8 +139,8 @@ pub struct SszValidationRequest { pub registered_gas_limit: u64, pub parent_beacon_block_root: B256, pub inclusion_list: InclusionListWithMetadata, - pub format: SubmissionFormat, - pub signed_bid_submission: Bytes, + pub decoder_params: Option, + pub signed_bid_submission: Vec, } // TODO: refactor this in a SignedBidSubmission + extra fields diff --git a/crates/common/src/task.rs b/crates/common/src/task.rs index 76febb4ad..5e48b3341 100644 --- a/crates/common/src/task.rs +++ b/crates/common/src/task.rs @@ -117,8 +117,8 @@ impl Cores { } fn remove(&mut self, thread: ThreadId) { - if let Some(core) = self.by_id.remove(&thread) - && let Some(count) = self.counts.get_mut(&core) + if let Some(core) = self.by_id.remove(&thread) && + let Some(count) = self.counts.get_mut(&core) { *count -= 1 } diff --git a/crates/data-api/src/api.rs b/crates/data-api/src/api.rs index d89930e5e..2b78c9e33 100644 --- a/crates/data-api/src/api.rs +++ b/crates/data-api/src/api.rs @@ -183,10 +183,10 @@ impl DataApi { Extension(cache): Extension, Query(mut params): Query, ) -> Result { - if params.slot.is_none() - && params.block_hash.is_none() - && params.block_number.is_none() - && params.builder_pubkey.is_none() + if params.slot.is_none() && + params.block_hash.is_none() && + params.block_number.is_none() && + params.builder_pubkey.is_none() { return Err(DataApiError::MissingFilter); } @@ -234,10 +234,10 @@ impl DataApi { Extension(cache): Extension, Query(mut params): Query, ) -> Result { - if params.slot.is_none() - && 
params.block_hash.is_none() - && params.block_number.is_none() - && params.builder_pubkey.is_none() + if params.slot.is_none() && + params.block_hash.is_none() && + params.block_number.is_none() && + params.builder_pubkey.is_none() { return Err(DataApiError::MissingFilter); } diff --git a/crates/data-api/src/error.rs b/crates/data-api/src/error.rs index d7e8ae357..ee9832440 100644 --- a/crates/data-api/src/error.rs +++ b/crates/data-api/src/error.rs @@ -27,13 +27,13 @@ pub enum DataApiError { impl IntoResponse for DataApiError { fn into_response(self) -> Response { let code = match self { - DataApiError::SlotAndCursor - | DataApiError::MissingFilter - | DataApiError::LimitReached { .. } - | DataApiError::ValidatorRegistrationNotFound { .. } - | DataApiError::BlockNumberNotSupported - | DataApiError::OrderByNotSupported - | DataApiError::BuilderPubkeyNotSupported => StatusCode::BAD_REQUEST, + DataApiError::SlotAndCursor | + DataApiError::MissingFilter | + DataApiError::LimitReached { .. } | + DataApiError::ValidatorRegistrationNotFound { .. 
} | + DataApiError::BlockNumberNotSupported | + DataApiError::OrderByNotSupported | + DataApiError::BuilderPubkeyNotSupported => StatusCode::BAD_REQUEST, DataApiError::InternalServerError => StatusCode::INTERNAL_SERVER_ERROR, }; diff --git a/crates/data-api/src/stats.rs b/crates/data-api/src/stats.rs index 5d096383c..4ec8ad679 100644 --- a/crates/data-api/src/stats.rs +++ b/crates/data-api/src/stats.rs @@ -155,12 +155,12 @@ impl Expiry> for _val: &Vec, _now: Instant, ) -> Option { - if key.slot.is_none() - && key.cursor.is_none() - && key.block_hash.is_none() - && key.block_number.is_none() - && key.proposer_pubkey.is_none() - && key.builder_pubkey.is_none() + if key.slot.is_none() && + key.cursor.is_none() && + key.block_hash.is_none() && + key.block_number.is_none() && + key.proposer_pubkey.is_none() && + key.builder_pubkey.is_none() { Some(Duration::from_secs(12)) } else { diff --git a/crates/database/src/lib.rs b/crates/database/src/lib.rs index 66c5363ee..0cd1b58db 100644 --- a/crates/database/src/lib.rs +++ b/crates/database/src/lib.rs @@ -99,8 +99,8 @@ async fn load_known_validators_with_snapshot( cache: &local_cache::LocalCache, snapshot_dir: Option<&std::path::Path>, ) { - if let Some(dir) = snapshot_dir - && let Some(set) = snapshot::try_load_known_validators(dir).await + if let Some(dir) = snapshot_dir && + let Some(set) = snapshot::try_load_known_validators(dir).await { info!(count = set.len(), "using known_validators snapshot"); *cache.known_validators_cache.write() = set; @@ -120,8 +120,8 @@ async fn load_validator_registrations_with_snapshot( cache: &local_cache::LocalCache, snapshot_dir: Option<&std::path::Path>, ) { - if let Some(dir) = snapshot_dir - && let Some(entries) = snapshot::try_load_validator_registrations(dir).await + if let Some(dir) = snapshot_dir && + let Some(entries) = snapshot::try_load_validator_registrations(dir).await { let count = entries.len(); info!(count, "using validator_registrations snapshot"); diff --git 
a/crates/relay/src/api/builder/api.rs b/crates/relay/src/api/builder/api.rs index c17b1ace0..d46c1828b 100644 --- a/crates/relay/src/api/builder/api.rs +++ b/crates/relay/src/api/builder/api.rs @@ -13,8 +13,6 @@ use crate::{ spine::messages::NewBidSubmission, }; -pub(crate) const MAX_PAYLOAD_LENGTH: usize = 1024 * 1024 * 20; // 20MB - pub struct BuilderApi { pub local_cache: Arc, pub db: DbHandle, diff --git a/crates/relay/src/api/builder/error.rs b/crates/relay/src/api/builder/error.rs index af5ebc870..27fcbaa80 100644 --- a/crates/relay/src/api/builder/error.rs +++ b/crates/relay/src/api/builder/error.rs @@ -1,5 +1,5 @@ use axum::response::{IntoResponse, Response}; -use helix_common::{local_cache::AuctioneerError, simulator::BlockSimError}; +use helix_common::{decoder::DecoderError, local_cache::AuctioneerError, simulator::BlockSimError}; use helix_database::error::DatabaseError; use helix_types::{BlockValidationError, BlsPublicKeyBytes, HydrationError, SigError}; use http::StatusCode; @@ -18,7 +18,7 @@ pub enum BuilderApiError { IOError(#[from] std::io::Error), #[error("failed to decode payload")] - PayloadDecode, + PayloadDecode(#[from] DecoderError), #[error("block validation: {0}")] BidValidation(#[from] BlockValidationError), @@ -83,25 +83,25 @@ impl IntoResponse for &BuilderApiError { impl BuilderApiError { pub fn http_status(&self) -> StatusCode { match self { - BuilderApiError::JsonDecodeError(_) - | BuilderApiError::IOError(_) - | BuilderApiError::SszDecode(_) - | BuilderApiError::PayloadDecode - | BuilderApiError::BidValidation(_) - | BuilderApiError::ProposerDutyNotFound - | BuilderApiError::HydrationError(_) - | BuilderApiError::SigError(_) - | BuilderApiError::SimOnNextSlot - | BuilderApiError::MergeableOrdersNotFound(_) - | BuilderApiError::InvalidBuilderPubkey(_, _) - | BuilderApiError::DeliveringPayload { .. 
} => StatusCode::BAD_REQUEST, - - BuilderApiError::InvalidApiKey - | BuilderApiError::UntrustedBuilderOnDehydratedPayload => StatusCode::UNAUTHORIZED, - - BuilderApiError::InternalError - | BuilderApiError::AuctioneerError(_) - | BuilderApiError::DatabaseError(_) => StatusCode::INTERNAL_SERVER_ERROR, + BuilderApiError::JsonDecodeError(_) | + BuilderApiError::IOError(_) | + BuilderApiError::SszDecode(_) | + BuilderApiError::PayloadDecode(_) | + BuilderApiError::BidValidation(_) | + BuilderApiError::ProposerDutyNotFound | + BuilderApiError::HydrationError(_) | + BuilderApiError::SigError(_) | + BuilderApiError::SimOnNextSlot | + BuilderApiError::MergeableOrdersNotFound(_) | + BuilderApiError::InvalidBuilderPubkey(_, _) | + BuilderApiError::DeliveringPayload { .. } => StatusCode::BAD_REQUEST, + + BuilderApiError::InvalidApiKey | + BuilderApiError::UntrustedBuilderOnDehydratedPayload => StatusCode::UNAUTHORIZED, + + BuilderApiError::InternalError | + BuilderApiError::AuctioneerError(_) | + BuilderApiError::DatabaseError(_) => StatusCode::INTERNAL_SERVER_ERROR, BuilderApiError::BlockSimulation(err) => match err { BlockSimError::Timeout | BlockSimError::SimulationDropped => { @@ -121,13 +121,13 @@ impl BuilderApiError { #[allow(clippy::match_like_matches_macro)] pub fn should_report(&self) -> bool { match self { - Self::DeliveringPayload { .. } - | Self::ProposerDutyNotFound - | Self::BidValidation(BlockValidationError::OutOfSequence { .. }) - | Self::BidValidation(BlockValidationError::AlreadyProcessingNewerPayload) - | Self::BidValidation(BlockValidationError::SubmissionForWrongSlot { .. }) - | Self::BidValidation(BlockValidationError::PrevRandaoMismatch { .. }) - | Self::SimOnNextSlot => false, + Self::DeliveringPayload { .. } | + Self::ProposerDutyNotFound | + Self::BidValidation(BlockValidationError::OutOfSequence { .. 
}) | + Self::BidValidation(BlockValidationError::AlreadyProcessingNewerPayload) | + Self::BidValidation(BlockValidationError::SubmissionForWrongSlot { .. }) | + Self::BidValidation(BlockValidationError::PrevRandaoMismatch { .. }) | + Self::SimOnNextSlot => false, _ => true, } diff --git a/crates/relay/src/api/builder/submit_block.rs b/crates/relay/src/api/builder/submit_block.rs index 6aff6cca5..af4990ea6 100644 --- a/crates/relay/src/api/builder/submit_block.rs +++ b/crates/relay/src/api/builder/submit_block.rs @@ -100,8 +100,8 @@ impl BuilderApi { } fn observe_client_to_server_latency(headers: &HeaderMap, receive_ns: u64) { - if let Some(send_ts) = headers.get(HEADER_SEND_TS) - && let Some(send_ts) = send_ts.to_str().ok().and_then(Nanos::from_rfc3339) + if let Some(send_ts) = headers.get(HEADER_SEND_TS) && + let Some(send_ts) = send_ts.to_str().ok().and_then(Nanos::from_rfc3339) { SUB_CLIENT_TO_SERVER_LATENCY .with_label_values(&["http"]) diff --git a/crates/relay/src/api/middleware/tracking.rs b/crates/relay/src/api/middleware/tracking.rs index 63c137237..4fcc62136 100644 --- a/crates/relay/src/api/middleware/tracking.rs +++ b/crates/relay/src/api/middleware/tracking.rs @@ -13,14 +13,15 @@ use axum::{ response::{IntoResponse, Response}, }; use bytes::Bytes; -use helix_common::{BodyTimings, RequestTimings, metrics::ApiMetrics, utils::utcnow_ns}; +use helix_common::{ + BodyTimings, RequestTimings, api::builder_api::MAX_PAYLOAD_LENGTH, metrics::ApiMetrics, + utils::utcnow_ns, +}; use http::header::CONTENT_LENGTH; use http_body::{Body as HttpBody, Frame}; use http_body_util::Limited; use pin_project_lite::pin_project; -use crate::api::builder::api::MAX_PAYLOAD_LENGTH; - pin_project! 
{ /// Timing wrapper that tracks latencies /// - Time spent waiting for sender (pending) @@ -140,8 +141,8 @@ async fn do_request(mut req: Request, next: Next, stats: Arc) .headers() .get(CONTENT_LENGTH) .and_then(|h| h.to_str().ok()) - .and_then(|s| s.parse::().ok()) - && len > MAX_PAYLOAD_LENGTH + .and_then(|s| s.parse::().ok()) && + len > MAX_PAYLOAD_LENGTH { return StatusCode::PAYLOAD_TOO_LARGE.into_response(); } diff --git a/crates/relay/src/api/mod.rs b/crates/relay/src/api/mod.rs index ee8e84ca3..6533108f8 100644 --- a/crates/relay/src/api/mod.rs +++ b/crates/relay/src/api/mod.rs @@ -33,6 +33,5 @@ pub const HEADER_API_TOKEN: &str = "x-api-token"; pub const HEADER_SEQUENCE: &str = "x-sequence"; pub const HEADER_HYDRATE: &str = "x-hydrate"; pub const HEADER_IS_MERGEABLE: &str = "x-mergeable"; -pub const HEADER_SUBMISSION_TYPE: &str = "x-submission-type"; pub const HEADER_MERGE_TYPE: &str = "x-merge-type"; pub const HEADER_WITH_ADJUSTMENTS: &str = "x-with-adjustments"; diff --git a/crates/relay/src/api/proposer/error.rs b/crates/relay/src/api/proposer/error.rs index 48615db78..d70087228 100644 --- a/crates/relay/src/api/proposer/error.rs +++ b/crates/relay/src/api/proposer/error.rs @@ -134,43 +134,43 @@ impl IntoResponse for ProposerApiError { fn into_response(self) -> Response { let code = match self { - ProposerApiError::NoBidPrepared - | ProposerApiError::GetHeaderRequestTooLate { .. } => StatusCode::NO_CONTENT, - - ProposerApiError::HyperError(_) - | ProposerApiError::AxumError(_) - | ProposerApiError::ToStrError(_) - | ProposerApiError::UnexpectedProposerIndex { .. } - | ProposerApiError::NoValidatorsCouldBeRegistered - | ProposerApiError::InvalidFork - | ProposerApiError::SerdeDecodeError(_) - | ProposerApiError::ProposerNotRegistered - | ProposerApiError::TimestampTooEarly { .. } - | ProposerApiError::TimestampTooFarInTheFuture { .. } - | ProposerApiError::RequestWrongSlot { .. 
} - | ProposerApiError::SlotTooNew - | ProposerApiError::GetPayloadRequestTooLate { .. } - | ProposerApiError::BlindedBlockAndPayloadHeaderMismatch - | ProposerApiError::UnsupportedBeaconChainVersion - | ProposerApiError::BeaconClientError(_) - | ProposerApiError::DatabaseError(_) - | ProposerApiError::AuctioneerError(_) - | ProposerApiError::EmptyRequest - | ProposerApiError::BlindedBlobsBundleLengthMismatch - | ProposerApiError::InternalSlotMismatchesWithSlotDuty { .. } - | ProposerApiError::InvalidBlindedBlockSlot { .. } - | ProposerApiError::BlobKzgCommitmentsMismatch - | ProposerApiError::SszError(_) - | ProposerApiError::SszDecodeError(_) - | ProposerApiError::SigError(_) - | ProposerApiError::DeliveringPayload - | ProposerApiError::GetPayloadAlreadyReceived - | ProposerApiError::RequestForPastSlot { .. } => StatusCode::BAD_REQUEST, + ProposerApiError::NoBidPrepared | + ProposerApiError::GetHeaderRequestTooLate { .. } => StatusCode::NO_CONTENT, + + ProposerApiError::HyperError(_) | + ProposerApiError::AxumError(_) | + ProposerApiError::ToStrError(_) | + ProposerApiError::UnexpectedProposerIndex { .. } | + ProposerApiError::NoValidatorsCouldBeRegistered | + ProposerApiError::InvalidFork | + ProposerApiError::SerdeDecodeError(_) | + ProposerApiError::ProposerNotRegistered | + ProposerApiError::TimestampTooEarly { .. } | + ProposerApiError::TimestampTooFarInTheFuture { .. } | + ProposerApiError::RequestWrongSlot { .. } | + ProposerApiError::SlotTooNew | + ProposerApiError::GetPayloadRequestTooLate { .. } | + ProposerApiError::BlindedBlockAndPayloadHeaderMismatch | + ProposerApiError::UnsupportedBeaconChainVersion | + ProposerApiError::BeaconClientError(_) | + ProposerApiError::DatabaseError(_) | + ProposerApiError::AuctioneerError(_) | + ProposerApiError::EmptyRequest | + ProposerApiError::BlindedBlobsBundleLengthMismatch | + ProposerApiError::InternalSlotMismatchesWithSlotDuty { .. } | + ProposerApiError::InvalidBlindedBlockSlot { .. 
} | + ProposerApiError::BlobKzgCommitmentsMismatch | + ProposerApiError::SszError(_) | + ProposerApiError::SszDecodeError(_) | + ProposerApiError::SigError(_) | + ProposerApiError::DeliveringPayload | + ProposerApiError::GetPayloadAlreadyReceived | + ProposerApiError::RequestForPastSlot { .. } => StatusCode::BAD_REQUEST, ProposerApiError::InvalidApiKey => StatusCode::UNAUTHORIZED, - ProposerApiError::InternalServerError - | ProposerApiError::NoExecutionPayloadFound => StatusCode::INTERNAL_SERVER_ERROR, + ProposerApiError::InternalServerError | + ProposerApiError::NoExecutionPayloadFound => StatusCode::INTERNAL_SERVER_ERROR, ProposerApiError::ServiceUnavailableError => StatusCode::SERVICE_UNAVAILABLE, diff --git a/crates/relay/src/api/proposer/get_header.rs b/crates/relay/src/api/proposer/get_header.rs index 0501f42ef..2cfd33964 100644 --- a/crates/relay/src/api/proposer/get_header.rs +++ b/crates/relay/src/api/proposer/get_header.rs @@ -9,6 +9,7 @@ use helix_common::{ api::proposer_api::GetHeaderParams, api_provider::{ApiProvider, TimingResult}, chain_info::ChainInfo, + decoder::{Encoding, HEADER_SSZ}, metrics::{BID_SIGNING_LATENCY, HEADER_TIMEOUT_FETCH, HEADER_TIMEOUT_SLEEP}, signing::RelaySigningContext, spawn_tracked, @@ -20,15 +21,10 @@ use ssz::Encode; use tracing::{Instrument, debug, error, info, trace, warn}; use super::ProposerApi; -use crate::{ - api::{ - Api, - proposer::{ - CONSENSUS_VERSION_HEADER, GET_HEADER_REQUEST_CUTOFF_MS, error::ProposerApiError, - }, - router::Terminating, - }, - bid_decoder::{Encoding, HEADER_SSZ}, +use crate::api::{ + Api, + proposer::{CONSENSUS_VERSION_HEADER, GET_HEADER_REQUEST_CUTOFF_MS, error::ProposerApiError}, + router::Terminating, }; impl ProposerApi { diff --git a/crates/relay/src/api/proposer/get_payload.rs b/crates/relay/src/api/proposer/get_payload.rs index 1a5324d6d..afe3a988d 100644 --- a/crates/relay/src/api/proposer/get_payload.rs +++ b/crates/relay/src/api/proposer/get_payload.rs @@ -6,6 +6,7 @@ use 
helix_common::{ Filtering, GetPayloadTrace, RequestTimings, api_provider::ApiProvider, chain_info::ChainInfo, + decoder::{Encoding, HEADER_SSZ}, spawn_tracked, utils::{extract_request_id, utcnow_ns}, }; @@ -28,7 +29,6 @@ use crate::{ }, auctioneer::{GetPayloadKind, GetPayloadResultData, PayloadBidData}, beacon::types::BroadcastValidation, - bid_decoder::{Encoding, HEADER_SSZ}, gossip::{BroadcastGetPayloadParams, BroadcastPayloadParams}, }; @@ -454,8 +454,8 @@ impl ProposerApi { if let Some(until_slot_start) = until_slot_start { info!("waiting until slot start t=0: {} ms", until_slot_start.as_millis()); sleep(until_slot_start).await; - } else if let Some(since_slot_start) = since_slot_start - && since_slot_start.as_millis() > GET_PAYLOAD_REQUEST_CUTOFF_MS as u128 + } else if let Some(since_slot_start) = since_slot_start && + since_slot_start.as_millis() > GET_PAYLOAD_REQUEST_CUTOFF_MS as u128 { return Err(ProposerApiError::GetPayloadRequestTooLate { cutoff: GET_PAYLOAD_REQUEST_CUTOFF_MS as u64, diff --git a/crates/relay/src/auctioneer/bid_sorter.rs b/crates/relay/src/auctioneer/bid_sorter.rs index ea7ecc750..b522fc531 100644 --- a/crates/relay/src/auctioneer/bid_sorter.rs +++ b/crates/relay/src/auctioneer/bid_sorter.rs @@ -286,8 +286,8 @@ impl BidSorter { continue; } - if let Some((curr, _)) = &state.curr_bid - && *curr == demoted + if let Some((curr, _)) = &state.curr_bid && + *curr == demoted { state.traverse_update_top_bid(self.curr_bid_slot, None, false, &self.top_bid_tx); } diff --git a/crates/relay/src/auctioneer/block_merger.rs b/crates/relay/src/auctioneer/block_merger.rs index 2968b689a..4942bb971 100644 --- a/crates/relay/src/auctioneer/block_merger.rs +++ b/crates/relay/src/auctioneer/block_merger.rs @@ -360,8 +360,8 @@ impl BlockMerger { original_tx_count: original_payload.execution_payload.transactions.len(), merged_tx_count: response.execution_payload.transactions.len(), original_blob_count: original_payload.blobs_bundle.blobs.len(), - 
merged_blob_count: original_payload.blobs_bundle.blobs.len() - + response.appended_blobs.len(), + merged_blob_count: original_payload.blobs_bundle.blobs.len() + + response.appended_blobs.len(), builder_inclusions: response.builder_inclusions, trace, }); @@ -411,13 +411,13 @@ impl BlockMerger { fn should_request_merge(&self) -> bool { let start_time = Instant::now(); - let has_new_data = self.best_mergeable_orders.has_new_orders() - || (self.best_mergeable_orders.has_orders() && self.has_new_base_block); + let has_new_data = self.best_mergeable_orders.has_new_orders() || + (self.best_mergeable_orders.has_orders() && self.has_new_base_block); if !has_new_data { return false; } - let res = utcnow_ms().saturating_sub(self.last_merge_request_time_ms) - >= MERGE_REQUEST_INTERVAL_MS; + let res = utcnow_ms().saturating_sub(self.last_merge_request_time_ms) >= + MERGE_REQUEST_INTERVAL_MS; record_step("should_request_merge", start_time.elapsed()); res } @@ -679,14 +679,11 @@ fn blobs_bundle_to_hashmap( .into_iter() .zip(bundle.iter_blobs()) .map(|(versioned_hash, (blob, commitment, proofs))| { - ( - versioned_hash, - BlobWithMetadata { - commitment: *commitment, - proofs: proofs.to_vec(), - blob: blob.clone(), - }, - ) + (versioned_hash, BlobWithMetadata { + commitment: *commitment, + proofs: proofs.to_vec(), + blob: blob.clone(), + }) }) .collect() } diff --git a/crates/relay/src/auctioneer/context.rs b/crates/relay/src/auctioneer/context.rs index df4ad0366..18a972ac5 100644 --- a/crates/relay/src/auctioneer/context.rs +++ b/crates/relay/src/auctioneer/context.rs @@ -30,7 +30,7 @@ use crate::{ block_merger::BlockMerger, types::{PayloadEntry, PendingPayload, SubmissionRef}, }, - simulator::{BlockMergeResponse, SimRequest, tile::ValidationResult}, + simulator::{SimRequest, tile::ValidationResult}, spine::{ HelixSpineProducers, messages::{SubmissionResultWithRef, ToSimKind, ToSimMsg}, diff --git a/crates/relay/src/auctioneer/get_payload.rs 
b/crates/relay/src/auctioneer/get_payload.rs index 1132c0774..ac3d4b1da 100644 --- a/crates/relay/src/auctioneer/get_payload.rs +++ b/crates/relay/src/auctioneer/get_payload.rs @@ -153,13 +153,13 @@ impl Context { slot_data: &SlotData, ) -> Result<(GetPayloadResponse, VersionedSignedProposal), ProposerApiError> { match blinded { - SignedBlindedBeaconBlock::Altair(_) - | SignedBlindedBeaconBlock::Base(_) - | SignedBlindedBeaconBlock::Bellatrix(_) - | SignedBlindedBeaconBlock::Capella(_) - | SignedBlindedBeaconBlock::Deneb(_) - | SignedBlindedBeaconBlock::Electra(_) - | SignedBlindedBeaconBlock::Gloas(_) => { + SignedBlindedBeaconBlock::Altair(_) | + SignedBlindedBeaconBlock::Base(_) | + SignedBlindedBeaconBlock::Bellatrix(_) | + SignedBlindedBeaconBlock::Capella(_) | + SignedBlindedBeaconBlock::Deneb(_) | + SignedBlindedBeaconBlock::Electra(_) | + SignedBlindedBeaconBlock::Gloas(_) => { Err(ProposerApiError::UnsupportedBeaconChainVersion) } SignedBlindedBeaconBlock::Fulu(blinded_block) => { diff --git a/crates/relay/src/auctioneer/mod.rs b/crates/relay/src/auctioneer/mod.rs index add363f2c..3582c2bae 100644 --- a/crates/relay/src/auctioneer/mod.rs +++ b/crates/relay/src/auctioneer/mod.rs @@ -34,21 +34,11 @@ use helix_types::Slot; use rustc_hash::FxHashMap; use tracing::{debug, error, info, trace, warn}; pub use types::{ - Event, GetPayloadResultData, PayloadBidData, PayloadEntry, SlotData, Submission, - SubmissionData, SubmissionPayload, + Event, GetPayloadResultData, PayloadBidData, PayloadEntry, SlotData, SubmissionData, + SubmissionPayload, }; pub use worker::RegWorker; -pub use crate::auctioneer::{ - bid_adjustor::{BidAdjustor, DefaultBidAdjustor}, - bid_sorter::BidSorter, - block_merger::get_mergeable_orders, - context::{Context, send_submission_result}, - simulator::{ - BlockSimRequest, SimulatorRequest, client::SimulatorClient, manager::SimulatorManager, - }, - types::{InternalBidSubmissionHeader, SubmissionRef}, -}; use crate::{ HelixSpine, 
SubmissionDataWithSpan, api::{FutureBidSubmissionResult, builder::error::BuilderApiError, proposer::ProposerApiError}, @@ -66,7 +56,7 @@ pub use crate::{ bid_sorter::BidSorter, block_merger::get_mergeable_orders, context::{Context, send_submission_result}, - types::{InternalBidSubmission, InternalBidSubmissionHeader, SubmissionRef}, + types::{InternalBidSubmissionHeader, SubmissionRef}, }, simulator::{SimulatorTile, ValidationRequest, client::SimulatorClient, *}, }; @@ -77,7 +67,7 @@ pub struct Auctioneer { tel: Telemetry, event_rx: crossbeam_channel::Receiver, decoded: Arc>, - sim_outbound: Arc>, + sim_results: Arc>, } impl Auctioneer { @@ -94,15 +84,15 @@ impl Auctioneer { future_results: Arc>, decoded: Arc>, auctioneer_handle: AuctioneerHandle, - sim_inbound: Arc>, - sim_outbound: Arc>, + sim_requests: Arc>, + sim_results: Arc>, accept_optimistic: Arc, failsafe_triggered: Arc, ) -> Self { let ctx = Context::new( chain_info, config, - sim_inbound, + sim_requests, accept_optimistic, failsafe_triggered, db, @@ -118,7 +108,7 @@ impl Auctioneer { tel: Telemetry::new(format!("auctioneer_{id}")), event_rx, decoded, - sim_outbound, + sim_results, } } } @@ -142,7 +132,7 @@ impl Tile for Auctioneer { }); adapter.consume(|msg: FromSimMsg, producers| { - let Some(payload) = self.sim_outbound.get(msg.ix) else { + let Some(payload) = self.sim_results.get(msg.ix) else { tracing::error!(?msg, "sim outbound payload not found"); return; }; @@ -272,8 +262,8 @@ impl State { Ordering::Less => (), Ordering::Equal => { // check fork - if let Some(update) = payload_attributes - && !slot_data.payload_attributes_map.contains_key(&update.parent_hash) + if let Some(update) = payload_attributes && + !slot_data.payload_attributes_map.contains_key(&update.parent_hash) { info!(bid_slot =% slot_data.bid_slot, received =? update.parent_hash, sorting =? 
slot_data.payload_attributes_map.keys(), "sorting for an additional fork"); @@ -326,8 +316,8 @@ impl State { "gap in slot data received (broadcast)" ); - if let Some(attributes) = &payload_attributes - && &attributes.parent_hash != block_hash + if let Some(attributes) = &payload_attributes && + &attributes.parent_hash != block_hash { warn!( maybe_missed_slot =% slot_data.bid_slot, @@ -367,7 +357,12 @@ impl State { let _guard = submission_data.span.enter(); trace!("received in auctioneer"); - ctx.handle_submission(&submission_data.submission_data, slot_data, producers); + ctx.handle_submission( + submission_data.original_data_ref, + &submission_data.submission_data, + slot_data, + producers, + ); trace!("finished processing"); drop(_guard); @@ -391,8 +386,8 @@ impl State { // proposer is on a different fork warn!(req =% params.parent_hash, have =? slot_data.payload_attributes_map.keys(), "get header for unknown parent hash"); let _ = res_tx.send(Err(ProposerApiError::NoBidPrepared)); - } else if slot_data.registration_data.entry.registration.message.pubkey - != params.pubkey + } else if slot_data.registration_data.entry.registration.message.pubkey != + params.pubkey { warn!(req =% params.pubkey, this =% slot_data.registration_data.entry.registration.message.pubkey, "get header for mismatched proposer"); let _ = res_tx.send(Err(ProposerApiError::NoBidPrepared)); @@ -441,8 +436,8 @@ impl State { // sim result (State::Sorting(slot_data), Event::SimResult(mut result)) => { let already_sent = - result.1.as_ref().is_some_and(|r| r.submission.slot() == slot_data.bid_slot) - && ctx.sort_simulation_result(&mut result, producers); + result.1.as_ref().is_some_and(|r| r.submission.slot() == slot_data.bid_slot) && + ctx.sort_simulation_result(&mut result, producers); ctx.handle_simulation_result(result, already_sent, producers); } @@ -507,9 +502,9 @@ impl State { // gossiped payload, proposer equivocating? 
(State::Broadcasting { block_hash, slot_data }, Event::GossipPayload(payload)) => { - if *block_hash == payload.execution_payload.execution_payload.block_hash - && slot_data.bid_slot == payload.slot - && slot_data.proposer_pubkey() == &payload.proposer_pub_key + if *block_hash == payload.execution_payload.execution_payload.block_hash && + slot_data.bid_slot == payload.slot && + slot_data.proposer_pubkey() == &payload.proposer_pub_key { debug!("already broadcasting gossip payload"); } else { diff --git a/crates/relay/src/auctioneer/submit_block.rs b/crates/relay/src/auctioneer/submit_block.rs index c4a2bfe3b..ad2bb7100 100644 --- a/crates/relay/src/auctioneer/submit_block.rs +++ b/crates/relay/src/auctioneer/submit_block.rs @@ -1,19 +1,19 @@ use std::sync::atomic::Ordering; use alloy_primitives::{Address, B256, U256}; -use flux::timing::Nanos; -use flux::spine::SpineProducers; +use flux::{spine::SpineProducers, timing::Nanos}; +use flux_utils::DCacheRef; use helix_common::{ self, BuilderInfo, SubmissionTrace, api::builder_api::InclusionListWithMetadata, bid_submission::OptimisticVersion, + decoder::SubmissionDecoderParams, metrics::{BID_ADJUSTMENT_LATENCY, HYDRATION_CACHE_HITS}, record_submission_step, - simulator::{SszValidationRequest, SubmissionFormat}, }; use helix_types::{ BidAdjustmentData, BlockValidationError, MergeableOrdersWithPref, SignedBidSubmission, - SubmissionVersion, + Submission, SubmissionVersion, }; use tracing::trace; @@ -22,7 +22,7 @@ use crate::{ auctioneer::{ bid_adjustor::BidAdjustor, context::{Context, send_submission_result}, - types::{PayloadEntry, SlotData, Submission, SubmissionData, SubmissionRef}, + types::{PayloadEntry, SlotData, SubmissionData, SubmissionRef}, }, housekeeper::PayloadAttributesUpdate, simulator::{SimRequest, ValidationRequest, tile::ValidationResult}, @@ -35,6 +35,7 @@ use crate::{ impl Context { pub(super) fn handle_submission( &mut self, + original_data_ref: DCacheRef, submission_data: &SubmissionData, slot_data: 
&SlotData, producers: &mut HelixSpineProducers, @@ -47,12 +48,16 @@ impl Context { send_submission_result(producers, &self.future_results, submission_ref, Ok(())); }; - let (req, entry) = - self.prep_data_to_store_and_sim(validated, slot_data, is_optimistic); + let (req, entry) = self.prep_data_to_store_and_sim( + original_data_ref, + validated, + slot_data, + is_optimistic, + ); - if !self.completed_dry_run - && entry.is_adjustable() - && self.cache.adjustments_enabled.load(Ordering::Relaxed) + if !self.completed_dry_run && + entry.is_adjustable() && + self.cache.adjustments_enabled.load(Ordering::Relaxed) { let start = Nanos::now(); if let Some((adjusted_block, sim_request, _, strategy)) = @@ -70,8 +75,8 @@ impl Context { self.store_data_and_sim(req, entry, false, producers); - if self.config.block_merging_config.is_enabled - && let Some(data) = merging_data + if self.config.block_merging_config.is_enabled && + let Some(data) = merging_data { let base_block = data.block_hash; let is_top_bid = data.is_top_bid; @@ -200,9 +205,9 @@ impl Context { record_submission_step("validated", start_val.elapsed()); trace!("validated"); - let (optimistic_version, is_top_bid) = if self.accept_optimistic.load(Ordering::Relaxed) - && !self.failsafe_triggered.load(Ordering::Relaxed) - && self.should_process_optimistically(&submission, &builder_info, slot_data) + let (optimistic_version, is_top_bid) = if self.accept_optimistic.load(Ordering::Relaxed) && + !self.failsafe_triggered.load(Ordering::Relaxed) && + self.should_process_optimistically(&submission, &builder_info, slot_data) { let is_top_bid = self.bid_sorter.sort( submission_data.version, @@ -235,7 +240,7 @@ impl Context { is_top_bid, trace: submission_data.trace, bid_adjustment_data: submission_data.bid_adjustment_data, - sim_bytes: submission_data.sim_bytes, + decoder_params: submission_data.decoder_params, }; Ok((validated, optimistic_version, merging_data)) @@ -243,39 +248,33 @@ impl Context { fn 
prep_data_to_store_and_sim( &mut self, + original_data_ref: DCacheRef, validated: ValidatedData, slot_data: &SlotData, is_optimistic: bool, ) -> (ValidationRequest, PayloadEntry) { - let request = SszValidationRequest { - registered_gas_limit: slot_data.registration_data.entry.registration.message.gas_limit, + let req = ValidationRequest { + is_top_bid: validated.is_top_bid, + is_optimistic, apply_blacklist: slot_data.registration_data.entry.preferences.filtering.is_regional(), + bid_slot: validated.submission.slot().as_u64(), + registered_gas_limit: slot_data.registration_data.entry.registration.message.gas_limit, + builder_pubkey: *validated.submission.builder_public_key(), parent_beacon_block_root: validated .payload_attributes .parent_beacon_block_root .unwrap_or_default(), + submission_ref: validated.submission_ref, + version: validated.version, + tx_root: validated.tx_root, inclusion_list: slot_data .il .clone() .unwrap_or(InclusionListWithMetadata { txs: vec![] }), - format: validated.sim_bytes.as_ref().map(|(_, f)| *f).unwrap_or_default(), - signed_bid_submission: match validated.sim_bytes { - Some((bytes, _)) => alloy_primitives::Bytes(bytes), - None => ssz::Encode::as_ssz_bytes(&validated.submission).into(), - }, - }; - - let req = ValidationRequest { - is_optimistic, - submission_ref: validated.submission_ref, - request, - builder_pubkey: *validated.submission.builder_public_key(), - bid_slot: validated.submission.slot().as_u64(), - is_top_bid: validated.is_top_bid, - submission: validated.submission.clone(), trace: validated.trace, - tx_root: validated.tx_root, - version: validated.version, + submission: validated.submission.clone(), + original_data_ref, + decoder_params: validated.decoder_params, }; let entry = PayloadEntry::new_submission( @@ -326,8 +325,8 @@ impl Context { } if builder_info.is_optimistic && submission.message().value <= builder_info.collateral { - if slot_data.registration_data.entry.preferences.filtering.is_regional() - && 
!builder_info.can_process_regional_slot_optimistically() + if slot_data.registration_data.entry.preferences.filtering.is_regional() && + !builder_info.can_process_regional_slot_optimistically() { return false; } @@ -355,7 +354,7 @@ pub struct ValidatedData<'a> { pub is_top_bid: bool, pub trace: SubmissionTrace, pub bid_adjustment_data: Option, - pub sim_bytes: Option<(bytes::Bytes, SubmissionFormat)>, + pub decoder_params: SubmissionDecoderParams, } pub struct MergeData { diff --git a/crates/relay/src/auctioneer/types.rs b/crates/relay/src/auctioneer/types.rs index 1240668b8..ffa1e0118 100644 --- a/crates/relay/src/auctioneer/types.rs +++ b/crates/relay/src/auctioneer/types.rs @@ -13,16 +13,15 @@ use helix_common::{ builder_api::{BuilderGetValidatorsResponseEntry, InclusionListWithMetadata}, proposer_api::GetHeaderParams, }, + decoder::{Encoding, SubmissionDecoderParams, SubmissionType}, metrics::BID_CREATION_LATENCY, - simulator::SubmissionFormat, }; use helix_tcp_types::{BidSubmissionFlags, BidSubmissionHeader}; use helix_types::{ - BidAdjustmentData, BlsPublicKeyBytes, BuilderBid, Compression, DehydratedBidSubmission, - ExecutionPayload, ExecutionRequests, ForkName, GetPayloadResponse, MergeType, - MergeableOrdersWithPref, PayloadAndBlobs, SignedBidSubmission, SignedBlindedBeaconBlock, - SignedValidatorRegistration, Slot, SubmissionVersion, VersionedSignedProposal, - mock_public_key_bytes, + BidAdjustmentData, BlsPublicKeyBytes, BuilderBid, Compression, ExecutionPayload, + ExecutionRequests, ForkName, GetPayloadResponse, MergeType, MergeableOrdersWithPref, + PayloadAndBlobs, SignedBidSubmission, SignedBlindedBeaconBlock, SignedValidatorRegistration, + Slot, Submission, SubmissionVersion, VersionedSignedProposal, mock_public_key_bytes, }; use http::{ HeaderMap, HeaderValue, @@ -42,7 +41,6 @@ use crate::{ HEADER_SEQUENCE, HEADER_WITH_ADJUSTMENTS, proposer::ProposerApiError, }, auctioneer::MergeResult, - bid_decoder::{Encoding, SubmissionType}, 
gossip::BroadcastPayloadParams, housekeeper::PayloadAttributesUpdate, simulator::tile::ValidationResult, @@ -122,8 +120,8 @@ impl InternalBidSubmissionHeader { ) -> MergeType { match header_map.get(HEADER_MERGE_TYPE) { None => { - if sub_type.is_some_and(|sub_type| sub_type == SubmissionType::Merge) - || matches!(header_map.get(HEADER_IS_MERGEABLE), Some(header) if header == HeaderValue::from_static("true")) + if sub_type.is_some_and(|sub_type| sub_type == SubmissionType::Merge) || + matches!(header_map.get(HEADER_IS_MERGEABLE), Some(header) if header == HeaderValue::from_static("true")) { MergeType::Mergeable } else { @@ -202,9 +200,7 @@ pub struct SubmissionData { pub version: SubmissionVersion, pub withdrawals_root: B256, pub trace: SubmissionTrace, - /// Decompressed bytes to forward verbatim to the simulator, avoiding re-encoding. - /// `None` means the auctioneer must encode from `submission`. - pub sim_bytes: Option<(bytes::Bytes, SubmissionFormat)>, + pub decoder_params: SubmissionDecoderParams, } impl Deref for SubmissionData { @@ -215,52 +211,6 @@ impl Deref for SubmissionData { } } -#[allow(clippy::large_enum_variant)] -#[derive(Clone, Debug)] -pub enum Submission { - // received after sigverify - Full(SignedBidSubmission), - // need to validate do the validate_payload_ssz_lengths - Dehydrated(DehydratedBidSubmission), -} - -impl Submission { - pub fn bid_slot(&self) -> u64 { - match self { - Submission::Full(s) => s.slot().as_u64(), - Submission::Dehydrated(s) => s.slot(), - } - } - - pub fn builder_pubkey(&self) -> &BlsPublicKeyBytes { - match self { - Submission::Full(s) => &s.message().builder_pubkey, - Submission::Dehydrated(s) => s.builder_pubkey(), - } - } - - pub fn block_hash(&self) -> &B256 { - match self { - Submission::Full(s) => &s.message().block_hash, - Submission::Dehydrated(s) => s.block_hash(), - } - } - - pub fn withdrawal_root(&self) -> B256 { - match self { - Submission::Full(s) => s.withdrawals_root(), - Submission::Dehydrated(s) => 
s.withdrawal_root(), - } - } - - pub fn parent_hash(&self) -> &B256 { - match self { - Submission::Full(s) => s.parent_hash(), - Submission::Dehydrated(s) => s.parent_hash(), - } - } -} - #[derive(Clone)] pub struct GossipPayload { pub payload_and_blobs: PayloadAndBlobs, diff --git a/crates/relay/src/auctioneer/worker.rs b/crates/relay/src/auctioneer/worker.rs index 601e0475c..c566116ce 100644 --- a/crates/relay/src/auctioneer/worker.rs +++ b/crates/relay/src/auctioneer/worker.rs @@ -59,9 +59,9 @@ impl Default for Telemetry { Self { work: Default::default(), spin: Default::default(), - next_record: Instant::now() - + Self::REPORT_FREQ - + Duration::from_millis(utcnow_ns() % 10 * 5), // to scatter worker reports + next_record: Instant::now() + + Self::REPORT_FREQ + + Duration::from_millis(utcnow_ns() % 10 * 5), // to scatter worker reports loop_start: Instant::now(), loop_worked: Default::default(), } diff --git a/crates/relay/src/bid_decoder/decoder.rs b/crates/relay/src/bid_decoder/decoder.rs deleted file mode 100644 index b0e8479da..000000000 --- a/crates/relay/src/bid_decoder/decoder.rs +++ /dev/null @@ -1,566 +0,0 @@ -use std::{ - io::Read, - time::{Duration, Instant}, -}; - -use flate2::read::GzDecoder; -use flux::timing::Nanos; -use helix_common::{ - SubmissionTrace, - chain_info::ChainInfo, - metrics::{ - BID_DECODING_LATENCY, BID_DECOMPRESS_SIZEHINT_REL_ERROR, DECOMPRESSION_LATENCY, - SUBMISSION_BY_COMPRESSION, SUBMISSION_BY_ENCODING, SUBMISSION_COMPRESSED_BYTES, - SUBMISSION_DECOMPRESSED_BYTES, - }, - record_submission_step, -}; -use helix_types::{ - BidAdjustmentData, BlockMergingData, BlsPublicKeyBytes, Compression, DehydratedBidSubmission, - DehydratedBidSubmissionFuluWithAdjustments, ForkName, ForkVersionDecode, MergeType, - SignedBidSubmission, SignedBidSubmissionWithAdjustments, SignedBidSubmissionWithMergingData, -}; -use http::{ - HeaderMap, HeaderValue, - header::{ACCEPT, CONTENT_TYPE}, -}; -use serde::de::DeserializeOwned; -use ssz::Decode; -use 
strum::{AsRefStr, EnumString}; -use tracing::{error, trace}; -use zstd::{ - stream::read::Decoder as ZstdDecoder, - zstd_safe::{CONTENTSIZE_ERROR, CONTENTSIZE_UNKNOWN, get_frame_content_size}, -}; - -use crate::{ - api::{ - HEADER_SUBMISSION_TYPE, - builder::{api::MAX_PAYLOAD_LENGTH, error::BuilderApiError}, - }, - auctioneer::Submission, -}; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumString, AsRefStr)] -#[strum(serialize_all = "snake_case", ascii_case_insensitive)] -pub enum SubmissionType { - Default, - Merge, - Dehydrated, -} - -impl SubmissionType { - pub fn from_headers(header_map: &HeaderMap) -> Option { - let submission_type = header_map.get(HEADER_SUBMISSION_TYPE)?.to_str().ok()?; - submission_type.parse().ok() - } -} - -#[repr(u8)] -#[derive(Clone, Copy, Debug)] -pub enum Encoding { - Json = 0, - Ssz = 1, -} - -pub const HEADER_SSZ: &str = "application/octet-stream"; -const HEADER_ACCEPT_SSZ: &str = "application/octet-stream;q=1.0,application/json;q=0.9"; - -impl Encoding { - pub fn from_content_type(headers: &HeaderMap) -> Self { - match headers.get(CONTENT_TYPE) { - Some(header) if header == HeaderValue::from_static(HEADER_SSZ) => Encoding::Ssz, - _ => Encoding::Json, - } - } - - pub fn from_accept(headers: &HeaderMap) -> Self { - match headers.get(ACCEPT) { - Some(header) - if header == HeaderValue::from_static(HEADER_SSZ) - || header == HeaderValue::from_static(HEADER_ACCEPT_SSZ) => - { - Encoding::Ssz - } - _ => Encoding::Json, - } - } -} - -pub(super) struct DecodeFlags { - pub(super) skip_sigverify: bool, - pub(super) merge_type: MergeType, - pub(super) with_adjustments: bool, - pub(super) block_merging_dry_run: bool, -} - -pub(super) fn decode_dehydrated( - decoder: &mut SubmissionDecoder, - body: &[u8], - trace: &mut SubmissionTrace, - chain_info: &ChainInfo, - flags: &DecodeFlags, -) -> Result<(Submission, Option, Option), BuilderApiError> { - if !flags.skip_sigverify { - return Err(BuilderApiError::UntrustedBuilderOnDehydratedPayload); - 
} - - let (submission, bid_adjustment) = if flags.with_adjustments { - let sub_with_adjustment: DehydratedBidSubmissionFuluWithAdjustments = - decoder.decode_by_fork(body, chain_info.current_fork_name())?; - let (sub, adjustment_data) = sub_with_adjustment.split(); - - (sub, Some(adjustment_data)) - } else { - let submission: DehydratedBidSubmission = - decoder.decode_by_fork(body, chain_info.current_fork_name())?; - - (submission, None) - }; - - trace.decoded_ns = Nanos::now(); - - let merging_data = match flags.merge_type { - MergeType::Mergeable => { - //Should this return an error instead? - error!("mergeable dehydrated submissions are not supported"); - None - } - MergeType::AppendOnly => Some(BlockMergingData::append_only(submission.fee_recipient())), - MergeType::None => { - if flags.block_merging_dry_run { - Some(BlockMergingData::append_only(submission.fee_recipient())) - } else { - None - } - } - }; - - Ok((Submission::Dehydrated(submission), merging_data, bid_adjustment)) -} - -pub(super) fn decode_merge( - decoder: &mut SubmissionDecoder, - body: &[u8], - trace: &mut SubmissionTrace, - chain_info: &ChainInfo, - flags: &DecodeFlags, -) -> Result<(Submission, Option, Option), BuilderApiError> { - let sub_with_merging: SignedBidSubmissionWithMergingData = decoder.decode(body)?; - let mut upgraded = sub_with_merging.maybe_upgrade_to_fulu(chain_info.current_fork_name()); - trace.decoded_ns = Nanos::now(); - let merging_data = match flags.merge_type { - MergeType::Mergeable => Some(upgraded.merging_data), - //Handle append-only by creating empty mergeable orders - //this allows builder to switch between append-only and mergeable without changing - // submission alternatively we could reject or ignore append-only here if the - // submission is mergeable? 
- MergeType::AppendOnly => Some(BlockMergingData { - allow_appending: upgraded.merging_data.allow_appending, - builder_address: upgraded.merging_data.builder_address, - merge_orders: vec![], - }), - MergeType::None => Some(upgraded.merging_data), - }; - verify_and_validate(&mut upgraded.submission, flags.skip_sigverify, chain_info)?; - Ok((Submission::Full(upgraded.submission), merging_data, None)) -} - -pub(super) fn decode_default( - decoder: &mut SubmissionDecoder, - body: &[u8], - trace: &mut SubmissionTrace, - chain_info: &ChainInfo, - flags: &DecodeFlags, -) -> Result<(Submission, Option, Option), BuilderApiError> { - let (submission, bid_adjustment) = if flags.with_adjustments { - let sub_with_adjustment: SignedBidSubmissionWithAdjustments = decoder.decode(body)?; - let (sub, adjustment_data) = sub_with_adjustment.split(); - - (sub, Some(adjustment_data)) - } else { - let submission: SignedBidSubmission = decoder.decode(body)?; - - (submission, None) - }; - - let mut upgraded = submission.maybe_upgrade_to_fulu(chain_info.current_fork_name()); - trace.decoded_ns = Nanos::now(); - let merging_data = match flags.merge_type { - MergeType::Mergeable => { - //Should this return an error instead? 
- error!("mergeable dehydrated submissions are not supported"); - None - } - MergeType::AppendOnly => Some(BlockMergingData::append_only(upgraded.fee_recipient())), - MergeType::None => { - if flags.block_merging_dry_run { - Some(BlockMergingData::allow_all(upgraded.fee_recipient(), upgraded.num_txs())) - } else { - None - } - } - }; - verify_and_validate(&mut upgraded, flags.skip_sigverify, chain_info)?; - Ok((Submission::Full(upgraded), merging_data, bid_adjustment)) -} - -fn verify_and_validate( - submission: &mut SignedBidSubmission, - skip_sigverify: bool, - chain_info: &ChainInfo, -) -> Result<(), BuilderApiError> { - if !skip_sigverify { - trace!("verifying signature"); - let start_sig = Nanos::now(); - submission.verify_signature(chain_info.builder_domain)?; - trace!("signature ok"); - record_submission_step("signature", start_sig.elapsed()); - } - submission.validate_payload_ssz_lengths(chain_info.max_blobs_per_block())?; - Ok(()) -} - -#[derive(Debug)] -pub struct SubmissionDecoder { - compression: Compression, - encoding: Encoding, - - bytes_before_decompress: usize, - bytes_after_decompress: usize, - estimated_decompress: usize, - - decompress_latency: Duration, - decode_latency: Duration, -} - -impl SubmissionDecoder { - pub fn new(compression: Compression, encoding: Encoding) -> Self { - Self { - compression, - encoding, - bytes_before_decompress: 0, - bytes_after_decompress: 0, - estimated_decompress: 0, - decompress_latency: Default::default(), - decode_latency: Default::default(), - } - } - - // TODO: we could also just extract the bid trace and send that through before the rest is - // decoded after some light validation - /// Assume buf is already decompressed - pub fn extract_builder_pubkey( - &self, - buf: &[u8], - has_mergeable_data: bool, - ) -> Result { - match self.encoding { - Encoding::Json => { - #[derive(serde::Deserialize)] - struct Outer { - submission: Bid, - } - - #[derive(serde::Deserialize)] - struct Bid { - message: Message, - } 
- #[derive(serde::Deserialize)] - struct Message { - builder_pubkey: BlsPublicKeyBytes, - } - - let bid: Bid = if has_mergeable_data { - serde_json::from_slice::(buf)?.submission - } else { - serde_json::from_slice(buf)? - }; - - Ok(bid.message.builder_pubkey) - } - Encoding::Ssz => { - const BUILDER_PUBKEY_OFFSET: usize = 8 + /* slot */ - 32 + /* parent_hash */ - 32; /* block_hash */ - - if buf.len() < BUILDER_PUBKEY_OFFSET + BlsPublicKeyBytes::len_bytes() { - return Err(BuilderApiError::PayloadDecode); - } - - let pubkey = unsafe { - core::ptr::read_unaligned( - buf.as_ptr().add(BUILDER_PUBKEY_OFFSET) as *const BlsPublicKeyBytes - ) - }; - - Ok(pubkey) - } - } - } - - pub fn decompress( - &mut self, - payload: &[u8], - buf: &mut Vec, - ) -> Option> { - let start = Instant::now(); - self.bytes_before_decompress = payload.len(); - - match self.compression { - Compression::None => return None, - Compression::Gzip => { - let cap = gzip_size_hint(payload).unwrap_or(payload.len() * 2); - self.estimated_decompress = cap; - buf.clear(); - buf.reserve(cap); - let mut decoder = GzDecoder::new(payload).take(MAX_PAYLOAD_LENGTH as u64); - if let Err(e) = decoder.read_to_end(buf) { - return Some(Err(e.into())); - } - } - Compression::Zstd => { - let cap = zstd_size_hint(payload).unwrap_or(payload.len() * 2); - self.estimated_decompress = cap; - buf.clear(); - buf.reserve(cap); - let inner = match ZstdDecoder::new(payload) { - Ok(d) => d, - Err(e) => return Some(Err(e.into())), - }; - let mut decoder = inner.take(MAX_PAYLOAD_LENGTH as u64); - if let Err(e) = decoder.read_to_end(buf) { - return Some(Err(e.into())); - } - } - } - - self.bytes_after_decompress = buf.len(); - self.decompress_latency = start.elapsed(); - - Some(Ok(())) - } - - // TODO: pass a buffer pool to avoid allocations - pub fn decode( - &mut self, - body: &[u8], - ) -> Result { - let start = Instant::now(); - let payload: T = match self.encoding { - Encoding::Ssz => 
T::from_ssz_bytes(body).map_err(BuilderApiError::SszDecode)?, - Encoding::Json => serde_json::from_slice(body)?, - }; - - self.decode_latency = start.elapsed().saturating_sub(self.decompress_latency); - self.record_metrics(); - - Ok(payload) - } - - pub fn decode_by_fork( - &mut self, - body: &[u8], - fork: ForkName, - ) -> Result { - let start = Instant::now(); - let payload: T = match self.encoding { - Encoding::Ssz => { - T::from_ssz_bytes_by_fork(body, fork).map_err(BuilderApiError::SszDecode)? - } - Encoding::Json => serde_json::from_slice(body)?, - }; - - self.decode_latency = start.elapsed().saturating_sub(self.decompress_latency); - self.record_metrics(); - - Ok(payload) - } - - fn record_metrics(&self) { - let compression_label = self.compression.as_str(); - SUBMISSION_BY_COMPRESSION.with_label_values(&[compression_label]).inc(); - - if self.compression != Compression::None { - SUBMISSION_COMPRESSED_BYTES - .with_label_values(&[compression_label]) - .inc_by(self.bytes_before_decompress as u64); - SUBMISSION_DECOMPRESSED_BYTES - .with_label_values(&[compression_label]) - .inc_by(self.bytes_after_decompress as u64); - DECOMPRESSION_LATENCY - .with_label_values(&[compression_label]) - .observe(self.decompress_latency.as_micros() as f64); - - if self.estimated_decompress > 0 { - let actual = self.bytes_after_decompress as f64; - let estimate = self.estimated_decompress as f64; - let error = (actual - estimate).abs() / actual.max(1.0); - BID_DECOMPRESS_SIZEHINT_REL_ERROR - .with_label_values(&[compression_label]) - .observe(error) - } - } - - let encoding_label = match self.encoding { - Encoding::Json => "json", - Encoding::Ssz => "ssz", - }; - SUBMISSION_BY_ENCODING.with_label_values(&[encoding_label]).inc(); - BID_DECODING_LATENCY - .with_label_values(&[encoding_label]) - .observe(self.decode_latency.as_micros() as f64); - - trace!( - size_compressed = self.bytes_before_decompress, - size_uncompressed = self.bytes_after_decompress, - compression =? 
self.compression, - decode_latency =? self.decode_latency, - "decoded payload" - ); - } -} - -fn zstd_size_hint(buf: &[u8]) -> Option { - match get_frame_content_size(buf) { - Ok(Some(size)) if size != CONTENTSIZE_ERROR && size != CONTENTSIZE_UNKNOWN => { - Some((size as usize).min(MAX_PAYLOAD_LENGTH)) - } - - Ok(_) | Err(_) => None, - } -} - -fn gzip_size_hint(buf: &[u8]) -> Option { - if buf.len() >= 4 { - let isize = u32::from_le_bytes(buf[buf.len() - 4..].try_into().ok()?); - Some((isize as usize).min(MAX_PAYLOAD_LENGTH)) - } else { - None - } -} - -#[cfg(test)] -mod tests { - use alloy_primitives::hex::FromHex; - use helix_types::{ - MergeType, SignedBidSubmission, SignedBidSubmissionWithMergingData, TestRandomSeed, - }; - use ssz::Encode; - - use super::*; - - #[test] - fn test_get_builder_pubkey() { - let expected = BlsPublicKeyBytes::from_hex("0x81f8ed149a60b16f4b22ba759f0a5420caa753768341bb41b27c15eb9b219afa5494f7d7b72d18c1a1b2904c66d2a30c").unwrap(); - - let data_json = - include_bytes!("../../../types/src/testdata/signed-bid-submission-fulu-2.json"); - let decoder = SubmissionDecoder { - compression: Compression::Gzip, - encoding: Encoding::Json, - bytes_before_decompress: 0, - bytes_after_decompress: 0, - estimated_decompress: 0, - decompress_latency: Default::default(), - decode_latency: Default::default(), - }; - - let pubkey = decoder.extract_builder_pubkey(data_json, false).unwrap(); - assert_eq!(pubkey, expected); - - let data_ssz = include_bytes!("../../../types/src/testdata/signed-bid-submission-fulu.ssz"); - let decoder = SubmissionDecoder { - compression: Compression::Gzip, - encoding: Encoding::Ssz, - bytes_before_decompress: 0, - bytes_after_decompress: 0, - estimated_decompress: 0, - decompress_latency: Default::default(), - decode_latency: Default::default(), - }; - - let pubkey = decoder.extract_builder_pubkey(data_ssz, false).unwrap(); - assert_eq!(pubkey, expected); - } - - #[test] - fn test_get_builder_pubkey_merging() { - let sub = 
SignedBidSubmission::test_random(); - let sub = SignedBidSubmissionWithMergingData { - submission: sub, - merging_data: Default::default(), - }; - - let data_json = serde_json::to_vec(&sub).unwrap(); - let decoder = SubmissionDecoder { - compression: Compression::Gzip, - encoding: Encoding::Json, - bytes_before_decompress: 0, - bytes_after_decompress: 0, - estimated_decompress: 0, - decompress_latency: Default::default(), - decode_latency: Default::default(), - }; - - let pubkey_json = decoder.extract_builder_pubkey(data_json.as_slice(), true).unwrap(); - - let data_ssz = sub.as_ssz_bytes(); - let decoder = SubmissionDecoder { - compression: Compression::Gzip, - encoding: Encoding::Ssz, - bytes_before_decompress: 0, - bytes_after_decompress: 0, - estimated_decompress: 0, - decompress_latency: Default::default(), - decode_latency: Default::default(), - }; - - let pubkey_ssz = decoder.extract_builder_pubkey(data_ssz.as_slice(), true).unwrap(); - - assert_eq!(pubkey_json, pubkey_ssz) - } - - #[test] - fn test_submission_type_serialization() { - assert_eq!(SubmissionType::Default.as_ref(), "default"); - assert_eq!(SubmissionType::Merge.as_ref(), "merge"); - assert_eq!(SubmissionType::Dehydrated.as_ref(), "dehydrated"); - } - - #[test] - fn test_submission_type_deserialization() { - assert_eq!("default".parse::().unwrap(), SubmissionType::Default); - assert_eq!("merge".parse::().unwrap(), SubmissionType::Merge); - assert_eq!("dehydrated".parse::().unwrap(), SubmissionType::Dehydrated); - - //Case shouldn't matter - assert_eq!("Default".parse::().unwrap(), SubmissionType::Default); - assert_eq!("Merge".parse::().unwrap(), SubmissionType::Merge); - assert_eq!("Dehydrated".parse::().unwrap(), SubmissionType::Dehydrated); - - // Test that invalid values fail - assert!("invalid".parse::().is_err()); - assert!("MergeAppendOnly".parse::().is_err()); // CamelCase should fail - } - - #[test] - fn test_merge_type_serialization() { - assert_eq!(MergeType::Mergeable.as_ref(), 
"mergeable"); - assert_eq!(MergeType::AppendOnly.as_ref(), "append_only"); - } - - #[test] - fn test_merge_type_deserialization() { - assert_eq!("mergeable".parse::().unwrap(), MergeType::Mergeable); - assert_eq!("append_only".parse::().unwrap(), MergeType::AppendOnly); - - //Case shouldn't matter - assert_eq!("Mergeable".parse::().unwrap(), MergeType::Mergeable); - assert_eq!("Append_Only".parse::().unwrap(), MergeType::AppendOnly); - - // Test that invalid values fail - assert!("invalid".parse::().is_err()); - assert!("AppendOnly".parse::().is_err()); // CamelCase should fail - } -} diff --git a/crates/relay/src/bid_decoder/mod.rs b/crates/relay/src/bid_decoder/mod.rs index 571cd4136..4108aa210 100644 --- a/crates/relay/src/bid_decoder/mod.rs +++ b/crates/relay/src/bid_decoder/mod.rs @@ -1,14 +1,14 @@ -pub use decoder::{Encoding, HEADER_SSZ, SubmissionType}; use flux::timing::Nanos; +use flux_utils::DCacheRef; pub use tile::DecoderTile; use crate::auctioneer::SubmissionData; -mod decoder; mod tile; pub struct SubmissionDataWithSpan { pub submission_data: SubmissionData, pub span: tracing::Span, pub sent_at: Nanos, + pub original_data_ref: DCacheRef, } diff --git a/crates/relay/src/bid_decoder/tile.rs b/crates/relay/src/bid_decoder/tile.rs index 6163da289..0becc5826 100644 --- a/crates/relay/src/bid_decoder/tile.rs +++ b/crates/relay/src/bid_decoder/tile.rs @@ -8,31 +8,27 @@ use flux::{ }; use flux_utils::{DCache, SharedVector}; use helix_common::{ - RelayConfig, SubmissionTrace, chain_info::ChainInfo, local_cache::LocalCache, - record_submission_step, simulator::SubmissionFormat, + RelayConfig, SubmissionTrace, + api::builder_api::MAX_PAYLOAD_LENGTH, + chain_info::ChainInfo, + decoder::{SubmissionDecoder, SubmissionDecoderParams}, + local_cache::LocalCache, + record_submission_step, }; use helix_types::{ BidAdjustmentData, BlockMergingData, BlsPublicKeyBytes, MergeableOrdersWithPref, - SubmissionVersion, + SignedBidSubmission, Submission, SubmissionVersion, }; 
use tracing::trace; use crate::{ HelixSpine, - api::{ - FutureBidSubmissionResult, - builder::{api::MAX_PAYLOAD_LENGTH, error::BuilderApiError}, - }, + api::{FutureBidSubmissionResult, builder::error::BuilderApiError}, auctioneer::{ - InternalBidSubmissionHeader, Submission, SubmissionData, SubmissionRef, - get_mergeable_orders, send_submission_result, - }, - bid_decoder::{ - SubmissionDataWithSpan, - decoder::{ - DecodeFlags, SubmissionDecoder, decode_default, decode_dehydrated, decode_merge, - }, + InternalBidSubmissionHeader, SubmissionData, SubmissionRef, get_mergeable_orders, + send_submission_result, }, + bid_decoder::SubmissionDataWithSpan, spine::messages::{DecodedSubmission, NewBidSubmission}, }; @@ -74,6 +70,7 @@ impl Tile for DecoderTile { submission_data: submission, span, sent_at: new_bid.tracking_timestamp().publish_t(), + original_data_ref: new_bid.dref, }); producers.produce(DecodedSubmission { ix }); } @@ -158,17 +155,23 @@ impl DecoderTile { tracing::Span::current().record("id", tracing::field::display(header.id)); record_submission_step("worker_recv", sent_at.elapsed()); trace!("received by worker"); - let (submission, withdrawals_root, version, merging_data, bid_adjustment_data, sim_bytes) = - Self::try_handle_block_submission( - cache, - chain_info, - config, - header, - expected_pubkey, - payload, - buffer, - &mut trace, - )?; + let ( + submission, + withdrawals_root, + version, + merging_data, + bid_adjustment_data, + decoder_params, + ) = Self::try_handle_block_submission( + cache, + chain_info, + config, + header, + expected_pubkey, + payload, + buffer, + &mut trace, + )?; tracing::Span::current().record("slot", tracing::field::display(submission.bid_slot())); tracing::Span::current() @@ -209,7 +212,7 @@ impl DecoderTile { bid_adjustment_data, withdrawals_root, trace, - sim_bytes, + decoder_params, }; Ok((submission_data, tracing::Span::current())) @@ -232,28 +235,35 @@ impl DecoderTile { SubmissionVersion, Option, Option, - 
Option<(bytes::Bytes, SubmissionFormat)>, + SubmissionDecoderParams, ), BuilderApiError, > { - let mut decoder = SubmissionDecoder::new(header.compression, header.encoding); - let body: &[u8] = match decoder.decompress(payload, buffer) { - None => payload, - Some(Ok(())) => buffer, - Some(Err(e)) => return Err(e), - }; - let with_mergeable_data = header.merge_type.is_some(); let with_adjustments = header.flags.with_adjustments(); let is_dehydrated = header.flags.is_dehydrated(); - let builder_pubkey = decoder.extract_builder_pubkey(body, with_mergeable_data)?; + let decoder_params = SubmissionDecoderParams { + compression: header.compression, + encoding: header.encoding, + is_dehydrated, + merge_type: header.merge_type, + with_mergeable_data, + with_adjustments, + block_merging_dry_run: config.block_merging_config.is_dry_run, + fork_name: chain_info.current_fork_name(), + }; + + let mut decoder = SubmissionDecoder::new(&decoder_params); + let (mut submission, merging_data, bid_adjustment_data) = + decoder.decode(payload, buffer)?; + + trace.decoded_ns = Nanos::now(); + + let builder_pubkey = *submission.builder_pubkey(); let skip_sigverify = if let Some(expected_pubkey) = expected_pubkey { if builder_pubkey != *expected_pubkey { - return Err(BuilderApiError::InvalidBuilderPubkey( - *expected_pubkey, - builder_pubkey, - )); + return Err(BuilderApiError::InvalidBuilderPubkey(*expected_pubkey, builder_pubkey)); } true @@ -261,58 +271,52 @@ impl DecoderTile { header.api_key.is_some_and(|api_key| cache.validate_api_key(&api_key, &builder_pubkey)) }; + match submission { + Submission::Full(ref mut signed_bid_submission) => { + verify_and_validate(signed_bid_submission, skip_sigverify, chain_info)?; + } + Submission::Dehydrated { .. 
} => { + if !skip_sigverify { + return Err(BuilderApiError::UntrustedBuilderOnDehydratedPayload); + } + } + } + + let withdrawals_root = submission.withdrawal_root(); + trace!( ?header.sequence_number, is_dehydrated, skip_sigverify, with_mergeable_data, with_adjustments, - "processing payload" + "processed payload" ); - let flags = DecodeFlags { - skip_sigverify, - merge_type: header.merge_type, - with_adjustments, - block_merging_dry_run: config.block_merging_config.is_dry_run, - }; - - let (submission, merging_data, bid_adjustment_data) = if is_dehydrated { - decode_dehydrated(&mut decoder, body, trace, chain_info, &flags)? - } else if with_mergeable_data { - decode_merge(&mut decoder, body, trace, chain_info, &flags)? - - } else { - decode_default(&mut decoder, body, trace, chain_info, &flags)? - }; - - // For plain SSZ full submissions, capture the decompressed bytes so the - // auctioneer can forward them to the simulator without re-encoding. - let sim_bytes = if !is_dehydrated - && !with_mergeable_data - && !with_adjustments - && matches!(header.encoding, crate::bid_decoder::Encoding::Ssz) - { - Some((body.clone(), SubmissionFormat::FullSsz)) - } else { - None - }; - - // For plain SSZ full submissions, capture the decompressed bytes so the - // auctioneer can forward them to the simulator without re-encoding. 
- let sim_bytes = if !is_dehydrated && - !with_mergeable_data && - matches!(header.encoding, crate::bid_decoder::Encoding::Ssz) - { - Some((body.clone(), SubmissionFormat::FullSsz)) - } else { - None - - }; - - let withdrawals_root = submission.withdrawal_root(); - let version = SubmissionVersion::new(trace.receive_ns.0, header.sequence_number); - Ok((submission, withdrawals_root, version, merging_data, bid_adjustment_data, sim_bytes)) + Ok(( + submission, + withdrawals_root, + version, + merging_data, + bid_adjustment_data, + decoder_params, + )) + } +} + +fn verify_and_validate( + submission: &mut SignedBidSubmission, + skip_sigverify: bool, + chain_info: &ChainInfo, +) -> Result<(), BuilderApiError> { + if !skip_sigverify { + trace!("verifying signature"); + let start_sig = Nanos::now(); + submission.verify_signature(chain_info.builder_domain)?; + trace!("signature ok"); + record_submission_step("signature", start_sig.elapsed()); } + submission.validate_payload_ssz_lengths(chain_info.max_blobs_per_block())?; + Ok(()) } diff --git a/crates/relay/src/housekeeper/chain_event_updater.rs b/crates/relay/src/housekeeper/chain_event_updater.rs index d259f6c9a..50fc08a97 100644 --- a/crates/relay/src/housekeeper/chain_event_updater.rs +++ b/crates/relay/src/housekeeper/chain_event_updater.rs @@ -110,9 +110,9 @@ impl ChainEventUpdater { mut head_event_rx: broadcast::Receiver, mut payload_attributes_rx: broadcast::Receiver, ) { - let start_instant = Instant::now() - + self.chain_info.clock.duration_to_next_slot().unwrap() - + Duration::from_secs(CUTOFF_TIME); + let start_instant = Instant::now() + + self.chain_info.clock.duration_to_next_slot().unwrap() + + Duration::from_secs(CUTOFF_TIME); let mut timer = interval_at(start_instant, Duration::from_secs(self.chain_info.seconds_per_slot())); diff --git a/crates/relay/src/housekeeper/housekeeper.rs b/crates/relay/src/housekeeper/housekeeper.rs index 29d0f86e3..dac69de58 100644 --- a/crates/relay/src/housekeeper/housekeeper.rs 
+++ b/crates/relay/src/housekeeper/housekeeper.rs @@ -141,8 +141,8 @@ impl Housekeeper { loop { let head = self.slots.head(); let timeout = (self.chain_info.clock.start_of(head + 1).unwrap()) - .saturating_sub(utcnow_dur()) - + CUTOFF_TIME; + .saturating_sub(utcnow_dur()) + + CUTOFF_TIME; if let Ok(head_event_result) = tokio::time::timeout(timeout, head_event_rx.recv()).await { @@ -290,8 +290,8 @@ impl Housekeeper { self.slots.update_proposer_duties(head_slot); - if let Some(inclusion_list_service) = self.inclusion_list_service.as_ref() - && let Some(next_duty) = proposer_duties.iter().find(|duty| duty.slot == head_slot) + if let Some(inclusion_list_service) = self.inclusion_list_service.as_ref() && + let Some(next_duty) = proposer_duties.iter().find(|duty| duty.slot == head_slot) { let pub_key = next_duty.pubkey; let inclusion_list_service = inclusion_list_service.clone(); @@ -406,8 +406,8 @@ impl Housekeeper { for builder_pubkey in primev_builders { match auctioneer.get_builder_info(&builder_pubkey) { Some(builder_info) => { - if builder_info.builder_id == Some("PrimevBuilder".to_string()) - || builder_info + if builder_info.builder_id == Some("PrimevBuilder".to_string()) || + builder_info .builder_ids .as_ref() .is_some_and(|v| v.contains(&"PrimevBuilder".to_string())) diff --git a/crates/relay/src/housekeeper/primev_service.rs b/crates/relay/src/housekeeper/primev_service.rs index 16ebc0146..abe78840f 100644 --- a/crates/relay/src/housekeeper/primev_service.rs +++ b/crates/relay/src/housekeeper/primev_service.rs @@ -220,9 +220,9 @@ impl EthereumPrimevService { tuples .iter() .map(|token| { - if let ethers::abi::Token::Tuple(values) = token - && values.len() >= 3 - && let ( + if let ethers::abi::Token::Tuple(values) = token && + values.len() >= 3 && + let ( ethers::abi::Token::Bool(vanilla_opted_in), ethers::abi::Token::Bool(avs_opted_in), ethers::abi::Token::Bool(middleware_opted_in), @@ -245,8 +245,8 @@ impl EthereumPrimevService { // Extract the public keys 
of validators that are opted into any Primev service let mut opted_in_validators = Vec::new(); for (index, status) in opted_in_statuses.iter().enumerate() { - if (status.0 || status.1 || status.2) - && let Some(duty) = proposer_duties.get(index) + if (status.0 || status.1 || status.2) && + let Some(duty) = proposer_duties.get(index) { opted_in_validators.push(duty.pubkey); } diff --git a/crates/relay/src/lib.rs b/crates/relay/src/lib.rs index 002a7a13a..830ecfe77 100644 --- a/crates/relay/src/lib.rs +++ b/crates/relay/src/lib.rs @@ -27,9 +27,9 @@ pub use crate::{ start_api_service, }, auctioneer::{ - Auctioneer, AuctioneerHandle, BidSorter, Context, Event, InternalBidSubmission, - PayloadEntry, RegWorker, RegWorkerHandle, SimulatorClient, SimulatorTile, SlotData, - SubmissionPayload, SubmissionRef, ValidationRequest, + Auctioneer, AuctioneerHandle, BidSorter, Context, Event, PayloadEntry, RegWorker, + RegWorkerHandle, SimulatorClient, SimulatorTile, SlotData, SubmissionPayload, + SubmissionRef, ValidationRequest, }, beacon::start_beacon_client, bid_decoder::{DecoderTile, SubmissionDataWithSpan}, diff --git a/crates/relay/src/main.rs b/crates/relay/src/main.rs index 5bd61217e..7f28380b4 100644 --- a/crates/relay/src/main.rs +++ b/crates/relay/src/main.rs @@ -216,7 +216,7 @@ async fn run(instance_id: String, config: RelayConfig, keypair: BlsKeypair) -> e sock_addr, local_cache.api_key_cache.clone(), config.tcp_max_connections, - submissions, + submissions.clone(), ); attach_tile( block_submission_tcp_listener, @@ -227,15 +227,16 @@ async fn run(instance_id: String, config: RelayConfig, keypair: BlsKeypair) -> e ), ); - let sim_inbound = + let sim_requests = Arc::new(SharedVector::::with_capacity(MAX_SUBMISSIONS_PER_SLOT)); - let sim_outbound = + let sim_results = Arc::new(SharedVector::::with_capacity(MAX_SUBMISSIONS_PER_SLOT)); let (accept_optimistic, failsafe_triggered, sim_tile) = SimulatorTile::create( config.simulators.clone(), - sim_inbound.clone(), - 
sim_outbound.clone(), + submissions.clone(), + sim_requests.clone(), + sim_results.clone(), ); let sim_core = config.cores.simulator; attach_tile(sim_tile, spine, TileConfig::new(sim_core, ThreadPriority::OSDefault)); @@ -253,8 +254,8 @@ async fn run(instance_id: String, config: RelayConfig, keypair: BlsKeypair) -> e future_results, decoded, auctioneer_handle, - sim_inbound, - sim_outbound, + sim_requests, + sim_results, accept_optimistic, failsafe_triggered, ); diff --git a/crates/relay/src/network/messages.rs b/crates/relay/src/network/messages.rs index 849a2f443..20455ead7 100644 --- a/crates/relay/src/network/messages.rs +++ b/crates/relay/src/network/messages.rs @@ -243,10 +243,9 @@ mod tests { // Check that old messages still deserialize correctly let old_message = RawNetworkMessage::Other(NetworkMessage::InclusionList( - InclusionListMessage::Local(InclusionListMessageInfo::new( - 123, - InclusionList { txs: vec![Transaction(Bytes::from([0, 6, 5]))].into() }, - )), + InclusionListMessage::Local(InclusionListMessageInfo::new(123, InclusionList { + txs: vec![Transaction(Bytes::from([0, 6, 5]))].into(), + })), )); let serialized = serde_json::to_string(&old_message).unwrap(); let _: NewRawNetworkMessage = serde_json::from_str(&serialized).unwrap(); diff --git a/crates/relay/src/simulator/client.rs b/crates/relay/src/simulator/client.rs index fa558d6e0..287b970d6 100644 --- a/crates/relay/src/simulator/client.rs +++ b/crates/relay/src/simulator/client.rs @@ -217,16 +217,13 @@ mod test { #[tokio::test] async fn balance_request() { - let sim_client = super::SimulatorClient::new( - reqwest::Client::new(), - SimulatorConfig { - url: "http://54.175.81.132:8545".into(), - namespace: "relay".into(), - is_merging_simulator: false, - max_concurrent_tasks: 1, - ssz_url: None, - }, - ); + let sim_client = super::SimulatorClient::new(reqwest::Client::new(), SimulatorConfig { + url: "http://54.175.81.132:8545".into(), + namespace: "relay".into(), + is_merging_simulator: 
false, + max_concurrent_tasks: 1, + ssz_url: None, + }); let builder_address = super::Address::from_hex("0xD9d3A3f47a56a987A8119b15C994Bc126337dd27").unwrap(); let builder_balance = sim_client.balance_request(&builder_address).await; diff --git a/crates/relay/src/simulator/mod.rs b/crates/relay/src/simulator/mod.rs index 2a0800351..1b506416f 100644 --- a/crates/relay/src/simulator/mod.rs +++ b/crates/relay/src/simulator/mod.rs @@ -1,23 +1,42 @@ use std::collections::HashMap; use alloy_primitives::{Address, B256, U256}; +use flux_utils::DCacheRef; use helix_common::{ - SubmissionTrace, - bid_submission::OptimisticVersion, - simulator::{BlockSimError, SszValidationRequest}, + SubmissionTrace, api::builder_api::InclusionListWithMetadata, + bid_submission::OptimisticVersion, decoder::SubmissionDecoderParams, simulator::BlockSimError, }; use helix_types::{ BlsPublicKeyBytes, BuilderInclusionResult, ExecutionPayload, ExecutionRequests, MergeableOrderWithOrigin, MergedBlockTrace, SignedBidSubmission, SubmissionVersion, }; -use crate::auctioneer::SubmissionRef; +use crate::{auctioneer::SubmissionRef, simulator::tile::ValidationResult}; pub mod client; pub mod tile; pub use tile::SimulatorTile; +#[derive(Debug, Clone)] +pub struct ValidationRequest { + pub is_top_bid: bool, + pub is_optimistic: bool, + pub apply_blacklist: bool, + pub bid_slot: u64, + pub registered_gas_limit: u64, + pub builder_pubkey: BlsPublicKeyBytes, + pub parent_beacon_block_root: B256, + pub submission_ref: SubmissionRef, + pub version: SubmissionVersion, + pub tx_root: Option, // None if submission wasn't dehydrated + pub inclusion_list: InclusionListWithMetadata, + pub trace: SubmissionTrace, + pub submission: SignedBidSubmission, + pub original_data_ref: DCacheRef, + pub decoder_params: SubmissionDecoderParams, +} + #[derive(Debug, Clone, serde::Serialize)] pub struct BlockMergeRequestRef<'a> { /// The original payload value @@ -53,21 +72,6 @@ pub struct BlockMergeResponse { pub trace: 
MergedBlockTrace, } -#[derive(Clone)] -pub struct ValidationRequest { - pub is_optimistic: bool, - pub request: SszValidationRequest, - pub is_top_bid: bool, - pub bid_slot: u64, - pub builder_pubkey: BlsPublicKeyBytes, - pub version: SubmissionVersion, - pub submission: SignedBidSubmission, - pub submission_ref: SubmissionRef, - pub trace: SubmissionTrace, - // only Some for dehydrated submissions - pub tx_root: Option, -} - /// Large payload stored in `SharedVector` for auctioneer → sim tile transfer. pub enum SimRequest { Validate { req: Box, fast_track: bool }, @@ -76,7 +80,7 @@ pub enum SimRequest { /// Large payload stored in `SharedVector` for sim tile → auctioneer transfer. pub enum SimResult { - Validate(crate::simulator::tile::ValidationResult), + Validate(ValidationResult), Merge(MergeResult), } diff --git a/crates/relay/src/simulator/tile.rs b/crates/relay/src/simulator/tile.rs index d38444f1d..76c98be80 100644 --- a/crates/relay/src/simulator/tile.rs +++ b/crates/relay/src/simulator/tile.rs @@ -10,15 +10,16 @@ use std::{ use flux::{ spine::SpineProducers as _, tile::{Tile, TileName}, + timing::Nanos, }; -use flux_utils::SharedVector; +use flux_utils::{DCache, SharedVector}; use helix_common::{ SimulatorConfig, SubmissionTrace, bid_submission::OptimisticVersion, is_local_dev, metrics::SimulatorMetrics, record_submission_step, - simulator::{BlockSimError, JsonValidationRequest, SubmissionFormat}, + simulator::{BlockSimError, JsonValidationRequest, SszValidationRequest}, spawn_tracked, validator_preferences::{Filtering, ValidatorPreferences}, }; @@ -37,6 +38,7 @@ use crate::{ pub struct SimulatorTile { simulators: Vec, + submissions: Arc, /// Indices of simulators with an SSZ endpoint — static after construction. 
ssz_sim_indices: Vec, requests: PendingRequests, @@ -97,6 +99,7 @@ impl Tile for SimulatorTile { impl SimulatorTile { pub fn create( configs: Vec, + submissions: Arc, sim_requests: Arc>, sim_results: Arc>, ) -> (Arc, Arc, Self) { @@ -149,6 +152,7 @@ impl SimulatorTile { let tile = Self { simulators, + submissions, ssz_sim_indices, requests, priority_requests, @@ -240,8 +244,8 @@ impl SimulatorTile { producers.produce(FromSimMsg { ix: result_ix }); - if let Some(id) = self.next_client(|s| s.can_simulate()) - && let Some(req) = self.priority_requests.next_req().or(self.requests.next_req()) + if let Some(id) = self.next_client(|s| s.can_simulate()) && + let Some(req) = self.priority_requests.next_req().or(self.requests.next_req()) { self.spawn_sim(id, req); } @@ -267,8 +271,9 @@ impl SimulatorTile { let timer = SimulatorMetrics::timer(sim.client.endpoint()); let task_tx = self.task_tx.clone(); let sim_results = self.sim_results.clone(); + let subs_cache = self.submissions.clone(); spawn_tracked!(async move { - let start_sim = Instant::now(); + let start_sim = Nanos::now(); let block_hash = req.submission.block_hash(); debug!(%block_hash, "sending simulation request"); @@ -276,23 +281,20 @@ impl SimulatorTile { SimulatorMetrics::sim_count(optimistic_version.is_optimistic()); let (mut res, ssz_retry) = match dispatch { SimDispatch::Ssz { to_send, ssz_url, http } => { + let request = create_ssz_request(&req, subs_cache); let res = - SimulatorClient::do_sim_request(&req.request, req.is_top_bid, to_send) - .await; - (res, Some((ssz_url, http))) + SimulatorClient::do_sim_request(&request, req.is_top_bid, to_send).await; + (res, Some((request, ssz_url, http))) } SimDispatch::Json { to_send, method } => { - let filtering = if req.request.apply_blacklist { - Filtering::Regional - } else { - Filtering::Global - }; + let filtering = + if req.apply_blacklist { Filtering::Regional } else { Filtering::Global }; let json_req = JsonValidationRequest::new( - 
req.request.registered_gas_limit, + req.registered_gas_limit, &req.submission, ValidatorPreferences { filtering, ..Default::default() }, - Some(req.request.parent_beacon_block_root), - Some(req.request.inclusion_list.clone()), + Some(req.parent_beacon_block_root), + Some(req.inclusion_list.clone()), ); let res = SimulatorClient::do_json_sim_request( &json_req, @@ -309,11 +311,10 @@ impl SimulatorTile { // can process the submission without a hydration cache entry. if matches!(res, Err(BlockSimError::HydrationMiss)) { debug!(%block_hash, "hydration miss — retrying with full SSZ"); - if let Some((ssz_url, http)) = ssz_retry { + if let Some((request, ssz_url, http)) = ssz_retry { let to_send = http.post(format!("{ssz_url}/validate")); - let mut retry_req = req.request.clone(); - retry_req.signed_bid_submission = req.submission.as_ssz_bytes().into(); - retry_req.format = SubmissionFormat::FullSsz; + let mut retry_req = request.clone(); + retry_req.signed_bid_submission = req.submission.as_ssz_bytes(); res = SimulatorClient::do_sim_request(&retry_req, req.is_top_bid, to_send).await; } else { @@ -449,8 +450,8 @@ impl SimEntry { /// A lighter check to decide whether we should accept optimistic submissions fn can_simulate_light(&self) -> bool { - self.is_synced - && match self.paused_until { + self.is_synced && + match self.paused_until { Some(until) => Instant::now() > until, None => true, } @@ -563,3 +564,41 @@ impl PendingRequests { self.reqs.retain(|r| r.bid_slot >= bid_slot); } } + +fn create_ssz_request(req: &ValidationRequest, cache: Arc) -> SszValidationRequest { + // TODO: remove this when hydration cache is added to simulator + if req.decoder_params.is_dehydrated { + return SszValidationRequest { + apply_blacklist: req.apply_blacklist, + registered_gas_limit: req.registered_gas_limit, + parent_beacon_block_root: req.parent_beacon_block_root, + inclusion_list: req.inclusion_list.clone(), + decoder_params: None, + signed_bid_submission: 
req.submission.as_ssz_bytes(), + }; + } + + let mut bytes = Vec::with_capacity(req.original_data_ref.len); + match cache.read(req.original_data_ref, &mut bytes) { + Ok(_) => SszValidationRequest { + apply_blacklist: req.apply_blacklist, + registered_gas_limit: req.registered_gas_limit, + parent_beacon_block_root: req.parent_beacon_block_root, + inclusion_list: req.inclusion_list.clone(), + decoder_params: Some(req.decoder_params.clone()), + signed_bid_submission: bytes, + }, + Err(_) => { + // In case of cache read failure, we fall back to sending the full SSZ bytes. + // This is not ideal but allows the simulation to proceed without a cache entry. + SszValidationRequest { + apply_blacklist: req.apply_blacklist, + registered_gas_limit: req.registered_gas_limit, + parent_beacon_block_root: req.parent_beacon_block_root, + inclusion_list: req.inclusion_list.clone(), + decoder_params: None, + signed_bid_submission: req.submission.as_ssz_bytes(), + } + } + } +} diff --git a/crates/simulator/src/block_merging/error.rs b/crates/simulator/src/block_merging/error.rs index 17e3f64bf..a64d8ac62 100644 --- a/crates/simulator/src/block_merging/error.rs +++ b/crates/simulator/src/block_merging/error.rs @@ -62,28 +62,28 @@ pub(crate) enum BlockMergingApiError { impl From for ErrorObject<'static> { fn from(error: BlockMergingApiError) -> Self { match error { - BlockMergingApiError::MissingProposerPayment - | BlockMergingApiError::InvalidProposerPayment - | BlockMergingApiError::NoSafeForBuilder(_) - | BlockMergingApiError::NotEnoughGasForPayment(_) - | BlockMergingApiError::InvalidSignatureInBaseBlock - | BlockMergingApiError::BaseBlockBlobLimitExceeded { .. } => { + BlockMergingApiError::MissingProposerPayment | + BlockMergingApiError::InvalidProposerPayment | + BlockMergingApiError::NoSafeForBuilder(_) | + BlockMergingApiError::NotEnoughGasForPayment(_) | + BlockMergingApiError::InvalidSignatureInBaseBlock | + BlockMergingApiError::BaseBlockBlobLimitExceeded { .. 
} => { invalid_params_rpc_err(error.to_string()) } - BlockMergingApiError::GetParent(_) - | BlockMergingApiError::BlobLimitReached - | BlockMergingApiError::NextEvmEnvFail - | BlockMergingApiError::BlockContext - | BlockMergingApiError::RevenueAllocationReverted - | BlockMergingApiError::ExecutionRequests - | BlockMergingApiError::ZeroRevenueForWinningBuilder - | BlockMergingApiError::ZeroMergedBlockRevenue - | BlockMergingApiError::EmptyBuilderSignerAccount(_) - | BlockMergingApiError::EmptyBuilderSafe(_) - | BlockMergingApiError::NoBalanceInBuilderSafe { .. } - | BlockMergingApiError::BuilderBalanceDeltaMismatch(_) - | BlockMergingApiError::Provider(_) => internal_rpc_err(error.to_string()), + BlockMergingApiError::GetParent(_) | + BlockMergingApiError::BlobLimitReached | + BlockMergingApiError::NextEvmEnvFail | + BlockMergingApiError::BlockContext | + BlockMergingApiError::RevenueAllocationReverted | + BlockMergingApiError::ExecutionRequests | + BlockMergingApiError::ZeroRevenueForWinningBuilder | + BlockMergingApiError::ZeroMergedBlockRevenue | + BlockMergingApiError::EmptyBuilderSignerAccount(_) | + BlockMergingApiError::EmptyBuilderSafe(_) | + BlockMergingApiError::NoBalanceInBuilderSafe { .. } | + BlockMergingApiError::BuilderBalanceDeltaMismatch(_) | + BlockMergingApiError::Provider(_) => internal_rpc_err(error.to_string()), BlockMergingApiError::Execution(err) => match err { error @ BlockExecutionError::Validation(_) => { diff --git a/crates/simulator/src/block_merging/mod.rs b/crates/simulator/src/block_merging/mod.rs index ecf7b44cb..ddf340c81 100644 --- a/crates/simulator/src/block_merging/mod.rs +++ b/crates/simulator/src/block_merging/mod.rs @@ -750,8 +750,8 @@ pub(crate) fn prepare_revenues( // We divide the revenue among the different bundle origins. 
for (origin, origin_revenue) in revenues { // Update the revenue, subtracting part of the payment cost - let actualized_revenue = (origin_revenue.revenue.widening_mul(expected_revenue) - / U512::from(total_revenue)) + let actualized_revenue = (origin_revenue.revenue.widening_mul(expected_revenue) / + U512::from(total_revenue)) .to(); let builder_revenue = distribution_config.merged_builder_split(actualized_revenue); updated_revenues diff --git a/crates/simulator/src/inclusion/inclusion_producer.rs b/crates/simulator/src/inclusion/inclusion_producer.rs index 53843b27e..fe8fb4daf 100644 --- a/crates/simulator/src/inclusion/inclusion_producer.rs +++ b/crates/simulator/src/inclusion/inclusion_producer.rs @@ -74,10 +74,10 @@ fn handle_tx_event( } } } - FullTransactionEvent::Queued(tx_hash, _) - | FullTransactionEvent::Mined { tx_hash, .. } - | FullTransactionEvent::Discarded(tx_hash) - | FullTransactionEvent::Invalid(tx_hash) => { + FullTransactionEvent::Queued(tx_hash, _) | + FullTransactionEvent::Mined { tx_hash, .. } | + FullTransactionEvent::Discarded(tx_hash) | + FullTransactionEvent::Invalid(tx_hash) => { // Remove from tx mapping. 
pending_txs.remove(&tx_hash); } diff --git a/crates/simulator/src/ssz_server.rs b/crates/simulator/src/ssz_server.rs index e554ca7ee..a99aaa947 100644 --- a/crates/simulator/src/ssz_server.rs +++ b/crates/simulator/src/ssz_server.rs @@ -1,6 +1,16 @@ use alloy_rpc_types::beacon::relay::{BuilderBlockValidationRequestV5, SignedBidSubmissionV5}; -use axum::{Router, extract::State, http::StatusCode, response::IntoResponse, routing::post}; -use helix_common::simulator::{SszValidationRequest, SubmissionFormat}; +use axum::{ + Router, + extract::State, + http::StatusCode, + response::{IntoResponse, Response}, + routing::post, +}; +use helix_common::{ + decoder::{DecoderError, SubmissionDecoder}, + simulator::SszValidationRequest, +}; +use helix_types::Submission; use ssz::Decode; use tokio::net::TcpListener; use tracing::error; @@ -23,30 +33,28 @@ pub async fn run(api: ValidationApi, port: u16) { } } -async fn handler(State(api): State, body: axum::body::Bytes) -> impl IntoResponse { - let req = match SszValidationRequest::from_ssz_bytes(&body) { - Ok(r) => r, - Err(e) => return (StatusCode::BAD_REQUEST, format!("ssz decode: {e:?}")).into_response(), - }; +async fn handler( + State(api): State, + body: axum::body::Bytes, +) -> Result { + let req = SszValidationRequest::from_ssz_bytes(&body)?; - let signed_bid_submission = match req.format { - SubmissionFormat::FullSsz => { - match SignedBidSubmissionV5::from_ssz_bytes(&req.signed_bid_submission) { - Ok(s) => s, - Err(e) => { - return ( - StatusCode::BAD_REQUEST, - format!("signed bid submission decode: {e:?}"), - ) - .into_response(); + let signed_bid_submission = match req.decoder_params { + Some(decode_params) => { + let mut buf = vec![]; + let mut decoder = SubmissionDecoder::new(&decode_params); + let (submission, _, _) = + decoder.decode(req.signed_bid_submission.as_slice(), &mut buf)?; + match submission { + Submission::Full(s) => s.into(), + Submission::Dehydrated(_) => { + // Simulator-side hydration cache not yet 
implemented. + // Return 424 so the relay retries with full SSZ bytes. + return Ok(StatusCode::FAILED_DEPENDENCY.into_response()); } } } - SubmissionFormat::DehydratedSsz => { - // Simulator-side hydration cache not yet implemented. - // Return 424 so the relay retries with full SSZ bytes. - return StatusCode::FAILED_DEPENDENCY.into_response(); - } + None => SignedBidSubmissionV5::from_ssz_bytes(req.signed_bid_submission.as_slice())?, }; let ext = ExtendedValidationRequestV5 { @@ -59,8 +67,8 @@ async fn handler(State(api): State, body: axum::body::Bytes) -> i apply_blacklist: req.apply_blacklist, }; - match api.validate_builder_submission_v5(ext).await { + Ok(match api.validate_builder_submission_v5(ext).await { Ok(()) => StatusCode::OK.into_response(), Err(e) => (StatusCode::BAD_REQUEST, e.message().to_string()).into_response(), - } + }) } diff --git a/crates/simulator/src/validation/error.rs b/crates/simulator/src/validation/error.rs index 6d9f1aced..b075c6952 100644 --- a/crates/simulator/src/validation/error.rs +++ b/crates/simulator/src/validation/error.rs @@ -46,19 +46,19 @@ pub(crate) enum ValidationApiError { impl From for ErrorObject<'static> { fn from(error: ValidationApiError) -> Self { match error { - ValidationApiError::GasLimitMismatch(_) - | ValidationApiError::GasUsedMismatch(_) - | ValidationApiError::ParentHashMismatch(_) - | ValidationApiError::BlockHashMismatch(_) - | ValidationApiError::Blacklist(_) - | ValidationApiError::ProposerPayment - | ValidationApiError::InvalidBlobsBundle - | ValidationApiError::InclusionList - | ValidationApiError::Blob(_) => invalid_params_rpc_err(error.to_string()), + ValidationApiError::GasLimitMismatch(_) | + ValidationApiError::GasUsedMismatch(_) | + ValidationApiError::ParentHashMismatch(_) | + ValidationApiError::BlockHashMismatch(_) | + ValidationApiError::Blacklist(_) | + ValidationApiError::ProposerPayment | + ValidationApiError::InvalidBlobsBundle | + ValidationApiError::InclusionList | + 
ValidationApiError::Blob(_) => invalid_params_rpc_err(error.to_string()), - ValidationApiError::GetParent(_) - | ValidationApiError::Consensus(_) - | ValidationApiError::Provider(_) => internal_rpc_err(error.to_string()), + ValidationApiError::GetParent(_) | + ValidationApiError::Consensus(_) | + ValidationApiError::Provider(_) => internal_rpc_err(error.to_string()), ValidationApiError::Execution(err) => match err { error @ BlockExecutionError::Validation(_) => { invalid_params_rpc_err(error.to_string()) diff --git a/crates/simulator/src/validation/mod.rs b/crates/simulator/src/validation/mod.rs index 2de723607..83dd434fa 100644 --- a/crates/simulator/src/validation/mod.rs +++ b/crates/simulator/src/validation/mod.rs @@ -428,8 +428,8 @@ impl ValidationApi { &self, mut blobs_bundle: BlobsBundleV1, ) -> Result, ValidationApiError> { - if blobs_bundle.commitments.len() != blobs_bundle.proofs.len() - || blobs_bundle.commitments.len() != blobs_bundle.blobs.len() + if blobs_bundle.commitments.len() != blobs_bundle.proofs.len() || + blobs_bundle.commitments.len() != blobs_bundle.blobs.len() { return Err(ValidationApiError::InvalidBlobsBundle); } @@ -521,8 +521,8 @@ impl ValidationApi { // Check block size as per EIP-7934 (only applies when Osaka hardfork is active) let chain_spec = self.provider.chain_spec(); - if chain_spec.is_osaka_active_at_timestamp(block.timestamp()) - && block.rlp_length() > MAX_RLP_BLOCK_SIZE + if chain_spec.is_osaka_active_at_timestamp(block.timestamp()) && + block.rlp_length() > MAX_RLP_BLOCK_SIZE { return Err(ValidationApiError::Consensus(ConsensusError::BlockTooLarge { rlp_length: block.rlp_length(), @@ -556,8 +556,8 @@ impl ValidationApi { .sealed_header_by_hash(parent_hash)? 
.ok_or_else(|| GetParentError::MissingParentBlock)?; - if latest_header.number().saturating_sub(parent_header.number()) - > self.validation_window + if latest_header.number().saturating_sub(parent_header.number()) > + self.validation_window { return Err(GetParentError::BlockTooOld); } diff --git a/crates/tcp-types/src/lib.rs b/crates/tcp-types/src/lib.rs index 9e8de5418..f01ab5ee3 100644 --- a/crates/tcp-types/src/lib.rs +++ b/crates/tcp-types/src/lib.rs @@ -7,9 +7,21 @@ use strum::{AsRefStr, EnumString}; #[repr(u8)] #[derive( - Debug, Default, Clone, Copy, PartialEq, Eq, EnumString, AsRefStr, Serialize, Deserialize, + Debug, + Default, + Clone, + Copy, + PartialEq, + Eq, + EnumString, + AsRefStr, + Serialize, + Deserialize, + ssz_derive::Encode, + ssz_derive::Decode, )] #[strum(serialize_all = "snake_case", ascii_case_insensitive)] +#[ssz(enum_behaviour = "tag")] pub enum MergeType { #[default] None = 0, @@ -38,8 +50,21 @@ impl TryFrom for MergeType { #[repr(u8)] #[derive( - Debug, Eq, PartialEq, Clone, Copy, Serialize, Deserialize, Hash, PartialOrd, Ord, Default, + Debug, + Eq, + PartialEq, + Clone, + Copy, + Serialize, + Deserialize, + Hash, + PartialOrd, + Ord, + Default, + ssz_derive::Encode, + ssz_derive::Decode, )] +#[ssz(enum_behaviour = "tag")] pub enum Compression { #[default] None = 0, @@ -67,15 +92,11 @@ impl Compression { impl std::fmt::Display for Compression { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{}", - match self { - Compression::None => "NONE", - Compression::Gzip => "GZIP", - Compression::Zstd => "ZSTD", - } - ) + write!(f, "{}", match self { + Compression::None => "NONE", + Compression::Gzip => "GZIP", + Compression::Zstd => "ZSTD", + }) } } diff --git a/crates/types/src/bid_submission.rs b/crates/types/src/bid_submission.rs index 1555b541c..fb2cbe4a6 100644 --- a/crates/types/src/bid_submission.rs +++ b/crates/types/src/bid_submission.rs @@ -1,6 +1,7 @@ use std::{cmp::Ordering, sync::Arc}; use 
alloy_primitives::{Address, B256, U256}; +use alloy_rpc_types::beacon::relay::SignedBidSubmissionV5; use lh_types::{ForkName, SignedRoot, Slot, test_utils::TestRandom}; use serde::{Deserialize, Serialize}; use ssz::{Decode, DecodeError}; @@ -10,8 +11,8 @@ use tree_hash_derive::TreeHash; use crate::{ BlobsBundle, BlobsError, Bloom, BlsPublicKey, BlsPublicKeyBytes, BlsSignature, - BlsSignatureBytes, ExecutionPayload, ExtraData, PayloadAndBlobs, SszError, - bid_adjustment_data::BidAdjustmentData, error::SigError, fields::ExecutionRequests, + BlsSignatureBytes, DehydratedBidSubmission, ExecutionPayload, ExtraData, PayloadAndBlobs, + SszError, bid_adjustment_data::BidAdjustmentData, error::SigError, fields::ExecutionRequests, }; #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Encode, Decode, TreeHash)] @@ -65,6 +66,52 @@ impl BidTrace { } } +#[allow(clippy::large_enum_variant)] +#[derive(Clone, Debug)] +pub enum Submission { + // received after sigverify + Full(SignedBidSubmission), + // need to validate do the validate_payload_ssz_lengths + Dehydrated(DehydratedBidSubmission), +} + +impl Submission { + pub fn bid_slot(&self) -> u64 { + match self { + Submission::Full(s) => s.slot().as_u64(), + Submission::Dehydrated(s) => s.slot(), + } + } + + pub fn builder_pubkey(&self) -> &BlsPublicKeyBytes { + match self { + Submission::Full(s) => &s.message().builder_pubkey, + Submission::Dehydrated(s) => s.builder_pubkey(), + } + } + + pub fn block_hash(&self) -> &B256 { + match self { + Submission::Full(s) => &s.message().block_hash, + Submission::Dehydrated(s) => s.block_hash(), + } + } + + pub fn withdrawal_root(&self) -> B256 { + match self { + Submission::Full(s) => s.withdrawals_root(), + Submission::Dehydrated(s) => s.withdrawal_root(), + } + } + + pub fn parent_hash(&self) -> &B256 { + match self { + Submission::Full(s) => s.parent_hash(), + Submission::Dehydrated(s) => s.parent_hash(), + } + } +} + #[derive(Debug, Clone, Serialize, Encode)] 
#[serde(deny_unknown_fields)] pub struct SignedBidSubmission { @@ -87,6 +134,221 @@ impl TestRandom for SignedBidSubmission { } } +impl From for SignedBidSubmission { + fn from(v: SignedBidSubmissionV5) -> SignedBidSubmission { + use crate::fields::{Transaction, Withdrawal}; + let m = v.message; + let v2 = v.execution_payload.payload_inner; + let v1 = v2.payload_inner; + SignedBidSubmission { + message: BidTrace { + slot: m.slot, + parent_hash: m.parent_hash, + block_hash: m.block_hash, + builder_pubkey: m.builder_pubkey, + proposer_pubkey: m.proposer_pubkey, + proposer_fee_recipient: m.proposer_fee_recipient, + gas_limit: m.gas_limit, + gas_used: m.gas_used, + value: m.value, + }, + execution_payload: Arc::new(ExecutionPayload { + parent_hash: v1.parent_hash, + fee_recipient: v1.fee_recipient, + state_root: v1.state_root, + receipts_root: v1.receipts_root, + logs_bloom: *v1.logs_bloom, + prev_randao: v1.prev_randao, + block_number: v1.block_number, + gas_limit: v1.gas_limit, + gas_used: v1.gas_used, + timestamp: v1.timestamp, + extra_data: ExtraData(v1.extra_data), + base_fee_per_gas: v1.base_fee_per_gas, + block_hash: v1.block_hash, + transactions: lh_types::VariableList::new( + v1.transactions.into_iter().map(Transaction).collect(), + ) + .expect("transactions exceed spec limit"), + withdrawals: lh_types::VariableList::new( + v2.withdrawals + .into_iter() + .map(|w| Withdrawal { + index: w.index, + validator_index: w.validator_index, + address: w.address, + amount: w.amount, + }) + .collect(), + ) + .expect("withdrawals exceed spec limit"), + blob_gas_used: v.execution_payload.blob_gas_used, + excess_blob_gas: v.execution_payload.excess_blob_gas, + }), + blobs_bundle: Arc::new(BlobsBundle { + commitments: lh_types::VariableList::new(v.blobs_bundle.commitments) + .expect("commitments exceed spec limit"), + proofs: v.blobs_bundle.proofs, + blobs: v.blobs_bundle.blobs.into_iter().map(Arc::new).collect(), + }), + execution_requests: Arc::new(ExecutionRequests { + 
deposits: lh_types::VariableList::new( + v.execution_requests + .deposits + .into_iter() + .map(|d| lh_types::DepositRequest { + pubkey: lh_types::PublicKeyBytes::deserialize(&d.pubkey[..]) + .expect("len=48"), + withdrawal_credentials: d.withdrawal_credentials, + amount: d.amount, + signature: lh_types::SignatureBytes::deserialize(&d.signature[..]) + .expect("len=96"), + index: d.index, + }) + .collect(), + ) + .expect("deposits exceed spec limit"), + withdrawals: lh_types::VariableList::new( + v.execution_requests + .withdrawals + .into_iter() + .map(|w| lh_types::WithdrawalRequest { + source_address: w.source_address, + validator_pubkey: lh_types::PublicKeyBytes::deserialize( + &w.validator_pubkey[..], + ) + .expect("len=48"), + amount: w.amount, + }) + .collect(), + ) + .expect("withdrawal requests exceed spec limit"), + consolidations: lh_types::VariableList::new( + v.execution_requests + .consolidations + .into_iter() + .map(|c| lh_types::ConsolidationRequest { + source_address: c.source_address, + source_pubkey: lh_types::PublicKeyBytes::deserialize( + &c.source_pubkey[..], + ) + .expect("len=48"), + target_pubkey: lh_types::PublicKeyBytes::deserialize( + &c.target_pubkey[..], + ) + .expect("len=48"), + }) + .collect(), + ) + .expect("consolidations exceed spec limit"), + }), + signature: v.signature, + } + } +} + +impl From for SignedBidSubmissionV5 { + fn from(s: SignedBidSubmission) -> SignedBidSubmissionV5 { + use alloy_eips::{ + eip4895::Withdrawal as AlloyWithdrawal, eip6110::DepositRequest as AlloyDepositRequest, + eip7002::WithdrawalRequest as AlloyWithdrawalRequest, + eip7251::ConsolidationRequest as AlloyConsolidationRequest, + }; + // alloy re-exports this as the same type but with a different struct name + use alloy_rpc_types::beacon::relay::BidTrace as AlloyBidTrace; + use alloy_rpc_types::{ + beacon::requests::ExecutionRequestsV4, + engine::{BlobsBundleV2, ExecutionPayloadV1, ExecutionPayloadV2, ExecutionPayloadV3}, + }; + let ep = 
&*s.execution_payload; + let bb = &*s.blobs_bundle; + let er = &*s.execution_requests; + let m = s.message; + SignedBidSubmissionV5 { + message: AlloyBidTrace { + slot: m.slot, + parent_hash: m.parent_hash, + block_hash: m.block_hash, + builder_pubkey: m.builder_pubkey, + proposer_pubkey: m.proposer_pubkey, + proposer_fee_recipient: m.proposer_fee_recipient, + gas_limit: m.gas_limit, + gas_used: m.gas_used, + value: m.value, + }, + execution_payload: ExecutionPayloadV3 { + payload_inner: ExecutionPayloadV2 { + payload_inner: ExecutionPayloadV1 { + parent_hash: ep.parent_hash, + fee_recipient: ep.fee_recipient, + state_root: ep.state_root, + receipts_root: ep.receipts_root, + logs_bloom: alloy_primitives::Bloom(ep.logs_bloom), + prev_randao: ep.prev_randao, + block_number: ep.block_number, + gas_limit: ep.gas_limit, + gas_used: ep.gas_used, + timestamp: ep.timestamp, + extra_data: ep.extra_data.0.clone(), + base_fee_per_gas: ep.base_fee_per_gas, + block_hash: ep.block_hash, + transactions: ep.transactions.iter().map(|t| t.0.clone()).collect(), + }, + withdrawals: ep + .withdrawals + .iter() + .map(|w| AlloyWithdrawal { + index: w.index, + validator_index: w.validator_index, + address: w.address, + amount: w.amount, + }) + .collect(), + }, + blob_gas_used: ep.blob_gas_used, + excess_blob_gas: ep.excess_blob_gas, + }, + blobs_bundle: BlobsBundleV2 { + commitments: bb.commitments.iter().cloned().collect(), + proofs: bb.proofs.clone(), + blobs: bb.blobs.iter().map(|b| (**b).clone()).collect(), + }, + execution_requests: ExecutionRequestsV4 { + deposits: er + .deposits + .iter() + .map(|d| AlloyDepositRequest { + pubkey: d.pubkey.serialize().into(), + withdrawal_credentials: d.withdrawal_credentials, + amount: d.amount, + signature: d.signature.serialize().into(), + index: d.index, + }) + .collect(), + withdrawals: er + .withdrawals + .iter() + .map(|w| AlloyWithdrawalRequest { + source_address: w.source_address, + validator_pubkey: w.validator_pubkey.serialize().into(), 
+ amount: w.amount, + }) + .collect(), + consolidations: er + .consolidations + .iter() + .map(|c| AlloyConsolidationRequest { + source_address: c.source_address, + source_pubkey: c.source_pubkey.serialize().into(), + target_pubkey: c.target_pubkey.serialize().into(), + }) + .collect(), + }, + signature: s.signature, + } + } +} + impl<'de> Deserialize<'de> for SignedBidSubmission { fn deserialize(deserializer: D) -> Result where @@ -352,10 +614,6 @@ impl SignedBidSubmission { pub fn fork_name(&self) -> ForkName { ForkName::Fulu } - - pub fn maybe_upgrade_to_fulu(self, _current_fork: ForkName) -> SignedBidSubmission { - self - } } #[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode)] @@ -502,33 +760,6 @@ mod tests { assert_eq!(s.fork_name(), ForkName::Fulu); } - #[test] - // from alloy - fn fulu_bid_submission_2() { - let data_json = include_str!("testdata/signed-bid-submission-fulu-2.json"); - let s = test_decode_json::(data_json); - - let blobs_empty = s.blobs_bundle().blobs.is_empty(); - - if blobs_empty { - // When blobs are empty, we can't distinguish variants reliably - let s = s.maybe_upgrade_to_fulu(ForkName::Fulu); - assert_eq!(s.fork_name(), ForkName::Fulu); - } else { - assert_eq!(s.fork_name(), ForkName::Fulu); - } - - let data_ssz = include_bytes!("testdata/signed-bid-submission-fulu.ssz"); - let mut s = test_encode_decode_ssz::(data_ssz); - if blobs_empty { - s = s.maybe_upgrade_to_fulu(ForkName::Fulu); - assert_eq!(s.fork_name(), ForkName::Fulu); - } else { - assert_eq!(s.fork_name(), ForkName::Fulu); - } - assert_eq!(data_ssz, s.as_ssz_bytes().as_slice()); - } - #[test] fn fulu_bid_submission_ssz() { let data_ssz = SignedBidSubmission::random_for_test(&mut rand::rng()).as_ssz_bytes(); diff --git a/crates/types/src/blobs.rs b/crates/types/src/blobs.rs index ef6c970d7..23fa5c221 100644 --- a/crates/types/src/blobs.rs +++ b/crates/types/src/blobs.rs @@ -67,8 +67,8 @@ impl ssz::Decode for BlobsBundle { let raw = 
BlobsBundleRaw::from_ssz_bytes(bytes)?; - if raw.proofs.len() == raw.blobs.len() * CELLS_PER_EXT_BLOB - && raw.commitments.len() == raw.blobs.len() + if raw.proofs.len() == raw.blobs.len() * CELLS_PER_EXT_BLOB && + raw.commitments.len() == raw.blobs.len() { Ok(Self { commitments: raw.commitments, proofs: raw.proofs, blobs: raw.blobs }) } else { @@ -113,8 +113,8 @@ impl BlobsBundle { &self, max_blobs_per_block: usize, ) -> Result<(), BlockValidationError> { - if self.commitments.len() != self.blobs.len() - || self.proofs.len() != self.blobs.len() * CELLS_PER_EXT_BLOB + if self.commitments.len() != self.blobs.len() || + self.proofs.len() != self.blobs.len() * CELLS_PER_EXT_BLOB { return Err(BlockValidationError::BlobsError(BlobsError::BundleMismatch { proofs: self.proofs.len(), diff --git a/crates/types/src/block_merging.rs b/crates/types/src/block_merging.rs index c698f850b..6d10e60d8 100644 --- a/crates/types/src/block_merging.rs +++ b/crates/types/src/block_merging.rs @@ -2,7 +2,7 @@ use std::{collections::HashMap, hash::Hash}; use alloy_primitives::{Address, B256, Bytes, U256}; use lh_test_random::TestRandom; -use lh_types::{ForkName, test_utils::TestRandom}; +use lh_types::test_utils::TestRandom; use rand::Rng; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; @@ -19,15 +19,6 @@ pub struct SignedBidSubmissionWithMergingData { pub merging_data: BlockMergingData, } -impl SignedBidSubmissionWithMergingData { - pub fn maybe_upgrade_to_fulu(self, fork: ForkName) -> Self { - Self { - submission: self.submission.maybe_upgrade_to_fulu(fork), - merging_data: self.merging_data, - } - } -} - #[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, PartialEq, Eq)] #[ssz(enum_behaviour = "union")] #[serde(untagged)] @@ -79,8 +70,8 @@ pub struct InvalidTxIndex; impl BundleOrder { pub fn validate(&self) -> Result<(), InvalidTxIndex> { - if self.reverting_txs.iter().any(|&i| i >= self.txs.len()) - || self.dropping_txs.iter().any(|&i| i >= self.txs.len()) + if 
self.reverting_txs.iter().any(|&i| i >= self.txs.len()) || + self.dropping_txs.iter().any(|&i| i >= self.txs.len()) { return Err(InvalidTxIndex); } diff --git a/crates/types/src/execution_payload.rs b/crates/types/src/execution_payload.rs index c325e234b..ea2fab125 100644 --- a/crates/types/src/execution_payload.rs +++ b/crates/types/src/execution_payload.rs @@ -161,13 +161,13 @@ impl ForkVersionDecode for ExecutionPayload { /// SSZ decode with explicit fork variant. fn from_ssz_bytes_by_fork(bytes: &[u8], fork_name: ForkName) -> Result { let builder_bid = match fork_name { - ForkName::Altair - | ForkName::Base - | ForkName::Bellatrix - | ForkName::Capella - | ForkName::Deneb - | ForkName::Electra - | ForkName::Gloas => { + ForkName::Altair | + ForkName::Base | + ForkName::Bellatrix | + ForkName::Capella | + ForkName::Deneb | + ForkName::Electra | + ForkName::Gloas => { return Err(ssz::DecodeError::BytesInvalid(format!( "unsupported fork for ExecutionPayloadHeader: {fork_name}", ))); diff --git a/crates/types/src/hydration.rs b/crates/types/src/hydration.rs index 229a6beef..310d3e13b 100644 --- a/crates/types/src/hydration.rs +++ b/crates/types/src/hydration.rs @@ -27,13 +27,13 @@ pub enum DehydratedBidSubmission { impl ForkVersionDecode for DehydratedBidSubmission { fn from_ssz_bytes_by_fork(bytes: &[u8], fork: ForkName) -> Result { match fork { - ForkName::Base - | ForkName::Altair - | ForkName::Bellatrix - | ForkName::Capella - | ForkName::Deneb - | ForkName::Electra - | ForkName::Gloas => Err(DecodeError::NoMatchingVariant), + ForkName::Base | + ForkName::Altair | + ForkName::Bellatrix | + ForkName::Capella | + ForkName::Deneb | + ForkName::Electra | + ForkName::Gloas => Err(DecodeError::NoMatchingVariant), ForkName::Fulu => DehydratedBidSubmissionFulu::from_ssz_bytes(bytes) .map(DehydratedBidSubmission::Fulu), } @@ -136,13 +136,13 @@ impl DehydratedBidSubmissionFuluWithAdjustments { impl ForkVersionDecode for DehydratedBidSubmissionFuluWithAdjustments { fn 
from_ssz_bytes_by_fork(bytes: &[u8], fork: ForkName) -> Result { match fork { - ForkName::Base - | ForkName::Altair - | ForkName::Bellatrix - | ForkName::Capella - | ForkName::Deneb - | ForkName::Gloas - | ForkName::Electra => Err(DecodeError::NoMatchingVariant), + ForkName::Base | + ForkName::Altair | + ForkName::Bellatrix | + ForkName::Capella | + ForkName::Deneb | + ForkName::Gloas | + ForkName::Electra => Err(DecodeError::NoMatchingVariant), ForkName::Fulu => DehydratedBidSubmissionFuluWithAdjustments::from_ssz_bytes(bytes), } }