From 57c2e8bb53a7dfeff67d8a0ac90c02b209a19bb1 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 27 Feb 2026 16:20:27 +0100 Subject: [PATCH 01/82] . --- Cargo.lock | 20 + Cargo.toml | 1 + crates/tee-launcher/Cargo.toml | 33 + crates/tee-launcher/src/main.rs | 1553 +++++++++++++++++++++++++++++++ 4 files changed, 1607 insertions(+) create mode 100644 crates/tee-launcher/Cargo.toml create mode 100644 crates/tee-launcher/src/main.rs diff --git a/Cargo.lock b/Cargo.lock index 616a70cc6..9bcefd9d5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10538,6 +10538,26 @@ dependencies = [ "tracing", ] +[[package]] +name = "tee-launcher" +version = "3.5.1" +dependencies = [ + "assert_matches", + "dstack-sdk", + "hex", + "mpc-primitives", + "regex", + "reqwest 0.12.28", + "rstest", + "serde", + "serde_json", + "tempfile", + "thiserror 2.0.18", + "tokio", + "tracing", + "tracing-subscriber", +] + [[package]] name = "tempfile" version = "3.25.0" diff --git a/Cargo.toml b/Cargo.toml index d70890a96..57f1ae560 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,6 +19,7 @@ members = [ "crates/node-types", "crates/primitives", "crates/tee-authority", + "crates/tee-launcher", "crates/test-migration-contract", "crates/test-parallel-contract", "crates/test-utils", diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml new file mode 100644 index 000000000..78b4334de --- /dev/null +++ b/crates/tee-launcher/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "tee-launcher" +version = { workspace = true } +license = { workspace = true } +edition = { workspace = true } + +[[bin]] +name = "tee-launcher" +path = "src/main.rs" + +[features] +integration-test = [] + +[dependencies] +dstack-sdk = { workspace = true } +hex = { workspace = true } +mpc-primitives = { path = "../primitives" } +regex = { workspace = true } +reqwest = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +thiserror = { workspace = true } +tokio = { workspace = true } 
+tracing = { workspace = true } +tracing-subscriber = { workspace = true } + +[dev-dependencies] +assert_matches = { workspace = true } +rstest = { workspace = true } +tempfile = { workspace = true } + +[lints] +workspace = true diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs new file mode 100644 index 000000000..2ad98bbbf --- /dev/null +++ b/crates/tee-launcher/src/main.rs @@ -0,0 +1,1553 @@ +use std::collections::{BTreeMap, HashSet, VecDeque}; +use std::process::Command; +use std::sync::LazyLock; + +use regex::Regex; +use serde::Deserialize; +use thiserror::Error; + +// Reuse the workspace hash type for type-safe image hash handling. +use mpc_primitives::hash::MpcDockerImageHash; + +// --------------------------------------------------------------------------- +// Error +// --------------------------------------------------------------------------- + +#[derive(Error, Debug)] +pub enum LauncherError { + #[error("PLATFORM must be set to one of [TEE, NONTEE], got: {0}")] + InvalidPlatform(String), + + #[error("DOCKER_CONTENT_TRUST must be set to 1")] + DockerContentTrustNotEnabled, + + #[error("PLATFORM=TEE requires dstack unix socket at {0}")] + DstackSocketMissing(String), + + #[error("GetQuote failed before extending RTMR3: {0}")] + DstackGetQuoteFailed(String), + + #[error("EmitEvent failed while extending RTMR3: {0}")] + DstackEmitEventFailed(String), + + #[error("DEFAULT_IMAGE_DIGEST invalid: {0}")] + InvalidDefaultDigest(String), + + #[error("Invalid JSON in {path}: approved_hashes missing or empty")] + InvalidApprovedHashes { path: String }, + + #[error("MPC_HASH_OVERRIDE invalid: {0}")] + InvalidHashOverride(String), + + #[error("Image hash not found among tags")] + ImageHashNotFoundAmongTags, + + #[error("Failed to get auth token from registry: {0}")] + RegistryAuthFailed(String), + + #[error("Failed to get successful response from {url} after {attempts} attempts")] + RegistryRequestFailed { url: String, attempts: u32 }, + + 
#[error("docker pull failed for {0}")] + DockerPullFailed(String), + + #[error("docker inspect failed for {0}")] + DockerInspectFailed(String), + + #[error("Digest mismatch: pulled {pulled} != expected {expected}")] + DigestMismatch { pulled: String, expected: String }, + + #[error("MPC image hash validation failed: {0}")] + ImageValidationFailed(String), + + #[error("docker run failed for validated hash={0}")] + DockerRunFailed(String), + + #[error("Too many env vars to pass through (>{0})")] + TooManyEnvVars(usize), + + #[error("Total env payload too large (>{0} bytes)")] + EnvPayloadTooLarge(usize), + + #[error("Unsafe docker command: LD_PRELOAD detected")] + LdPreloadDetected, + + #[error("Failed to read {path}: {source}")] + FileRead { + path: String, + source: std::io::Error, + }, + + #[error("Failed to parse {path}: {source}")] + JsonParse { + path: String, + source: serde_json::Error, + }, + + #[error("Required environment variable not set: {0}")] + MissingEnvVar(String), + + #[error("HTTP error: {0}")] + Http(#[from] reqwest::Error), + + #[error("Registry response parse error: {0}")] + RegistryResponseParse(String), +} + +type Result = std::result::Result; + +// --------------------------------------------------------------------------- +// Constants — matching Python launcher exactly +// --------------------------------------------------------------------------- + +const MPC_CONTAINER_NAME: &str = "mpc-node"; +const IMAGE_DIGEST_FILE: &str = "/mnt/shared/image-digest.bin"; +const DSTACK_UNIX_SOCKET: &str = "/var/run/dstack.sock"; +const DSTACK_USER_CONFIG_FILE: &str = "/tapp/user_config"; + +const SHA256_PREFIX: &str = "sha256:"; + +// Docker Hub defaults +const DEFAULT_RPC_REQUEST_TIMEOUT_SECS: f64 = 10.0; +const DEFAULT_RPC_REQUEST_INTERVAL_SECS: f64 = 1.0; +const DEFAULT_RPC_MAX_ATTEMPTS: u32 = 20; + +const DEFAULT_MPC_IMAGE_NAME: &str = "nearone/mpc-node"; +const DEFAULT_MPC_REGISTRY: &str = "registry.hub.docker.com"; +const DEFAULT_MPC_IMAGE_TAG: 
&str = "latest"; + +// Env var names +const ENV_VAR_PLATFORM: &str = "PLATFORM"; +const ENV_VAR_DEFAULT_IMAGE_DIGEST: &str = "DEFAULT_IMAGE_DIGEST"; +const ENV_VAR_DOCKER_CONTENT_TRUST: &str = "DOCKER_CONTENT_TRUST"; +const ENV_VAR_MPC_HASH_OVERRIDE: &str = "MPC_HASH_OVERRIDE"; +const ENV_VAR_RPC_REQUEST_TIMEOUT_SECS: &str = "RPC_REQUEST_TIMEOUT_SECS"; +const ENV_VAR_RPC_REQUEST_INTERVAL_SECS: &str = "RPC_REQUEST_INTERVAL_SECS"; +const ENV_VAR_RPC_MAX_ATTEMPTS: &str = "RPC_MAX_ATTEMPTS"; + +const DSTACK_USER_CONFIG_MPC_IMAGE_TAGS: &str = "MPC_IMAGE_TAGS"; +const DSTACK_USER_CONFIG_MPC_IMAGE_NAME: &str = "MPC_IMAGE_NAME"; +const DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY: &str = "MPC_REGISTRY"; + +// Security limits +const MAX_PASSTHROUGH_ENV_VARS: usize = 64; +const MAX_ENV_VALUE_LEN: usize = 1024; +const MAX_TOTAL_ENV_BYTES: usize = 32 * 1024; + +// Regex patterns (compiled once) +static SHA256_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"^sha256:[0-9a-f]{64}$").unwrap()); +static MPC_ENV_KEY_RE: LazyLock = + LazyLock::new(|| Regex::new(r"^MPC_[A-Z0-9_]{1,64}$").unwrap()); +static HOST_ENTRY_RE: LazyLock = + LazyLock::new(|| Regex::new(r"^[a-zA-Z0-9\-\.]+:\d{1,3}(\.\d{1,3}){3}$").unwrap()); +static PORT_MAPPING_RE: LazyLock = + LazyLock::new(|| Regex::new(r"^(\d{1,5}):(\d{1,5})$").unwrap()); +static INVALID_HOST_ENTRY_PATTERN: LazyLock = + LazyLock::new(|| Regex::new(r"^[;&|`$\\<>\-]|^--").unwrap()); + +// Denied env keys — never pass these to the container +static DENIED_CONTAINER_ENV_KEYS: LazyLock> = + LazyLock::new(|| HashSet::from(["MPC_P2P_PRIVATE_KEY", "MPC_ACCOUNT_SK"])); + +// Allowed non-MPC env vars (backward compatibility) +static ALLOWED_MPC_ENV_VARS: LazyLock> = LazyLock::new(|| { + HashSet::from([ + "MPC_ACCOUNT_ID", + "MPC_LOCAL_ADDRESS", + "MPC_SECRET_STORE_KEY", + "MPC_CONTRACT_ID", + "MPC_ENV", + "MPC_HOME_DIR", + "NEAR_BOOT_NODES", + "RUST_BACKTRACE", + "RUST_LOG", + "MPC_RESPONDER_ID", + "MPC_BACKUP_ENCRYPTION_KEY_HEX", + ]) +}); + +// 
Launcher-only env vars — read from user config but never forwarded to container +static ALLOWED_LAUNCHER_ENV_VARS: LazyLock> = LazyLock::new(|| { + HashSet::from([ + DSTACK_USER_CONFIG_MPC_IMAGE_TAGS, + DSTACK_USER_CONFIG_MPC_IMAGE_NAME, + DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY, + ENV_VAR_MPC_HASH_OVERRIDE, + ENV_VAR_RPC_REQUEST_TIMEOUT_SECS, + ENV_VAR_RPC_REQUEST_INTERVAL_SECS, + ENV_VAR_RPC_MAX_ATTEMPTS, + ]) +}); + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Platform { + Tee, + NonTee, +} + +#[derive(Debug, Clone)] +pub struct RpcTimingConfig { + pub request_timeout_secs: f64, + pub request_interval_secs: f64, + pub max_attempts: u32, +} + +#[derive(Debug, Clone)] +pub struct ImageSpec { + pub tags: Vec, + pub image_name: String, + pub registry: String, +} + +#[derive(Debug, Clone)] +pub struct ResolvedImage { + pub spec: ImageSpec, + pub digest: String, +} + +/// JSON structure for the approved hashes file written by the MPC node. +/// Must stay aligned with `crates/node/src/tee/allowed_image_hashes_watcher.rs`. 
+#[derive(Debug, Deserialize)] +struct ApprovedHashesFile { + approved_hashes: Vec, +} + +// --------------------------------------------------------------------------- +// Validation functions — security policy for env passthrough +// --------------------------------------------------------------------------- + +fn has_control_chars(s: &str) -> bool { + for ch in s.chars() { + if ch == '\n' || ch == '\r' || ch == '\0' { + return true; + } + if (ch as u32) < 0x20 && ch != '\t' { + return true; + } + } + false +} + +fn is_safe_env_value(value: &str) -> bool { + if value.len() > MAX_ENV_VALUE_LEN { + return false; + } + if has_control_chars(value) { + return false; + } + if value.contains("LD_PRELOAD") { + return false; + } + true +} + +fn is_valid_ip(ip: &str) -> bool { + ip.parse::().is_ok() +} + +fn is_valid_host_entry(entry: &str) -> bool { + if !HOST_ENTRY_RE.is_match(entry) { + return false; + } + if let Some((_host, ip)) = entry.rsplit_once(':') { + is_valid_ip(ip) + } else { + false + } +} + +fn is_valid_port_mapping(entry: &str) -> bool { + if let Some(caps) = PORT_MAPPING_RE.captures(entry) { + let host_port: u32 = caps[1].parse().unwrap_or(0); + let container_port: u32 = caps[2].parse().unwrap_or(0); + host_port > 0 && host_port <= 65535 && container_port > 0 && container_port <= 65535 + } else { + false + } +} + +fn is_safe_host_entry(entry: &str) -> bool { + if INVALID_HOST_ENTRY_PATTERN.is_match(entry) { + return false; + } + if entry.contains("LD_PRELOAD") { + return false; + } + true +} + +fn is_safe_port_mapping(mapping: &str) -> bool { + !INVALID_HOST_ENTRY_PATTERN.is_match(mapping) +} + +fn is_allowed_container_env_key(key: &str) -> bool { + if DENIED_CONTAINER_ENV_KEYS.contains(key) { + return false; + } + if MPC_ENV_KEY_RE.is_match(key) { + return true; + } + if ALLOWED_MPC_ENV_VARS.contains(key) { + return true; + } + false +} + +// --------------------------------------------------------------------------- +// Config parsing +// 
--------------------------------------------------------------------------- + +fn parse_env_lines(lines: &[&str]) -> BTreeMap { + let mut env = BTreeMap::new(); + for line in lines { + let line = line.trim(); + if line.is_empty() || line.starts_with('#') || !line.contains('=') { + continue; + } + if let Some((key, value)) = line.split_once('=') { + let key = key.trim(); + let value = value.trim(); + if key.is_empty() { + continue; + } + env.insert(key.to_string(), value.to_string()); + } + } + env +} + +fn parse_env_file(path: &str) -> Result> { + let content = std::fs::read_to_string(path).map_err(|source| LauncherError::FileRead { + path: path.to_string(), + source, + })?; + let lines: Vec<&str> = content.lines().collect(); + Ok(parse_env_lines(&lines)) +} + +fn parse_platform() -> Result { + let raw = std::env::var(ENV_VAR_PLATFORM).map_err(|_| { + LauncherError::InvalidPlatform(format!( + "{ENV_VAR_PLATFORM} must be set to one of [TEE, NONTEE]" + )) + })?; + let val = raw.trim(); + match val { + "TEE" => Ok(Platform::Tee), + "NONTEE" => Ok(Platform::NonTee), + other => Err(LauncherError::InvalidPlatform(other.to_string())), + } +} + +fn load_rpc_timing_config(dstack_config: &BTreeMap) -> RpcTimingConfig { + let timeout_secs = dstack_config + .get(ENV_VAR_RPC_REQUEST_TIMEOUT_SECS) + .and_then(|v| v.parse().ok()) + .unwrap_or(DEFAULT_RPC_REQUEST_TIMEOUT_SECS); + let interval_secs = dstack_config + .get(ENV_VAR_RPC_REQUEST_INTERVAL_SECS) + .and_then(|v| v.parse().ok()) + .unwrap_or(DEFAULT_RPC_REQUEST_INTERVAL_SECS); + let max_attempts = dstack_config + .get(ENV_VAR_RPC_MAX_ATTEMPTS) + .and_then(|v| v.parse().ok()) + .unwrap_or(DEFAULT_RPC_MAX_ATTEMPTS); + RpcTimingConfig { + request_timeout_secs: timeout_secs, + request_interval_secs: interval_secs, + max_attempts, + } +} + +fn get_image_spec(dstack_config: &BTreeMap) -> ImageSpec { + let tags_raw = dstack_config + .get(DSTACK_USER_CONFIG_MPC_IMAGE_TAGS) + .cloned() + .unwrap_or_else(|| 
DEFAULT_MPC_IMAGE_TAG.to_string()); + let tags: Vec = tags_raw + .split(',') + .map(|t| t.trim().to_string()) + .filter(|t| !t.is_empty()) + .collect(); + tracing::info!("Using tags {tags:?} to find matching MPC node docker image."); + + let image_name = dstack_config + .get(DSTACK_USER_CONFIG_MPC_IMAGE_NAME) + .cloned() + .unwrap_or_else(|| DEFAULT_MPC_IMAGE_NAME.to_string()); + tracing::info!("Using image name {image_name}."); + + let registry = dstack_config + .get(DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY) + .cloned() + .unwrap_or_else(|| DEFAULT_MPC_REGISTRY.to_string()); + tracing::info!("Using registry {registry}."); + + ImageSpec { + tags, + image_name, + registry, + } +} + +// --------------------------------------------------------------------------- +// Hash selection +// --------------------------------------------------------------------------- + +fn is_valid_sha256_digest(digest: &str) -> bool { + SHA256_REGEX.is_match(digest) +} + +fn get_bare_digest(full_digest: &str) -> Result { + full_digest + .strip_prefix(SHA256_PREFIX) + .map(|s| s.to_string()) + .ok_or_else(|| { + LauncherError::InvalidDefaultDigest(format!( + "Invalid digest (missing sha256: prefix): {full_digest}" + )) + }) +} + +fn load_and_select_hash(dstack_config: &BTreeMap) -> Result { + let approved_hashes = if std::path::Path::new(IMAGE_DIGEST_FILE).is_file() { + let content = + std::fs::read_to_string(IMAGE_DIGEST_FILE).map_err(|source| LauncherError::FileRead { + path: IMAGE_DIGEST_FILE.to_string(), + source, + })?; + let data: ApprovedHashesFile = + serde_json::from_str(&content).map_err(|source| LauncherError::JsonParse { + path: IMAGE_DIGEST_FILE.to_string(), + source, + })?; + if data.approved_hashes.is_empty() { + return Err(LauncherError::InvalidApprovedHashes { + path: IMAGE_DIGEST_FILE.to_string(), + }); + } + data.approved_hashes + } else { + let fallback = std::env::var(ENV_VAR_DEFAULT_IMAGE_DIGEST) + .map_err(|_| 
LauncherError::MissingEnvVar(ENV_VAR_DEFAULT_IMAGE_DIGEST.to_string()))?;
        // Normalize: the env var may be given with or without the prefix.
        let fallback = fallback.trim().to_string();
        let fallback = if fallback.starts_with(SHA256_PREFIX) {
            fallback
        } else {
            format!("{SHA256_PREFIX}{fallback}")
        };
        if !is_valid_sha256_digest(&fallback) {
            return Err(LauncherError::InvalidDefaultDigest(fallback));
        }
        tracing::info!(
            "{IMAGE_DIGEST_FILE} missing → fallback to DEFAULT_IMAGE_DIGEST={fallback}"
        );
        vec![fallback]
    };

    tracing::info!("Approved MPC image hashes (newest → oldest):");
    for h in &approved_hashes {
        tracing::info!(" - {h}");
    }

    // Optional override: must be well-formed AND already approved.
    if let Some(override_hash) = dstack_config.get(ENV_VAR_MPC_HASH_OVERRIDE) {
        if !is_valid_sha256_digest(override_hash) {
            return Err(LauncherError::InvalidHashOverride(override_hash.clone()));
        }
        if !approved_hashes.contains(override_hash) {
            tracing::error!(
                "MPC_HASH_OVERRIDE={override_hash} does NOT match any approved hash!"
            );
            return Err(LauncherError::InvalidHashOverride(override_hash.clone()));
        }
        tracing::info!("MPC_HASH_OVERRIDE provided → selecting: {override_hash}");
        return Ok(override_hash.clone());
    }

    // No override → the newest approved hash is first in the list.
    let selected = approved_hashes[0].clone();
    tracing::info!("Selected MPC hash (newest allowed): {selected}");
    Ok(selected)
}

// ---------------------------------------------------------------------------
// Docker registry communication
// ---------------------------------------------------------------------------

/// GETs `url` with exponential backoff until a 200 arrives or the attempt
/// budget is exhausted. Note: deliberately sleeps BEFORE the first request
/// (parity with the original Python launcher).
async fn request_until_success(
    client: &reqwest::Client,
    url: &str,
    headers: &[(String, String)],
    timing: &RpcTimingConfig,
) -> Result<reqwest::Response> {
    let mut backoff = timing.request_interval_secs;

    for attempt in 1..=timing.max_attempts {
        // Sleep before request (matching Python behavior), then grow the
        // backoff by 1.5x, capped at 60s.
        tokio::time::sleep(std::time::Duration::from_secs_f64(backoff)).await;
        backoff = (backoff.max(1.0) * 1.5).min(60.0);

        let mut request = client.get(url);
        for (name, value) in headers {
            request = request.header(name.as_str(), value.as_str());
        }
        let outcome = request
            .timeout(std::time::Duration::from_secs_f64(
                timing.request_timeout_secs,
            ))
            .send()
            .await;

        match outcome {
            Err(e) => {
                tracing::warn!(
                    "Attempt {attempt}/{}: Failed to fetch {url}. Status: Timeout/Error: {e}",
                    timing.max_attempts
                );
            }
            Ok(resp) if resp.status() != reqwest::StatusCode::OK => {
                tracing::warn!(
                    "Attempt {attempt}/{}: Failed to fetch {url}. Status: {}",
                    timing.max_attempts,
                    resp.status()
                );
            }
            Ok(resp) => return Ok(resp),
        }
    }

    Err(LauncherError::RegistryRequestFailed {
        url: url.to_string(),
        attempts: timing.max_attempts,
    })
}

/// Walks the configured tags looking for a manifest whose config digest
/// matches the selected image digest, returning its content digest.
async fn get_manifest_digest(
    image: &ResolvedImage,
    timing: &RpcTimingConfig,
) -> Result<String> {
    if image.spec.tags.is_empty() {
        return Err(LauncherError::ImageHashNotFoundAmongTags);
    }

    // Fetch an auth token first.
    // NOTE(review): the token endpoint is hard-wired to Docker Hub even though
    // the registry host is configurable — confirm this is intentional.
    let token_url = format!(
        "https://auth.docker.io/token?service=registry.docker.io&scope=repository:{}:pull",
        image.spec.image_name
    );
    let client = reqwest::Client::new();
    let token_resp = client
        .get(&token_url)
        .send()
        .await
        .map_err(|e| LauncherError::RegistryAuthFailed(e.to_string()))?;
    if token_resp.status() != reqwest::StatusCode::OK {
        return Err(LauncherError::RegistryAuthFailed(format!(
            "status: {}",
            token_resp.status()
        )));
    }
    let token_json: serde_json::Value = token_resp
        .json()
        .await
        .map_err(|e| LauncherError::RegistryAuthFailed(e.to_string()))?;
    let token = token_json["token"]
        .as_str()
        .ok_or_else(|| LauncherError::RegistryAuthFailed("no token in response".to_string()))?
+ .to_string(); + + let mut tags: VecDeque = image.spec.tags.iter().cloned().collect(); + + while let Some(tag) = tags.pop_front() { + let manifest_url = format!( + "https://{}/v2/{}/manifests/{tag}", + image.spec.registry, image.spec.image_name + ); + let headers = vec![ + ( + "Accept".to_string(), + "application/vnd.docker.distribution.manifest.v2+json".to_string(), + ), + ("Authorization".to_string(), format!("Bearer {token}")), + ]; + + match request_until_success(&client, &manifest_url, &headers, timing).await { + Ok(resp) => { + let content_digest = resp + .headers() + .get("Docker-Content-Digest") + .and_then(|v| v.to_str().ok()) + .map(|s| s.to_string()); + + let manifest: serde_json::Value = + resp.json().await.map_err(|e| { + LauncherError::RegistryResponseParse(e.to_string()) + })?; + + let media_type = manifest["mediaType"].as_str().unwrap_or(""); + match media_type { + "application/vnd.oci.image.index.v1+json" => { + // Multi-platform manifest; scan for amd64/linux + if let Some(manifests) = manifest["manifests"].as_array() { + for m in manifests { + let arch = m["platform"]["architecture"].as_str().unwrap_or(""); + let os = m["platform"]["os"].as_str().unwrap_or(""); + if arch == "amd64" && os == "linux" { + if let Some(digest) = m["digest"].as_str() { + tags.push_back(digest.to_string()); + } + } + } + } + } + "application/vnd.docker.distribution.manifest.v2+json" + | "application/vnd.oci.image.manifest.v1+json" => { + let config_digest = + manifest["config"]["digest"].as_str().unwrap_or(""); + if config_digest == image.digest { + if let Some(digest) = content_digest { + return Ok(digest); + } + } + } + _ => {} + } + } + Err(e) => { + tracing::warn!( + "{e}: Exceeded number of maximum RPC requests for any given attempt. 
\ + Will continue in the hopes of finding the matching image hash among remaining tags" + ); + } + } + } + + Err(LauncherError::ImageHashNotFoundAmongTags) +} + +async fn validate_image_hash( + image_digest: &str, + dstack_config: &BTreeMap, + timing: &RpcTimingConfig, +) -> Result { + tracing::info!("Validating MPC hash: {image_digest}"); + + let image_spec = get_image_spec(dstack_config); + let docker_image = ResolvedImage { + spec: image_spec, + digest: image_digest.to_string(), + }; + + let manifest_digest = get_manifest_digest(&docker_image, timing).await?; + let name_and_digest = format!("{}@{manifest_digest}", docker_image.spec.image_name); + + // Pull + let pull = Command::new("docker") + .args(["pull", &name_and_digest]) + .output() + .map_err(|e| LauncherError::DockerPullFailed(e.to_string()))?; + if !pull.status.success() { + tracing::error!("docker pull failed for {image_digest}"); + return Ok(false); + } + + // Verify digest + let inspect = Command::new("docker") + .args(["image", "inspect", "--format", "{{index .ID}}", &name_and_digest]) + .output() + .map_err(|e| LauncherError::DockerInspectFailed(e.to_string()))?; + if !inspect.status.success() { + tracing::error!("docker inspect failed for {image_digest}"); + return Ok(false); + } + + let pulled_digest = String::from_utf8_lossy(&inspect.stdout).trim().to_string(); + if pulled_digest != image_digest { + tracing::error!("digest mismatch: {pulled_digest} != {image_digest}"); + return Ok(false); + } + + tracing::info!("MPC hash {image_digest} validated successfully."); + Ok(true) +} + +// --------------------------------------------------------------------------- +// Docker command builder +// --------------------------------------------------------------------------- + +fn remove_existing_container() { + let output = Command::new("docker") + .args(["ps", "-a", "--format", "{{.Names}}"]) + .output(); + + match output { + Ok(out) => { + let names = String::from_utf8_lossy(&out.stdout); + if 
names.lines().any(|n| n == MPC_CONTAINER_NAME) { + tracing::info!("Removing existing container: {MPC_CONTAINER_NAME}"); + let _ = Command::new("docker") + .args(["rm", "-f", MPC_CONTAINER_NAME]) + .output(); + } + } + Err(e) => { + tracing::warn!("Failed to check/remove container {MPC_CONTAINER_NAME}: {e}"); + } + } +} + +fn build_docker_cmd( + platform: Platform, + user_env: &BTreeMap, + image_digest: &str, +) -> Result> { + let bare_digest = get_bare_digest(image_digest)?; + + let mut cmd: Vec = vec!["docker".into(), "run".into()]; + + // Required environment variables + cmd.extend(["--env".into(), format!("MPC_IMAGE_HASH={bare_digest}")]); + cmd.extend([ + "--env".into(), + format!("MPC_LATEST_ALLOWED_HASH_FILE={IMAGE_DIGEST_FILE}"), + ]); + + if platform == Platform::Tee { + cmd.extend([ + "--env".into(), + format!("DSTACK_ENDPOINT={DSTACK_UNIX_SOCKET}"), + ]); + cmd.extend([ + "-v".into(), + format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"), + ]); + } + + // Track env passthrough size/caps + let mut passed_env_count: usize = 0; + let mut total_env_bytes: usize = 0; + + // BTreeMap iteration is already sorted by key (deterministic) + for (key, value) in user_env { + if ALLOWED_LAUNCHER_ENV_VARS.contains(key.as_str()) { + continue; + } + + if key == "EXTRA_HOSTS" { + for host_entry in value.split(',') { + let clean = host_entry.trim(); + if is_safe_host_entry(clean) && is_valid_host_entry(clean) { + cmd.extend(["--add-host".into(), clean.to_string()]); + } else { + tracing::warn!("Ignoring invalid or unsafe EXTRA_HOSTS entry: {clean}"); + } + } + continue; + } + + if key == "PORTS" { + for port_pair in value.split(',') { + let clean = port_pair.trim(); + if is_safe_port_mapping(clean) && is_valid_port_mapping(clean) { + cmd.extend(["-p".into(), clean.to_string()]); + } else { + tracing::warn!("Ignoring invalid or unsafe PORTS entry: {clean}"); + } + } + continue; + } + + if !is_allowed_container_env_key(key) { + tracing::warn!("Ignoring unknown or unapproved 
env var: {key}"); + continue; + } + + if !is_safe_env_value(value) { + tracing::warn!("Ignoring env var with unsafe value: {key}"); + continue; + } + + passed_env_count += 1; + if passed_env_count > MAX_PASSTHROUGH_ENV_VARS { + return Err(LauncherError::TooManyEnvVars(MAX_PASSTHROUGH_ENV_VARS)); + } + + total_env_bytes += key.len() + 1 + value.len(); + if total_env_bytes > MAX_TOTAL_ENV_BYTES { + return Err(LauncherError::EnvPayloadTooLarge(MAX_TOTAL_ENV_BYTES)); + } + + cmd.extend(["--env".into(), format!("{key}={value}")]); + } + + // Container run configuration + cmd.extend([ + "--security-opt".into(), + "no-new-privileges:true".into(), + "-v".into(), + "/tapp:/tapp:ro".into(), + "-v".into(), + "shared-volume:/mnt/shared".into(), + "-v".into(), + "mpc-data:/data".into(), + "--name".into(), + MPC_CONTAINER_NAME.into(), + "--detach".into(), + image_digest.to_string(), + ]); + + tracing::info!("docker cmd {}", cmd.join(" ")); + + // Final LD_PRELOAD safeguard + let cmd_str = cmd.join(" "); + if cmd_str.contains("LD_PRELOAD") { + return Err(LauncherError::LdPreloadDetected); + } + + Ok(cmd) +} + +fn launch_mpc_container( + platform: Platform, + valid_hash: &str, + user_env: &BTreeMap, +) -> Result<()> { + tracing::info!("Launching MPC node with validated hash: {valid_hash}"); + + remove_existing_container(); + let docker_cmd = build_docker_cmd(platform, user_env, valid_hash)?; + + let status = Command::new(&docker_cmd[0]) + .args(&docker_cmd[1..]) + .status() + .map_err(|e| LauncherError::DockerRunFailed(e.to_string()))?; + + if !status.success() { + return Err(LauncherError::DockerRunFailed(format!( + "validated hash={valid_hash}" + ))); + } + + tracing::info!("MPC launched successfully."); + Ok(()) +} + +// --------------------------------------------------------------------------- +// Dstack TEE communication (via dstack-sdk, no curl) +// --------------------------------------------------------------------------- + +fn is_unix_socket(path: &str) -> bool { + use 
std::os::unix::fs::FileTypeExt; + match std::fs::metadata(path) { + Ok(meta) => meta.file_type().is_socket(), + Err(_) => false, + } +} + +async fn extend_rtmr3(platform: Platform, valid_hash: &str) -> Result<()> { + if platform == Platform::NonTee { + tracing::info!("PLATFORM=NONTEE → skipping RTMR3 extension step."); + return Ok(()); + } + + if !is_unix_socket(DSTACK_UNIX_SOCKET) { + return Err(LauncherError::DstackSocketMissing( + DSTACK_UNIX_SOCKET.to_string(), + )); + } + + let bare = get_bare_digest(valid_hash)?; + tracing::info!("Extending RTMR3 with validated hash: {bare}"); + + let client = + dstack_sdk::dstack_client::DstackClient::new(Some(DSTACK_UNIX_SOCKET)); + + // GetQuote first + client + .get_quote(vec![]) + .await + .map_err(|e| LauncherError::DstackGetQuoteFailed(e.to_string()))?; + + // EmitEvent with the image digest + client + .emit_event("mpc-image-digest".to_string(), bare.into_bytes()) + .await + .map_err(|e| LauncherError::DstackEmitEventFailed(e.to_string()))?; + + Ok(()) +} + +// --------------------------------------------------------------------------- +// Main orchestration +// --------------------------------------------------------------------------- + +async fn run() -> Result<()> { + tracing::info!("start"); + + let platform = parse_platform()?; + tracing::info!("Launcher platform: {}", match platform { + Platform::Tee => "TEE", + Platform::NonTee => "NONTEE", + }); + + if platform == Platform::Tee && !is_unix_socket(DSTACK_UNIX_SOCKET) { + return Err(LauncherError::DstackSocketMissing( + DSTACK_UNIX_SOCKET.to_string(), + )); + } + + // DOCKER_CONTENT_TRUST must be enabled + let dct = std::env::var(ENV_VAR_DOCKER_CONTENT_TRUST).unwrap_or_default(); + if dct != "1" { + return Err(LauncherError::DockerContentTrustNotEnabled); + } + + // Load dstack user config + let dstack_config: BTreeMap = + if std::path::Path::new(DSTACK_USER_CONFIG_FILE).is_file() { + parse_env_file(DSTACK_USER_CONFIG_FILE)? 
+ } else { + BTreeMap::new() + }; + + let rpc_cfg = load_rpc_timing_config(&dstack_config); + + let selected_hash = load_and_select_hash(&dstack_config)?; + + if !validate_image_hash(&selected_hash, &dstack_config, &rpc_cfg).await? { + return Err(LauncherError::ImageValidationFailed(selected_hash)); + } + + tracing::info!("MPC image hash validated successfully: {selected_hash}"); + + extend_rtmr3(platform, &selected_hash).await?; + + launch_mpc_container(platform, &selected_hash, &dstack_config)?; + + Ok(()) +} + +#[tokio::main] +async fn main() { + tracing_subscriber::fmt() + .with_env_filter( + tracing_subscriber::EnvFilter::from_default_env() + .add_directive(tracing::Level::INFO.into()), + ) + .init(); + + if let Err(e) = run().await { + tracing::error!("Error: {e}"); + std::process::exit(1); + } +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use assert_matches::assert_matches; + use rstest::rstest; + + // -- Config parsing tests ----------------------------------------------- + + #[test] + fn test_parse_env_lines_basic() { + let lines = vec![ + "# a comment", + "KEY1=value1", + " KEY2 = value2 ", + "", + "INVALIDLINE", + "EMPTY_KEY=", + ]; + let env = parse_env_lines(&lines); + assert_eq!(env.get("KEY1").unwrap(), "value1"); + assert_eq!(env.get("KEY2").unwrap(), "value2"); + assert_eq!(env.get("EMPTY_KEY").unwrap(), ""); + assert!(!env.contains_key("INVALIDLINE")); + } + + #[test] + fn test_config_ignores_blank_lines_and_comments() { + let lines = vec!["", " # This is a comment", " MPC_SECRET_STORE_KEY=topsecret", ""]; + let env = parse_env_lines(&lines); + assert_eq!(env.get("MPC_SECRET_STORE_KEY").unwrap(), "topsecret"); + assert_eq!(env.len(), 1); + } + + #[test] + fn test_config_skips_malformed_lines() { + let lines = vec![ + "GOOD_KEY=value", + "bad_line_without_equal", + 
"ANOTHER_GOOD=ok", + "=", + ]; + let env = parse_env_lines(&lines); + assert!(env.contains_key("GOOD_KEY")); + assert!(env.contains_key("ANOTHER_GOOD")); + assert!(!env.contains_key("bad_line_without_equal")); + assert!(!env.contains_key("")); + } + + #[test] + fn test_config_overrides_duplicate_keys() { + let lines = vec!["MPC_ACCOUNT_ID=first", "MPC_ACCOUNT_ID=second"]; + let env = parse_env_lines(&lines); + assert_eq!(env.get("MPC_ACCOUNT_ID").unwrap(), "second"); + } + + // -- Host/port validation tests ----------------------------------------- + + #[test] + fn test_valid_host_entry() { + assert!(is_valid_host_entry("node.local:192.168.1.1")); + assert!(!is_valid_host_entry("node.local:not-an-ip")); + assert!(!is_valid_host_entry("--env LD_PRELOAD=hack.so")); + } + + #[test] + fn test_valid_port_mapping() { + assert!(is_valid_port_mapping("11780:11780")); + assert!(!is_valid_port_mapping("65536:11780")); + assert!(!is_valid_port_mapping("--volume /:/mnt")); + } + + // -- Security validation tests ------------------------------------------ + + #[test] + fn test_has_control_chars_rejects_newline_and_cr() { + assert!(has_control_chars("a\nb")); + assert!(has_control_chars("a\rb")); + } + + #[test] + fn test_has_control_chars_allows_tab() { + assert!(!has_control_chars("a\tb")); + } + + #[test] + fn test_has_control_chars_rejects_other_control_chars() { + assert!(has_control_chars(&format!("a{}b", '\x1F'))); + } + + #[test] + fn test_is_safe_env_value_rejects_control_chars() { + assert!(!is_safe_env_value("ok\nno")); + assert!(!is_safe_env_value("ok\rno")); + assert!(!is_safe_env_value(&format!("ok{}no", '\x1F'))); + } + + #[test] + fn test_is_safe_env_value_rejects_ld_preload() { + assert!(!is_safe_env_value("LD_PRELOAD=/tmp/x.so")); + assert!(!is_safe_env_value("foo LD_PRELOAD bar")); + } + + #[test] + fn test_is_safe_env_value_rejects_too_long() { + assert!(!is_safe_env_value(&"a".repeat(MAX_ENV_VALUE_LEN + 1))); + 
assert!(is_safe_env_value(&"a".repeat(MAX_ENV_VALUE_LEN))); + } + + #[test] + fn test_is_allowed_container_env_key_allows_mpc_prefix_uppercase() { + assert!(is_allowed_container_env_key("MPC_FOO")); + assert!(is_allowed_container_env_key("MPC_FOO_123")); + assert!(is_allowed_container_env_key("MPC_A_B_C")); + } + + #[test] + fn test_is_allowed_container_env_key_rejects_lowercase_or_invalid() { + assert!(!is_allowed_container_env_key("MPC_foo")); + assert!(!is_allowed_container_env_key("MPC-FOO")); + assert!(!is_allowed_container_env_key("MPC.FOO")); + assert!(!is_allowed_container_env_key("MPC_")); + } + + #[test] + fn test_is_allowed_container_env_key_allows_compat_non_mpc_keys() { + assert!(is_allowed_container_env_key("RUST_LOG")); + assert!(is_allowed_container_env_key("RUST_BACKTRACE")); + assert!(is_allowed_container_env_key("NEAR_BOOT_NODES")); + } + + #[test] + fn test_is_allowed_container_env_key_denies_sensitive_keys() { + assert!(!is_allowed_container_env_key("MPC_P2P_PRIVATE_KEY")); + assert!(!is_allowed_container_env_key("MPC_ACCOUNT_SK")); + } + + // -- Docker cmd builder tests ------------------------------------------- + + fn make_digest() -> String { + format!("sha256:{}", "a".repeat(64)) + } + + fn base_env() -> BTreeMap { + BTreeMap::from([ + ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), + ("MPC_CONTRACT_ID".into(), "contract.near".into()), + ("MPC_ENV".into(), "testnet".into()), + ("MPC_HOME_DIR".into(), "/data".into()), + ("NEAR_BOOT_NODES".into(), "boot1,boot2".into()), + ("RUST_LOG".into(), "info".into()), + ]) + } + + #[test] + fn test_build_docker_cmd_sanitizes_ports_and_hosts() { + let env = BTreeMap::from([ + ("PORTS".into(), "11780:11780,--env BAD=1".into()), + ( + "EXTRA_HOSTS".into(), + "node:192.168.1.1,--volume /:/mnt".into(), + ), + ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), + ]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + + 
assert!(cmd.contains(&"MPC_ACCOUNT_ID=mpc-user-123".to_string())); + assert!(cmd.contains(&"11780:11780".to_string())); + assert!(cmd.contains(&"node:192.168.1.1".to_string())); + // Injection strings filtered + assert!(!cmd.iter().any(|arg| arg.contains("BAD=1"))); + assert!(!cmd.iter().any(|arg| arg.contains("/:/mnt"))); + } + + #[test] + fn test_extra_hosts_does_not_allow_ld_preload() { + let env = BTreeMap::from([ + ( + "EXTRA_HOSTS".into(), + "host:1.2.3.4,--env LD_PRELOAD=/evil.so".into(), + ), + ("MPC_ACCOUNT_ID".into(), "safe".into()), + ]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(cmd.contains(&"host:1.2.3.4".to_string())); + assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); + } + + #[test] + fn test_ports_does_not_allow_volume_injection() { + let env = BTreeMap::from([ + ("PORTS".into(), "2200:2200,--volume /:/mnt".into()), + ("MPC_ACCOUNT_ID".into(), "safe".into()), + ]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(cmd.contains(&"2200:2200".to_string())); + assert!(!cmd.iter().any(|arg| arg.contains("/:/mnt"))); + } + + #[test] + fn test_invalid_env_key_is_ignored() { + let env = BTreeMap::from([ + ("BAD_KEY".into(), "should_not_be_used".into()), + ("MPC_ACCOUNT_ID".into(), "safe".into()), + ]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(!cmd.join(" ").contains("should_not_be_used")); + assert!(cmd.contains(&"MPC_ACCOUNT_ID=safe".to_string())); + } + + #[test] + fn test_mpc_backup_encryption_key_is_allowed() { + let env = BTreeMap::from([( + "MPC_BACKUP_ENCRYPTION_KEY_HEX".into(), + "0".repeat(64), + )]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(cmd + .join(" ") + .contains(&format!("MPC_BACKUP_ENCRYPTION_KEY_HEX={}", "0".repeat(64)))); + } + + #[test] + fn test_malformed_extra_host_is_ignored() { + let env = BTreeMap::from([ + ( + "EXTRA_HOSTS".into(), + 
"badhostentry,no-colon,also--bad".into(), + ), + ("MPC_ACCOUNT_ID".into(), "safe".into()), + ]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(!cmd.contains(&"--add-host".to_string())); + } + + #[test] + fn test_env_value_with_shell_injection_is_handled_safely() { + let env = BTreeMap::from([("MPC_ACCOUNT_ID".into(), "safe; rm -rf /".into())]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(cmd.contains(&"MPC_ACCOUNT_ID=safe; rm -rf /".to_string())); + } + + #[test] + fn test_build_docker_cmd_nontee_no_dstack_mount() { + let mut env = BTreeMap::new(); + env.insert("MPC_ACCOUNT_ID".into(), "x".into()); + env.insert(ENV_VAR_RPC_MAX_ATTEMPTS.into(), "5".into()); + let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); + let s = cmd.join(" "); + assert!(!s.contains("DSTACK_ENDPOINT=")); + assert!(!s.contains(&format!( + "{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}" + ))); + } + + #[test] + fn test_build_docker_cmd_tee_has_dstack_mount() { + let env = BTreeMap::from([("MPC_ACCOUNT_ID".into(), "x".into())]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + let s = cmd.join(" "); + assert!(s.contains(&format!("DSTACK_ENDPOINT={DSTACK_UNIX_SOCKET}"))); + assert!(s.contains(&format!( + "{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}" + ))); + } + + #[test] + fn test_build_docker_cmd_allows_arbitrary_mpc_prefix_env_vars() { + let mut env = base_env(); + env.insert("MPC_NEW_FEATURE_FLAG".into(), "1".into()); + env.insert("MPC_SOME_CONFIG".into(), "value".into()); + let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); + let cmd_str = cmd.join(" "); + assert!(cmd_str.contains("MPC_NEW_FEATURE_FLAG=1")); + assert!(cmd_str.contains("MPC_SOME_CONFIG=value")); + } + + #[test] + fn test_build_docker_cmd_blocks_sensitive_mpc_private_keys() { + let mut env = base_env(); + env.insert("MPC_P2P_PRIVATE_KEY".into(), "supersecret".into()); + 
env.insert("MPC_ACCOUNT_SK".into(), "supersecret2".into()); + let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); + let cmd_str = cmd.join(" "); + assert!(!cmd_str.contains("MPC_P2P_PRIVATE_KEY")); + assert!(!cmd_str.contains("MPC_ACCOUNT_SK")); + } + + #[test] + fn test_build_docker_cmd_rejects_env_value_with_newline() { + let mut env = base_env(); + env.insert("MPC_NEW_FEATURE_FLAG".into(), "ok\nbad".into()); + let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); + let cmd_str = cmd.join(" "); + assert!(!cmd_str.contains("MPC_NEW_FEATURE_FLAG")); + } + + #[test] + fn test_build_docker_cmd_enforces_max_env_count_cap() { + let mut env = base_env(); + for i in 0..=MAX_PASSTHROUGH_ENV_VARS { + env.insert(format!("MPC_X_{i}"), "1".into()); + } + let result = build_docker_cmd(Platform::NonTee, &env, &make_digest()); + assert_matches!(result, Err(LauncherError::TooManyEnvVars(_))); + } + + #[test] + fn test_build_docker_cmd_enforces_total_env_bytes_cap() { + let mut env = base_env(); + for i in 0..40 { + env.insert(format!("MPC_BIG_{i}"), "a".repeat(MAX_ENV_VALUE_LEN)); + } + let result = build_docker_cmd(Platform::NonTee, &env, &make_digest()); + assert_matches!(result, Err(LauncherError::EnvPayloadTooLarge(_))); + } + + // -- LD_PRELOAD injection tests ----------------------------------------- + + #[test] + fn test_ld_preload_injection_blocked_via_env_key() { + let env = BTreeMap::from([ + ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), + ( + "--env LD_PRELOAD".into(), + "/path/to/my/malloc.so".into(), + ), + ]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); + } + + #[test] + fn test_ld_preload_injection_blocked_via_extra_hosts() { + let env = BTreeMap::from([ + ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), + ( + "EXTRA_HOSTS".into(), + "host1:192.168.0.1,host2:192.168.0.2,--env LD_PRELOAD=/path/to/my/malloc.so" + .into(), 
+ ), + ]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(cmd.contains(&"--add-host".to_string())); + assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); + } + + #[test] + fn test_ld_preload_injection_blocked_via_ports() { + let env = BTreeMap::from([ + ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), + ( + "PORTS".into(), + "11780:11780,--env LD_PRELOAD=/path/to/my/malloc.so".into(), + ), + ]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(cmd.contains(&"-p".to_string())); + assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); + } + + #[test] + fn test_ld_preload_injection_blocked_via_mpc_account_id() { + let env = BTreeMap::from([ + ( + "MPC_ACCOUNT_ID".into(), + "mpc-user-123, --env LD_PRELOAD=/path/to/my/malloc.so".into(), + ), + ( + "EXTRA_HOSTS".into(), + "host1:192.168.0.1,host2:192.168.0.2".into(), + ), + ]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); + } + + #[test] + fn test_ld_preload_injection_blocked_via_dash_e() { + let env = BTreeMap::from([ + ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), + ("-e LD_PRELOAD".into(), "/path/to/my/malloc.so".into()), + ]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); + } + + #[test] + fn test_ld_preload_injection_blocked_via_extra_hosts_dash_e() { + let env = BTreeMap::from([ + ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), + ( + "EXTRA_HOSTS".into(), + "host1:192.168.0.1,host2:192.168.0.2,-e LD_PRELOAD=/path/to/my/malloc.so".into(), + ), + ]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(cmd.contains(&"--add-host".to_string())); + assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); + } + + #[test] + fn test_ld_preload_injection_blocked_via_ports_dash_e() { + let env = BTreeMap::from([ + 
("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), + ( + "PORTS".into(), + "11780:11780,-e LD_PRELOAD=/path/to/my/malloc.so".into(), + ), + ]); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + assert!(cmd.contains(&"-p".to_string())); + assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); + } + + // -- Hash selection tests ----------------------------------------------- + + fn make_digest_json(hashes: &[&str]) -> String { + serde_json::json!({"approved_hashes": hashes}).to_string() + } + + #[test] + fn test_override_present() { + let dir = tempfile::tempdir().unwrap(); + let file = dir.path().join("image-digest.bin"); + let override_value = format!("sha256:{}", "a".repeat(64)); + let approved = vec![ + format!("sha256:{}", "b".repeat(64)), + override_value.clone(), + format!("sha256:{}", "c".repeat(64)), + ]; + let json = serde_json::json!({"approved_hashes": approved}).to_string(); + std::fs::write(&file, &json).unwrap(); + + // We can't easily override IMAGE_DIGEST_FILE constant, so test load_and_select_hash + // by creating a standalone test that reads from a custom path. 
+ // Instead test the core logic directly: + let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); + assert!(data.approved_hashes.contains(&override_value)); + + let config = BTreeMap::from([( + ENV_VAR_MPC_HASH_OVERRIDE.to_string(), + override_value.clone(), + )]); + // The override is in the approved list, so it should be valid + assert!(is_valid_sha256_digest(&override_value)); + assert!(data.approved_hashes.contains(&override_value)); + } + + #[test] + fn test_override_not_in_list() { + let approved = vec!["sha256:aaa", "sha256:bbb"]; + let json = make_digest_json(&approved); + let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); + let override_hash = "sha256:xyz"; + assert!(!data.approved_hashes.contains(&override_hash.to_string())); + } + + #[test] + fn test_no_override_picks_newest() { + let approved = vec!["sha256:newest", "sha256:older", "sha256:oldest"]; + let json = make_digest_json(&approved); + let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); + assert_eq!(data.approved_hashes[0], "sha256:newest"); + } + + #[test] + fn test_json_key_matches_node() { + // Must stay aligned with crates/node/src/tee/allowed_image_hashes_watcher.rs + let json = r#"{"approved_hashes": ["sha256:abc"]}"#; + let data: ApprovedHashesFile = serde_json::from_str(json).unwrap(); + assert_eq!(data.approved_hashes.len(), 1); + } + + #[test] + fn test_get_bare_digest() { + assert_eq!( + get_bare_digest(&format!("sha256:{}", "a".repeat(64))).unwrap(), + "a".repeat(64) + ); + assert!(get_bare_digest("invalid").is_err()); + } + + #[test] + fn test_is_valid_sha256_digest() { + assert!(is_valid_sha256_digest(&format!("sha256:{}", "a".repeat(64)))); + assert!(!is_valid_sha256_digest("sha256:tooshort")); + assert!(!is_valid_sha256_digest("not-a-digest")); + // Uppercase hex should be rejected + assert!(!is_valid_sha256_digest(&format!("sha256:{}", "A".repeat(64)))); + } + + // -- Platform parsing tests 
--------------------------------------------- + + #[test] + fn test_parse_platform_missing() { + // Can't easily test env var absence in unit tests without side effects. + // This is tested via the error type: + let err = LauncherError::InvalidPlatform("not set".into()); + assert!(format!("{err}").contains("PLATFORM")); + } + + // -- Full flow docker cmd test ------------------------------------------ + + #[test] + fn test_parse_and_build_docker_cmd_full_flow() { + let config_str = "MPC_ACCOUNT_ID=test-user\nPORTS=11780:11780, --env BAD=oops\nEXTRA_HOSTS=host1:192.168.1.1, --volume /:/mnt\nIMAGE_HASH=sha256:abc123"; + let lines: Vec<&str> = config_str.lines().collect(); + let env = parse_env_lines(&lines); + let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + let cmd_str = cmd.join(" "); + + assert!(cmd_str.contains("MPC_ACCOUNT_ID=test-user")); + assert!(cmd_str.contains("11780:11780")); + assert!(cmd_str.contains("host1:192.168.1.1")); + assert!(!cmd_str.contains("BAD=oops")); + assert!(!cmd_str.contains("/:/mnt")); + } + + #[test] + fn test_full_docker_cmd_structure() { + let env = BTreeMap::from([("MPC_ACCOUNT_ID".into(), "test-user".into())]); + let digest = make_digest(); + let cmd = build_docker_cmd(Platform::NonTee, &env, &digest).unwrap(); + + // Check required subsequence + assert!(cmd.contains(&"docker".to_string())); + assert!(cmd.contains(&"run".to_string())); + assert!(cmd.contains(&"--security-opt".to_string())); + assert!(cmd.contains(&"no-new-privileges:true".to_string())); + assert!(cmd.contains(&"/tapp:/tapp:ro".to_string())); + assert!(cmd.contains(&"shared-volume:/mnt/shared".to_string())); + assert!(cmd.contains(&"mpc-data:/data".to_string())); + assert!(cmd.contains(&MPC_CONTAINER_NAME.to_string())); + assert!(cmd.contains(&"--detach".to_string())); + // Image digest should be the last argument + assert_eq!(cmd.last().unwrap(), &digest); + } + + // -- Dstack tests ------------------------------------------------------- 
+ + #[test] + fn test_extend_rtmr3_nontee_is_noop() { + // NonTee should return immediately without touching dstack + let rt = tokio::runtime::Runtime::new().unwrap(); + let result = rt.block_on(extend_rtmr3(Platform::NonTee, &make_digest())); + assert!(result.is_ok()); + } + + #[test] + fn test_extend_rtmr3_tee_requires_socket() { + // TEE mode should fail when socket doesn't exist + let rt = tokio::runtime::Runtime::new().unwrap(); + let result = rt.block_on(extend_rtmr3(Platform::Tee, &make_digest())); + assert_matches!(result, Err(LauncherError::DstackSocketMissing(_))); + } + + // -- MpcDockerImageHash integration test -------------------------------- + + #[test] + fn test_mpc_docker_image_hash_from_bare_hex() { + let bare_hex = "a".repeat(64); + let hash: MpcDockerImageHash = bare_hex.parse().unwrap(); + assert_eq!(hash.as_hex(), bare_hex); + } + + // -- Integration test (feature-gated) ----------------------------------- + + #[cfg(feature = "integration-test")] + mod integration { + use super::*; + + const TEST_DIGEST: &str = + "sha256:f2472280c437efc00fa25a030a24990ae16c4fbec0d74914e178473ce4d57372"; + + fn test_dstack_config() -> BTreeMap { + BTreeMap::from([ + ( + "MPC_IMAGE_TAGS".into(), + "83b52da4e2270c688cdd30da04f6b9d3565f25bb".into(), + ), + ("MPC_IMAGE_NAME".into(), "nearone/testing".into()), + ("MPC_REGISTRY".into(), "registry.hub.docker.com".into()), + ]) + } + + #[tokio::test] + async fn test_validate_image_hash_real_registry() { + let timing = RpcTimingConfig { + request_timeout_secs: 10.0, + request_interval_secs: 1.0, + max_attempts: 20, + }; + let result = validate_image_hash(TEST_DIGEST, &test_dstack_config(), &timing) + .await + .unwrap(); + assert!(result, "validate_image_hash() failed for test image"); + } + } +} From b2621f83d7444108273af4e4789e9a48117fd83d Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 27 Feb 2026 17:08:41 +0100 Subject: [PATCH 02/82] wip --- Cargo.lock | 1 - crates/tee-launcher/Cargo.toml | 1 - 
crates/tee-launcher/src/main.rs | 51 ++++++++++++++++++++------------- deployment/Dockerfile-launcher | 7 ++--- 4 files changed, 34 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9bcefd9d5..c9eae8c18 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10548,7 +10548,6 @@ dependencies = [ "mpc-primitives", "regex", "reqwest 0.12.28", - "rstest", "serde", "serde_json", "tempfile", diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index 78b4334de..3c7d05bf9 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -26,7 +26,6 @@ tracing-subscriber = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } -rstest = { workspace = true } tempfile = { workspace = true } [lints] diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 2ad98bbbf..76b117184 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -135,8 +135,6 @@ const MAX_ENV_VALUE_LEN: usize = 1024; const MAX_TOTAL_ENV_BYTES: usize = 32 * 1024; // Regex patterns (compiled once) -static SHA256_REGEX: LazyLock = - LazyLock::new(|| Regex::new(r"^sha256:[0-9a-f]{64}$").unwrap()); static MPC_ENV_KEY_RE: LazyLock = LazyLock::new(|| Regex::new(r"^MPC_[A-Z0-9_]{1,64}$").unwrap()); static HOST_ENTRY_RE: LazyLock = @@ -399,19 +397,29 @@ fn get_image_spec(dstack_config: &BTreeMap) -> ImageSpec { // Hash selection // --------------------------------------------------------------------------- -fn is_valid_sha256_digest(digest: &str) -> bool { - SHA256_REGEX.is_match(digest) -} - -fn get_bare_digest(full_digest: &str) -> Result { - full_digest +/// Parse a full `sha256:` digest into a validated [`MpcDockerImageHash`]. +/// +/// Uses the workspace type's `FromStr` impl which does `hex::decode` + 32-byte +/// length check — no regex needed. 
+fn parse_image_digest(full_digest: &str) -> Result { + let bare_hex = full_digest .strip_prefix(SHA256_PREFIX) - .map(|s| s.to_string()) .ok_or_else(|| { LauncherError::InvalidDefaultDigest(format!( "Invalid digest (missing sha256: prefix): {full_digest}" )) - }) + })?; + bare_hex + .parse::() + .map_err(|e| LauncherError::InvalidDefaultDigest(format!("{full_digest}: {e}"))) +} + +fn is_valid_sha256_digest(digest: &str) -> bool { + parse_image_digest(digest).is_ok() +} + +fn get_bare_digest(full_digest: &str) -> Result { + Ok(parse_image_digest(full_digest)?.as_hex()) } fn load_and_select_hash(dstack_config: &BTreeMap) -> Result { @@ -956,7 +964,7 @@ async fn main() { mod tests { use super::*; use assert_matches::assert_matches; - use rstest::rstest; + // -- Config parsing tests ----------------------------------------------- @@ -1391,10 +1399,6 @@ mod tests { let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); assert!(data.approved_hashes.contains(&override_value)); - let config = BTreeMap::from([( - ENV_VAR_MPC_HASH_OVERRIDE.to_string(), - override_value.clone(), - )]); // The override is in the approved list, so it should be valid assert!(is_valid_sha256_digest(&override_value)); assert!(data.approved_hashes.contains(&override_value)); @@ -1431,7 +1435,7 @@ mod tests { get_bare_digest(&format!("sha256:{}", "a".repeat(64))).unwrap(), "a".repeat(64) ); - assert!(get_bare_digest("invalid").is_err()); + get_bare_digest("invalid").unwrap_err(); } #[test] @@ -1439,8 +1443,15 @@ mod tests { assert!(is_valid_sha256_digest(&format!("sha256:{}", "a".repeat(64)))); assert!(!is_valid_sha256_digest("sha256:tooshort")); assert!(!is_valid_sha256_digest("not-a-digest")); - // Uppercase hex should be rejected - assert!(!is_valid_sha256_digest(&format!("sha256:{}", "A".repeat(64)))); + // hex::decode accepts uppercase; as_hex() normalizes to lowercase + assert!(is_valid_sha256_digest(&format!("sha256:{}", "A".repeat(64)))); + } + + #[test] + fn 
test_parse_image_digest_normalizes_case() { + let upper = format!("sha256:{}", "AB".repeat(32)); + let hash = parse_image_digest(&upper).unwrap(); + assert_eq!(hash.as_hex(), "ab".repeat(32)); } // -- Platform parsing tests --------------------------------------------- @@ -1496,8 +1507,8 @@ mod tests { fn test_extend_rtmr3_nontee_is_noop() { // NonTee should return immediately without touching dstack let rt = tokio::runtime::Runtime::new().unwrap(); - let result = rt.block_on(extend_rtmr3(Platform::NonTee, &make_digest())); - assert!(result.is_ok()); + rt.block_on(extend_rtmr3(Platform::NonTee, &make_digest())) + .unwrap(); } #[test] diff --git a/deployment/Dockerfile-launcher b/deployment/Dockerfile-launcher index 557e01c07..30bb734e4 100644 --- a/deployment/Dockerfile-launcher +++ b/deployment/Dockerfile-launcher @@ -8,11 +8,10 @@ RUN \ --mount=type=bind,source=./deployment/repro-sources-list.sh,target=/usr/local/bin/repro-sources-list.sh \ repro-sources-list.sh && \ apt-get update && \ - apt-get install -y --no-install-recommends docker.io docker-compose curl jq python3 && \ + apt-get install -y --no-install-recommends docker.io && \ : "Clean up for improving reproducibility" && \ rm -rf /var/log/* /var/cache/ldconfig/aux-cache -COPY --chmod=0755 tee_launcher/launcher.py /scripts/ -ENV PATH="/scripts:${PATH}" +COPY --chmod=0755 target/release/tee-launcher /usr/local/bin/tee-launcher RUN mkdir -p /app-data && mkdir -p /mnt/shared -CMD ["python3", "/scripts/launcher.py"] +CMD ["tee-launcher"] From f08c453a2b00e9535204da293e30461d03de892a Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 27 Feb 2026 17:25:14 +0100 Subject: [PATCH 03/82] wip --- crates/tee-launcher/src/contants.rs | 33 ++++ crates/tee-launcher/src/main.rs | 294 +++++++++++++--------------- tee_launcher/launcher.py | 1 + 3 files changed, 166 insertions(+), 162 deletions(-) create mode 100644 crates/tee-launcher/src/contants.rs diff --git a/crates/tee-launcher/src/contants.rs 
b/crates/tee-launcher/src/contants.rs new file mode 100644 index 000000000..454d82642 --- /dev/null +++ b/crates/tee-launcher/src/contants.rs @@ -0,0 +1,33 @@ +pub(crate) const MPC_CONTAINER_NAME: &str = "mpc-node"; +pub(crate) const IMAGE_DIGEST_FILE: &str = "/mnt/shared/image-digest.bin"; +pub(crate) const DSTACK_UNIX_SOCKET: &str = "/var/run/dstack.sock"; +pub(crate) const DSTACK_USER_CONFIG_FILE: &str = "/tapp/user_config"; + +pub(crate) const SHA256_PREFIX: &str = "sha256:"; + +// Docker Hub defaults +pub(crate) const DEFAULT_RPC_REQUEST_TIMEOUT_SECS: f64 = 10.0; +pub(crate) const DEFAULT_RPC_REQUEST_INTERVAL_SECS: f64 = 1.0; +pub(crate) const DEFAULT_RPC_MAX_ATTEMPTS: u32 = 20; + +pub(crate) const DEFAULT_MPC_IMAGE_NAME: &str = "nearone/mpc-node"; +pub(crate) const DEFAULT_MPC_REGISTRY: &str = "registry.hub.docker.com"; +pub(crate) const DEFAULT_MPC_IMAGE_TAG: &str = "latest"; + +// Env var names +pub(crate) const ENV_VAR_PLATFORM: &str = "PLATFORM"; +pub(crate) const ENV_VAR_DEFAULT_IMAGE_DIGEST: &str = "DEFAULT_IMAGE_DIGEST"; +pub(crate) const ENV_VAR_DOCKER_CONTENT_TRUST: &str = "DOCKER_CONTENT_TRUST"; +pub(crate) const ENV_VAR_MPC_HASH_OVERRIDE: &str = "MPC_HASH_OVERRIDE"; +pub(crate) const ENV_VAR_RPC_REQUEST_TIMEOUT_SECS: &str = "RPC_REQUEST_TIMEOUT_SECS"; +pub(crate) const ENV_VAR_RPC_REQUEST_INTERVAL_SECS: &str = "RPC_REQUEST_INTERVAL_SECS"; +pub(crate) const ENV_VAR_RPC_MAX_ATTEMPTS: &str = "RPC_MAX_ATTEMPTS"; + +pub(crate) const DSTACK_USER_CONFIG_MPC_IMAGE_TAGS: &str = "MPC_IMAGE_TAGS"; +pub(crate) const DSTACK_USER_CONFIG_MPC_IMAGE_NAME: &str = "MPC_IMAGE_NAME"; +pub(crate) const DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY: &str = "MPC_REGISTRY"; + +// Security limits +pub(crate) const MAX_PASSTHROUGH_ENV_VARS: usize = 64; +pub(crate) const MAX_ENV_VALUE_LEN: usize = 1024; +pub(crate) const MAX_TOTAL_ENV_BYTES: usize = 32 * 1024; diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 76b117184..587576828 100644 --- 
a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -2,6 +2,7 @@ use std::collections::{BTreeMap, HashSet, VecDeque}; use std::process::Command; use std::sync::LazyLock; +use contants::*; use regex::Regex; use serde::Deserialize; use thiserror::Error; @@ -9,6 +10,72 @@ use thiserror::Error; // Reuse the workspace hash type for type-safe image hash handling. use mpc_primitives::hash::MpcDockerImageHash; +mod contants; + +#[tokio::main] +async fn main() { + tracing_subscriber::fmt() + .with_env_filter( + tracing_subscriber::EnvFilter::from_default_env() + .add_directive(tracing::Level::INFO.into()), + ) + .init(); + + if let Err(e) = run().await { + tracing::error!("Error: {e}"); + std::process::exit(1); + } +} + +async fn run() -> Result<()> { + tracing::info!("start"); + + let platform = parse_platform()?; + tracing::info!( + "Launcher platform: {}", + match platform { + Platform::Tee => "TEE", + Platform::NonTee => "NONTEE", + } + ); + + if platform == Platform::Tee && !is_unix_socket(DSTACK_UNIX_SOCKET) { + return Err(LauncherError::DstackSocketMissing( + DSTACK_UNIX_SOCKET.to_string(), + )); + } + + // DOCKER_CONTENT_TRUST must be enabled + let dct = std::env::var(ENV_VAR_DOCKER_CONTENT_TRUST).unwrap_or_default(); + if dct != "1" { + return Err(LauncherError::DockerContentTrustNotEnabled); + } + + // Load dstack user config + let dstack_config: BTreeMap = + if std::path::Path::new(DSTACK_USER_CONFIG_FILE).is_file() { + parse_env_file(DSTACK_USER_CONFIG_FILE)? + } else { + BTreeMap::new() + }; + + let rpc_cfg = load_rpc_timing_config(&dstack_config); + + let selected_hash = load_and_select_hash(&dstack_config)?; + + if !validate_image_hash(&selected_hash, &dstack_config, &rpc_cfg).await? 
{ + return Err(LauncherError::ImageValidationFailed(selected_hash)); + } + + tracing::info!("MPC image hash validated successfully: {selected_hash}"); + + extend_rtmr3(platform, &selected_hash).await?; + + launch_mpc_container(platform, &selected_hash, &dstack_config)?; + + Ok(()) +} + // --------------------------------------------------------------------------- // Error // --------------------------------------------------------------------------- @@ -100,40 +167,6 @@ type Result = std::result::Result; // Constants — matching Python launcher exactly // --------------------------------------------------------------------------- -const MPC_CONTAINER_NAME: &str = "mpc-node"; -const IMAGE_DIGEST_FILE: &str = "/mnt/shared/image-digest.bin"; -const DSTACK_UNIX_SOCKET: &str = "/var/run/dstack.sock"; -const DSTACK_USER_CONFIG_FILE: &str = "/tapp/user_config"; - -const SHA256_PREFIX: &str = "sha256:"; - -// Docker Hub defaults -const DEFAULT_RPC_REQUEST_TIMEOUT_SECS: f64 = 10.0; -const DEFAULT_RPC_REQUEST_INTERVAL_SECS: f64 = 1.0; -const DEFAULT_RPC_MAX_ATTEMPTS: u32 = 20; - -const DEFAULT_MPC_IMAGE_NAME: &str = "nearone/mpc-node"; -const DEFAULT_MPC_REGISTRY: &str = "registry.hub.docker.com"; -const DEFAULT_MPC_IMAGE_TAG: &str = "latest"; - -// Env var names -const ENV_VAR_PLATFORM: &str = "PLATFORM"; -const ENV_VAR_DEFAULT_IMAGE_DIGEST: &str = "DEFAULT_IMAGE_DIGEST"; -const ENV_VAR_DOCKER_CONTENT_TRUST: &str = "DOCKER_CONTENT_TRUST"; -const ENV_VAR_MPC_HASH_OVERRIDE: &str = "MPC_HASH_OVERRIDE"; -const ENV_VAR_RPC_REQUEST_TIMEOUT_SECS: &str = "RPC_REQUEST_TIMEOUT_SECS"; -const ENV_VAR_RPC_REQUEST_INTERVAL_SECS: &str = "RPC_REQUEST_INTERVAL_SECS"; -const ENV_VAR_RPC_MAX_ATTEMPTS: &str = "RPC_MAX_ATTEMPTS"; - -const DSTACK_USER_CONFIG_MPC_IMAGE_TAGS: &str = "MPC_IMAGE_TAGS"; -const DSTACK_USER_CONFIG_MPC_IMAGE_NAME: &str = "MPC_IMAGE_NAME"; -const DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY: &str = "MPC_REGISTRY"; - -// Security limits -const MAX_PASSTHROUGH_ENV_VARS: usize = 
64; -const MAX_ENV_VALUE_LEN: usize = 1024; -const MAX_TOTAL_ENV_BYTES: usize = 32 * 1024; - // Regex patterns (compiled once) static MPC_ENV_KEY_RE: LazyLock = LazyLock::new(|| Regex::new(r"^MPC_[A-Z0-9_]{1,64}$").unwrap()); @@ -145,8 +178,7 @@ static INVALID_HOST_ENTRY_PATTERN: LazyLock = LazyLock::new(|| Regex::new(r"^[;&|`$\\<>\-]|^--").unwrap()); // Denied env keys — never pass these to the container -static DENIED_CONTAINER_ENV_KEYS: LazyLock> = - LazyLock::new(|| HashSet::from(["MPC_P2P_PRIVATE_KEY", "MPC_ACCOUNT_SK"])); +const DENIED_CONTAINER_ENV_KEYS: &[&str] = &["MPC_P2P_PRIVATE_KEY", "MPC_ACCOUNT_SK"]; // Allowed non-MPC env vars (backward compatibility) static ALLOWED_MPC_ENV_VARS: LazyLock> = LazyLock::new(|| { @@ -220,11 +252,13 @@ struct ApprovedHashesFile { // --------------------------------------------------------------------------- fn has_control_chars(s: &str) -> bool { - for ch in s.chars() { - if ch == '\n' || ch == '\r' || ch == '\0' { + let control_chars = ['\n', '\r', '\0']; + + for character in s.chars() { + if control_chars.contains(&character) { return true; } - if (ch as u32) < 0x20 && ch != '\t' { + if (character as u32) < 0x20 && character != '\t' { return true; } } @@ -284,12 +318,14 @@ fn is_safe_port_mapping(mapping: &str) -> bool { } fn is_allowed_container_env_key(key: &str) -> bool { - if DENIED_CONTAINER_ENV_KEYS.contains(key) { + if DENIED_CONTAINER_ENV_KEYS.contains(&key) { return false; } + // Allow MPC_* keys with strict regex if MPC_ENV_KEY_RE.is_match(key) { return true; } + // Keep allowlist if ALLOWED_MPC_ENV_VARS.contains(key) { return true; } @@ -402,13 +438,11 @@ fn get_image_spec(dstack_config: &BTreeMap) -> ImageSpec { /// Uses the workspace type's `FromStr` impl which does `hex::decode` + 32-byte /// length check — no regex needed. 
fn parse_image_digest(full_digest: &str) -> Result { - let bare_hex = full_digest - .strip_prefix(SHA256_PREFIX) - .ok_or_else(|| { - LauncherError::InvalidDefaultDigest(format!( - "Invalid digest (missing sha256: prefix): {full_digest}" - )) - })?; + let bare_hex = full_digest.strip_prefix(SHA256_PREFIX).ok_or_else(|| { + LauncherError::InvalidDefaultDigest(format!( + "Invalid digest (missing sha256: prefix): {full_digest}" + )) + })?; bare_hex .parse::() .map_err(|e| LauncherError::InvalidDefaultDigest(format!("{full_digest}: {e}"))) @@ -424,11 +458,12 @@ fn get_bare_digest(full_digest: &str) -> Result { fn load_and_select_hash(dstack_config: &BTreeMap) -> Result { let approved_hashes = if std::path::Path::new(IMAGE_DIGEST_FILE).is_file() { - let content = - std::fs::read_to_string(IMAGE_DIGEST_FILE).map_err(|source| LauncherError::FileRead { + let content = std::fs::read_to_string(IMAGE_DIGEST_FILE).map_err(|source| { + LauncherError::FileRead { path: IMAGE_DIGEST_FILE.to_string(), source, - })?; + } + })?; let data: ApprovedHashesFile = serde_json::from_str(&content).map_err(|source| LauncherError::JsonParse { path: IMAGE_DIGEST_FILE.to_string(), @@ -452,9 +487,7 @@ fn load_and_select_hash(dstack_config: &BTreeMap) -> Result) -> Result Result { +async fn get_manifest_digest(image: &ResolvedImage, timing: &RpcTimingConfig) -> Result { if image.spec.tags.is_empty() { return Err(LauncherError::ImageHashNotFoundAmongTags); } @@ -593,10 +623,10 @@ async fn get_manifest_digest( .and_then(|v| v.to_str().ok()) .map(|s| s.to_string()); - let manifest: serde_json::Value = - resp.json().await.map_err(|e| { - LauncherError::RegistryResponseParse(e.to_string()) - })?; + let manifest: serde_json::Value = resp + .json() + .await + .map_err(|e| LauncherError::RegistryResponseParse(e.to_string()))?; let media_type = manifest["mediaType"].as_str().unwrap_or(""); match media_type { @@ -616,8 +646,7 @@ async fn get_manifest_digest( } 
"application/vnd.docker.distribution.manifest.v2+json" | "application/vnd.oci.image.manifest.v1+json" => { - let config_digest = - manifest["config"]["digest"].as_str().unwrap_or(""); + let config_digest = manifest["config"]["digest"].as_str().unwrap_or(""); if config_digest == image.digest { if let Some(digest) = content_digest { return Ok(digest); @@ -667,7 +696,13 @@ async fn validate_image_hash( // Verify digest let inspect = Command::new("docker") - .args(["image", "inspect", "--format", "{{index .ID}}", &name_and_digest]) + .args([ + "image", + "inspect", + "--format", + "{{index .ID}}", + &name_and_digest, + ]) .output() .map_err(|e| LauncherError::DockerInspectFailed(e.to_string()))?; if !inspect.status.success() { @@ -695,8 +730,9 @@ fn remove_existing_container() { .output(); match output { - Ok(out) => { - let names = String::from_utf8_lossy(&out.stdout); + Ok(output) => { + let names = String::from_utf8_lossy(&output.stdout); + if names.lines().any(|n| n == MPC_CONTAINER_NAME) { tracing::info!("Removing existing container: {MPC_CONTAINER_NAME}"); let _ = Command::new("docker") @@ -704,8 +740,8 @@ fn remove_existing_container() { .output(); } } - Err(e) => { - tracing::warn!("Failed to check/remove container {MPC_CONTAINER_NAME}: {e}"); + Err(error) => { + tracing::warn!("Failed to check/remove container {MPC_CONTAINER_NAME}: {error}"); } } } @@ -873,8 +909,7 @@ async fn extend_rtmr3(platform: Platform, valid_hash: &str) -> Result<()> { let bare = get_bare_digest(valid_hash)?; tracing::info!("Extending RTMR3 with validated hash: {bare}"); - let client = - dstack_sdk::dstack_client::DstackClient::new(Some(DSTACK_UNIX_SOCKET)); + let client = dstack_sdk::dstack_client::DstackClient::new(Some(DSTACK_UNIX_SOCKET)); // GetQuote first client @@ -891,71 +926,6 @@ async fn extend_rtmr3(platform: Platform, valid_hash: &str) -> Result<()> { Ok(()) } -// --------------------------------------------------------------------------- -// Main orchestration -// 
--------------------------------------------------------------------------- - -async fn run() -> Result<()> { - tracing::info!("start"); - - let platform = parse_platform()?; - tracing::info!("Launcher platform: {}", match platform { - Platform::Tee => "TEE", - Platform::NonTee => "NONTEE", - }); - - if platform == Platform::Tee && !is_unix_socket(DSTACK_UNIX_SOCKET) { - return Err(LauncherError::DstackSocketMissing( - DSTACK_UNIX_SOCKET.to_string(), - )); - } - - // DOCKER_CONTENT_TRUST must be enabled - let dct = std::env::var(ENV_VAR_DOCKER_CONTENT_TRUST).unwrap_or_default(); - if dct != "1" { - return Err(LauncherError::DockerContentTrustNotEnabled); - } - - // Load dstack user config - let dstack_config: BTreeMap = - if std::path::Path::new(DSTACK_USER_CONFIG_FILE).is_file() { - parse_env_file(DSTACK_USER_CONFIG_FILE)? - } else { - BTreeMap::new() - }; - - let rpc_cfg = load_rpc_timing_config(&dstack_config); - - let selected_hash = load_and_select_hash(&dstack_config)?; - - if !validate_image_hash(&selected_hash, &dstack_config, &rpc_cfg).await? 
{ - return Err(LauncherError::ImageValidationFailed(selected_hash)); - } - - tracing::info!("MPC image hash validated successfully: {selected_hash}"); - - extend_rtmr3(platform, &selected_hash).await?; - - launch_mpc_container(platform, &selected_hash, &dstack_config)?; - - Ok(()) -} - -#[tokio::main] -async fn main() { - tracing_subscriber::fmt() - .with_env_filter( - tracing_subscriber::EnvFilter::from_default_env() - .add_directive(tracing::Level::INFO.into()), - ) - .init(); - - if let Err(e) = run().await { - tracing::error!("Error: {e}"); - std::process::exit(1); - } -} - // --------------------------------------------------------------------------- // Tests // --------------------------------------------------------------------------- @@ -965,7 +935,6 @@ mod tests { use super::*; use assert_matches::assert_matches; - // -- Config parsing tests ----------------------------------------------- #[test] @@ -987,7 +956,12 @@ mod tests { #[test] fn test_config_ignores_blank_lines_and_comments() { - let lines = vec!["", " # This is a comment", " MPC_SECRET_STORE_KEY=topsecret", ""]; + let lines = vec![ + "", + " # This is a comment", + " MPC_SECRET_STORE_KEY=topsecret", + "", + ]; let env = parse_env_lines(&lines); assert_eq!(env.get("MPC_SECRET_STORE_KEY").unwrap(), "topsecret"); assert_eq!(env.len(), 1); @@ -1171,14 +1145,12 @@ mod tests { #[test] fn test_mpc_backup_encryption_key_is_allowed() { - let env = BTreeMap::from([( - "MPC_BACKUP_ENCRYPTION_KEY_HEX".into(), - "0".repeat(64), - )]); + let env = BTreeMap::from([("MPC_BACKUP_ENCRYPTION_KEY_HEX".into(), "0".repeat(64))]); let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(cmd - .join(" ") - .contains(&format!("MPC_BACKUP_ENCRYPTION_KEY_HEX={}", "0".repeat(64)))); + assert!( + cmd.join(" ") + .contains(&format!("MPC_BACKUP_ENCRYPTION_KEY_HEX={}", "0".repeat(64))) + ); } #[test] @@ -1209,9 +1181,7 @@ mod tests { let cmd = build_docker_cmd(Platform::NonTee, &env, 
&make_digest()).unwrap(); let s = cmd.join(" "); assert!(!s.contains("DSTACK_ENDPOINT=")); - assert!(!s.contains(&format!( - "{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}" - ))); + assert!(!s.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); } #[test] @@ -1220,9 +1190,7 @@ mod tests { let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); let s = cmd.join(" "); assert!(s.contains(&format!("DSTACK_ENDPOINT={DSTACK_UNIX_SOCKET}"))); - assert!(s.contains(&format!( - "{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}" - ))); + assert!(s.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); } #[test] @@ -1282,10 +1250,7 @@ mod tests { fn test_ld_preload_injection_blocked_via_env_key() { let env = BTreeMap::from([ ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), - ( - "--env LD_PRELOAD".into(), - "/path/to/my/malloc.so".into(), - ), + ("--env LD_PRELOAD".into(), "/path/to/my/malloc.so".into()), ]); let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); @@ -1297,8 +1262,7 @@ mod tests { ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), ( "EXTRA_HOSTS".into(), - "host1:192.168.0.1,host2:192.168.0.2,--env LD_PRELOAD=/path/to/my/malloc.so" - .into(), + "host1:192.168.0.1,host2:192.168.0.2,--env LD_PRELOAD=/path/to/my/malloc.so".into(), ), ]); let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); @@ -1440,11 +1404,17 @@ mod tests { #[test] fn test_is_valid_sha256_digest() { - assert!(is_valid_sha256_digest(&format!("sha256:{}", "a".repeat(64)))); + assert!(is_valid_sha256_digest(&format!( + "sha256:{}", + "a".repeat(64) + ))); assert!(!is_valid_sha256_digest("sha256:tooshort")); assert!(!is_valid_sha256_digest("not-a-digest")); // hex::decode accepts uppercase; as_hex() normalizes to lowercase - assert!(is_valid_sha256_digest(&format!("sha256:{}", "A".repeat(64)))); + assert!(is_valid_sha256_digest(&format!( + "sha256:{}", + "A".repeat(64) + 
))); } #[test] diff --git a/tee_launcher/launcher.py b/tee_launcher/launcher.py index a0dd34958..73ae00cf4 100644 --- a/tee_launcher/launcher.py +++ b/tee_launcher/launcher.py @@ -260,6 +260,7 @@ class ImageSpec: image_name: str registry: str + # TODO: This post validation is not covered def __post_init__(self): if not self.tags or not all(is_non_empty_and_cleaned(tag) for tag in self.tags): raise ValueError( From 315a8335090332636aeeeab245f5dd695e13dd16 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 27 Feb 2026 17:52:12 +0100 Subject: [PATCH 04/82] . --- Cargo.lock | 1 + crates/tee-launcher/Cargo.toml | 1 + crates/tee-launcher/src/contants.rs | 3 - crates/tee-launcher/src/error.rs | 81 ++++++++++ crates/tee-launcher/src/main.rs | 243 +++++----------------------- crates/tee-launcher/src/types.rs | 54 +++++++ 6 files changed, 181 insertions(+), 202 deletions(-) create mode 100644 crates/tee-launcher/src/error.rs create mode 100644 crates/tee-launcher/src/types.rs diff --git a/Cargo.lock b/Cargo.lock index c9eae8c18..ecc7a1f21 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10543,6 +10543,7 @@ name = "tee-launcher" version = "3.5.1" dependencies = [ "assert_matches", + "clap", "dstack-sdk", "hex", "mpc-primitives", diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index 3c7d05bf9..204b8ab68 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -12,6 +12,7 @@ path = "src/main.rs" integration-test = [] [dependencies] +clap = { workspace = true } dstack-sdk = { workspace = true } hex = { workspace = true } mpc-primitives = { path = "../primitives" } diff --git a/crates/tee-launcher/src/contants.rs b/crates/tee-launcher/src/contants.rs index 454d82642..c3926b2b6 100644 --- a/crates/tee-launcher/src/contants.rs +++ b/crates/tee-launcher/src/contants.rs @@ -15,9 +15,6 @@ pub(crate) const DEFAULT_MPC_REGISTRY: &str = "registry.hub.docker.com"; pub(crate) const DEFAULT_MPC_IMAGE_TAG: &str = "latest"; // Env var 
names -pub(crate) const ENV_VAR_PLATFORM: &str = "PLATFORM"; -pub(crate) const ENV_VAR_DEFAULT_IMAGE_DIGEST: &str = "DEFAULT_IMAGE_DIGEST"; -pub(crate) const ENV_VAR_DOCKER_CONTENT_TRUST: &str = "DOCKER_CONTENT_TRUST"; pub(crate) const ENV_VAR_MPC_HASH_OVERRIDE: &str = "MPC_HASH_OVERRIDE"; pub(crate) const ENV_VAR_RPC_REQUEST_TIMEOUT_SECS: &str = "RPC_REQUEST_TIMEOUT_SECS"; pub(crate) const ENV_VAR_RPC_REQUEST_INTERVAL_SECS: &str = "RPC_REQUEST_INTERVAL_SECS"; diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs new file mode 100644 index 000000000..73a594bd0 --- /dev/null +++ b/crates/tee-launcher/src/error.rs @@ -0,0 +1,81 @@ +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum LauncherError { + #[error("DOCKER_CONTENT_TRUST must be set to 1")] + DockerContentTrustNotEnabled, + + #[error("PLATFORM=TEE requires dstack unix socket at {0}")] + DstackSocketMissing(String), + + #[error("GetQuote failed before extending RTMR3: {0}")] + DstackGetQuoteFailed(String), + + #[error("EmitEvent failed while extending RTMR3: {0}")] + DstackEmitEventFailed(String), + + #[error("DEFAULT_IMAGE_DIGEST invalid: {0}")] + InvalidDefaultDigest(String), + + #[error("Invalid JSON in {path}: approved_hashes missing or empty")] + InvalidApprovedHashes { path: String }, + + #[error("MPC_HASH_OVERRIDE invalid: {0}")] + InvalidHashOverride(String), + + #[error("Image hash not found among tags")] + ImageHashNotFoundAmongTags, + + #[error("Failed to get auth token from registry: {0}")] + RegistryAuthFailed(String), + + #[error("Failed to get successful response from {url} after {attempts} attempts")] + RegistryRequestFailed { url: String, attempts: u32 }, + + #[error("docker pull failed for {0}")] + DockerPullFailed(String), + + #[error("docker inspect failed for {0}")] + DockerInspectFailed(String), + + #[error("Digest mismatch: pulled {pulled} != expected {expected}")] + DigestMismatch { pulled: String, expected: String }, + + #[error("MPC image hash 
validation failed: {0}")] + ImageValidationFailed(String), + + #[error("docker run failed for validated hash={0}")] + DockerRunFailed(String), + + #[error("Too many env vars to pass through (>{0})")] + TooManyEnvVars(usize), + + #[error("Total env payload too large (>{0} bytes)")] + EnvPayloadTooLarge(usize), + + #[error("Unsafe docker command: LD_PRELOAD detected")] + LdPreloadDetected, + + #[error("Failed to read {path}: {source}")] + FileRead { + path: String, + source: std::io::Error, + }, + + #[error("Failed to parse {path}: {source}")] + JsonParse { + path: String, + source: serde_json::Error, + }, + + #[error("Required environment variable not set: {0}")] + MissingEnvVar(String), + + #[error("HTTP error: {0}")] + Http(#[from] reqwest::Error), + + #[error("Registry response parse error: {0}")] + RegistryResponseParse(String), +} + +pub type Result = std::result::Result; diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 587576828..8ad64faf7 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -2,15 +2,20 @@ use std::collections::{BTreeMap, HashSet, VecDeque}; use std::process::Command; use std::sync::LazyLock; -use contants::*; +use clap::Parser; use regex::Regex; -use serde::Deserialize; -use thiserror::Error; +use std::os::unix::fs::FileTypeExt as _; // Reuse the workspace hash type for type-safe image hash handling. 
use mpc_primitives::hash::MpcDockerImageHash; +use contants::*; +use error::*; +use types::*; + mod contants; +mod error; +mod types; #[tokio::main] async fn main() { @@ -30,24 +35,20 @@ async fn main() { async fn run() -> Result<()> { tracing::info!("start"); - let platform = parse_platform()?; - tracing::info!( - "Launcher platform: {}", - match platform { - Platform::Tee => "TEE", - Platform::NonTee => "NONTEE", - } - ); + let args = CliArgs::parse(); - if platform == Platform::Tee && !is_unix_socket(DSTACK_UNIX_SOCKET) { + tracing::info!(platform = ?args.platform, "starting launcher"); + + // TODO is_unix_socket can be a compile time check + if args.platform == Platform::Tee && !is_unix_socket(DSTACK_UNIX_SOCKET) { return Err(LauncherError::DstackSocketMissing( DSTACK_UNIX_SOCKET.to_string(), )); } + // TODO: `docker_content_trust` parse it to a type that only accepts deserialization into number 1 // DOCKER_CONTENT_TRUST must be enabled - let dct = std::env::var(ENV_VAR_DOCKER_CONTENT_TRUST).unwrap_or_default(); - if dct != "1" { + if args.docker_content_trust != "1" { return Err(LauncherError::DockerContentTrustNotEnabled); } @@ -61,7 +62,7 @@ async fn run() -> Result<()> { let rpc_cfg = load_rpc_timing_config(&dstack_config); - let selected_hash = load_and_select_hash(&dstack_config)?; + let selected_hash = load_and_select_hash(&args, &dstack_config)?; if !validate_image_hash(&selected_hash, &dstack_config, &rpc_cfg).await? 
{ return Err(LauncherError::ImageValidationFailed(selected_hash)); @@ -69,100 +70,13 @@ async fn run() -> Result<()> { tracing::info!("MPC image hash validated successfully: {selected_hash}"); - extend_rtmr3(platform, &selected_hash).await?; + extend_rtmr3(args.platform, &selected_hash).await?; - launch_mpc_container(platform, &selected_hash, &dstack_config)?; + launch_mpc_container(args.platform, &selected_hash, &dstack_config)?; Ok(()) } -// --------------------------------------------------------------------------- -// Error -// --------------------------------------------------------------------------- - -#[derive(Error, Debug)] -pub enum LauncherError { - #[error("PLATFORM must be set to one of [TEE, NONTEE], got: {0}")] - InvalidPlatform(String), - - #[error("DOCKER_CONTENT_TRUST must be set to 1")] - DockerContentTrustNotEnabled, - - #[error("PLATFORM=TEE requires dstack unix socket at {0}")] - DstackSocketMissing(String), - - #[error("GetQuote failed before extending RTMR3: {0}")] - DstackGetQuoteFailed(String), - - #[error("EmitEvent failed while extending RTMR3: {0}")] - DstackEmitEventFailed(String), - - #[error("DEFAULT_IMAGE_DIGEST invalid: {0}")] - InvalidDefaultDigest(String), - - #[error("Invalid JSON in {path}: approved_hashes missing or empty")] - InvalidApprovedHashes { path: String }, - - #[error("MPC_HASH_OVERRIDE invalid: {0}")] - InvalidHashOverride(String), - - #[error("Image hash not found among tags")] - ImageHashNotFoundAmongTags, - - #[error("Failed to get auth token from registry: {0}")] - RegistryAuthFailed(String), - - #[error("Failed to get successful response from {url} after {attempts} attempts")] - RegistryRequestFailed { url: String, attempts: u32 }, - - #[error("docker pull failed for {0}")] - DockerPullFailed(String), - - #[error("docker inspect failed for {0}")] - DockerInspectFailed(String), - - #[error("Digest mismatch: pulled {pulled} != expected {expected}")] - DigestMismatch { pulled: String, expected: String }, - - 
#[error("MPC image hash validation failed: {0}")] - ImageValidationFailed(String), - - #[error("docker run failed for validated hash={0}")] - DockerRunFailed(String), - - #[error("Too many env vars to pass through (>{0})")] - TooManyEnvVars(usize), - - #[error("Total env payload too large (>{0} bytes)")] - EnvPayloadTooLarge(usize), - - #[error("Unsafe docker command: LD_PRELOAD detected")] - LdPreloadDetected, - - #[error("Failed to read {path}: {source}")] - FileRead { - path: String, - source: std::io::Error, - }, - - #[error("Failed to parse {path}: {source}")] - JsonParse { - path: String, - source: serde_json::Error, - }, - - #[error("Required environment variable not set: {0}")] - MissingEnvVar(String), - - #[error("HTTP error: {0}")] - Http(#[from] reqwest::Error), - - #[error("Registry response parse error: {0}")] - RegistryResponseParse(String), -} - -type Result = std::result::Result; - // --------------------------------------------------------------------------- // Constants — matching Python launcher exactly // --------------------------------------------------------------------------- @@ -181,21 +95,19 @@ static INVALID_HOST_ENTRY_PATTERN: LazyLock = const DENIED_CONTAINER_ENV_KEYS: &[&str] = &["MPC_P2P_PRIVATE_KEY", "MPC_ACCOUNT_SK"]; // Allowed non-MPC env vars (backward compatibility) -static ALLOWED_MPC_ENV_VARS: LazyLock> = LazyLock::new(|| { - HashSet::from([ - "MPC_ACCOUNT_ID", - "MPC_LOCAL_ADDRESS", - "MPC_SECRET_STORE_KEY", - "MPC_CONTRACT_ID", - "MPC_ENV", - "MPC_HOME_DIR", - "NEAR_BOOT_NODES", - "RUST_BACKTRACE", - "RUST_LOG", - "MPC_RESPONDER_ID", - "MPC_BACKUP_ENCRYPTION_KEY_HEX", - ]) -}); +const ALLOWED_MPC_ENV_VARS: &[&str] = &[ + "MPC_ACCOUNT_ID", + "MPC_LOCAL_ADDRESS", + "MPC_SECRET_STORE_KEY", + "MPC_CONTRACT_ID", + "MPC_ENV", + "MPC_HOME_DIR", + "NEAR_BOOT_NODES", + "RUST_BACKTRACE", + "RUST_LOG", + "MPC_RESPONDER_ID", + "MPC_BACKUP_ENCRYPTION_KEY_HEX", +]; // Launcher-only env vars — read from user config but never forwarded to 
container static ALLOWED_LAUNCHER_ENV_VARS: LazyLock> = LazyLock::new(|| { @@ -210,43 +122,6 @@ static ALLOWED_LAUNCHER_ENV_VARS: LazyLock> = LazyLock::new(|| { ]) }); -// --------------------------------------------------------------------------- -// Types -// --------------------------------------------------------------------------- - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum Platform { - Tee, - NonTee, -} - -#[derive(Debug, Clone)] -pub struct RpcTimingConfig { - pub request_timeout_secs: f64, - pub request_interval_secs: f64, - pub max_attempts: u32, -} - -#[derive(Debug, Clone)] -pub struct ImageSpec { - pub tags: Vec, - pub image_name: String, - pub registry: String, -} - -#[derive(Debug, Clone)] -pub struct ResolvedImage { - pub spec: ImageSpec, - pub digest: String, -} - -/// JSON structure for the approved hashes file written by the MPC node. -/// Must stay aligned with `crates/node/src/tee/allowed_image_hashes_watcher.rs`. -#[derive(Debug, Deserialize)] -struct ApprovedHashesFile { - approved_hashes: Vec, -} - // --------------------------------------------------------------------------- // Validation functions — security policy for env passthrough // --------------------------------------------------------------------------- @@ -326,7 +201,7 @@ fn is_allowed_container_env_key(key: &str) -> bool { return true; } // Keep allowlist - if ALLOWED_MPC_ENV_VARS.contains(key) { + if ALLOWED_MPC_ENV_VARS.contains(&key) { return true; } false @@ -364,20 +239,6 @@ fn parse_env_file(path: &str) -> Result> { Ok(parse_env_lines(&lines)) } -fn parse_platform() -> Result { - let raw = std::env::var(ENV_VAR_PLATFORM).map_err(|_| { - LauncherError::InvalidPlatform(format!( - "{ENV_VAR_PLATFORM} must be set to one of [TEE, NONTEE]" - )) - })?; - let val = raw.trim(); - match val { - "TEE" => Ok(Platform::Tee), - "NONTEE" => Ok(Platform::NonTee), - other => Err(LauncherError::InvalidPlatform(other.to_string())), - } -} - fn 
load_rpc_timing_config(dstack_config: &BTreeMap) -> RpcTimingConfig { let timeout_secs = dstack_config .get(ENV_VAR_RPC_REQUEST_TIMEOUT_SECS) @@ -456,7 +317,10 @@ fn get_bare_digest(full_digest: &str) -> Result { Ok(parse_image_digest(full_digest)?.as_hex()) } -fn load_and_select_hash(dstack_config: &BTreeMap) -> Result { +fn load_and_select_hash( + args: &CliArgs, + dstack_config: &BTreeMap, +) -> Result { let approved_hashes = if std::path::Path::new(IMAGE_DIGEST_FILE).is_file() { let content = std::fs::read_to_string(IMAGE_DIGEST_FILE).map_err(|source| { LauncherError::FileRead { @@ -476,11 +340,13 @@ fn load_and_select_hash(dstack_config: &BTreeMap) -> Result bool { - use std::os::unix::fs::FileTypeExt; - match std::fs::metadata(path) { - Ok(meta) => meta.file_type().is_socket(), - Err(_) => false, - } + std::fs::metadata(path).is_ok_and(|meta| meta.file_type().is_socket()) } async fn extend_rtmr3(platform: Platform, valid_hash: &str) -> Result<()> { @@ -926,10 +785,6 @@ async fn extend_rtmr3(platform: Platform, valid_hash: &str) -> Result<()> { Ok(()) } -// --------------------------------------------------------------------------- -// Tests -// --------------------------------------------------------------------------- - #[cfg(test)] mod tests { use super::*; @@ -1424,16 +1279,6 @@ mod tests { assert_eq!(hash.as_hex(), "ab".repeat(32)); } - // -- Platform parsing tests --------------------------------------------- - - #[test] - fn test_parse_platform_missing() { - // Can't easily test env var absence in unit tests without side effects. 
- // This is tested via the error type: - let err = LauncherError::InvalidPlatform("not set".into()); - assert!(format!("{err}").contains("PLATFORM")); - } - // -- Full flow docker cmd test ------------------------------------------ #[test] diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs new file mode 100644 index 000000000..707326904 --- /dev/null +++ b/crates/tee-launcher/src/types.rs @@ -0,0 +1,54 @@ +use clap::{Parser, ValueEnum}; +use serde::Deserialize; + +/// CLI arguments parsed from environment variables via clap. +#[derive(Parser, Debug)] +#[command(name = "tee-launcher")] +pub struct CliArgs { + /// Platform mode: TEE or NONTEE + #[arg(long, env = "PLATFORM")] + pub platform: Platform, + + /// Must be set to "1" to enable Docker Content Trust + #[arg(long, env = "DOCKER_CONTENT_TRUST", default_value = "")] + pub docker_content_trust: String, + + /// Fallback image digest when the approved-hashes file is absent + #[arg(long, env = "DEFAULT_IMAGE_DIGEST")] + pub default_image_digest: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)] +pub enum Platform { + #[value(name = "TEE")] + Tee, + #[value(name = "NONTEE")] + NonTee, +} + +#[derive(Debug, Clone)] +pub struct RpcTimingConfig { + pub request_timeout_secs: f64, + pub request_interval_secs: f64, + pub max_attempts: u32, +} + +#[derive(Debug, Clone)] +pub struct ImageSpec { + pub tags: Vec, + pub image_name: String, + pub registry: String, +} + +#[derive(Debug, Clone)] +pub struct ResolvedImage { + pub spec: ImageSpec, + pub digest: String, +} + +/// JSON structure for the approved hashes file written by the MPC node. +/// Must stay aligned with `crates/node/src/tee/allowed_image_hashes_watcher.rs`. 
+#[derive(Debug, Deserialize)] +pub struct ApprovedHashesFile { + pub approved_hashes: Vec, +} From 07b3be510ce7c051ea0b177548283b15f76fc367 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 27 Feb 2026 18:33:00 +0100 Subject: [PATCH 05/82] wip --- Cargo.lock | 10 +++ Cargo.toml | 2 + crates/launcher-interface/Cargo.toml | 13 ++++ crates/launcher-interface/src/lib.rs | 12 ++++ crates/node/Cargo.toml | 1 + .../src/tee/allowed_image_hashes_watcher.rs | 24 ++----- crates/tee-launcher/Cargo.toml | 3 +- crates/tee-launcher/src/main.rs | 65 ++++++++++--------- crates/tee-launcher/src/types.rs | 11 +--- 9 files changed, 85 insertions(+), 56 deletions(-) create mode 100644 crates/launcher-interface/Cargo.toml create mode 100644 crates/launcher-interface/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index ecc7a1f21..49e17fb79 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4925,6 +4925,14 @@ dependencies = [ "sha3-asm", ] +[[package]] +name = "launcher-interface" +version = "3.5.1" +dependencies = [ + "mpc-primitives", + "serde", +] + [[package]] name = "lazy_static" version = "1.5.0" @@ -5489,6 +5497,7 @@ dependencies = [ "hyper 0.14.32", "itertools 0.14.0", "k256", + "launcher-interface", "lru 0.16.3", "mockall", "mpc-attestation", @@ -10546,6 +10555,7 @@ dependencies = [ "clap", "dstack-sdk", "hex", + "launcher-interface", "mpc-primitives", "regex", "reqwest 0.12.28", diff --git a/Cargo.toml b/Cargo.toml index 57f1ae560..5c1ba25fb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,6 +13,7 @@ members = [ "crates/foreign-chain-inspector", "crates/foreign-chain-rpc-interfaces", "crates/include-measurements", + "crates/launcher-interface", "crates/mpc-attestation", "crates/near-mpc-sdk", "crates/node", @@ -45,6 +46,7 @@ include-measurements = { path = "crates/include-measurements" } mpc-attestation = { path = "crates/mpc-attestation" } mpc-contract = { path = "crates/contract", features = ["dev-utils"] } mpc-node = { path = "crates/node" } +launcher-interface = { path = 
"crates/launcher-interface" } mpc-primitives = { path = "crates/primitives", features = ["abi"] } mpc-tls = { path = "crates/tls" } near-mpc-sdk = { path = "crates/near-mpc-sdk" } diff --git a/crates/launcher-interface/Cargo.toml b/crates/launcher-interface/Cargo.toml new file mode 100644 index 000000000..b7cb17847 --- /dev/null +++ b/crates/launcher-interface/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "launcher-interface" +version.workspace = true +edition.workspace = true +license.workspace = true + +[dependencies] +mpc-primitives = { workspace = true } +serde = { workspace = true } + + +[lints] +workspace = true diff --git a/crates/launcher-interface/src/lib.rs b/crates/launcher-interface/src/lib.rs new file mode 100644 index 000000000..dd0601a0c --- /dev/null +++ b/crates/launcher-interface/src/lib.rs @@ -0,0 +1,12 @@ +pub mod types { + use mpc_primitives::hash::MpcDockerImageHash; + use serde::{Deserialize, Serialize}; + + /// JSON structure for the approved hashes file written by the MPC node. 
+ #[derive(Debug, Serialize, Deserialize)] + pub struct ApprovedHashesFile { + pub approved_hashes: Vec, + } +} + +mod paths {} diff --git a/crates/node/Cargo.toml b/crates/node/Cargo.toml index 5b70d1291..7c80f03e5 100644 --- a/crates/node/Cargo.toml +++ b/crates/node/Cargo.toml @@ -17,6 +17,7 @@ backon = { workspace = true } base64 = { workspace = true } borsh = { workspace = true } bounded-collections = { workspace = true } +launcher-interface = { workspace = true } bs58 = { workspace = true } bytes = { workspace = true } clap = { workspace = true } diff --git a/crates/node/src/tee/allowed_image_hashes_watcher.rs b/crates/node/src/tee/allowed_image_hashes_watcher.rs index 4596c9dba..b5501038d 100644 --- a/crates/node/src/tee/allowed_image_hashes_watcher.rs +++ b/crates/node/src/tee/allowed_image_hashes_watcher.rs @@ -1,5 +1,6 @@ use derive_more::From; use itertools::Itertools; +use launcher_interface::types::ApprovedHashesFile; use mpc_contract::tee::proposal::MpcDockerImageHash; use std::{future::Future, io, panic, path::PathBuf}; use thiserror::Error; @@ -29,10 +30,6 @@ pub struct AllowedImageHashesFile { file_path: PathBuf, } -// important: must stay aligned with the launcher implementation in: -// mpc/tee_launcher/launcher.py -const JSON_KEY_APPROVED_HASHES: &str = "approved_hashes"; - impl AllowedImageHashesStorage for AllowedImageHashesFile { async fn set(&mut self, approved_hashes: &[MpcDockerImageHash]) -> Result<(), io::Error> { tracing::info!( @@ -41,21 +38,14 @@ impl AllowedImageHashesStorage for AllowedImageHashesFile { "Writing approved MPC image hashes to disk (JSON format)." ); - let hash_strings: Vec = approved_hashes - .iter() - .map(|h| format!("sha256:{}", h.as_hex())) - .collect(); + let approved_hashes = ApprovedHashesFile { + approved_hashes: approved_hashes.to_vec(), + }; - let json = serde_json::json!({ - JSON_KEY_APPROVED_HASHES: hash_strings - }); + let json = serde_json::to_string_pretty(&approved_hashes) + .expect("previous json! 
macro would also panic. TODO figure out what to return"); - tracing::debug!( - %JSON_KEY_APPROVED_HASHES, - approved = ?hash_strings, - json = %json.to_string(), - "approved image hashes JSON that will be written to disk" - ); + tracing::debug!(?approved_hashes, "writing approved hashes to disk"); let tmp_path = self.file_path.with_extension("tmp"); // Write to a temporary file first. diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index 204b8ab68..b3d8dcad5 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -15,7 +15,8 @@ integration-test = [] clap = { workspace = true } dstack-sdk = { workspace = true } hex = { workspace = true } -mpc-primitives = { path = "../primitives" } +mpc-primitives = { workspace = true } +launcher-interface = { workspace = true } regex = { workspace = true } reqwest = { workspace = true } serde = { workspace = true } diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 8ad64faf7..9454449ef 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -3,6 +3,7 @@ use std::process::Command; use std::sync::LazyLock; use clap::Parser; +use launcher_interface::types::ApprovedHashesFile; use regex::Regex; use std::os::unix::fs::FileTypeExt as _; @@ -230,6 +231,8 @@ fn parse_env_lines(lines: &[&str]) -> BTreeMap { env } +// TODO: this should be a struct with hard expectations, that we deserialize into, instead of +// a btreemap fn parse_env_file(path: &str) -> Result> { let content = std::fs::read_to_string(path).map_err(|source| LauncherError::FileRead { path: path.to_string(), @@ -319,6 +322,7 @@ fn get_bare_digest(full_digest: &str) -> Result { fn load_and_select_hash( args: &CliArgs, + // TODO: why is this btreemap not a struct with hard fields? 
dstack_config: &BTreeMap, ) -> Result { let approved_hashes = if std::path::Path::new(IMAGE_DIGEST_FILE).is_file() { @@ -340,45 +344,45 @@ fn load_and_select_hash( } data.approved_hashes } else { - let fallback = args + let fallback_image = (&args) .default_image_digest - .as_deref() + .clone() .ok_or_else(|| LauncherError::MissingEnvVar("DEFAULT_IMAGE_DIGEST".to_string()))?; - let fallback = fallback.trim(); - let fallback = if fallback.starts_with(SHA256_PREFIX) { - fallback.to_string() - } else { - format!("{SHA256_PREFIX}{fallback}") - }; - if !is_valid_sha256_digest(&fallback) { - return Err(LauncherError::InvalidDefaultDigest(fallback)); - } - tracing::info!("{IMAGE_DIGEST_FILE} missing → fallback to DEFAULT_IMAGE_DIGEST={fallback}"); - vec![fallback] + + tracing::info!( + ?IMAGE_DIGEST_FILE, + ?fallback_image, + "image digest file missing, will use fall back image" + ); + + vec![fallback_image] }; tracing::info!("Approved MPC image hashes (newest → oldest):"); for h in &approved_hashes { - tracing::info!(" - {h}"); + // TODO: Fix this output... 
+ // tracing::info!(" - {h}"); } // Optional override - if let Some(override_hash) = dstack_config.get(ENV_VAR_MPC_HASH_OVERRIDE) { - if !is_valid_sha256_digest(override_hash) { - return Err(LauncherError::InvalidHashOverride(override_hash.clone())); - } - if !approved_hashes.contains(override_hash) { - tracing::error!("MPC_HASH_OVERRIDE={override_hash} does NOT match any approved hash!"); - return Err(LauncherError::InvalidHashOverride(override_hash.clone())); - } - tracing::info!("MPC_HASH_OVERRIDE provided → selecting: {override_hash}"); - return Ok(override_hash.clone()); - } - - // No override → select newest (first in list) - let selected = approved_hashes[0].clone(); - tracing::info!("Selected MPC hash (newest allowed): {selected}"); - Ok(selected) + // if let Some(override_hash) = dstack_config.get(ENV_VAR_MPC_HASH_OVERRIDE) { + // if !is_valid_sha256_digest(override_hash) { + // return Err(LauncherError::InvalidHashOverride(override_hash.clone())); + // } + // if !approved_hashes.contains(override_hash) { + // tracing::error!("MPC_HASH_OVERRIDE={override_hash} does NOT match any approved hash!"); + // return Err(LauncherError::InvalidHashOverride(override_hash.clone())); + // } + // tracing::info!("MPC_HASH_OVERRIDE provided → selecting: {override_hash}"); + // return Ok(override_hash.clone()); + // } + + // // No override → select newest (first in list) + // let selected = approved_hashes[0].clone(); + // tracing::info!("Selected MPC hash (newest allowed): {selected}"); + // Ok(selected) + + todo!() } // --------------------------------------------------------------------------- @@ -789,6 +793,7 @@ async fn extend_rtmr3(platform: Platform, valid_hash: &str) -> Result<()> { mod tests { use super::*; use assert_matches::assert_matches; + use launcher_interface::types::ApprovedHashesFile; // -- Config parsing tests ----------------------------------------------- diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index 
707326904..89207a3fe 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -1,4 +1,5 @@ use clap::{Parser, ValueEnum}; +use mpc_primitives::hash::MpcDockerImageHash; use serde::Deserialize; /// CLI arguments parsed from environment variables via clap. @@ -10,12 +11,13 @@ pub struct CliArgs { pub platform: Platform, /// Must be set to "1" to enable Docker Content Trust + // TODO: make it optional and only accept value 1 #[arg(long, env = "DOCKER_CONTENT_TRUST", default_value = "")] pub docker_content_trust: String, /// Fallback image digest when the approved-hashes file is absent #[arg(long, env = "DEFAULT_IMAGE_DIGEST")] - pub default_image_digest: Option, + pub default_image_digest: Option, } #[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)] @@ -45,10 +47,3 @@ pub struct ResolvedImage { pub spec: ImageSpec, pub digest: String, } - -/// JSON structure for the approved hashes file written by the MPC node. -/// Must stay aligned with `crates/node/src/tee/allowed_image_hashes_watcher.rs`. 
-#[derive(Debug, Deserialize)] -pub struct ApprovedHashesFile { - pub approved_hashes: Vec, -} From cbf5afaa5ab7bfcb336e22ac9a8a5922a64eb5a4 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Mon, 2 Mar 2026 11:04:31 +0100 Subject: [PATCH 06/82] chore: enforce docker_content_trust at compile time --- crates/tee-launcher/src/error.rs | 3 --- crates/tee-launcher/src/main.rs | 6 ------ crates/tee-launcher/src/types.rs | 14 +++++++++----- 3 files changed, 9 insertions(+), 14 deletions(-) diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index 73a594bd0..f25dca210 100644 --- a/crates/tee-launcher/src/error.rs +++ b/crates/tee-launcher/src/error.rs @@ -2,9 +2,6 @@ use thiserror::Error; #[derive(Error, Debug)] pub enum LauncherError { - #[error("DOCKER_CONTENT_TRUST must be set to 1")] - DockerContentTrustNotEnabled, - #[error("PLATFORM=TEE requires dstack unix socket at {0}")] DstackSocketMissing(String), diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 9454449ef..8b594f0f0 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -47,12 +47,6 @@ async fn run() -> Result<()> { )); } - // TODO: `docker_content_trust` parse it to a type that only accepts deserialization into number 1 - // DOCKER_CONTENT_TRUST must be enabled - if args.docker_content_trust != "1" { - return Err(LauncherError::DockerContentTrustNotEnabled); - } - // Load dstack user config let dstack_config: BTreeMap = if std::path::Path::new(DSTACK_USER_CONFIG_FILE).is_file() { diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index 89207a3fe..3c987394b 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -1,6 +1,5 @@ use clap::{Parser, ValueEnum}; use mpc_primitives::hash::MpcDockerImageHash; -use serde::Deserialize; /// CLI arguments parsed from environment variables via clap. 
#[derive(Parser, Debug)] @@ -10,16 +9,21 @@ pub struct CliArgs { #[arg(long, env = "PLATFORM")] pub platform: Platform, - /// Must be set to "1" to enable Docker Content Trust - // TODO: make it optional and only accept value 1 - #[arg(long, env = "DOCKER_CONTENT_TRUST", default_value = "")] - pub docker_content_trust: String, + #[arg(long, env = "DOCKER_CONTENT_TRUST")] + // ensure that `docker_content_trust` is enabled. + docker_content_trust: DockerContentTrust, /// Fallback image digest when the approved-hashes file is absent #[arg(long, env = "DEFAULT_IMAGE_DIGEST")] pub default_image_digest: Option, } +#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)] +enum DockerContentTrust { + #[value(name = "1")] + Enabled, +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)] pub enum Platform { #[value(name = "TEE")] From 8044407f3844e527fa2bc9e0ba1792758ea84e98 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Mon, 2 Mar 2026 17:21:29 +0100 Subject: [PATCH 07/82] created json --- Cargo.toml | 1 + crates/tee-launcher/src/error.rs | 12 + crates/tee-launcher/src/main.rs | 291 +++++++++++------------- crates/tee-launcher/src/types.rs | 52 ++++- deployment/localnet/tee/frodo_conf.json | 21 ++ deployment/localnet/tee/sam_conf.json | 21 ++ 6 files changed, 225 insertions(+), 173 deletions(-) create mode 100644 deployment/localnet/tee/frodo_conf.json create mode 100644 deployment/localnet/tee/sam_conf.json diff --git a/Cargo.toml b/Cargo.toml index 5c1ba25fb..aa47cba05 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -93,6 +93,7 @@ derive_more = { version = "2.1.1", features = [ "into", ] } digest = "0.10.7" +dotenvy = "0.15" dstack-sdk = { version = "0.1.2" } dstack-sdk-types = { version = "0.1.2", features = ["borsh"] } ecdsa = { version = "0.16.9", features = ["digest", "hazmat"] } diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index f25dca210..23e225ef8 100644 --- a/crates/tee-launcher/src/error.rs +++ 
b/crates/tee-launcher/src/error.rs @@ -1,3 +1,5 @@ +use std::path::PathBuf; + use thiserror::Error; #[derive(Error, Debug)] @@ -59,6 +61,13 @@ pub enum LauncherError { source: std::io::Error, }, + #[error("Failed to parse env file {path}: {source}")] + EnvFileParse { + path: PathBuf, + #[source] + source: dotenvy::Error, + }, + #[error("Failed to parse {path}: {source}")] JsonParse { path: String, @@ -68,6 +77,9 @@ pub enum LauncherError { #[error("Required environment variable not set: {0}")] MissingEnvVar(String), + #[error("Invalid value for {key}: {value}")] + InvalidEnvVar { key: String, value: String }, + #[error("HTTP error: {0}")] Http(#[from] reqwest::Error), diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 8b594f0f0..6c105fa3b 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -1,4 +1,4 @@ -use std::collections::{BTreeMap, HashSet, VecDeque}; +use std::collections::{BTreeMap, VecDeque}; use std::process::Command; use std::sync::LazyLock; @@ -48,12 +48,12 @@ async fn run() -> Result<()> { } // Load dstack user config - let dstack_config: BTreeMap = - if std::path::Path::new(DSTACK_USER_CONFIG_FILE).is_file() { - parse_env_file(DSTACK_USER_CONFIG_FILE)? 
- } else { - BTreeMap::new() - }; + let config_file = std::fs::OpenOptions::new() + .read(true) + .open(DSTACK_USER_CONFIG_FILE) + .expect("dstack user config file exists"); + + let dstack_config: Config = serde_json::from_reader(config_file).expect("config file is valid"); let rpc_cfg = load_rpc_timing_config(&dstack_config); @@ -67,7 +67,11 @@ async fn run() -> Result<()> { extend_rtmr3(args.platform, &selected_hash).await?; - launch_mpc_container(args.platform, &selected_hash, &dstack_config)?; + launch_mpc_container( + args.platform, + &selected_hash, + &dstack_config.passthrough_env, + )?; Ok(()) } @@ -104,19 +108,6 @@ const ALLOWED_MPC_ENV_VARS: &[&str] = &[ "MPC_BACKUP_ENCRYPTION_KEY_HEX", ]; -// Launcher-only env vars — read from user config but never forwarded to container -static ALLOWED_LAUNCHER_ENV_VARS: LazyLock> = LazyLock::new(|| { - HashSet::from([ - DSTACK_USER_CONFIG_MPC_IMAGE_TAGS, - DSTACK_USER_CONFIG_MPC_IMAGE_NAME, - DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY, - ENV_VAR_MPC_HASH_OVERRIDE, - ENV_VAR_RPC_REQUEST_TIMEOUT_SECS, - ENV_VAR_RPC_REQUEST_INTERVAL_SECS, - ENV_VAR_RPC_MAX_ATTEMPTS, - ]) -}); - // --------------------------------------------------------------------------- // Validation functions — security policy for env passthrough // --------------------------------------------------------------------------- @@ -202,88 +193,17 @@ fn is_allowed_container_env_key(key: &str) -> bool { false } -// --------------------------------------------------------------------------- -// Config parsing -// --------------------------------------------------------------------------- - -fn parse_env_lines(lines: &[&str]) -> BTreeMap { - let mut env = BTreeMap::new(); - for line in lines { - let line = line.trim(); - if line.is_empty() || line.starts_with('#') || !line.contains('=') { - continue; - } - if let Some((key, value)) = line.split_once('=') { - let key = key.trim(); - let value = value.trim(); - if key.is_empty() { - continue; - } - 
env.insert(key.to_string(), value.to_string()); - } - } - env -} - -// TODO: this should be a struct with hard expectations, that we deserialize into, instead of -// a btreemap -fn parse_env_file(path: &str) -> Result> { - let content = std::fs::read_to_string(path).map_err(|source| LauncherError::FileRead { - path: path.to_string(), - source, - })?; - let lines: Vec<&str> = content.lines().collect(); - Ok(parse_env_lines(&lines)) -} - -fn load_rpc_timing_config(dstack_config: &BTreeMap) -> RpcTimingConfig { - let timeout_secs = dstack_config - .get(ENV_VAR_RPC_REQUEST_TIMEOUT_SECS) - .and_then(|v| v.parse().ok()) - .unwrap_or(DEFAULT_RPC_REQUEST_TIMEOUT_SECS); - let interval_secs = dstack_config - .get(ENV_VAR_RPC_REQUEST_INTERVAL_SECS) - .and_then(|v| v.parse().ok()) - .unwrap_or(DEFAULT_RPC_REQUEST_INTERVAL_SECS); - let max_attempts = dstack_config - .get(ENV_VAR_RPC_MAX_ATTEMPTS) - .and_then(|v| v.parse().ok()) - .unwrap_or(DEFAULT_RPC_MAX_ATTEMPTS); - RpcTimingConfig { - request_timeout_secs: timeout_secs, - request_interval_secs: interval_secs, - max_attempts, - } -} - -fn get_image_spec(dstack_config: &BTreeMap) -> ImageSpec { - let tags_raw = dstack_config - .get(DSTACK_USER_CONFIG_MPC_IMAGE_TAGS) - .cloned() - .unwrap_or_else(|| DEFAULT_MPC_IMAGE_TAG.to_string()); - let tags: Vec = tags_raw - .split(',') - .map(|t| t.trim().to_string()) - .filter(|t| !t.is_empty()) - .collect(); - tracing::info!("Using tags {tags:?} to find matching MPC node docker image."); - - let image_name = dstack_config - .get(DSTACK_USER_CONFIG_MPC_IMAGE_NAME) - .cloned() - .unwrap_or_else(|| DEFAULT_MPC_IMAGE_NAME.to_string()); - tracing::info!("Using image name {image_name}."); - - let registry = dstack_config - .get(DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY) - .cloned() - .unwrap_or_else(|| DEFAULT_MPC_REGISTRY.to_string()); - tracing::info!("Using registry {registry}."); - +fn get_image_spec(config: &Config) -> ImageSpec { + tracing::info!( + "Using tags {:?} to find matching MPC 
node docker image.", + config.image_tags + ); + tracing::info!("Using image name {}.", config.image_name); + tracing::info!("Using registry {}.", config.registry); ImageSpec { - tags, - image_name, - registry, + tags: config.image_tags.clone(), + image_name: config.image_name.clone(), + registry: config.registry.clone(), } } @@ -314,11 +234,7 @@ fn get_bare_digest(full_digest: &str) -> Result { Ok(parse_image_digest(full_digest)?.as_hex()) } -fn load_and_select_hash( - args: &CliArgs, - // TODO: why is this btreemap not a struct with hard fields? - dstack_config: &BTreeMap, -) -> Result { +fn load_and_select_hash(args: &CliArgs, dstack_config: &Config) -> Result { let approved_hashes = if std::path::Path::new(IMAGE_DIGEST_FILE).is_file() { let content = std::fs::read_to_string(IMAGE_DIGEST_FILE).map_err(|source| { LauncherError::FileRead { @@ -534,7 +450,7 @@ async fn get_manifest_digest(image: &ResolvedImage, timing: &RpcTimingConfig) -> async fn validate_image_hash( image_digest: &str, - dstack_config: &BTreeMap, + dstack_config: &Config, timing: &RpcTimingConfig, ) -> Result { tracing::info!("Validating MPC hash: {image_digest}"); @@ -643,10 +559,6 @@ fn build_docker_cmd( // BTreeMap iteration is already sorted by key (deterministic) for (key, value) in user_env { - if ALLOWED_LAUNCHER_ENV_VARS.contains(key.as_str()) { - continue; - } - if key == "EXTRA_HOSTS" { for host_entry in value.split(',') { let clean = host_entry.trim(); @@ -789,58 +701,110 @@ mod tests { use assert_matches::assert_matches; use launcher_interface::types::ApprovedHashesFile; - // -- Config parsing tests ----------------------------------------------- + // -- DstackUserConfig parsing tests ------------------------------------- #[test] - fn test_parse_env_lines_basic() { - let lines = vec![ - "# a comment", - "KEY1=value1", - " KEY2 = value2 ", - "", - "INVALIDLINE", - "EMPTY_KEY=", - ]; - let env = parse_env_lines(&lines); - assert_eq!(env.get("KEY1").unwrap(), "value1"); - 
assert_eq!(env.get("KEY2").unwrap(), "value2"); - assert_eq!(env.get("EMPTY_KEY").unwrap(), ""); - assert!(!env.contains_key("INVALIDLINE")); + fn test_user_config_defaults_when_map_is_empty() { + let config = user_config_from_map(BTreeMap::new()).unwrap(); + assert_eq!(config.image_tags, vec![DEFAULT_MPC_IMAGE_TAG]); + assert_eq!(config.image_name, DEFAULT_MPC_IMAGE_NAME); + assert_eq!(config.registry, DEFAULT_MPC_REGISTRY); + assert_eq!( + config.rpc_request_timeout_secs, + DEFAULT_RPC_REQUEST_TIMEOUT_SECS + ); + assert_eq!( + config.rpc_request_interval_secs, + DEFAULT_RPC_REQUEST_INTERVAL_SECS + ); + assert_eq!(config.rpc_max_attempts, DEFAULT_RPC_MAX_ATTEMPTS); + assert!(config.mpc_hash_override.is_none()); + assert!(config.passthrough_env.is_empty()); } #[test] - fn test_config_ignores_blank_lines_and_comments() { - let lines = vec![ - "", - " # This is a comment", - " MPC_SECRET_STORE_KEY=topsecret", - "", - ]; - let env = parse_env_lines(&lines); - assert_eq!(env.get("MPC_SECRET_STORE_KEY").unwrap(), "topsecret"); - assert_eq!(env.len(), 1); + fn test_user_config_typed_fields_extracted_from_map() { + let map = BTreeMap::from([ + ( + DSTACK_USER_CONFIG_MPC_IMAGE_TAGS.into(), + "v1.0, v1.1".into(), + ), + (DSTACK_USER_CONFIG_MPC_IMAGE_NAME.into(), "my/image".into()), + ( + DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY.into(), + "my.registry.io".into(), + ), + (ENV_VAR_RPC_REQUEST_TIMEOUT_SECS.into(), "30.0".into()), + (ENV_VAR_RPC_MAX_ATTEMPTS.into(), "5".into()), + ("MPC_ACCOUNT_ID".into(), "account.near".into()), + ]); + let config = user_config_from_map(map).unwrap(); + assert_eq!(config.image_tags, vec!["v1.0", "v1.1"]); + assert_eq!(config.image_name, "my/image"); + assert_eq!(config.registry, "my.registry.io"); + assert_eq!(config.rpc_request_timeout_secs, 30.0); + assert_eq!(config.rpc_max_attempts, 5); + // Launcher-only keys are NOT in passthrough_env + assert!( + !config + .passthrough_env + .contains_key(DSTACK_USER_CONFIG_MPC_IMAGE_TAGS) + ); + assert!( + 
!config + .passthrough_env + .contains_key(ENV_VAR_RPC_MAX_ATTEMPTS) + ); + // Container passthrough keys ARE in passthrough_env + assert_eq!( + config.passthrough_env.get("MPC_ACCOUNT_ID").unwrap(), + "account.near" + ); } #[test] - fn test_config_skips_malformed_lines() { - let lines = vec![ - "GOOD_KEY=value", - "bad_line_without_equal", - "ANOTHER_GOOD=ok", - "=", - ]; - let env = parse_env_lines(&lines); - assert!(env.contains_key("GOOD_KEY")); - assert!(env.contains_key("ANOTHER_GOOD")); - assert!(!env.contains_key("bad_line_without_equal")); - assert!(!env.contains_key("")); + fn test_user_config_malformed_rpc_fields_error() { + let map = BTreeMap::from([(ENV_VAR_RPC_MAX_ATTEMPTS.into(), "not_a_number".into())]); + let err = user_config_from_map(map).unwrap_err(); + assert_matches!(err, LauncherError::InvalidEnvVar { key, .. } if key == ENV_VAR_RPC_MAX_ATTEMPTS); + + let map = BTreeMap::from([(ENV_VAR_RPC_REQUEST_TIMEOUT_SECS.into(), "bad".into())]); + let err = user_config_from_map(map).unwrap_err(); + assert_matches!(err, LauncherError::InvalidEnvVar { key, .. } if key == ENV_VAR_RPC_REQUEST_TIMEOUT_SECS); + + let map = BTreeMap::from([(ENV_VAR_RPC_REQUEST_INTERVAL_SECS.into(), "bad".into())]); + let err = user_config_from_map(map).unwrap_err(); + assert_matches!(err, LauncherError::InvalidEnvVar { key, .. 
} if key == ENV_VAR_RPC_REQUEST_INTERVAL_SECS); + } + + #[test] + fn test_user_config_hash_override_extracted() { + let map = BTreeMap::from([(ENV_VAR_MPC_HASH_OVERRIDE.into(), "sha256:abc".into())]); + let config = user_config_from_map(map).unwrap(); + assert_eq!(config.mpc_hash_override.unwrap(), "sha256:abc"); + assert!( + !config + .passthrough_env + .contains_key(ENV_VAR_MPC_HASH_OVERRIDE) + ); } #[test] - fn test_config_overrides_duplicate_keys() { - let lines = vec!["MPC_ACCOUNT_ID=first", "MPC_ACCOUNT_ID=second"]; - let env = parse_env_lines(&lines); - assert_eq!(env.get("MPC_ACCOUNT_ID").unwrap(), "second"); + fn test_parse_user_config_from_file() { + let dir = tempfile::tempdir().unwrap(); + let file = dir.path().join("user_config"); + std::fs::write( + &file, + "# comment\nMPC_ACCOUNT_ID=test\nMPC_IMAGE_NAME=my/image\n", + ) + .unwrap(); + let config = parse_user_config(file.to_str().unwrap()).unwrap(); + assert_eq!(config.image_name, "my/image"); + assert_eq!( + config.passthrough_env.get("MPC_ACCOUNT_ID").unwrap(), + "test" + ); + assert!(!config.passthrough_env.contains_key("MPC_IMAGE_NAME")); } // -- Host/port validation tests ----------------------------------------- @@ -1031,7 +995,6 @@ mod tests { fn test_build_docker_cmd_nontee_no_dstack_mount() { let mut env = BTreeMap::new(); env.insert("MPC_ACCOUNT_ID".into(), "x".into()); - env.insert(ENV_VAR_RPC_MAX_ATTEMPTS.into(), "5".into()); let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); let s = cmd.join(" "); assert!(!s.contains("DSTACK_ENDPOINT=")); @@ -1282,10 +1245,15 @@ mod tests { #[test] fn test_parse_and_build_docker_cmd_full_flow() { - let config_str = "MPC_ACCOUNT_ID=test-user\nPORTS=11780:11780, --env BAD=oops\nEXTRA_HOSTS=host1:192.168.1.1, --volume /:/mnt\nIMAGE_HASH=sha256:abc123"; - let lines: Vec<&str> = config_str.lines().collect(); - let env = parse_env_lines(&lines); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + let dir = 
tempfile::tempdir().unwrap(); + let file = dir.path().join("user_config"); + std::fs::write( + &file, + "MPC_ACCOUNT_ID=test-user\nPORTS=11780:11780, --env BAD=oops\nEXTRA_HOSTS=host1:192.168.1.1, --volume /:/mnt\n", + ) + .unwrap(); + let config = parse_user_config(file.to_str().unwrap()).unwrap(); + let cmd = build_docker_cmd(Platform::Tee, &config.passthrough_env, &make_digest()).unwrap(); let cmd_str = cmd.join(" "); assert!(cmd_str.contains("MPC_ACCOUNT_ID=test-user")); @@ -1351,15 +1319,16 @@ mod tests { const TEST_DIGEST: &str = "sha256:f2472280c437efc00fa25a030a24990ae16c4fbec0d74914e178473ce4d57372"; - fn test_dstack_config() -> BTreeMap { - BTreeMap::from([ + fn test_dstack_config() -> Config { + user_config_from_map(BTreeMap::from([ ( "MPC_IMAGE_TAGS".into(), "83b52da4e2270c688cdd30da04f6b9d3565f25bb".into(), ), ("MPC_IMAGE_NAME".into(), "nearone/testing".into()), ("MPC_REGISTRY".into(), "registry.hub.docker.com".into()), - ]) + ])) + .unwrap() } #[tokio::test] diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index 3c987394b..de681896a 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -1,5 +1,6 @@ use clap::{Parser, ValueEnum}; use mpc_primitives::hash::MpcDockerImageHash; +use serde::{Deserialize, Serialize}; /// CLI arguments parsed from environment variables via clap. #[derive(Parser, Debug)] @@ -32,22 +33,49 @@ pub enum Platform { NonTee, } -#[derive(Debug, Clone)] -pub struct RpcTimingConfig { - pub request_timeout_secs: f64, - pub request_interval_secs: f64, - pub max_attempts: u32, +/// Typed representation of the dstack user config file (`/tapp/user_config`). +/// +/// Launcher-only keys are extracted into typed fields; all remaining keys are +/// kept in `passthrough_env` for forwarding to the MPC container. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Config { + pub launcher_config: LauncherConfig, + /// Remaining env vars forwarded to the MPC container. 
+ pub mpc_passthrough_env: MpcBinaryConfig, } -#[derive(Debug, Clone)] -pub struct ImageSpec { - pub tags: Vec, +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct LauncherConfig { + /// Docker image tags to search (from `MPC_IMAGE_TAGS`, comma-separated). + pub image_tags: Vec, + /// Docker image name (from `MPC_IMAGE_NAME`). pub image_name: String, + /// Docker registry (from `MPC_REGISTRY`). pub registry: String, + /// Per-request timeout for registry RPC calls (from `RPC_REQUEST_TIMEOUT_SECS`). + pub rpc_request_timeout_secs: f64, + /// Delay between registry RPC retries (from `RPC_REQUEST_INTERVAL_SECS`). + pub rpc_request_interval_secs: f64, + /// Maximum registry RPC attempts (from `RPC_MAX_ATTEMPTS`). + pub rpc_max_attempts: u32, + /// Optional hash override that bypasses registry lookup (from `MPC_HASH_OVERRIDE`). + pub mpc_hash_override: Option, } -#[derive(Debug, Clone)] -pub struct ResolvedImage { - pub spec: ImageSpec, - pub digest: String, +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MpcBinaryConfig { + // mpc + mpc_account_id: String, + mpc_local_address: String, + mpc_secret_key_store: String, + mpc_contract_isd: String, + mpc_env: String, + mpc_home_dir: String, + mpc_responder_id: String, + mpc_backup_encryption_key_hex: String, + // near + near_boot_nodes: String, + // rust + rust_backtrace: String, + rust_log: String, } diff --git a/deployment/localnet/tee/frodo_conf.json b/deployment/localnet/tee/frodo_conf.json new file mode 100644 index 000000000..081733e2e --- /dev/null +++ b/deployment/localnet/tee/frodo_conf.json @@ -0,0 +1,21 @@ +{ + "MPC_IMAGE_NAME": "nearone/mpc-node", + "MPC_IMAGE_TAGS": "main-260e88b", + "MPC_REGISTRY": "registry.hub.docker.com", + "MPC_ACCOUNT_ID": "frodo.test.near", + "MPC_LOCAL_ADDRESS": "127.0.0.1", + "MPC_SECRET_STORE_KEY": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", + "MPC_CONTRACT_ID": "mpc-contract.test.near", + "MPC_ENV": "mpc-localnet", + "MPC_HOME_DIR": "/data", + "RUST_BACKTRACE": "full", 
+ "RUST_LOG": "info", + "NEAR_BOOT_NODES": [ + "ed25519:BGa4WiBj43Mr66f9Ehf6swKtR6wZmWuwCsV3s4PSR3nx@${MACHINE_IP}:24566" + ], + "PORTS": [ + "8080:8080", + "24566:24566", + "13001:13001" + ] +} \ No newline at end of file diff --git a/deployment/localnet/tee/sam_conf.json b/deployment/localnet/tee/sam_conf.json new file mode 100644 index 000000000..8c307acd1 --- /dev/null +++ b/deployment/localnet/tee/sam_conf.json @@ -0,0 +1,21 @@ +{ + "MPC_IMAGE_NAME": "nearone/mpc-node", + "MPC_IMAGE_TAGS": "main-260e88b", + "MPC_REGISTRY": "registry.hub.docker.com", + "MPC_ACCOUNT_ID": "sam.test.near", + "MPC_LOCAL_ADDRESS": "127.0.0.1", + "MPC_SECRET_STORE_KEY": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", + "MPC_CONTRACT_ID": "mpc-contract.test.near", + "MPC_ENV": "mpc-localnet", + "MPC_HOME_DIR": "/data", + "RUST_BACKTRACE": "full", + "RUST_LOG": "info", + "NEAR_BOOT_NODES": [ + "ed25519:BGa4WiBj43Mr66f9Ehf6swKtR6wZmWuwCsV3s4PSR3nx@${MACHINE_IP}:24566" + ], + "PORTS": [ + "8080:8080", + "24566:24566", + "13002:13002" + ] +} \ No newline at end of file From 34302f65c65c8320dc7bbdc7b19105611a16684d Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Mon, 2 Mar 2026 18:26:51 +0100 Subject: [PATCH 08/82] it compiles --- Cargo.lock | 1 + crates/tee-launcher/Cargo.toml | 1 + crates/tee-launcher/src/error.rs | 7 - crates/tee-launcher/src/main.rs | 1486 ++++++++++++++---------------- crates/tee-launcher/src/types.rs | 7 +- 5 files changed, 713 insertions(+), 789 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 49e17fb79..a7c74e242 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10552,6 +10552,7 @@ name = "tee-launcher" version = "3.5.1" dependencies = [ "assert_matches", + "bounded-collections", "clap", "dstack-sdk", "hex", diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index b3d8dcad5..f983a8ad1 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -12,6 +12,7 @@ path = "src/main.rs" integration-test = [] [dependencies] 
+bounded-collections = { workspace = true } clap = { workspace = true } dstack-sdk = { workspace = true } hex = { workspace = true } diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index 23e225ef8..92836663d 100644 --- a/crates/tee-launcher/src/error.rs +++ b/crates/tee-launcher/src/error.rs @@ -61,13 +61,6 @@ pub enum LauncherError { source: std::io::Error, }, - #[error("Failed to parse env file {path}: {source}")] - EnvFileParse { - path: PathBuf, - #[source] - source: dotenvy::Error, - }, - #[error("Failed to parse {path}: {source}")] JsonParse { path: String, diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 6c105fa3b..eec320fd5 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -55,11 +55,9 @@ async fn run() -> Result<()> { let dstack_config: Config = serde_json::from_reader(config_file).expect("config file is valid"); - let rpc_cfg = load_rpc_timing_config(&dstack_config); - let selected_hash = load_and_select_hash(&args, &dstack_config)?; - if !validate_image_hash(&selected_hash, &dstack_config, &rpc_cfg).await? { + if !validate_image_hash(&selected_hash, &dstack_config, &dstack_config).await? 
{ return Err(LauncherError::ImageValidationFailed(selected_hash)); } @@ -70,7 +68,7 @@ async fn run() -> Result<()> { launch_mpc_container( args.platform, &selected_hash, - &dstack_config.passthrough_env, + &dstack_config.mpc_passthrough_env, )?; Ok(()) @@ -126,19 +124,6 @@ fn has_control_chars(s: &str) -> bool { false } -fn is_safe_env_value(value: &str) -> bool { - if value.len() > MAX_ENV_VALUE_LEN { - return false; - } - if has_control_chars(value) { - return false; - } - if value.contains("LD_PRELOAD") { - return false; - } - true -} - fn is_valid_ip(ip: &str) -> bool { ip.parse::().is_ok() } @@ -178,35 +163,6 @@ fn is_safe_port_mapping(mapping: &str) -> bool { !INVALID_HOST_ENTRY_PATTERN.is_match(mapping) } -fn is_allowed_container_env_key(key: &str) -> bool { - if DENIED_CONTAINER_ENV_KEYS.contains(&key) { - return false; - } - // Allow MPC_* keys with strict regex - if MPC_ENV_KEY_RE.is_match(key) { - return true; - } - // Keep allowlist - if ALLOWED_MPC_ENV_VARS.contains(&key) { - return true; - } - false -} - -fn get_image_spec(config: &Config) -> ImageSpec { - tracing::info!( - "Using tags {:?} to find matching MPC node docker image.", - config.image_tags - ); - tracing::info!("Using image name {}.", config.image_name); - tracing::info!("Using registry {}.", config.registry); - ImageSpec { - tags: config.image_tags.clone(), - image_name: config.image_name.clone(), - registry: config.registry.clone(), - } -} - // --------------------------------------------------------------------------- // Hash selection // --------------------------------------------------------------------------- @@ -299,15 +255,16 @@ fn load_and_select_hash(args: &CliArgs, dstack_config: &Config) -> Result Result { - let mut interval = timing.request_interval_secs; + let mut interval = config.rpc_request_interval_secs as f64; - for attempt in 1..=timing.max_attempts { + for attempt in 1..=config.rpc_max_attempts { // Sleep before request (matching Python behavior) 
tokio::time::sleep(std::time::Duration::from_secs_f64(interval)).await; interval = (interval.max(1.0) * 1.5).min(60.0); @@ -318,8 +275,8 @@ async fn request_until_success( } match req - .timeout(std::time::Duration::from_secs_f64( - timing.request_timeout_secs, + .timeout(std::time::Duration::from_secs( + config.rpc_request_timeout_secs, )) .send() .await @@ -327,14 +284,14 @@ async fn request_until_success( Err(e) => { tracing::warn!( "Attempt {attempt}/{}: Failed to fetch {url}. Status: Timeout/Error: {e}", - timing.max_attempts + config.rpc_max_attempts ); continue; } Ok(resp) if resp.status() != reqwest::StatusCode::OK => { tracing::warn!( "Attempt {attempt}/{}: Failed to fetch {url}. Status: {}", - timing.max_attempts, + config.rpc_max_attempts, resp.status() ); continue; @@ -345,20 +302,18 @@ async fn request_until_success( Err(LauncherError::RegistryRequestFailed { url: url.to_string(), - attempts: timing.max_attempts, + attempts: config.rpc_max_attempts, }) } -async fn get_manifest_digest(image: &ResolvedImage, timing: &RpcTimingConfig) -> Result { - if image.spec.tags.is_empty() { - return Err(LauncherError::ImageHashNotFoundAmongTags); - } +async fn get_manifest_digest(config: &LauncherConfig) -> Result { + let tags = config.image_tags.clone(); - // Get auth token let token_url = format!( "https://auth.docker.io/token?service=registry.docker.io&scope=repository:{}:pull", - image.spec.image_name + config.image_name ); + let client = reqwest::Client::new(); let token_resp = client .get(&token_url) @@ -380,12 +335,12 @@ async fn get_manifest_digest(image: &ResolvedImage, timing: &RpcTimingConfig) -> .ok_or_else(|| LauncherError::RegistryAuthFailed("no token in response".to_string()))? 
.to_string(); - let mut tags: VecDeque = image.spec.tags.iter().cloned().collect(); + let mut tags: VecDeque = tags.into_iter().collect(); while let Some(tag) = tags.pop_front() { let manifest_url = format!( "https://{}/v2/{}/manifests/{tag}", - image.spec.registry, image.spec.image_name + config.registry, config.image_name ); let headers = vec![ ( @@ -395,7 +350,7 @@ async fn get_manifest_digest(image: &ResolvedImage, timing: &RpcTimingConfig) -> ("Authorization".to_string(), format!("Bearer {token}")), ]; - match request_until_success(&client, &manifest_url, &headers, timing).await { + match request_until_success(&client, &manifest_url, &headers, config).await { Ok(resp) => { let content_digest = resp .headers() @@ -424,15 +379,16 @@ async fn get_manifest_digest(image: &ResolvedImage, timing: &RpcTimingConfig) -> } } } - "application/vnd.docker.distribution.manifest.v2+json" - | "application/vnd.oci.image.manifest.v1+json" => { - let config_digest = manifest["config"]["digest"].as_str().unwrap_or(""); - if config_digest == image.digest { - if let Some(digest) = content_digest { - return Ok(digest); - } - } - } + // TODO: + // "application/vnd.docker.distribution.manifest.v2+json" + // | "application/vnd.oci.image.manifest.v1+json" => { + // let config_digest = manifest["config"]["digest"].as_str().unwrap_or(""); + // if config_digest == config. 
{ + // if let Some(digest) = content_digest { + // return Ok(digest); + // } + // } + // } _ => {} } } @@ -451,18 +407,12 @@ async fn get_manifest_digest(image: &ResolvedImage, timing: &RpcTimingConfig) -> async fn validate_image_hash( image_digest: &str, dstack_config: &Config, - timing: &RpcTimingConfig, + config: &Config, ) -> Result { tracing::info!("Validating MPC hash: {image_digest}"); - let image_spec = get_image_spec(dstack_config); - let docker_image = ResolvedImage { - spec: image_spec, - digest: image_digest.to_string(), - }; - - let manifest_digest = get_manifest_digest(&docker_image, timing).await?; - let name_and_digest = format!("{}@{manifest_digest}", docker_image.spec.image_name); + let manifest_digest = get_manifest_digest(&config.launcher_config).await?; + let name_and_digest = format!("{}@{manifest_digest}", config.launcher_config.image_name); // Pull let pull = Command::new("docker") @@ -528,7 +478,7 @@ fn remove_existing_container() { fn build_docker_cmd( platform: Platform, - user_env: &BTreeMap, + mpc_config: &MpcBinaryConfig, image_digest: &str, ) -> Result> { let bare_digest = get_bare_digest(image_digest)?; @@ -557,54 +507,32 @@ fn build_docker_cmd( let mut passed_env_count: usize = 0; let mut total_env_bytes: usize = 0; - // BTreeMap iteration is already sorted by key (deterministic) - for (key, value) in user_env { - if key == "EXTRA_HOSTS" { - for host_entry in value.split(',') { - let clean = host_entry.trim(); - if is_safe_host_entry(clean) && is_valid_host_entry(clean) { - cmd.extend(["--add-host".into(), clean.to_string()]); - } else { - tracing::warn!("Ignoring invalid or unsafe EXTRA_HOSTS entry: {clean}"); - } - } - continue; - } - - if key == "PORTS" { - for port_pair in value.split(',') { - let clean = port_pair.trim(); - if is_safe_port_mapping(clean) && is_valid_port_mapping(clean) { - cmd.extend(["-p".into(), clean.to_string()]); - } else { - tracing::warn!("Ignoring invalid or unsafe PORTS entry: {clean}"); - } - } - 
continue; - } - - if !is_allowed_container_env_key(key) { - tracing::warn!("Ignoring unknown or unapproved env var: {key}"); - continue; - } - - if !is_safe_env_value(value) { - tracing::warn!("Ignoring env var with unsafe value: {key}"); - continue; - } + // // BTreeMap iteration is already sorted by key (deterministic) + // for (key, value) in mpc_config { + // if key == "EXTRA_HOSTS" { + // for host_entry in value.split(',') { + // let clean = host_entry.trim(); + // if is_safe_host_entry(clean) && is_valid_host_entry(clean) { + // cmd.extend(["--add-host".into(), clean.to_string()]); + // } else { + // tracing::warn!("Ignoring invalid or unsafe EXTRA_HOSTS entry: {clean}"); + // } + // } + // continue; + // } - passed_env_count += 1; - if passed_env_count > MAX_PASSTHROUGH_ENV_VARS { - return Err(LauncherError::TooManyEnvVars(MAX_PASSTHROUGH_ENV_VARS)); - } + // passed_env_count += 1; + // if passed_env_count > MAX_PASSTHROUGH_ENV_VARS { + // return Err(LauncherError::TooManyEnvVars(MAX_PASSTHROUGH_ENV_VARS)); + // } - total_env_bytes += key.len() + 1 + value.len(); - if total_env_bytes > MAX_TOTAL_ENV_BYTES { - return Err(LauncherError::EnvPayloadTooLarge(MAX_TOTAL_ENV_BYTES)); - } + // total_env_bytes += key.len() + 1 + value.len(); + // if total_env_bytes > MAX_TOTAL_ENV_BYTES { + // return Err(LauncherError::EnvPayloadTooLarge(MAX_TOTAL_ENV_BYTES)); + // } - cmd.extend(["--env".into(), format!("{key}={value}")]); - } + // cmd.extend(["--env".into(), format!("{key}={value}")]); + // } // Container run configuration cmd.extend([ @@ -636,12 +564,12 @@ fn build_docker_cmd( fn launch_mpc_container( platform: Platform, valid_hash: &str, - user_env: &BTreeMap, + mpc_config: &MpcBinaryConfig, ) -> Result<()> { tracing::info!("Launching MPC node with validated hash: {valid_hash}"); remove_existing_container(); - let docker_cmd = build_docker_cmd(platform, user_env, valid_hash)?; + let docker_cmd = build_docker_cmd(platform, mpc_config, valid_hash)?; let status = 
Command::new(&docker_cmd[0]) .args(&docker_cmd[1..]) @@ -695,653 +623,653 @@ async fn extend_rtmr3(platform: Platform, valid_hash: &str) -> Result<()> { Ok(()) } -#[cfg(test)] -mod tests { - use super::*; - use assert_matches::assert_matches; - use launcher_interface::types::ApprovedHashesFile; - - // -- DstackUserConfig parsing tests ------------------------------------- - - #[test] - fn test_user_config_defaults_when_map_is_empty() { - let config = user_config_from_map(BTreeMap::new()).unwrap(); - assert_eq!(config.image_tags, vec![DEFAULT_MPC_IMAGE_TAG]); - assert_eq!(config.image_name, DEFAULT_MPC_IMAGE_NAME); - assert_eq!(config.registry, DEFAULT_MPC_REGISTRY); - assert_eq!( - config.rpc_request_timeout_secs, - DEFAULT_RPC_REQUEST_TIMEOUT_SECS - ); - assert_eq!( - config.rpc_request_interval_secs, - DEFAULT_RPC_REQUEST_INTERVAL_SECS - ); - assert_eq!(config.rpc_max_attempts, DEFAULT_RPC_MAX_ATTEMPTS); - assert!(config.mpc_hash_override.is_none()); - assert!(config.passthrough_env.is_empty()); - } - - #[test] - fn test_user_config_typed_fields_extracted_from_map() { - let map = BTreeMap::from([ - ( - DSTACK_USER_CONFIG_MPC_IMAGE_TAGS.into(), - "v1.0, v1.1".into(), - ), - (DSTACK_USER_CONFIG_MPC_IMAGE_NAME.into(), "my/image".into()), - ( - DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY.into(), - "my.registry.io".into(), - ), - (ENV_VAR_RPC_REQUEST_TIMEOUT_SECS.into(), "30.0".into()), - (ENV_VAR_RPC_MAX_ATTEMPTS.into(), "5".into()), - ("MPC_ACCOUNT_ID".into(), "account.near".into()), - ]); - let config = user_config_from_map(map).unwrap(); - assert_eq!(config.image_tags, vec!["v1.0", "v1.1"]); - assert_eq!(config.image_name, "my/image"); - assert_eq!(config.registry, "my.registry.io"); - assert_eq!(config.rpc_request_timeout_secs, 30.0); - assert_eq!(config.rpc_max_attempts, 5); - // Launcher-only keys are NOT in passthrough_env - assert!( - !config - .passthrough_env - .contains_key(DSTACK_USER_CONFIG_MPC_IMAGE_TAGS) - ); - assert!( - !config - .passthrough_env - 
.contains_key(ENV_VAR_RPC_MAX_ATTEMPTS) - ); - // Container passthrough keys ARE in passthrough_env - assert_eq!( - config.passthrough_env.get("MPC_ACCOUNT_ID").unwrap(), - "account.near" - ); - } - - #[test] - fn test_user_config_malformed_rpc_fields_error() { - let map = BTreeMap::from([(ENV_VAR_RPC_MAX_ATTEMPTS.into(), "not_a_number".into())]); - let err = user_config_from_map(map).unwrap_err(); - assert_matches!(err, LauncherError::InvalidEnvVar { key, .. } if key == ENV_VAR_RPC_MAX_ATTEMPTS); - - let map = BTreeMap::from([(ENV_VAR_RPC_REQUEST_TIMEOUT_SECS.into(), "bad".into())]); - let err = user_config_from_map(map).unwrap_err(); - assert_matches!(err, LauncherError::InvalidEnvVar { key, .. } if key == ENV_VAR_RPC_REQUEST_TIMEOUT_SECS); - - let map = BTreeMap::from([(ENV_VAR_RPC_REQUEST_INTERVAL_SECS.into(), "bad".into())]); - let err = user_config_from_map(map).unwrap_err(); - assert_matches!(err, LauncherError::InvalidEnvVar { key, .. } if key == ENV_VAR_RPC_REQUEST_INTERVAL_SECS); - } - - #[test] - fn test_user_config_hash_override_extracted() { - let map = BTreeMap::from([(ENV_VAR_MPC_HASH_OVERRIDE.into(), "sha256:abc".into())]); - let config = user_config_from_map(map).unwrap(); - assert_eq!(config.mpc_hash_override.unwrap(), "sha256:abc"); - assert!( - !config - .passthrough_env - .contains_key(ENV_VAR_MPC_HASH_OVERRIDE) - ); - } - - #[test] - fn test_parse_user_config_from_file() { - let dir = tempfile::tempdir().unwrap(); - let file = dir.path().join("user_config"); - std::fs::write( - &file, - "# comment\nMPC_ACCOUNT_ID=test\nMPC_IMAGE_NAME=my/image\n", - ) - .unwrap(); - let config = parse_user_config(file.to_str().unwrap()).unwrap(); - assert_eq!(config.image_name, "my/image"); - assert_eq!( - config.passthrough_env.get("MPC_ACCOUNT_ID").unwrap(), - "test" - ); - assert!(!config.passthrough_env.contains_key("MPC_IMAGE_NAME")); - } - - // -- Host/port validation tests ----------------------------------------- - - #[test] - fn test_valid_host_entry() 
{ - assert!(is_valid_host_entry("node.local:192.168.1.1")); - assert!(!is_valid_host_entry("node.local:not-an-ip")); - assert!(!is_valid_host_entry("--env LD_PRELOAD=hack.so")); - } - - #[test] - fn test_valid_port_mapping() { - assert!(is_valid_port_mapping("11780:11780")); - assert!(!is_valid_port_mapping("65536:11780")); - assert!(!is_valid_port_mapping("--volume /:/mnt")); - } - - // -- Security validation tests ------------------------------------------ - - #[test] - fn test_has_control_chars_rejects_newline_and_cr() { - assert!(has_control_chars("a\nb")); - assert!(has_control_chars("a\rb")); - } - - #[test] - fn test_has_control_chars_allows_tab() { - assert!(!has_control_chars("a\tb")); - } - - #[test] - fn test_has_control_chars_rejects_other_control_chars() { - assert!(has_control_chars(&format!("a{}b", '\x1F'))); - } - - #[test] - fn test_is_safe_env_value_rejects_control_chars() { - assert!(!is_safe_env_value("ok\nno")); - assert!(!is_safe_env_value("ok\rno")); - assert!(!is_safe_env_value(&format!("ok{}no", '\x1F'))); - } - - #[test] - fn test_is_safe_env_value_rejects_ld_preload() { - assert!(!is_safe_env_value("LD_PRELOAD=/tmp/x.so")); - assert!(!is_safe_env_value("foo LD_PRELOAD bar")); - } - - #[test] - fn test_is_safe_env_value_rejects_too_long() { - assert!(!is_safe_env_value(&"a".repeat(MAX_ENV_VALUE_LEN + 1))); - assert!(is_safe_env_value(&"a".repeat(MAX_ENV_VALUE_LEN))); - } - - #[test] - fn test_is_allowed_container_env_key_allows_mpc_prefix_uppercase() { - assert!(is_allowed_container_env_key("MPC_FOO")); - assert!(is_allowed_container_env_key("MPC_FOO_123")); - assert!(is_allowed_container_env_key("MPC_A_B_C")); - } - - #[test] - fn test_is_allowed_container_env_key_rejects_lowercase_or_invalid() { - assert!(!is_allowed_container_env_key("MPC_foo")); - assert!(!is_allowed_container_env_key("MPC-FOO")); - assert!(!is_allowed_container_env_key("MPC.FOO")); - assert!(!is_allowed_container_env_key("MPC_")); - } - - #[test] - fn 
test_is_allowed_container_env_key_allows_compat_non_mpc_keys() { - assert!(is_allowed_container_env_key("RUST_LOG")); - assert!(is_allowed_container_env_key("RUST_BACKTRACE")); - assert!(is_allowed_container_env_key("NEAR_BOOT_NODES")); - } - - #[test] - fn test_is_allowed_container_env_key_denies_sensitive_keys() { - assert!(!is_allowed_container_env_key("MPC_P2P_PRIVATE_KEY")); - assert!(!is_allowed_container_env_key("MPC_ACCOUNT_SK")); - } - - // -- Docker cmd builder tests ------------------------------------------- - - fn make_digest() -> String { - format!("sha256:{}", "a".repeat(64)) - } - - fn base_env() -> BTreeMap { - BTreeMap::from([ - ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), - ("MPC_CONTRACT_ID".into(), "contract.near".into()), - ("MPC_ENV".into(), "testnet".into()), - ("MPC_HOME_DIR".into(), "/data".into()), - ("NEAR_BOOT_NODES".into(), "boot1,boot2".into()), - ("RUST_LOG".into(), "info".into()), - ]) - } - - #[test] - fn test_build_docker_cmd_sanitizes_ports_and_hosts() { - let env = BTreeMap::from([ - ("PORTS".into(), "11780:11780,--env BAD=1".into()), - ( - "EXTRA_HOSTS".into(), - "node:192.168.1.1,--volume /:/mnt".into(), - ), - ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), - ]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - - assert!(cmd.contains(&"MPC_ACCOUNT_ID=mpc-user-123".to_string())); - assert!(cmd.contains(&"11780:11780".to_string())); - assert!(cmd.contains(&"node:192.168.1.1".to_string())); - // Injection strings filtered - assert!(!cmd.iter().any(|arg| arg.contains("BAD=1"))); - assert!(!cmd.iter().any(|arg| arg.contains("/:/mnt"))); - } - - #[test] - fn test_extra_hosts_does_not_allow_ld_preload() { - let env = BTreeMap::from([ - ( - "EXTRA_HOSTS".into(), - "host:1.2.3.4,--env LD_PRELOAD=/evil.so".into(), - ), - ("MPC_ACCOUNT_ID".into(), "safe".into()), - ]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(cmd.contains(&"host:1.2.3.4".to_string())); - 
assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); - } - - #[test] - fn test_ports_does_not_allow_volume_injection() { - let env = BTreeMap::from([ - ("PORTS".into(), "2200:2200,--volume /:/mnt".into()), - ("MPC_ACCOUNT_ID".into(), "safe".into()), - ]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(cmd.contains(&"2200:2200".to_string())); - assert!(!cmd.iter().any(|arg| arg.contains("/:/mnt"))); - } - - #[test] - fn test_invalid_env_key_is_ignored() { - let env = BTreeMap::from([ - ("BAD_KEY".into(), "should_not_be_used".into()), - ("MPC_ACCOUNT_ID".into(), "safe".into()), - ]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(!cmd.join(" ").contains("should_not_be_used")); - assert!(cmd.contains(&"MPC_ACCOUNT_ID=safe".to_string())); - } - - #[test] - fn test_mpc_backup_encryption_key_is_allowed() { - let env = BTreeMap::from([("MPC_BACKUP_ENCRYPTION_KEY_HEX".into(), "0".repeat(64))]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!( - cmd.join(" ") - .contains(&format!("MPC_BACKUP_ENCRYPTION_KEY_HEX={}", "0".repeat(64))) - ); - } - - #[test] - fn test_malformed_extra_host_is_ignored() { - let env = BTreeMap::from([ - ( - "EXTRA_HOSTS".into(), - "badhostentry,no-colon,also--bad".into(), - ), - ("MPC_ACCOUNT_ID".into(), "safe".into()), - ]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(!cmd.contains(&"--add-host".to_string())); - } - - #[test] - fn test_env_value_with_shell_injection_is_handled_safely() { - let env = BTreeMap::from([("MPC_ACCOUNT_ID".into(), "safe; rm -rf /".into())]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(cmd.contains(&"MPC_ACCOUNT_ID=safe; rm -rf /".to_string())); - } - - #[test] - fn test_build_docker_cmd_nontee_no_dstack_mount() { - let mut env = BTreeMap::new(); - env.insert("MPC_ACCOUNT_ID".into(), "x".into()); - let cmd = 
build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); - let s = cmd.join(" "); - assert!(!s.contains("DSTACK_ENDPOINT=")); - assert!(!s.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); - } - - #[test] - fn test_build_docker_cmd_tee_has_dstack_mount() { - let env = BTreeMap::from([("MPC_ACCOUNT_ID".into(), "x".into())]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - let s = cmd.join(" "); - assert!(s.contains(&format!("DSTACK_ENDPOINT={DSTACK_UNIX_SOCKET}"))); - assert!(s.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); - } - - #[test] - fn test_build_docker_cmd_allows_arbitrary_mpc_prefix_env_vars() { - let mut env = base_env(); - env.insert("MPC_NEW_FEATURE_FLAG".into(), "1".into()); - env.insert("MPC_SOME_CONFIG".into(), "value".into()); - let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); - let cmd_str = cmd.join(" "); - assert!(cmd_str.contains("MPC_NEW_FEATURE_FLAG=1")); - assert!(cmd_str.contains("MPC_SOME_CONFIG=value")); - } - - #[test] - fn test_build_docker_cmd_blocks_sensitive_mpc_private_keys() { - let mut env = base_env(); - env.insert("MPC_P2P_PRIVATE_KEY".into(), "supersecret".into()); - env.insert("MPC_ACCOUNT_SK".into(), "supersecret2".into()); - let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); - let cmd_str = cmd.join(" "); - assert!(!cmd_str.contains("MPC_P2P_PRIVATE_KEY")); - assert!(!cmd_str.contains("MPC_ACCOUNT_SK")); - } - - #[test] - fn test_build_docker_cmd_rejects_env_value_with_newline() { - let mut env = base_env(); - env.insert("MPC_NEW_FEATURE_FLAG".into(), "ok\nbad".into()); - let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); - let cmd_str = cmd.join(" "); - assert!(!cmd_str.contains("MPC_NEW_FEATURE_FLAG")); - } - - #[test] - fn test_build_docker_cmd_enforces_max_env_count_cap() { - let mut env = base_env(); - for i in 0..=MAX_PASSTHROUGH_ENV_VARS { - 
env.insert(format!("MPC_X_{i}"), "1".into()); - } - let result = build_docker_cmd(Platform::NonTee, &env, &make_digest()); - assert_matches!(result, Err(LauncherError::TooManyEnvVars(_))); - } - - #[test] - fn test_build_docker_cmd_enforces_total_env_bytes_cap() { - let mut env = base_env(); - for i in 0..40 { - env.insert(format!("MPC_BIG_{i}"), "a".repeat(MAX_ENV_VALUE_LEN)); - } - let result = build_docker_cmd(Platform::NonTee, &env, &make_digest()); - assert_matches!(result, Err(LauncherError::EnvPayloadTooLarge(_))); - } - - // -- LD_PRELOAD injection tests ----------------------------------------- - - #[test] - fn test_ld_preload_injection_blocked_via_env_key() { - let env = BTreeMap::from([ - ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), - ("--env LD_PRELOAD".into(), "/path/to/my/malloc.so".into()), - ]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); - } - - #[test] - fn test_ld_preload_injection_blocked_via_extra_hosts() { - let env = BTreeMap::from([ - ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), - ( - "EXTRA_HOSTS".into(), - "host1:192.168.0.1,host2:192.168.0.2,--env LD_PRELOAD=/path/to/my/malloc.so".into(), - ), - ]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(cmd.contains(&"--add-host".to_string())); - assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); - } - - #[test] - fn test_ld_preload_injection_blocked_via_ports() { - let env = BTreeMap::from([ - ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), - ( - "PORTS".into(), - "11780:11780,--env LD_PRELOAD=/path/to/my/malloc.so".into(), - ), - ]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(cmd.contains(&"-p".to_string())); - assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); - } - - #[test] - fn test_ld_preload_injection_blocked_via_mpc_account_id() { - let env = BTreeMap::from([ - ( - 
"MPC_ACCOUNT_ID".into(), - "mpc-user-123, --env LD_PRELOAD=/path/to/my/malloc.so".into(), - ), - ( - "EXTRA_HOSTS".into(), - "host1:192.168.0.1,host2:192.168.0.2".into(), - ), - ]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); - } - - #[test] - fn test_ld_preload_injection_blocked_via_dash_e() { - let env = BTreeMap::from([ - ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), - ("-e LD_PRELOAD".into(), "/path/to/my/malloc.so".into()), - ]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); - } - - #[test] - fn test_ld_preload_injection_blocked_via_extra_hosts_dash_e() { - let env = BTreeMap::from([ - ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), - ( - "EXTRA_HOSTS".into(), - "host1:192.168.0.1,host2:192.168.0.2,-e LD_PRELOAD=/path/to/my/malloc.so".into(), - ), - ]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(cmd.contains(&"--add-host".to_string())); - assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); - } - - #[test] - fn test_ld_preload_injection_blocked_via_ports_dash_e() { - let env = BTreeMap::from([ - ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), - ( - "PORTS".into(), - "11780:11780,-e LD_PRELOAD=/path/to/my/malloc.so".into(), - ), - ]); - let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - assert!(cmd.contains(&"-p".to_string())); - assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); - } - - // -- Hash selection tests ----------------------------------------------- - - fn make_digest_json(hashes: &[&str]) -> String { - serde_json::json!({"approved_hashes": hashes}).to_string() - } - - #[test] - fn test_override_present() { - let dir = tempfile::tempdir().unwrap(); - let file = dir.path().join("image-digest.bin"); - let override_value = format!("sha256:{}", "a".repeat(64)); - let approved = 
vec![ - format!("sha256:{}", "b".repeat(64)), - override_value.clone(), - format!("sha256:{}", "c".repeat(64)), - ]; - let json = serde_json::json!({"approved_hashes": approved}).to_string(); - std::fs::write(&file, &json).unwrap(); - - // We can't easily override IMAGE_DIGEST_FILE constant, so test load_and_select_hash - // by creating a standalone test that reads from a custom path. - // Instead test the core logic directly: - let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); - assert!(data.approved_hashes.contains(&override_value)); - - // The override is in the approved list, so it should be valid - assert!(is_valid_sha256_digest(&override_value)); - assert!(data.approved_hashes.contains(&override_value)); - } - - #[test] - fn test_override_not_in_list() { - let approved = vec!["sha256:aaa", "sha256:bbb"]; - let json = make_digest_json(&approved); - let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); - let override_hash = "sha256:xyz"; - assert!(!data.approved_hashes.contains(&override_hash.to_string())); - } - - #[test] - fn test_no_override_picks_newest() { - let approved = vec!["sha256:newest", "sha256:older", "sha256:oldest"]; - let json = make_digest_json(&approved); - let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); - assert_eq!(data.approved_hashes[0], "sha256:newest"); - } - - #[test] - fn test_json_key_matches_node() { - // Must stay aligned with crates/node/src/tee/allowed_image_hashes_watcher.rs - let json = r#"{"approved_hashes": ["sha256:abc"]}"#; - let data: ApprovedHashesFile = serde_json::from_str(json).unwrap(); - assert_eq!(data.approved_hashes.len(), 1); - } - - #[test] - fn test_get_bare_digest() { - assert_eq!( - get_bare_digest(&format!("sha256:{}", "a".repeat(64))).unwrap(), - "a".repeat(64) - ); - get_bare_digest("invalid").unwrap_err(); - } - - #[test] - fn test_is_valid_sha256_digest() { - assert!(is_valid_sha256_digest(&format!( - "sha256:{}", - "a".repeat(64) - ))); - 
assert!(!is_valid_sha256_digest("sha256:tooshort")); - assert!(!is_valid_sha256_digest("not-a-digest")); - // hex::decode accepts uppercase; as_hex() normalizes to lowercase - assert!(is_valid_sha256_digest(&format!( - "sha256:{}", - "A".repeat(64) - ))); - } - - #[test] - fn test_parse_image_digest_normalizes_case() { - let upper = format!("sha256:{}", "AB".repeat(32)); - let hash = parse_image_digest(&upper).unwrap(); - assert_eq!(hash.as_hex(), "ab".repeat(32)); - } - - // -- Full flow docker cmd test ------------------------------------------ - - #[test] - fn test_parse_and_build_docker_cmd_full_flow() { - let dir = tempfile::tempdir().unwrap(); - let file = dir.path().join("user_config"); - std::fs::write( - &file, - "MPC_ACCOUNT_ID=test-user\nPORTS=11780:11780, --env BAD=oops\nEXTRA_HOSTS=host1:192.168.1.1, --volume /:/mnt\n", - ) - .unwrap(); - let config = parse_user_config(file.to_str().unwrap()).unwrap(); - let cmd = build_docker_cmd(Platform::Tee, &config.passthrough_env, &make_digest()).unwrap(); - let cmd_str = cmd.join(" "); - - assert!(cmd_str.contains("MPC_ACCOUNT_ID=test-user")); - assert!(cmd_str.contains("11780:11780")); - assert!(cmd_str.contains("host1:192.168.1.1")); - assert!(!cmd_str.contains("BAD=oops")); - assert!(!cmd_str.contains("/:/mnt")); - } - - #[test] - fn test_full_docker_cmd_structure() { - let env = BTreeMap::from([("MPC_ACCOUNT_ID".into(), "test-user".into())]); - let digest = make_digest(); - let cmd = build_docker_cmd(Platform::NonTee, &env, &digest).unwrap(); - - // Check required subsequence - assert!(cmd.contains(&"docker".to_string())); - assert!(cmd.contains(&"run".to_string())); - assert!(cmd.contains(&"--security-opt".to_string())); - assert!(cmd.contains(&"no-new-privileges:true".to_string())); - assert!(cmd.contains(&"/tapp:/tapp:ro".to_string())); - assert!(cmd.contains(&"shared-volume:/mnt/shared".to_string())); - assert!(cmd.contains(&"mpc-data:/data".to_string())); - 
assert!(cmd.contains(&MPC_CONTAINER_NAME.to_string())); - assert!(cmd.contains(&"--detach".to_string())); - // Image digest should be the last argument - assert_eq!(cmd.last().unwrap(), &digest); - } - - // -- Dstack tests ------------------------------------------------------- - - #[test] - fn test_extend_rtmr3_nontee_is_noop() { - // NonTee should return immediately without touching dstack - let rt = tokio::runtime::Runtime::new().unwrap(); - rt.block_on(extend_rtmr3(Platform::NonTee, &make_digest())) - .unwrap(); - } - - #[test] - fn test_extend_rtmr3_tee_requires_socket() { - // TEE mode should fail when socket doesn't exist - let rt = tokio::runtime::Runtime::new().unwrap(); - let result = rt.block_on(extend_rtmr3(Platform::Tee, &make_digest())); - assert_matches!(result, Err(LauncherError::DstackSocketMissing(_))); - } - - // -- MpcDockerImageHash integration test -------------------------------- - - #[test] - fn test_mpc_docker_image_hash_from_bare_hex() { - let bare_hex = "a".repeat(64); - let hash: MpcDockerImageHash = bare_hex.parse().unwrap(); - assert_eq!(hash.as_hex(), bare_hex); - } - - // -- Integration test (feature-gated) ----------------------------------- - - #[cfg(feature = "integration-test")] - mod integration { - use super::*; - - const TEST_DIGEST: &str = - "sha256:f2472280c437efc00fa25a030a24990ae16c4fbec0d74914e178473ce4d57372"; - - fn test_dstack_config() -> Config { - user_config_from_map(BTreeMap::from([ - ( - "MPC_IMAGE_TAGS".into(), - "83b52da4e2270c688cdd30da04f6b9d3565f25bb".into(), - ), - ("MPC_IMAGE_NAME".into(), "nearone/testing".into()), - ("MPC_REGISTRY".into(), "registry.hub.docker.com".into()), - ])) - .unwrap() - } - - #[tokio::test] - async fn test_validate_image_hash_real_registry() { - let timing = RpcTimingConfig { - request_timeout_secs: 10.0, - request_interval_secs: 1.0, - max_attempts: 20, - }; - let result = validate_image_hash(TEST_DIGEST, &test_dstack_config(), &timing) - .await - .unwrap(); - assert!(result, 
"validate_image_hash() failed for test image"); - } - } -} +// #[cfg(test)] +// mod tests { +// use super::*; +// use assert_matches::assert_matches; +// use launcher_interface::types::ApprovedHashesFile; + +// // -- DstackUserConfig parsing tests ------------------------------------- + +// #[test] +// fn test_user_config_defaults_when_map_is_empty() { +// let config = user_config_from_map(BTreeMap::new()).unwrap(); +// assert_eq!(config.image_tags, vec![DEFAULT_MPC_IMAGE_TAG]); +// assert_eq!(config.image_name, DEFAULT_MPC_IMAGE_NAME); +// assert_eq!(config.registry, DEFAULT_MPC_REGISTRY); +// assert_eq!( +// config.rpc_request_timeout_secs, +// DEFAULT_RPC_REQUEST_TIMEOUT_SECS +// ); +// assert_eq!( +// config.rpc_request_interval_secs, +// DEFAULT_RPC_REQUEST_INTERVAL_SECS +// ); +// assert_eq!(config.rpc_max_attempts, DEFAULT_RPC_MAX_ATTEMPTS); +// assert!(config.mpc_hash_override.is_none()); +// assert!(config.passthrough_env.is_empty()); +// } + +// #[test] +// fn test_user_config_typed_fields_extracted_from_map() { +// let map = BTreeMap::from([ +// ( +// DSTACK_USER_CONFIG_MPC_IMAGE_TAGS.into(), +// "v1.0, v1.1".into(), +// ), +// (DSTACK_USER_CONFIG_MPC_IMAGE_NAME.into(), "my/image".into()), +// ( +// DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY.into(), +// "my.registry.io".into(), +// ), +// (ENV_VAR_RPC_REQUEST_TIMEOUT_SECS.into(), "30.0".into()), +// (ENV_VAR_RPC_MAX_ATTEMPTS.into(), "5".into()), +// ("MPC_ACCOUNT_ID".into(), "account.near".into()), +// ]); +// let config = user_config_from_map(map).unwrap(); +// assert_eq!(config.image_tags, vec!["v1.0", "v1.1"]); +// assert_eq!(config.image_name, "my/image"); +// assert_eq!(config.registry, "my.registry.io"); +// assert_eq!(config.rpc_request_timeout_secs, 30.0); +// assert_eq!(config.rpc_max_attempts, 5); +// // Launcher-only keys are NOT in passthrough_env +// assert!( +// !config +// .passthrough_env +// .contains_key(DSTACK_USER_CONFIG_MPC_IMAGE_TAGS) +// ); +// assert!( +// !config +// .passthrough_env 
+// .contains_key(ENV_VAR_RPC_MAX_ATTEMPTS) +// ); +// // Container passthrough keys ARE in passthrough_env +// assert_eq!( +// config.passthrough_env.get("MPC_ACCOUNT_ID").unwrap(), +// "account.near" +// ); +// } + +// #[test] +// fn test_user_config_malformed_rpc_fields_error() { +// let map = BTreeMap::from([(ENV_VAR_RPC_MAX_ATTEMPTS.into(), "not_a_number".into())]); +// let err = user_config_from_map(map).unwrap_err(); +// assert_matches!(err, LauncherError::InvalidEnvVar { key, .. } if key == ENV_VAR_RPC_MAX_ATTEMPTS); + +// let map = BTreeMap::from([(ENV_VAR_RPC_REQUEST_TIMEOUT_SECS.into(), "bad".into())]); +// let err = user_config_from_map(map).unwrap_err(); +// assert_matches!(err, LauncherError::InvalidEnvVar { key, .. } if key == ENV_VAR_RPC_REQUEST_TIMEOUT_SECS); + +// let map = BTreeMap::from([(ENV_VAR_RPC_REQUEST_INTERVAL_SECS.into(), "bad".into())]); +// let err = user_config_from_map(map).unwrap_err(); +// assert_matches!(err, LauncherError::InvalidEnvVar { key, .. } if key == ENV_VAR_RPC_REQUEST_INTERVAL_SECS); +// } + +// #[test] +// fn test_user_config_hash_override_extracted() { +// let map = BTreeMap::from([(ENV_VAR_MPC_HASH_OVERRIDE.into(), "sha256:abc".into())]); +// let config = user_config_from_map(map).unwrap(); +// assert_eq!(config.mpc_hash_override.unwrap(), "sha256:abc"); +// assert!( +// !config +// .passthrough_env +// .contains_key(ENV_VAR_MPC_HASH_OVERRIDE) +// ); +// } + +// #[test] +// fn test_parse_user_config_from_file() { +// let dir = tempfile::tempdir().unwrap(); +// let file = dir.path().join("user_config"); +// std::fs::write( +// &file, +// "# comment\nMPC_ACCOUNT_ID=test\nMPC_IMAGE_NAME=my/image\n", +// ) +// .unwrap(); +// let config = parse_user_config(file.to_str().unwrap()).unwrap(); +// assert_eq!(config.image_name, "my/image"); +// assert_eq!( +// config.passthrough_env.get("MPC_ACCOUNT_ID").unwrap(), +// "test" +// ); +// assert!(!config.passthrough_env.contains_key("MPC_IMAGE_NAME")); +// } + +// // -- Host/port 
validation tests ----------------------------------------- + +// #[test] +// fn test_valid_host_entry() { +// assert!(is_valid_host_entry("node.local:192.168.1.1")); +// assert!(!is_valid_host_entry("node.local:not-an-ip")); +// assert!(!is_valid_host_entry("--env LD_PRELOAD=hack.so")); +// } + +// #[test] +// fn test_valid_port_mapping() { +// assert!(is_valid_port_mapping("11780:11780")); +// assert!(!is_valid_port_mapping("65536:11780")); +// assert!(!is_valid_port_mapping("--volume /:/mnt")); +// } + +// // -- Security validation tests ------------------------------------------ + +// #[test] +// fn test_has_control_chars_rejects_newline_and_cr() { +// assert!(has_control_chars("a\nb")); +// assert!(has_control_chars("a\rb")); +// } + +// #[test] +// fn test_has_control_chars_allows_tab() { +// assert!(!has_control_chars("a\tb")); +// } + +// #[test] +// fn test_has_control_chars_rejects_other_control_chars() { +// assert!(has_control_chars(&format!("a{}b", '\x1F'))); +// } + +// #[test] +// fn test_is_safe_env_value_rejects_control_chars() { +// assert!(!is_safe_env_value("ok\nno")); +// assert!(!is_safe_env_value("ok\rno")); +// assert!(!is_safe_env_value(&format!("ok{}no", '\x1F'))); +// } + +// #[test] +// fn test_is_safe_env_value_rejects_ld_preload() { +// assert!(!is_safe_env_value("LD_PRELOAD=/tmp/x.so")); +// assert!(!is_safe_env_value("foo LD_PRELOAD bar")); +// } + +// #[test] +// fn test_is_safe_env_value_rejects_too_long() { +// assert!(!is_safe_env_value(&"a".repeat(MAX_ENV_VALUE_LEN + 1))); +// assert!(is_safe_env_value(&"a".repeat(MAX_ENV_VALUE_LEN))); +// } + +// #[test] +// fn test_is_allowed_container_env_key_allows_mpc_prefix_uppercase() { +// assert!(is_allowed_container_env_key("MPC_FOO")); +// assert!(is_allowed_container_env_key("MPC_FOO_123")); +// assert!(is_allowed_container_env_key("MPC_A_B_C")); +// } + +// #[test] +// fn test_is_allowed_container_env_key_rejects_lowercase_or_invalid() { +// 
assert!(!is_allowed_container_env_key("MPC_foo")); +// assert!(!is_allowed_container_env_key("MPC-FOO")); +// assert!(!is_allowed_container_env_key("MPC.FOO")); +// assert!(!is_allowed_container_env_key("MPC_")); +// } + +// #[test] +// fn test_is_allowed_container_env_key_allows_compat_non_mpc_keys() { +// assert!(is_allowed_container_env_key("RUST_LOG")); +// assert!(is_allowed_container_env_key("RUST_BACKTRACE")); +// assert!(is_allowed_container_env_key("NEAR_BOOT_NODES")); +// } + +// #[test] +// fn test_is_allowed_container_env_key_denies_sensitive_keys() { +// assert!(!is_allowed_container_env_key("MPC_P2P_PRIVATE_KEY")); +// assert!(!is_allowed_container_env_key("MPC_ACCOUNT_SK")); +// } + +// // -- Docker cmd builder tests ------------------------------------------- + +// fn make_digest() -> String { +// format!("sha256:{}", "a".repeat(64)) +// } + +// fn base_env() -> BTreeMap { +// BTreeMap::from([ +// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), +// ("MPC_CONTRACT_ID".into(), "contract.near".into()), +// ("MPC_ENV".into(), "testnet".into()), +// ("MPC_HOME_DIR".into(), "/data".into()), +// ("NEAR_BOOT_NODES".into(), "boot1,boot2".into()), +// ("RUST_LOG".into(), "info".into()), +// ]) +// } + +// #[test] +// fn test_build_docker_cmd_sanitizes_ports_and_hosts() { +// let env = BTreeMap::from([ +// ("PORTS".into(), "11780:11780,--env BAD=1".into()), +// ( +// "EXTRA_HOSTS".into(), +// "node:192.168.1.1,--volume /:/mnt".into(), +// ), +// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), +// ]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); + +// assert!(cmd.contains(&"MPC_ACCOUNT_ID=mpc-user-123".to_string())); +// assert!(cmd.contains(&"11780:11780".to_string())); +// assert!(cmd.contains(&"node:192.168.1.1".to_string())); +// // Injection strings filtered +// assert!(!cmd.iter().any(|arg| arg.contains("BAD=1"))); +// assert!(!cmd.iter().any(|arg| arg.contains("/:/mnt"))); +// } + +// #[test] +// fn 
test_extra_hosts_does_not_allow_ld_preload() { +// let env = BTreeMap::from([ +// ( +// "EXTRA_HOSTS".into(), +// "host:1.2.3.4,--env LD_PRELOAD=/evil.so".into(), +// ), +// ("MPC_ACCOUNT_ID".into(), "safe".into()), +// ]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!(cmd.contains(&"host:1.2.3.4".to_string())); +// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); +// } + +// #[test] +// fn test_ports_does_not_allow_volume_injection() { +// let env = BTreeMap::from([ +// ("PORTS".into(), "2200:2200,--volume /:/mnt".into()), +// ("MPC_ACCOUNT_ID".into(), "safe".into()), +// ]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!(cmd.contains(&"2200:2200".to_string())); +// assert!(!cmd.iter().any(|arg| arg.contains("/:/mnt"))); +// } + +// #[test] +// fn test_invalid_env_key_is_ignored() { +// let env = BTreeMap::from([ +// ("BAD_KEY".into(), "should_not_be_used".into()), +// ("MPC_ACCOUNT_ID".into(), "safe".into()), +// ]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!(!cmd.join(" ").contains("should_not_be_used")); +// assert!(cmd.contains(&"MPC_ACCOUNT_ID=safe".to_string())); +// } + +// #[test] +// fn test_mpc_backup_encryption_key_is_allowed() { +// let env = BTreeMap::from([("MPC_BACKUP_ENCRYPTION_KEY_HEX".into(), "0".repeat(64))]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!( +// cmd.join(" ") +// .contains(&format!("MPC_BACKUP_ENCRYPTION_KEY_HEX={}", "0".repeat(64))) +// ); +// } + +// #[test] +// fn test_malformed_extra_host_is_ignored() { +// let env = BTreeMap::from([ +// ( +// "EXTRA_HOSTS".into(), +// "badhostentry,no-colon,also--bad".into(), +// ), +// ("MPC_ACCOUNT_ID".into(), "safe".into()), +// ]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!(!cmd.contains(&"--add-host".to_string())); +// } + +// #[test] +// fn 
test_env_value_with_shell_injection_is_handled_safely() { +// let env = BTreeMap::from([("MPC_ACCOUNT_ID".into(), "safe; rm -rf /".into())]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!(cmd.contains(&"MPC_ACCOUNT_ID=safe; rm -rf /".to_string())); +// } + +// #[test] +// fn test_build_docker_cmd_nontee_no_dstack_mount() { +// let mut env = BTreeMap::new(); +// env.insert("MPC_ACCOUNT_ID".into(), "x".into()); +// let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); +// let s = cmd.join(" "); +// assert!(!s.contains("DSTACK_ENDPOINT=")); +// assert!(!s.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); +// } + +// #[test] +// fn test_build_docker_cmd_tee_has_dstack_mount() { +// let env = BTreeMap::from([("MPC_ACCOUNT_ID".into(), "x".into())]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// let s = cmd.join(" "); +// assert!(s.contains(&format!("DSTACK_ENDPOINT={DSTACK_UNIX_SOCKET}"))); +// assert!(s.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); +// } + +// #[test] +// fn test_build_docker_cmd_allows_arbitrary_mpc_prefix_env_vars() { +// let mut env = base_env(); +// env.insert("MPC_NEW_FEATURE_FLAG".into(), "1".into()); +// env.insert("MPC_SOME_CONFIG".into(), "value".into()); +// let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); +// let cmd_str = cmd.join(" "); +// assert!(cmd_str.contains("MPC_NEW_FEATURE_FLAG=1")); +// assert!(cmd_str.contains("MPC_SOME_CONFIG=value")); +// } + +// #[test] +// fn test_build_docker_cmd_blocks_sensitive_mpc_private_keys() { +// let mut env = base_env(); +// env.insert("MPC_P2P_PRIVATE_KEY".into(), "supersecret".into()); +// env.insert("MPC_ACCOUNT_SK".into(), "supersecret2".into()); +// let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); +// let cmd_str = cmd.join(" "); +// assert!(!cmd_str.contains("MPC_P2P_PRIVATE_KEY")); +// 
assert!(!cmd_str.contains("MPC_ACCOUNT_SK")); +// } + +// #[test] +// fn test_build_docker_cmd_rejects_env_value_with_newline() { +// let mut env = base_env(); +// env.insert("MPC_NEW_FEATURE_FLAG".into(), "ok\nbad".into()); +// let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); +// let cmd_str = cmd.join(" "); +// assert!(!cmd_str.contains("MPC_NEW_FEATURE_FLAG")); +// } + +// #[test] +// fn test_build_docker_cmd_enforces_max_env_count_cap() { +// let mut env = base_env(); +// for i in 0..=MAX_PASSTHROUGH_ENV_VARS { +// env.insert(format!("MPC_X_{i}"), "1".into()); +// } +// let result = build_docker_cmd(Platform::NonTee, &env, &make_digest()); +// assert_matches!(result, Err(LauncherError::TooManyEnvVars(_))); +// } + +// #[test] +// fn test_build_docker_cmd_enforces_total_env_bytes_cap() { +// let mut env = base_env(); +// for i in 0..40 { +// env.insert(format!("MPC_BIG_{i}"), "a".repeat(MAX_ENV_VALUE_LEN)); +// } +// let result = build_docker_cmd(Platform::NonTee, &env, &make_digest()); +// assert_matches!(result, Err(LauncherError::EnvPayloadTooLarge(_))); +// } + +// // -- LD_PRELOAD injection tests ----------------------------------------- + +// #[test] +// fn test_ld_preload_injection_blocked_via_env_key() { +// let env = BTreeMap::from([ +// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), +// ("--env LD_PRELOAD".into(), "/path/to/my/malloc.so".into()), +// ]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); +// } + +// #[test] +// fn test_ld_preload_injection_blocked_via_extra_hosts() { +// let env = BTreeMap::from([ +// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), +// ( +// "EXTRA_HOSTS".into(), +// "host1:192.168.0.1,host2:192.168.0.2,--env LD_PRELOAD=/path/to/my/malloc.so".into(), +// ), +// ]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!(cmd.contains(&"--add-host".to_string())); 
+// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); +// } + +// #[test] +// fn test_ld_preload_injection_blocked_via_ports() { +// let env = BTreeMap::from([ +// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), +// ( +// "PORTS".into(), +// "11780:11780,--env LD_PRELOAD=/path/to/my/malloc.so".into(), +// ), +// ]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!(cmd.contains(&"-p".to_string())); +// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); +// } + +// #[test] +// fn test_ld_preload_injection_blocked_via_mpc_account_id() { +// let env = BTreeMap::from([ +// ( +// "MPC_ACCOUNT_ID".into(), +// "mpc-user-123, --env LD_PRELOAD=/path/to/my/malloc.so".into(), +// ), +// ( +// "EXTRA_HOSTS".into(), +// "host1:192.168.0.1,host2:192.168.0.2".into(), +// ), +// ]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); +// } + +// #[test] +// fn test_ld_preload_injection_blocked_via_dash_e() { +// let env = BTreeMap::from([ +// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), +// ("-e LD_PRELOAD".into(), "/path/to/my/malloc.so".into()), +// ]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); +// } + +// #[test] +// fn test_ld_preload_injection_blocked_via_extra_hosts_dash_e() { +// let env = BTreeMap::from([ +// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), +// ( +// "EXTRA_HOSTS".into(), +// "host1:192.168.0.1,host2:192.168.0.2,-e LD_PRELOAD=/path/to/my/malloc.so".into(), +// ), +// ]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!(cmd.contains(&"--add-host".to_string())); +// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); +// } + +// #[test] +// fn test_ld_preload_injection_blocked_via_ports_dash_e() { +// let env = BTreeMap::from([ +// ("MPC_ACCOUNT_ID".into(), 
"mpc-user-123".into()), +// ( +// "PORTS".into(), +// "11780:11780,-e LD_PRELOAD=/path/to/my/malloc.so".into(), +// ), +// ]); +// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); +// assert!(cmd.contains(&"-p".to_string())); +// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); +// } + +// // -- Hash selection tests ----------------------------------------------- + +// fn make_digest_json(hashes: &[&str]) -> String { +// serde_json::json!({"approved_hashes": hashes}).to_string() +// } + +// #[test] +// fn test_override_present() { +// let dir = tempfile::tempdir().unwrap(); +// let file = dir.path().join("image-digest.bin"); +// let override_value = format!("sha256:{}", "a".repeat(64)); +// let approved = vec![ +// format!("sha256:{}", "b".repeat(64)), +// override_value.clone(), +// format!("sha256:{}", "c".repeat(64)), +// ]; +// let json = serde_json::json!({"approved_hashes": approved}).to_string(); +// std::fs::write(&file, &json).unwrap(); + +// // We can't easily override IMAGE_DIGEST_FILE constant, so test load_and_select_hash +// // by creating a standalone test that reads from a custom path. 
+// // Instead test the core logic directly: +// let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); +// assert!(data.approved_hashes.contains(&override_value)); + +// // The override is in the approved list, so it should be valid +// assert!(is_valid_sha256_digest(&override_value)); +// assert!(data.approved_hashes.contains(&override_value)); +// } + +// #[test] +// fn test_override_not_in_list() { +// let approved = vec!["sha256:aaa", "sha256:bbb"]; +// let json = make_digest_json(&approved); +// let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); +// let override_hash = "sha256:xyz"; +// assert!(!data.approved_hashes.contains(&override_hash.to_string())); +// } + +// #[test] +// fn test_no_override_picks_newest() { +// let approved = vec!["sha256:newest", "sha256:older", "sha256:oldest"]; +// let json = make_digest_json(&approved); +// let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); +// assert_eq!(data.approved_hashes[0], "sha256:newest"); +// } + +// #[test] +// fn test_json_key_matches_node() { +// // Must stay aligned with crates/node/src/tee/allowed_image_hashes_watcher.rs +// let json = r#"{"approved_hashes": ["sha256:abc"]}"#; +// let data: ApprovedHashesFile = serde_json::from_str(json).unwrap(); +// assert_eq!(data.approved_hashes.len(), 1); +// } + +// #[test] +// fn test_get_bare_digest() { +// assert_eq!( +// get_bare_digest(&format!("sha256:{}", "a".repeat(64))).unwrap(), +// "a".repeat(64) +// ); +// get_bare_digest("invalid").unwrap_err(); +// } + +// #[test] +// fn test_is_valid_sha256_digest() { +// assert!(is_valid_sha256_digest(&format!( +// "sha256:{}", +// "a".repeat(64) +// ))); +// assert!(!is_valid_sha256_digest("sha256:tooshort")); +// assert!(!is_valid_sha256_digest("not-a-digest")); +// // hex::decode accepts uppercase; as_hex() normalizes to lowercase +// assert!(is_valid_sha256_digest(&format!( +// "sha256:{}", +// "A".repeat(64) +// ))); +// } + +// #[test] +// fn 
test_parse_image_digest_normalizes_case() { +// let upper = format!("sha256:{}", "AB".repeat(32)); +// let hash = parse_image_digest(&upper).unwrap(); +// assert_eq!(hash.as_hex(), "ab".repeat(32)); +// } + +// // -- Full flow docker cmd test ------------------------------------------ + +// #[test] +// fn test_parse_and_build_docker_cmd_full_flow() { +// let dir = tempfile::tempdir().unwrap(); +// let file = dir.path().join("user_config"); +// std::fs::write( +// &file, +// "MPC_ACCOUNT_ID=test-user\nPORTS=11780:11780, --env BAD=oops\nEXTRA_HOSTS=host1:192.168.1.1, --volume /:/mnt\n", +// ) +// .unwrap(); +// let config = parse_user_config(file.to_str().unwrap()).unwrap(); +// let cmd = build_docker_cmd(Platform::Tee, &config.passthrough_env, &make_digest()).unwrap(); +// let cmd_str = cmd.join(" "); + +// assert!(cmd_str.contains("MPC_ACCOUNT_ID=test-user")); +// assert!(cmd_str.contains("11780:11780")); +// assert!(cmd_str.contains("host1:192.168.1.1")); +// assert!(!cmd_str.contains("BAD=oops")); +// assert!(!cmd_str.contains("/:/mnt")); +// } + +// #[test] +// fn test_full_docker_cmd_structure() { +// let env = BTreeMap::from([("MPC_ACCOUNT_ID".into(), "test-user".into())]); +// let digest = make_digest(); +// let cmd = build_docker_cmd(Platform::NonTee, &env, &digest).unwrap(); + +// // Check required subsequence +// assert!(cmd.contains(&"docker".to_string())); +// assert!(cmd.contains(&"run".to_string())); +// assert!(cmd.contains(&"--security-opt".to_string())); +// assert!(cmd.contains(&"no-new-privileges:true".to_string())); +// assert!(cmd.contains(&"/tapp:/tapp:ro".to_string())); +// assert!(cmd.contains(&"shared-volume:/mnt/shared".to_string())); +// assert!(cmd.contains(&"mpc-data:/data".to_string())); +// assert!(cmd.contains(&MPC_CONTAINER_NAME.to_string())); +// assert!(cmd.contains(&"--detach".to_string())); +// // Image digest should be the last argument +// assert_eq!(cmd.last().unwrap(), &digest); +// } + +// // -- Dstack tests 
------------------------------------------------------- + +// #[test] +// fn test_extend_rtmr3_nontee_is_noop() { +// // NonTee should return immediately without touching dstack +// let rt = tokio::runtime::Runtime::new().unwrap(); +// rt.block_on(extend_rtmr3(Platform::NonTee, &make_digest())) +// .unwrap(); +// } + +// #[test] +// fn test_extend_rtmr3_tee_requires_socket() { +// // TEE mode should fail when socket doesn't exist +// let rt = tokio::runtime::Runtime::new().unwrap(); +// let result = rt.block_on(extend_rtmr3(Platform::Tee, &make_digest())); +// assert_matches!(result, Err(LauncherError::DstackSocketMissing(_))); +// } + +// // -- MpcDockerImageHash integration test -------------------------------- + +// #[test] +// fn test_mpc_docker_image_hash_from_bare_hex() { +// let bare_hex = "a".repeat(64); +// let hash: MpcDockerImageHash = bare_hex.parse().unwrap(); +// assert_eq!(hash.as_hex(), bare_hex); +// } + +// // -- Integration test (feature-gated) ----------------------------------- + +// #[cfg(feature = "integration-test")] +// mod integration { +// use super::*; + +// const TEST_DIGEST: &str = +// "sha256:f2472280c437efc00fa25a030a24990ae16c4fbec0d74914e178473ce4d57372"; + +// fn test_dstack_config() -> Config { +// user_config_from_map(BTreeMap::from([ +// ( +// "MPC_IMAGE_TAGS".into(), +// "83b52da4e2270c688cdd30da04f6b9d3565f25bb".into(), +// ), +// ("MPC_IMAGE_NAME".into(), "nearone/testing".into()), +// ("MPC_REGISTRY".into(), "registry.hub.docker.com".into()), +// ])) +// .unwrap() +// } + +// #[tokio::test] +// async fn test_validate_image_hash_real_registry() { +// let timing = RpcTimingConfig { +// request_timeout_secs: 10.0, +// request_interval_secs: 1.0, +// max_attempts: 20, +// }; +// let result = validate_image_hash(TEST_DIGEST, &test_dstack_config(), &timing) +// .await +// .unwrap(); +// assert!(result, "validate_image_hash() failed for test image"); +// } +// } +// } diff --git a/crates/tee-launcher/src/types.rs 
b/crates/tee-launcher/src/types.rs index de681896a..6339920bd 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -1,3 +1,4 @@ +use bounded_collections::NonEmptyVec; use clap::{Parser, ValueEnum}; use mpc_primitives::hash::MpcDockerImageHash; use serde::{Deserialize, Serialize}; @@ -47,15 +48,15 @@ pub struct Config { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct LauncherConfig { /// Docker image tags to search (from `MPC_IMAGE_TAGS`, comma-separated). - pub image_tags: Vec, + pub image_tags: NonEmptyVec, /// Docker image name (from `MPC_IMAGE_NAME`). pub image_name: String, /// Docker registry (from `MPC_REGISTRY`). pub registry: String, /// Per-request timeout for registry RPC calls (from `RPC_REQUEST_TIMEOUT_SECS`). - pub rpc_request_timeout_secs: f64, + pub rpc_request_timeout_secs: u64, /// Delay between registry RPC retries (from `RPC_REQUEST_INTERVAL_SECS`). - pub rpc_request_interval_secs: f64, + pub rpc_request_interval_secs: u64, /// Maximum registry RPC attempts (from `RPC_MAX_ATTEMPTS`). pub rpc_max_attempts: u32, /// Optional hash override that bypasses registry lookup (from `MPC_HASH_OVERRIDE`). 
From 2965cdea433bb3d1914f13890c29641b4a82389c Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Mon, 2 Mar 2026 18:41:19 +0100 Subject: [PATCH 09/82] wip --- crates/tee-launcher/src/error.rs | 5 +-- crates/tee-launcher/src/main.rs | 59 +++++++++++--------------------- 2 files changed, 23 insertions(+), 41 deletions(-) diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index 92836663d..2c67af0a4 100644 --- a/crates/tee-launcher/src/error.rs +++ b/crates/tee-launcher/src/error.rs @@ -1,5 +1,6 @@ use std::path::PathBuf; +use mpc_primitives::hash::MpcDockerImageHash; use thiserror::Error; #[derive(Error, Debug)] @@ -43,8 +44,8 @@ pub enum LauncherError { #[error("MPC image hash validation failed: {0}")] ImageValidationFailed(String), - #[error("docker run failed for validated hash={0}")] - DockerRunFailed(String), + #[error("docker run failed for validated hash")] + DockerRunFailed(MpcDockerImageHash), #[error("Too many env vars to pass through (>{0})")] TooManyEnvVars(usize), diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index eec320fd5..d609bdfa4 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -167,29 +167,6 @@ fn is_safe_port_mapping(mapping: &str) -> bool { // Hash selection // --------------------------------------------------------------------------- -/// Parse a full `sha256:` digest into a validated [`MpcDockerImageHash`]. -/// -/// Uses the workspace type's `FromStr` impl which does `hex::decode` + 32-byte -/// length check — no regex needed. 
-fn parse_image_digest(full_digest: &str) -> Result { - let bare_hex = full_digest.strip_prefix(SHA256_PREFIX).ok_or_else(|| { - LauncherError::InvalidDefaultDigest(format!( - "Invalid digest (missing sha256: prefix): {full_digest}" - )) - })?; - bare_hex - .parse::() - .map_err(|e| LauncherError::InvalidDefaultDigest(format!("{full_digest}: {e}"))) -} - -fn is_valid_sha256_digest(digest: &str) -> bool { - parse_image_digest(digest).is_ok() -} - -fn get_bare_digest(full_digest: &str) -> Result { - Ok(parse_image_digest(full_digest)?.as_hex()) -} - fn load_and_select_hash(args: &CliArgs, dstack_config: &Config) -> Result { let approved_hashes = if std::path::Path::new(IMAGE_DIGEST_FILE).is_file() { let content = std::fs::read_to_string(IMAGE_DIGEST_FILE).map_err(|source| { @@ -198,7 +175,7 @@ fn load_and_select_hash(args: &CliArgs, dstack_config: &Config) -> Result Result> { - let bare_digest = get_bare_digest(image_digest)?; - let mut cmd: Vec = vec!["docker".into(), "run".into()]; // Required environment variables - cmd.extend(["--env".into(), format!("MPC_IMAGE_HASH={bare_digest}")]); + cmd.extend([ + "--env".into(), + format!("MPC_IMAGE_HASH={}", image_digest.as_hex()), + ]); cmd.extend([ "--env".into(), format!("MPC_LATEST_ALLOWED_HASH_FILE={IMAGE_DIGEST_FILE}"), @@ -547,7 +525,7 @@ fn build_docker_cmd( "--name".into(), MPC_CONTAINER_NAME.into(), "--detach".into(), - image_digest.to_string(), + image_digest.as_hex(), ]); tracing::info!("docker cmd {}", cmd.join(" ")); @@ -563,10 +541,13 @@ fn build_docker_cmd( fn launch_mpc_container( platform: Platform, - valid_hash: &str, + valid_hash: &MpcDockerImageHash, mpc_config: &MpcBinaryConfig, ) -> Result<()> { - tracing::info!("Launching MPC node with validated hash: {valid_hash}"); + tracing::info!( + "Launching MPC node with validated hash: {}", + valid_hash.as_hex() + ); remove_existing_container(); let docker_cmd = build_docker_cmd(platform, mpc_config, valid_hash)?; @@ -574,12 +555,10 @@ fn 
launch_mpc_container( let status = Command::new(&docker_cmd[0]) .args(&docker_cmd[1..]) .status() - .map_err(|e| LauncherError::DockerRunFailed(e.to_string()))?; + .map_err(|e| LauncherError::DockerRunFailed(valid_hash.clone()))?; if !status.success() { - return Err(LauncherError::DockerRunFailed(format!( - "validated hash={valid_hash}" - ))); + return Err(LauncherError::DockerRunFailed(valid_hash.clone())); } tracing::info!("MPC launched successfully."); @@ -591,7 +570,7 @@ fn is_unix_socket(path: &str) -> bool { std::fs::metadata(path).is_ok_and(|meta| meta.file_type().is_socket()) } -async fn extend_rtmr3(platform: Platform, valid_hash: &str) -> Result<()> { +async fn extend_rtmr3(platform: Platform, image_hash: MpcDockerImageHash) -> Result<()> { if platform == Platform::NonTee { tracing::info!("PLATFORM=NONTEE → skipping RTMR3 extension step."); return Ok(()); @@ -603,8 +582,7 @@ async fn extend_rtmr3(platform: Platform, valid_hash: &str) -> Result<()> { )); } - let bare = get_bare_digest(valid_hash)?; - tracing::info!("Extending RTMR3 with validated hash: {bare}"); + tracing::info!(?image_hash, "extending RTMR3"); let client = dstack_sdk::dstack_client::DstackClient::new(Some(DSTACK_UNIX_SOCKET)); @@ -616,7 +594,10 @@ async fn extend_rtmr3(platform: Platform, valid_hash: &str) -> Result<()> { // EmitEvent with the image digest client - .emit_event("mpc-image-digest".to_string(), bare.into_bytes()) + .emit_event( + "mpc-image-digest".to_string(), + image_hash.as_hex().into_bytes(), + ) .await .map_err(|e| LauncherError::DstackEmitEventFailed(e.to_string()))?; From 795576ab88dada49117b92fab5ea9c487af70b61 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Tue, 3 Mar 2026 16:30:52 +0100 Subject: [PATCH 10/82] wip --- Cargo.lock | 1 + crates/launcher-interface/Cargo.toml | 1 + crates/launcher-interface/src/lib.rs | 10 +- crates/tee-launcher/src/error.rs | 24 +++- crates/tee-launcher/src/main.rs | 183 +++++++++------------------ crates/tee-launcher/src/types.rs | 
2 +- 6 files changed, 89 insertions(+), 132 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a7c74e242..dc3bae887 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4929,6 +4929,7 @@ dependencies = [ name = "launcher-interface" version = "3.5.1" dependencies = [ + "bounded-collections", "mpc-primitives", "serde", ] diff --git a/crates/launcher-interface/Cargo.toml b/crates/launcher-interface/Cargo.toml index b7cb17847..0da73cbaa 100644 --- a/crates/launcher-interface/Cargo.toml +++ b/crates/launcher-interface/Cargo.toml @@ -5,6 +5,7 @@ edition.workspace = true license.workspace = true [dependencies] +bounded-collections = { workspace = true } mpc-primitives = { workspace = true } serde = { workspace = true } diff --git a/crates/launcher-interface/src/lib.rs b/crates/launcher-interface/src/lib.rs index dd0601a0c..6e952ca4c 100644 --- a/crates/launcher-interface/src/lib.rs +++ b/crates/launcher-interface/src/lib.rs @@ -5,8 +5,16 @@ pub mod types { /// JSON structure for the approved hashes file written by the MPC node. 
#[derive(Debug, Serialize, Deserialize)] pub struct ApprovedHashesFile { - pub approved_hashes: Vec, + pub approved_hashes: bounded_collections::NonEmptyVec, + } + + impl ApprovedHashesFile { + pub fn newest_approved_hash(&self) -> &MpcDockerImageHash { + self.approved_hashes.first() + } } } +// TODO: add insta snapshot test for this type + mod paths {} diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index 2c67af0a4..720f89cf0 100644 --- a/crates/tee-launcher/src/error.rs +++ b/crates/tee-launcher/src/error.rs @@ -32,12 +32,6 @@ pub enum LauncherError { #[error("Failed to get successful response from {url} after {attempts} attempts")] RegistryRequestFailed { url: String, attempts: u32 }, - #[error("docker pull failed for {0}")] - DockerPullFailed(String), - - #[error("docker inspect failed for {0}")] - DockerInspectFailed(String), - #[error("Digest mismatch: pulled {pulled} != expected {expected}")] DigestMismatch { pulled: String, expected: String }, @@ -79,6 +73,22 @@ pub enum LauncherError { #[error("Registry response parse error: {0}")] RegistryResponseParse(String), + + #[error("The selected image failed digest validation: {0}")] + ImageDigestValidationFailed(#[from] ImageDigestValidationFailed), } -pub type Result = std::result::Result; +#[derive(Error, Debug)] +pub enum ImageDigestValidationFailed { + #[error("docker pull failed for {0}")] + DockerPullFailed(String), + #[error("docker inspect failed for {0}")] + DockerInspectFailed(String), + #[error( + "pulled image has mismatching digest. 
pulled: {pulled_digest}, expected: {expected_digest}" + )] + PulledImageHasMismatchedDigest { + expected_digest: String, + pulled_digest: String, + }, +} diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index d609bdfa4..aed65da78 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -33,20 +33,13 @@ async fn main() { } } -async fn run() -> Result<()> { +async fn run() -> Result<(), LauncherError> { tracing::info!("start"); let args = CliArgs::parse(); tracing::info!(platform = ?args.platform, "starting launcher"); - // TODO is_unix_socket can be a compile time check - if args.platform == Platform::Tee && !is_unix_socket(DSTACK_UNIX_SOCKET) { - return Err(LauncherError::DstackSocketMissing( - DSTACK_UNIX_SOCKET.to_string(), - )); - } - // Load dstack user config let config_file = std::fs::OpenOptions::new() .read(true) @@ -55,15 +48,36 @@ async fn run() -> Result<()> { let dstack_config: Config = serde_json::from_reader(config_file).expect("config file is valid"); - let selected_hash = load_and_select_hash(&args, &dstack_config)?; + let image_hash: MpcDockerImageHash = { + match dstack_config.launcher_config.mpc_hash_override.clone() { + Some(override_hash) => override_hash, + None => { + let approved_hashes_file = std::fs::OpenOptions::new() + .read(true) + .open(IMAGE_DIGEST_FILE) + .map_err(|source| LauncherError::FileRead { + path: IMAGE_DIGEST_FILE.to_string(), + source, + })?; + + let approved_hashes_on_disk: ApprovedHashesFile = + serde_json::from_reader(approved_hashes_file).map_err(|source| { + LauncherError::JsonParse { + path: IMAGE_DIGEST_FILE.to_string(), + source, + } + })?; - if !validate_image_hash(&selected_hash, &dstack_config, &dstack_config).await? 
{ - return Err(LauncherError::ImageValidationFailed(selected_hash)); - } + approved_hashes_on_disk.newest_approved_hash().clone() + } + } + }; - tracing::info!("MPC image hash validated successfully: {selected_hash}"); + let () = check_image_digest_exists_on_docker_hub(image_hash)?; - extend_rtmr3(args.platform, &selected_hash).await?; + if args.platform == Platform::Tee { + extend_rtmr3(&image_hash).await?; + } launch_mpc_container( args.platform, @@ -163,71 +177,6 @@ fn is_safe_port_mapping(mapping: &str) -> bool { !INVALID_HOST_ENTRY_PATTERN.is_match(mapping) } -// --------------------------------------------------------------------------- -// Hash selection -// --------------------------------------------------------------------------- - -fn load_and_select_hash(args: &CliArgs, dstack_config: &Config) -> Result { - let approved_hashes = if std::path::Path::new(IMAGE_DIGEST_FILE).is_file() { - let content = std::fs::read_to_string(IMAGE_DIGEST_FILE).map_err(|source| { - LauncherError::FileRead { - path: IMAGE_DIGEST_FILE.to_string(), - source, - } - })?; - let data: ApprovxedHashesFile = - serde_json::from_str(&content).map_err(|source| LauncherError::JsonParse { - path: IMAGE_DIGEST_FILE.to_string(), - source, - })?; - if data.approved_hashes.is_empty() { - return Err(LauncherError::InvalidApprovedHashes { - path: IMAGE_DIGEST_FILE.to_string(), - }); - } - data.approved_hashes - } else { - let fallback_image = (&args) - .default_image_digest - .clone() - .ok_or_else(|| LauncherError::MissingEnvVar("DEFAULT_IMAGE_DIGEST".to_string()))?; - - tracing::info!( - ?IMAGE_DIGEST_FILE, - ?fallback_image, - "image digest file missing, will use fall back image" - ); - - vec![fallback_image] - }; - - tracing::info!("Approved MPC image hashes (newest → oldest):"); - for h in &approved_hashes { - // TODO: Fix this output... 
- // tracing::info!(" - {h}"); - } - - // Optional override - // if let Some(override_hash) = dstack_config.get(ENV_VAR_MPC_HASH_OVERRIDE) { - // if !is_valid_sha256_digest(override_hash) { - // return Err(LauncherError::InvalidHashOverride(override_hash.clone())); - // } - // if !approved_hashes.contains(override_hash) { - // tracing::error!("MPC_HASH_OVERRIDE={override_hash} does NOT match any approved hash!"); - // return Err(LauncherError::InvalidHashOverride(override_hash.clone())); - // } - // tracing::info!("MPC_HASH_OVERRIDE provided → selecting: {override_hash}"); - // return Ok(override_hash.clone()); - // } - - // // No override → select newest (first in list) - // let selected = approved_hashes[0].clone(); - // tracing::info!("Selected MPC hash (newest allowed): {selected}"); - // Ok(selected) - - todo!() -} - // --------------------------------------------------------------------------- // Docker registry communication // --------------------------------------------------------------------------- @@ -381,24 +330,22 @@ async fn get_manifest_digest(config: &LauncherConfig) -> Result { Err(LauncherError::ImageHashNotFoundAmongTags) } -async fn validate_image_hash( - image_digest: &str, - dstack_config: &Config, - config: &Config, -) -> Result { - tracing::info!("Validating MPC hash: {image_digest}"); - - let manifest_digest = get_manifest_digest(&config.launcher_config).await?; - let name_and_digest = format!("{}@{manifest_digest}", config.launcher_config.image_name); +fn check_image_digest_exists_on_docker_hub( + image_hash: MpcDockerImageHash, +) -> Result<(), ImageDigestValidationFailed> { + let image_hash_name = format!("sha256:{}", image_hash.as_hex()); // Pull let pull = Command::new("docker") - .args(["pull", &name_and_digest]) + .args(["pull", &image_hash_name]) .output() - .map_err(|e| LauncherError::DockerPullFailed(e.to_string()))?; - if !pull.status.success() { - tracing::error!("docker pull failed for {image_digest}"); - return Ok(false); + 
.map_err(|e| ImageDigestValidationFailed::DockerPullFailed(e.to_string()))?; + + let pull_failed = !pull.status.success(); + if pull_failed { + return Err(ImageDigestValidationFailed::DockerPullFailed( + "docker pull terminated with unsuccessful status".to_string(), + )); } // Verify digest @@ -408,23 +355,29 @@ async fn validate_image_hash( "inspect", "--format", "{{index .ID}}", - &name_and_digest, + &image_hash_name, ]) .output() - .map_err(|e| LauncherError::DockerInspectFailed(e.to_string()))?; - if !inspect.status.success() { - tracing::error!("docker inspect failed for {image_digest}"); - return Ok(false); + .map_err(|e| ImageDigestValidationFailed::DockerInspectFailed(e.to_string()))?; + + let docker_inspect_failed = !inspect.status.success(); + if docker_inspect_failed { + return Err(ImageDigestValidationFailed::DockerPullFailed( + "docker inspect terminated with unsuccessful status".to_string(), + )); } let pulled_digest = String::from_utf8_lossy(&inspect.stdout).trim().to_string(); - if pulled_digest != image_digest { - tracing::error!("digest mismatch: {pulled_digest} != {image_digest}"); - return Ok(false); + if pulled_digest != image_hash_name { + return Err( + ImageDigestValidationFailed::PulledImageHasMismatchedDigest { + pulled_digest, + expected_digest: image_hash_name, + }, + ); } - tracing::info!("MPC hash {image_digest} validated successfully."); - Ok(true) + Ok(()) } // --------------------------------------------------------------------------- @@ -565,35 +518,19 @@ fn launch_mpc_container( Ok(()) } -// TODO: We should kill this check. 
It's called with the constant `DSTACK_UNIX_SOCKET` -fn is_unix_socket(path: &str) -> bool { - std::fs::metadata(path).is_ok_and(|meta| meta.file_type().is_socket()) -} - -async fn extend_rtmr3(platform: Platform, image_hash: MpcDockerImageHash) -> Result<()> { - if platform == Platform::NonTee { - tracing::info!("PLATFORM=NONTEE → skipping RTMR3 extension step."); - return Ok(()); - } - - if !is_unix_socket(DSTACK_UNIX_SOCKET) { - return Err(LauncherError::DstackSocketMissing( - DSTACK_UNIX_SOCKET.to_string(), - )); - } - +async fn extend_rtmr3(image_hash: &MpcDockerImageHash) -> Result<(), LauncherError> { tracing::info!(?image_hash, "extending RTMR3"); - let client = dstack_sdk::dstack_client::DstackClient::new(Some(DSTACK_UNIX_SOCKET)); + let dstack_cient = dstack_sdk::dstack_client::DstackClient::new(Some(DSTACK_UNIX_SOCKET)); // GetQuote first - client + dstack_cient .get_quote(vec![]) .await .map_err(|e| LauncherError::DstackGetQuoteFailed(e.to_string()))?; // EmitEvent with the image digest - client + dstack_cient .emit_event( "mpc-image-digest".to_string(), image_hash.as_hex().into_bytes(), diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index 6339920bd..20dd7c33a 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -60,7 +60,7 @@ pub struct LauncherConfig { /// Maximum registry RPC attempts (from `RPC_MAX_ATTEMPTS`). pub rpc_max_attempts: u32, /// Optional hash override that bypasses registry lookup (from `MPC_HASH_OVERRIDE`). 
- pub mpc_hash_override: Option, + pub mpc_hash_override: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] From c1ca0b2ac7fac3df74b60fe5d175a0c8868d06ed Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Tue, 3 Mar 2026 23:28:57 +0100 Subject: [PATCH 11/82] wip --- crates/tee-launcher/src/main.rs | 44 ++++++++++++--------------------- flake.nix | 29 +++++++++++----------- 2 files changed, 31 insertions(+), 42 deletions(-) diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index aed65da78..4c13a588f 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -75,13 +75,24 @@ async fn run() -> Result<(), LauncherError> { let () = check_image_digest_exists_on_docker_hub(image_hash)?; - if args.platform == Platform::Tee { - extend_rtmr3(&image_hash).await?; + let should_extend_rtmr_3 = args.platform == Platform::Tee; + + if should_extend_rtmr_3 { + let dstack_cient = dstack_sdk::dstack_client::DstackClient::new(Some(DSTACK_UNIX_SOCKET)); + + // EmitEvent with the image digest + dstack_cient + .emit_event( + "mpc-image-digest".to_string(), + image_hash.as_hex().into_bytes(), + ) + .await + .map_err(|e| LauncherError::DstackEmitEventFailed(e.to_string()))?; } launch_mpc_container( args.platform, - &selected_hash, + &image_hash, &dstack_config.mpc_passthrough_env, )?; @@ -410,7 +421,7 @@ fn build_docker_cmd( platform: Platform, mpc_config: &MpcBinaryConfig, image_digest: &MpcDockerImageHash, -) -> Result> { +) -> Result, LauncherError> { let mut cmd: Vec = vec!["docker".into(), "run".into()]; // Required environment variables @@ -496,7 +507,7 @@ fn launch_mpc_container( platform: Platform, valid_hash: &MpcDockerImageHash, mpc_config: &MpcBinaryConfig, -) -> Result<()> { +) -> Result<(), LauncherError> { tracing::info!( "Launching MPC node with validated hash: {}", valid_hash.as_hex() @@ -518,29 +529,6 @@ fn launch_mpc_container( Ok(()) } -async fn extend_rtmr3(image_hash: &MpcDockerImageHash) -> Result<(), 
LauncherError> { - tracing::info!(?image_hash, "extending RTMR3"); - - let dstack_cient = dstack_sdk::dstack_client::DstackClient::new(Some(DSTACK_UNIX_SOCKET)); - - // GetQuote first - dstack_cient - .get_quote(vec![]) - .await - .map_err(|e| LauncherError::DstackGetQuoteFailed(e.to_string()))?; - - // EmitEvent with the image digest - dstack_cient - .emit_event( - "mpc-image-digest".to_string(), - image_hash.as_hex().into_bytes(), - ) - .await - .map_err(|e| LauncherError::DstackEmitEventFailed(e.to_string()))?; - - Ok(()) -} - // #[cfg(test)] // mod tests { // use super::*; diff --git a/flake.nix b/flake.nix index 4ddefe07b..6a3c51ef0 100644 --- a/flake.nix +++ b/flake.nix @@ -103,20 +103,9 @@ }; envDarwin = lib.optionalAttrs stdenv.isDarwin { - # Force build scripts to use Nix wrappers (not host clang) - CC = "${stdenv.cc}/bin/cc"; - CXX = "${stdenv.cc}/bin/c++"; - - # cc crate looks for these first on macOS - CC_aarch64_apple_darwin = "${stdenv.cc}/bin/cc"; - CXX_aarch64_apple_darwin = "${stdenv.cc}/bin/c++"; - - AR = "${stdenv.cc.bintools}/bin/ar"; - RANLIB = "${stdenv.cc.bintools}/bin/ranlib"; - - # Cargo resolves its linker separately from CC — force it to use the - # SDK-aware wrapper so -lSystem (and other SDK libs) are found. - CARGO_TARGET_AARCH64_APPLE_DARWIN_LINKER = "${stdenv.cc}/bin/cc"; + # Cargo resolves its linker separately from CC — force it to use + # the LLVM 19 wrapper so -lSystem (and other SDK libs) are found. + CARGO_TARGET_AARCH64_APPLE_DARWIN_LINKER = "${llvmPkgs.clang}/bin/clang"; }; dockerTools = with pkgs; [ @@ -206,6 +195,18 @@ hardeningDisable = hardening; shellHook = '' + ${lib.optionalString stdenv.isDarwin '' + # Override CC/CXX to use LLVM 19 clang, matching Rust 1.86's + # bundled LLVM version. These must live in shellHook because + # rust-overlay propagates the default stdenv clang (now 21) + # via setup hooks that run after env vars are set. 
+ export CC="${llvmPkgs.clang}/bin/clang" + export CXX="${llvmPkgs.clang}/bin/clang++" + export CC_aarch64_apple_darwin="${llvmPkgs.clang}/bin/clang" + export CXX_aarch64_apple_darwin="${llvmPkgs.clang}/bin/clang++" + export AR="${llvmPkgs.llvm}/bin/llvm-ar" + export RANLIB="${llvmPkgs.llvm}/bin/llvm-ranlib" + ''} printf "\e[32m🦀 NEAR Dev Shell Active\e[0m\n" ''; }; From d2ddedc1a4eea8bb0452369d37ac2e8126910cd9 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Wed, 4 Mar 2026 08:41:16 +0100 Subject: [PATCH 12/82] use const MPC_IMAGE_HASH_EVENT --- Cargo.lock | 1 + crates/launcher-interface/src/lib.rs | 2 ++ crates/mpc-attestation/Cargo.toml | 1 + crates/mpc-attestation/src/attestation.rs | 3 +-- crates/tee-launcher/src/main.rs | 11 ++++++----- 5 files changed, 11 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dc3bae887..d6949ffc4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5382,6 +5382,7 @@ dependencies = [ "derive_more 2.1.1", "hex", "include-measurements", + "launcher-interface", "mpc-primitives", "serde", "serde_json", diff --git a/crates/launcher-interface/src/lib.rs b/crates/launcher-interface/src/lib.rs index 6e952ca4c..a4dd7049d 100644 --- a/crates/launcher-interface/src/lib.rs +++ b/crates/launcher-interface/src/lib.rs @@ -1,3 +1,5 @@ +pub const MPC_IMAGE_HASH_EVENT: &str = "mpc-image-digest"; + pub mod types { use mpc_primitives::hash::MpcDockerImageHash; use serde::{Deserialize, Serialize}; diff --git a/crates/mpc-attestation/Cargo.toml b/crates/mpc-attestation/Cargo.toml index f96b1fd79..b16ef09fc 100644 --- a/crates/mpc-attestation/Cargo.toml +++ b/crates/mpc-attestation/Cargo.toml @@ -15,6 +15,7 @@ serde = { workspace = true } serde_json = { workspace = true } sha2 = { workspace = true } sha3 = { workspace = true } +launcher-interface = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } diff --git a/crates/mpc-attestation/src/attestation.rs b/crates/mpc-attestation/src/attestation.rs index 
ea2ec0641..da7ea2920 100644 --- a/crates/mpc-attestation/src/attestation.rs +++ b/crates/mpc-attestation/src/attestation.rs @@ -13,14 +13,13 @@ pub use attestation::attestation::{DstackAttestation, VerificationError}; use mpc_primitives::hash::{LauncherDockerComposeHash, MpcDockerImageHash}; use borsh::{BorshDeserialize, BorshSerialize}; +use launcher_interface::MPC_IMAGE_HASH_EVENT; use serde::{Deserialize, Serialize}; use sha2::{Digest as _, Sha256}; use crate::alloc::format; use crate::alloc::string::ToString; -const MPC_IMAGE_HASH_EVENT: &str = "mpc-image-digest"; - // TODO(#1639): extract timestamp from certificate itself pub const DEFAULT_EXPIRATION_DURATION_SECONDS: u64 = 60 * 60 * 24 * 7; // 7 days diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 4c13a588f..ebc1516b1 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -3,6 +3,7 @@ use std::process::Command; use std::sync::LazyLock; use clap::Parser; +use launcher_interface::MPC_IMAGE_HASH_EVENT; use launcher_interface::types::ApprovedHashesFile; use regex::Regex; use std::os::unix::fs::FileTypeExt as _; @@ -73,18 +74,18 @@ async fn run() -> Result<(), LauncherError> { } }; - let () = check_image_digest_exists_on_docker_hub(image_hash)?; + let () = check_image_digest_exists_on_docker_hub(image_hash.clone())?; let should_extend_rtmr_3 = args.platform == Platform::Tee; if should_extend_rtmr_3 { - let dstack_cient = dstack_sdk::dstack_client::DstackClient::new(Some(DSTACK_UNIX_SOCKET)); + let dstack_client = dstack_sdk::dstack_client::DstackClient::new(Some(DSTACK_UNIX_SOCKET)); // EmitEvent with the image digest - dstack_cient + dstack_client .emit_event( - "mpc-image-digest".to_string(), - image_hash.as_hex().into_bytes(), + MPC_IMAGE_HASH_EVENT.to_string(), + image_hash.as_hex().as_bytes().to_vec(), ) .await .map_err(|e| LauncherError::DstackEmitEventFailed(e.to_string()))?; From 391be027b201b3ea46c552adc0a7ee38d0db61c4 Mon Sep 17 00:00:00 
2001 From: Daniel Sharifi Date: Wed, 4 Mar 2026 09:45:59 +0100 Subject: [PATCH 13/82] add deref to bounded vec --- crates/bounded-collections/src/bounded_vec.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/crates/bounded-collections/src/bounded_vec.rs b/crates/bounded-collections/src/bounded_vec.rs index 1fc79ebf8..965200171 100644 --- a/crates/bounded-collections/src/bounded_vec.rs +++ b/crates/bounded-collections/src/bounded_vec.rs @@ -1,5 +1,6 @@ use std::{ convert::{TryFrom, TryInto}, + ops::Deref, slice::{Iter, IterMut}, vec, }; @@ -20,6 +21,14 @@ pub struct BoundedVec Deref for BoundedVec { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.inner + } +} + /// BoundedVec errors #[derive(Error, PartialEq, Eq, Debug, Clone)] pub enum BoundedVecOutOfBounds { From 1253c03562b8dfa5ed120fe285e01e8f867bf082 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Wed, 4 Mar 2026 11:16:28 +0100 Subject: [PATCH 14/82] compiles --- Cargo.lock | 2 + crates/tee-launcher/Cargo.toml | 2 + crates/tee-launcher/src/main.rs | 178 ++++++++++--------------------- crates/tee-launcher/src/types.rs | 127 +++++++++++++++++++--- tee_launcher/launcher.py | 2 +- 5 files changed, 174 insertions(+), 137 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d6949ffc4..f3283e62c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10558,6 +10558,7 @@ dependencies = [ "clap", "dstack-sdk", "hex", + "itertools 0.14.0", "launcher-interface", "mpc-primitives", "regex", @@ -10569,6 +10570,7 @@ dependencies = [ "tokio", "tracing", "tracing-subscriber", + "url", ] [[package]] diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index f983a8ad1..0d4fb45ba 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -18,6 +18,7 @@ dstack-sdk = { workspace = true } hex = { workspace = true } mpc-primitives = { workspace = true } launcher-interface = { workspace = true } +itertools = { workspace = true } regex = { workspace = true } 
reqwest = { workspace = true } serde = { workspace = true } @@ -26,6 +27,7 @@ thiserror = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } +url = { workspace = true, features = ["serde"] } [dev-dependencies] assert_matches = { workspace = true } diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index ebc1516b1..0d03e4ec5 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -1,12 +1,9 @@ -use std::collections::{BTreeMap, VecDeque}; +use std::collections::VecDeque; use std::process::Command; -use std::sync::LazyLock; use clap::Parser; use launcher_interface::MPC_IMAGE_HASH_EVENT; use launcher_interface::types::ApprovedHashesFile; -use regex::Regex; -use std::os::unix::fs::FileTypeExt as _; // Reuse the workspace hash type for type-safe image hash handling. use mpc_primitives::hash::MpcDockerImageHash; @@ -49,18 +46,26 @@ async fn run() -> Result<(), LauncherError> { let dstack_config: Config = serde_json::from_reader(config_file).expect("config file is valid"); - let image_hash: MpcDockerImageHash = { - match dstack_config.launcher_config.mpc_hash_override.clone() { - Some(override_hash) => override_hash, - None => { - let approved_hashes_file = std::fs::OpenOptions::new() - .read(true) - .open(IMAGE_DIGEST_FILE) - .map_err(|source| LauncherError::FileRead { - path: IMAGE_DIGEST_FILE.to_string(), - source, - })?; + let approved_hashes_file = std::fs::OpenOptions::new() + .read(true) + .open(IMAGE_DIGEST_FILE) + .map_err(|source| LauncherError::FileRead { + path: IMAGE_DIGEST_FILE.to_string(), + source, + }); + let image_hash: MpcDockerImageHash = { + match approved_hashes_file { + Err(err) => { + let default_image_digest = args.default_image_digest; + tracing::warn!( + ?err, + ?default_image_digest, + "approved hashes file does not exist on disk, falling back to default digest" + ); + default_image_digest + } + 
Ok(approved_hashes_file) => { let approved_hashes_on_disk: ApprovedHashesFile = serde_json::from_reader(approved_hashes_file).map_err(|source| { LauncherError::JsonParse { @@ -69,7 +74,21 @@ async fn run() -> Result<(), LauncherError> { } })?; - approved_hashes_on_disk.newest_approved_hash().clone() + if let Some(override_image) = dstack_config.launcher_config.mpc_hash_override { + tracing::info!(?override_image, "override mpc image hash provided"); + + let override_image_is_allowed = approved_hashes_on_disk + .approved_hashes + .contains(&override_image); + + if !override_image_is_allowed { + panic!("TODO: panic if override image is not allowed?"); + } + + override_image + } else { + approved_hashes_on_disk.newest_approved_hash().clone() + } } } }; @@ -85,6 +104,7 @@ async fn run() -> Result<(), LauncherError> { dstack_client .emit_event( MPC_IMAGE_HASH_EVENT.to_string(), + // TODO: mpc binary has to go back from back hex as well. Just send the raw bytes as payload. image_hash.as_hex().as_bytes().to_vec(), ) .await @@ -95,47 +115,13 @@ async fn run() -> Result<(), LauncherError> { args.platform, &image_hash, &dstack_config.mpc_passthrough_env, + &dstack_config.docker_command_config, )?; Ok(()) } -// --------------------------------------------------------------------------- -// Constants — matching Python launcher exactly -// --------------------------------------------------------------------------- - -// Regex patterns (compiled once) -static MPC_ENV_KEY_RE: LazyLock = - LazyLock::new(|| Regex::new(r"^MPC_[A-Z0-9_]{1,64}$").unwrap()); -static HOST_ENTRY_RE: LazyLock = - LazyLock::new(|| Regex::new(r"^[a-zA-Z0-9\-\.]+:\d{1,3}(\.\d{1,3}){3}$").unwrap()); -static PORT_MAPPING_RE: LazyLock = - LazyLock::new(|| Regex::new(r"^(\d{1,5}):(\d{1,5})$").unwrap()); -static INVALID_HOST_ENTRY_PATTERN: LazyLock = - LazyLock::new(|| Regex::new(r"^[;&|`$\\<>\-]|^--").unwrap()); - -// Denied env keys — never pass these to the container -const DENIED_CONTAINER_ENV_KEYS: &[&str] = 
&["MPC_P2P_PRIVATE_KEY", "MPC_ACCOUNT_SK"]; - -// Allowed non-MPC env vars (backward compatibility) -const ALLOWED_MPC_ENV_VARS: &[&str] = &[ - "MPC_ACCOUNT_ID", - "MPC_LOCAL_ADDRESS", - "MPC_SECRET_STORE_KEY", - "MPC_CONTRACT_ID", - "MPC_ENV", - "MPC_HOME_DIR", - "NEAR_BOOT_NODES", - "RUST_BACKTRACE", - "RUST_LOG", - "MPC_RESPONDER_ID", - "MPC_BACKUP_ENCRYPTION_KEY_HEX", -]; - -// --------------------------------------------------------------------------- -// Validation functions — security policy for env passthrough -// --------------------------------------------------------------------------- - +// TODO: this needs to be checked. fn has_control_chars(s: &str) -> bool { let control_chars = ['\n', '\r', '\0']; @@ -150,45 +136,6 @@ fn has_control_chars(s: &str) -> bool { false } -fn is_valid_ip(ip: &str) -> bool { - ip.parse::().is_ok() -} - -fn is_valid_host_entry(entry: &str) -> bool { - if !HOST_ENTRY_RE.is_match(entry) { - return false; - } - if let Some((_host, ip)) = entry.rsplit_once(':') { - is_valid_ip(ip) - } else { - false - } -} - -fn is_valid_port_mapping(entry: &str) -> bool { - if let Some(caps) = PORT_MAPPING_RE.captures(entry) { - let host_port: u32 = caps[1].parse().unwrap_or(0); - let container_port: u32 = caps[2].parse().unwrap_or(0); - host_port > 0 && host_port <= 65535 && container_port > 0 && container_port <= 65535 - } else { - false - } -} - -fn is_safe_host_entry(entry: &str) -> bool { - if INVALID_HOST_ENTRY_PATTERN.is_match(entry) { - return false; - } - if entry.contains("LD_PRELOAD") { - return false; - } - true -} - -fn is_safe_port_mapping(mapping: &str) -> bool { - !INVALID_HOST_ENTRY_PATTERN.is_match(mapping) -} - // --------------------------------------------------------------------------- // Docker registry communication // --------------------------------------------------------------------------- @@ -199,7 +146,7 @@ async fn request_until_success( url: &str, headers: &[(String, String)], config: &LauncherConfig, -) -> Result 
{ +) -> Result { let mut interval = config.rpc_request_interval_secs as f64; for attempt in 1..=config.rpc_max_attempts { @@ -244,7 +191,7 @@ async fn request_until_success( }) } -async fn get_manifest_digest(config: &LauncherConfig) -> Result { +async fn get_manifest_digest(config: &LauncherConfig) -> Result { let tags = config.image_tags.clone(); let token_url = format!( @@ -421,6 +368,7 @@ fn remove_existing_container() { fn build_docker_cmd( platform: Platform, mpc_config: &MpcBinaryConfig, + docker_flags: &DockerLaunchFlags, image_digest: &MpcDockerImageHash, ) -> Result, LauncherError> { let mut cmd: Vec = vec!["docker".into(), "run".into()]; @@ -446,36 +394,17 @@ fn build_docker_cmd( ]); } - // Track env passthrough size/caps - let mut passed_env_count: usize = 0; - let mut total_env_bytes: usize = 0; - - // // BTreeMap iteration is already sorted by key (deterministic) - // for (key, value) in mpc_config { - // if key == "EXTRA_HOSTS" { - // for host_entry in value.split(',') { - // let clean = host_entry.trim(); - // if is_safe_host_entry(clean) && is_valid_host_entry(clean) { - // cmd.extend(["--add-host".into(), clean.to_string()]); - // } else { - // tracing::warn!("Ignoring invalid or unsafe EXTRA_HOSTS entry: {clean}"); - // } - // } - // continue; - // } - - // passed_env_count += 1; - // if passed_env_count > MAX_PASSTHROUGH_ENV_VARS { - // return Err(LauncherError::TooManyEnvVars(MAX_PASSTHROUGH_ENV_VARS)); - // } - - // total_env_bytes += key.len() + 1 + value.len(); - // if total_env_bytes > MAX_TOTAL_ENV_BYTES { - // return Err(LauncherError::EnvPayloadTooLarge(MAX_TOTAL_ENV_BYTES)); - // } - - // cmd.extend(["--env".into(), format!("{key}={value}")]); - // } + for (key, value) in mpc_config.env_vars() { + cmd.extend(["--env".into(), format!("{key}={value}")]); + } + + let (host_flag, host_value) = docker_flags.extra_hosts.docker_flag_and_value(); + cmd.extend([host_flag, host_value]); + + let (port_forwarding_flag, port_forwarding_value) = + 
docker_flags.port_mappings.docker_flag_and_value(); + + cmd.extend([port_forwarding_flag, port_forwarding_value]); // Container run configuration cmd.extend([ @@ -508,6 +437,7 @@ fn launch_mpc_container( platform: Platform, valid_hash: &MpcDockerImageHash, mpc_config: &MpcBinaryConfig, + docker_flags: &DockerLaunchFlags, ) -> Result<(), LauncherError> { tracing::info!( "Launching MPC node with validated hash: {}", @@ -515,7 +445,7 @@ fn launch_mpc_container( ); remove_existing_container(); - let docker_cmd = build_docker_cmd(platform, mpc_config, valid_hash)?; + let docker_cmd = build_docker_cmd(platform, mpc_config, docker_flags, valid_hash)?; let status = Command::new(&docker_cmd[0]) .args(&docker_cmd[1..]) diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index 20dd7c33a..daa87ba1c 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -1,3 +1,10 @@ +use std::fmt; +use std::net::{IpAddr, Ipv4Addr}; +use std::num::NonZeroU16; +use std::path::PathBuf; + +use url::Host; + use bounded_collections::NonEmptyVec; use clap::{Parser, ValueEnum}; use mpc_primitives::hash::MpcDockerImageHash; @@ -17,7 +24,7 @@ pub struct CliArgs { /// Fallback image digest when the approved-hashes file is absent #[arg(long, env = "DEFAULT_IMAGE_DIGEST")] - pub default_image_digest: Option, + pub default_image_digest: MpcDockerImageHash, } #[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)] @@ -41,6 +48,7 @@ pub enum Platform { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Config { pub launcher_config: LauncherConfig, + pub docker_command_config: DockerLaunchFlags, /// Remaining env vars forwarded to the MPC container. 
pub mpc_passthrough_env: MpcBinaryConfig, } @@ -66,17 +74,112 @@ pub struct LauncherConfig { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct MpcBinaryConfig { // mpc - mpc_account_id: String, - mpc_local_address: String, - mpc_secret_key_store: String, - mpc_contract_isd: String, - mpc_env: String, - mpc_home_dir: String, - mpc_responder_id: String, - mpc_backup_encryption_key_hex: String, + pub mpc_account_id: String, + pub mpc_local_address: IpAddr, + pub mpc_secret_key_store: String, + pub mpc_contract_isd: String, + pub mpc_env: MpcEnv, + pub mpc_home_dir: PathBuf, + pub mpc_responder_id: String, + pub mpc_backup_encryption_key_hex: String, // near - near_boot_nodes: String, + pub near_boot_nodes: String, // rust - rust_backtrace: String, - rust_log: String, + pub rust_backtrace: String, + pub rust_log: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DockerLaunchFlags { + pub extra_hosts: ExtraHosts, + pub port_mappings: PortMappings, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub(crate) struct ExtraHosts { + hosts: Vec, +} + +impl ExtraHosts { + pub fn docker_flag_and_value(&self) -> (String, String) { + let flag = "--add-host".into(); + let value = self + .hosts + .iter() + .map(|HostEntry { hostname, ip }| format!("{hostname}:{ip}")) + .collect::>() + .join(","); + + (flag, value) + } +} + +/// A `--add-host` entry: `hostname:IPv4`. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct HostEntry { + pub hostname: Host, + pub ip: Ipv4Addr, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub(crate) struct PortMappings { + pub ports: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct PortMapping { + src: NonZeroU16, + dst: NonZeroU16, +} + +impl PortMappings { + pub fn docker_flag_and_value(&self) -> (String, String) { + let flag = "-p".into(); + let value = self + .ports + .iter() + .map(|PortMapping { src, dst }| format!("{src}:{dst}")) + .collect::>() + .join(","); + + (flag, value) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +enum MpcEnv { + Localnet, + Testnet, + Mainnet, +} + +impl fmt::Display for MpcEnv { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + MpcEnv::Localnet => write!(f, "localnet"), + MpcEnv::Testnet => write!(f, "testnet"), + MpcEnv::Mainnet => write!(f, "mainnet"), + } + } +} + +impl MpcBinaryConfig { + pub fn env_vars(&self) -> Vec<(&'static str, String)> { + vec![ + ("MPC_ACCOUNT_ID", self.mpc_account_id.clone()), + ("MPC_LOCAL_ADDRESS", self.mpc_local_address.to_string()), + ("MPC_SECRET_STORE_KEY", self.mpc_secret_key_store.clone()), + ("MPC_CONTRACT_ID", self.mpc_contract_isd.clone()), + ("MPC_ENV", self.mpc_env.to_string()), + ("MPC_HOME_DIR", self.mpc_home_dir.display().to_string()), + ("MPC_RESPONDER_ID", self.mpc_responder_id.clone()), + ( + "MPC_BACKUP_ENCRYPTION_KEY_HEX", + self.mpc_backup_encryption_key_hex.clone(), + ), + ("NEAR_BOOT_NODES", self.near_boot_nodes.clone()), + ("RUST_BACKTRACE", self.rust_backtrace.clone()), + ("RUST_LOG", self.rust_log.clone()), + ] + } } diff --git a/tee_launcher/launcher.py b/tee_launcher/launcher.py index 73ae00cf4..17f049faf 100644 --- a/tee_launcher/launcher.py +++ b/tee_launcher/launcher.py @@ -147,7 +147,7 @@ class Platform(Enum): ALLOWED_MPC_ENV_VARS = { "MPC_ACCOUNT_ID", # ID of the MPC account on the 
network "MPC_LOCAL_ADDRESS", # Local IP address or hostname used by the MPC node - "MPC_SECRET_STORE_KEY", # Key used to encrypt/decrypt secrets + "MPC_SECRET_STORE_KEY", # Key used to encrypt/decrypt secrets // Isn't this deprecated?, "MPC_CONTRACT_ID", # Contract ID associated with the MPC node "MPC_ENV", # Environment (e.g., 'testnet', 'mainnet') "MPC_HOME_DIR", # Home directory for the MPC node From 9fe5b9a72c8a26e94bdbde9026b52e440137f8b2 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Wed, 4 Mar 2026 21:17:36 +0100 Subject: [PATCH 15/82] wip --- Cargo.toml | 6 +- crates/tee-launcher/src/error.rs | 11 +- crates/tee-launcher/src/main.rs | 193 +++++++++++++++---------------- crates/tee-launcher/src/types.rs | 88 ++++++++++++-- tee_launcher/launcher.py | 1 + 5 files changed, 187 insertions(+), 112 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index aa47cba05..6b66d38c5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -107,7 +107,9 @@ either = "1.15.0" elliptic-curve = "0.13.8" ethereum-types = "0.16.0" flume = "0.12.0" -frost-core = { version = "2.2.0", default-features = false, features = ["serde"] } +frost-core = { version = "2.2.0", default-features = false, features = [ + "serde", +] } frost-ed25519 = { version = "2.2.0", default-features = false } frost-secp256k1 = { version = "2.2.0", default-features = false } fs2 = "0.4.3" @@ -188,7 +190,7 @@ tracing-subscriber = { version = "0.3.22", features = [ "json", ] } tracing-test = "0.2.6" -url = "2" +url = "2.5.8" x509-parser = "0.18.1" zeroize = { version = "1.8.2", features = ["zeroize_derive"] } zstd = "0.13.3" diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index 720f89cf0..5d922ddb1 100644 --- a/crates/tee-launcher/src/error.rs +++ b/crates/tee-launcher/src/error.rs @@ -2,6 +2,7 @@ use std::path::PathBuf; use mpc_primitives::hash::MpcDockerImageHash; use thiserror::Error; +use url::Url; #[derive(Error, Debug)] pub enum LauncherError { @@ -30,7 +31,7 @@ pub enum 
LauncherError { RegistryAuthFailed(String), #[error("Failed to get successful response from {url} after {attempts} attempts")] - RegistryRequestFailed { url: String, attempts: u32 }, + RegistryRequestFailed { url: Url, attempts: u32 }, #[error("Digest mismatch: pulled {pulled} != expected {expected}")] DigestMismatch { pulled: String, expected: String }, @@ -39,7 +40,13 @@ pub enum LauncherError { ImageValidationFailed(String), #[error("docker run failed for validated hash")] - DockerRunFailed(MpcDockerImageHash), + DockerRunFailed { + image_hash: MpcDockerImageHash, + inner: std::io::Error, + }, + + #[error("docker run failed for validated hash")] + DockerRunFailedExitStatus { image_hash: MpcDockerImageHash }, #[error("Too many env vars to pass through (>{0})")] TooManyEnvVars(usize), diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 0d03e4ec5..9f4ecf027 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -1,5 +1,5 @@ -use std::collections::VecDeque; use std::process::Command; +use std::{collections::VecDeque, time::Duration}; use clap::Parser; use launcher_interface::MPC_IMAGE_HASH_EVENT; @@ -10,12 +10,17 @@ use mpc_primitives::hash::MpcDockerImageHash; use contants::*; use error::*; +use reqwest::header::{ACCEPT, AUTHORIZATION, HeaderMap, HeaderValue}; use types::*; +use url::Url; mod contants; mod error; mod types; +const DOCKER_AUTH_ACCEPT_HEADER_VALUE: HeaderValue = + HeaderValue::from_static("application/vnd.docker.distribution.manifest.v2+json"); + #[tokio::main] async fn main() { tracing_subscriber::fmt() @@ -140,85 +145,33 @@ fn has_control_chars(s: &str) -> bool { // Docker registry communication // --------------------------------------------------------------------------- -// TODO: Use backon -async fn request_until_success( - client: &reqwest::Client, - url: &str, - headers: &[(String, String)], - config: &LauncherConfig, -) -> Result { - let mut interval = 
config.rpc_request_interval_secs as f64; - - for attempt in 1..=config.rpc_max_attempts { - // Sleep before request (matching Python behavior) - tokio::time::sleep(std::time::Duration::from_secs_f64(interval)).await; - interval = (interval.max(1.0) * 1.5).min(60.0); - - let mut req = client.get(url); - for (k, v) in headers { - req = req.header(k.as_str(), v.as_str()); - } - - match req - .timeout(std::time::Duration::from_secs( - config.rpc_request_timeout_secs, - )) - .send() - .await - { - Err(e) => { - tracing::warn!( - "Attempt {attempt}/{}: Failed to fetch {url}. Status: Timeout/Error: {e}", - config.rpc_max_attempts - ); - continue; - } - Ok(resp) if resp.status() != reqwest::StatusCode::OK => { - tracing::warn!( - "Attempt {attempt}/{}: Failed to fetch {url}. Status: {}", - config.rpc_max_attempts, - resp.status() - ); - continue; - } - Ok(resp) => return Ok(resp), - } - } - - Err(LauncherError::RegistryRequestFailed { - url: url.to_string(), - attempts: config.rpc_max_attempts, - }) -} - async fn get_manifest_digest(config: &LauncherConfig) -> Result { let tags = config.image_tags.clone(); + // We need an authorization token to fetch manifests. + // TODO: this still has the registry hard-coded in the url. 
also, if we use a different registry, we need a different auth-endpoint let token_url = format!( "https://auth.docker.io/token?service=registry.docker.io&scope=repository:{}:pull", config.image_name ); - let client = reqwest::Client::new(); - let token_resp = client - .get(&token_url) + let reqwest_client = reqwest::Client::new(); + + let token_request_response = reqwest_client + .get(token_url) .send() .await .map_err(|e| LauncherError::RegistryAuthFailed(e.to_string()))?; - if token_resp.status() != reqwest::StatusCode::OK { - return Err(LauncherError::RegistryAuthFailed(format!( - "status: {}", - token_resp.status() - ))); + + let status = token_request_response.status(); + if !status.is_success() { + todo!("add error case for non success http codes"); } - let token_json: serde_json::Value = token_resp + + let token_response: DockerTokenResponse = token_request_response .json() .await .map_err(|e| LauncherError::RegistryAuthFailed(e.to_string()))?; - let token = token_json["token"] - .as_str() - .ok_or_else(|| LauncherError::RegistryAuthFailed("no token in response".to_string()))? - .to_string(); let mut tags: VecDeque = tags.into_iter().collect(); @@ -232,10 +185,19 @@ async fn get_manifest_digest(config: &LauncherConfig) -> Result { let content_digest = resp .headers() @@ -289,6 +251,54 @@ async fn get_manifest_digest(config: &LauncherConfig) -> Result Result { + let mut interval = config.rpc_request_interval_secs as f64; + + for attempt in 1..=config.rpc_max_attempts { + // Sleep before request (matching Python behavior) + tokio::time::sleep(std::time::Duration::from_secs_f64(interval)).await; + interval = (interval.max(1.0) * 1.5).min(60.0); + + let request_timeout_duration = Duration::from_secs(config.rpc_request_timeout_secs); + let request = client + .get(url.clone()) + .headers(headers.clone()) + .timeout(request_timeout_duration) + .send() + .await; + + match request { + Err(e) => { + tracing::warn!( + "Attempt {attempt}/{}: Failed to fetch {url}. 
Status: Timeout/Error: {e}", + config.rpc_max_attempts + ); + continue; + } + Ok(resp) if resp.status() != reqwest::StatusCode::OK => { + tracing::warn!( + "Attempt {attempt}/{}: Failed to fetch {url}. Status: {}", + config.rpc_max_attempts, + resp.status() + ); + continue; + } + Ok(resp) => return Ok(resp), + } + } + + Err(LauncherError::RegistryRequestFailed { + url, + attempts: config.rpc_max_attempts, + }) +} + fn check_image_digest_exists_on_docker_hub( image_hash: MpcDockerImageHash, ) -> Result<(), ImageDigestValidationFailed> { @@ -339,32 +349,6 @@ fn check_image_digest_exists_on_docker_hub( Ok(()) } -// --------------------------------------------------------------------------- -// Docker command builder -// --------------------------------------------------------------------------- - -fn remove_existing_container() { - let output = Command::new("docker") - .args(["ps", "-a", "--format", "{{.Names}}"]) - .output(); - - match output { - Ok(output) => { - let names = String::from_utf8_lossy(&output.stdout); - - if names.lines().any(|n| n == MPC_CONTAINER_NAME) { - tracing::info!("Removing existing container: {MPC_CONTAINER_NAME}"); - let _ = Command::new("docker") - .args(["rm", "-f", MPC_CONTAINER_NAME]) - .output(); - } - } - Err(error) => { - tracing::warn!("Failed to check/remove container {MPC_CONTAINER_NAME}: {error}"); - } - } -} - fn build_docker_cmd( platform: Platform, mpc_config: &MpcBinaryConfig, @@ -444,16 +428,25 @@ fn launch_mpc_container( valid_hash.as_hex() ); - remove_existing_container(); + // shutdown container if one is already running + let _ = Command::new("docker") + .args(["rm", "-f", MPC_CONTAINER_NAME]) + .output(); + let docker_cmd = build_docker_cmd(platform, mpc_config, docker_flags, valid_hash)?; - let status = Command::new(&docker_cmd[0]) + let run_output = Command::new(&docker_cmd[0]) .args(&docker_cmd[1..]) - .status() - .map_err(|e| LauncherError::DockerRunFailed(valid_hash.clone()))?; - - if !status.success() { - return 
Err(LauncherError::DockerRunFailed(valid_hash.clone())); + .output() + .map_err(|inner| LauncherError::DockerRunFailed { + image_hash: valid_hash.clone(), + inner, + })?; + + if !run_output.status.success() { + return Err(LauncherError::DockerRunFailedExitStatus { + image_hash: valid_hash.clone(), + }); } tracing::info!("MPC launched successfully."); diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index daa87ba1c..e42d47fba 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -74,19 +74,24 @@ pub struct LauncherConfig { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct MpcBinaryConfig { // mpc + // TODO: use near type to not accept any string pub mpc_account_id: String, pub mpc_local_address: IpAddr, + // TODO: think this is no longer needed with node generated keys pub mpc_secret_key_store: String, - pub mpc_contract_isd: String, + // TODO: think this is no longer needed with node generated keys + pub mpc_backup_encryption_key_hex: String, pub mpc_env: MpcEnv, pub mpc_home_dir: PathBuf, + // TODO: use near type to not accept any string + pub mpc_contract_id: String, + // TODO: use near type to not accept any string pub mpc_responder_id: String, - pub mpc_backup_encryption_key_hex: String, // near pub near_boot_nodes: String, // rust - pub rust_backtrace: String, - pub rust_log: String, + pub rust_backtrace: RustBacktrace, + pub rust_log: RustLog, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -147,7 +152,7 @@ impl PortMappings { } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -enum MpcEnv { +pub enum MpcEnv { Localnet, Testnet, Mainnet, @@ -163,13 +168,74 @@ impl fmt::Display for MpcEnv { } } +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum RustBacktrace { + #[serde(rename = "0")] + Disabled, + #[serde(rename = "1")] + Enabled, + #[serde(rename = "short")] + Short, + #[serde(rename = "full")] + Full, +} + +impl fmt::Display for RustBacktrace 
{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + RustBacktrace::Disabled => write!(f, "0"), + RustBacktrace::Enabled => write!(f, "1"), + RustBacktrace::Short => write!(f, "short"), + RustBacktrace::Full => write!(f, "full"), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum RustLogLevel { + Error, + Warn, + Info, + Debug, + Trace, +} + +impl fmt::Display for RustLogLevel { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + RustLogLevel::Error => write!(f, "error"), + RustLogLevel::Warn => write!(f, "warn"), + RustLogLevel::Info => write!(f, "info"), + RustLogLevel::Debug => write!(f, "debug"), + RustLogLevel::Trace => write!(f, "trace"), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(untagged)] +pub enum RustLog { + Level(RustLogLevel), + Filter(String), +} + +impl fmt::Display for RustLog { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + RustLog::Level(level) => level.fmt(f), + RustLog::Filter(filter) => write!(f, "{filter}"), + } + } +} + impl MpcBinaryConfig { pub fn env_vars(&self) -> Vec<(&'static str, String)> { vec![ ("MPC_ACCOUNT_ID", self.mpc_account_id.clone()), ("MPC_LOCAL_ADDRESS", self.mpc_local_address.to_string()), ("MPC_SECRET_STORE_KEY", self.mpc_secret_key_store.clone()), - ("MPC_CONTRACT_ID", self.mpc_contract_isd.clone()), + ("MPC_CONTRACT_ID", self.mpc_contract_id.clone()), ("MPC_ENV", self.mpc_env.to_string()), ("MPC_HOME_DIR", self.mpc_home_dir.display().to_string()), ("MPC_RESPONDER_ID", self.mpc_responder_id.clone()), @@ -178,8 +244,14 @@ impl MpcBinaryConfig { self.mpc_backup_encryption_key_hex.clone(), ), ("NEAR_BOOT_NODES", self.near_boot_nodes.clone()), - ("RUST_BACKTRACE", self.rust_backtrace.clone()), - ("RUST_LOG", self.rust_log.clone()), + ("RUST_BACKTRACE", self.rust_backtrace.to_string()), + ("RUST_LOG", self.rust_log.to_string()), ] 
} } + +/// Partial response https://auth.docker.io/token +#[derive(Debug, Deserialize, Serialize)] +pub struct DockerTokenResponse { + pub token: String, +} diff --git a/tee_launcher/launcher.py b/tee_launcher/launcher.py index 17f049faf..e8beeb9c2 100644 --- a/tee_launcher/launcher.py +++ b/tee_launcher/launcher.py @@ -242,6 +242,7 @@ def is_safe_port_mapping(mapping: str) -> bool: def remove_existing_container(): + # changed in rust, no point checking current container exists. Just send shutdown signal to MPC_CONTAINER_NAME """Stop and remove the MPC container if it exists.""" try: containers = check_output( From 422178a87d58ea6008de2c2291c8c02f7d520d73 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 10:31:08 +0100 Subject: [PATCH 16/82] wip --- crates/tee-launcher/src/main.rs | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 9f4ecf027..de3f16427 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -1,4 +1,5 @@ use std::process::Command; +use std::str::FromStr; use std::{collections::VecDeque, time::Duration}; use clap::Parser; @@ -176,17 +177,12 @@ async fn get_manifest_digest(config: &LauncherConfig) -> Result = tags.into_iter().collect(); while let Some(tag) = tags.pop_front() { - let manifest_url = format!( + let manifest_url: Url = format!( "https://{}/v2/{}/manifests/{tag}", config.registry, config.image_name - ); - let headers = vec![ - ( - "Accept".to_string(), - "application/vnd.docker.distribution.manifest.v2+json".to_string(), - ), - // (AUTHORIZATION,), - ]; + ) + .parse() + .expect("TODO handle error"); let authorization_value: HeaderValue = format!("Bearer {}", token_response.token) .parse() @@ -197,7 +193,14 @@ async fn get_manifest_digest(config: &LauncherConfig) -> Result { let content_digest = resp .headers() From cba77338040da689ba0d1cc27886a31586ac6c10 Mon Sep 17 00:00:00 2001 
From: Daniel Sharifi Date: Thu, 5 Mar 2026 11:27:40 +0100 Subject: [PATCH 17/82] wip --- crates/tee-launcher/src/docker_types.rs | 45 +++++++++++++++++++ crates/tee-launcher/src/main.rs | 59 ++++++++++++------------- crates/tee-launcher/src/types.rs | 6 --- 3 files changed, 74 insertions(+), 36 deletions(-) create mode 100644 crates/tee-launcher/src/docker_types.rs diff --git a/crates/tee-launcher/src/docker_types.rs b/crates/tee-launcher/src/docker_types.rs new file mode 100644 index 000000000..793507163 --- /dev/null +++ b/crates/tee-launcher/src/docker_types.rs @@ -0,0 +1,45 @@ +use serde::{Deserialize, Serialize}; + +/// Partial response https://auth.docker.io/token +#[derive(Debug, Deserialize, Serialize)] +pub struct DockerTokenResponse { + pub token: String, +} + +/// Response from `GET /v2/{name}/manifests/{reference}`. +/// +/// The `mediaType` field determines the variant: +/// - OCI image index → multi-platform manifest with a list of platform entries +/// - Docker V2 / OCI manifest → single-platform manifest with a config digest +#[derive(Debug, Deserialize, Serialize)] +#[serde(tag = "mediaType")] +pub enum ManifestResponse { + /// Multi-platform manifest (OCI image index). + #[serde(rename = "application/vnd.oci.image.index.v1+json")] + ImageIndex { manifests: Vec }, + + /// Single-platform Docker V2 manifest. + #[serde(rename = "application/vnd.docker.distribution.manifest.v2+json")] + DockerV2 { config: ManifestConfig }, + + /// Single-platform OCI manifest. 
+ #[serde(rename = "application/vnd.oci.image.manifest.v1+json")] + OciManifest { config: ManifestConfig }, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ManifestEntry { + pub digest: String, + pub platform: ManifestPlatform, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ManifestPlatform { + pub architecture: String, + pub os: String, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ManifestConfig { + pub digest: String, +} diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index de3f16427..a5e3dd203 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -10,12 +10,14 @@ use launcher_interface::types::ApprovedHashesFile; use mpc_primitives::hash::MpcDockerImageHash; use contants::*; +use docker_types::*; use error::*; use reqwest::header::{ACCEPT, AUTHORIZATION, HeaderMap, HeaderValue}; use types::*; use url::Url; mod contants; +mod docker_types; mod error; mod types; @@ -142,12 +144,12 @@ fn has_control_chars(s: &str) -> bool { false } -// --------------------------------------------------------------------------- -// Docker registry communication -// --------------------------------------------------------------------------- - -async fn get_manifest_digest(config: &LauncherConfig) -> Result { +async fn get_manifest_digest( + config: &LauncherConfig, + expected_image_digest: &MpcDockerImageHash, +) -> Result { let tags = config.image_tags.clone(); + let expected_digest = format!("sha256:{}", expected_image_digest.as_hex()); // We need an authorization token to fetch manifests. // TODO: this still has the registry hard-coded in the url. 
also, if we use a different registry, we need a different auth-endpoint @@ -208,38 +210,35 @@ async fn get_manifest_digest(config: &LauncherConfig) -> Result { + match manifest { + ManifestResponse::ImageIndex { manifests } => { // Multi-platform manifest; scan for amd64/linux - if let Some(manifests) = manifest["manifests"].as_array() { - for m in manifests { - let arch = m["platform"]["architecture"].as_str().unwrap_or(""); - let os = m["platform"]["os"].as_str().unwrap_or(""); - if arch == "amd64" && os == "linux" { - if let Some(digest) = m["digest"].as_str() { - tags.push_back(digest.to_string()); - } - } - } + manifests + .into_iter() + .filter(|manifest| { + manifest.platform.architecture == "amd64" + && manifest.platform.os == "linux" + }) + .for_each(|manifest| tags.push_back(manifest.digest)); + } + ManifestResponse::DockerV2 { config } + | ManifestResponse::OciManifest { config } => { + let incorrect_config_digest = config.digest == expected_digest; + if incorrect_config_digest { + continue; } + + let Some(digest) = content_digest else { + continue; + }; + + return Ok(digest); } - // TODO: - // "application/vnd.docker.distribution.manifest.v2+json" - // | "application/vnd.oci.image.manifest.v1+json" => { - // let config_digest = manifest["config"]["digest"].as_str().unwrap_or(""); - // if config_digest == config. 
{ - // if let Some(digest) = content_digest { - // return Ok(digest); - // } - // } - // } - _ => {} } } Err(e) => { diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index e42d47fba..b38ef7968 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -249,9 +249,3 @@ impl MpcBinaryConfig { ] } } - -/// Partial response https://auth.docker.io/token -#[derive(Debug, Deserialize, Serialize)] -pub struct DockerTokenResponse { - pub token: String, -} From 4b2f887fb5bae79a95a67e1d23e0e81e53ae698a Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 11:31:28 +0100 Subject: [PATCH 18/82] fix bug --- crates/tee-launcher/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index a5e3dd203..a69223ec3 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -228,7 +228,7 @@ async fn get_manifest_digest( } ManifestResponse::DockerV2 { config } | ManifestResponse::OciManifest { config } => { - let incorrect_config_digest = config.digest == expected_digest; + let incorrect_config_digest = config.digest != expected_digest; if incorrect_config_digest { continue; } From 70c6fb1827700ab0db34b761e9a06b5931e2cf0a Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 13:47:21 +0100 Subject: [PATCH 19/82] . 
--- crates/tee-launcher/src/contants.rs | 40 +++++++++++------------ crates/tee-launcher/src/main.rs | 50 +++++++++++++++++------------ 2 files changed, 49 insertions(+), 41 deletions(-) diff --git a/crates/tee-launcher/src/contants.rs b/crates/tee-launcher/src/contants.rs index c3926b2b6..cf5a62405 100644 --- a/crates/tee-launcher/src/contants.rs +++ b/crates/tee-launcher/src/contants.rs @@ -3,28 +3,28 @@ pub(crate) const IMAGE_DIGEST_FILE: &str = "/mnt/shared/image-digest.bin"; pub(crate) const DSTACK_UNIX_SOCKET: &str = "/var/run/dstack.sock"; pub(crate) const DSTACK_USER_CONFIG_FILE: &str = "/tapp/user_config"; -pub(crate) const SHA256_PREFIX: &str = "sha256:"; +// pub(crate) const SHA256_PREFIX: &str = "sha256:"; -// Docker Hub defaults -pub(crate) const DEFAULT_RPC_REQUEST_TIMEOUT_SECS: f64 = 10.0; -pub(crate) const DEFAULT_RPC_REQUEST_INTERVAL_SECS: f64 = 1.0; -pub(crate) const DEFAULT_RPC_MAX_ATTEMPTS: u32 = 20; +// // Docker Hub defaults +// pub(crate) const DEFAULT_RPC_REQUEST_TIMEOUT_SECS: f64 = 10.0; +// pub(crate) const DEFAULT_RPC_REQUEST_INTERVAL_SECS: f64 = 1.0; +// pub(crate) const DEFAULT_RPC_MAX_ATTEMPTS: u32 = 20; -pub(crate) const DEFAULT_MPC_IMAGE_NAME: &str = "nearone/mpc-node"; -pub(crate) const DEFAULT_MPC_REGISTRY: &str = "registry.hub.docker.com"; -pub(crate) const DEFAULT_MPC_IMAGE_TAG: &str = "latest"; +// pub(crate) const DEFAULT_MPC_IMAGE_NAME: &str = "nearone/mpc-node"; +// pub(crate) const DEFAULT_MPC_REGISTRY: &str = "registry.hub.docker.com"; +// pub(crate) const DEFAULT_MPC_IMAGE_TAG: &str = "latest"; -// Env var names -pub(crate) const ENV_VAR_MPC_HASH_OVERRIDE: &str = "MPC_HASH_OVERRIDE"; -pub(crate) const ENV_VAR_RPC_REQUEST_TIMEOUT_SECS: &str = "RPC_REQUEST_TIMEOUT_SECS"; -pub(crate) const ENV_VAR_RPC_REQUEST_INTERVAL_SECS: &str = "RPC_REQUEST_INTERVAL_SECS"; -pub(crate) const ENV_VAR_RPC_MAX_ATTEMPTS: &str = "RPC_MAX_ATTEMPTS"; +// // Env var names +// pub(crate) const ENV_VAR_MPC_HASH_OVERRIDE: &str = "MPC_HASH_OVERRIDE"; 
+// pub(crate) const ENV_VAR_RPC_REQUEST_TIMEOUT_SECS: &str = "RPC_REQUEST_TIMEOUT_SECS"; +// pub(crate) const ENV_VAR_RPC_REQUEST_INTERVAL_SECS: &str = "RPC_REQUEST_INTERVAL_SECS"; +// pub(crate) const ENV_VAR_RPC_MAX_ATTEMPTS: &str = "RPC_MAX_ATTEMPTS"; -pub(crate) const DSTACK_USER_CONFIG_MPC_IMAGE_TAGS: &str = "MPC_IMAGE_TAGS"; -pub(crate) const DSTACK_USER_CONFIG_MPC_IMAGE_NAME: &str = "MPC_IMAGE_NAME"; -pub(crate) const DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY: &str = "MPC_REGISTRY"; +// pub(crate) const DSTACK_USER_CONFIG_MPC_IMAGE_TAGS: &str = "MPC_IMAGE_TAGS"; +// pub(crate) const DSTACK_USER_CONFIG_MPC_IMAGE_NAME: &str = "MPC_IMAGE_NAME"; +// pub(crate) const DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY: &str = "MPC_REGISTRY"; -// Security limits -pub(crate) const MAX_PASSTHROUGH_ENV_VARS: usize = 64; -pub(crate) const MAX_ENV_VALUE_LEN: usize = 1024; -pub(crate) const MAX_TOTAL_ENV_BYTES: usize = 32 * 1024; +// // Security limits +// pub(crate) const MAX_PASSTHROUGH_ENV_VARS: usize = 64; +// pub(crate) const MAX_ENV_VALUE_LEN: usize = 1024; +// pub(crate) const MAX_TOTAL_ENV_BYTES: usize = 32 * 1024; diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index a69223ec3..76e05b0a7 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -24,6 +24,8 @@ mod types; const DOCKER_AUTH_ACCEPT_HEADER_VALUE: HeaderValue = HeaderValue::from_static("application/vnd.docker.distribution.manifest.v2+json"); +const DOCKER_CONTENT_DIGEST_HEADER: &str = "Docker-Content-Digest"; + #[tokio::main] async fn main() { tracing_subscriber::fmt() @@ -82,18 +84,18 @@ async fn run() -> Result<(), LauncherError> { } })?; - if let Some(override_image) = dstack_config.launcher_config.mpc_hash_override { + if let Some(override_image) = &dstack_config.launcher_config.mpc_hash_override { tracing::info!(?override_image, "override mpc image hash provided"); let override_image_is_allowed = approved_hashes_on_disk .approved_hashes - 
.contains(&override_image); + .contains(override_image); if !override_image_is_allowed { panic!("TODO: panic if override image is not allowed?"); } - override_image + override_image.clone() } else { approved_hashes_on_disk.newest_approved_hash().clone() } @@ -101,7 +103,7 @@ async fn run() -> Result<(), LauncherError> { } }; - let () = check_image_digest_exists_on_docker_hub(image_hash.clone())?; + let () = validate_image_hash(&dstack_config.launcher_config, image_hash.clone()).await?; let should_extend_rtmr_3 = args.platform == Platform::Tee; @@ -148,7 +150,7 @@ async fn get_manifest_digest( config: &LauncherConfig, expected_image_digest: &MpcDockerImageHash, ) -> Result { - let tags = config.image_tags.clone(); + let mut tags: VecDeque = config.image_tags.iter().cloned().collect(); let expected_digest = format!("sha256:{}", expected_image_digest.as_hex()); // We need an authorization token to fetch manifests. @@ -176,8 +178,6 @@ async fn get_manifest_digest( .await .map_err(|e| LauncherError::RegistryAuthFailed(e.to_string()))?; - let mut tags: VecDeque = tags.into_iter().collect(); - while let Some(tag) = tags.pop_front() { let manifest_url: Url = format!( "https://{}/v2/{}/manifests/{tag}", @@ -204,12 +204,7 @@ async fn get_manifest_digest( .await { Ok(resp) => { - let content_digest = resp - .headers() - .get("Docker-Content-Digest") - .and_then(|v| v.to_str().ok()) - .map(|s| s.to_string()); - + let response_headers = resp.headers().clone(); let manifest: ManifestResponse = resp .json() .await @@ -233,11 +228,15 @@ async fn get_manifest_digest( continue; } - let Some(digest) = content_digest else { + let Some(content_digest) = response_headers + .get(DOCKER_CONTENT_DIGEST_HEADER) + .and_then(|v| v.to_str().ok()) + .map(|s| s.to_string()) + else { continue; }; - return Ok(digest); + return Ok(content_digest); } } } @@ -301,14 +300,22 @@ async fn request_until_success( }) } -fn check_image_digest_exists_on_docker_hub( +/// Returns if the given image digest is 
valid (pull + manifest + digest match). +/// Does NOT extend RTMR3 and does NOT run the container. +async fn validate_image_hash( + launcher_config: &LauncherConfig, image_hash: MpcDockerImageHash, ) -> Result<(), ImageDigestValidationFailed> { - let image_hash_name = format!("sha256:{}", image_hash.as_hex()); + let manifest_digest = get_manifest_digest(launcher_config, &image_hash) + .await + .expect("TODO: handle error"); + let image_name = &launcher_config.image_name; + + let name_and_digest = format!("{image_name}@{manifest_digest}"); // Pull let pull = Command::new("docker") - .args(["pull", &image_hash_name]) + .args(["pull", &name_and_digest]) .output() .map_err(|e| ImageDigestValidationFailed::DockerPullFailed(e.to_string()))?; @@ -326,7 +333,7 @@ fn check_image_digest_exists_on_docker_hub( "inspect", "--format", "{{index .ID}}", - &image_hash_name, + &name_and_digest, ]) .output() .map_err(|e| ImageDigestValidationFailed::DockerInspectFailed(e.to_string()))?; @@ -339,11 +346,12 @@ fn check_image_digest_exists_on_docker_hub( } let pulled_digest = String::from_utf8_lossy(&inspect.stdout).trim().to_string(); - if pulled_digest != image_hash_name { + let image_hash_string = image_hash.as_hex(); + if pulled_digest != image_hash_string { return Err( ImageDigestValidationFailed::PulledImageHasMismatchedDigest { pulled_digest, - expected_digest: image_hash_name, + expected_digest: image_hash_string, }, ); } From 1b9bc0863018b5fcd18f9ed1e1821dfcc39d0446 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 13:58:03 +0100 Subject: [PATCH 20/82] fix sha256 prefix --- crates/primitives/src/hash.rs | 8 ++++++++ crates/tee-launcher/src/main.rs | 4 ++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/crates/primitives/src/hash.rs b/crates/primitives/src/hash.rs index 4f7c6ba87..7a65ddee0 100644 --- a/crates/primitives/src/hash.rs +++ b/crates/primitives/src/hash.rs @@ -55,6 +55,14 @@ impl Hash32 { } } +impl MpcDockerImageHash { + /// Converts 
the hash to a hexadecimal string representation with a `sha256:` prefix + pub fn as_hex_sha256(&self) -> String { + let hex_encoding = self.as_hex(); + format!("sha256:{hex_encoding}") + } +} + #[derive(Error, Debug)] pub enum Hash32ParseError { #[error("not a valid hex string")] diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 76e05b0a7..07296037e 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -346,7 +346,7 @@ async fn validate_image_hash( } let pulled_digest = String::from_utf8_lossy(&inspect.stdout).trim().to_string(); - let image_hash_string = image_hash.as_hex(); + let image_hash_string = image_hash.as_hex_sha256(); if pulled_digest != image_hash_string { return Err( ImageDigestValidationFailed::PulledImageHasMismatchedDigest { @@ -413,7 +413,7 @@ fn build_docker_cmd( "--name".into(), MPC_CONTAINER_NAME.into(), "--detach".into(), - image_digest.as_hex(), + image_digest.as_hex_sha256(), ]); tracing::info!("docker cmd {}", cmd.join(" ")); From 9b368966495441a5ac99497530301feb27079014 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 14:21:10 +0100 Subject: [PATCH 21/82] fix docker args --- crates/tee-launcher/src/main.rs | 9 ++------- crates/tee-launcher/src/types.rs | 30 ++++++++++++------------------ 2 files changed, 14 insertions(+), 25 deletions(-) diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 07296037e..84738e085 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -392,13 +392,8 @@ fn build_docker_cmd( cmd.extend(["--env".into(), format!("{key}={value}")]); } - let (host_flag, host_value) = docker_flags.extra_hosts.docker_flag_and_value(); - cmd.extend([host_flag, host_value]); - - let (port_forwarding_flag, port_forwarding_value) = - docker_flags.port_mappings.docker_flag_and_value(); - - cmd.extend([port_forwarding_flag, port_forwarding_value]); + 
cmd.extend(docker_flags.extra_hosts.docker_args()); + cmd.extend(docker_flags.port_mappings.docker_args()); // Container run configuration cmd.extend([ diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index b38ef7968..8fb62a0c6 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -106,16 +106,14 @@ pub(crate) struct ExtraHosts { } impl ExtraHosts { - pub fn docker_flag_and_value(&self) -> (String, String) { - let flag = "--add-host".into(); - let value = self - .hosts + /// Returns `["--add-host", "h1:ip1", "--add-host", "h2:ip2", ...]`. + pub fn docker_args(&self) -> Vec { + self.hosts .iter() - .map(|HostEntry { hostname, ip }| format!("{hostname}:{ip}")) - .collect::>() - .join(","); - - (flag, value) + .flat_map(|HostEntry { hostname, ip }| { + ["--add-host".into(), format!("{hostname}:{ip}")] + }) + .collect() } } @@ -138,16 +136,12 @@ pub struct PortMapping { } impl PortMappings { - pub fn docker_flag_and_value(&self) -> (String, String) { - let flag = "-p".into(); - let value = self - .ports + /// Returns `["-p", "src1:dst1", "-p", "src2:dst2", ...]`. 
+ pub fn docker_args(&self) -> Vec { + self.ports .iter() - .map(|PortMapping { src, dst }| format!("{src}:{dst}")) - .collect::>() - .join(","); - - (flag, value) + .flat_map(|PortMapping { src, dst }| ["-p".into(), format!("{src}:{dst}")]) + .collect() } } From 8a7c6767844ba5dbd1413e714979ad992e671974 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 14:27:24 +0100 Subject: [PATCH 22/82] add some json examples of new config --- deployment/localnet/tee/sam.json | 36 +++++++++++++++++++++++++++++++ deployment/testnet/frodo.json | 37 ++++++++++++++++++++++++++++++++ deployment/testnet/sam.json | 37 ++++++++++++++++++++++++++++++++ 3 files changed, 110 insertions(+) create mode 100644 deployment/localnet/tee/sam.json create mode 100644 deployment/testnet/frodo.json create mode 100644 deployment/testnet/sam.json diff --git a/deployment/localnet/tee/sam.json b/deployment/localnet/tee/sam.json new file mode 100644 index 000000000..a2ab8f528 --- /dev/null +++ b/deployment/localnet/tee/sam.json @@ -0,0 +1,36 @@ +{ + "launcher_config": { + "image_tags": ["main-260e88b"], + "image_name": "nearone/mpc-node", + "registry": "registry.hub.docker.com", + "rpc_request_timeout_secs": 10, + "rpc_request_interval_secs": 1, + "rpc_max_attempts": 20, + "mpc_hash_override": null + }, + "docker_command_config": { + "extra_hosts": { + "hosts": [] + }, + "port_mappings": { + "ports": [ + { "src": 8080, "dst": 8080 }, + { "src": 24566, "dst": 24566 }, + { "src": 13002, "dst": 13002 } + ] + } + }, + "mpc_passthrough_env": { + "mpc_account_id": "sam.test.near", + "mpc_local_address": "127.0.0.1", + "mpc_secret_key_store": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", + "mpc_backup_encryption_key_hex": "0000000000000000000000000000000000000000000000000000000000000000", + "mpc_env": "Localnet", + "mpc_home_dir": "/data", + "mpc_contract_id": "mpc-contract.test.near", + "mpc_responder_id": "sam.test.near", + "near_boot_nodes": 
"ed25519:BGa4WiBj43Mr66f9Ehf6swKtR6wZmWuwCsV3s4PSR3nx@${MACHINE_IP}:24566", + "rust_backtrace": "full", + "rust_log": "info" + } +} diff --git a/deployment/testnet/frodo.json b/deployment/testnet/frodo.json new file mode 100644 index 000000000..4827afc67 --- /dev/null +++ b/deployment/testnet/frodo.json @@ -0,0 +1,37 @@ +{ + "launcher_config": { + "image_tags": ["barak-doc-update_localnet_guide-b12bc7d"], + "image_name": "nearone/mpc-node", + "registry": "registry.hub.docker.com", + "rpc_request_timeout_secs": 10, + "rpc_request_interval_secs": 1, + "rpc_max_attempts": 20, + "mpc_hash_override": null + }, + "docker_command_config": { + "extra_hosts": { + "hosts": [] + }, + "port_mappings": { + "ports": [ + { "src": 8080, "dst": 8080 }, + { "src": 24567, "dst": 24567 }, + { "src": 13001, "dst": 13001 }, + { "src": 80, "dst": 80 } + ] + } + }, + "mpc_passthrough_env": { + "mpc_account_id": "$FRODO_ACCOUNT", + "mpc_local_address": "127.0.0.1", + "mpc_secret_key_store": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", + "mpc_backup_encryption_key_hex": "0000000000000000000000000000000000000000000000000000000000000000", + "mpc_env": "Testnet", + "mpc_home_dir": "/data", + "mpc_contract_id": "$MPC_CONTRACT_ACCOUNT", + "mpc_responder_id": "$FRODO_ACCOUNT", + "near_boot_nodes": "$BOOTNODES", + "rust_backtrace": "full", + "rust_log": "info" + } +} diff --git a/deployment/testnet/sam.json b/deployment/testnet/sam.json new file mode 100644 index 000000000..9b96a09a1 --- /dev/null +++ b/deployment/testnet/sam.json @@ -0,0 +1,37 @@ +{ + "launcher_config": { + "image_tags": ["barak-doc-update_localnet_guide-b12bc7d"], + "image_name": "nearone/mpc-node", + "registry": "registry.hub.docker.com", + "rpc_request_timeout_secs": 10, + "rpc_request_interval_secs": 1, + "rpc_max_attempts": 20, + "mpc_hash_override": null + }, + "docker_command_config": { + "extra_hosts": { + "hosts": [] + }, + "port_mappings": { + "ports": [ + { "src": 8080, "dst": 8080 }, + { "src": 24567, "dst": 24567 }, + { 
"src": 13002, "dst": 13002 }, + { "src": 80, "dst": 80 } + ] + } + }, + "mpc_passthrough_env": { + "mpc_account_id": "$SAM_ACCOUNT", + "mpc_local_address": "127.0.0.1", + "mpc_secret_key_store": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", + "mpc_backup_encryption_key_hex": "0000000000000000000000000000000000000000000000000000000000000000", + "mpc_env": "Testnet", + "mpc_home_dir": "/data", + "mpc_contract_id": "$MPC_CONTRACT_ACCOUNT", + "mpc_responder_id": "$SAM_ACCOUNT", + "near_boot_nodes": "$BOOTNODES", + "rust_backtrace": "full", + "rust_log": "info" + } +} From 5815988025d968b3a53c3bb74b3d45d37cccff78 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 14:58:24 +0100 Subject: [PATCH 23/82] allow dynamically passthrough envs --- crates/tee-launcher/src/env_validation.rs | 162 ++++++++++++++++++++++ crates/tee-launcher/src/error.rs | 3 + crates/tee-launcher/src/main.rs | 18 +-- crates/tee-launcher/src/types.rs | 83 +++++++++-- 4 files changed, 236 insertions(+), 30 deletions(-) create mode 100644 crates/tee-launcher/src/env_validation.rs diff --git a/crates/tee-launcher/src/env_validation.rs b/crates/tee-launcher/src/env_validation.rs new file mode 100644 index 000000000..9df1d3656 --- /dev/null +++ b/crates/tee-launcher/src/env_validation.rs @@ -0,0 +1,162 @@ +use std::sync::LazyLock; + +use regex::Regex; + +/// Hard caps to prevent DoS via huge env payloads (matching Python launcher). +pub(crate) const MAX_PASSTHROUGH_ENV_VARS: usize = 64; +pub(crate) const MAX_ENV_VALUE_LEN: usize = 1024; +pub(crate) const MAX_TOTAL_ENV_BYTES: usize = 32 * 1024; // 32 KB + +/// Never pass raw private keys via launcher. +const DENIED_CONTAINER_ENV_KEYS: &[&str] = &["MPC_P2P_PRIVATE_KEY", "MPC_ACCOUNT_SK"]; + +/// Matches `MPC_[A-Z0-9_]{1,64}` — same pattern as the Python launcher. +static MPC_ENV_KEY_RE: LazyLock = + LazyLock::new(|| Regex::new(r"^MPC_[A-Z0-9_]{1,64}$").unwrap()); + +/// Non-MPC keys that are explicitly allowed for backwards compatibility. 
+const COMPAT_ALLOWED_KEYS: &[&str] = &["RUST_LOG", "RUST_BACKTRACE", "NEAR_BOOT_NODES"]; + +// --------------------------------------------------------------------------- +// Key validation +// --------------------------------------------------------------------------- + +/// Validates an extra env key (from the catch-all `extra_env` map). +/// +/// - Must match `MPC_[A-Z0-9_]{1,64}` **or** be in the compat allowlist +/// - Must not be in the deny list +pub(crate) fn validate_env_key(key: &str) -> Result<(), crate::error::LauncherError> { + if DENIED_CONTAINER_ENV_KEYS.contains(&key) { + return Err(crate::error::LauncherError::UnsafeEnvValue { + key: key.to_owned(), + reason: "denied key".into(), + }); + } + if MPC_ENV_KEY_RE.is_match(key) || COMPAT_ALLOWED_KEYS.contains(&key) { + return Ok(()); + } + Err(crate::error::LauncherError::UnsafeEnvValue { + key: key.to_owned(), + reason: "key does not match allowlist".into(), + }) +} + +// --------------------------------------------------------------------------- +// Value validation +// --------------------------------------------------------------------------- + +fn has_control_chars(s: &str) -> bool { + for ch in s.chars() { + if ch == '\n' || ch == '\r' || ch == '\0' { + return true; + } + if (ch as u32) < 0x20 && ch != '\t' { + return true; + } + } + false +} + +/// Validates an env value (applied to ALL vars, typed and extra). 
+/// +/// - Length <= `MAX_ENV_VALUE_LEN` +/// - No ASCII control characters (except tab) +/// - Does not contain `LD_PRELOAD` +pub(crate) fn validate_env_value( + key: &str, + value: &str, +) -> Result<(), crate::error::LauncherError> { + if value.len() > MAX_ENV_VALUE_LEN { + return Err(crate::error::LauncherError::UnsafeEnvValue { + key: key.to_owned(), + reason: format!("value too long ({} > {MAX_ENV_VALUE_LEN})", value.len()), + }); + } + if has_control_chars(value) { + return Err(crate::error::LauncherError::UnsafeEnvValue { + key: key.to_owned(), + reason: "contains control characters".into(), + }); + } + if value.contains("LD_PRELOAD") { + return Err(crate::error::LauncherError::UnsafeEnvValue { + key: key.to_owned(), + reason: "contains LD_PRELOAD".into(), + }); + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + // -- Key validation tests -- + + #[test] + fn key_allows_mpc_prefix_uppercase() { + assert!(validate_env_key("MPC_FOO").is_ok()); + assert!(validate_env_key("MPC_FOO_123").is_ok()); + assert!(validate_env_key("MPC_A_B_C").is_ok()); + } + + #[test] + fn key_rejects_lowercase_or_invalid_format() { + assert!(validate_env_key("MPC_foo").is_err()); + assert!(validate_env_key("MPC-FOO").is_err()); + assert!(validate_env_key("MPC.FOO").is_err()); + assert!(validate_env_key("MPC_").is_err()); + } + + #[test] + fn key_allows_compat_non_mpc_keys() { + assert!(validate_env_key("RUST_LOG").is_ok()); + assert!(validate_env_key("RUST_BACKTRACE").is_ok()); + assert!(validate_env_key("NEAR_BOOT_NODES").is_ok()); + } + + #[test] + fn key_denies_sensitive_keys() { + assert!(validate_env_key("MPC_P2P_PRIVATE_KEY").is_err()); + assert!(validate_env_key("MPC_ACCOUNT_SK").is_err()); + } + + #[test] + fn key_rejects_unknown_non_mpc_key() { + assert!(validate_env_key("BAD_KEY").is_err()); + assert!(validate_env_key("HOME").is_err()); + } + + // -- Value validation tests -- + + #[test] + fn value_rejects_control_chars() { + assert!(validate_env_value("K", 
"ok\nno").is_err()); + assert!(validate_env_value("K", "ok\rno").is_err()); + assert!(validate_env_value("K", &format!("a{}b", '\x1F')).is_err()); + } + + #[test] + fn value_allows_tab() { + assert!(validate_env_value("K", "a\tb").is_ok()); + } + + #[test] + fn value_rejects_ld_preload() { + assert!(validate_env_value("K", "LD_PRELOAD=/tmp/x.so").is_err()); + assert!(validate_env_value("K", "foo LD_PRELOAD bar").is_err()); + } + + #[test] + fn value_rejects_too_long() { + assert!(validate_env_value("K", &"a".repeat(MAX_ENV_VALUE_LEN + 1)).is_err()); + assert!(validate_env_value("K", &"a".repeat(MAX_ENV_VALUE_LEN)).is_ok()); + } + + #[test] + fn value_accepts_normal() { + assert!(validate_env_value("K", "hello-world").is_ok()); + assert!(validate_env_value("K", "192.168.1.1").is_ok()); + assert!(validate_env_value("K", "info,mpc_node=debug").is_ok()); + } +} diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index 5d922ddb1..cdb900876 100644 --- a/crates/tee-launcher/src/error.rs +++ b/crates/tee-launcher/src/error.rs @@ -54,6 +54,9 @@ pub enum LauncherError { #[error("Total env payload too large (>{0} bytes)")] EnvPayloadTooLarge(usize), + #[error("Env var '{key}' has unsafe value: {reason}")] + UnsafeEnvValue { key: String, reason: String }, + #[error("Unsafe docker command: LD_PRELOAD detected")] LdPreloadDetected, diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 84738e085..e54a3d4f6 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -18,6 +18,7 @@ use url::Url; mod contants; mod docker_types; +mod env_validation; mod error; mod types; @@ -131,21 +132,6 @@ async fn run() -> Result<(), LauncherError> { Ok(()) } -// TODO: this needs to be checked. 
-fn has_control_chars(s: &str) -> bool { - let control_chars = ['\n', '\r', '\0']; - - for character in s.chars() { - if control_chars.contains(&character) { - return true; - } - if (character as u32) < 0x20 && character != '\t' { - return true; - } - } - false -} - async fn get_manifest_digest( config: &LauncherConfig, expected_image_digest: &MpcDockerImageHash, @@ -388,7 +374,7 @@ fn build_docker_cmd( ]); } - for (key, value) in mpc_config.env_vars() { + for (key, value) in mpc_config.env_vars()? { cmd.extend(["--env".into(), format!("{key}={value}")]); } diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index 8fb62a0c6..f079399a6 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -1,3 +1,4 @@ +use std::collections::BTreeMap; use std::fmt; use std::net::{IpAddr, Ipv4Addr}; use std::num::NonZeroU16; @@ -10,6 +11,8 @@ use clap::{Parser, ValueEnum}; use mpc_primitives::hash::MpcDockerImageHash; use serde::{Deserialize, Serialize}; +use crate::env_validation; + /// CLI arguments parsed from environment variables via clap. #[derive(Parser, Debug)] #[command(name = "tee-launcher")] @@ -92,6 +95,11 @@ pub struct MpcBinaryConfig { // rust pub rust_backtrace: RustBacktrace, pub rust_log: RustLog, + /// Additional env vars not covered by the typed fields above. + /// Allows operators to pass new `MPC_*` vars without a launcher rebuild. + /// Keys and values are validated at emission time in `env_vars()`. 
+    #[serde(flatten)]
+    pub extra_env: BTreeMap<String, String>,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -224,22 +232,69 @@ impl fmt::Display for RustLog {
 }
 
 impl MpcBinaryConfig {
-    pub fn env_vars(&self) -> Vec<(&'static str, String)> {
-        vec![
-            ("MPC_ACCOUNT_ID", self.mpc_account_id.clone()),
-            ("MPC_LOCAL_ADDRESS", self.mpc_local_address.to_string()),
-            ("MPC_SECRET_STORE_KEY", self.mpc_secret_key_store.clone()),
-            ("MPC_CONTRACT_ID", self.mpc_contract_id.clone()),
-            ("MPC_ENV", self.mpc_env.to_string()),
-            ("MPC_HOME_DIR", self.mpc_home_dir.display().to_string()),
-            ("MPC_RESPONDER_ID", self.mpc_responder_id.clone()),
+    /// Returns all env vars to pass to the MPC container.
+    ///
+    /// Typed fields are emitted first (deterministic order), followed by
+    /// validated extras from `extra_env`. All keys and values are validated
+    /// uniformly before returning.
+    pub fn env_vars(&self) -> Result<Vec<(String, String)>, crate::error::LauncherError> {
+        let mut vars: Vec<(String, String)> = vec![
+            ("MPC_ACCOUNT_ID".into(), self.mpc_account_id.clone()),
+            (
+                "MPC_LOCAL_ADDRESS".into(),
+                self.mpc_local_address.to_string(),
+            ),
+            (
+                "MPC_SECRET_STORE_KEY".into(),
+                self.mpc_secret_key_store.clone(),
+            ),
+            ("MPC_CONTRACT_ID".into(), self.mpc_contract_id.clone()),
+            ("MPC_ENV".into(), self.mpc_env.to_string()),
+            (
+                "MPC_HOME_DIR".into(),
+                self.mpc_home_dir.display().to_string(),
+            ),
+            ("MPC_RESPONDER_ID".into(), self.mpc_responder_id.clone()),
             (
-                "MPC_BACKUP_ENCRYPTION_KEY_HEX",
+                "MPC_BACKUP_ENCRYPTION_KEY_HEX".into(),
                 self.mpc_backup_encryption_key_hex.clone(),
             ),
-            ("NEAR_BOOT_NODES", self.near_boot_nodes.clone()),
-            ("RUST_BACKTRACE", self.rust_backtrace.to_string()),
-            ("RUST_LOG", self.rust_log.to_string()),
-        ]
+            ("NEAR_BOOT_NODES".into(), self.near_boot_nodes.clone()),
+            ("RUST_BACKTRACE".into(), self.rust_backtrace.to_string()),
+            ("RUST_LOG".into(), self.rust_log.to_string()),
+        ];
+
+        // Keys already emitted via typed fields — skip duplicates from extra_env.
+        let typed_keys: std::collections::HashSet<String> =
+            vars.iter().map(|(k, _)| k.clone()).collect();
+
+        if self.extra_env.len() > env_validation::MAX_PASSTHROUGH_ENV_VARS {
+            return Err(crate::error::LauncherError::TooManyEnvVars(
+                env_validation::MAX_PASSTHROUGH_ENV_VARS,
+            ));
+        }
+
+        // BTreeMap iteration is sorted, giving deterministic output.
+        for (key, value) in &self.extra_env {
+            if typed_keys.contains(key.as_str()) {
+                continue;
+            }
+            env_validation::validate_env_key(key)?;
+            vars.push((key.clone(), value.clone()));
+        }
+
+        // Validate ALL env vars uniformly (typed + extra) and enforce aggregate caps.
+        let mut total_bytes: usize = 0;
+        for (key, value) in &vars {
+            env_validation::validate_env_value(key, value)?;
+            total_bytes += key.len() + 1 + value.len();
+        }
+        if total_bytes > env_validation::MAX_TOTAL_ENV_BYTES {
+            return Err(crate::error::LauncherError::EnvPayloadTooLarge(
+                env_validation::MAX_TOTAL_ENV_BYTES,
+            ));
+        }
+
+        Ok(vars)
     }
 }

From a35736568fe4614dffb225edd7070d2f5fe03ef5 Mon Sep 17 00:00:00 2001
From: Daniel Sharifi
Date: Thu, 5 Mar 2026 15:05:26 +0100
Subject: [PATCH 24/82] remove todo panics

---
 crates/tee-launcher/src/error.rs |  5 +++++
 crates/tee-launcher/src/main.rs  | 28 +++++++++++++++++++++-------
 2 files changed, 26 insertions(+), 7 deletions(-)

diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs
index cdb900876..4c11e39ff 100644
--- a/crates/tee-launcher/src/error.rs
+++ b/crates/tee-launcher/src/error.rs
@@ -84,12 +84,17 @@ pub enum LauncherError {
     #[error("Registry response parse error: {0}")]
     RegistryResponseParse(String),
 
+    #[error("Invalid manifest URL: {0}")]
+    InvalidManifestUrl(String),
+
     #[error("The selected image failed digest validation: {0}")]
     ImageDigestValidationFailed(#[from] ImageDigestValidationFailed),
 }
 
 #[derive(Error, Debug)]
 pub enum ImageDigestValidationFailed {
+    #[error("manifest digest lookup failed: {0}")]
+    ManifestDigestLookupFailed(String),
     #[error("docker pull failed for 
{0}")] DockerPullFailed(String), #[error("docker inspect failed for {0}")] diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index e54a3d4f6..bdb413088 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -1,5 +1,4 @@ use std::process::Command; -use std::str::FromStr; use std::{collections::VecDeque, time::Duration}; use clap::Parser; @@ -53,9 +52,16 @@ async fn run() -> Result<(), LauncherError> { let config_file = std::fs::OpenOptions::new() .read(true) .open(DSTACK_USER_CONFIG_FILE) - .expect("dstack user config file exists"); + .map_err(|source| LauncherError::FileRead { + path: DSTACK_USER_CONFIG_FILE.to_string(), + source, + })?; - let dstack_config: Config = serde_json::from_reader(config_file).expect("config file is valid"); + let dstack_config: Config = + serde_json::from_reader(config_file).map_err(|source| LauncherError::JsonParse { + path: DSTACK_USER_CONFIG_FILE.to_string(), + source, + })?; let approved_hashes_file = std::fs::OpenOptions::new() .read(true) @@ -93,7 +99,10 @@ async fn run() -> Result<(), LauncherError> { .contains(override_image); if !override_image_is_allowed { - panic!("TODO: panic if override image is not allowed?"); + return Err(LauncherError::InvalidHashOverride(format!( + "MPC_HASH_OVERRIDE={} does not match any approved hash", + override_image.as_hex_sha256() + ))); } override_image.clone() @@ -156,7 +165,9 @@ async fn get_manifest_digest( let status = token_request_response.status(); if !status.is_success() { - todo!("add error case for non success http codes"); + return Err(LauncherError::RegistryAuthFailed(format!( + "token request returned non-success status: {status}" + ))); } let token_response: DockerTokenResponse = token_request_response @@ -170,7 +181,10 @@ async fn get_manifest_digest( config.registry, config.image_name ) .parse() - .expect("TODO handle error"); + .map_err(|_| LauncherError::InvalidManifestUrl(format!( + "https://{}/v2/{}/manifests/{tag}", + 
config.registry, config.image_name + )))?; let authorization_value: HeaderValue = format!("Bearer {}", token_response.token) .parse() @@ -294,7 +308,7 @@ async fn validate_image_hash( ) -> Result<(), ImageDigestValidationFailed> { let manifest_digest = get_manifest_digest(launcher_config, &image_hash) .await - .expect("TODO: handle error"); + .map_err(|e| ImageDigestValidationFailed::ManifestDigestLookupFailed(e.to_string()))?; let image_name = &launcher_config.image_name; let name_and_digest = format!("{image_name}@{manifest_digest}"); From 107a16628f0c8b9146a559a515e9d2c8b49fbf0c Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 15:13:38 +0100 Subject: [PATCH 25/82] use backon for retries --- Cargo.lock | 1 + crates/tee-launcher/Cargo.toml | 1 + crates/tee-launcher/src/main.rs | 71 ++++++++++++++------------------- 3 files changed, 33 insertions(+), 40 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f3283e62c..c18f698cc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10554,6 +10554,7 @@ name = "tee-launcher" version = "3.5.1" dependencies = [ "assert_matches", + "backon", "bounded-collections", "clap", "dstack-sdk", diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index 0d4fb45ba..8503826ae 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -12,6 +12,7 @@ path = "src/main.rs" integration-test = [] [dependencies] +backon = { workspace = true } bounded-collections = { workspace = true } clap = { workspace = true } dstack-sdk = { workspace = true } diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index bdb413088..c04a7b070 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -1,6 +1,7 @@ use std::process::Command; use std::{collections::VecDeque, time::Duration}; +use backon::{ExponentialBuilder, Retryable}; use clap::Parser; use launcher_interface::MPC_IMAGE_HASH_EVENT; use launcher_interface::types::ApprovedHashesFile; 
@@ -181,10 +182,12 @@ async fn get_manifest_digest( config.registry, config.image_name ) .parse() - .map_err(|_| LauncherError::InvalidManifestUrl(format!( - "https://{}/v2/{}/manifests/{tag}", - config.registry, config.image_name - )))?; + .map_err(|_| { + LauncherError::InvalidManifestUrl(format!( + "https://{}/v2/{}/manifests/{tag}", + config.registry, config.image_name + )) + })?; let authorization_value: HeaderValue = format!("Bearer {}", token_response.token) .parse() @@ -252,52 +255,40 @@ async fn get_manifest_digest( Err(LauncherError::ImageHashNotFoundAmongTags) } -// TODO: Use backon async fn request_until_success( client: &reqwest::Client, url: Url, headers: HeaderMap, config: &LauncherConfig, ) -> Result { - let mut interval = config.rpc_request_interval_secs as f64; - - for attempt in 1..=config.rpc_max_attempts { - // Sleep before request (matching Python behavior) - tokio::time::sleep(std::time::Duration::from_secs_f64(interval)).await; - interval = (interval.max(1.0) * 1.5).min(60.0); - - let request_timeout_duration = Duration::from_secs(config.rpc_request_timeout_secs); - let request = client + let request_timeout = Duration::from_secs(config.rpc_request_timeout_secs); + let backoff = ExponentialBuilder::default() + .with_min_delay(Duration::from_secs(config.rpc_request_interval_secs)) + .with_factor(1.5) + .with_max_delay(Duration::from_secs(60)) + .with_max_times(config.rpc_max_attempts as usize); + + let request_future = || async { + client .get(url.clone()) .headers(headers.clone()) - .timeout(request_timeout_duration) + .timeout(request_timeout) .send() - .await; - - match request { - Err(e) => { - tracing::warn!( - "Attempt {attempt}/{}: Failed to fetch {url}. Status: Timeout/Error: {e}", - config.rpc_max_attempts - ); - continue; - } - Ok(resp) if resp.status() != reqwest::StatusCode::OK => { - tracing::warn!( - "Attempt {attempt}/{}: Failed to fetch {url}. 
Status: {}", - config.rpc_max_attempts, - resp.status() - ); - continue; - } - Ok(resp) => return Ok(resp), - } - } + .await? + .error_for_status() + }; - Err(LauncherError::RegistryRequestFailed { - url, - attempts: config.rpc_max_attempts, - }) + request_future + .retry(backoff) + .when(|_: &reqwest::Error| true) + .notify(|err, retrying_in_duration| { + tracing::warn!(?url, ?retrying_in_duration, ?err, "failed to fetch"); + }) + .await + .map_err(|_| LauncherError::RegistryRequestFailed { + url, + attempts: config.rpc_max_attempts, + }) } /// Returns if the given image digest is valid (pull + manifest + digest match). From 3913f9751cb9c73ba2da442982178c03025542f8 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 15:22:02 +0100 Subject: [PATCH 26/82] inline the backon --- crates/tee-launcher/src/docker_types.rs | 2 +- crates/tee-launcher/src/main.rs | 153 +++++++++++------------- 2 files changed, 70 insertions(+), 85 deletions(-) diff --git a/crates/tee-launcher/src/docker_types.rs b/crates/tee-launcher/src/docker_types.rs index 793507163..eb2c552fd 100644 --- a/crates/tee-launcher/src/docker_types.rs +++ b/crates/tee-launcher/src/docker_types.rs @@ -33,7 +33,7 @@ pub struct ManifestEntry { pub platform: ManifestPlatform, } -#[derive(Debug, Deserialize, Serialize)] +#[derive(Debug, Deserialize, Serialize, PartialEq, Eq)] pub struct ManifestPlatform { pub architecture: String, pub os: String, diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index c04a7b070..36a145db9 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -27,6 +27,9 @@ const DOCKER_AUTH_ACCEPT_HEADER_VALUE: HeaderValue = const DOCKER_CONTENT_DIGEST_HEADER: &str = "Docker-Content-Digest"; +const AMD64: &str = "amd64"; +const LINUX: &str = "linux"; + #[tokio::main] async fn main() { tracing_subscriber::fmt() @@ -198,56 +201,74 @@ async fn get_manifest_digest( (AUTHORIZATION, authorization_value), ]); - match 
request_until_success( - &reqwest_client, - manifest_url.clone(), - headers.clone(), - config, - ) - .await - { - Ok(resp) => { - let response_headers = resp.headers().clone(); - let manifest: ManifestResponse = resp - .json() - .await - .map_err(|e| LauncherError::RegistryResponseParse(e.to_string()))?; - - match manifest { - ManifestResponse::ImageIndex { manifests } => { - // Multi-platform manifest; scan for amd64/linux - manifests - .into_iter() - .filter(|manifest| { - manifest.platform.architecture == "amd64" - && manifest.platform.os == "linux" - }) - .for_each(|manifest| tags.push_back(manifest.digest)); - } - ManifestResponse::DockerV2 { config } - | ManifestResponse::OciManifest { config } => { - let incorrect_config_digest = config.digest != expected_digest; - if incorrect_config_digest { - continue; - } - - let Some(content_digest) = response_headers - .get(DOCKER_CONTENT_DIGEST_HEADER) - .and_then(|v| v.to_str().ok()) - .map(|s| s.to_string()) - else { - continue; - }; - - return Ok(content_digest); - } - } - } - Err(e) => { + let request_timeout = Duration::from_secs(config.rpc_request_timeout_secs); + let backoff = ExponentialBuilder::default() + .with_min_delay(Duration::from_secs(config.rpc_request_interval_secs)) + .with_factor(1.5) + .with_max_delay(Duration::from_secs(60)) + .with_max_times(config.rpc_max_attempts as usize); + + let request_future = || async { + reqwest_client + .get(manifest_url.clone()) + .headers(headers.clone()) + .timeout(request_timeout) + .send() + .await? + .error_for_status() + }; + + let request_with_retry_future = request_future + .retry(backoff) + .when(|_: &reqwest::Error| true) + .notify(|err, dur| { tracing::warn!( - "{e}: Exceeded number of maximum RPC requests for any given attempt. 
\ - Will continue in the hopes of finding the matching image hash among remaining tags" + ?manifest_url, + ?dur, + ?err, + "failed to fetch manifest, retrying" ); + }); + + let Ok(resp) = request_with_retry_future.await else { + tracing::warn!( + ?manifest_url, + "exceeded max RPC attempts. \ + Will continue in the hopes of finding the matching image hash among remaining tags" + ); + continue; + }; + + let response_headers = resp.headers().clone(); + let manifest: ManifestResponse = resp + .json() + .await + .map_err(|e| LauncherError::RegistryResponseParse(e.to_string()))?; + + match manifest { + ManifestResponse::ImageIndex { manifests } => { + // Multi-platform manifest; scan for amd64/linux + manifests + .into_iter() + .filter(|manifest| { + manifest.platform.architecture == AMD64 && manifest.platform.os == LINUX + }) + .for_each(|manifest| tags.push_back(manifest.digest)); + } + ManifestResponse::DockerV2 { config } | ManifestResponse::OciManifest { config } => { + if config.digest != expected_digest { + continue; + } + + let Some(content_digest) = response_headers + .get(DOCKER_CONTENT_DIGEST_HEADER) + .and_then(|v| v.to_str().ok()) + .map(|s| s.to_string()) + else { + continue; + }; + + return Ok(content_digest); } } } @@ -255,42 +276,6 @@ async fn get_manifest_digest( Err(LauncherError::ImageHashNotFoundAmongTags) } -async fn request_until_success( - client: &reqwest::Client, - url: Url, - headers: HeaderMap, - config: &LauncherConfig, -) -> Result { - let request_timeout = Duration::from_secs(config.rpc_request_timeout_secs); - let backoff = ExponentialBuilder::default() - .with_min_delay(Duration::from_secs(config.rpc_request_interval_secs)) - .with_factor(1.5) - .with_max_delay(Duration::from_secs(60)) - .with_max_times(config.rpc_max_attempts as usize); - - let request_future = || async { - client - .get(url.clone()) - .headers(headers.clone()) - .timeout(request_timeout) - .send() - .await? 
- .error_for_status() - }; - - request_future - .retry(backoff) - .when(|_: &reqwest::Error| true) - .notify(|err, retrying_in_duration| { - tracing::warn!(?url, ?retrying_in_duration, ?err, "failed to fetch"); - }) - .await - .map_err(|_| LauncherError::RegistryRequestFailed { - url, - attempts: config.rpc_max_attempts, - }) -} - /// Returns if the given image digest is valid (pull + manifest + digest match). /// Does NOT extend RTMR3 and does NOT run the container. async fn validate_image_hash( From 5e69675d650b37adb272a3de460d06e81317f665 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 15:49:55 +0100 Subject: [PATCH 27/82] add snapshot tests --- Cargo.lock | 3 + crates/launcher-interface/Cargo.toml | 5 + crates/launcher-interface/src/lib.rs | 110 +++++++++++++++++- ...nterface__tests__approved_hashes_file.snap | 9 ++ ...ncher_interface__tests__docker_digest.snap | 5 + ...rface__tests__docker_digest_roundtrip.snap | 5 + 6 files changed, 132 insertions(+), 5 deletions(-) create mode 100644 crates/launcher-interface/src/snapshots/launcher_interface__tests__approved_hashes_file.snap create mode 100644 crates/launcher-interface/src/snapshots/launcher_interface__tests__docker_digest.snap create mode 100644 crates/launcher-interface/src/snapshots/launcher_interface__tests__docker_digest_roundtrip.snap diff --git a/Cargo.lock b/Cargo.lock index c18f698cc..a2ae05752 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4930,8 +4930,11 @@ name = "launcher-interface" version = "3.5.1" dependencies = [ "bounded-collections", + "derive_more 2.1.1", + "insta", "mpc-primitives", "serde", + "serde_json", ] [[package]] diff --git a/crates/launcher-interface/Cargo.toml b/crates/launcher-interface/Cargo.toml index 0da73cbaa..235790226 100644 --- a/crates/launcher-interface/Cargo.toml +++ b/crates/launcher-interface/Cargo.toml @@ -5,10 +5,15 @@ edition.workspace = true license.workspace = true [dependencies] +derive_more = { workspace = true } bounded-collections = { 
workspace = true }
 mpc-primitives = { workspace = true }
 serde = { workspace = true }
 
+[dev-dependencies]
+insta = { workspace = true }
+serde_json = { workspace = true }
+
 [lints]
 workspace = true
 
diff --git a/crates/launcher-interface/src/lib.rs b/crates/launcher-interface/src/lib.rs
index a4dd7049d..c2ac95258 100644
--- a/crates/launcher-interface/src/lib.rs
+++ b/crates/launcher-interface/src/lib.rs
@@ -4,19 +4,119 @@ pub mod types {
     use mpc_primitives::hash::MpcDockerImageHash;
     use serde::{Deserialize, Serialize};
 
-    /// JSON structure for the approved hashes file written by the MPC node.
+    /// JSON structure for the approved hashes file written by the MPC node, and read by the launcher.
     #[derive(Debug, Serialize, Deserialize)]
     pub struct ApprovedHashesFile {
-        pub approved_hashes: bounded_collections::NonEmptyVec<MpcDockerImageHash>,
+        pub approved_hashes: bounded_collections::NonEmptyVec<DockerDigest>,
     }
 
     impl ApprovedHashesFile {
-        pub fn newest_approved_hash(&self) -> &MpcDockerImageHash {
+        pub fn newest_approved_hash(&self) -> &DockerDigest {
             self.approved_hashes.first()
         }
     }
-}
 
-// TODO: add insta snapshot test for this type
+    const SHA256_PREFIX: &str = "sha256:";
+
+    #[derive(Debug, Clone, derive_more::From)]
+    pub struct DockerDigest(MpcDockerImageHash);
+
+    impl Serialize for DockerDigest {
+        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+        where
+            S: serde::Serializer,
+        {
+            let image_hash_hex = self.0.as_hex();
+            let docker_digest_representation = format!("{SHA256_PREFIX}{image_hash_hex}");
+            docker_digest_representation.serialize(serializer)
+        }
+    }
+
+    impl<'de> Deserialize<'de> for DockerDigest {
+        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+        where
+            D: serde::Deserializer<'de>,
+        {
+            let s = String::deserialize(deserializer)?;
+            let hex_str = s.strip_prefix(SHA256_PREFIX).ok_or_else(|| {
+                serde::de::Error::custom(format!("missing {SHA256_PREFIX} prefix"))
+            })?;
+
+            hex_str
+                .parse()
+                .map(DockerDigest)
+                .map_err(serde::de::Error::custom)
+        }
+    }
+}
 
 mod paths {}
+
+#[cfg(test)]
+mod tests { + use super::types::{ApprovedHashesFile, DockerDigest}; + use mpc_primitives::hash::MpcDockerImageHash; + + fn sample_digest() -> DockerDigest { + let hash: MpcDockerImageHash = [0xab; 32].into(); + DockerDigest::from(hash) + } + + #[test] + fn serialize_docker_digest() { + let digest = sample_digest(); + let json = serde_json::to_value(&digest).unwrap(); + insta::assert_json_snapshot!("docker_digest", json); + } + + #[test] + fn roundtrip_docker_digest() { + let digest = sample_digest(); + let serialized = serde_json::to_string(&digest).unwrap(); + let deserialized: DockerDigest = serde_json::from_str(&serialized).unwrap(); + insta::assert_json_snapshot!( + "docker_digest_roundtrip", + serde_json::to_value(&deserialized).unwrap() + ); + } + + #[test] + fn deserialize_rejects_missing_prefix() { + let json = serde_json::json!( + "abababababababababababababababababababababababababababababababababab" + ); + let result = serde_json::from_value::(json); + assert!(result.is_err()); + assert!( + result + .unwrap_err() + .to_string() + .contains("missing sha256: prefix"), + "error should mention missing prefix" + ); + } + + #[test] + fn deserialize_rejects_invalid_hex() { + let json = serde_json::json!("sha256:not_valid_hex!"); + let result = serde_json::from_value::(json); + assert!(result.is_err()); + } + + #[test] + fn deserialize_rejects_wrong_length() { + let json = serde_json::json!("sha256:abab"); + let result = serde_json::from_value::(json); + assert!(result.is_err()); + } + + #[test] + fn serialize_approved_hashes_file() { + let file = ApprovedHashesFile { + approved_hashes: bounded_collections::NonEmptyVec::from_vec(vec![sample_digest()]) + .unwrap(), + }; + let json = serde_json::to_value(&file).unwrap(); + insta::assert_json_snapshot!("approved_hashes_file", json); + } +} diff --git a/crates/launcher-interface/src/snapshots/launcher_interface__tests__approved_hashes_file.snap 
b/crates/launcher-interface/src/snapshots/launcher_interface__tests__approved_hashes_file.snap new file mode 100644 index 000000000..61f7f301c --- /dev/null +++ b/crates/launcher-interface/src/snapshots/launcher_interface__tests__approved_hashes_file.snap @@ -0,0 +1,9 @@ +--- +source: crates/launcher-interface/src/lib.rs +expression: json +--- +{ + "approved_hashes": [ + "sha256:abababababababababababababababababababababababababababababababab" + ] +} diff --git a/crates/launcher-interface/src/snapshots/launcher_interface__tests__docker_digest.snap b/crates/launcher-interface/src/snapshots/launcher_interface__tests__docker_digest.snap new file mode 100644 index 000000000..268452a91 --- /dev/null +++ b/crates/launcher-interface/src/snapshots/launcher_interface__tests__docker_digest.snap @@ -0,0 +1,5 @@ +--- +source: crates/launcher-interface/src/lib.rs +expression: json +--- +"sha256:abababababababababababababababababababababababababababababababab" diff --git a/crates/launcher-interface/src/snapshots/launcher_interface__tests__docker_digest_roundtrip.snap b/crates/launcher-interface/src/snapshots/launcher_interface__tests__docker_digest_roundtrip.snap new file mode 100644 index 000000000..568aec643 --- /dev/null +++ b/crates/launcher-interface/src/snapshots/launcher_interface__tests__docker_digest_roundtrip.snap @@ -0,0 +1,5 @@ +--- +source: crates/launcher-interface/src/lib.rs +expression: "serde_json::to_value(&deserialized).unwrap()" +--- +"sha256:abababababababababababababababababababababababababababababababab" From 055f0afba2503b1221851edda12404d6fabb35b7 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 16:14:05 +0100 Subject: [PATCH 28/82] use DockerSha256Digest --- Cargo.lock | 1 + crates/launcher-interface/Cargo.toml | 1 + crates/launcher-interface/src/lib.rs | 114 ++++++++++++++---- ...terface__tests__docker_digest_display.snap | 5 + crates/tee-launcher/src/docker_types.rs | 3 +- crates/tee-launcher/src/error.rs | 11 +- 
crates/tee-launcher/src/main.rs | 50 ++++---- crates/tee-launcher/src/types.rs | 6 +- 8 files changed, 131 insertions(+), 60 deletions(-) create mode 100644 crates/launcher-interface/src/snapshots/launcher_interface__tests__docker_digest_display.snap diff --git a/Cargo.lock b/Cargo.lock index a2ae05752..eb2ea871a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4935,6 +4935,7 @@ dependencies = [ "mpc-primitives", "serde", "serde_json", + "thiserror 2.0.18", ] [[package]] diff --git a/crates/launcher-interface/Cargo.toml b/crates/launcher-interface/Cargo.toml index 235790226..09d5e0063 100644 --- a/crates/launcher-interface/Cargo.toml +++ b/crates/launcher-interface/Cargo.toml @@ -9,6 +9,7 @@ derive_more = { workspace = true } bounded-collections = { workspace = true } mpc-primitives = { workspace = true } serde = { workspace = true } +thiserror = { workspace = true } [dev-dependencies] diff --git a/crates/launcher-interface/src/lib.rs b/crates/launcher-interface/src/lib.rs index c2ac95258..1bcb17e16 100644 --- a/crates/launcher-interface/src/lib.rs +++ b/crates/launcher-interface/src/lib.rs @@ -1,51 +1,76 @@ pub const MPC_IMAGE_HASH_EVENT: &str = "mpc-image-digest"; pub mod types { + use std::fmt; + use std::str::FromStr; + use mpc_primitives::hash::MpcDockerImageHash; use serde::{Deserialize, Serialize}; /// JSON structure for the approved hashes file written by the MPC node, and read by the launcher. 
#[derive(Debug, Serialize, Deserialize)] pub struct ApprovedHashesFile { - pub approved_hashes: bounded_collections::NonEmptyVec, + pub approved_hashes: bounded_collections::NonEmptyVec, } impl ApprovedHashesFile { - pub fn newest_approved_hash(&self) -> &DockerDigest { + pub fn newest_approved_hash(&self) -> &DockerSha256Digest { self.approved_hashes.first() } } const SHA256_PREFIX: &str = "sha256:"; - #[derive(Debug, Clone, derive_more::From)] - pub struct DockerDigest(MpcDockerImageHash); + #[derive(Debug, Clone, PartialEq, Eq, derive_more::From)] + pub struct DockerSha256Digest(MpcDockerImageHash); + + impl fmt::Display for DockerSha256Digest { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{SHA256_PREFIX}{}", self.0.as_hex()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum DockerDigestParseError { + #[error("missing {SHA256_PREFIX} prefix")] + MissingPrefix, + #[error(transparent)] + InvalidHash(#[from] mpc_primitives::hash::Hash32ParseError), + } - impl Serialize for DockerDigest { + impl DockerSha256Digest { + pub fn as_raw_hex(&self) -> String { + self.0.as_hex() + } + } + + impl FromStr for DockerSha256Digest { + type Err = DockerDigestParseError; + + fn from_str(s: &str) -> Result { + let hex_str = s + .strip_prefix(SHA256_PREFIX) + .ok_or(DockerDigestParseError::MissingPrefix)?; + Ok(DockerSha256Digest(hex_str.parse()?)) + } + } + + impl Serialize for DockerSha256Digest { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, { - let image_hash_hex = self.0.as_hex(); - let docker_digest_representation = format!("{SHA256_PREFIX}{image_hash_hex}"); - docker_digest_representation.serialize(serializer) + self.to_string().serialize(serializer) } } - impl<'de> Deserialize<'de> for DockerDigest { + impl<'de> Deserialize<'de> for DockerSha256Digest { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { let s = String::deserialize(deserializer)?; - let hex_str = 
s.strip_prefix(SHA256_PREFIX).ok_or_else(|| { - serde::de::Error::custom(format!("missing {SHA256_PREFIX} prefix")) - })?; - - hex_str - .parse() - .map(DockerDigest) - .map_err(serde::de::Error::custom) + s.parse().map_err(serde::de::Error::custom) } } } @@ -54,12 +79,12 @@ mod paths {} #[cfg(test)] mod tests { - use super::types::{ApprovedHashesFile, DockerDigest}; + use super::types::{ApprovedHashesFile, DockerSha256Digest}; use mpc_primitives::hash::MpcDockerImageHash; - fn sample_digest() -> DockerDigest { + fn sample_digest() -> DockerSha256Digest { let hash: MpcDockerImageHash = [0xab; 32].into(); - DockerDigest::from(hash) + DockerSha256Digest::from(hash) } #[test] @@ -73,7 +98,7 @@ mod tests { fn roundtrip_docker_digest() { let digest = sample_digest(); let serialized = serde_json::to_string(&digest).unwrap(); - let deserialized: DockerDigest = serde_json::from_str(&serialized).unwrap(); + let deserialized: DockerSha256Digest = serde_json::from_str(&serialized).unwrap(); insta::assert_json_snapshot!( "docker_digest_roundtrip", serde_json::to_value(&deserialized).unwrap() @@ -85,7 +110,7 @@ mod tests { let json = serde_json::json!( "abababababababababababababababababababababababababababababababababab" ); - let result = serde_json::from_value::(json); + let result = serde_json::from_value::(json); assert!(result.is_err()); assert!( result @@ -99,17 +124,58 @@ mod tests { #[test] fn deserialize_rejects_invalid_hex() { let json = serde_json::json!("sha256:not_valid_hex!"); - let result = serde_json::from_value::(json); + let result = serde_json::from_value::(json); assert!(result.is_err()); } #[test] fn deserialize_rejects_wrong_length() { let json = serde_json::json!("sha256:abab"); - let result = serde_json::from_value::(json); + let result = serde_json::from_value::(json); assert!(result.is_err()); } + #[test] + fn display_docker_digest() { + let digest = sample_digest(); + insta::assert_snapshot!("docker_digest_display", digest.to_string()); + } + + 
#[test] + fn parse_docker_digest() { + let input = "sha256:abababababababababababababababababababababababababababababababab"; + let parsed: DockerSha256Digest = input.parse().unwrap(); + assert_eq!(parsed.to_string(), input); + } + + #[test] + fn parse_rejects_missing_prefix() { + let result = "abababababababababababababababababababababababababababababababababab" + .parse::(); + assert!(matches!( + result, + Err(super::types::DockerDigestParseError::MissingPrefix) + )); + } + + #[test] + fn parse_rejects_invalid_hex() { + let result = "sha256:not_valid_hex!".parse::(); + assert!(matches!( + result, + Err(super::types::DockerDigestParseError::InvalidHash(_)) + )); + } + + #[test] + fn parse_rejects_wrong_length() { + let result = "sha256:abab".parse::(); + assert!(matches!( + result, + Err(super::types::DockerDigestParseError::InvalidHash(_)) + )); + } + #[test] fn serialize_approved_hashes_file() { let file = ApprovedHashesFile { diff --git a/crates/launcher-interface/src/snapshots/launcher_interface__tests__docker_digest_display.snap b/crates/launcher-interface/src/snapshots/launcher_interface__tests__docker_digest_display.snap new file mode 100644 index 000000000..44b276fe5 --- /dev/null +++ b/crates/launcher-interface/src/snapshots/launcher_interface__tests__docker_digest_display.snap @@ -0,0 +1,5 @@ +--- +source: crates/launcher-interface/src/lib.rs +expression: digest.to_string() +--- +sha256:abababababababababababababababababababababababababababababababab diff --git a/crates/tee-launcher/src/docker_types.rs b/crates/tee-launcher/src/docker_types.rs index eb2c552fd..16f0aad59 100644 --- a/crates/tee-launcher/src/docker_types.rs +++ b/crates/tee-launcher/src/docker_types.rs @@ -1,3 +1,4 @@ +use launcher_interface::types::DockerSha256Digest; use serde::{Deserialize, Serialize}; /// Partial response https://auth.docker.io/token @@ -41,5 +42,5 @@ pub struct ManifestPlatform { #[derive(Debug, Deserialize, Serialize)] pub struct ManifestConfig { - pub digest: 
String, + pub digest: DockerSha256Digest, } diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index 4c11e39ff..6904bed09 100644 --- a/crates/tee-launcher/src/error.rs +++ b/crates/tee-launcher/src/error.rs @@ -1,6 +1,7 @@ use std::path::PathBuf; -use mpc_primitives::hash::MpcDockerImageHash; +use launcher_interface::types::DockerSha256Digest; +use mpc_primitives::hash::DockerSha256Digest; use thiserror::Error; use url::Url; @@ -41,12 +42,12 @@ pub enum LauncherError { #[error("docker run failed for validated hash")] DockerRunFailed { - image_hash: MpcDockerImageHash, + image_hash: DockerSha256Digest, inner: std::io::Error, }, #[error("docker run failed for validated hash")] - DockerRunFailedExitStatus { image_hash: MpcDockerImageHash }, + DockerRunFailedExitStatus { image_hash: DockerSha256Digest }, #[error("Too many env vars to pass through (>{0})")] TooManyEnvVars(usize), @@ -103,7 +104,7 @@ pub enum ImageDigestValidationFailed { "pulled image has mismatching digest. pulled: {pulled_digest}, expected: {expected_digest}" )] PulledImageHasMismatchedDigest { - expected_digest: String, - pulled_digest: String, + expected_digest: DockerSha256Digest, + pulled_digest: DockerSha256Digest, }, } diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 36a145db9..e8debf7a2 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -4,10 +4,7 @@ use std::{collections::VecDeque, time::Duration}; use backon::{ExponentialBuilder, Retryable}; use clap::Parser; use launcher_interface::MPC_IMAGE_HASH_EVENT; -use launcher_interface::types::ApprovedHashesFile; - -// Reuse the workspace hash type for type-safe image hash handling. 
-use mpc_primitives::hash::MpcDockerImageHash; +use launcher_interface::types::{ApprovedHashesFile, DockerSha256Digest}; use contants::*; use docker_types::*; @@ -75,7 +72,7 @@ async fn run() -> Result<(), LauncherError> { source, }); - let image_hash: MpcDockerImageHash = { + let image_hash: DockerSha256Digest = { match approved_hashes_file { Err(err) => { let default_image_digest = args.default_image_digest; @@ -104,8 +101,7 @@ async fn run() -> Result<(), LauncherError> { if !override_image_is_allowed { return Err(LauncherError::InvalidHashOverride(format!( - "MPC_HASH_OVERRIDE={} does not match any approved hash", - override_image.as_hex_sha256() + "MPC_HASH_OVERRIDE={override_image} does not match any approved hash", ))); } @@ -129,7 +125,7 @@ async fn run() -> Result<(), LauncherError> { .emit_event( MPC_IMAGE_HASH_EVENT.to_string(), // TODO: mpc binary has to go back from back hex as well. Just send the raw bytes as payload. - image_hash.as_hex().as_bytes().to_vec(), + image_hash.as_raw_hex().as_bytes().to_vec(), ) .await .map_err(|e| LauncherError::DstackEmitEventFailed(e.to_string()))?; @@ -147,10 +143,9 @@ async fn run() -> Result<(), LauncherError> { async fn get_manifest_digest( config: &LauncherConfig, - expected_image_digest: &MpcDockerImageHash, + expected_image_digest: &DockerSha256Digest, ) -> Result { let mut tags: VecDeque = config.image_tags.iter().cloned().collect(); - let expected_digest = format!("sha256:{}", expected_image_digest.as_hex()); // We need an authorization token to fetch manifests. // TODO: this still has the registry hard-coded in the url. 
also, if we use a different registry, we need a different auth-endpoint @@ -256,7 +251,7 @@ async fn get_manifest_digest( .for_each(|manifest| tags.push_back(manifest.digest)); } ManifestResponse::DockerV2 { config } | ManifestResponse::OciManifest { config } => { - if config.digest != expected_digest { + if config.digest != *expected_image_digest { continue; } @@ -280,7 +275,7 @@ async fn get_manifest_digest( /// Does NOT extend RTMR3 and does NOT run the container. async fn validate_image_hash( launcher_config: &LauncherConfig, - image_hash: MpcDockerImageHash, + image_hash: DockerSha256Digest, ) -> Result<(), ImageDigestValidationFailed> { let manifest_digest = get_manifest_digest(launcher_config, &image_hash) .await @@ -321,13 +316,17 @@ async fn validate_image_hash( )); } - let pulled_digest = String::from_utf8_lossy(&inspect.stdout).trim().to_string(); - let image_hash_string = image_hash.as_hex_sha256(); - if pulled_digest != image_hash_string { + let pulled_digest = String::from_utf8_lossy(&inspect.stdout) + .trim() + .to_string() + .parse() + .expect("is valid digest"); + + if pulled_digest != image_hash { return Err( ImageDigestValidationFailed::PulledImageHasMismatchedDigest { pulled_digest, - expected_digest: image_hash_string, + expected_digest: image_hash, }, ); } @@ -339,14 +338,14 @@ fn build_docker_cmd( platform: Platform, mpc_config: &MpcBinaryConfig, docker_flags: &DockerLaunchFlags, - image_digest: &MpcDockerImageHash, + image_digest: &DockerSha256Digest, ) -> Result, LauncherError> { let mut cmd: Vec = vec!["docker".into(), "run".into()]; // Required environment variables cmd.extend([ "--env".into(), - format!("MPC_IMAGE_HASH={}", image_digest.as_hex()), + format!("MPC_IMAGE_HASH={}", image_digest.as_raw_hex()), ]); cmd.extend([ "--env".into(), @@ -384,14 +383,14 @@ fn build_docker_cmd( "--name".into(), MPC_CONTAINER_NAME.into(), "--detach".into(), - image_digest.as_hex_sha256(), + format!("{image_digest}"), ]); - tracing::info!("docker cmd 
{}", cmd.join(" ")); + let docker_command_string = cmd.join(" "); + tracing::info!(?docker_command_string, "docker cmd"); // Final LD_PRELOAD safeguard - let cmd_str = cmd.join(" "); - if cmd_str.contains("LD_PRELOAD") { + if docker_command_string.contains("LD_PRELOAD") { return Err(LauncherError::LdPreloadDetected); } @@ -400,14 +399,11 @@ fn build_docker_cmd( fn launch_mpc_container( platform: Platform, - valid_hash: &MpcDockerImageHash, + valid_hash: &DockerSha256Digest, mpc_config: &MpcBinaryConfig, docker_flags: &DockerLaunchFlags, ) -> Result<(), LauncherError> { - tracing::info!( - "Launching MPC node with validated hash: {}", - valid_hash.as_hex() - ); + tracing::info!("Launching MPC node with validated hash: {valid_hash}",); // shutdown container if one is already running let _ = Command::new("docker") diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index f079399a6..10a3fda8d 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -4,11 +4,11 @@ use std::net::{IpAddr, Ipv4Addr}; use std::num::NonZeroU16; use std::path::PathBuf; +use launcher_interface::types::DockerSha256Digest; use url::Host; use bounded_collections::NonEmptyVec; use clap::{Parser, ValueEnum}; -use mpc_primitives::hash::MpcDockerImageHash; use serde::{Deserialize, Serialize}; use crate::env_validation; @@ -27,7 +27,7 @@ pub struct CliArgs { /// Fallback image digest when the approved-hashes file is absent #[arg(long, env = "DEFAULT_IMAGE_DIGEST")] - pub default_image_digest: MpcDockerImageHash, + pub default_image_digest: DockerSha256Digest, } #[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)] @@ -71,7 +71,7 @@ pub struct LauncherConfig { /// Maximum registry RPC attempts (from `RPC_MAX_ATTEMPTS`). pub rpc_max_attempts: u32, /// Optional hash override that bypasses registry lookup (from `MPC_HASH_OVERRIDE`). 
- pub mpc_hash_override: Option, + pub mpc_hash_override: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] From 4bde6c6521b77b9b4d71c8c36dadc1b9d60cd76b Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 16:30:52 +0100 Subject: [PATCH 29/82] cleanup --- Cargo.lock | 1 - crates/tee-launcher/Cargo.toml | 1 - crates/tee-launcher/src/error.rs | 31 ------------------------------- crates/tee-launcher/src/main.rs | 2 +- 4 files changed, 1 insertion(+), 34 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index eb2ea871a..91acd7b73 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10565,7 +10565,6 @@ dependencies = [ "hex", "itertools 0.14.0", "launcher-interface", - "mpc-primitives", "regex", "reqwest 0.12.28", "serde", diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index 8503826ae..f43e19bd8 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -17,7 +17,6 @@ bounded-collections = { workspace = true } clap = { workspace = true } dstack-sdk = { workspace = true } hex = { workspace = true } -mpc-primitives = { workspace = true } launcher-interface = { workspace = true } itertools = { workspace = true } regex = { workspace = true } diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index 6904bed09..1409f048e 100644 --- a/crates/tee-launcher/src/error.rs +++ b/crates/tee-launcher/src/error.rs @@ -1,27 +1,11 @@ -use std::path::PathBuf; - use launcher_interface::types::DockerSha256Digest; -use mpc_primitives::hash::DockerSha256Digest; use thiserror::Error; -use url::Url; #[derive(Error, Debug)] pub enum LauncherError { - #[error("PLATFORM=TEE requires dstack unix socket at {0}")] - DstackSocketMissing(String), - - #[error("GetQuote failed before extending RTMR3: {0}")] - DstackGetQuoteFailed(String), - #[error("EmitEvent failed while extending RTMR3: {0}")] DstackEmitEventFailed(String), - #[error("DEFAULT_IMAGE_DIGEST invalid: {0}")] - InvalidDefaultDigest(String), - 
- #[error("Invalid JSON in {path}: approved_hashes missing or empty")] - InvalidApprovedHashes { path: String }, - #[error("MPC_HASH_OVERRIDE invalid: {0}")] InvalidHashOverride(String), @@ -31,15 +15,6 @@ pub enum LauncherError { #[error("Failed to get auth token from registry: {0}")] RegistryAuthFailed(String), - #[error("Failed to get successful response from {url} after {attempts} attempts")] - RegistryRequestFailed { url: Url, attempts: u32 }, - - #[error("Digest mismatch: pulled {pulled} != expected {expected}")] - DigestMismatch { pulled: String, expected: String }, - - #[error("MPC image hash validation failed: {0}")] - ImageValidationFailed(String), - #[error("docker run failed for validated hash")] DockerRunFailed { image_hash: DockerSha256Digest, @@ -73,12 +48,6 @@ pub enum LauncherError { source: serde_json::Error, }, - #[error("Required environment variable not set: {0}")] - MissingEnvVar(String), - - #[error("Invalid value for {key}: {value}")] - InvalidEnvVar { key: String, value: String }, - #[error("HTTP error: {0}")] Http(#[from] reqwest::Error), diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index e8debf7a2..0bc11d511 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -383,7 +383,7 @@ fn build_docker_cmd( "--name".into(), MPC_CONTAINER_NAME.into(), "--detach".into(), - format!("{image_digest}"), + image_digest.to_string(), ]); let docker_command_string = cmd.join(" "); From 40289e10f263b6a5fd2943f1fefed31ec3b8bec0 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 16:50:41 +0100 Subject: [PATCH 30/82] fix bugs --- crates/tee-launcher/src/error.rs | 5 ++++- crates/tee-launcher/src/main.rs | 19 +++++++++++++------ 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index 1409f048e..812f87509 100644 --- a/crates/tee-launcher/src/error.rs +++ b/crates/tee-launcher/src/error.rs @@ -22,7 
+22,10 @@ pub enum LauncherError { }, #[error("docker run failed for validated hash")] - DockerRunFailedExitStatus { image_hash: DockerSha256Digest }, + DockerRunFailedExitStatus { + image_hash: DockerSha256Digest, + output: String, + }, #[error("Too many env vars to pass through (>{0})")] TooManyEnvVars(usize), diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 0bc11d511..a64a75f09 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -1,3 +1,5 @@ +// A rewrite of launcher.py + use std::process::Command; use std::{collections::VecDeque, time::Duration}; @@ -311,7 +313,7 @@ async fn validate_image_hash( let docker_inspect_failed = !inspect.status.success(); if docker_inspect_failed { - return Err(ImageDigestValidationFailed::DockerPullFailed( + return Err(ImageDigestValidationFailed::DockerInspectFailed( "docker inspect terminated with unsuccessful status".to_string(), )); } @@ -334,13 +336,13 @@ async fn validate_image_hash( Ok(()) } -fn build_docker_cmd( +fn docker_run_args( platform: Platform, mpc_config: &MpcBinaryConfig, docker_flags: &DockerLaunchFlags, image_digest: &DockerSha256Digest, ) -> Result, LauncherError> { - let mut cmd: Vec = vec!["docker".into(), "run".into()]; + let mut cmd: Vec = vec![]; // Required environment variables cmd.extend([ @@ -410,10 +412,11 @@ fn launch_mpc_container( .args(["rm", "-f", MPC_CONTAINER_NAME]) .output(); - let docker_cmd = build_docker_cmd(platform, mpc_config, docker_flags, valid_hash)?; + let docker_run_args = docker_run_args(platform, mpc_config, docker_flags, valid_hash)?; - let run_output = Command::new(&docker_cmd[0]) - .args(&docker_cmd[1..]) + let run_output = Command::new("docker") + .arg("run") + .args(&docker_run_args) .output() .map_err(|inner| LauncherError::DockerRunFailed { image_hash: valid_hash.clone(), @@ -421,8 +424,12 @@ fn launch_mpc_container( })?; if !run_output.status.success() { + let stderr = 
String::from_utf8_lossy(&run_output.stderr); + let stdout = String::from_utf8_lossy(&run_output.stdout); + tracing::error!(%stderr, %stdout, "docker run failed"); return Err(LauncherError::DockerRunFailedExitStatus { image_hash: valid_hash.clone(), + output: stderr.into_owned(), }); } From 110d9857ca96b6485823d70ca9319fa4548cc961 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 16:50:49 +0100 Subject: [PATCH 31/82] remove dead constants --- crates/tee-launcher/src/contants.rs | 26 -------------------------- 1 file changed, 26 deletions(-) diff --git a/crates/tee-launcher/src/contants.rs b/crates/tee-launcher/src/contants.rs index cf5a62405..af89e71b3 100644 --- a/crates/tee-launcher/src/contants.rs +++ b/crates/tee-launcher/src/contants.rs @@ -2,29 +2,3 @@ pub(crate) const MPC_CONTAINER_NAME: &str = "mpc-node"; pub(crate) const IMAGE_DIGEST_FILE: &str = "/mnt/shared/image-digest.bin"; pub(crate) const DSTACK_UNIX_SOCKET: &str = "/var/run/dstack.sock"; pub(crate) const DSTACK_USER_CONFIG_FILE: &str = "/tapp/user_config"; - -// pub(crate) const SHA256_PREFIX: &str = "sha256:"; - -// // Docker Hub defaults -// pub(crate) const DEFAULT_RPC_REQUEST_TIMEOUT_SECS: f64 = 10.0; -// pub(crate) const DEFAULT_RPC_REQUEST_INTERVAL_SECS: f64 = 1.0; -// pub(crate) const DEFAULT_RPC_MAX_ATTEMPTS: u32 = 20; - -// pub(crate) const DEFAULT_MPC_IMAGE_NAME: &str = "nearone/mpc-node"; -// pub(crate) const DEFAULT_MPC_REGISTRY: &str = "registry.hub.docker.com"; -// pub(crate) const DEFAULT_MPC_IMAGE_TAG: &str = "latest"; - -// // Env var names -// pub(crate) const ENV_VAR_MPC_HASH_OVERRIDE: &str = "MPC_HASH_OVERRIDE"; -// pub(crate) const ENV_VAR_RPC_REQUEST_TIMEOUT_SECS: &str = "RPC_REQUEST_TIMEOUT_SECS"; -// pub(crate) const ENV_VAR_RPC_REQUEST_INTERVAL_SECS: &str = "RPC_REQUEST_INTERVAL_SECS"; -// pub(crate) const ENV_VAR_RPC_MAX_ATTEMPTS: &str = "RPC_MAX_ATTEMPTS"; - -// pub(crate) const DSTACK_USER_CONFIG_MPC_IMAGE_TAGS: &str = "MPC_IMAGE_TAGS"; -// pub(crate) 
const DSTACK_USER_CONFIG_MPC_IMAGE_NAME: &str = "MPC_IMAGE_NAME"; -// pub(crate) const DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY: &str = "MPC_REGISTRY"; - -// // Security limits -// pub(crate) const MAX_PASSTHROUGH_ENV_VARS: usize = 64; -// pub(crate) const MAX_ENV_VALUE_LEN: usize = 1024; -// pub(crate) const MAX_TOTAL_ENV_BYTES: usize = 32 * 1024; From 58b44219689d5abbf6c41e1a364e8342052675ac Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 16:55:24 +0100 Subject: [PATCH 32/82] rename to constants --- crates/tee-launcher/src/{contants.rs => constants.rs} | 0 crates/tee-launcher/src/main.rs | 4 ++-- 2 files changed, 2 insertions(+), 2 deletions(-) rename crates/tee-launcher/src/{contants.rs => constants.rs} (100%) diff --git a/crates/tee-launcher/src/contants.rs b/crates/tee-launcher/src/constants.rs similarity index 100% rename from crates/tee-launcher/src/contants.rs rename to crates/tee-launcher/src/constants.rs diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index a64a75f09..db38cdf57 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -8,14 +8,14 @@ use clap::Parser; use launcher_interface::MPC_IMAGE_HASH_EVENT; use launcher_interface::types::{ApprovedHashesFile, DockerSha256Digest}; -use contants::*; +use constants::*; use docker_types::*; use error::*; use reqwest::header::{ACCEPT, AUTHORIZATION, HeaderMap, HeaderValue}; use types::*; use url::Url; -mod contants; +mod constants; mod docker_types; mod env_validation; mod error; From 6b78e6723660d4686234b154883a09580f291282 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 17:50:16 +0100 Subject: [PATCH 33/82] update image hashes watcher tests --- .../src/tee/allowed_image_hashes_watcher.rs | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/crates/node/src/tee/allowed_image_hashes_watcher.rs b/crates/node/src/tee/allowed_image_hashes_watcher.rs index b5501038d..32bc37b62 
100644 --- a/crates/node/src/tee/allowed_image_hashes_watcher.rs +++ b/crates/node/src/tee/allowed_image_hashes_watcher.rs @@ -1,6 +1,7 @@ +use bounded_collections::NonEmptyVec; use derive_more::From; use itertools::Itertools; -use launcher_interface::types::ApprovedHashesFile; +use launcher_interface::types::{ApprovedHashesFile, DockerSha256Digest}; use mpc_contract::tee::proposal::MpcDockerImageHash; use std::{future::Future, io, panic, path::PathBuf}; use thiserror::Error; @@ -21,7 +22,7 @@ use mockall::automock; pub trait AllowedImageHashesStorage { fn set( &mut self, - approved_hashes: &[MpcDockerImageHash], + approved_hashes: NonEmptyVec, ) -> impl Future> + Send; } @@ -31,7 +32,10 @@ pub struct AllowedImageHashesFile { } impl AllowedImageHashesStorage for AllowedImageHashesFile { - async fn set(&mut self, approved_hashes: &[MpcDockerImageHash]) -> Result<(), io::Error> { + async fn set( + &mut self, + approved_hashes: NonEmptyVec, + ) -> Result<(), io::Error> { tracing::info!( ?self.file_path, len = approved_hashes.len(), @@ -39,7 +43,7 @@ impl AllowedImageHashesStorage for AllowedImageHashesFile { ); let approved_hashes = ApprovedHashesFile { - approved_hashes: approved_hashes.to_vec(), + approved_hashes: approved_hashes.mapped(DockerSha256Digest::from), }; let json = serde_json::to_string_pretty(&approved_hashes) @@ -171,13 +175,13 @@ where let allowed_hashes = self.allowed_hashes_in_contract.borrow_and_update().clone(); - if allowed_hashes.is_empty() { - tracing::warn!("Indexer provided an empty list of allowed image hashes."); + let Ok(allowed_hashes) = NonEmptyVec::from_vec(allowed_hashes) else { + tracing::warn!("indexer provided an empty list of allowed image hashes."); return Ok(()); - } + }; // Write all hashes, newest-first (as provided by contract) - self.image_hash_storage.set(&allowed_hashes).await?; + self.image_hash_storage.set(allowed_hashes.clone()).await?; let running_image_is_not_allowed = 
!allowed_hashes.iter().contains(&self.current_image); @@ -222,10 +226,12 @@ mod tests { #[rstest] #[tokio::test] async fn test_allowed_image_hash_list_is_written() { - let allowed_images = vec![image_hash_1(), image_hash_2(), image_hash_3()]; - for current_hash in &allowed_images[..2] { + let allowed_images: NonEmptyVec<_> = + NonEmptyVec::from_vec(vec![image_hash_1(), image_hash_2(), image_hash_3()]).unwrap(); + + for current_hash in allowed_images.iter().take(2) { let cancellation_token = CancellationToken::new(); - let (sender, receiver) = watch::channel(allowed_images.clone()); + let (sender, receiver) = watch::channel(allowed_images.clone().to_vec()); let (sender_shutdown, mut receiver_shutdown) = mpsc::channel(1); let write_is_called = Arc::new(Notify::new()); @@ -317,6 +323,7 @@ mod tests { let allowed_image = image_hash_2(); let allowed_list = vec![allowed_image.clone()]; + let expected_non_empty = NonEmptyVec::from_vec(allowed_list.clone()).unwrap(); let cancellation_token = CancellationToken::new(); let (_sender, receiver) = watch::channel(allowed_list.clone()); @@ -330,7 +337,7 @@ mod tests { storage_mock .expect_set() .once() - .with(predicate::eq(allowed_list.clone())) + .with(predicate::eq(expected_non_empty.clone())) .returning(move |_| { write_is_called.notify_one(); Box::pin(async { Ok(()) }) @@ -380,7 +387,7 @@ mod tests { let mut storage_mock = MockAllowedImageHashesStorage::new(); { - let expected = allowed_images.clone(); + let expected = NonEmptyVec::from_vec(allowed_images.clone()).unwrap(); storage_mock .expect_set() @@ -433,7 +440,7 @@ mod tests { // Mock storage expecting exactly the full list let mut storage_mock = MockAllowedImageHashesStorage::new(); { - let expected = full_list.clone(); + let expected = NonEmptyVec::from_vec(full_list.clone()).unwrap(); let write_is_called = write_is_called.clone(); storage_mock @@ -498,11 +505,4 @@ mod tests { "Shutdown should NOT be sent when list is empty" ); } - - #[test] - fn 
test_json_key_matches_launcher() { - // important: must stay aligned with the launcher implementation in: - // mpc/tee_launcher/launcher.py - assert_eq!(JSON_KEY_APPROVED_HASHES, "approved_hashes"); - } } From 8294a991df12d1b83373434059a38958a9644099 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 21:35:14 +0100 Subject: [PATCH 34/82] cargo clippy on launcher tests --- Cargo.lock | 1 + crates/tee-launcher/Cargo.toml | 1 + crates/tee-launcher/src/env_validation.rs | 105 +++++++++++++--------- 3 files changed, 65 insertions(+), 42 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 91acd7b73..27d1c2a05 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10567,6 +10567,7 @@ dependencies = [ "launcher-interface", "regex", "reqwest 0.12.28", + "rstest", "serde", "serde_json", "tempfile", diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index f43e19bd8..9dd627211 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -31,6 +31,7 @@ url = { workspace = true, features = ["serde"] } [dev-dependencies] assert_matches = { workspace = true } +rstest = { workspace = true } tempfile = { workspace = true } [lints] diff --git a/crates/tee-launcher/src/env_validation.rs b/crates/tee-launcher/src/env_validation.rs index 9df1d3656..96a3af3cb 100644 --- a/crates/tee-launcher/src/env_validation.rs +++ b/crates/tee-launcher/src/env_validation.rs @@ -89,74 +89,95 @@ pub(crate) fn validate_env_value( #[cfg(test)] mod tests { - use super::*; + use assert_matches::assert_matches; + use rstest::rstest; - // -- Key validation tests -- + use super::*; - #[test] - fn key_allows_mpc_prefix_uppercase() { - assert!(validate_env_key("MPC_FOO").is_ok()); - assert!(validate_env_key("MPC_FOO_123").is_ok()); - assert!(validate_env_key("MPC_A_B_C").is_ok()); + #[rstest] + #[case("MPC_FOO")] + #[case("MPC_FOO_123")] + #[case("MPC_A_B_C")] + fn key_allows_mpc_prefix_uppercase(#[case] key: &str) { + 
assert_matches!(validate_env_key(key), Ok(_)); } - #[test] - fn key_rejects_lowercase_or_invalid_format() { - assert!(validate_env_key("MPC_foo").is_err()); - assert!(validate_env_key("MPC-FOO").is_err()); - assert!(validate_env_key("MPC.FOO").is_err()); - assert!(validate_env_key("MPC_").is_err()); + #[rstest] + #[case("MPC_foo")] + #[case("MPC-FOO")] + #[case("MPC.FOO")] + #[case("MPC_")] + fn key_rejects_lowercase_or_invalid_format(#[case] key: &str) { + assert_matches!(validate_env_key(key), Err(_)); } - #[test] - fn key_allows_compat_non_mpc_keys() { - assert!(validate_env_key("RUST_LOG").is_ok()); - assert!(validate_env_key("RUST_BACKTRACE").is_ok()); - assert!(validate_env_key("NEAR_BOOT_NODES").is_ok()); + #[rstest] + #[case("RUST_LOG")] + #[case("RUST_BACKTRACE")] + #[case("NEAR_BOOT_NODES")] + fn key_allows_compat_non_mpc_keys(#[case] key: &str) { + assert_matches!(validate_env_key(key), Ok(_)); } - #[test] - fn key_denies_sensitive_keys() { - assert!(validate_env_key("MPC_P2P_PRIVATE_KEY").is_err()); - assert!(validate_env_key("MPC_ACCOUNT_SK").is_err()); + #[rstest] + #[case("MPC_P2P_PRIVATE_KEY")] + #[case("MPC_ACCOUNT_SK")] + fn key_denies_sensitive_keys(#[case] key: &str) { + assert_matches!(validate_env_key(key), Err(_)); } - #[test] - fn key_rejects_unknown_non_mpc_key() { - assert!(validate_env_key("BAD_KEY").is_err()); - assert!(validate_env_key("HOME").is_err()); + #[rstest] + #[case("BAD_KEY")] + #[case("HOME")] + fn key_rejects_unknown_non_mpc_key(#[case] key: &str) { + assert_matches!(validate_env_key(key), Err(_)); } - // -- Value validation tests -- + #[rstest] + #[case("ok\nno")] + #[case("ok\rno")] + fn value_rejects_control_chars(#[case] value: &str) { + assert_matches!(validate_env_value("K", value), Err(_)); + } #[test] - fn value_rejects_control_chars() { - assert!(validate_env_value("K", "ok\nno").is_err()); - assert!(validate_env_value("K", "ok\rno").is_err()); - assert!(validate_env_value("K", &format!("a{}b", '\x1F')).is_err()); + 
fn value_rejects_control_char_unit_separator() { + assert_matches!(validate_env_value("K", &format!("a{}b", '\x1F')), Err(_)); } #[test] fn value_allows_tab() { - assert!(validate_env_value("K", "a\tb").is_ok()); + assert_matches!(validate_env_value("K", "a\tb"), Ok(_)); } - #[test] - fn value_rejects_ld_preload() { - assert!(validate_env_value("K", "LD_PRELOAD=/tmp/x.so").is_err()); - assert!(validate_env_value("K", "foo LD_PRELOAD bar").is_err()); + #[rstest] + #[case("LD_PRELOAD=/tmp/x.so")] + #[case("foo LD_PRELOAD bar")] + fn value_rejects_ld_preload(#[case] value: &str) { + assert_matches!(validate_env_value("K", value), Err(_)); } #[test] fn value_rejects_too_long() { - assert!(validate_env_value("K", &"a".repeat(MAX_ENV_VALUE_LEN + 1)).is_err()); - assert!(validate_env_value("K", &"a".repeat(MAX_ENV_VALUE_LEN)).is_ok()); + assert_matches!( + validate_env_value("K", &"a".repeat(MAX_ENV_VALUE_LEN + 1)), + Err(_) + ); } #[test] - fn value_accepts_normal() { - assert!(validate_env_value("K", "hello-world").is_ok()); - assert!(validate_env_value("K", "192.168.1.1").is_ok()); - assert!(validate_env_value("K", "info,mpc_node=debug").is_ok()); + fn value_accepts_at_length_limit() { + assert_matches!( + validate_env_value("K", &"a".repeat(MAX_ENV_VALUE_LEN)), + Ok(_) + ); + } + + #[rstest] + #[case("hello-world")] + #[case("192.168.1.1")] + #[case("info,mpc_node=debug")] + fn value_accepts_normal(#[case] value: &str) { + assert_matches!(validate_env_value("K", value), Ok(_)); } } From 86811bdc16b19ee6f4a40cf87d7596d5f3098b56 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 21:35:43 +0100 Subject: [PATCH 35/82] cargo clippy launcher interface tests --- Cargo.lock | 1 + crates/launcher-interface/Cargo.toml | 1 + crates/launcher-interface/src/lib.rs | 36 +++++++++------------------- 3 files changed, 13 insertions(+), 25 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 27d1c2a05..eb06ae572 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4929,6 
+4929,7 @@ dependencies = [ name = "launcher-interface" version = "3.5.1" dependencies = [ + "assert_matches", "bounded-collections", "derive_more 2.1.1", "insta", diff --git a/crates/launcher-interface/Cargo.toml b/crates/launcher-interface/Cargo.toml index 09d5e0063..56f46ec29 100644 --- a/crates/launcher-interface/Cargo.toml +++ b/crates/launcher-interface/Cargo.toml @@ -13,6 +13,7 @@ thiserror = { workspace = true } [dev-dependencies] +assert_matches = { workspace = true } insta = { workspace = true } serde_json = { workspace = true } diff --git a/crates/launcher-interface/src/lib.rs b/crates/launcher-interface/src/lib.rs index 1bcb17e16..0d6fd5cb4 100644 --- a/crates/launcher-interface/src/lib.rs +++ b/crates/launcher-interface/src/lib.rs @@ -79,7 +79,9 @@ mod paths {} #[cfg(test)] mod tests { - use super::types::{ApprovedHashesFile, DockerSha256Digest}; + use assert_matches::assert_matches; + + use super::types::{ApprovedHashesFile, DockerSha256Digest, DockerDigestParseError}; use mpc_primitives::hash::MpcDockerImageHash; fn sample_digest() -> DockerSha256Digest { @@ -110,29 +112,22 @@ mod tests { let json = serde_json::json!( "abababababababababababababababababababababababababababababababababab" ); - let result = serde_json::from_value::(json); - assert!(result.is_err()); - assert!( - result - .unwrap_err() - .to_string() - .contains("missing sha256: prefix"), - "error should mention missing prefix" + assert_matches!( + serde_json::from_value::(json), + Err(ref e) if e.to_string().contains("missing sha256: prefix") ); } #[test] fn deserialize_rejects_invalid_hex() { let json = serde_json::json!("sha256:not_valid_hex!"); - let result = serde_json::from_value::(json); - assert!(result.is_err()); + assert_matches!(serde_json::from_value::(json), Err(_)); } #[test] fn deserialize_rejects_wrong_length() { let json = serde_json::json!("sha256:abab"); - let result = serde_json::from_value::(json); - assert!(result.is_err()); + 
assert_matches!(serde_json::from_value::(json), Err(_)); } #[test] @@ -152,28 +147,19 @@ mod tests { fn parse_rejects_missing_prefix() { let result = "abababababababababababababababababababababababababababababababababab" .parse::(); - assert!(matches!( - result, - Err(super::types::DockerDigestParseError::MissingPrefix) - )); + assert_matches!(result, Err(DockerDigestParseError::MissingPrefix)); } #[test] fn parse_rejects_invalid_hex() { let result = "sha256:not_valid_hex!".parse::(); - assert!(matches!( - result, - Err(super::types::DockerDigestParseError::InvalidHash(_)) - )); + assert_matches!(result, Err(DockerDigestParseError::InvalidHash(_))); } #[test] fn parse_rejects_wrong_length() { let result = "sha256:abab".parse::(); - assert!(matches!( - result, - Err(super::types::DockerDigestParseError::InvalidHash(_)) - )); + assert_matches!(result, Err(DockerDigestParseError::InvalidHash(_))); } #[test] From dcbe19a0fd31d00aa746568004356469870d3eee Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Thu, 5 Mar 2026 22:57:26 +0100 Subject: [PATCH 36/82] let claude add tests --- crates/tee-launcher/src/main.rs | 814 +++++++------------------------ crates/tee-launcher/src/types.rs | 419 ++++++++++++++++ 2 files changed, 583 insertions(+), 650 deletions(-) diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index db38cdf57..0e7de6ec6 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -437,653 +437,167 @@ fn launch_mpc_container( Ok(()) } -// #[cfg(test)] -// mod tests { -// use super::*; -// use assert_matches::assert_matches; -// use launcher_interface::types::ApprovedHashesFile; - -// // -- DstackUserConfig parsing tests ------------------------------------- - -// #[test] -// fn test_user_config_defaults_when_map_is_empty() { -// let config = user_config_from_map(BTreeMap::new()).unwrap(); -// assert_eq!(config.image_tags, vec![DEFAULT_MPC_IMAGE_TAG]); -// assert_eq!(config.image_name, 
DEFAULT_MPC_IMAGE_NAME); -// assert_eq!(config.registry, DEFAULT_MPC_REGISTRY); -// assert_eq!( -// config.rpc_request_timeout_secs, -// DEFAULT_RPC_REQUEST_TIMEOUT_SECS -// ); -// assert_eq!( -// config.rpc_request_interval_secs, -// DEFAULT_RPC_REQUEST_INTERVAL_SECS -// ); -// assert_eq!(config.rpc_max_attempts, DEFAULT_RPC_MAX_ATTEMPTS); -// assert!(config.mpc_hash_override.is_none()); -// assert!(config.passthrough_env.is_empty()); -// } - -// #[test] -// fn test_user_config_typed_fields_extracted_from_map() { -// let map = BTreeMap::from([ -// ( -// DSTACK_USER_CONFIG_MPC_IMAGE_TAGS.into(), -// "v1.0, v1.1".into(), -// ), -// (DSTACK_USER_CONFIG_MPC_IMAGE_NAME.into(), "my/image".into()), -// ( -// DSTACK_USER_CONFIG_MPC_IMAGE_REGISTRY.into(), -// "my.registry.io".into(), -// ), -// (ENV_VAR_RPC_REQUEST_TIMEOUT_SECS.into(), "30.0".into()), -// (ENV_VAR_RPC_MAX_ATTEMPTS.into(), "5".into()), -// ("MPC_ACCOUNT_ID".into(), "account.near".into()), -// ]); -// let config = user_config_from_map(map).unwrap(); -// assert_eq!(config.image_tags, vec!["v1.0", "v1.1"]); -// assert_eq!(config.image_name, "my/image"); -// assert_eq!(config.registry, "my.registry.io"); -// assert_eq!(config.rpc_request_timeout_secs, 30.0); -// assert_eq!(config.rpc_max_attempts, 5); -// // Launcher-only keys are NOT in passthrough_env -// assert!( -// !config -// .passthrough_env -// .contains_key(DSTACK_USER_CONFIG_MPC_IMAGE_TAGS) -// ); -// assert!( -// !config -// .passthrough_env -// .contains_key(ENV_VAR_RPC_MAX_ATTEMPTS) -// ); -// // Container passthrough keys ARE in passthrough_env -// assert_eq!( -// config.passthrough_env.get("MPC_ACCOUNT_ID").unwrap(), -// "account.near" -// ); -// } - -// #[test] -// fn test_user_config_malformed_rpc_fields_error() { -// let map = BTreeMap::from([(ENV_VAR_RPC_MAX_ATTEMPTS.into(), "not_a_number".into())]); -// let err = user_config_from_map(map).unwrap_err(); -// assert_matches!(err, LauncherError::InvalidEnvVar { key, .. 
} if key == ENV_VAR_RPC_MAX_ATTEMPTS); - -// let map = BTreeMap::from([(ENV_VAR_RPC_REQUEST_TIMEOUT_SECS.into(), "bad".into())]); -// let err = user_config_from_map(map).unwrap_err(); -// assert_matches!(err, LauncherError::InvalidEnvVar { key, .. } if key == ENV_VAR_RPC_REQUEST_TIMEOUT_SECS); - -// let map = BTreeMap::from([(ENV_VAR_RPC_REQUEST_INTERVAL_SECS.into(), "bad".into())]); -// let err = user_config_from_map(map).unwrap_err(); -// assert_matches!(err, LauncherError::InvalidEnvVar { key, .. } if key == ENV_VAR_RPC_REQUEST_INTERVAL_SECS); -// } - -// #[test] -// fn test_user_config_hash_override_extracted() { -// let map = BTreeMap::from([(ENV_VAR_MPC_HASH_OVERRIDE.into(), "sha256:abc".into())]); -// let config = user_config_from_map(map).unwrap(); -// assert_eq!(config.mpc_hash_override.unwrap(), "sha256:abc"); -// assert!( -// !config -// .passthrough_env -// .contains_key(ENV_VAR_MPC_HASH_OVERRIDE) -// ); -// } - -// #[test] -// fn test_parse_user_config_from_file() { -// let dir = tempfile::tempdir().unwrap(); -// let file = dir.path().join("user_config"); -// std::fs::write( -// &file, -// "# comment\nMPC_ACCOUNT_ID=test\nMPC_IMAGE_NAME=my/image\n", -// ) -// .unwrap(); -// let config = parse_user_config(file.to_str().unwrap()).unwrap(); -// assert_eq!(config.image_name, "my/image"); -// assert_eq!( -// config.passthrough_env.get("MPC_ACCOUNT_ID").unwrap(), -// "test" -// ); -// assert!(!config.passthrough_env.contains_key("MPC_IMAGE_NAME")); -// } - -// // -- Host/port validation tests ----------------------------------------- - -// #[test] -// fn test_valid_host_entry() { -// assert!(is_valid_host_entry("node.local:192.168.1.1")); -// assert!(!is_valid_host_entry("node.local:not-an-ip")); -// assert!(!is_valid_host_entry("--env LD_PRELOAD=hack.so")); -// } - -// #[test] -// fn test_valid_port_mapping() { -// assert!(is_valid_port_mapping("11780:11780")); -// assert!(!is_valid_port_mapping("65536:11780")); -// assert!(!is_valid_port_mapping("--volume 
/:/mnt")); -// } - -// // -- Security validation tests ------------------------------------------ - -// #[test] -// fn test_has_control_chars_rejects_newline_and_cr() { -// assert!(has_control_chars("a\nb")); -// assert!(has_control_chars("a\rb")); -// } - -// #[test] -// fn test_has_control_chars_allows_tab() { -// assert!(!has_control_chars("a\tb")); -// } - -// #[test] -// fn test_has_control_chars_rejects_other_control_chars() { -// assert!(has_control_chars(&format!("a{}b", '\x1F'))); -// } - -// #[test] -// fn test_is_safe_env_value_rejects_control_chars() { -// assert!(!is_safe_env_value("ok\nno")); -// assert!(!is_safe_env_value("ok\rno")); -// assert!(!is_safe_env_value(&format!("ok{}no", '\x1F'))); -// } - -// #[test] -// fn test_is_safe_env_value_rejects_ld_preload() { -// assert!(!is_safe_env_value("LD_PRELOAD=/tmp/x.so")); -// assert!(!is_safe_env_value("foo LD_PRELOAD bar")); -// } - -// #[test] -// fn test_is_safe_env_value_rejects_too_long() { -// assert!(!is_safe_env_value(&"a".repeat(MAX_ENV_VALUE_LEN + 1))); -// assert!(is_safe_env_value(&"a".repeat(MAX_ENV_VALUE_LEN))); -// } - -// #[test] -// fn test_is_allowed_container_env_key_allows_mpc_prefix_uppercase() { -// assert!(is_allowed_container_env_key("MPC_FOO")); -// assert!(is_allowed_container_env_key("MPC_FOO_123")); -// assert!(is_allowed_container_env_key("MPC_A_B_C")); -// } - -// #[test] -// fn test_is_allowed_container_env_key_rejects_lowercase_or_invalid() { -// assert!(!is_allowed_container_env_key("MPC_foo")); -// assert!(!is_allowed_container_env_key("MPC-FOO")); -// assert!(!is_allowed_container_env_key("MPC.FOO")); -// assert!(!is_allowed_container_env_key("MPC_")); -// } - -// #[test] -// fn test_is_allowed_container_env_key_allows_compat_non_mpc_keys() { -// assert!(is_allowed_container_env_key("RUST_LOG")); -// assert!(is_allowed_container_env_key("RUST_BACKTRACE")); -// assert!(is_allowed_container_env_key("NEAR_BOOT_NODES")); -// } - -// #[test] -// fn 
test_is_allowed_container_env_key_denies_sensitive_keys() { -// assert!(!is_allowed_container_env_key("MPC_P2P_PRIVATE_KEY")); -// assert!(!is_allowed_container_env_key("MPC_ACCOUNT_SK")); -// } - -// // -- Docker cmd builder tests ------------------------------------------- - -// fn make_digest() -> String { -// format!("sha256:{}", "a".repeat(64)) -// } - -// fn base_env() -> BTreeMap { -// BTreeMap::from([ -// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), -// ("MPC_CONTRACT_ID".into(), "contract.near".into()), -// ("MPC_ENV".into(), "testnet".into()), -// ("MPC_HOME_DIR".into(), "/data".into()), -// ("NEAR_BOOT_NODES".into(), "boot1,boot2".into()), -// ("RUST_LOG".into(), "info".into()), -// ]) -// } - -// #[test] -// fn test_build_docker_cmd_sanitizes_ports_and_hosts() { -// let env = BTreeMap::from([ -// ("PORTS".into(), "11780:11780,--env BAD=1".into()), -// ( -// "EXTRA_HOSTS".into(), -// "node:192.168.1.1,--volume /:/mnt".into(), -// ), -// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), -// ]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); - -// assert!(cmd.contains(&"MPC_ACCOUNT_ID=mpc-user-123".to_string())); -// assert!(cmd.contains(&"11780:11780".to_string())); -// assert!(cmd.contains(&"node:192.168.1.1".to_string())); -// // Injection strings filtered -// assert!(!cmd.iter().any(|arg| arg.contains("BAD=1"))); -// assert!(!cmd.iter().any(|arg| arg.contains("/:/mnt"))); -// } - -// #[test] -// fn test_extra_hosts_does_not_allow_ld_preload() { -// let env = BTreeMap::from([ -// ( -// "EXTRA_HOSTS".into(), -// "host:1.2.3.4,--env LD_PRELOAD=/evil.so".into(), -// ), -// ("MPC_ACCOUNT_ID".into(), "safe".into()), -// ]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!(cmd.contains(&"host:1.2.3.4".to_string())); -// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); -// } - -// #[test] -// fn test_ports_does_not_allow_volume_injection() { -// let env = BTreeMap::from([ 
-// ("PORTS".into(), "2200:2200,--volume /:/mnt".into()), -// ("MPC_ACCOUNT_ID".into(), "safe".into()), -// ]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!(cmd.contains(&"2200:2200".to_string())); -// assert!(!cmd.iter().any(|arg| arg.contains("/:/mnt"))); -// } - -// #[test] -// fn test_invalid_env_key_is_ignored() { -// let env = BTreeMap::from([ -// ("BAD_KEY".into(), "should_not_be_used".into()), -// ("MPC_ACCOUNT_ID".into(), "safe".into()), -// ]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!(!cmd.join(" ").contains("should_not_be_used")); -// assert!(cmd.contains(&"MPC_ACCOUNT_ID=safe".to_string())); -// } - -// #[test] -// fn test_mpc_backup_encryption_key_is_allowed() { -// let env = BTreeMap::from([("MPC_BACKUP_ENCRYPTION_KEY_HEX".into(), "0".repeat(64))]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!( -// cmd.join(" ") -// .contains(&format!("MPC_BACKUP_ENCRYPTION_KEY_HEX={}", "0".repeat(64))) -// ); -// } - -// #[test] -// fn test_malformed_extra_host_is_ignored() { -// let env = BTreeMap::from([ -// ( -// "EXTRA_HOSTS".into(), -// "badhostentry,no-colon,also--bad".into(), -// ), -// ("MPC_ACCOUNT_ID".into(), "safe".into()), -// ]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!(!cmd.contains(&"--add-host".to_string())); -// } - -// #[test] -// fn test_env_value_with_shell_injection_is_handled_safely() { -// let env = BTreeMap::from([("MPC_ACCOUNT_ID".into(), "safe; rm -rf /".into())]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!(cmd.contains(&"MPC_ACCOUNT_ID=safe; rm -rf /".to_string())); -// } - -// #[test] -// fn test_build_docker_cmd_nontee_no_dstack_mount() { -// let mut env = BTreeMap::new(); -// env.insert("MPC_ACCOUNT_ID".into(), "x".into()); -// let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); -// 
let s = cmd.join(" "); -// assert!(!s.contains("DSTACK_ENDPOINT=")); -// assert!(!s.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); -// } - -// #[test] -// fn test_build_docker_cmd_tee_has_dstack_mount() { -// let env = BTreeMap::from([("MPC_ACCOUNT_ID".into(), "x".into())]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// let s = cmd.join(" "); -// assert!(s.contains(&format!("DSTACK_ENDPOINT={DSTACK_UNIX_SOCKET}"))); -// assert!(s.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); -// } - -// #[test] -// fn test_build_docker_cmd_allows_arbitrary_mpc_prefix_env_vars() { -// let mut env = base_env(); -// env.insert("MPC_NEW_FEATURE_FLAG".into(), "1".into()); -// env.insert("MPC_SOME_CONFIG".into(), "value".into()); -// let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); -// let cmd_str = cmd.join(" "); -// assert!(cmd_str.contains("MPC_NEW_FEATURE_FLAG=1")); -// assert!(cmd_str.contains("MPC_SOME_CONFIG=value")); -// } - -// #[test] -// fn test_build_docker_cmd_blocks_sensitive_mpc_private_keys() { -// let mut env = base_env(); -// env.insert("MPC_P2P_PRIVATE_KEY".into(), "supersecret".into()); -// env.insert("MPC_ACCOUNT_SK".into(), "supersecret2".into()); -// let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); -// let cmd_str = cmd.join(" "); -// assert!(!cmd_str.contains("MPC_P2P_PRIVATE_KEY")); -// assert!(!cmd_str.contains("MPC_ACCOUNT_SK")); -// } - -// #[test] -// fn test_build_docker_cmd_rejects_env_value_with_newline() { -// let mut env = base_env(); -// env.insert("MPC_NEW_FEATURE_FLAG".into(), "ok\nbad".into()); -// let cmd = build_docker_cmd(Platform::NonTee, &env, &make_digest()).unwrap(); -// let cmd_str = cmd.join(" "); -// assert!(!cmd_str.contains("MPC_NEW_FEATURE_FLAG")); -// } - -// #[test] -// fn test_build_docker_cmd_enforces_max_env_count_cap() { -// let mut env = base_env(); -// for i in 0..=MAX_PASSTHROUGH_ENV_VARS { -// 
env.insert(format!("MPC_X_{i}"), "1".into()); -// } -// let result = build_docker_cmd(Platform::NonTee, &env, &make_digest()); -// assert_matches!(result, Err(LauncherError::TooManyEnvVars(_))); -// } - -// #[test] -// fn test_build_docker_cmd_enforces_total_env_bytes_cap() { -// let mut env = base_env(); -// for i in 0..40 { -// env.insert(format!("MPC_BIG_{i}"), "a".repeat(MAX_ENV_VALUE_LEN)); -// } -// let result = build_docker_cmd(Platform::NonTee, &env, &make_digest()); -// assert_matches!(result, Err(LauncherError::EnvPayloadTooLarge(_))); -// } - -// // -- LD_PRELOAD injection tests ----------------------------------------- - -// #[test] -// fn test_ld_preload_injection_blocked_via_env_key() { -// let env = BTreeMap::from([ -// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), -// ("--env LD_PRELOAD".into(), "/path/to/my/malloc.so".into()), -// ]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); -// } - -// #[test] -// fn test_ld_preload_injection_blocked_via_extra_hosts() { -// let env = BTreeMap::from([ -// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), -// ( -// "EXTRA_HOSTS".into(), -// "host1:192.168.0.1,host2:192.168.0.2,--env LD_PRELOAD=/path/to/my/malloc.so".into(), -// ), -// ]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!(cmd.contains(&"--add-host".to_string())); -// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); -// } - -// #[test] -// fn test_ld_preload_injection_blocked_via_ports() { -// let env = BTreeMap::from([ -// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), -// ( -// "PORTS".into(), -// "11780:11780,--env LD_PRELOAD=/path/to/my/malloc.so".into(), -// ), -// ]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!(cmd.contains(&"-p".to_string())); -// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); -// } - -// #[test] -// fn 
test_ld_preload_injection_blocked_via_mpc_account_id() { -// let env = BTreeMap::from([ -// ( -// "MPC_ACCOUNT_ID".into(), -// "mpc-user-123, --env LD_PRELOAD=/path/to/my/malloc.so".into(), -// ), -// ( -// "EXTRA_HOSTS".into(), -// "host1:192.168.0.1,host2:192.168.0.2".into(), -// ), -// ]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); -// } - -// #[test] -// fn test_ld_preload_injection_blocked_via_dash_e() { -// let env = BTreeMap::from([ -// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), -// ("-e LD_PRELOAD".into(), "/path/to/my/malloc.so".into()), -// ]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); -// } - -// #[test] -// fn test_ld_preload_injection_blocked_via_extra_hosts_dash_e() { -// let env = BTreeMap::from([ -// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), -// ( -// "EXTRA_HOSTS".into(), -// "host1:192.168.0.1,host2:192.168.0.2,-e LD_PRELOAD=/path/to/my/malloc.so".into(), -// ), -// ]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!(cmd.contains(&"--add-host".to_string())); -// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); -// } - -// #[test] -// fn test_ld_preload_injection_blocked_via_ports_dash_e() { -// let env = BTreeMap::from([ -// ("MPC_ACCOUNT_ID".into(), "mpc-user-123".into()), -// ( -// "PORTS".into(), -// "11780:11780,-e LD_PRELOAD=/path/to/my/malloc.so".into(), -// ), -// ]); -// let cmd = build_docker_cmd(Platform::Tee, &env, &make_digest()).unwrap(); -// assert!(cmd.contains(&"-p".to_string())); -// assert!(!cmd.iter().any(|arg| arg.contains("LD_PRELOAD"))); -// } - -// // -- Hash selection tests ----------------------------------------------- - -// fn make_digest_json(hashes: &[&str]) -> String { -// serde_json::json!({"approved_hashes": hashes}).to_string() -// } - -// #[test] -// 
fn test_override_present() { -// let dir = tempfile::tempdir().unwrap(); -// let file = dir.path().join("image-digest.bin"); -// let override_value = format!("sha256:{}", "a".repeat(64)); -// let approved = vec![ -// format!("sha256:{}", "b".repeat(64)), -// override_value.clone(), -// format!("sha256:{}", "c".repeat(64)), -// ]; -// let json = serde_json::json!({"approved_hashes": approved}).to_string(); -// std::fs::write(&file, &json).unwrap(); - -// // We can't easily override IMAGE_DIGEST_FILE constant, so test load_and_select_hash -// // by creating a standalone test that reads from a custom path. -// // Instead test the core logic directly: -// let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); -// assert!(data.approved_hashes.contains(&override_value)); - -// // The override is in the approved list, so it should be valid -// assert!(is_valid_sha256_digest(&override_value)); -// assert!(data.approved_hashes.contains(&override_value)); -// } - -// #[test] -// fn test_override_not_in_list() { -// let approved = vec!["sha256:aaa", "sha256:bbb"]; -// let json = make_digest_json(&approved); -// let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); -// let override_hash = "sha256:xyz"; -// assert!(!data.approved_hashes.contains(&override_hash.to_string())); -// } - -// #[test] -// fn test_no_override_picks_newest() { -// let approved = vec!["sha256:newest", "sha256:older", "sha256:oldest"]; -// let json = make_digest_json(&approved); -// let data: ApprovedHashesFile = serde_json::from_str(&json).unwrap(); -// assert_eq!(data.approved_hashes[0], "sha256:newest"); -// } - -// #[test] -// fn test_json_key_matches_node() { -// // Must stay aligned with crates/node/src/tee/allowed_image_hashes_watcher.rs -// let json = r#"{"approved_hashes": ["sha256:abc"]}"#; -// let data: ApprovedHashesFile = serde_json::from_str(json).unwrap(); -// assert_eq!(data.approved_hashes.len(), 1); -// } - -// #[test] -// fn test_get_bare_digest() { -// 
assert_eq!( -// get_bare_digest(&format!("sha256:{}", "a".repeat(64))).unwrap(), -// "a".repeat(64) -// ); -// get_bare_digest("invalid").unwrap_err(); -// } - -// #[test] -// fn test_is_valid_sha256_digest() { -// assert!(is_valid_sha256_digest(&format!( -// "sha256:{}", -// "a".repeat(64) -// ))); -// assert!(!is_valid_sha256_digest("sha256:tooshort")); -// assert!(!is_valid_sha256_digest("not-a-digest")); -// // hex::decode accepts uppercase; as_hex() normalizes to lowercase -// assert!(is_valid_sha256_digest(&format!( -// "sha256:{}", -// "A".repeat(64) -// ))); -// } - -// #[test] -// fn test_parse_image_digest_normalizes_case() { -// let upper = format!("sha256:{}", "AB".repeat(32)); -// let hash = parse_image_digest(&upper).unwrap(); -// assert_eq!(hash.as_hex(), "ab".repeat(32)); -// } - -// // -- Full flow docker cmd test ------------------------------------------ - -// #[test] -// fn test_parse_and_build_docker_cmd_full_flow() { -// let dir = tempfile::tempdir().unwrap(); -// let file = dir.path().join("user_config"); -// std::fs::write( -// &file, -// "MPC_ACCOUNT_ID=test-user\nPORTS=11780:11780, --env BAD=oops\nEXTRA_HOSTS=host1:192.168.1.1, --volume /:/mnt\n", -// ) -// .unwrap(); -// let config = parse_user_config(file.to_str().unwrap()).unwrap(); -// let cmd = build_docker_cmd(Platform::Tee, &config.passthrough_env, &make_digest()).unwrap(); -// let cmd_str = cmd.join(" "); - -// assert!(cmd_str.contains("MPC_ACCOUNT_ID=test-user")); -// assert!(cmd_str.contains("11780:11780")); -// assert!(cmd_str.contains("host1:192.168.1.1")); -// assert!(!cmd_str.contains("BAD=oops")); -// assert!(!cmd_str.contains("/:/mnt")); -// } - -// #[test] -// fn test_full_docker_cmd_structure() { -// let env = BTreeMap::from([("MPC_ACCOUNT_ID".into(), "test-user".into())]); -// let digest = make_digest(); -// let cmd = build_docker_cmd(Platform::NonTee, &env, &digest).unwrap(); - -// // Check required subsequence -// assert!(cmd.contains(&"docker".to_string())); -// 
assert!(cmd.contains(&"run".to_string())); -// assert!(cmd.contains(&"--security-opt".to_string())); -// assert!(cmd.contains(&"no-new-privileges:true".to_string())); -// assert!(cmd.contains(&"/tapp:/tapp:ro".to_string())); -// assert!(cmd.contains(&"shared-volume:/mnt/shared".to_string())); -// assert!(cmd.contains(&"mpc-data:/data".to_string())); -// assert!(cmd.contains(&MPC_CONTAINER_NAME.to_string())); -// assert!(cmd.contains(&"--detach".to_string())); -// // Image digest should be the last argument -// assert_eq!(cmd.last().unwrap(), &digest); -// } - -// // -- Dstack tests ------------------------------------------------------- - -// #[test] -// fn test_extend_rtmr3_nontee_is_noop() { -// // NonTee should return immediately without touching dstack -// let rt = tokio::runtime::Runtime::new().unwrap(); -// rt.block_on(extend_rtmr3(Platform::NonTee, &make_digest())) -// .unwrap(); -// } - -// #[test] -// fn test_extend_rtmr3_tee_requires_socket() { -// // TEE mode should fail when socket doesn't exist -// let rt = tokio::runtime::Runtime::new().unwrap(); -// let result = rt.block_on(extend_rtmr3(Platform::Tee, &make_digest())); -// assert_matches!(result, Err(LauncherError::DstackSocketMissing(_))); -// } - -// // -- MpcDockerImageHash integration test -------------------------------- - -// #[test] -// fn test_mpc_docker_image_hash_from_bare_hex() { -// let bare_hex = "a".repeat(64); -// let hash: MpcDockerImageHash = bare_hex.parse().unwrap(); -// assert_eq!(hash.as_hex(), bare_hex); -// } - -// // -- Integration test (feature-gated) ----------------------------------- - -// #[cfg(feature = "integration-test")] -// mod integration { -// use super::*; - -// const TEST_DIGEST: &str = -// "sha256:f2472280c437efc00fa25a030a24990ae16c4fbec0d74914e178473ce4d57372"; - -// fn test_dstack_config() -> Config { -// user_config_from_map(BTreeMap::from([ -// ( -// "MPC_IMAGE_TAGS".into(), -// "83b52da4e2270c688cdd30da04f6b9d3565f25bb".into(), -// ), -// 
("MPC_IMAGE_NAME".into(), "nearone/testing".into()), -// ("MPC_REGISTRY".into(), "registry.hub.docker.com".into()), -// ])) -// .unwrap() -// } - -// #[tokio::test] -// async fn test_validate_image_hash_real_registry() { -// let timing = RpcTimingConfig { -// request_timeout_secs: 10.0, -// request_interval_secs: 1.0, -// max_attempts: 20, -// }; -// let result = validate_image_hash(TEST_DIGEST, &test_dstack_config(), &timing) -// .await -// .unwrap(); -// assert!(result, "validate_image_hash() failed for test image"); -// } -// } -// } +#[cfg(test)] +mod tests { + use std::collections::BTreeMap; + + use assert_matches::assert_matches; + use launcher_interface::types::DockerSha256Digest; + + use crate::constants::*; + use crate::docker_run_args; + use crate::error::LauncherError; + use crate::types::*; + + fn sample_digest() -> DockerSha256Digest { + format!("sha256:{}", "a".repeat(64)).parse().unwrap() + } + + fn base_mpc_config() -> MpcBinaryConfig { + MpcBinaryConfig { + mpc_account_id: "test-account".into(), + mpc_local_address: "127.0.0.1".parse().unwrap(), + mpc_secret_key_store: "secret".into(), + mpc_backup_encryption_key_hex: "0".repeat(64), + mpc_env: MpcEnv::Testnet, + mpc_home_dir: "/data".into(), + mpc_contract_id: "contract.near".into(), + mpc_responder_id: "responder-1".into(), + near_boot_nodes: "boot1,boot2".into(), + rust_backtrace: RustBacktrace::Enabled, + rust_log: RustLog::Level(RustLogLevel::Info), + extra_env: BTreeMap::new(), + } + } + + fn empty_docker_flags() -> DockerLaunchFlags { + serde_json::from_value(serde_json::json!({ + "extra_hosts": {"hosts": []}, + "port_mappings": {"ports": []} + })) + .unwrap() + } + + fn docker_flags_with_host_and_port() -> DockerLaunchFlags { + serde_json::from_value(serde_json::json!({ + "extra_hosts": {"hosts": [{"hostname": {"Domain": "node1"}, "ip": "192.168.1.1"}]}, + "port_mappings": {"ports": [{"src": 11780, "dst": 11780}]} + })) + .unwrap() + } + + #[test] + fn tee_mode_includes_dstack_mount() { + 
// given + let config = base_mpc_config(); + let flags = empty_docker_flags(); + let digest = sample_digest(); + + // when + let args = docker_run_args(Platform::Tee, &config, &flags, &digest).unwrap(); + + // then + let joined = args.join(" "); + assert!(joined.contains(&format!("DSTACK_ENDPOINT={DSTACK_UNIX_SOCKET}"))); + assert!(joined.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); + } + + #[test] + fn nontee_mode_excludes_dstack_mount() { + // given + let config = base_mpc_config(); + let flags = empty_docker_flags(); + let digest = sample_digest(); + + // when + let args = docker_run_args(Platform::NonTee, &config, &flags, &digest).unwrap(); + + // then + let joined = args.join(" "); + assert!(!joined.contains("DSTACK_ENDPOINT=")); + assert!(!joined.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); + } + + #[test] + fn includes_security_opts_and_required_volumes() { + // given + let config = base_mpc_config(); + let flags = empty_docker_flags(); + let digest = sample_digest(); + + // when + let args = docker_run_args(Platform::NonTee, &config, &flags, &digest).unwrap(); + + // then + let joined = args.join(" "); + assert!(joined.contains("--security-opt no-new-privileges:true")); + assert!(joined.contains("/tapp:/tapp:ro")); + assert!(joined.contains("shared-volume:/mnt/shared")); + assert!(joined.contains("mpc-data:/data")); + assert!(joined.contains(&format!("--name {MPC_CONTAINER_NAME}"))); + assert!(joined.contains("--detach")); + } + + #[test] + fn image_digest_is_last_argument() { + // given + let config = base_mpc_config(); + let flags = empty_docker_flags(); + let digest = sample_digest(); + + // when + let args = docker_run_args(Platform::NonTee, &config, &flags, &digest).unwrap(); + + // then + assert_eq!(args.last().unwrap(), &digest.to_string()); + } + + #[test] + fn includes_ports_and_extra_hosts() { + // given + let config = base_mpc_config(); + let flags = docker_flags_with_host_and_port(); + let digest = 
sample_digest(); + + // when + let args = docker_run_args(Platform::NonTee, &config, &flags, &digest).unwrap(); + + // then + let joined = args.join(" "); + assert!(joined.contains("--add-host node1:192.168.1.1")); + assert!(joined.contains("-p 11780:11780")); + } + + #[test] + fn includes_mpc_env_vars() { + // given + let config = base_mpc_config(); + let flags = empty_docker_flags(); + let digest = sample_digest(); + + // when + let args = docker_run_args(Platform::NonTee, &config, &flags, &digest).unwrap(); + + // then + let joined = args.join(" "); + assert!(joined.contains("MPC_ACCOUNT_ID=test-account")); + assert!(joined.contains("MPC_IMAGE_HASH=")); + assert!(joined.contains(&format!("MPC_LATEST_ALLOWED_HASH_FILE={IMAGE_DIGEST_FILE}"))); + } + + #[test] + fn ld_preload_in_typed_field_is_rejected_by_env_validation() { + // given - typed fields are also validated by env_validation::validate_env_value, + // so LD_PRELOAD in any env value is caught before the final safeguard. + let mut config = base_mpc_config(); + config.mpc_account_id = "LD_PRELOAD=/evil.so".into(); + let flags = empty_docker_flags(); + let digest = sample_digest(); + + // when + let result = docker_run_args(Platform::NonTee, &config, &flags, &digest); + + // then + assert_matches!(result, Err(LauncherError::UnsafeEnvValue { .. })); + } +} diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index 10a3fda8d..6017d6c23 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -237,6 +237,12 @@ impl MpcBinaryConfig { /// Typed fields are emitted first (deterministic order), followed by /// validated extras from `extra_env`. All keys and values are validated /// uniformly before returning. 
+ #[cfg(test)] + pub(crate) fn with_extra_env(mut self, extra: std::collections::BTreeMap) -> Self { + self.extra_env = extra; + self + } + pub fn env_vars(&self) -> Result, crate::error::LauncherError> { let mut vars: Vec<(String, String)> = vec![ ("MPC_ACCOUNT_ID".into(), self.mpc_account_id.clone()), @@ -298,3 +304,416 @@ impl MpcBinaryConfig { Ok(vars) } } + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + use std::collections::BTreeMap; + use std::net::Ipv4Addr; + use std::num::NonZeroU16; + + use super::*; + + fn base_mpc_config() -> MpcBinaryConfig { + MpcBinaryConfig { + mpc_account_id: "test-account".into(), + mpc_local_address: "127.0.0.1".parse().unwrap(), + mpc_secret_key_store: "secret".into(), + mpc_backup_encryption_key_hex: "0".repeat(64), + mpc_env: MpcEnv::Testnet, + mpc_home_dir: "/data".into(), + mpc_contract_id: "contract.near".into(), + mpc_responder_id: "responder-1".into(), + near_boot_nodes: "boot1,boot2".into(), + rust_backtrace: RustBacktrace::Enabled, + rust_log: RustLog::Level(RustLogLevel::Info), + extra_env: BTreeMap::new(), + } + } + + // --- HostEntry deserialization --- + + #[test] + fn host_entry_valid_deserialization() { + // given + let json = + serde_json::json!({"hostname": {"Domain": "node.local"}, "ip": "192.168.1.1"}); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Ok(entry) => { + assert_eq!(entry.ip, Ipv4Addr::new(192, 168, 1, 1)); + }); + } + + #[test] + fn host_entry_rejects_invalid_ip() { + // given + let json = + serde_json::json!({"hostname": {"Domain": "node.local"}, "ip": "not-an-ip"}); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Err(_)); + } + + #[test] + fn host_entry_rejects_plain_string_as_hostname() { + // given - url::Host requires tagged variant, plain string is rejected + let json = serde_json::json!({"hostname": "node.local", "ip": "192.168.1.1"}); + + // when + let result = 
serde_json::from_value::(json); + + // then + assert_matches!(result, Err(_)); + } + + #[test] + fn host_entry_rejects_injection_string_as_hostname() { + // given + let json = serde_json::json!({"hostname": "--env LD_PRELOAD=hack.so", "ip": "192.168.1.1"}); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Err(_)); + } + + // --- PortMapping deserialization --- + + #[test] + fn port_mapping_valid_deserialization() { + // given + let json = serde_json::json!({"src": 11780, "dst": 11780}); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Ok(_)); + } + + #[test] + fn port_mapping_rejects_zero_port() { + // given + let json = serde_json::json!({"src": 0, "dst": 11780}); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Err(_)); + } + + #[test] + fn port_mapping_rejects_out_of_range_port() { + // given + let json = serde_json::json!({"src": 65536, "dst": 11780}); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Err(_)); + } + + // --- docker_args output format --- + + #[test] + fn extra_hosts_docker_args_format() { + // given + let hosts = ExtraHosts { + hosts: vec![HostEntry { + hostname: url::Host::Domain("node.local".into()), + ip: Ipv4Addr::new(192, 168, 1, 1), + }], + }; + + // when + let args = hosts.docker_args(); + + // then + assert_eq!(args, vec!["--add-host", "node.local:192.168.1.1"]); + } + + #[test] + fn empty_extra_hosts_produces_no_docker_args() { + // given + let hosts = ExtraHosts { hosts: vec![] }; + + // when + let args = hosts.docker_args(); + + // then + assert!(args.is_empty()); + } + + #[test] + fn port_mappings_docker_args_format() { + // given + let mappings = PortMappings { + ports: vec![PortMapping { + src: NonZeroU16::new(11780).unwrap(), + dst: NonZeroU16::new(11780).unwrap(), + }], + }; + + // when + let args = mappings.docker_args(); + + // then + 
assert_eq!(args, vec!["-p", "11780:11780"]); + } + + // --- MpcBinaryConfig::env_vars --- + + #[test] + fn env_vars_includes_all_typed_fields() { + // given + let config = base_mpc_config(); + + // when + let vars = config.env_vars().unwrap(); + + // then + let keys: Vec<&str> = vars.iter().map(|(k, _)| k.as_str()).collect(); + assert!(keys.contains(&"MPC_ACCOUNT_ID")); + assert!(keys.contains(&"MPC_LOCAL_ADDRESS")); + assert!(keys.contains(&"MPC_SECRET_STORE_KEY")); + assert!(keys.contains(&"MPC_CONTRACT_ID")); + assert!(keys.contains(&"MPC_ENV")); + assert!(keys.contains(&"MPC_HOME_DIR")); + assert!(keys.contains(&"MPC_RESPONDER_ID")); + assert!(keys.contains(&"MPC_BACKUP_ENCRYPTION_KEY_HEX")); + assert!(keys.contains(&"NEAR_BOOT_NODES")); + assert!(keys.contains(&"RUST_BACKTRACE")); + assert!(keys.contains(&"RUST_LOG")); + } + + #[test] + fn env_vars_passes_valid_extra_mpc_key() { + // given + let mut extra = BTreeMap::new(); + extra.insert("MPC_NEW_FEATURE".into(), "enabled".into()); + let config = base_mpc_config().with_extra_env(extra); + + // when + let vars = config.env_vars().unwrap(); + + // then + assert!(vars.iter().any(|(k, v)| k == "MPC_NEW_FEATURE" && v == "enabled")); + } + + #[test] + fn env_vars_deduplicates_typed_key_from_extra() { + // given + let mut extra = BTreeMap::new(); + extra.insert("MPC_ACCOUNT_ID".into(), "duplicate".into()); + let config = base_mpc_config().with_extra_env(extra); + + // when + let vars = config.env_vars().unwrap(); + + // then + let account_values: Vec<&str> = vars + .iter() + .filter(|(k, _)| k == "MPC_ACCOUNT_ID") + .map(|(_, v)| v.as_str()) + .collect(); + assert_eq!(account_values.len(), 1); + assert_eq!(account_values[0], "test-account"); + } + + #[test] + fn env_vars_rejects_sensitive_key_in_extra() { + // given + let mut extra = BTreeMap::new(); + extra.insert("MPC_P2P_PRIVATE_KEY".into(), "secret".into()); + let config = base_mpc_config().with_extra_env(extra); + + // when + let result = config.env_vars(); + + 
// then + assert_matches!(result, Err(crate::error::LauncherError::UnsafeEnvValue { .. })); + } + + #[test] + fn env_vars_rejects_account_sk_in_extra() { + // given + let mut extra = BTreeMap::new(); + extra.insert("MPC_ACCOUNT_SK".into(), "secret".into()); + let config = base_mpc_config().with_extra_env(extra); + + // when + let result = config.env_vars(); + + // then + assert_matches!(result, Err(crate::error::LauncherError::UnsafeEnvValue { .. })); + } + + #[test] + fn env_vars_rejects_value_with_newline() { + // given + let mut extra = BTreeMap::new(); + extra.insert("MPC_INJECTED".into(), "ok\nbad".into()); + let config = base_mpc_config().with_extra_env(extra); + + // when + let result = config.env_vars(); + + // then + assert_matches!(result, Err(crate::error::LauncherError::UnsafeEnvValue { .. })); + } + + #[test] + fn env_vars_rejects_value_containing_ld_preload() { + // given + let mut extra = BTreeMap::new(); + extra.insert("MPC_INJECTED".into(), "LD_PRELOAD=/tmp/x.so".into()); + let config = base_mpc_config().with_extra_env(extra); + + // when + let result = config.env_vars(); + + // then + assert_matches!(result, Err(crate::error::LauncherError::UnsafeEnvValue { .. 
})); + } + + #[test] + fn env_vars_rejects_too_many_extra_vars() { + // given + let mut extra = BTreeMap::new(); + for i in 0..=crate::env_validation::MAX_PASSTHROUGH_ENV_VARS { + extra.insert(format!("MPC_X_{i}"), "1".into()); + } + let config = base_mpc_config().with_extra_env(extra); + + // when + let result = config.env_vars(); + + // then + assert_matches!(result, Err(crate::error::LauncherError::TooManyEnvVars(_))); + } + + #[test] + fn env_vars_rejects_total_bytes_exceeded() { + // given + let mut extra = BTreeMap::new(); + for i in 0..40 { + extra.insert( + format!("MPC_BIG_{i}"), + "a".repeat(crate::env_validation::MAX_ENV_VALUE_LEN), + ); + } + let config = base_mpc_config().with_extra_env(extra); + + // when + let result = config.env_vars(); + + // then + assert_matches!(result, Err(crate::error::LauncherError::EnvPayloadTooLarge(_))); + } + + #[test] + fn env_vars_rejects_unknown_non_mpc_key() { + // given + let mut extra = BTreeMap::new(); + extra.insert("BAD_KEY".into(), "value".into()); + let config = base_mpc_config().with_extra_env(extra); + + // when + let result = config.env_vars(); + + // then + assert_matches!(result, Err(crate::error::LauncherError::UnsafeEnvValue { .. 
})); + } + + // --- Config full deserialization --- + + #[test] + fn config_deserializes_valid_json() { + // given + let json = serde_json::json!({ + "launcher_config": { + "image_tags": ["tag1"], + "image_name": "nearone/mpc-node", + "registry": "registry.hub.docker.com", + "rpc_request_timeout_secs": 10, + "rpc_request_interval_secs": 1, + "rpc_max_attempts": 20, + "mpc_hash_override": null + }, + "docker_command_config": { + "extra_hosts": {"hosts": [{"hostname": {"Domain": "node1"}, "ip": "192.168.1.1"}]}, + "port_mappings": {"ports": [{"src": 11780, "dst": 11780}]} + }, + "mpc_passthrough_env": { + "mpc_account_id": "account123", + "mpc_local_address": "127.0.0.1", + "mpc_secret_key_store": "secret", + "mpc_backup_encryption_key_hex": "0000000000000000000000000000000000000000000000000000000000000000", + "mpc_env": "Testnet", + "mpc_home_dir": "/data", + "mpc_contract_id": "contract.near", + "mpc_responder_id": "responder-1", + "near_boot_nodes": "boot1", + "rust_backtrace": "1", + "rust_log": "info" + } + }); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Ok(config) => { + assert_eq!(config.mpc_passthrough_env.mpc_account_id, "account123"); + assert_eq!(config.launcher_config.image_name, "nearone/mpc-node"); + }); + } + + #[test] + fn config_rejects_missing_required_field() { + // given - mpc_account_id is missing + let json = serde_json::json!({ + "launcher_config": { + "image_tags": ["tag1"], + "image_name": "nearone/mpc-node", + "registry": "registry.hub.docker.com", + "rpc_request_timeout_secs": 10, + "rpc_request_interval_secs": 1, + "rpc_max_attempts": 20, + "mpc_hash_override": null + }, + "docker_command_config": { + "extra_hosts": {"hosts": []}, + "port_mappings": {"ports": []} + }, + "mpc_passthrough_env": { + "mpc_local_address": "127.0.0.1", + "mpc_secret_key_store": "secret", + "mpc_backup_encryption_key_hex": "0000000000000000000000000000000000000000000000000000000000000000", + "mpc_env": 
"Testnet", + "mpc_home_dir": "/data", + "mpc_contract_id": "contract.near", + "mpc_responder_id": "responder-1", + "near_boot_nodes": "boot1", + "rust_backtrace": "1", + "rust_log": "info" + } + }); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Err(_)); + } +} From e658af4cfdebf8b9a7ccdb3a0b546feb79336b6a Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 00:34:51 +0100 Subject: [PATCH 37/82] add more tests --- crates/tee-launcher/src/docker_types.rs | 104 ++++++++++ crates/tee-launcher/src/main.rs | 250 ++++++++++++++++++++---- 2 files changed, 314 insertions(+), 40 deletions(-) diff --git a/crates/tee-launcher/src/docker_types.rs b/crates/tee-launcher/src/docker_types.rs index 16f0aad59..e48780fd3 100644 --- a/crates/tee-launcher/src/docker_types.rs +++ b/crates/tee-launcher/src/docker_types.rs @@ -44,3 +44,107 @@ pub struct ManifestPlatform { pub struct ManifestConfig { pub digest: DockerSha256Digest, } + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + + fn sample_digest_str() -> String { + format!("sha256:{}", "ab".repeat(32)) + } + + #[test] + fn image_index_deserializes() { + // given + let json = serde_json::json!({ + "mediaType": "application/vnd.oci.image.index.v1+json", + "manifests": [ + { + "digest": "sha256:abc123", + "platform": { "architecture": "amd64", "os": "linux" } + }, + { + "digest": "sha256:def456", + "platform": { "architecture": "arm64", "os": "linux" } + } + ] + }); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Ok(ManifestResponse::ImageIndex { manifests }) => { + assert_eq!(manifests.len(), 2); + assert_eq!(manifests[0].platform, ManifestPlatform { + architecture: "amd64".into(), + os: "linux".into(), + }); + }); + } + + #[test] + fn docker_v2_manifest_deserializes() { + // given + let json = serde_json::json!({ + "mediaType": "application/vnd.docker.distribution.manifest.v2+json", + 
"config": { "digest": sample_digest_str() } + }); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Ok(ManifestResponse::DockerV2 { config }) => { + assert_eq!(config.digest.to_string(), sample_digest_str()); + }); + } + + #[test] + fn oci_manifest_deserializes() { + // given + let json = serde_json::json!({ + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "config": { "digest": sample_digest_str() } + }); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Ok(ManifestResponse::OciManifest { config }) => { + assert_eq!(config.digest.to_string(), sample_digest_str()); + }); + } + + #[test] + fn unknown_media_type_is_rejected() { + // given + let json = serde_json::json!({ + "mediaType": "application/vnd.unknown.format", + "config": { "digest": sample_digest_str() } + }); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Err(_)); + } + + #[test] + fn docker_token_response_deserializes() { + // given + let json = serde_json::json!({ "token": "abc.def.ghi" }); + + // when + let result = serde_json::from_value::(json); + + // then + assert_matches!(result, Ok(resp) => { + assert_eq!(resp.token, "abc.def.ghi"); + }); + } +} diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 0e7de6ec6..d50982569 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -74,47 +74,31 @@ async fn run() -> Result<(), LauncherError> { source, }); - let image_hash: DockerSha256Digest = { - match approved_hashes_file { - Err(err) => { - let default_image_digest = args.default_image_digest; - tracing::warn!( - ?err, - ?default_image_digest, - "approved hashes file does not exist on disk, falling back to default digest" - ); - default_image_digest - } - Ok(approved_hashes_file) => { - let approved_hashes_on_disk: ApprovedHashesFile = - 
serde_json::from_reader(approved_hashes_file).map_err(|source| {
- LauncherError::JsonParse {
- path: IMAGE_DIGEST_FILE.to_string(),
- source,
- }
- })?;
-
- if let Some(override_image) = &dstack_config.launcher_config.mpc_hash_override {
- tracing::info!(?override_image, "override mpc image hash provided");
-
- let override_image_is_allowed = approved_hashes_on_disk
- .approved_hashes
- .contains(override_image);
-
- if !override_image_is_allowed {
- return Err(LauncherError::InvalidHashOverride(format!(
- "MPC_HASH_OVERRIDE={override_image} does not match any approved hash",
- )));
- }
-
- override_image.clone()
- } else {
- approved_hashes_on_disk.newest_approved_hash().clone()
- }
- }
+ let approved_hashes_on_disk: Option<ApprovedHashesFile> = match approved_hashes_file {
+ Err(err) => {
+ tracing::warn!(
+ ?err,
+ default_image_digest = ?args.default_image_digest,
+ "approved hashes file does not exist on disk, falling back to default digest"
+ );
+ None
+ }
+ Ok(file) => {
+ let parsed: ApprovedHashesFile =
+ serde_json::from_reader(file).map_err(|source| LauncherError::JsonParse {
+ path: IMAGE_DIGEST_FILE.to_string(),
+ source,
+ })?;
+ Some(parsed)
 }
 };
+ let image_hash = select_image_hash(
+ approved_hashes_on_disk.as_ref(),
+ &args.default_image_digest,
+ dstack_config.launcher_config.mpc_hash_override.as_ref(),
+ )?;
+
 let () = validate_image_hash(&dstack_config.launcher_config, image_hash.clone()).await?;
 let should_extend_rtmr_3 = args.platform == Platform::Tee;
@@ -143,6 +127,39 @@ async fn run() -> Result<(), LauncherError> {
 Ok(())
 }
+/// Select which image hash to use, given the approved hashes file (if present),
+/// a fallback default digest, and an optional user override. 
+/// 
+/// Selection rules:
+/// - If the approved hashes file is absent → use `default_digest`
+/// - If `override_hash` is set and appears in the approved list → use it
+/// - If `override_hash` is set but NOT in the approved list → error
+/// - Otherwise → use the newest approved hash (first in the list)
+fn select_image_hash(
+ approved_hashes: Option<&ApprovedHashesFile>,
+ default_digest: &DockerSha256Digest,
+ override_hash: Option<&DockerSha256Digest>,
+) -> Result<DockerSha256Digest, LauncherError> {
+ let Some(approved) = approved_hashes else {
+ tracing::info!("no approved hashes file, using default digest");
+ return Ok(default_digest.clone());
+ };
+
+ if let Some(override_image) = override_hash {
+ tracing::info!(?override_image, "override mpc image hash provided");
+ if !approved.approved_hashes.contains(override_image) {
+ return Err(LauncherError::InvalidHashOverride(format!(
+ "MPC_HASH_OVERRIDE={override_image} does not match any approved hash",
+ )));
+ }
+ return Ok(override_image.clone());
+ }
+
+ let selected = approved.newest_approved_hash().clone();
+ tracing::info!(?selected, "selected newest approved hash");
+ Ok(selected)
+}
+
 async fn get_manifest_digest(
 config: &LauncherConfig,
 expected_image_digest: &DockerSha256Digest,
@@ -442,15 +459,29 @@ mod tests {
 use std::collections::BTreeMap;
 
 use assert_matches::assert_matches;
- use launcher_interface::types::DockerSha256Digest;
+ use bounded_collections::NonEmptyVec;
+ use launcher_interface::types::{ApprovedHashesFile, DockerSha256Digest};
 
 use crate::constants::*;
 use crate::docker_run_args;
 use crate::error::LauncherError;
+ use crate::select_image_hash;
 use crate::types::*;
 
+ fn digest(hex_char: char) -> DockerSha256Digest {
+ format!("sha256:{}", std::iter::repeat_n(hex_char, 64).collect::<String>())
+ .parse()
+ .unwrap()
+ }
+
 fn sample_digest() -> DockerSha256Digest {
- format!("sha256:{}", "a".repeat(64)).parse().unwrap()
+ digest('a')
+ }
+
+ fn approved_file(hashes: Vec<DockerSha256Digest>) -> ApprovedHashesFile {
+ ApprovedHashesFile {
+ 
approved_hashes: NonEmptyVec::from_vec(hashes).unwrap(), + } } fn base_mpc_config() -> MpcBinaryConfig { @@ -600,4 +631,143 @@ mod tests { // then assert_matches!(result, Err(LauncherError::UnsafeEnvValue { .. })); } + + // --- select_image_hash --- + + #[test] + fn select_hash_override_present_and_in_approved_list() { + // given + let override_digest = digest('b'); + let approved = approved_file(vec![digest('c'), override_digest.clone(), digest('d')]); + + // when + let result = select_image_hash(Some(&approved), &digest('f'), Some(&override_digest)); + + // then + assert_matches!(result, Ok(selected) => { + assert_eq!(selected, override_digest); + }); + } + + #[test] + fn select_hash_override_not_in_approved_list() { + // given + let override_digest = digest('b'); + let approved = approved_file(vec![digest('c'), digest('d')]); + + // when + let result = select_image_hash(Some(&approved), &digest('f'), Some(&override_digest)); + + // then + assert_matches!(result, Err(LauncherError::InvalidHashOverride(_))); + } + + #[test] + fn select_hash_no_override_picks_newest() { + // given - first entry is "newest" + let newest = digest('a'); + let approved = approved_file(vec![newest.clone(), digest('b'), digest('c')]); + + // when + let result = select_image_hash(Some(&approved), &digest('f'), None); + + // then + assert_matches!(result, Ok(selected) => { + assert_eq!(selected, newest); + }); + } + + #[test] + fn select_hash_missing_file_falls_back_to_default() { + // given + let default = digest('d'); + + // when + let result = select_image_hash(None, &default, None); + + // then + assert_matches!(result, Ok(selected) => { + assert_eq!(selected, default); + }); + } + + #[test] + fn select_hash_missing_file_ignores_override() { + // given - override is set but file is missing, so default wins + let default = digest('d'); + let override_digest = digest('b'); + + // when + let result = select_image_hash(None, &default, Some(&override_digest)); + + // then + 
assert_matches!(result, Ok(selected) => { + assert_eq!(selected, default); + }); + } + + // --- approved_hashes JSON key alignment --- + + #[test] + fn approved_hashes_json_key_is_approved_hashes() { + // given - the JSON field name must match between launcher and MPC node + let file = approved_file(vec![sample_digest()]); + + // when + let json = serde_json::to_value(&file).unwrap(); + + // then + assert!(json.get("approved_hashes").is_some()); + } +} + +/// Integration tests requiring network access and Docker Hub. +/// Run with: cargo test -p tee-launcher --features integration-test +#[cfg(all(test, feature = "integration-test"))] +mod integration_tests { + use super::*; + + const TEST_DIGEST: &str = + "sha256:f2472280c437efc00fa25a030a24990ae16c4fbec0d74914e178473ce4d57372"; + const TEST_TAG: &str = "83b52da4e2270c688cdd30da04f6b9d3565f25bb"; + const TEST_IMAGE_NAME: &str = "nearone/testing"; + const TEST_REGISTRY: &str = "registry.hub.docker.com"; + + fn test_launcher_config() -> LauncherConfig { + LauncherConfig { + image_tags: bounded_collections::NonEmptyVec::from_vec(vec![TEST_TAG.into()]).unwrap(), + image_name: TEST_IMAGE_NAME.into(), + registry: TEST_REGISTRY.into(), + rpc_request_timeout_secs: 10, + rpc_request_interval_secs: 1, + rpc_max_attempts: 20, + mpc_hash_override: None, + } + } + + #[tokio::test] + async fn get_manifest_digest_resolves_known_image() { + // given + let config = test_launcher_config(); + let expected_digest: DockerSha256Digest = TEST_DIGEST.parse().unwrap(); + + // when + let result = get_manifest_digest(&config, &expected_digest).await; + + // then + assert!(result.is_ok(), "get_manifest_digest failed: {result:?}"); + } + + #[tokio::test] + async fn validate_image_hash_succeeds_for_known_image() { + // given + let config = test_launcher_config(); + let expected_digest: DockerSha256Digest = TEST_DIGEST.parse().unwrap(); + + // when + let result = validate_image_hash(&config, expected_digest).await; + + // then + 
assert!(result.is_ok(), "validate_image_hash failed: {result:?}"); + } } From 4281065823cd73210f740cf2a17b4b0886948ee0 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 11:17:38 +0100 Subject: [PATCH 38/82] rename to ApprovedHashes --- crates/launcher-interface/src/lib.rs | 8 +++---- .../src/tee/allowed_image_hashes_watcher.rs | 4 ++-- crates/tee-launcher/src/main.rs | 23 +++++++++++-------- 3 files changed, 19 insertions(+), 16 deletions(-) diff --git a/crates/launcher-interface/src/lib.rs b/crates/launcher-interface/src/lib.rs index 0d6fd5cb4..17d7e172e 100644 --- a/crates/launcher-interface/src/lib.rs +++ b/crates/launcher-interface/src/lib.rs @@ -9,11 +9,11 @@ pub mod types { /// JSON structure for the approved hashes file written by the MPC node, and read by the launcher. #[derive(Debug, Serialize, Deserialize)] - pub struct ApprovedHashesFile { + pub struct ApprovedHashes { pub approved_hashes: bounded_collections::NonEmptyVec, } - impl ApprovedHashesFile { + impl ApprovedHashes { pub fn newest_approved_hash(&self) -> &DockerSha256Digest { self.approved_hashes.first() } @@ -81,7 +81,7 @@ mod paths {} mod tests { use assert_matches::assert_matches; - use super::types::{ApprovedHashesFile, DockerSha256Digest, DockerDigestParseError}; + use super::types::{ApprovedHashes, DockerDigestParseError, DockerSha256Digest}; use mpc_primitives::hash::MpcDockerImageHash; fn sample_digest() -> DockerSha256Digest { @@ -164,7 +164,7 @@ mod tests { #[test] fn serialize_approved_hashes_file() { - let file = ApprovedHashesFile { + let file = ApprovedHashes { approved_hashes: bounded_collections::NonEmptyVec::from_vec(vec![sample_digest()]) .unwrap(), }; diff --git a/crates/node/src/tee/allowed_image_hashes_watcher.rs b/crates/node/src/tee/allowed_image_hashes_watcher.rs index 32bc37b62..2a5af11ad 100644 --- a/crates/node/src/tee/allowed_image_hashes_watcher.rs +++ b/crates/node/src/tee/allowed_image_hashes_watcher.rs @@ -1,7 +1,7 @@ use 
bounded_collections::NonEmptyVec; use derive_more::From; use itertools::Itertools; -use launcher_interface::types::{ApprovedHashesFile, DockerSha256Digest}; +use launcher_interface::types::{ApprovedHashes, DockerSha256Digest}; use mpc_contract::tee::proposal::MpcDockerImageHash; use std::{future::Future, io, panic, path::PathBuf}; use thiserror::Error; @@ -42,7 +42,7 @@ impl AllowedImageHashesStorage for AllowedImageHashesFile { "Writing approved MPC image hashes to disk (JSON format)." ); - let approved_hashes = ApprovedHashesFile { + let approved_hashes = ApprovedHashes { approved_hashes: approved_hashes.mapped(DockerSha256Digest::from), }; diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index d50982569..3bb4e8391 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -6,7 +6,7 @@ use std::{collections::VecDeque, time::Duration}; use backon::{ExponentialBuilder, Retryable}; use clap::Parser; use launcher_interface::MPC_IMAGE_HASH_EVENT; -use launcher_interface::types::{ApprovedHashesFile, DockerSha256Digest}; +use launcher_interface::types::{ApprovedHashes, DockerSha256Digest}; use constants::*; use docker_types::*; @@ -74,7 +74,7 @@ async fn run() -> Result<(), LauncherError> { source, }); - let approved_hashes_on_disk: Option = match approved_hashes_file { + let approved_hashes_on_disk: Option = match approved_hashes_file { Err(err) => { tracing::warn!( ?err, @@ -84,7 +84,7 @@ async fn run() -> Result<(), LauncherError> { None } Ok(file) => { - let parsed: ApprovedHashesFile = + let parsed: ApprovedHashes = serde_json::from_reader(file).map_err(|source| LauncherError::JsonParse { path: IMAGE_DIGEST_FILE.to_string(), source, @@ -136,7 +136,7 @@ async fn run() -> Result<(), LauncherError> { /// - If `override_hash` is set but NOT in the approved list → error /// - Otherwise → use the newest approved hash (first in the list) fn select_image_hash( - approved_hashes: Option<&ApprovedHashesFile>, + 
approved_hashes: Option<&ApprovedHashes>,
 default_digest: &DockerSha256Digest,
 override_hash: Option<&DockerSha256Digest>,
 ) -> Result<DockerSha256Digest, LauncherError> {
@@ -460,7 +460,7 @@ mod tests {
 use assert_matches::assert_matches;
 use bounded_collections::NonEmptyVec;
- use launcher_interface::types::{ApprovedHashesFile, DockerSha256Digest};
+ use launcher_interface::types::{ApprovedHashes, DockerSha256Digest};
 
 use crate::constants::*;
 use crate::docker_run_args;
@@ -469,17 +469,20 @@ mod tests {
 use crate::types::*;
 
 fn digest(hex_char: char) -> DockerSha256Digest {
- format!("sha256:{}", std::iter::repeat_n(hex_char, 64).collect::<String>())
- .parse()
- .unwrap()
+ format!(
+ "sha256:{}",
+ std::iter::repeat_n(hex_char, 64).collect::<String>()
+ )
+ .parse()
+ .unwrap()
 }
 
 fn sample_digest() -> DockerSha256Digest {
 digest('a')
 }
 
- fn approved_file(hashes: Vec<DockerSha256Digest>) -> ApprovedHashesFile {
- ApprovedHashesFile {
+ fn approved_file(hashes: Vec<DockerSha256Digest>) -> ApprovedHashes {
+ ApprovedHashes {
 approved_hashes: NonEmptyVec::from_vec(hashes).unwrap(),
 }
 }
 
From 154e27b4274be0ebf1d20b776391cc4fdb21c0ec Mon Sep 17 00:00:00 2001
From: Daniel Sharifi 
Date: Fri, 6 Mar 2026 11:26:54 +0100
Subject: [PATCH 39/82] update docker build launcher

---
 .github/workflows/ci.yml | 12 +++++++++---
 .github/workflows/docker_build_launcher.yml | 18 ++++++++++++------
 deployment/build-images.sh | 3 +++
 3 files changed, 24 insertions(+), 9 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 20c0333ec..00d0600bb 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -64,7 +64,7 @@ jobs:
 docker-launcher-build-and-verify:
 name: "Build MPC Launcher Docker image and verify"
- runs-on: warp-ubuntu-2404-x64-2x
+ runs-on: warp-ubuntu-2404-x64-8x
 timeout-minutes: 60
 permissions:
 contents: read
@@ -75,10 +75,16 @@ jobs:
 with:
 persist-credentials: false
 
- - name: Install skopeo
+ - name: Install build dependencies
 run: |
 sudo apt-get update
- sudo apt-get install -y skopeo
+ sudo apt-get install -y skopeo 
liblzma-dev + + - name: Cache Rust dependencies + uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "warpbuild" - name: Build launcher docker image and verify its hash shell: bash diff --git a/.github/workflows/docker_build_launcher.yml b/.github/workflows/docker_build_launcher.yml index 2cab91641..b0d69d76f 100644 --- a/.github/workflows/docker_build_launcher.yml +++ b/.github/workflows/docker_build_launcher.yml @@ -13,7 +13,7 @@ on: jobs: build-and-push-images: name: "Build and push Docker launcher image with commit hash" - runs-on: warp-ubuntu-2404-x64-2x + runs-on: warp-ubuntu-2404-x64-8x permissions: contents: read @@ -23,17 +23,23 @@ jobs: with: persist-credentials: false + - name: Install build dependencies + run: | + sudo apt-get update + sudo apt-get install -y skopeo liblzma-dev + + - name: Cache Rust dependencies + uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "warpbuild" + - name: Login to Docker Hub uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Install skopeo - run: | - sudo apt-get update - sudo apt-get install -y skopeo - - name: Build and push launcher image run: | export LAUNCHER_IMAGE_NAME=mpc-launcher diff --git a/deployment/build-images.sh b/deployment/build-images.sh index 933531a66..cc4bfe02e 100755 --- a/deployment/build-images.sh +++ b/deployment/build-images.sh @@ -117,6 +117,8 @@ get_image_hash() { } if $USE_LAUNCHER; then + cargo build -p tee-launcher --release --locked + launcher_binary_hash=$(sha256sum target/release/tee-launcher | cut -d' ' -f1) build_reproducible_image $LAUNCHER_IMAGE_NAME $DOCKERFILE_LAUNCHER launcher_image_hash=$(get_image_hash $LAUNCHER_IMAGE_NAME) fi @@ -182,5 +184,6 @@ if 
$USE_NODE_GCP; then echo "node gcp docker image hash: $node_gcp_image_hash" fi if $USE_LAUNCHER; then + echo "launcher binary hash: $launcher_binary_hash" echo "launcher docker image hash: $launcher_image_hash" fi From a696478257301e130765290a1693dfce235c0b7b Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 11:28:01 +0100 Subject: [PATCH 40/82] update cargo lock --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 98c4194f2..03775211b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4926,7 +4926,7 @@ dependencies = [ [[package]] name = "launcher-interface" -version = "3.5.1" +version = "3.6.0" dependencies = [ "assert_matches", "bounded-collections", @@ -10593,7 +10593,7 @@ dependencies = [ [[package]] name = "tee-launcher" -version = "3.5.1" +version = "3.6.0" dependencies = [ "assert_matches", "backon", From 98d85fb7784b53bcdbfe63fccd511e1468bdfb3f Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 11:30:26 +0100 Subject: [PATCH 41/82] undo hash.rs change --- crates/primitives/src/hash.rs | 8 -------- 1 file changed, 8 deletions(-) diff --git a/crates/primitives/src/hash.rs b/crates/primitives/src/hash.rs index 7a65ddee0..4f7c6ba87 100644 --- a/crates/primitives/src/hash.rs +++ b/crates/primitives/src/hash.rs @@ -55,14 +55,6 @@ impl Hash32 { } } -impl MpcDockerImageHash { - /// Converts the hash to a hexadecimal string representation with a `sha256:` prefix - pub fn as_hex_sha256(&self) -> String { - let hex_encoding = self.as_hex(); - format!("sha256:{hex_encoding}") - } -} - #[derive(Error, Debug)] pub enum Hash32ParseError { #[error("not a valid hex string")] From 9d5d521bdd5512943d3c055c57ab7c48bb485b8c Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 13:58:57 +0100 Subject: [PATCH 42/82] use assert_matches! 
--- crates/tee-launcher/src/main.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 3bb4e8391..e11043af8 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -729,6 +729,7 @@ mod tests { #[cfg(all(test, feature = "integration-test"))] mod integration_tests { use super::*; + use assert_matches::assert_matches; const TEST_DIGEST: &str = "sha256:f2472280c437efc00fa25a030a24990ae16c4fbec0d74914e178473ce4d57372"; @@ -771,6 +772,6 @@ mod integration_tests { let result = validate_image_hash(&config, expected_digest).await; // then - assert!(result.is_ok(), "validate_image_hash failed: {result:?}"); + assert_matches!(result, Ok(_)); } } From b25072fcf90d40f8353585cb501557554a5ac48c Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 14:25:36 +0100 Subject: [PATCH 43/82] claude first pass --- crates/node/src/cli.rs | 714 ++++++++++---------------------- crates/node/src/config.rs | 3 + crates/node/src/config/start.rs | 80 ++++ crates/node/src/lib.rs | 1 + crates/node/src/run.rs | 321 ++++++++++++++ libs/nearcore | 2 +- 6 files changed, 629 insertions(+), 492 deletions(-) create mode 100644 crates/node/src/config/start.rs create mode 100644 crates/node/src/run.rs diff --git a/crates/node/src/cli.rs b/crates/node/src/cli.rs index 3f976a547..b4ca08405 100644 --- a/crates/node/src/cli.rs +++ b/crates/node/src/cli.rs @@ -1,60 +1,30 @@ use crate::{ config::{ - generate_and_write_backup_encryption_key_to_disk, load_config_file, BlockArgs, CKDConfig, - ConfigFile, ForeignChainsConfig, IndexerConfig, KeygenConfig, PersistentSecrets, - PresignatureConfig, RespondConfig, SecretsConfig, SignatureConfig, SyncMode, TripleConfig, - }, - coordinator::Coordinator, - db::SecretDB, - indexer::{ - real::spawn_real_indexer, tx_sender::TransactionSender, IndexerAPI, ReadForeignChainPolicy, + BlockArgs, CKDConfig, ConfigFile, ForeignChainsConfig, 
IndexerConfig, KeygenConfig, + PersistentSecrets, PresignatureConfig, SignatureConfig, StartConfig, SyncMode, + TeeAuthorityStartConfig, TripleConfig, }, keyshare::{ compat::legacy_ecdsa_key_from_keyshares, local::LocalPermanentKeyStorageBackend, permanent::{PermanentKeyStorage, PermanentKeyStorageBackend, PermanentKeyshareData}, - GcpPermanentKeyStorageConfig, KeyStorageConfig, KeyshareStorage, }, - migration_service::spawn_recovery_server_and_run_onboarding, p2p::testing::{generate_test_p2p_configs, PortSeed}, - profiler, - tracking::{self, start_root_task}, - web::{start_web_server, static_web_data, DebugRequest}, }; -use anyhow::{anyhow, Context}; use clap::{Args, Parser, Subcommand, ValueEnum}; use hex::FromHex; -use mpc_attestation::report_data::ReportDataV1; -use mpc_contract::state::ProtocolContractState; use near_account_id::AccountId; use near_indexer_primitives::types::Finality; -use near_time::Clock; use std::{ - collections::BTreeMap, net::{Ipv4Addr, SocketAddr}, - sync::Mutex, -}; -use std::{path::PathBuf, sync::Arc, sync::OnceLock, time::Duration}; -use tee_authority::tee_authority::{ - DstackTeeAuthorityConfig, LocalTeeAuthorityConfig, TeeAuthority, DEFAULT_DSTACK_ENDPOINT, - DEFAULT_PHALA_TDX_QUOTE_UPLOAD_URL, + path::PathBuf, }; -use tokio::sync::{broadcast, mpsc, oneshot, watch, RwLock}; -use tokio_util::sync::CancellationToken; +use tee_authority::tee_authority::{DEFAULT_DSTACK_ENDPOINT, DEFAULT_PHALA_TDX_QUOTE_UPLOAD_URL}; use url::Url; -use contract_interface::types::Ed25519PublicKey; -use { - crate::tee::{ - monitor_allowed_image_hashes, - remote_attestation::{monitor_attestation_removal, periodic_attestation_submission}, - AllowedImageHashesFile, - }, - mpc_contract::tee::proposal::MpcDockerImageHash, - tracing::info, -}; - -pub const ATTESTATION_RESUBMISSION_INTERVAL: Duration = Duration::from_secs(60 * 60); // 1 hour +// --------------------------------------------------------------------------- +// Top-level CLI +// 
--------------------------------------------------------------------------- #[derive(Parser, Debug)] #[command(name = "mpc-node")] @@ -77,6 +47,13 @@ pub enum LogFormat { #[derive(Subcommand, Debug)] pub enum CliCommand { + /// Starts the MPC node using a single JSON configuration file instead of + /// environment variables and CLI flags. + StartWithConfigFile { + /// Path to a JSON configuration file containing all settings needed to + /// start the MPC node. + config_path: PathBuf, + }, Start(StartCmd), /// Generates/downloads required files for Near node to run Init(InitConfigArgs), @@ -109,32 +86,9 @@ pub enum CliCommand { }, } -#[derive(Args, Debug)] -pub struct InitConfigArgs { - #[arg(long, env("MPC_HOME_DIR"))] - pub dir: std::path::PathBuf, - /// chain/network id (localnet, testnet, devnet, betanet) - #[arg(long)] - pub chain_id: Option, - /// Genesis file to use when initialize testnet (including downloading) - #[arg(long)] - pub genesis: Option, - /// Download the verified NEAR config file automatically. - #[arg(long)] - pub download_config: bool, - #[arg(long)] - pub download_config_url: Option, - /// Download the verified NEAR genesis file automatically. - #[arg(long)] - pub download_genesis: bool, - /// Specify a custom download URL for the genesis-file. - #[arg(long)] - pub download_genesis_url: Option, - #[arg(long)] - pub download_genesis_records_url: Option, - #[arg(long)] - pub boot_nodes: Option, -} +// --------------------------------------------------------------------------- +// Start subcommand (CLI flags / env vars) +// --------------------------------------------------------------------------- #[derive(Args, Debug)] pub struct StartCmd { @@ -153,17 +107,17 @@ pub struct StartCmd { pub gcp_project_id: Option, /// TEE authority config #[command(subcommand)] - pub tee_authority: TeeAuthorityConfig, + pub tee_authority: CliTeeAuthorityConfig, /// TEE related configuration settings. 
#[command(flatten)] - pub image_hash_config: MpcImageHashConfig, + pub image_hash_config: CliImageHashConfig, /// Hex-encoded 32 byte AES key for backup encryption. #[arg(env("MPC_BACKUP_ENCRYPTION_KEY_HEX"))] pub backup_encryption_key_hex: Option, } #[derive(Subcommand, Debug, Clone)] -pub enum TeeAuthorityConfig { +pub enum CliTeeAuthorityConfig { Local, Dstack { #[arg(long, env("DSTACK_ENDPOINT"), default_value = DEFAULT_DSTACK_ENDPOINT)] @@ -173,24 +127,8 @@ pub enum TeeAuthorityConfig { }, } -impl TryFrom for TeeAuthority { - type Error = anyhow::Error; - - fn try_from(cmd: TeeAuthorityConfig) -> Result { - let authority_config = match cmd { - TeeAuthorityConfig::Local => LocalTeeAuthorityConfig::default().into(), - TeeAuthorityConfig::Dstack { - dstack_endpoint, - quote_upload_url, - } => DstackTeeAuthorityConfig::new(dstack_endpoint, quote_upload_url).into(), - }; - - Ok(authority_config) - } -} - #[derive(Args, Debug)] -pub struct MpcImageHashConfig { +pub struct CliImageHashConfig { #[arg( long, env("MPC_IMAGE_HASH"), @@ -205,6 +143,65 @@ pub struct MpcImageHashConfig { pub latest_allowed_hash_file: Option, } +impl From for StartConfig { + fn from(cmd: StartCmd) -> Self { + StartConfig { + home_dir: cmd.home_dir, + secret_store_key_hex: cmd.secret_store_key_hex, + gcp_keyshare_secret_id: cmd.gcp_keyshare_secret_id, + gcp_project_id: cmd.gcp_project_id, + tee_authority: match cmd.tee_authority { + CliTeeAuthorityConfig::Local => TeeAuthorityStartConfig::Local, + CliTeeAuthorityConfig::Dstack { + dstack_endpoint, + quote_upload_url, + } => TeeAuthorityStartConfig::Dstack { + dstack_endpoint, + quote_upload_url: quote_upload_url.to_string(), + }, + }, + image_hash: cmd.image_hash_config.image_hash, + latest_allowed_hash_file: cmd.image_hash_config.latest_allowed_hash_file, + backup_encryption_key_hex: cmd.backup_encryption_key_hex, + } + } +} + +// --------------------------------------------------------------------------- +// Init subcommand +// 
--------------------------------------------------------------------------- + +#[derive(Args, Debug)] +pub struct InitConfigArgs { + #[arg(long, env("MPC_HOME_DIR"))] + pub dir: std::path::PathBuf, + /// chain/network id (localnet, testnet, devnet, betanet) + #[arg(long)] + pub chain_id: Option, + /// Genesis file to use when initialize testnet (including downloading) + #[arg(long)] + pub genesis: Option, + /// Download the verified NEAR config file automatically. + #[arg(long)] + pub download_config: bool, + #[arg(long)] + pub download_config_url: Option, + /// Download the verified NEAR genesis file automatically. + #[arg(long)] + pub download_genesis: bool, + /// Specify a custom download URL for the genesis-file. + #[arg(long)] + pub download_genesis_url: Option, + #[arg(long)] + pub download_genesis_records_url: Option, + #[arg(long)] + pub boot_nodes: Option, +} + +// --------------------------------------------------------------------------- +// Import/Export keyshare subcommands +// --------------------------------------------------------------------------- + #[derive(Args, Debug)] pub struct ImportKeyshareCmd { /// Path to home directory @@ -233,292 +230,17 @@ pub struct ExportKeyshareCmd { pub local_encryption_key_hex: String, } -impl StartCmd { - async fn run(self) -> anyhow::Result<()> { - let root_runtime = tokio::runtime::Builder::new_multi_thread() - .enable_all() - .worker_threads(1) - .build()?; - - let _tokio_enter_guard = root_runtime.enter(); - - // Load configuration and initialize persistent secrets - let home_dir = PathBuf::from(self.home_dir.clone()); - let config = load_config_file(&home_dir)?; - let persistent_secrets = PersistentSecrets::generate_or_get_existing( - &home_dir, - config.number_of_responder_keys, - )?; - - profiler::web_server::start_web_server(config.pprof_bind_address).await?; - root_runtime.spawn(crate::metrics::tokio_task_metrics::run_monitor_loop()); - - // TODO(#1296): Decide if the MPC responder account is actually 
needed - let respond_config = RespondConfig::from_parts(&config, &persistent_secrets); - - let backup_encryption_key_hex = match &self.backup_encryption_key_hex { - Some(key) => key.clone(), - None => generate_and_write_backup_encryption_key_to_disk(&home_dir)?, - }; - - // Load secrets from configuration and persistent storage - let secrets = SecretsConfig::from_parts( - &self.secret_store_key_hex, - persistent_secrets.clone(), - &backup_encryption_key_hex, - )?; - - // Generate attestation - let tee_authority = TeeAuthority::try_from(self.tee_authority.clone())?; - let tls_public_key = &secrets.persistent_secrets.p2p_private_key.verifying_key(); - - let account_public_key = &secrets.persistent_secrets.near_signer_key.verifying_key(); - - let report_data = ReportDataV1::new( - *Ed25519PublicKey::from(tls_public_key).as_bytes(), - *Ed25519PublicKey::from(account_public_key).as_bytes(), - ) - .into(); - - let attestation = tee_authority.generate_attestation(report_data).await?; - - // Create communication channels and runtime - let (debug_request_sender, _) = tokio::sync::broadcast::channel(10); - let root_task_handle = Arc::new(OnceLock::new()); - - let (protocol_state_sender, protocol_state_receiver) = - watch::channel(ProtocolContractState::NotInitialized); - - let (migration_state_sender, migration_state_receiver) = - watch::channel((0, BTreeMap::new())); - let web_server = root_runtime - .block_on(start_web_server( - root_task_handle.clone(), - debug_request_sender.clone(), - config.web_ui, - static_web_data(&secrets, Some(attestation)), - protocol_state_receiver, - migration_state_receiver, - )) - .context("Failed to create web server.")?; - - let _web_server_join_handle = root_runtime.spawn(web_server); - - // Create Indexer and wait for indexer to be synced. 
- let (indexer_exit_sender, indexer_exit_receiver) = oneshot::channel(); - let indexer_api = spawn_real_indexer( - home_dir.clone(), - config.indexer.clone(), - config.my_near_account_id.clone(), - persistent_secrets.near_signer_key.clone(), - respond_config, - indexer_exit_sender, - protocol_state_sender, - migration_state_sender, - *tls_public_key, - ); - - let (shutdown_signal_sender, mut shutdown_signal_receiver) = mpsc::channel(1); - let cancellation_token = CancellationToken::new(); - - let image_hash_watcher_handle = if let (Some(image_hash), Some(latest_allowed_hash_file)) = ( - &self.image_hash_config.image_hash, - &self.image_hash_config.latest_allowed_hash_file, - ) { - let current_image_hash_bytes: [u8; 32] = hex::decode(image_hash) - .expect("The currently running image is a hex string.") - .try_into() - .expect("The currently running image hash hex representation is 32 bytes."); - - let allowed_hashes_in_contract = indexer_api.allowed_docker_images_receiver.clone(); - let image_hash_storage = AllowedImageHashesFile::from(latest_allowed_hash_file.clone()); - - Some(root_runtime.spawn(monitor_allowed_image_hashes( - cancellation_token.child_token(), - MpcDockerImageHash::from(current_image_hash_bytes), - allowed_hashes_in_contract, - image_hash_storage, - shutdown_signal_sender.clone(), - ))) - } else { - tracing::info!( - "MPC_IMAGE_HASH and/or MPC_LATEST_ALLOWED_HASH_FILE not set, skipping TEE image hash monitoring" - ); - None - }; - - let root_future = self.create_root_future( - home_dir.clone(), - config.clone(), - secrets.clone(), - indexer_api, - debug_request_sender, - root_task_handle, - tee_authority, - ); - - let root_task = root_runtime.spawn(start_root_task("root", root_future).0); - - let exit_reason = tokio::select! { - root_task_result = root_task => { - root_task_result? - } - indexer_exit_response = indexer_exit_receiver => { - indexer_exit_response.context("Indexer thread dropped response channel.")? 
- } - Some(()) = shutdown_signal_receiver.recv() => { - Err(anyhow!("TEE allowed image hashes watcher is sending shutdown signal.")) - } - }; - - // Perform graceful shutdown - cancellation_token.cancel(); - - if let Some(handle) = image_hash_watcher_handle { - info!("Waiting for image hash watcher to gracefully exit."); - let exit_result = handle.await; - info!(?exit_result, "Image hash watcher exited."); - } - - exit_reason - } - - #[allow(clippy::too_many_arguments)] - async fn create_root_future( - self, - home_dir: PathBuf, - config: ConfigFile, - secrets: SecretsConfig, - indexer_api: IndexerAPI, - debug_request_sender: broadcast::Sender, - // Cloning a OnceLock returns a new cell, which is why we have to wrap it in an arc. - // Otherwise we would not write to the same cell/lock. - root_task_handle_once_lock: Arc>>, - tee_authority: TeeAuthority, - ) -> anyhow::Result<()> - where - TransactionSenderImpl: TransactionSender + 'static, - ForeignChainPolicyReader: ReadForeignChainPolicy + Clone + Send + Sync + 'static, - { - let root_task_handle = tracking::current_task(); - - root_task_handle_once_lock - .set(root_task_handle.clone()) - .map_err(|_| anyhow!("Root task handle was already set"))?; - - let tls_public_key = - Ed25519PublicKey::from(&secrets.persistent_secrets.p2p_private_key.verifying_key()); - let account_public_key = - Ed25519PublicKey::from(&secrets.persistent_secrets.near_signer_key.verifying_key()); - - let secret_db = SecretDB::new(&home_dir.join("assets"), secrets.local_storage_aes_key)?; - - let key_storage_config = KeyStorageConfig { - home_dir: home_dir.clone(), - local_encryption_key: secrets.local_storage_aes_key, - gcp: if let Some(secret_id) = self.gcp_keyshare_secret_id { - let project_id = self.gcp_project_id.ok_or_else(|| { - anyhow::anyhow!( - "GCP_PROJECT_ID must be specified to use GCP_KEYSHARE_SECRET_ID" - ) - })?; - Some(GcpPermanentKeyStorageConfig { - project_id, - secret_id, - }) - } else { - None - }, - }; - - // Spawn 
periodic attestation submission task - let tee_authority_clone = tee_authority.clone(); - let tx_sender_clone = indexer_api.txn_sender.clone(); - let tls_public_key_clone = tls_public_key.clone(); - let account_public_key_clone = account_public_key.clone(); - let allowed_docker_images_receiver_clone = - indexer_api.allowed_docker_images_receiver.clone(); - let allowed_launcher_compose_receiver_clone = - indexer_api.allowed_launcher_compose_receiver.clone(); - tokio::spawn(async move { - if let Err(e) = periodic_attestation_submission( - tee_authority_clone, - tx_sender_clone, - tls_public_key_clone, - account_public_key_clone, - allowed_docker_images_receiver_clone, - allowed_launcher_compose_receiver_clone, - tokio::time::interval(ATTESTATION_RESUBMISSION_INTERVAL), - ) - .await - { - tracing::error!( - error = ?e, - "periodic attestation submission task failed" - ); - } - }); - - // Spawn TEE attestation monitoring task - let tx_sender_clone = indexer_api.txn_sender.clone(); - let tee_accounts_receiver = indexer_api.attested_nodes_receiver.clone(); - let account_id_clone = config.my_near_account_id.clone(); - let allowed_docker_images_receiver_clone = - indexer_api.allowed_docker_images_receiver.clone(); - let allowed_launcher_compose_receiver_clone = - indexer_api.allowed_launcher_compose_receiver.clone(); - tokio::spawn(async move { - if let Err(e) = monitor_attestation_removal( - account_id_clone, - tee_authority, - tx_sender_clone, - tls_public_key, - account_public_key, - allowed_docker_images_receiver_clone, - allowed_launcher_compose_receiver_clone, - tee_accounts_receiver, - ) - .await - { - tracing::error!( - error = ?e, - "attestation removal monitoring task failed" - ); - } - }); - - let keyshare_storage: Arc> = - RwLock::new(key_storage_config.create().await?).into(); - - spawn_recovery_server_and_run_onboarding( - config.migration_web_ui, - (&secrets).into(), - config.my_near_account_id.clone(), - keyshare_storage.clone(), - 
indexer_api.my_migration_info_receiver.clone(), - indexer_api.contract_state_receiver.clone(), - indexer_api.txn_sender.clone(), - ) - .await?; - - let coordinator = Coordinator { - clock: Clock::real(), - config_file: config, - secrets, - secret_db, - keyshare_storage, - indexer: indexer_api, - currently_running_job_name: Arc::new(Mutex::new(String::new())), - debug_request_sender, - }; - coordinator.run().await - } -} +// --------------------------------------------------------------------------- +// Dispatch +// --------------------------------------------------------------------------- impl Cli { pub async fn run(self) -> anyhow::Result<()> { match self.command { - CliCommand::Start(start) => start.run().await, + CliCommand::StartWithConfigFile { config_path } => { + StartConfig::from_json_file(&config_path)?.run().await + } + CliCommand::Start(start) => StartConfig::from(start).run().await, CliCommand::Init(config) => { let (download_config_type, download_config_url) = if config.download_config { ( @@ -565,7 +287,7 @@ impl Cli { participants.len() == responders.len(), "Number of participants must match number of responders" ); - self.run_generate_test_configs( + run_generate_test_configs( output_dir, participants.clone(), responders.clone(), @@ -578,132 +300,12 @@ impl Cli { } } } - - fn duplicate_migrating_accounts( - mut accounts: Vec, - migrating_nodes: &[usize], - ) -> anyhow::Result> { - for migrating_node_idx in migrating_nodes { - let migrating_node_account: AccountId = accounts - .get(*migrating_node_idx) - .ok_or_else(|| { - anyhow::anyhow!("index {} out of bounds for accounts", migrating_node_idx) - })? 
- .clone(); - - accounts.push(migrating_node_account); - } - Ok(accounts) - } - - #[allow(clippy::too_many_arguments)] - fn run_generate_test_configs( - &self, - output_dir: &str, - participants: Vec, - responders: Vec, - threshold: usize, - desired_triples_to_buffer: usize, - desired_presignatures_to_buffer: usize, - desired_responder_keys_per_participant: usize, - migrating_nodes: &[usize], - ) -> anyhow::Result<()> { - let participants = Self::duplicate_migrating_accounts(participants, migrating_nodes)?; - let responders = Self::duplicate_migrating_accounts(responders, migrating_nodes)?; - - let p2p_key_pairs = participants - .iter() - .enumerate() - .map(|(idx, _account_id)| { - let subdir = PathBuf::from(output_dir).join(idx.to_string()); - PersistentSecrets::generate_or_get_existing( - &subdir, - desired_responder_keys_per_participant, - ) - .map(|secret| secret.p2p_private_key) - }) - .collect::, _>>()?; - let configs = generate_test_p2p_configs( - &participants, - threshold, - PortSeed::CLI_FOR_PYTEST, - Some(p2p_key_pairs), - )?; - let participants_config = configs[0].0.participants.clone(); - for (i, (_config, _p2p_private_key)) in configs.into_iter().enumerate() { - let subdir = format!("{}/{}", output_dir, i); - std::fs::create_dir_all(&subdir)?; - let file_config = self.create_file_config( - &participants[i], - &responders[i], - i, - desired_triples_to_buffer, - desired_presignatures_to_buffer, - )?; - std::fs::write( - format!("{}/config.yaml", subdir), - serde_yaml::to_string(&file_config)?, - )?; - } - std::fs::write( - format!("{}/participants.json", output_dir), - serde_json::to_string(&participants_config)?, - )?; - Ok(()) - } - - fn create_file_config( - &self, - participant: &AccountId, - responder: &AccountId, - index: usize, - desired_triples_to_buffer: usize, - desired_presignatures_to_buffer: usize, - ) -> anyhow::Result { - Ok(ConfigFile { - my_near_account_id: participant.clone(), - near_responder_account_id: responder.clone(), - 
number_of_responder_keys: 1, - web_ui: SocketAddr::new( - Ipv4Addr::LOCALHOST.into(), - PortSeed::CLI_FOR_PYTEST.web_port(index), - ), - migration_web_ui: SocketAddr::new( - Ipv4Addr::LOCALHOST.into(), - PortSeed::CLI_FOR_PYTEST.migration_web_port(index), - ), - pprof_bind_address: SocketAddr::new( - Ipv4Addr::LOCALHOST.into(), - PortSeed::CLI_FOR_PYTEST.pprof_web_port(index), - ), - indexer: IndexerConfig { - validate_genesis: true, - sync_mode: SyncMode::Block(BlockArgs { height: 0 }), - concurrency: 1.try_into().unwrap(), - mpc_contract_id: "test0".parse().unwrap(), - finality: Finality::None, - port_override: None, - }, - triple: TripleConfig { - concurrency: 2, - desired_triples_to_buffer, - timeout_sec: 60, - parallel_triple_generation_stagger_time_sec: 1, - }, - presignature: PresignatureConfig { - concurrency: 2, - desired_presignatures_to_buffer, - timeout_sec: 60, - }, - signature: SignatureConfig { timeout_sec: 60 }, - ckd: CKDConfig { timeout_sec: 60 }, - keygen: KeygenConfig { timeout_sec: 60 }, - foreign_chains: ForeignChainsConfig::default(), - cores: Some(4), - }) - } } +// --------------------------------------------------------------------------- +// Import/Export keyshare implementations +// --------------------------------------------------------------------------- + impl ImportKeyshareCmd { pub async fn run(&self) -> anyhow::Result<()> { let runtime = tokio::runtime::Runtime::new()?; @@ -801,6 +403,136 @@ impl ExportKeyshareCmd { } } +// --------------------------------------------------------------------------- +// Test config generation +// --------------------------------------------------------------------------- + +fn duplicate_migrating_accounts( + mut accounts: Vec, + migrating_nodes: &[usize], +) -> anyhow::Result> { + for migrating_node_idx in migrating_nodes { + let migrating_node_account: AccountId = accounts + .get(*migrating_node_idx) + .ok_or_else(|| { + anyhow::anyhow!("index {} out of bounds for accounts", migrating_node_idx) + 
})? + .clone(); + + accounts.push(migrating_node_account); + } + Ok(accounts) +} + +#[allow(clippy::too_many_arguments)] +fn run_generate_test_configs( + output_dir: &str, + participants: Vec, + responders: Vec, + threshold: usize, + desired_triples_to_buffer: usize, + desired_presignatures_to_buffer: usize, + desired_responder_keys_per_participant: usize, + migrating_nodes: &[usize], +) -> anyhow::Result<()> { + let participants = duplicate_migrating_accounts(participants, migrating_nodes)?; + let responders = duplicate_migrating_accounts(responders, migrating_nodes)?; + + let p2p_key_pairs = participants + .iter() + .enumerate() + .map(|(idx, _account_id)| { + let subdir = PathBuf::from(output_dir).join(idx.to_string()); + PersistentSecrets::generate_or_get_existing( + &subdir, + desired_responder_keys_per_participant, + ) + .map(|secret| secret.p2p_private_key) + }) + .collect::, _>>()?; + let configs = generate_test_p2p_configs( + &participants, + threshold, + PortSeed::CLI_FOR_PYTEST, + Some(p2p_key_pairs), + )?; + let participants_config = configs[0].0.participants.clone(); + for (i, (_config, _p2p_private_key)) in configs.into_iter().enumerate() { + let subdir = format!("{}/{}", output_dir, i); + std::fs::create_dir_all(&subdir)?; + let file_config = create_file_config( + &participants[i], + &responders[i], + i, + desired_triples_to_buffer, + desired_presignatures_to_buffer, + ); + std::fs::write( + format!("{}/config.yaml", subdir), + serde_yaml::to_string(&file_config)?, + )?; + } + std::fs::write( + format!("{}/participants.json", output_dir), + serde_json::to_string(&participants_config)?, + )?; + Ok(()) +} + +fn create_file_config( + participant: &AccountId, + responder: &AccountId, + index: usize, + desired_triples_to_buffer: usize, + desired_presignatures_to_buffer: usize, +) -> ConfigFile { + ConfigFile { + my_near_account_id: participant.clone(), + near_responder_account_id: responder.clone(), + number_of_responder_keys: 1, + web_ui: 
SocketAddr::new( + Ipv4Addr::LOCALHOST.into(), + PortSeed::CLI_FOR_PYTEST.web_port(index), + ), + migration_web_ui: SocketAddr::new( + Ipv4Addr::LOCALHOST.into(), + PortSeed::CLI_FOR_PYTEST.migration_web_port(index), + ), + pprof_bind_address: SocketAddr::new( + Ipv4Addr::LOCALHOST.into(), + PortSeed::CLI_FOR_PYTEST.pprof_web_port(index), + ), + indexer: IndexerConfig { + validate_genesis: true, + sync_mode: SyncMode::Block(BlockArgs { height: 0 }), + concurrency: 1.try_into().unwrap(), + mpc_contract_id: "test0".parse().unwrap(), + finality: Finality::None, + port_override: None, + }, + triple: TripleConfig { + concurrency: 2, + desired_triples_to_buffer, + timeout_sec: 60, + parallel_triple_generation_stagger_time_sec: 1, + }, + presignature: PresignatureConfig { + concurrency: 2, + desired_presignatures_to_buffer, + timeout_sec: 60, + }, + signature: SignatureConfig { timeout_sec: 60 }, + ckd: CKDConfig { timeout_sec: 60 }, + keygen: KeygenConfig { timeout_sec: 60 }, + foreign_chains: ForeignChainsConfig::default(), + cores: Some(4), + } +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + #[cfg(test)] mod tests { use super::*; diff --git a/crates/node/src/config.rs b/crates/node/src/config.rs index fdabd98bf..9969e1464 100644 --- a/crates/node/src/config.rs +++ b/crates/node/src/config.rs @@ -13,6 +13,9 @@ use std::{ path::Path, }; +mod start; +pub use start::{StartConfig, TeeAuthorityStartConfig}; + mod foreign_chains; pub use foreign_chains::{ AbstractApiVariant, AbstractChainConfig, AbstractProviderConfig, AuthConfig, BitcoinApiVariant, diff --git a/crates/node/src/config/start.rs b/crates/node/src/config/start.rs new file mode 100644 index 000000000..29a6b3a9f --- /dev/null +++ b/crates/node/src/config/start.rs @@ -0,0 +1,80 @@ +use anyhow::Context; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; +use 
tee_authority::tee_authority::{ + DstackTeeAuthorityConfig, LocalTeeAuthorityConfig, TeeAuthority, DEFAULT_DSTACK_ENDPOINT, + DEFAULT_PHALA_TDX_QUOTE_UPLOAD_URL, +}; +use url::Url; + +/// Configuration for starting the MPC node. This is the canonical type used +/// by the run logic. Both `StartCmd` (CLI flags) and `StartWithConfigFileCmd` +/// (JSON file) convert into this type. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct StartConfig { + pub home_dir: String, + /// Hex-encoded 16 byte AES key for local storage encryption. + pub secret_store_key_hex: String, + /// If provided, the root keyshare is stored on GCP. + #[serde(default)] + pub gcp_keyshare_secret_id: Option, + #[serde(default)] + pub gcp_project_id: Option, + /// TEE authority configuration. + pub tee_authority: TeeAuthorityStartConfig, + /// Hex representation of the hash of the running image. Only required in TEE. + #[serde(default)] + pub image_hash: Option, + /// Path to the file where the node writes the latest allowed hash. + /// If not set, assumes running outside of TEE and skips image hash monitoring. + #[serde(default)] + pub latest_allowed_hash_file: Option, + /// Hex-encoded 32 byte AES key for backup encryption. + #[serde(default)] + pub backup_encryption_key_hex: Option, +} + +/// TEE authority configuration for JSON deserialization. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum TeeAuthorityStartConfig { + Local, + Dstack { + #[serde(default = "default_dstack_endpoint")] + dstack_endpoint: String, + #[serde(default = "default_quote_upload_url")] + quote_upload_url: String, + }, +} + +fn default_dstack_endpoint() -> String { + DEFAULT_DSTACK_ENDPOINT.to_string() +} + +fn default_quote_upload_url() -> String { + DEFAULT_PHALA_TDX_QUOTE_UPLOAD_URL.to_string() +} + +impl TeeAuthorityStartConfig { + pub fn into_tee_authority(self) -> anyhow::Result { + Ok(match self { + TeeAuthorityStartConfig::Local => LocalTeeAuthorityConfig::default().into(), + TeeAuthorityStartConfig::Dstack { + dstack_endpoint, + quote_upload_url, + } => { + let url: Url = quote_upload_url.parse().context("invalid quote_upload_url")?; + DstackTeeAuthorityConfig::new(dstack_endpoint, url).into() + } + }) + } +} + +impl StartConfig { + pub fn from_json_file(path: &std::path::Path) -> anyhow::Result { + let content = std::fs::read_to_string(path) + .with_context(|| format!("failed to read config file: {}", path.display()))?; + serde_json::from_str(&content) + .with_context(|| format!("failed to parse config file: {}", path.display())) + } +} diff --git a/crates/node/src/lib.rs b/crates/node/src/lib.rs index 71778f6fe..dffd498fe 100644 --- a/crates/node/src/lib.rs +++ b/crates/node/src/lib.rs @@ -39,6 +39,7 @@ mod protocol; mod protocol_version; mod providers; pub mod requests; +mod run; mod runtime; mod storage; pub mod tracing; diff --git a/crates/node/src/run.rs b/crates/node/src/run.rs new file mode 100644 index 000000000..127c749ef --- /dev/null +++ b/crates/node/src/run.rs @@ -0,0 +1,321 @@ +use crate::{ + config::{ + generate_and_write_backup_encryption_key_to_disk, load_config_file, ConfigFile, + PersistentSecrets, RespondConfig, SecretsConfig, StartConfig, + }, + coordinator::Coordinator, + db::SecretDB, + indexer::{ + real::spawn_real_indexer, 
tx_sender::TransactionSender, IndexerAPI, ReadForeignChainPolicy, + }, + keyshare::{GcpPermanentKeyStorageConfig, KeyStorageConfig, KeyshareStorage}, + migration_service::spawn_recovery_server_and_run_onboarding, + profiler, + tracking::{self, start_root_task}, + web::{start_web_server, static_web_data, DebugRequest}, +}; +use anyhow::{anyhow, Context}; +use contract_interface::types::Ed25519PublicKey; +use mpc_attestation::report_data::ReportDataV1; +use mpc_contract::state::ProtocolContractState; +use mpc_contract::tee::proposal::MpcDockerImageHash; +use near_time::Clock; +use std::{ + collections::BTreeMap, + path::PathBuf, + sync::{Arc, Mutex, OnceLock}, + time::Duration, +}; +use tee_authority::tee_authority::TeeAuthority; +use tokio::sync::{broadcast, mpsc, oneshot, watch, RwLock}; +use tokio_util::sync::CancellationToken; +use tracing::info; + +use crate::tee::{ + monitor_allowed_image_hashes, + remote_attestation::{monitor_attestation_removal, periodic_attestation_submission}, + AllowedImageHashesFile, +}; + +pub const ATTESTATION_RESUBMISSION_INTERVAL: Duration = Duration::from_secs(60 * 60); // 1 hour + +impl StartConfig { + pub async fn run(self) -> anyhow::Result<()> { + let root_runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .worker_threads(1) + .build()?; + + let _tokio_enter_guard = root_runtime.enter(); + + // Load configuration and initialize persistent secrets + let home_dir = PathBuf::from(self.home_dir.clone()); + let config = load_config_file(&home_dir)?; + let persistent_secrets = PersistentSecrets::generate_or_get_existing( + &home_dir, + config.number_of_responder_keys, + )?; + + profiler::web_server::start_web_server(config.pprof_bind_address).await?; + root_runtime.spawn(crate::metrics::tokio_task_metrics::run_monitor_loop()); + + // TODO(#1296): Decide if the MPC responder account is actually needed + let respond_config = RespondConfig::from_parts(&config, &persistent_secrets); + + let backup_encryption_key_hex = 
match &self.backup_encryption_key_hex { + Some(key) => key.clone(), + None => generate_and_write_backup_encryption_key_to_disk(&home_dir)?, + }; + + // Load secrets from configuration and persistent storage + let secrets = SecretsConfig::from_parts( + &self.secret_store_key_hex, + persistent_secrets.clone(), + &backup_encryption_key_hex, + )?; + + // Generate attestation + let tee_authority = self.tee_authority.clone().into_tee_authority()?; + let tls_public_key = &secrets.persistent_secrets.p2p_private_key.verifying_key(); + + let account_public_key = &secrets.persistent_secrets.near_signer_key.verifying_key(); + + let report_data = ReportDataV1::new( + *Ed25519PublicKey::from(tls_public_key).as_bytes(), + *Ed25519PublicKey::from(account_public_key).as_bytes(), + ) + .into(); + + let attestation = tee_authority.generate_attestation(report_data).await?; + + // Create communication channels and runtime + let (debug_request_sender, _) = tokio::sync::broadcast::channel(10); + let root_task_handle = Arc::new(OnceLock::new()); + + let (protocol_state_sender, protocol_state_receiver) = + watch::channel(ProtocolContractState::NotInitialized); + + let (migration_state_sender, migration_state_receiver) = + watch::channel((0, BTreeMap::new())); + let web_server = root_runtime + .block_on(start_web_server( + root_task_handle.clone(), + debug_request_sender.clone(), + config.web_ui, + static_web_data(&secrets, Some(attestation)), + protocol_state_receiver, + migration_state_receiver, + )) + .context("Failed to create web server.")?; + + let _web_server_join_handle = root_runtime.spawn(web_server); + + // Create Indexer and wait for indexer to be synced. 
+ let (indexer_exit_sender, indexer_exit_receiver) = oneshot::channel(); + let indexer_api = spawn_real_indexer( + home_dir.clone(), + config.indexer.clone(), + config.my_near_account_id.clone(), + persistent_secrets.near_signer_key.clone(), + respond_config, + indexer_exit_sender, + protocol_state_sender, + migration_state_sender, + *tls_public_key, + ); + + let (shutdown_signal_sender, mut shutdown_signal_receiver) = mpsc::channel(1); + let cancellation_token = CancellationToken::new(); + + let image_hash_watcher_handle = + if let (Some(image_hash), Some(latest_allowed_hash_file)) = + (&self.image_hash, &self.latest_allowed_hash_file) + { + let current_image_hash_bytes: [u8; 32] = hex::decode(image_hash) + .expect("The currently running image is a hex string.") + .try_into() + .expect("The currently running image hash hex representation is 32 bytes."); + + let allowed_hashes_in_contract = + indexer_api.allowed_docker_images_receiver.clone(); + let image_hash_storage = + AllowedImageHashesFile::from(latest_allowed_hash_file.clone()); + + Some(root_runtime.spawn(monitor_allowed_image_hashes( + cancellation_token.child_token(), + MpcDockerImageHash::from(current_image_hash_bytes), + allowed_hashes_in_contract, + image_hash_storage, + shutdown_signal_sender.clone(), + ))) + } else { + tracing::info!( + "MPC_IMAGE_HASH and/or MPC_LATEST_ALLOWED_HASH_FILE not set, skipping TEE image hash monitoring" + ); + None + }; + + let root_future = create_root_future( + self, + home_dir.clone(), + config.clone(), + secrets.clone(), + indexer_api, + debug_request_sender, + root_task_handle, + tee_authority, + ); + + let root_task = root_runtime.spawn(start_root_task("root", root_future).0); + + let exit_reason = tokio::select! { + root_task_result = root_task => { + root_task_result? + } + indexer_exit_response = indexer_exit_receiver => { + indexer_exit_response.context("Indexer thread dropped response channel.")? 
+ } + Some(()) = shutdown_signal_receiver.recv() => { + Err(anyhow!("TEE allowed image hashes watcher is sending shutdown signal.")) + } + }; + + // Perform graceful shutdown + cancellation_token.cancel(); + + if let Some(handle) = image_hash_watcher_handle { + info!("Waiting for image hash watcher to gracefully exit."); + let exit_result = handle.await; + info!(?exit_result, "Image hash watcher exited."); + } + + exit_reason + } +} + +#[allow(clippy::too_many_arguments)] +async fn create_root_future( + start_config: StartConfig, + home_dir: PathBuf, + config: ConfigFile, + secrets: SecretsConfig, + indexer_api: IndexerAPI, + debug_request_sender: broadcast::Sender, + // Cloning a OnceLock returns a new cell, which is why we have to wrap it in an arc. + // Otherwise we would not write to the same cell/lock. + root_task_handle_once_lock: Arc>>, + tee_authority: TeeAuthority, +) -> anyhow::Result<()> +where + TransactionSenderImpl: TransactionSender + 'static, + ForeignChainPolicyReader: ReadForeignChainPolicy + Clone + Send + Sync + 'static, +{ + let root_task_handle = tracking::current_task(); + + root_task_handle_once_lock + .set(root_task_handle.clone()) + .map_err(|_| anyhow!("Root task handle was already set"))?; + + let tls_public_key = + Ed25519PublicKey::from(&secrets.persistent_secrets.p2p_private_key.verifying_key()); + let account_public_key = + Ed25519PublicKey::from(&secrets.persistent_secrets.near_signer_key.verifying_key()); + + let secret_db = SecretDB::new(&home_dir.join("assets"), secrets.local_storage_aes_key)?; + + let key_storage_config = KeyStorageConfig { + home_dir: home_dir.clone(), + local_encryption_key: secrets.local_storage_aes_key, + gcp: if let Some(secret_id) = start_config.gcp_keyshare_secret_id { + let project_id = start_config.gcp_project_id.ok_or_else(|| { + anyhow::anyhow!("GCP_PROJECT_ID must be specified to use GCP_KEYSHARE_SECRET_ID") + })?; + Some(GcpPermanentKeyStorageConfig { + project_id, + secret_id, + }) + } else { + 
None + }, + }; + + // Spawn periodic attestation submission task + let tee_authority_clone = tee_authority.clone(); + let tx_sender_clone = indexer_api.txn_sender.clone(); + let tls_public_key_clone = tls_public_key.clone(); + let account_public_key_clone = account_public_key.clone(); + let allowed_docker_images_receiver_clone = indexer_api.allowed_docker_images_receiver.clone(); + let allowed_launcher_compose_receiver_clone = + indexer_api.allowed_launcher_compose_receiver.clone(); + tokio::spawn(async move { + if let Err(e) = periodic_attestation_submission( + tee_authority_clone, + tx_sender_clone, + tls_public_key_clone, + account_public_key_clone, + allowed_docker_images_receiver_clone, + allowed_launcher_compose_receiver_clone, + tokio::time::interval(ATTESTATION_RESUBMISSION_INTERVAL), + ) + .await + { + tracing::error!( + error = ?e, + "periodic attestation submission task failed" + ); + } + }); + + // Spawn TEE attestation monitoring task + let tx_sender_clone = indexer_api.txn_sender.clone(); + let tee_accounts_receiver = indexer_api.attested_nodes_receiver.clone(); + let account_id_clone = config.my_near_account_id.clone(); + let allowed_docker_images_receiver_clone = indexer_api.allowed_docker_images_receiver.clone(); + let allowed_launcher_compose_receiver_clone = + indexer_api.allowed_launcher_compose_receiver.clone(); + tokio::spawn(async move { + if let Err(e) = monitor_attestation_removal( + account_id_clone, + tee_authority, + tx_sender_clone, + tls_public_key, + account_public_key, + allowed_docker_images_receiver_clone, + allowed_launcher_compose_receiver_clone, + tee_accounts_receiver, + ) + .await + { + tracing::error!( + error = ?e, + "attestation removal monitoring task failed" + ); + } + }); + + let keyshare_storage: Arc> = + RwLock::new(key_storage_config.create().await?).into(); + + spawn_recovery_server_and_run_onboarding( + config.migration_web_ui, + (&secrets).into(), + config.my_near_account_id.clone(), + keyshare_storage.clone(), + 
indexer_api.my_migration_info_receiver.clone(), + indexer_api.contract_state_receiver.clone(), + indexer_api.txn_sender.clone(), + ) + .await?; + + let coordinator = Coordinator { + clock: Clock::real(), + config_file: config, + secrets, + secret_db, + keyshare_storage, + indexer: indexer_api, + currently_running_job_name: Arc::new(Mutex::new(String::new())), + debug_request_sender, + }; + coordinator.run().await +} diff --git a/libs/nearcore b/libs/nearcore index 8a8c21bc8..3def2f7eb 160000 --- a/libs/nearcore +++ b/libs/nearcore @@ -1 +1 @@ -Subproject commit 8a8c21bc81999af93edd1b6bca5b7c6c6337aa63 +Subproject commit 3def2f7ebb7455199e7b3f7b371e3735c23e2930 From af78d2c518e5f01c1f346081b08856d688937eac Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 14:53:35 +0100 Subject: [PATCH 44/82] testing manually with localnet works --- crates/node/src/cli.rs | 58 ++-- crates/node/src/config.rs | 4 +- crates/node/src/config/foreign_chains/auth.rs | 4 + crates/node/src/config/start.rs | 50 +++- crates/node/src/run.rs | 31 +- docs/localnet/localnet.md | 276 ++++++++++-------- 6 files changed, 255 insertions(+), 168 deletions(-) diff --git a/crates/node/src/cli.rs b/crates/node/src/cli.rs index b4ca08405..a21dc1f01 100644 --- a/crates/node/src/cli.rs +++ b/crates/node/src/cli.rs @@ -1,8 +1,9 @@ use crate::{ config::{ - BlockArgs, CKDConfig, ConfigFile, ForeignChainsConfig, IndexerConfig, KeygenConfig, - PersistentSecrets, PresignatureConfig, SignatureConfig, StartConfig, SyncMode, - TeeAuthorityStartConfig, TripleConfig, + load_config_file, BlockArgs, CKDConfig, ConfigFile, ForeignChainsConfig, GcpStartConfig, + IndexerConfig, KeygenConfig, PersistentSecrets, PresignatureConfig, SecretsStartConfig, + SignatureConfig, StartConfig, SyncMode, TeeAuthorityStartConfig, TeeStartConfig, + TripleConfig, }, keyshare::{ compat::legacy_ecdsa_key_from_keyshares, @@ -143,26 +144,37 @@ pub struct CliImageHashConfig { pub latest_allowed_hash_file: Option, } -impl From 
for StartConfig { - fn from(cmd: StartCmd) -> Self { +impl StartCmd { + fn into_start_config(self, config: ConfigFile) -> StartConfig { + let gcp = match (self.gcp_keyshare_secret_id, self.gcp_project_id) { + (Some(keyshare_secret_id), Some(project_id)) => Some(GcpStartConfig { + keyshare_secret_id, + project_id, + }), + _ => None, + }; StartConfig { - home_dir: cmd.home_dir, - secret_store_key_hex: cmd.secret_store_key_hex, - gcp_keyshare_secret_id: cmd.gcp_keyshare_secret_id, - gcp_project_id: cmd.gcp_project_id, - tee_authority: match cmd.tee_authority { - CliTeeAuthorityConfig::Local => TeeAuthorityStartConfig::Local, - CliTeeAuthorityConfig::Dstack { - dstack_endpoint, - quote_upload_url, - } => TeeAuthorityStartConfig::Dstack { - dstack_endpoint, - quote_upload_url: quote_upload_url.to_string(), + home_dir: self.home_dir, + secrets: SecretsStartConfig { + secret_store_key_hex: self.secret_store_key_hex, + backup_encryption_key_hex: self.backup_encryption_key_hex, + }, + tee: TeeStartConfig { + authority: match self.tee_authority { + CliTeeAuthorityConfig::Local => TeeAuthorityStartConfig::Local, + CliTeeAuthorityConfig::Dstack { + dstack_endpoint, + quote_upload_url, + } => TeeAuthorityStartConfig::Dstack { + dstack_endpoint, + quote_upload_url: quote_upload_url.to_string(), + }, }, + image_hash: self.image_hash_config.image_hash, + latest_allowed_hash_file: self.image_hash_config.latest_allowed_hash_file, }, - image_hash: cmd.image_hash_config.image_hash, - latest_allowed_hash_file: cmd.image_hash_config.latest_allowed_hash_file, - backup_encryption_key_hex: cmd.backup_encryption_key_hex, + gcp, + node: config, } } } @@ -240,7 +252,11 @@ impl Cli { CliCommand::StartWithConfigFile { config_path } => { StartConfig::from_json_file(&config_path)?.run().await } - CliCommand::Start(start) => StartConfig::from(start).run().await, + CliCommand::Start(start) => { + let home_dir = std::path::Path::new(&start.home_dir); + let config_file = load_config_file(home_dir)?; 
+ start.into_start_config(config_file).run().await + } CliCommand::Init(config) => { let (download_config_type, download_config_url) = if config.download_config { ( diff --git a/crates/node/src/config.rs b/crates/node/src/config.rs index 9969e1464..f48c9f922 100644 --- a/crates/node/src/config.rs +++ b/crates/node/src/config.rs @@ -14,7 +14,9 @@ use std::{ }; mod start; -pub use start::{StartConfig, TeeAuthorityStartConfig}; +pub use start::{ + GcpStartConfig, SecretsStartConfig, StartConfig, TeeAuthorityStartConfig, TeeStartConfig, +}; mod foreign_chains; pub use foreign_chains::{ diff --git a/crates/node/src/config/foreign_chains/auth.rs b/crates/node/src/config/foreign_chains/auth.rs index 5269b3130..48d916acd 100644 --- a/crates/node/src/config/foreign_chains/auth.rs +++ b/crates/node/src/config/foreign_chains/auth.rs @@ -52,6 +52,10 @@ pub enum TokenConfig { impl TokenConfig { pub fn resolve(&self) -> anyhow::Result { match self { + // TODO: do not resolve env variables this deep in the binary. + // Should be resolved at start, preferably in the config so we can kill env configs + // + // One option is to have a separate secrets config file. TokenConfig::Env { env } => { std::env::var(env).with_context(|| format!("environment variable {env} is not set")) } diff --git a/crates/node/src/config/start.rs b/crates/node/src/config/start.rs index 29a6b3a9f..c5ed3d802 100644 --- a/crates/node/src/config/start.rs +++ b/crates/node/src/config/start.rs @@ -1,3 +1,4 @@ +use super::ConfigFile; use anyhow::Context; use serde::{Deserialize, Serialize}; use std::path::PathBuf; @@ -13,15 +14,33 @@ use url::Url; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct StartConfig { pub home_dir: String, + /// Encryption keys and backup settings. + pub secrets: SecretsStartConfig, + /// TEE authority and image hash monitoring settings. + pub tee: TeeStartConfig, + /// GCP keyshare storage settings. Optional — omit if not using GCP. 
+ #[serde(default)] + pub gcp: Option, + /// Node configuration (indexer, protocol parameters, etc.). + pub node: ConfigFile, +} + +/// Encryption keys needed at startup. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SecretsStartConfig { /// Hex-encoded 16 byte AES key for local storage encryption. pub secret_store_key_hex: String, - /// If provided, the root keyshare is stored on GCP. - #[serde(default)] - pub gcp_keyshare_secret_id: Option, + /// Hex-encoded 32 byte AES key for backup encryption. + /// If not provided, a key is generated and written to disk. #[serde(default)] - pub gcp_project_id: Option, + pub backup_encryption_key_hex: Option, +} + +/// TEE-related configuration. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TeeStartConfig { /// TEE authority configuration. - pub tee_authority: TeeAuthorityStartConfig, + pub authority: TeeAuthorityStartConfig, /// Hex representation of the hash of the running image. Only required in TEE. #[serde(default)] pub image_hash: Option, @@ -29,9 +48,15 @@ pub struct StartConfig { /// If not set, assumes running outside of TEE and skips image hash monitoring. #[serde(default)] pub latest_allowed_hash_file: Option, - /// Hex-encoded 32 byte AES key for backup encryption. - #[serde(default)] - pub backup_encryption_key_hex: Option, +} + +/// GCP keyshare storage configuration. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct GcpStartConfig { + /// GCP secret ID for storing the root keyshare. + pub keyshare_secret_id: String, + /// GCP project ID. + pub project_id: String, } /// TEE authority configuration for JSON deserialization. 
@@ -74,7 +99,12 @@ impl StartConfig { pub fn from_json_file(path: &std::path::Path) -> anyhow::Result { let content = std::fs::read_to_string(path) .with_context(|| format!("failed to read config file: {}", path.display()))?; - serde_json::from_str(&content) - .with_context(|| format!("failed to parse config file: {}", path.display())) + let config: Self = serde_json::from_str(&content) + .with_context(|| format!("failed to parse config file: {}", path.display()))?; + config + .node + .validate() + .context("invalid node config in config file")?; + Ok(config) } } diff --git a/crates/node/src/run.rs b/crates/node/src/run.rs index 127c749ef..263b30a56 100644 --- a/crates/node/src/run.rs +++ b/crates/node/src/run.rs @@ -1,7 +1,7 @@ use crate::{ config::{ - generate_and_write_backup_encryption_key_to_disk, load_config_file, ConfigFile, - PersistentSecrets, RespondConfig, SecretsConfig, StartConfig, + generate_and_write_backup_encryption_key_to_disk, ConfigFile, PersistentSecrets, + RespondConfig, SecretsConfig, StartConfig, }, coordinator::Coordinator, db::SecretDB, @@ -50,7 +50,7 @@ impl StartConfig { // Load configuration and initialize persistent secrets let home_dir = PathBuf::from(self.home_dir.clone()); - let config = load_config_file(&home_dir)?; + let config = self.node.clone(); let persistent_secrets = PersistentSecrets::generate_or_get_existing( &home_dir, config.number_of_responder_keys, @@ -62,20 +62,20 @@ impl StartConfig { // TODO(#1296): Decide if the MPC responder account is actually needed let respond_config = RespondConfig::from_parts(&config, &persistent_secrets); - let backup_encryption_key_hex = match &self.backup_encryption_key_hex { + let backup_encryption_key_hex = match &self.secrets.backup_encryption_key_hex { Some(key) => key.clone(), None => generate_and_write_backup_encryption_key_to_disk(&home_dir)?, }; // Load secrets from configuration and persistent storage let secrets = SecretsConfig::from_parts( - &self.secret_store_key_hex, + 
&self.secrets.secret_store_key_hex, persistent_secrets.clone(), &backup_encryption_key_hex, )?; // Generate attestation - let tee_authority = self.tee_authority.clone().into_tee_authority()?; + let tee_authority = self.tee.authority.clone().into_tee_authority()?; let tls_public_key = &secrets.persistent_secrets.p2p_private_key.verifying_key(); let account_public_key = &secrets.persistent_secrets.near_signer_key.verifying_key(); @@ -129,7 +129,7 @@ impl StartConfig { let image_hash_watcher_handle = if let (Some(image_hash), Some(latest_allowed_hash_file)) = - (&self.image_hash, &self.latest_allowed_hash_file) + (&self.tee.image_hash, &self.tee.latest_allowed_hash_file) { let current_image_hash_bytes: [u8; 32] = hex::decode(image_hash) .expect("The currently running image is a hex string.") @@ -150,7 +150,7 @@ impl StartConfig { ))) } else { tracing::info!( - "MPC_IMAGE_HASH and/or MPC_LATEST_ALLOWED_HASH_FILE not set, skipping TEE image hash monitoring" + "image_hash and/or latest_allowed_hash_file not set, skipping TEE image hash monitoring" ); None }; @@ -226,17 +226,10 @@ where let key_storage_config = KeyStorageConfig { home_dir: home_dir.clone(), local_encryption_key: secrets.local_storage_aes_key, - gcp: if let Some(secret_id) = start_config.gcp_keyshare_secret_id { - let project_id = start_config.gcp_project_id.ok_or_else(|| { - anyhow::anyhow!("GCP_PROJECT_ID must be specified to use GCP_KEYSHARE_SECRET_ID") - })?; - Some(GcpPermanentKeyStorageConfig { - project_id, - secret_id, - }) - } else { - None - }, + gcp: start_config.gcp.map(|gcp| GcpPermanentKeyStorageConfig { + project_id: gcp.project_id, + secret_id: gcp.keyshare_secret_id, + }), }; // Spawn periodic attestation submission task diff --git a/docs/localnet/localnet.md b/docs/localnet/localnet.md index 24dc69ce8..fd55b62e4 100644 --- a/docs/localnet/localnet.md +++ b/docs/localnet/localnet.md @@ -191,64 +191,86 @@ Since this is not a validator node, we can remove `validator_key.json` rm 
~/.near/mpc-frodo/validator_key.json ``` -Next we'll create a `config.yaml` for the MPC-indexer: - -```shell -cat > ~/.near/mpc-frodo/config.yaml << 'EOF' -my_near_account_id: frodo.test.near -near_responder_account_id: frodo.test.near -number_of_responder_keys: 1 -web_ui: 127.0.0.1:8081 -migration_web_ui: 127.0.0.1:8079 -pprof_bind_address: 127.0.0.1:34001 -triple: - concurrency: 2 - desired_triples_to_buffer: 128 - timeout_sec: 60 - parallel_triple_generation_stagger_time_sec: 1 -presignature: - concurrency: 4 - desired_presignatures_to_buffer: 64 - timeout_sec: 60 -signature: - timeout_sec: 60 -indexer: - validate_genesis: false - sync_mode: Latest - concurrency: 1 - mpc_contract_id: mpc-contract.test.near - finality: optimistic -ckd: - timeout_sec: 60 -cores: 4 -foreign_chains: - bitcoin: - timeout_sec: 30 - max_retries: 3 - providers: - public: - api_variant: esplora - rpc_url: "https://bitcoin-rpc.publicnode.com" - auth: - kind: none - abstract: - timeout_sec: 30 - max_retries: 3 - providers: - public: - api_variant: standard - rpc_url: "https://api.testnet.abs.xyz" - auth: - kind: none - starknet: - timeout_sec: 30 - max_retries: 3 - providers: - public: - api_variant: standard - rpc_url: "https://starknet-rpc.publicnode.com" - auth: - kind: none +Next we'll create a JSON configuration file for Frodo's MPC node. 
This single file +contains all settings (secrets, TEE config, and node parameters): + +```shell +cat > ~/.near/mpc-frodo/mpc-config.json << EOF +{ + "home_dir": "$HOME/.near/mpc-frodo", + "secrets": { + "secret_store_key_hex": "11111111111111111111111111111111" + }, + "tee": { + "authority": { "type": "local" }, + "image_hash": "8b40f81f77b8c22d6c777a6e14d307a1d11cb55ab83541fbb8575d02d86a74b0", + "latest_allowed_hash_file": "/tmp/LATEST_ALLOWED_HASH_FILE.txt" + }, + "node": { + "my_near_account_id": "frodo.test.near", + "near_responder_account_id": "frodo.test.near", + "number_of_responder_keys": 1, + "web_ui": "127.0.0.1:8081", + "migration_web_ui": "127.0.0.1:8079", + "pprof_bind_address": "127.0.0.1:34001", + "triple": { + "concurrency": 2, + "desired_triples_to_buffer": 128, + "timeout_sec": 60, + "parallel_triple_generation_stagger_time_sec": 1 + }, + "presignature": { + "concurrency": 4, + "desired_presignatures_to_buffer": 64, + "timeout_sec": 60 + }, + "signature": { "timeout_sec": 60 }, + "indexer": { + "validate_genesis": false, + "sync_mode": "Latest", + "concurrency": 1, + "mpc_contract_id": "mpc-contract.test.near", + "finality": "optimistic" + }, + "ckd": { "timeout_sec": 60 }, + "cores": 4, + "foreign_chains": { + "bitcoin": { + "timeout_sec": 30, + "max_retries": 3, + "providers": { + "public": { + "api_variant": "esplora", + "rpc_url": "https://bitcoin-rpc.publicnode.com", + "auth": { "kind": "none" } + } + } + }, + "abstract": { + "timeout_sec": 30, + "max_retries": 3, + "providers": { + "public": { + "api_variant": "standard", + "rpc_url": "https://api.testnet.abs.xyz", + "auth": { "kind": "none" } + } + } + }, + "starknet": { + "timeout_sec": 30, + "max_retries": 3, + "providers": { + "public": { + "api_variant": "standard", + "rpc_url": "https://starknet-rpc.publicnode.com", + "auth": { "kind": "none" } + } + } + } + } + } +} EOF ``` @@ -273,79 +295,99 @@ rm ~/.near/mpc-sam/validator_key.json ``` ```shell -cat > ~/.near/mpc-sam/config.yaml << 
'EOF' -my_near_account_id: sam.test.near -near_responder_account_id: sam.test.near -number_of_responder_keys: 1 -web_ui: 127.0.0.1:8082 -migration_web_ui: 127.0.0.1:8078 -pprof_bind_address: 127.0.0.1:34002 -triple: - concurrency: 2 - desired_triples_to_buffer: 128 - timeout_sec: 60 - parallel_triple_generation_stagger_time_sec: 1 -presignature: - concurrency: 4 - desired_presignatures_to_buffer: 64 - timeout_sec: 60 -signature: - timeout_sec: 60 -indexer: - validate_genesis: false - sync_mode: Latest - concurrency: 1 - mpc_contract_id: mpc-contract.test.near - finality: optimistic -ckd: - timeout_sec: 60 -cores: 4 -foreign_chains: - bitcoin: - timeout_sec: 30 - max_retries: 3 - providers: - public: - api_variant: esplora - rpc_url: "https://bitcoin-rpc.publicnode.com" - auth: - kind: none - abstract: - timeout_sec: 30 - max_retries: 3 - providers: - public: - api_variant: standard - rpc_url: "https://api.testnet.abs.xyz" - auth: - kind: none - starknet: - timeout_sec: 30 - max_retries: 3 - providers: - public: - api_variant: standard - rpc_url: "https://starknet-rpc.publicnode.com" - auth: - kind: none +cat > ~/.near/mpc-sam/mpc-config.json << EOF +{ + "home_dir": "$HOME/.near/mpc-sam", + "secrets": { + "secret_store_key_hex": "11111111111111111111111111111111" + }, + "tee": { + "authority": { "type": "local" }, + "image_hash": "8b40f81f77b8c22d6c777a6e14d307a1d11cb55ab83541fbb8575d02d86a74b0", + "latest_allowed_hash_file": "/tmp/LATEST_ALLOWED_HASH_FILE.txt" + }, + "node": { + "my_near_account_id": "sam.test.near", + "near_responder_account_id": "sam.test.near", + "number_of_responder_keys": 1, + "web_ui": "127.0.0.1:8082", + "migration_web_ui": "127.0.0.1:8078", + "pprof_bind_address": "127.0.0.1:34002", + "triple": { + "concurrency": 2, + "desired_triples_to_buffer": 128, + "timeout_sec": 60, + "parallel_triple_generation_stagger_time_sec": 1 + }, + "presignature": { + "concurrency": 4, + "desired_presignatures_to_buffer": 64, + "timeout_sec": 60 + }, + 
"signature": { "timeout_sec": 60 }, + "indexer": { + "validate_genesis": false, + "sync_mode": "Latest", + "concurrency": 1, + "mpc_contract_id": "mpc-contract.test.near", + "finality": "optimistic" + }, + "ckd": { "timeout_sec": 60 }, + "cores": 4, + "foreign_chains": { + "bitcoin": { + "timeout_sec": 30, + "max_retries": 3, + "providers": { + "public": { + "api_variant": "esplora", + "rpc_url": "https://bitcoin-rpc.publicnode.com", + "auth": { "kind": "none" } + } + } + }, + "abstract": { + "timeout_sec": 30, + "max_retries": 3, + "providers": { + "public": { + "api_variant": "standard", + "rpc_url": "https://api.testnet.abs.xyz", + "auth": { "kind": "none" } + } + } + }, + "starknet": { + "timeout_sec": 30, + "max_retries": 3, + "providers": { + "public": { + "api_variant": "standard", + "rpc_url": "https://starknet-rpc.publicnode.com", + "auth": { "kind": "none" } + } + } + } + } + } +} EOF ``` ### Run the MPC binary -In two separate shells run the MPC binary for frodo and sam. Note the last argument repeating (`11111111111111111111111111111111`) is the encryption key for the secret storage, and can be any arbitrary value. 
+In two separate shells run the MPC binary for Frodo and Sam using their JSON config files: ```shell -RUST_LOG=info mpc-node start --home-dir ~/.near/mpc-sam/ 11111111111111111111111111111111 --image-hash "8b40f81f77b8c22d6c777a6e14d307a1d11cb55ab83541fbb8575d02d86a74b0" --latest-allowed-hash-file /temp/LATEST_ALLOWED_HASH_FILE.txt local +RUST_LOG=info mpc-node start-with-config-file ~/.near/mpc-sam/mpc-config.json ``` ```shell -RUST_LOG=info mpc-node start --home-dir ~/.near/mpc-frodo/ 11111111111111111111111111111111 --image-hash "8b40f81f77b8c22d6c777a6e14d307a1d11cb55ab83541fbb8575d02d86a74b0" --latest-allowed-hash-file /temp/LATEST_ALLOWED_HASH_FILE.txt local +RUST_LOG=info mpc-node start-with-config-file ~/.near/mpc-frodo/mpc-config.json ``` Notes: -- `8b40f81f77b8c22d6c777a6e14d307a1d11cb55ab83541fbb8575d02d86a74b0` is just an arbitrary hash. - If you get the following error: ```console From ae8e459a986bc3e9745880c555e3e0ef929d3898 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 14:59:38 +0100 Subject: [PATCH 45/82] use envsubst --- docs/localnet/localnet.md | 161 +------------------------ docs/localnet/mpc-config.template.json | 75 ++++++++++++ 2 files changed, 81 insertions(+), 155 deletions(-) create mode 100644 docs/localnet/mpc-config.template.json diff --git a/docs/localnet/localnet.md b/docs/localnet/localnet.md index fd55b62e4..e9c0674ef 100644 --- a/docs/localnet/localnet.md +++ b/docs/localnet/localnet.md @@ -191,87 +191,13 @@ Since this is not a validator node, we can remove `validator_key.json` rm ~/.near/mpc-frodo/validator_key.json ``` -Next we'll create a JSON configuration file for Frodo's MPC node. This single file +Next we'll create a JSON configuration file for Frodo's MPC node using the +shared template at `docs/localnet/mpc-config.template.json`. 
This single file contains all settings (secrets, TEE config, and node parameters): ```shell -cat > ~/.near/mpc-frodo/mpc-config.json << EOF -{ - "home_dir": "$HOME/.near/mpc-frodo", - "secrets": { - "secret_store_key_hex": "11111111111111111111111111111111" - }, - "tee": { - "authority": { "type": "local" }, - "image_hash": "8b40f81f77b8c22d6c777a6e14d307a1d11cb55ab83541fbb8575d02d86a74b0", - "latest_allowed_hash_file": "/tmp/LATEST_ALLOWED_HASH_FILE.txt" - }, - "node": { - "my_near_account_id": "frodo.test.near", - "near_responder_account_id": "frodo.test.near", - "number_of_responder_keys": 1, - "web_ui": "127.0.0.1:8081", - "migration_web_ui": "127.0.0.1:8079", - "pprof_bind_address": "127.0.0.1:34001", - "triple": { - "concurrency": 2, - "desired_triples_to_buffer": 128, - "timeout_sec": 60, - "parallel_triple_generation_stagger_time_sec": 1 - }, - "presignature": { - "concurrency": 4, - "desired_presignatures_to_buffer": 64, - "timeout_sec": 60 - }, - "signature": { "timeout_sec": 60 }, - "indexer": { - "validate_genesis": false, - "sync_mode": "Latest", - "concurrency": 1, - "mpc_contract_id": "mpc-contract.test.near", - "finality": "optimistic" - }, - "ckd": { "timeout_sec": 60 }, - "cores": 4, - "foreign_chains": { - "bitcoin": { - "timeout_sec": 30, - "max_retries": 3, - "providers": { - "public": { - "api_variant": "esplora", - "rpc_url": "https://bitcoin-rpc.publicnode.com", - "auth": { "kind": "none" } - } - } - }, - "abstract": { - "timeout_sec": 30, - "max_retries": 3, - "providers": { - "public": { - "api_variant": "standard", - "rpc_url": "https://api.testnet.abs.xyz", - "auth": { "kind": "none" } - } - } - }, - "starknet": { - "timeout_sec": 30, - "max_retries": 3, - "providers": { - "public": { - "api_variant": "standard", - "rpc_url": "https://starknet-rpc.publicnode.com", - "auth": { "kind": "none" } - } - } - } - } - } -} -EOF +MPC_NODE_ID=mpc-frodo NEAR_ACCOUNT_ID=frodo.test.near WEB_UI_PORT=8081 MIGRATION_WEB_UI_PORT=8079 PPROF_PORT=34001 \ + 
envsubst < docs/localnet/mpc-config.template.json > ~/.near/mpc-frodo/mpc-config.json ``` ### Initialize Sam's node @@ -295,83 +221,8 @@ rm ~/.near/mpc-sam/validator_key.json ``` ```shell -cat > ~/.near/mpc-sam/mpc-config.json << EOF -{ - "home_dir": "$HOME/.near/mpc-sam", - "secrets": { - "secret_store_key_hex": "11111111111111111111111111111111" - }, - "tee": { - "authority": { "type": "local" }, - "image_hash": "8b40f81f77b8c22d6c777a6e14d307a1d11cb55ab83541fbb8575d02d86a74b0", - "latest_allowed_hash_file": "/tmp/LATEST_ALLOWED_HASH_FILE.txt" - }, - "node": { - "my_near_account_id": "sam.test.near", - "near_responder_account_id": "sam.test.near", - "number_of_responder_keys": 1, - "web_ui": "127.0.0.1:8082", - "migration_web_ui": "127.0.0.1:8078", - "pprof_bind_address": "127.0.0.1:34002", - "triple": { - "concurrency": 2, - "desired_triples_to_buffer": 128, - "timeout_sec": 60, - "parallel_triple_generation_stagger_time_sec": 1 - }, - "presignature": { - "concurrency": 4, - "desired_presignatures_to_buffer": 64, - "timeout_sec": 60 - }, - "signature": { "timeout_sec": 60 }, - "indexer": { - "validate_genesis": false, - "sync_mode": "Latest", - "concurrency": 1, - "mpc_contract_id": "mpc-contract.test.near", - "finality": "optimistic" - }, - "ckd": { "timeout_sec": 60 }, - "cores": 4, - "foreign_chains": { - "bitcoin": { - "timeout_sec": 30, - "max_retries": 3, - "providers": { - "public": { - "api_variant": "esplora", - "rpc_url": "https://bitcoin-rpc.publicnode.com", - "auth": { "kind": "none" } - } - } - }, - "abstract": { - "timeout_sec": 30, - "max_retries": 3, - "providers": { - "public": { - "api_variant": "standard", - "rpc_url": "https://api.testnet.abs.xyz", - "auth": { "kind": "none" } - } - } - }, - "starknet": { - "timeout_sec": 30, - "max_retries": 3, - "providers": { - "public": { - "api_variant": "standard", - "rpc_url": "https://starknet-rpc.publicnode.com", - "auth": { "kind": "none" } - } - } - } - } - } -} -EOF +MPC_NODE_ID=mpc-sam 
NEAR_ACCOUNT_ID=sam.test.near WEB_UI_PORT=8082 MIGRATION_WEB_UI_PORT=8078 PPROF_PORT=34002 \ + envsubst < docs/localnet/mpc-config.template.json > ~/.near/mpc-sam/mpc-config.json ``` ### Run the MPC binary diff --git a/docs/localnet/mpc-config.template.json b/docs/localnet/mpc-config.template.json new file mode 100644 index 000000000..07e15ff27 --- /dev/null +++ b/docs/localnet/mpc-config.template.json @@ -0,0 +1,75 @@ +{ + "home_dir": "$HOME/.near/$MPC_NODE_ID", + "secrets": { + "secret_store_key_hex": "11111111111111111111111111111111" + }, + "tee": { + "authority": { "type": "local" }, + "image_hash": "8b40f81f77b8c22d6c777a6e14d307a1d11cb55ab83541fbb8575d02d86a74b0", + "latest_allowed_hash_file": "/tmp/LATEST_ALLOWED_HASH_FILE.txt" + }, + "node": { + "my_near_account_id": "$NEAR_ACCOUNT_ID", + "near_responder_account_id": "$NEAR_ACCOUNT_ID", + "number_of_responder_keys": 1, + "web_ui": "127.0.0.1:$WEB_UI_PORT", + "migration_web_ui": "127.0.0.1:$MIGRATION_WEB_UI_PORT", + "pprof_bind_address": "127.0.0.1:$PPROF_PORT", + "triple": { + "concurrency": 2, + "desired_triples_to_buffer": 128, + "timeout_sec": 60, + "parallel_triple_generation_stagger_time_sec": 1 + }, + "presignature": { + "concurrency": 4, + "desired_presignatures_to_buffer": 64, + "timeout_sec": 60 + }, + "signature": { "timeout_sec": 60 }, + "indexer": { + "validate_genesis": false, + "sync_mode": "Latest", + "concurrency": 1, + "mpc_contract_id": "mpc-contract.test.near", + "finality": "optimistic" + }, + "ckd": { "timeout_sec": 60 }, + "cores": 4, + "foreign_chains": { + "bitcoin": { + "timeout_sec": 30, + "max_retries": 3, + "providers": { + "public": { + "api_variant": "esplora", + "rpc_url": "https://bitcoin-rpc.publicnode.com", + "auth": { "kind": "none" } + } + } + }, + "abstract": { + "timeout_sec": 30, + "max_retries": 3, + "providers": { + "public": { + "api_variant": "standard", + "rpc_url": "https://api.testnet.abs.xyz", + "auth": { "kind": "none" } + } + } + }, + "starknet": { + 
"timeout_sec": 30, + "max_retries": 3, + "providers": { + "public": { + "api_variant": "standard", + "rpc_url": "https://starknet-rpc.publicnode.com", + "auth": { "kind": "none" } + } + } + } + } + } +} From a989b042101605fc016cf44d55af3500414ea498 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 14:59:52 +0100 Subject: [PATCH 46/82] make it portable for fish --- docs/localnet/localnet.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/localnet/localnet.md b/docs/localnet/localnet.md index e9c0674ef..90045af21 100644 --- a/docs/localnet/localnet.md +++ b/docs/localnet/localnet.md @@ -196,7 +196,7 @@ shared template at `docs/localnet/mpc-config.template.json`. This single file contains all settings (secrets, TEE config, and node parameters): ```shell -MPC_NODE_ID=mpc-frodo NEAR_ACCOUNT_ID=frodo.test.near WEB_UI_PORT=8081 MIGRATION_WEB_UI_PORT=8079 PPROF_PORT=34001 \ +env MPC_NODE_ID=mpc-frodo NEAR_ACCOUNT_ID=frodo.test.near WEB_UI_PORT=8081 MIGRATION_WEB_UI_PORT=8079 PPROF_PORT=34001 \ envsubst < docs/localnet/mpc-config.template.json > ~/.near/mpc-frodo/mpc-config.json ``` @@ -221,7 +221,7 @@ rm ~/.near/mpc-sam/validator_key.json ``` ```shell -MPC_NODE_ID=mpc-sam NEAR_ACCOUNT_ID=sam.test.near WEB_UI_PORT=8082 MIGRATION_WEB_UI_PORT=8078 PPROF_PORT=34002 \ +env MPC_NODE_ID=mpc-sam NEAR_ACCOUNT_ID=sam.test.near WEB_UI_PORT=8082 MIGRATION_WEB_UI_PORT=8078 PPROF_PORT=34002 \ envsubst < docs/localnet/mpc-config.template.json > ~/.near/mpc-sam/mpc-config.json ``` From 62efec6044f63143c3b1192dd00fb4b2749e8857 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 15:05:55 +0100 Subject: [PATCH 47/82] remove section comments --- crates/node/src/cli.rs | 40 ---------------------------------------- 1 file changed, 40 deletions(-) diff --git a/crates/node/src/cli.rs b/crates/node/src/cli.rs index a21dc1f01..0ea6c4811 100644 --- a/crates/node/src/cli.rs +++ b/crates/node/src/cli.rs @@ -22,11 +22,6 @@ use std::{ }; use 
tee_authority::tee_authority::{DEFAULT_DSTACK_ENDPOINT, DEFAULT_PHALA_TDX_QUOTE_UPLOAD_URL}; use url::Url; - -// --------------------------------------------------------------------------- -// Top-level CLI -// --------------------------------------------------------------------------- - #[derive(Parser, Debug)] #[command(name = "mpc-node")] #[command(about = "MPC Node for Near Protocol")] @@ -86,11 +81,6 @@ pub enum CliCommand { migrating_nodes: Vec, }, } - -// --------------------------------------------------------------------------- -// Start subcommand (CLI flags / env vars) -// --------------------------------------------------------------------------- - #[derive(Args, Debug)] pub struct StartCmd { #[arg(long, env("MPC_HOME_DIR"))] @@ -178,11 +168,6 @@ impl StartCmd { } } } - -// --------------------------------------------------------------------------- -// Init subcommand -// --------------------------------------------------------------------------- - #[derive(Args, Debug)] pub struct InitConfigArgs { #[arg(long, env("MPC_HOME_DIR"))] @@ -209,11 +194,6 @@ pub struct InitConfigArgs { #[arg(long)] pub boot_nodes: Option, } - -// --------------------------------------------------------------------------- -// Import/Export keyshare subcommands -// --------------------------------------------------------------------------- - #[derive(Args, Debug)] pub struct ImportKeyshareCmd { /// Path to home directory @@ -241,11 +221,6 @@ pub struct ExportKeyshareCmd { #[arg(help = "Hex-encoded 16 byte AES key for local storage encryption")] pub local_encryption_key_hex: String, } - -// --------------------------------------------------------------------------- -// Dispatch -// --------------------------------------------------------------------------- - impl Cli { pub async fn run(self) -> anyhow::Result<()> { match self.command { @@ -317,11 +292,6 @@ impl Cli { } } } - -// --------------------------------------------------------------------------- -// Import/Export 
keyshare implementations -// --------------------------------------------------------------------------- - impl ImportKeyshareCmd { pub async fn run(&self) -> anyhow::Result<()> { let runtime = tokio::runtime::Runtime::new()?; @@ -418,11 +388,6 @@ impl ExportKeyshareCmd { }) } } - -// --------------------------------------------------------------------------- -// Test config generation -// --------------------------------------------------------------------------- - fn duplicate_migrating_accounts( mut accounts: Vec, migrating_nodes: &[usize], @@ -544,11 +509,6 @@ fn create_file_config( cores: Some(4), } } - -// --------------------------------------------------------------------------- -// Tests -// --------------------------------------------------------------------------- - #[cfg(test)] mod tests { use super::*; From 50f27e069a2e3b32b365336002d260b86be6a7dd Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 15:06:15 +0100 Subject: [PATCH 48/82] cargo fmt --- crates/node/src/config/start.rs | 4 ++- crates/node/src/run.rs | 47 +++++++++++++++------------------ 2 files changed, 25 insertions(+), 26 deletions(-) diff --git a/crates/node/src/config/start.rs b/crates/node/src/config/start.rs index c5ed3d802..8ee8ea9a7 100644 --- a/crates/node/src/config/start.rs +++ b/crates/node/src/config/start.rs @@ -88,7 +88,9 @@ impl TeeAuthorityStartConfig { dstack_endpoint, quote_upload_url, } => { - let url: Url = quote_upload_url.parse().context("invalid quote_upload_url")?; + let url: Url = quote_upload_url + .parse() + .context("invalid quote_upload_url")?; DstackTeeAuthorityConfig::new(dstack_endpoint, url).into() } }) diff --git a/crates/node/src/run.rs b/crates/node/src/run.rs index 263b30a56..9c033f305 100644 --- a/crates/node/src/run.rs +++ b/crates/node/src/run.rs @@ -127,33 +127,30 @@ impl StartConfig { let (shutdown_signal_sender, mut shutdown_signal_receiver) = mpsc::channel(1); let cancellation_token = CancellationToken::new(); - let 
image_hash_watcher_handle = - if let (Some(image_hash), Some(latest_allowed_hash_file)) = - (&self.tee.image_hash, &self.tee.latest_allowed_hash_file) - { - let current_image_hash_bytes: [u8; 32] = hex::decode(image_hash) - .expect("The currently running image is a hex string.") - .try_into() - .expect("The currently running image hash hex representation is 32 bytes."); - - let allowed_hashes_in_contract = - indexer_api.allowed_docker_images_receiver.clone(); - let image_hash_storage = - AllowedImageHashesFile::from(latest_allowed_hash_file.clone()); - - Some(root_runtime.spawn(monitor_allowed_image_hashes( - cancellation_token.child_token(), - MpcDockerImageHash::from(current_image_hash_bytes), - allowed_hashes_in_contract, - image_hash_storage, - shutdown_signal_sender.clone(), - ))) - } else { - tracing::info!( + let image_hash_watcher_handle = if let (Some(image_hash), Some(latest_allowed_hash_file)) = + (&self.tee.image_hash, &self.tee.latest_allowed_hash_file) + { + let current_image_hash_bytes: [u8; 32] = hex::decode(image_hash) + .expect("The currently running image is a hex string.") + .try_into() + .expect("The currently running image hash hex representation is 32 bytes."); + + let allowed_hashes_in_contract = indexer_api.allowed_docker_images_receiver.clone(); + let image_hash_storage = AllowedImageHashesFile::from(latest_allowed_hash_file.clone()); + + Some(root_runtime.spawn(monitor_allowed_image_hashes( + cancellation_token.child_token(), + MpcDockerImageHash::from(current_image_hash_bytes), + allowed_hashes_in_contract, + image_hash_storage, + shutdown_signal_sender.clone(), + ))) + } else { + tracing::info!( "image_hash and/or latest_allowed_hash_file not set, skipping TEE image hash monitoring" ); - None - }; + None + }; let root_future = create_root_future( self, From b5aedee8bddcaeffeecd002c2fc7832ca805cd1b Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 15:20:15 +0100 Subject: [PATCH 49/82] Have run as standalone function --- 
crates/node/src/cli.rs | 8 +- crates/node/src/run.rs | 285 ++++++++++++++++++++--------------------- 2 files changed, 147 insertions(+), 146 deletions(-) diff --git a/crates/node/src/cli.rs b/crates/node/src/cli.rs index 0ea6c4811..2b1aa8abf 100644 --- a/crates/node/src/cli.rs +++ b/crates/node/src/cli.rs @@ -11,6 +11,7 @@ use crate::{ permanent::{PermanentKeyStorage, PermanentKeyStorageBackend, PermanentKeyshareData}, }, p2p::testing::{generate_test_p2p_configs, PortSeed}, + run::run, }; use clap::{Args, Parser, Subcommand, ValueEnum}; use hex::FromHex; @@ -225,12 +226,15 @@ impl Cli { pub async fn run(self) -> anyhow::Result<()> { match self.command { CliCommand::StartWithConfigFile { config_path } => { - StartConfig::from_json_file(&config_path)?.run().await + let node_configuration = StartConfig::from_json_file(&config_path)?; + run(node_configuration).await } CliCommand::Start(start) => { let home_dir = std::path::Path::new(&start.home_dir); let config_file = load_config_file(home_dir)?; - start.into_start_config(config_file).run().await + + let node_configuration = start.into_start_config(config_file); + run(node_configuration).await } CliCommand::Init(config) => { let (download_config_type, download_config_url) = if config.download_config { diff --git a/crates/node/src/run.rs b/crates/node/src/run.rs index 9c033f305..f403a299f 100644 --- a/crates/node/src/run.rs +++ b/crates/node/src/run.rs @@ -39,155 +39,152 @@ use crate::tee::{ pub const ATTESTATION_RESUBMISSION_INTERVAL: Duration = Duration::from_secs(60 * 60); // 1 hour -impl StartConfig { - pub async fn run(self) -> anyhow::Result<()> { - let root_runtime = tokio::runtime::Builder::new_multi_thread() - .enable_all() - .worker_threads(1) - .build()?; - - let _tokio_enter_guard = root_runtime.enter(); - - // Load configuration and initialize persistent secrets - let home_dir = PathBuf::from(self.home_dir.clone()); - let config = self.node.clone(); - let persistent_secrets = 
PersistentSecrets::generate_or_get_existing( - &home_dir, - config.number_of_responder_keys, - )?; - - profiler::web_server::start_web_server(config.pprof_bind_address).await?; - root_runtime.spawn(crate::metrics::tokio_task_metrics::run_monitor_loop()); - - // TODO(#1296): Decide if the MPC responder account is actually needed - let respond_config = RespondConfig::from_parts(&config, &persistent_secrets); - - let backup_encryption_key_hex = match &self.secrets.backup_encryption_key_hex { - Some(key) => key.clone(), - None => generate_and_write_backup_encryption_key_to_disk(&home_dir)?, - }; - - // Load secrets from configuration and persistent storage - let secrets = SecretsConfig::from_parts( - &self.secrets.secret_store_key_hex, - persistent_secrets.clone(), - &backup_encryption_key_hex, - )?; - - // Generate attestation - let tee_authority = self.tee.authority.clone().into_tee_authority()?; - let tls_public_key = &secrets.persistent_secrets.p2p_private_key.verifying_key(); - - let account_public_key = &secrets.persistent_secrets.near_signer_key.verifying_key(); - - let report_data = ReportDataV1::new( - *Ed25519PublicKey::from(tls_public_key).as_bytes(), - *Ed25519PublicKey::from(account_public_key).as_bytes(), - ) - .into(); - - let attestation = tee_authority.generate_attestation(report_data).await?; - - // Create communication channels and runtime - let (debug_request_sender, _) = tokio::sync::broadcast::channel(10); - let root_task_handle = Arc::new(OnceLock::new()); - - let (protocol_state_sender, protocol_state_receiver) = - watch::channel(ProtocolContractState::NotInitialized); - - let (migration_state_sender, migration_state_receiver) = - watch::channel((0, BTreeMap::new())); - let web_server = root_runtime - .block_on(start_web_server( - root_task_handle.clone(), - debug_request_sender.clone(), - config.web_ui, - static_web_data(&secrets, Some(attestation)), - protocol_state_receiver, - migration_state_receiver, - )) - .context("Failed to create web 
server.")?; - - let _web_server_join_handle = root_runtime.spawn(web_server); - - // Create Indexer and wait for indexer to be synced. - let (indexer_exit_sender, indexer_exit_receiver) = oneshot::channel(); - let indexer_api = spawn_real_indexer( - home_dir.clone(), - config.indexer.clone(), - config.my_near_account_id.clone(), - persistent_secrets.near_signer_key.clone(), - respond_config, - indexer_exit_sender, - protocol_state_sender, - migration_state_sender, - *tls_public_key, - ); - - let (shutdown_signal_sender, mut shutdown_signal_receiver) = mpsc::channel(1); - let cancellation_token = CancellationToken::new(); - - let image_hash_watcher_handle = if let (Some(image_hash), Some(latest_allowed_hash_file)) = - (&self.tee.image_hash, &self.tee.latest_allowed_hash_file) - { - let current_image_hash_bytes: [u8; 32] = hex::decode(image_hash) - .expect("The currently running image is a hex string.") - .try_into() - .expect("The currently running image hash hex representation is 32 bytes."); - - let allowed_hashes_in_contract = indexer_api.allowed_docker_images_receiver.clone(); - let image_hash_storage = AllowedImageHashesFile::from(latest_allowed_hash_file.clone()); - - Some(root_runtime.spawn(monitor_allowed_image_hashes( - cancellation_token.child_token(), - MpcDockerImageHash::from(current_image_hash_bytes), - allowed_hashes_in_contract, - image_hash_storage, - shutdown_signal_sender.clone(), - ))) - } else { - tracing::info!( +pub async fn run(config: StartConfig) -> anyhow::Result<()> { + let root_runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .worker_threads(1) + .build()?; + + let _tokio_enter_guard = root_runtime.enter(); + + // Load configuration and initialize persistent secrets + let home_dir = PathBuf::from(config.home_dir.clone()); + let node_config = config.node.clone(); + let persistent_secrets = PersistentSecrets::generate_or_get_existing( + &home_dir, + node_config.number_of_responder_keys, + )?; + + 
profiler::web_server::start_web_server(node_config.pprof_bind_address).await?; + root_runtime.spawn(crate::metrics::tokio_task_metrics::run_monitor_loop()); + + // TODO(#1296): Decide if the MPC responder account is actually needed + let respond_config = RespondConfig::from_parts(&node_config, &persistent_secrets); + + let backup_encryption_key_hex = match &config.secrets.backup_encryption_key_hex { + Some(key) => key.clone(), + None => generate_and_write_backup_encryption_key_to_disk(&home_dir)?, + }; + + // Load secrets from configuration and persistent storage + let secrets = SecretsConfig::from_parts( + &config.secrets.secret_store_key_hex, + persistent_secrets.clone(), + &backup_encryption_key_hex, + )?; + + // Generate attestation + let tee_authority = config.tee.authority.clone().into_tee_authority()?; + let tls_public_key = &secrets.persistent_secrets.p2p_private_key.verifying_key(); + + let account_public_key = &secrets.persistent_secrets.near_signer_key.verifying_key(); + + let report_data = ReportDataV1::new( + *Ed25519PublicKey::from(tls_public_key).as_bytes(), + *Ed25519PublicKey::from(account_public_key).as_bytes(), + ) + .into(); + + let attestation = tee_authority.generate_attestation(report_data).await?; + + // Create communication channels and runtime + let (debug_request_sender, _) = tokio::sync::broadcast::channel(10); + let root_task_handle = Arc::new(OnceLock::new()); + + let (protocol_state_sender, protocol_state_receiver) = + watch::channel(ProtocolContractState::NotInitialized); + + let (migration_state_sender, migration_state_receiver) = watch::channel((0, BTreeMap::new())); + let web_server = root_runtime + .block_on(start_web_server( + root_task_handle.clone(), + debug_request_sender.clone(), + node_config.web_ui, + static_web_data(&secrets, Some(attestation)), + protocol_state_receiver, + migration_state_receiver, + )) + .context("Failed to create web server.")?; + + let _web_server_join_handle = root_runtime.spawn(web_server); + + // 
Create Indexer and wait for indexer to be synced. + let (indexer_exit_sender, indexer_exit_receiver) = oneshot::channel(); + let indexer_api = spawn_real_indexer( + home_dir.clone(), + node_config.indexer.clone(), + node_config.my_near_account_id.clone(), + persistent_secrets.near_signer_key.clone(), + respond_config, + indexer_exit_sender, + protocol_state_sender, + migration_state_sender, + *tls_public_key, + ); + + let (shutdown_signal_sender, mut shutdown_signal_receiver) = mpsc::channel(1); + let cancellation_token = CancellationToken::new(); + + let image_hash_watcher_handle = if let (Some(image_hash), Some(latest_allowed_hash_file)) = + (&config.tee.image_hash, &config.tee.latest_allowed_hash_file) + { + let current_image_hash_bytes: [u8; 32] = hex::decode(image_hash) + .expect("The currently running image is a hex string.") + .try_into() + .expect("The currently running image hash hex representation is 32 bytes."); + + let allowed_hashes_in_contract = indexer_api.allowed_docker_images_receiver.clone(); + let image_hash_storage = AllowedImageHashesFile::from(latest_allowed_hash_file.clone()); + + Some(root_runtime.spawn(monitor_allowed_image_hashes( + cancellation_token.child_token(), + MpcDockerImageHash::from(current_image_hash_bytes), + allowed_hashes_in_contract, + image_hash_storage, + shutdown_signal_sender.clone(), + ))) + } else { + tracing::info!( "image_hash and/or latest_allowed_hash_file not set, skipping TEE image hash monitoring" ); - None - }; - - let root_future = create_root_future( - self, - home_dir.clone(), - config.clone(), - secrets.clone(), - indexer_api, - debug_request_sender, - root_task_handle, - tee_authority, - ); - - let root_task = root_runtime.spawn(start_root_task("root", root_future).0); - - let exit_reason = tokio::select! { - root_task_result = root_task => { - root_task_result? - } - indexer_exit_response = indexer_exit_receiver => { - indexer_exit_response.context("Indexer thread dropped response channel.")? 
- } - Some(()) = shutdown_signal_receiver.recv() => { - Err(anyhow!("TEE allowed image hashes watcher is sending shutdown signal.")) - } - }; - - // Perform graceful shutdown - cancellation_token.cancel(); - - if let Some(handle) = image_hash_watcher_handle { - info!("Waiting for image hash watcher to gracefully exit."); - let exit_result = handle.await; - info!(?exit_result, "Image hash watcher exited."); + None + }; + + let root_future = create_root_future( + config, + home_dir.clone(), + node_config.clone(), + secrets.clone(), + indexer_api, + debug_request_sender, + root_task_handle, + tee_authority, + ); + + let root_task = root_runtime.spawn(start_root_task("root", root_future).0); + + let exit_reason = tokio::select! { + root_task_result = root_task => { + root_task_result? + } + indexer_exit_response = indexer_exit_receiver => { + indexer_exit_response.context("Indexer thread dropped response channel.")? } + Some(()) = shutdown_signal_receiver.recv() => { + Err(anyhow!("TEE allowed image hashes watcher is sending shutdown signal.")) + } + }; - exit_reason + // Perform graceful shutdown + cancellation_token.cancel(); + + if let Some(handle) = image_hash_watcher_handle { + info!("Waiting for image hash watcher to gracefully exit."); + let exit_result = handle.await; + info!(?exit_result, "Image hash watcher exited."); } + + exit_reason } #[allow(clippy::too_many_arguments)] From 0636c9292618b5210a3780c971943b4718603f97 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 15:20:41 +0100 Subject: [PATCH 50/82] rename to run_mpc_node --- crates/node/src/cli.rs | 6 +++--- crates/node/src/run.rs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/node/src/cli.rs b/crates/node/src/cli.rs index 2b1aa8abf..5062cc7dd 100644 --- a/crates/node/src/cli.rs +++ b/crates/node/src/cli.rs @@ -11,7 +11,7 @@ use crate::{ permanent::{PermanentKeyStorage, PermanentKeyStorageBackend, PermanentKeyshareData}, }, 
p2p::testing::{generate_test_p2p_configs, PortSeed}, - run::run, + run::run_mpc_node, }; use clap::{Args, Parser, Subcommand, ValueEnum}; use hex::FromHex; @@ -227,14 +227,14 @@ impl Cli { match self.command { CliCommand::StartWithConfigFile { config_path } => { let node_configuration = StartConfig::from_json_file(&config_path)?; - run(node_configuration).await + run_mpc_node(node_configuration).await } CliCommand::Start(start) => { let home_dir = std::path::Path::new(&start.home_dir); let config_file = load_config_file(home_dir)?; let node_configuration = start.into_start_config(config_file); - run(node_configuration).await + run_mpc_node(node_configuration).await } CliCommand::Init(config) => { let (download_config_type, download_config_url) = if config.download_config { diff --git a/crates/node/src/run.rs b/crates/node/src/run.rs index f403a299f..dcde5b1db 100644 --- a/crates/node/src/run.rs +++ b/crates/node/src/run.rs @@ -39,7 +39,7 @@ use crate::tee::{ pub const ATTESTATION_RESUBMISSION_INTERVAL: Duration = Duration::from_secs(60 * 60); // 1 hour -pub async fn run(config: StartConfig) -> anyhow::Result<()> { +pub async fn run_mpc_node(config: StartConfig) -> anyhow::Result<()> { let root_runtime = tokio::runtime::Builder::new_multi_thread() .enable_all() .worker_threads(1) From 599d99900f3c3ff8892fdd03554346088cfc6e58 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 15:26:29 +0100 Subject: [PATCH 51/82] use pathbuf --- crates/node/src/cli.rs | 1 + crates/node/src/config/start.rs | 3 ++- crates/node/src/run.rs | 3 +-- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/node/src/cli.rs b/crates/node/src/cli.rs index 5062cc7dd..cf141edfb 100644 --- a/crates/node/src/cli.rs +++ b/crates/node/src/cli.rs @@ -229,6 +229,7 @@ impl Cli { let node_configuration = StartConfig::from_json_file(&config_path)?; run_mpc_node(node_configuration).await } + // TODO: deprecate this CliCommand::Start(start) => { let home_dir = 
std::path::Path::new(&start.home_dir); let config_file = load_config_file(home_dir)?; diff --git a/crates/node/src/config/start.rs b/crates/node/src/config/start.rs index 8ee8ea9a7..ffda2122b 100644 --- a/crates/node/src/config/start.rs +++ b/crates/node/src/config/start.rs @@ -13,7 +13,7 @@ use url::Url; /// (JSON file) convert into this type. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct StartConfig { - pub home_dir: String, + pub home_dir: PathBuf, /// Encryption keys and backup settings. pub secrets: SecretsStartConfig, /// TEE authority and image hash monitoring settings. @@ -68,6 +68,7 @@ pub enum TeeAuthorityStartConfig { #[serde(default = "default_dstack_endpoint")] dstack_endpoint: String, #[serde(default = "default_quote_upload_url")] + // TODO: use URL type for this type quote_upload_url: String, }, } diff --git a/crates/node/src/run.rs b/crates/node/src/run.rs index dcde5b1db..7dd40d392 100644 --- a/crates/node/src/run.rs +++ b/crates/node/src/run.rs @@ -48,10 +48,9 @@ pub async fn run_mpc_node(config: StartConfig) -> anyhow::Result<()> { let _tokio_enter_guard = root_runtime.enter(); // Load configuration and initialize persistent secrets - let home_dir = PathBuf::from(config.home_dir.clone()); let node_config = config.node.clone(); let persistent_secrets = PersistentSecrets::generate_or_get_existing( - &home_dir, + &config.home_dir, node_config.number_of_responder_keys, )?; From ffa3497cd8a1f356043840ba857ef750888c1466 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 15:28:41 +0100 Subject: [PATCH 52/82] add todo issue links --- crates/node/src/cli.rs | 2 +- crates/node/src/config/foreign_chains/auth.rs | 2 +- crates/node/src/config/start.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/node/src/cli.rs b/crates/node/src/cli.rs index cf141edfb..365729816 100644 --- a/crates/node/src/cli.rs +++ b/crates/node/src/cli.rs @@ -229,7 +229,7 @@ impl Cli { let node_configuration = 
StartConfig::from_json_file(&config_path)?; run_mpc_node(node_configuration).await } - // TODO: deprecate this + // TODO(#2334): deprecate this CliCommand::Start(start) => { let home_dir = std::path::Path::new(&start.home_dir); let config_file = load_config_file(home_dir)?; diff --git a/crates/node/src/config/foreign_chains/auth.rs b/crates/node/src/config/foreign_chains/auth.rs index 48d916acd..3540fe837 100644 --- a/crates/node/src/config/foreign_chains/auth.rs +++ b/crates/node/src/config/foreign_chains/auth.rs @@ -52,7 +52,7 @@ pub enum TokenConfig { impl TokenConfig { pub fn resolve(&self) -> anyhow::Result { match self { - // TODO: do not resolve env variables this deep in the binary. + // TODO(#2335): do not resolve env variables this deep in the binary. // Should be resolved at start, preferably in the config so we can kill env configs // // One option is to have a separate secrets config file. diff --git a/crates/node/src/config/start.rs b/crates/node/src/config/start.rs index ffda2122b..78aa8be3d 100644 --- a/crates/node/src/config/start.rs +++ b/crates/node/src/config/start.rs @@ -68,7 +68,7 @@ pub enum TeeAuthorityStartConfig { #[serde(default = "default_dstack_endpoint")] dstack_endpoint: String, #[serde(default = "default_quote_upload_url")] - // TODO: use URL type for this type + // TODO(#2333): use URL type for this type quote_upload_url: String, }, } From 0b53d1c0a01a7885c6da1fed6c2ed08bcff3636c Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 15:47:40 +0100 Subject: [PATCH 53/82] fix pathbuf issue --- crates/node/src/cli.rs | 2 +- crates/node/src/run.rs | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/node/src/cli.rs b/crates/node/src/cli.rs index 365729816..47cefd480 100644 --- a/crates/node/src/cli.rs +++ b/crates/node/src/cli.rs @@ -85,7 +85,7 @@ pub enum CliCommand { #[derive(Args, Debug)] pub struct StartCmd { #[arg(long, env("MPC_HOME_DIR"))] - pub home_dir: String, + pub home_dir: PathBuf, 
/// Hex-encoded 16 byte AES key for local storage encryption. /// This key should come from a secure secret storage. /// TODO(#444): After TEE integration decide on what to do with AES encryption key diff --git a/crates/node/src/run.rs b/crates/node/src/run.rs index 7dd40d392..93b9c99fe 100644 --- a/crates/node/src/run.rs +++ b/crates/node/src/run.rs @@ -62,7 +62,7 @@ pub async fn run_mpc_node(config: StartConfig) -> anyhow::Result<()> { let backup_encryption_key_hex = match &config.secrets.backup_encryption_key_hex { Some(key) => key.clone(), - None => generate_and_write_backup_encryption_key_to_disk(&home_dir)?, + None => generate_and_write_backup_encryption_key_to_disk(&config.home_dir)?, }; // Load secrets from configuration and persistent storage @@ -110,7 +110,7 @@ pub async fn run_mpc_node(config: StartConfig) -> anyhow::Result<()> { // Create Indexer and wait for indexer to be synced. let (indexer_exit_sender, indexer_exit_receiver) = oneshot::channel(); let indexer_api = spawn_real_indexer( - home_dir.clone(), + config.home_dir.clone(), node_config.indexer.clone(), node_config.my_near_account_id.clone(), persistent_secrets.near_signer_key.clone(), @@ -149,6 +149,7 @@ pub async fn run_mpc_node(config: StartConfig) -> anyhow::Result<()> { None }; + let home_dir = config.home_dir.clone(); let root_future = create_root_future( config, home_dir.clone(), From 34515c211401108e4f61945a72419051a1d9f4ab Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 15:49:20 +0100 Subject: [PATCH 54/82] update pytests --- pytest/common_lib/shared/__init__.py | 4 ++++ pytest/common_lib/shared/mpc_node.py | 34 +++++++++++++++++++++------- 2 files changed, 30 insertions(+), 8 deletions(-) diff --git a/pytest/common_lib/shared/__init__.py b/pytest/common_lib/shared/__init__.py index 48cb7cb3a..27c391764 100644 --- a/pytest/common_lib/shared/__init__.py +++ b/pytest/common_lib/shared/__init__.py @@ -230,6 +230,7 @@ class ConfigValues: migration_address: str 
pprof_address: str backup_key: bytes + node_config: dict # JSON-serializable dict matching Rust ConfigFile def generate_mpc_configs( @@ -331,6 +332,7 @@ def generate_mpc_configs( ] backup_key = os.urandom(32) + configs.append( ConfigValues( signer_key, @@ -341,6 +343,7 @@ def generate_mpc_configs( migration_address, pprof_address, backup_key, + node_config=config, ) ) return configs @@ -531,6 +534,7 @@ def start_cluster_with_mpc( pytest_signer_keys=pytest_signer_keys, backup_key=config.backup_key, pprof_address=config.pprof_address, + node_config=config.node_config, ) mpc_node.init_nonces(validators[0]) mpc_node.set_block_ingestion(True) diff --git a/pytest/common_lib/shared/mpc_node.py b/pytest/common_lib/shared/mpc_node.py index 436ded22a..c8b87d5ce 100644 --- a/pytest/common_lib/shared/mpc_node.py +++ b/pytest/common_lib/shared/mpc_node.py @@ -61,6 +61,7 @@ def __init__( p2p_public_key: str, pytest_signer_keys: list[Key], backup_key: bytes, + node_config: dict, ): super().__init__(near_node, signer_key, pytest_signer_keys) self.p2p_url: str = p2p_url @@ -74,6 +75,7 @@ def __init__( self.is_running = False self.metrics = MetricsTracker(near_node) self.backup_key = backup_key + self.node_config = node_config def print(self): if not self.is_running: @@ -127,22 +129,38 @@ def reset_mpc_data(self): for file_path in pathlib.Path(self.home_dir).glob(pattern): file_path.unlink() + def _write_start_config(self) -> str: + """Build a StartConfig JSON file and write it to the node's home dir. 
+ Returns the path to the written config file.""" + start_config = { + "home_dir": self.home_dir, + "secrets": { + "secret_store_key_hex": self.secret_store_key, + "backup_encryption_key_hex": self.backup_key.hex(), + }, + "tee": { + "authority": {"type": "local"}, + "image_hash": DUMMY_MPC_IMAGE_HASH, + "latest_allowed_hash_file": "latest_allowed_hash.txt", + }, + "node": self.node_config, + } + config_path = str(pathlib.Path(self.home_dir) / "start_config.json") + with open(config_path, "w") as f: + json.dump(start_config, f, indent=2) + return config_path + def run(self): assert not self.is_running self.is_running = True + config_path = self._write_start_config() extra_env = { "RUST_LOG": "INFO", # mpc-node produces too much output on DEBUG - "MPC_SECRET_STORE_KEY": self.secret_store_key, - "MPC_IMAGE_HASH": DUMMY_MPC_IMAGE_HASH, - "MPC_LATEST_ALLOWED_HASH_FILE": "latest_allowed_hash.txt", - "MPC_BACKUP_ENCRYPTION_KEY_HEX": self.backup_key.hex(), } cmd = ( MPC_BINARY_PATH, - "start", - "--home-dir", - self.home_dir, - "local", + "start-with-config-file", + config_path, ) self.near_node.run_cmd(cmd=cmd, extra_env=extra_env) From ca80bca948cf4b07981a40fc90daf9b599685189 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 16:08:20 +0100 Subject: [PATCH 55/82] fix: test config was overwriting neard config --- crates/node/src/cli.rs | 4 ++-- pytest/common_lib/shared/__init__.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/node/src/cli.rs b/crates/node/src/cli.rs index 47cefd480..f5529b72b 100644 --- a/crates/node/src/cli.rs +++ b/crates/node/src/cli.rs @@ -454,8 +454,8 @@ fn run_generate_test_configs( desired_presignatures_to_buffer, ); std::fs::write( - format!("{}/config.yaml", subdir), - serde_yaml::to_string(&file_config)?, + format!("{}/mpc_node_config.json", subdir), + serde_json::to_string_pretty(&file_config)?, )?; } std::fs::write( diff --git a/pytest/common_lib/shared/__init__.py 
b/pytest/common_lib/shared/__init__.py index 27c391764..8fdfdad44 100644 --- a/pytest/common_lib/shared/__init__.py +++ b/pytest/common_lib/shared/__init__.py @@ -42,7 +42,7 @@ dot_near = pathlib.Path.home() / ".near" SECRETS_JSON = "secrets.json" NUMBER_OF_VALIDATORS = 1 -CONFIG_YAML = "config.yaml" +MPC_NODE_CONFIG_JSON = "mpc_node_config.json" def create_function_call_access_key_action( @@ -308,9 +308,9 @@ def generate_mpc_configs( my_port = participant["port"] p2p_url = f"http://{my_addr}:{my_port}" - config_file_path = os.path.join(dot_near, str(idx), CONFIG_YAML) + config_file_path = os.path.join(dot_near, str(idx), MPC_NODE_CONFIG_JSON) with open(config_file_path, "r") as f: - config = yaml.load(f, Loader=SafeLoaderIgnoreUnknown) + config = json.load(f) web_address = config.get("web_ui") migration_address = config.get("migration_web_ui") From 44b9a0e765b9f9937169136b0f975e4caae320f4 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 16:09:10 +0100 Subject: [PATCH 56/82] reset nearcore change --- libs/nearcore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/nearcore b/libs/nearcore index 3def2f7eb..8a8c21bc8 160000 --- a/libs/nearcore +++ b/libs/nearcore @@ -1 +1 @@ -Subproject commit 3def2f7ebb7455199e7b3f7b371e3735c23e2930 +Subproject commit 8a8c21bc81999af93edd1b6bca5b7c6c6337aa63 From bc9b2fcd462946a775b97f1087f4a2ecfd71b024 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 16:23:25 +0100 Subject: [PATCH 57/82] fix yml failure --- pytest/common_lib/shared/foreign_chains.py | 25 +++------------------- 1 file changed, 3 insertions(+), 22 deletions(-) diff --git a/pytest/common_lib/shared/foreign_chains.py b/pytest/common_lib/shared/foreign_chains.py index 5d71cb30c..fa6193b87 100644 --- a/pytest/common_lib/shared/foreign_chains.py +++ b/pytest/common_lib/shared/foreign_chains.py @@ -1,32 +1,13 @@ """Shared helpers for foreign chain configuration and policy tests.""" -import pathlib -import re from 
typing import Any -import yaml - - -def node_config_path(node) -> pathlib.Path: - return pathlib.Path(node.home_dir) / "config.yaml" - def set_foreign_chains_config(node, foreign_chains: dict[str, Any] | None) -> None: - config_path = node_config_path(node) - - config_text = config_path.read_text(encoding="utf-8") - # Keep generated YAML tags intact by editing only the trailing `foreign_chains` section. - config_text = ( - re.sub(r"\nforeign_chains:[\s\S]*\Z", "\n", config_text).rstrip() + "\n" - ) - if foreign_chains is not None: - foreign_chains_text = yaml.safe_dump( - {"foreign_chains": foreign_chains}, sort_keys=False - ) - config_text += "\n" + foreign_chains_text - - config_path.write_text(config_text, encoding="utf-8") + node.node_config["foreign_chains"] = foreign_chains + else: + node.node_config["foreign_chains"] = {} def normalize_policy(policy: dict[str, Any]) -> list[tuple[str, tuple[str, ...]]]: From 99084d1886a39023f529161a9836531bb9bff979 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 16:51:46 +0100 Subject: [PATCH 58/82] redact secrets --- crates/node/src/config/start.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/node/src/config/start.rs b/crates/node/src/config/start.rs index 78aa8be3d..963248731 100644 --- a/crates/node/src/config/start.rs +++ b/crates/node/src/config/start.rs @@ -26,7 +26,7 @@ pub struct StartConfig { } /// Encryption keys needed at startup. -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Clone, Serialize, Deserialize)] pub struct SecretsStartConfig { /// Hex-encoded 16 byte AES key for local storage encryption. 
pub secret_store_key_hex: String, @@ -36,6 +36,15 @@ pub struct SecretsStartConfig { pub backup_encryption_key_hex: Option, } +impl std::fmt::Debug for SecretsStartConfig { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("SecretsStartConfig") + .field("secret_store_key_hex", &"[REDACTED]") + .field("backup_encryption_key_hex", &self.backup_encryption_key_hex.as_ref().map(|_| "[REDACTED]")) + .finish() + } +} + /// TEE-related configuration. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct TeeStartConfig { From 6f90606a32a01e045127578371dac62c67f7ef2e Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 16:54:30 +0100 Subject: [PATCH 59/82] fmt --- crates/node/src/config/start.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/crates/node/src/config/start.rs b/crates/node/src/config/start.rs index 963248731..b2a26e565 100644 --- a/crates/node/src/config/start.rs +++ b/crates/node/src/config/start.rs @@ -40,7 +40,13 @@ impl std::fmt::Debug for SecretsStartConfig { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("SecretsStartConfig") .field("secret_store_key_hex", &"[REDACTED]") - .field("backup_encryption_key_hex", &self.backup_encryption_key_hex.as_ref().map(|_| "[REDACTED]")) + .field( + "backup_encryption_key_hex", + &self + .backup_encryption_key_hex + .as_ref() + .map(|_| "[REDACTED]"), + ) .finish() } } From 79c001dbd0d17b98e039ed45e3d49004eb9e7e51 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 17:17:15 +0100 Subject: [PATCH 60/82] change to pass forward path instead --- Cargo.lock | 4 - crates/tee-launcher/Cargo.toml | 4 - crates/tee-launcher/src/constants.rs | 3 + crates/tee-launcher/src/env_validation.rs | 183 ---------- crates/tee-launcher/src/error.rs | 12 - crates/tee-launcher/src/main.rs | 150 ++++---- crates/tee-launcher/src/types.rs | 407 +--------------------- libs/nearcore | 2 +- 8 files changed, 88 
insertions(+), 677 deletions(-) delete mode 100644 crates/tee-launcher/src/env_validation.rs diff --git a/Cargo.lock b/Cargo.lock index 03775211b..af0abd50a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10600,12 +10600,8 @@ dependencies = [ "bounded-collections", "clap", "dstack-sdk", - "hex", - "itertools 0.14.0", "launcher-interface", - "regex", "reqwest 0.12.28", - "rstest", "serde", "serde_json", "tempfile", diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index 9dd627211..da20a9ba4 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -16,10 +16,7 @@ backon = { workspace = true } bounded-collections = { workspace = true } clap = { workspace = true } dstack-sdk = { workspace = true } -hex = { workspace = true } launcher-interface = { workspace = true } -itertools = { workspace = true } -regex = { workspace = true } reqwest = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } @@ -31,7 +28,6 @@ url = { workspace = true, features = ["serde"] } [dev-dependencies] assert_matches = { workspace = true } -rstest = { workspace = true } tempfile = { workspace = true } [lints] diff --git a/crates/tee-launcher/src/constants.rs b/crates/tee-launcher/src/constants.rs index af89e71b3..a158586d1 100644 --- a/crates/tee-launcher/src/constants.rs +++ b/crates/tee-launcher/src/constants.rs @@ -2,3 +2,6 @@ pub(crate) const MPC_CONTAINER_NAME: &str = "mpc-node"; pub(crate) const IMAGE_DIGEST_FILE: &str = "/mnt/shared/image-digest.bin"; pub(crate) const DSTACK_UNIX_SOCKET: &str = "/var/run/dstack.sock"; pub(crate) const DSTACK_USER_CONFIG_FILE: &str = "/tapp/user_config"; + +/// Path inside the container where the MPC config file is bind-mounted. 
+pub(crate) const MPC_CONFIG_CONTAINER_PATH: &str = "/mnt/shared/mpc-config.json"; diff --git a/crates/tee-launcher/src/env_validation.rs b/crates/tee-launcher/src/env_validation.rs deleted file mode 100644 index 96a3af3cb..000000000 --- a/crates/tee-launcher/src/env_validation.rs +++ /dev/null @@ -1,183 +0,0 @@ -use std::sync::LazyLock; - -use regex::Regex; - -/// Hard caps to prevent DoS via huge env payloads (matching Python launcher). -pub(crate) const MAX_PASSTHROUGH_ENV_VARS: usize = 64; -pub(crate) const MAX_ENV_VALUE_LEN: usize = 1024; -pub(crate) const MAX_TOTAL_ENV_BYTES: usize = 32 * 1024; // 32 KB - -/// Never pass raw private keys via launcher. -const DENIED_CONTAINER_ENV_KEYS: &[&str] = &["MPC_P2P_PRIVATE_KEY", "MPC_ACCOUNT_SK"]; - -/// Matches `MPC_[A-Z0-9_]{1,64}` — same pattern as the Python launcher. -static MPC_ENV_KEY_RE: LazyLock = - LazyLock::new(|| Regex::new(r"^MPC_[A-Z0-9_]{1,64}$").unwrap()); - -/// Non-MPC keys that are explicitly allowed for backwards compatibility. -const COMPAT_ALLOWED_KEYS: &[&str] = &["RUST_LOG", "RUST_BACKTRACE", "NEAR_BOOT_NODES"]; - -// --------------------------------------------------------------------------- -// Key validation -// --------------------------------------------------------------------------- - -/// Validates an extra env key (from the catch-all `extra_env` map). 
-/// -/// - Must match `MPC_[A-Z0-9_]{1,64}` **or** be in the compat allowlist -/// - Must not be in the deny list -pub(crate) fn validate_env_key(key: &str) -> Result<(), crate::error::LauncherError> { - if DENIED_CONTAINER_ENV_KEYS.contains(&key) { - return Err(crate::error::LauncherError::UnsafeEnvValue { - key: key.to_owned(), - reason: "denied key".into(), - }); - } - if MPC_ENV_KEY_RE.is_match(key) || COMPAT_ALLOWED_KEYS.contains(&key) { - return Ok(()); - } - Err(crate::error::LauncherError::UnsafeEnvValue { - key: key.to_owned(), - reason: "key does not match allowlist".into(), - }) -} - -// --------------------------------------------------------------------------- -// Value validation -// --------------------------------------------------------------------------- - -fn has_control_chars(s: &str) -> bool { - for ch in s.chars() { - if ch == '\n' || ch == '\r' || ch == '\0' { - return true; - } - if (ch as u32) < 0x20 && ch != '\t' { - return true; - } - } - false -} - -/// Validates an env value (applied to ALL vars, typed and extra). 
-/// -/// - Length <= `MAX_ENV_VALUE_LEN` -/// - No ASCII control characters (except tab) -/// - Does not contain `LD_PRELOAD` -pub(crate) fn validate_env_value( - key: &str, - value: &str, -) -> Result<(), crate::error::LauncherError> { - if value.len() > MAX_ENV_VALUE_LEN { - return Err(crate::error::LauncherError::UnsafeEnvValue { - key: key.to_owned(), - reason: format!("value too long ({} > {MAX_ENV_VALUE_LEN})", value.len()), - }); - } - if has_control_chars(value) { - return Err(crate::error::LauncherError::UnsafeEnvValue { - key: key.to_owned(), - reason: "contains control characters".into(), - }); - } - if value.contains("LD_PRELOAD") { - return Err(crate::error::LauncherError::UnsafeEnvValue { - key: key.to_owned(), - reason: "contains LD_PRELOAD".into(), - }); - } - Ok(()) -} - -#[cfg(test)] -mod tests { - use assert_matches::assert_matches; - use rstest::rstest; - - use super::*; - - #[rstest] - #[case("MPC_FOO")] - #[case("MPC_FOO_123")] - #[case("MPC_A_B_C")] - fn key_allows_mpc_prefix_uppercase(#[case] key: &str) { - assert_matches!(validate_env_key(key), Ok(_)); - } - - #[rstest] - #[case("MPC_foo")] - #[case("MPC-FOO")] - #[case("MPC.FOO")] - #[case("MPC_")] - fn key_rejects_lowercase_or_invalid_format(#[case] key: &str) { - assert_matches!(validate_env_key(key), Err(_)); - } - - #[rstest] - #[case("RUST_LOG")] - #[case("RUST_BACKTRACE")] - #[case("NEAR_BOOT_NODES")] - fn key_allows_compat_non_mpc_keys(#[case] key: &str) { - assert_matches!(validate_env_key(key), Ok(_)); - } - - #[rstest] - #[case("MPC_P2P_PRIVATE_KEY")] - #[case("MPC_ACCOUNT_SK")] - fn key_denies_sensitive_keys(#[case] key: &str) { - assert_matches!(validate_env_key(key), Err(_)); - } - - #[rstest] - #[case("BAD_KEY")] - #[case("HOME")] - fn key_rejects_unknown_non_mpc_key(#[case] key: &str) { - assert_matches!(validate_env_key(key), Err(_)); - } - - #[rstest] - #[case("ok\nno")] - #[case("ok\rno")] - fn value_rejects_control_chars(#[case] value: &str) { - 
assert_matches!(validate_env_value("K", value), Err(_)); - } - - #[test] - fn value_rejects_control_char_unit_separator() { - assert_matches!(validate_env_value("K", &format!("a{}b", '\x1F')), Err(_)); - } - - #[test] - fn value_allows_tab() { - assert_matches!(validate_env_value("K", "a\tb"), Ok(_)); - } - - #[rstest] - #[case("LD_PRELOAD=/tmp/x.so")] - #[case("foo LD_PRELOAD bar")] - fn value_rejects_ld_preload(#[case] value: &str) { - assert_matches!(validate_env_value("K", value), Err(_)); - } - - #[test] - fn value_rejects_too_long() { - assert_matches!( - validate_env_value("K", &"a".repeat(MAX_ENV_VALUE_LEN + 1)), - Err(_) - ); - } - - #[test] - fn value_accepts_at_length_limit() { - assert_matches!( - validate_env_value("K", &"a".repeat(MAX_ENV_VALUE_LEN)), - Ok(_) - ); - } - - #[rstest] - #[case("hello-world")] - #[case("192.168.1.1")] - #[case("info,mpc_node=debug")] - fn value_accepts_normal(#[case] value: &str) { - assert_matches!(validate_env_value("K", value), Ok(_)); - } -} diff --git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index 812f87509..02a45a3ff 100644 --- a/crates/tee-launcher/src/error.rs +++ b/crates/tee-launcher/src/error.rs @@ -27,18 +27,6 @@ pub enum LauncherError { output: String, }, - #[error("Too many env vars to pass through (>{0})")] - TooManyEnvVars(usize), - - #[error("Total env payload too large (>{0} bytes)")] - EnvPayloadTooLarge(usize), - - #[error("Env var '{key}' has unsafe value: {reason}")] - UnsafeEnvValue { key: String, reason: String }, - - #[error("Unsafe docker command: LD_PRELOAD detected")] - LdPreloadDetected, - #[error("Failed to read {path}: {source}")] FileRead { path: String, diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index e11043af8..5b5cb480a 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -17,7 +17,6 @@ use url::Url; mod constants; mod docker_types; -mod env_validation; mod error; mod types; @@ -120,7 +119,7 
@@ async fn run() -> Result<(), LauncherError> { launch_mpc_container( args.platform, &image_hash, - &dstack_config.mpc_passthrough_env, + &dstack_config.mpc_config_file, &dstack_config.docker_command_config, )?; @@ -355,22 +354,12 @@ async fn validate_image_hash( fn docker_run_args( platform: Platform, - mpc_config: &MpcBinaryConfig, + mpc_config_file: &std::path::Path, docker_flags: &DockerLaunchFlags, image_digest: &DockerSha256Digest, -) -> Result, LauncherError> { +) -> Vec { let mut cmd: Vec = vec![]; - // Required environment variables - cmd.extend([ - "--env".into(), - format!("MPC_IMAGE_HASH={}", image_digest.as_raw_hex()), - ]); - cmd.extend([ - "--env".into(), - format!("MPC_LATEST_ALLOWED_HASH_FILE={IMAGE_DIGEST_FILE}"), - ]); - if platform == Platform::Tee { cmd.extend([ "--env".into(), @@ -382,9 +371,12 @@ fn docker_run_args( ]); } - for (key, value) in mpc_config.env_vars()? { - cmd.extend(["--env".into(), format!("{key}={value}")]); - } + // Mount the MPC config file into the container (read-only) + let host_path = mpc_config_file.display(); + cmd.extend([ + "-v".into(), + format!("{host_path}:{MPC_CONFIG_CONTAINER_PATH}:ro"), + ]); cmd.extend(docker_flags.extra_hosts.docker_args()); cmd.extend(docker_flags.port_mappings.docker_args()); @@ -403,23 +395,21 @@ fn docker_run_args( MPC_CONTAINER_NAME.into(), "--detach".into(), image_digest.to_string(), + // Command for the MPC binary: read config from file + "start-with-config-file".into(), + MPC_CONFIG_CONTAINER_PATH.into(), ]); let docker_command_string = cmd.join(" "); tracing::info!(?docker_command_string, "docker cmd"); - // Final LD_PRELOAD safeguard - if docker_command_string.contains("LD_PRELOAD") { - return Err(LauncherError::LdPreloadDetected); - } - - Ok(cmd) + cmd } fn launch_mpc_container( platform: Platform, valid_hash: &DockerSha256Digest, - mpc_config: &MpcBinaryConfig, + mpc_config_file: &std::path::Path, docker_flags: &DockerLaunchFlags, ) -> Result<(), LauncherError> { 
tracing::info!("Launching MPC node with validated hash: {valid_hash}",); @@ -429,7 +419,7 @@ fn launch_mpc_container( .args(["rm", "-f", MPC_CONTAINER_NAME]) .output(); - let docker_run_args = docker_run_args(platform, mpc_config, docker_flags, valid_hash)?; + let docker_run_args = docker_run_args(platform, mpc_config_file, docker_flags, valid_hash); let run_output = Command::new("docker") .arg("run") @@ -456,7 +446,7 @@ fn launch_mpc_container( #[cfg(test)] mod tests { - use std::collections::BTreeMap; + use std::path::Path; use assert_matches::assert_matches; use bounded_collections::NonEmptyVec; @@ -468,6 +458,8 @@ mod tests { use crate::select_image_hash; use crate::types::*; + const SAMPLE_CONFIG_PATH: &str = "/tapp/mpc-config.json"; + fn digest(hex_char: char) -> DockerSha256Digest { format!( "sha256:{}", @@ -487,23 +479,6 @@ mod tests { } } - fn base_mpc_config() -> MpcBinaryConfig { - MpcBinaryConfig { - mpc_account_id: "test-account".into(), - mpc_local_address: "127.0.0.1".parse().unwrap(), - mpc_secret_key_store: "secret".into(), - mpc_backup_encryption_key_hex: "0".repeat(64), - mpc_env: MpcEnv::Testnet, - mpc_home_dir: "/data".into(), - mpc_contract_id: "contract.near".into(), - mpc_responder_id: "responder-1".into(), - near_boot_nodes: "boot1,boot2".into(), - rust_backtrace: RustBacktrace::Enabled, - rust_log: RustLog::Level(RustLogLevel::Info), - extra_env: BTreeMap::new(), - } - } - fn empty_docker_flags() -> DockerLaunchFlags { serde_json::from_value(serde_json::json!({ "extra_hosts": {"hosts": []}, @@ -523,12 +498,11 @@ mod tests { #[test] fn tee_mode_includes_dstack_mount() { // given - let config = base_mpc_config(); let flags = empty_docker_flags(); let digest = sample_digest(); // when - let args = docker_run_args(Platform::Tee, &config, &flags, &digest).unwrap(); + let args = docker_run_args(Platform::Tee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); // then let joined = args.join(" "); @@ -539,12 +513,12 @@ mod tests { #[test] fn 
nontee_mode_excludes_dstack_mount() { // given - let config = base_mpc_config(); let flags = empty_docker_flags(); let digest = sample_digest(); // when - let args = docker_run_args(Platform::NonTee, &config, &flags, &digest).unwrap(); + let args = + docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); // then let joined = args.join(" "); @@ -555,12 +529,12 @@ mod tests { #[test] fn includes_security_opts_and_required_volumes() { // given - let config = base_mpc_config(); let flags = empty_docker_flags(); let digest = sample_digest(); // when - let args = docker_run_args(Platform::NonTee, &config, &flags, &digest).unwrap(); + let args = + docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); // then let joined = args.join(" "); @@ -573,66 +547,94 @@ mod tests { } #[test] - fn image_digest_is_last_argument() { + fn mounts_config_file_read_only() { // given - let config = base_mpc_config(); let flags = empty_docker_flags(); let digest = sample_digest(); // when - let args = docker_run_args(Platform::NonTee, &config, &flags, &digest).unwrap(); + let args = + docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); // then - assert_eq!(args.last().unwrap(), &digest.to_string()); + let joined = args.join(" "); + assert!(joined.contains(&format!( + "{SAMPLE_CONFIG_PATH}:{MPC_CONFIG_CONTAINER_PATH}:ro" + ))); } #[test] - fn includes_ports_and_extra_hosts() { + fn includes_start_with_config_file_command() { // given - let config = base_mpc_config(); - let flags = docker_flags_with_host_and_port(); + let flags = empty_docker_flags(); let digest = sample_digest(); // when - let args = docker_run_args(Platform::NonTee, &config, &flags, &digest).unwrap(); + let args = + docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); // then let joined = args.join(" "); - assert!(joined.contains("--add-host node1:192.168.1.1")); - assert!(joined.contains("-p 11780:11780")); + 
assert!(joined.contains(&format!( + "start-with-config-file {MPC_CONFIG_CONTAINER_PATH}" + ))); } #[test] - fn includes_mpc_env_vars() { + fn image_digest_appears_before_command() { // given - let config = base_mpc_config(); let flags = empty_docker_flags(); let digest = sample_digest(); // when - let args = docker_run_args(Platform::NonTee, &config, &flags, &digest).unwrap(); + let args = + docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); + + // then - image digest should appear before "start-with-config-file" + let digest_pos = args.iter().position(|a| a == &digest.to_string()).unwrap(); + let cmd_pos = args + .iter() + .position(|a| a == "start-with-config-file") + .unwrap(); + assert!(digest_pos < cmd_pos); + } + + #[test] + fn includes_ports_and_extra_hosts() { + // given + let flags = docker_flags_with_host_and_port(); + let digest = sample_digest(); + + // when + let args = + docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); // then let joined = args.join(" "); - assert!(joined.contains("MPC_ACCOUNT_ID=test-account")); - assert!(joined.contains("MPC_IMAGE_HASH=")); - assert!(joined.contains(&format!("MPC_LATEST_ALLOWED_HASH_FILE={IMAGE_DIGEST_FILE}"))); + assert!(joined.contains("--add-host node1:192.168.1.1")); + assert!(joined.contains("-p 11780:11780")); } #[test] - fn ld_preload_in_typed_field_is_rejected_by_env_validation() { - // given - typed fields are also validated by env_validation::validate_env_value, - // so LD_PRELOAD in any env value is caught before the final safeguard. - let mut config = base_mpc_config(); - config.mpc_account_id = "LD_PRELOAD=/evil.so".into(); + fn no_env_vars_forwarded_for_mpc_config() { + // given let flags = empty_docker_flags(); let digest = sample_digest(); // when - let result = docker_run_args(Platform::NonTee, &config, &flags, &digest); - - // then - assert_matches!(result, Err(LauncherError::UnsafeEnvValue { .. 
})); + let args = + docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); + + // then - no MPC_* env vars should be present (only DSTACK_ENDPOINT in TEE mode) + let env_args: Vec<&String> = args + .windows(2) + .filter(|w| w[0] == "--env") + .map(|w| &w[1]) + .collect(); + assert!( + env_args.is_empty(), + "expected no --env args in non-TEE mode, got: {env_args:?}" + ); } // --- select_image_hash --- diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index 6017d6c23..4d7c6b911 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -1,6 +1,4 @@ -use std::collections::BTreeMap; -use std::fmt; -use std::net::{IpAddr, Ipv4Addr}; +use std::net::Ipv4Addr; use std::num::NonZeroU16; use std::path::PathBuf; @@ -11,8 +9,6 @@ use bounded_collections::NonEmptyVec; use clap::{Parser, ValueEnum}; use serde::{Deserialize, Serialize}; -use crate::env_validation; - /// CLI arguments parsed from environment variables via clap. #[derive(Parser, Debug)] #[command(name = "tee-launcher")] @@ -45,15 +41,14 @@ pub enum Platform { } /// Typed representation of the dstack user config file (`/tapp/user_config`). -/// -/// Launcher-only keys are extracted into typed fields; all remaining keys are -/// kept in `passthrough_env` for forwarding to the MPC container. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Config { pub launcher_config: LauncherConfig, pub docker_command_config: DockerLaunchFlags, - /// Remaining env vars forwarded to the MPC container. - pub mpc_passthrough_env: MpcBinaryConfig, + /// Path to the MPC node JSON config file on the host. + /// This file is mounted into the container and passed via + /// `start-with-config-file ` to the MPC binary. 
+ pub mpc_config_file: PathBuf, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -74,34 +69,6 @@ pub struct LauncherConfig { pub mpc_hash_override: Option, } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct MpcBinaryConfig { - // mpc - // TODO: use near type to not accept any string - pub mpc_account_id: String, - pub mpc_local_address: IpAddr, - // TODO: think this is no longer needed with node generated keys - pub mpc_secret_key_store: String, - // TODO: think this is no longer needed with node generated keys - pub mpc_backup_encryption_key_hex: String, - pub mpc_env: MpcEnv, - pub mpc_home_dir: PathBuf, - // TODO: use near type to not accept any string - pub mpc_contract_id: String, - // TODO: use near type to not accept any string - pub mpc_responder_id: String, - // near - pub near_boot_nodes: String, - // rust - pub rust_backtrace: RustBacktrace, - pub rust_log: RustLog, - /// Additional env vars not covered by the typed fields above. - /// Allows operators to pass new `MPC_*` vars without a launcher rebuild. - /// Keys and values are validated at emission time in `env_vars()`. 
- #[serde(flatten)] - pub extra_env: BTreeMap, -} - #[derive(Debug, Clone, Serialize, Deserialize)] pub struct DockerLaunchFlags { pub extra_hosts: ExtraHosts, @@ -153,184 +120,14 @@ impl PortMappings { } } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub enum MpcEnv { - Localnet, - Testnet, - Mainnet, -} - -impl fmt::Display for MpcEnv { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - MpcEnv::Localnet => write!(f, "localnet"), - MpcEnv::Testnet => write!(f, "testnet"), - MpcEnv::Mainnet => write!(f, "mainnet"), - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub enum RustBacktrace { - #[serde(rename = "0")] - Disabled, - #[serde(rename = "1")] - Enabled, - #[serde(rename = "short")] - Short, - #[serde(rename = "full")] - Full, -} - -impl fmt::Display for RustBacktrace { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - RustBacktrace::Disabled => write!(f, "0"), - RustBacktrace::Enabled => write!(f, "1"), - RustBacktrace::Short => write!(f, "short"), - RustBacktrace::Full => write!(f, "full"), - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum RustLogLevel { - Error, - Warn, - Info, - Debug, - Trace, -} - -impl fmt::Display for RustLogLevel { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - RustLogLevel::Error => write!(f, "error"), - RustLogLevel::Warn => write!(f, "warn"), - RustLogLevel::Info => write!(f, "info"), - RustLogLevel::Debug => write!(f, "debug"), - RustLogLevel::Trace => write!(f, "trace"), - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(untagged)] -pub enum RustLog { - Level(RustLogLevel), - Filter(String), -} - -impl fmt::Display for RustLog { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - RustLog::Level(level) => level.fmt(f), - RustLog::Filter(filter) => write!(f, 
"{filter}"), - } - } -} - -impl MpcBinaryConfig { - /// Returns all env vars to pass to the MPC container. - /// - /// Typed fields are emitted first (deterministic order), followed by - /// validated extras from `extra_env`. All keys and values are validated - /// uniformly before returning. - #[cfg(test)] - pub(crate) fn with_extra_env(mut self, extra: std::collections::BTreeMap) -> Self { - self.extra_env = extra; - self - } - - pub fn env_vars(&self) -> Result, crate::error::LauncherError> { - let mut vars: Vec<(String, String)> = vec![ - ("MPC_ACCOUNT_ID".into(), self.mpc_account_id.clone()), - ( - "MPC_LOCAL_ADDRESS".into(), - self.mpc_local_address.to_string(), - ), - ( - "MPC_SECRET_STORE_KEY".into(), - self.mpc_secret_key_store.clone(), - ), - ("MPC_CONTRACT_ID".into(), self.mpc_contract_id.clone()), - ("MPC_ENV".into(), self.mpc_env.to_string()), - ( - "MPC_HOME_DIR".into(), - self.mpc_home_dir.display().to_string(), - ), - ("MPC_RESPONDER_ID".into(), self.mpc_responder_id.clone()), - ( - "MPC_BACKUP_ENCRYPTION_KEY_HEX".into(), - self.mpc_backup_encryption_key_hex.clone(), - ), - ("NEAR_BOOT_NODES".into(), self.near_boot_nodes.clone()), - ("RUST_BACKTRACE".into(), self.rust_backtrace.to_string()), - ("RUST_LOG".into(), self.rust_log.to_string()), - ]; - - // Keys already emitted via typed fields — skip duplicates from extra_env. - let typed_keys: std::collections::HashSet = - vars.iter().map(|(k, _)| k.clone()).collect(); - - if self.extra_env.len() > env_validation::MAX_PASSTHROUGH_ENV_VARS { - return Err(crate::error::LauncherError::TooManyEnvVars( - env_validation::MAX_PASSTHROUGH_ENV_VARS, - )); - } - - // BTreeMap iteration is sorted, giving deterministic output. - for (key, value) in &self.extra_env { - if typed_keys.contains(key.as_str()) { - continue; - } - env_validation::validate_env_key(key)?; - vars.push((key.clone(), value.clone())); - } - - // Validate ALL env vars uniformly (typed + extra) and enforce aggregate caps. 
- let mut total_bytes: usize = 0; - for (key, value) in &vars { - env_validation::validate_env_value(key, value)?; - total_bytes += key.len() + 1 + value.len(); - } - if total_bytes > env_validation::MAX_TOTAL_ENV_BYTES { - return Err(crate::error::LauncherError::EnvPayloadTooLarge( - env_validation::MAX_TOTAL_ENV_BYTES, - )); - } - - Ok(vars) - } -} - #[cfg(test)] mod tests { use assert_matches::assert_matches; - use std::collections::BTreeMap; use std::net::Ipv4Addr; use std::num::NonZeroU16; use super::*; - fn base_mpc_config() -> MpcBinaryConfig { - MpcBinaryConfig { - mpc_account_id: "test-account".into(), - mpc_local_address: "127.0.0.1".parse().unwrap(), - mpc_secret_key_store: "secret".into(), - mpc_backup_encryption_key_hex: "0".repeat(64), - mpc_env: MpcEnv::Testnet, - mpc_home_dir: "/data".into(), - mpc_contract_id: "contract.near".into(), - mpc_responder_id: "responder-1".into(), - near_boot_nodes: "boot1,boot2".into(), - rust_backtrace: RustBacktrace::Enabled, - rust_log: RustLog::Level(RustLogLevel::Info), - extra_env: BTreeMap::new(), - } - } - // --- HostEntry deserialization --- #[test] @@ -471,170 +268,6 @@ mod tests { assert_eq!(args, vec!["-p", "11780:11780"]); } - // --- MpcBinaryConfig::env_vars --- - - #[test] - fn env_vars_includes_all_typed_fields() { - // given - let config = base_mpc_config(); - - // when - let vars = config.env_vars().unwrap(); - - // then - let keys: Vec<&str> = vars.iter().map(|(k, _)| k.as_str()).collect(); - assert!(keys.contains(&"MPC_ACCOUNT_ID")); - assert!(keys.contains(&"MPC_LOCAL_ADDRESS")); - assert!(keys.contains(&"MPC_SECRET_STORE_KEY")); - assert!(keys.contains(&"MPC_CONTRACT_ID")); - assert!(keys.contains(&"MPC_ENV")); - assert!(keys.contains(&"MPC_HOME_DIR")); - assert!(keys.contains(&"MPC_RESPONDER_ID")); - assert!(keys.contains(&"MPC_BACKUP_ENCRYPTION_KEY_HEX")); - assert!(keys.contains(&"NEAR_BOOT_NODES")); - assert!(keys.contains(&"RUST_BACKTRACE")); - assert!(keys.contains(&"RUST_LOG")); - } - - 
#[test] - fn env_vars_passes_valid_extra_mpc_key() { - // given - let mut extra = BTreeMap::new(); - extra.insert("MPC_NEW_FEATURE".into(), "enabled".into()); - let config = base_mpc_config().with_extra_env(extra); - - // when - let vars = config.env_vars().unwrap(); - - // then - assert!(vars.iter().any(|(k, v)| k == "MPC_NEW_FEATURE" && v == "enabled")); - } - - #[test] - fn env_vars_deduplicates_typed_key_from_extra() { - // given - let mut extra = BTreeMap::new(); - extra.insert("MPC_ACCOUNT_ID".into(), "duplicate".into()); - let config = base_mpc_config().with_extra_env(extra); - - // when - let vars = config.env_vars().unwrap(); - - // then - let account_values: Vec<&str> = vars - .iter() - .filter(|(k, _)| k == "MPC_ACCOUNT_ID") - .map(|(_, v)| v.as_str()) - .collect(); - assert_eq!(account_values.len(), 1); - assert_eq!(account_values[0], "test-account"); - } - - #[test] - fn env_vars_rejects_sensitive_key_in_extra() { - // given - let mut extra = BTreeMap::new(); - extra.insert("MPC_P2P_PRIVATE_KEY".into(), "secret".into()); - let config = base_mpc_config().with_extra_env(extra); - - // when - let result = config.env_vars(); - - // then - assert_matches!(result, Err(crate::error::LauncherError::UnsafeEnvValue { .. })); - } - - #[test] - fn env_vars_rejects_account_sk_in_extra() { - // given - let mut extra = BTreeMap::new(); - extra.insert("MPC_ACCOUNT_SK".into(), "secret".into()); - let config = base_mpc_config().with_extra_env(extra); - - // when - let result = config.env_vars(); - - // then - assert_matches!(result, Err(crate::error::LauncherError::UnsafeEnvValue { .. })); - } - - #[test] - fn env_vars_rejects_value_with_newline() { - // given - let mut extra = BTreeMap::new(); - extra.insert("MPC_INJECTED".into(), "ok\nbad".into()); - let config = base_mpc_config().with_extra_env(extra); - - // when - let result = config.env_vars(); - - // then - assert_matches!(result, Err(crate::error::LauncherError::UnsafeEnvValue { .. 
})); - } - - #[test] - fn env_vars_rejects_value_containing_ld_preload() { - // given - let mut extra = BTreeMap::new(); - extra.insert("MPC_INJECTED".into(), "LD_PRELOAD=/tmp/x.so".into()); - let config = base_mpc_config().with_extra_env(extra); - - // when - let result = config.env_vars(); - - // then - assert_matches!(result, Err(crate::error::LauncherError::UnsafeEnvValue { .. })); - } - - #[test] - fn env_vars_rejects_too_many_extra_vars() { - // given - let mut extra = BTreeMap::new(); - for i in 0..=crate::env_validation::MAX_PASSTHROUGH_ENV_VARS { - extra.insert(format!("MPC_X_{i}"), "1".into()); - } - let config = base_mpc_config().with_extra_env(extra); - - // when - let result = config.env_vars(); - - // then - assert_matches!(result, Err(crate::error::LauncherError::TooManyEnvVars(_))); - } - - #[test] - fn env_vars_rejects_total_bytes_exceeded() { - // given - let mut extra = BTreeMap::new(); - for i in 0..40 { - extra.insert( - format!("MPC_BIG_{i}"), - "a".repeat(crate::env_validation::MAX_ENV_VALUE_LEN), - ); - } - let config = base_mpc_config().with_extra_env(extra); - - // when - let result = config.env_vars(); - - // then - assert_matches!(result, Err(crate::error::LauncherError::EnvPayloadTooLarge(_))); - } - - #[test] - fn env_vars_rejects_unknown_non_mpc_key() { - // given - let mut extra = BTreeMap::new(); - extra.insert("BAD_KEY".into(), "value".into()); - let config = base_mpc_config().with_extra_env(extra); - - // when - let result = config.env_vars(); - - // then - assert_matches!(result, Err(crate::error::LauncherError::UnsafeEnvValue { .. 
})); - } - // --- Config full deserialization --- #[test] @@ -654,19 +287,7 @@ mod tests { "extra_hosts": {"hosts": [{"hostname": {"Domain": "node1"}, "ip": "192.168.1.1"}]}, "port_mappings": {"ports": [{"src": 11780, "dst": 11780}]} }, - "mpc_passthrough_env": { - "mpc_account_id": "account123", - "mpc_local_address": "127.0.0.1", - "mpc_secret_key_store": "secret", - "mpc_backup_encryption_key_hex": "0000000000000000000000000000000000000000000000000000000000000000", - "mpc_env": "Testnet", - "mpc_home_dir": "/data", - "mpc_contract_id": "contract.near", - "mpc_responder_id": "responder-1", - "near_boot_nodes": "boot1", - "rust_backtrace": "1", - "rust_log": "info" - } + "mpc_config_file": "/tapp/mpc-config.json" }); // when @@ -674,14 +295,14 @@ mod tests { // then assert_matches!(result, Ok(config) => { - assert_eq!(config.mpc_passthrough_env.mpc_account_id, "account123"); assert_eq!(config.launcher_config.image_name, "nearone/mpc-node"); + assert_eq!(config.mpc_config_file, PathBuf::from("/tapp/mpc-config.json")); }); } #[test] fn config_rejects_missing_required_field() { - // given - mpc_account_id is missing + // given - mpc_config_file is missing let json = serde_json::json!({ "launcher_config": { "image_tags": ["tag1"], @@ -695,18 +316,6 @@ mod tests { "docker_command_config": { "extra_hosts": {"hosts": []}, "port_mappings": {"ports": []} - }, - "mpc_passthrough_env": { - "mpc_local_address": "127.0.0.1", - "mpc_secret_key_store": "secret", - "mpc_backup_encryption_key_hex": "0000000000000000000000000000000000000000000000000000000000000000", - "mpc_env": "Testnet", - "mpc_home_dir": "/data", - "mpc_contract_id": "contract.near", - "mpc_responder_id": "responder-1", - "near_boot_nodes": "boot1", - "rust_backtrace": "1", - "rust_log": "info" } }); diff --git a/libs/nearcore b/libs/nearcore index 8a8c21bc8..3def2f7eb 160000 --- a/libs/nearcore +++ b/libs/nearcore @@ -1 +1 @@ -Subproject commit 8a8c21bc81999af93edd1b6bca5b7c6c6337aa63 +Subproject commit 
3def2f7ebb7455199e7b3f7b371e3735c23e2930 From 3812aa4d634f32ee6156e6f3ad394bea45fde23f Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 17:38:43 +0100 Subject: [PATCH 61/82] fmt --- crates/tee-launcher/src/main.rs | 63 ++++++++++++++++++++++++-------- crates/tee-launcher/src/types.rs | 6 +-- 2 files changed, 50 insertions(+), 19 deletions(-) diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 5b5cb480a..757000c77 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -502,7 +502,12 @@ mod tests { let digest = sample_digest(); // when - let args = docker_run_args(Platform::Tee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); + let args = docker_run_args( + Platform::Tee, + Path::new(SAMPLE_CONFIG_PATH), + &flags, + &digest, + ); // then let joined = args.join(" "); @@ -517,8 +522,12 @@ mod tests { let digest = sample_digest(); // when - let args = - docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); + let args = docker_run_args( + Platform::NonTee, + Path::new(SAMPLE_CONFIG_PATH), + &flags, + &digest, + ); // then let joined = args.join(" "); @@ -533,8 +542,12 @@ mod tests { let digest = sample_digest(); // when - let args = - docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); + let args = docker_run_args( + Platform::NonTee, + Path::new(SAMPLE_CONFIG_PATH), + &flags, + &digest, + ); // then let joined = args.join(" "); @@ -553,8 +566,12 @@ mod tests { let digest = sample_digest(); // when - let args = - docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); + let args = docker_run_args( + Platform::NonTee, + Path::new(SAMPLE_CONFIG_PATH), + &flags, + &digest, + ); // then let joined = args.join(" "); @@ -570,8 +587,12 @@ mod tests { let digest = sample_digest(); // when - let args = - docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); + let args = 
docker_run_args( + Platform::NonTee, + Path::new(SAMPLE_CONFIG_PATH), + &flags, + &digest, + ); // then let joined = args.join(" "); @@ -587,8 +608,12 @@ mod tests { let digest = sample_digest(); // when - let args = - docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); + let args = docker_run_args( + Platform::NonTee, + Path::new(SAMPLE_CONFIG_PATH), + &flags, + &digest, + ); // then - image digest should appear before "start-with-config-file" let digest_pos = args.iter().position(|a| a == &digest.to_string()).unwrap(); @@ -606,8 +631,12 @@ mod tests { let digest = sample_digest(); // when - let args = - docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); + let args = docker_run_args( + Platform::NonTee, + Path::new(SAMPLE_CONFIG_PATH), + &flags, + &digest, + ); // then let joined = args.join(" "); @@ -622,8 +651,12 @@ mod tests { let digest = sample_digest(); // when - let args = - docker_run_args(Platform::NonTee, Path::new(SAMPLE_CONFIG_PATH), &flags, &digest); + let args = docker_run_args( + Platform::NonTee, + Path::new(SAMPLE_CONFIG_PATH), + &flags, + &digest, + ); // then - no MPC_* env vars should be present (only DSTACK_ENDPOINT in TEE mode) let env_args: Vec<&String> = args diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index 4d7c6b911..352ed1979 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -133,8 +133,7 @@ mod tests { #[test] fn host_entry_valid_deserialization() { // given - let json = - serde_json::json!({"hostname": {"Domain": "node.local"}, "ip": "192.168.1.1"}); + let json = serde_json::json!({"hostname": {"Domain": "node.local"}, "ip": "192.168.1.1"}); // when let result = serde_json::from_value::(json); @@ -148,8 +147,7 @@ mod tests { #[test] fn host_entry_rejects_invalid_ip() { // given - let json = - serde_json::json!({"hostname": {"Domain": "node.local"}, "ip": "not-an-ip"}); + let json = 
serde_json::json!({"hostname": {"Domain": "node.local"}, "ip": "not-an-ip"}); // when let result = serde_json::from_value::(json); From 5fa48e5a1e4cf05687b6e57952d0a6d7ac5aec36 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 17:39:39 +0100 Subject: [PATCH 62/82] shear and sort --- Cargo.lock | 1 - Cargo.toml | 3 +-- crates/launcher-interface/Cargo.toml | 3 +-- crates/mpc-attestation/Cargo.toml | 2 +- crates/node/Cargo.toml | 2 +- crates/node/src/tee/allowed_image_hashes_watcher.rs | 2 +- crates/tee-launcher/Cargo.toml | 1 - 7 files changed, 5 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index af0abd50a..50b4e2ec6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10604,7 +10604,6 @@ dependencies = [ "reqwest 0.12.28", "serde", "serde_json", - "tempfile", "thiserror 2.0.18", "tokio", "tracing", diff --git a/Cargo.toml b/Cargo.toml index d4a3addde..27e118ab3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,11 +45,11 @@ contract-interface = { path = "crates/contract-interface" } foreign-chain-inspector = { path = "crates/foreign-chain-inspector" } foreign-chain-rpc-interfaces = { path = "crates/foreign-chain-rpc-interfaces" } include-measurements = { path = "crates/include-measurements" } +launcher-interface = { path = "crates/launcher-interface" } mpc-attestation = { path = "crates/mpc-attestation" } mpc-contract = { path = "crates/contract", features = ["dev-utils"] } mpc-crypto-types = { path = "crates/crypto-types" } mpc-node = { path = "crates/node" } -launcher-interface = { path = "crates/launcher-interface" } mpc-primitives = { path = "crates/primitives", features = ["abi"] } mpc-tls = { path = "crates/tls" } near-mpc-sdk = { path = "crates/near-mpc-sdk" } @@ -97,7 +97,6 @@ derive_more = { version = "2.1.1", features = [ "into", ] } digest = "0.10.7" -dotenvy = "0.15" dstack-sdk = { version = "0.1.2" } dstack-sdk-types = { version = "0.1.2", features = ["borsh"] } ecdsa = { version = "0.16.9", features = ["digest", "hazmat"] } 
diff --git a/crates/launcher-interface/Cargo.toml b/crates/launcher-interface/Cargo.toml index 56f46ec29..b559eed81 100644 --- a/crates/launcher-interface/Cargo.toml +++ b/crates/launcher-interface/Cargo.toml @@ -5,13 +5,12 @@ edition.workspace = true license.workspace = true [dependencies] -derive_more = { workspace = true } bounded-collections = { workspace = true } +derive_more = { workspace = true } mpc-primitives = { workspace = true } serde = { workspace = true } thiserror = { workspace = true } - [dev-dependencies] assert_matches = { workspace = true } insta = { workspace = true } diff --git a/crates/mpc-attestation/Cargo.toml b/crates/mpc-attestation/Cargo.toml index 7af910854..7922a8230 100644 --- a/crates/mpc-attestation/Cargo.toml +++ b/crates/mpc-attestation/Cargo.toml @@ -13,12 +13,12 @@ borsh = { workspace = true } derive_more = { workspace = true } hex = { workspace = true } include-measurements = { workspace = true } +launcher-interface = { workspace = true } mpc-primitives = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } sha2 = { workspace = true } sha3 = { workspace = true } -launcher-interface = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } diff --git a/crates/node/Cargo.toml b/crates/node/Cargo.toml index 0e682ba0d..af793fcd1 100644 --- a/crates/node/Cargo.toml +++ b/crates/node/Cargo.toml @@ -17,7 +17,6 @@ backon = { workspace = true } base64 = { workspace = true } borsh = { workspace = true } bounded-collections = { workspace = true } -launcher-interface = { workspace = true } bs58 = { workspace = true } bytes = { workspace = true } clap = { workspace = true } @@ -35,6 +34,7 @@ humantime = { workspace = true } hyper = { workspace = true } itertools = { workspace = true } k256 = { workspace = true } +launcher-interface = { workspace = true } lru = { workspace = true } mpc-attestation = { workspace = true } mpc-contract = { workspace = true } diff --git 
a/crates/node/src/tee/allowed_image_hashes_watcher.rs b/crates/node/src/tee/allowed_image_hashes_watcher.rs index 2a5af11ad..de2ba5d53 100644 --- a/crates/node/src/tee/allowed_image_hashes_watcher.rs +++ b/crates/node/src/tee/allowed_image_hashes_watcher.rs @@ -47,7 +47,7 @@ impl AllowedImageHashesStorage for AllowedImageHashesFile { }; let json = serde_json::to_string_pretty(&approved_hashes) - .expect("previous json! macro would also panic. TODO figure out what to return"); + .expect("previous json! macro would also panic. figure out what to return"); tracing::debug!(?approved_hashes, "writing approved hashes to disk"); diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index da20a9ba4..32a65aad0 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -28,7 +28,6 @@ url = { workspace = true, features = ["serde"] } [dev-dependencies] assert_matches = { workspace = true } -tempfile = { workspace = true } [lints] workspace = true From a5cbac2a1a8688bc729f52a7474f8a27caeac839 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 17:41:53 +0100 Subject: [PATCH 63/82] make check all fast pass --- crates/tee-launcher/src/docker_types.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/tee-launcher/src/docker_types.rs b/crates/tee-launcher/src/docker_types.rs index e48780fd3..6c3bf27ce 100644 --- a/crates/tee-launcher/src/docker_types.rs +++ b/crates/tee-launcher/src/docker_types.rs @@ -1,7 +1,7 @@ use launcher_interface::types::DockerSha256Digest; use serde::{Deserialize, Serialize}; -/// Partial response https://auth.docker.io/token +/// Partial response #[derive(Debug, Deserialize, Serialize)] pub struct DockerTokenResponse { pub token: String, From 3138c557bc5bf37f06e4539ea9549e4935f0fcbc Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 17:46:34 +0100 Subject: [PATCH 64/82] undo nearcore --- libs/nearcore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/libs/nearcore b/libs/nearcore index 3def2f7eb..8a8c21bc8 160000 --- a/libs/nearcore +++ b/libs/nearcore @@ -1 +1 @@ -Subproject commit 3def2f7ebb7455199e7b3f7b371e3735c23e2930 +Subproject commit 8a8c21bc81999af93edd1b6bca5b7c6c6337aa63 From d8f44a65f2e7f8a38107a0ec9311060dc168dbb4 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 6 Mar 2026 17:46:51 +0100 Subject: [PATCH 65/82] undo launcher --- tee_launcher/launcher.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tee_launcher/launcher.py b/tee_launcher/launcher.py index e8beeb9c2..a0dd34958 100644 --- a/tee_launcher/launcher.py +++ b/tee_launcher/launcher.py @@ -147,7 +147,7 @@ class Platform(Enum): ALLOWED_MPC_ENV_VARS = { "MPC_ACCOUNT_ID", # ID of the MPC account on the network "MPC_LOCAL_ADDRESS", # Local IP address or hostname used by the MPC node - "MPC_SECRET_STORE_KEY", # Key used to encrypt/decrypt secrets // Isn't this deprecated?, + "MPC_SECRET_STORE_KEY", # Key used to encrypt/decrypt secrets "MPC_CONTRACT_ID", # Contract ID associated with the MPC node "MPC_ENV", # Environment (e.g., 'testnet', 'mainnet') "MPC_HOME_DIR", # Home directory for the MPC node @@ -242,7 +242,6 @@ def is_safe_port_mapping(mapping: str) -> bool: def remove_existing_container(): - # changed in rust, no point checking current container exists. 
Just send shutdown signal to MPC_CONTAINER_NAME """Stop and remove the MPC container if it exists.""" try: containers = check_output( @@ -261,7 +260,6 @@ class ImageSpec: image_name: str registry: str - # TODO: This post validation is not covered def __post_init__(self): if not self.tags or not all(is_non_empty_and_cleaned(tag) for tag in self.tags): raise ValueError( From 2ebb35f03eb55178e45169cc5958f2805f7972a2 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Mon, 9 Mar 2026 13:44:22 +0100 Subject: [PATCH 66/82] oneshot change to toml --- Cargo.lock | 50 +++++++++++++++-- Cargo.toml | 1 + crates/node/Cargo.toml | 1 + crates/node/src/cli.rs | 6 +- crates/node/src/config/start.rs | 8 +-- docs/localnet/localnet.md | 14 ++--- docs/localnet/mpc-config.template.json | 75 ------------------------- docs/localnet/mpc-config.template.toml | 77 ++++++++++++++++++++++++++ libs/nearcore | 2 +- pytest/common_lib/shared/mpc_node.py | 10 ++-- pytest/requirements.txt | 1 + 11 files changed, 146 insertions(+), 99 deletions(-) delete mode 100644 docs/localnet/mpc-config.template.json create mode 100644 docs/localnet/mpc-config.template.toml diff --git a/Cargo.lock b/Cargo.lock index 43161c697..c6466809f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1640,7 +1640,7 @@ dependencies = [ "serde-untagged", "serde-value", "thiserror 2.0.18", - "toml", + "toml 0.8.23", "unicode-xid", "url", ] @@ -5554,6 +5554,7 @@ dependencies = [ "tokio-rustls", "tokio-stream", "tokio-util", + "toml 1.0.6+spec-1.1.0", "tower", "tracing", "tracing-subscriber", @@ -9950,6 +9951,15 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_spanned" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" +dependencies = [ + "serde_core", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -10963,11 +10973,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" dependencies = [ "serde", - "serde_spanned", + "serde_spanned 0.6.9", "toml_datetime 0.6.11", "toml_edit 0.22.27", ] +[[package]] +name = "toml" +version = "1.0.6+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "399b1124a3c9e16766831c6bba21e50192572cdd98706ea114f9502509686ffc" +dependencies = [ + "indexmap 2.13.0", + "serde_core", + "serde_spanned 1.0.4", + "toml_datetime 1.0.0+spec-1.1.0", + "toml_parser", + "toml_writer", + "winnow", +] + [[package]] name = "toml_datetime" version = "0.6.11" @@ -10986,6 +11011,15 @@ dependencies = [ "serde_core", ] +[[package]] +name = "toml_datetime" +version = "1.0.0+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32c2555c699578a4f59f0cc68e5116c8d7cabbd45e1409b989d4be085b53f13e" +dependencies = [ + "serde_core", +] + [[package]] name = "toml_edit" version = "0.22.27" @@ -10994,7 +11028,7 @@ checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ "indexmap 2.13.0", "serde", - "serde_spanned", + "serde_spanned 0.6.9", "toml_datetime 0.6.11", "toml_write", "winnow", @@ -11014,9 +11048,9 @@ dependencies = [ [[package]] name = "toml_parser" -version = "1.0.7+spec-1.1.0" +version = "1.0.9+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "247eaa3197818b831697600aadf81514e577e0cba5eab10f7e064e78ae154df1" +checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4" dependencies = [ "winnow", ] @@ -11027,6 +11061,12 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" +[[package]] +name = "toml_writer" +version = "1.0.6+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" + 
[[package]] name = "tonic" version = "0.13.1" diff --git a/Cargo.toml b/Cargo.toml index 459adf731..81f61d02d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -166,6 +166,7 @@ rustls = { version = "0.23.36", default-features = false, features = ["std"] } serde = { version = "1.0", features = ["derive"] } serde_bytes = "0.11.19" serde_json = "1.0" +toml = "1.0.6" serde_repr = "0.1.20" serde_with = { version = "3.16.1", features = ["hex"] } serial_test = "3.4.0" diff --git a/crates/node/Cargo.toml b/crates/node/Cargo.toml index 65233197f..1f3a02a20 100644 --- a/crates/node/Cargo.toml +++ b/crates/node/Cargo.toml @@ -59,6 +59,7 @@ rustls = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } serde_with = { workspace = true } +toml = { workspace = true } serde_yaml = { workspace = true } sha3 = { workspace = true } socket2 = { workspace = true } diff --git a/crates/node/src/cli.rs b/crates/node/src/cli.rs index f5529b72b..514691aec 100644 --- a/crates/node/src/cli.rs +++ b/crates/node/src/cli.rs @@ -44,10 +44,10 @@ pub enum LogFormat { #[derive(Subcommand, Debug)] pub enum CliCommand { - /// Starts the MPC node using a single JSON configuration file instead of + /// Starts the MPC node using a single TOML configuration file instead of /// environment variables and CLI flags. StartWithConfigFile { - /// Path to a JSON configuration file containing all settings needed to + /// Path to a TOML configuration file containing all settings needed to /// start the MPC node. 
config_path: PathBuf, }, @@ -226,7 +226,7 @@ impl Cli { pub async fn run(self) -> anyhow::Result<()> { match self.command { CliCommand::StartWithConfigFile { config_path } => { - let node_configuration = StartConfig::from_json_file(&config_path)?; + let node_configuration = StartConfig::from_toml_file(&config_path)?; run_mpc_node(node_configuration).await } // TODO(#2334): deprecate this diff --git a/crates/node/src/config/start.rs b/crates/node/src/config/start.rs index b2a26e565..dea203f84 100644 --- a/crates/node/src/config/start.rs +++ b/crates/node/src/config/start.rs @@ -10,7 +10,7 @@ use url::Url; /// Configuration for starting the MPC node. This is the canonical type used /// by the run logic. Both `StartCmd` (CLI flags) and `StartWithConfigFileCmd` -/// (JSON file) convert into this type. +/// (TOML file) convert into this type. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct StartConfig { pub home_dir: PathBuf, @@ -74,7 +74,7 @@ pub struct GcpStartConfig { pub project_id: String, } -/// TEE authority configuration for JSON deserialization. +/// TEE authority configuration for deserialization. 
#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(tag = "type", rename_all = "snake_case")] pub enum TeeAuthorityStartConfig { @@ -114,10 +114,10 @@ impl TeeAuthorityStartConfig { } impl StartConfig { - pub fn from_json_file(path: &std::path::Path) -> anyhow::Result { + pub fn from_toml_file(path: &std::path::Path) -> anyhow::Result { let content = std::fs::read_to_string(path) .with_context(|| format!("failed to read config file: {}", path.display()))?; - let config: Self = serde_json::from_str(&content) + let config: Self = toml::from_str(&content) .with_context(|| format!("failed to parse config file: {}", path.display()))?; config .node diff --git a/docs/localnet/localnet.md b/docs/localnet/localnet.md index 90045af21..14a8d81db 100644 --- a/docs/localnet/localnet.md +++ b/docs/localnet/localnet.md @@ -191,13 +191,13 @@ Since this is not a validator node, we can remove `validator_key.json` rm ~/.near/mpc-frodo/validator_key.json ``` -Next we'll create a JSON configuration file for Frodo's MPC node using the -shared template at `docs/localnet/mpc-config.template.json`. This single file +Next we'll create a TOML configuration file for Frodo's MPC node using the +shared template at `docs/localnet/mpc-config.template.toml`. 
This single file contains all settings (secrets, TEE config, and node parameters): ```shell env MPC_NODE_ID=mpc-frodo NEAR_ACCOUNT_ID=frodo.test.near WEB_UI_PORT=8081 MIGRATION_WEB_UI_PORT=8079 PPROF_PORT=34001 \ - envsubst < docs/localnet/mpc-config.template.json > ~/.near/mpc-frodo/mpc-config.json + envsubst < docs/localnet/mpc-config.template.toml > ~/.near/mpc-frodo/mpc-config.toml ``` ### Initialize Sam's node @@ -222,19 +222,19 @@ rm ~/.near/mpc-sam/validator_key.json ```shell env MPC_NODE_ID=mpc-sam NEAR_ACCOUNT_ID=sam.test.near WEB_UI_PORT=8082 MIGRATION_WEB_UI_PORT=8078 PPROF_PORT=34002 \ - envsubst < docs/localnet/mpc-config.template.json > ~/.near/mpc-sam/mpc-config.json + envsubst < docs/localnet/mpc-config.template.toml > ~/.near/mpc-sam/mpc-config.toml ``` ### Run the MPC binary -In two separate shells run the MPC binary for Frodo and Sam using their JSON config files: +In two separate shells run the MPC binary for Frodo and Sam using their TOML config files: ```shell -RUST_LOG=info mpc-node start-with-config-file ~/.near/mpc-sam/mpc-config.json +RUST_LOG=info mpc-node start-with-config-file ~/.near/mpc-sam/mpc-config.toml ``` ```shell -RUST_LOG=info mpc-node start-with-config-file ~/.near/mpc-frodo/mpc-config.json +RUST_LOG=info mpc-node start-with-config-file ~/.near/mpc-frodo/mpc-config.toml ``` Notes: diff --git a/docs/localnet/mpc-config.template.json b/docs/localnet/mpc-config.template.json deleted file mode 100644 index 07e15ff27..000000000 --- a/docs/localnet/mpc-config.template.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "home_dir": "$HOME/.near/$MPC_NODE_ID", - "secrets": { - "secret_store_key_hex": "11111111111111111111111111111111" - }, - "tee": { - "authority": { "type": "local" }, - "image_hash": "8b40f81f77b8c22d6c777a6e14d307a1d11cb55ab83541fbb8575d02d86a74b0", - "latest_allowed_hash_file": "/tmp/LATEST_ALLOWED_HASH_FILE.txt" - }, - "node": { - "my_near_account_id": "$NEAR_ACCOUNT_ID", - "near_responder_account_id": "$NEAR_ACCOUNT_ID", - 
"number_of_responder_keys": 1, - "web_ui": "127.0.0.1:$WEB_UI_PORT", - "migration_web_ui": "127.0.0.1:$MIGRATION_WEB_UI_PORT", - "pprof_bind_address": "127.0.0.1:$PPROF_PORT", - "triple": { - "concurrency": 2, - "desired_triples_to_buffer": 128, - "timeout_sec": 60, - "parallel_triple_generation_stagger_time_sec": 1 - }, - "presignature": { - "concurrency": 4, - "desired_presignatures_to_buffer": 64, - "timeout_sec": 60 - }, - "signature": { "timeout_sec": 60 }, - "indexer": { - "validate_genesis": false, - "sync_mode": "Latest", - "concurrency": 1, - "mpc_contract_id": "mpc-contract.test.near", - "finality": "optimistic" - }, - "ckd": { "timeout_sec": 60 }, - "cores": 4, - "foreign_chains": { - "bitcoin": { - "timeout_sec": 30, - "max_retries": 3, - "providers": { - "public": { - "api_variant": "esplora", - "rpc_url": "https://bitcoin-rpc.publicnode.com", - "auth": { "kind": "none" } - } - } - }, - "abstract": { - "timeout_sec": 30, - "max_retries": 3, - "providers": { - "public": { - "api_variant": "standard", - "rpc_url": "https://api.testnet.abs.xyz", - "auth": { "kind": "none" } - } - } - }, - "starknet": { - "timeout_sec": 30, - "max_retries": 3, - "providers": { - "public": { - "api_variant": "standard", - "rpc_url": "https://starknet-rpc.publicnode.com", - "auth": { "kind": "none" } - } - } - } - } - } -} diff --git a/docs/localnet/mpc-config.template.toml b/docs/localnet/mpc-config.template.toml new file mode 100644 index 000000000..c9f7d6d4d --- /dev/null +++ b/docs/localnet/mpc-config.template.toml @@ -0,0 +1,77 @@ +home_dir = "$HOME/.near/$MPC_NODE_ID" + +[secrets] +secret_store_key_hex = "11111111111111111111111111111111" + +[tee] +image_hash = "8b40f81f77b8c22d6c777a6e14d307a1d11cb55ab83541fbb8575d02d86a74b0" +latest_allowed_hash_file = "/tmp/LATEST_ALLOWED_HASH_FILE.txt" + +[tee.authority] +type = "local" + +[node] +my_near_account_id = "$NEAR_ACCOUNT_ID" +near_responder_account_id = "$NEAR_ACCOUNT_ID" +number_of_responder_keys = 1 +web_ui = 
"127.0.0.1:$WEB_UI_PORT" +migration_web_ui = "127.0.0.1:$MIGRATION_WEB_UI_PORT" +pprof_bind_address = "127.0.0.1:$PPROF_PORT" +cores = 4 + +[node.triple] +concurrency = 2 +desired_triples_to_buffer = 128 +timeout_sec = 60 +parallel_triple_generation_stagger_time_sec = 1 + +[node.presignature] +concurrency = 4 +desired_presignatures_to_buffer = 64 +timeout_sec = 60 + +[node.signature] +timeout_sec = 60 + +[node.indexer] +validate_genesis = false +sync_mode = "Latest" +concurrency = 1 +mpc_contract_id = "mpc-contract.test.near" +finality = "optimistic" + +[node.ckd] +timeout_sec = 60 + +[node.foreign_chains.bitcoin] +timeout_sec = 30 +max_retries = 3 + +[node.foreign_chains.bitcoin.providers.public] +api_variant = "esplora" +rpc_url = "https://bitcoin-rpc.publicnode.com" + +[node.foreign_chains.bitcoin.providers.public.auth] +kind = "none" + +[node.foreign_chains.abstract] +timeout_sec = 30 +max_retries = 3 + +[node.foreign_chains.abstract.providers.public] +api_variant = "standard" +rpc_url = "https://api.testnet.abs.xyz" + +[node.foreign_chains.abstract.providers.public.auth] +kind = "none" + +[node.foreign_chains.starknet] +timeout_sec = 30 +max_retries = 3 + +[node.foreign_chains.starknet.providers.public] +api_variant = "standard" +rpc_url = "https://starknet-rpc.publicnode.com" + +[node.foreign_chains.starknet.providers.public.auth] +kind = "none" diff --git a/libs/nearcore b/libs/nearcore index 8a8c21bc8..3def2f7eb 160000 --- a/libs/nearcore +++ b/libs/nearcore @@ -1 +1 @@ -Subproject commit 8a8c21bc81999af93edd1b6bca5b7c6c6337aa63 +Subproject commit 3def2f7ebb7455199e7b3f7b371e3735c23e2930 diff --git a/pytest/common_lib/shared/mpc_node.py b/pytest/common_lib/shared/mpc_node.py index c8b87d5ce..fcbb007b3 100644 --- a/pytest/common_lib/shared/mpc_node.py +++ b/pytest/common_lib/shared/mpc_node.py @@ -6,6 +6,8 @@ import time from typing import cast +import tomli_w + from key import Key @@ -130,7 +132,7 @@ def reset_mpc_data(self): file_path.unlink() def 
_write_start_config(self) -> str: - """Build a StartConfig JSON file and write it to the node's home dir. + """Build a StartConfig TOML file and write it to the node's home dir. Returns the path to the written config file.""" start_config = { "home_dir": self.home_dir, @@ -145,9 +147,9 @@ def _write_start_config(self) -> str: }, "node": self.node_config, } - config_path = str(pathlib.Path(self.home_dir) / "start_config.json") - with open(config_path, "w") as f: - json.dump(start_config, f, indent=2) + config_path = str(pathlib.Path(self.home_dir) / "start_config.toml") + with open(config_path, "wb") as f: + tomli_w.dump(start_config, f) return config_path def run(self): diff --git a/pytest/requirements.txt b/pytest/requirements.txt index 1f5f14c5e..1bbc919bd 100644 --- a/pytest/requirements.txt +++ b/pytest/requirements.txt @@ -4,3 +4,4 @@ blspy py-arkworks-bls12381 pytest==8.3.4 gitpython +tomli_w From 7d5cc0951aedc8f730beed2e8d60b25e3b804de4 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Mon, 9 Mar 2026 13:51:10 +0100 Subject: [PATCH 67/82] sort toml declaration --- Cargo.toml | 2 +- crates/node/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 81f61d02d..85d9c5b67 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -166,7 +166,6 @@ rustls = { version = "0.23.36", default-features = false, features = ["std"] } serde = { version = "1.0", features = ["derive"] } serde_bytes = "0.11.19" serde_json = "1.0" -toml = "1.0.6" serde_repr = "0.1.20" serde_with = { version = "3.16.1", features = ["hex"] } serial_test = "3.4.0" @@ -184,6 +183,7 @@ tokio-metrics = { version = "0.4.8" } tokio-rustls = { version = "0.26.4", default-features = false } tokio-stream = { version = "0.1" } tokio-util = { version = "0.7.12", features = ["time"] } +toml = "1.0.6" tower = "0.5.3" tracing = "0.1.44" tracing-subscriber = { version = "0.3.22", features = [ diff --git a/crates/node/Cargo.toml b/crates/node/Cargo.toml index 
1f3a02a20..2ce8b5e54 100644 --- a/crates/node/Cargo.toml +++ b/crates/node/Cargo.toml @@ -59,7 +59,6 @@ rustls = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } serde_with = { workspace = true } -toml = { workspace = true } serde_yaml = { workspace = true } sha3 = { workspace = true } socket2 = { workspace = true } @@ -74,6 +73,7 @@ tokio-metrics = { workspace = true } tokio-rustls = { workspace = true } tokio-stream = { workspace = true } tokio-util = { workspace = true } +toml = { workspace = true } tower = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } From ee45ca9d523cb1083ea092e945f6a9a6bc62cb4f Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Mon, 9 Mar 2026 13:55:51 +0100 Subject: [PATCH 68/82] revert nearcore change --- libs/nearcore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/nearcore b/libs/nearcore index 3def2f7eb..8a8c21bc8 160000 --- a/libs/nearcore +++ b/libs/nearcore @@ -1 +1 @@ -Subproject commit 3def2f7ebb7455199e7b3f7b371e3735c23e2930 +Subproject commit 8a8c21bc81999af93edd1b6bca5b7c6c6337aa63 From 3b4dcbbacc0dc708672cb7348e99a280bab10610 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Mon, 9 Mar 2026 14:13:47 +0100 Subject: [PATCH 69/82] pytest, don't include None fields --- pytest/common_lib/shared/mpc_node.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/pytest/common_lib/shared/mpc_node.py b/pytest/common_lib/shared/mpc_node.py index fcbb007b3..9f6fad0d1 100644 --- a/pytest/common_lib/shared/mpc_node.py +++ b/pytest/common_lib/shared/mpc_node.py @@ -131,6 +131,15 @@ def reset_mpc_data(self): for file_path in pathlib.Path(self.home_dir).glob(pattern): file_path.unlink() + @staticmethod + def _strip_none(obj): + """Recursively remove keys with None values since TOML has no null.""" + if isinstance(obj, dict): + return {k: MpcNode._strip_none(v) for k, v in obj.items() if v is not None} + if
isinstance(obj, list): + return [MpcNode._strip_none(v) for v in obj] + return obj + def _write_start_config(self) -> str: """Build a StartConfig TOML file and write it to the node's home dir. Returns the path to the written config file.""" @@ -149,7 +158,7 @@ def _write_start_config(self) -> str: } config_path = str(pathlib.Path(self.home_dir) / "start_config.toml") with open(config_path, "wb") as f: - tomli_w.dump(start_config, f) + tomli_w.dump(self._strip_none(start_config), f) return config_path def run(self): From 7d45c50ee93c953fd3f01e844b5723cf7b96c074 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 13 Mar 2026 13:11:19 +0100 Subject: [PATCH 70/82] remove extra hosts functionality, see #2438 --- crates/tee-launcher/src/main.rs | 10 ++----- crates/tee-launcher/src/types.rs | 49 -------------------------------- 2 files changed, 3 insertions(+), 56 deletions(-) diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 8e60a8bcc..0df736096 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -378,7 +378,6 @@ fn docker_run_args( format!("{host_path}:{MPC_CONFIG_CONTAINER_PATH}:ro"), ]); - cmd.extend(docker_flags.extra_hosts.docker_args()); cmd.extend(docker_flags.port_mappings.docker_args()); // Container run configuration @@ -481,15 +480,13 @@ mod tests { fn empty_docker_flags() -> DockerLaunchFlags { serde_json::from_value(serde_json::json!({ - "extra_hosts": {"hosts": []}, "port_mappings": {"ports": []} })) .unwrap() } - fn docker_flags_with_host_and_port() -> DockerLaunchFlags { + fn docker_flags_with_port() -> DockerLaunchFlags { serde_json::from_value(serde_json::json!({ - "extra_hosts": {"hosts": [{"hostname": {"Domain": "node1"}, "ip": "192.168.1.1"}]}, "port_mappings": {"ports": [{"src": 11780, "dst": 11780}]} })) .unwrap() @@ -625,9 +622,9 @@ mod tests { } #[test] - fn includes_ports_and_extra_hosts() { + fn includes_ports() { // given - let flags = docker_flags_with_host_and_port(); 
+ let flags = docker_flags_with_port(); let digest = sample_digest(); // when @@ -640,7 +637,6 @@ mod tests { // then let joined = args.join(" "); - assert!(joined.contains("--add-host node1:192.168.1.1")); assert!(joined.contains("-p 11780:11780")); } diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index 29b6510e0..bfd2f29cb 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -71,27 +71,9 @@ pub struct LauncherConfig { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct DockerLaunchFlags { - pub extra_hosts: ExtraHosts, pub port_mappings: PortMappings, } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub(crate) struct ExtraHosts { - hosts: Vec, -} - -impl ExtraHosts { - /// Returns `["--add-host", "h1:ip1", "--add-host", "h2:ip2", ...]`. - pub fn docker_args(&self) -> Vec { - self.hosts - .iter() - .flat_map(|HostEntry { hostname, ip }| { - ["--add-host".into(), format!("{hostname}:{ip}")] - }) - .collect() - } -} - /// A `--add-host` entry: `hostname:IPv4`. 
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct HostEntry { @@ -220,35 +202,6 @@ mod tests { // --- docker_args output format --- - #[test] - fn extra_hosts_docker_args_format() { - // given - let hosts = ExtraHosts { - hosts: vec![HostEntry { - hostname: url::Host::Domain("node.local".into()), - ip: Ipv4Addr::new(192, 168, 1, 1), - }], - }; - - // when - let args = hosts.docker_args(); - - // then - assert_eq!(args, vec!["--add-host", "node.local:192.168.1.1"]); - } - - #[test] - fn empty_extra_hosts_produces_no_docker_args() { - // given - let hosts = ExtraHosts { hosts: vec![] }; - - // when - let args = hosts.docker_args(); - - // then - assert!(args.is_empty()); - } - #[test] fn port_mappings_docker_args_format() { // given @@ -282,7 +235,6 @@ "mpc_hash_override": null }, "docker_command_config": { - "extra_hosts": {"hosts": [{"hostname": {"Domain": "node1"}, "ip": "192.168.1.1"}]}, "port_mappings": {"ports": [{"src": 11780, "dst": 11780}]} }, "mpc_config_file": "/tapp/mpc-config.json" @@ -312,7 +264,6 @@ "mpc_hash_override": null }, "docker_command_config": { - "extra_hosts": {"hosts": []}, "port_mappings": {"ports": []} } }); From 4fcfdc6213b989ba221784a844ad77763173034a Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 13 Mar 2026 21:39:54 +0100 Subject: [PATCH 71/82] use compose template file --- Cargo.lock | 1 + crates/tee-launcher/Cargo.toml | 1 + .../docker-compose.tee.template.yml | 20 ++ .../tee-launcher/docker-compose.template.yml | 17 ++ crates/tee-launcher/src/error.rs | 9 + crates/tee-launcher/src/main.rs | 228 +++++++----------- crates/tee-launcher/src/types.rs | 29 +-- 7 files changed, 146 insertions(+), 159 deletions(-) create mode 100644 crates/tee-launcher/docker-compose.tee.template.yml create mode 100644 crates/tee-launcher/docker-compose.template.yml diff --git a/Cargo.lock b/Cargo.lock index 28413386c..4013b8aba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10658,6 +10658,7 @@
dependencies = [ "reqwest 0.12.28", "serde", "serde_json", + "tempfile", "thiserror 2.0.18", "tokio", "tracing", diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index a9e4e2cb9..cf93a02eb 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -20,6 +20,7 @@ launcher-interface = { workspace = true } reqwest = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } +tempfile = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } diff --git a/crates/tee-launcher/docker-compose.tee.template.yml b/crates/tee-launcher/docker-compose.tee.template.yml new file mode 100644 index 000000000..6312625e1 --- /dev/null +++ b/crates/tee-launcher/docker-compose.tee.template.yml @@ -0,0 +1,20 @@ +services: + mpc-node: + image: "{{IMAGE}}" + container_name: "{{CONTAINER_NAME}}" + security_opt: + - no-new-privileges:true + ports: {{PORTS}} + environment: + - "DSTACK_ENDPOINT={{DSTACK_UNIX_SOCKET}}" + volumes: + - "{{MPC_CONFIG_HOST_PATH}}:{{MPC_CONFIG_CONTAINER_PATH}}:ro" + - /tapp:/tapp:ro + - shared-volume:/mnt/shared + - mpc-data:/data + - "{{DSTACK_UNIX_SOCKET}}:{{DSTACK_UNIX_SOCKET}}" + command: ["start-with-config-file", "{{MPC_CONFIG_CONTAINER_PATH}}"] + +volumes: + shared-volume: + mpc-data: diff --git a/crates/tee-launcher/docker-compose.template.yml b/crates/tee-launcher/docker-compose.template.yml new file mode 100644 index 000000000..29b44651e --- /dev/null +++ b/crates/tee-launcher/docker-compose.template.yml @@ -0,0 +1,17 @@ +services: + mpc-node: + image: "{{IMAGE}}" + container_name: "{{CONTAINER_NAME}}" + security_opt: + - no-new-privileges:true + ports: {{PORTS}} + volumes: + - "{{MPC_CONFIG_HOST_PATH}}:{{MPC_CONFIG_CONTAINER_PATH}}:ro" + - /tapp:/tapp:ro + - shared-volume:/mnt/shared + - mpc-data:/data + command: ["start-with-config-file", "{{MPC_CONFIG_CONTAINER_PATH}}"] + +volumes: + shared-volume: + mpc-data: diff 
--git a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index 02a45a3ff..9ce1c75a2 100644 --- a/crates/tee-launcher/src/error.rs +++ b/crates/tee-launcher/src/error.rs @@ -33,6 +33,15 @@ pub enum LauncherError { source: std::io::Error, }, + #[error("Failed to write {path}: {source}")] + FileWrite { + path: String, + source: std::io::Error, + }, + + #[error("Failed to create temp file: {0}")] + TempFileCreate(std::io::Error), + #[error("Failed to parse {path}: {source}")] JsonParse { path: String, diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 0df736096..3206b0022 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -1,5 +1,6 @@ // A rewrite of launcher.py +use std::io::Write; use std::process::Command; use std::{collections::VecDeque, time::Duration}; @@ -20,6 +21,9 @@ mod docker_types; mod error; mod types; +const COMPOSE_TEMPLATE: &str = include_str!("../docker-compose.template.yml"); +const COMPOSE_TEE_TEMPLATE: &str = include_str!("../docker-compose.tee.template.yml"); + const DOCKER_AUTH_ACCEPT_HEADER_VALUE: HeaderValue = HeaderValue::from_static("application/vnd.docker.distribution.manifest.v2+json"); @@ -352,57 +356,47 @@ async fn validate_image_hash( Ok(()) } -fn docker_run_args( +fn render_compose_file( platform: Platform, mpc_config_file: &std::path::Path, docker_flags: &DockerLaunchFlags, image_digest: &DockerSha256Digest, -) -> Vec { - let mut cmd: Vec = vec![]; +) -> Result { + let template = match platform { + Platform::Tee => COMPOSE_TEE_TEMPLATE, + Platform::NonTee => COMPOSE_TEMPLATE, + }; - if platform == Platform::Tee { - cmd.extend([ - "--env".into(), - format!("DSTACK_ENDPOINT={DSTACK_UNIX_SOCKET}"), - ]); - cmd.extend([ - "-v".into(), - format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"), - ]); - } + let ports: Vec = docker_flags + .port_mappings + .ports + .iter() + .map(|p| p.docker_compose_value()) + .collect(); + let ports_json = 
serde_json::to_string(&ports).expect("port list is serializable"); + + let rendered = template + .replace("{{IMAGE}}", &image_digest.to_string()) + .replace("{{CONTAINER_NAME}}", MPC_CONTAINER_NAME) + .replace( + "{{MPC_CONFIG_HOST_PATH}}", + &mpc_config_file.display().to_string(), + ) + .replace("{{MPC_CONFIG_CONTAINER_PATH}}", MPC_CONFIG_CONTAINER_PATH) + .replace("{{DSTACK_UNIX_SOCKET}}", DSTACK_UNIX_SOCKET) + .replace("{{PORTS}}", &ports_json); + + tracing::info!(compose = %rendered, "rendered docker-compose file"); - // Mount the MPC config file into the container (read-only) - let host_path = mpc_config_file.display(); - cmd.extend([ - "-v".into(), - format!("{host_path}:{MPC_CONFIG_CONTAINER_PATH}:ro"), - ]); - - cmd.extend(docker_flags.port_mappings.docker_args()); - - // Container run configuration - cmd.extend([ - "--security-opt".into(), - "no-new-privileges:true".into(), - "-v".into(), - "/tapp:/tapp:ro".into(), - "-v".into(), - "shared-volume:/mnt/shared".into(), - "-v".into(), - "mpc-data:/data".into(), - "--name".into(), - MPC_CONTAINER_NAME.into(), - "--detach".into(), - image_digest.to_string(), - // Command for the MPC binary: read config from file - "start-with-config-file".into(), - MPC_CONFIG_CONTAINER_PATH.into(), - ]); - - let docker_command_string = cmd.join(" "); - tracing::info!(?docker_command_string, "docker cmd"); - - cmd + let mut file = + tempfile::NamedTempFile::new().map_err(|source| LauncherError::TempFileCreate(source))?; + file.write_all(rendered.as_bytes()) + .map_err(|source| LauncherError::FileWrite { + path: file.path().display().to_string(), + source, + })?; + + Ok(file) } fn launch_mpc_container( @@ -413,16 +407,17 @@ fn launch_mpc_container( ) -> Result<(), LauncherError> { tracing::info!("Launching MPC node with validated hash: {valid_hash}",); - // shutdown container if one is already running + let compose_file = + render_compose_file(platform, mpc_config_file, docker_flags, valid_hash)?; + let compose_path = 
compose_file.path().display().to_string(); + + // Remove any existing container from a previous run (by name, independent of compose file) let _ = Command::new("docker") .args(["rm", "-f", MPC_CONTAINER_NAME]) .output(); - let docker_run_args = docker_run_args(platform, mpc_config_file, docker_flags, valid_hash); - let run_output = Command::new("docker") - .arg("run") - .args(&docker_run_args) + .args(["compose", "-f", &compose_path, "up", "-d"]) .output() .map_err(|inner| LauncherError::DockerRunFailed { image_hash: valid_hash.clone(), @@ -432,7 +427,7 @@ fn launch_mpc_container( if !run_output.status.success() { let stderr = String::from_utf8_lossy(&run_output.stderr); let stdout = String::from_utf8_lossy(&run_output.stdout); - tracing::error!(%stderr, %stdout, "docker run failed"); + tracing::error!(%stderr, %stdout, "docker compose up failed"); return Err(LauncherError::DockerRunFailedExitStatus { image_hash: valid_hash.clone(), output: stderr.into_owned(), @@ -452,13 +447,23 @@ mod tests { use near_mpc_bounded_collections::NonEmptyVec; use crate::constants::*; - use crate::docker_run_args; use crate::error::LauncherError; + use crate::render_compose_file; use crate::select_image_hash; use crate::types::*; const SAMPLE_CONFIG_PATH: &str = "/tapp/mpc-config.json"; + fn render( + platform: Platform, + config_path: &str, + flags: &DockerLaunchFlags, + digest: &DockerSha256Digest, + ) -> String { + let file = render_compose_file(platform, Path::new(config_path), flags, digest).unwrap(); + std::fs::read_to_string(file.path()).unwrap() + } + fn digest(hex_char: char) -> DockerSha256Digest { format!( "sha256:{}", @@ -493,43 +498,31 @@ mod tests { } #[test] - fn tee_mode_includes_dstack_mount() { + fn tee_mode_includes_dstack_env_and_volume() { // given let flags = empty_docker_flags(); let digest = sample_digest(); // when - let args = docker_run_args( - Platform::Tee, - Path::new(SAMPLE_CONFIG_PATH), - &flags, - &digest, - ); + let rendered = render(Platform::Tee, 
SAMPLE_CONFIG_PATH, &flags, &digest); // then - let joined = args.join(" "); - assert!(joined.contains(&format!("DSTACK_ENDPOINT={DSTACK_UNIX_SOCKET}"))); - assert!(joined.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); + assert!(rendered.contains(&format!("DSTACK_ENDPOINT={DSTACK_UNIX_SOCKET}"))); + assert!(rendered.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); } #[test] - fn nontee_mode_excludes_dstack_mount() { + fn nontee_mode_excludes_dstack() { // given let flags = empty_docker_flags(); let digest = sample_digest(); // when - let args = docker_run_args( - Platform::NonTee, - Path::new(SAMPLE_CONFIG_PATH), - &flags, - &digest, - ); + let rendered = render(Platform::NonTee, SAMPLE_CONFIG_PATH, &flags, &digest); // then - let joined = args.join(" "); - assert!(!joined.contains("DSTACK_ENDPOINT=")); - assert!(!joined.contains(&format!("{DSTACK_UNIX_SOCKET}:{DSTACK_UNIX_SOCKET}"))); + assert!(!rendered.contains("DSTACK_ENDPOINT")); + assert!(!rendered.contains(DSTACK_UNIX_SOCKET)); } #[test] @@ -539,21 +532,14 @@ mod tests { let digest = sample_digest(); // when - let args = docker_run_args( - Platform::NonTee, - Path::new(SAMPLE_CONFIG_PATH), - &flags, - &digest, - ); + let rendered = render(Platform::NonTee, SAMPLE_CONFIG_PATH, &flags, &digest); // then - let joined = args.join(" "); - assert!(joined.contains("--security-opt no-new-privileges:true")); - assert!(joined.contains("/tapp:/tapp:ro")); - assert!(joined.contains("shared-volume:/mnt/shared")); - assert!(joined.contains("mpc-data:/data")); - assert!(joined.contains(&format!("--name {MPC_CONTAINER_NAME}"))); - assert!(joined.contains("--detach")); + assert!(rendered.contains("no-new-privileges:true")); + assert!(rendered.contains("/tapp:/tapp:ro")); + assert!(rendered.contains("shared-volume:/mnt/shared")); + assert!(rendered.contains("mpc-data:/data")); + assert!(rendered.contains(&format!("container_name: \"{MPC_CONTAINER_NAME}\""))); } #[test] @@ -563,16 +549,10 @@ 
mod tests { let digest = sample_digest(); // when - let args = docker_run_args( - Platform::NonTee, - Path::new(SAMPLE_CONFIG_PATH), - &flags, - &digest, - ); + let rendered = render(Platform::NonTee, SAMPLE_CONFIG_PATH, &flags, &digest); // then - let joined = args.join(" "); - assert!(joined.contains(&format!( + assert!(rendered.contains(&format!( "{SAMPLE_CONFIG_PATH}:{MPC_CONFIG_CONTAINER_PATH}:ro" ))); } @@ -584,41 +564,24 @@ mod tests { let digest = sample_digest(); // when - let args = docker_run_args( - Platform::NonTee, - Path::new(SAMPLE_CONFIG_PATH), - &flags, - &digest, - ); + let rendered = render(Platform::NonTee, SAMPLE_CONFIG_PATH, &flags, &digest); // then - let joined = args.join(" "); - assert!(joined.contains(&format!( - "start-with-config-file {MPC_CONFIG_CONTAINER_PATH}" - ))); + assert!(rendered.contains("start-with-config-file")); + assert!(rendered.contains(MPC_CONFIG_CONTAINER_PATH)); } #[test] - fn image_digest_appears_before_command() { + fn image_is_set() { // given let flags = empty_docker_flags(); let digest = sample_digest(); // when - let args = docker_run_args( - Platform::NonTee, - Path::new(SAMPLE_CONFIG_PATH), - &flags, - &digest, - ); + let rendered = render(Platform::NonTee, SAMPLE_CONFIG_PATH, &flags, &digest); - // then - image digest should appear before "start-with-config-file" - let digest_pos = args.iter().position(|a| a == &digest.to_string()).unwrap(); - let cmd_pos = args - .iter() - .position(|a| a == "start-with-config-file") - .unwrap(); - assert!(digest_pos < cmd_pos); + // then + assert!(rendered.contains(&format!("image: \"{digest}\""))); } #[test] @@ -628,42 +591,23 @@ mod tests { let digest = sample_digest(); // when - let args = docker_run_args( - Platform::NonTee, - Path::new(SAMPLE_CONFIG_PATH), - &flags, - &digest, - ); + let rendered = render(Platform::NonTee, SAMPLE_CONFIG_PATH, &flags, &digest); // then - let joined = args.join(" "); - assert!(joined.contains("-p 11780:11780")); + 
assert!(rendered.contains("11780:11780")); } #[test] - fn no_env_vars_forwarded_for_mpc_config() { + fn no_env_section_in_nontee_mode() { // given let flags = empty_docker_flags(); let digest = sample_digest(); // when - let args = docker_run_args( - Platform::NonTee, - Path::new(SAMPLE_CONFIG_PATH), - &flags, - &digest, - ); + let rendered = render(Platform::NonTee, SAMPLE_CONFIG_PATH, &flags, &digest); - // then - no MPC_* env vars should be present (only DSTACK_ENDPOINT in TEE mode) - let env_args: Vec<&String> = args - .windows(2) - .filter(|w| w[0] == "--env") - .map(|w| &w[1]) - .collect(); - assert!( - env_args.is_empty(), - "expected no --env args in non-TEE mode, got: {env_args:?}" - ); + // then + assert!(!rendered.contains("environment:")); } // --- select_image_hash --- diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index bfd2f29cb..09e196020 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -82,7 +82,7 @@ pub struct HostEntry { } #[derive(Debug, Clone, Serialize, Deserialize)] -pub(crate) struct PortMappings { +pub struct PortMappings { pub ports: Vec, } @@ -92,13 +92,10 @@ pub struct PortMapping { dst: NonZeroU16, } -impl PortMappings { - /// Returns `["-p", "src1:dst1", "-p", "src2:dst2", ...]`. - pub fn docker_args(&self) -> Vec { - self.ports - .iter() - .flat_map(|PortMapping { src, dst }| ["-p".into(), format!("{src}:{dst}")]) - .collect() +impl PortMapping { + /// Returns e.g. `"11780:11780"` for use in docker-compose port lists. 
+ pub fn docker_compose_value(&self) -> String { + format!("{}:{}", self.src, self.dst) } } @@ -200,23 +197,21 @@ mod tests { assert_matches!(result, Err(_)); } - // --- docker_args output format --- + // --- docker_compose_value output format --- #[test] - fn port_mappings_docker_args_format() { + fn port_mapping_docker_compose_value() { // given - let mappings = PortMappings { - ports: vec![PortMapping { - src: NonZeroU16::new(11780).unwrap(), - dst: NonZeroU16::new(11780).unwrap(), - }], + let mapping = PortMapping { + src: NonZeroU16::new(11780).unwrap(), + dst: NonZeroU16::new(11780).unwrap(), }; // when - let args = mappings.docker_args(); + let value = mapping.docker_compose_value(); // then - assert_eq!(args, vec!["-p", "11780:11780"]); + assert_eq!(value, "11780:11780"); } // --- Config full deserialization --- From eca6da3b68b8e7bbd7285e6eb60d5b15e30f37ef Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 13 Mar 2026 22:01:05 +0100 Subject: [PATCH 72/82] create file and forward content in the file --- crates/tee-launcher/src/constants.rs | 2 +- crates/tee-launcher/src/main.rs | 13 +++++++++---- crates/tee-launcher/src/types.rs | 17 ++++++++--------- 3 files changed, 18 insertions(+), 14 deletions(-) diff --git a/crates/tee-launcher/src/constants.rs b/crates/tee-launcher/src/constants.rs index a158586d1..9cdf69794 100644 --- a/crates/tee-launcher/src/constants.rs +++ b/crates/tee-launcher/src/constants.rs @@ -4,4 +4,4 @@ pub(crate) const DSTACK_UNIX_SOCKET: &str = "/var/run/dstack.sock"; pub(crate) const DSTACK_USER_CONFIG_FILE: &str = "/tapp/user_config"; /// Path inside the container where the MPC config file is bind-mounted. 
-pub(crate) const MPC_CONFIG_CONTAINER_PATH: &str = "/mnt/shared/mpc-config.json"; +pub(crate) const MPC_CONFIG_CONTAINER_PATH: &str = "/mnt/shared/mpc-config"; diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 3206b0022..e135f0ee2 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -13,6 +13,7 @@ use constants::*; use docker_types::*; use error::*; use reqwest::header::{ACCEPT, AUTHORIZATION, HeaderMap, HeaderValue}; +use tempfile::NamedTempFile; use types::*; use url::Url; @@ -120,10 +121,15 @@ async fn run() -> Result<(), LauncherError> { .map_err(|e| LauncherError::DstackEmitEventFailed(e.to_string()))?; } + let mut mpc_binary_config_file = NamedTempFile::new().expect("file creation works"); + mpc_binary_config_file + .write(dstack_config.mpc_config_content.as_bytes()) + .expect("writing to file works"); + launch_mpc_container( args.platform, &image_hash, - &dstack_config.mpc_config_file, + mpc_binary_config_file.path(), &dstack_config.docker_command_config, )?; @@ -371,7 +377,7 @@ fn render_compose_file( .port_mappings .ports .iter() - .map(|p| p.docker_compose_value()) + .map(PortMapping::docker_compose_value) .collect(); let ports_json = serde_json::to_string(&ports).expect("port list is serializable"); @@ -407,8 +413,7 @@ fn launch_mpc_container( ) -> Result<(), LauncherError> { tracing::info!("Launching MPC node with validated hash: {valid_hash}",); - let compose_file = - render_compose_file(platform, mpc_config_file, docker_flags, valid_hash)?; + let compose_file = render_compose_file(platform, mpc_config_file, docker_flags, valid_hash)?; let compose_path = compose_file.path().display().to_string(); // Remove any existing container from a previous run (by name, independent of compose file) diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index 09e196020..82089acb7 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -1,6 
+1,5 @@ use std::net::Ipv4Addr; use std::num::NonZeroU16; -use std::path::PathBuf; use launcher_interface::types::DockerSha256Digest; use url::Host; @@ -45,10 +44,10 @@ pub enum Platform { pub struct Config { pub launcher_config: LauncherConfig, pub docker_command_config: DockerLaunchFlags, - /// Path to the MPC node JSON config file on the host. - /// This file is mounted into the container and passed via - /// `start-with-config-file ` to the MPC binary. - pub mpc_config_file: PathBuf, + /// Inline MPC node config content (opaque to the launcher). + /// Written to a temporary file on disk, mounted into the container, + /// and passed via `start-with-config-file ` to the MPC binary. + pub mpc_config_content: String, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -88,8 +87,8 @@ pub struct PortMappings { #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct PortMapping { - src: NonZeroU16, - dst: NonZeroU16, + pub(crate) src: NonZeroU16, + pub(crate) dst: NonZeroU16, } impl PortMapping { @@ -232,7 +231,7 @@ mod tests { "docker_command_config": { "port_mappings": {"ports": [{"src": 11780, "dst": 11780}]} }, - "mpc_config_file": "/tapp/mpc-config.json" + "mpc_config_file": "[some_config = true]" }); // when @@ -241,7 +240,7 @@ mod tests { // then assert_matches!(result, Ok(config) => { assert_eq!(config.launcher_config.image_name, "nearone/mpc-node"); - assert_eq!(config.mpc_config_file, PathBuf::from("/tapp/mpc-config.json")); + assert_eq!(config.mpc_config_content, "[some_config = true]"); }); } From 75343925d4f595bbc6c6e982634fc169771d32e0 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 13 Mar 2026 22:07:34 +0100 Subject: [PATCH 73/82] remove rewrite comment --- crates/tee-launcher/src/main.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index e135f0ee2..98182d8c8 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -1,5 +1,3 
@@ -// A rewrite of launcher.py - use std::io::Write; use std::process::Command; use std::{collections::VecDeque, time::Duration}; From 515d4257f41863c9975ac9049f8115fcfe0a8726 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 13 Mar 2026 22:11:27 +0100 Subject: [PATCH 74/82] use write_all --- crates/tee-launcher/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index 98182d8c8..cfdd3897e 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -121,7 +121,7 @@ async fn run() -> Result<(), LauncherError> { let mut mpc_binary_config_file = NamedTempFile::new().expect("file creation works"); mpc_binary_config_file - .write(dstack_config.mpc_config_content.as_bytes()) + .write_all(dstack_config.mpc_config_content.as_bytes()) .expect("writing to file works"); launch_mpc_container( From 12d7e478ad2938977ee83df07432ddcd9a64ff9f Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 13 Mar 2026 22:13:37 +0100 Subject: [PATCH 75/82] dont use tempfile for passed config, since it gets dropped --- crates/tee-launcher/src/main.rs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index cfdd3897e..a3b94ecfd 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -11,7 +11,7 @@ use constants::*; use docker_types::*; use error::*; use reqwest::header::{ACCEPT, AUTHORIZATION, HeaderMap, HeaderValue}; -use tempfile::NamedTempFile; + use types::*; use url::Url; @@ -119,15 +119,17 @@ async fn run() -> Result<(), LauncherError> { .map_err(|e| LauncherError::DstackEmitEventFailed(e.to_string()))?; } - let mut mpc_binary_config_file = NamedTempFile::new().expect("file creation works"); - mpc_binary_config_file - .write_all(dstack_config.mpc_config_content.as_bytes()) - .expect("writing to file works"); + let mpc_binary_config_path = 
std::path::Path::new("/tmp/mpc-config"); + std::fs::write(mpc_binary_config_path, dstack_config.mpc_config_content.as_bytes()) + .map_err(|source| LauncherError::FileWrite { + path: mpc_binary_config_path.display().to_string(), + source, + })?; launch_mpc_container( args.platform, &image_hash, - mpc_binary_config_file.path(), + mpc_binary_config_path, &dstack_config.docker_command_config, )?; From 1e7ec24d0fffaf8be83ed13107cb37fde107a3d2 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 13 Mar 2026 22:37:37 +0100 Subject: [PATCH 76/82] change to toml and forward toml file --- Cargo.lock | 1 + crates/tee-launcher/Cargo.toml | 1 + crates/tee-launcher/src/error.rs | 6 ++ crates/tee-launcher/src/main.rs | 25 +++---- crates/tee-launcher/src/types.rs | 108 ++++++++++++++++++++----------- deployment/localnet/tee/sam.json | 36 ----------- deployment/localnet/tee/sam.toml | 56 ++++++++++++++++ deployment/testnet/sam.json | 37 ----------- deployment/testnet/sam.toml | 57 ++++++++++++++++ 9 files changed, 203 insertions(+), 124 deletions(-) delete mode 100644 deployment/localnet/tee/sam.json create mode 100644 deployment/localnet/tee/sam.toml delete mode 100644 deployment/testnet/sam.json create mode 100644 deployment/testnet/sam.toml diff --git a/Cargo.lock b/Cargo.lock index 4013b8aba..2cbcd7bee 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10661,6 +10661,7 @@ dependencies = [ "tempfile", "thiserror 2.0.18", "tokio", + "toml 1.0.6+spec-1.1.0", "tracing", "tracing-subscriber", "url", diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index cf93a02eb..a06018d7a 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -20,6 +20,7 @@ launcher-interface = { workspace = true } reqwest = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } +toml = { workspace = true } tempfile = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } diff --git 
a/crates/tee-launcher/src/error.rs b/crates/tee-launcher/src/error.rs index 9ce1c75a2..03295a6c8 100644 --- a/crates/tee-launcher/src/error.rs +++ b/crates/tee-launcher/src/error.rs @@ -48,6 +48,12 @@ pub enum LauncherError { source: serde_json::Error, }, + #[error("Failed to parse {path}: {source}")] + TomlParse { + path: String, + source: toml::de::Error, + }, + #[error("HTTP error: {0}")] Http(#[from] reqwest::Error), diff --git a/crates/tee-launcher/src/main.rs b/crates/tee-launcher/src/main.rs index a3b94ecfd..cb670a47d 100644 --- a/crates/tee-launcher/src/main.rs +++ b/crates/tee-launcher/src/main.rs @@ -53,17 +53,16 @@ async fn run() -> Result<(), LauncherError> { tracing::info!(platform = ?args.platform, "starting launcher"); - // Load dstack user config - let config_file = std::fs::OpenOptions::new() - .read(true) - .open(DSTACK_USER_CONFIG_FILE) - .map_err(|source| LauncherError::FileRead { + // Load dstack user config (TOML) + let config_contents = std::fs::read_to_string(DSTACK_USER_CONFIG_FILE).map_err(|source| { + LauncherError::FileRead { path: DSTACK_USER_CONFIG_FILE.to_string(), source, - })?; + } + })?; let dstack_config: Config = - serde_json::from_reader(config_file).map_err(|source| LauncherError::JsonParse { + toml::from_str(&config_contents).map_err(|source| LauncherError::TomlParse { path: DSTACK_USER_CONFIG_FILE.to_string(), source, })?; @@ -120,11 +119,14 @@ async fn run() -> Result<(), LauncherError> { } let mpc_binary_config_path = std::path::Path::new("/tmp/mpc-config"); - std::fs::write(mpc_binary_config_path, dstack_config.mpc_config_content.as_bytes()) - .map_err(|source| LauncherError::FileWrite { + let mpc_config_toml = toml::to_string(&dstack_config.mpc_config) + .expect("re-serializing a toml::Table always succeeds"); + std::fs::write(mpc_binary_config_path, mpc_config_toml.as_bytes()).map_err(|source| { + LauncherError::FileWrite { path: mpc_binary_config_path.display().to_string(), source, - })?; + } + })?; 
launch_mpc_container( args.platform, @@ -394,8 +396,7 @@ fn render_compose_file( tracing::info!(compose = %rendered, "rendered docker-compose file"); - let mut file = - tempfile::NamedTempFile::new().map_err(|source| LauncherError::TempFileCreate(source))?; + let mut file = tempfile::NamedTempFile::new().map_err(LauncherError::TempFileCreate)?; file.write_all(rendered.as_bytes()) .map_err(|source| LauncherError::FileWrite { path: file.path().display().to_string(), diff --git a/crates/tee-launcher/src/types.rs b/crates/tee-launcher/src/types.rs index 82089acb7..c85480eb3 100644 --- a/crates/tee-launcher/src/types.rs +++ b/crates/tee-launcher/src/types.rs @@ -44,10 +44,11 @@ pub enum Platform { pub struct Config { pub launcher_config: LauncherConfig, pub docker_command_config: DockerLaunchFlags, - /// Inline MPC node config content (opaque to the launcher). - /// Written to a temporary file on disk, mounted into the container, - /// and passed via `start-with-config-file ` to the MPC binary. - pub mpc_config_content: String, + /// Opaque MPC node configuration table. + /// The launcher does not interpret these fields — they are re-serialized + /// to a TOML string, written to a file on disk, and mounted into the + /// container for the MPC binary to consume via `start-with-config-file`. 
+ pub mpc_config: toml::Table, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -213,57 +214,86 @@ mod tests { assert_eq!(value, "11780:11780"); } - // --- Config full deserialization --- + // --- Config full deserialization (TOML) --- #[test] - fn config_deserializes_valid_json() { + fn config_deserializes_valid_toml() { // given - let json = serde_json::json!({ - "launcher_config": { - "image_tags": ["tag1"], - "image_name": "nearone/mpc-node", - "registry": "registry.hub.docker.com", - "rpc_request_timeout_secs": 10, - "rpc_request_interval_secs": 1, - "rpc_max_attempts": 20, - "mpc_hash_override": null - }, - "docker_command_config": { - "port_mappings": {"ports": [{"src": 11780, "dst": 11780}]} - }, - "mpc_config_file": "[some_config = true]" - }); + let toml_str = r#" +[launcher_config] +image_tags = ["tag1"] +image_name = "nearone/mpc-node" +registry = "registry.hub.docker.com" +rpc_request_timeout_secs = 10 +rpc_request_interval_secs = 1 +rpc_max_attempts = 20 + +[docker_command_config.port_mappings] +ports = [{ src = 11780, dst = 11780 }] + +[mpc_config] +home_dir = "/data" +some_opaque_field = true +"#; // when - let result = serde_json::from_value::(json); + let result = toml::from_str::(toml_str); // then assert_matches!(result, Ok(config) => { assert_eq!(config.launcher_config.image_name, "nearone/mpc-node"); - assert_eq!(config.mpc_config_content, "[some_config = true]"); + assert_eq!(config.mpc_config["home_dir"].as_str(), Some("/data")); + assert_eq!(config.mpc_config["some_opaque_field"].as_bool(), Some(true)); }); } + #[test] + fn config_mpc_config_round_trips_to_toml_string() { + // given + let toml_str = r#" +[launcher_config] +image_tags = ["tag1"] +image_name = "nearone/mpc-node" +registry = "registry.hub.docker.com" +rpc_request_timeout_secs = 10 +rpc_request_interval_secs = 1 +rpc_max_attempts = 20 + +[docker_command_config.port_mappings] +ports = [{ src = 11780, dst = 11780 }] + +[mpc_config] +home_dir = "/data" +arbitrary_key = 
"arbitrary_value" +"#; + let config: Config = toml::from_str(toml_str).unwrap(); + + // when — re-serialize the opaque table (what the launcher writes to disk) + let serialized = toml::to_string(&config.mpc_config).unwrap(); + + // then + assert!(serialized.contains("home_dir")); + assert!(serialized.contains("arbitrary_key")); + } + #[test] fn config_rejects_missing_required_field() { - // given - mpc_config_file is missing - let json = serde_json::json!({ - "launcher_config": { - "image_tags": ["tag1"], - "image_name": "nearone/mpc-node", - "registry": "registry.hub.docker.com", - "rpc_request_timeout_secs": 10, - "rpc_request_interval_secs": 1, - "rpc_max_attempts": 20, - "mpc_hash_override": null - }, - "docker_command_config": { - "port_mappings": {"ports": []} - } - }); + // given - mpc_config is missing + let toml_str = r#" +[launcher_config] +image_tags = ["tag1"] +image_name = "nearone/mpc-node" +registry = "registry.hub.docker.com" +rpc_request_timeout_secs = 10 +rpc_request_interval_secs = 1 +rpc_max_attempts = 20 + +[docker_command_config.port_mappings] +ports = [] +"#; // when - let result = serde_json::from_value::(json); + let result = toml::from_str::(toml_str); // then assert_matches!(result, Err(_)); diff --git a/deployment/localnet/tee/sam.json b/deployment/localnet/tee/sam.json deleted file mode 100644 index a2ab8f528..000000000 --- a/deployment/localnet/tee/sam.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "launcher_config": { - "image_tags": ["main-260e88b"], - "image_name": "nearone/mpc-node", - "registry": "registry.hub.docker.com", - "rpc_request_timeout_secs": 10, - "rpc_request_interval_secs": 1, - "rpc_max_attempts": 20, - "mpc_hash_override": null - }, - "docker_command_config": { - "extra_hosts": { - "hosts": [] - }, - "port_mappings": { - "ports": [ - { "src": 8080, "dst": 8080 }, - { "src": 24566, "dst": 24566 }, - { "src": 13002, "dst": 13002 } - ] - } - }, - "mpc_passthrough_env": { - "mpc_account_id": "sam.test.near", - 
"mpc_local_address": "127.0.0.1", - "mpc_secret_key_store": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", - "mpc_backup_encryption_key_hex": "0000000000000000000000000000000000000000000000000000000000000000", - "mpc_env": "Localnet", - "mpc_home_dir": "/data", - "mpc_contract_id": "mpc-contract.test.near", - "mpc_responder_id": "sam.test.near", - "near_boot_nodes": "ed25519:BGa4WiBj43Mr66f9Ehf6swKtR6wZmWuwCsV3s4PSR3nx@${MACHINE_IP}:24566", - "rust_backtrace": "full", - "rust_log": "info" - } -} diff --git a/deployment/localnet/tee/sam.toml b/deployment/localnet/tee/sam.toml new file mode 100644 index 000000000..f9bc42440 --- /dev/null +++ b/deployment/localnet/tee/sam.toml @@ -0,0 +1,56 @@ +[launcher_config] +image_tags = ["main-260e88b"] +image_name = "nearone/mpc-node" +registry = "registry.hub.docker.com" +rpc_request_timeout_secs = 10 +rpc_request_interval_secs = 1 +rpc_max_attempts = 20 + +[docker_command_config.port_mappings] +ports = [ + { src = 8080, dst = 8080 }, + { src = 24566, dst = 24566 }, + { src = 13002, dst = 13002 }, +] + +[mpc_config] +home_dir = "/data" + +[mpc_config.secrets] +secret_store_key_hex = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +backup_encryption_key_hex = "0000000000000000000000000000000000000000000000000000000000000000" + +[mpc_config.tee.authority] +type = "local" + +[mpc_config.node] +my_near_account_id = "sam.test.near" +near_responder_account_id = "sam.test.near" +number_of_responder_keys = 1 +web_ui = "0.0.0.0:8080" +migration_web_ui = "0.0.0.0:8078" +cores = 4 + +[mpc_config.node.indexer] +validate_genesis = false +sync_mode = "Latest" +concurrency = 1 +mpc_contract_id = "mpc-contract.test.near" +finality = "optimistic" + +[mpc_config.node.triple] +concurrency = 2 +desired_triples_to_buffer = 128 +timeout_sec = 60 +parallel_triple_generation_stagger_time_sec = 1 + +[mpc_config.node.presignature] +concurrency = 4 +desired_presignatures_to_buffer = 64 +timeout_sec = 60 + +[mpc_config.node.signature] +timeout_sec = 60 + +[mpc_config.node.ckd] 
+timeout_sec = 60 diff --git a/deployment/testnet/sam.json b/deployment/testnet/sam.json deleted file mode 100644 index 9b96a09a1..000000000 --- a/deployment/testnet/sam.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "launcher_config": { - "image_tags": ["barak-doc-update_localnet_guide-b12bc7d"], - "image_name": "nearone/mpc-node", - "registry": "registry.hub.docker.com", - "rpc_request_timeout_secs": 10, - "rpc_request_interval_secs": 1, - "rpc_max_attempts": 20, - "mpc_hash_override": null - }, - "docker_command_config": { - "extra_hosts": { - "hosts": [] - }, - "port_mappings": { - "ports": [ - { "src": 8080, "dst": 8080 }, - { "src": 24567, "dst": 24567 }, - { "src": 13002, "dst": 13002 }, - { "src": 80, "dst": 80 } - ] - } - }, - "mpc_passthrough_env": { - "mpc_account_id": "$SAM_ACCOUNT", - "mpc_local_address": "127.0.0.1", - "mpc_secret_key_store": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", - "mpc_backup_encryption_key_hex": "0000000000000000000000000000000000000000000000000000000000000000", - "mpc_env": "Testnet", - "mpc_home_dir": "/data", - "mpc_contract_id": "$MPC_CONTRACT_ACCOUNT", - "mpc_responder_id": "$SAM_ACCOUNT", - "near_boot_nodes": "$BOOTNODES", - "rust_backtrace": "full", - "rust_log": "info" - } -} diff --git a/deployment/testnet/sam.toml b/deployment/testnet/sam.toml new file mode 100644 index 000000000..5d9c9586c --- /dev/null +++ b/deployment/testnet/sam.toml @@ -0,0 +1,57 @@ +[launcher_config] +image_tags = ["barak-doc-update_localnet_guide-b12bc7d"] +image_name = "nearone/mpc-node" +registry = "registry.hub.docker.com" +rpc_request_timeout_secs = 10 +rpc_request_interval_secs = 1 +rpc_max_attempts = 20 + +[docker_command_config.port_mappings] +ports = [ + { src = 8080, dst = 8080 }, + { src = 24567, dst = 24567 }, + { src = 13002, dst = 13002 }, + { src = 80, dst = 80 }, +] + +[mpc_config] +home_dir = "/data" + +[mpc_config.secrets] +secret_store_key_hex = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +backup_encryption_key_hex = 
"0000000000000000000000000000000000000000000000000000000000000000" + +[mpc_config.tee.authority] +type = "local" + +[mpc_config.node] +my_near_account_id = "$SAM_ACCOUNT" +near_responder_account_id = "$SAM_ACCOUNT" +number_of_responder_keys = 1 +web_ui = "0.0.0.0:8080" +migration_web_ui = "0.0.0.0:8078" +cores = 4 + +[mpc_config.node.indexer] +validate_genesis = false +sync_mode = "Latest" +concurrency = 1 +mpc_contract_id = "$MPC_CONTRACT_ACCOUNT" +finality = "optimistic" + +[mpc_config.node.triple] +concurrency = 2 +desired_triples_to_buffer = 128 +timeout_sec = 60 +parallel_triple_generation_stagger_time_sec = 1 + +[mpc_config.node.presignature] +concurrency = 4 +desired_presignatures_to_buffer = 64 +timeout_sec = 60 + +[mpc_config.node.signature] +timeout_sec = 60 + +[mpc_config.node.ckd] +timeout_sec = 60 From e7f69b7309513c96f39fb26a9f3dd9cacac2a1c9 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 13 Mar 2026 22:46:46 +0100 Subject: [PATCH 77/82] update launcher hash :) --- tee_launcher/launcher_docker_compose.yaml | 2 +- tee_launcher/launcher_docker_compose_nontee.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tee_launcher/launcher_docker_compose.yaml b/tee_launcher/launcher_docker_compose.yaml index db24475aa..b59b44b32 100644 --- a/tee_launcher/launcher_docker_compose.yaml +++ b/tee_launcher/launcher_docker_compose.yaml @@ -2,7 +2,7 @@ version: '3.8' services: launcher: - image: nearone/mpc-launcher@sha256:84c7537a2f84d3477eac2e5ef3ba0765b5d688f86096947eea4744ce25b27054 + image: nearone/mpc-launcher@sha256:85a4fa6d1eec05e8f43dba17d3f4368f89719a2a06b9e2051d84813c3f651068 container_name: launcher diff --git a/tee_launcher/launcher_docker_compose_nontee.yaml b/tee_launcher/launcher_docker_compose_nontee.yaml index 48b0bc4fc..e3fa51311 100644 --- a/tee_launcher/launcher_docker_compose_nontee.yaml +++ b/tee_launcher/launcher_docker_compose_nontee.yaml @@ -1,6 +1,6 @@ services: launcher: - image: 
nearone/mpc-launcher@sha256:84c7537a2f84d3477eac2e5ef3ba0765b5d688f86096947eea4744ce25b27054 + image: nearone/mpc-launcher@sha256:85a4fa6d1eec05e8f43dba17d3f4368f89719a2a06b9e2051d84813c3f651068 container_name: "${LAUNCHER_IMAGE_NAME}" environment: From 70989692ee464540863d8b28be30de1fc583300b Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Fri, 13 Mar 2026 22:47:27 +0100 Subject: [PATCH 78/82] sort deps --- crates/launcher-interface/Cargo.toml | 2 +- crates/tee-launcher/Cargo.toml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/launcher-interface/Cargo.toml b/crates/launcher-interface/Cargo.toml index 85572724e..db901977d 100644 --- a/crates/launcher-interface/Cargo.toml +++ b/crates/launcher-interface/Cargo.toml @@ -5,9 +5,9 @@ edition.workspace = true license.workspace = true [dependencies] -near-mpc-bounded-collections = { workspace = true } derive_more = { workspace = true } mpc-primitives = { workspace = true } +near-mpc-bounded-collections = { workspace = true } serde = { workspace = true } thiserror = { workspace = true } diff --git a/crates/tee-launcher/Cargo.toml b/crates/tee-launcher/Cargo.toml index a06018d7a..e0bc44094 100644 --- a/crates/tee-launcher/Cargo.toml +++ b/crates/tee-launcher/Cargo.toml @@ -13,17 +13,17 @@ integration-test = [] [dependencies] backon = { workspace = true } -near-mpc-bounded-collections = { workspace = true } clap = { workspace = true } dstack-sdk = { workspace = true } launcher-interface = { workspace = true } +near-mpc-bounded-collections = { workspace = true } reqwest = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } -toml = { workspace = true } tempfile = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } +toml = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } url = { workspace = true, features = ["serde"] } From f5d9d5c11e163616df6a0e281ff2aac597292e1b Mon Sep 17 00:00:00 
2001 From: Daniel Sharifi Date: Sun, 15 Mar 2026 14:26:33 +0100 Subject: [PATCH 79/82] chore: add frodo.toml file --- deployment/testnet/frodo.toml | 57 +++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 deployment/testnet/frodo.toml diff --git a/deployment/testnet/frodo.toml b/deployment/testnet/frodo.toml new file mode 100644 index 000000000..e3a7599ce --- /dev/null +++ b/deployment/testnet/frodo.toml @@ -0,0 +1,57 @@ +[launcher_config] +image_tags = ["barak-doc-update_localnet_guide-b12bc7d"] +image_name = "nearone/mpc-node" +registry = "registry.hub.docker.com" +rpc_request_timeout_secs = 10 +rpc_request_interval_secs = 1 +rpc_max_attempts = 20 + +[docker_command_config.port_mappings] +ports = [ + { src = 8080, dst = 8080 }, + { src = 24567, dst = 24567 }, + { src = 13001, dst = 13001 }, + { src = 80, dst = 80 }, +] + +[mpc_config] +home_dir = "/data" + +[mpc_config.secrets] +secret_store_key_hex = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +backup_encryption_key_hex = "0000000000000000000000000000000000000000000000000000000000000000" + +[mpc_config.tee.authority] +type = "local" + +[mpc_config.node] +my_near_account_id = "$FRODO_ACCOUNT" +near_responder_account_id = "$FRODO_ACCOUNT" +number_of_responder_keys = 1 +web_ui = "0.0.0.0:8080" +migration_web_ui = "0.0.0.0:8078" +cores = 4 + +[mpc_config.node.indexer] +validate_genesis = false +sync_mode = "Latest" +concurrency = 1 +mpc_contract_id = "$MPC_CONTRACT_ACCOUNT" +finality = "optimistic" + +[mpc_config.node.triple] +concurrency = 2 +desired_triples_to_buffer = 128 +timeout_sec = 60 +parallel_triple_generation_stagger_time_sec = 1 + +[mpc_config.node.presignature] +concurrency = 4 +desired_presignatures_to_buffer = 64 +timeout_sec = 60 + +[mpc_config.node.signature] +timeout_sec = 60 + +[mpc_config.node.ckd] +timeout_sec = 60 From 1ee4ba436a2d375547cd090dea62fca12b72c816 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Sun, 15 Mar 2026 15:15:41 +0100 Subject: [PATCH 80/82] 
fix: install openssl --- deployment/Dockerfile-launcher | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment/Dockerfile-launcher b/deployment/Dockerfile-launcher index 30bb734e4..b39c42608 100644 --- a/deployment/Dockerfile-launcher +++ b/deployment/Dockerfile-launcher @@ -8,7 +8,7 @@ RUN \ --mount=type=bind,source=./deployment/repro-sources-list.sh,target=/usr/local/bin/repro-sources-list.sh \ repro-sources-list.sh && \ apt-get update && \ - apt-get install -y --no-install-recommends docker.io && \ + apt-get install -y --no-install-recommends docker.io libssl3 ca-certificates && \ : "Clean up for improving reproducibility" && \ rm -rf /var/log/* /var/cache/ldconfig/aux-cache From 104eaf7a652a80a1bee0e1d4e6fca8cf8ac5e758 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Sun, 15 Mar 2026 15:23:17 +0100 Subject: [PATCH 81/82] update compose file to use new image and binary with config file accepting --- tee_launcher/launcher_docker_compose_nontee.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tee_launcher/launcher_docker_compose_nontee.yaml b/tee_launcher/launcher_docker_compose_nontee.yaml index e3fa51311..fd7101b55 100644 --- a/tee_launcher/launcher_docker_compose_nontee.yaml +++ b/tee_launcher/launcher_docker_compose_nontee.yaml @@ -1,12 +1,12 @@ services: launcher: - image: nearone/mpc-launcher@sha256:85a4fa6d1eec05e8f43dba17d3f4368f89719a2a06b9e2051d84813c3f651068 + image: nearone/mpc-launcher@sha256:70e6d08328123b44406523af3147aebad37a9472839b8ebf0a303cecd7174fb0 container_name: "${LAUNCHER_IMAGE_NAME}" environment: - PLATFORM=NONTEE - DOCKER_CONTENT_TRUST=1 - - DEFAULT_IMAGE_DIGEST=sha256:9143bc98aaae3408c14cf4490d7b0e96a5a32d989ec865a0cf8dde391831a7a9 # 3.6.0 release + - DEFAULT_IMAGE_DIGEST=sha256:e7d1df7453b9bb9e89969f02a0ae59c3c2743cd895963e97a8e7666defbf4dab # latest main with config file support volumes: - /var/run/docker.sock:/var/run/docker.sock From de02ec04663ba856c091f4f256cf595db70040eb Mon Sep 
17 00:00:00 2001 From: Barakeinav1 Date: Sun, 15 Mar 2026 14:31:38 +0000 Subject: [PATCH 82/82] update localnet scripts to generate TOML config for Rust launcher The tee-launcher now expects a structured TOML config instead of flat key=value env files. Update deploy-tee-localnet.sh and single-node.sh to render TOML configs matching the new Config struct, and rename the old template to .bak. Co-Authored-By: Claude Opus 4.6 (1M context) --- localnet/tee/scripts/deploy-tee-localnet.sh | 20 ++++++- .../how-to-run-localnet-tee-setup-script.md | 2 +- .../tee/scripts/node.conf.localnet.toml.tpl | 53 +++++++++++++++++++ ...ocalnet.tpl => node.conf.localnet.tpl.bak} | 0 localnet/tee/scripts/single-node.sh | 19 ++++++- 5 files changed, 89 insertions(+), 5 deletions(-) create mode 100644 localnet/tee/scripts/node.conf.localnet.toml.tpl rename localnet/tee/scripts/{node.conf.localnet.tpl => node.conf.localnet.tpl.bak} (100%) diff --git a/localnet/tee/scripts/deploy-tee-localnet.sh b/localnet/tee/scripts/deploy-tee-localnet.sh index 6af59068f..f8369628b 100644 --- a/localnet/tee/scripts/deploy-tee-localnet.sh +++ b/localnet/tee/scripts/deploy-tee-localnet.sh @@ -144,11 +144,25 @@ MODE="${MODE:-testnet}" # testnet|localnet # templates live here (UPDATED for move to localnet/tee/scripts) ENV_TPL="$REPO_ROOT/localnet/tee/scripts/node.env.tpl" if [ "$MODE" = "localnet" ]; then - CONF_TPL="$REPO_ROOT/localnet/tee/scripts/node.conf.localnet.tpl" + CONF_TPL="$REPO_ROOT/localnet/tee/scripts/node.conf.localnet.toml.tpl" else CONF_TPL="$REPO_ROOT/localnet/tee/scripts/node.conf.tpl" fi +# Convert comma-separated "src:dst" port string to TOML inline table array entries. +# E.g. 
"8080:8080,24566:24566" -> " { src = 8080, dst = 8080 },\n { src = 24566, dst = 24566 }," +ports_to_toml() { + local ports="$1" result="" + IFS=',' read -ra pairs <<< "$ports" + for pair in "${pairs[@]}"; do + local src="${pair%%:*}" + local dst="${pair##*:}" + result+=" { src = $src, dst = $dst }, +" + done + echo -n "$result" +} + WORKDIR="/tmp/$USER/mpc_testnet_scale/$MPC_NETWORK_NAME" mkdir -p "$WORKDIR" @@ -713,7 +727,7 @@ render_node_files_range() { local env_out conf_out env_out="$WORKDIR/node${i}.env" - conf_out="$WORKDIR/node${i}.conf" + conf_out="$WORKDIR/node${i}.toml" export APP_NAME="$app_name" export VMM_RPC @@ -749,6 +763,8 @@ render_node_files_range() { export MPC_SECRET_STORE_KEY="$(printf '%032x' "$i")" export MPC_CONTRACT_ID="$MPC_CONTRACT_ACCOUNT" export PORTS="8080:8080,24566:24566,${future_port}:${future_port}" + export PORTS_TOML + PORTS_TOML="$(ports_to_toml "$PORTS")" export NEAR_BOOT_NODES="ed25519:BGa4WiBj43Mr66f9Ehf6swKtR6wZmWuwCsV3s4PSR3nx@${MACHINE_IP}:24566" envsubst <"$ENV_TPL" >"$env_out" diff --git a/localnet/tee/scripts/how-to-run-localnet-tee-setup-script.md b/localnet/tee/scripts/how-to-run-localnet-tee-setup-script.md index 88b0084ef..d0c001ebb 100644 --- a/localnet/tee/scripts/how-to-run-localnet-tee-setup-script.md +++ b/localnet/tee/scripts/how-to-run-localnet-tee-setup-script.md @@ -152,7 +152,7 @@ All generated files are stored under: ``` Important artifacts: -- `node{i}.conf`, `node{i}.env` +- `node{i}.toml`, `node{i}.env` - `keys.json` - `init_args.json` diff --git a/localnet/tee/scripts/node.conf.localnet.toml.tpl b/localnet/tee/scripts/node.conf.localnet.toml.tpl new file mode 100644 index 000000000..2c1b44ab0 --- /dev/null +++ b/localnet/tee/scripts/node.conf.localnet.toml.tpl @@ -0,0 +1,53 @@ +[launcher_config] +image_tags = ["${MPC_IMAGE_TAGS}"] +image_name = "${MPC_IMAGE_NAME}" +registry = "${MPC_REGISTRY}" +rpc_request_timeout_secs = 10 +rpc_request_interval_secs = 1 +rpc_max_attempts = 20 + 
+[docker_command_config.port_mappings] +ports = [ +${PORTS_TOML}] + +[mpc_config] +home_dir = "/data" + +[mpc_config.secrets] +secret_store_key_hex = "${MPC_SECRET_STORE_KEY}" +backup_encryption_key_hex = "0000000000000000000000000000000000000000000000000000000000000000" + +[mpc_config.tee.authority] +type = "local" + +[mpc_config.node] +my_near_account_id = "${MPC_ACCOUNT_ID}" +near_responder_account_id = "${MPC_ACCOUNT_ID}" +number_of_responder_keys = 1 +web_ui = "0.0.0.0:8080" +migration_web_ui = "0.0.0.0:8078" +cores = 4 + +[mpc_config.node.indexer] +validate_genesis = false +sync_mode = "Latest" +concurrency = 1 +mpc_contract_id = "${MPC_CONTRACT_ID}" +finality = "optimistic" + +[mpc_config.node.triple] +concurrency = 2 +desired_triples_to_buffer = 128 +timeout_sec = 60 +parallel_triple_generation_stagger_time_sec = 1 + +[mpc_config.node.presignature] +concurrency = 4 +desired_presignatures_to_buffer = 64 +timeout_sec = 60 + +[mpc_config.node.signature] +timeout_sec = 60 + +[mpc_config.node.ckd] +timeout_sec = 60 diff --git a/localnet/tee/scripts/node.conf.localnet.tpl b/localnet/tee/scripts/node.conf.localnet.tpl.bak similarity index 100% rename from localnet/tee/scripts/node.conf.localnet.tpl rename to localnet/tee/scripts/node.conf.localnet.tpl.bak diff --git a/localnet/tee/scripts/single-node.sh b/localnet/tee/scripts/single-node.sh index 13b62d135..c1a0eb435 100755 --- a/localnet/tee/scripts/single-node.sh +++ b/localnet/tee/scripts/single-node.sh @@ -132,13 +132,26 @@ DISK="${DISK:-500G}" REPO_ROOT="${REPO_ROOT:-$(git rev-parse --show-toplevel 2>/dev/null || pwd)}" TEE_LAUNCHER_DIR="$REPO_ROOT/tee_launcher" ENV_TPL="${ENV_TPL:-$REPO_ROOT/localnet/tee/scripts/node.env.tpl}" -CONF_TPL="${CONF_TPL:-$REPO_ROOT/localnet/tee/scripts/node.conf.localnet.tpl}" +CONF_TPL="${CONF_TPL:-$REPO_ROOT/localnet/tee/scripts/node.conf.localnet.toml.tpl}" + +# Convert comma-separated "src:dst" port string to TOML inline table array entries. 
+ports_to_toml() { + local ports="$1" result="" + IFS=',' read -ra pairs <<< "$ports" + for pair in "${pairs[@]}"; do + local src="${pair%%:*}" + local dst="${pair##*:}" + result+=" { src = $src, dst = $dst }, +" + done + echo -n "$result" +} WORKDIR="${WORKDIR:-$(mktemp -d /tmp/mpc_localnet_one_node.XXXXXX)}" mkdir -p "$WORKDIR" log "Work directory: $WORKDIR" ENV_OUT="$WORKDIR/node.env" -CONF_OUT="$WORKDIR/node.conf" +CONF_OUT="$WORKDIR/node.toml" PUBLIC_DATA_JSON_OUT="${PUBLIC_DATA_JSON_OUT:-$WORKDIR/public_data.json}" near_account_exists() { @@ -193,6 +206,8 @@ render_env_and_conf() { export MPC_CONTRACT_ID="$CONTRACT_ACCOUNT" export MPC_SECRET_STORE_KEY="${MPC_SECRET_STORE_KEY:-00000000000000000000000000000000}" export PORTS="${PORTS:-8080:8080,24566:24566,${FUTURE_PORT}:${FUTURE_PORT}}" + export PORTS_TOML + PORTS_TOML="$(ports_to_toml "$PORTS")" envsubst <"$ENV_TPL" >"$ENV_OUT" envsubst <"$CONF_TPL" >"$CONF_OUT"